repo_name stringlengths 6 100 | path stringlengths 4 294 | copies stringlengths 1 5 | size stringlengths 4 6 | content stringlengths 606 896k | license stringclasses 15
values |
|---|---|---|---|---|---|
azaghal/ansible | test/integration/targets/no_log/library/module.py | 61 | 1216 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2019 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible.module_utils.basic import AnsibleModule
def main():
    # Argument spec mixing no_log and plain options at the top level, inside a
    # nested dict suboption (two levels deep) and inside list-of-dict elements,
    # so each shape of no_log declaration is represented.
    module = AnsibleModule(
        argument_spec={
            'state': {},
            'secret': {'no_log': True},  # top-level no_log option
            'subopt_dict': {
                'type': 'dict',
                'options': {
                    'str_sub_opt1': {'no_log': True},  # no_log inside a dict suboption
                    'str_sub_opt2': {},
                    'nested_subopt': {
                        'type': 'dict',
                        'options': {
                            'n_subopt1': {'no_log': True},  # no_log two levels deep
                        }
                    }
                }
            },
            'subopt_list': {
                'type': 'list',
                'elements': 'dict',
                'options': {
                    'subopt1': {'no_log': True},  # no_log inside list elements
                    'subopt2': {},
                }
            }
        }
    )
    # No work is performed; the module exists so the spec above gets parsed.
    module.exit_json(msg='done')


if __name__ == '__main__':
    main()
| gpl-3.0 |
gregdek/ansible | test/units/module_utils/common/collections.py | 27 | 4881 | # -*- coding: utf-8 -*-
# Copyright (c), Sviatoslav Sydorenko <ssydoren@redhat.com> 2018
# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause)
"""Test low-level utility functions from ``module_utils.common.collections``."""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
from ansible.module_utils.common._collections_compat import Sequence
from ansible.module_utils.common.collections import ImmutableDict, is_iterable, is_sequence
class SeqStub:
    """Stub emulating a sequence type.

    The class implements no sequence behaviour itself; it is registered as a
    virtual subclass of ``Sequence`` below, so ``isinstance``/``issubclass``
    checks succeed without any inherited implementation.

    >>> from collections.abc import Sequence
    >>> assert issubclass(SeqStub, Sequence)
    >>> assert isinstance(SeqStub(), Sequence)
    """


# ABC virtual-subclass registration (no inheritance involved).
Sequence.register(SeqStub)
class IteratorStub:
    """An iterator that is exhausted from the very first advance.

    It provides ``__next__`` but deliberately no ``__iter__``, which makes it
    useful for checking that code distinguishes iterators from iterables.
    """

    def __next__(self):
        # Signal immediate exhaustion.
        raise StopIteration()
class IterableStub:
    """An iterable whose iterator yields nothing.

    ``__iter__`` returns a fresh, already-exhausted ``IteratorStub``.
    """

    def __iter__(self):
        return IteratorStub()
# Text fixtures: a few unicode strings plus their UTF-8 encoded byte variants.
TEST_STRINGS = u'he', u'Україна', u'Česká republika'
TEST_STRINGS = TEST_STRINGS + tuple(s.encode('utf-8') for s in TEST_STRINGS)

# Values that must NOT be classified as sequences: mappings, plain objects,
# sets, scalars, and (by default) text/bytes.
TEST_ITEMS_NON_SEQUENCES = (
    {}, object(), frozenset(),
    4, 0.,
) + TEST_STRINGS

# Values that MUST be classified as sequences.
TEST_ITEMS_SEQUENCES = (
    [], (),
    SeqStub(),
)
TEST_ITEMS_SEQUENCES = TEST_ITEMS_SEQUENCES + (
    # Iterable effectively containing nested random data:
    TEST_ITEMS_NON_SEQUENCES,
)
@pytest.mark.parametrize('sequence_input', TEST_ITEMS_SEQUENCES)
def test_sequence_positive(sequence_input):
    """Test that non-string item sequences are identified correctly."""
    assert is_sequence(sequence_input)
    # include_strings must not affect genuinely sequence-typed input.
    assert is_sequence(sequence_input, include_strings=False)
@pytest.mark.parametrize('non_sequence_input', TEST_ITEMS_NON_SEQUENCES)
def test_sequence_negative(non_sequence_input):
    """Test that non-sequences are identified correctly."""
    # Mappings, sets, scalars and strings are all rejected by default.
    assert not is_sequence(non_sequence_input)
@pytest.mark.parametrize('string_input', TEST_STRINGS)
def test_sequence_string_types_with_strings(string_input):
    """Test that ``is_sequence`` can separate string and non-string."""
    # With include_strings=True, text and bytes count as sequences.
    assert is_sequence(string_input, include_strings=True)
@pytest.mark.parametrize('string_input', TEST_STRINGS)
def test_sequence_string_types_without_strings(string_input):
    """Test that ``is_sequence`` can separate string and non-string."""
    # With include_strings=False, text and bytes are excluded.
    assert not is_sequence(string_input, include_strings=False)
@pytest.mark.parametrize(
    'seq',
    ([], (), {}, set(), frozenset(), IterableStub()),
)
def test_iterable_positive(seq):
    """Builtin containers and objects providing ``__iter__`` are iterable."""
    assert is_iterable(seq)
@pytest.mark.parametrize(
    'seq', (IteratorStub(), object(), 5, 9.)
)
def test_iterable_negative(seq):
    """Objects without ``__iter__`` (including a bare iterator) are not iterable."""
    assert not is_iterable(seq)
@pytest.mark.parametrize('string_input', TEST_STRINGS)
def test_iterable_including_strings(string_input):
    """Strings are iterable when include_strings=True."""
    assert is_iterable(string_input, include_strings=True)
@pytest.mark.parametrize('string_input', TEST_STRINGS)
def test_iterable_excluding_strings(string_input):
    """Strings are not iterable when include_strings=False."""
    assert not is_iterable(string_input, include_strings=False)
class TestImmutableDict:
    """Behavioural tests for ``ImmutableDict``."""

    def test_scalar(self):
        imdict = ImmutableDict({1: 2})
        assert imdict[1] == 2

    def test_string(self):
        imdict = ImmutableDict({u'café': u'くらとみ'})
        assert imdict[u'café'] == u'くらとみ'

    def test_container(self):
        # Keys may be any hashable; values may themselves be mutable.
        imdict = ImmutableDict({(1, 2): ['1', '2']})
        assert imdict[(1, 2)] == ['1', '2']

    def test_from_tuples(self):
        imdict = ImmutableDict((('a', 1), ('b', 2)))
        assert frozenset(imdict.items()) == frozenset((('a', 1), ('b', 2)))

    def test_from_kwargs(self):
        imdict = ImmutableDict(a=1, b=2)
        assert frozenset(imdict.items()) == frozenset((('a', 1), ('b', 2)))

    def test_immutable(self):
        imdict = ImmutableDict({1: 2})

        # Neither overwriting an existing key nor adding a new one is allowed.
        with pytest.raises(TypeError) as exc_info:
            imdict[1] = 3
        assert exc_info.value.args[0] == "'ImmutableDict' object does not support item assignment"

        with pytest.raises(TypeError) as exc_info:
            imdict[5] = 3
        assert exc_info.value.args[0] == "'ImmutableDict' object does not support item assignment"

    def test_hashable(self):
        # ImmutableDict is hashable when all of its values are hashable
        imdict = ImmutableDict({u'café': u'くらとみ'})
        assert hash(imdict)

    def test_nonhashable(self):
        # ImmutableDict is unhashable when one of its values is unhashable
        imdict = ImmutableDict({u'café': u'くらとみ', 1: [1, 2]})
        with pytest.raises(TypeError) as exc_info:
            hash(imdict)
        assert exc_info.value.args[0] == "unhashable type: 'list'"

    def test_len(self):
        imdict = ImmutableDict({1: 2, 'a': 'b'})
        assert len(imdict) == 2

    def test_repr(self):
        imdict = ImmutableDict({1: 2, 'a': 'b'})
        assert repr(imdict) == "ImmutableDict({1: 2, 'a': 'b'})"
| gpl-3.0 |
Lohamon/gmail-backup-com | svc/scripting/extractors.py | 26 | 11415 | # -*- coding: utf-8 -*-
# Copyright (C) 2008 Jan Svec and Filip Jurcicek
#
# YOU USE THIS TOOL ON YOUR OWN RISK!
#
# email: info@gmail-backup.com
#
#
# Disclaimer of Warranty
# ----------------------
#
# Unless required by applicable law or agreed to in writing, licensor provides
# this tool (and each contributor provides its contributions) on an "AS IS"
# BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied, including, without limitation, any warranties or conditions of
# TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR
# PURPOSE. You are solely responsible for determining the appropriateness of
# using this work and assume any risks associated with your exercise of
# permissions under this license.
"""Extraktory sloužící třídě `Script`
Všechny extraktory musí implementovat rozhraní `Extractor`. Je-li jim jako
zdroj pomocí `Extractor.setSource` předána hodnota ``None``, musí být použit
implicitní zdroj. Extraktory vytváří metoda `Script.createExtractors`, proto po
případném přidání extraktoru nezapomeňte tuto metodu modifikovat.
.. image:: ../uml6.png
"""
__docformat__ = 'restructuredtext cs'
import sys
from os.path import expanduser, isfile
import os
from svc.utils import issequence, seqIntoDict
from svc.scripting import Extractor, Multiple, EnvVar, JoinSources
from svc.scripting.conversions import Flag
from getopt import gnu_getopt as getopt
class CmdlineExtractor(Extractor):
    """Extractor that reads options from the command line (``sys.argv``).

    ``pos_opts`` names options that are filled from positional arguments
    instead of ``--long`` switches; ``short_opts`` maps single-letter
    switches to long option names.  Long option names are translated
    between their command-line form (dashes) and Python form (underscores).

    NOTE(review): the code reads ``self.posOpts``/``self.shortOpts``
    properties that are not declared in this class - presumably synthesised
    from the getters/setters by the ``Extractor`` machinery; confirm in
    svc.scripting.
    """

    def __init__(self, short_opts={}, pos_opts=[]):
        # NOTE(review): mutable default arguments are shared between calls;
        # harmless only as long as they are never mutated in place.
        self.setSource(None)
        self.setPosOpts(pos_opts)
        self.setShortOpts(short_opts)

    def getPosOpts(self):
        return self._posOpts

    def setPosOpts(self, pos_opts):
        # Validate the positional-option layout before storing it.
        seqIntoDict([], pos_opts)
        self._posOpts = pos_opts

    def getShortOpts(self):
        return self._shortOpts

    def setShortOpts(self, short_opts):
        for key, value in short_opts.iteritems():
            if len(key) != 1:
                raise ValueError("Bad key in short_opts dictionary: %r" % key)
            if value in self.posOpts:
                raise ValueError("Positional option cannot have short form: %r" % key)
        self._shortOpts = short_opts

    def getSource(self):
        return self._source

    def setSource(self, source):
        self._source = source

    def getSourceName(self):
        return 'argv'

    def extract(self, state):
        """Parse the source (default ``sys.argv[1:]``) into ``state``."""
        source = self.getSource()
        if source is None:
            source = sys.argv[1:]
        short = self._getoptShort()
        long = self._getoptLong()
        options, positional = getopt(source, short, long)
        self._extractGetoptOpt(state, options)
        self._extractGetoptPos(state, positional)

    def _extractGetoptOpt(self, state, options):
        # Record each parsed switch; flags (options without an argument)
        # get the literal value 'true'.
        with_arg = self._optionsWithArg()
        source_name = self.getSourceName()
        for option, value in options:
            if option.startswith('--'):
                opt_name = option[2:]
            else:
                opt_name = self._shortOpts[option[1:]]
            # Map back from the command-line form into underscored form
            opt_name = opt_name.replace('-', '_')
            if opt_name in with_arg:
                state.append( (opt_name, value, source_name, option) )
            else:
                state.append( (opt_name, 'true', source_name, option) )

    def _extractGetoptPos(self, state, positional):
        # Distribute the leftover positional arguments into named options.
        source_name = self.getSourceName()
        pos_opts = self.posOpts
        d = seqIntoDict(positional, pos_opts)
        state.addObjects(d, source_name, positional)

    def setManager(self, manager):
        self._manager = manager

    def _optionsWithArg(self):
        # Every option whose conversion is not Flag takes an argument.
        m = self._manager
        return set(m.paramToOption(p) for p in m.params()
                   if m.conversion(p)[0] != Flag)

    def _getoptShort(self):
        """Build the getopt short-option string, e.g. ``'ab:c'``."""
        with_arg = self._optionsWithArg()
        ret = []
        for short, long in self._shortOpts.iteritems():
            if long in self.posOpts:
                # Positional options cannot have short-option form
                continue
            ret.append(short)
            if long in with_arg:
                ret.append(':')
        return ''.join(ret)

    def _getoptLong(self):
        """Build the getopt long-option list, e.g. ``['foo=', 'bar']``."""
        with_arg = self._optionsWithArg()
        ret = []
        for o in self._manager.options():
            if o in self.posOpts:
                # Positional options don't have long-option form
                continue
            if o in with_arg:
                o += '='
            # Map into command-line form (i.e. '_' maps to '-')
            o = o.replace('_', '-')
            ret.append(o)
        return ret

    def getHelpCmdline(self):
        """Return the positional-arguments portion of the usage line."""
        def mapEllipsis(item):
            if item is not Ellipsis:
                return str(item).title()
            else:
                return '...'
        # Fix: removed a dead local ('opts') that aliased self._posOpts
        # without ever being used.
        return ' '.join(mapEllipsis(i) for i in self._posOpts)

    def getHelpForOptions(self):
        """Return a mapping option -> left-hand help column text."""
        ret = {}
        reverse_short = dict((item, key) for (key, item) in self.shortOpts.iteritems())
        for o in self._manager.options():
            help = []
            if o in reverse_short:
                help.append('-%s, ' % reverse_short[o])
            else:
                help.append('    ')
            if o in self._posOpts:
                help.append(o.title())
            else:
                help.append('--%s' % o.replace('_', '-'))
            ret[o] = ''.join(help)
        return ret
class PyFileExtractor(Extractor):
    # Extractor that reads options by executing Python configuration files
    # and collecting the variables they define.

    def __init__(self, globals=None, app_source=None):
        self.setSource(None)
        self.setAppSource(app_source)
        if globals is None:
            globals = {}
        self.setGlobals(globals)
        # Files already executed during the current extract() run.
        self._processedFiles = set()

    def getSource(self):
        return self._source

    def setSource(self, source):
        self._source = source

    def getAppSource(self):
        return self._appSource

    def setAppSource(self, source):
        self._appSource = source

    def getGlobals(self):
        return self._globals

    def setGlobals(self, globals):
        self._globals = globals

    def getSourceName(self):
        return 'pyfiles'

    def _prepareSource(self, source):
        """Normalise the `source` file list.

        If `source` is None, return ``[]``; if it is not a sequence, wrap it
        as ``[source]``.  The tilde character ``~`` is expanded to the
        current user's home directory, files that do not exist are dropped,
        and files already processed (see `processedFiles`) are removed.

        :See:
            processedFiles
        """
        if source is None:
            source = []
        elif not issequence(source):
            source = [source]
        source = [f for f in (expanduser(f) for f in source) if isfile(f)]
        source = [f for f in source if f not in self.processedFiles]
        return source

    def _extractFromFile(self, pyfile):
        # Execute the file and turn every local it defines into option items.
        globals = self.getGlobals()
        locals = {}
        self._processedFiles.add(pyfile)
        # NOTE(review): execfile() and dict.iteritems() are Python-2-only.
        execfile(pyfile, globals, locals)
        ret = []
        for opt_name, value in locals.iteritems():
            if isinstance(value, (list, tuple)):
                # If option has assigned the list- or tuple-value, insert
                # distinct items from this sequence
                for item in value:
                    ret.append( (opt_name, item, pyfile, '') )
            else:
                ret.append( (opt_name, value, pyfile, '') )
        return ret

    def extract(self, state):
        # Process one file per pass, recomputing the candidate list each
        # time; already-processed files are filtered out by _prepareSource,
        # which guarantees the loop terminates.
        self._processedFiles.clear()
        while True:
            source = self._prepareSource(self.getSource()) \
                   + self._prepareSource(self.getAppSource())
            if not source:
                break
            state.extend(self._extractFromFile(source[0]))

    def getProcessedFiles(self):
        return self._processedFiles

    def setManager(self, manager):
        self._manager = manager
class EnvironExtractor(Extractor):
    # Extractor reading EnvVar-marked options from environment variables.

    def __init__(self, env_prefix=None, split_char=None):
        self.setSource(None)
        self.setEnvPrefix(env_prefix)
        if split_char is None:
            # Default to the platform's path-list separator.
            if os.name == 'nt':
                self.setSplitChar(';')
            else:
                self.setSplitChar(':')
        else:
            self.setSplitChar(split_char)

    def getSource(self):
        return self._source

    def setSource(self, source):
        self._source = source

    def getEnvPrefix(self):
        return self._envPrefix

    def setEnvPrefix(self, prefix):
        self._envPrefix = prefix

    def getSplitChar(self):
        return self._splitChar

    def setSplitChar(self, s_char):
        self._splitChar = s_char

    def getSourceName(self):
        return 'env'

    def setManager(self, manager):
        self._manager = manager

    def extract(self, state):
        # Look up every EnvVar-marked option under each configured prefix;
        # multi-valued options are split on the configured separator.
        source = self._source
        if source is None:
            source = os.environ
        env_vars = self._manager.optionsWithSpecifier(EnvVar)
        multiple_vars = self._manager.optionsWithSpecifier(Multiple)
        multiple_vars |= self._manager.optionsWithSpecifier(JoinSources)  # Union of sets
        prefix = self.getEnvPrefix()
        # Normalise the prefix into a list of plain strings.
        if not issequence(prefix):
            prefix = [prefix]
        if not prefix:
            prefix = [None]
        prefix = [p or '' for p in prefix]
        split_char = self.getSplitChar()
        for p in prefix:
            for var in env_vars:
                # Environment variable names are matched upper-cased.
                whole_var = (p+var).upper()
                if whole_var in source:
                    value = source[whole_var]
                    if var in multiple_vars:
                        for item in value.split(split_char):
                            state.append( (var, item, 'env:%s'%p, whole_var) )
                    else:
                        state.append( (var, value, 'env:%s'%p, whole_var) )
class CmdPosOptsExtractor(Extractor):
    # Extractor distributing per-command positional arguments once the
    # command name has been determined by the owning script.

    def __init__(self, exscript):
        super(CmdPosOptsExtractor, self).__init__()
        self._exscript = exscript

    def getSource(self):
        # This extractor has no external source of its own.
        return None

    def setSource(self, source):
        pass

    def getSourceName(self):
        return 'cmdPosOpts'

    def getPosOpts(self, state):
        """Return the raw positional arguments stored under ``__premain__``."""
        cmdPosOption = '__premain__._command_pos_opts'
        enabled = state.enabled
        try:
            # Temporarily narrow the state to just the one hidden option.
            state.disableAll()
            state.enable([cmdPosOption])
            try:
                objects = state.getObjects()
                return objects['__premain__']['_command_pos_opts']
            except KeyError:
                return []
        finally:
            # Restore the caller's enabled-options set.
            state.disableExcept(enabled)

    def extract(self, state):
        try:
            command = self._exscript.getCommandValue().pop()
        except ValueError:
            # Command wasn't specified
            # NOTE(review): list.pop() raises IndexError and set.pop()
            # raises KeyError when empty - confirm that getCommandValue()
            # really signals "no command" with ValueError.
            return
        if command in self._exscript.cmdPosOpts:
            format = self._exscript.cmdPosOpts[command]
            pos_opts = self.getPosOpts(state)
            d = seqIntoDict(pos_opts, format)
            self._exscript.state.addObjects(d, 'cmdPosOpts')

    def setManager(self, manager):
        pass

    def getHelpForOptions(self):
        return {}

    def getHelpForExtractor(self):
        return ''
| gpl-3.0 |
tvalacarta/tvalacarta | python/main-classic/lib/youtube_dl/extractor/drtv.py | 6 | 13802 | # coding: utf-8
from __future__ import unicode_literals
import binascii
import hashlib
import re
from .common import InfoExtractor
from ..aes import aes_cbc_decrypt
from ..compat import compat_urllib_parse_unquote
from ..utils import (
bytes_to_intlist,
ExtractorError,
int_or_none,
intlist_to_bytes,
float_or_none,
mimetype2ext,
str_or_none,
try_get,
unified_timestamp,
update_url_query,
url_or_none,
)
class DRTVIE(InfoExtractor):
    """Extractor for on-demand videos on dr.dk / dr-massive.com (Danish DR)."""

    _VALID_URL = r'''(?x)
                    https?://
                        (?:
                            (?:www\.)?dr\.dk/(?:tv/se|nyheder|radio(?:/ondemand)?)/(?:[^/]+/)*|
                            (?:www\.)?(?:dr\.dk|dr-massive\.com)/drtv/(?:se|episode)/
                        )
                        (?P<id>[\da-z_-]+)
                    '''
    _GEO_BYPASS = False
    _GEO_COUNTRIES = ['DK']
    IE_NAME = 'drtv'
    _TESTS = [{
        'url': 'https://www.dr.dk/tv/se/boern/ultra/klassen-ultra/klassen-darlig-taber-10',
        'md5': '25e659cccc9a2ed956110a299fdf5983',
        'info_dict': {
            'id': 'klassen-darlig-taber-10',
            'ext': 'mp4',
            'title': 'Klassen - Dårlig taber (10)',
            'description': 'md5:815fe1b7fa656ed80580f31e8b3c79aa',
            'timestamp': 1539085800,
            'upload_date': '20181009',
            'duration': 606.84,
            'series': 'Klassen',
            'season': 'Klassen I',
            'season_number': 1,
            'season_id': 'urn:dr:mu:bundle:57d7e8216187a4031cfd6f6b',
            'episode': 'Episode 10',
            'episode_number': 10,
            'release_year': 2016,
        },
        'expected_warnings': ['Unable to download f4m manifest'],
    }, {
        # embed
        'url': 'https://www.dr.dk/nyheder/indland/live-christianias-rydning-af-pusher-street-er-i-gang',
        'info_dict': {
            'id': 'urn:dr:mu:programcard:57c926176187a50a9c6e83c6',
            'ext': 'mp4',
            'title': 'christiania pusher street ryddes drdkrjpo',
            'description': 'md5:2a71898b15057e9b97334f61d04e6eb5',
            'timestamp': 1472800279,
            'upload_date': '20160902',
            'duration': 131.4,
        },
        'params': {
            'skip_download': True,
        },
        'expected_warnings': ['Unable to download f4m manifest'],
    }, {
        # with SignLanguage formats
        'url': 'https://www.dr.dk/tv/se/historien-om-danmark/-/historien-om-danmark-stenalder',
        'info_dict': {
            'id': 'historien-om-danmark-stenalder',
            'ext': 'mp4',
            'title': 'Historien om Danmark: Stenalder',
            'description': 'md5:8c66dcbc1669bbc6f873879880f37f2a',
            'timestamp': 1546628400,
            'upload_date': '20190104',
            'duration': 3502.56,
            'formats': 'mincount:20',
        },
        'params': {
            'skip_download': True,
        },
    }, {
        'url': 'https://www.dr.dk/radio/p4kbh/regionale-nyheder-kh4/p4-nyheder-2019-06-26-17-30-9',
        'only_matching': True,
    }, {
        'url': 'https://www.dr.dk/drtv/se/bonderoeven_71769',
        'info_dict': {
            'id': '00951930010',
            'ext': 'mp4',
            'title': 'Bonderøven (1:8)',
            'description': 'md5:3cf18fc0d3b205745d4505f896af8121',
            'timestamp': 1546542000,
            'upload_date': '20190103',
            'duration': 2576.6,
        },
        'params': {
            'skip_download': True,
        },
    }, {
        'url': 'https://www.dr.dk/drtv/episode/bonderoeven_71769',
        'only_matching': True,
    }, {
        'url': 'https://dr-massive.com/drtv/se/bonderoeven_71769',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        video_id = self._match_id(url)

        webpage = self._download_webpage(url, video_id)

        if '>Programmet er ikke længere tilgængeligt' in webpage:
            raise ExtractorError(
                'Video %s is not available' % video_id, expected=True)

        # Try to locate the programcard id embedded in the page markup,
        # falling back to a URN found anywhere in the page.
        video_id = self._search_regex(
            (r'data-(?:material-identifier|episode-slug)="([^"]+)"',
             r'data-resource="[^>"]+mu/programcard/expanded/([^"]+)"'),
            webpage, 'video id', default=None)

        if not video_id:
            video_id = self._search_regex(
                r'(urn(?:%3A|:)dr(?:%3A|:)mu(?:%3A|:)programcard(?:%3A|:)[\da-f]+)',
                webpage, 'urn', default=None)
            if video_id:
                video_id = compat_urllib_parse_unquote(video_id)

        _PROGRAMCARD_BASE = 'https://www.dr.dk/mu-online/api/1.4/programcard'
        query = {'expanded': 'true'}

        if video_id:
            programcard_url = '%s/%s' % (_PROGRAMCARD_BASE, video_id)
        else:
            # No id in the markup: pull the production number out of the
            # embedded JSON page cache instead.
            programcard_url = _PROGRAMCARD_BASE
            page = self._parse_json(
                self._search_regex(
                    r'data\s*=\s*({.+?})\s*(?:;|</script)', webpage,
                    'data'), '1')['cache']['page']
            page = page[list(page.keys())[0]]
            item = try_get(
                page, (lambda x: x['item'], lambda x: x['entries'][0]['item']),
                dict)
            video_id = item['customId'].split(':')[-1]
            query['productionnumber'] = video_id

        data = self._download_json(
            programcard_url, video_id, 'Downloading video JSON', query=query)

        # Strip the site suffix (" | TV | DR" / " | DRTV") from og:title.
        title = str_or_none(data.get('Title')) or re.sub(
            r'\s*\|\s*(?:TV\s*\|\s*DR|DRTV)$', '',
            self._og_search_title(webpage))
        description = self._og_search_description(
            webpage, default=None) or data.get('Description')

        timestamp = unified_timestamp(
            data.get('PrimaryBroadcastStartTime') or data.get('SortDateTime'))

        thumbnail = None
        duration = None

        restricted_to_denmark = False

        formats = []
        subtitles = {}

        # Collect primary and secondary assets into one list.
        assets = []
        primary_asset = data.get('PrimaryAsset')
        if isinstance(primary_asset, dict):
            assets.append(primary_asset)
        secondary_assets = data.get('SecondaryAssets')
        if isinstance(secondary_assets, list):
            for secondary_asset in secondary_assets:
                if isinstance(secondary_asset, dict):
                    assets.append(secondary_asset)

        def hex_to_bytes(hex):
            return binascii.a2b_hex(hex.encode('ascii'))

        def decrypt_uri(e):
            # EncryptedUri layout: 2 chars skipped, 8 hex chars giving the
            # ciphertext length n, n hex chars of AES-CBC data, then a
            # trailing hex string used as IV and mixed into the key.
            n = int(e[2:10], 16)
            a = e[10 + n:]
            data = bytes_to_intlist(hex_to_bytes(e[10:10 + n]))
            key = bytes_to_intlist(hashlib.sha256(
                ('%s:sRBzYNXBzkKgnjj8pGtkACch' % a).encode('utf-8')).digest())
            iv = bytes_to_intlist(hex_to_bytes(a))
            decrypted = aes_cbc_decrypt(data, key, iv)
            # Drop the PKCS#7-style padding and any query string.
            return intlist_to_bytes(
                decrypted[:-decrypted[-1]]).decode('utf-8').split('?')[0]

        for asset in assets:
            kind = asset.get('Kind')
            if kind == 'Image':
                thumbnail = url_or_none(asset.get('Uri'))
            elif kind in ('VideoResource', 'AudioResource'):
                duration = float_or_none(asset.get('DurationInMilliseconds'), 1000)
                restricted_to_denmark = asset.get('RestrictedToDenmark')
                asset_target = asset.get('Target')
                for link in asset.get('Links', []):
                    uri = link.get('Uri')
                    if not uri:
                        encrypted_uri = link.get('EncryptedUri')
                        if not encrypted_uri:
                            continue
                        try:
                            uri = decrypt_uri(encrypted_uri)
                        except Exception:
                            self.report_warning(
                                'Unable to decrypt EncryptedUri', video_id)
                            continue
                    uri = url_or_none(uri)
                    if not uri:
                        continue
                    target = link.get('Target')
                    format_id = target or ''
                    # Deprioritise accessibility variants; prefer 'Default'.
                    if asset_target in ('SpokenSubtitles', 'SignLanguage', 'VisuallyInterpreted'):
                        preference = -1
                        format_id += '-%s' % asset_target
                    elif asset_target == 'Default':
                        preference = 1
                    else:
                        preference = None
                    if target == 'HDS':
                        f4m_formats = self._extract_f4m_formats(
                            uri + '?hdcore=3.3.0&plugin=aasp-3.3.0.99.43',
                            video_id, preference, f4m_id=format_id, fatal=False)
                        if kind == 'AudioResource':
                            for f in f4m_formats:
                                f['vcodec'] = 'none'
                        formats.extend(f4m_formats)
                    elif target == 'HLS':
                        formats.extend(self._extract_m3u8_formats(
                            uri, video_id, 'mp4', entry_protocol='m3u8_native',
                            preference=preference, m3u8_id=format_id,
                            fatal=False))
                    else:
                        bitrate = link.get('Bitrate')
                        if bitrate:
                            format_id += '-%s' % bitrate
                        formats.append({
                            'url': uri,
                            'format_id': format_id,
                            'tbr': int_or_none(bitrate),
                            'ext': link.get('FileFormat'),
                            'vcodec': 'none' if kind == 'AudioResource' else None,
                            'preference': preference,
                        })
                subtitles_list = asset.get('SubtitlesList') or asset.get('Subtitleslist')
                if isinstance(subtitles_list, list):
                    LANGS = {
                        'Danish': 'da',
                    }
                    for subs in subtitles_list:
                        if not isinstance(subs, dict):
                            continue
                        sub_uri = url_or_none(subs.get('Uri'))
                        if not sub_uri:
                            continue
                        lang = subs.get('Language') or 'da'
                        subtitles.setdefault(LANGS.get(lang, lang), []).append({
                            'url': sub_uri,
                            'ext': mimetype2ext(subs.get('MimeType')) or 'vtt'
                        })

        if not formats and restricted_to_denmark:
            self.raise_geo_restricted(
                'Unfortunately, DR is not allowed to show this program outside Denmark.',
                countries=self._GEO_COUNTRIES)

        self._sort_formats(formats)

        return {
            'id': video_id,
            'title': title,
            'description': description,
            'thumbnail': thumbnail,
            'timestamp': timestamp,
            'duration': duration,
            'formats': formats,
            'subtitles': subtitles,
            'series': str_or_none(data.get('SeriesTitle')),
            'season': str_or_none(data.get('SeasonTitle')),
            'season_number': int_or_none(data.get('SeasonNumber')),
            'season_id': str_or_none(data.get('SeasonUrn')),
            'episode': str_or_none(data.get('EpisodeTitle')),
            'episode_number': int_or_none(data.get('EpisodeNumber')),
            'release_year': int_or_none(data.get('ProductionYear')),
        }
class DRTVLiveIE(InfoExtractor):
    """Extractor for DR live TV channels (dr.dk/tv/live/...)."""

    IE_NAME = 'drtv:live'
    _VALID_URL = r'https?://(?:www\.)?dr\.dk/(?:tv|TV)/live/(?P<id>[\da-z-]+)'
    _GEO_COUNTRIES = ['DK']
    _TEST = {
        'url': 'https://www.dr.dk/tv/live/dr1',
        'info_dict': {
            'id': 'dr1',
            'ext': 'mp4',
            'title': 're:^DR1 [0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}$',
        },
        'params': {
            # m3u8 download
            'skip_download': True,
        },
    }

    def _real_extract(self, url):
        channel_id = self._match_id(url)
        channel_data = self._download_json(
            'https://www.dr.dk/mu-online/api/1.0/channel/' + channel_id,
            channel_id)
        title = self._live_title(channel_data['Title'])

        formats = []
        # Each streaming server advertises one protocol (HLS/HDS) and a set
        # of quality variants, each with one or more stream paths.
        for streaming_server in channel_data.get('StreamingServers', []):
            server = streaming_server.get('Server')
            if not server:
                continue
            link_type = streaming_server.get('LinkType')
            for quality in streaming_server.get('Qualities', []):
                for stream in quality.get('Streams', []):
                    stream_path = stream.get('Stream')
                    if not stream_path:
                        continue
                    stream_url = update_url_query(
                        '%s/%s' % (server, stream_path), {'b': ''})
                    if link_type == 'HLS':
                        formats.extend(self._extract_m3u8_formats(
                            stream_url, channel_id, 'mp4',
                            m3u8_id=link_type, fatal=False, live=True))
                    elif link_type == 'HDS':
                        formats.extend(self._extract_f4m_formats(update_url_query(
                            '%s/%s' % (server, stream_path), {'hdcore': '3.7.0'}),
                            channel_id, f4m_id=link_type, fatal=False))
        self._sort_formats(formats)

        return {
            'id': channel_id,
            'title': title,
            'thumbnail': channel_data.get('PrimaryImageUri'),
            'formats': formats,
            'is_live': True,
        }
| gpl-3.0 |
cydenix/OpenGLCffi | OpenGLCffi/EGL/EXT/EXT/output_base.py | 1 | 1086 | from OpenGLCffi.EGL import params
@params(api='egl', prms=['dpy', 'attrib_list', 'layers', 'max_layers', 'num_layers'])
def eglGetOutputLayersEXT(dpy, attrib_list, layers, max_layers, num_layers):
    # Empty stub: the actual FFI call is presumably produced by the @params
    # decorator - confirm against OpenGLCffi's decorator implementation.
    pass
@params(api='egl', prms=['dpy', 'attrib_list', 'ports', 'max_ports', 'num_ports'])
def eglGetOutputPortsEXT(dpy, attrib_list, ports, max_ports, num_ports):
    # Empty stub; dispatch appears to be handled by the @params decorator.
    pass
@params(api='egl', prms=['dpy', 'layer', 'attribute', 'value'])
def eglOutputLayerAttribEXT(dpy, layer, attribute):
    # NOTE(review): prms lists 'value' but the signature omits it, even though
    # EGL's eglOutputLayerAttribEXT takes value as an INPUT argument - verify
    # how the @params decorator resolves this mismatch.
    pass
@params(api='egl', prms=['dpy', 'layer', 'attribute', 'value'])
def eglQueryOutputLayerAttribEXT(dpy, layer, attribute):
    # 'value' appears only in prms - presumably treated as an out-parameter
    # by the @params machinery; confirm in OpenGLCffi core.
    pass
@params(api='egl', prms=['dpy', 'layer', 'name'])
def eglQueryOutputLayerStringEXT(dpy, layer, name):
    # Empty stub; dispatch appears to be handled by the @params decorator.
    pass
@params(api='egl', prms=['dpy', 'port', 'attribute', 'value'])
def eglOutputPortAttribEXT(dpy, port, attribute):
    # NOTE(review): prms lists 'value' but the signature omits it, even though
    # EGL's eglOutputPortAttribEXT takes value as an INPUT argument - verify
    # how the @params decorator resolves this mismatch.
    pass
@params(api='egl', prms=['dpy', 'port', 'attribute', 'value'])
def eglQueryOutputPortAttribEXT(dpy, port, attribute):
    # 'value' appears only in prms - presumably treated as an out-parameter
    # by the @params machinery; confirm in OpenGLCffi core.
    pass
@params(api='egl', prms=['dpy', 'port', 'name'])
def eglQueryOutputPortStringEXT(dpy, port, name):
    # Empty stub; dispatch appears to be handled by the @params decorator.
    pass
| mit |
adam111316/SickGear | lib/html5lib/sanitizer.py | 805 | 16428 | from __future__ import absolute_import, division, unicode_literals
import re
from xml.sax.saxutils import escape, unescape
from .tokenizer import HTMLTokenizer
from .constants import tokenTypes
class HTMLSanitizerMixin(object):
    """ sanitization of XHTML+MathML+SVG and of inline style attributes."""

    # --- Whitelists of element names (HTML, MathML, SVG) --------------------
    acceptable_elements = ['a', 'abbr', 'acronym', 'address', 'area',
                           'article', 'aside', 'audio', 'b', 'big', 'blockquote', 'br', 'button',
                           'canvas', 'caption', 'center', 'cite', 'code', 'col', 'colgroup',
                           'command', 'datagrid', 'datalist', 'dd', 'del', 'details', 'dfn',
                           'dialog', 'dir', 'div', 'dl', 'dt', 'em', 'event-source', 'fieldset',
                           'figcaption', 'figure', 'footer', 'font', 'form', 'header', 'h1',
                           'h2', 'h3', 'h4', 'h5', 'h6', 'hr', 'i', 'img', 'input', 'ins',
                           'keygen', 'kbd', 'label', 'legend', 'li', 'm', 'map', 'menu', 'meter',
                           'multicol', 'nav', 'nextid', 'ol', 'output', 'optgroup', 'option',
                           'p', 'pre', 'progress', 'q', 's', 'samp', 'section', 'select',
                           'small', 'sound', 'source', 'spacer', 'span', 'strike', 'strong',
                           'sub', 'sup', 'table', 'tbody', 'td', 'textarea', 'time', 'tfoot',
                           'th', 'thead', 'tr', 'tt', 'u', 'ul', 'var', 'video']

    mathml_elements = ['maction', 'math', 'merror', 'mfrac', 'mi',
                       'mmultiscripts', 'mn', 'mo', 'mover', 'mpadded', 'mphantom',
                       'mprescripts', 'mroot', 'mrow', 'mspace', 'msqrt', 'mstyle', 'msub',
                       'msubsup', 'msup', 'mtable', 'mtd', 'mtext', 'mtr', 'munder',
                       'munderover', 'none']

    svg_elements = ['a', 'animate', 'animateColor', 'animateMotion',
                    'animateTransform', 'clipPath', 'circle', 'defs', 'desc', 'ellipse',
                    'font-face', 'font-face-name', 'font-face-src', 'g', 'glyph', 'hkern',
                    'linearGradient', 'line', 'marker', 'metadata', 'missing-glyph',
                    'mpath', 'path', 'polygon', 'polyline', 'radialGradient', 'rect',
                    'set', 'stop', 'svg', 'switch', 'text', 'title', 'tspan', 'use']

    # --- Whitelists of attribute names ---------------------------------------
    acceptable_attributes = ['abbr', 'accept', 'accept-charset', 'accesskey',
                             'action', 'align', 'alt', 'autocomplete', 'autofocus', 'axis',
                             'background', 'balance', 'bgcolor', 'bgproperties', 'border',
                             'bordercolor', 'bordercolordark', 'bordercolorlight', 'bottompadding',
                             'cellpadding', 'cellspacing', 'ch', 'challenge', 'char', 'charoff',
                             'choff', 'charset', 'checked', 'cite', 'class', 'clear', 'color',
                             'cols', 'colspan', 'compact', 'contenteditable', 'controls', 'coords',
                             'data', 'datafld', 'datapagesize', 'datasrc', 'datetime', 'default',
                             'delay', 'dir', 'disabled', 'draggable', 'dynsrc', 'enctype', 'end',
                             'face', 'for', 'form', 'frame', 'galleryimg', 'gutter', 'headers',
                             'height', 'hidefocus', 'hidden', 'high', 'href', 'hreflang', 'hspace',
                             'icon', 'id', 'inputmode', 'ismap', 'keytype', 'label', 'leftspacing',
                             'lang', 'list', 'longdesc', 'loop', 'loopcount', 'loopend',
                             'loopstart', 'low', 'lowsrc', 'max', 'maxlength', 'media', 'method',
                             'min', 'multiple', 'name', 'nohref', 'noshade', 'nowrap', 'open',
                             'optimum', 'pattern', 'ping', 'point-size', 'poster', 'pqg', 'preload',
                             'prompt', 'radiogroup', 'readonly', 'rel', 'repeat-max', 'repeat-min',
                             'replace', 'required', 'rev', 'rightspacing', 'rows', 'rowspan',
                             'rules', 'scope', 'selected', 'shape', 'size', 'span', 'src', 'start',
                             'step', 'style', 'summary', 'suppress', 'tabindex', 'target',
                             'template', 'title', 'toppadding', 'type', 'unselectable', 'usemap',
                             'urn', 'valign', 'value', 'variable', 'volume', 'vspace', 'vrml',
                             'width', 'wrap', 'xml:lang']

    mathml_attributes = ['actiontype', 'align', 'columnalign', 'columnalign',
                         'columnalign', 'columnlines', 'columnspacing', 'columnspan', 'depth',
                         'display', 'displaystyle', 'equalcolumns', 'equalrows', 'fence',
                         'fontstyle', 'fontweight', 'frame', 'height', 'linethickness', 'lspace',
                         'mathbackground', 'mathcolor', 'mathvariant', 'mathvariant', 'maxsize',
                         'minsize', 'other', 'rowalign', 'rowalign', 'rowalign', 'rowlines',
                         'rowspacing', 'rowspan', 'rspace', 'scriptlevel', 'selection',
                         'separator', 'stretchy', 'width', 'width', 'xlink:href', 'xlink:show',
                         'xlink:type', 'xmlns', 'xmlns:xlink']

    svg_attributes = ['accent-height', 'accumulate', 'additive', 'alphabetic',
                      'arabic-form', 'ascent', 'attributeName', 'attributeType',
                      'baseProfile', 'bbox', 'begin', 'by', 'calcMode', 'cap-height',
                      'class', 'clip-path', 'color', 'color-rendering', 'content', 'cx',
                      'cy', 'd', 'dx', 'dy', 'descent', 'display', 'dur', 'end', 'fill',
                      'fill-opacity', 'fill-rule', 'font-family', 'font-size',
                      'font-stretch', 'font-style', 'font-variant', 'font-weight', 'from',
                      'fx', 'fy', 'g1', 'g2', 'glyph-name', 'gradientUnits', 'hanging',
                      'height', 'horiz-adv-x', 'horiz-origin-x', 'id', 'ideographic', 'k',
                      'keyPoints', 'keySplines', 'keyTimes', 'lang', 'marker-end',
                      'marker-mid', 'marker-start', 'markerHeight', 'markerUnits',
                      'markerWidth', 'mathematical', 'max', 'min', 'name', 'offset',
                      'opacity', 'orient', 'origin', 'overline-position',
                      'overline-thickness', 'panose-1', 'path', 'pathLength', 'points',
                      'preserveAspectRatio', 'r', 'refX', 'refY', 'repeatCount',
                      'repeatDur', 'requiredExtensions', 'requiredFeatures', 'restart',
                      'rotate', 'rx', 'ry', 'slope', 'stemh', 'stemv', 'stop-color',
                      'stop-opacity', 'strikethrough-position', 'strikethrough-thickness',
                      'stroke', 'stroke-dasharray', 'stroke-dashoffset', 'stroke-linecap',
                      'stroke-linejoin', 'stroke-miterlimit', 'stroke-opacity',
                      'stroke-width', 'systemLanguage', 'target', 'text-anchor', 'to',
                      'transform', 'type', 'u1', 'u2', 'underline-position',
                      'underline-thickness', 'unicode', 'unicode-range', 'units-per-em',
                      'values', 'version', 'viewBox', 'visibility', 'width', 'widths', 'x',
                      'x-height', 'x1', 'x2', 'xlink:actuate', 'xlink:arcrole',
                      'xlink:href', 'xlink:role', 'xlink:show', 'xlink:title', 'xlink:type',
                      'xml:base', 'xml:lang', 'xml:space', 'xmlns', 'xmlns:xlink', 'y',
                      'y1', 'y2', 'zoomAndPan']

    # Attributes whose values are URIs and therefore get protocol-checked.
    attr_val_is_uri = ['href', 'src', 'cite', 'action', 'longdesc', 'poster',
                       'xlink:href', 'xml:base']

    # SVG attributes that may carry url(...) references (stripped if external).
    svg_attr_val_allows_ref = ['clip-path', 'color-profile', 'cursor', 'fill',
                               'filter', 'marker', 'marker-start', 'marker-mid', 'marker-end',
                               'mask', 'stroke']

    # SVG elements whose xlink:href is only kept when it is a local fragment.
    svg_allow_local_href = ['altGlyph', 'animate', 'animateColor',
                            'animateMotion', 'animateTransform', 'cursor', 'feImage', 'filter',
                            'linearGradient', 'pattern', 'radialGradient', 'textpath', 'tref',
                            'set', 'use']

    # --- Whitelists for inline CSS and URI protocols --------------------------
    acceptable_css_properties = ['azimuth', 'background-color',
                                 'border-bottom-color', 'border-collapse', 'border-color',
                                 'border-left-color', 'border-right-color', 'border-top-color', 'clear',
                                 'color', 'cursor', 'direction', 'display', 'elevation', 'float', 'font',
                                 'font-family', 'font-size', 'font-style', 'font-variant', 'font-weight',
                                 'height', 'letter-spacing', 'line-height', 'overflow', 'pause',
                                 'pause-after', 'pause-before', 'pitch', 'pitch-range', 'richness',
                                 'speak', 'speak-header', 'speak-numeral', 'speak-punctuation',
                                 'speech-rate', 'stress', 'text-align', 'text-decoration', 'text-indent',
                                 'unicode-bidi', 'vertical-align', 'voice-family', 'volume',
                                 'white-space', 'width']

    acceptable_css_keywords = ['auto', 'aqua', 'black', 'block', 'blue',
                               'bold', 'both', 'bottom', 'brown', 'center', 'collapse', 'dashed',
                               'dotted', 'fuchsia', 'gray', 'green', '!important', 'italic', 'left',
                               'lime', 'maroon', 'medium', 'none', 'navy', 'normal', 'nowrap', 'olive',
                               'pointer', 'purple', 'red', 'right', 'solid', 'silver', 'teal', 'top',
                               'transparent', 'underline', 'white', 'yellow']

    acceptable_svg_properties = ['fill', 'fill-opacity', 'fill-rule',
                                 'stroke', 'stroke-width', 'stroke-linecap', 'stroke-linejoin',
                                 'stroke-opacity']

    acceptable_protocols = ['ed2k', 'ftp', 'http', 'https', 'irc',
                            'mailto', 'news', 'gopher', 'nntp', 'telnet', 'webcal',
                            'xmpp', 'callto', 'feed', 'urn', 'aim', 'rsync', 'tag',
                            'ssh', 'sftp', 'rtsp', 'afs']

    # subclasses may define their own versions of these constants
    allowed_elements = acceptable_elements + mathml_elements + svg_elements
    allowed_attributes = acceptable_attributes + mathml_attributes + svg_attributes
    allowed_css_properties = acceptable_css_properties
    allowed_css_keywords = acceptable_css_keywords
    allowed_svg_properties = acceptable_svg_properties
    allowed_protocols = acceptable_protocols

    # Sanitize the +html+, escaping all elements not in ALLOWED_ELEMENTS, and
    # stripping out all # attributes not in ALLOWED_ATTRIBUTES. Style
    # attributes are parsed, and a restricted set, # specified by
    # ALLOWED_CSS_PROPERTIES and ALLOWED_CSS_KEYWORDS, are allowed through.
    # attributes in ATTR_VAL_IS_URI are scanned, and only URI schemes specified
    # in ALLOWED_PROTOCOLS are allowed.
    #
    #   sanitize_html('<script> do_nasty_stuff() </script>')
    #    => &lt;script> do_nasty_stuff() &lt;/script>
    #   sanitize_html('<a href="javascript: sucker();">Click here for $100</a>')
    #    => <a>Click here for $100</a>
def sanitize_token(self, token):
    """Sanitize one token from the stream.

    Allowed start/end/empty tags are scrubbed via allowed_token();
    disallowed tags are escaped into literal text via disallowed_token();
    comments are dropped (returns None); everything else passes through.
    """
    # Accommodate filters which use token_type differently: it may be
    # the symbolic name (a tokenTypes key) or already the numeric code.
    token_type = token["type"]
    if token_type in tokenTypes:
        # Membership test on the dict itself; the original built a
        # throwaway list(tokenTypes.keys()) for the same check.
        token_type = tokenTypes[token_type]

    if token_type in (tokenTypes["StartTag"], tokenTypes["EndTag"],
                      tokenTypes["EmptyTag"]):
        if token["name"] in self.allowed_elements:
            return self.allowed_token(token, token_type)
        else:
            return self.disallowed_token(token, token_type)
    elif token_type == tokenTypes["Comment"]:
        # Comments are removed entirely (implicit None return).
        pass
    else:
        return token
def allowed_token(self, token, token_type):
    """Scrub the attributes of a tag that is itself allowed.

    Drops attributes outside the whitelist, removes URI-valued
    attributes whose scheme is not whitelisted, blanks non-local SVG
    url(...) references, and sanitizes inline style declarations.
    """
    if "data" in token:
        # Keep only whitelisted attributes. Iterating the attribute
        # pairs reversed means the FIRST occurrence of a duplicated
        # attribute name wins in the resulting dict.
        attrs = dict([(name, val) for name, val in
                      token["data"][::-1]
                      if name in self.allowed_attributes])
        for attr in self.attr_val_is_uri:
            if attr not in attrs:
                continue
            # Strip backticks, control and whitespace characters that
            # browsers ignore when parsing URIs, then lowercase the
            # value before inspecting its scheme.
            val_unescaped = re.sub("[`\000-\040\177-\240\s]+", '',
                                   unescape(attrs[attr])).lower()
            # remove replacement characters from unescaped characters
            val_unescaped = val_unescaped.replace("\ufffd", "")
            # Delete the attribute if it carries a scheme (text before
            # the first ':') that is not in the protocol whitelist.
            if (re.match("^[a-z0-9][-+.a-z0-9]*:", val_unescaped) and
                (val_unescaped.split(':')[0] not in
                 self.allowed_protocols)):
                del attrs[attr]
        for attr in self.svg_attr_val_allows_ref:
            if attr in attrs:
                # Blank out url(...) references that do not start with
                # '#' (i.e. point outside the current document).
                attrs[attr] = re.sub(r'url\s*\(\s*[^#\s][^)]+?\)',
                                     ' ',
                                     unescape(attrs[attr]))
        # Non-local xlink:href values on whitelisted SVG elements are
        # dropped entirely.
        if (token["name"] in self.svg_allow_local_href and
            'xlink:href' in attrs and re.search('^\s*[^#\s].*',
                                                attrs['xlink:href'])):
            del attrs['xlink:href']
        if 'style' in attrs:
            attrs['style'] = self.sanitize_css(attrs['style'])
        token["data"] = [[name, val] for name, val in list(attrs.items())]
    return token
def disallowed_token(self, token, token_type):
    """Escape a disallowed tag by re-serializing it as literal text.

    The token is rewritten in place into a Characters token whose data
    is the markup source of the tag (attribute values escaped).
    """
    if token_type == tokenTypes["EndTag"]:
        token["data"] = "</%s>" % token["name"]
    elif token["data"]:
        # Re-serialize the start tag, escaping attribute values.
        attrs = ''.join([' %s="%s"' % (k, escape(v)) for k, v in token["data"]])
        token["data"] = "<%s%s>" % (token["name"], attrs)
    else:
        token["data"] = "<%s>" % token["name"]
    if token.get("selfClosing"):
        token["data"] = token["data"][:-1] + "/>"

    # Filters may carry the token type as a symbolic name or a numeric
    # code; emit "Characters" in whichever form the token used.
    # (Membership test on the dict replaces the original's wasteful
    # list(tokenTypes.keys()) construction.)
    if token["type"] in tokenTypes:
        token["type"] = "Characters"
    else:
        token["type"] = tokenTypes["Characters"]

    del token["name"]
    return token
def sanitize_css(self, style):
    """Return *style* with only whitelisted CSS declarations kept.

    url(...) values are stripped; if the remaining text contains
    characters outside a conservative set or does not parse as simple
    "prop: value;" pairs, the entire style is discarded ('').
    """
    # disallow urls -- they can smuggle in external references
    style = re.compile(r'url\s*\(\s*[^\s)]+?\s*\)\s*').sub(' ', style)

    # gauntlet: regexes are raw strings so backslashes reach the regex
    # engine unmangled (same patterns as before, made explicit).
    if not re.match(r"""^([:,;#%.\sa-zA-Z0-9!]|\w-\w|'[\s\w]+'|"[\s\w]+"|\([\d,\s]+\))*$""", style):
        return ''
    if not re.match(r"^\s*([-\w]+\s*:[^:;]*(;\s*|$))*$", style):
        return ''

    clean = []
    for prop, value in re.findall(r"([-\w]+)\s*:\s*([^:;]*)", style):
        if not value:
            continue
        if prop.lower() in self.allowed_css_properties:
            clean.append(prop + ': ' + value + ';')
        elif prop.split('-')[0].lower() in ['background', 'border', 'margin',
                                            'padding']:
            # Shorthand properties: every keyword must be whitelisted or
            # look like a colour/length literal.
            # NOTE(review): this consults acceptable_css_keywords, not
            # allowed_css_keywords, so a subclass overriding the latter
            # is ignored here -- preserved as-is for compatibility.
            for keyword in value.split():
                if keyword not in self.acceptable_css_keywords and \
                        not re.match(r"^(#[0-9a-f]+|rgb\(\d+%?,\d*%?,?\d*%?\)?|\d{0,2}\.?\d{0,2}(cm|em|ex|in|mm|pc|pt|px|%|,|\))?)$", keyword):
                    break
            else:
                clean.append(prop + ': ' + value + ';')
        elif prop.lower() in self.allowed_svg_properties:
            clean.append(prop + ': ' + value + ';')

    return ' '.join(clean)
class HTMLSanitizer(HTMLTokenizer, HTMLSanitizerMixin):
    """HTML tokenizer that sanitizes the token stream as it is produced."""

    def __init__(self, stream, encoding=None, parseMeta=True, useChardet=True,
                 lowercaseElementName=False, lowercaseAttrName=False, parser=None):
        # Change case matching defaults as we only output lowercase html anyway
        # This solution doesn't seem ideal...
        HTMLTokenizer.__init__(self, stream, encoding, parseMeta, useChardet,
                               lowercaseElementName, lowercaseAttrName, parser=parser)

    def __iter__(self):
        # Sanitize each token; tokens rejected by sanitize_token come back
        # as None (e.g. comments) and are silently dropped from the stream.
        for token in HTMLTokenizer.__iter__(self):
            token = self.sanitize_token(token)
            if token:
                yield token
| gpl-3.0 |
Edraak/circleci-edx-platform | common/djangoapps/edraak_i18n/management/commands/i18n_edraak_theme_push.py | 1 | 2382 | # * Handling merge/forks of UserProfile.meta
from django.core.management.base import BaseCommand
from django.conf import settings
import os
from subprocess import call
import polib
class Command(BaseCommand):
    help = '''Run theme's ./scripts/edraak_i18n_theme_push.sh'''

    @staticmethod
    def remove_ignored_messages(theme_root):
        """Delete entries whose translator comment contains 'edraak-ignore'
        from the theme's English .po file, then save it in place."""
        theme_pofile = theme_root / 'conf/locale/en/LC_MESSAGES/edraak-platform-2015-theme.po'
        theme_po = polib.pofile(theme_pofile)
        # `reversed()` is used to allow removing from the bottom
        # instead of changing the index and introducing bugs
        for entry in reversed(theme_po):
            if 'edraak-ignore' in entry.comment.lower():
                theme_po.remove(entry)
                print 'Removed ignored translation: ', entry.msgid, '=>', entry.msgstr
        theme_po.save()

    @staticmethod
    def generate_pofile(theme_root):
        """Extract translatable strings from the theme's Mako templates
        into mako.po, then segment it per conf/locale/config.yaml."""
        mako_pofile_relative = 'conf/locale/en/LC_MESSAGES/mako.po'
        mako_pofile = theme_root / mako_pofile_relative
        if not mako_pofile.dirname().exists():
            os.makedirs(mako_pofile.dirname())
        open(mako_pofile, 'w').close()  # Make sure the file exists and empty
        call([
            'pybabel',
            '-q', 'extract',
            '--mapping=conf/locale/babel_mako.cfg',
            '--add-comments', 'Translators:',
            '--keyword', 'interpolate',
            '.',
            '--output={}'.format(mako_pofile_relative),
        ], cwd=theme_root)
        # Split mako.po into per-resource files (consumes mako.po).
        call(['i18n_tool', 'segment', '--config', 'conf/locale/config.yaml', 'en'], cwd=theme_root)
        if mako_pofile.exists():
            mako_pofile.unlink()

    @staticmethod
    def transifex_push(theme_root):
        """Push the English source strings to the Transifex resource."""
        call(['tx', 'push', '-l', 'en', '-s', '-r', 'edraak.edraak-platform-2015-theme'], cwd=theme_root)

    def handle(self, *args, **options):
        """Entry point: generate, filter and push theme translations when
        a custom theme is configured; otherwise warn and wait for input."""
        if settings.FEATURES.get('USE_CUSTOM_THEME', False) and settings.THEME_NAME:
            theme_root = settings.ENV_ROOT / "themes" / settings.THEME_NAME
            self.generate_pofile(theme_root)
            self.remove_ignored_messages(theme_root)
            self.transifex_push(theme_root)
        else:
            print "Error: theme files not found."
            print "Are you sure the config is correct? Press <Enter> to continue without theme i18n..."
            raw_input()
johnkeepmoving/oss-ftp | python27/win32/Lib/test/test_imageop.py | 34 | 6854 | """Test script for the imageop module. This has the side
effect of partially testing the imgfile module as well.
Roger E. Masse
"""
from test.test_support import verbose, unlink, import_module, run_unittest
imageop = import_module('imageop', deprecated=True)
import uu, os, unittest
SIZES = (1, 2, 3, 4)
_VALUES = (1, 2, 2**10, 2**15-1, 2**15, 2**15+1, 2**31-2, 2**31-1)
VALUES = tuple( -x for x in reversed(_VALUES) ) + (0,) + _VALUES
AAAAA = "A" * 1024
MAX_LEN = 2**20
class InputValidationTests(unittest.TestCase):
    """Feed each imageop entry point pathological width/height pairs and
    check that it fails cleanly (ValueError/imageop.error) rather than
    crashing the interpreter."""

    def _check(self, name, size=None, *extra):
        # Resolve the imageop function under test by name.
        func = getattr(imageop, name)
        for height in VALUES:
            for width in VALUES:
                length = abs(width * height)
                if size:
                    length *= size
                # Cap the buffer so huge nominal sizes stay affordable.
                data = "A" * length if length < MAX_LEN else AAAAA
                if size:
                    arguments = (data, size, width, height) + extra
                else:
                    arguments = (data, width, height) + extra
                try:
                    func(*arguments)
                except (ValueError, imageop.error):
                    pass

    def check_size(self, name, *extra):
        # Exercise the sized variant once per supported pixel size.
        for size in SIZES:
            self._check(name, size, *extra)

    def check(self, name, *extra):
        # Variant for functions that take no explicit pixel size.
        self._check(name, None, *extra)

    def test_input_validation(self):
        self.check_size("crop", 0, 0, 0, 0)
        self.check_size("scale", 1, 0)
        self.check_size("scale", -1, -1)
        self.check_size("tovideo")
        self.check("grey2mono", 128)
        self.check("grey2grey4")
        self.check("grey2grey2")
        self.check("dither2mono")
        self.check("dither2grey2")
        self.check("mono2grey", 0, 0)
        self.check("grey22grey")
        self.check("rgb2rgb8")  # nlen*4 == len
        self.check("rgb82rgb")
        self.check("rgb2grey")
        self.check("grey2rgb")
def test_main():
    """Run the input-validation suite, then (only when the SGI ``imgfile``
    module is importable) smoke-test every imageop conversion on a real
    RGB test image, round-tripping between formats."""
    run_unittest(InputValidationTests)

    try:
        import imgfile
    except ImportError:
        # imgfile exists only on SGI builds; skip the conversion tests.
        return

    # Create binary test files
    uu.decode(get_qualified_path('testrgb'+os.extsep+'uue'), 'test'+os.extsep+'rgb')

    image, width, height = getimage('test'+os.extsep+'rgb')

    # Return the selected part of image, which should by width by height
    # in size and consist of pixels of psize bytes.
    if verbose:
        print 'crop'
    newimage = imageop.crop (image, 4, width, height, 0, 0, 1, 1)

    # Return image scaled to size newwidth by newheight. No interpolation
    # is done, scaling is done by simple-minded pixel duplication or removal.
    # Therefore, computer-generated images or dithered images will
    # not look nice after scaling.
    if verbose:
        print 'scale'
    scaleimage = imageop.scale(image, 4, width, height, 1, 1)

    # Run a vertical low-pass filter over an image. It does so by computing
    # each destination pixel as the average of two vertically-aligned source
    # pixels. The main use of this routine is to forestall excessive flicker
    # if the image two vertically-aligned source pixels, hence the name.
    if verbose:
        print 'tovideo'
    videoimage = imageop.tovideo (image, 4, width, height)

    # Convert an rgb image to an 8 bit rgb
    if verbose:
        print 'rgb2rgb8'
    greyimage = imageop.rgb2rgb8(image, width, height)

    # Convert an 8 bit rgb image to a 24 bit rgb image
    if verbose:
        print 'rgb82rgb'
    image = imageop.rgb82rgb(greyimage, width, height)

    # Convert an rgb image to an 8 bit greyscale image
    if verbose:
        print 'rgb2grey'
    greyimage = imageop.rgb2grey(image, width, height)

    # Convert an 8 bit greyscale image to a 24 bit rgb image
    if verbose:
        print 'grey2rgb'
    image = imageop.grey2rgb(greyimage, width, height)

    # Convert a 8-bit deep greyscale image to a 1-bit deep image by
    # thresholding all the pixels. The resulting image is tightly packed
    # and is probably only useful as an argument to mono2grey.
    if verbose:
        print 'grey2mono'
    monoimage = imageop.grey2mono (greyimage, width, height, 0)

    # monoimage, width, height = getimage('monotest.rgb')
    # Convert a 1-bit monochrome image to an 8 bit greyscale or color image.
    # All pixels that are zero-valued on input get value p0 on output and
    # all one-value input pixels get value p1 on output. To convert a
    # monochrome black-and-white image to greyscale pass the values 0 and
    # 255 respectively.
    if verbose:
        print 'mono2grey'
    greyimage = imageop.mono2grey (monoimage, width, height, 0, 255)

    # Convert an 8-bit greyscale image to a 1-bit monochrome image using a
    # (simple-minded) dithering algorithm.
    if verbose:
        print 'dither2mono'
    monoimage = imageop.dither2mono (greyimage, width, height)

    # Convert an 8-bit greyscale image to a 4-bit greyscale image without
    # dithering.
    if verbose:
        print 'grey2grey4'
    grey4image = imageop.grey2grey4 (greyimage, width, height)

    # Convert an 8-bit greyscale image to a 2-bit greyscale image without
    # dithering.
    if verbose:
        print 'grey2grey2'
    grey2image = imageop.grey2grey2 (greyimage, width, height)

    # Convert an 8-bit greyscale image to a 2-bit greyscale image with
    # dithering. As for dither2mono, the dithering algorithm is currently
    # very simple.
    if verbose:
        print 'dither2grey2'
    grey2image = imageop.dither2grey2 (greyimage, width, height)

    # Convert a 4-bit greyscale image to an 8-bit greyscale image.
    if verbose:
        print 'grey42grey'
    greyimage = imageop.grey42grey (grey4image, width, height)

    # Convert a 2-bit greyscale image to an 8-bit greyscale image.
    if verbose:
        print 'grey22grey'
    image = imageop.grey22grey (grey2image, width, height)

    # Cleanup
    unlink('test'+os.extsep+'rgb')
def getimage(name):
    """return a tuple consisting of
    image (in 'imgfile' format) width and height
    """
    import imgfile
    try:
        sizes = imgfile.getsizes(name)
    except imgfile.error:
        # Not found relative to the cwd; retry with a qualified path.
        name = get_qualified_path(name)
        sizes = imgfile.getsizes(name)
    if verbose:
        print 'imgfile opening test image: %s, sizes: %s' % (name, str(sizes))

    image = imgfile.read(name)
    return (image, sizes[0], sizes[1])
def get_qualified_path(name):
    """Return the first existing path to *name* found on the search path.

    The directory containing this file (when __file__ is defined) is
    searched first, then every entry of sys.path. If nothing matches,
    *name* is returned unchanged.
    """
    import sys
    import os
    search_path = sys.path
    try:
        # Prefer the directory holding this test file, when known.
        search_path = [os.path.dirname(__file__)] + search_path
    except NameError:
        pass
    # 'directory' instead of the original 'dir', which shadowed the builtin.
    for directory in search_path:
        fullname = os.path.join(directory, name)
        if os.path.exists(fullname):
            return fullname
    return name
# Allow running this test file directly as a script.
if __name__ == '__main__':
    test_main()
| mit |
gisprogrammer/wsp.geo.pl | external/OpenLayers-2.13.1/tests/selenium/remotecontrol/selenium.py | 254 | 69389 |
"""
Copyright 2006 ThoughtWorks, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__docformat__ = "restructuredtext en"
# This file has been automatically generated via XSL
import httplib
import urllib
import re
class selenium:
"""
Defines an object that runs Selenium commands.
Element Locators
~~~~~~~~~~~~~~~~
Element Locators tell Selenium which HTML element a command refers to.
The format of a locator is:
\ *locatorType*\ **=**\ \ *argument*
We support the following strategies for locating elements:
* \ **identifier**\ =\ *id*:
Select the element with the specified @id attribute. If no match is
found, select the first element whose @name attribute is \ *id*.
(This is normally the default; see below.)
* \ **id**\ =\ *id*:
Select the element with the specified @id attribute.
* \ **name**\ =\ *name*:
Select the first element with the specified @name attribute.
* username
* name=username
The name may optionally be followed by one or more \ *element-filters*, separated from the name by whitespace. If the \ *filterType* is not specified, \ **value**\ is assumed.
* name=flavour value=chocolate
* \ **dom**\ =\ *javascriptExpression*:
Find an element by evaluating the specified string. This allows you to traverse the HTML Document Object
Model using JavaScript. Note that you must not return a value in this string; simply make it the last expression in the block.
* dom=document.forms['myForm'].myDropdown
* dom=document.images[56]
* dom=function foo() { return document.links[1]; }; foo();
* \ **xpath**\ =\ *xpathExpression*:
Locate an element using an XPath expression.
* xpath=//img[@alt='The image alt text']
* xpath=//table[@id='table1']//tr[4]/td[2]
* xpath=//a[contains(@href,'#id1')]
* xpath=//a[contains(@href,'#id1')]/@class
* xpath=(//table[@class='stylee'])//th[text()='theHeaderText']/../td
* xpath=//input[@name='name2' and @value='yes']
* xpath=//\*[text()="right"]
* \ **link**\ =\ *textPattern*:
Select the link (anchor) element which contains text matching the
specified \ *pattern*.
* link=The link text
* \ **css**\ =\ *cssSelectorSyntax*:
Select the element using css selectors. Please refer to CSS2 selectors, CSS3 selectors for more information. You can also check the TestCssLocators test in the selenium test suite for an example of usage, which is included in the downloaded selenium core package.
* css=a[href="#id3"]
* css=span#firstChild + span
Currently the css selector locator supports all css1, css2 and css3 selectors except namespace in css3, some pseudo classes(:nth-of-type, :nth-last-of-type, :first-of-type, :last-of-type, :only-of-type, :visited, :hover, :active, :focus, :indeterminate) and pseudo elements(::first-line, ::first-letter, ::selection, ::before, ::after).
Without an explicit locator prefix, Selenium uses the following default
strategies:
* \ **dom**\ , for locators starting with "document."
* \ **xpath**\ , for locators starting with "//"
* \ **identifier**\ , otherwise
Element Filters
~~~~~~~~~~~~~~~
Element filters can be used with a locator to refine a list of candidate elements. They are currently used only in the 'name' element-locator.
Filters look much like locators, ie.
\ *filterType*\ **=**\ \ *argument*
Supported element-filters are:
\ **value=**\ \ *valuePattern*
Matches elements based on their values. This is particularly useful for refining a list of similarly-named toggle-buttons.
\ **index=**\ \ *index*
Selects a single element based on its position in the list (offset from zero).
String-match Patterns
~~~~~~~~~~~~~~~~~~~~~
Various Pattern syntaxes are available for matching string values:
* \ **glob:**\ \ *pattern*:
Match a string against a "glob" (aka "wildmat") pattern. "Glob" is a
kind of limited regular-expression syntax typically used in command-line
shells. In a glob pattern, "\*" represents any sequence of characters, and "?"
represents any single character. Glob patterns match against the entire
string.
* \ **regexp:**\ \ *regexp*:
Match a string using a regular-expression. The full power of JavaScript
regular-expressions is available.
* \ **regexpi:**\ \ *regexpi*:
Match a string using a case-insensitive regular-expression.
* \ **exact:**\ \ *string*:
Match a string exactly, verbatim, without any of that fancy wildcard
stuff.
If no pattern prefix is specified, Selenium assumes that it's a "glob"
pattern.
For commands that return multiple values (such as verifySelectOptions),
the string being matched is a comma-separated list of the return values,
where both commas and backslashes in the values are backslash-escaped.
When providing a pattern, the optional matching syntax (i.e. glob,
regexp, etc.) is specified once, as usual, at the beginning of the
pattern.
"""
### This part is hard-coded in the XSL
def __init__(self, host, port, browserStartCommand, browserURL):
    """Remember the Selenium RC server coordinates and browser launch
    settings. No network traffic occurs until start() is called."""
    self.host = host
    self.port = port
    self.browserStartCommand = browserStartCommand
    self.browserURL = browserURL
    # No session exists until start() succeeds.
    self.sessionId = None
def start(self):
    """Ask the server for a new browser session and store its id."""
    result = self.get_string("getNewBrowserSession", [self.browserStartCommand, self.browserURL])
    try:
        self.sessionId = result
    except ValueError:
        # NOTE(review): a plain assignment can never raise ValueError, so
        # this handler looks unreachable -- probably left over from when
        # the session id was parsed as a number. Confirm before removing.
        raise Exception, result
def stop(self):
    """End the current test session and forget the stored session id."""
    self.do_command("testComplete", [])
    self.sessionId = None
def do_command(self, verb, args):
    """Send one command to the Selenium RC server over HTTP GET.

    Builds /selenium-server/driver/?cmd=<verb>&1=<arg1>&... (appending
    the current sessionId when one exists), issues the request, and
    returns the UTF-8 decoded body. Raises Exception when the body does
    not start with 'OK'.
    """
    conn = httplib.HTTPConnection(self.host, self.port)
    commandString = u'/selenium-server/driver/?cmd=' + urllib.quote_plus(unicode(verb).encode('utf-8'))
    for i in range(len(args)):
        # Positional arguments are numbered from 1 in the wire protocol.
        commandString = commandString + '&' + unicode(i+1) + '=' + urllib.quote_plus(unicode(args[i]).encode('utf-8'))
    if (None != self.sessionId):
        commandString = commandString + "&sessionId=" + unicode(self.sessionId)
    conn.request("GET", commandString)

    response = conn.getresponse()
    #print response.status, response.reason
    data = unicode(response.read(), "UTF-8")
    # NOTE(review): 'result' is assigned but never used afterwards.
    result = response.reason
    #print "Selenium Result: " + repr(data) + "\n\n"
    if (not data.startswith('OK')):
        raise Exception, data
    return data
def get_string(self, verb, args):
    """Run a command and return its payload, i.e. the server reply with
    the leading 'OK,' status marker stripped off."""
    raw = self.do_command(verb, args)
    return raw[3:]
def get_string_array(self, verb, args):
    """Run a command and split its reply on unescaped commas.

    The wire format backslash-escapes commas (and backslashes) inside
    individual values; this undoes that escaping while splitting.
    """
    raw = self.get_string(verb, args)
    fields = []
    current = []
    pending_escape = False
    for ch in raw:
        if pending_escape:
            # Previous char was a backslash: take this one literally.
            current.append(ch)
            pending_escape = False
        elif ch == '\\':
            pending_escape = True
        elif ch == ',':
            fields.append(''.join(current))
            current = []
        else:
            current.append(ch)
    fields.append(''.join(current))
    return fields
def get_number(self, verb, args):
    """Fetch a numeric result; currently returned in raw string form."""
    # Is there something I need to do here?
    return self.get_string(verb, args)
def get_number_array(self, verb, args):
    """Fetch an array of numeric results; returned as raw strings."""
    # Is there something I need to do here?
    return self.get_string_array(verb, args)
def get_boolean(self, verb, args):
    """Fetch a result and map 'true'/'false' to True/False.

    Raises ValueError for any other reply.
    """
    boolstr = self.get_string(verb, args)
    if ("true" == boolstr):
        return True
    if ("false" == boolstr):
        return False
    raise ValueError, "result is neither 'true' nor 'false': " + boolstr
def get_boolean_array(self, verb, args):
    """Fetch an array result and map each 'true'/'false' to True/False.

    Raises ValueError if any element is neither 'true' nor 'false'.
    """
    boolarr = self.get_string_array(verb, args)
    for i in range(len(boolarr)):
        # Bug fix: the original compared the undefined name 'boolstr'
        # (copy-pasted from get_boolean) instead of the current element,
        # so every call raised NameError.
        if ("true" == boolarr[i]):
            boolarr[i] = True
            continue
        if ("false" == boolarr[i]):
            boolarr[i] = False
            continue
        # Call-style raise is valid on both Python 2 and 3.
        raise ValueError("result is neither 'true' nor 'false': " + boolarr[i])
    return boolarr
### From here on, everything's auto-generated from XML
def click(self,locator):
    """
    Click a link, button, checkbox or radio button. If the click
    triggers a page load (as a link normally does), follow it with
    waitForPageToLoad.

    'locator' is an element locator
    """
    self.do_command("click", [locator])
def double_click(self,locator):
"""
Double clicks on a link, button, checkbox or radio button. If the double click action
causes a new page to load (like a link usually does), call
waitForPageToLoad.
'locator' is an element locator
"""
self.do_command("doubleClick", [locator,])
def context_menu(self,locator):
"""
Simulates opening the context menu for the specified element (as might happen if the user "right-clicked" on the element).
'locator' is an element locator
"""
self.do_command("contextMenu", [locator,])
def click_at(self,locator,coordString):
"""
Clicks on a link, button, checkbox or radio button. If the click action
causes a new page to load (like a link usually does), call
waitForPageToLoad.
'locator' is an element locator
'coordString' is specifies the x,y position (i.e. - 10,20) of the mouse event relative to the element returned by the locator.
"""
self.do_command("clickAt", [locator,coordString,])
def double_click_at(self,locator,coordString):
"""
Doubleclicks on a link, button, checkbox or radio button. If the action
causes a new page to load (like a link usually does), call
waitForPageToLoad.
'locator' is an element locator
'coordString' is specifies the x,y position (i.e. - 10,20) of the mouse event relative to the element returned by the locator.
"""
self.do_command("doubleClickAt", [locator,coordString,])
def context_menu_at(self,locator,coordString):
"""
Simulates opening the context menu for the specified element (as might happen if the user "right-clicked" on the element).
'locator' is an element locator
'coordString' is specifies the x,y position (i.e. - 10,20) of the mouse event relative to the element returned by the locator.
"""
self.do_command("contextMenuAt", [locator,coordString,])
def fire_event(self,locator,eventName):
"""
Explicitly simulate an event, to trigger the corresponding "on\ *event*"
handler.
'locator' is an element locator
'eventName' is the event name, e.g. "focus" or "blur"
"""
self.do_command("fireEvent", [locator,eventName,])
def focus(self,locator):
"""
Move the focus to the specified element; for example, if the element is an input field, move the cursor to that field.
'locator' is an element locator
"""
self.do_command("focus", [locator,])
def key_press(self,locator,keySequence):
"""
Simulates a user pressing and releasing a key.
'locator' is an element locator
'keySequence' is Either be a string("\" followed by the numeric keycode of the key to be pressed, normally the ASCII value of that key), or a single character. For example: "w", "\119".
"""
self.do_command("keyPress", [locator,keySequence,])
def shift_key_down(self):
"""
Press the shift key and hold it down until doShiftUp() is called or a new page is loaded.
"""
self.do_command("shiftKeyDown", [])
def shift_key_up(self):
"""
Release the shift key.
"""
self.do_command("shiftKeyUp", [])
def meta_key_down(self):
"""
Press the meta key and hold it down until doMetaUp() is called or a new page is loaded.
"""
self.do_command("metaKeyDown", [])
def meta_key_up(self):
"""
Release the meta key.
"""
self.do_command("metaKeyUp", [])
def alt_key_down(self):
"""
Press the alt key and hold it down until doAltUp() is called or a new page is loaded.
"""
self.do_command("altKeyDown", [])
def alt_key_up(self):
"""
Release the alt key.
"""
self.do_command("altKeyUp", [])
def control_key_down(self):
"""
Press the control key and hold it down until doControlUp() is called or a new page is loaded.
"""
self.do_command("controlKeyDown", [])
def control_key_up(self):
"""
Release the control key.
"""
self.do_command("controlKeyUp", [])
def key_down(self,locator,keySequence):
"""
Simulates a user pressing a key (without releasing it yet).
'locator' is an element locator
'keySequence' is Either be a string("\" followed by the numeric keycode of the key to be pressed, normally the ASCII value of that key), or a single character. For example: "w", "\119".
"""
self.do_command("keyDown", [locator,keySequence,])
def key_up(self,locator,keySequence):
"""
Simulates a user releasing a key.
'locator' is an element locator
'keySequence' is Either be a string("\" followed by the numeric keycode of the key to be pressed, normally the ASCII value of that key), or a single character. For example: "w", "\119".
"""
self.do_command("keyUp", [locator,keySequence,])
def mouse_over(self,locator):
"""
Simulates a user hovering a mouse over the specified element.
'locator' is an element locator
"""
self.do_command("mouseOver", [locator,])
def mouse_out(self,locator):
"""
Simulates a user moving the mouse pointer away from the specified element.
'locator' is an element locator
"""
self.do_command("mouseOut", [locator,])
def mouse_down(self,locator):
"""
Simulates a user pressing the mouse button (without releasing it yet) on
the specified element.
'locator' is an element locator
"""
self.do_command("mouseDown", [locator,])
def mouse_down_at(self,locator,coordString):
"""
Simulates a user pressing the mouse button (without releasing it yet) at
the specified location.
'locator' is an element locator
'coordString' is specifies the x,y position (i.e. - 10,20) of the mouse event relative to the element returned by the locator.
"""
self.do_command("mouseDownAt", [locator,coordString,])
def mouse_up(self,locator):
"""
Simulates the event that occurs when the user releases the mouse button (i.e., stops
holding the button down) on the specified element.
'locator' is an element locator
"""
self.do_command("mouseUp", [locator,])
def mouse_up_at(self,locator,coordString):
"""
Simulates the event that occurs when the user releases the mouse button (i.e., stops
holding the button down) at the specified location.
'locator' is an element locator
'coordString' is specifies the x,y position (i.e. - 10,20) of the mouse event relative to the element returned by the locator.
"""
self.do_command("mouseUpAt", [locator,coordString,])
def mouse_move(self,locator):
"""
Simulates a user pressing the mouse button (without releasing it yet) on
the specified element.
'locator' is an element locator
"""
self.do_command("mouseMove", [locator,])
def mouse_move_at(self,locator,coordString):
"""
Simulates a user pressing the mouse button (without releasing it yet) on
the specified element.
'locator' is an element locator
'coordString' is specifies the x,y position (i.e. - 10,20) of the mouse event relative to the element returned by the locator.
"""
self.do_command("mouseMoveAt", [locator,coordString,])
def type(self,locator,value):
"""
Sets the value of an input field, as though you typed it in.
Can also be used to set the value of combo boxes, check boxes, etc. In these cases,
value should be the value of the option selected, not the visible text.
'locator' is an element locator
'value' is the value to type
"""
self.do_command("type", [locator,value,])
def type_keys(self,locator,value):
"""
Simulates keystroke events on the specified element, as though you typed the value key-by-key.
This is a convenience method for calling keyDown, keyUp, keyPress for every character in the specified string;
this is useful for dynamic UI widgets (like auto-completing combo boxes) that require explicit key events.
Unlike the simple "type" command, which forces the specified value into the page directly, this command
may or may not have any visible effect, even in cases where typing keys would normally have a visible effect.
For example, if you use "typeKeys" on a form element, you may or may not see the results of what you typed in
the field.
In some cases, you may need to use the simple "type" command to set the value of the field and then the "typeKeys" command to
send the keystroke events corresponding to what you just typed.
'locator' is an element locator
'value' is the value to type
"""
self.do_command("typeKeys", [locator,value,])
def set_speed(self,value):
"""
Set execution speed (i.e., set the millisecond length of a delay which will follow each selenium operation). By default, there is no such delay, i.e.,
the delay is 0 milliseconds.
'value' is the number of milliseconds to pause after operation
"""
self.do_command("setSpeed", [value,])
def get_speed(self):
    """
    Return the execution speed: the millisecond delay applied after
    each Selenium operation (0 by default, i.e. no delay).

    See also setSpeed.
    """
    return self.get_string("getSpeed", [])
def check(self,locator):
"""
Check a toggle-button (checkbox/radio)
'locator' is an element locator
"""
self.do_command("check", [locator,])
def uncheck(self,locator):
"""
Uncheck a toggle-button (checkbox/radio)
'locator' is an element locator
"""
self.do_command("uncheck", [locator,])
def select(self,selectLocator,optionLocator):
"""
Select an option from a drop-down using an option locator.
Option locators provide different ways of specifying options of an HTML
Select element (e.g. for selecting a specific option, or for asserting
that the selected option satisfies a specification). There are several
forms of Select Option Locator.
* \ **label**\ =\ *labelPattern*:
matches options based on their labels, i.e. the visible text. (This
is the default.)
* label=regexp:^[Oo]ther
* \ **value**\ =\ *valuePattern*:
matches options based on their values.
* value=other
* \ **id**\ =\ *id*:
matches options based on their ids.
* id=option1
* \ **index**\ =\ *index*:
matches an option based on its index (offset from zero).
* index=2
If no option locator prefix is provided, the default behaviour is to match on \ **label**\ .
'selectLocator' is an element locator identifying a drop-down menu
'optionLocator' is an option locator (a label by default)
"""
self.do_command("select", [selectLocator,optionLocator,])
def add_selection(self,locator,optionLocator):
"""
Add a selection to the set of selected options in a multi-select element using an option locator.
@see #doSelect for details of option locators
'locator' is an element locator identifying a multi-select box
'optionLocator' is an option locator (a label by default)
"""
self.do_command("addSelection", [locator,optionLocator,])
def remove_selection(self,locator,optionLocator):
"""
Remove a selection from the set of selected options in a multi-select element using an option locator.
@see #doSelect for details of option locators
'locator' is an element locator identifying a multi-select box
'optionLocator' is an option locator (a label by default)
"""
self.do_command("removeSelection", [locator,optionLocator,])
def remove_all_selections(self,locator):
"""
Unselects all of the selected options in a multi-select element.
'locator' is an element locator identifying a multi-select box
"""
self.do_command("removeAllSelections", [locator,])
def submit(self,formLocator):
"""
Submit the specified form. This is particularly useful for forms without
submit buttons, e.g. single-input "Search" forms.
'formLocator' is an element locator for the form you want to submit
"""
self.do_command("submit", [formLocator,])
def open(self,url):
    """
    Open *url* in the test frame and wait for the page to load (the
    "AndWait" suffix is implicit). Accepts relative and absolute URLs.

    Note: the URL must be on the same domain as the runner HTML (Same
    Origin Policy); for another domain, start a new server session.

    'url' is the URL to open; may be relative or absolute
    """
    self.do_command("open", [url])
def open_window(self,url,windowID):
"""
Opens a popup window (if a window with that ID isn't already open).
After opening the window, you'll need to select it using the selectWindow
command.
This command can also be a useful workaround for bug SEL-339. In some cases, Selenium will be unable to intercept a call to window.open (if the call occurs during or before the "onLoad" event, for example).
In those cases, you can force Selenium to notice the open window's name by using the Selenium openWindow command, using
an empty (blank) url, like this: openWindow("", "myFunnyWindow").
'url' is the URL to open, which can be blank
'windowID' is the JavaScript window ID of the window to select
"""
self.do_command("openWindow", [url,windowID,])
def select_window(self, windowID):
    """Direct all subsequent commands to the popup window given by *windowID*.

    Pass null to return to the main window. Window locators:

    * ``title=My Special Window`` -- match by title-bar text (titles may clash;
      one of the matches is picked arbitrarily).
    * ``name=myWindow`` -- match by the JavaScript "name" property, i.e. the
      second argument of window.open(url, windowName, ...), which Selenium
      intercepts.
    * ``var=variableName`` -- for anonymous popups held in a JavaScript variable
      of the application window, e.g. "window.foo = window.open(url);".

    Without a prefix, Selenium guesses in order: (1) null / "null" means the
    original browser window; (2) a JS variable in the application window holding
    a window.open() return value; (3) a name registered in Selenium's
    string-to-window-name map; (4) finally a title scan over all known windows
    (titles are not unique, so this may surprise you).

    Window names intercepted via window.open are reported in the Selenium log,
    e.g.::

        debug: window.open call intercepted; window ID (which you can use with selectWindow()) is "myNewWindow"

    If the window.open call could not be intercepted (during/before onLoad;
    bug SEL-339), force registration with openWindow using a blank url:
    openWindow("", "myFunnyWindow").
    """
    self.do_command("selectWindow", [windowID])
def select_frame(self, locator):
    """Select a frame within the current window (repeat the call for nesting).

    Use "relative=parent" for the parent frame, "relative=top" for the top
    frame, "index=N" for the 0-based Nth frame, or a direct DOM expression such
    as ``dom=frames["main"].frames["subframe"]``.
    """
    self.do_command("selectFrame", [locator])
def get_whether_this_frame_match_frame_expression(self, currentFrameString, target):
    """Return whether *currentFrameString* plus *target* identify the frame running this code.

    Proxy-injection support: the code runs in every frame/window, and when the
    test calls selectFrame the server asks each frame this question; only the
    selected frame answers true.
    """
    return self.get_boolean("getWhetherThisFrameMatchFrameExpression", [currentFrameString, target])
def get_whether_this_window_match_window_expression(self, currentWindowString, target):
    """Return whether *currentWindowString* plus *target* identify the window running this code.

    Proxy-injection support: when the test calls selectWindow, the server asks
    each window this question; only the selected window answers true. *target*
    may be relative to the current window, e.g. "_parent".
    """
    return self.get_boolean("getWhetherThisWindowMatchWindowExpression", [currentWindowString, target])
def wait_for_pop_up(self, windowID, timeout):
    """Wait up to *timeout* milliseconds for the popup *windowID* to appear and load.

    *windowID* is the JavaScript window "name" (not the title-bar text);
    the command errors out once the timeout elapses.
    """
    self.do_command("waitForPopUp", [windowID, timeout])
def choose_cancel_on_next_confirmation(self):
    """Make the NEXT window.confirm() return false, as if the user clicked Cancel.

    Selenium's overridden confirm() normally returns true (OK). After the one
    cancelled confirmation, the default OK behaviour resumes automatically; call
    this again for each confirmation you want cancelled.
    """
    self.do_command("chooseCancelOnNextConfirmation", [])
def choose_ok_on_next_confirmation(self):
    """Undo a prior choose_cancel_on_next_confirmation.

    Selenium's overridden window.confirm() already returns true (OK) by
    default, so this is only needed to change your mind before the next
    confirmation. After any confirmation the default OK behaviour resumes
    until chooseCancelOnNextConfirmation is called again.
    """
    self.do_command("chooseOkOnNextConfirmation", [])
def answer_on_next_prompt(self, answer):
    """Reply with *answer* to the next JavaScript prompt [window.prompt()]."""
    self.do_command("answerOnNextPrompt", [answer])
def go_back(self):
    """Simulate a click on the browser's "back" button."""
    self.do_command("goBack", [])
def refresh(self):
    """Simulate a click on the browser's "Refresh" button."""
    self.do_command("refresh", [])
def close(self):
    """Simulate clicking the "close" button in the titlebar of a popup window or tab."""
    self.do_command("close", [])
def is_alert_present(self):
    """Return whether an alert has occurred. Never throws an exception."""
    return self.get_boolean("isAlertPresent", [])
def is_prompt_present(self):
    """Return whether a prompt has occurred. Never throws an exception."""
    return self.get_boolean("isPromptPresent", [])
def is_confirmation_present(self):
    """Return whether confirm() has been called. Never throws an exception."""
    return self.get_boolean("isConfirmationPresent", [])
def get_alert(self):
    """Return the message of the JS alert raised by the previous action; fail if none.

    Retrieving the alert has the same effect as clicking OK. If an alert fires
    and you do not get/verify it, the NEXT Selenium action fails. Under
    Selenium no visible alert dialog is shown -- except for alerts raised in a
    page's onload() handler, which are unsupported: a real dialog appears and
    Selenium hangs until someone clicks OK manually.
    """
    return self.get_string("getAlert", [])
def get_confirmation(self):
    """Return the message of the JS confirmation raised by the previous action.

    By default the confirm function returns true (same effect as clicking OK);
    chooseCancelOnNextConfirmation changes that for the next call. If a
    confirmation fires and you do not get/verify it, the NEXT Selenium action
    fails. No visible dialog is shown under Selenium -- except confirmations
    raised in a page's onload() handler, which are unsupported: a real dialog
    appears and Selenium hangs until you click OK manually.
    """
    return self.get_string("getConfirmation", [])
def get_prompt(self):
    """Return the message of the JS question prompt raised by the previous action.

    Handling the prompt successfully requires a prior answerOnNextPrompt call.
    If a prompt fires and you do not get/verify it, the NEXT Selenium action
    fails. No visible dialog is shown under Selenium -- except prompts raised
    in a page's onload() handler, which are unsupported: a real dialog appears
    and Selenium hangs until someone clicks OK manually.
    """
    return self.get_string("getPrompt", [])
def get_location(self):
    """Return the absolute URL of the current page."""
    return self.get_string("getLocation", [])
def get_title(self):
    """Return the title of the current page."""
    return self.get_string("getTitle", [])
def get_body_text(self):
    """Return the entire text of the page."""
    return self.get_string("getBodyText", [])
def get_value(self, locator):
    """Return the whitespace-trimmed value of an input field.

    Works for anything with a value parameter; for checkbox/radio elements the
    result is "on" or "off" depending on whether the element is checked.
    """
    return self.get_string("getValue", [locator])
def get_text(self, locator):
    """Return the rendered text of an element.

    Works for any element that contains text; uses textContent (Mozilla-like
    browsers) or innerText (IE-like browsers), i.e. the text the user sees.
    """
    return self.get_string("getText", [locator])
def highlight(self, locator):
    """Briefly flash the element's backgroundColor yellow -- a debugging aid."""
    self.do_command("highlight", [locator])
def get_eval(self, script):
    """Evaluate a JavaScript snippet and return the result of its last line.

    The snippet may span multiple lines. By default it runs in the context of
    the "selenium" object itself (``this`` is the Selenium object); use
    ``window`` for your application's window, e.g.
    ``window.document.getElementById('foo')``. To resolve a single element via
    a locator, use ``this.browserbot.findElement("id=foo")``.
    """
    return self.get_string("getEval", [script])
def is_checked(self, locator):
    """Return whether a checkbox/radio is checked.

    Fails if the element does not exist or is not a toggle-button.
    """
    return self.get_boolean("isChecked", [locator])
def get_table(self, tableCellAddress):
    """Return the text of a table cell.

    The cell address syntax is tableLocator.row.column with 0-based row and
    column, e.g. "foo.1.4".
    """
    return self.get_string("getTable", [tableCellAddress])
def get_selected_labels(self, selectLocator):
    """Return all option labels (visible text) selected in the given select/multi-select."""
    return self.get_string_array("getSelectedLabels", [selectLocator])
def get_selected_label(self, selectLocator):
    """Return the label (visible text) of the selected option in the given select element."""
    return self.get_string("getSelectedLabel", [selectLocator])
def get_selected_values(self, selectLocator):
    """Return all option values (value attributes) selected in the given select/multi-select."""
    return self.get_string_array("getSelectedValues", [selectLocator])
def get_selected_value(self, selectLocator):
    """Return the value attribute of the selected option in the given select element."""
    return self.get_string("getSelectedValue", [selectLocator])
def get_selected_indexes(self, selectLocator):
    """Return all selected option indexes (0-based) in the given select/multi-select."""
    return self.get_string_array("getSelectedIndexes", [selectLocator])
def get_selected_index(self, selectLocator):
    """Return the 0-based index of the selected option in the given select element."""
    return self.get_string("getSelectedIndex", [selectLocator])
def get_selected_ids(self, selectLocator):
    """Return the element IDs of all selected options in the given select/multi-select."""
    return self.get_string_array("getSelectedIds", [selectLocator])
def get_selected_id(self, selectLocator):
    """Return the element ID of the selected option in the given select element."""
    return self.get_string("getSelectedId", [selectLocator])
def is_something_selected(self, selectLocator):
    """Return whether any option in the given drop-down menu is selected."""
    return self.get_boolean("isSomethingSelected", [selectLocator])
def get_select_options(self, selectLocator):
    """Return all option labels in the given select drop-down."""
    return self.get_string_array("getSelectOptions", [selectLocator])
def get_attribute(self, attributeLocator):
    """Return the value of an element attribute.

    *attributeLocator* is an element locator followed by "@" and the attribute
    name, e.g. "foo@bar". Values can differ across browsers (notably for
    "style").
    """
    return self.get_string("getAttribute", [attributeLocator])
def is_text_present(self, pattern):
    """Return whether *pattern* matches text anywhere on the rendered page."""
    return self.get_boolean("isTextPresent", [pattern])
def is_element_present(self, locator):
    """Return whether the element identified by *locator* is somewhere on the page."""
    return self.get_boolean("isElementPresent", [locator])
def is_visible(self, locator):
    """Return whether the element is visible.

    An element is invisible when CSS "visibility" is "hidden" or "display" is
    "none" on the element itself or any ancestor. Fails if the element is not
    present.
    """
    return self.get_boolean("isVisible", [locator])
def is_editable(self, locator):
    """Return whether the input element is editable (i.e. not disabled).

    Fails if the element is not an input element.
    """
    return self.get_boolean("isEditable", [locator])
def get_all_buttons(self):
    """Return the IDs of all buttons on the page (buttons without an ID appear as "")."""
    return self.get_string_array("getAllButtons", [])
def get_all_links(self):
    """Return the IDs of all links on the page (links without an ID appear as "")."""
    return self.get_string_array("getAllLinks", [])
def get_all_fields(self):
    """Return the IDs of all input fields on the page (fields without an ID appear as "")."""
    return self.get_string_array("getAllFields", [])
def get_attribute_from_all_windows(self, attributeName):
    """Return every instance of attribute *attributeName* from all known windows."""
    return self.get_string_array("getAttributeFromAllWindows", [attributeName])
def dragdrop(self, locator, movementsString):
    """Deprecated -- use drag_and_drop instead.

    *movementsString* is the pixel offset from the current location to move the
    element to, e.g. "+70,-300".
    """
    self.do_command("dragdrop", [locator, movementsString])
def set_mouse_speed(self, pixels):
    """Set the pixel spacing of "mousemove" events during dragAndDrop (default 10).

    0 fires a mousemove for every pixel between start and end -- very slow and
    liable to time out the browser's JavaScript. If the speed exceeds the drag
    distance, only one mousemove at the start and a final one at the end are
    sent.
    """
    self.do_command("setMouseSpeed", [pixels])
def get_mouse_speed(self):
    """Return the pixel spacing of "mousemove" events during dragAndDrop (default 10)."""
    return self.get_number("getMouseSpeed", [])
def drag_and_drop(self, locator, movementsString):
    """Drag an element by the given pixel offset and drop it.

    *movementsString* is the offset from the current location, e.g. "+70,-300".
    """
    self.do_command("dragAndDrop", [locator, movementsString])
def drag_and_drop_to_object(self, locatorOfObjectToBeDragged, locatorOfDragDestinationObject):
    """Drag one element and drop it on another.

    The drop point is the center-most pixel of the destination element.
    """
    self.do_command("dragAndDropToObject", [locatorOfObjectToBeDragged, locatorOfDragDestinationObject])
def window_focus(self):
    """Give focus to the currently selected window."""
    self.do_command("windowFocus", [])
def window_maximize(self):
    """Resize the currently selected window to fill the entire screen."""
    self.do_command("windowMaximize", [])
def get_all_window_ids(self):
    """Return the IDs of all windows the browser knows about."""
    return self.get_string_array("getAllWindowIds", [])
def get_all_window_names(self):
    """Return the names of all windows the browser knows about."""
    return self.get_string_array("getAllWindowNames", [])
def get_all_window_titles(self):
    """Return the titles of all windows the browser knows about."""
    return self.get_string_array("getAllWindowTitles", [])
def get_html_source(self):
    """Return the full HTML source between the opening and closing "html" tags."""
    return self.get_string("getHtmlSource", [])
def set_cursor_position(self, locator, position):
    """Move the text cursor to *position* within the given input element or textarea.

    Use 0 for the beginning of the field and -1 for the end. Fails if the
    element is not an input element or textarea.
    """
    self.do_command("setCursorPosition", [locator, position])
def get_element_index(self, locator):
    """Return the element's 0-based index within its parent.

    Comment nodes and empty text nodes are ignored.
    """
    return self.get_number("getElementIndex", [locator])
def is_ordered(self, locator1, locator2):
    """Return whether the two elements share a parent and are ordered siblings in the DOM.

    Two identical elements are not considered ordered.
    """
    return self.get_boolean("isOrdered", [locator1, locator2])
def get_element_position_left(self, locator):
    """Return the horizontal position of an element (locator or the element itself)."""
    return self.get_number("getElementPositionLeft", [locator])
def get_element_position_top(self, locator):
    """Return the vertical position of an element (locator or the element itself)."""
    return self.get_number("getElementPositionTop", [locator])
def get_element_width(self, locator):
    """Return the width of the element identified by *locator*."""
    return self.get_number("getElementWidth", [locator])
def get_element_height(self, locator):
    """Return the height of the element identified by *locator*."""
    return self.get_number("getElementHeight", [locator])
def get_cursor_position(self, locator):
    """Return the text cursor position in the given input element or textarea.

    May be imperfect across browsers: if JavaScript has cleared the
    cursor/selection, the last known position tends to be returned even though
    the cursor is gone (filed as SEL-243). Fails if the element is not an
    input/textarea or holds no cursor.
    """
    return self.get_number("getCursorPosition", [locator])
def get_expression(self, expression):
    """Return *expression* itself.

    Useful because of JavaScript preprocessing; underlies commands such as
    assertExpression and waitForExpression.
    """
    return self.get_string("getExpression", [expression])
def get_xpath_count(self, xpath):
    """Return how many nodes match *xpath* (e.g. "//table" counts the tables).

    Do NOT wrap the expression in count() -- that is done for you.
    """
    return self.get_number("getXpathCount", [xpath])
def assign_id(self, locator, identifier):
    """Temporarily set the "id" attribute of the element found by *locator*.

    Lets you address the element by ID instead of a slow/complicated XPath;
    the ID disappears when the page reloads.
    """
    self.do_command("assignId", [locator, identifier])
def allow_native_xpath(self, allow):
    """Choose between the browser's native XPath engine and the pure-JS library.

    Pass "false" to always use the pure-JavaScript xpath library, which is more
    consistent across browser vendors but much slower than native
    implementations; "true" prefers native XPath when available.
    """
    self.do_command("allowNativeXpath", [allow])
def ignore_attributes_without_value(self, ignore):
    """Tell the non-native xpath engine whether to skip empty-string attributes.

    Ignoring them helps performance in IE but can break xpaths that look for
    an attribute whose value is NOT the empty string (hopefully rare). Only
    affects evaluation with the ajaxslt engine (not "javascript-xpath").
    *ignore* is a boolean: true trades xpath "correctness" for speed, false
    the reverse.
    """
    self.do_command("ignoreAttributesWithoutValue", [ignore])
def wait_for_condition(self, script, timeout):
    """Re-run a JavaScript snippet until it evaluates to "true" or *timeout* ms pass.

    Only the last line's result counts. By default the snippet runs in the
    runner's test window; to reach your application's window use
    ``selenium.browserbot.getCurrentWindow()`` and run your JavaScript there.
    """
    self.do_command("waitForCondition", [script, timeout])
def set_timeout(self, timeout):
    """Set how long (ms) Selenium waits for actions to complete.

    Affects waiting actions such as "open" and the "waitFor*" family.
    The default timeout is 30 seconds.
    """
    self.do_command("setTimeout", [timeout])
def wait_for_page_to_load(self, timeout):
    """Wait up to *timeout* ms for a new page to load.

    A substitute for the JS-API-only "AndWait" suffixes (clickAndWait,
    selectAndWait, typeAndWait, ...). Selenium sets a "newPageLoaded" flag on
    the first sign of a page load and any other command clears it, so call
    this immediately after the command that triggers the load.
    """
    self.do_command("waitForPageToLoad", [timeout])
def wait_for_frame_to_load(self, frameAddress, timeout):
    """Wait up to *timeout* ms for a new frame to load.

    Selenium tracks page/frame loads via a "newPageLoaded" flag; see
    waitForPageToLoad for details. *frameAddress* is the server-side
    FrameAddress.
    """
    self.do_command("waitForFrameToLoad", [frameAddress, timeout])
def get_cookie(self):
    """Return all cookies of the current page under test."""
    return self.get_string("getCookie", [])
def get_cookie_by_name(self, name):
    """Return the value of the cookie named *name*; error if it is not present."""
    return self.get_string("getCookieByName", [name])
def is_cookie_present(self, name):
    """Return True if a cookie named *name* is present, else False."""
    return self.get_boolean("isCookiePresent", [name])
def create_cookie(self, nameValuePair, optionsString):
    """Create a cookie on the current page's path and domain (unless a path is given).

    *nameValuePair* is "name=value". *optionsString* supports 'path',
    'max_age' (seconds) and 'domain' in any order, e.g.
    "path=/path/, max_age=60, domain=.foo.com". A domain that is not a subset
    of the current domain will usually fail.
    """
    self.do_command("createCookie", [nameValuePair, optionsString])
def delete_cookie(self, name, optionsString):
    """Delete the named cookie with the given path and domain.

    Deletion only succeeds with the exact path and domain used to create the
    cookie; a domain that is not a subset of the current domain usually fails.
    Since the original path/domain cannot be discovered at runtime, the
    'recurse' option tries every sub-domain of the current domain with every
    path that is a subset of the current path -- slow, O(n*m) in the number of
    domain dots (n) and path slashes (m). *optionsString* supports 'path',
    'domain' and 'recurse' in any order, e.g.
    "path=/path/, domain=.foo.com, recurse=true".
    """
    self.do_command("deleteCookie", [name, optionsString])
def delete_all_visible_cookies(self):
    """Call deleteCookie with recurse=true on every cookie visible to the current page.

    As noted for deleteCookie, recurse=true can be much slower than deleting
    via a known domain/path.
    """
    self.do_command("deleteAllVisibleCookies", [])
def set_browser_log_level(self, logLevel):
    """Set the browser-side logging threshold; messages below it are discarded.

    *logLevel* is one of "debug", "info", "warn", "error" or "off". To see the
    logs, show the log window in GUI mode or enable browser-side logging in
    Selenium RC.
    """
    self.do_command("setBrowserLogLevel", [logLevel])
def run_script(self, script):
    """Inject *script* as a new "script" tag in the body of the current test window.

    Scripts run this way are often easier to debug than ones executed via
    "getEval". Exceptions thrown inside such tags are NOT managed by Selenium,
    so wrap the script in try/catch if it might throw.
    """
    self.do_command("runScript", [script])
def add_location_strategy(self, strategyName, functionDefinition):
    """Register a custom element-location strategy.

    After defining strategy "foo", click("foo=blah") calls your function with
    the string "blah" and clicks the element it returns (an "Element not
    found" error is raised if it returns null). The function receives three
    arguments -- locator (the user's string), inWindow (the selected window)
    and inDocument (the selected document) -- and must return null when no
    element is found. *strategyName* uses only letters [a-zA-Z], no spaces or
    punctuation; *functionDefinition* is the JavaScript function body, e.g.
    ``return inDocument.getElementById(locator);``.
    """
    self.do_command("addLocationStrategy", [strategyName, functionDefinition])
def capture_entire_page_screenshot(self, filename):
    """Save the entire canvas of the current window to a PNG file.

    Currently only works in Mozilla, and only in chrome mode.  Contrast
    with captureScreenshot, which grabs the OS viewport (whatever is on
    the monitor) and is implemented in the RC only.  Implementation is
    mostly borrowed from the Screengrab! Firefox extension
    (http://www.screengrab.org).

    'filename' is the path the screenshot is persisted to.  No extension
    is appended, missing directories are not created, and an exception
    (possibly from native code) is thrown on failure.
    """
    self.do_command("captureEntirePageScreenshot", [filename])
def set_context(self, context):
    """Write a message to the status bar and the browser-side log.

    'context' is the message to be sent to the browser.
    """
    self.do_command("setContext", [context])
def attach_file(self, fieldLocator, fileLocator):
    """Point a file-upload input field at the file named by fileLocator.

    'fieldLocator' is an element locator for the file input.
    'fileLocator' is a URL for the file.  Selenium RC may first transfer
    the file to the machine driving the browser before attaching it --
    common in grid setups where the RC server is not the machine that
    started the test.  Supported browsers: Firefox ("*chrome") only.
    """
    self.do_command("attachFile", [fieldLocator, fileLocator])
def capture_screenshot(self, filename):
    """Capture a PNG screenshot of the OS viewport to the given file.

    'filename' is the absolute path the PNG is written to, e.g.
    "c:/blah/screenshot.png".
    """
    self.do_command("captureScreenshot", [filename])
def shut_down_selenium_server(self):
    """Kill the running Selenium Server and all browser sessions.

    After this command no further commands can be sent, and the server
    cannot be restarted remotely.  Normally prefer "stop", which only
    terminates the current browser session.
    """
    self.do_command("shutDownSeleniumServer", [])
def key_down_native(self, keycode):
    """Press (without releasing) a key via a native OS keystroke.

    Uses java.awt.Robot for a more faithful simulation of real typing.
    Ignores shiftKeyDown/controlKeyDown/altKeyDown/metaKeyDown settings
    and targets no particular HTML element -- focus the desired element
    before calling.

    'keycode' is an integer java.awt.event.KeyEvent keycode (NOT a
    JavaScript keycode!).
    """
    self.do_command("keyDownNative", [keycode])
def key_up_native(self, keycode):
    """Release a key via a native OS keystroke.

    Uses java.awt.Robot for a more faithful simulation of real typing.
    Ignores shiftKeyDown/controlKeyDown/altKeyDown/metaKeyDown settings
    and targets no particular HTML element -- focus the desired element
    before calling.

    'keycode' is an integer java.awt.event.KeyEvent keycode (NOT a
    JavaScript keycode!).
    """
    self.do_command("keyUpNative", [keycode])
def key_press_native(self, keycode):
    """Press and release a key via a native OS keystroke.

    Uses java.awt.Robot for a more faithful simulation of real typing.
    Ignores shiftKeyDown/controlKeyDown/altKeyDown/metaKeyDown settings
    and targets no particular HTML element -- focus the desired element
    before calling.

    'keycode' is an integer java.awt.event.KeyEvent keycode (NOT a
    JavaScript keycode!).
    """
    self.do_command("keyPressNative", [keycode])
| mit |
richard-willowit/odoo | addons/stock/__manifest__.py | 2 | 2878 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
    'name': 'Inventory Management',
    'version': '1.1',
    'summary': 'Inventory, Logistics, Warehousing',
    'description': "",
    'website': 'https://www.odoo.com/page/warehouse',
    # Modules that must be installed before this one.
    'depends': ['product', 'barcodes', 'web_planner'],
    'category': 'Warehouse',
    'sequence': 13,
    # Demonstration data files (XML and YAML).
    'demo': [
        'data/stock_demo_pre.yml',
        'data/procurement_demo.xml',
        'data/stock_demo.xml',
        'data/stock_orderpoint_demo.xml',
        'data/stock_orderpoint_demo.yml',
        'data/stock_demo.yml',
        'data/stock_location_demo_cpu1.xml',
        'data/stock_location_demo_cpu3.yml',
        'data/stock_quant_demo.xml',
    ],
    # Data files: security first, then menus, reports, wizards and views.
    # NOTE(review): the ordering appears deliberate (e.g. security and menus
    # before the views that reference them) -- preserve it when editing.
    'data': [
        'security/stock_security.xml',
        'security/ir.model.access.csv',
        'views/stock_menu_views.xml',
        'data/stock_traceability_report_data.xml',
        'data/procurement_data.xml',
        'report/report_stock_forecast.xml',
        'report/stock_report_views.xml',
        'report/report_package_barcode.xml',
        'report/report_lot_barcode.xml',
        'report/report_location_barcode.xml',
        'report/report_stockpicking_operations.xml',
        'report/report_deliveryslip.xml',
        'report/report_stockinventory.xml',
        'wizard/stock_change_product_qty_views.xml',
        'wizard/stock_picking_return_views.xml',
        'wizard/stock_scheduler_compute_views.xml',
        'wizard/stock_immediate_transfer_views.xml',
        'wizard/stock_backorder_confirmation_views.xml',
        'views/res_partner_views.xml',
        'views/product_strategy_views.xml',
        'views/stock_incoterms_views.xml',
        'views/stock_production_lot_views.xml',
        'views/stock_picking_views.xml',
        'views/stock_scrap_views.xml',
        'views/stock_inventory_views.xml',
        'views/stock_quant_views.xml',
        'views/stock_location_views.xml',
        'views/stock_warehouse_views.xml',
        'views/stock_move_line_views.xml',
        'views/stock_move_views.xml',
        'views/product_views.xml',
        'views/res_config_settings_views.xml',
        'views/report_stock_traceability.xml',
        'views/stock_template.xml',
        'views/procurement_views.xml',
        'data/default_barcode_patterns.xml',
        'data/stock_data.xml',
        'data/stock_data.yml',
        'data/stock_incoterms_data.xml',
        'data/stock_sequence_data.xml',
        'data/web_planner_data.xml',
    ],
    # Client-side QWeb templates.
    'qweb': [
        'static/src/xml/stock_traceability_report_backend.xml',
    ],
    # Legacy YAML tests.
    'test': [
        'test/stock_users.yml',
        'test/packing.yml',
        'test/packingneg.yml',
        'test/procrule.yml',
        'test/wiseoperator.yml',
    ],
    'installable': True,
    'application': True,
    'auto_install': False,
}
| gpl-3.0 |
seshin/namebench | libnamebench/tk.py | 173 | 13586 | # Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tk user interface implementation for namebench."""
__author__ = 'tstromberg@google.com (Thomas Stromberg)'
import datetime
import os
import Queue
import sys
import threading
import tkFont
# Wildcard imports are evil.
from Tkinter import *
import tkMessageBox
import traceback
import addr_util
import base_ui
import conn_quality
import nameserver_list
import sys_nameservers
import util
# Flag (0/1, later also True) set once we detect that this Tcl/Tk build cannot
# safely be poked from worker threads; checked by AddMsg and UpdateRunState.
THREAD_UNSAFE_TK = 0
# Destination file for the on-disk copy of every status line the UI displays.
LOG_FILE_PATH = util.GenerateOutputFilename('log')
def closedWindowHandler():
  # Invoked when the user closes the main Tk window (see the WM_DELETE_WINDOW
  # protocol binding in MainWindow.__init__): exit the whole process rather
  # than leaving worker threads running.
  print 'Au revoir, mes amis!'
  sys.exit(1)
# Queue used to hand StatusMessage objects from worker threads to the Tk main
# loop; Tk widgets must only be touched from the main thread.
global_message_queue = Queue.Queue()
# Last message enqueued by AddMsg; used there for duplicate suppression.
global_last_message = None
def AddMsg(message, master=None, backup_notifier=None, **kwargs):
  """Add a message to the global queue for output.

  Wraps the message in a StatusMessage, enqueues it, and generates a Tk
  virtual '<<msg>>' event so the main loop wakes up and drains the queue.
  On thread-unsafe Tk builds the event generation raises TclError; in that
  case backup_notifier is called directly instead.

  Args:
    message: text (or exception) to display.
    master: Tk widget used to post the '<<msg>>' event.
    backup_notifier: callable used when Tk turns out to be thread-unsafe.
    **kwargs: extra StatusMessage fields (error, count, total, ...).
  """
  global global_message_queue
  global global_last_message
  global THREAD_UNSAFE_TK

  new_message = StatusMessage(message, **kwargs)
  # Duplicate suppression relies on StatusMessage's (in)equality semantics.
  if new_message != global_last_message:
    global_message_queue.put(new_message)
    if master:
      try:
        master.event_generate('<<msg>>', when='tail')
        global_last_message = new_message
      # Tk thread-safety workaround #1
      except TclError:
        # If we aren't thread safe, we already assume this won't work.
        if not THREAD_UNSAFE_TK:
          print 'First TCL Error:'
          traceback.print_exc()
        try:
          backup_notifier(-1)
          THREAD_UNSAFE_TK = 1
        except:
          print 'Backup notifier failure:'
          traceback.print_exc()
class StatusMessage(object):
  """Messages to be passed to the main thread from children.

  Used to avoid thread issues inherent with Tk.

  BUG FIX: AddMsg suppresses consecutive duplicates with
  `new_message != global_last_message`, but without value-based equality
  two freshly built instances always compare unequal (identity compare),
  so suppression never triggered.  __eq__ and __ne__ are both defined
  because Python 2 does not derive one from the other.
  """

  def __init__(self, message, error=False, count=False, total=False,
               enable_button=None, debug=False):
    self.message = message
    self.error = error
    self.count = count
    self.debug = debug
    self.total = total
    self.enable_button = enable_button

  def _key(self):
    # All fields that define this message's identity for comparison.
    return (self.message, self.error, self.count, self.debug,
            self.total, self.enable_button)

  def __eq__(self, other):
    if not isinstance(other, StatusMessage):
      return NotImplemented
    return self._key() == other._key()

  def __ne__(self, other):
    result = self.__eq__(other)
    if result is NotImplemented:
      return result
    return not result
class WorkerThread(threading.Thread, base_ui.BaseUI):
  """Handle benchmarking and preparation in a separate UI thread.

  All status output goes through msg(), which forwards to AddMsg so that
  the Tk main thread renders it; this thread never touches widgets.
  """

  def __init__(self, supplied_ns, global_ns, regional_ns, options, data_source=None, master=None,
               backup_notifier=None):
    threading.Thread.__init__(self)
    self.SetupDataStructures()
    self.status_callback = self.msg
    self.data_src = data_source
    self.backup_notifier = backup_notifier
    self.include_internal = False
    self.supplied_ns = supplied_ns
    self.global_ns = global_ns
    self.regional_ns = regional_ns
    self.master = master
    self.options = options
    self.resource_dir = os.path.dirname(os.path.dirname(__file__))

  def msg(self, message, **kwargs):
    """Add messages to the main queue."""
    return AddMsg(message, master=self.master, backup_notifier=self.backup_notifier, **kwargs)

  def run(self):
    """Prepare and run the benchmark, reporting failures to the UI."""
    self.msg('Started thread', enable_button=False)
    try:
      self.PrepareTestRecords()
      self.PrepareNameServers()
      self.PrepareBenchmark()
      self.RunAndOpenReports()
    except nameserver_list.OutgoingUdpInterception:
      (exc_type, exception, tb) = sys.exc_info()
      self.msg('Outgoing requests were intercepted!', error=exception)
    except nameserver_list.TooFewNameservers:
      (exc_type, exception, tb) = sys.exc_info()
      self.msg('Too few nameservers to test', error=exception)
    except conn_quality.OfflineConnection:
      (exc_type, exception, tb) = sys.exc_info()
      self.msg('The connection appears to be offline!', error=exception)
    except:
      (exc_type, exception, tb) = sys.exc_info()
      # BUG FIX: print_exc() takes an integer line-count limit as its first
      # argument, not a traceback object; the old print_exc(tb) misused it.
      traceback.print_exc()
      error_msg = '\n'.join(traceback.format_tb(tb)[-4:])
      self.msg(exception, error=error_msg)
    # Always re-enable the Start button, whether or not the run succeeded.
    self.msg(None, enable_button=True)
class NameBenchGui(object):
  """The main GUI: owns the Tk root window and wires up MainWindow."""

  def __init__(self, options, supplied_ns, global_ns, regional_ns, version=None):
    self.options = options
    self.supplied_ns = supplied_ns
    self.global_ns = global_ns
    self.regional_ns = regional_ns
    self.version = version

  def Execute(self):
    # Build the window and register the inter-thread '<<msg>>' handler
    # *before* entering the event loop, then block in mainloop() until
    # the window is closed.
    self.root = Tk()
    app = MainWindow(self.root, self.options, self.supplied_ns, self.global_ns,
                     self.regional_ns, self.version)
    app.DrawWindow()
    self.root.bind('<<msg>>', app.MessageHandler)
    self.root.mainloop()
class MainWindow(Frame, base_ui.BaseUI):
  """The main Tk GUI class.

  Draws the input form, consumes StatusMessage objects posted by the
  worker thread via the global queue, and launches benchmark runs.
  """

  def __init__(self, master, options, supplied_ns, global_ns, regional_ns, version=None):
    """TODO(tstromberg): Remove duplication from NameBenchGui class."""
    Frame.__init__(self)
    self.SetupDataStructures()
    self.master = master
    self.options = options
    self.supplied_ns = supplied_ns
    self.global_ns = global_ns
    self.regional_ns = regional_ns
    self.version = version
    try:
      self.log_file = open(LOG_FILE_PATH, 'w')
    except:
      # On-disk logging is best-effort; the UI keeps working without it.
      print 'Failed to open %s for write' % LOG_FILE_PATH
    # Exit the whole process when the window is closed.
    self.master.protocol('WM_DELETE_WINDOW', closedWindowHandler)

  def UpdateStatus(self, message, count=None, total=None, error=None, debug=False):
    """Update our little status window.

    Args:
      message: text to show; None/empty is silently ignored.
      count: optional progress counter (renders trailing dots when no total).
      total: optional progress total (renders as "[count/total]").
      error: accepted for signature compatibility; not used here.
      debug: if True, write only to stdout/log file, not the status bar.
    """
    if not message:
      return None
    if total:
      state = '%s... [%s/%s]' % (message, count, total)
    elif count:
      state = '%s%s' % (message, '.' * count)
    else:
      state = message

    print '> %s' % str(state)
    try:
      self.log_file.write('%s: %s\r\n' % (datetime.datetime.now(), state))
      self.log_file.flush()
    except:
      # Log file may have failed to open; ignore and keep the UI alive.
      pass
    if not debug:
      # Truncate so long messages do not stretch the window.
      self.status.set(state[0:75])

  def DrawWindow(self):
    """Draws the user interface."""
    # Tk variables backing the form widgets.
    self.nameserver_form = StringVar()
    self.status = StringVar()
    self.query_count = IntVar()
    self.data_source = StringVar()
    self.health_performance = StringVar()
    self.location = StringVar()
    self.use_global = IntVar()
    self.use_regional = IntVar()
    self.use_censor_checks = IntVar()
    self.share_results = IntVar()

    self.master.title('namebench')
    outer_frame = Frame(self.master)
    outer_frame.grid(row=0, padx=16, pady=16)
    inner_frame = Frame(outer_frame, relief=GROOVE, bd=2, padx=12, pady=12)
    inner_frame.grid(row=0, columnspan=2)
    status = Label(outer_frame, text='...', textvariable=self.status)
    status.grid(row=15, sticky=W, column=0)

    # Separator width is platform-dependent so it spans the form on both.
    if sys.platform[:3] == 'win':
      seperator_width = 490
    else:
      seperator_width = 585

    bold_font = tkFont.Font(font=status['font'])
    bold_font['weight'] = 'bold'

    ns_label = Label(inner_frame, text='Nameservers')
    ns_label.grid(row=0, columnspan=2, sticky=W)
    ns_label['font'] = bold_font

    nameservers = Entry(inner_frame, bg='white',
                        textvariable=self.nameserver_form,
                        width=80)
    nameservers.grid(row=1, columnspan=2, sticky=W, padx=4, pady=2)
    # Pre-populate with the nameservers the system currently uses.
    self.nameserver_form.set(', '.join(nameserver_list.InternalNameServers()))

    global_button = Checkbutton(inner_frame,
                                text='Include global DNS providers (Google Public DNS, OpenDNS, UltraDNS, etc.)',
                                variable=self.use_global)
    global_button.grid(row=2, columnspan=2, sticky=W)
    global_button.toggle()  # checked by default

    regional_button = Checkbutton(inner_frame,
                                  text='Include best available regional DNS services',
                                  variable=self.use_regional)
    regional_button.grid(row=3, columnspan=2, sticky=W)
    regional_button.toggle()  # checked by default

    separator = Frame(inner_frame, height=2, width=seperator_width, bd=1, relief=SUNKEN)
    separator.grid(row=4, padx=5, pady=5, columnspan=2)

    ds_label = Label(inner_frame, text='Options')
    ds_label.grid(row=5, column=0, sticky=W)
    ds_label['font'] = bold_font

    censorship_button = Checkbutton(inner_frame, text='Include censorship checks',
                                    variable=self.use_censor_checks)
    censorship_button.grid(row=6, columnspan=2, sticky=W)

    share_button = Checkbutton(inner_frame,
                               text='Upload and share your anonymized results (help speed up the internet!)',
                               variable=self.share_results)
    # Old versions of Tk do not support two-dimensional padding.
    try:
      share_button.grid(row=7, columnspan=2, sticky=W, pady=[0,10])
    except TclError:
      share_button.grid(row=7, columnspan=2, sticky=W)

    loc_label = Label(inner_frame, text='Your location')
    loc_label.grid(row=10, column=0, sticky=W)
    loc_label['font'] = bold_font

    run_count_label = Label(inner_frame, text='Health Check Performance')
    run_count_label.grid(row=10, column=1, sticky=W)
    run_count_label['font'] = bold_font

    # Populate the dropdowns from discovered location and data sources.
    self.DiscoverLocation()
    self.LoadDataSources()
    source_titles = self.data_src.ListSourceTitles()
    left_dropdown_width = max([len(x) for x in source_titles]) - 3

    location_choices = [self.country, '(Other)']
    location = OptionMenu(inner_frame, self.location, *location_choices)
    location.configure(width=left_dropdown_width)
    location.grid(row=11, column=0, sticky=W)
    self.location.set(location_choices[0])

    mode_choices = ['Fast', 'Slow (unstable network)']
    right_dropdown_width = max([len(x) for x in mode_choices]) - 3
    health_performance = OptionMenu(inner_frame, self.health_performance, *mode_choices)
    health_performance.configure(width=right_dropdown_width)
    health_performance.grid(row=11, column=1, sticky=W)
    self.health_performance.set(mode_choices[0])

    ds_label = Label(inner_frame, text='Query Data Source')
    ds_label.grid(row=12, column=0, sticky=W)
    ds_label['font'] = bold_font

    numqueries_label = Label(inner_frame, text='Number of queries')
    numqueries_label.grid(row=12, column=1, sticky=W)
    numqueries_label['font'] = bold_font

    data_source = OptionMenu(inner_frame, self.data_source, *source_titles)
    data_source.configure(width=left_dropdown_width)
    data_source.grid(row=13, column=0, sticky=W)
    self.data_source.set(source_titles[0])

    query_count = Entry(inner_frame, bg='white', textvariable=self.query_count)
    query_count.grid(row=13, column=1, sticky=W, padx=4)
    query_count.configure(width=right_dropdown_width + 6)
    self.query_count.set(self.options.query_count)

    self.button = Button(outer_frame, command=self.StartJob)
    self.button.grid(row=15, sticky=E, column=1, pady=4, padx=1)
    # Toggle once to initialize the button label/state, then show readiness.
    self.UpdateRunState(running=True)
    self.UpdateRunState(running=False)
    self.UpdateStatus('namebench %s is ready!' % self.version)

  def MessageHandler(self, unused_event):
    """Pinged when there is a new message in our queue to handle."""
    while global_message_queue.qsize():
      m = global_message_queue.get()
      if m.error:
        self.ErrorPopup(m.message, m.error)
      elif m.enable_button == False:
        self.UpdateRunState(running=True)
      elif m.enable_button == True:
        self.UpdateRunState(running=False)
      self.UpdateStatus(m.message, count=m.count, total=m.total, error=m.error, debug=m.debug)

  def ErrorPopup(self, title, message):
    # Show a modal error dialog for failures reported by the worker thread.
    print 'Showing popup: %s' % title
    tkMessageBox.showerror(str(title), str(message), master=self.master)

  def UpdateRunState(self, running=True):
    """Update the run state of the window, using nasty threading hacks."""
    global THREAD_UNSAFE_TK
    # try/except blocks added to work around broken Tcl/Tk libraries
    # shipped with Fedora 11 (not thread-safe).
    # See http://code.google.com/p/namebench/issues/detail?id=23'
    if THREAD_UNSAFE_TK:
      return

    if running:
      try:
        self.button.config(state=DISABLED)
        self.button.config(text='Running')
      except TclError:
        THREAD_UNSAFE_TK = True
        self.UpdateStatus('Unable to disable button due to broken Tk library')
      self.UpdateStatus('Running...')
    else:
      try:
        self.button.config(state=NORMAL)
        self.button.config(text='Start Benchmark')
      except TclError:
        pass

  def StartJob(self):
    """Events that get called when the Start button is pressed."""
    self.ProcessForm()
    thread = WorkerThread(self.supplied_ns, self.global_ns, self.regional_ns, self.options,
                          data_source=self.data_src,
                          master=self.master, backup_notifier=self.MessageHandler)
    thread.start()

  def ProcessForm(self):
    """Read form and populate instance variables."""
    self.supplied_ns = addr_util.ExtractIPTuplesFromString(self.nameserver_form.get())
    if not self.use_global.get():
      self.global_ns = []
    if not self.use_regional.get():
      self.regional_ns = []
    if 'Slow' in self.health_performance.get():
      # More health-check threads for unstable networks.
      self.options.health_thread_count = 10
    self.options.query_count = self.query_count.get()
    self.options.input_source = self.data_src.ConvertSourceTitleToType(self.data_source.get())
    self.options.enable_censorship_checks = self.use_censor_checks.get()
    self.options.upload_results = self.share_results.get()
dpnova/cyclone | appskel/signup/modname/views.py | 2 | 11674 | # coding: utf-8
#
$license
import OpenSSL
import cyclone.escape
import cyclone.locale
import cyclone.mail
import cyclone.web
import hashlib
import random
import string
from datetime import datetime
from twisted.internet import defer
from twisted.python import log
from $modname import storage
from $modname.utils import BaseHandler
from $modname.utils import SessionMixin
from $modname.utils import TemplateFields
class IndexHandler(BaseHandler, SessionMixin):
    def get(self):
        """Render the landing page; send logged-in users to the dashboard."""
        if not self.current_user:
            self.render("index.html")
        else:
            self.redirect("/dashboard")
class LangHandler(BaseHandler):
    def get(self, lang_code):
        """Persist a supported UI language in a cookie, then bounce back.

        Redirects to the Referer header when present, otherwise to the
        'next' query argument (default "/").
        """
        if lang_code in cyclone.locale.get_supported_locales():
            self.set_secure_cookie("lang", lang_code, expires_days=20)
        fallback = self.get_argument("next", "/")
        self.redirect(self.request.headers.get("Referer", fallback))
class DashboardHandler(BaseHandler):
    @cyclone.web.authenticated
    def get(self):
        # Only reachable by authenticated users; the decorator redirects
        # anonymous visitors to the login page.
        self.render("dashboard.html")
class AccountHandler(BaseHandler, storage.DatabaseMixin):
    """Lets a signed-in user view and edit their profile (name, password)."""

    @cyclone.web.authenticated
    @storage.DatabaseSafe
    @defer.inlineCallbacks
    def get(self):
        # Look up the current user's row; if it vanished from the database,
        # invalidate the session and start over.
        user = yield storage.users.find_first(
            where=("user_email=%s", self.current_user["email"]))
        if user:
            self.render("account.html",
                        fields=TemplateFields(full_name=user["user_full_name"]))
        else:
            self.clear_current_user()
            self.redirect("/")

    @cyclone.web.authenticated
    @storage.DatabaseSafe
    @defer.inlineCallbacks
    def post(self):
        """Validate and apply profile changes; re-render with error codes."""
        user = yield storage.users.find_first(
            where=("user_email=%s", self.current_user["email"]))
        if not user:
            self.clear_current_user()
            self.redirect("/")
            defer.returnValue(None)

        full_name = self.get_argument("full_name", None)
        f = TemplateFields(full_name=full_name)

        # Optional name change: max 80 chars, only saved when different.
        if full_name:
            full_name = full_name.strip()
            if len(full_name) > 80:
                f["err"] = ["invalid_name"]
                self.render("account.html", fields=f)
                defer.returnValue(None)
            elif full_name != user.user_full_name:
                user.user_full_name = full_name

        # Optional password change: requires the old password (passwd_0),
        # a new one of 3-20 chars (passwd_1) and a matching confirmation
        # (passwd_2).  Passwords are stored as SHA-1 hex digests.
        passwd_0 = self.get_argument("passwd_0", None)
        passwd_1 = self.get_argument("passwd_1", None)
        passwd_2 = self.get_argument("passwd_2", None)
        if passwd_0 and passwd_1:
            if hashlib.sha1(passwd_0).hexdigest() != user.user_passwd:
                f["err"] = ["old_nomatch"]
                self.render("account.html", fields=f)
                defer.returnValue(None)
            elif len(passwd_1) < 3 or len(passwd_1) > 20:
                f["err"] = ["invalid_passwd"]
                self.render("account.html", fields=f)
                defer.returnValue(None)
            elif passwd_1 != passwd_2:
                f["err"] = ["nomatch"]
                self.render("account.html", fields=f)
                defer.returnValue(None)
            else:
                user.user_passwd = hashlib.sha1(passwd_1).hexdigest()
        elif passwd_1:
            # New password supplied without the old one.
            f["err"] = ["old_missing"]
            self.render("account.html", fields=f)
            defer.returnValue(None)

        # Persist only if something actually changed.
        if user.has_changes:
            yield user.save()
            f["updated"] = True
        self.render("account.html", fields=f)
class SignUpHandler(BaseHandler, storage.DatabaseMixin):
    """Creates a pending account and emails a temporary password.

    The temporary password lives in redis under "u:<email>" for one day;
    the account is only written to the database on first sign-in.
    """

    def get(self):
        if self.get_current_user():
            self.redirect("/")
        else:
            self.render("signup.html", fields=TemplateFields())

    @storage.DatabaseSafe
    @defer.inlineCallbacks
    def post(self):
        """Validate the form, stage the signup in redis and send the email."""
        email = self.get_argument("email", None)
        legal = self.get_argument("legal", None)
        f = TemplateFields(email=email, legal=legal)

        # Terms-of-service checkbox must be ticked.
        if legal != "on":
            f["err"] = ["legal"]
            self.render("signup.html", fields=f)
            defer.returnValue(None)

        if not email:
            f["err"] = ["email"]
            self.render("signup.html", fields=f)
            defer.returnValue(None)

        if not self.valid_email(email):
            f["err"] = ["email"]
            self.render("signup.html", fields=f)
            defer.returnValue(None)

        # check if the email is awaiting confirmation
        if (yield self.redis.exists("u:%s" % email)):
            f["err"] = ["exists"]
            self.render("signup.html", fields=f)
            defer.returnValue(None)

        # check if the email exists in the database
        if (yield storage.users.find_first(where=("user_email=%s", email))):
            f["err"] = ["exists"]
            self.render("signup.html", fields=f)
            defer.returnValue(None)

        # create random password (8 alphanumeric chars, OpenSSL-seeded)
        random.seed(OpenSSL.rand.bytes(16))
        passwd = "".join(random.choice(string.letters + string.digits)
                         for x in range(8))

        # store temporary password in redis
        k = "u:%s" % email
        t = yield self.redis.multi()
        t.set(k, passwd)
        t.expire(k, 86400)  # 1 day
        yield t.commit()

        # prepare the confirmation email
        msg = cyclone.mail.Message(
            mime="text/html",
            charset="utf-8",
            to_addrs=[email],
            from_addr=self.settings.email_settings.username,
            subject=self.render_string("signup_email_subject.txt")
                    .replace("\n", "").strip(),
            message=self.render_string("signup_email.html",
                passwd=passwd, ip=self.request.remote_ip,
                date=datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S GMT")))

        try:
            r = yield cyclone.mail.sendmail(self.settings.email_settings, msg)
        except Exception, e:
            # delete password from redis so the address can retry signup
            yield self.redis.delete(k)
            log.err("failed to send signup email to %s: %s" % (email, e))
            f["err"] = ["send"]
            self.render("signup.html", fields=f)
        else:
            log.msg("signup email sent to %s: %s" % (email, r))
            self.render("signup_ok.html", email=email)
class SignInHandler(BaseHandler, storage.DatabaseMixin):
    """Authenticates against either a pending redis signup or the database."""

    def get(self):
        if self.get_current_user():
            self.redirect("/")
        else:
            self.render("signin.html", fields=TemplateFields())

    @storage.DatabaseSafe
    @defer.inlineCallbacks
    def post(self):
        """Check credentials, promote pending signups, set the session."""
        email = self.get_argument("email", "")
        passwd = self.get_argument("passwd", "")
        remember = self.get_argument("remember", "")
        f = TemplateFields(email=email, remember=remember)

        # All validation failures render the same generic "auth" error so
        # the form does not leak which part was wrong.
        if not email:
            f["err"] = ["auth"]
            self.render("signin.html", fields=f)
            defer.returnValue(None)

        if not self.valid_email(email):
            f["err"] = ["auth"]
            self.render("signin.html", fields=f)
            defer.returnValue(None)

        if not passwd:
            f["err"] = ["auth"]
            self.render("signin.html", fields=f)
            defer.returnValue(None)

        user = None
        # check if the user is awaiting confirmation
        k = "u:%s" % email
        pwd = yield self.redis.get(k)
        if pwd:
            if pwd != passwd:
                f["err"] = ["auth"]
                self.render("signin.html", fields=f)
                defer.returnValue(None)
            else:
                # check if the user is already in mysql
                user = yield storage.users.find_first(
                    where=("user_email=%s", email))
                if not user:
                    # create the user in mysql (first successful sign-in
                    # with the temporary password activates the account)
                    user = storage.users.new(user_email=email)
                    user.user_passwd = hashlib.sha1(pwd).hexdigest()
                    user.user_is_active = True
                    yield user.save()
                yield self.redis.delete(k)

        if not user:
            # Regular sign-in path: match email + SHA-1 of the password.
            user = yield storage.users.find_first(
                where=("user_email=%s and user_passwd=%s",
                       email, hashlib.sha1(passwd).hexdigest()))
            if not user:
                f["err"] = ["auth"]
                self.render("signin.html", fields=f)
                defer.returnValue(None)

        # always update the lang cookie
        if self.locale.code in cyclone.locale.get_supported_locales():
            self.set_secure_cookie("lang", self.locale.code, expires_days=20)

        # set session cookie ("remember me" keeps it for 15 days)
        self.set_current_user(email=email,
                              expires_days=15 if remember else None)
        self.redirect("/")
class SignOutHandler(BaseHandler):
    @cyclone.web.authenticated
    def get(self):
        # Drop the session cookie and send the user back to the front page.
        self.clear_current_user()
        self.redirect("/")
class PasswdHandler(BaseHandler, storage.DatabaseMixin):
    """Password reset: emails a temporary password stored in redis."""

    def get(self):
        if self.get_current_user():
            self.redirect("/")
        else:
            self.render("passwd.html", fields=TemplateFields())

    @storage.DatabaseSafe
    @defer.inlineCallbacks
    def post(self):
        """Validate the address, stage a temporary password, email it."""
        email = self.get_argument("email", None)
        f = TemplateFields(email=email)

        if not email:
            f["err"] = ["email"]
            self.render("passwd.html", fields=f)
            defer.returnValue(None)

        if not self.valid_email(email):
            f["err"] = ["email"]
            self.render("passwd.html", fields=f)
            defer.returnValue(None)

        k = "u:%s" % email
        # check if the user exists in redis, or mysql
        if (yield self.redis.exists(k)):
            # A signup or reset is already pending for this address.
            f["err"] = ["pending"]
            self.render("passwd.html", fields=f)
            defer.returnValue(None)
        elif not (yield storage.users.find_first(
                where=("user_email=%s", email))):
            f["err"] = ["notfound"]
            self.render("passwd.html", fields=f)
            defer.returnValue(None)

        # create temporary password and store in redis
        random.seed(OpenSSL.rand.bytes(16))
        passwd = "".join(random.choice(string.letters + string.digits)
                         for x in range(8))
        t = yield self.redis.multi()
        t.set(k, passwd)
        t.expire(k, 86400)  # 1 day
        yield t.commit()

        # prepare the confirmation email
        msg = cyclone.mail.Message(
            mime="text/html",
            charset="utf-8",
            to_addrs=[email],
            from_addr=self.settings.email_settings.username,
            subject=self.render_string("passwd_email_subject.txt")
                    .replace("\n", "").strip(),
            message=self.render_string("passwd_email.html",
                passwd=passwd, ip=self.request.remote_ip,
                date=datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S GMT")))

        try:
            r = yield cyclone.mail.sendmail(self.settings.email_settings, msg)
        except Exception, e:
            # do not delete from redis
            # yield self.redis.delete(k)
            log.err("failed to send passwd email to %s: %s" % (email, e))
            f["err"] = ["send"]
            self.render("passwd.html", fields=f)
        else:
            log.msg("passwd email sent to %s: %s" % (email, r))
            self.render("passwd_ok.html", email=email)
| apache-2.0 |
mgamer/gyp | test/win/gyptest-cl-optimizations.py | 247 | 3416 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Make sure optimization settings are extracted properly.
"""
import TestGyp
import sys
# The whole test only applies to MSVC toolchains, so it is gated on Windows.
if sys.platform == 'win32':
  test = TestGyp.TestGyp(formats=['ninja'])

  CHDIR = 'compiler-flags'
  test.run_gyp('optimizations.gyp', chdir=CHDIR)

  # It's hard to map flags to output contents in a non-fragile way (especially
  # handling both 2008/2010), so just verify the correct ninja command line
  # contents.

  # Optimization level settings (/Od, /O1, /O2, /Ox).
  ninja_file = test.built_file_path('obj/test_opt_off.ninja', chdir=CHDIR)
  test.must_contain(ninja_file, 'cflags = /Od')

  ninja_file = test.built_file_path('obj/test_opt_lev_size.ninja', chdir=CHDIR)
  test.must_contain(ninja_file, 'cflags = /O1')

  ninja_file = test.built_file_path('obj/test_opt_lev_speed.ninja', chdir=CHDIR)
  test.must_contain(ninja_file, 'cflags = /O2')

  ninja_file = test.built_file_path('obj/test_opt_lev_max.ninja', chdir=CHDIR)
  test.must_contain(ninja_file, 'cflags = /Ox')

  ninja_file = test.built_file_path('obj/test_opt_unset.ninja', chdir=CHDIR)
  test.must_not_contain(ninja_file, '/Od')
  test.must_not_contain(ninja_file, '/O1')
  test.must_not_contain(ninja_file, '/Ox')
  # Set by default if none specified.
  test.must_contain(ninja_file, '/O2')

  # Frame-pointer omission ("fpo"): /Oy vs /Oy-.
  ninja_file = test.built_file_path('obj/test_opt_fpo.ninja', chdir=CHDIR)
  test.must_contain(ninja_file, '/Oy')
  test.must_not_contain(ninja_file, '/Oy-')

  ninja_file = test.built_file_path('obj/test_opt_fpo_off.ninja', chdir=CHDIR)
  test.must_contain(ninja_file, '/Oy-')

  # Intrinsic functions: /Oi vs /Oi-.
  ninja_file = test.built_file_path('obj/test_opt_intrinsic.ninja',
                                    chdir=CHDIR)
  test.must_contain(ninja_file, '/Oi')
  test.must_not_contain(ninja_file, '/Oi-')

  ninja_file = test.built_file_path('obj/test_opt_intrinsic_off.ninja',
                                    chdir=CHDIR)
  test.must_contain(ninja_file, '/Oi-')

  # Inline expansion levels: /Ob0 (off), /Ob1 (manual), /Ob2 (auto).
  ninja_file = test.built_file_path('obj/test_opt_inline_off.ninja',
                                    chdir=CHDIR)
  test.must_contain(ninja_file, '/Ob0')

  ninja_file = test.built_file_path('obj/test_opt_inline_manual.ninja',
                                    chdir=CHDIR)
  test.must_contain(ninja_file, '/Ob1')

  ninja_file = test.built_file_path('obj/test_opt_inline_auto.ninja',
                                    chdir=CHDIR)
  test.must_contain(ninja_file, '/Ob2')

  # Favor size (/Os) vs favor speed (/Ot) vs neither.
  ninja_file = test.built_file_path('obj/test_opt_neither.ninja',
                                    chdir=CHDIR)
  test.must_not_contain(ninja_file, '/Os')
  test.must_not_contain(ninja_file, '/Ot')

  ninja_file = test.built_file_path('obj/test_opt_size.ninja',
                                    chdir=CHDIR)
  test.must_contain(ninja_file, '/Os')

  ninja_file = test.built_file_path('obj/test_opt_speed.ninja',
                                    chdir=CHDIR)
  test.must_contain(ninja_file, '/Ot')

  # Whole-program optimization ("wpo"): /GL.
  ninja_file = test.built_file_path('obj/test_opt_wpo.ninja',
                                    chdir=CHDIR)
  test.must_contain(ninja_file, '/GL')

  # String pooling ("sp"): /GF on and off.
  ninja_file = test.built_file_path('obj/test_opt_sp.ninja',
                                    chdir=CHDIR)
  test.must_contain(ninja_file, '/GF')

  ninja_file = test.built_file_path('obj/test_opt_sp_off.ninja',
                                    chdir=CHDIR)
  test.must_not_contain(ninja_file, '/GF')

  # Fiber-safe optimizations ("fso"): /GT on and off.
  ninja_file = test.built_file_path('obj/test_opt_fso.ninja',
                                    chdir=CHDIR)
  test.must_contain(ninja_file, '/GT')

  ninja_file = test.built_file_path('obj/test_opt_fso_off.ninja',
                                    chdir=CHDIR)
  test.must_not_contain(ninja_file, '/GT')

  test.pass_test()
| bsd-3-clause |
erijo/py-svndump | svndump/record.py | 1 | 5100 | # Copyright (c) 2012 Erik Johansson <erik@ejohansson.se>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
from .section import *
class Record(object):
    """Base class for the records that make up an svndump stream.

    A record is a header section optionally followed by type-specific
    content; subclasses add properties and body handling.
    """

    def __init__(self, headers):
        super(Record, self).__init__()
        self.headers = headers

    def discard(self):
        """Discard this record; the base implementation does nothing."""
        pass

    def write(self, stream):
        """Serialize this record's headers to the given dump stream."""
        self.headers.write(stream)

    @staticmethod
    def read(stream):
        """Read the next record from the stream, dispatching on headers.

        Returns None at end of stream; otherwise an instance of the
        record subclass matching the distinguishing header.
        """
        headers = HeaderSection.read(stream)
        if headers is None:
            return None

        # Probe the distinguishing headers in the same precedence order
        # as the original if/elif chain.
        dispatch = (
            (NodeRecord.NODE_PATH_HEADER, NodeRecord),
            (RevisionRecord.REVISION_NUMBER_HEADER, RevisionRecord),
            (VersionStampRecord.VERSION_HEADER, VersionStampRecord),
            (UuidRecord.UUID_HEADER, UuidRecord),
        )
        for marker, record_type in dispatch:
            if marker in headers:
                return record_type.read(headers, stream)

        stream.error("unknown record")
class VersionStampRecord(Record):
    """Record carrying the dump format version stamp."""

    # Distinguishing header used by Record.read() for dispatch.
    VERSION_HEADER = "SVN-fs-dump-format-version"

    def __init__(self, headers):
        super(VersionStampRecord, self).__init__(headers)

    @staticmethod
    def read(headers, stream):
        # The version stamp has no body beyond its headers.
        return VersionStampRecord(headers)
class UuidRecord(Record):
    """Record carrying the repository UUID."""

    # Distinguishing header used by Record.read() for dispatch.
    UUID_HEADER = "UUID"

    def __init__(self, headers):
        super(UuidRecord, self).__init__(headers)

    @staticmethod
    def read(headers, stream):
        # The UUID record has no body beyond its headers.
        return UuidRecord(headers)
class RevisionRecord(Record):
    """Record starting a new revision; its only content is a property section."""

    REVISION_NUMBER_HEADER = "Revision-number"
    PROP_CONTENT_LENGTH = "Prop-content-length"
    CONTENT_LENGTH = "Content-length"

    def __init__(self, headers, properties):
        super(RevisionRecord, self).__init__(headers)
        # PropertySection holding this revision's properties.
        self.properties = properties

    def write(self, stream):
        """Refresh both length headers, then emit headers, properties and a
        trailing blank line."""
        length = self.properties.dump_length()
        # The only content of a revision record is its property section, so
        # Prop-content-length and Content-length carry the same value.
        for header in (self.PROP_CONTENT_LENGTH, self.CONTENT_LENGTH):
            self.headers[header] = length
        super(RevisionRecord, self).write(stream)
        self.properties.write(stream)
        stream.writeline()

    @staticmethod
    def read(headers, stream):
        """Read the property section that follows the headers."""
        props = PropertySection.read(stream)
        return RevisionRecord(headers, props)
class NodeRecord(Record):
    """Record describing a change to a single node (file or directory).

    A node record may carry a property section and/or a text content
    section; the corresponding length headers are recomputed on write.
    """

    # Distinguishing header used by Record.read() for dispatch.
    NODE_PATH_HEADER = "Node-path"
    NODE_KIND = "Node-kind"
    NODE_ACTION = "Node-action"
    NODE_COPYFROM_REV = "Node-copyfrom-rev"
    NODE_COPYFROM_PATH = "Node-copyfrom-path"
    TEXT_COPY_SOURCE_MD5 = "Text-copy-source-md5"
    TEXT_CONTENT_MD5 = "Text-content-md5"
    TEXT_CONTENT_LENGTH = "Text-content-length"
    PROP_CONTENT_LENGTH = "Prop-content-length"
    CONTENT_LENGTH = "Content-length"

    # New in version 3
    TEXT_DELTA = "Text-delta"
    PROP_DELTA = "Prop-delta"
    TEXT_DELTA_BASE_MD5 = "Text-delta-base-md5"
    TEXT_DELTA_BASE_SHA1 = "Text-delta-base-sha1"
    TEXT_COPY_SOURCE_SHA1 = "Text-copy-source-sha1"
    TEXT_CONTENT_SHA1 = "Text-content-sha1"

    def __init__(self, headers, properties, content):
        super(NodeRecord, self).__init__(headers)
        # PropertySection, or None when the record has no property block.
        self.properties = properties
        # Content object, or None when the record has no text content.
        self.content = content

    def discard(self):
        """Skip over any unread text content attached to this record."""
        if self.content is not None:
            self.content.discard()

    def write(self, stream):
        """Recompute the length headers, then write headers, properties,
        content and the two trailing blank lines."""
        prop_length = 0
        if self.properties is not None:
            prop_length = self.properties.dump_length()
            self.headers[self.PROP_CONTENT_LENGTH] = prop_length
        text_length = 0
        if self.content is not None:
            text_length = self.content.dump_length()
            self.headers[self.TEXT_CONTENT_LENGTH] = text_length
        if self.properties is not None or self.content is not None:
            # Content-length covers both sections combined.
            self.headers[self.CONTENT_LENGTH] = prop_length + text_length
        super(NodeRecord, self).write(stream)
        if self.properties is not None:
            self.properties.write(stream)
        if self.content is not None:
            self.content.write(stream)
        stream.writeline()
        stream.writeline()

    @staticmethod
    def read(headers, stream):
        """Read the property/content sections indicated by the headers."""
        properties = None
        if NodeRecord.PROP_CONTENT_LENGTH in headers:
            properties = PropertySection.read(stream)
        content = None
        if NodeRecord.TEXT_CONTENT_LENGTH in headers:
            content = Content.read(
                stream, headers[NodeRecord.TEXT_CONTENT_LENGTH])
        return NodeRecord(headers, properties, content)
| gpl-3.0 |
Aaron1992/shadowsocks-final | shadowsocks/local.py | 1015 | 2248 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2012-2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import sys
import os
import logging
import signal
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '../'))
from shadowsocks import shell, daemon, eventloop, tcprelay, udprelay, asyncdns
def main():
    """Entry point for the shadowsocks local client: load the config, wire up
    the TCP/UDP relays and DNS resolver, install signal handlers and run the
    event loop until shutdown."""
    shell.check_python()

    # py2exe workaround: when frozen, chdir to the executable's directory.
    if hasattr(sys, "frozen") and sys.frozen in \
            ("windows_exe", "console_exe"):
        exe_dir = os.path.dirname(os.path.abspath(sys.executable))
        os.chdir(exe_dir)

    config = shell.get_config(True)
    daemon.daemon_exec(config)

    try:
        logging.info("starting local at %s:%d" %
                     (config['local_address'], config['local_port']))

        dns_resolver = asyncdns.DNSResolver()
        tcp_server = tcprelay.TCPRelay(config, dns_resolver, True)
        udp_server = udprelay.UDPRelay(config, dns_resolver, True)
        loop = eventloop.EventLoop()
        for component in (dns_resolver, tcp_server, udp_server):
            component.add_to_loop(loop)

        def on_quit_signal(signum, _):
            # Graceful shutdown: close the relays on the next loop tick.
            logging.warn('received SIGQUIT, doing graceful shutting down..')
            tcp_server.close(next_tick=True)
            udp_server.close(next_tick=True)
        # SIGQUIT is not available on all platforms; fall back to SIGTERM.
        signal.signal(getattr(signal, 'SIGQUIT', signal.SIGTERM),
                      on_quit_signal)

        def on_interrupt(signum, _):
            sys.exit(1)
        signal.signal(signal.SIGINT, on_interrupt)

        daemon.set_user(config.get('user', None))
        loop.run()
    except Exception as e:
        shell.print_exception(e)
        sys.exit(1)


if __name__ == '__main__':
    main()
| apache-2.0 |
miguelinux/vbox | src/VBox/Additions/common/crOpenGL/getprocaddress.py | 10 | 3625 | # Copyright (c) 2001, Stanford University
# All rights reserved.
#
# See the file LICENSE.txt for information on redistributing this software.
import sys
import apiutil


# Emit the copyright header for the generated C file.
apiutil.CopyrightC()

# Fixed preamble of the generated crGetProcAddress() translation unit.
print """
/* DO NOT EDIT - THIS FILE GENERATED BY THE getprocaddress.py SCRIPT */
#include "chromium.h"
#include "cr_error.h"
#include "cr_string.h"
#include "cr_version.h"
#include "stub.h"
#include "dri_glx.h"
#if defined(VBOXOGL_DRI) || defined(VBOXOGL_FAKEDRI)
#include "cr_gl.h"
#include "fakedri_drv.h"
#endif
struct name_address {
const char *name;
CR_PROC address;
};
static struct name_address functions[] = {
"""

# One {name, address} table entry per GL entry point from the API spec
# (sys.argv[1] is the directory containing APIspec.txt).
keys = apiutil.GetAllFunctions(sys.argv[1]+"/APIspec.txt")
for func_name in keys:
    if "Chromium" == apiutil.Category(func_name):
        continue
    if "VBox" == apiutil.Category(func_name):
        continue
    if func_name == "BoundsInfoCR":
        continue
    if "GL_chromium" == apiutil.Category(func_name):
        pass #continue
    # Entries belonging to an optional category are wrapped in #ifdef CR_<cat>.
    wrap = apiutil.GetCategoryWrapper(func_name)
    name = "gl" + func_name
    address = "VBOXGLTAG(gl" + func_name + ")"
    if wrap:
        print '#ifdef CR_%s' % wrap
    print '\t{ "%s", (CR_PROC) %s },' % (name, address)
    if wrap:
        print '#endif'

print "\t/* Chromium binding/glue functions */"

# Chromium-internal glue entry points (cr* prefix), minus a few special cases.
for func_name in keys:
    if (func_name == "Writeback" or
        func_name == "BoundsInfoCR" or
        func_name == "GetUniformsLocations" or
        func_name == "GetAttribsLocations"):
        continue
    if apiutil.Category(func_name) == "Chromium":
        print '\t{ "cr%s", (CR_PROC) cr%s },' % (func_name, func_name)

# Table terminator plus the crGetProcAddress() lookup implementation.
print """
{ NULL, NULL }
};
CR_PROC CR_APIENTRY crGetProcAddress( const char *name )
{
int i;
stubInit();
for (i = 0; functions[i].name; i++) {
if (crStrcmp(name, functions[i].name) == 0) {
return functions[i].address;
}
}
#define GLXAPI_ENTRY(Func) if (!crStrcmp(name, "glX"#Func)) return (CR_PROC) &VBOXGLXENTRYTAG(glX##Func);
#include "fakedri_glxfuncsList.h"
#undef GLXAPI_ENTRY
/*CR_EXT_texture_from_pixmap*/
if (!crStrcmp( name, "glXBindTexImageEXT" )) return (CR_PROC) VBOXGLXTAG(glXBindTexImageEXT);
if (!crStrcmp( name, "glXReleaseTexImageEXT" )) return (CR_PROC) VBOXGLXTAG(glXReleaseTexImageEXT);
#if defined(Linux) && defined(CR_EXT_framebuffer_blit)
/* Hacky way to make gnome3 happy on ubuntu 11.04, even though glBlitFramebuffer is part of OpenGL 3.0 spec,
* it expects to find glBlitFramebuffer and not glBlitFramebufferEXT after checking for EXT_framebuffer_blit support.
* Untill 3.0 support, it's better to go this way instead of adding an alias to src/VBox/GuestHost/OpenGL/glapi_parser/apispec.txt.
*/
if (!crStrcmp(name, "glBlitFramebuffer")) return crGetProcAddress("glBlitFramebufferEXT");
#endif
if (name) crDebug("Returning NULL for %s", name);
return NULL;
}
"""

# XXX should crGetProcAddress really handle WGL/GLX functions???
# NOTE(review): print_foo is assigned but never printed anywhere in this
# script — apparently retained only as disabled reference text.
print_foo = """
/* As these are Windows specific (i.e. wgl), define these now.... */
#ifdef WINDOWS
{
wglGetExtensionsStringEXTFunc_t wglGetExtensionsStringEXT = NULL;
wglChoosePixelFormatFunc_t wglChoosePixelFormatEXT = NULL;
wglGetPixelFormatAttribivEXTFunc_t wglGetPixelFormatAttribivEXT = NULL;
wglGetPixelFormatAttribfvEXTFunc_t wglGetPixelFormatAttribfvEXT = NULL;
if (!crStrcmp( name, "wglGetExtensionsStringEXT" )) return (CR_PROC) wglGetExtensionsStringEXT;
if (!crStrcmp( name, "wglChoosePixelFormatEXT" )) return (CR_PROC) wglChoosePixelFormatEXT;
if (!crStrcmp( name, "wglGetPixelFormatAttribivEXT" )) return (CR_PROC) wglGetPixelFormatAttribivEXT;
if (!crStrcmp( name, "wglGetPixelFormatAttribfvEXT" )) return (CR_PROC) wglGetPixelFormatAttribfvEXT;
}
#endif
"""
| gpl-2.0 |
taaviteska/django | django/core/checks/model_checks.py | 72 | 6183 | import inspect
import types
from itertools import chain
from django.apps import apps
from django.core.checks import Error, Tags, register
@register(Tags.models)
def check_all_models(app_configs=None, **kwargs):
    """Run ``model.check()`` on every registered model (or only the models of
    the given app configs) and collect the resulting messages.

    A model whose ``check`` attribute is no longer a bound class method has
    been shadowed by something else; report that as models.E020 instead of
    calling it.
    """
    if app_configs is None:
        models = apps.get_models()
    else:
        models = chain.from_iterable(
            app_config.get_models() for app_config in app_configs)
    errors = []
    for model in models:
        if inspect.ismethod(model.check):
            errors.extend(model.check(**kwargs))
        else:
            message = (
                "The '%s.check()' class method is currently overridden by %r."
                % (model.__name__, model.check)
            )
            errors.append(Error(message, obj=model, id='models.E020'))
    return errors
def _check_lazy_references(apps, ignore=None):
    """
    Ensure all lazy (i.e. string) model references have been resolved.

    Lazy references are used in various places throughout Django, primarily in
    related fields and model signals. Identify those common cases and provide
    more helpful error messages for them.

    The ignore parameter is used by StateApps to exclude swappable models from
    this check.
    """
    # Any key left in apps._pending_operations never had its model registered.
    pending_models = set(apps._pending_operations) - (ignore or set())

    # Short circuit if there aren't any errors.
    if not pending_models:
        return []

    from django.db.models import signals
    # Reverse map: ModelSignal instance -> its attribute name in the module,
    # used to render a readable signal name in error messages.
    model_signals = {
        signal: name for name, signal in vars(signals).items()
        if isinstance(signal, signals.ModelSignal)
    }

    def extract_operation(obj):
        """
        Take a callable found in Apps._pending_operations and identify the
        original callable passed to Apps.lazy_model_operation(). If that
        callable was a partial, return the inner, non-partial function and
        any arguments and keyword arguments that were supplied with it.

        obj is a callback defined locally in Apps.lazy_model_operation() and
        annotated there with a `func` attribute so as to imitate a partial.
        """
        operation, args, keywords = obj, [], {}
        while hasattr(operation, 'func'):
            # The or clauses are redundant but work around a bug (#25945) in
            # functools.partial in Python <= 3.5.1.
            args.extend(getattr(operation, 'args', []) or [])
            keywords.update(getattr(operation, 'keywords', {}) or {})
            operation = operation.func
        return operation, args, keywords

    def app_model_error(model_key):
        # Readable reason why (app_label, modelname) was never resolved:
        # either the app lacks the model, or the app isn't installed at all.
        try:
            apps.get_app_config(model_key[0])
            model_error = "app '%s' doesn't provide model '%s'" % model_key
        except LookupError:
            model_error = "app '%s' isn't installed" % model_key[0]
        return model_error

    # Here are several functions which return CheckMessage instances for the
    # most common usages of lazy operations throughout Django. These functions
    # take the model that was being waited on as an (app_label, modelname)
    # pair, the original lazy function, and its positional and keyword args as
    # determined by extract_operation().

    def field_error(model_key, func, args, keywords):
        error_msg = (
            "The field %(field)s was declared with a lazy reference "
            "to '%(model)s', but %(model_error)s."
        )
        params = {
            'model': '.'.join(model_key),
            'field': keywords['field'],
            'model_error': app_model_error(model_key),
        }
        return Error(error_msg % params, obj=keywords['field'], id='fields.E307')

    def signal_connect_error(model_key, func, args, keywords):
        error_msg = (
            "%(receiver)s was connected to the '%(signal)s' signal with a "
            "lazy reference to the sender '%(model)s', but %(model_error)s."
        )
        receiver = args[0]
        # The receiver is either a function or an instance of class
        # defining a `__call__` method.
        if isinstance(receiver, types.FunctionType):
            description = "The function '%s'" % receiver.__name__
        elif isinstance(receiver, types.MethodType):
            description = "Bound method '%s.%s'" % (receiver.__self__.__class__.__name__, receiver.__name__)
        else:
            description = "An instance of class '%s'" % receiver.__class__.__name__
        signal_name = model_signals.get(func.__self__, 'unknown')
        params = {
            'model': '.'.join(model_key),
            'receiver': description,
            'signal': signal_name,
            'model_error': app_model_error(model_key),
        }
        return Error(error_msg % params, obj=receiver.__module__, id='signals.E001')

    def default_error(model_key, func, args, keywords):
        # Fallback message for lazy operations not listed in known_lazy.
        error_msg = "%(op)s contains a lazy reference to %(model)s, but %(model_error)s."
        params = {
            'op': func,
            'model': '.'.join(model_key),
            'model_error': app_model_error(model_key),
        }
        return Error(error_msg % params, obj=func, id='models.E022')

    # Maps common uses of lazy operations to corresponding error functions
    # defined above. If a key maps to None, no error will be produced.
    # default_error() will be used for usages that don't appear in this dict.
    known_lazy = {
        ('django.db.models.fields.related', 'resolve_related_class'): field_error,
        ('django.db.models.fields.related', 'set_managed'): None,
        ('django.dispatch.dispatcher', 'connect'): signal_connect_error,
    }

    def build_error(model_key, func, args, keywords):
        key = (func.__module__, func.__name__)
        error_fn = known_lazy.get(key, default_error)
        return error_fn(model_key, func, args, keywords) if error_fn else None

    # One message per pending operation; sorted for deterministic output.
    return sorted(filter(None, (
        build_error(model_key, *extract_operation(func))
        for model_key in pending_models
        for func in apps._pending_operations[model_key]
    )), key=lambda error: error.msg)
@register(Tags.models)
def check_lazy_references(app_configs=None, **kwargs):
    """System check entry point: report unresolved lazy model references.

    app_configs is accepted for the check-framework signature but unused;
    this check is inherently global.
    """
    return _check_lazy_references(apps)
| bsd-3-clause |
izapolsk/integration_tests | cfme/utils/appliance/services.py | 1 | 3411 | import attr
from cfme.utils.appliance.plugin import AppliancePlugin
from cfme.utils.appliance.plugin import AppliancePluginException
from cfme.utils.log import logger_wrap
from cfme.utils.quote import quote
from cfme.utils.wait import wait_for
class SystemdException(AppliancePluginException):
    """Raised when a systemctl command exits with an unexpected status."""
    pass
@attr.s
class SystemdService(AppliancePlugin):
    """Manage one systemd unit on the appliance via ``systemctl`` over SSH."""

    # Name of the systemd unit this instance controls.
    unit_name = attr.ib(type=str)

    @logger_wrap('SystemdService command runner: {}')
    def _run_service_command(
            self,
            command,
            expected_exit_code=None,
            unit_name=None,
            log_callback=None
    ):
        """Run ``systemctl <command> <unit>`` and optionally verify the exit code.

        Args:
            command: systemctl verb (stop, start, restart, etc)
            expected_exit_code: when given, raise unless the command exits
                with this code
            unit_name: optional unit override, defaults to self.unit_name
            log_callback: logger to log against

        Raises:
            SystemdException: When expected_exit_code is not matched
        """
        target = unit_name if unit_name is not None else self.unit_name
        with self.appliance.ssh_client as ssh:
            cmd = 'systemctl {} {}'.format(quote(command), quote(target))
            log_callback('Running {}'.format(cmd))
            result = ssh.run_command(cmd,
                                     container=self.appliance.ansible_pod_name)
            if expected_exit_code is not None and result.rc != expected_exit_code:
                # TODO: Bring back address
                msg = 'Failed to {} {}\nError: {}'.format(
                    command, self.unit_name, result.output)
                if log_callback:
                    log_callback(msg)
                else:
                    self.logger.error(msg)
                raise SystemdException(msg)
            return result

    def stop(self, log_callback=None):
        """Stop the unit, raising unless systemctl exits 0."""
        return self._run_service_command(
            'stop', expected_exit_code=0, log_callback=log_callback)

    def start(self, log_callback=None):
        """Start the unit, raising unless systemctl exits 0."""
        return self._run_service_command(
            'start', expected_exit_code=0, log_callback=log_callback)

    def restart(self, log_callback=None):
        """Restart the unit, raising unless systemctl exits 0."""
        return self._run_service_command(
            'restart', expected_exit_code=0, log_callback=log_callback)

    def enable(self, log_callback=None):
        """Enable the unit, raising unless systemctl exits 0."""
        return self._run_service_command(
            'enable', expected_exit_code=0, log_callback=log_callback)

    @property
    def enabled(self):
        """True when ``systemctl is-enabled`` exits 0."""
        return self._run_service_command('is-enabled').rc == 0

    @property
    def is_active(self):
        """True when ``systemctl is-active`` exits 0."""
        return self._run_service_command('is-active').rc == 0

    @property
    def running(self):
        """True when ``systemctl status`` exits 0."""
        return self._run_service_command("status").rc == 0

    def wait_for_running(self, timeout=600):
        """Poll ``running`` every 5 seconds until true or timeout expires."""
        state, _ = wait_for(
            lambda: self.running,
            num_sec=timeout,
            fail_condition=False,
            delay=5,
        )
        return state

    def daemon_reload(self, log_callback=None):
        """Call daemon-reload, no unit name for this"""
        return self._run_service_command(
            command='daemon-reload',
            expected_exit_code=0,
            unit_name='',
            log_callback=log_callback
        )
| gpl-2.0 |
vmthunder/nova | nova/debugger.py | 18 | 3021 | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# NOTE(markmc): this is imported before monkey patching in nova.cmd
# so we avoid extra imports here
import sys
def enabled():
    """Return True when both remote-debug CLI flags are present in sys.argv."""
    required_flags = ('--remote_debug-host', '--remote_debug-port')
    return all(flag in sys.argv for flag in required_flags)
def register_cli_opts():
    """Register the 'remote_debug' host/port CLI options on the global CONF.

    oslo.config is imported locally because this module is imported before
    monkey patching (see the note at the top of the module).
    """
    from oslo.config import cfg

    cli_opts = [
        cfg.StrOpt('host',
                   help='Debug host (IP or name) to connect. Note '
                   'that using the remote debug option changes how '
                   'Nova uses the eventlet library to support async IO. '
                   'This could result in failures that do not occur '
                   'under normal operation. Use at your own risk.'),

        cfg.IntOpt('port',
                   help='Debug port to connect. Note '
                   'that using the remote debug option changes how '
                   'Nova uses the eventlet library to support async IO. '
                   'This could result in failures that do not occur '
                   'under normal operation. Use at your own risk.')
    ]

    # Options live in the [remote_debug] group.
    cfg.CONF.register_cli_opts(cli_opts, 'remote_debug')
def init():
    """Attach to a remote pydev debugger if host and port were configured.

    Silently returns when the remote_debug options were never registered or
    when either value is unset.
    """
    from oslo.config import cfg
    CONF = cfg.CONF

    # NOTE(markmc): gracefully handle the CLI options not being registered
    if 'remote_debug' not in CONF:
        return

    if not (CONF.remote_debug.host and CONF.remote_debug.port):
        return

    from nova.i18n import _
    from nova.openstack.common import log as logging
    LOG = logging.getLogger(__name__)

    LOG.debug('Listening on %(host)s:%(port)s for debug connection',
              {'host': CONF.remote_debug.host,
               'port': CONF.remote_debug.port})

    # pydevd ships either inside the 'pydev' package or as a top-level
    # module depending on the installation; try both.
    try:
        from pydev import pydevd
    except ImportError:
        import pydevd

    pydevd.settrace(host=CONF.remote_debug.host,
                    port=CONF.remote_debug.port,
                    stdoutToServer=False,
                    stderrToServer=False)

    LOG.warn(_('WARNING: Using the remote debug option changes how '
               'Nova uses the eventlet library to support async IO. This '
               'could result in failures that do not occur under normal '
               'operation. Use at your own risk.'))
| apache-2.0 |
mukulsoni/android_kernel_samsung_ms013g-G4SWA | tools/perf/python/twatch.py | 7370 | 1334 | #! /usr/bin/python
# -*- python -*-
# -*- coding: utf-8 -*-
# twatch - Experimental use of the perf python interface
# Copyright (C) 2011 Arnaldo Carvalho de Melo <acme@redhat.com>
#
# This application is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 2.
#
# This application is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
import perf
def main():
cpus = perf.cpu_map()
threads = perf.thread_map()
evsel = perf.evsel(task = 1, comm = 1, mmap = 0,
wakeup_events = 1, watermark = 1,
sample_id_all = 1,
sample_type = perf.SAMPLE_PERIOD | perf.SAMPLE_TID | perf.SAMPLE_CPU | perf.SAMPLE_TID)
evsel.open(cpus = cpus, threads = threads);
evlist = perf.evlist(cpus, threads)
evlist.add(evsel)
evlist.mmap()
while True:
evlist.poll(timeout = -1)
for cpu in cpus:
event = evlist.read_on_cpu(cpu)
if not event:
continue
print "cpu: %2d, pid: %4d, tid: %4d" % (event.sample_cpu,
event.sample_pid,
event.sample_tid),
print event
if __name__ == '__main__':
main()
| gpl-2.0 |
aoom/pattern | examples/01-web/03-bing.py | 21 | 1714 | import os, sys; sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", ".."))
from pattern.web import Bing, asynchronous, plaintext
from pattern.web import SEARCH, IMAGE, NEWS

import time

# This example retrieves results from Bing based on a given query.
# The Bing search engine can retrieve up to a 1000 results (10x100) for a query.

# Bing's "Custom Search API" is a paid service.
# The pattern.web module uses a test account by default,
# with 5000 free queries per month shared by all Pattern users.
# If this limit is exceeded, SearchEngineLimitError is raised.
# You should obtain your own license key at:
# https://datamarket.azure.com/account/
engine = Bing(license=None, language="en")

# Quote a query to match it exactly:
q = "\"is more important than\""

# When you execute a query,
# the script will halt until all results are downloaded.
# In apps with an infinite main loop (e.g., GUI, game),
# it is often more useful if the app keeps on running
# while the search is executed in the background.
# This can be achieved with the asynchronous() function.
# It takes any function and that function's arguments and keyword arguments:
request = asynchronous(engine.search, q, start=1, count=100, type=SEARCH, timeout=10)

# This while-loop simulates an infinite application loop.
# In real-life you would have an app.update() or similar
# in which you can check request.done every now and then.
# The trailing comma on print (Python 2) draws a progress dot per poll.
while not request.done:
    time.sleep(0.01)
    print ".",

print
print

# An error occurred in engine.search(); re-raise it here.
if request.error:
    raise request.error

# Retrieve the list of search results.
for result in request.value:
    print result.text
    print result.url
    print
| bsd-3-clause |
Changaco/oh-mainline | vendor/packages/Django/tests/modeltests/empty/tests.py | 51 | 1232 | from __future__ import absolute_import
from django.core.exceptions import ImproperlyConfigured
from django.db.models.loading import get_app
from django.test import TestCase
from django.test.utils import override_settings
from django.utils import six
from .models import Empty
class EmptyModelTests(TestCase):
    """Sanity checks for a model that declares no fields of its own."""

    def test_empty(self):
        instance = Empty()
        # Unsaved instances have no primary key yet.
        self.assertEqual(instance.id, None)
        instance.save()
        Empty.objects.create()
        self.assertEqual(len(Empty.objects.all()), 2)
        self.assertTrue(instance.id is not None)
        # Re-saving an object constructed from an existing pk must also work.
        resaved = Empty(instance.id)
        resaved.save()
class NoModelTests(TestCase):
    """
    Test for #7198 to ensure that the proper error message is raised
    when attempting to load an app with no models.py file.

    Because the test runner won't currently load a test module with no
    models.py file, this TestCase instead lives in this module.

    It seemed like an appropriate home for it.
    """
    @override_settings(INSTALLED_APPS=("modeltests.empty.no_models",))
    def test_no_models(self):
        # get_app() must raise ImproperlyConfigured with a helpful message
        # for an installed app that ships no models.py.
        with six.assertRaisesRegex(self, ImproperlyConfigured,
                'App with label no_models is missing a models.py module.'):
            get_app('no_models')
| agpl-3.0 |
SAM-IT-SA/odoo | addons/l10n_be_hr_payroll_account/__openerp__.py | 298 | 1626 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011 OpenERP SA (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# Odoo/OpenERP module manifest: purely declarative metadata (a dict literal).
{
    'name': 'Belgium - Payroll with Accounting',
    'category': 'Localization',
    'author': 'OpenERP SA',
    # Bridge module: combines the Belgian payroll rules with the generic
    # payroll-accounting integration and the Belgian chart of accounts.
    'depends': ['l10n_be_hr_payroll', 'hr_payroll_account', 'l10n_be'],
    'version': '1.0',
    'description': """
Accounting Data for Belgian Payroll Rules.
==========================================
""",
    # Installed automatically once all dependencies are present.
    'auto_install': True,
    'website': 'https://www.odoo.com/page/accounting',
    'demo': [],
    'data':[
        'l10n_be_wizard.yml',
        'l10n_be_hr_payroll_account_data.xml',
        'data/hr.salary.rule.csv',
    ],
    'installable': True
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
liorvh/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/layout_tests/lint_test_expectations.py | 119 | 4344 | # Copyright (C) 2012 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
import optparse
import signal
import traceback
from webkitpy.common.host import Host
from webkitpy.layout_tests.models import test_expectations
from webkitpy.port import platform_options
# This mirrors what the shell normally does.
INTERRUPTED_EXIT_STATUS = signal.SIGINT + 128
# This is a randomly chosen exit code that can be tested against to
# indicate that an unexpected exception occurred.
EXCEPTIONAL_EXIT_STATUS = 254
_log = logging.getLogger(__name__)
def lint(host, options, logging_stream):
    """Lint the TestExpectations files of every known port.

    Warnings are written to logging_stream via a temporary log handler.
    Returns 0 on success and -1 when any expectations file failed to parse.
    """
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    handler = logging.StreamHandler(logging_stream)
    logger.addHandler(handler)

    try:
        ports_to_lint = [host.port_factory.get(name) for name in host.port_factory.all_port_names(options.platform)]
        # Ports can share expectations files; remember which were already done.
        files_linted = set()
        lint_failed = False

        for port_to_lint in ports_to_lint:
            expectations_dict = port_to_lint.expectations_dict()
            # FIXME: This won't work if multiple ports share a TestExpectations file but support different modifiers in the file.
            for expectations_file in expectations_dict.keys():
                if expectations_file in files_linted:
                    continue
                try:
                    # Constructing TestExpectations parses (and thereby
                    # lints) the file; ParseError carries all warnings found.
                    test_expectations.TestExpectations(port_to_lint,
                        expectations_to_lint={expectations_file: expectations_dict[expectations_file]})
                except test_expectations.ParseError as e:
                    lint_failed = True
                    _log.error('')
                    for warning in e.warnings:
                        _log.error(warning)
                    _log.error('')
                files_linted.add(expectations_file)

        if lint_failed:
            _log.error('Lint failed.')
            return -1
        _log.info('Lint succeeded.')
        return 0
    finally:
        # Always detach our handler so repeated calls don't duplicate output.
        logger.removeHandler(handler)
def main(argv, _, stderr):
    """Command-line entry point.

    Returns a process exit status: lint()'s own result, the shell-style
    code for Ctrl-C, or EXCEPTIONAL_EXIT_STATUS for unexpected exceptions.
    """
    parser = optparse.OptionParser(option_list=platform_options(use_globs=True))
    options, _ = parser.parse_args(argv)

    if options.platform and 'test' in options.platform:
        # It's a bit lame to import mocks into real code, but this allows the user
        # to run tests against the test platform interactively, which is useful for
        # debugging test failures.
        from webkitpy.common.host_mock import MockHost
        host = MockHost()
    else:
        host = Host()

    try:
        exit_status = lint(host, options, stderr)
    except KeyboardInterrupt:
        exit_status = INTERRUPTED_EXIT_STATUS
    except Exception as e:
        print >> stderr, '\n%s raised: %s' % (e.__class__.__name__, str(e))
        traceback.print_exc(file=stderr)
        exit_status = EXCEPTIONAL_EXIT_STATUS
    return exit_status
anryko/ansible | lib/ansible/modules/cloud/centurylink/clc_publicip.py | 47 | 12152 | #!/usr/bin/python
#
# Copyright (c) 2015 CenturyLink
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
module: clc_publicip
short_description: Add and Delete public ips on servers in CenturyLink Cloud.
description:
- An Ansible module to add or delete public ip addresses on an existing server or servers in CenturyLink Cloud.
version_added: "2.0"
options:
protocol:
description:
- The protocol that the public IP will listen for.
default: TCP
choices: ['TCP', 'UDP', 'ICMP']
ports:
description:
- A list of ports to expose. This is required when state is 'present'
server_ids:
description:
- A list of servers to create public ips on.
required: True
state:
description:
- Determine whether to create or delete public IPs. If present module will not create a second public ip if one
already exists.
default: present
choices: ['present', 'absent']
wait:
description:
- Whether to wait for the tasks to finish before returning.
type: bool
default: 'yes'
requirements:
- python = 2.7
- requests >= 2.5.0
- clc-sdk
author: "CLC Runner (@clc-runner)"
notes:
- To use this module, it is required to set the below environment variables which enables access to the
Centurylink Cloud
- CLC_V2_API_USERNAME, the account login id for the centurylink cloud
- CLC_V2_API_PASSWORD, the account password for the centurylink cloud
- Alternatively, the module accepts the API token and account alias. The API token can be generated using the
CLC account login and password via the HTTP api call @ https://api.ctl.io/v2/authentication/login
- CLC_V2_API_TOKEN, the API token generated from https://api.ctl.io/v2/authentication/login
- CLC_ACCT_ALIAS, the account alias associated with the centurylink cloud
- Users can set CLC_V2_API_URL to specify an endpoint for pointing to a different CLC environment.
'''
EXAMPLES = '''
# Note - You must set the CLC_V2_API_USERNAME And CLC_V2_API_PASSWD Environment variables before running these examples
- name: Add Public IP to Server
hosts: localhost
gather_facts: False
connection: local
tasks:
- name: Create Public IP For Servers
clc_publicip:
protocol: TCP
ports:
- 80
server_ids:
- UC1TEST-SVR01
- UC1TEST-SVR02
state: present
register: clc
- name: debug
debug:
var: clc
- name: Delete Public IP from Server
hosts: localhost
gather_facts: False
connection: local
tasks:
- name: Create Public IP For Servers
clc_publicip:
server_ids:
- UC1TEST-SVR01
- UC1TEST-SVR02
state: absent
register: clc
- name: debug
debug:
var: clc
'''
RETURN = '''
server_ids:
description: The list of server ids that are changed
returned: success
type: list
sample:
[
"UC1TEST-SVR01",
"UC1TEST-SVR02"
]
'''
__version__ = '${version}'
import os
import traceback
from distutils.version import LooseVersion
REQUESTS_IMP_ERR = None
try:
import requests
except ImportError:
REQUESTS_IMP_ERR = traceback.format_exc()
REQUESTS_FOUND = False
else:
REQUESTS_FOUND = True
#
# Requires the clc-python-sdk.
# sudo pip install clc-sdk
#
CLC_IMP_ERR = None
try:
import clc as clc_sdk
from clc import CLCException
except ImportError:
CLC_IMP_ERR = traceback.format_exc()
CLC_FOUND = False
clc_sdk = None
else:
CLC_FOUND = True
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
class ClcPublicIp(object):
    """Ansible helper that adds/removes CenturyLink Cloud public IPs.

    Wraps the clc-sdk client: validates library availability in
    ``__init__`` and drives the add/remove flow in ``process_request``.
    """
    clc = clc_sdk   # class-level handle to the clc-sdk module (None if import failed)
    module = None   # bound AnsibleModule instance, set in __init__
    def __init__(self, module):
        """
        Construct module
        """
        self.module = module
        # Fail fast if any required third-party library is missing or too old.
        if not CLC_FOUND:
            self.module.fail_json(msg=missing_required_lib('clc-sdk'), exception=CLC_IMP_ERR)
        if not REQUESTS_FOUND:
            self.module.fail_json(msg=missing_required_lib('requests'), exception=REQUESTS_IMP_ERR)
        if requests.__version__ and LooseVersion(requests.__version__) < LooseVersion('2.5.0'):
            self.module.fail_json(
                msg='requests library version should be >= 2.5.0')
        self._set_user_agent(self.clc)
    def process_request(self):
        """
        Process the request - Main Code Path
        :return: Returns with either an exit_json or fail_json
        """
        self._set_clc_credentials_from_env()
        params = self.module.params
        server_ids = params['server_ids']
        ports = params['ports']
        protocol = params['protocol']
        state = params['state']
        # NOTE(review): the local name `requests` below shadows the imported
        # `requests` HTTP module for the rest of this method; it holds CLC
        # request objects returned by the ensure_* helpers.
        if state == 'present':
            changed, changed_server_ids, requests = self.ensure_public_ip_present(
                server_ids=server_ids, protocol=protocol, ports=ports)
        elif state == 'absent':
            changed, changed_server_ids, requests = self.ensure_public_ip_absent(
                server_ids=server_ids)
        else:
            return self.module.fail_json(msg="Unknown State: " + state)
        self._wait_for_requests_to_complete(requests)
        return self.module.exit_json(changed=changed,
                                     server_ids=changed_server_ids)
    @staticmethod
    def _define_module_argument_spec():
        """
        Define the argument spec for the ansible module
        :return: argument spec dictionary
        """
        argument_spec = dict(
            server_ids=dict(type='list', required=True),
            protocol=dict(default='TCP', choices=['TCP', 'UDP', 'ICMP']),
            ports=dict(type='list'),
            wait=dict(type='bool', default=True),
            state=dict(default='present', choices=['present', 'absent']),
        )
        return argument_spec
    def ensure_public_ip_present(self, server_ids, protocol, ports):
        """
        Ensures the given server ids having the public ip available
        :param server_ids: the list of server ids
        :param protocol: the ip protocol
        :param ports: the list of ports to expose
        :return: (changed, changed_server_ids, results)
                  changed: A flag indicating if there is any change
                  changed_server_ids : the list of server ids that are changed
                  results: The result list from clc public ip call
        """
        changed = False
        results = []
        changed_server_ids = []
        servers = self._get_servers_from_clc(
            server_ids,
            'Failed to obtain server list from the CLC API')
        # Only touch servers that have no public IP yet (idempotent behavior
        # documented in the module DOCUMENTATION block).
        servers_to_change = [
            server for server in servers if len(
                server.PublicIPs().public_ips) == 0]
        ports_to_expose = [{'protocol': protocol, 'port': port}
                           for port in ports]
        for server in servers_to_change:
            if not self.module.check_mode:
                result = self._add_publicip_to_server(server, ports_to_expose)
                results.append(result)
            changed_server_ids.append(server.id)
            changed = True
        return changed, changed_server_ids, results
    def _add_publicip_to_server(self, server, ports_to_expose):
        """Add a public IP exposing *ports_to_expose* to one server.

        Calls fail_json (and therefore exits) on a CLC API error.
        """
        result = None
        try:
            result = server.PublicIPs().Add(ports_to_expose)
        except CLCException as ex:
            self.module.fail_json(msg='Failed to add public ip to the server : {0}. {1}'.format(
                server.id, ex.response_text
            ))
        return result
    def ensure_public_ip_absent(self, server_ids):
        """
        Ensures the given server ids having the public ip removed if there is any
        :param server_ids: the list of server ids
        :return: (changed, changed_server_ids, results)
                  changed: A flag indicating if there is any change
                  changed_server_ids : the list of server ids that are changed
                  results: The result list from clc public ip call
        """
        changed = False
        results = []
        changed_server_ids = []
        servers = self._get_servers_from_clc(
            server_ids,
            'Failed to obtain server list from the CLC API')
        # Only touch servers that actually have at least one public IP.
        servers_to_change = [
            server for server in servers if len(
                server.PublicIPs().public_ips) > 0]
        for server in servers_to_change:
            if not self.module.check_mode:
                result = self._remove_publicip_from_server(server)
                results.append(result)
            changed_server_ids.append(server.id)
            changed = True
        return changed, changed_server_ids, results
    def _remove_publicip_from_server(self, server):
        """Delete every public IP attached to one server.

        Returns the result of the last Delete() call; exits via fail_json
        on a CLC API error.
        """
        result = None
        try:
            for ip_address in server.PublicIPs().public_ips:
                result = ip_address.Delete()
        except CLCException as ex:
            self.module.fail_json(msg='Failed to remove public ip from the server : {0}. {1}'.format(
                server.id, ex.response_text
            ))
        return result
    def _wait_for_requests_to_complete(self, requests_lst):
        """
        Waits until the CLC requests are complete if the wait argument is True
        :param requests_lst: The list of CLC request objects
        :return: none
        """
        if not self.module.params['wait']:
            return
        for request in requests_lst:
            request.WaitUntilComplete()
            for request_details in request.requests:
                if request_details.Status() != 'succeeded':
                    self.module.fail_json(
                        msg='Unable to process public ip request')
    def _set_clc_credentials_from_env(self):
        """
        Set the CLC Credentials on the sdk by reading environment variables
        :return: none
        """
        env = os.environ
        v2_api_token = env.get('CLC_V2_API_TOKEN', False)
        v2_api_username = env.get('CLC_V2_API_USERNAME', False)
        v2_api_passwd = env.get('CLC_V2_API_PASSWD', False)
        clc_alias = env.get('CLC_ACCT_ALIAS', False)
        api_url = env.get('CLC_V2_API_URL', False)
        if api_url:
            self.clc.defaults.ENDPOINT_URL_V2 = api_url
        # Prefer a pre-generated API token + alias; fall back to user/password.
        if v2_api_token and clc_alias:
            self.clc._LOGIN_TOKEN_V2 = v2_api_token
            self.clc._V2_ENABLED = True
            self.clc.ALIAS = clc_alias
        elif v2_api_username and v2_api_passwd:
            self.clc.v2.SetCredentials(
                api_username=v2_api_username,
                api_passwd=v2_api_passwd)
        else:
            return self.module.fail_json(
                msg="You must set the CLC_V2_API_USERNAME and CLC_V2_API_PASSWD "
                    "environment variables")
    def _get_servers_from_clc(self, server_ids, message):
        """
        Gets list of servers form CLC api
        """
        try:
            return self.clc.v2.Servers(server_ids).servers
        except CLCException as exception:
            self.module.fail_json(msg=message + ': %s' % exception)
    @staticmethod
    def _set_user_agent(clc):
        """Tag outgoing SDK requests with this module's identity/version."""
        if hasattr(clc, 'SetRequestsSession'):
            agent_string = "ClcAnsibleModule/" + __version__
            ses = requests.Session()
            ses.headers.update({"Api-Client": agent_string})
            ses.headers['User-Agent'] += " " + agent_string
            clc.SetRequestsSession(ses)
def main():
    """Entry point: build the AnsibleModule and run the public-IP workflow.

    :return: none (exits through exit_json / fail_json)
    """
    ansible_module = AnsibleModule(
        argument_spec=ClcPublicIp._define_module_argument_spec(),
        supports_check_mode=True
    )
    ClcPublicIp(ansible_module).process_request()


if __name__ == '__main__':
    main()
| gpl-3.0 |
ttrifonov/horizon | horizon/horizon/dashboards/syspanel/users/tables.py | 1 | 4566 | import logging
from django import shortcuts
from django.contrib import messages
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from horizon import api
from horizon import tables
LOG = logging.getLogger(__name__)
class CreateUserLink(tables.LinkAction):
    """Table action linking to the user-creation form (opened as a modal)."""
    name = "create"
    verbose_name = _("Create User")
    url = "horizon:syspanel:users:create"
    attrs = {
        # "ajax-modal" makes the client open the target URL in a modal dialog.
        "class": "ajax-modal btn small",
    }
class EditUserLink(tables.LinkAction):
    """Per-row action linking to the user-update form (opened as a modal)."""
    name = "edit"
    verbose_name = _("Edit")
    url = "horizon:syspanel:users:update"
    attrs = {
        "class": "ajax-modal",
    }
class EnableUsersAction(tables.Action):
name = "enable"
verbose_name = _("Enable")
verbose_name_plural = _("Enable Users")
def allowed(self, request, user):
return not user.enabled
def handle(self, data_table, request, object_ids):
failures = 0
enabled = []
for obj_id in object_ids:
try:
api.keystone.user_update_enabled(request, obj_id, True)
enabled.append(obj_id)
except Exception, e:
failures += 1
messages.error(request, _("Error enabling user: %s") % e)
LOG.exception("Error enabling user.")
if failures:
messages.info(request, _("Enabled the following users: %s")
% ", ".join(enabled))
else:
messages.success(request, _("Successfully enabled users: %s")
% ", ".join(enabled))
return shortcuts.redirect('horizon:syspanel:users:index')
class DisableUsersAction(tables.Action):
    """Batch action that disables the selected Keystone users."""
    name = "disable"
    verbose_name = _("Disable")
    verbose_name_plural = _("Disable Users")
    def allowed(self, request, user):
        # Only offer "Disable" for users that are currently enabled.
        return user.enabled
    def handle(self, data_table, request, object_ids):
        """Disable each selected user; refuses to disable the current user."""
        failures = 0
        disabled = []
        for obj_id in object_ids:
            # Guard against locking yourself out of the dashboard.
            if obj_id == request.user.id:
                messages.info(request, _('You cannot disable the user you are '
                                         'currently logged in as.'))
                continue
            try:
                api.keystone.user_update_enabled(request, obj_id, False)
                disabled.append(obj_id)
            except Exception, e:
                failures += 1
                messages.error(request, _("Error disabling user: %s") % e)
                LOG.exception("Error disabling user.")
        if failures:
            # Partial success: list whichever users were disabled.
            messages.info(request, _("Disabled the following users: %s")
                          % ", ".join(disabled))
        else:
            if disabled:
                messages.success(request, _("Successfully disabled users: %s")
                                 % ", ".join(disabled))
        return shortcuts.redirect('horizon:syspanel:users:index')
class DeleteUsersAction(tables.DeleteAction):
    """Batch delete action for Keystone users."""
    data_type_singular = _("User")
    data_type_plural = _("Users")
    def allowed(self, request, datum):
        # Never allow deleting the currently logged-in user.
        if datum and datum.id == request.user.id:
            return False
        return True
    def delete(self, request, obj_id):
        api.keystone.user_delete(request, obj_id)
class UserFilterAction(tables.FilterAction):
    def filter(self, table, users, filter_string):
        """ Really naive case-insensitive search. """
        # FIXME(gabriel): This should be smarter. Written for demo purposes.
        query = filter_string.lower()
        return [user for user in users
                if query in user.name.lower() or query in user.email.lower()]
class UsersTable(tables.DataTable):
    """DataTable listing Keystone users with enable/disable/delete actions."""
    # Maps the "enabled" column's raw values to row status (used for
    # row-level status styling/ajax updates).
    STATUS_CHOICES = (
        ("true", True),
        ("false", False)
    )
    id = tables.Column(_('id'))
    name = tables.Column(_('name'))
    email = tables.Column(_('email'))
    # Default tenant is not returned from Keystone currently.
    #default_tenant = tables.Column(_('default_tenant'),
    #                               verbose_name="Default Project")
    enabled = tables.Column(_('enabled'),
                            status=True,
                            status_choices=STATUS_CHOICES)
    class Meta:
        name = "users"
        verbose_name = _("Users")
        row_actions = (EditUserLink, EnableUsersAction, DisableUsersAction,
                       DeleteUsersAction)
        table_actions = (UserFilterAction, CreateUserLink, DeleteUsersAction)
| apache-2.0 |
OliverWalter/amdtk | amdtk/models/mixture.py | 1 | 3497 |
"""Mixture of distributions/densities."""
import numpy as np
from scipy.misc import logsumexp
from .discrete_latent_model import DiscreteLatentModel
from .dirichlet import Dirichlet
class MixtureStats(object):
    """Sufficient statistics for :class:`BayesianMixture`.

    Accumulates, per mixture component, the responsibilities summed over
    all frames.

    Methods
    -------
    __getitem__(key)
        Index operator.
    __add__(stats)
        Addition operator.
    __iadd__(stats)
        In-place addition operator.

    """

    def __init__(self, P_Z):
        # Sum the (frames x components) responsibility matrix over frames,
        # leaving one accumulated value per component.
        self.__stats = P_Z.sum(axis=0)

    def __getitem__(self, key):
        # Only integer keys in [0, 2] are accepted; any such key returns the
        # whole statistics vector (mirrors the multi-group stats interface
        # used elsewhere in the package — TODO confirm against callers).
        if type(key) is not int:
            raise TypeError()
        if key < 0 or key > 2:
            raise IndexError
        return self.__stats

    def __add__(self, other):
        # Bug fix: previously ``MixtureStats(len(self.__stats))`` passed a
        # plain int to the constructor, which immediately calls
        # ``.sum(axis=0)`` on its argument and therefore raised
        # AttributeError. Seed the new accumulator with a zero matrix of
        # matching width instead, then accumulate both operands.
        new_stats = MixtureStats(np.zeros((1, len(self.__stats))))
        new_stats += self
        new_stats += other
        return new_stats

    def __iadd__(self, other):
        self.__stats += other.__stats
        return self
class BayesianMixture(DiscreteLatentModel):
    """Bayesian mixture of probability distributions (or densities).

    Mixture weights carry a Dirichlet prior; the posterior over the
    weights is likewise Dirichlet.

    Attributes
    ----------
    prior : :class:`Dirichlet`
        Prior density over the mixture weights.
    posterior : :class:`Dirichlet`
        Posterior density over the mixture weights.

    """

    def __init__(self, alphas, components):
        super().__init__(components)
        # The posterior starts from a copy so later updates never mutate
        # the prior's concentration parameters.
        self.prior = Dirichlet(alphas)
        self.posterior = Dirichlet(alphas.copy())

    def expLogLikelihood(self, X, weight=1.0):
        """Expected log-likelihood of the data under the model.

        Parameters
        ----------
        X : numpy.ndarray
            Data matrix of N frames with D dimensions.
        weight : float
            Scaling weight for the log-likelihood.

        Returns
        -------
        log_norm : numpy.ndarray
            Per-frame expected log-likelihood.
        resps : numpy.ndarray
            Log responsibilities of the components given the data.

        """
        exp_log_weights = self.posterior.expLogPi()
        scores = np.zeros((X.shape[0], self.k))
        for idx, pdf in enumerate(self.components):
            scores[:, idx] += exp_log_weights[idx]
            scores[:, idx] += pdf.expLogLikelihood(X)
            scores[:, idx] *= weight
        log_norm = logsumexp(scores, axis=1)
        resps = (scores.T - log_norm).T
        return log_norm, resps

    def KLPosteriorPrior(self):
        """KL divergence between the posterior and the prior densities.

        Returns
        -------
        KL : float
            Sum of the components' divergences plus the weights' divergence.

        """
        divergence = 0
        for component in self.components:
            divergence += component.KLPosteriorPrior()
        return divergence + self.posterior.KL(self.prior)

    def updatePosterior(self, mixture_stats):
        """Update the posterior over the weights from accumulated statistics.

        Parameters
        ----------
        mixture_stats : :class:`MixtureStats`
            Statistics of the mixture weights.

        """
        self.posterior = self.prior.newPosterior(mixture_stats)
robinro/ansible | lib/ansible/module_utils/six/__init__.py | 59 | 4353 | # This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c) 2017, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This code is based on code from Astropy and retains their 3-clause BSD license
# reproduced below:
#
# Copyright (c) 2011-2016, Astropy Developers
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the Astropy Team nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Astropy License: https://github.com/astropy/astropy/blob/cf3265e42a0db8e00bb90644db37c8150f5ac00c/licenses/LICENSE.rst
# Astropy Code: https://github.com/astropy/astropy/blob/cf3265e42a0db8e00bb90644db37c8150f5ac00c/astropy/extern/six.py
"""
Handle loading six package from system or from the bundled copy
"""
from __future__ import absolute_import
import imp as _imp
import sys as _sys
try:
from distutils.version import LooseVersion as _LooseVersion
except ImportError:
# Some platforms *cough*Solaris*cough* don't ship the whole stdlib
_LooseVersion = None
try:
import six as _system_six
except ImportError:
_system_six = None
from . import _six as _bundled_six
def _find_module(name, path=None):
    """Alternative to `imp.find_module` that can also search in subpackages"""
    fh = descr = None
    for part in name.split('.'):
        # After the first component, restrict the search to the package
        # directory found so far (imp expects a list of paths).
        search_path = [path] if path is not None else None
        fh, path, descr = _imp.find_module(part, search_path)
    return fh, path, descr
def _get_bundled_six_source():
    """Fetch the bundled six source via whatever loader imported us.

    Handles special import loaders (zipimport for instance) by asking each
    cached path importer for the bundled module's source.
    """
    for path in _sys.path:
        importer = _sys.path_importer_cache.get(path)
        if not importer:
            continue
        try:
            if importer.find_module('ansible/module_utils/six/_six'):
                return importer.get_source('ansible/module_utils/six/_six')
        except ImportError:
            continue
    raise ImportError("Could not find ansible.module_utils.six._six")
def _get_six_source():
    """Import the newest version of the six library that's available.

    Prefers the system-installed six when it is at least as new as the
    bundled copy; otherwise falls back to the bundled source (via
    zipimport-aware lookup if needed). Returns the module source text.
    """
    mod_info = None
    try:
        if _system_six and _LooseVersion and \
                _LooseVersion(_system_six.__version__) >= _LooseVersion(_bundled_six.__version__):
            mod_info = _find_module('six')
    except Exception:
        # Any errors finding the system library, use our bundled lib instead.
        # (Was a bare ``except:``, which also swallowed KeyboardInterrupt
        # and SystemExit — narrowed to Exception.)
        pass

    if not mod_info:
        try:
            mod_info = _find_module('ansible.module_utils.six._six')
        except ImportError:
            # zipimport: the bundled file is not on a plain filesystem path.
            module_source = _get_bundled_six_source()
            return module_source

    return mod_info[0].read()
# Execute the chosen six implementation's source in this module's
# namespace so its names become importable from here.
source = _get_six_source()
exec(source)
| gpl-3.0 |
javivi001/OctoPrint | src/octoprint/plugins/softwareupdate/version_checks/github_commit.py | 32 | 1842 | # coding=utf-8
from __future__ import absolute_import
__author__ = "Gina Häußge <osd@foosel.net>"
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
__copyright__ = "Copyright (C) 2014 The OctoPrint Project - Released under terms of the AGPLv3 License"
import requests
import logging
from ..exceptions import ConfigurationInvalid
BRANCH_HEAD_URL = "https://api.github.com/repos/{user}/{repo}/git/refs/heads/{branch}"
logger = logging.getLogger("octoprint.plugins.softwareupdate.version_checks.github_commit")
def _get_latest_commit(user, repo, branch):
	"""Return the SHA of the newest commit on *branch*, or None on any failure.

	Queries the GitHub "git refs" API for the branch head and logs the
	remaining API rate limit. Returns None on a non-OK HTTP status or a
	response missing the expected object/sha fields.
	"""
	r = requests.get(BRANCH_HEAD_URL.format(user=user, repo=repo, branch=branch))

	from . import log_github_ratelimit
	log_github_ratelimit(logger, r)

	# Idiom fix: use != / "not in" instead of negated == / in.
	if r.status_code != requests.codes.ok:
		return None

	reference = r.json()
	if "object" not in reference or "sha" not in reference["object"]:
		return None

	return reference["object"]["sha"]
def get_latest(target, check):
	"""Compare the configured current commit against the branch head on GitHub.

	Returns an (information, is_current) pair describing the local and
	remote commit for *target*.
	"""
	if "user" not in check or "repo" not in check:
		raise ConfigurationInvalid("Update configuration for %s of type github_commit needs all of user and repo" % target)

	branch = check.get("branch", "master")
	current = check.get("current", None)

	remote_commit = _get_latest_commit(check["user"], check["repo"], branch)

	local_name = "Commit {commit}".format(commit=current if current is not None else "unknown")
	remote_name = "Commit {commit}".format(commit=remote_commit if remote_commit is not None else "unknown")
	information = dict(
		local=dict(name=local_name, value=current),
		remote=dict(name=remote_name, value=remote_commit)
	)

	# Treat an unreachable remote as "current" so update checks fail soft.
	is_current = remote_commit is None or (current is not None and current == remote_commit)

	logger.debug("Target: %s, local: %s, remote: %s" % (target, current, remote_commit))

	return information, is_current
| agpl-3.0 |
YongseopKim/crosswalk-test-suite | webapi/tct-download-tizen-tests/inst.xpk.py | 357 | 6759 | #!/usr/bin/env python
import os
import shutil
import glob
import time
import sys
import subprocess
import string
from optparse import OptionParser, make_option
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
PKG_NAME = os.path.basename(SCRIPT_DIR)
PARAMETERS = None
#XW_ENV = "export DBUS_SESSION_BUS_ADDRESS=unix:path=/run/user/5000/dbus/user_bus_socket"
SRC_DIR = ""
PKG_SRC_DIR = ""
def doCMD(cmd):
    """Run *cmd* in a shell, echoing output live; return (exit_code, lines)."""
    # Do not need handle timeout in this short script, let tool do it
    print "-->> \"%s\"" % cmd
    output = []
    cmd_return_code = 1
    cmd_proc = subprocess.Popen(
        cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
    while True:
        output_line = cmd_proc.stdout.readline().strip("\r\n")
        cmd_return_code = cmd_proc.poll()
        # Stop once the process has exited and its output is drained.
        if output_line == '' and cmd_return_code != None:
            break
        sys.stdout.write("%s\n" % output_line)
        sys.stdout.flush()
        output.append(output_line)
    return (cmd_return_code, output)
def updateCMD(cmd=None):
    """Wrap pkgcmd invocations so they run as the target user with XW_ENV set."""
    if "pkgcmd" in cmd:
        cmd = "su - %s -c '%s;%s'" % (PARAMETERS.user, XW_ENV, cmd)
    return cmd
def getUSERID():
    """Query the device (via sdb or ssh) for the numeric uid of the test user."""
    if PARAMETERS.mode == "SDB":
        cmd = "sdb -s %s shell id -u %s" % (
            PARAMETERS.device, PARAMETERS.user)
    else:
        cmd = "ssh %s \"id -u %s\"" % (
            PARAMETERS.device, PARAMETERS.user )
    return doCMD(cmd)
def getPKGID(pkg_name=None):
    """Return the installed package id matching *pkg_name*, or None."""
    if PARAMETERS.mode == "SDB":
        cmd = "sdb -s %s shell %s" % (
            PARAMETERS.device, updateCMD('pkgcmd -l'))
    else:
        cmd = "ssh %s \"%s\"" % (
            PARAMETERS.device, updateCMD('pkgcmd -l'))
    (return_code, output) = doCMD(cmd)
    if return_code != 0:
        return None
    test_pkg_id = None
    for line in output:
        # pkgcmd -l lines contain "[<name>] ... pkgid [<id>] ..."
        if line.find("[" + pkg_name + "]") != -1:
            pkgidIndex = line.split().index("pkgid")
            test_pkg_id = line.split()[pkgidIndex+1].strip("[]")
            break
    return test_pkg_id
def doRemoteCMD(cmd=None):
    """Run *cmd* on the target device via sdb shell or ssh."""
    if PARAMETERS.mode == "SDB":
        cmd = "sdb -s %s shell %s" % (PARAMETERS.device, updateCMD(cmd))
    else:
        cmd = "ssh %s \"%s\"" % (PARAMETERS.device, updateCMD(cmd))
    return doCMD(cmd)
def doRemoteCopy(src=None, dest=None):
    """Copy *src* to *dest* on the device; return True on success.

    Bug fix: the return values were inverted (True on a non-zero exit
    code, False on success), so callers' ``if not doRemoteCopy(...)``
    failure checks flagged successful copies as failures. doCMD returns
    the process exit code, where 0 means success.
    """
    if PARAMETERS.mode == "SDB":
        cmd_prefix = "sdb -s %s push" % PARAMETERS.device
        cmd = "%s %s %s" % (cmd_prefix, src, dest)
    else:
        cmd = "scp -r %s %s:/%s" % (src, PARAMETERS.device, dest)
    (return_code, output) = doCMD(cmd)
    doRemoteCMD("sync")
    if return_code == 0:
        return True
    else:
        return False
def uninstPKGs():
    """Uninstall every .xpk under SCRIPT_DIR and remove the on-device tree.

    Returns True when all uninstalls and the cleanup succeeded.
    """
    action_status = True
    for root, dirs, files in os.walk(SCRIPT_DIR):
        # Media resources are not packages; skip them.
        if root.endswith("mediasrc"):
            continue
        for file in files:
            if file.endswith(".xpk"):
                pkg_id = getPKGID(os.path.basename(os.path.splitext(file)[0]))
                if not pkg_id:
                    action_status = False
                    continue
                (return_code, output) = doRemoteCMD(
                    "pkgcmd -u -t xpk -q -n %s" % pkg_id)
                for line in output:
                    # pkgcmd reports errors as lines containing "Failure".
                    if "Failure" in line:
                        action_status = False
                        break
    # Remove the deployed test content directory from the device.
    (return_code, output) = doRemoteCMD(
        "rm -rf %s" % PKG_SRC_DIR)
    if return_code != 0:
        action_status = False
    return action_status
def instPKGs():
    """Push and install every .xpk under SCRIPT_DIR on the device.

    Returns True when the remote directory setup, all copies and all
    installs succeeded.
    """
    action_status = True
    (return_code, output) = doRemoteCMD(
        "mkdir -p %s" % PKG_SRC_DIR)
    if return_code != 0:
        action_status = False
    for root, dirs, files in os.walk(SCRIPT_DIR):
        # Media resources are not packages; skip them.
        if root.endswith("mediasrc"):
            continue
        for file in files:
            if file.endswith(".xpk"):
                if not doRemoteCopy(os.path.join(root, file), "%s/%s" % (SRC_DIR, file)):
                    action_status = False
                (return_code, output) = doRemoteCMD(
                    "pkgcmd -i -t xpk -q -p %s/%s" % (SRC_DIR, file))
                # The package file is no longer needed once installed.
                doRemoteCMD("rm -rf %s/%s" % (SRC_DIR, file))
                for line in output:
                    if "Failure" in line:
                        action_status = False
                        break
    # Do some special copy/delete... steps
    '''
    (return_code, output) = doRemoteCMD(
        "mkdir -p %s/tests" % PKG_SRC_DIR)
    if return_code != 0:
        action_status = False
    if not doRemoteCopy("specname/tests", "%s/tests" % PKG_SRC_DIR):
        action_status = False
    '''
    return action_status
def main():
    """Parse options, resolve the target device/user, then (un)install packages."""
    try:
        usage = "usage: inst.py -i"
        opts_parser = OptionParser(usage=usage)
        opts_parser.add_option(
            "-m", dest="mode", action="store", help="Specify mode")
        opts_parser.add_option(
            "-s", dest="device", action="store", help="Specify device")
        opts_parser.add_option(
            "-i", dest="binstpkg", action="store_true", help="Install package")
        opts_parser.add_option(
            "-u", dest="buninstpkg", action="store_true", help="Uninstall package")
        opts_parser.add_option(
            "-a", dest="user", action="store", help="User name")
        global PARAMETERS
        (PARAMETERS, args) = opts_parser.parse_args()
    except Exception, e:
        print "Got wrong option: %s, exit ..." % e
        sys.exit(1)
    if not PARAMETERS.user:
        PARAMETERS.user = "app"
    global SRC_DIR, PKG_SRC_DIR
    SRC_DIR = "/home/%s/content" % PARAMETERS.user
    PKG_SRC_DIR = "%s/tct/opt/%s" % (SRC_DIR, PKG_NAME)
    if not PARAMETERS.mode:
        PARAMETERS.mode = "SDB"
    if PARAMETERS.mode == "SDB":
        # Auto-pick the first attached sdb device when none was given.
        if not PARAMETERS.device:
            (return_code, output) = doCMD("sdb devices")
            for line in output:
                if str.find(line, "\tdevice") != -1:
                    PARAMETERS.device = line.split("\t")[0]
                    break
    else:
        PARAMETERS.mode = "SSH"
    if not PARAMETERS.device:
        print "No device provided"
        sys.exit(1)
    # Resolve the target user's uid so XW_ENV can point at its dbus socket.
    user_info = getUSERID()
    re_code = user_info[0]
    if re_code == 0 :
        global XW_ENV
        userid = user_info[1][0]
        XW_ENV = "export DBUS_SESSION_BUS_ADDRESS=unix:path=/run/user/%s/dbus/user_bus_socket"%str(userid)
    else:
        print "[Error] cmd commands error : %s"%str(user_info[1])
        sys.exit(1)
    if PARAMETERS.binstpkg and PARAMETERS.buninstpkg:
        print "-i and -u are conflict"
        sys.exit(1)
    if PARAMETERS.buninstpkg:
        if not uninstPKGs():
            sys.exit(1)
    else:
        if not instPKGs():
            sys.exit(1)
if __name__ == "__main__":
    main()
    sys.exit(0)
| bsd-3-clause |
wwj718/ANALYSE | cms/djangoapps/contentstore/features/course-team.py | 46 | 4242 | # pylint: disable=C0111
# pylint: disable=W0621
from lettuce import world, step
from nose.tools import assert_in # pylint: disable=E0611
@step(u'(I am viewing|s?he views) the course team settings$')
def view_grading_settings(_step, whom):
    """Navigate to the course team settings page via the settings menu."""
    world.click_course_settings()
    link_css = 'li.nav-course-settings-team a'
    world.css_click(link_css)
@step(u'I add "([^"]*)" to the course team$')
def add_other_user(_step, name):
    """Open the add-user dialog and submit "<name>@edx.org" as a new member."""
    new_user_css = 'a.create-user-button'
    world.css_click(new_user_css)
    # Wait for the css animation to apply the is-shown class
    shown_css = 'div.wrapper-create-user.is-shown'
    world.wait_for_present(shown_css)
    email_css = 'input#user-email-input'
    world.css_fill(email_css, name + '@edx.org')
    if world.is_firefox():
        world.trigger_event(email_css)
    confirm_css = 'form.create-user button.action-primary'
    world.css_click(confirm_css)
@step(u'I delete "([^"]*)" from the course team$')
def delete_other_user(_step, name):
    """Remove "<name>@edx.org" from the team and confirm the prompt."""
    to_delete_css = '.user-item .item-actions a.remove-user[data-id="{email}"]'.format(
        email="{0}{1}".format(name, '@edx.org'))
    world.css_click(to_delete_css)
    world.confirm_studio_prompt()
@step(u's?he deletes me from the course team$')
def other_delete_self(_step):
    """Remove the default test user (robot+studio@edx.org) from the team."""
    to_delete_css = '.user-item .item-actions a.remove-user[data-id="{email}"]'.format(
        email="robot+studio@edx.org")
    world.css_click(to_delete_css)
    world.confirm_studio_prompt()
@step(u'I make "([^"]*)" a course team admin$')
def make_course_team_admin(_step, name):
    """Click the add-admin-role button for "<name>@edx.org"."""
    admin_btn_css = '.user-item[data-email="{name}@edx.org"] .user-actions .add-admin-role'.format(
        name=name)
    world.css_click(admin_btn_css)
@step(u'I remove admin rights from ("([^"]*)"|myself)$')
def remove_course_team_admin(_step, outer_capture, name):
    """Click remove-admin-role for the named user, or for the current user."""
    if outer_capture == "myself":
        email = world.scenario_dict["USER"].email
    else:
        email = name + '@edx.org'
    admin_btn_css = '.user-item[data-email="{email}"] .user-actions .remove-admin-role'.format(
        email=email)
    world.css_click(admin_btn_css)
@step(u'I( do not)? see the course on my page$')
@step(u's?he does( not)? see the course on (his|her) page$')
def see_course(_step, do_not_see, gender='self'):
    """Assert the scenario course's title is (not) listed on the dashboard."""
    class_css = 'h3.course-title'
    if do_not_see:
        assert world.is_css_not_present(class_css)
    else:
        all_courses = world.css_find(class_css)
        all_names = [item.html for item in all_courses]
        assert_in(world.scenario_dict['COURSE'].display_name, all_names)
@step(u'"([^"]*)" should( not)? be marked as an admin$')
def marked_as_admin(_step, name, not_marked_admin):
flag_css = '.user-item[data-email="{name}@edx.org"] .flag-role.flag-role-admin'.format(
name=name)
if not_marked_admin:
assert world.is_css_not_present(flag_css)
else:
assert world.is_css_present(flag_css)
@step(u'I should( not)? be marked as an admin$')
def self_marked_as_admin(_step, not_marked_admin):
    """Same check as marked_as_admin, applied to the default test user."""
    return marked_as_admin(_step, "robot+studio", not_marked_admin)
@step(u'I can(not)? delete users$')
@step(u's?he can(not)? delete users$')
def can_delete_users(_step, can_not_delete):
    """Assert remove-user controls are (not) available on the page."""
    to_delete_css = 'a.remove-user'
    if can_not_delete:
        assert world.is_css_not_present(to_delete_css)
    else:
        assert world.is_css_present(to_delete_css)
@step(u'I can(not)? add users$')
@step(u's?he can(not)? add users$')
def can_add_users(_step, can_not_add):
    """Assert the add-user button is (not) available on the page."""
    add_css = 'a.create-user-button'
    if can_not_add:
        assert world.is_css_not_present(add_css)
    else:
        assert world.is_css_present(add_css)
@step(u'I can(not)? make ("([^"]*)"|myself) a course team admin$')
@step(u's?he can(not)? make ("([^"]*)"|me) a course team admin$')
def can_make_course_admin(_step, can_not_make_admin, outer_capture, name):
    """Assert the add-admin-role button is (not) shown for the given user."""
    if outer_capture == "myself":
        email = world.scenario_dict["USER"].email
    else:
        email = name + '@edx.org'
    add_button_css = '.user-item[data-email="{email}"] .add-admin-role'.format(email=email)
    if can_not_make_admin:
        assert world.is_css_not_present(add_button_css)
    else:
        assert world.is_css_present(add_button_css)
| agpl-3.0 |
clobrano/personfinder | app/pytz/zoneinfo/Asia/Amman.py | 7 | 5275 | '''tzinfo timezone information for Asia/Amman.'''
from pytz.tzinfo import DstTzInfo
from pytz.tzinfo import memorized_datetime as d
from pytz.tzinfo import memorized_ttinfo as i
class Amman(DstTzInfo):
'''Asia/Amman timezone definition. See datetime.tzinfo for details'''
zone = 'Asia/Amman'
_utc_transition_times = [
d(1,1,1,0,0,0),
d(1930,12,31,21,36,16),
d(1973,6,5,22,0,0),
d(1973,9,30,21,0,0),
d(1974,4,30,22,0,0),
d(1974,9,30,21,0,0),
d(1975,4,30,22,0,0),
d(1975,9,30,21,0,0),
d(1976,4,30,22,0,0),
d(1976,10,31,21,0,0),
d(1977,4,30,22,0,0),
d(1977,9,30,21,0,0),
d(1978,4,29,22,0,0),
d(1978,9,29,21,0,0),
d(1985,3,31,22,0,0),
d(1985,9,30,21,0,0),
d(1986,4,3,22,0,0),
d(1986,10,2,21,0,0),
d(1987,4,2,22,0,0),
d(1987,10,1,21,0,0),
d(1988,3,31,22,0,0),
d(1988,10,6,21,0,0),
d(1989,5,7,22,0,0),
d(1989,10,5,21,0,0),
d(1990,4,26,22,0,0),
d(1990,10,4,21,0,0),
d(1991,4,16,22,0,0),
d(1991,9,26,21,0,0),
d(1992,4,9,22,0,0),
d(1992,10,1,21,0,0),
d(1993,4,1,22,0,0),
d(1993,9,30,21,0,0),
d(1994,3,31,22,0,0),
d(1994,9,15,21,0,0),
d(1995,4,6,22,0,0),
d(1995,9,14,22,0,0),
d(1996,4,4,22,0,0),
d(1996,9,19,22,0,0),
d(1997,4,3,22,0,0),
d(1997,9,18,22,0,0),
d(1998,4,2,22,0,0),
d(1998,9,17,22,0,0),
d(1999,6,30,22,0,0),
d(1999,9,29,22,0,0),
d(2000,3,29,22,0,0),
d(2000,9,27,22,0,0),
d(2001,3,28,22,0,0),
d(2001,9,26,22,0,0),
d(2002,3,27,22,0,0),
d(2002,9,25,22,0,0),
d(2003,3,26,22,0,0),
d(2003,10,23,22,0,0),
d(2004,3,24,22,0,0),
d(2004,10,14,22,0,0),
d(2005,3,30,22,0,0),
d(2005,9,29,22,0,0),
d(2006,3,29,22,0,0),
d(2006,10,26,22,0,0),
d(2007,3,28,22,0,0),
d(2007,10,25,22,0,0),
d(2008,3,26,22,0,0),
d(2008,10,30,22,0,0),
d(2009,3,25,22,0,0),
d(2009,10,29,22,0,0),
d(2010,3,24,22,0,0),
d(2010,10,28,22,0,0),
d(2011,3,30,22,0,0),
d(2011,10,27,22,0,0),
d(2012,3,28,22,0,0),
d(2012,10,25,22,0,0),
d(2013,3,27,22,0,0),
d(2013,10,24,22,0,0),
d(2014,3,26,22,0,0),
d(2014,10,30,22,0,0),
d(2015,3,25,22,0,0),
d(2015,10,29,22,0,0),
d(2016,3,30,22,0,0),
d(2016,10,27,22,0,0),
d(2017,3,29,22,0,0),
d(2017,10,26,22,0,0),
d(2018,3,28,22,0,0),
d(2018,10,25,22,0,0),
d(2019,3,27,22,0,0),
d(2019,10,24,22,0,0),
d(2020,3,25,22,0,0),
d(2020,10,29,22,0,0),
d(2021,3,24,22,0,0),
d(2021,10,28,22,0,0),
d(2022,3,30,22,0,0),
d(2022,10,27,22,0,0),
d(2023,3,29,22,0,0),
d(2023,10,26,22,0,0),
d(2024,3,27,22,0,0),
d(2024,10,24,22,0,0),
d(2025,3,26,22,0,0),
d(2025,10,30,22,0,0),
d(2026,3,25,22,0,0),
d(2026,10,29,22,0,0),
d(2027,3,24,22,0,0),
d(2027,10,28,22,0,0),
d(2028,3,29,22,0,0),
d(2028,10,26,22,0,0),
d(2029,3,28,22,0,0),
d(2029,10,25,22,0,0),
d(2030,3,27,22,0,0),
d(2030,10,24,22,0,0),
d(2031,3,26,22,0,0),
d(2031,10,30,22,0,0),
d(2032,3,24,22,0,0),
d(2032,10,28,22,0,0),
d(2033,3,30,22,0,0),
d(2033,10,27,22,0,0),
d(2034,3,29,22,0,0),
d(2034,10,26,22,0,0),
d(2035,3,28,22,0,0),
d(2035,10,25,22,0,0),
d(2036,3,26,22,0,0),
d(2036,10,30,22,0,0),
d(2037,3,25,22,0,0),
d(2037,10,29,22,0,0),
]
_transition_info = [
i(8640,0,'LMT'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
i(10800,3600,'EEST'),
i(7200,0,'EET'),
]
Amman = Amman()
| apache-2.0 |
CamelBackNotation/CarnotKE | jyhton/lib-python/2.7/plat-mac/lib-scriptpackages/StdSuites/Table_Suite.py | 81 | 2036 | """Suite Table Suite: Classes for manipulating tables
Level 1, version 1
Generated from /Volumes/Sap/System Folder/Extensions/AppleScript
AETE/AEUT resource version 1/0, language 0, script 0
"""
import aetools
import MacOS
_code = 'tbls'
class Table_Suite_Events:
    """Event classes for the Table suite (none are defined)."""
    pass


class cell(aetools.ComponentItem):
    """cell - A cell """
    want = 'ccel'

class _Prop_formula(aetools.NProperty):
    """formula - the formula of the cell """
    which = 'pfor'
    want = 'ctxt'

class _Prop_protection(aetools.NProperty):
    """protection - Indicates whether value or formula in the cell can be changed """
    which = 'ppro'
    want = 'prtn'

# Plural alias used by aetools when resolving element names.
cells = cell

class column(aetools.ComponentItem):
    """column - A column """
    want = 'ccol'

class _Prop_name(aetools.NProperty):
    """name - the name of the column """
    which = 'pnam'
    want = 'itxt'

columns = column

class rows(aetools.ComponentItem):
    """rows - """
    want = 'crow'

row = rows

class tables(aetools.ComponentItem):
    """tables - """
    want = 'ctbl'

table = tables
# Wire up the per-class property/element tables that aetools consults
# when unpacking Apple Events.
cell._superclassnames = []
cell._privpropdict = {
    'formula': _Prop_formula,
    'protection': _Prop_protection,
}
cell._privelemdict = {}

column._superclassnames = []
column._privpropdict = {
    'name': _Prop_name,
}
column._privelemdict = {}

rows._superclassnames = []
rows._privpropdict = {}
rows._privelemdict = {}

tables._superclassnames = []
tables._privpropdict = {}
tables._privelemdict = {}

# Enumeration of cell-protection levels.
_Enum_prtn = {
    'read_only': 'nmod',            # Can't change values or formulas
    'formulas_protected': 'fpro',   # Can change values but not formulas
    'read_2f_write': 'modf',        # Can change values and formulas
}

#
# Indices of types declared in this module
#
_classdeclarations = {
    'ccel': cell,
    'ccol': column,
    'crow': rows,
    'ctbl': tables,
}

_propdeclarations = {
    'pfor': _Prop_formula,
    'pnam': _Prop_name,
    'ppro': _Prop_protection,
}

_compdeclarations = {}

_enumdeclarations = {
    'prtn': _Enum_prtn,
}
| apache-2.0 |
2014c2g6/c2g6 | wsgi/static/Brython2.1.0-20140419-113919/Lib/_codecs.py | 107 | 3745 |
def ascii_decode(*args, **kw):
    """Stub for the C-level ASCII decoder; accepts anything, does nothing."""
    pass

def ascii_encode(*args, **kw):
    """Stub for the C-level ASCII encoder."""
    pass

def charbuffer_encode(*args, **kw):
    """Stub for the char-buffer encoder."""
    pass

def charmap_build(*args, **kw):
    """Stub for building a charmap decoding table."""
    pass

def charmap_decode(*args, **kw):
    """Stub for the charmap decoder."""
    pass

def charmap_encode(*args, **kw):
    """Stub for the charmap encoder."""
    pass
def decode(*args, **kw):
    """decode(obj, [encoding[,errors]]) -> object
    Decodes obj using the codec registered for encoding. encoding defaults
    to the default encoding. errors may be given to set a different error
    handling scheme. Default is 'strict' meaning that encoding errors raise
    a ValueError. Other possible values are 'ignore' and 'replace'
    as well as any other name registered with codecs.register_error that is
    able to handle ValueErrors."""
    # Stub: the browser runtime supplies no real implementation.
    pass

def encode(*args, **kw):
    """encode(obj, [encoding[,errors]]) -> object
    Encodes obj using the codec registered for encoding. encoding defaults
    to the default encoding. errors may be given to set a different error
    handling scheme. Default is 'strict' meaning that encoding errors raise
    a ValueError. Other possible values are 'ignore', 'replace' and
    'xmlcharrefreplace' as well as any other name registered with
    codecs.register_error that can handle ValueErrors."""
    # Stub: the browser runtime supplies no real implementation.
    pass
def escape_decode(*args, **kw):
    """Stub for the string-escape decoder."""
    pass

def escape_encode(*args, **kw):
    """Stub for the string-escape encoder."""
    pass

def latin_1_decode(*args, **kw):
    """Stub for the Latin-1 decoder."""
    pass

def latin_1_encode(*args, **kw):
    """Stub for the Latin-1 encoder."""
    pass

def lookup(encoding):
    """lookup(encoding) -> CodecInfo
    Looks up a codec tuple in the Python codec registry and returns
    a CodecInfo object."""
    # NOTE(review): echoes the encoding back instead of returning a real
    # CodecInfo, and the print looks like leftover debug output — confirm
    # callers rely only on the return value.
    print('_codecs lookup', encoding)
    return encoding

def lookup_error(*args, **kw):
    """lookup_error(errors) -> handler
    Return the error handler for the specified error handling name
    or raise a LookupError, if no handler exists under this name."""
    pass
def mbcs_decode(*args, **kw):
    """Stub for the Windows MBCS decoder."""
    pass

def mbcs_encode(*args, **kw):
    """Stub for the Windows MBCS encoder."""
    pass

def raw_unicode_escape_decode(*args, **kw):
    """Stub for the raw-unicode-escape decoder."""
    pass

def raw_unicode_escape_encode(*args, **kw):
    """Stub for the raw-unicode-escape encoder."""
    pass

def readbuffer_encode(*args, **kw):
    """Stub for the read-buffer encoder."""
    pass

def register(*args, **kw):
    """register(search_function)
    Register a codec search function. Search functions are expected to take
    one argument, the encoding name in all lower case letters, and return
    a tuple of functions (encoder, decoder, stream_reader, stream_writer)
    (or a CodecInfo object)."""
    pass

def register_error(*args, **kw):
    """register_error(errors, handler)
    Register the specified error handler under the name
    errors. handler must be a callable object, that
    will be called with an exception instance containing
    information about the location of the encoding/decoding
    error and must return a (replacement, new position) tuple."""
    pass
# Stubs for every unicode/UTF codec entry point; all accept any
# arguments and return None.

def unicode_escape_decode(*args, **kw):
    pass

def unicode_escape_encode(*args, **kw):
    pass

def unicode_internal_decode(*args, **kw):
    pass

def unicode_internal_encode(*args, **kw):
    pass

def utf_16_be_decode(*args, **kw):
    pass

def utf_16_be_encode(*args, **kw):
    pass

def utf_16_decode(*args, **kw):
    pass

def utf_16_encode(*args, **kw):
    pass

def utf_16_ex_decode(*args, **kw):
    pass

def utf_16_le_decode(*args, **kw):
    pass

def utf_16_le_encode(*args, **kw):
    pass

def utf_32_be_decode(*args, **kw):
    pass

def utf_32_be_encode(*args, **kw):
    pass

def utf_32_decode(*args, **kw):
    pass

def utf_32_encode(*args, **kw):
    pass

def utf_32_ex_decode(*args, **kw):
    pass

def utf_32_le_decode(*args, **kw):
    pass

def utf_32_le_encode(*args, **kw):
    pass

def utf_7_decode(*args, **kw):
    pass

def utf_7_encode(*args, **kw):
    pass

def utf_8_decode(*args, **kw):
    pass

def utf_8_encode(*args, **kw):
    pass
| gpl-2.0 |
michaelhowden/eden | modules/tests/inv/send_item.py | 25 | 3148 | """ Sahana Eden Module Automated Tests - INV001 Send Items
@copyright: 2011-2012 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
from helper import InvTestFunctions
class SendItem(InvTestFunctions):
    """Inventory Test - Send Workflow (Send items).

    Sends a specific item to another party. Assumes test/inv-mngt has
    been added to prepop (e.g. via demo/IFRC_Train).

    @Case: INV001
    @TestDoc: https://docs.google.com/spreadsheet/ccc?key=0AmB3hMcgB-3idG1XNGhhRG9QWF81dUlKLXpJaFlCMFE
    @Test Wiki: http://eden.sahanafoundation.org/wiki/DeveloperGuidelines/Testing
    """

    # -------------------------------------------------------------------------
    def test_inv001_send_items(self):
        """ Tests for Send Workflow """
        user = "admin"
        self.login(account="admin", nexturl="inv/send/create")

        # Header fields for the shipment form.
        send_data = [
            ("site_id",
             "Timor-Leste Red Cross Society (CVTL) National Warehouse (Warehouse)",
             ),
            ("type",
             "Internal Shipment",
             ),
            ("to_site_id",
             "Lospalos Warehouse (Warehouse)",
             ),
            ("sender_id",
             "Beatriz de Carvalho",
             ),
            ("recipient_id",
             "Liliana Otilia",
             )
        ]

        # One entry per tracked item to attach to the shipment.
        item_data = [
            [("send_inv_item_id",
              "Blankets - Australian Red Cross",
              "inv_widget",
              ),
             ("quantity",
              "3",
              ),
             ],
        ]

        result = self.send(user, send_data)
        send_id = self.send_get_id(result)
        for row in item_data:
            result = self.track_send_item(user, send_id, row)
        # Send the shipment
        self.send_shipment(user, send_id)
| mit |
google-code/android-scripting | python/gdata/src/gdata/photos/__init__.py | 225 | 39076 | # -*-*- encoding: utf-8 -*-*-
#
# This is the base file for the PicasaWeb python client.
# It is used for lower level operations.
#
# $Id: __init__.py 148 2007-10-28 15:09:19Z havard.gulldahl $
#
# Copyright 2007 Håvard Gulldahl
# Portions (C) 2006 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module provides a pythonic, gdata-centric interface to Google Photos
(a.k.a. Picasa Web Services.
It is modelled after the gdata/* interfaces from the gdata-python-client
project[1] by Google.
You'll find the user-friendly api in photos.service. Please see the
documentation or live help() system for available methods.
[1]: http://gdata-python-client.googlecode.com/
"""
__author__ = u'havard@gulldahl.no'# (Håvard Gulldahl)' #BUG: pydoc chokes on non-ascii chars in __author__
__license__ = 'Apache License v2'
__version__ = '$Revision: 164 $'[11:-2]
import re
try:
from xml.etree import cElementTree as ElementTree
except ImportError:
try:
import cElementTree as ElementTree
except ImportError:
try:
from xml.etree import ElementTree
except ImportError:
from elementtree import ElementTree
import atom
import gdata
# importing google photo submodules
import gdata.media as Media, gdata.exif as Exif, gdata.geo as Geo
# XML namespace URIs that recur throughout Google Photos elements.
PHOTOS_NAMESPACE = 'http://schemas.google.com/photos/2007'
MEDIA_NAMESPACE = 'http://search.yahoo.com/mrss/'
EXIF_NAMESPACE = 'http://schemas.google.com/photos/exif/2007'
OPENSEARCH_NAMESPACE = 'http://a9.com/-/spec/opensearchrss/1.0/'
GEO_NAMESPACE = 'http://www.w3.org/2003/01/geo/wgs84_pos#'
GML_NAMESPACE = 'http://www.opengis.net/gml'
GEORSS_NAMESPACE = 'http://www.georss.org/georss'
PHEED_NAMESPACE = 'http://www.pheed.com/pheed/'
BATCH_NAMESPACE = 'http://schemas.google.com/gdata/batch'
class PhotosBaseElement(atom.AtomBase):
    """Base class for elements in the PHOTO_NAMESPACE.

    To add a new element, subclass and set ``_tag`` to the element name.
    """
    _tag = ''
    _namespace = PHOTOS_NAMESPACE
    _children = atom.AtomBase._children.copy()
    _attributes = atom.AtomBase._attributes.copy()

    def __init__(self, name=None, extension_elements=None,
                 extension_attributes=None, text=None):
        self.name = name
        self.text = text
        self.extension_elements = extension_elements or []
        self.extension_attributes = extension_attributes or {}

    def __int__(self):
        # Elements whose text is numeric can be used directly as ints.
        return int(self.text)

    def bool(self):
        """Return True when the element text is the literal 'true'."""
        return self.text == 'true'
class GPhotosBaseFeed(gdata.GDataFeed, gdata.LinkFinder):
    """Base class for all Feeds in gdata.photos."""

    _tag = 'feed'
    _namespace = atom.ATOM_NAMESPACE
    _attributes = gdata.GDataFeed._attributes.copy()
    _children = gdata.GDataFeed._children.copy()
    # Entry elements are handled by _ConvertElementTreeToMember below, so
    # remove the generic atom:entry handler inherited from GDataFeed.
    del _children['{%s}entry' % atom.ATOM_NAMESPACE]

    def __init__(self, author=None, category=None, contributor=None,
                 generator=None, icon=None, atom_id=None, link=None, logo=None,
                 rights=None, subtitle=None, title=None, updated=None,
                 entry=None, total_results=None, start_index=None,
                 items_per_page=None, extension_elements=None,
                 extension_attributes=None, text=None):
        gdata.GDataFeed.__init__(self, author=author, category=category,
                                 contributor=contributor, generator=generator,
                                 icon=icon, atom_id=atom_id, link=link,
                                 logo=logo, rights=rights, subtitle=subtitle,
                                 title=title, updated=updated, entry=entry,
                                 total_results=total_results,
                                 start_index=start_index,
                                 items_per_page=items_per_page,
                                 extension_elements=extension_elements,
                                 extension_attributes=extension_attributes,
                                 text=text)

    def kind(self):
        """(string) Return the feed's kind, or None if no category is set."""
        try:
            return self.category[0].term.split('#')[1]
        except IndexError:
            return None

    def _feedUri(self, kind):
        """Return the uri of this feed rewritten to the given `kind'."""
        assert kind in ('album', 'tag', 'photo', 'comment', 'user')
        uri = self.GetSelfLink().href
        if 'kind=%s' % kind in uri:
            # Already points at the requested kind.
            return uri
        if 'kind=' not in uri:
            # No kind parameter yet: append one with the right separator.
            sep = '&' if '?' in uri else '?'
            return uri + "%skind=%s" % (sep, kind)
        # Replace the existing kind value in place.
        m = re.match('.*(kind=)(album|tag|photo|comment)', uri)
        return uri[:m.end(1)] + kind + uri[m.end(2):]

    def _ConvertElementTreeToMember(self, child_tree):
        """Re-implementation of the AtomBase method: entry elements are
        instantiated as the concrete *Entry class named by their category."""
        category = child_tree.find('{%s}category' % atom.ATOM_NAMESPACE)
        if category is None:
            return atom.AtomBase._ConvertElementTreeToMember(self, child_tree)
        namespace, kind = category.get('term').split('#')
        if namespace != PHOTOS_NAMESPACE:
            return atom.AtomBase._ConvertElementTreeToMember(self, child_tree)
        ## TODO: is it safe to use getattr on gdata.photos?
        entry_class = getattr(gdata.photos, '%sEntry' % kind.title())
        if not hasattr(self, 'entry') or self.entry is None:
            self.entry = []
        self.entry.append(
            atom._CreateClassFromElementTree(entry_class, child_tree))
class GPhotosBaseEntry(gdata.GDataEntry, gdata.LinkFinder):
    """Base class for all Entry elements in gdata.photos."""

    _tag = 'entry'
    _kind = ''
    _namespace = atom.ATOM_NAMESPACE
    _children = gdata.GDataEntry._children.copy()
    _attributes = gdata.GDataEntry._attributes.copy()

    def __init__(self, author=None, category=None, content=None,
                 atom_id=None, link=None, published=None,
                 title=None, updated=None,
                 extended_property=None,
                 extension_elements=None, extension_attributes=None, text=None):
        gdata.GDataEntry.__init__(self, author=author, category=category,
                                  content=content, atom_id=atom_id, link=link,
                                  published=published, title=title,
                                  updated=updated, text=text,
                                  extension_elements=extension_elements,
                                  extension_attributes=extension_attributes)
        # Tag this entry with its gdata kind (album/photo/tag/comment).
        self.category.append(
            atom.Category(scheme='http://schemas.google.com/g/2005#kind',
                          term='http://schemas.google.com/photos/2007#%s' % self._kind))

    def kind(self):
        """(string) Return the entry's kind, or None if no category is set."""
        try:
            return self.category[0].term.split('#')[1]
        except IndexError:
            return None

    def _feedUri(self, kind):
        """Return the uri of this entry's feed of the given `kind',
        or None when the entry has no feed link."""
        try:
            href = self.GetFeedLink().href
        except AttributeError:
            return None
        sep = '&' if '?' in href else '?'
        return '%s%skind=%s' % (href, sep, kind)
# Thin aliases kept for backward compatibility with older imports.
class PhotosBaseEntry(GPhotosBaseEntry):
    pass


class PhotosBaseFeed(GPhotosBaseFeed):
    pass


class GPhotosBaseData(object):
    pass
class Access(PhotosBaseElement):
    """The Google Photo `Access' element.

    The album's access level. Valid values are `public' or `private'.
    In documentation, access level is also referred to as `visibility.'"""
    _tag = 'access'

def AccessFromString(xml_string):
    return atom.CreateClassFromXMLString(Access, xml_string)


class Albumid(PhotosBaseElement):
    "The Google Photo `Albumid' element"
    _tag = 'albumid'

def AlbumidFromString(xml_string):
    return atom.CreateClassFromXMLString(Albumid, xml_string)


class BytesUsed(PhotosBaseElement):
    "The Google Photo `BytesUsed' element"
    _tag = 'bytesUsed'

def BytesUsedFromString(xml_string):
    return atom.CreateClassFromXMLString(BytesUsed, xml_string)


class Client(PhotosBaseElement):
    "The Google Photo `Client' element"
    _tag = 'client'

def ClientFromString(xml_string):
    return atom.CreateClassFromXMLString(Client, xml_string)


class Checksum(PhotosBaseElement):
    "The Google Photo `Checksum' element"
    _tag = 'checksum'

def ChecksumFromString(xml_string):
    return atom.CreateClassFromXMLString(Checksum, xml_string)


class CommentCount(PhotosBaseElement):
    "The Google Photo `CommentCount' element"
    _tag = 'commentCount'

def CommentCountFromString(xml_string):
    return atom.CreateClassFromXMLString(CommentCount, xml_string)


class CommentingEnabled(PhotosBaseElement):
    "The Google Photo `CommentingEnabled' element"
    _tag = 'commentingEnabled'

def CommentingEnabledFromString(xml_string):
    return atom.CreateClassFromXMLString(CommentingEnabled, xml_string)


class Height(PhotosBaseElement):
    "The Google Photo `Height' element"
    _tag = 'height'

def HeightFromString(xml_string):
    return atom.CreateClassFromXMLString(Height, xml_string)


class Id(PhotosBaseElement):
    "The Google Photo `Id' element"
    _tag = 'id'

def IdFromString(xml_string):
    return atom.CreateClassFromXMLString(Id, xml_string)


class Location(PhotosBaseElement):
    "The Google Photo `Location' element"
    _tag = 'location'

def LocationFromString(xml_string):
    return atom.CreateClassFromXMLString(Location, xml_string)


class MaxPhotosPerAlbum(PhotosBaseElement):
    "The Google Photo `MaxPhotosPerAlbum' element"
    _tag = 'maxPhotosPerAlbum'

def MaxPhotosPerAlbumFromString(xml_string):
    return atom.CreateClassFromXMLString(MaxPhotosPerAlbum, xml_string)


class Name(PhotosBaseElement):
    "The Google Photo `Name' element"
    _tag = 'name'

def NameFromString(xml_string):
    return atom.CreateClassFromXMLString(Name, xml_string)


class Nickname(PhotosBaseElement):
    "The Google Photo `Nickname' element"
    _tag = 'nickname'

def NicknameFromString(xml_string):
    return atom.CreateClassFromXMLString(Nickname, xml_string)


class Numphotos(PhotosBaseElement):
    "The Google Photo `Numphotos' element"
    _tag = 'numphotos'

def NumphotosFromString(xml_string):
    return atom.CreateClassFromXMLString(Numphotos, xml_string)


class Numphotosremaining(PhotosBaseElement):
    "The Google Photo `Numphotosremaining' element"
    _tag = 'numphotosremaining'

def NumphotosremainingFromString(xml_string):
    return atom.CreateClassFromXMLString(Numphotosremaining, xml_string)


class Position(PhotosBaseElement):
    "The Google Photo `Position' element"
    _tag = 'position'

def PositionFromString(xml_string):
    return atom.CreateClassFromXMLString(Position, xml_string)


class Photoid(PhotosBaseElement):
    "The Google Photo `Photoid' element"
    _tag = 'photoid'

def PhotoidFromString(xml_string):
    return atom.CreateClassFromXMLString(Photoid, xml_string)
class Quotacurrent(PhotosBaseElement):
    "The Google Photo `Quotacurrent' element"
    _tag = 'quotacurrent'

def QuotacurrentFromString(xml_string):
    return atom.CreateClassFromXMLString(Quotacurrent, xml_string)


class Quotalimit(PhotosBaseElement):
    "The Google Photo `Quotalimit' element"
    _tag = 'quotalimit'

def QuotalimitFromString(xml_string):
    return atom.CreateClassFromXMLString(Quotalimit, xml_string)


class Rotation(PhotosBaseElement):
    "The Google Photo `Rotation' element"
    _tag = 'rotation'

def RotationFromString(xml_string):
    return atom.CreateClassFromXMLString(Rotation, xml_string)


class Size(PhotosBaseElement):
    "The Google Photo `Size' element"
    _tag = 'size'

def SizeFromString(xml_string):
    return atom.CreateClassFromXMLString(Size, xml_string)


class Snippet(PhotosBaseElement):
    """The Google Photo `snippet' element.

    When searching, the snippet element will contain a
    string with the word you're looking for, highlighted in html markup
    E.g. when your query is `hafjell', this element may contain:
    `... here at <b>Hafjell</b>.'

    You'll find this element in searches -- that is, feeds that combine the
    `kind=photo' and `q=yoursearch' parameters in the request.

    See also gphoto:truncated and gphoto:snippettype.
    """
    _tag = 'snippet'

def SnippetFromString(xml_string):
    return atom.CreateClassFromXMLString(Snippet, xml_string)


class Snippettype(PhotosBaseElement):
    """The Google Photo `Snippettype' element

    When searching, this element will tell you the type of element that matches.

    You'll find this element in searches -- that is, feeds that combine the
    `kind=photo' and `q=yoursearch' parameters in the request.

    See also gphoto:snippet and gphoto:truncated.

    Possible values and their interpretation:
    o ALBUM_TITLE       - The album title matches
    o PHOTO_TAGS        - The match is a tag/keyword
    o PHOTO_DESCRIPTION - The match is in the photo's description

    If you discover a value not listed here, please submit a patch to update
    this docstring.
    """
    _tag = 'snippettype'

def SnippettypeFromString(xml_string):
    return atom.CreateClassFromXMLString(Snippettype, xml_string)


class Thumbnail(PhotosBaseElement):
    """The Google Photo `Thumbnail' element

    Used to display user's photo thumbnail (hackergotchi).
    (Not to be confused with the <media:thumbnail> element, which gives you
    small versions of the photo object.)"""
    _tag = 'thumbnail'

def ThumbnailFromString(xml_string):
    return atom.CreateClassFromXMLString(Thumbnail, xml_string)


class Timestamp(PhotosBaseElement):
    """The Google Photo `Timestamp' element

    Represented as the number of milliseconds since January 1st, 1970.

    Take a look at the convenience methods .isoformat() and .datetime():

    photo_epoch     = Time.text        # 1180294337000
    photo_isostring = Time.isoformat() # '2007-05-27T19:32:17.000Z'

    Alternatively:
    photo_datetime  = Time.datetime()  # (requires python >= 2.3)
    """
    _tag = 'timestamp'

    def isoformat(self):
        """(string) Return the timestamp as a ISO 8601 formatted string,
        e.g. '2007-05-27T19:32:17.000Z'
        """
        import time
        seconds = float(self.text) / 1000
        return time.strftime('%Y-%m-%dT%H:%M:%S.000Z', time.gmtime(seconds))

    def datetime(self):
        """(datetime.datetime) Return the timestamp as a datetime.datetime
        object. Requires python 2.3.
        """
        import datetime
        seconds = float(self.text) / 1000
        return datetime.datetime.fromtimestamp(seconds)

def TimestampFromString(xml_string):
    return atom.CreateClassFromXMLString(Timestamp, xml_string)
class Truncated(PhotosBaseElement):
    """The Google Photo `Truncated' element

    You'll find this element in searches -- that is, feeds that combine the
    `kind=photo' and `q=yoursearch' parameters in the request.

    See also gphoto:snippet and gphoto:snippettype.

    Possible values and their interpretation:
    0 -- unknown
    """
    # NOTE(review): the capitalized tag differs from the lowercase
    # '{...}truncated' key used in PhotoData._children -- confirm intended.
    _tag = 'Truncated'

def TruncatedFromString(xml_string):
    return atom.CreateClassFromXMLString(Truncated, xml_string)


class User(PhotosBaseElement):
    "The Google Photo `User' element"
    _tag = 'user'

def UserFromString(xml_string):
    return atom.CreateClassFromXMLString(User, xml_string)


class Version(PhotosBaseElement):
    "The Google Photo `Version' element"
    _tag = 'version'

def VersionFromString(xml_string):
    return atom.CreateClassFromXMLString(Version, xml_string)


class Width(PhotosBaseElement):
    "The Google Photo `Width' element"
    _tag = 'width'

def WidthFromString(xml_string):
    return atom.CreateClassFromXMLString(Width, xml_string)


class Weight(PhotosBaseElement):
    """The Google Photo `Weight' element.

    The weight of the tag is the number of times the tag
    appears in the collection of tags currently being viewed.
    The default weight is 1, in which case this tags is omitted."""
    _tag = 'weight'

def WeightFromString(xml_string):
    return atom.CreateClassFromXMLString(Weight, xml_string)


class CommentAuthor(atom.Author):
    """The Atom `Author' element in CommentEntry entries is augmented to
    contain elements from the PHOTOS_NAMESPACE

    http://groups.google.com/group/Google-Picasa-Data-API/msg/819b0025b5ff5e38
    """
    _children = atom.Author._children.copy()
    _children['{%s}user' % PHOTOS_NAMESPACE] = ('user', User)
    _children['{%s}nickname' % PHOTOS_NAMESPACE] = ('nickname', Nickname)
    _children['{%s}thumbnail' % PHOTOS_NAMESPACE] = ('thumbnail', Thumbnail)

def CommentAuthorFromString(xml_string):
    return atom.CreateClassFromXMLString(CommentAuthor, xml_string)
##########################################################################


class AlbumData(object):
    """Mixin holding the gphoto:* child map and attributes of an album."""

    _children = {}
    _children['{%s}id' % PHOTOS_NAMESPACE] = ('gphoto_id', Id)
    _children['{%s}name' % PHOTOS_NAMESPACE] = ('name', Name)
    _children['{%s}location' % PHOTOS_NAMESPACE] = ('location', Location)
    _children['{%s}access' % PHOTOS_NAMESPACE] = ('access', Access)
    _children['{%s}bytesUsed' % PHOTOS_NAMESPACE] = ('bytesUsed', BytesUsed)
    _children['{%s}timestamp' % PHOTOS_NAMESPACE] = ('timestamp', Timestamp)
    _children['{%s}numphotos' % PHOTOS_NAMESPACE] = ('numphotos', Numphotos)
    _children['{%s}numphotosremaining' % PHOTOS_NAMESPACE] = \
        ('numphotosremaining', Numphotosremaining)
    _children['{%s}user' % PHOTOS_NAMESPACE] = ('user', User)
    _children['{%s}nickname' % PHOTOS_NAMESPACE] = ('nickname', Nickname)
    _children['{%s}commentingEnabled' % PHOTOS_NAMESPACE] = \
        ('commentingEnabled', CommentingEnabled)
    _children['{%s}commentCount' % PHOTOS_NAMESPACE] = \
        ('commentCount', CommentCount)
    ## NOTE: media:group is stored as self.media to create a
    ## self-explaining api

    # Class-level defaults for all album attributes.
    gphoto_id = None
    name = None
    location = None
    access = None
    bytesUsed = None
    timestamp = None
    numphotos = None
    numphotosremaining = None
    user = None
    nickname = None
    commentingEnabled = None
    commentCount = None
class AlbumEntry(GPhotosBaseEntry, AlbumData):
    """All metadata for a Google Photos Album

    Take a look at AlbumData for metadata accessible as attributes to this
    object.

    Notes:
    To avoid name clashes, and to create a more sensible api, some
    objects have names that differ from the original elements:

    o media:group -> self.media,
    o geo:where -> self.geo,
    o photo:id -> self.gphoto_id
    """

    _kind = 'album'
    _children = GPhotosBaseEntry._children.copy()
    _children.update(AlbumData._children.copy())
    # Child tags only present on Album entries, not feeds.
    _children['{%s}where' % GEORSS_NAMESPACE] = ('geo', Geo.Where)
    _children['{%s}group' % MEDIA_NAMESPACE] = ('media', Media.Group)
    media = Media.Group()
    geo = Geo.Where()

    def __init__(self, author=None, category=None, content=None,
                 atom_id=None, link=None, published=None,
                 title=None, updated=None,
                 # GPHOTO NAMESPACE:
                 gphoto_id=None, name=None, location=None, access=None,
                 timestamp=None, numphotos=None, user=None, nickname=None,
                 commentingEnabled=None, commentCount=None, thumbnail=None,
                 # MEDIA NAMESPACE:
                 media=None,
                 # GEORSS NAMESPACE:
                 geo=None,
                 extended_property=None,
                 extension_elements=None, extension_attributes=None, text=None):
        GPhotosBaseEntry.__init__(self, author=author, category=category,
                                  content=content, atom_id=atom_id, link=link,
                                  published=published, title=title,
                                  updated=updated, text=text,
                                  extension_elements=extension_elements,
                                  extension_attributes=extension_attributes)
        ## photo:id is stored as self.gphoto_id to avoid clashing with atom:id
        self.gphoto_id = gphoto_id
        self.name = name
        self.location = location
        self.access = access
        self.timestamp = timestamp
        self.numphotos = numphotos
        self.user = user
        self.nickname = nickname
        self.commentingEnabled = commentingEnabled
        self.commentCount = commentCount
        self.thumbnail = thumbnail
        self.extended_property = extended_property or []
        self.text = text
        ## media:group is stored as self.media and geo:where as self.geo
        ## to create a self-explaining api
        self.media = media or Media.Group()
        self.geo = geo or Geo.Where()

    def GetAlbumId(self):
        """Return the id of this album."""
        return self.GetFeedLink().href.split('/')[-1]

    def GetPhotosUri(self):
        """(string) Return the uri to this album's feed of PhotoEntry items."""
        return self._feedUri('photo')

    def GetCommentsUri(self):
        """(string) Return the uri to this album's feed of CommentEntry items."""
        return self._feedUri('comment')

    def GetTagsUri(self):
        """(string) Return the uri to this album's feed of TagEntry items."""
        return self._feedUri('tag')


def AlbumEntryFromString(xml_string):
    return atom.CreateClassFromXMLString(AlbumEntry, xml_string)
class AlbumFeed(GPhotosBaseFeed, AlbumData):
    """All metadata for a Google Photos Album, including its sub-elements

    This feed represents an album as the container for other objects.

    An Album feed contains entries of PhotoEntry, CommentEntry or TagEntry,
    depending on the `kind' parameter in the original query.

    Take a look at AlbumData for accessible attributes.
    """

    _children = GPhotosBaseFeed._children.copy()
    _children.update(AlbumData._children.copy())

    def GetPhotosUri(self):
        """(string) Return the uri to the same feed, but of the PhotoEntry kind."""
        return self._feedUri('photo')

    def GetTagsUri(self):
        """(string) Return the uri to the same feed, but of the TagEntry kind."""
        return self._feedUri('tag')

    def GetCommentsUri(self):
        """(string) Return the uri to the same feed, but of the CommentEntry kind."""
        return self._feedUri('comment')


def AlbumFeedFromString(xml_string):
    return atom.CreateClassFromXMLString(AlbumFeed, xml_string)
class PhotoData(object):
    """Mixin holding the child map and attributes of a single photo."""

    _children = {}
    ## photo:id is stored as self.gphoto_id to avoid clashing with atom:id
    _children['{%s}id' % PHOTOS_NAMESPACE] = ('gphoto_id', Id)
    _children['{%s}albumid' % PHOTOS_NAMESPACE] = ('albumid', Albumid)
    _children['{%s}checksum' % PHOTOS_NAMESPACE] = ('checksum', Checksum)
    _children['{%s}client' % PHOTOS_NAMESPACE] = ('client', Client)
    _children['{%s}height' % PHOTOS_NAMESPACE] = ('height', Height)
    _children['{%s}position' % PHOTOS_NAMESPACE] = ('position', Position)
    _children['{%s}rotation' % PHOTOS_NAMESPACE] = ('rotation', Rotation)
    _children['{%s}size' % PHOTOS_NAMESPACE] = ('size', Size)
    _children['{%s}timestamp' % PHOTOS_NAMESPACE] = ('timestamp', Timestamp)
    _children['{%s}version' % PHOTOS_NAMESPACE] = ('version', Version)
    _children['{%s}width' % PHOTOS_NAMESPACE] = ('width', Width)
    _children['{%s}commentingEnabled' % PHOTOS_NAMESPACE] = \
        ('commentingEnabled', CommentingEnabled)
    _children['{%s}commentCount' % PHOTOS_NAMESPACE] = \
        ('commentCount', CommentCount)
    ## media:group is stored as self.media, exif:tags as self.exif and
    ## geo:where as self.geo, to create a self-explaining api
    _children['{%s}tags' % EXIF_NAMESPACE] = ('exif', Exif.Tags)
    _children['{%s}where' % GEORSS_NAMESPACE] = ('geo', Geo.Where)
    _children['{%s}group' % MEDIA_NAMESPACE] = ('media', Media.Group)
    # These elements show up in search feeds only.
    _children['{%s}snippet' % PHOTOS_NAMESPACE] = ('snippet', Snippet)
    _children['{%s}snippettype' % PHOTOS_NAMESPACE] = ('snippettype', Snippettype)
    _children['{%s}truncated' % PHOTOS_NAMESPACE] = ('truncated', Truncated)

    # Class-level defaults for all photo attributes.
    gphoto_id = None
    albumid = None
    checksum = None
    client = None
    height = None
    position = None
    rotation = None
    size = None
    timestamp = None
    version = None
    width = None
    commentingEnabled = None
    commentCount = None
    snippet = None
    snippettype = None
    truncated = None
    media = Media.Group()
    geo = Geo.Where()
    # NOTE(review): attribute is named `tags` while the child map binds the
    # exif:tags element to `exif` -- confirm which name consumers rely on.
    tags = Exif.Tags()
class PhotoEntry(GPhotosBaseEntry, PhotoData):
"""All metadata for a Google Photos Photo
Take a look at PhotoData for metadata accessible as attributes to this object.
Notes:
To avoid name clashes, and to create a more sensible api, some
objects have names that differ from the original elements:
o media:group -> self.media,
o exif:tags -> self.exif,
o geo:where -> self.geo,
o photo:id -> self.gphoto_id
"""
_kind = 'photo'
_children = GPhotosBaseEntry._children.copy()
_children.update(PhotoData._children.copy())
def __init__(self, author=None, category=None, content=None,
atom_id=None, link=None, published=None,
title=None, updated=None, text=None,
# GPHOTO NAMESPACE:
gphoto_id=None, albumid=None, checksum=None, client=None, height=None,
position=None, rotation=None, size=None, timestamp=None, version=None,
width=None, commentCount=None, commentingEnabled=None,
# MEDIARSS NAMESPACE:
media=None,
# EXIF_NAMESPACE:
exif=None,
# GEORSS NAMESPACE:
geo=None,
extension_elements=None, extension_attributes=None):
GPhotosBaseEntry.__init__(self, author=author, category=category,
content=content,
atom_id=atom_id, link=link, published=published,
title=title, updated=updated, text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes)
## NOTE: storing photo:id as self.gphoto_id, to avoid name clash with atom:id
self.gphoto_id = gphoto_id
self.albumid = albumid
self.checksum = checksum
self.client = client
self.height = height
self.position = position
self.rotation = rotation
self.size = size
self.timestamp = timestamp
self.version = version
self.width = width
self.commentingEnabled = commentingEnabled
self.commentCount = commentCount
## NOTE: storing media:group as self.media, to create a self-explaining api
self.media = media or Media.Group()
self.exif = exif or Exif.Tags()
self.geo = geo or Geo.Where()
def GetPostLink(self):
"Return the uri to this photo's `POST' link (use it for updates of the object)"
return self.GetFeedLink()
def GetCommentsUri(self):
"Return the uri to this photo's feed of CommentEntry comments"
return self._feedUri('comment')
def GetTagsUri(self):
"Return the uri to this photo's feed of TagEntry tags"
return self._feedUri('tag')
def GetAlbumUri(self):
"""Return the uri to the AlbumEntry containing this photo"""
href = self.GetSelfLink().href
return href[:href.find('/photoid')]
def PhotoEntryFromString(xml_string):
  """Deserialize a PhotoEntry from its XML representation."""
  entry_class = PhotoEntry
  return atom.CreateClassFromXMLString(entry_class, xml_string)
class PhotoFeed(GPhotosBaseFeed, PhotoData):
  """Feed representation of a single Google Photos photo.

  Here the photo acts as a container: depending on the `kind' parameter
  of the original query, its entries are CommentEntry or TagEntry
  objects. All photo metadata attributes are inherited from PhotoData.
  """
  _children = GPhotosBaseFeed._children.copy()
  _children.update(PhotoData._children.copy())

  def GetCommentsUri(self):
    """(string) Uri of this feed, but of the CommentEntry kind."""
    return self._feedUri('comment')

  def GetTagsUri(self):
    """(string) Uri of this feed, but of the TagEntry kind."""
    return self._feedUri('tag')
def PhotoFeedFromString(xml_string):
return atom.CreateClassFromXMLString(PhotoFeed, xml_string)
class TagData(GPhotosBaseData):
  # XML-to-attribute bindings: gphoto:weight maps onto `weight'.
  _children = {}
  _children['{%s}weight' % PHOTOS_NAMESPACE] = ('weight', Weight)
  # Class-level default; parsed entries carry a Weight instance.
  weight=None
class TagEntry(GPhotosBaseEntry, TagData):
  """All metadata for a Google Photos Tag

  The actual tag is stored in the .title.text attribute
  """
  _kind = 'tag'
  _children = GPhotosBaseEntry._children.copy()
  _children.update(TagData._children.copy())

  def __init__(self, author=None, category=None, content=None,
      atom_id=None, link=None, published=None,
      title=None, updated=None,
      # GPHOTO NAMESPACE:
      weight=None,
      extended_property=None,
      extension_elements=None, extension_attributes=None, text=None):
    """Create a TagEntry.

    Atom arguments are forwarded to GPhotosBaseEntry.__init__;
    `weight' maps to the gphoto:weight element.
    """
    GPhotosBaseEntry.__init__(self, author=author, category=category,
        content=content,
        atom_id=atom_id, link=link, published=published,
        title=title, updated=updated, text=text,
        extension_elements=extension_elements,
        extension_attributes=extension_attributes)
    self.weight = weight

  def GetAlbumUri(self):
    """Return the uri to the AlbumEntry containing this tag.

    Returns None when the self link has no `/photoid' segment.
    """
    href = self.GetSelfLink().href
    pos = href.find('/photoid')
    if pos == -1:
      return None
    return href[:pos]

  def GetPhotoUri(self):
    """Return the uri to the PhotoEntry containing this tag.

    Returns None when the self link has no `/tag' segment.
    """
    href = self.GetSelfLink().href
    pos = href.find('/tag')
    if pos == -1:
      return None
    return href[:pos]
def TagEntryFromString(xml_string):
  """Deserialize a TagEntry from its XML representation."""
  entry_class = TagEntry
  return atom.CreateClassFromXMLString(entry_class, xml_string)
class TagFeed(GPhotosBaseFeed, TagData):
  """All metadata for a Google Photos Tag, including its sub-elements"""
  # Combine the generic feed bindings with the tag-specific ones.
  _children = GPhotosBaseFeed._children.copy()
  _children.update(TagData._children.copy())
def TagFeedFromString(xml_string):
  """Deserialize a TagFeed from its XML representation."""
  feed_class = TagFeed
  return atom.CreateClassFromXMLString(feed_class, xml_string)
class CommentData(GPhotosBaseData):
  # XML-to-attribute bindings for the gphoto and atom namespaces.
  _children = {}
  ## NOTE: storing photo:id as self.gphoto_id, to avoid name clash with atom:id
  _children['{%s}id' % PHOTOS_NAMESPACE] = ('gphoto_id', Id)
  _children['{%s}albumid' % PHOTOS_NAMESPACE] = ('albumid', Albumid)
  _children['{%s}photoid' % PHOTOS_NAMESPACE] = ('photoid', Photoid)
  # atom:author may repeat, hence the list wrapper around the converter.
  _children['{%s}author' % atom.ATOM_NAMESPACE] = ('author', [CommentAuthor,])
  # Class-level defaults, overridden per instance when parsed from XML.
  gphoto_id=None
  albumid=None
  photoid=None
  author=None
class CommentEntry(GPhotosBaseEntry, CommentData):
  """All metadata for a Google Photos Comment

  The comment is stored in the .content.text attribute,
  with a content type in .content.type.
  """
  _kind = 'comment'
  _children = GPhotosBaseEntry._children.copy()
  _children.update(CommentData._children.copy())

  def __init__(self, author=None, category=None, content=None,
      atom_id=None, link=None, published=None,
      title=None, updated=None,
      # GPHOTO NAMESPACE:
      gphoto_id=None, albumid=None, photoid=None,
      extended_property=None,
      extension_elements=None, extension_attributes=None, text=None):
    """Create a CommentEntry; gphoto ids locate the parent album/photo."""
    GPhotosBaseEntry.__init__(self, author=author, category=category,
        content=content,
        atom_id=atom_id, link=link, published=published,
        title=title, updated=updated,
        extension_elements=extension_elements,
        extension_attributes=extension_attributes,
        text=text)
    self.gphoto_id = gphoto_id
    self.albumid = albumid
    self.photoid = photoid

  def GetCommentId(self):
    """Return the globally unique id of this comment"""
    return self.GetSelfLink().href.split('/')[-1]

  def GetAlbumUri(self):
    """Return the uri to the AlbumEntry containing this comment.

    Returns None when the self link has no `/photoid' segment
    (consistent with TagEntry.GetAlbumUri); previously str.find
    returning -1 produced a silently truncated `href[:-1]'.
    """
    href = self.GetSelfLink().href
    pos = href.find('/photoid')
    if pos == -1:
      return None
    return href[:pos]

  def GetPhotoUri(self):
    """Return the uri to the PhotoEntry containing this comment.

    Returns None when the self link has no `/commentid' segment.
    """
    href = self.GetSelfLink().href
    pos = href.find('/commentid')
    if pos == -1:
      return None
    return href[:pos]
def CommentEntryFromString(xml_string):
  """Deserialize a CommentEntry from its XML representation."""
  entry_class = CommentEntry
  return atom.CreateClassFromXMLString(entry_class, xml_string)
class CommentFeed(GPhotosBaseFeed, CommentData):
  """All metadata for a Google Photos Comment, including its sub-elements"""
  # Combine the generic feed bindings with the comment-specific ones.
  _children = GPhotosBaseFeed._children.copy()
  _children.update(CommentData._children.copy())
def CommentFeedFromString(xml_string):
  """Deserialize a CommentFeed from its XML representation."""
  feed_class = CommentFeed
  return atom.CreateClassFromXMLString(feed_class, xml_string)
class UserData(GPhotosBaseData):
  # XML-to-attribute bindings for user/owner metadata in the gphoto
  # namespace (quota, nickname, avatar thumbnail, account name, id).
  _children = {}
  _children['{%s}maxPhotosPerAlbum' % PHOTOS_NAMESPACE] = ('maxPhotosPerAlbum', MaxPhotosPerAlbum)
  _children['{%s}nickname' % PHOTOS_NAMESPACE] = ('nickname', Nickname)
  _children['{%s}quotalimit' % PHOTOS_NAMESPACE] = ('quotalimit', Quotalimit)
  _children['{%s}quotacurrent' % PHOTOS_NAMESPACE] = ('quotacurrent', Quotacurrent)
  _children['{%s}thumbnail' % PHOTOS_NAMESPACE] = ('thumbnail', Thumbnail)
  _children['{%s}user' % PHOTOS_NAMESPACE] = ('user', User)
  # gphoto:id stored as gphoto_id to avoid a clash with atom:id.
  _children['{%s}id' % PHOTOS_NAMESPACE] = ('gphoto_id', Id)
  # Class-level defaults, overridden per instance when parsed from XML.
  maxPhotosPerAlbum=None
  nickname=None
  quotalimit=None
  quotacurrent=None
  thumbnail=None
  user=None
  gphoto_id=None
class UserEntry(GPhotosBaseEntry, UserData):
  """All metadata for a Google Photos User

  This entry represents an album owner and all appropriate metadata.
  Take a look at at the attributes of the UserData for metadata available.
  """
  _children = GPhotosBaseEntry._children.copy()
  _children.update(UserData._children.copy())
  _kind = 'user'

  def __init__(self, author=None, category=None, content=None,
      atom_id=None, link=None, published=None,
      title=None, updated=None,
      # GPHOTO NAMESPACE:
      gphoto_id=None, maxPhotosPerAlbum=None, nickname=None, quotalimit=None,
      quotacurrent=None, thumbnail=None, user=None,
      extended_property=None,
      extension_elements=None, extension_attributes=None, text=None):
    """Create a UserEntry.

    Atom arguments are forwarded to GPhotosBaseEntry.__init__; the
    remaining keywords map to elements of the gphoto namespace.
    """
    GPhotosBaseEntry.__init__(self, author=author, category=category,
        content=content,
        atom_id=atom_id, link=link, published=published,
        title=title, updated=updated,
        extension_elements=extension_elements,
        extension_attributes=extension_attributes,
        text=text)
    self.gphoto_id=gphoto_id
    self.maxPhotosPerAlbum=maxPhotosPerAlbum
    self.nickname=nickname
    self.quotalimit=quotalimit
    self.quotacurrent=quotacurrent
    self.thumbnail=thumbnail
    self.user=user

  def GetAlbumsUri(self):
    "(string) Return the uri to this user's feed of the AlbumEntry kind"
    return self._feedUri('album')

  def GetPhotosUri(self):
    "(string) Return the uri to this user's feed of the PhotoEntry kind"
    return self._feedUri('photo')

  def GetCommentsUri(self):
    "(string) Return the uri to this user's feed of the CommentEntry kind"
    return self._feedUri('comment')

  def GetTagsUri(self):
    "(string) Return the uri to this user's feed of the TagEntry kind"
    return self._feedUri('tag')
def UserEntryFromString(xml_string):
  """Deserialize a UserEntry from its XML representation."""
  entry_class = UserEntry
  return atom.CreateClassFromXMLString(entry_class, xml_string)
class UserFeed(GPhotosBaseFeed, UserData):
  """Feed for a User in the google photos api.

  A user is the container for the other objects: depending on the
  `kind' parameter of the original query, this feed holds entries of
  AlbumEntry, PhotoEntry, CommentEntry, UserEntry or TagEntry.
  All metadata of a UserData object is also available on the feed.
  """
  _children = GPhotosBaseFeed._children.copy()
  _children.update(UserData._children.copy())

  def GetAlbumsUri(self):
    """Uri of this feed, but with entries of the AlbumEntry kind."""
    return self._feedUri('album')

  def GetCommentsUri(self):
    """Uri of this feed, but with entries of the CommentsEntry kind."""
    return self._feedUri('comment')

  def GetPhotosUri(self):
    """Uri of this feed, but with entries of the PhotosEntry kind."""
    return self._feedUri('photo')

  def GetTagsUri(self):
    """Uri of this feed, but with entries of the TagEntry kind."""
    return self._feedUri('tag')
def UserFeedFromString(xml_string):
  """Deserialize a UserFeed from its XML representation."""
  feed_class = UserFeed
  return atom.CreateClassFromXMLString(feed_class, xml_string)
def AnyFeedFromString(xml_string):
  """Creates an instance of the appropriate feed class from the
  xml string contents.

  Args:
    xml_string: str A string which contains valid XML. The root element
        of the XML string should match the tag and namespace of the desired
        class.

  Returns:
    An instance of the target class with members assigned according to the
    contents of the XML - or a basic gdata.GDataFeed instance if it is
    impossible to determine the appropriate class (look for extra elements
    in GDataFeed's .FindExtensions() and extension_elements[] ).
  """
  tree = ElementTree.fromstring(xml_string)
  category = tree.find('{%s}category' % atom.ATOM_NAMESPACE)
  if category is None:
    # TODO: is this the best way to handle this?
    return atom._CreateClassFromElementTree(GPhotosBaseFeed, tree)
  # The category term has the form `<namespace>#<kind>', e.g. `...#photo'.
  namespace, kind = category.get('term').split('#')
  if namespace != PHOTOS_NAMESPACE:
    # TODO: is this the best way to handle this?
    return atom._CreateClassFromElementTree(GPhotosBaseFeed, tree)
  ## TODO: is getattr safe this way?
  # Maps e.g. the kind `photo' onto the module-level PhotoFeed class.
  feed_class = getattr(gdata.photos, '%sFeed' % kind.title())
  return atom._CreateClassFromElementTree(feed_class, tree)
def AnyEntryFromString(xml_string):
  """Creates an instance of the appropriate entry class from the
  xml string contents.

  Args:
    xml_string: str A string which contains valid XML. The root element
        of the XML string should match the tag and namespace of the desired
        class.

  Returns:
    An instance of the target class with members assigned according to the
    contents of the XML - or a basic gdata.GDataEntry instance if it is
    impossible to determine the appropriate class (look for extra elements
    in GDataEntry's .FindExtensions() and extension_elements[] ).
  """
  tree = ElementTree.fromstring(xml_string)
  category = tree.find('{%s}category' % atom.ATOM_NAMESPACE)
  if category is None:
    # TODO: is this the best way to handle this?
    return atom._CreateClassFromElementTree(GPhotosBaseEntry, tree)
  # The category term has the form `<namespace>#<kind>', e.g. `...#photo'.
  namespace, kind = category.get('term').split('#')
  if namespace != PHOTOS_NAMESPACE:
    # TODO: is this the best way to handle this?
    return atom._CreateClassFromElementTree(GPhotosBaseEntry, tree)
  ## TODO: is getattr safe this way?
  # Maps e.g. the kind `photo' onto the module-level PhotoEntry class.
  feed_class = getattr(gdata.photos, '%sEntry' % kind.title())
  return atom._CreateClassFromElementTree(feed_class, tree)
| apache-2.0 |
Zsailer/epistasis | epistasis/__init__.py | 2 | 1202 | """\
A Python API for modeling statistical, high-order epistasis in genotype-phenotype maps.
This library provides methods for:
1. Decomposing genotype-phenotype maps into high-order epistatic interactions
2. Finding nonlinear scales in the genotype-phenotype map
3. Calculating the contributions of different epistatic orders
4. Estimating the uncertainty of epistatic coefficients and
5. Interpreting the evolutionary importance of high-order interactions.
For more information about the epistasis models in this library, see our Genetics paper:
`Sailer, Z. R., & Harms, M. J. (2017). "Detecting High-Order Epistasis in Nonlinear Genotype-Phenotype Maps." Genetics, 205(3), 1079-1088.`_
.. _`Sailer, Z. R., & Harms, M. J. (2017). "Detecting High-Order Epistasis in Nonlinear Genotype-Phenotype Maps." Genetics, 205(3), 1079-1088.`: http://www.genetics.org/content/205/3/1079
Currently, this package works only as an API and there is no command-line
interface. Instead, we encourage you to use this package inside `Jupyter notebooks`_ .
"""
from .__version__ import __version__
# from . import models
# from . import simulate
# from . import sampling
# from . import pyplot
| unlicense |
NoobieDog/nexmon | buildtools/b43/fwcutter/mklist.py | 7 | 3309 | #!/usr/bin/env python
#
# Script for creating a "struct extract" list for fwcutter_list.h
#
# Copyright (c) 2008 Michael Buesch <m@bues.ch>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
# GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
import os
import re
import hashlib
if len(sys.argv) != 2:
print "Usage: %s path/to/wl.o" % sys.argv[0]
sys.exit(1)
fn = sys.argv[1]
pipe = os.popen("objdump -t %s" % fn)
syms = pipe.readlines()
pipe = os.popen("objdump --headers %s" % fn)
headers = pipe.readlines()
# Get the .rodata fileoffset
rodata_fileoffset = None
rofileoff_re = re.compile(r"\d+\s+\.rodata\s+[0-9a-fA-F]+\s+[0-9a-fA-F]+\s+[0-9a-fA-F]+\s+([0-9a-fA-F]+)\s+.")
for line in headers:
line = line.strip()
m = rofileoff_re.match(line)
if m:
rodata_fileoffset = int(m.group(1), 16)
break
if rodata_fileoffset == None:
print "ERROR: Could not find .rodata fileoffset"
sys.exit(1)
md5sum = hashlib.md5(file(fn, "r").read())
print "static struct extract _%s[] =" % md5sum.hexdigest()
print "{"
sym_re = re.compile(r"([0-9a-fA-F]+)\s+g\s+O\s+\.rodata\s+([0-9a-fA-F]+) d11([-_\s\w0-9]+)")
ucode_re = re.compile(r"ucode(\d+)")
for sym in syms:
sym = sym.strip()
m = sym_re.match(sym)
if not m:
continue
pos = int(m.group(1), 16) + rodata_fileoffset
size = int(m.group(2), 16)
name = m.group(3)
if name[-2:] == "sz":
continue
type = None
if "initvals" in name:
type = "EXT_IV"
size -= 8
if "pcm" in name:
type = "EXT_PCM"
if "bommajor" in name:
print "\t/* ucode major version at offset 0x%x */" % pos
continue
if "bomminor" in name:
print "\t/* ucode minor version at offset 0x%x */" % pos
continue
if "ucode_2w" in name:
continue
m = ucode_re.match(name)
if m:
corerev = int(m.group(1))
if corerev <= 4:
type = "EXT_UCODE_1"
elif corerev >= 5 and corerev <= 14:
type = "EXT_UCODE_2"
else:
type = "EXT_UCODE_3"
if not type:
print "\t/* ERROR: Could not guess data type for: %s */" % name
continue
print "\t{ .name = \"%s\", .offset = 0x%X, .type = %s, .length = 0x%X }," % (name, pos, type, size)
print "\tEXTRACT_LIST_END"
print "};"
| gpl-3.0 |
savoirfairelinux/influxdb-python | influxdb/tests/server_tests/base.py | 7 | 2031 | # -*- coding: utf-8 -*-
import sys
from influxdb.tests import using_pypy
from influxdb.tests.server_tests.influxdb_instance import InfluxDbInstance
from influxdb.client import InfluxDBClient
if not using_pypy:
from influxdb.dataframe_client import DataFrameClient
def _setup_influxdb_server(inst):
    """Boot a throw-away influxd instance and attach clients to *inst*."""
    udp_enabled = getattr(inst, 'influxdb_udp_enabled', False)
    inst.influxd_inst = InfluxDbInstance(inst.influxdb_template_conf,
                                         udp_enabled=udp_enabled)
    # Both clients share the same connection parameters.
    connection = ('localhost', inst.influxd_inst.http_port, 'root', '')
    inst.cli = InfluxDBClient(*connection, database='db')
    if not using_pypy:
        inst.cliDF = DataFrameClient(*connection, database='db')
def _teardown_influxdb_server(inst):
remove_tree = sys.exc_info() == (None, None, None)
inst.influxd_inst.close(remove_tree=remove_tree)
class SingleTestCaseWithServerMixin(object):
    """Mixin for unittest.TestCase starting one influxdb server instance
    in a temporary directory **for each test function/case**.

    The 'influxdb_template_conf' attribute must be set on the TestCase
    class or instance.
    """

    def setUp(self):
        _setup_influxdb_server(self)

    def tearDown(self):
        _teardown_influxdb_server(self)
class ManyTestCasesWithServerMixin(object):
    """Like SingleTestCaseWithServerMixin, but one server per class.

    The influxdb instance is shared by every test method; a fresh 'db'
    database is created before each test and dropped afterwards.

    The 'influxdb_template_conf' attribute must be set on the class itself!
    """

    @classmethod
    def setUpClass(cls):
        _setup_influxdb_server(cls)

    @classmethod
    def tearDownClass(cls):
        _teardown_influxdb_server(cls)

    def setUp(self):
        self.cli.create_database('db')

    def tearDown(self):
        self.cli.drop_database('db')
| mit |
divio/askbot-devel | askbot/conf/leading_sidebar.py | 16 | 1135 | """
Sidebar settings
"""
from askbot.conf.settings_wrapper import settings
from askbot.deps.livesettings import ConfigurationGroup
from askbot.deps.livesettings import values
from django.utils.translation import ugettext_lazy as _
from askbot.conf.super_groups import CONTENT_AND_UI
# Livesettings group gathering the left-sidebar options.
LEADING_SIDEBAR = ConfigurationGroup(
    'LEADING_SIDEBAR',
    _('Common left sidebar'),
    super_group = CONTENT_AND_UI
)

settings.register(
    values.BooleanValue(
        LEADING_SIDEBAR,
        'ENABLE_LEADING_SIDEBAR',
        description = _('Enable left sidebar'),
        default = False,
    )
)

settings.register(
    values.LongStringValue(
        LEADING_SIDEBAR,
        'LEADING_SIDEBAR',
        description = _('HTML for the left sidebar'),
        default = '',
        help_text = _(
            # NOTE: implicit string concatenation — each fragment must end
            # with a space. The first fragment previously lacked one, which
            # rendered "...LEFT sidebarin HTML format...". Fixing it changes
            # the gettext msgid, so translations need a matching update.
            'Use this area to enter content at the LEFT sidebar '
            'in HTML format. When using this option, please '
            'use the HTML validation service to make sure that '
            'your input is valid and works well in all browsers.'
        )
    )
)
CMSS-BCRDB/RDSV1.0 | trove/network/neutron.py | 5 | 6185 | # Copyright 2014 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from trove.common import exception
from trove.common import remote
from trove.network import base
from trove.openstack.common import log as logging
from neutronclient.common import exceptions as neutron_exceptions
# Module-level logger for this driver.
LOG = logging.getLogger(__name__)

# String constants used when building Neutron security-group rule
# payloads (ethertypes, traffic directions and protocol names).
CONST = {'IPv4': "IPv4",
         'IPv6': "IPv6",
         'INGRESS': "ingress",
         'EGRESS': "egress",
         'PROTO_NAME_TCP': 'tcp',
         'PROTO_NAME_ICMP': 'icmp',
         'PROTO_NAME_ICMP_V6': 'icmpv6',
         'PROTO_NAME_UDP': 'udp'}
class NovaNetworkStruct(object):
    """Lightweight bag of attributes mimicking a nova-network object.

    Every keyword argument becomes an instance attribute, so converted
    Neutron payloads can be consumed through plain attribute access.
    """

    def __init__(self, **properties):
        for attr_name, attr_value in properties.items():
            setattr(self, attr_name, attr_value)
class NeutronDriver(base.NetworkDriver):
    """Network driver backed by the Neutron API.

    Translates Trove security-group operations into Neutron calls and
    converts Neutron payloads back into the nova-network shaped
    structures the rest of Trove expects.
    """

    def __init__(self, context):
        # Neutron client failures are surfaced as TroveError so callers
        # only have to handle Trove exceptions.
        try:
            self.client = remote.create_neutron_client(context)
        except neutron_exceptions.NeutronClientException as e:
            raise exception.TroveError(str(e))

    def get_sec_group_by_id(self, group_id):
        """Return the raw Neutron payload for one security group."""
        try:
            return self.client.show_security_group(security_group=group_id)
        except neutron_exceptions.NeutronClientException as e:
            LOG.exception('Failed to get remote security group')
            raise exception.TroveError(str(e))

    def create_security_group(self, name, description):
        """Create a security group; return it in nova-network format."""
        try:
            sec_group_body = {"security_group": {"name": name,
                                                 "description": description}}
            sec_group = self.client.create_security_group(body=sec_group_body)
            # Some client versions return the group nested under
            # 'security_group', others return it directly.
            return self._convert_to_nova_security_group_format(
                sec_group.get('security_group', sec_group))
        except neutron_exceptions.NeutronClientException as e:
            LOG.exception('Failed to create remote security group')
            raise exception.SecurityGroupCreationError(str(e))

    def delete_security_group(self, sec_group_id):
        """Delete the security group identified by ``sec_group_id``."""
        try:
            self.client.delete_security_group(security_group=sec_group_id)
        except neutron_exceptions.NeutronClientException as e:
            LOG.exception('Failed to delete remote security group')
            raise exception.SecurityGroupDeletionError(str(e))

    def add_security_group_rule(self, sec_group_id, protocol,
                                from_port, to_port, cidr,
                                direction=CONST['INGRESS'],
                                ethertype=CONST['IPv4']):
        """Add a rule to a security group; return it in nova format.

        An HTTP 409 from Neutron (rule already exists) is tolerated and
        only logged; any other client error is re-raised as a Trove
        SecurityGroupRuleCreationError.
        """
        try:
            secgroup_rule_body = {"security_group_rule":
                                  {"security_group_id": sec_group_id,
                                   "protocol": protocol,
                                   "port_range_min": from_port,
                                   "port_range_max": to_port,
                                   "remote_ip_prefix": cidr,
                                   "direction": direction,  # ingress | egress
                                   "ethertype": ethertype,  # IPv4 | IPv6
                                   }}
            secgroup_rule = self.client.create_security_group_rule(
                secgroup_rule_body)
            return self._convert_to_nova_security_group_rule_format(
                secgroup_rule.get('security_group_rule', secgroup_rule))
        except neutron_exceptions.NeutronClientException as e:
            # ignore error if rule already exists
            if e.status_code == 409:
                LOG.exception("secgroup rule already exists")
            else:
                LOG.exception('Failed to add rule to remote security group')
                raise exception.SecurityGroupRuleCreationError(str(e))

    def delete_security_group_rule(self, sec_group_rule_id):
        """Delete one security group rule by id."""
        try:
            self.client.delete_security_group_rule(
                security_group_rule=sec_group_rule_id)
        except neutron_exceptions.NeutronClientException as e:
            LOG.exception('Failed to delete rule to remote security group')
            raise exception.SecurityGroupRuleDeletionError(str(e))

    def _convert_to_nova_security_group_format(self, security_group):
        """Map a Neutron security group dict to a NovaNetworkStruct.

        Only ingress rules are kept, matching nova-network semantics.
        """
        nova_group = {}
        nova_group['id'] = security_group['id']
        nova_group['description'] = security_group['description']
        nova_group['name'] = security_group['name']
        nova_group['project_id'] = security_group['tenant_id']
        nova_group['rules'] = []
        for rule in security_group.get('security_group_rules', []):
            if rule['direction'] == 'ingress':
                nova_group['rules'].append(
                    self._convert_to_nova_security_group_rule_format(rule))
        return NovaNetworkStruct(**nova_group)

    def _convert_to_nova_security_group_rule_format(self, rule):
        """Map a Neutron rule dict to a NovaNetworkStruct.

        Neutron leaves the port range unset for "all ports" rules; nova
        expresses that as 1-65535 for TCP/UDP and -1/-1 otherwise.
        """
        nova_rule = {}
        nova_rule['id'] = rule['id']
        nova_rule['parent_group_id'] = rule['security_group_id']
        nova_rule['protocol'] = rule['protocol']
        if (nova_rule['protocol'] and rule.get('port_range_min') is None and
                rule.get('port_range_max') is None):
            if rule['protocol'].upper() in ['TCP', 'UDP']:
                nova_rule['from_port'] = 1
                nova_rule['to_port'] = 65535
            else:
                nova_rule['from_port'] = -1
                nova_rule['to_port'] = -1
        else:
            nova_rule['from_port'] = rule.get('port_range_min')
            nova_rule['to_port'] = rule.get('port_range_max')
        nova_rule['group_id'] = rule['remote_group_id']
        nova_rule['cidr'] = rule.get('remote_ip_prefix')
        return NovaNetworkStruct(**nova_rule)
| apache-2.0 |
jdelight/django | tests/model_package/tests.py | 380 | 2668 | from __future__ import unicode_literals
from django.db import connection, models
from django.db.backends.utils import truncate_name
from django.test import TestCase
from .models.article import Article, Site
from .models.publication import Publication
class Advertisement(models.Model):
    # Model declared inside the test module itself — regression fixture
    # for #12245 (models may live in the test package, not only models/).
    customer = models.CharField(max_length=100)
    publications = models.ManyToManyField("model_package.Publication", blank=True)
class ModelPackageTests(TestCase):
    """Regression tests for models split across a models/ subpackage."""

    def test_m2m_tables_in_subpackage_models(self):
        """
        Regression for #12168: models split into subpackages still get M2M
        tables.
        """
        p = Publication.objects.create(title="FooBar")
        site = Site.objects.create(name="example.com")
        a = Article.objects.create(headline="a foo headline")
        a.publications.add(p)
        a.sites.add(site)
        # Re-fetch to prove the M2M rows were actually persisted.
        a = Article.objects.get(id=a.pk)
        self.assertEqual(a.id, a.pk)
        self.assertEqual(a.sites.count(), 1)

    def test_models_in_the_test_package(self):
        """
        Regression for #12245 - Models can exist in the test package, too.
        """
        p = Publication.objects.create(title="FooBar")
        ad = Advertisement.objects.create(customer="Lawrence Journal-World")
        ad.publications.add(p)
        ad = Advertisement.objects.get(id=ad.pk)
        self.assertEqual(ad.publications.count(), 1)

    def test_automatic_m2m_column_names(self):
        """
        Regression for #12386 - field names on the autogenerated intermediate
        class that are specified as dotted strings don't retain any path
        component for the field or column name.
        """
        self.assertEqual(
            Article.publications.through._meta.fields[1].name, 'article'
        )
        self.assertEqual(
            Article.publications.through._meta.fields[1].get_attname_column(),
            ('article_id', 'article_id')
        )
        self.assertEqual(
            Article.publications.through._meta.fields[2].name, 'publication'
        )
        self.assertEqual(
            Article.publications.through._meta.fields[2].get_attname_column(),
            ('publication_id', 'publication_id')
        )
        # The intermediate table name may be truncated by the backend.
        self.assertEqual(
            Article._meta.get_field('publications').m2m_db_table(),
            truncate_name('model_package_article_publications', connection.ops.max_name_length()),
        )
        self.assertEqual(
            Article._meta.get_field('publications').m2m_column_name(), 'article_id'
        )
        self.assertEqual(
            Article._meta.get_field('publications').m2m_reverse_name(),
            'publication_id'
        )
| bsd-3-clause |
breathe/ansible | test/units/template/test_safe_eval.py | 205 | 1956 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
from collections import defaultdict
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock
from ansible.template.safe_eval import safe_eval
class TestSafeEval(unittest.TestCase):

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_safe_eval_usage(self):
        # test safe eval calls with different possible types for the
        # locals dictionary, to ensure we don't run into problems like
        # ansible/ansible/issues/12206 again
        expectations = (
            ('True', True),
            ('False', False),
            ('0', 0),
            ('[]', []),
            ('{}', {}),
        )
        for locals_vars in (dict(), defaultdict(dict)):
            for expression, expected in expectations:
                self.assertEqual(
                    safe_eval(expression, locals=locals_vars), expected)

    @unittest.skipUnless(sys.version_info[:2] >= (2, 7), "Python 2.6 has no set literals")
    def test_set_literals(self):
        self.assertEqual(safe_eval('{0}'), set([0]))
| gpl-3.0 |
lonewolf07/coala | coalib/results/TextRange.py | 10 | 4188 | import copy
from coalib.misc.Decorators import (
enforce_signature, generate_ordering, generate_repr)
from coalib.results.TextPosition import TextPosition
@generate_repr("start", "end")
@generate_ordering("start", "end")
class TextRange:
    """An ordered pair of TextPositions delimiting a span of text."""

    @enforce_signature
    def __init__(self, start: TextPosition, end: (TextPosition, None)=None):
        """
        Creates a new TextRange.

        :param start: A TextPosition indicating the start of the range.
                      Can't be `None`.
        :param end: A TextPosition indicating the end of the range. If
                    `None` is given, a copy of the start position will
                    be used here.
        :raises TypeError: Raised when
                           - start is no TextPosition.
                           - end is no TextPosition or None.
        :raises ValueError: Raised when end position is smaller than start
                            position, because negative ranges are not allowed.
        """
        self._start = start
        # Deep-copy so that start and end never alias the same object.
        self._end = end or copy.deepcopy(start)
        if self._end < start:
            raise ValueError("End position can't be less than start position.")

    @classmethod
    def from_values(cls,
                    start_line=None,
                    start_column=None,
                    end_line=None,
                    end_column=None):
        """
        Creates a new TextRange.

        :param start_line: The line number of the start position. The first
                           line is 1.
        :param start_column: The column number of the start position. The first
                             column is 1.
        :param end_line: The line number of the end position. If this
                         parameter is `None`, then the end position is set
                         the same like start position and end_column gets
                         ignored.
        :param end_column: The column number of the end position.
        :return: A TextRange.
        """
        start = TextPosition(start_line, start_column)
        if end_line is None:
            end = None
        else:
            end = TextPosition(end_line, end_column)

        return cls(start, end)

    @classmethod
    def join(cls, a, b):
        """
        Creates a new TextRange that covers the area of two overlapping ones

        :param a: TextRange (needs to overlap b)
        :param b: TextRange (needs to overlap a)
        :return: A new TextRange covering the union of the Area of a and b
        :raises TypeError: Raised when a or b is no TextRange.
        :raises ValueError: Raised when a and b do not overlap.
        """

        if not isinstance(a, cls) or not isinstance(b, cls):
            raise TypeError(
                "only instances of {} can be joined".format(cls.__name__))
        if not a.overlaps(b):
            raise ValueError(
                "{}s must overlap to be joined".format(cls.__name__))

        return cls(min(a.start, b.start), max(a.end, b.end))

    @property
    def start(self):
        # Start TextPosition of the range (read-only).
        return self._start

    @property
    def end(self):
        # End TextPosition of the range (read-only).
        return self._end

    def overlaps(self, other):
        """Return True if this range shares at least one position with
        ``other`` (touching ranges count as overlapping)."""
        return self.start <= other.end and self.end >= other.start

    def expand(self, text_lines):
        """
        Returns a new TextRange that covers the same area of a file as this
        one would. All values of None get replaced with absolute values.

        values of None will be interpreted as follows:
        self.start.line is None:   -> 1
        self.start.column is None: -> 1
        self.end.line is None:     -> last line of file
        self.end.column is None:   -> last column of self.end.line

        :param text_lines: File contents of the applicable file
        :return: TextRange with absolute values
        """
        start_line = self.start.line or 1
        start_column = self.start.column or 1
        end_line = self.end.line or len(text_lines)
        end_column = self.end.column or len(text_lines[end_line - 1])

        return TextRange.from_values(start_line,
                                     start_column,
                                     end_line,
                                     end_column)
| agpl-3.0 |
davidbgk/udata | udata/search/commands.py | 1 | 8335 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import logging
import sys
import signal
from contextlib import contextmanager
from datetime import datetime
from flask import current_app
from flask_script import prompt_bool
from udata.commands import submanager, IS_INTERACTIVE
from udata.search import es, adapter_catalog
from elasticsearch.helpers import reindex as es_reindex, streaming_bulk
# Module-level logger for the indexation commands.
log = logging.getLogger(__name__)

# Flask-Script sub-manager hosting all `udata search ...` commands.
m = submanager(
    'search',
    help='Search/Indexation related operations',
    description='Handle search and indexation related operations'
)

# Suffix appended to the base index name, e.g. `udata-2017-01-31-12-45`.
TIMESTAMP_FORMAT = '%Y-%m-%d-%H-%M'

# Warning template for commands kept only for backward compatibility.
DEPRECATION_MSG = '{cmd} command will be removed in udata 1.4, use index command instead'
def default_index_name():
    '''Build a time based index name'''
    timestamp = datetime.now().strftime(TIMESTAMP_FORMAT)
    return '{0}-{1}'.format(es.index_name, timestamp)
def iter_adapters():
    '''Iter over adapter in predictable way'''
    return sorted(adapter_catalog.values(),
                  key=lambda adapter: adapter.model.__name__)
def iter_qs(qs, adapter):
    '''Safely iterate over a DB QuerySet yielding ES documents

    Objects the adapter deems non-indexable are skipped; any
    serialization error is logged with its traceback instead of
    aborting the whole indexation run.
    '''
    # no_dereference avoids resolving references; timeout(False) keeps the
    # mongo cursor alive for long-running indexations.
    for obj in qs.no_dereference().timeout(False):
        if adapter.is_indexable(obj):
            try:
                doc = adapter.from_model(obj).to_dict(include_meta=True)
                yield doc
            except Exception as e:
                model = adapter.model.__name__
                log.error('Unable to index %s "%s": %s', model, str(obj.id),
                          str(e), exc_info=True)
def iter_for_index(docs, index_name):
    '''Yield every document from ``docs`` pinned to ``index_name``.'''
    for document in docs:
        # Mutates the incoming dict on purpose: callers stream these
        # objects straight into the bulk indexer.
        document['_index'] = index_name
        yield document
def index_model(index_name, adapter):
    '''Index all objects of the adapter's model into ``index_name``.'''
    model = adapter.model
    log.info('Indexing {0} objects'.format(model.__name__))
    qs = model.objects
    # Restrict to publicly visible objects when the manager supports it.
    if hasattr(model.objects, 'visible'):
        qs = qs.visible()
    if adapter.exclude_fields:
        qs = qs.exclude(*adapter.exclude_fields)
    docs = iter_qs(qs, adapter)
    docs = iter_for_index(docs, index_name)
    # raise_on_error=False: a single bad document must not abort the run;
    # failures are reported through the returned `info` payload instead.
    for ok, info in streaming_bulk(es.client, docs, raise_on_error=False):
        if not ok:
            log.error('Unable to index %s "%s": %s', model.__name__,
                      info['index']['_id'], info['index']['error'])
def disable_refresh(index_name):
    '''
    Turn off automatic refresh to optimize bulk indexing.
    See: https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-update-settings.html#bulk
    '''  # noqa
    settings_body = {'index': {'refresh_interval': '-1'}}
    es.indices.put_settings(index=index_name, body=settings_body)
def enable_refresh(index_name):
    '''
    Restore the configured refresh interval and force-merge segments.
    To be used after indexing.
    See: https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-update-settings.html#bulk
    '''  # noqa
    interval = current_app.config['ELASTICSEARCH_REFRESH_INTERVAL']
    settings_body = {'index': {'refresh_interval': interval}}
    es.indices.put_settings(index=index_name, body=settings_body)
    es.indices.forcemerge(index=index_name)
def set_alias(index_name, delete=True):
    '''
    Properly end an indexation by creating an alias.
    Previous alias is deleted if needed.

    :param index_name: the concrete (timestamped) index the alias
        should point to.
    :param delete: when True, physically delete the indices the alias
        previously pointed to.
    '''
    log.info('Creating alias "{0}" on index "{1}"'.format(
        es.index_name, index_name))
    if es.indices.exists_alias(name=es.index_name):
        alias = es.indices.get_alias(name=es.index_name)
        previous_indices = alias.keys()
        # Point the alias at the new index first, then detach it from the
        # old indices, so the alias never stops resolving.
        if index_name not in previous_indices:
            es.indices.put_alias(index=index_name, name=es.index_name)
        for index in previous_indices:
            if index != index_name:
                es.indices.delete_alias(index=index, name=es.index_name)
                if delete:
                    es.indices.delete(index=index)
    else:
        # First indexation ever: just create the alias.
        es.indices.put_alias(index=index_name, name=es.index_name)
@contextmanager
def handle_error(index_name, keep=False):
    '''
    Handle errors while indexing.
    In case of error, properly log it, remove the index and exit.
    If `keep` is `True`, index is not deleted.
    '''
    # Handle keyboard interrupt.  Mapping SIGTERM to
    # ``default_int_handler`` makes a TERM signal raise KeyboardInterrupt
    # too, so both are funnelled through the same cleanup path.
    signal.signal(signal.SIGINT, signal.default_int_handler)
    signal.signal(signal.SIGTERM, signal.default_int_handler)
    has_error = False
    try:
        yield
    except KeyboardInterrupt:
        print('') # Proper warning message under the "^C" display
        log.warning('Interrupted by signal')
        has_error = True
    except Exception as e:
        log.error(e)
        has_error = True
    if has_error:
        # Remove the partially built index unless asked to keep it, then
        # abort the whole command with a non-zero exit code.
        if not keep:
            log.info('Removing index %s', index_name)
            es.indices.delete(index=index_name)
        sys.exit(-1)
@m.option('-t', '--type', dest='doc_type', required=True,
          help='Only reindex a given type')
def reindex(doc_type):
    '''[DEPRECATED] Reindex models'''
    # ``Logger.warn`` is a deprecated alias of ``warning`` (since Python 3.3);
    # use the canonical name.
    log.warning(DEPRECATION_MSG.format(cmd='reindex'))
    # Delegate to the unified ``index`` command for a single model.
    index([doc_type], force=True, keep=False)
@m.option('-n', '--name', default=None, help='Optionnal index name')
@m.option('-d', '--delete', default=False, action='store_true',
          help='Delete previously aliased indices')
@m.option('-f', '--force', default=False, action='store_true',
          help='Do not prompt on deletion')
@m.option('-k', '--keep', default=False, action='store_true',
          help='Keep index in case of error')
def init(name=None, delete=False, force=False, keep=False):
    '''[DEPRECATED] Initialize or rebuild the search index'''
    # ``Logger.warn`` is a deprecated alias of ``warning`` (since Python 3.3);
    # use the canonical name.
    log.warning(DEPRECATION_MSG.format(cmd='init'))
    # The old ``delete`` flag maps to the new command's inverted ``keep``.
    # NOTE(review): the ``keep`` parameter is accepted but never used here;
    # deletion behaviour is driven by ``delete`` only — confirm intended.
    index(name=name, force=force, keep=not delete)
@m.option(dest='models', nargs='*', metavar='model',
          help='Model to reindex')
@m.option('-n', '--name', default=None, help='Optionnal index name')
@m.option('-f', '--force', default=False, action='store_true',
          help='Do not prompt on deletion')
@m.option('-k', '--keep', default=False, action='store_true',
          help='Do not delete indexes')
def index(models=None, name=None, force=False, keep=False):
    '''Initialize or rebuild the search index'''
    index_name = name or default_index_name()
    # ``m`` here shadows the module-level manager, but only inside the
    # comprehension scope.
    doc_types_names = [m.__name__.lower() for m in adapter_catalog.keys()]
    # NOTE(review): ``rstrip('s')`` strips *all* trailing "s" characters
    # ("address" -> "addre"); it only works as naive singularization for a
    # single trailing "s" — confirm no model name ends in double "s".
    models = [model.lower().rstrip('s') for model in (models or [])]
    for model in models:
        if model not in doc_types_names:
            log.error('Unknown model %s', model)
            sys.exit(-1)
    log.info('Initiliazing index "{0}"'.format(index_name))
    # If the target index already exists, confirm (or force) its deletion
    # before recreating it.
    if es.indices.exists(index_name):
        if IS_INTERACTIVE and not force:
            msg = 'Index {0} will be deleted, are you sure?'
            delete = prompt_bool(msg.format(index_name))
        else:
            delete = True
        if delete:
            es.indices.delete(index_name)
        else:
            sys.exit(-1)
    es.initialize(index_name)
    with handle_error(index_name, keep):
        disable_refresh(index_name)
        for adapter in iter_adapters():
            if not models or adapter.doc_type().lower() in models:
                index_model(index_name, adapter)
            else:
                # Models not selected for reindexation are copied verbatim
                # from the currently aliased index into the new one.
                log.info('Copying {0} objects to the new index'.format(
                    adapter.model.__name__))
                # Need upgrade to Elasticsearch-py 5.0.0 to write:
                # es.reindex({
                #     'source': {'index': es.index_name, 'type': adapter.doc_type()},
                #     'dest': {'index': index_name}
                # })
                #
                # http://elasticsearch-py.readthedocs.io/en/master/api.html#elasticsearch.Elasticsearch.reindex
                # This method (introduced in Elasticsearch 2.3 but only in Elasticsearch-py 5.0.0)
                # triggers a server-side documents copy.
                # Instead we use this helper meant for backward compatibility
                # but with poor performance as copy is client-side (scan+bulk)
                es_reindex(es.client, es.index_name, index_name, scan_kwargs={
                    'doc_type': adapter.doc_type()
                })
        enable_refresh(index_name)
    # At this step, we don't want error handler to delete the index
    # in case of error
    set_alias(index_name, delete=not keep)
| agpl-3.0 |
electrolinux/weblate | weblate/trans/tests/test_check_views.py | 9 | 4303 | # -*- coding: utf-8 -*-
#
# Copyright © 2012 - 2015 Michal Čihař <michal@cihar.com>
#
# This file is part of Weblate <http://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Tests for check views.
"""
from weblate.trans.tests.test_views import ViewTestCase
from django.core.urlresolvers import reverse
class ChecksViewTest(ViewTestCase):
    '''
    Testing of check views.

    Each test asserts that pages listing or detailing quality checks
    render the expected content, and that unknown check names yield 404.
    '''
    def test_browse(self):
        # Global check listing, optionally narrowed by language or project.
        response = self.client.get(reverse('checks'))
        self.assertContains(response, '/same/')
        response = self.client.get(reverse('checks'), {'language': 'de'})
        self.assertContains(response, '/same/')
        response = self.client.get(
            reverse('checks'),
            {'project': self.project.slug}
        )
        self.assertContains(response, '/same/')
    def test_check(self):
        # Detail page for known checks, 404 for unknown ones.
        response = self.client.get(
            reverse('show_check', kwargs={'name': 'same'})
        )
        self.assertContains(response, '/same/')
        response = self.client.get(
            reverse('show_check', kwargs={'name': 'ellipsis'})
        )
        self.assertContains(response, u'…')
        response = self.client.get(
            reverse('show_check', kwargs={'name': 'not-existing'})
        )
        self.assertEqual(response.status_code, 404)
    def test_project(self):
        # Per-project check pages, with and without a language filter.
        response = self.client.get(
            reverse(
                'show_check_project',
                kwargs={'name': 'same', 'project': self.project.slug}
            )
        )
        self.assertContains(response, '/same/')
        response = self.client.get(
            reverse(
                'show_check_project',
                kwargs={'name': 'same', 'project': self.project.slug}
            ),
            {'language': 'cs'}
        )
        self.assertContains(response, '/same/')
        response = self.client.get(
            reverse(
                'show_check_project',
                kwargs={'name': 'ellipsis', 'project': self.project.slug}
            )
        )
        self.assertContains(response, u'…')
        response = self.client.get(
            reverse(
                'show_check_project',
                kwargs={'name': 'non-existing', 'project': self.project.slug}
            )
        )
        self.assertEqual(response.status_code, 404)
    def test_subproject(self):
        response = self.client.get(
            reverse(
                'show_check_subproject',
                kwargs={
                    'name': 'same',
                    'project': self.project.slug,
                    'subproject': self.subproject.slug,
                }
            )
        )
        self.assertContains(response, '/same/')
        # The ellipsis check redirects to the source review page filtered
        # by check type instead of rendering its own subproject page.
        response = self.client.get(
            reverse(
                'show_check_subproject',
                kwargs={
                    'name': 'ellipsis',
                    'project': self.project.slug,
                    'subproject': self.subproject.slug,
                }
            )
        )
        self.assertRedirects(
            response,
            '{0}?type=ellipsis'.format(
                reverse('review_source', kwargs={
                    'project': self.project.slug,
                    'subproject': self.subproject.slug,
                })
            )
        )
        response = self.client.get(
            reverse(
                'show_check_subproject',
                kwargs={
                    'name': 'non-existing',
                    'project': self.project.slug,
                    'subproject': self.subproject.slug,
                }
            )
        )
        self.assertEqual(response.status_code, 404)
| gpl-3.0 |
dgarage/bc2 | test/functional/test_framework/blockstore.py | 63 | 5483 | #!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""BlockStore and TxStore helper classes."""
from .mininode import *
from io import BytesIO
import dbm.dumb as dbmd
logger = logging.getLogger("TestFramework.blockstore")
class BlockStore(object):
    """BlockStore helper class.
    BlockStore keeps a map of blocks and implements helper functions for
    responding to getheaders and getdata, and for constructing a getheaders
    message.

    Blocks are persisted in a dumb-dbm database keyed by ``repr(sha256)``;
    headers are additionally cached in-memory in ``headers_map``.
    """
    def __init__(self, datadir):
        self.blockDB = dbmd.open(datadir + "/blocks", 'c')
        # Hash (int) of the current tip; 0 means "no blocks yet".
        self.currentBlock = 0
        # In-memory map: block sha256 -> CBlockHeader.
        self.headers_map = dict()
    def close(self):
        self.blockDB.close()
    def erase(self, blockhash):
        del self.blockDB[repr(blockhash)]
    # lookup an entry and return the item as raw bytes
    def get(self, blockhash):
        value = None
        try:
            value = self.blockDB[repr(blockhash)]
        except KeyError:
            return None
        return value
    # lookup an entry and return it as a CBlock
    def get_block(self, blockhash):
        ret = None
        serialized_block = self.get(blockhash)
        if serialized_block is not None:
            f = BytesIO(serialized_block)
            ret = CBlock()
            ret.deserialize(f)
            ret.calc_sha256()
        return ret
    def get_header(self, blockhash):
        try:
            return self.headers_map[blockhash]
        except KeyError:
            return None
    # Note: this pulls full blocks out of the database just to retrieve
    # the headers -- perhaps we could keep a separate data structure
    # to avoid this overhead.
    def headers_for(self, locator, hash_stop, current_tip=None):
        """Build a msg_headers response: walk back from the tip until a
        locator hash is found, then return up to 2000 headers (stopping
        early at hash_stop if present)."""
        if current_tip is None:
            current_tip = self.currentBlock
        current_block_header = self.get_header(current_tip)
        if current_block_header is None:
            return None
        response = msg_headers()
        headersList = [ current_block_header ]
        maxheaders = 2000
        # Prepend ancestors until we reach a hash the peer already knows.
        while (headersList[0].sha256 not in locator.vHave):
            prevBlockHash = headersList[0].hashPrevBlock
            prevBlockHeader = self.get_header(prevBlockHash)
            if prevBlockHeader is not None:
                headersList.insert(0, prevBlockHeader)
            else:
                break
        headersList = headersList[:maxheaders] # truncate if we have too many
        hashList = [x.sha256 for x in headersList]
        index = len(headersList)
        if (hash_stop in hashList):
            index = hashList.index(hash_stop)+1
        response.headers = headersList[:index]
        return response
    def add_block(self, block):
        block.calc_sha256()
        try:
            self.blockDB[repr(block.sha256)] = bytes(block.serialize())
        except TypeError as e:
            logger.exception("Unexpected error")
        # The most recently added block becomes the tip.
        self.currentBlock = block.sha256
        self.headers_map[block.sha256] = CBlockHeader(block)
    def add_header(self, header):
        self.headers_map[header.sha256] = header
    # lookup the hashes in "inv", and return p2p messages for delivering
    # blocks found.
    def get_blocks(self, inv):
        responses = []
        for i in inv:
            if (i.type == 2): # MSG_BLOCK
                data = self.get(i.hash)
                if data is not None:
                    # Use msg_generic to avoid re-serialization
                    responses.append(msg_generic(b"block", data))
        return responses
    def get_locator(self, current_tip=None):
        """Build a CBlockLocator by walking back from the tip, taking
        every block for the first ~10 steps and then doubling the stride
        between sampled hashes."""
        if current_tip is None:
            current_tip = self.currentBlock
        r = []
        counter = 0
        step = 1
        lastBlock = self.get_block(current_tip)
        while lastBlock is not None:
            r.append(lastBlock.hashPrevBlock)
            # Skip (step - 1) intermediate blocks.
            for i in range(step):
                lastBlock = self.get_block(lastBlock.hashPrevBlock)
                if lastBlock is None:
                    break
            counter += 1
            if counter > 10:
                step *= 2
        locator = CBlockLocator()
        locator.vHave = r
        return locator
class TxStore(object):
    """Persistent txid -> serialized-transaction map backed by dbm.

    Mirrors BlockStore: entries are keyed by ``repr(sha256)`` and used to
    answer p2p "getdata" requests for transactions.
    """
    def __init__(self, datadir):
        self.txDB = dbmd.open(datadir + "/transactions", 'c')
    def close(self):
        self.txDB.close()
    # lookup an entry and return the item as raw bytes
    def get(self, txhash):
        value = None
        try:
            value = self.txDB[repr(txhash)]
        except KeyError:
            return None
        return value
    def get_transaction(self, txhash):
        # Deserialize stored bytes back into a CTransaction, or return
        # None when the hash is unknown.
        ret = None
        serialized_tx = self.get(txhash)
        if serialized_tx is not None:
            f = BytesIO(serialized_tx)
            ret = CTransaction()
            ret.deserialize(f)
            ret.calc_sha256()
        return ret
    def add_transaction(self, tx):
        tx.calc_sha256()
        try:
            self.txDB[repr(tx.sha256)] = bytes(tx.serialize())
        except TypeError as e:
            logger.exception("Unexpected error")
    def get_transactions(self, inv):
        # Build "tx" p2p reply messages for every known hash in ``inv``.
        responses = []
        for i in inv:
            if (i.type == 1): # MSG_TX
                tx = self.get(i.hash)
                if tx is not None:
                    responses.append(msg_generic(b"tx", tx))
        return responses
| mit |
bitmazk/django-development-fabfile | development_fabfile/fabfile/remote.py | 2 | 8508 | """Fab tasks that execute things on a remote server."""
import sys
import django
from django.conf import settings
from distutils.version import StrictVersion
from fabric.api import cd, env, local, run
from .local import drop_db, create_db, import_db, import_media, reset_passwords
from .utils import require_server, run_workon
if settings.PYTHON_VERSION:
PYTHON_VERSION = settings.PYTHON_VERSION
else:
PYTHON_VERSION = '{}.{}'.format(
sys.version_info.major, sys.version_info.minor)
if getattr(settings, 'PEM_KEY_DIR', False):
env.key_filename = settings.PEM_KEY_DIR
@require_server
def run_collectstatic():
    """
    Runs `./manage.py collectstatic` on the given server.
    Usage::
        fab <server> run_collectstatic
    """
    # Run inside the project root so ``manage.py`` is on the path.
    with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
        run_workon('python{} manage.py collectstatic --noinput'.format(
            PYTHON_VERSION))
@require_server
def run_compilemessages():
    """
    Executes ./manage.py compilemessages on the server.
    Usage::
        fab <server name> run_compilemessages
    """
    with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
        run_workon('python{} manage.py compilemessages'.format(PYTHON_VERSION))
@require_server
def run_deploy_website(restart_apache=False, restart_uwsgi=False,
                       restart_nginx=False):
    """
    Executes all tasks necessary to deploy the website on the given server.
    Usage::
        fab <server> run_deploy_website

    :param restart_apache: also restart Apache after deploying.
    :param restart_uwsgi: also restart uWSGI after deploying.
    :param restart_nginx: also restart nginx after deploying; otherwise the
        WSGI file is touched to trigger a reload.
    """
    run_git_pull()
    run_pip_install()
    run_rsync_project()
    run_syncdb()
    run_collectstatic()
    if getattr(settings, 'MAKEMESSAGES_ON_DEPLOYMENT', False):
        run_makemessages()
    if getattr(settings, 'COMPILEMESSAGES_ON_DEPLOYMENT', False):
        run_compilemessages()
    if restart_apache:
        run_restart_apache()
    if restart_uwsgi:
        run_restart_uwsgi()
    # NOTE(review): the wsgi file is touched whenever nginx is *not*
    # restarted, even if Apache/uWSGI were — confirm this is intended.
    if restart_nginx:
        run_restart_nginx()
    else:
        run_touch_wsgi()
@require_server
def run_download_db(filename=None):
    """
    Downloads the database from the server into your local machine.
    In order to import the downloaded database, run ``fab import_db``
    Usage::
        fab prod run_download_db
        fab prod run_download_db:filename=foobar.dump
    """
    if not filename:
        filename = settings.DB_DUMP_FILENAME
    # With a PEM key configured, the project name is used as the scp
    # target (presumably an SSH-config host alias — confirm); otherwise
    # an explicit user@host target is built.
    if env.key_filename:
        ssh = settings.PROJECT_NAME
    else:
        ssh = '{0}@{1}'.format(env.user, env.host_string)
    local('scp {0}:{1}{2} .'.format(
        ssh, settings.FAB_SETTING('SERVER_DB_BACKUP_DIR'), filename))
@require_server
def run_download_media(filename=None):
    """
    Downloads the media dump from the server into your local machine.
    In order to import the downloaded media dump, run ``fab import_media``
    Usage::
        fab prod run_download_media
        fab prod run_download_media:filename=foobar.tar.gz
    """
    if not filename:
        filename = settings.MEDIA_DUMP_FILENAME
    if env.key_filename:
        ssh = settings.PROJECT_NAME
    else:
        ssh = '{0}@{1}'.format(env.user, env.host_string)
    local('scp {0}:{1}{2} .'.format(
        ssh, settings.FAB_SETTING('SERVER_MEDIA_BACKUP_DIR'), filename))
@require_server
def run_export_db(filename=None):
    """
    Exports the database on the server.
    Usage::
        fab prod run_export_db
        fab prod run_export_db:filename=foobar.dump
    """
    if not filename:
        filename = settings.DB_DUMP_FILENAME
    # Delegates to the project's own ``export_db`` fab task on the server.
    with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
        run_workon('fab export_db:remote=True,filename={}'.format(filename))
@require_server
def run_export_media(filename=None):
    """
    Exports the media folder on the server.
    Usage::
        fab prod run_export_media
        fab prod run_export_media:filename=foobar.tar.gz
    """
    if not filename:
        filename = settings.MEDIA_DUMP_FILENAME
    # Remove any stale archive, tar the media folder, then move the
    # archive into the backup directory.
    with cd(settings.FAB_SETTING('SERVER_MEDIA_ROOT')):
        run('rm -rf {0}'.format(filename))
        run('tar -czf {0} *'.format(filename))
        run('mv {0} {1}'.format(
            filename, settings.FAB_SETTING('SERVER_MEDIA_BACKUP_DIR')))
@require_server
def run_git_pull():
    """
    Pulls the latest code and updates submodules.
    Usage::
        fab <server> run_git_pull
    """
    with cd(settings.FAB_SETTING('SERVER_REPO_ROOT')):
        run('git pull && git submodule init && git submodule update')
@require_server
def import_remote_db():
    """
    Downloads a db and imports it locally.

    Chains: export on the server, download, then locally drop/create/
    import the database and reset passwords.
    """
    run_export_db()
    run_download_db()
    drop_db()
    create_db()
    import_db()
    reset_passwords()
@require_server
def import_remote_media():
    """
    Downloads media and imports it locally.
    """
    run_export_media()
    run_download_media()
    import_media()
@require_server
def run_makemessages():
    """
    Executes ./manage.py makemessages -s --all on the server.
    Usage::
        fab <server name> run_makemessages
    """
    with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
        run_workon('python{} manage.py makemessages -s --all'.format(
            PYTHON_VERSION))
@require_server
def run_pip_install(upgrade=0):
    """
    Installs the requirement.txt file on the given server.
    Usage::
        fab <server> run_pip_install
        fab <server> run_pip_install:upgrade=1
    :param upgrade: If set to 1, the command will be executed with the
      ``--upgrade`` flag.
    """
    command = 'pip install -r {0}'.format(
        settings.FAB_SETTING('SERVER_REQUIREMENTS_PATH'))
    if upgrade:
        command += ' --upgrade'
    run_workon(command)
@require_server
def run_restart_apache():
    """
    Restarts apache on the given server.
    Usage::
        fab <server> run_restart_apache
    """
    run('{0}restart'.format(settings.FAB_SETTING('SERVER_APACHE_BIN_DIR')))
@require_server
def run_restart_uwsgi():
    """
    Restarts uwsgi on the given server.
    Usage::
        fab <server> run_restart_uwsgi
    """
    with cd(settings.FAB_SETTING('SERVER_LOCAL_ETC_DIR')):
        run('supervisorctl restart uwsgi')
@require_server
def run_restart_nginx():
    """
    Restarts nginx on the given server.
    Usage::
        fab <server> run_restart_nginx
    """
    with cd(settings.FAB_SETTING('SERVER_LOCAL_ETC_DIR')):
        run('supervisorctl restart nginx')
@require_server
def run_rsync_project():
    """
    Copies the project from the git repository to its destination folder.
    This has the nice side effect of rsync deleting all ``.pyc`` files and
    removing other files that might have been left behind by sys admins messing
    around on the server.
    Usage::
        fab <server> run_rsync_project
    """
    # Build one " --exclude '<pattern>'" fragment per configured pattern.
    excludes = ''
    for exclude in settings.RSYNC_EXCLUDES:
        excludes += " --exclude '{0}'".format(exclude)
    command = "rsync -avz --stats --delete {0} {1} {2}".format(
        excludes, settings.FAB_SETTING('SERVER_REPO_PROJECT_ROOT'),
        settings.FAB_SETTING('SERVER_APP_ROOT'))
    run(command)
@require_server
def run_syncdb():
    """
    Runs `./manage.py syncdb --migrate` on the given server.
    Usage::
        fab <server> run_syncdb
    """
    with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
        # ``syncdb --migrate`` was removed in Django 1.7 in favour of
        # ``migrate``; pick the command matching the installed version.
        if StrictVersion(django.get_version()) < StrictVersion('1.7'):
            run_workon('python{} manage.py syncdb --migrate --noinput'.format(
                PYTHON_VERSION))
        else:
            run_workon('python{} manage.py migrate'.format(PYTHON_VERSION))
@require_server
def run_touch_wsgi():
    """
    Runs `touch <path>/wsgi.py` on the given server.
    Usage::
        fab <server> run_touch_wsgi
    """
    run('touch {0}'.format(settings.FAB_SETTING('SERVER_WSGI_FILE')))
@require_server
def run_upload_db(filename=None):
    """
    Uploads your local database to the server.
    You can create a local dump with ``fab export_db`` first.
    In order to import the database on the server you still need to SSH into
    the server.
    Usage::
        fab prod run_upload_db
        fab prod run_upload_db:filename=foobar.dump
    """
    if not filename:
        filename = settings.DB_DUMP_FILENAME
    if env.key_filename:
        ssh = settings.PROJECT_NAME
    else:
        ssh = '{0}@{1}'.format(env.user, env.host_string)
    # BUGFIX: the format string used index ``{3}`` while only three
    # arguments (indices 0-2) are supplied, which raised
    # ``IndexError: tuple index out of range`` on every invocation.
    local('scp {0} {1}:{2}'.format(
        filename, ssh, settings.FAB_SETTING('SERVER_DB_BACKUP_DIR')))
| mit |
cnvogelg/fs-uae-gles | launcher/fs_uae_launcher/netplay/oyoyo/examplebot.py | 2 | 1024 | #!/usr/bin/python
"""Example bot for oyoyo that responds to !say"""
import logging
import re
from fs_uae_launcher.oyoyo.client import IRCClient
from fs_uae_launcher.oyoyo.cmdhandler import DefaultCommandHandler
from fs_uae_launcher.oyoyo import helpers
HOST = 'irc.freenode.net'
PORT = 6667
NICK = 'oyoyo-example'
CHANNEL = '#oyoyo-test'
class MyHandler(DefaultCommandHandler):
    """Command handler that repeats anything following ``!say`` in a channel."""

    def privmsg(self, nick, chan, msg):
        """Handle a PRIVMSG: echo the text after a ``!say`` command back."""
        msg = msg.decode()
        # Raw string: ``'\!'`` is an invalid escape sequence (deprecated,
        # a future SyntaxError); the matched pattern is byte-identical.
        match = re.match(r'\!say (.*)', msg)
        if match:
            to_say = match.group(1).strip()
            print('Saying, "%s"' % to_say)
            helpers.msg(self.client, chan, to_say)
def connect_cb(cli):
    # Called once the IRC connection is established: join the demo channel.
    helpers.join(cli, CHANNEL)
def main():
    logging.basicConfig(level=logging.DEBUG)
    cli = IRCClient(MyHandler, host=HOST, port=PORT, nick=NICK,
                    connect_cb=connect_cb)
    conn = cli.connect()
    # ``connect()`` appears to return a generator driving the client
    # (see the ``conn.next()`` call below) — advancing it processes
    # network events forever.
    while True:
        conn.next() ## python 2
        # next(conn) ## python 3
if __name__ == '__main__':
    main()
| gpl-2.0 |
DISBi/django-disbi | disbi/disbimodels.py | 1 | 7146 | """
Normal Django models with a few custom options for configuration.
If you have custom model classes that need these options, add them here and
create a child class of the appropriate options class and your custom model class.
"""
# Django
from django.db import models
class Options():
    def __init__(self, di_show=False, di_display_name=None, di_hr_primary_key=False,
                 di_choose=False, di_combinable=False,
                 *args, **kwargs):
        """
        Custom options for DISBi fields.

        Args:
            di_show (bool): Determines whether the column should be
                included in the result table.
            di_display_name (str): Will be used as column header in the result table.
            di_hr_primary_key (bool): Determines whether the column should
                be used for identifying rows. If true column must be unique
                and may not be `null` or `blank`. Only one di_hr_primary_key
                is allowed per model.
                TODO: enforce this
            di_choose (bool): Stored on the field as-is; semantics are
                defined by consumers of these options, not in this module.
            di_combinable (bool): Stored on the field as-is; semantics are
                defined by consumers of these options, not in this module.
        """
        self.di_show = di_show
        self.di_display_name = di_display_name
        self.di_hr_primary_key = di_hr_primary_key
        self.di_choose = di_choose
        self.di_combinable = di_combinable
        # Forward remaining arguments to the cooperating base class
        # (a Django model field when used as a mixin).
        super().__init__(*args, **kwargs)
class RelationshipOptions():
    def __init__(self, to, di_show=False, di_display_name=None, di_hr_primary_key=False,
                 di_choose=False, di_combinable=False,
                 *args, **kwargs):
        """
        Custom options for DISBi relationship fields, which have a different
        signature than normal fields (they require a ``to`` target).

        Args:
            to: Target model of the relationship; forwarded positionally to
                the cooperating base class (the Django relation field).
            di_show (bool): Determines whether the column should be
                included in the result table.
            di_display_name (str): Will be used as column header in the result table.
            di_hr_primary_key (bool): Determines whether the column should
                be used for identifying rows. If true column must be unique
                and may not be `null` or `blank`. Only one di_hr_primary_key
                is allowed per model.
            di_choose (bool): Stored on the field as-is; semantics defined
                by consumers of these options.
            di_combinable (bool): Stored on the field as-is; semantics
                defined by consumers of these options.
        """
        self.di_show = di_show
        # CONSISTENCY FIX: ``Options`` stores this value as
        # ``di_display_name`` but this class only stored ``display_name``.
        # Set both so code reading either attribute name keeps working.
        self.di_display_name = di_display_name
        self.display_name = di_display_name  # kept for backward compatibility
        self.di_hr_primary_key = di_hr_primary_key
        self.di_choose = di_choose
        self.di_combinable = di_combinable
        super().__init__(to, *args, **kwargs)
class ExcludeOptions(Options):
    """
    Adds the `exclude` option, to exclude rows where this field
    evaluates to `False`. Should be only used on Bool fields.
    """
    def __init__(self, di_exclude=False, di_show=False, di_display_name=None,
                 di_hr_primary_key=False, di_choose=False, di_combinable=False,
                 *args, **kwargs):
        # Whether rows with a falsy value in this column are filtered out.
        self.di_exclude = di_exclude
        # BUGFIX: a missing comma after ``di_combinable`` made the original
        # call evaluate ``di_combinable * args`` (bool-by-tuple
        # multiplication), passing a tuple as ``di_combinable`` and
        # silently dropping all positional ``*args``.
        super().__init__(di_show, di_display_name, di_hr_primary_key,
                         di_choose, di_combinable, *args, **kwargs)
# Scalar Django field types extended with the DISBi ``Options`` mixin.
# Each class only combines the mixin with the stock field; no behaviour
# is added beyond the extra ``di_*`` keyword arguments.
class FloatField(Options, models.FloatField):
    """
    FloatField with custom DISBi options.
    """
    pass
class BigIntegerField(Options, models.BigIntegerField):
    """
    BigIntegerField with custom DISBi options.
    """
    pass
class BinaryField(Options, models.BinaryField):
    """
    BinaryField with custom DISBi options.
    """
    pass
class CommaSeparatedIntegerField(Options, models.CommaSeparatedIntegerField):
    """
    CommaSeparatedIntegerField with custom DISBi options.
    """
    pass
class CharField(Options, models.CharField):
    """
    CharField with custom DISBi options.
    """
    pass
class DateField(Options, models.DateField):
    """
    DateField with custom DISBi options.
    """
    pass
class DateTimeField(Options, models.DateTimeField):
    """
    DateTimeField with custom DISBi options.
    """
    pass
class DecimalField(Options, models.DecimalField):
    """
    DecimalField with custom DISBi options.
    """
    pass
class DurationField(Options, models.DurationField):
    """
    DurationField with custom DISBi options.
    """
    pass
class EmailField(Options, models.EmailField):
    """
    EmailField with custom DISBi options.
    """
    pass
class FileField(Options, models.FileField):
    """
    FileField with custom DISBi options.
    """
    pass
class FilePathField(Options, models.FilePathField):
    """
    FilePathField with custom DISBi options.
    """
    pass
class ImageField(Options, models.ImageField):
    """
    ImageField with custom DISBi options.
    """
    pass
class IntegerField(Options, models.IntegerField):
    """
    IntegerField with custom DISBi options.
    """
    pass
class GenericIPAddressField(Options, models.GenericIPAddressField):
    """
    GenericIPAddressField with custom DISBi options.
    """
    pass
class PositiveIntegerField(Options, models.PositiveIntegerField):
    """
    PositiveIntegerField with custom DISBi options.
    """
    pass
class PositiveSmallIntegerField(Options, models.PositiveSmallIntegerField):
    """
    PositiveSmallIntegerField with custom DISBi options.
    """
    pass
class SlugField(Options, models.SlugField):
    """
    SlugField with custom DISBi options.
    """
    pass
class SmallIntegerField(Options, models.SmallIntegerField):
    """
    SmallIntegerField with custom DISBi options.
    """
    pass
class TextField(Options, models.TextField):
    """
    TextField with custom DISBi options.
    """
    pass
class TimeField(Options, models.TimeField):
    """
    TimeField with custom DISBi options.
    """
    pass
class URLField(Options, models.URLField):
    """
    URLField with custom DISBi options.
    """
    pass
class UUIDField(Options, models.UUIDField):
    """
    UUIDField with custom DISBi options.
    """
    pass
# Relationship fields use ``RelationshipOptions`` because their
# constructors take the target model as first positional argument.
class ForeignKey(RelationshipOptions, models.ForeignKey):
    """
    ForeignKey with custom DISBi options.
    """
    pass
class ManyToManyField(RelationshipOptions, models.ManyToManyField):
    """
    ManyToManyField with custom DISBi options.
    """
    pass
class OneToOneField(RelationshipOptions, models.OneToOneField):
    """
    OneToOneField with custom DISBi options.
    """
    pass
# Boolean fields get ``ExcludeOptions`` so rows can be excluded when the
# field evaluates to False.
class NullBooleanField(ExcludeOptions, models.NullBooleanField):
    """
    NullBooleanField with custom DISBi and exclude options.
    """
    pass
class BooleanField(ExcludeOptions, models.BooleanField):
    """
    BooleanField with custom DISBi and exclude options.
    """
    pass
class EmptyCharField(Options, models.CharField):
    """
    CharField with custom DISBi options and the option to add an
    empty value displayer.
    """
    def __init__(self, di_empty=None, di_show=True, di_display_name=None, di_hr_primary_key=False,
                 di_choose=False, di_combinable=False,
                 *args, **kwargs):
        # ``di_empty``: value shown in place of empty entries.
        # NOTE(review): ``di_show`` defaults to True here, unlike every
        # other field where it defaults to False — confirm intentional.
        self.di_empty = di_empty
        super().__init__(di_show, di_display_name, di_hr_primary_key, di_choose, di_combinable,
                         *args, **kwargs)
| mit |
DMLoy/ECommerceBasic | lib/python2.7/site-packages/django/contrib/gis/db/backends/oracle/operations.py | 98 | 12894 | """
This module contains the spatial lookup types, and the `get_geo_where_clause`
routine for Oracle Spatial.
Please note that WKT support is broken on the XE version, and thus
this backend will not work on such platforms. Specifically, XE lacks
support for an internal JVM, and Java libraries are required to use
the WKT constructors.
"""
import re
from decimal import Decimal
from django.db.backends.oracle.base import DatabaseOperations
from django.contrib.gis.db.backends.base import BaseSpatialOperations
from django.contrib.gis.db.backends.oracle.adapter import OracleSpatialAdapter
from django.contrib.gis.db.backends.util import SpatialFunction
from django.contrib.gis.geometry.backend import Geometry
from django.contrib.gis.measure import Distance
from django.utils import six
class SDOOperation(SpatialFunction):
    "Base class for SDO* Oracle operations."
    # Default shape: SDO_FUNC(col, geom) = 'TRUE'.
    sql_template = "%(function)s(%(geo_col)s, %(geometry)s) %(operator)s '%(result)s'"
    def __init__(self, func, **kwargs):
        kwargs.setdefault('operator', '=')
        kwargs.setdefault('result', 'TRUE')
        super(SDOOperation, self).__init__(func, **kwargs)
class SDODistance(SpatialFunction):
    "Class for Distance queries."
    sql_template = ('%(function)s(%(geo_col)s, %(geometry)s, %(tolerance)s) '
                    '%(operator)s %(result)s')
    dist_func = 'SDO_GEOM.SDO_DISTANCE'
    def __init__(self, op, tolerance=0.05):
        # ``result`` stays a placeholder so the distance value is bound
        # as a query parameter.
        super(SDODistance, self).__init__(self.dist_func,
                                          tolerance=tolerance,
                                          operator=op, result='%s')
class SDODWithin(SpatialFunction):
    "Class for dwithin (SDO_WITHIN_DISTANCE) queries."
    dwithin_func = 'SDO_WITHIN_DISTANCE'
    # ``%%s`` keeps a literal %s placeholder for the distance parameter.
    sql_template = "%(function)s(%(geo_col)s, %(geometry)s, %%s) = 'TRUE'"
    def __init__(self):
        super(SDODWithin, self).__init__(self.dwithin_func)
class SDOGeomRelate(SpatialFunction):
    "Class for using SDO_GEOM.RELATE."
    relate_func = 'SDO_GEOM.RELATE'
    sql_template = ("%(function)s(%(geo_col)s, '%(mask)s', %(geometry)s, "
                    "%(tolerance)s) %(operator)s '%(mask)s'")
    def __init__(self, mask, tolerance=0.05):
        # SDO_GEOM.RELATE(...) has a peculiar argument order: column, mask, geom, tolerance.
        # Moreover, the function result is the mask (e.g., 'DISJOINT' instead of 'TRUE').
        super(SDOGeomRelate, self).__init__(self.relate_func, operator='=',
                                            mask=mask, tolerance=tolerance)
class SDORelate(SpatialFunction):
    "Class for using SDO_RELATE."
    # Valid masks, combinable with '+', e.g. 'inside+touch'.
    masks = 'TOUCH|OVERLAPBDYDISJOINT|OVERLAPBDYINTERSECT|EQUAL|INSIDE|COVEREDBY|CONTAINS|COVERS|ANYINTERACT|ON'
    mask_regex = re.compile(r'^(%s)(\+(%s))*$' % (masks, masks), re.I)
    sql_template = "%(function)s(%(geo_col)s, %(geometry)s, 'mask=%(mask)s') = 'TRUE'"
    relate_func = 'SDO_RELATE'
    def __init__(self, mask):
        if not self.mask_regex.match(mask):
            raise ValueError('Invalid %s mask: "%s"' % (self.relate_func, mask))
        super(SDORelate, self).__init__(self.relate_func, mask=mask)
# Valid distance types and substitutions
dtypes = (Decimal, Distance, float) + six.integer_types
class OracleOperations(DatabaseOperations, BaseSpatialOperations):
compiler_module = "django.contrib.gis.db.backends.oracle.compiler"
name = 'oracle'
oracle = True
valid_aggregates = dict([(a, None) for a in ('Union', 'Extent')])
Adapter = OracleSpatialAdapter
Adaptor = Adapter # Backwards-compatibility alias.
area = 'SDO_GEOM.SDO_AREA'
gml= 'SDO_UTIL.TO_GMLGEOMETRY'
centroid = 'SDO_GEOM.SDO_CENTROID'
difference = 'SDO_GEOM.SDO_DIFFERENCE'
distance = 'SDO_GEOM.SDO_DISTANCE'
extent= 'SDO_AGGR_MBR'
intersection= 'SDO_GEOM.SDO_INTERSECTION'
length = 'SDO_GEOM.SDO_LENGTH'
num_geom = 'SDO_UTIL.GETNUMELEM'
num_points = 'SDO_UTIL.GETNUMVERTICES'
perimeter = length
point_on_surface = 'SDO_GEOM.SDO_POINTONSURFACE'
reverse = 'SDO_UTIL.REVERSE_LINESTRING'
sym_difference = 'SDO_GEOM.SDO_XOR'
transform = 'SDO_CS.TRANSFORM'
union = 'SDO_GEOM.SDO_UNION'
unionagg = 'SDO_AGGR_UNION'
# We want to get SDO Geometries as WKT because it is much easier to
# instantiate GEOS proxies from WKT than SDO_GEOMETRY(...) strings.
# However, this adversely affects performance (i.e., Java is called
# to convert to WKT on every query). If someone wishes to write a
# SDO_GEOMETRY(...) parser in Python, let me know =)
select = 'SDO_UTIL.TO_WKTGEOMETRY(%s)'
distance_functions = {
'distance_gt' : (SDODistance('>'), dtypes),
'distance_gte' : (SDODistance('>='), dtypes),
'distance_lt' : (SDODistance('<'), dtypes),
'distance_lte' : (SDODistance('<='), dtypes),
'dwithin' : (SDODWithin(), dtypes),
}
geometry_functions = {
'contains' : SDOOperation('SDO_CONTAINS'),
'coveredby' : SDOOperation('SDO_COVEREDBY'),
'covers' : SDOOperation('SDO_COVERS'),
'disjoint' : SDOGeomRelate('DISJOINT'),
'intersects' : SDOOperation('SDO_OVERLAPBDYINTERSECT'), # TODO: Is this really the same as ST_Intersects()?
'equals' : SDOOperation('SDO_EQUAL'),
'exact' : SDOOperation('SDO_EQUAL'),
'overlaps' : SDOOperation('SDO_OVERLAPS'),
'same_as' : SDOOperation('SDO_EQUAL'),
'relate' : (SDORelate, six.string_types), # Oracle uses a different syntax, e.g., 'mask=inside+touch'
'touches' : SDOOperation('SDO_TOUCH'),
'within' : SDOOperation('SDO_INSIDE'),
}
geometry_functions.update(distance_functions)
gis_terms = ['isnull']
gis_terms += list(geometry_functions)
gis_terms = dict([(term, None) for term in gis_terms])
truncate_params = {'relate' : None}
def convert_extent(self, clob):
    """Convert an extent CLOB (WKT returned by Oracle) into a 4-tuple
    ``(xmin, ymin, xmax, ymax)``, or ``None`` when no extent is available.

    Oracle generally returns a Polygon for the extent, but collapses to a
    single Point when the table contains only one Point.
    """
    if not clob:
        return None
    ext_geom = Geometry(clob.read())
    gtype = str(ext_geom.geom_type)
    if gtype == 'Polygon':
        # Lower-left and upper-right corners come from the polygon shell.
        shell = ext_geom.shell
        ll, ur = shell[0][:2], shell[2][:2]
    elif gtype == 'Point':
        # Degenerate single-point extent: zero-area bounding box.
        ll = ext_geom.coords[:2]
        ur = ll
    else:
        raise Exception('Unexpected geometry type returned for extent: %s' % gtype)
    xmin, ymin = ll
    xmax, ymax = ur
    return (xmin, ymin, xmax, ymax)
def convert_geom(self, clob, geo_field):
    """Build a Geometry (with the field's SRID) from a WKT CLOB, or
    return ``None`` for an empty/missing CLOB."""
    return Geometry(clob.read(), geo_field.srid) if clob else None
def geo_db_type(self, f):
    """Return the database column type for a geometry field on Oracle.

    Oracle uses a single type for every geometry kind, so no stored
    procedure or per-type lookup is needed (unlike other spatial backends).
    """
    return 'MDSYS.SDO_GEOMETRY'
def get_distance(self, f, value, lookup_type):
    """Return the distance parameter list for a distance lookup.

    On Oracle, geometry columns with a geodetic coordinate system behave
    implicitly like a geography column, so meters are used as the distance
    unit for them.
    """
    if not value:
        return []
    dist = value[0]
    if isinstance(dist, Distance):
        if f.geodetic(self.connection):
            # Geodetic SRS: Oracle expects meters.
            dist_param = dist.m
        else:
            # Projected SRS: convert into the field's own units.
            dist_param = getattr(dist, Distance.unit_attname(f.units_name(self.connection)))
    else:
        # Raw numeric value; pass through unchanged.
        dist_param = dist
    if lookup_type == 'dwithin':
        # dwithin lookups on Oracle require a special string parameter
        # that starts with "distance=".
        dist_param = 'distance=%s' % dist_param
    return [dist_param]
def get_geom_placeholder(self, f, value):
    """Return the SQL placeholder for a geometry value on field *f*.

    When the value's SRID differs from the field's, the placeholder is
    wrapped in an SDO_CS.TRANSFORM() call so Oracle converts it.
    """
    if value is None:
        return 'NULL'
    if hasattr(value, 'expression'):
        # F-expression: no geometry literal is bound; substitute the
        # referenced column name into the placeholder instead.
        if value.srid != f.srid:
            placeholder = '%s(%%s, %s)' % (self.transform, f.srid)
        else:
            placeholder = '%s'
        return placeholder % self.get_expression_column(value)
    if value.srid != f.srid:
        return '%s(SDO_GEOMETRY(%%s, %s), %s)' % (self.transform, value.srid, f.srid)
    return 'SDO_GEOMETRY(%%s, %s)' % f.srid
def spatial_lookup_sql(self, lvalue, lookup_type, value, field, qn):
    """Return the SQL WHERE clause for Oracle spatial SQL construction.

    ``lvalue`` is an ``(alias, column, db_type)`` triple; ``qn`` is the
    identifier-quoting function.  Raises ValueError for malformed tuple
    arguments and TypeError for unknown lookup types.
    """
    alias, col, db_type = lvalue
    # Getting the quoted table name as `geo_col`.
    geo_col = '%s.%s' % (qn(alias), qn(col))
    # See if an Oracle geometry function matches the lookup type next.
    lookup_info = self.geometry_functions.get(lookup_type, False)
    if lookup_info:
        # Lookup types that are tuples take tuple arguments, e.g., 'relate'
        # and 'dwithin' lookup types.
        if isinstance(lookup_info, tuple):
            # First element of tuple is lookup type, second element is the
            # type of the expected argument (e.g., str, float).
            sdo_op, arg_type = lookup_info
            # BUGFIX: validate that `value` really is a 2-tuple *before*
            # indexing into it, so callers get the intended ValueError
            # rather than a TypeError/IndexError.
            if not isinstance(value, tuple):
                raise ValueError('Tuple required for `%s` lookup type.' % lookup_type)
            if len(value) != 2:
                raise ValueError('2-element tuple required for %s lookup type.' % lookup_type)
            geom = value[0]
            # Ensuring the argument type matches what we expect.
            if not isinstance(value[1], arg_type):
                raise ValueError('Argument type should be %s, got %s instead.' % (arg_type, type(value[1])))
            if lookup_type == 'relate':
                # The SDORelate class handles construction for these queries,
                # and verifies the mask argument.
                return sdo_op(value[1]).as_sql(geo_col, self.get_geom_placeholder(field, geom))
            else:
                # Otherwise, just call the `as_sql` method on the SDOOperation instance.
                return sdo_op.as_sql(geo_col, self.get_geom_placeholder(field, geom))
        else:
            # Lookup info is a SDOOperation instance, whose `as_sql` method returns
            # the SQL necessary for the geometry function call. For example:
            #  SDO_CONTAINS("geoapp_country"."poly", SDO_GEOMTRY('POINT(5 23)', 4326)) = 'TRUE'
            return lookup_info.as_sql(geo_col, self.get_geom_placeholder(field, value))
    elif lookup_type == 'isnull':
        # Handling 'isnull' lookup type ('' if checking IS NULL, 'NOT ' otherwise).
        return "%s IS %sNULL" % (geo_col, '' if value else 'NOT ')
    raise TypeError("Got invalid lookup_type: %s" % repr(lookup_type))
def spatial_aggregate_sql(self, agg):
    """Return ``(sql_template, sql_function)`` for the given spatial
    Aggregate instance.

    The aggregate's class name selects the Oracle function attribute on
    this class ('union' maps to the 'unionagg' attribute).
    """
    agg_name = agg.__class__.__name__.lower()
    if agg_name == 'union':
        agg_name += 'agg'
    # Extent aggregates take the bare column; the rest need SDOAGGRTYPE
    # with a tolerance.
    if agg.is_extent:
        sql_template = '%(function)s(%(field)s)'
    else:
        sql_template = '%(function)s(SDOAGGRTYPE(%(field)s,%(tolerance)s))'
    return self.select % sql_template, getattr(self, agg_name)
# Routines for getting the OGC-compliant models.
def geometry_columns(self):
    """Return the Oracle GeometryColumns model class (imported lazily,
    presumably to avoid import-time cycles -- TODO confirm)."""
    from django.contrib.gis.db.backends.oracle.models import GeometryColumns
    return GeometryColumns
def spatial_ref_sys(self):
    """Return the Oracle SpatialRefSys model class (lazy import, matching
    geometry_columns above)."""
    from django.contrib.gis.db.backends.oracle.models import SpatialRefSys
    return SpatialRefSys
def modify_insert_params(self, placeholders, params):
    """Drop insert parameters whose placeholder is the literal 'NULL'.

    Needed for the Oracle Spatial backend due to #10888.  Only supports a
    single row -- this code doesn't work for bulk insert cases.
    """
    assert len(placeholders) == 1
    kept = [param for pholder, param in zip(placeholders[0], params[0])
            if pholder != 'NULL']
    return [kept]
| mit |
openhumanoids/oh-distro | software/ihmc/ihmc_step/translator_ihmc.py | 1 | 14123 | import lcm
import drc
import atlas
import bot_core
import time
import numpy as np
import py_drake_utils as ut
from bdi_step.footsteps import decode_footstep_plan, decode_deprecated_footstep_plan, encode_footstep_plan, FootGoal
from bdi_step.plotting import draw_swing
from bdi_step.utils import Behavior, gl, now_utime
# Number of steps the BDI walk behavior keeps queued at a time (see
# send_params, which slices four walk specs per request).
NUM_REQUIRED_WALK_STEPS = 4

PLAN_UPDATE_TIMEOUT = 20 # maximum time allowed between a footstep plan and an 'update' which appends more steps to that plan

# Experimentally determined vector relating BDI's frame for foot position to ours. This is the xyz vector from the position of the foot origin (from drake forwardKin) to the BDI Atlas foot pos estimate, expressed in the frame of the foot.
ATLAS_FRAME_OFFSET = np.array([0.0400, 0.000, -0.0850])
def blank_step_spec():
    """Return a fresh behavior_step_spec_t with empty foot and action
    sub-messages filled in."""
    spec = atlas.behavior_step_spec_t()
    spec.foot = atlas.behavior_foot_data_t()
    spec.action = atlas.behavior_step_action_t()
    return spec
def blank_walk_spec():
    """Return a fresh behavior_walk_spec_t with empty foot and action
    sub-messages filled in."""
    spec = atlas.behavior_walk_spec_t()
    spec.foot = atlas.behavior_foot_data_t()
    spec.action = atlas.behavior_walk_action_t()
    return spec
class Mode:
    # Operating modes for IHMCStepTranslator (see its run() method).
    translating = 0  # robot-side: translate plans into BDI step/walk params
    plotting = 1     # base-side: only draw planned swing trajectories
class IHMCStepTranslator(object):
    """Bridge between committed footstep plans and the BDI/Atlas walking
    interface, communicating over LCM.

    In Mode.translating it runs robot-side: it receives footstep plans,
    converts them into BDI step/walk parameter messages, and feeds them to
    the controller as it requests each next step.  In Mode.plotting it runs
    base-side and only draws the planned swing trajectories.
    """

    def __init__(self, mode=Mode.translating, safe=True):
        self.mode = mode
        self.safe = safe # Don't send atlas behavior commands (to ensure that the robot never starts walking accidentally when running tests)
        self.lc = lcm.LCM()
        if self.mode == Mode.plotting:
            self.gl = gl
        else:
            self.gl = None
        # Incoming footstep queue (FootGoal objects); the first two entries
        # are the robot's current foot poses.
        self.bdi_step_queue_in = []
        # Step index most recently sent to the controller.
        self.delivered_index = None
        self.use_spec = True
        self.drift_from_plan = np.zeros((3,1))
        self.behavior = Behavior.BDI_STEPPING
        # Transform from our local frame to BDI's local frame, updated from
        # the LOCAL_TO_LOCAL_BDI channel.
        self.T_local_to_localbdi = bot_core.rigid_transform_t()
        self.T_local_to_localbdi.trans = np.zeros(3)
        self.T_local_to_localbdi.quat = ut.rpy2quat([0,0,0])
        self.last_params = None
        self.executing = False
        self.last_footstep_plan_time = -np.inf

    def handle_bdi_transform(self, channel, msg):
        # Cache the latest local->local-BDI transform.
        if isinstance(msg, str):
            msg = bot_core.rigid_transform_t.decode(msg)
        self.T_local_to_localbdi = msg

    def handle_footstep_plan(self, channel, msg):
        # Accept a committed footstep plan and (in translating mode) start
        # executing it.
        if isinstance(msg, str):
            msg = drc.footstep_plan_t.decode(msg)
        footsteps, opts = decode_footstep_plan(msg)
        self.last_params = msg.params

        if len(footsteps) <= 2:
            # the first two footsteps are always just the positions of the robot's feet, so a plan of two or fewer footsteps is a no-op
            print 'BDI step translator: Empty plan recieved. Not executing.'
            return

        behavior = opts['behavior']
        #if behavior == Behavior.BDI_WALKING:
        #    # duration = 0.6
        #    if len(footsteps) < NUM_REQUIRED_WALK_STEPS+2:
        #        print 'ERROR: Footstep plan must be at least 4 steps for BDI walking translation'
        #        return
        #elif behavior != Behavior.BDI_STEPPING:
        #    print "BDI step translator: Ignoring footstep plan without BDI_WALKING or BDI_STEPPING behavior"
        #    return
        self.behavior = behavior

        now = time.time()
        if now - self.last_footstep_plan_time > PLAN_UPDATE_TIMEOUT:
            # Too long since the previous plan: treat this as a new plan
            # rather than an update to the running one.
            self.executing = False
        self.last_footstep_plan_time = now

        if self.mode == Mode.plotting:
            self.draw(footsteps)
        else:
            #if not self.executing:
            print "Starting new footstep plan"
            self.bdi_step_queue_in = footsteps
            self.send_params(1)
            if not self.safe:
                m = "BDI step translator: Steps received; transitioning to {:s}".format("BDI_STEP" if self.behavior == Behavior.BDI_STEPPING else "BDI_WALK")
                print m
                ut.send_status(6,0,0,m)
                time.sleep(1)
                self.executing = True
                self.send_behavior()
            else:
                m = "BDI step translator: Steps received; in SAFE mode; not transitioning to {:s}".format("BDI_STEP" if self.behavior == Behavior.BDI_STEPPING else "BDI_WALK")
                print m
                ut.send_status(6,0,0,m)
            #else:
            #    print "Got updated footstep plan"
            #    if self.bdi_step_queue_in[self.delivered_index-1].is_right_foot == footsteps[0].is_right_foot:
            #        print "Re-aligning new footsteps to current plan"
            #        self.bdi_step_queue_in = self.bdi_step_queue_in[:self.delivered_index-1] + footsteps
            #    else:
            #        print "Can't align the updated plan to the current plan"
            #        return

    @property
    def bdi_step_queue_out(self):
        # Transform the queued footsteps into BDI's frame and message types.
        # NOTE(review): this property getter has a side effect -- it
        # publishes BDI_ADJUSTED_FOOTSTEP_PLAN on every access.
        bdi_step_queue_out = [s.copy() for s in self.bdi_step_queue_in]
        for step in bdi_step_queue_out:
            # Transform to BDI coordinate frame
            T1 = ut.mk_transform(step.pos[:3], step.pos[3:])
            T2 = ut.mk_transform(self.T_local_to_localbdi.trans, ut.quat2rpy(self.T_local_to_localbdi.quat))
            T = T2.dot(T1)
            step.pos[:3] = T[:3,3]
            step.pos[3:] = ut.rotmat2rpy(T[:3,:3])

        self.lc.publish('BDI_ADJUSTED_FOOTSTEP_PLAN', encode_footstep_plan(bdi_step_queue_out, self.last_params).encode())

        for step in bdi_step_queue_out:
            # Express pos of the center of the foot, as expected by BDI
            R = ut.rpy2rotmat(step.pos[3:])
            offs = R.dot(ATLAS_FRAME_OFFSET)
            # import pdb; pdb.set_trace()
            step.pos[:3] += offs

        # Convert absolute step heights into heights relative to the
        # previous step (walking backwards through the queue).
        for i in reversed(range(2, len(bdi_step_queue_out))):
            bdi_step_queue_out[i].pos[2] -= bdi_step_queue_out[i-1].pos[2]

        return [s.to_bdi_spec(self.behavior, j+1) for j, s in enumerate(bdi_step_queue_out[2:])]

    def handle_atlas_status(self, channel, msg):
        # Feed the controller the next step(s) whenever it reports which
        # step index it needs next.
        if (not self.executing) or self.mode != Mode.translating:
            return
        if isinstance(msg, str):
            msg = atlas.status_t.decode(msg)
        if self.behavior == Behavior.BDI_WALKING:
            index_needed = msg.walk_feedback.next_step_index_needed
            # if (self.delivered_index + 1) < index_needed <= len(self.bdi_step_queue_in) - 4:
            if index_needed <= len(self.bdi_step_queue_in) - 4:
                #print "Handling request for next step: {:d}".format(index_needed)
                self.send_params(index_needed-1)
            else:
                self.executing = False
        else:
            index_needed = msg.step_feedback.next_step_index_needed
            if index_needed > 1 and index_needed > self.delivered_index:
                # we're starting a new step, so publish the expected double support configuration
                self.send_expected_double_support()
            # if self.delivered_index < index_needed <= len(self.bdi_step_queue_in) - 2:
            if index_needed <= len(self.bdi_step_queue_in) - 2:
                # print "Handling request for next step: {:d}".format(index_needed)
                self.send_params(index_needed)
            else:
                print "done executing"
                self.executing = False
            # Report progress through the footstep plan execution (only when stepping)
            progress_msg = drc.footstep_plan_progress_t()
            progress_msg.utime = msg.utime
            progress_msg.num_steps = len(self.bdi_step_queue_in) - 2
            progress_msg.current_step = index_needed - 1
            self.lc.publish('FOOTSTEP_PLAN_PROGRESS', progress_msg.encode())

    def send_params(self,step_index,force_stop_walking=False):
        """
        Publish the next steppping footstep or up to the next 4 walking footsteps as needed.
        """
        assert self.mode == Mode.translating, "Translator in Mode.plotting mode is not allowed to send step/walk params"
        if self.behavior == Behavior.BDI_WALKING:
            walk_param_msg = atlas.behavior_walk_params_t()
            walk_param_msg.num_required_walk_steps = NUM_REQUIRED_WALK_STEPS
            walk_param_msg.walk_spec_queue = self.bdi_step_queue_out[step_index-1:step_index+3]
            walk_param_msg.step_queue = [atlas.step_data_t() for j in range(NUM_REQUIRED_WALK_STEPS)] # Unused
            walk_param_msg.use_spec = True
            walk_param_msg.use_relative_step_height = 1 # as of Atlas 2.5.0 this flag is disabled and always acts as if it's set to 1
            walk_param_msg.use_demo_walk = 0
            if force_stop_walking:
                # step_index == -1 tells the BDI controller to stop walking.
                for step in walk_param_msg.walk_spec_queue:
                    step.step_index = -1
            self.lc.publish('ATLAS_WALK_PARAMS', walk_param_msg.encode())
            self.delivered_index = walk_param_msg.walk_spec_queue[0].step_index
            #print "Sent walk params for step indices {:d} through {:d}".format(walk_param_msg.walk_spec_queue[0].step_index, walk_param_msg.walk_spec_queue[-1].step_index)
        elif self.behavior == Behavior.BDI_STEPPING:
            step_param_msg = atlas.behavior_step_params_t()
            step_param_msg.desired_step = atlas.step_data_t() # Unused
            step_param_msg.desired_step_spec = self.bdi_step_queue_out[step_index-1]
            step_param_msg.use_relative_step_height = 1 # as of Atlas 2.5.0 this flag is disabled and always acts as if it's set to 1
            step_param_msg.use_demo_walk = 0
            step_param_msg.use_spec = True
            # NOTE(review): the next two assignments duplicate the two above
            # (desired_step / desired_step_spec are set twice); the second
            # spec assignment re-evaluates the bdi_step_queue_out property,
            # re-publishing BDI_ADJUSTED_FOOTSTEP_PLAN -- confirm intended.
            step_param_msg.desired_step = atlas.step_data_t() # Unused
            step_param_msg.desired_step_spec = self.bdi_step_queue_out[step_index-1]
            if force_stop_walking:
                # step_index == -1 tells the BDI controller to stop walking.
                step_param_msg.desired_step_spec.step_index = -1
            self.lc.publish('ATLAS_STEP_PARAMS', step_param_msg.encode())
            self.delivered_index = step_param_msg.desired_step_spec.step_index
            #print "Sent step params for step index {:d}".format(step_param_msg.desired_step_spec.step_index)
        else:
            raise ValueError("Bad behavior value: {:s}".format(self.behavior))

    def send_expected_double_support(self):
        """
        Publish the next expected double support configuration as a two-element footstep plan to support continuous replanning mode.
        """
        self.lc.publish('NEXT_EXPECTED_DOUBLE_SUPPORT', encode_footstep_plan(self.bdi_step_queue_in[self.delivered_index:self.delivered_index+2], self.last_params).encode())

    def send_behavior(self):
        # Command the Atlas behavior matching the current plan's behavior.
        command_msg = atlas.behavior_command_t()
        command_msg.utime = now_utime()
        if self.behavior == Behavior.BDI_STEPPING:
            command_msg.command = "step"
        elif self.behavior == Behavior.BDI_WALKING:
            command_msg.command = "walk"
        else:
            raise ValueError("Tried to send invalid behavior to Atlas: {:s}".format(self.behavior))
        self.lc.publish("ATLAS_BEHAVIOR_COMMAND", command_msg.encode())

    def handle_stop_walking(self, channel, msg_data):
        """
        Generate a set of footsteps with -1 step indices, which will cause the BDI controller to switch to standing instead of continuing to walk
        """
        if self.behavior == Behavior.BDI_WALKING:
            n_steps = 6
        else:
            n_steps = 3
        footsteps = [FootGoal(pos=np.zeros((6)),
                              step_speed=0,
                              step_height=0,
                              step_id=0,
                              pos_fixed=np.zeros((6,1)),
                              is_right_foot=0,
                              is_in_contact=0,
                              bdi_step_duration=0,
                              bdi_sway_duration=0,
                              bdi_lift_height=0,
                              bdi_toe_off=0,
                              bdi_knee_nominal=0,
                              bdi_max_body_accel=0,
                              bdi_max_foot_vel=0,
                              bdi_sway_end_dist=-1,
                              bdi_step_end_dist=-1,
                              terrain_pts=np.matrix([]))] * n_steps
        self.bdi_step_queue_in = footsteps
        self.send_params(1, force_stop_walking=True)
        self.bdi_step_queue_in = [] # to prevent infinite spewing of -1 step indices
        self.delivered_index = None
        self.executing = False

    def run(self):
        # Subscribe to the channels appropriate for this mode, then spin on
        # the LCM handle loop forever.
        if self.mode == Mode.translating:
            print "IHMCStepTranslator running in robot-side translator mode"
            self.lc.subscribe('COMMITTED_FOOTSTEP_PLAN', self.handle_footstep_plan)
            #self.lc.subscribe('STOP_WALKING', self.handle_stop_walking)
        else:
            print "IHMCStepTranslator running in base-side plotter mode"
            self.lc.subscribe('FOOTSTEP_PLAN_RESPONSE', self.handle_footstep_plan)
            #self.lc.subscribe('CANDIDATE_BDI_FOOTSTEP_PLAN', self.handle_footstep_plan)
            #self.lc.subscribe('BDI_ADJUSTED_FOOTSTEP_PLAN', self.handle_footstep_plan)
        #self.lc.subscribe('ATLAS_STATUS', self.handle_atlas_status)
        self.lc.subscribe('LOCAL_TO_LOCAL_BDI', self.handle_bdi_transform)
        while True:
            self.lc.handle()

    def draw(self, footsteps):
        """
        Plot a rough guess of each swing foot trajectory, based on the BDI software manual's description of how swing_height and lift_height behave.
        """
        for j in range(len(footsteps)-2):
            st0 = footsteps[j].to_bdi_spec(self.behavior, 0)
            st1 = footsteps[j+2].to_bdi_spec(self.behavior, 0)
            is_stepping = self.behavior==Behavior.BDI_STEPPING
            if is_stepping:
                lift_height = st1.action.lift_height
            else:
                lift_height = None
            draw_swing(self.gl,
                       st0.foot.position,
                       st1.foot.position,
                       st1.action.swing_height,
                       is_stepping=is_stepping,
                       lift_height=lift_height)
        self.gl.switch_buffer()
| bsd-3-clause |
ruslanloman/nova | nova/tests/unit/api/openstack/compute/contrib/test_server_groups.py | 33 | 15330 | # Copyright (c) 2014 Cisco Systems, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_utils import uuidutils
import webob
from nova.api.openstack.compute.contrib import server_groups
from nova.api.openstack.compute.plugins.v3 import server_groups as sg_v21
from nova.api.openstack import extensions
from nova import context
import nova.db
from nova import exception
from nova import objects
from nova import test
from nova.tests.unit.api.openstack import fakes
# Arbitrary fixed UUID strings used as fake identifiers throughout the tests.
FAKE_UUID1 = 'a47ae74e-ab08-447f-8eee-ffd43fc46c16'
FAKE_UUID2 = 'c6e6430a-6563-4efa-9542-5e93c9e97d18'
FAKE_UUID3 = 'b8713410-9ba3-e913-901b-13410ca90121'
class AttrDict(dict):
    # A dict whose keys are also readable as attributes; used to mimic DB
    # row objects (e.g. `row.policies` as well as `row['policies']`).
    def __getattr__(self, k):
        return self[k]
def server_group_template(**kwargs):
    """Build a server-group request body, defaulting the name to 'test'."""
    sgroup = dict(kwargs)
    sgroup.setdefault('name', 'test')
    return sgroup
def server_group_resp_template(**kwargs):
    """Build an expected server-group response body with defaults filled in."""
    sgroup = dict(kwargs)
    # Fresh default containers each call (the tuple literal is rebuilt).
    for key, default in (('name', 'test'), ('policies', []), ('members', [])):
        sgroup.setdefault(key, default)
    return sgroup
def server_group_db(sg):
    """Convert a server-group template into a fake DB row (AttrDict).

    Renames 'id' to 'uuid', fills in the DB bookkeeping columns, and pins
    the integer primary key to 7.
    """
    attrs = dict(sg)
    if 'id' in attrs:
        attrs['uuid'] = attrs.pop('id')
    # pop-with-default then re-insert keeps these keys at the end of the
    # dict, matching the original helper's ordering.
    attrs['policies'] = attrs.pop('policies', [])
    attrs['members'] = attrs.pop('members', [])
    attrs['deleted'] = 0
    attrs['deleted_at'] = None
    attrs['created_at'] = None
    attrs['updated_at'] = None
    attrs.setdefault('user_id', 'user_id')
    attrs.setdefault('project_id', 'project_id')
    attrs['id'] = 7
    return AttrDict(attrs)
class ServerGroupTestV21(test.TestCase):
    """Tests for the v2.1 os-server-groups API controller.

    The v2 variant subclasses this and overrides `validation_error` and
    `_setup_controller`.
    """

    # Exception type raised by the controller on schema/validation failures.
    validation_error = exception.ValidationError

    def setUp(self):
        super(ServerGroupTestV21, self).setUp()
        self._setup_controller()
        self.req = fakes.HTTPRequest.blank('')

    def _setup_controller(self):
        # Overridden in ServerGroupTestV2 to build the legacy controller.
        self.controller = sg_v21.ServerGroupController()

    def test_create_server_group_with_no_policies(self):
        sgroup = server_group_template()
        self.assertRaises(self.validation_error, self.controller.create,
                          self.req, body={'server_group': sgroup})

    def _create_server_group_normal(self, policies):
        # Helper: create a group with the given policies and verify the
        # response echoes them back with a UUID id.
        sgroup = server_group_template()
        sgroup['policies'] = policies
        res_dict = self.controller.create(self.req,
                                          body={'server_group': sgroup})
        self.assertEqual(res_dict['server_group']['name'], 'test')
        self.assertTrue(uuidutils.is_uuid_like(res_dict['server_group']['id']))
        self.assertEqual(res_dict['server_group']['policies'], policies)

    def test_create_server_group(self):
        policies = ['affinity', 'anti-affinity']
        for policy in policies:
            self._create_server_group_normal([policy])

    def _create_instance(self, context):
        # Persist a minimal fake instance for membership tests.
        instance = objects.Instance(context=context, image_ref=1, node='node1',
                reservation_id='a', host='host1', project_id='fake',
                vm_state='fake', system_metadata={'key': 'value'})
        instance.create()
        return instance

    def _create_instance_group(self, context, members):
        # Persist an instance group with the given member uuids.
        ig = objects.InstanceGroup(context=context, name='fake_name',
                  user_id='fake_user', project_id='fake',
                  members=members)
        ig.create()
        return ig.uuid

    def _create_groups_and_instances(self, ctx):
        # Two instances plus a group containing both of them.
        instances = [self._create_instance(ctx), self._create_instance(ctx)]
        members = [instance.uuid for instance in instances]
        ig_uuid = self._create_instance_group(ctx, members)
        return (ig_uuid, instances, members)

    def test_display_members(self):
        ctx = context.RequestContext('fake_user', 'fake')
        (ig_uuid, instances, members) = self._create_groups_and_instances(ctx)
        res_dict = self.controller.show(self.req, ig_uuid)
        result_members = res_dict['server_group']['members']
        self.assertEqual(2, len(result_members))
        for member in members:
            self.assertIn(member, result_members)

    def test_display_active_members_only(self):
        ctx = context.RequestContext('fake_user', 'fake')
        (ig_uuid, instances, members) = self._create_groups_and_instances(ctx)

        # delete an instance
        instances[1].destroy()
        # check that the instance does not exist
        self.assertRaises(exception.InstanceNotFound,
                          objects.Instance.get_by_uuid,
                          ctx, instances[1].uuid)
        res_dict = self.controller.show(self.req, ig_uuid)
        result_members = res_dict['server_group']['members']
        # check that only the active instance is displayed
        self.assertEqual(1, len(result_members))
        self.assertIn(instances[0].uuid, result_members)

    def test_create_server_group_with_non_alphanumeric_in_name(self):
        # The fix for bug #1434335 expanded the allowable character set
        # for server group names to include non-alphanumeric characters
        # if they are printable.
        sgroup = server_group_template(name='good* $%name',
                                       policies=['affinity'])
        res_dict = self.controller.create(self.req,
                                          body={'server_group': sgroup})
        self.assertEqual(res_dict['server_group']['name'], 'good* $%name')

    def test_create_server_group_with_illegal_name(self):
        # blank name
        sgroup = server_group_template(name='', policies=['test_policy'])
        self.assertRaises(self.validation_error, self.controller.create,
                          self.req, body={'server_group': sgroup})

        # name with length 256
        sgroup = server_group_template(name='1234567890' * 26,
                                       policies=['test_policy'])
        self.assertRaises(self.validation_error, self.controller.create,
                          self.req, body={'server_group': sgroup})

        # non-string name
        sgroup = server_group_template(name=12, policies=['test_policy'])
        self.assertRaises(self.validation_error, self.controller.create,
                          self.req, body={'server_group': sgroup})

        # name with leading spaces
        sgroup = server_group_template(name='  leading spaces',
                                       policies=['test_policy'])
        self.assertRaises(self.validation_error, self.controller.create,
                          self.req, body={'server_group': sgroup})

        # name with trailing spaces
        sgroup = server_group_template(name='trailing space ',
                                       policies=['test_policy'])
        self.assertRaises(self.validation_error, self.controller.create,
                          self.req, body={'server_group': sgroup})

        # name with all spaces
        sgroup = server_group_template(name='    ',
                                       policies=['test_policy'])
        self.assertRaises(self.validation_error, self.controller.create,
                          self.req, body={'server_group': sgroup})

        # name with unprintable character
        sgroup = server_group_template(name='bad\x00name',
                                       policies=['test_policy'])
        self.assertRaises(self.validation_error, self.controller.create,
                          self.req, body={'server_group': sgroup})

        # name with out of range char U0001F4A9
        sgroup = server_group_template(name=u"\U0001F4A9",
                                       policies=['affinity'])
        self.assertRaises(self.validation_error, self.controller.create,
                          self.req, body={'server_group': sgroup})

    def test_create_server_group_with_illegal_policies(self):
        # blank policy
        sgroup = server_group_template(name='fake-name', policies='')
        self.assertRaises(self.validation_error, self.controller.create,
                          self.req, body={'server_group': sgroup})

        # policy as integer
        sgroup = server_group_template(name='fake-name', policies=7)
        self.assertRaises(self.validation_error, self.controller.create,
                          self.req, body={'server_group': sgroup})

        # policy as string
        sgroup = server_group_template(name='fake-name', policies='invalid')
        self.assertRaises(self.validation_error, self.controller.create,
                          self.req, body={'server_group': sgroup})

        # policy as None
        sgroup = server_group_template(name='fake-name', policies=None)
        self.assertRaises(self.validation_error, self.controller.create,
                          self.req, body={'server_group': sgroup})

    def test_create_server_group_conflicting_policies(self):
        sgroup = server_group_template()
        policies = ['anti-affinity', 'affinity']
        sgroup['policies'] = policies
        self.assertRaises(self.validation_error, self.controller.create,
                          self.req, body={'server_group': sgroup})

    def test_create_server_group_with_duplicate_policies(self):
        sgroup = server_group_template()
        policies = ['affinity', 'affinity']
        sgroup['policies'] = policies
        self.assertRaises(self.validation_error, self.controller.create,
                          self.req, body={'server_group': sgroup})

    def test_create_server_group_not_supported(self):
        sgroup = server_group_template()
        policies = ['storage-affinity', 'anti-affinity', 'rack-affinity']
        sgroup['policies'] = policies
        self.assertRaises(self.validation_error, self.controller.create,
                          self.req, body={'server_group': sgroup})

    def test_create_server_group_with_no_body(self):
        self.assertRaises(self.validation_error,
                          self.controller.create, self.req, body=None)

    def test_create_server_group_with_no_server_group(self):
        body = {'no-instanceGroup': None}
        self.assertRaises(self.validation_error,
                          self.controller.create, self.req, body=body)

    def test_list_server_group_by_tenant(self):
        groups = []
        policies = ['anti-affinity']
        members = []
        metadata = {}  # always empty
        names = ['default-x', 'test']
        sg1 = server_group_resp_template(id=str(1345),
                                         name=names[0],
                                         policies=policies,
                                         members=members,
                                         metadata=metadata)
        sg2 = server_group_resp_template(id=str(891),
                                         name=names[1],
                                         policies=policies,
                                         members=members,
                                         metadata=metadata)
        groups = [sg1, sg2]
        expected = {'server_groups': groups}

        def return_server_groups(context, project_id):
            return [server_group_db(sg) for sg in groups]

        # Stub the DB layer so the controller returns our fixtures.
        self.stubs.Set(nova.db, 'instance_group_get_all_by_project_id',
                       return_server_groups)

        res_dict = self.controller.index(self.req)
        self.assertEqual(res_dict, expected)

    def test_list_server_group_all(self):
        all_groups = []
        tenant_groups = []
        policies = ['anti-affinity']
        members = []
        metadata = {}  # always empty
        names = ['default-x', 'test']
        sg1 = server_group_resp_template(id=str(1345),
                                         name=names[0],
                                         policies=[],
                                         members=members,
                                         metadata=metadata)
        sg2 = server_group_resp_template(id=str(891),
                                         name=names[1],
                                         policies=policies,
                                         members=members,
                                         metadata={})
        tenant_groups = [sg2]
        all_groups = [sg1, sg2]

        all = {'server_groups': all_groups}
        tenant_specific = {'server_groups': tenant_groups}

        def return_all_server_groups(context):
            return [server_group_db(sg) for sg in all_groups]

        self.stubs.Set(nova.db, 'instance_group_get_all',
                       return_all_server_groups)

        def return_tenant_server_groups(context, project_id):
            return [server_group_db(sg) for sg in tenant_groups]

        self.stubs.Set(nova.db, 'instance_group_get_all_by_project_id',
                       return_tenant_server_groups)

        # all_projects=True (admin) should list every group; without it only
        # the tenant's own groups are returned.
        path = '/os-server-groups?all_projects=True'
        req = fakes.HTTPRequest.blank(path, use_admin_context=True)
        res_dict = self.controller.index(req)
        self.assertEqual(res_dict, all)
        req = fakes.HTTPRequest.blank(path)
        res_dict = self.controller.index(req)
        self.assertEqual(res_dict, tenant_specific)

    def test_delete_server_group_by_id(self):
        sg = server_group_template(id='123')
        self.called = False

        def server_group_delete(context, id):
            self.called = True

        def return_server_group(context, group_id):
            self.assertEqual(sg['id'], group_id)
            return server_group_db(sg)

        self.stubs.Set(nova.db, 'instance_group_delete',
                       server_group_delete)
        self.stubs.Set(nova.db, 'instance_group_get',
                       return_server_group)

        resp = self.controller.delete(self.req, '123')
        self.assertTrue(self.called)

        # NOTE: on v2.1, http status code is set as wsgi_code of API
        # method instead of status_int in a response object.
        if isinstance(self.controller, sg_v21.ServerGroupController):
            status_int = self.controller.delete.wsgi_code
        else:
            status_int = resp.status_int
        self.assertEqual(204, status_int)

    def test_delete_non_existing_server_group(self):
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.delete,
                          self.req, 'invalid')
class ServerGroupTestV2(ServerGroupTestV21):
    # Legacy v2 API variant: validation failures surface as HTTP 400
    # responses rather than ValidationError exceptions.
    validation_error = webob.exc.HTTPBadRequest

    def _setup_controller(self):
        # The v2 contrib controller takes an extension manager; an empty one
        # is sufficient for these tests.
        ext_mgr = extensions.ExtensionManager()
        ext_mgr.extensions = {}
        self.controller = server_groups.ServerGroupController(ext_mgr)
| apache-2.0 |
paplorinc/intellij-community | python/helpers/python-skeletons/os/__init__.py | 19 | 23905 | """Skeleton for 'os' stdlib module."""
from __future__ import unicode_literals
import io
import os
import subprocess
import sys
# os.error is an alias of OSError (kept for backward compatibility).
error = OSError

# --- Process/user identity getters.  These are IDE skeleton stubs: the
# --- docstrings carry the contract; the bodies only return dummy values.

def ctermid():
    """Return the filename corresponding to the controlling terminal of the
    process.

    :rtype: string
    """
    return ''


def getegid():
    """Return the effective group id of the current process.

    :rtype: int
    """
    return 0


def geteuid():
    """Return the current process's effective user id.

    :rtype: int
    """
    return 0


def getgid():
    """Return the real group id of the current process.

    :rtype: int
    """
    return 0


def getgroups():
    """Return list of supplemental group ids associated with the current
    process.

    :rtype: list[int]
    """
    return []


if sys.version_info >= (2, 7):
    # Only available from Python 2.7 onwards.
    def initgroups(username, gid):
        """Call the system initgroups() to initialize the group access list
        with all of the groups of which the specified username is a member,
        plus the specified group id.

        :type username: string
        :type gid: int
        :rtype: None
        """
        pass


def getlogin():
    """Return the name of the user logged in on the controlling terminal of the
    process.

    :rtype: string
    """
    return ''


def getpgid(pid):
    """Return the process group id of the process with process id pid.

    :type pid: int
    :rtype: int
    """
    return 0


def getpgrp():
    """Return the id of the current process group.

    :rtype: int
    """
    return 0


def getpid():
    """Return the current process id.

    :rtype: int
    """
    return 0


def getppid():
    """Return the parent's process id.

    :rtype: int
    """
    return 0


if sys.version_info >= (2, 7):
    # Only available from Python 2.7 onwards.
    def getresuid():
        """Return a tuple (ruid, euid, suid) denoting the current process's
        real, effective, and saved user ids.

        :rtype: (int, int, int)
        """
        return 0, 0, 0


    def getresgid():
        """Return a tuple (rgid, egid, sgid) denoting the current process's
        real, effective, and saved group ids.

        :rtype: (int, int, int)
        """
        return 0, 0, 0


def getuid():
    """Return the current process's user id.

    :rtype: int
    """
    return 0


def getenv(varname, value=None):
    """Return the value of the environment variable varname if it exists, or
    value if it doesn't.

    :type varname: string
    :type value: T
    :rtype: string | T
    """
    pass


def putenv(varname, value):
    """Set the environment variable named varname to the string value.

    :type varname: string
    :type value: string
    :rtype: None
    """
    pass
def setegid(egid):
"""Set the current process's effective group id.
:type egid: int
:rtype: None
"""
pass
def seteuid(euid):
"""Set the current process's effective user id.
:type euid: int
:rtype: None
"""
pass
def setgid(gid):
"""Set the current process' group id.
:type gid: int
:rtype: None
"""
pass
def setgroups(groups):
"""Set the list of supplemental group ids associated with the current
process to groups.
:type groups: collections.Iterable[int]
:rtype: None
"""
pass
def setpgid(pid, pgrp):
"""Call the system call setpgid() to set the process group id of the
process with id pid to the process group with id pgrp.
:type pid: int
:type pgrp: int
:rtype: None
"""
pass
def setregid(rgid, egid):
"""Set the current process's real and effective group ids.
:type rgid: int
:type egid: int
:rtype: None
"""
pass
if sys.version_info >= (2, 7):
    def setresgid(rgid, egid, sgid):
        """Set the current process's real, effective, and saved group ids.
        :type rgid: int
        :type egid: int
        :type sgid: int
        :rtype: None
        """
        pass
    def setresuid(ruid, euid, suid):
        """Set the current process's real, effective, and saved user ids.
        :type ruid: int
        :type euid: int
        :type suid: int
        :rtype: None
        """
        pass
def setreuid(ruid, euid):
    """Set the current process's real and effective user ids.
    :type ruid: int
    :type euid: int
    :rtype: None
    """
    pass
def setsid():
    """Call the system call setsid().
    :rtype: None
    """
    pass
def setuid(uid):
"""Set the current process's user id.
:type uid: int
:rtype: None
"""
pass
def strerror(code):
"""Return the error message corresponding to the error code in code.
:type code: int
:rtype: string
"""
return ''
def umask(mask):
"""Set the current numeric umask and return the previous umask.
:type mask: int
:rtype: int
"""
return 0
def uname():
"""Return a 5-tuple containing information identifying the current
operating system.
:rtype: (string, string, string, string, string)
"""
return '', '', '', '', ''
def unsetenv(varname):
"""Unset (delete) the environment variable named varname.
:type varname: string
:rtype: None
"""
pass
def fdopen(fd, mode='r', bufsize=-1):
"""Return an open file object connected to the file descriptor fd.
:type fd: int
:type mode: string
:type bufsize: int
:rtype: file
"""
return file()
def popen(command, mode='r', bufsize=-1):
"""Open a pipe to or from command.
:type command: string
:type mode: string
:type bufsize: int
:rtype: os._wrap_close
"""
pass
class _wrap_close(io.TextIOWrapper[unicode]):
def __init__(self, stream, proc):
"""
:type stream: io.TextIOWrapper[unicode]
:type proc: subprocess.Popen
"""
pass
def close(self):
"""
:rtype: int | None
"""
pass
def __enter__(self):
"""
:rtype: os._wrap_close
"""
pass
def __exit__(self, *args):
pass
def __iter__(self):
"""
:rtype: collections.Iterator[unicode]
"""
pass
def tmpfile():
"""Return a new file object opened in update mode (w+b).
:rtype: io.FileIO[bytes]
"""
pass
def popen2(cmd, mode='r', bufsize=-1):
"""Execute cmd as a sub-process and return the file objects (child_stdin,
child_stdout).
:type cmd: string
:type mode: string
:type bufsize: int
:rtype: (io.FileIO[bytes], io.FileIO[bytes])
"""
pass
def popen3(cmd, mode='r', bufsize=-1):
"""Execute cmd as a sub-process and return the file objects (child_stdin,
child_stdout, child_stderr).
:type cmd: string
:type mode: string
:type bufsize: int
:rtype: (io.FileIO[bytes], io.FileIO[bytes], io.FileIO[bytes])
"""
pass
def popen4(cmd, mode='r', bufsize=-1):
"""Execute cmd as a sub-process and return the file objects (child_stdin,
child_stdout_and_stderr).
:type cmd: string
:type mode: string
:type bufsize: int
:rtype: (io.FileIO[bytes], io.FileIO[bytes])
"""
pass
def close(fd):
"""Close file descriptor fd.
:type fd: int
:rtype: None
"""
pass
if sys.version_info >= (2, 6):
def closerange(fd_low, fd_high):
"""Close all file descriptors from fd_low (inclusive) to fd_high
(exclusive), ignoring errors.
:type fd_low: int
:type fd_high: int
:rtype: None
"""
pass
def dup(fd):
"""Return a duplicate of file descriptor fd.
:type fd: int
:rtype: int
"""
return 0
def dup2(fd, fd2):
"""Duplicate file descriptor fd to fd2, closing the latter first if
necessary.
:type fd: int
:type fd2: int
:rtype: None
"""
pass
if sys.version_info >= (2, 6):
def fchmod(fd, mode):
"""Change the mode of the file given by fd to the numeric mode.
:type fd: int
:type mode: int
:rtype: None
"""
pass
def fchown(fd, uid, gid):
"""Change the owner and group id of the file given by fd to the numeric
uid and gid.
:type fd: int
:type uid: int
:type gid: int
:rtype: None
"""
pass
def fdatasync(fd):
"""Force write of file with filedescriptor fd to disk.
:type fd: int
:rtype: None
"""
pass
def fpathconf(fd, name):
"""Return system configuration information relevant to an open file.
:type fd: int
:type name: string | int
"""
pass
def fstat(fd):
"""Return status for file descriptor fd, like stat().
:type fd: int
:rtype: os.stat_result
"""
pass
def fstatvfs(fd):
"""Return information about the filesystem containing the file associated
with file descriptor fd, like statvfs().
:type fd: int
:rtype: os.statvfs_result
"""
pass
def fsync(fd):
"""Force write of file with filedescriptor fd to disk.
:type fd: int
:rtype: None
"""
pass
def ftruncate(fd, length):
"""Truncate the file corresponding to file descriptor fd, so that it is at
most length bytes in size.
:type fd: int
:type length: numbers.Integral
:rtype: None
"""
pass
def isatty(fd):
"""Return True if the file descriptor fd is open and connected to a
tty(-like) device, else False.
:type fd: int
:rtype: bool
"""
return False
def lseek(fd, pos, how):
"""Set the current position of file descriptor fd to position pos, modified
by how.
:type fd: int
:type pos: numbers.Integral
:type how: int
:rtype: None
"""
pass
def open(file, flags, mode=0o777):
"""Open the file file and set various flags according to flags and possibly
its mode according to mode.
:type file: string
:type flags: int
:type mode: int
:rtype: int
"""
return 0
def openpty():
"""Open a new pseudo-terminal pair.
:rtype: (int, int)
"""
return 0, 0
def pipe():
"""Create a pipe.
:rtype: (int, int)
"""
return 0, 0
def read(fd, n):
"""Read at most n bytes from file descriptor fd.
:type fd: int
:type n: numbers.Integral
:rtype: bytes
"""
pass
def tcgetpgrp(fd):
"""Return the process group associated with the terminal given by fd.
:type fd: int
:rtype: int
"""
return 0
def tcsetpgrp(fd, pg):
"""Set the process group associated with the terminal given by fd to pg.
:type fd: int
:type pg: int
:rtype: None
"""
pass
def ttyname(fd):
"""Return a string which specifies the terminal device associated with file
descriptor fd.
:type fd: int
:rtype: string
"""
return ''
def write(fd, str):
"""Write the string str to file descriptor fd. Return the number of bytes
actually written.
:type fd: int
:type str: bytes
:rtype: int
"""
return 0
def access(path, mode):
"""Use the real uid/gid to test for access to path.
:type path: bytes | unicode
:type mode: int
:rtype: bool
"""
return False
def chdir(path):
"""Change the current working directory to path.
:type path: bytes | unicode
:rtype: None
"""
pass
def fchdir(fd):
"""Change the current working directory to the directory represented by the
file descriptor fd.
:type fd: int
:rtype: None
"""
pass
def getcwd():
"""Return a string representing the current working directory.
:rtype: string
"""
return ''
if sys.version_info < (3, 0):
def getcwdu():
"""Return a Unicode object representing the current working directory.
:rtype: unicode
"""
return ''
def chflags(path, flags):
"""Set the flags of path to the numeric flags.
:type path: bytes | unicode
:type flags: int
:rtype: None
"""
pass
def chroot(path):
"""Change the root directory of the current process to path.
:type path: bytes | unicode
:rtype: None
"""
pass
def chmod(path, mode):
"""Change the mode of path to the numeric mode.
:type path: bytes | unicode
:type mode: int
:rtype: None
"""
pass
def chown(path, uid, gid):
"""Change the owner and group id of path to the numeric uid and gid.
:type path: bytes | unicode
:type uid: int
:type gid: int
:rtype: None
"""
pass
def lchflags(path, flags):
"""Set the flags of path to the numeric flags, like chflags(), but do not
follow symbolic links.
:type path: bytes | unicode
:type flags: int
:rtype: None
"""
pass
def lchmod(path, mode):
"""Change the mode of path to the numeric mode. If path is a symlink, this
affects the symlink rather than the target.
:type path: bytes | unicode
:type mode: int
:rtype: None
"""
pass
def lchown(path, uid, gid):
"""Change the owner and group id of path to the numeric uid and gid. This
function will not follow symbolic links.
:type path: bytes | unicode
:type uid: int
:type gid: int
:rtype: None
"""
pass
def link(source, link_name):
"""Create a hard link pointing to source named link_name.
:type source: bytes | unicode
:type link_name: bytes | unicode
:rtype: None
"""
pass
def listdir(path):
"""Return a list containing the names of the entries in the directory given
by path.
:type path: T <= bytes | unicode
:rtype: list[T]
"""
return []
def lstat(path):
"""Perform the equivalent of an lstat() system call on the given path.
Similar to stat(), but does not follow symbolic links.
:type path: bytes | unicode
:rtype: os.stat_result
"""
pass
def mkfifo(path, mode=0o666):
"""Create a FIFO (a named pipe) named path with numeric mode mode.
:type path: bytes | unicode
:type mode: int
:rtype: None
"""
pass
def mknod(filename, mode=0o600, device=0):
"""Create a filesystem node (file, device special file or named pipe) named
filename.
:type filename: bytes | unicode
:type mode: int
:type device: int
:rtype: None
"""
pass
def major(device):
"""Extract the device major number from a raw device number (usually the
st_dev or st_rdev field from stat).
:type device: int
:rtype: int
"""
return 0
def minor(device):
"""Extract the device minor number from a raw device number (usually the
st_dev or st_rdev field from stat).
:type device: int
:rtype: int
"""
return 0
def makedev(major, minor):
"""Compose a raw device number from the major and minor device numbers.
:type major: int
:type minor: int
:rtype: int
"""
return 0
def mkdir(path, mode=0o777):
"""Create a directory named path with numeric mode mode.
:type path: bytes | unicode
:type mode: int
:rtype: None
"""
pass
def makedirs(path, mode=0o777, exist_ok=False):
"""Recursive directory creation function.
:type path: bytes | unicode
:type mode: int
:type exist_ok: int
:rtype: None
"""
pass
def pathconf(path, name):
"""Return system configuration information relevant to a named file.
:type path: bytes | unicode
:type name: int | string
"""
pass
def readlink(path):
"""Return a string representing the path to which the symbolic link points.
:type path: T <= bytes | unicode
:rtype: T
"""
return path
def remove(path):
"""Remove (delete) the file path.
:type path: bytes | unicode
:rtype: None
"""
pass
def removedirs(path):
"""Remove directories recursively.
:type path: bytes | unicode
:rtype: None
"""
pass
def rename(src, dst):
"""Rename the file or directory src to dst.
:type src: bytes | unicode
:type dst: bytes | unicode
:rtype: None
"""
pass
def renames(old, new):
"""Recursive directory or file renaming function.
:type old: bytes | unicode
:type new: bytes | unicode
:rtype: None
"""
pass
def rmdir(path):
"""Remove (delete) the directory path.
:type path: bytes | unicode
:rtype: None
"""
pass
def stat(path, dir_fd=None, follow_symlinks=True):
"""Perform the equivalent of a stat() system call on the given path.
:type path: bytes | unicode | int
:type dir_fd: int | None
:type follow_symlinks: bool | None
:rtype: os.stat_result
"""
pass
def stat_float_times(newvalue=None):
"""Determine whether stat_result represents time stamps as float objects.
:type newvalue: bool | None
:rtype: bool
"""
return False
def statvfs(path):
"""Perform a statvfs() system call on the given path.
:type path: bytes | unicode
:rtype: os.statvfs_result
"""
pass
def symlink(source, link_name, target_is_directory=False, dir_fd=None):
"""Create a symbolic link pointing to source named link_name.
:type source: bytes | unicode
:type link_name: bytes| unicode
:type target_is_directory: bool
:type dir_fd: int | None
:rtype: None
"""
pass
def tempnam(dir=None, prefix=None):
"""Return a unique path name that is reasonable for creating a temporary
file.
:type dir: bytes | unicode
:type prefix: bytes | unicode
:rtype: string
"""
return ''
def tmpnam():
"""Return a unique path name that is reasonable for creating a temporary
file.
:rtype: string
"""
return ''
def unlink(path):
"""Remove (delete) the file path.
:type path: bytes | unicode
:rtype: None
"""
pass
def utime(path, times):
"""Set the access and modified times of the file specified by path.
:type path: bytes | unicode
:type times: (numbers.Real, numbers.Real) | None
:rtype: None
"""
pass
def walk(top, topdown=True, onerror=None, followlinks=False):
"""Generate the file names in a directory tree by walking the tree either
top-down or bottom-up.
:type top: T <= bytes | unicode
:type topdown: bool
:type onerror: ((Exception) -> None) | None
:rtype: collections.Iterator[(T, list[T], list[T])]
"""
return []
def execl(path, *args):
"""Execute a new program, replacing the current process.
:type path: bytes | unicode
:rtype: None
"""
pass
def execle(path, *args):
"""Execute a new program, replacing the current process.
:type path: bytes | unicode
:rtype: None
"""
pass
def execlp(file, *args):
"""Execute a new program, replacing the current process.
:type file: bytes | unicode
:rtype: None
"""
pass
def execlpe(file, *args):
"""Execute a new program, replacing the current process.
:type file: bytes | unicode
:rtype: None
"""
pass
def execv(path, args):
"""Execute a new program, replacing the current process.
:type path: bytes | unicode
:type args: collections.Iterable
:rtype: None
"""
pass
def execve(path, args, env):
"""Execute a new program, replacing the current process.
:type path: bytes | unicode
:type args: collections.Iterable
:type env: collections.Mapping
:rtype: None
"""
pass
def execvp(file, args):
"""Execute a new program, replacing the current process.
:type file: bytes | unicode
:type args: collections.Iterable
:rtype: None
"""
pass
def execvpe(file, args, env):
"""Execute a new program, replacing the current process.
:type file: bytes | unicode
:type args: collections.Iterable
:type env: collections.Mapping
:rtype: None
"""
pass
def _exit(n):
"""Exit the process with status n, without calling cleanup handlers,
flushing stdio buffers, etc.
:type n: int
:rtype: None
"""
pass
def fork():
"""Fork a child process.
:rtype: int
"""
return 0
def forkpty():
"""Fork a child process, using a new pseudo-terminal as the child's
controlling terminal.
:rtype: (int, int)
"""
return 0, 0
def kill(pid, sig):
"""Send signal sig to the process pid.
:type pid: int
:type sig: int
:rtype: None
"""
pass
def killpg(pgid, sig):
"""Send the signal sig to the process group pgid.
:type pgid: int
:type sig: int
:rtype: None
"""
pass
def nice(increment):
"""Add increment to the process's "niceness".
:type increment: int
:rtype: int
"""
return 0
def plock(op):
"""Lock program segments into memory.
:rtype: None
"""
pass
def spawnl(mode, path, *args):
"""Execute the program path in a new process.
:type mode: int
:type path: bytes | unicode
:rtype: int
"""
return 0
def spawnle(mode, path, *args):
"""Execute the program path in a new process.
:type mode: int
:type path: bytes | unicode
:rtype: int
"""
return 0
def spawnlp(mode, file, *args):
"""Execute the program path in a new process.
:type mode: int
:type file: bytes | unicode
:rtype: int
"""
return 0
def spawnlpe(mode, file, *args):
"""Execute the program path in a new process.
:type mode: int
:type file: bytes | unicode
:rtype: int
"""
return 0
def spawnv(mode, path, args):
"""Execute the program path in a new process.
:type mode: int
:type path: bytes | unicode
:type args: collections.Iterable
:rtype: int
"""
return 0
def spawnve(mode, path, args, env):
"""Execute the program path in a new process.
:type mode: int
:type path: bytes | unicode
:type args: collections.Iterable
:type env: collections.Mapping
:rtype: int
"""
return 0
def spawnvp(mode, file, args):
"""Execute the program path in a new process.
:type mode: int
:type file: bytes | unicode
:type args: collections.Iterable
:rtype: int
"""
return 0
def spawnvpe(mode, file, args, env):
"""Execute the program path in a new process.
:type mode: int
:type file: bytes | unicode
:type args: collections.Iterable
:type env: collections.Mapping
:rtype: int
"""
return 0
def system(command):
"""Execute the command (a string) in a subshell.
:type command: bytes | unicode
:rtype: int
"""
return 0
def times():
"""Return a 5-tuple of floating point numbers indicating accumulated
(processor or other) times, in seconds.
:rtype: (float, float, float, float, float)
"""
return 0.0, 0.0, 0.0, 0.0, 0.0
def wait():
"""Wait for completion of a child process, and return a tuple containing
its pid and exit status indication
:rtype: (int, int)
"""
return 0, 0
def waitpid(pid, options):
"""Wait for completion of a child process given by process id pid, and
return a tuple containing its process id and exit status indication.
:type pid: int
:type options: int
:rtype: (int, int)
"""
return 0, 0
def wait3(options):
"""Similar to waitpid(), except no process id argument is given and a
3-element tuple containing the child's process id, exit status indication,
and resource usage information is returned.
:type options: int
:rtype: (int, int, resource.struct_rusage)
"""
pass
def wait4(pid, options):
"""Similar to waitpid(), except a 3-element tuple, containing the child's
process id, exit status indication, and resource usage information is
returned.
:type pid: int
:type options: int
:rtype: (int, int, resource.struct_rusage)
"""
pass
def urandom(n):
    """Return a string of n random bytes suitable for cryptographic use.

    Stub implementation: always returns an empty byte string.

    :type n: int
    :rtype: bytes
    """
    return b''
davepmer/test-kernel | tools/perf/scripts/python/syscall-counts-by-pid.py | 11180 | 1927 | # system call counts, by pid
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide system call totals, broken down by syscall.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os, sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import syscall_name
# Usage string shown when too many arguments are supplied.
usage = "perf script -s syscall-counts-by-pid.py [comm]\n";
# Optional filter: restrict the report to one command name or one pid.
for_comm = None
for_pid = None
if len(sys.argv) > 2:
    sys.exit(usage)
if len(sys.argv) > 1:
    # A numeric argument is treated as a pid, anything else as a comm name.
    try:
        for_pid = int(sys.argv[1])
    except:
        for_comm = sys.argv[1]
# Auto-vivifying nested dict: comm -> pid -> syscall id -> count.
syscalls = autodict()
def trace_begin():
    # perf-script callback: invoked once before any events are processed.
    print "Press control+C to stop and show the summary"
def trace_end():
    # perf-script callback: invoked after the last event; emit the report.
    print_syscall_totals()
def raw_syscalls__sys_enter(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        id, args):
    # Per-event callback for the raw_syscalls:sys_enter tracepoint.
    # Skip events that do not match the optional comm/pid filter.
    if (for_comm and common_comm != for_comm) or \
            (for_pid and common_pid != for_pid ):
        return
    try:
        syscalls[common_comm][common_pid][id] += 1
    except TypeError:
        # First occurrence: the autodict leaf does not exist yet.
        syscalls[common_comm][common_pid][id] = 1
def print_syscall_totals():
    # Emit the per-comm/per-pid syscall frequency table gathered above.
    # (Python 2 print statements; trailing commas suppress the newline.)
    if for_comm is not None:
        print "\nsyscall events for %s:\n\n" % (for_comm),
    else:
        print "\nsyscall events by comm/pid:\n\n",
    print "%-40s %10s\n" % ("comm [pid]/syscalls", "count"),
    print "%-40s %10s\n" % ("----------------------------------------", \
        "----------"),
    comm_keys = syscalls.keys()
    for comm in comm_keys:
        pid_keys = syscalls[comm].keys()
        for pid in pid_keys:
            print "\n%s [%d]\n" % (comm, pid),
            id_keys = syscalls[comm][pid].keys()
            # Sort each pid's syscalls by descending count.
            for id, val in sorted(syscalls[comm][pid].iteritems(), \
                key = lambda(k, v): (v, k), reverse = True):
                print " %-38s %10d\n" % (syscall_name(id), val),
| gpl-2.0 |
albert12132/templar | templar/api/rules/core.py | 1 | 4373 | """
The public API for Templar pre/post-processor rules.
Users can use this module with the following import statement:
from templar.api.rules import core
"""
from templar.exceptions import TemplarError
import re
class Rule:
    """Represents a preprocessor or postprocessor rule. Rules are applied in the order that they
    are listed in the Config.

    When constructing a rule, the arguments `src` and `dst` are regular expressions; Templar will
    only apply a rule if the source and destination of the publishing pipeline match the regexes.

    :raises InvalidRule: if either pattern is neither a string nor None.
    """
    def __init__(self, src=None, dst=None):
        # Validate eagerly so a misconfigured rule fails at construction time
        # rather than when it is first applied.
        if src is not None and not isinstance(src, str):
            raise InvalidRule(
                "Rule's source pattern must be a string or None, "
                "but was type '{}'".format(type(src).__name__))
        if dst is not None and not isinstance(dst, str):
            # Bug fix: this message previously reported type(src).__name__,
            # hiding the actual type of the offending destination pattern.
            raise InvalidRule(
                "Rule's destination pattern must be a string or None, "
                "but was type '{}'".format(type(dst).__name__))
        self._src_pattern = src
        self._dst_pattern = dst

    def applies(self, src, dst):
        """Checks if this rule applies to the given src and dst paths, based on the src pattern and
        dst pattern given in the constructor.

        If src pattern was None, this rule will apply to any given src path (same for dst).

        :return: bool -- True when both configured patterns match.
        """
        if self._src_pattern and (src is None or re.search(self._src_pattern, src) is None):
            return False
        elif self._dst_pattern and (dst is None or re.search(self._dst_pattern, dst) is None):
            return False
        return True

    def apply(self, content):
        """Applies this rule to the given content. A rule can do one or more of the following:

        - Return a string; this is taken to be the transformed version of content, and will be used
          as the new content after applying this rule.
        - Modify variables (a dict). Usually, Rules that modify this dictionary will add new
          variables. However, a Rule can also delete or update key/value pairs in the dictionary.

        Subclasses must override this method.
        """
        raise NotImplementedError
class SubstitutionRule(Rule):
    """Abstract rule that rewrites the processed content with a regex
    substitution, behaving exactly like ``re.sub``.

    Subclasses set ``pattern`` (a string or compiled regex) and implement
    ``substitute``.
    """

    # Subclasses must override this with a string or compiled regex.
    pattern = None

    def substitute(self, match):
        """Return the replacement text for ``match``.

        Must be implemented by subclasses.
        """
        raise InvalidRule(
            '{} must implement the substitute method to be '
            'a valid SubstitutionRule'.format(type(self).__name__))

    def apply(self, content):
        """Run the ``re.sub``-style substitution over ``content``."""
        pattern = self.pattern
        if isinstance(pattern, str):
            return re.sub(pattern, self.substitute, content)
        if callable(getattr(pattern, 'sub', None)):
            return pattern.sub(self.substitute, content)
        raise InvalidRule(
            "{}'s pattern has type '{}', but expected a string or "
            "compiled regex.".format(type(self).__name__, type(self.pattern).__name__))
class VariableRule(Rule):
    """Abstract rule that derives template variables from the content.

    Unlike other rules, ``apply`` returns a dict mapping str -> str instead
    of transformed content.
    """

    def extract(self, content):
        """Return a dict of variables extracted from ``content``.

        Must be implemented by subclasses.
        """
        raise InvalidRule(
            '{} must implement the extract method to be '
            'a valid VariableRule'.format(type(self).__name__))

    def apply(self, content):
        """Delegate to ``extract`` and verify that it produced a dict."""
        variables = self.extract(content)
        if isinstance(variables, dict):
            return variables
        raise InvalidRule(
            "{} is a VariableRule, so its extract method should return a dict. Instead, it "
            "returned type '{}'".format(type(self).__name__, type(variables).__name__))
class InvalidRule(TemplarError):
    """Raised when a Rule is misconfigured or a subclass violates the Rule contract."""
    pass
| mit |
ddki/my_study_project | language/python/frameworks/flask/venv/lib/python2.7/site-packages/werkzeug/debug/repr.py | 107 | 9340 | # -*- coding: utf-8 -*-
"""
werkzeug.debug.repr
~~~~~~~~~~~~~~~~~~~
This module implements object representations for debugging purposes.
Unlike the default repr these reprs expose a lot more information and
produce HTML instead of ASCII.
Together with the CSS and JavaScript files of the debugger this gives
a colorful and more compact output.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD.
"""
import sys
import re
import codecs
from traceback import format_exception_only
try:
from collections import deque
except ImportError: # pragma: no cover
deque = None
from werkzeug.utils import escape
from werkzeug._compat import iteritems, PY2, text_type, integer_types, \
string_types
missing = object()
_paragraph_re = re.compile(r'(?:\r\n|\r|\n){2,}')
RegexType = type(_paragraph_re)
HELP_HTML = '''\
<div class=box>
<h3>%(title)s</h3>
<pre class=help>%(text)s</pre>
</div>\
'''
OBJECT_DUMP_HTML = '''\
<div class=box>
<h3>%(title)s</h3>
%(repr)s
<table>%(items)s</table>
</div>\
'''
def debug_repr(obj):
    """Creates a debug repr of an object as HTML unicode string.

    :param obj: any Python object.
    :return: HTML markup (unicode) describing ``obj``.
    """
    return DebugReprGenerator().repr(obj)
def dump(obj=missing):
    """Print the object details to stdout._write (for the interactive
    console of the web debugger.

    With no argument, the calling frame's local variables are dumped instead.
    """
    gen = DebugReprGenerator()
    if obj is missing:
        # sys._getframe(1) is the caller's frame; dump its locals.
        rv = gen.dump_locals(sys._getframe(1).f_locals)
    else:
        rv = gen.dump_object(obj)
    # The debugger's patched stdout exposes _write for raw HTML output.
    sys.stdout._write(rv)
class _Helper(object):
    """Displays an HTML version of the normal help, for the interactive
    debugger only because it requires a patched sys.stdout.
    """

    def __repr__(self):
        # Shown when the user evaluates `help` without calling it.
        return 'Type help(object) for help about object.'

    def __call__(self, topic=None):
        if topic is None:
            sys.stdout._write('<span class=help>%s</span>' % repr(self))
            return
        import pydoc
        # pydoc writes into the patched stdout; reset() yields the captured text.
        pydoc.help(topic)
        rv = sys.stdout.reset()
        if isinstance(rv, bytes):
            rv = rv.decode('utf-8', 'ignore')
        # Split pydoc's output into a title (first paragraph) and body text.
        paragraphs = _paragraph_re.split(rv)
        if len(paragraphs) > 1:
            title = paragraphs[0]
            text = '\n\n'.join(paragraphs[1:])
        else:  # pragma: no cover
            title = 'Help'
            text = paragraphs[0]
        sys.stdout._write(HELP_HTML % {'title': title, 'text': text})


# Singleton exposed in the debugger console namespace.
helper = _Helper()
def _add_subclass_info(inner, obj, base):
if isinstance(base, tuple):
for base in base:
if type(obj) is base:
return inner
elif type(obj) is base:
return inner
module = ''
if obj.__class__.__module__ not in ('__builtin__', 'exceptions'):
module = '<span class="module">%s.</span>' % obj.__class__.__module__
return '%s%s(%s)' % (module, obj.__class__.__name__, inner)
class DebugReprGenerator(object):
    # Builds the HTML debug representations.  One instance is used per
    # top-level repr() call so recursive structures can be detected via
    # the object stack below.

    def __init__(self):
        # Stack of objects currently being repr'd; used for cycle detection.
        self._stack = []

    def _sequence_repr_maker(left, right, base=object(), limit=8):
        # Factory executed at class-definition time: produces a repr method
        # for one sequence type, bracketed by `left`/`right` and truncated
        # after `limit` items.
        def proxy(self, obj, recursive):
            if recursive:
                return _add_subclass_info(left + '...' + right, obj, base)
            buf = [left]
            have_extended_section = False
            for idx, item in enumerate(obj):
                if idx:
                    buf.append(', ')
                if idx == limit:
                    # Items beyond `limit` go into an expandable span.
                    buf.append('<span class="extended">')
                    have_extended_section = True
                buf.append(self.repr(item))
            if have_extended_section:
                buf.append('</span>')
            buf.append(right)
            return _add_subclass_info(u''.join(buf), obj, base)
        return proxy

    list_repr = _sequence_repr_maker('[', ']', list)
    tuple_repr = _sequence_repr_maker('(', ')', tuple)
    set_repr = _sequence_repr_maker('set([', '])', set)
    frozenset_repr = _sequence_repr_maker('frozenset([', '])', frozenset)
    if deque is not None:
        deque_repr = _sequence_repr_maker('<span class="module">collections.'
                                          '</span>deque([', '])', deque)
    del _sequence_repr_maker

    def regex_repr(self, obj):
        # Render a compiled regex as re.compile(r'...'), un-escaping the
        # pattern for readability (u/ur prefix handling for Python 2).
        pattern = repr(obj.pattern)
        if PY2:
            pattern = pattern.decode('string-escape', 'ignore')
        else:
            pattern = codecs.decode(pattern, 'unicode-escape', 'ignore')
        if pattern[:1] == 'u':
            pattern = 'ur' + pattern[1:]
        else:
            pattern = 'r' + pattern
        return u're.compile(<span class="string regex">%s</span>)' % pattern

    def string_repr(self, obj, limit=70):
        buf = ['<span class="string">']
        a = repr(obj[:limit])
        b = repr(obj[limit:])
        if isinstance(obj, text_type) and PY2:
            # Hoist the `u` prefix out front so the two halves join cleanly.
            buf.append('u')
            a = a[1:]
            b = b[1:]
        if b != "''":
            # Overflow beyond `limit` goes into the expandable section.
            buf.extend((escape(a[:-1]), '<span class="extended">', escape(b[1:]), '</span>'))
        else:
            buf.append(escape(a))
        buf.append('</span>')
        return _add_subclass_info(u''.join(buf), obj, (bytes, text_type))

    def dict_repr(self, d, recursive, limit=5):
        if recursive:
            return _add_subclass_info(u'{...}', d, dict)
        buf = ['{']
        have_extended_section = False
        for idx, (key, value) in enumerate(iteritems(d)):
            if idx:
                buf.append(', ')
            if idx == limit - 1:
                buf.append('<span class="extended">')
                have_extended_section = True
            buf.append('<span class="pair"><span class="key">%s</span>: '
                       '<span class="value">%s</span></span>' %
                       (self.repr(key), self.repr(value)))
        if have_extended_section:
            buf.append('</span>')
        buf.append('}')
        return _add_subclass_info(u''.join(buf), d, dict)

    def object_repr(self, obj):
        # Fallback: escaped plain repr() for types without special handling.
        r = repr(obj)
        if PY2:
            r = r.decode('utf-8', 'replace')
        return u'<span class="object">%s</span>' % escape(r)

    def dispatch_repr(self, obj, recursive):
        # Type-based dispatch to the specialised repr methods above.
        if obj is helper:
            return u'<span class="help">%r</span>' % helper
        if isinstance(obj, (integer_types, float, complex)):
            return u'<span class="number">%r</span>' % obj
        if isinstance(obj, string_types):
            return self.string_repr(obj)
        if isinstance(obj, RegexType):
            return self.regex_repr(obj)
        if isinstance(obj, list):
            return self.list_repr(obj, recursive)
        if isinstance(obj, tuple):
            return self.tuple_repr(obj, recursive)
        if isinstance(obj, set):
            return self.set_repr(obj, recursive)
        if isinstance(obj, frozenset):
            return self.frozenset_repr(obj, recursive)
        if isinstance(obj, dict):
            return self.dict_repr(obj, recursive)
        if deque is not None and isinstance(obj, deque):
            return self.deque_repr(obj, recursive)
        return self.object_repr(obj)

    def fallback_repr(self):
        # Used when repr(obj) itself raised; show the exception instead.
        try:
            info = ''.join(format_exception_only(*sys.exc_info()[:2]))
        except Exception:  # pragma: no cover
            info = '?'
        if PY2:
            info = info.decode('utf-8', 'ignore')
        return u'<span class="brokenrepr"><broken repr (%s)>' \
               u'</span>' % escape(info.strip())

    def repr(self, obj):
        # Public entry point: cycle-safe and exception-safe.
        recursive = False
        for item in self._stack:
            if item is obj:
                recursive = True
                break
        self._stack.append(obj)
        try:
            try:
                return self.dispatch_repr(obj, recursive)
            except Exception:
                return self.fallback_repr()
        finally:
            self._stack.pop()

    def dump_object(self, obj):
        repr = items = None
        if isinstance(obj, dict):
            title = 'Contents of'
            items = []
            for key, value in iteritems(obj):
                if not isinstance(key, string_types):
                    # Non-string keys: fall back to attribute dumping below.
                    items = None
                    break
                items.append((key, self.repr(value)))
        if items is None:
            items = []
            repr = self.repr(obj)
            for key in dir(obj):
                try:
                    items.append((key, self.repr(getattr(obj, key))))
                except Exception:
                    pass
            title = 'Details for'
        title += ' ' + object.__repr__(obj)[1:-1]
        return self.render_object_dump(items, title, repr)

    def dump_locals(self, d):
        items = [(key, self.repr(value)) for key, value in d.items()]
        return self.render_object_dump(items, 'Local variables in frame')

    def render_object_dump(self, items, title, repr=None):
        html_items = []
        for key, value in items:
            html_items.append('<tr><th>%s<td><pre class=repr>%s</pre>' %
                              (escape(key), value))
        if not html_items:
            html_items.append('<tr><td><em>Nothing</em>')
        return OBJECT_DUMP_HTML % {
            'title': escape(title),
            'repr': repr and '<pre class=repr>%s</pre>' % repr or '',
            'items': '\n'.join(html_items)
        }
| mit |
apporc/neutron | neutron/tests/tempest/services/botoclients.py | 38 | 9286 | # Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from six.moves import configparser as ConfigParser
import contextlib
from tempest_lib import exceptions as lib_exc
import types
import urlparse
from neutron.tests.tempest import config
import boto
import boto.ec2
import boto.s3.connection
CONF = config.CONF
class BotoClientBase(object):
    """Shared plumbing for boto-based API clients: configures boto timeouts
    and CA certificates, and lazily proxies whitelisted connection methods.
    """

    # Names that __getattr__ is allowed to turn into connection-backed methods.
    ALLOWED_METHODS = set()

    def __init__(self, identity_client):
        self.identity_client = identity_client
        self.ca_cert = CONF.identity.ca_certificates_file
        self.connection_timeout = str(CONF.boto.http_socket_timeout)
        self.num_retries = str(CONF.boto.num_retries)
        self.build_timeout = CONF.boto.build_timeout
        # Keyword arguments accumulated for connect_method (set by subclasses).
        self.connection_data = {}

    def _config_boto_timeout(self, timeout, retries):
        # boto reads these settings from its process-global config object.
        try:
            boto.config.add_section("Boto")
        except ConfigParser.DuplicateSectionError:
            pass
        boto.config.set("Boto", "http_socket_timeout", timeout)
        boto.config.set("Boto", "num_retries", retries)

    def _config_boto_ca_certificates_file(self, ca_cert):
        if ca_cert is None:
            return
        try:
            boto.config.add_section("Boto")
        except ConfigParser.DuplicateSectionError:
            pass
        boto.config.set("Boto", "ca_certificates_file", ca_cert)

    def __getattr__(self, name):
        """Automatically creates methods for the allowed methods set."""
        if name in self.ALLOWED_METHODS:
            def func(self, *args, **kwargs):
                # Open a fresh connection per call and close it afterwards.
                with contextlib.closing(self.get_connection()) as conn:
                    return getattr(conn, name)(*args, **kwargs)
            func.__name__ = name
            # Cache the generated method on both the instance and the class
            # (Python 2 types.MethodType signature), so __getattr__ is only
            # hit once per name.
            setattr(self, name, types.MethodType(func, self, self.__class__))
            setattr(self.__class__, name,
                    types.MethodType(func, None, self.__class__))
            return getattr(self, name)
        else:
            raise AttributeError(name)

    def get_connection(self):
        # Push timeout/CA settings into boto, then connect with either the
        # statically configured AWS credentials or ones derived from keystone.
        self._config_boto_timeout(self.connection_timeout, self.num_retries)
        self._config_boto_ca_certificates_file(self.ca_cert)
        ec2_client_args = {'aws_access_key_id': CONF.boto.aws_access,
                           'aws_secret_access_key': CONF.boto.aws_secret}
        if not all(ec2_client_args.values()):
            ec2_client_args = self.get_aws_credentials(self.identity_client)
        self.connection_data.update(ec2_client_args)
        return self.connect_method(**self.connection_data)

    def get_aws_credentials(self, identity_client):
        """
        Obtain existing, or create new AWS credentials
        :param identity_client: identity client with embedded credentials
        :return: EC2 credentials
        """
        ec2_cred_list = identity_client.list_user_ec2_credentials(
            identity_client.user_id)
        # Reuse the first credential that belongs to this tenant, if any.
        for cred in ec2_cred_list:
            if cred['tenant_id'] == identity_client.tenant_id:
                ec2_cred = cred
                break
        else:
            # No credentials for this tenant yet: create a fresh pair.
            ec2_cred = identity_client.create_user_ec2_credentials(
                identity_client.user_id, identity_client.tenant_id)
        if not all((ec2_cred, ec2_cred['access'], ec2_cred['secret'])):
            raise lib_exc.NotFound("Unable to get access and secret keys")
        else:
            ec2_cred_aws = {}
            ec2_cred_aws['aws_access_key_id'] = ec2_cred['access']
            ec2_cred_aws['aws_secret_access_key'] = ec2_cred['secret']
            return ec2_cred_aws
class APIClientEC2(BotoClientBase):
    """Boto-backed EC2 API client pointed at the configured ec2_url."""
    def connect_method(self, *args, **kwargs):
        """Open a boto EC2 connection with the prepared connection data."""
        return boto.connect_ec2(*args, **kwargs)
    def __init__(self, identity_client):
        super(APIClientEC2, self).__init__(identity_client)
        insecure_ssl = CONF.identity.disable_ssl_certificate_validation
        purl = urlparse.urlparse(CONF.boto.ec2_url)
        region_name = CONF.compute.region
        if not region_name:
            region_name = CONF.identity.region
        region = boto.ec2.regioninfo.RegionInfo(name=region_name,
                                                endpoint=purl.hostname)
        port = purl.port
        if port is None:
            # Bug fix: the original used "is not", an identity (not
            # equality) comparison, so an https URL could wrongly get
            # the default http port 80.
            if purl.scheme != "https":
                port = 80
            else:
                port = 443
        else:
            port = int(port)
        self.connection_data.update({"is_secure": purl.scheme == "https",
                                     "validate_certs": not insecure_ssl,
                                     "region": region,
                                     "host": purl.hostname,
                                     "port": port,
                                     "path": purl.path})
    # EC2 operations exposed through BotoClientBase.__getattr__
    ALLOWED_METHODS = set(('create_key_pair', 'get_key_pair',
                           'delete_key_pair', 'import_key_pair',
                           'get_all_key_pairs',
                           'get_all_tags',
                           'create_image', 'get_image',
                           'register_image', 'deregister_image',
                           'get_all_images', 'get_image_attribute',
                           'modify_image_attribute', 'reset_image_attribute',
                           'get_all_kernels',
                           'create_volume', 'delete_volume',
                           'get_all_volume_status', 'get_all_volumes',
                           # Bug fix: a missing trailing comma previously
                           # merged 'modify_volume_attribute' and
                           # 'bundle_instance' into one bogus name through
                           # implicit string concatenation.
                           'get_volume_attribute', 'modify_volume_attribute',
                           'bundle_instance', 'cancel_spot_instance_requests',
                           # Bug fix: typo 'confirm_product_instanc' did not
                           # match boto's real method name.
                           'confirm_product_instance',
                           'get_all_instance_status', 'get_all_instances',
                           'get_all_reserved_instances',
                           'get_all_spot_instance_requests',
                           'get_instance_attribute', 'monitor_instance',
                           'monitor_instances', 'unmonitor_instance',
                           'unmonitor_instances',
                           'purchase_reserved_instance_offering',
                           'reboot_instances', 'request_spot_instances',
                           'reset_instance_attribute', 'run_instances',
                           'start_instances', 'stop_instances',
                           'terminate_instances',
                           'attach_network_interface', 'attach_volume',
                           'detach_network_interface', 'detach_volume',
                           'get_console_output',
                           'delete_network_interface', 'create_subnet',
                           'create_network_interface', 'delete_subnet',
                           'get_all_network_interfaces',
                           'allocate_address', 'associate_address',
                           'disassociate_address', 'get_all_addresses',
                           'release_address',
                           'create_snapshot', 'delete_snapshot',
                           'get_all_snapshots', 'get_snapshot_attribute',
                           'modify_snapshot_attribute',
                           'reset_snapshot_attribute', 'trim_snapshots',
                           'get_all_regions', 'get_all_zones',
                           'get_all_security_groups', 'create_security_group',
                           'delete_security_group', 'authorize_security_group',
                           'authorize_security_group_egress',
                           'revoke_security_group',
                           'revoke_security_group_egress'))
class ObjectClientS3(BotoClientBase):
    """Boto-backed S3 object storage client pointed at the configured s3_url."""
    def connect_method(self, *args, **kwargs):
        """Open a boto S3 connection with the prepared connection data."""
        return boto.connect_s3(*args, **kwargs)
    def __init__(self, identity_client):
        super(ObjectClientS3, self).__init__(identity_client)
        insecure_ssl = CONF.identity.disable_ssl_certificate_validation
        purl = urlparse.urlparse(CONF.boto.s3_url)
        port = purl.port
        if port is None:
            # Bug fix: the original used "is not", an identity (not
            # equality) comparison, so an https URL could wrongly get
            # the default http port 80.
            if purl.scheme != "https":
                port = 80
            else:
                port = 443
        else:
            port = int(port)
        self.connection_data.update({"is_secure": purl.scheme == "https",
                                     "validate_certs": not insecure_ssl,
                                     "host": purl.hostname,
                                     "port": port,
                                     "calling_format": boto.s3.connection.
                                     OrdinaryCallingFormat()})
    # S3 operations exposed through BotoClientBase.__getattr__
    ALLOWED_METHODS = set(('create_bucket', 'delete_bucket', 'generate_url',
                           'get_all_buckets', 'get_bucket', 'delete_key',
                           'lookup'))
| apache-2.0 |
hsteinhaus/ardupilot | mk/VRBRAIN/Tools/genmsg/src/genmsg/msgs.py | 51 | 12602 | # Software License Agreement (BSD License)
#
# Copyright (c) 2008, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import print_function
"""
ROS msg library for Python
Implements: U{http://ros.org/wiki/msg}
"""
import os
import sys
from . base import InvalidMsgSpec, EXT_MSG, MSG_DIR, SEP, log
from . names import is_legal_resource_name, is_legal_resource_base_name, package_resource_name, resource_name
#TODOXXX: unit test
def bare_msg_type(msg_type):
    """
    Strip any array suffix from a ROS msg type name.

    :param msg_type: ROS msg type (e.g. 'std_msgs/String'), ``str``
    :returns: base type, ``str``
    """
    if msg_type is None:
        return None
    bracket = msg_type.find('[')
    return msg_type if bracket == -1 else msg_type[:bracket]
def resolve_type(msg_type, package_context):
    """
    Fully qualify *msg_type* relative to *package_context*.

    NOTE: in ROS Diamondback, 'Header' resolves to
    'std_msgs/Header'. In previous releases, it resolves to
    'roslib/Header' (REP 100).

    e.g.::
      resolve_type('String', 'std_msgs') -> 'std_msgs/String'
      resolve_type('String[]', 'std_msgs') -> 'std_msgs/String[]'
      resolve_type('std_msgs/String', 'foo') -> 'std_msgs/String'
      resolve_type('uint16', 'std_msgs') -> 'uint16'
      resolve_type('uint16[]', 'std_msgs') -> 'uint16[]'
    """
    base = bare_msg_type(msg_type)
    if base == HEADER:
        # special-case: bare 'Header' maps to the std_msgs type
        return HEADER_FULL_NAME
    if base in BUILTIN_TYPES or SEP in msg_type:
        # builtins and already-qualified names pass through unchanged
        return msg_type
    return SEP.join((package_context, msg_type))
#NOTE: this assumes that we aren't going to support multi-dimensional
def parse_type(msg_type):
    """
    Break a ROS message field type into its components.

    :param msg_type: ROS field type, ``str``
    :returns: base_type, is_array, array_length, ``(str, bool, int)``
    :raises: :exc:`ValueError` If *msg_type* cannot be parsed
    """
    if not msg_type:
        raise ValueError("Invalid empty type")
    if '[' not in msg_type:
        # plain (non-array) type
        return msg_type, False, None
    parts = msg_type.split('[')
    if len(parts) > 2:
        raise ValueError("Currently only support 1-dimensional array types: %s"%msg_type)
    if msg_type.endswith('[]'):
        # variable-length array
        return msg_type[:-2], True, None
    dim = parts[1][:-1]
    try:
        return parts[0], True, int(dim)
    except ValueError:
        raise ValueError("Invalid array dimension: [%s]"%dim)
################################################################################
# name validation
def is_valid_msg_type(x):
    """
    :returns: True if the name is a syntactically legal message type name, ``bool``
    """
    if not x or len(x) != len(x.strip()):
        return False
    base = bare_msg_type(x)
    if not is_legal_resource_name(base):
        return False
    # parse array indices: everything after the base type must be a
    # sequence of bracket pairs, optionally containing digits, e.g.
    # '[]' or '[12]'
    x = x[len(base):]
    state = 0
    for c in x:
        if state == 0:
            if c != '[':
                return False
            state = 1  # inside brackets
        elif state == 1:
            if c == ']':
                state = 0  # closed
            else:
                # Bug fix: narrowed the bare 'except:' to ValueError;
                # int() on a single non-digit character raises ValueError.
                # Also dropped the unused local 'i'.
                try:
                    int(c)
                except ValueError:
                    return False
    # every opened bracket must have been closed
    return state == 0
def is_valid_constant_type(x):
    """
    :returns: ``True`` if the name is a legal constant type. Only simple types are allowed, ``bool``
    """
    # constants may only use primitive (non-aggregate) types
    return x in PRIMITIVE_TYPES
def is_valid_msg_field_name(x):
    """
    :returns: ``True`` if the name is a syntactically legal message field name, ``bool``
    """
    # field names follow the same rules as base resource names
    return is_legal_resource_base_name(x)
# msg spec representation ##########################################
class Constant(object):
    """
    Container class for holding a Constant declaration

    Attributes:
      - ``type``
      - ``name``
      - ``val``
      - ``val_text``
    """
    __slots__ = ['type', 'name', 'val', 'val_text']
    def __init__(self, type_, name, val, val_text):
        """
        :param type_: constant type, ``str``
        :param name: constant name, ``str``
        :param val: constant value, ``str``
        :param val_text: Original text definition of *val*, ``str``
        :raises: :exc:`ValueError` If any parameter is ``None``
        """
        # Bug fix: the original tested the builtin 'type' (which is never
        # None) instead of the 'type_' argument, so a None type slipped
        # through validation.
        if type_ is None or name is None or val is None or val_text is None:
            raise ValueError('Constant must have non-None parameters')
        self.type = type_
        self.name = name.strip()  # names are always stripped of whitespace
        self.val = val
        self.val_text = val_text
    def __eq__(self, other):
        # NOTE: val_text (the raw source text) is intentionally excluded
        # from equality
        if not isinstance(other, Constant):
            return False
        return self.type == other.type and self.name == other.name and self.val == other.val
    def __repr__(self):
        return "%s %s=%s"%(self.type, self.name, self.val)
    def __str__(self):
        return "%s %s=%s"%(self.type, self.name, self.val)
class Field(object):
    """
    Container class for storing information about a single field in a MsgSpec

    Attributes:
      - ``name``
      - ``type``
      - ``base_type``
      - ``is_array``
      - ``array_len``
      - ``is_builtin``
      - ``is_header``
    """
    def __init__(self, name, type):
        """
        :param name: field name, ``str``
        :param type: field type as declared in the .msg file, possibly an
          array type (e.g. 'int32', 'std_msgs/String[]'), ``str``
        :raises: :exc:`ValueError` If *type* cannot be parsed
        """
        # NOTE: the parameter is named 'type' (shadowing the builtin) and is
        # kept that way for backward compatibility with keyword callers.
        self.name = name
        self.type = type
        # derived attributes computed once from the declared type
        (self.base_type, self.is_array, self.array_len) = parse_type(type)
        self.is_header = is_header_type(self.type)
        self.is_builtin = is_builtin(self.base_type)
    def __eq__(self, other):
        # equality considers only the declared name and type; the other
        # attributes are derived from 'type' and need no comparison
        if not isinstance(other, Field):
            return False
        else:
            return self.name == other.name and \
                   self.type == other.type
    def __repr__(self):
        return "[%s, %s, %s, %s, %s]"%(self.name, self.type, self.base_type, self.is_array, self.array_len)
class MsgSpec(object):
    """
    Container class for storing loaded msg description files. Field
    types and names are stored in separate lists with 1-to-1
    correspondence.
    """
    def __init__(self, types, names, constants, text, full_name, package = '', short_name = ''):
        """
        :param types: list of field types, in order of declaration, ``[str]]``
        :param names: list of field names, in order of declaration, ``[str]]``
        :param constants: List of :class:`Constant` declarations, ``[Constant]``
        :param text: text of declaration, ``str`
        :param full_name: fully qualified message name ('package/Name'), ``str``
        :param package: package name; derived from *full_name* when empty, ``str``
        :param short_name: unqualified message name; derived from *full_name* when empty, ``str``
        :raises: :exc:`InvalidMsgSpec` If spec is invalid (e.g. fields with the same name)
        """
        # derive package/short_name from full_name unless explicitly given
        alt_package, alt_short_name = package_resource_name(full_name)
        if not package:
            package = alt_package
        if not short_name:
            short_name = alt_short_name
        self.types = types
        if len(set(names)) != len(names):
            raise InvalidMsgSpec("Duplicate field names in message: %s"%names)
        self.names = names
        self.constants = constants
        assert len(self.types) == len(self.names), "len(%s) != len(%s)"%(self.types, self.names)
        #Header.msg support: flag messages whose first field is the std Header
        if (len(self.types)):
            self.header_present = self.types[0] == HEADER_FULL_NAME and self.names[0] == 'header'
        else:
            self.header_present = False
        self.text = text
        self.full_name = full_name
        self.short_name = short_name
        self.package = package
        # eagerly parse all fields so malformed types fail at load time
        try:
            self._parsed_fields = [Field(name, type) for (name, type) in zip(self.names, self.types)]
        except ValueError as e:
            raise InvalidMsgSpec("invalid field: %s"%(e))
    def fields(self):
        """
        :returns: zip list of types and names (e.g. [('int32', 'x'), ('int32', 'y')], ``[(str,str),]``
        """
        return list(zip(self.types, self.names)) #py3k
    def parsed_fields(self):
        """
        :returns: list of :class:`Field` classes, ``[Field,]``
        """
        return self._parsed_fields
    def has_header(self):
        """
        :returns: ``True`` if msg decription contains a 'Header header'
        declaration at the beginning, ``bool``
        """
        return self.header_present
    def __eq__(self, other):
        if not other or not isinstance(other, MsgSpec):
            return False
        return self.types == other.types and self.names == other.names and \
               self.constants == other.constants and self.text == other.text and \
               self.full_name == other.full_name and self.short_name == other.short_name and \
               self.package == other.package
    def __ne__(self, other):
        if not other or not isinstance(other, MsgSpec):
            return True
        return not self.__eq__(other)
    def __repr__(self):
        if self.constants:
            return "MsgSpec[%s, %s, %s]"%(repr(self.constants), repr(self.types), repr(self.names))
        else:
            return "MsgSpec[%s, %s]"%(repr(self.types), repr(self.names))
    def __str__(self):
        # the original .msg source text
        return self.text
# .msg file routines ##############################################################
# adjustable constants, in case we change our minds
HEADER = 'Header'
TIME = 'time'
DURATION = 'duration'
# resolved name of the Header type (std_msgs since ROS Diamondback)
HEADER_FULL_NAME = 'std_msgs/Header'
def is_header_type(msg_type):
    """
    Check whether *msg_type* names the ROS Header message.

    :param msg_type: message type name, ``str``
    :returns: ``True`` if *msg_type* refers to the ROS Header type, ``bool``
    """
    # 'roslib/Header' is accepted for backwards compatibility (REP 100)
    return msg_type in (HEADER, HEADER_FULL_NAME, 'roslib/Header')
# time and duration types are represented as aggregate data structures
# for the purposes of serialization from the perspective of
# roslib.msgs. genmsg_py does additional special handling to convert
# them into rospy.msg.Time/Duration instances.
## time as msg spec. time is unsigned
TIME_MSG = "uint32 secs\nuint32 nsecs"
## duration as msg spec. duration is just like time except signed
DURATION_MSG = "int32 secs\nint32 nsecs"
## primitive types are those for which we allow constants, i.e. have primitive representation
PRIMITIVE_TYPES = ['int8','uint8','int16','uint16','int32','uint32','int64','uint64','float32','float64',
                   'string',
                   'bool',
                   # deprecated:
                   'char','byte']
## builtin types are the primitives plus the time/duration aggregates
BUILTIN_TYPES = PRIMITIVE_TYPES + [TIME, DURATION]
def is_builtin(msg_type_name):
    """
    :param msg_type_name: name of message type, ``str``
    :returns: True if msg_type_name is a builtin/primitive type, ``bool``
    """
    # builtins are the primitives plus the time/duration aggregates
    return msg_type_name in BUILTIN_TYPES
| gpl-3.0 |
e-gob/plataforma-kioscos-autoatencion | scripts/ansible-play/.venv/lib/python2.7/site-packages/ansible/modules/network/nxos/nxos_bgp_neighbor_af.py | 22 | 26540 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# standard Ansible module metadata consumed by ansible-doc/sanity tooling
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'network'}
DOCUMENTATION = '''
---
module: nxos_bgp_neighbor_af
extends_documentation_fragment: nxos
version_added: "2.2"
short_description: Manages BGP address-family's neighbors configuration.
description:
- Manages BGP address-family's neighbors configurations on NX-OS switches.
author: Gabriele Gerbino (@GGabriele)
notes:
- Tested against NXOSv 7.3.(0)D1(1) on VIRL
- C(state=absent) removes the whole BGP address-family's
neighbor configuration.
- Default, when supported, removes properties
- In order to default maximum-prefix configuration, only
C(max_prefix_limit=default) is needed.
options:
asn:
description:
- BGP autonomous system number. Valid values are String,
Integer in ASPLAIN or ASDOT notation.
required: true
vrf:
description:
- Name of the VRF. The name 'default' is a valid VRF representing
the global bgp.
required: false
default: default
neighbor:
description:
- Neighbor Identifier. Valid values are string. Neighbors may use
IPv4 or IPv6 notation, with or without prefix length.
required: true
afi:
description:
- Address Family Identifier.
required: true
choices: ['ipv4','ipv6', 'vpnv4', 'vpnv6', 'l2vpn']
safi:
description:
- Sub Address Family Identifier.
required: true
choices: ['unicast','multicast', 'evpn']
additional_paths_receive:
description:
- Valid values are enable for basic command enablement; disable
for disabling the command at the neighbor af level
(it adds the disable keyword to the basic command); and inherit
to remove the command at this level (the command value is
inherited from a higher BGP layer).
required: false
choices: ['enable','disable', 'inherit']
default: null
additional_paths_send:
description:
- Valid values are enable for basic command enablement; disable
for disabling the command at the neighbor af level
(it adds the disable keyword to the basic command); and inherit
to remove the command at this level (the command value is
inherited from a higher BGP layer).
required: false
choices: ['enable','disable', 'inherit']
default: null
advertise_map_exist:
description:
- Conditional route advertisement. This property requires two
route maps, an advertise-map and an exist-map. Valid values are
an array specifying both the advertise-map name and the exist-map
name, or simply 'default' e.g. ['my_advertise_map',
'my_exist_map']. This command is mutually exclusive with the
advertise_map_non_exist property.
required: false
default: null
advertise_map_non_exist:
description:
- Conditional route advertisement. This property requires two
route maps, an advertise-map and an exist-map. Valid values are
an array specifying both the advertise-map name and the
non-exist-map name, or simply 'default' e.g.
['my_advertise_map', 'my_non_exist_map']. This command is mutually
exclusive with the advertise_map_exist property.
required: false
default: null
allowas_in:
description:
- Activate allowas-in property
required: false
default: null
allowas_in_max:
description:
- Optional max-occurrences value for allowas_in. Valid values are
an integer value or 'default'. Can be used independently or in
conjunction with allowas_in.
required: false
default: null
as_override:
description:
- Activate the as-override feature.
required: false
choices: ['true', 'false']
default: null
default_originate:
description:
- Activate the default-originate feature.
required: false
choices: ['true', 'false']
default: null
default_originate_route_map:
description:
- Optional route-map for the default_originate property. Can be
used independently or in conjunction with C(default_originate).
Valid values are a string defining a route-map name,
or 'default'.
required: false
default: null
filter_list_in:
description:
- Valid values are a string defining a filter-list name,
or 'default'.
required: false
default: null
filter_list_out:
description:
- Valid values are a string defining a filter-list name,
or 'default'.
required: false
default: null
max_prefix_limit:
description:
- maximum-prefix limit value. Valid values are an integer value
or 'default'.
required: false
default: null
max_prefix_interval:
description:
- Optional restart interval. Valid values are an integer.
Requires max_prefix_limit. May not be combined with max_prefix_warning.
required: false
default: null
max_prefix_threshold:
description:
- Optional threshold percentage at which to generate a warning.
Valid values are an integer value.
Requires max_prefix_limit.
required: false
default: null
max_prefix_warning:
description:
- Optional warning-only keyword. Requires max_prefix_limit. May not be
combined with max_prefix_interval.
required: false
choices: ['true','false']
default: null
next_hop_self:
description:
- Activate the next-hop-self feature.
required: false
choices: ['true','false']
default: null
next_hop_third_party:
description:
- Activate the next-hop-third-party feature.
required: false
choices: ['true','false']
default: null
prefix_list_in:
description:
- Valid values are a string defining a prefix-list name,
or 'default'.
required: false
default: null
prefix_list_out:
description:
- Valid values are a string defining a prefix-list name,
or 'default'.
required: false
default: null
route_map_in:
description:
- Valid values are a string defining a route-map name,
or 'default'.
required: false
default: null
route_map_out:
description:
- Valid values are a string defining a route-map name,
or 'default'.
required: false
default: null
route_reflector_client:
description:
- Router reflector client.
required: false
choices: ['true','false']
default: null
send_community:
description:
- send-community attribute.
required: false
choices: ['none', 'both', 'extended', 'standard', 'default']
default: null
soft_reconfiguration_in:
description:
- Valid values are 'enable' for basic command enablement; 'always'
to add the always keyword to the basic command; and 'inherit' to
remove the command at this level (the command value is inherited
from a higher BGP layer).
required: false
choices: ['enable','always','inherit']
default: null
soo:
description:
- Site-of-origin. Valid values are a string defining a VPN
extcommunity or 'default'.
required: false
default: null
suppress_inactive:
description:
- suppress-inactive feature.
required: false
choices: ['true','false','default']
default: null
unsuppress_map:
description:
- unsuppress-map. Valid values are a string defining a route-map
name or 'default'.
required: false
default: null
weight:
description:
- Weight value. Valid values are an integer value or 'default'.
required: false
default: null
state:
description:
- Determines whether the config should be present or not
on the device.
required: false
default: present
choices: ['present','absent']
'''
EXAMPLES = '''
- name: configure RR client
nxos_bgp_neighbor_af:
asn: 65535
neighbor: '3.3.3.3'
afi: ipv4
safi: unicast
route_reflector_client: true
state: present
'''
RETURN = '''
commands:
description: commands sent to the device
returned: always
type: list
sample: ["router bgp 65535", "neighbor 3.3.3.3",
"address-family ipv4 unicast", "route-reflector-client"]
'''
import re
from ansible.module_utils.nxos import get_config, load_config
from ansible.module_utils.nxos import nxos_argument_spec, check_args
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.netcfg import CustomNetworkConfig
# module parameters that render as plain on/off CLI flags
BOOL_PARAMS = [
    'allowas_in',
    'as_override',
    'default_originate',
    'next_hop_self',
    'next_hop_third_party',
    'route_reflector_client',
    'suppress_inactive'
]
# maps each module parameter to the NX-OS CLI command text it renders as
PARAM_TO_COMMAND_KEYMAP = {
    'afi': 'address-family',
    'asn': 'router bgp',
    'neighbor': 'neighbor',
    'additional_paths_receive': 'capability additional-paths receive',
    'additional_paths_send': 'capability additional-paths send',
    'advertise_map_exist': 'advertise-map exist-map',
    'advertise_map_non_exist': 'advertise-map non-exist-map',
    'allowas_in': 'allowas-in',
    'allowas_in_max': 'allowas-in',
    'as_override': 'as-override',
    'default_originate': 'default-originate',
    'default_originate_route_map': 'default-originate route-map',
    'filter_list_in': 'filter-list in',
    'filter_list_out': 'filter-list out',
    'max_prefix_limit': 'maximum-prefix',
    'max_prefix_interval': 'maximum-prefix interval',
    'max_prefix_threshold': 'maximum-prefix threshold',
    'max_prefix_warning': 'maximum-prefix warning',
    'next_hop_self': 'next-hop-self',
    'next_hop_third_party': 'next-hop-third-party',
    'prefix_list_in': 'prefix-list in',
    'prefix_list_out': 'prefix-list out',
    'route_map_in': 'route-map in',
    'route_map_out': 'route-map out',
    'route_reflector_client': 'route-reflector-client',
    'safi': 'address-family',
    'send_community': 'send-community',
    'soft_reconfiguration_in': 'soft-reconfiguration inbound',
    'soo': 'soo',
    'suppress_inactive': 'suppress-inactive',
    'unsuppress_map': 'unsuppress-map',
    'weight': 'weight',
    'vrf': 'vrf'
}
def get_value(arg, config, module):
    """Extract the current value of module parameter *arg* from the
    neighbor address-family section *config* of the running configuration.

    :param arg: module parameter name (key into PARAM_TO_COMMAND_KEYMAP)
    :param config: config text for the neighbor address-family section
    :param module: AnsibleModule instance
    :returns: parsed value; '' when the command is absent
    """
    # parameters whose CLI rendering needs special, multi-token parsing
    custom = [
        'additional_paths_send',
        'additional_paths_receive',
        'max_prefix_limit',
        'max_prefix_interval',
        'max_prefix_threshold',
        'max_prefix_warning',
        'soft_reconfiguration_in'
    ]
    command = PARAM_TO_COMMAND_KEYMAP[arg]
    has_command = re.search(r'^\s+{0}\s*'.format(command), config, re.M)
    has_command_val = re.search(r'(?:{0}\s)(?P<value>.*)$'.format(command), config, re.M)
    value = ''
    if arg in custom:
        value = get_custom_value(arg, config, module)
    elif arg == 'next_hop_third_party':
        # enabled unless an explicit 'no next-hop-third-party' is present
        has_no_command = re.search(r'^\s+no\s+{0}\s*$'.format(command), config, re.M)
        value = False
        if not has_no_command:
            value = True
    elif arg in BOOL_PARAMS:
        # boolean flags: presence of the command means True
        value = False
        if has_command:
            value = True
    elif command.startswith('advertise-map'):
        # value is a [map_name, exist/non-exist_map_name] pair
        value = []
        has_adv_map = re.search(r'{0}\s(?P<value1>.*)\s{1}\s(?P<value2>.*)$'.format(*command.split()), config, re.M)
        if has_adv_map:
            value = list(has_adv_map.groups())
    elif command.split()[0] in ['filter-list', 'prefix-list', 'route-map']:
        # command form is '<cmd> <name> <direction>': capture the name
        has_cmd_direction_val = re.search(r'{0}\s(?P<value>.*)\s{1}$'.format(*command.split()), config, re.M)
        if has_cmd_direction_val:
            value = has_cmd_direction_val.group('value')
    elif arg == 'send_community':
        # bare 'send-community' means 'none'; otherwise use its argument
        if has_command:
            value = 'none'
            if has_command_val:
                value = has_command_val.group('value')
    elif has_command_val:
        # generic '<command> <value>' form
        value = has_command_val.group('value')
    return value
def get_custom_value(arg, config, module):
    """Extract values for parameters whose CLI rendering needs special
    line-by-line parsing (additional-paths, maximum-prefix and
    soft-reconfiguration families).

    :param arg: module parameter name (key into PARAM_TO_COMMAND_KEYMAP)
    :param config: config text for the neighbor address-family section
    :param module: AnsibleModule instance (unused; kept for signature parity
      with get_value)
    :returns: parsed value; '' when the command is absent
    """
    # Cleanup: removed four dead locals (command_re, has_command,
    # command_val_re, has_command_val) that were computed but never used.
    command = PARAM_TO_COMMAND_KEYMAP.get(arg)
    splitted_config = config.splitlines()
    value = ''
    if arg.startswith('additional_paths'):
        # tri-state: inherit (command absent) / disable / enable
        value = 'inherit'
        for line in splitted_config:
            if command in line:
                if 'disable' in line:
                    value = 'disable'
                else:
                    value = 'enable'
    elif arg.startswith('max_prefix'):
        # one 'maximum-prefix' line folds limit/threshold/interval/warning
        for line in splitted_config:
            if 'maximum-prefix' in line:
                splitted_line = line.split()
                if arg == 'max_prefix_limit':
                    value = splitted_line[1]
                elif arg == 'max_prefix_interval' and 'restart' in line:
                    value = splitted_line[-1]
                elif arg == 'max_prefix_threshold' and len(splitted_line) > 2:
                    # the third token is a threshold only when numeric
                    try:
                        int(splitted_line[2])
                        value = splitted_line[2]
                    except ValueError:
                        value = ''
                elif arg == 'max_prefix_warning':
                    value = 'warning-only' in line
    elif arg == 'soft_reconfiguration_in':
        # tri-state: inherit (command absent) / always / enable
        value = 'inherit'
        for line in splitted_config:
            if command in line:
                if 'always' in line:
                    value = 'always'
                else:
                    value = 'enable'
    return value
def get_existing(module, args, warnings):
    """Read the device's running config and return the current values of
    *args* for the target neighbor address-family.

    :param module: AnsibleModule instance (supplies asn/vrf/neighbor/afi/safi)
    :param args: iterable of module parameter names to collect
    :param warnings: list to append user-facing warnings to
    :returns: dict of current values; empty when no BGP process exists
    """
    existing = {}
    netcfg = CustomNetworkConfig(indent=2, contents=get_config(module))
    # the device has at most one BGP process; grab its ASN
    asn_regex = re.compile(r'.*router\sbgp\s(?P<existing_asn>\d+).*', re.S)
    match_asn = asn_regex.match(str(netcfg))
    if match_asn:
        existing_asn = match_asn.group('existing_asn')
        # walk down to the neighbor's address-family section
        parents = ["router bgp {0}".format(existing_asn)]
        if module.params['vrf'] != 'default':
            parents.append('vrf {0}'.format(module.params['vrf']))
        parents.append('neighbor {0}'.format(module.params['neighbor']))
        parents.append('address-family {0} {1}'.format(module.params['afi'], module.params['safi']))
        config = netcfg.get_section(parents)
        if config:
            for arg in args:
                # identity keys are filled from module params below
                if arg not in ['asn', 'vrf', 'neighbor', 'afi', 'safi']:
                    existing[arg] = get_value(arg, config, module)
            existing['asn'] = existing_asn
            existing['neighbor'] = module.params['neighbor']
            existing['vrf'] = module.params['vrf']
            existing['afi'] = module.params['afi']
            existing['safi'] = module.params['safi']
    else:
        warnings.append("The BGP process didn't exist but the task just created it.")
    return existing
def apply_key_map(key_map, table):
    """Translate the keys of *table* through *key_map*.

    Entries whose key is unmapped (or maps to a falsy name) are dropped.
    """
    return {key_map[key]: value
            for key, value in table.items()
            if key_map.get(key)}
def get_default_command(key, value, existing_commands):
    """Return the 'no ...' command(s) that reset *key* to its default.

    :param key: CLI command text (a PARAM_TO_COMMAND_KEYMAP value)
    :param value: requested value, 'inherit' or 'default'
    :param existing_commands: mapping of CLI command -> currently configured value
    :returns: a command string ('' when nothing must be sent), or a list of
      commands for the 'allowas-in max' case
    """
    command = ''
    if key == 'send-community' and existing_commands.get(key) == 'none':
        command = 'no {0}'.format(key)
    elif existing_commands.get(key):
        existing_value = existing_commands.get(key)
        if value == 'inherit':
            if existing_value != 'inherit':
                command = 'no {0}'.format(key)
        else:
            # NOTE(review): PARAM_TO_COMMAND_KEYMAP yields the keys
            # 'advertise-map exist-map' / 'advertise-map non-exist-map',
            # so the two comparisons below look like they can never
            # match -- verify against the callers.
            if key == 'advertise-map exist':
                command = 'no advertise-map {0} exist-map {1}'.format(
                    existing_value[0], existing_value[1])
            elif key == 'advertise-map non-exist':
                command = 'no advertise-map {0} non-exist-map {1}'.format(
                    existing_value[0], existing_value[1])
            elif key == 'filter-list in':
                command = 'no filter-list {0} in'.format(existing_value)
            elif key == 'filter-list out':
                command = 'no filter-list {0} out'.format(existing_value)
            elif key == 'prefix-list in':
                command = 'no prefix-list {0} in'.format(existing_value)
            elif key == 'prefix-list out':
                command = 'no prefix-list {0} out'.format(existing_value)
            elif key == 'route-map in':
                command = 'no route-map {0} in'.format(existing_value)
            elif key == 'route-map out':
                command = 'no route-map {0} out'.format(existing_value)
            elif key.startswith('maximum-prefix'):
                command = 'no maximum-prefix'
            elif key == 'allowas-in max':
                # two commands: drop the max form, re-enable the plain form
                command = ['no allowas-in {0}'.format(existing_value)]
                command.append('allowas-in')
            else:
                command = 'no {0} {1}'.format(key, existing_value)
    else:
        # nothing configured: boolean flags still need an explicit 'no' form
        if key.replace(' ', '_').replace('-', '_') in BOOL_PARAMS:
            command = 'no {0}'.format(key)
    return command
def fix_proposed(module, proposed):
    """Drop whichever allowas-in key is redundant.

    'allowas_in_max' without 'allowas_in' would be meaningless on its own,
    so it is removed; when both are present only the max-occurrences form
    is kept. The dict is modified in place and also returned.
    """
    has_max = bool(proposed.get('allowas_in_max'))
    has_flag = bool(proposed.get('allowas_in'))
    if has_max:
        proposed.pop('allowas_in' if has_flag else 'allowas_in_max')
    return proposed
def state_present(module, existing, proposed, candidate):
    """Build the CLI commands that move the device config from *existing*
    toward *proposed* and stage them on *candidate*.

    :param module: AnsibleModule instance (supplies asn/vrf/neighbor/afi/safi
      and the maximum-prefix option group)
    :param existing: current values as returned by get_existing()
    :param proposed: requested values (module params that differ)
    :param candidate: CustomNetworkConfig to add the commands to
    """
    commands = list()
    proposed = fix_proposed(module, proposed)
    # translate module parameter names into CLI command text
    proposed_commands = apply_key_map(PARAM_TO_COMMAND_KEYMAP, proposed)
    existing_commands = apply_key_map(PARAM_TO_COMMAND_KEYMAP, existing)
    for key, value in proposed_commands.items():
        if value in ['inherit', 'default']:
            # 'inherit'/'default' mean "remove the setting at this level";
            # the allowas-in case may yield a list of two commands
            command = get_default_command(key, value, existing_commands)
            if isinstance(command, str):
                if command and command not in commands:
                    commands.append(command)
            elif isinstance(command, list):
                for cmd in command:
                    if cmd not in commands:
                        commands.append(cmd)
        elif key.startswith('maximum-prefix'):
            # maximum-prefix folds limit/threshold/interval/warning into
            # a single CLI line (interval and warning-only are exclusive)
            command = 'maximum-prefix {0}'.format(module.params['max_prefix_limit'])
            if module.params['max_prefix_threshold']:
                command += ' {0}'.format(module.params['max_prefix_threshold'])
            if module.params['max_prefix_interval']:
                command += ' restart {0}'.format(module.params['max_prefix_interval'])
            elif module.params['max_prefix_warning']:
                command += ' warning-only'
            commands.append(command)
        elif value is True:
            # boolean flag on
            commands.append(key)
        elif value is False:
            # boolean flag off
            commands.append('no {0}'.format(key))
        elif key == 'address-family':
            commands.append("address-family {0} {1}".format(module.params['afi'], module.params['safi']))
        elif key.startswith('capability additional-paths'):
            command = key
            if value == 'disable':
                command += ' disable'
            commands.append(command)
        elif key.startswith('advertise-map'):
            # key is 'advertise-map <exist-map|non-exist-map>'; value is a
            # [advertise_map, exist_map] pair
            direction = key.split()[1]
            commands.append('advertise-map {1} {0} {2}'.format(direction, *value))
        elif key.split()[0] in ['filter-list', 'prefix-list', 'route-map']:
            # key is '<cmd> <direction>'; render '<cmd> <name> <direction>'
            commands.append('{1} {0} {2}'.format(value, *key.split()))
        elif key == 'soft-reconfiguration inbound':
            command = ''
            if value == 'enable':
                command = key
            elif value == 'always':
                command = '{0} {1}'.format(key, value)
            commands.append(command)
        elif key == 'send-community':
            command = key
            if value != 'none':
                # bare 'send-community' means 'none'
                command += ' {0}'.format(value)
            commands.append(command)
        else:
            # generic '<command> <value>' form
            command = '{0} {1}'.format(key, value)
            commands.append(command)
    # de-duplicate before staging
    commands = set(commands)
    if commands:
        parents = ['router bgp {0}'.format(module.params['asn'])]
        if module.params['vrf'] != 'default':
            parents.append('vrf {0}'.format(module.params['vrf']))
        parents.append('neighbor {0}'.format(module.params['neighbor']))
        af_command = 'address-family {0} {1}'.format(
            module.params['afi'], module.params['safi'])
        parents.append(af_command)
        if af_command in commands:
            # the address-family line is already a parent; don't emit twice
            commands.remove(af_command)
        candidate.add(commands, parents=parents)
def state_absent(module, existing, candidate):
    """Queue the CLI lines that remove the neighbor's address-family.

    Builds the config-mode parent path (``router bgp`` / optional
    ``vrf`` / ``neighbor``) and adds a single ``no address-family``
    command under it to the candidate configuration.
    """
    params = module.params
    context = ['router bgp {0}'.format(params['asn'])]
    if params['vrf'] != 'default':
        context.append('vrf {0}'.format(params['vrf']))
    context.append('neighbor {0}'.format(params['neighbor']))
    removal = 'no address-family {0} {1}'.format(params['afi'], params['safi'])
    candidate.add([removal], parents=context)
def main():
    """Entry point for the nxos_bgp_neighbor_af Ansible module.

    Validates parameter combinations, diffs the desired settings against
    the device's existing BGP neighbor address-family configuration, and
    pushes the resulting commands via load_config.  Exits through
    module.exit_json / module.fail_json.
    """
    argument_spec = dict(
        asn=dict(required=True, type='str'),
        vrf=dict(required=False, type='str', default='default'),
        neighbor=dict(required=True, type='str'),
        afi=dict(required=True, type='str'),
        safi=dict(required=True, type='str'),
        additional_paths_receive=dict(required=False, type='str', choices=['enable', 'disable', 'inherit']),
        additional_paths_send=dict(required=False, type='str', choices=['enable', 'disable', 'inherit']),
        advertise_map_exist=dict(required=False, type='list'),
        advertise_map_non_exist=dict(required=False, type='list'),
        allowas_in=dict(required=False, type='bool'),
        allowas_in_max=dict(required=False, type='str'),
        as_override=dict(required=False, type='bool'),
        default_originate=dict(required=False, type='bool'),
        default_originate_route_map=dict(required=False, type='str'),
        filter_list_in=dict(required=False, type='str'),
        filter_list_out=dict(required=False, type='str'),
        max_prefix_limit=dict(required=False, type='str'),
        max_prefix_interval=dict(required=False, type='str'),
        max_prefix_threshold=dict(required=False, type='str'),
        max_prefix_warning=dict(required=False, type='bool'),
        next_hop_self=dict(required=False, type='bool'),
        next_hop_third_party=dict(required=False, type='bool'),
        prefix_list_in=dict(required=False, type='str'),
        prefix_list_out=dict(required=False, type='str'),
        route_map_in=dict(required=False, type='str'),
        route_map_out=dict(required=False, type='str'),
        route_reflector_client=dict(required=False, type='bool'),
        send_community=dict(required=False, choices=['none', 'both', 'extended', 'standard', 'default']),
        soft_reconfiguration_in=dict(required=False, type='str', choices=['enable', 'always', 'inherit']),
        soo=dict(required=False, type='str'),
        suppress_inactive=dict(required=False, type='bool'),
        unsuppress_map=dict(required=False, type='str'),
        weight=dict(required=False, type='str'),
        state=dict(choices=['present', 'absent'], default='present', required=False),
    )
    argument_spec.update(nxos_argument_spec)
    module = AnsibleModule(
        argument_spec=argument_spec,
        mutually_exclusive=[['advertise_map_exist', 'advertise_map_non_exist'],
                            ['max_prefix_interval', 'max_prefix_warning']],
        supports_check_mode=True,
    )
    warnings = list()
    check_args(module, warnings)
    result = dict(changed=False, warnings=warnings)
    state = module.params['state']
    # The max-prefix sub-options are meaningless without the limit itself.
    for key in ['max_prefix_interval', 'max_prefix_warning', 'max_prefix_threshold']:
        if module.params[key] and not module.params['max_prefix_limit']:
            module.fail_json(
                msg='max_prefix_limit is required when using %s' % key
            )
    if module.params['vrf'] == 'default' and module.params['soo']:
        module.fail_json(msg='SOO is only allowed in non-default VRF')
    args = PARAM_TO_COMMAND_KEYMAP.keys()
    existing = get_existing(module, args, warnings)
    if existing.get('asn') and state == 'present':
        if existing.get('asn') != module.params['asn']:
            module.fail_json(msg='Another BGP ASN already exists.',
                             proposed_asn=module.params['asn'],
                             existing_asn=existing.get('asn'))
    # A single-element list ['default'] is shorthand for the string 'default'.
    for param in ['advertise_map_exist', 'advertise_map_non_exist']:
        if module.params[param] == ['default']:
            module.params[param] = 'default'
    proposed_args = dict((k, v) for k, v in module.params.items() if v is not None and k in args)
    proposed = {}
    # Normalize string booleans / 'default' markers and keep only values
    # that differ from what is already configured on the device.
    for key, value in proposed_args.items():
        if key not in ['asn', 'vrf', 'neighbor']:
            if not isinstance(value, list):
                if str(value).lower() == 'true':
                    value = True
                elif str(value).lower() == 'false':
                    value = False
                elif str(value).lower() == 'default':
                    if key in BOOL_PARAMS:
                        value = False
                    else:
                        value = 'default'
            if existing.get(key) != value:
                proposed[key] = value
    candidate = CustomNetworkConfig(indent=3)
    if state == 'present':
        state_present(module, existing, proposed, candidate)
    elif state == 'absent' and existing:
        state_absent(module, existing, candidate)
    if candidate:
        candidate = candidate.items_text()
        # load_config honors check mode and may emit additional warnings.
        warnings.extend(load_config(module, candidate))
        result['changed'] = True
        result['commands'] = candidate
    else:
        result['commands'] = []
    module.exit_json(**result)
if __name__ == '__main__':
    main()
| bsd-3-clause |
rajexp/stepMuzic | allauth/socialaccount/providers/openid/south_migrations/0003_auto__del_openidaccount.py | 82 | 1958 | # encoding: utf-8
from south.db import db
from south.v2 import SchemaMigration
class Migration(SchemaMigration):
    """South schema migration: drops the obsolete ``openid_openidaccount``
    table (the OpenIDAccount model was removed from the app).

    NOTE(review): shipped migrations are conventionally frozen, so only
    documentation is added here.
    """
    def forwards(self, orm):
        # Deleting model 'OpenIDAccount'
        db.delete_table('openid_openidaccount')
    def backwards(self, orm):
        # Adding model 'OpenIDAccount' back so this migration is reversible.
        db.create_table('openid_openidaccount', (
            ('socialaccount_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['socialaccount.SocialAccount'], unique=True, primary_key=True)),
            ('identity', self.gf('django.db.models.fields.URLField')(max_length=255, unique=True)),
        ))
        db.send_create_signal('openid', ['OpenIDAccount'])
    # Frozen ORM state used by South while running this migration.
    models = {
        'openid.openidnonce': {
            'Meta': {'object_name': 'OpenIDNonce'},
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'salt': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'server_url': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'timestamp': ('django.db.models.fields.IntegerField', [], {})
        },
        'openid.openidstore': {
            'Meta': {'object_name': 'OpenIDStore'},
            'assoc_type': ('django.db.models.fields.TextField', [], {}),
            'handle': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'issued': ('django.db.models.fields.IntegerField', [], {}),
            'lifetime': ('django.db.models.fields.IntegerField', [], {}),
            'secret': ('django.db.models.fields.TextField', [], {}),
            'server_url': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        }
    }
    complete_apps = ['openid']
| unlicense |
RafaelTorrealba/odoo | openerp/osv/osv.py | 337 | 1384 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from ..exceptions import except_orm
from .orm import Model, TransientModel, AbstractModel
# Deprecated alias, kept for backward compatibility.
# openerp.exceptions.Warning should be used instead.
except_osv = except_orm
# Deprecated aliases for the ORM base classes, kept for backward
# compatibility with add-ons that still import them from openerp.osv.osv.
osv = Model
osv_memory = TransientModel
osv_abstract = AbstractModel # rarely used in practice, hence the historical ";-)"
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
bonitadecker77/python-for-android | python-modules/twisted/twisted/conch/client/direct.py | 60 | 3234 | # Copyright (c) 2001-2008 Twisted Matrix Laboratories.
# See LICENSE for details.
from twisted.internet import defer, protocol, reactor
from twisted.conch import error
from twisted.conch.ssh import transport
from twisted.python import log
class SSHClientFactory(protocol.ClientFactory):
    """ClientFactory that builds SSHClientTransport instances configured
    from `options`, and errbacks the supplied Deferred on connection
    failure or protocol-level error."""

    def __init__(self, d, options, verifyHostKey, userAuthObject):
        self.d = d
        self.options = options
        self.verifyHostKey = verifyHostKey
        self.userAuthObject = userAuthObject

    def clientConnectionLost(self, connector, reason):
        # Transparently reconnect when the user asked for it.
        if self.options['reconnect']:
            connector.connect()

    def clientConnectionFailed(self, connector, reason):
        # Fire the completion Deferred exactly once.
        if self.d is None:
            return
        deferred, self.d = self.d, None
        deferred.errback(reason)

    def buildProtocol(self, addr):
        proto = SSHClientTransport(self)
        opts = self.options
        if opts['ciphers']:
            proto.supportedCiphers = opts['ciphers']
        if opts['macs']:
            proto.supportedMACs = opts['macs']
        if opts['compress']:
            proto.supportedCompressions[0:1] = ['zlib']
        if opts['host-key-algorithms']:
            proto.supportedPublicKeys = opts['host-key-algorithms']
        return proto
class SSHClientTransport(transport.SSHClientTransport):
    """SSH client transport that reports connection lifecycle events back
    to its SSHClientFactory's Deferred."""
    def __init__(self, factory):
        self.factory = factory
        self.unixServer = None
    def connectionLost(self, reason):
        # Stop the unix socket server (if one was started) before
        # propagating the disconnect to the base class.
        if self.unixServer:
            d = self.unixServer.stopListening()
            self.unixServer = None
        else:
            d = defer.succeed(None)
        d.addCallback(lambda x:
            transport.SSHClientTransport.connectionLost(self, reason))
    def receiveError(self, code, desc):
        # Remote sent an SSH error: fail the pending Deferred exactly once.
        if self.factory.d is None:
            return
        d, self.factory.d = self.factory.d, None
        d.errback(error.ConchError(desc, code))
    def sendDisconnect(self, code, reason):
        # Local disconnect: send it, then fail the pending Deferred once.
        if self.factory.d is None:
            return
        d, self.factory.d = self.factory.d, None
        transport.SSHClientTransport.sendDisconnect(self, code, reason)
        d.errback(error.ConchError(reason, code))
    def receiveDebug(self, alwaysDisplay, message, lang):
        log.msg('Received Debug Message: %s' % message)
        if alwaysDisplay: # XXX what should happen here?
            print message
    def verifyHostKey(self, pubKey, fingerprint):
        # Delegate host-key validation to the caller-supplied callback.
        return self.factory.verifyHostKey(self, self.transport.getPeer().host, pubKey,
                                          fingerprint)
    def setService(self, service):
        log.msg('setting client server to %s' % service)
        transport.SSHClientTransport.setService(self, service)
        # Once past user authentication the connection is usable: fire the
        # Deferred returned by connect() with success.
        if service.name != 'ssh-userauth' and self.factory.d is not None:
            d, self.factory.d = self.factory.d, None
            d.callback(None)
    def connectionSecure(self):
        self.requestService(self.factory.userAuthObject)
def connect(host, port, options, verifyHostKey, userAuthObject):
    """Open an SSH connection to host:port.

    Returns a Deferred that fires once user authentication has started a
    further service, or errbacks on connection/protocol failure.
    """
    result = defer.Deferred()
    factory = SSHClientFactory(result, options, verifyHostKey, userAuthObject)
    reactor.connectTCP(host, port, factory)
    return result
| apache-2.0 |
tanglei528/horizon | openstack_dashboard/api/network.py | 7 | 5048 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Abstraction layer for networking functionalities.
Currently Nova and Neutron have duplicated features. This API layer is
introduced to abstract the differences between them for seamless consumption by
different dashboard implementations.
"""
from openstack_dashboard.api import base
from openstack_dashboard.api import neutron
from openstack_dashboard.api import nova
class NetworkClient(object):
    """Facade that selects the Neutron or Nova backend for floating IPs
    and security groups, depending on the services the cloud exposes."""
    def __init__(self, request):
        neutron_enabled = base.is_service_enabled(request, 'network')
        if neutron_enabled:
            self.floating_ips = neutron.FloatingIpManager(request)
        else:
            self.floating_ips = nova.FloatingIpManager(request)
        # Security groups come from Neutron only when its security-group
        # extension is available; otherwise fall back to Nova's.
        if (neutron_enabled and
                neutron.is_security_group_extension_supported(request)):
            self.secgroups = neutron.SecurityGroupManager(request)
        else:
            self.secgroups = nova.SecurityGroupManager(request)
def floating_ip_pools_list(request):
    """List the floating IP pools available to the tenant."""
    return NetworkClient(request).floating_ips.list_pools()
def tenant_floating_ip_list(request):
    """List the floating IPs allocated to the tenant."""
    return NetworkClient(request).floating_ips.list()
def tenant_floating_ip_get(request, floating_ip_id):
    """Fetch a single floating IP by its ID."""
    return NetworkClient(request).floating_ips.get(floating_ip_id)
def tenant_floating_ip_allocate(request, pool=None):
    """Allocate a floating IP to the tenant, optionally from `pool`."""
    return NetworkClient(request).floating_ips.allocate(pool)
def tenant_floating_ip_release(request, floating_ip_id):
    """Release a floating IP back to its pool."""
    return NetworkClient(request).floating_ips.release(floating_ip_id)
def floating_ip_associate(request, floating_ip_id, port_id):
    """Associate a floating IP with a port (or Nova target)."""
    return NetworkClient(request).floating_ips.associate(floating_ip_id,
                                                         port_id)
def floating_ip_disassociate(request, floating_ip_id, port_id):
    """Remove the association between a floating IP and a port."""
    return NetworkClient(request).floating_ips.disassociate(floating_ip_id,
                                                            port_id)
def floating_ip_target_list(request):
    """List all possible association targets for floating IPs."""
    return NetworkClient(request).floating_ips.list_targets()
def floating_ip_target_get_by_instance(request, instance_id):
    """Return the association target ID for a given instance."""
    return NetworkClient(request).floating_ips.get_target_id_by_instance(
        instance_id)
def floating_ip_target_list_by_instance(request, instance_id):
    """Return all association target IDs belonging to an instance."""
    floating_ips = NetworkClient(request).floating_ips
    return floating_ips.list_target_id_by_instance(instance_id)
def floating_ip_simple_associate_supported(request):
    """Whether the backend supports one-step ("simple") association."""
    return NetworkClient(request).floating_ips.is_simple_associate_supported()
def security_group_list(request):
    """List the tenant's security groups."""
    return NetworkClient(request).secgroups.list()
def security_group_get(request, sg_id):
    """Fetch a single security group by its ID."""
    return NetworkClient(request).secgroups.get(sg_id)
def security_group_create(request, name, desc):
    """Create a security group with the given name and description."""
    return NetworkClient(request).secgroups.create(name, desc)
def security_group_delete(request, sg_id):
    """Delete the security group identified by `sg_id`."""
    return NetworkClient(request).secgroups.delete(sg_id)
def security_group_update(request, sg_id, name, desc):
    """Update a security group's name and description."""
    return NetworkClient(request).secgroups.update(sg_id, name, desc)
def security_group_rule_create(request, parent_group_id,
                               direction, ethertype,
                               ip_protocol, from_port, to_port,
                               cidr, group_id):
    """Add a rule to the security group `parent_group_id`.

    Either `cidr` or `group_id` identifies the remote side of the rule.
    """
    return NetworkClient(request).secgroups.rule_create(
        parent_group_id, direction, ethertype, ip_protocol,
        from_port, to_port, cidr, group_id)
def security_group_rule_delete(request, sgr_id):
    """Delete a single security group rule."""
    return NetworkClient(request).secgroups.rule_delete(sgr_id)
def server_security_groups(request, instance_id):
    """List the security groups attached to an instance."""
    return NetworkClient(request).secgroups.list_by_instance(instance_id)
def server_update_security_groups(request, instance_id,
                                  new_security_group_ids):
    """Replace an instance's security groups with `new_security_group_ids`."""
    return NetworkClient(request).secgroups.update_instance_security_group(
        instance_id, new_security_group_ids)
def security_group_backend(request):
    """Name of the backend serving security groups ('nova' or 'neutron')."""
    return NetworkClient(request).secgroups.backend
def servers_update_addresses(request, servers):
    """Retrieve servers networking information from Neutron if enabled.

    Should be used when up to date networking information is required,
    and Nova's networking info caching mechanism is not fast enough.

    :param servers: list of server objects whose address info is
        refreshed in place; a no-op when Neutron is not enabled.
    """
    neutron_enabled = base.is_service_enabled(request, 'network')
    if neutron_enabled:
        neutron.servers_update_addresses(request, servers)
| apache-2.0 |
FlorianLudwig/odoo | addons/account/project/report/cost_ledger.py | 358 | 5399 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import osv
from openerp.report import report_sxw
class account_analytic_cost_ledger(report_sxw.rml_parse):
    """RML parser for the analytic cost ledger report.

    Exposes helper callables to the report template that aggregate
    analytic lines per general account over a date interval, for an
    analytic account and all of its children.
    """
    def __init__(self, cr, uid, name, context):
        super(account_analytic_cost_ledger, self).__init__(cr, uid, name, context=context)
        # Helpers made available to the RML template.
        self.localcontext.update( {
            'time': time,
            'lines_g': self._lines_g,
            'lines_a': self._lines_a,
            'account_sum_debit': self._account_sum_debit,
            'account_sum_credit': self._account_sum_credit,
            'account_sum_balance': self._account_sum_balance,
            'sum_debit': self._sum_debit,
            'sum_credit': self._sum_credit,
            'sum_balance': self._sum_balance,
        })
        self.children = {} # a memo for the method _get_children
    def _get_children(self, accounts):
        """ return all children accounts of the given accounts
        :param accounts: list of browse records of 'account.analytic.account'
        :return: tuple of account ids
        """
        analytic_obj = self.pool.get('account.analytic.account')
        res = set()
        for account in accounts:
            # Memoize the child_of search per account id.
            if account.id not in self.children:
                self.children[account.id] = analytic_obj.search(self.cr, self.uid, [('parent_id', 'child_of', [account.id])])
            res.update(self.children[account.id])
        return tuple(res)
    def _lines_g(self, account, date1, date2):
        """Totals of analytic lines grouped by general account, for
        `account` (and its children) between date1 and date2."""
        self.cr.execute("SELECT sum(aal.amount) AS balance, aa.code AS code, aa.name AS name, aa.id AS id \
                FROM account_account AS aa, account_analytic_line AS aal \
                WHERE (aal.account_id IN %s) AND (aal.date>=%s) AND (aal.date<=%s) AND (aal.general_account_id=aa.id) AND aa.active \
                GROUP BY aa.code, aa.name, aa.id ORDER BY aa.code", (self._get_children([account]), date1, date2))
        res = self.cr.dictfetchall()
        # Split the signed balance into debit (positive) / credit (negative).
        for r in res:
            r['debit'] = r['balance'] if r['balance'] > 0 else 0.0
            r['credit'] = -r['balance'] if r['balance'] < 0 else 0.0
        return res
    def _lines_a(self, general_account, account, date1, date2):
        """Detail of analytic lines for one general account, ordered by
        date, journal code, then line code."""
        self.cr.execute("SELECT aal.name AS name, aal.code AS code, aal.amount AS balance, aal.date AS date, aaj.code AS cj FROM account_analytic_line AS aal, account_analytic_journal AS aaj \
                WHERE (aal.general_account_id=%s) AND (aal.account_id IN %s) AND (aal.date>=%s) AND (aal.date<=%s) \
                AND (aal.journal_id=aaj.id) \
                ORDER BY aal.date, aaj.code, aal.code", (general_account['id'], self._get_children([account]), date1, date2))
        res = self.cr.dictfetchall()
        # Same debit/credit split as in _lines_g.
        for r in res:
            r['debit'] = r['balance'] if r['balance'] > 0 else 0.0
            r['credit'] = -r['balance'] if r['balance'] < 0 else 0.0
        return res
    def _account_sum_debit(self, account, date1, date2):
        """Total debit for a single analytic account (and children)."""
        return self._sum_debit([account], date1, date2)
    def _account_sum_credit(self, account, date1, date2):
        """Total credit for a single analytic account (and children)."""
        return self._sum_credit([account], date1, date2)
    def _account_sum_balance(self, account, date1, date2):
        """Debit minus credit for a single analytic account."""
        debit = self._account_sum_debit(account, date1, date2)
        credit = self._account_sum_credit(account, date1, date2)
        return (debit-credit)
    def _sum_debit(self, accounts, date1, date2):
        """Sum of positive analytic amounts over `accounts` and children."""
        self.cr.execute("SELECT sum(amount) FROM account_analytic_line WHERE account_id IN %s AND date>=%s AND date<=%s AND amount>0",
                        (self._get_children(accounts), date1, date2,))
        return self.cr.fetchone()[0] or 0.0
    def _sum_credit(self, accounts, date1, date2):
        """Sum of negative analytic amounts (sign-flipped) over `accounts`."""
        self.cr.execute("SELECT -sum(amount) FROM account_analytic_line WHERE account_id IN %s AND date>=%s AND date<=%s AND amount<0",
                        (self._get_children(accounts), date1, date2,))
        return self.cr.fetchone()[0] or 0.0
    def _sum_balance(self, accounts, date1, date2):
        """Debit minus credit over a list of analytic accounts."""
        debit = self._sum_debit(accounts, date1, date2)
        credit = self._sum_credit(accounts, date1, date2)
        return (debit-credit)
class report_analyticcostledger(osv.AbstractModel):
    """Report model wiring the 'account.report_analyticcostledger'
    template to the legacy RML parser class above."""
    _name = 'report.account.report_analyticcostledger'
    _inherit = 'report.abstract_report'
    _template = 'account.report_analyticcostledger'
    _wrapped_report_class = account_analytic_cost_ledger
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
amosshapira/troposphere | tests/test_ecs.py | 23 | 1415 | import unittest
from troposphere import Ref
import troposphere.ecs as ecs
class TestECS(unittest.TestCase):
    """Checks that ecs.Service accepts both a plain string and a Ref()
    for its Cluster property."""

    @staticmethod
    def _build_task_definition():
        """Shared fixture: minimal valid TaskDefinition used by every test.

        Extracted to remove the duplicated construction that previously
        appeared verbatim in both test methods.
        """
        return ecs.TaskDefinition(
            "mytaskdef",
            ContainerDefinitions=[
                ecs.ContainerDefinition(
                    Image="myimage",
                    Memory="300",
                    Name="mycontainer",
                )
            ],
            Volumes=[
                ecs.Volume(Name="my-vol"),
            ],
        )

    def test_allow_string_cluster(self):
        """A literal cluster name must be accepted."""
        task_definition = self._build_task_definition()
        ecs_service = ecs.Service(
            'Service',
            Cluster='cluster',
            DesiredCount=2,
            TaskDefinition=Ref(task_definition),
        )
        # JSONrepr() validates the resource and raises on bad properties.
        ecs_service.JSONrepr()

    def test_allow_ref_cluster(self):
        """A Ref to a Cluster resource must be accepted too."""
        task_definition = self._build_task_definition()
        cluster = ecs.Cluster("mycluster")
        ecs_service = ecs.Service(
            'Service',
            Cluster=Ref(cluster),
            DesiredCount=2,
            TaskDefinition=Ref(task_definition),
        )
        ecs_service.JSONrepr()
| bsd-2-clause |
crossbario/autobahn-python | examples/twisted/websocket/wrapping/server_endpoint.py | 3 | 2044 | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
from twisted.internet.protocol import Protocol
class HelloServerProtocol(Protocol):
    """Trivial stream protocol: greets the peer on connect and logs
    whatever data arrives."""
    def connectionMade(self):
        print("connectionMade")
        # NOTE(review): writes a native str; a Python 3 transport would
        # expect bytes -- this example appears to target Python 2.
        self.transport.write('how are you?')
    def dataReceived(self, data):
        print("dataReceived: {}".format(data))
if __name__ == '__main__':
    import sys
    from twisted.python import log
    from twisted.internet import reactor
    from twisted.internet.protocol import Factory
    from twisted.internet.endpoints import serverFromString
    # Log to stdout so connection events are visible on the console.
    log.startLogging(sys.stdout)
    wrappedFactory = Factory.forProtocol(HelloServerProtocol)
    # The "autobahn:" endpoint wraps the plain TCP protocol in WebSocket
    # framing; colons inside the inner endpoint/URL are backslash-escaped
    # per Twisted endpoint-string syntax.
    endpoint = serverFromString(reactor, "autobahn:tcp\:9000:url=ws\://127.0.0.1\:9000")
    endpoint.listen(wrappedFactory)
    reactor.run()
| mit |
ZLLab-Mooc/edx-platform | lms/djangoapps/verify_student/ssencrypt.py | 183 | 7000 | """
NOTE: Anytime a `key` is passed into a function here, we assume it's a raw byte
string. It should *not* be a string representation of a hex value. In other
words, passing the `str` value of
`"32fe72aaf2abb44de9e161131b5435c8d37cbdb6f5df242ae860b283115f2dae"` is bad.
You want to pass in the result of calling .decode('hex') on that, so this instead:
"'2\xfer\xaa\xf2\xab\xb4M\xe9\xe1a\x13\x1bT5\xc8\xd3|\xbd\xb6\xf5\xdf$*\xe8`\xb2\x83\x11_-\xae'"
The RSA functions take any key format that RSA.importKey() accepts, so...
An RSA public key can be in any of the following formats:
* X.509 subjectPublicKeyInfo DER SEQUENCE (binary or PEM encoding)
* PKCS#1 RSAPublicKey DER SEQUENCE (binary or PEM encoding)
* OpenSSH (textual public key only)
An RSA private key can be in any of the following formats:
* PKCS#1 RSAPrivateKey DER SEQUENCE (binary or PEM encoding)
* PKCS#8 PrivateKeyInfo DER SEQUENCE (binary or PEM encoding)
* OpenSSH (textual public key only)
In case of PEM encoding, the private key can be encrypted with DES or 3TDES
according to a certain pass phrase. Only OpenSSL-compatible pass phrases are
supported.
"""
from hashlib import md5, sha256
import base64
import binascii
import hmac
import logging
from Crypto import Random
from Crypto.Cipher import AES, PKCS1_OAEP
from Crypto.PublicKey import RSA
log = logging.getLogger(__name__)
def encrypt_and_encode(data, key):
    """AES-encrypt `data` with `key`, then URL-safe base64-encode the result."""
    return base64.urlsafe_b64encode(aes_encrypt(data, key))
def decode_and_decrypt(encoded_data, key):
    """Inverse of encrypt_and_encode: base64-decode, then AES-decrypt."""
    return aes_decrypt(base64.urlsafe_b64decode(encoded_data), key)
def aes_encrypt(data, key):
    """
    Return `data` encrypted with AES (CBC mode) under `key`, after padding
    it up to the cipher block size.
    """
    cipher = aes_cipher_from_key(key)
    padded_data = pad(data)
    return cipher.encrypt(padded_data)
def aes_decrypt(encrypted_data, key):
    """
    Decrypt `encrypted_data` using `key` and strip the block padding.
    """
    cipher = aes_cipher_from_key(key)
    padded_data = cipher.decrypt(encrypted_data)
    return unpad(padded_data)
def aes_cipher_from_key(key):
    """
    Given an AES key, return a Cipher object that has `encrypt()` and
    `decrypt()` methods. It will create the cipher to use CBC mode, and create
    the initialization vector as Software Secure expects it.
    """
    return AES.new(key, AES.MODE_CBC, generate_aes_iv(key))
def generate_aes_iv(key):
    """
    Return the initialization vector Software Secure expects for a given AES
    key (they hash it a couple of times and take a substring).
    """
    # md5(key + md5(key).hexdigest()) truncated to one AES block.
    # NOTE(review): a deterministic, key-derived IV is cryptographically
    # unusual -- kept as-is because the remote service expects exactly
    # this construction.
    return md5(key + md5(key).hexdigest()).hexdigest()[:AES.block_size]
def random_aes_key():
    # 32 random bytes == AES-256 key.
    return Random.new().read(32)
def pad(data):
    """ Pad the given `data` such that it fits into the proper AES block size """
    # Always appends between 1 and block_size characters, each equal to
    # chr(pad length), so unpad() can recover the original length.
    bytes_to_pad = AES.block_size - len(data) % AES.block_size
    return data + (bytes_to_pad * chr(bytes_to_pad))
def unpad(padded_data):
    """Strip trailing padding: the final character's ordinal gives the
    number of padding characters to remove."""
    pad_len = ord(padded_data[-1])
    return padded_data[:-pad_len]
def rsa_encrypt(data, rsa_pub_key_str):
    """
    Encrypt `data` with RSAES-OAEP using the public key in
    `rsa_pub_key_str` (any format accepted by RSA.importKey).
    """
    key = RSA.importKey(rsa_pub_key_str)
    cipher = PKCS1_OAEP.new(key)
    encrypted_data = cipher.encrypt(data)
    return encrypted_data
def rsa_decrypt(data, rsa_priv_key_str):
    """
    When given some `data` and an RSA private key (any format accepted by
    RSA.importKey), decrypt the data.
    """
    key = RSA.importKey(rsa_priv_key_str)
    cipher = PKCS1_OAEP.new(key)
    return cipher.decrypt(data)
def has_valid_signature(method, headers_dict, body_dict, access_key, secret_key):
    """
    Given a message (either request or response), say whether it has a valid
    signature or not.

    Recomputes the signature locally and compares it (and the access key)
    against what the sender put in the Authorization header.
    """
    _, expected_signature, _ = generate_signed_message(
        method, headers_dict, body_dict, access_key, secret_key
    )
    authorization = headers_dict["Authorization"]
    auth_token, post_signature = authorization.split(":")
    _, post_access_key = auth_token.split()
    if post_access_key != access_key:
        log.error("Posted access key does not match ours")
        log.debug("Their access: %s; Our access: %s", post_access_key, access_key)
        return False
    # Use a constant-time comparison so the check does not leak how many
    # leading characters of the signature matched (timing side channel).
    if not hmac.compare_digest(post_signature, expected_signature):
        log.error("Posted signature does not match expected")
        log.debug("Their sig: %s; Expected: %s", post_signature, expected_signature)
        return False
    return True
def generate_signed_message(method, headers_dict, body_dict, access_key, secret_key):
    """
    Return a (message, signature, authorization_header) triple for the
    given request/response parts, signed with HMAC-SHA256 under
    `secret_key`.
    """
    message = signing_format_message(method, headers_dict, body_dict)
    # hmac needs a byte string for it's starting key, can't be unicode.
    hashed = hmac.new(secret_key.encode('utf-8'), message, sha256)
    signature = binascii.b2a_base64(hashed.digest()).rstrip('\n')
    authorization_header = "SSI {}:{}".format(access_key, signature)
    # The returned message gets a trailing newline that was NOT part of
    # the signed input.
    message += '\n'
    return message, signature, authorization_header
def signing_format_message(method, headers_dict, body_dict):
    """Build the canonical string that gets signed: the HTTP method, a
    blank line, the normalized headers, then the normalized body.

    Header and body normalization are delegated to header_string() and
    body_string() respectively.
    """
    return "{}\n\n{}{}".format(
        method,
        header_string(headers_dict),
        body_string(body_dict),
    )
def header_string(headers_dict):
    """Return the canonical signing string for the recognized headers.

    Only Content-Type, Date and Content-MD5 participate, always in that
    order; each value present is terminated by a newline (the trailing
    newlines are significant for the signature).
    """
    recognized = ("Content-Type", "Date", "Content-MD5")
    return "".join(headers_dict[name] + "\n"
                   for name in recognized if name in headers_dict)
def body_string(body_dict, prefix=""):
    """
    Return a canonical string representation of the body of a JSON request or
    response. This canonical representation will be used as an input to the
    hashing used to generate a signature.
    """
    body_list = []
    # Keys are visited in sorted order so the representation is stable.
    for key, value in sorted(body_dict.items()):
        if isinstance(value, (list, tuple)):
            # Lists flatten to "key.<index>" entries; dict elements recurse
            # with a "key.<index>." prefix.
            for i, arr in enumerate(value):
                if isinstance(arr, dict):
                    body_list.append(body_string(arr, u"{}.{}.".format(key, i)))
                else:
                    body_list.append(u"{}.{}:{}\n".format(key, i, arr).encode('utf-8'))
        elif isinstance(value, dict):
            # Nested dicts recurse with a "key:" prefix on every line.
            body_list.append(body_string(value, key + ":"))
        else:
            if value is None:
                value = "null"  # match JSON's spelling of None
            body_list.append(u"{}{}:{}\n".format(prefix, key, value).encode('utf-8'))
    return "".join(body_list)  # Note that trailing \n's are important
| agpl-3.0 |
WebSpider/SickRage | lib/hachoir_parser/archive/rar.py | 84 | 13364 | """
RAR parser
Status: can only read higher-level structures
Author: Christophe Gisquet
"""
from hachoir_parser import Parser
from hachoir_core.field import (StaticFieldSet, FieldSet,
Bit, Bits, Enum,
UInt8, UInt16, UInt32, UInt64,
String, TimeDateMSDOS32,
NullBytes, NullBits, RawBytes)
from hachoir_core.text_handler import textHandler, filesizeHandler, hexadecimal
from hachoir_core.endian import LITTLE_ENDIAN
from hachoir_parser.common.msdos import MSDOSFileAttr32
# Sanity cap on sizes declared inside the archive (~1000 MiB).
MAX_FILESIZE = 1000 * 1024 * 1024
# Block type byte -> human-readable block name.
BLOCK_NAME = {
    0x72: "Marker",
    0x73: "Archive",
    0x74: "File",
    0x75: "Comment",
    0x76: "Extra info",
    0x77: "Subblock",
    0x78: "Recovery record",
    0x79: "Archive authenticity",
    0x7A: "New-format subblock",
    0x7B: "Archive end",
}
# Compression method byte -> description.
COMPRESSION_NAME = {
    0x30: "Storing",
    0x31: "Fastest compression",
    0x32: "Fast compression",
    0x33: "Normal compression",
    0x34: "Good compression",
    0x35: "Best compression"
}
# Host OS codes stored in RAR headers.
OS_MSDOS = 0
OS_WIN32 = 2
OS_NAME = {
    0: "MS DOS",
    1: "OS/2",
    2: "Win32",
    3: "Unix",
}
# 3-bit dictionary-size field of the file flags; value 7 marks a directory.
DICTIONARY_SIZE = {
    0: "Dictionary size 64 Kb",
    1: "Dictionary size 128 Kb",
    2: "Dictionary size 256 Kb",
    3: "Dictionary size 512 Kb",
    4: "Dictionary size 1024 Kb",
    7: "File is a directory",
}
def formatRARVersion(field):
    """
    Decode the one-byte RAR version (stored as major*10 + minor) into
    the string "major.minor".
    """
    major, minor = divmod(field.value, 10)
    return "%u.%u" % (major, minor)
def commonFlags(s):
    """Yield the two flag bits shared by every RAR block type."""
    yield Bit(s, "has_added_size", "Additional field indicating additional size")
    yield Bit(s, "is_ignorable", "Old versions of RAR should ignore this block when copying data")
class ArchiveFlags(StaticFieldSet):
    """16-bit flag word of the archive header block."""
    format = (
        (Bit, "vol", "Archive volume"),
        (Bit, "has_comment", "Whether there is a comment"),
        (Bit, "is_locked", "Archive volume"),
        (Bit, "is_solid", "Whether files can be extracted separately"),
        (Bit, "new_numbering", "New numbering, or compressed comment"), # From unrar
        (Bit, "has_authenticity_information", "The integrity/authenticity of the archive can be checked"),
        (Bit, "is_protected", "The integrity/authenticity of the archive can be checked"),
        (Bit, "is_passworded", "Needs a password to be decrypted"),
        (Bit, "is_first_vol", "Whether it is the first volume"),
        (Bit, "is_encrypted", "Whether the encryption version is present"),
        (NullBits, "internal", 6, "Reserved for 'internal use'")
    )
def archiveFlags(s):
yield ArchiveFlags(s, "flags", "Archiver block flags")
def archiveHeader(s):
yield NullBytes(s, "reserved[]", 2, "Reserved word")
yield NullBytes(s, "reserved[]", 4, "Reserved dword")
def commentHeader(s):
yield filesizeHandler(UInt16(s, "total_size", "Comment header size + comment size"))
yield filesizeHandler(UInt16(s, "uncompressed_size", "Uncompressed comment size"))
yield UInt8(s, "required_version", "RAR version needed to extract comment")
yield UInt8(s, "packing_method", "Comment packing method")
yield UInt16(s, "comment_crc16", "Comment CRC")
def commentBody(s):
    """Yield the compressed comment bytes that follow the comment header."""
    # NOTE(review): total_size is a byte count while hachoir's current_size
    # is a bit position (sibling code divides by 8) -- confirm the units.
    remaining = s["total_size"].value - s.current_size
    if remaining <= 0:
        return
    yield RawBytes(s, "comment_data", remaining, "Compressed comment data")
def signatureHeader(s):
    # Old-style signature block payload: creation time plus two name sizes.
    yield TimeDateMSDOS32(s, "creation_time")
    yield filesizeHandler(UInt16(s, "arc_name_size"))
    yield filesizeHandler(UInt16(s, "user_name_size"))
def recoveryHeader(s):
    # Recovery-record block header.
    yield filesizeHandler(UInt32(s, "total_size"))
    yield textHandler(UInt8(s, "version"), hexadecimal)
    yield UInt16(s, "rec_sectors")
    yield UInt32(s, "total_blocks")
    yield RawBytes(s, "mark", 8)
def avInfoHeader(s):
    # Header of an authenticity-verification (AV) info block.
    yield filesizeHandler(UInt16(s, "total_size", "Total block size"))
    yield UInt8(s, "version", "Version needed to decompress", handler=hexadecimal)
    yield UInt8(s, "method", "Compression method", handler=hexadecimal)
    yield UInt8(s, "av_version", "Version for AV", handler=hexadecimal)
    yield UInt32(s, "av_crc", "AV info CRC32", handler=hexadecimal)
def avInfoBody(s):
    """Yield the AV info payload that follows the AV info header."""
    # NOTE(review): total_size is a byte count while hachoir's current_size
    # is a bit position (sibling code divides by 8) -- confirm the units.
    remaining = s["total_size"].value - s.current_size
    if remaining <= 0:
        return
    yield RawBytes(s, "av_info_data", remaining, "AV info")
class FileFlags(FieldSet):
    """The 16 flag bits of a file block (also used by new-format sub blocks)."""
    static_size = 16

    def createFields(self):
        yield Bit(self, "continued_from", "File continued from previous volume")
        yield Bit(self, "continued_in", "File continued in next volume")
        yield Bit(self, "is_encrypted", "File encrypted with password")
        yield Bit(self, "has_comment", "File comment present")
        yield Bit(self, "is_solid", "Information from previous files is used (solid flag)")
        # The 3 following lines are what blocks more staticity
        yield Enum(Bits(self, "dictionary_size", 3, "Dictionary size"), DICTIONARY_SIZE)
        # Bits shared with every other block type (added size / ignorable).
        for bit in commonFlags(self):
            yield bit
        yield Bit(self, "is_large", "file64 operations needed")
        yield Bit(self, "is_unicode", "Filename also encoded using Unicode")
        yield Bit(self, "has_salt", "Has salt for encryption")
        yield Bit(self, "uses_file_version", "File versioning is used")
        yield Bit(self, "has_ext_time", "Extra time ??")
        yield Bit(self, "has_ext_flags", "Extra flag ??")
def fileFlags(s):
    # File blocks use the specialized FileFlags layout.
    yield FileFlags(s, "flags", "File block flags")
class ExtTime(FieldSet):
    """
    Extended time information: a 16-bit flags word followed by up to four
    optional timestamps, one 4-bit flag group per timestamp (highest
    nibble first).
    """
    def createFields(self):
        yield textHandler(UInt16(self, "time_flags", "Flags for extended time"), hexadecimal)
        flags = self["time_flags"].value
        for index in xrange(4):  # xrange: this module targets Python 2
            # Extract the 4-bit group for this timestamp.
            rmode = flags >> ((3 - index) * 4)
            if rmode & 8:  # bit 3 of the group: timestamp is present
                # NOTE(review): index 0 yields no dos_time field -- presumably
                # that timestamp is already stored in the file header and only
                # its sub-second remainder follows; confirm against the spec.
                if index:
                    yield TimeDateMSDOS32(self, "dos_time[]", "DOS Time")
                if rmode & 3:  # low 2 bits: number of remainder bytes
                    yield RawBytes(self, "remainder[]", rmode & 3, "Time remainder")
def specialHeader(s, is_file):
    """
    Parse the header layout shared by file blocks and new-format sub
    blocks. When *is_file* is true, the file-only trailing fields
    (salt, extended time) are parsed as well.
    """
    yield filesizeHandler(UInt32(s, "compressed_size", "Compressed size (bytes)"))
    yield filesizeHandler(UInt32(s, "uncompressed_size", "Uncompressed size (bytes)"))
    yield Enum(UInt8(s, "host_os", "Operating system used for archiving"), OS_NAME)
    yield textHandler(UInt32(s, "crc32", "File CRC32"), hexadecimal)
    yield TimeDateMSDOS32(s, "ftime", "Date and time (MS DOS format)")
    yield textHandler(UInt8(s, "version", "RAR version needed to extract file"), formatRARVersion)
    yield Enum(UInt8(s, "method", "Packing method"), COMPRESSION_NAME)
    yield filesizeHandler(UInt16(s, "filename_length", "File name size"))
    # Attribute decoding depends on which OS created the archive.
    if s["host_os"].value in (OS_MSDOS, OS_WIN32):
        yield MSDOSFileAttr32(s, "file_attr", "File attributes")
    else:
        yield textHandler(UInt32(s, "file_attr", "File attributes"), hexadecimal)

    # Start additional field from unrar
    if s["flags/is_large"].value:
        yield filesizeHandler(UInt64(s, "large_size", "Extended 64bits filesize"))
    # End additional field

    size = s["filename_length"].value
    if size > 0:
        if s["flags/is_unicode"].value:
            charset = "UTF-8"
        else:
            charset = "ISO-8859-15"
        yield String(s, "filename", size, "Filename", charset=charset)

    # Start additional fields from unrar - file only
    if is_file:
        if s["flags/has_salt"].value:
            # NOTE(review): only one byte is read here; confirm the salt
            # length against the RAR format specification.
            yield textHandler(UInt8(s, "salt", "Salt"), hexadecimal)
        if s["flags/has_ext_time"].value:
            yield ExtTime(s, "extra_time", "Extra time info")
def fileHeader(s):
    """Header of a file block: the shared special header plus file extras."""
    return specialHeader(s, True)
def fileBody(s):
    """Yield the compressed data of a file block (64-bit aware)."""
    # File compressed data
    size = s["compressed_size"].value
    if s["flags/is_large"].value:
        # High 32 bits of the size live in the "large_size" field.
        size += s["large_size"].value
    if size > 0:
        yield RawBytes(s, "compressed_data", size, "File compressed data")
def fileDescription(s):
    """Build the one-line description shown for a file block."""
    return "File entry: %s (%s)" % \
        (s["filename"].display, s["compressed_size"].display)
def newSubHeader(s):
    """Header of a new-format sub block: the special header without file extras."""
    return specialHeader(s, False)
class EndFlags(StaticFieldSet):
    """Flag bits of the archive end block (type 0x7B)."""
    format = (
        (Bit, "has_next_vol", "Whether there is another next volume"),
        (Bit, "has_data_crc", "Whether a CRC value is present"),
        (Bit, "rev_space"),
        (Bit, "has_vol_number", "Whether the volume number is present"),
        (Bits, "unused[]", 4),
        (Bit, "has_added_size", "Additional field indicating additional size"),
        (Bit, "is_ignorable", "Old versions of RAR should ignore this block when copying data"),
        (Bits, "unused[]", 6),
    )
def endFlags(s):
    # Archive end blocks use the specialized EndFlags layout.
    yield EndFlags(s, "flags", "End block flags")
class BlockFlags(FieldSet):
    """Generic 16-bit flags word, used when a block type has no specific layout."""
    static_size = 16

    def createFields(self):
        yield textHandler(Bits(self, "unused[]", 8, "Unused flag bits"), hexadecimal)
        yield Bit(self, "has_added_size", "Additional field indicating additional size")
        yield Bit(self, "is_ignorable", "Old versions of RAR should ignore this block when copying data")
        yield Bits(self, "unused[]", 6)
class Block(FieldSet):
    """
    One RAR block: CRC16, type byte, 16 flag bits, a 16-bit block size,
    then a type-specific header and (for some types) a body.
    """
    # block type -> (field name, description or callable, flags parser,
    # header parser, body parser). None means 'use default function'.
    BLOCK_INFO = {
        0x72: ("marker", "Archive header", None, None, None),
        0x73: ("archive_start", "Archive info", archiveFlags, archiveHeader, None),
        0x74: ("file[]", fileDescription, fileFlags, fileHeader, fileBody),
        0x75: ("comment[]", "Stray comment", None, commentHeader, commentBody),
        0x76: ("av_info[]", "Extra information", None, avInfoHeader, avInfoBody),
        0x77: ("sub_block[]", "Stray subblock", None, newSubHeader, fileBody),
        0x78: ("recovery[]", "Recovery block", None, recoveryHeader, None),
        0x79: ("signature", "Signature block", None, signatureHeader, None),
        0x7A: ("new_sub_block[]", "Stray new-format subblock", fileFlags,
               newSubHeader, fileBody),
        0x7B: ("archive_end", "Archive end block", endFlags, None, None),
    }

    def __init__(self, parent, name):
        FieldSet.__init__(self, parent, name)
        t = self["block_type"].value
        if t in self.BLOCK_INFO:
            # Override name, description and the parse* hooks per type.
            self._name, desc, parseFlags, parseHeader, parseBody = self.BLOCK_INFO[t]
            if callable(desc):
                self.createDescription = lambda: desc(self)
            elif desc:
                self._description = desc
            if parseFlags:
                self.parseFlags = lambda: parseFlags(self)
            if parseHeader:
                self.parseHeader = lambda: parseHeader(self)
            if parseBody:
                self.parseBody = lambda: parseBody(self)
        else:
            # BUG FIX: the original formatted the builtin ``type`` with "%u"
            # (a TypeError at runtime) instead of the block type value ``t``,
            # and repeated the word "block".
            self.info("Processing as unknown block of type %u" % t)
        # Total size: declared block size, plus the body for file-like
        # blocks, plus any "added size" extension.
        self._size = 8 * self["block_size"].value
        if t == 0x74 or t == 0x7A:
            self._size += 8 * self["compressed_size"].value
            if "is_large" in self["flags"] and self["flags/is_large"].value:
                self._size += 8 * self["large_size"].value
        elif "has_added_size" in self:
            self._size += 8 * self["added_size"].value
        # TODO: check if any other member is needed here

    def createFields(self):
        yield textHandler(UInt16(self, "crc16", "Block CRC16"), hexadecimal)
        yield textHandler(UInt8(self, "block_type", "Block type"), hexadecimal)
        # Parse flags
        for field in self.parseFlags():
            yield field
        # Get block size
        yield filesizeHandler(UInt16(self, "block_size", "Block size"))
        # Parse remaining header
        for field in self.parseHeader():
            yield field
        # Finish header with stuff of unknown size
        size = self["block_size"].value - (self.current_size // 8)
        if size > 0:
            yield RawBytes(self, "unknown", size, "Unknow data (UInt32 probably)")
        # Parse body
        for field in self.parseBody():
            yield field

    def createDescription(self):
        # BUG FIX: there is no "type" field; the type byte is "block_type".
        # This default is only reached for unknown block types (known types
        # override createDescription/_description in __init__).
        return "Block entry: %s" % self["block_type"].display

    def parseFlags(self):
        # Default flags layout for types without a specific one.
        yield BlockFlags(self, "flags", "Block header flags")

    def parseHeader(self):
        # Default header: only the optional "added size" extension field.
        if "has_added_size" in self["flags"] and \
           self["flags/has_added_size"].value:
            yield filesizeHandler(UInt32(self, "added_size",
                                         "Supplementary block size"))

    def parseBody(self):
        """
        Parse what is left of the block
        """
        size = self["block_size"].value - (self.current_size // 8)
        if "has_added_size" in self["flags"] and self["flags/has_added_size"].value:
            size += self["added_size"].value
        if size > 0:
            yield RawBytes(self, "body", size, "Body data")
class RarFile(Parser):
    """
    Parser for Roshal ARchive (RAR) files: the 7-byte magic marker
    followed by a flat sequence of blocks.
    """
    # NOTE(review): str literal -- this module targets Python 2, where
    # stream.readBytes() also returns str.
    MAGIC = "Rar!\x1A\x07\x00"
    PARSER_TAGS = {
        "id": "rar",
        "category": "archive",
        "file_ext": ("rar",),
        "mime": (u"application/x-rar-compressed", ),
        "min_size": 7*8,  # size of the magic marker, in bits
        "magic": ((MAGIC, 0),),
        "description": "Roshal archive (RAR)",
    }
    endian = LITTLE_ENDIAN

    def validate(self):
        """Accept the stream only if it starts with the RAR signature."""
        magic = self.MAGIC
        if self.stream.readBytes(0, len(magic)) != magic:
            return "Invalid magic"
        return True

    def createFields(self):
        # An archive is simply blocks back to back until end of stream.
        while not self.eof:
            yield Block(self, "block[]")

    def createContentSize(self):
        # Search for the serialized archive-end block (little-endian:
        # CRC16 0x3DC4, type 0x7B, flags 0x4000, size 7) and stop after it.
        start = 0
        end = MAX_FILESIZE * 8
        pos = self.stream.searchBytes("\xC4\x3D\x7B\x00\x40\x07\x00", start, end)
        if pos is not None:
            return pos + 7*8
        return None
| gpl-3.0 |
nopjmp/SickRage | lib/unidecode/x062.py | 252 | 4620 | data = (
'Lian ', # 0x00
'Nan ', # 0x01
'Mi ', # 0x02
'Tang ', # 0x03
'Jue ', # 0x04
'Gang ', # 0x05
'Gang ', # 0x06
'Gang ', # 0x07
'Ge ', # 0x08
'Yue ', # 0x09
'Wu ', # 0x0a
'Jian ', # 0x0b
'Xu ', # 0x0c
'Shu ', # 0x0d
'Rong ', # 0x0e
'Xi ', # 0x0f
'Cheng ', # 0x10
'Wo ', # 0x11
'Jie ', # 0x12
'Ge ', # 0x13
'Jian ', # 0x14
'Qiang ', # 0x15
'Huo ', # 0x16
'Qiang ', # 0x17
'Zhan ', # 0x18
'Dong ', # 0x19
'Qi ', # 0x1a
'Jia ', # 0x1b
'Die ', # 0x1c
'Zei ', # 0x1d
'Jia ', # 0x1e
'Ji ', # 0x1f
'Shi ', # 0x20
'Kan ', # 0x21
'Ji ', # 0x22
'Kui ', # 0x23
'Gai ', # 0x24
'Deng ', # 0x25
'Zhan ', # 0x26
'Chuang ', # 0x27
'Ge ', # 0x28
'Jian ', # 0x29
'Jie ', # 0x2a
'Yu ', # 0x2b
'Jian ', # 0x2c
'Yan ', # 0x2d
'Lu ', # 0x2e
'Xi ', # 0x2f
'Zhan ', # 0x30
'Xi ', # 0x31
'Xi ', # 0x32
'Chuo ', # 0x33
'Dai ', # 0x34
'Qu ', # 0x35
'Hu ', # 0x36
'Hu ', # 0x37
'Hu ', # 0x38
'E ', # 0x39
'Shi ', # 0x3a
'Li ', # 0x3b
'Mao ', # 0x3c
'Hu ', # 0x3d
'Li ', # 0x3e
'Fang ', # 0x3f
'Suo ', # 0x40
'Bian ', # 0x41
'Dian ', # 0x42
'Jiong ', # 0x43
'Shang ', # 0x44
'Yi ', # 0x45
'Yi ', # 0x46
'Shan ', # 0x47
'Hu ', # 0x48
'Fei ', # 0x49
'Yan ', # 0x4a
'Shou ', # 0x4b
'T ', # 0x4c
'Cai ', # 0x4d
'Zha ', # 0x4e
'Qiu ', # 0x4f
'Le ', # 0x50
'Bu ', # 0x51
'Ba ', # 0x52
'Da ', # 0x53
'Reng ', # 0x54
'Fu ', # 0x55
'Hameru ', # 0x56
'Zai ', # 0x57
'Tuo ', # 0x58
'Zhang ', # 0x59
'Diao ', # 0x5a
'Kang ', # 0x5b
'Yu ', # 0x5c
'Ku ', # 0x5d
'Han ', # 0x5e
'Shen ', # 0x5f
'Cha ', # 0x60
'Yi ', # 0x61
'Gu ', # 0x62
'Kou ', # 0x63
'Wu ', # 0x64
'Tuo ', # 0x65
'Qian ', # 0x66
'Zhi ', # 0x67
'Ren ', # 0x68
'Kuo ', # 0x69
'Men ', # 0x6a
'Sao ', # 0x6b
'Yang ', # 0x6c
'Niu ', # 0x6d
'Ban ', # 0x6e
'Che ', # 0x6f
'Rao ', # 0x70
'Xi ', # 0x71
'Qian ', # 0x72
'Ban ', # 0x73
'Jia ', # 0x74
'Yu ', # 0x75
'Fu ', # 0x76
'Ao ', # 0x77
'Xi ', # 0x78
'Pi ', # 0x79
'Zhi ', # 0x7a
'Zi ', # 0x7b
'E ', # 0x7c
'Dun ', # 0x7d
'Zhao ', # 0x7e
'Cheng ', # 0x7f
'Ji ', # 0x80
'Yan ', # 0x81
'Kuang ', # 0x82
'Bian ', # 0x83
'Chao ', # 0x84
'Ju ', # 0x85
'Wen ', # 0x86
'Hu ', # 0x87
'Yue ', # 0x88
'Jue ', # 0x89
'Ba ', # 0x8a
'Qin ', # 0x8b
'Zhen ', # 0x8c
'Zheng ', # 0x8d
'Yun ', # 0x8e
'Wan ', # 0x8f
'Nu ', # 0x90
'Yi ', # 0x91
'Shu ', # 0x92
'Zhua ', # 0x93
'Pou ', # 0x94
'Tou ', # 0x95
'Dou ', # 0x96
'Kang ', # 0x97
'Zhe ', # 0x98
'Pou ', # 0x99
'Fu ', # 0x9a
'Pao ', # 0x9b
'Ba ', # 0x9c
'Ao ', # 0x9d
'Ze ', # 0x9e
'Tuan ', # 0x9f
'Kou ', # 0xa0
'Lun ', # 0xa1
'Qiang ', # 0xa2
'[?] ', # 0xa3
'Hu ', # 0xa4
'Bao ', # 0xa5
'Bing ', # 0xa6
'Zhi ', # 0xa7
'Peng ', # 0xa8
'Tan ', # 0xa9
'Pu ', # 0xaa
'Pi ', # 0xab
'Tai ', # 0xac
'Yao ', # 0xad
'Zhen ', # 0xae
'Zha ', # 0xaf
'Yang ', # 0xb0
'Bao ', # 0xb1
'He ', # 0xb2
'Ni ', # 0xb3
'Yi ', # 0xb4
'Di ', # 0xb5
'Chi ', # 0xb6
'Pi ', # 0xb7
'Za ', # 0xb8
'Mo ', # 0xb9
'Mo ', # 0xba
'Shen ', # 0xbb
'Ya ', # 0xbc
'Chou ', # 0xbd
'Qu ', # 0xbe
'Min ', # 0xbf
'Chu ', # 0xc0
'Jia ', # 0xc1
'Fu ', # 0xc2
'Zhan ', # 0xc3
'Zhu ', # 0xc4
'Dan ', # 0xc5
'Chai ', # 0xc6
'Mu ', # 0xc7
'Nian ', # 0xc8
'La ', # 0xc9
'Fu ', # 0xca
'Pao ', # 0xcb
'Ban ', # 0xcc
'Pai ', # 0xcd
'Ling ', # 0xce
'Na ', # 0xcf
'Guai ', # 0xd0
'Qian ', # 0xd1
'Ju ', # 0xd2
'Tuo ', # 0xd3
'Ba ', # 0xd4
'Tuo ', # 0xd5
'Tuo ', # 0xd6
'Ao ', # 0xd7
'Ju ', # 0xd8
'Zhuo ', # 0xd9
'Pan ', # 0xda
'Zhao ', # 0xdb
'Bai ', # 0xdc
'Bai ', # 0xdd
'Di ', # 0xde
'Ni ', # 0xdf
'Ju ', # 0xe0
'Kuo ', # 0xe1
'Long ', # 0xe2
'Jian ', # 0xe3
'[?] ', # 0xe4
'Yong ', # 0xe5
'Lan ', # 0xe6
'Ning ', # 0xe7
'Bo ', # 0xe8
'Ze ', # 0xe9
'Qian ', # 0xea
'Hen ', # 0xeb
'Gua ', # 0xec
'Shi ', # 0xed
'Jie ', # 0xee
'Zheng ', # 0xef
'Nin ', # 0xf0
'Gong ', # 0xf1
'Gong ', # 0xf2
'Quan ', # 0xf3
'Shuan ', # 0xf4
'Cun ', # 0xf5
'Zan ', # 0xf6
'Kao ', # 0xf7
'Chi ', # 0xf8
'Xie ', # 0xf9
'Ce ', # 0xfa
'Hui ', # 0xfb
'Pin ', # 0xfc
'Zhuai ', # 0xfd
'Shi ', # 0xfe
'Na ', # 0xff
)
| gpl-3.0 |
SrNetoChan/Quantum-GIS | python/plugins/processing/algs/qgis/VectorLayerScatterplot.py | 30 | 4236 | # -*- coding: utf-8 -*-
"""
***************************************************************************
EquivalentNumField.py
---------------------
Date : January 2013
Copyright : (C) 2013 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'January 2013'
__copyright__ = '(C) 2013, Victor Olaya'
import warnings
from qgis.core import (QgsProcessingException,
QgsProcessingParameterFeatureSource,
QgsProcessingParameterField,
QgsProcessingParameterFileDestination)
from processing.algs.qgis.QgisAlgorithm import QgisAlgorithm
from processing.tools import vector
from qgis.PyQt.QtCore import QCoreApplication
class VectorLayerScatterplot(QgisAlgorithm):
    """
    Processing algorithm that plots two numeric attributes of a vector
    layer against each other and writes the result as an HTML
    scatterplot (rendered with plotly).
    """

    INPUT = 'INPUT'
    OUTPUT = 'OUTPUT'
    XFIELD = 'XFIELD'
    YFIELD = 'YFIELD'

    def group(self):
        return self.tr('Plots')

    def groupId(self):
        return 'plots'

    def __init__(self):
        super().__init__()

    def initAlgorithm(self, config=None):
        # One input layer, two numeric attribute pickers bound to it, and
        # the destination HTML file.
        self.addParameter(QgsProcessingParameterFeatureSource(self.INPUT,
                                                              self.tr('Input layer')))
        self.addParameter(QgsProcessingParameterField(self.XFIELD,
                                                      self.tr('X attribute'),
                                                      parentLayerParameterName=self.INPUT,
                                                      type=QgsProcessingParameterField.Numeric))
        self.addParameter(QgsProcessingParameterField(self.YFIELD,
                                                      self.tr('Y attribute'),
                                                      parentLayerParameterName=self.INPUT,
                                                      type=QgsProcessingParameterField.Numeric))
        self.addParameter(QgsProcessingParameterFileDestination(self.OUTPUT, self.tr('Scatterplot'), self.tr('HTML files (*.html)')))

    def name(self):
        return 'vectorlayerscatterplot'

    def displayName(self):
        return self.tr('Vector layer scatterplot')

    def processAlgorithm(self, parameters, context, feedback):
        try:
            # importing plotly throws Python warnings from within the library - filter these out
            with warnings.catch_warnings():
                warnings.filterwarnings("ignore", category=ResourceWarning)
                warnings.filterwarnings("ignore", category=ImportWarning)
                import plotly as plt
                import plotly.graph_objs as go
        except ImportError:
            raise QgsProcessingException(QCoreApplication.translate('VectorLayerScatterplot', 'This algorithm requires the Python “plotly” library. Please install this library and try again.'))

        source = self.parameterAsSource(parameters, self.INPUT, context)
        if source is None:
            raise QgsProcessingException(self.invalidSourceError(parameters, self.INPUT))

        xfieldname = self.parameterAsString(parameters, self.XFIELD, context)
        yfieldname = self.parameterAsString(parameters, self.YFIELD, context)

        output = self.parameterAsFileOutput(parameters, self.OUTPUT, context)

        # Fetch both attribute columns in a single pass over the layer.
        values = vector.values(source, xfieldname, yfieldname)
        data = [go.Scatter(x=values[xfieldname],
                           y=values[yfieldname],
                           mode='markers')]
        plt.offline.plot(data, filename=output, auto_open=False)

        return {self.OUTPUT: output}
| gpl-2.0 |
scifiswapnil/Project-LoCatr | lib/python2.7/site-packages/django/contrib/auth/mixins.py | 115 | 4085 | from django.conf import settings
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.contrib.auth.views import redirect_to_login
from django.core.exceptions import ImproperlyConfigured, PermissionDenied
from django.utils import six
from django.utils.encoding import force_text
class AccessMixin(object):
    """
    Abstract CBV mixin that gives access mixins the same customizable
    functionality.
    """
    login_url = None
    permission_denied_message = ''
    raise_exception = False
    redirect_field_name = REDIRECT_FIELD_NAME

    def get_login_url(self):
        """
        Override this method to override the login_url attribute.
        """
        login_url = self.login_url or settings.LOGIN_URL
        if login_url:
            return force_text(login_url)
        raise ImproperlyConfigured(
            '{0} is missing the login_url attribute. Define {0}.login_url, settings.LOGIN_URL, or override '
            '{0}.get_login_url().'.format(self.__class__.__name__)
        )

    def get_permission_denied_message(self):
        """
        Override this method to override the permission_denied_message attribute.
        """
        return self.permission_denied_message

    def get_redirect_field_name(self):
        """
        Override this method to override the redirect_field_name attribute.
        """
        return self.redirect_field_name

    def handle_no_permission(self):
        # Either raise (when configured to) or redirect to the login page
        # with the current path preserved in the redirect field.
        if not self.raise_exception:
            return redirect_to_login(
                self.request.get_full_path(),
                self.get_login_url(),
                self.get_redirect_field_name(),
            )
        raise PermissionDenied(self.get_permission_denied_message())
class LoginRequiredMixin(AccessMixin):
    """
    CBV mixin which verifies that the current user is authenticated.
    """
    def dispatch(self, request, *args, **kwargs):
        # Authenticated users continue down the normal dispatch chain;
        # anonymous users get the configured no-permission handling.
        if request.user.is_authenticated:
            return super(LoginRequiredMixin, self).dispatch(request, *args, **kwargs)
        return self.handle_no_permission()
class PermissionRequiredMixin(AccessMixin):
    """
    CBV mixin which verifies that the current user has all specified
    permissions.
    """
    permission_required = None

    def get_permission_required(self):
        """
        Override this method to override the permission_required attribute.
        Must return an iterable.
        """
        perms = self.permission_required
        if perms is None:
            raise ImproperlyConfigured(
                '{0} is missing the permission_required attribute. Define {0}.permission_required, or override '
                '{0}.get_permission_required().'.format(self.__class__.__name__)
            )
        # A bare string means a single permission.
        if isinstance(perms, six.string_types):
            return (perms, )
        return perms

    def has_permission(self):
        """
        Override this method to customize the way permissions are checked.
        """
        return self.request.user.has_perms(self.get_permission_required())

    def dispatch(self, request, *args, **kwargs):
        if self.has_permission():
            return super(PermissionRequiredMixin, self).dispatch(request, *args, **kwargs)
        return self.handle_no_permission()
class UserPassesTestMixin(AccessMixin):
    """
    CBV Mixin that allows you to define a test function which must return True
    if the current user can access the view.
    """
    def test_func(self):
        raise NotImplementedError(
            '{0} is missing the implementation of the test_func() method.'.format(self.__class__.__name__)
        )

    def get_test_func(self):
        """
        Override this method to use a different test_func method.
        """
        return self.test_func

    def dispatch(self, request, *args, **kwargs):
        # Run the (possibly overridden) test function before dispatching.
        if not self.get_test_func()():
            return self.handle_no_permission()
        return super(UserPassesTestMixin, self).dispatch(request, *args, **kwargs)
| mit |
Xekyo/bitcoin | test/functional/decodescript.py | 22 | 13449 | #!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test decoding scripts via decodescript RPC command."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.mininode import *
from io import BytesIO
class DecodeScriptTest(BitcoinTestFramework):
    """Check "asm" script decoding via the decodescript and
    decoderawtransaction RPCs against one regtest node."""

    def set_test_params(self):
        self.setup_clean_chain = True
        self.num_nodes = 1

    def decodescript_script_sig(self):
        """Decode scriptSigs of every standard transaction type."""
        signature = '304502207fa7a6d1e0ee81132a269ad84e68d695483745cde8b541e3bf630749894e342a022100c1f7ab20e13e22fb95281a870f3dcf38d782e53023ee313d741ad0cfbc0c509001'
        push_signature = '48' + signature
        public_key = '03b0da749730dc9b4b1f4a14d6902877a92541f5368778853d9c4a0cb7802dcfb2'
        push_public_key = '21' + public_key

        # below are test cases for all of the standard transaction types

        # 1) P2PK scriptSig
        # the scriptSig of a public key scriptPubKey simply pushes a signature onto the stack
        rpc_result = self.nodes[0].decodescript(push_signature)
        assert_equal(signature, rpc_result['asm'])

        # 2) P2PKH scriptSig
        rpc_result = self.nodes[0].decodescript(push_signature + push_public_key)
        assert_equal(signature + ' ' + public_key, rpc_result['asm'])

        # 3) multisig scriptSig
        # this also tests the leading portion of a P2SH multisig scriptSig
        # OP_0 <A sig> <B sig>
        rpc_result = self.nodes[0].decodescript('00' + push_signature + push_signature)
        assert_equal('0 ' + signature + ' ' + signature, rpc_result['asm'])

        # 4) P2SH scriptSig
        # an empty P2SH redeemScript is valid and makes for a very simple test case.
        # thus, such a spending scriptSig would just need to pass the outer redeemScript
        # hash test and leave true on the top of the stack.
        rpc_result = self.nodes[0].decodescript('5100')
        assert_equal('1 0', rpc_result['asm'])

        # 5) null data scriptSig - no such thing because null data scripts can not be spent.
        # thus, no test case for that standard transaction type is here.

    def decodescript_script_pub_key(self):
        """Decode scriptPubKeys of every standard transaction type."""
        public_key = '03b0da749730dc9b4b1f4a14d6902877a92541f5368778853d9c4a0cb7802dcfb2'
        push_public_key = '21' + public_key
        public_key_hash = '11695b6cd891484c2d49ec5aa738ec2b2f897777'
        push_public_key_hash = '14' + public_key_hash

        # below are test cases for all of the standard transaction types

        # 1) P2PK scriptPubKey
        # <pubkey> OP_CHECKSIG
        rpc_result = self.nodes[0].decodescript(push_public_key + 'ac')
        assert_equal(public_key + ' OP_CHECKSIG', rpc_result['asm'])

        # 2) P2PKH scriptPubKey
        # OP_DUP OP_HASH160 <PubKeyHash> OP_EQUALVERIFY OP_CHECKSIG
        rpc_result = self.nodes[0].decodescript('76a9' + push_public_key_hash + '88ac')
        assert_equal('OP_DUP OP_HASH160 ' + public_key_hash + ' OP_EQUALVERIFY OP_CHECKSIG', rpc_result['asm'])

        # 3) multisig scriptPubKey
        # <m> <A pubkey> <B pubkey> <C pubkey> <n> OP_CHECKMULTISIG
        # just imagine that the pub keys used below are different.
        # for our purposes here it does not matter that they are the same even though it is unrealistic.
        rpc_result = self.nodes[0].decodescript('52' + push_public_key + push_public_key + push_public_key + '53ae')
        assert_equal('2 ' + public_key + ' ' + public_key + ' ' + public_key + ' 3 OP_CHECKMULTISIG', rpc_result['asm'])

        # 4) P2SH scriptPubKey
        # OP_HASH160 <Hash160(redeemScript)> OP_EQUAL.
        # push_public_key_hash here should actually be the hash of a redeem script.
        # but this works the same for purposes of this test.
        rpc_result = self.nodes[0].decodescript('a9' + push_public_key_hash + '87')
        assert_equal('OP_HASH160 ' + public_key_hash + ' OP_EQUAL', rpc_result['asm'])

        # 5) null data scriptPubKey
        # use a signature look-alike here to make sure that we do not decode random data as a signature.
        # this matters if/when signature sighash decoding comes along.
        # would want to make sure that no such decoding takes place in this case.
        signature_imposter = '48304502207fa7a6d1e0ee81132a269ad84e68d695483745cde8b541e3bf630749894e342a022100c1f7ab20e13e22fb95281a870f3dcf38d782e53023ee313d741ad0cfbc0c509001'
        # OP_RETURN <data>
        rpc_result = self.nodes[0].decodescript('6a' + signature_imposter)
        assert_equal('OP_RETURN ' + signature_imposter[2:], rpc_result['asm'])

        # 6) a CLTV redeem script. redeem scripts are in-effect scriptPubKey scripts, so adding a test here.
        # OP_NOP2 is also known as OP_CHECKLOCKTIMEVERIFY.
        # just imagine that the pub keys used below are different.
        # for our purposes here it does not matter that they are the same even though it is unrealistic.
        #
        # OP_IF
        #   <receiver-pubkey> OP_CHECKSIGVERIFY
        # OP_ELSE
        #   <lock-until> OP_CHECKLOCKTIMEVERIFY OP_DROP
        # OP_ENDIF
        # <sender-pubkey> OP_CHECKSIG
        #
        # lock until block 500,000
        rpc_result = self.nodes[0].decodescript('63' + push_public_key + 'ad670320a107b17568' + push_public_key + 'ac')
        assert_equal('OP_IF ' + public_key + ' OP_CHECKSIGVERIFY OP_ELSE 500000 OP_CHECKLOCKTIMEVERIFY OP_DROP OP_ENDIF ' + public_key + ' OP_CHECKSIG', rpc_result['asm'])

    def decoderawtransaction_asm_sighashtype(self):
        """Test decoding scripts via RPC command "decoderawtransaction".

        This test is in with the "decodescript" tests because they are testing the same "asm" script decodes.
        """

        # this test case uses a random plain vanilla mainnet transaction with a single P2PKH input and output
        tx = '0100000001696a20784a2c70143f634e95227dbdfdf0ecd51647052e70854512235f5986ca010000008a47304402207174775824bec6c2700023309a168231ec80b82c6069282f5133e6f11cbb04460220570edc55c7c5da2ca687ebd0372d3546ebc3f810516a002350cac72dfe192dfb014104d3f898e6487787910a690410b7a917ef198905c27fb9d3b0a42da12aceae0544fc7088d239d9a48f2828a15a09e84043001f27cc80d162cb95404e1210161536ffffffff0100e1f505000000001976a914eb6c6e0cdb2d256a32d97b8df1fc75d1920d9bca88ac00000000'
        rpc_result = self.nodes[0].decoderawtransaction(tx)
        assert_equal('304402207174775824bec6c2700023309a168231ec80b82c6069282f5133e6f11cbb04460220570edc55c7c5da2ca687ebd0372d3546ebc3f810516a002350cac72dfe192dfb[ALL] 04d3f898e6487787910a690410b7a917ef198905c27fb9d3b0a42da12aceae0544fc7088d239d9a48f2828a15a09e84043001f27cc80d162cb95404e1210161536', rpc_result['vin'][0]['scriptSig']['asm'])

        # this test case uses a mainnet transaction that has a P2SH input and both P2PKH and P2SH outputs.
        # it's from James D'Angelo's awesome introductory videos about multisig: https://www.youtube.com/watch?v=zIbUSaZBJgU and https://www.youtube.com/watch?v=OSA1pwlaypc
        # verify that we have not altered scriptPubKey decoding.
        tx = '01000000018d1f5635abd06e2c7e2ddf58dc85b3de111e4ad6e0ab51bb0dcf5e84126d927300000000fdfe0000483045022100ae3b4e589dfc9d48cb82d41008dc5fa6a86f94d5c54f9935531924602730ab8002202f88cf464414c4ed9fa11b773c5ee944f66e9b05cc1e51d97abc22ce098937ea01483045022100b44883be035600e9328a01b66c7d8439b74db64187e76b99a68f7893b701d5380220225bf286493e4c4adcf928c40f785422572eb232f84a0b83b0dea823c3a19c75014c695221020743d44be989540d27b1b4bbbcfd17721c337cb6bc9af20eb8a32520b393532f2102c0120a1dda9e51a938d39ddd9fe0ebc45ea97e1d27a7cbd671d5431416d3dd87210213820eb3d5f509d7438c9eeecb4157b2f595105e7cd564b3cdbb9ead3da41eed53aeffffffff02611e0000000000001976a914dc863734a218bfe83ef770ee9d41a27f824a6e5688acee2a02000000000017a9142a5edea39971049a540474c6a99edf0aa4074c588700000000'
        rpc_result = self.nodes[0].decoderawtransaction(tx)
        assert_equal('8e3730608c3b0bb5df54f09076e196bc292a8e39a78e73b44b6ba08c78f5cbb0', rpc_result['txid'])
        assert_equal('0 3045022100ae3b4e589dfc9d48cb82d41008dc5fa6a86f94d5c54f9935531924602730ab8002202f88cf464414c4ed9fa11b773c5ee944f66e9b05cc1e51d97abc22ce098937ea[ALL] 3045022100b44883be035600e9328a01b66c7d8439b74db64187e76b99a68f7893b701d5380220225bf286493e4c4adcf928c40f785422572eb232f84a0b83b0dea823c3a19c75[ALL] 5221020743d44be989540d27b1b4bbbcfd17721c337cb6bc9af20eb8a32520b393532f2102c0120a1dda9e51a938d39ddd9fe0ebc45ea97e1d27a7cbd671d5431416d3dd87210213820eb3d5f509d7438c9eeecb4157b2f595105e7cd564b3cdbb9ead3da41eed53ae', rpc_result['vin'][0]['scriptSig']['asm'])
        assert_equal('OP_DUP OP_HASH160 dc863734a218bfe83ef770ee9d41a27f824a6e56 OP_EQUALVERIFY OP_CHECKSIG', rpc_result['vout'][0]['scriptPubKey']['asm'])
        assert_equal('OP_HASH160 2a5edea39971049a540474c6a99edf0aa4074c58 OP_EQUAL', rpc_result['vout'][1]['scriptPubKey']['asm'])

        # Keep a parsed copy around: the scriptSig tests below mutate it.
        txSave = CTransaction()
        txSave.deserialize(BytesIO(hex_str_to_bytes(tx)))

        # make sure that a specifically crafted op_return value will not pass all the IsDERSignature checks and then get decoded as a sighash type
        tx = '01000000015ded05872fdbda629c7d3d02b194763ce3b9b1535ea884e3c8e765d42e316724020000006b48304502204c10d4064885c42638cbff3585915b322de33762598321145ba033fc796971e2022100bb153ad3baa8b757e30a2175bd32852d2e1cb9080f84d7e32fcdfd667934ef1b012103163c0ff73511ea1743fb5b98384a2ff09dd06949488028fd819f4d83f56264efffffffff0200000000000000000b6a0930060201000201000180380100000000001976a9141cabd296e753837c086da7a45a6c2fe0d49d7b7b88ac00000000'
        rpc_result = self.nodes[0].decoderawtransaction(tx)
        assert_equal('OP_RETURN 300602010002010001', rpc_result['vout'][0]['scriptPubKey']['asm'])

        # verify that we have not altered scriptPubKey processing even of a specially crafted P2PKH pubkeyhash and P2SH redeem script hash that is made to pass the der signature checks
        tx = '01000000018d1f5635abd06e2c7e2ddf58dc85b3de111e4ad6e0ab51bb0dcf5e84126d927300000000fdfe0000483045022100ae3b4e589dfc9d48cb82d41008dc5fa6a86f94d5c54f9935531924602730ab8002202f88cf464414c4ed9fa11b773c5ee944f66e9b05cc1e51d97abc22ce098937ea01483045022100b44883be035600e9328a01b66c7d8439b74db64187e76b99a68f7893b701d5380220225bf286493e4c4adcf928c40f785422572eb232f84a0b83b0dea823c3a19c75014c695221020743d44be989540d27b1b4bbbcfd17721c337cb6bc9af20eb8a32520b393532f2102c0120a1dda9e51a938d39ddd9fe0ebc45ea97e1d27a7cbd671d5431416d3dd87210213820eb3d5f509d7438c9eeecb4157b2f595105e7cd564b3cdbb9ead3da41eed53aeffffffff02611e0000000000001976a914301102070101010101010102060101010101010188acee2a02000000000017a91430110207010101010101010206010101010101018700000000'
        rpc_result = self.nodes[0].decoderawtransaction(tx)
        assert_equal('OP_DUP OP_HASH160 3011020701010101010101020601010101010101 OP_EQUALVERIFY OP_CHECKSIG', rpc_result['vout'][0]['scriptPubKey']['asm'])
        assert_equal('OP_HASH160 3011020701010101010101020601010101010101 OP_EQUAL', rpc_result['vout'][1]['scriptPubKey']['asm'])

        # some more full transaction tests of varying specific scriptSigs. used instead of
        # tests in decodescript_script_sig because the decodescript RPC is specifically
        # for working on scriptPubKeys (argh!).
        push_signature = bytes_to_hex_str(txSave.vin[0].scriptSig)[2:(0x48*2+4)]
        signature = push_signature[2:]
        der_signature = signature[:-2]
        signature_sighash_decoded = der_signature + '[ALL]'
        signature_2 = der_signature + '82'
        push_signature_2 = '48' + signature_2
        signature_2_sighash_decoded = der_signature + '[NONE|ANYONECANPAY]'

        # 1) P2PK scriptSig
        txSave.vin[0].scriptSig = hex_str_to_bytes(push_signature)
        rpc_result = self.nodes[0].decoderawtransaction(bytes_to_hex_str(txSave.serialize()))
        assert_equal(signature_sighash_decoded, rpc_result['vin'][0]['scriptSig']['asm'])

        # make sure that the sighash decodes come out correctly for a more complex / lesser used case.
        txSave.vin[0].scriptSig = hex_str_to_bytes(push_signature_2)
        rpc_result = self.nodes[0].decoderawtransaction(bytes_to_hex_str(txSave.serialize()))
        assert_equal(signature_2_sighash_decoded, rpc_result['vin'][0]['scriptSig']['asm'])

        # 2) multisig scriptSig
        txSave.vin[0].scriptSig = hex_str_to_bytes('00' + push_signature + push_signature_2)
        rpc_result = self.nodes[0].decoderawtransaction(bytes_to_hex_str(txSave.serialize()))
        assert_equal('0 ' + signature_sighash_decoded + ' ' + signature_2_sighash_decoded, rpc_result['vin'][0]['scriptSig']['asm'])

        # 3) test a scriptSig that contains more than push operations.
        # in fact, it contains an OP_RETURN with data specially crafted to cause improper decode if the code does not catch it.
        txSave.vin[0].scriptSig = hex_str_to_bytes('6a143011020701010101010101020601010101010101')
        rpc_result = self.nodes[0].decoderawtransaction(bytes_to_hex_str(txSave.serialize()))
        assert_equal('OP_RETURN 3011020701010101010101020601010101010101', rpc_result['vin'][0]['scriptSig']['asm'])

    def run_test(self):
        self.decodescript_script_sig()
        self.decodescript_script_pub_key()
        self.decoderawtransaction_asm_sighashtype()
# Entry point when the file is executed directly by the test runner.
if __name__ == '__main__':
    DecodeScriptTest().main()
| mit |
yewang15215/django | tests/fixtures_regress/models.py | 281 | 8611 | from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.db import models
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Animal(models.Model):
    # Simple model exercised by the fixtures regression tests.
    name = models.CharField(max_length=150)
    latin_name = models.CharField(max_length=150)
    count = models.IntegerField()
    weight = models.FloatField()

    # use a non-default name for the default manager
    specimens = models.Manager()

    def __str__(self):
        return self.name
class Plant(models.Model):
    name = models.CharField(max_length=150)

    class Meta:
        # For testing when upper case letter in app name; regression for #4057
        db_table = "Fixtures_regress_plant"
@python_2_unicode_compatible
class Stuff(models.Model):
    # Both fields are nullable so fixtures may omit either value.
    name = models.CharField(max_length=20, null=True)
    owner = models.ForeignKey(User, models.SET_NULL, null=True)

    def __str__(self):
        # six.text_type() renders an unset field as 'None'.
        return six.text_type(self.name) + ' is owned by ' + six.text_type(self.owner)
class Absolute(models.Model):
name = models.CharField(max_length=40)
class Parent(models.Model):
name = models.CharField(max_length=10)
class Meta:
ordering = ('id',)
class Child(Parent):
data = models.CharField(max_length=10)
# Models to regression test #7572, #20820
class Channel(models.Model):
name = models.CharField(max_length=255)
class Article(models.Model):
title = models.CharField(max_length=255)
channels = models.ManyToManyField(Channel)
class Meta:
ordering = ('id',)
# Subclass of a model with a ManyToManyField for test_ticket_20820
class SpecialArticle(Article):
pass
# Models to regression test #22421
class CommonFeature(Article):
class Meta:
abstract = True
class Feature(CommonFeature):
pass
# Models to regression test #11428
@python_2_unicode_compatible
class Widget(models.Model):
name = models.CharField(max_length=255)
class Meta:
ordering = ('name',)
def __str__(self):
return self.name
class WidgetProxy(Widget):
class Meta:
proxy = True
# Check for forward references in FKs and M2Ms with natural keys
class TestManager(models.Manager):
def get_by_natural_key(self, key):
return self.get(name=key)
@python_2_unicode_compatible
class Store(models.Model):
    objects = TestManager()  # manager provides get_by_natural_key()
    name = models.CharField(max_length=255)
    # Self-referential FK; nullable so a root store can exist.
    main = models.ForeignKey('self', models.SET_NULL, null=True)

    class Meta:
        ordering = ('name',)

    def __str__(self):
        return self.name

    def natural_key(self):
        # Serialized by name rather than by primary key.
        return (self.name,)
@python_2_unicode_compatible
class Person(models.Model):
    # Natural-key model with an artificial dependency on Store (see below).
    objects = TestManager()
    name = models.CharField(max_length=255)

    class Meta:
        ordering = ('name',)

    def __str__(self):
        return self.name

    # Person doesn't actually have a dependency on store, but we need to define
    # one to test the behavior of the dependency resolution algorithm.
    def natural_key(self):
        return (self.name,)
    natural_key.dependencies = ['fixtures_regress.store']
@python_2_unicode_compatible
class Book(models.Model):
    name = models.CharField(max_length=255)
    author = models.ForeignKey(Person, models.CASCADE)
    stores = models.ManyToManyField(Store)

    class Meta:
        ordering = ('name',)

    def __str__(self):
        # e.g. "Title by Author (available at Store A, Store B)"
        return '%s by %s (available at %s)' % (
            self.name,
            self.author.name,
            ', '.join(s.name for s in self.stores.all())
        )
class NKManager(models.Manager):
def get_by_natural_key(self, data):
return self.get(data=data)
@python_2_unicode_compatible
class NKChild(Parent):
data = models.CharField(max_length=10, unique=True)
objects = NKManager()
def natural_key(self):
return (self.data,)
def __str__(self):
return 'NKChild %s:%s' % (self.name, self.data)
@python_2_unicode_compatible
class RefToNKChild(models.Model):
text = models.CharField(max_length=10)
nk_fk = models.ForeignKey(NKChild, models.CASCADE, related_name='ref_fks')
nk_m2m = models.ManyToManyField(NKChild, related_name='ref_m2ms')
def __str__(self):
return '%s: Reference to %s [%s]' % (
self.text,
self.nk_fk,
', '.join(str(o) for o in self.nk_m2m.all())
)
# Some models with pathological circular dependencies
class Circle1(models.Model):
name = models.CharField(max_length=255)
def natural_key(self):
return (self.name,)
natural_key.dependencies = ['fixtures_regress.circle2']
class Circle2(models.Model):
name = models.CharField(max_length=255)
def natural_key(self):
return (self.name,)
natural_key.dependencies = ['fixtures_regress.circle1']
class Circle3(models.Model):
name = models.CharField(max_length=255)
def natural_key(self):
return (self.name,)
natural_key.dependencies = ['fixtures_regress.circle3']
class Circle4(models.Model):
name = models.CharField(max_length=255)
def natural_key(self):
return (self.name,)
natural_key.dependencies = ['fixtures_regress.circle5']
class Circle5(models.Model):
name = models.CharField(max_length=255)
def natural_key(self):
return (self.name,)
natural_key.dependencies = ['fixtures_regress.circle6']
class Circle6(models.Model):
name = models.CharField(max_length=255)
def natural_key(self):
return (self.name,)
natural_key.dependencies = ['fixtures_regress.circle4']
class ExternalDependency(models.Model):
name = models.CharField(max_length=255)
def natural_key(self):
return (self.name,)
natural_key.dependencies = ['fixtures_regress.book']
# Model for regression test of #11101
class Thingy(models.Model):
name = models.CharField(max_length=255)
class M2MToSelf(models.Model):
parent = models.ManyToManyField("self", blank=True)
@python_2_unicode_compatible
class BaseNKModel(models.Model):
    """
    Base model with a natural_key and a manager with `get_by_natural_key`
    """
    data = models.CharField(max_length=20, unique=True)
    objects = NKManager()

    class Meta:
        abstract = True

    def __str__(self):
        return self.data

    def natural_key(self):
        # `data` is unique, so it is a valid natural key on its own.
        return (self.data,)
class M2MSimpleA(BaseNKModel):
b_set = models.ManyToManyField("M2MSimpleB")
class M2MSimpleB(BaseNKModel):
pass
class M2MSimpleCircularA(BaseNKModel):
b_set = models.ManyToManyField("M2MSimpleCircularB")
class M2MSimpleCircularB(BaseNKModel):
a_set = models.ManyToManyField("M2MSimpleCircularA")
class M2MComplexA(BaseNKModel):
b_set = models.ManyToManyField("M2MComplexB", through="M2MThroughAB")
class M2MComplexB(BaseNKModel):
pass
class M2MThroughAB(BaseNKModel):
a = models.ForeignKey(M2MComplexA, models.CASCADE)
b = models.ForeignKey(M2MComplexB, models.CASCADE)
class M2MComplexCircular1A(BaseNKModel):
b_set = models.ManyToManyField("M2MComplexCircular1B",
through="M2MCircular1ThroughAB")
class M2MComplexCircular1B(BaseNKModel):
c_set = models.ManyToManyField("M2MComplexCircular1C",
through="M2MCircular1ThroughBC")
class M2MComplexCircular1C(BaseNKModel):
a_set = models.ManyToManyField("M2MComplexCircular1A",
through="M2MCircular1ThroughCA")
class M2MCircular1ThroughAB(BaseNKModel):
a = models.ForeignKey(M2MComplexCircular1A, models.CASCADE)
b = models.ForeignKey(M2MComplexCircular1B, models.CASCADE)
class M2MCircular1ThroughBC(BaseNKModel):
b = models.ForeignKey(M2MComplexCircular1B, models.CASCADE)
c = models.ForeignKey(M2MComplexCircular1C, models.CASCADE)
class M2MCircular1ThroughCA(BaseNKModel):
c = models.ForeignKey(M2MComplexCircular1C, models.CASCADE)
a = models.ForeignKey(M2MComplexCircular1A, models.CASCADE)
class M2MComplexCircular2A(BaseNKModel):
b_set = models.ManyToManyField("M2MComplexCircular2B",
through="M2MCircular2ThroughAB")
class M2MComplexCircular2B(BaseNKModel):
def natural_key(self):
return (self.data,)
# Fake the dependency for a circularity
natural_key.dependencies = ["fixtures_regress.M2MComplexCircular2A"]
class M2MCircular2ThroughAB(BaseNKModel):
a = models.ForeignKey(M2MComplexCircular2A, models.CASCADE)
b = models.ForeignKey(M2MComplexCircular2B, models.CASCADE)
| bsd-3-clause |
jlspyaozhongkai/Uter | third_party_backup/Python-2.7.9/Lib/ctypes/test/test_simplesubclasses.py | 117 | 1355 | import unittest
from ctypes import *
class MyInt(c_int):
    """c_int subclass comparable by value via Python 2 three-way comparison."""

    def __cmp__(self, other):
        # Anything that is not exactly a MyInt sorts lower.
        if type(other) is not MyInt:
            return -1
        return cmp(self.value, other.value)

    def __hash__(self):  # Silence Py3k warning
        return hash(self.value)
class Test(unittest.TestCase):
    # Tests for subclasses of ctypes simple types (Python 2: __cmp__ drives
    # the ==/!= operators used by assertEqual below).

    def test_compare(self):
        self.assertEqual(MyInt(3), MyInt(3))
        self.assertNotEqual(MyInt(42), MyInt(43))

    def test_ignore_retval(self):
        # Test if the return value of a callback is ignored
        # if restype is None
        proto = CFUNCTYPE(None)

        def func():
            return (1, "abc", None)

        cb = proto(func)
        self.assertEqual(None, cb())

    def test_int_callback(self):
        # Callback arguments are converted to the declared argtypes, so the
        # Python callback receives MyInt (or plain int) instances.
        args = []

        def func(arg):
            args.append(arg)
            return arg

        cb = CFUNCTYPE(None, MyInt)(func)
        self.assertEqual(None, cb(42))
        self.assertEqual(type(args[-1]), MyInt)

        cb = CFUNCTYPE(c_int, c_int)(func)
        self.assertEqual(42, cb(42))
        self.assertEqual(type(args[-1]), int)

    def test_int_struct(self):
        # A simple-type subclass is usable as a Structure field type.
        class X(Structure):
            _fields_ = [("x", MyInt)]

        self.assertEqual(X().x, MyInt())
        s = X()
        s.x = MyInt(42)
        self.assertEqual(s.x, MyInt(42))
if __name__ == "__main__":
unittest.main()
| gpl-3.0 |
waltervh/BornAgain | Examples/python/simulation/ex03_InterferenceFunctions/ApproximationLMA.py | 2 | 2597 | """
Cylinders of two different sizes in Local Monodisperse Approximation
"""
import bornagain as ba
from bornagain import deg, angstrom, nm
def get_sample():
    """
    Returns a sample with cylinders of two different sizes on a substrate.
    The cylinder positions are modelled in Local Monodisperse Approximation.
    """
    # Materials; the two numeric arguments are presumably the (delta, beta)
    # refractive-index terms -- confirm against the BornAgain docs.
    m_ambience = ba.HomogeneousMaterial("Air", 0.0, 0.0)
    m_substrate = ba.HomogeneousMaterial("Substrate", 6e-6, 2e-8)
    m_particle = ba.HomogeneousMaterial("Particle", 6e-4, 2e-8)

    # cylindrical particle 1 (height equals radius)
    radius1 = 5*nm
    height1 = radius1
    cylinder_ff1 = ba.FormFactorCylinder(radius1, height1)
    cylinder1 = ba.Particle(m_particle, cylinder_ff1)

    # cylindrical particle 2 (height equals radius)
    radius2 = 8*nm
    height2 = radius2
    cylinder_ff2 = ba.FormFactorCylinder(radius2, height2)
    cylinder2 = ba.Particle(m_particle, cylinder_ff2)

    # interference function1: radial paracrystal for the small cylinders
    interference1 = ba.InterferenceFunctionRadialParaCrystal(
        16.8*nm, 1e3*nm)
    pdf = ba.FTDistribution1DGauss(3 * nm)
    interference1.setProbabilityDistribution(pdf)

    # interference function2: radial paracrystal for the large cylinders;
    # shares the same Gaussian probability distribution object
    interference2 = ba.InterferenceFunctionRadialParaCrystal(
        22.8*nm, 1e3*nm)
    interference2.setProbabilityDistribution(pdf)

    # assembling the sample: one layout per size (that is the LMA),
    # with abundances 0.8 and 0.2
    particle_layout1 = ba.ParticleLayout()
    particle_layout1.addParticle(cylinder1, 0.8)
    particle_layout1.setInterferenceFunction(interference1)
    particle_layout2 = ba.ParticleLayout()
    particle_layout2.addParticle(cylinder2, 0.2)
    particle_layout2.setInterferenceFunction(interference2)

    air_layer = ba.Layer(m_ambience)
    air_layer.addLayout(particle_layout1)
    air_layer.addLayout(particle_layout2)
    substrate_layer = ba.Layer(m_substrate)

    multi_layer = ba.MultiLayer()
    multi_layer.addLayer(air_layer)
    multi_layer.addLayer(substrate_layer)
    return multi_layer
def get_simulation():
    """Create and return a GISAXS simulation with beam and detector defined."""
    n_phi, phi_min, phi_max = 200, 0.0*deg, 2.0*deg
    n_alpha, alpha_min, alpha_max = 200, 0.0*deg, 2.0*deg
    sim = ba.GISASSimulation()
    sim.setDetectorParameters(n_phi, phi_min, phi_max,
                              n_alpha, alpha_min, alpha_max)
    sim.setBeamParameters(1.0*angstrom, 0.2*deg, 0.0*deg)
    return sim
def run_simulation():
    """Assemble the sample, run the simulation, and return its result."""
    sim = get_simulation()
    sim.setSample(get_sample())
    sim.runSimulation()
    return sim.result()
if __name__ == '__main__':
result = run_simulation()
ba.plot_simulation_result(result, cmap='jet', aspect='auto')
| gpl-3.0 |
qenter/vlc-android | toolchains/arm/lib/python2.7/idlelib/EditorWindow.py | 35 | 64809 | import sys
import os
import re
import imp
from Tkinter import *
import tkSimpleDialog
import tkMessageBox
import webbrowser
from idlelib.MultiCall import MultiCallCreator
from idlelib import idlever
from idlelib import WindowList
from idlelib import SearchDialog
from idlelib import GrepDialog
from idlelib import ReplaceDialog
from idlelib import PyParse
from idlelib.configHandler import idleConf
from idlelib import aboutDialog, textView, configDialog
from idlelib import macosxSupport
# The default tab setting for a Text widget, in average-width characters.
TK_TABWIDTH_DEFAULT = 8
def _sphinx_version():
"Format sys.version_info to produce the Sphinx version string used to install the chm docs"
major, minor, micro, level, serial = sys.version_info
release = '%s%s' % (major, minor)
if micro:
release += '%s' % (micro,)
if level == 'candidate':
release += 'rc%s' % (serial,)
elif level != 'final':
release += '%s%s' % (level[0], serial)
return release
def _find_module(fullname, path=None):
    """Version of imp.find_module() that handles hierarchical module names.

    Walks each dotted component with imp.find_module()/imp.load_module()
    and returns the (file, filename, description) triple for the last
    component without loading it.  Python 2 only: relies on the deprecated
    ``imp`` module and Py2 raise syntax.
    """
    file = None
    for tgt in fullname.split('.'):
        if file is not None:
            file.close()            # close intermediate files
        (file, filename, descr) = imp.find_module(tgt, path)
        if descr[2] == imp.PY_SOURCE:
            break                   # find but not load the source file
        # Intermediate component: load it so its __path__ can be searched.
        module = imp.load_module(tgt, file, filename, descr)
        try:
            path = module.__path__
        except AttributeError:
            raise ImportError, 'No source for module ' + module.__name__
    if descr[2] != imp.PY_SOURCE:
        # If all of the above fails and didn't raise an exception, fall back
        # to a straight import which can find __init__.py in a package.
        m = __import__(fullname)
        try:
            filename = m.__file__
        except AttributeError:
            pass
        else:
            file = None
            base, ext = os.path.splitext(filename)
            if ext == '.pyc':
                # Map the compiled file back to its source.
                ext = '.py'
            filename = base + ext
            descr = filename, None, imp.PY_SOURCE
    return file, filename, descr
class HelpDialog(object):
    """Modeless singleton viewer for idlelib's help.txt."""

    def __init__(self):
        self.parent = None  # parent of help window
        self.dlg = None     # the help window itself

    def display(self, parent, near=None):
        """ Display the help dialog.

            parent - parent widget for the help window

            near - a Toplevel widget (e.g. EditorWindow or PyShell)
                   to use as a reference for placing the help window
        """
        if self.dlg is None:
            self.show_dialog(parent)
        if near:
            self.nearwindow(near)

    def show_dialog(self, parent):
        # Build the modeless text viewer on help.txt and arrange for
        # self.destroy() to reset state when the window is closed.
        self.parent = parent
        fn = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'help.txt')
        self.dlg = dlg = textView.view_file(parent, 'Help', fn, modal=False)
        dlg.bind('<Destroy>', self.destroy, '+')

    def nearwindow(self, near):
        # Place the help dialog near the window specified by parent.
        # Note - this may not reposition the window in Metacity
        # if "/apps/metacity/general/disable_workarounds" is enabled
        dlg = self.dlg
        geom = (near.winfo_rootx() + 10, near.winfo_rooty() + 10)
        dlg.withdraw()
        dlg.geometry("=+%d+%d" % geom)
        dlg.deiconify()
        dlg.lift()

    def destroy(self, ev=None):
        # Drop references so the next display() builds a fresh window.
        self.dlg = None
        self.parent = None
helpDialog = HelpDialog() # singleton instance
class EditorWindow(object):
from idlelib.Percolator import Percolator
from idlelib.ColorDelegator import ColorDelegator
from idlelib.UndoDelegator import UndoDelegator
from idlelib.IOBinding import IOBinding, filesystemencoding, encoding
from idlelib import Bindings
from Tkinter import Toplevel
from idlelib.MultiStatusBar import MultiStatusBar
help_url = None
def __init__(self, flist=None, filename=None, key=None, root=None):
if EditorWindow.help_url is None:
dochome = os.path.join(sys.prefix, 'Doc', 'index.html')
if sys.platform.count('linux'):
# look for html docs in a couple of standard places
pyver = 'python-docs-' + '%s.%s.%s' % sys.version_info[:3]
if os.path.isdir('/var/www/html/python/'): # "python2" rpm
dochome = '/var/www/html/python/index.html'
else:
basepath = '/usr/share/doc/' # standard location
dochome = os.path.join(basepath, pyver,
'Doc', 'index.html')
elif sys.platform[:3] == 'win':
chmfile = os.path.join(sys.prefix, 'Doc',
'Python%s.chm' % _sphinx_version())
if os.path.isfile(chmfile):
dochome = chmfile
elif macosxSupport.runningAsOSXApp():
# documentation is stored inside the python framework
dochome = os.path.join(sys.prefix,
'Resources/English.lproj/Documentation/index.html')
dochome = os.path.normpath(dochome)
if os.path.isfile(dochome):
EditorWindow.help_url = dochome
if sys.platform == 'darwin':
# Safari requires real file:-URLs
EditorWindow.help_url = 'file://' + EditorWindow.help_url
else:
EditorWindow.help_url = "http://docs.python.org/%d.%d" % sys.version_info[:2]
currentTheme=idleConf.CurrentTheme()
self.flist = flist
root = root or flist.root
self.root = root
try:
sys.ps1
except AttributeError:
sys.ps1 = '>>> '
self.menubar = Menu(root)
self.top = top = WindowList.ListedToplevel(root, menu=self.menubar)
if flist:
self.tkinter_vars = flist.vars
#self.top.instance_dict makes flist.inversedict available to
#configDialog.py so it can access all EditorWindow instances
self.top.instance_dict = flist.inversedict
else:
self.tkinter_vars = {} # keys: Tkinter event names
# values: Tkinter variable instances
self.top.instance_dict = {}
self.recent_files_path = os.path.join(idleConf.GetUserCfgDir(),
'recent-files.lst')
self.text_frame = text_frame = Frame(top)
self.vbar = vbar = Scrollbar(text_frame, name='vbar')
self.width = idleConf.GetOption('main','EditorWindow','width', type='int')
text_options = {
'name': 'text',
'padx': 5,
'wrap': 'none',
'width': self.width,
'height': idleConf.GetOption('main', 'EditorWindow', 'height', type='int')}
if TkVersion >= 8.5:
# Starting with tk 8.5 we have to set the new tabstyle option
# to 'wordprocessor' to achieve the same display of tabs as in
# older tk versions.
text_options['tabstyle'] = 'wordprocessor'
self.text = text = MultiCallCreator(Text)(text_frame, **text_options)
self.top.focused_widget = self.text
self.createmenubar()
self.apply_bindings()
self.top.protocol("WM_DELETE_WINDOW", self.close)
self.top.bind("<<close-window>>", self.close_event)
if macosxSupport.runningAsOSXApp():
# Command-W on editorwindows doesn't work without this.
text.bind('<<close-window>>', self.close_event)
# Some OS X systems have only one mouse button,
# so use control-click for pulldown menus there.
# (Note, AquaTk defines <2> as the right button if
# present and the Tk Text widget already binds <2>.)
text.bind("<Control-Button-1>",self.right_menu_event)
else:
# Elsewhere, use right-click for pulldown menus.
text.bind("<3>",self.right_menu_event)
text.bind("<<cut>>", self.cut)
text.bind("<<copy>>", self.copy)
text.bind("<<paste>>", self.paste)
text.bind("<<center-insert>>", self.center_insert_event)
text.bind("<<help>>", self.help_dialog)
text.bind("<<python-docs>>", self.python_docs)
text.bind("<<about-idle>>", self.about_dialog)
text.bind("<<open-config-dialog>>", self.config_dialog)
text.bind("<<open-module>>", self.open_module)
text.bind("<<do-nothing>>", lambda event: "break")
text.bind("<<select-all>>", self.select_all)
text.bind("<<remove-selection>>", self.remove_selection)
text.bind("<<find>>", self.find_event)
text.bind("<<find-again>>", self.find_again_event)
text.bind("<<find-in-files>>", self.find_in_files_event)
text.bind("<<find-selection>>", self.find_selection_event)
text.bind("<<replace>>", self.replace_event)
text.bind("<<goto-line>>", self.goto_line_event)
text.bind("<<smart-backspace>>",self.smart_backspace_event)
text.bind("<<newline-and-indent>>",self.newline_and_indent_event)
text.bind("<<smart-indent>>",self.smart_indent_event)
text.bind("<<indent-region>>",self.indent_region_event)
text.bind("<<dedent-region>>",self.dedent_region_event)
text.bind("<<comment-region>>",self.comment_region_event)
text.bind("<<uncomment-region>>",self.uncomment_region_event)
text.bind("<<tabify-region>>",self.tabify_region_event)
text.bind("<<untabify-region>>",self.untabify_region_event)
text.bind("<<toggle-tabs>>",self.toggle_tabs_event)
text.bind("<<change-indentwidth>>",self.change_indentwidth_event)
text.bind("<Left>", self.move_at_edge_if_selection(0))
text.bind("<Right>", self.move_at_edge_if_selection(1))
text.bind("<<del-word-left>>", self.del_word_left)
text.bind("<<del-word-right>>", self.del_word_right)
text.bind("<<beginning-of-line>>", self.home_callback)
if flist:
flist.inversedict[self] = key
if key:
flist.dict[key] = self
text.bind("<<open-new-window>>", self.new_callback)
text.bind("<<close-all-windows>>", self.flist.close_all_callback)
text.bind("<<open-class-browser>>", self.open_class_browser)
text.bind("<<open-path-browser>>", self.open_path_browser)
self.set_status_bar()
vbar['command'] = text.yview
vbar.pack(side=RIGHT, fill=Y)
text['yscrollcommand'] = vbar.set
fontWeight = 'normal'
if idleConf.GetOption('main', 'EditorWindow', 'font-bold', type='bool'):
fontWeight='bold'
text.config(font=(idleConf.GetOption('main', 'EditorWindow', 'font'),
idleConf.GetOption('main', 'EditorWindow',
'font-size', type='int'),
fontWeight))
text_frame.pack(side=LEFT, fill=BOTH, expand=1)
text.pack(side=TOP, fill=BOTH, expand=1)
text.focus_set()
# usetabs true -> literal tab characters are used by indent and
# dedent cmds, possibly mixed with spaces if
# indentwidth is not a multiple of tabwidth,
# which will cause Tabnanny to nag!
# false -> tab characters are converted to spaces by indent
# and dedent cmds, and ditto TAB keystrokes
# Although use-spaces=0 can be configured manually in config-main.def,
# configuration of tabs v. spaces is not supported in the configuration
# dialog. IDLE promotes the preferred Python indentation: use spaces!
usespaces = idleConf.GetOption('main', 'Indent', 'use-spaces', type='bool')
self.usetabs = not usespaces
# tabwidth is the display width of a literal tab character.
# CAUTION: telling Tk to use anything other than its default
# tab setting causes it to use an entirely different tabbing algorithm,
# treating tab stops as fixed distances from the left margin.
# Nobody expects this, so for now tabwidth should never be changed.
self.tabwidth = 8 # must remain 8 until Tk is fixed.
# indentwidth is the number of screen characters per indent level.
# The recommended Python indentation is four spaces.
self.indentwidth = self.tabwidth
self.set_notabs_indentwidth()
# If context_use_ps1 is true, parsing searches back for a ps1 line;
# else searches for a popular (if, def, ...) Python stmt.
self.context_use_ps1 = False
# When searching backwards for a reliable place to begin parsing,
# first start num_context_lines[0] lines back, then
# num_context_lines[1] lines back if that didn't work, and so on.
# The last value should be huge (larger than the # of lines in a
# conceivable file).
# Making the initial values larger slows things down more often.
self.num_context_lines = 50, 500, 5000000
self.per = per = self.Percolator(text)
self.undo = undo = self.UndoDelegator()
per.insertfilter(undo)
text.undo_block_start = undo.undo_block_start
text.undo_block_stop = undo.undo_block_stop
undo.set_saved_change_hook(self.saved_change_hook)
# IOBinding implements file I/O and printing functionality
self.io = io = self.IOBinding(self)
io.set_filename_change_hook(self.filename_change_hook)
# Create the recent files submenu
self.recent_files_menu = Menu(self.menubar)
self.menudict['file'].insert_cascade(3, label='Recent Files',
underline=0,
menu=self.recent_files_menu)
self.update_recent_files_list()
self.color = None # initialized below in self.ResetColorizer
if filename:
if os.path.exists(filename) and not os.path.isdir(filename):
io.loadfile(filename)
else:
io.set_filename(filename)
self.ResetColorizer()
self.saved_change_hook()
self.set_indentation_params(self.ispythonsource(filename))
self.load_extensions()
menu = self.menudict.get('windows')
if menu:
end = menu.index("end")
if end is None:
end = -1
if end >= 0:
menu.add_separator()
end = end + 1
self.wmenu_end = end
WindowList.register_callback(self.postwindowsmenu)
# Some abstractions so IDLE extensions are cross-IDE
self.askyesno = tkMessageBox.askyesno
self.askinteger = tkSimpleDialog.askinteger
self.showerror = tkMessageBox.showerror
def _filename_to_unicode(self, filename):
"""convert filename to unicode in order to display it in Tk"""
if isinstance(filename, unicode) or not filename:
return filename
else:
try:
return filename.decode(self.filesystemencoding)
except UnicodeDecodeError:
# XXX
try:
return filename.decode(self.encoding)
except UnicodeDecodeError:
# byte-to-byte conversion
return filename.decode('iso8859-1')
def new_callback(self, event):
dirname, basename = self.io.defaultfilename()
self.flist.new(dirname)
return "break"
def home_callback(self, event):
if (event.state & 4) != 0 and event.keysym == "Home":
# state&4==Control. If <Control-Home>, use the Tk binding.
return
if self.text.index("iomark") and \
self.text.compare("iomark", "<=", "insert lineend") and \
self.text.compare("insert linestart", "<=", "iomark"):
# In Shell on input line, go to just after prompt
insertpt = int(self.text.index("iomark").split(".")[1])
else:
line = self.text.get("insert linestart", "insert lineend")
for insertpt in xrange(len(line)):
if line[insertpt] not in (' ','\t'):
break
else:
insertpt=len(line)
lineat = int(self.text.index("insert").split('.')[1])
if insertpt == lineat:
insertpt = 0
dest = "insert linestart+"+str(insertpt)+"c"
if (event.state&1) == 0:
# shift was not pressed
self.text.tag_remove("sel", "1.0", "end")
else:
if not self.text.index("sel.first"):
self.text.mark_set("my_anchor", "insert") # there was no previous selection
else:
if self.text.compare(self.text.index("sel.first"), "<", self.text.index("insert")):
self.text.mark_set("my_anchor", "sel.first") # extend back
else:
self.text.mark_set("my_anchor", "sel.last") # extend forward
first = self.text.index(dest)
last = self.text.index("my_anchor")
if self.text.compare(first,">",last):
first,last = last,first
self.text.tag_remove("sel", "1.0", "end")
self.text.tag_add("sel", first, last)
self.text.mark_set("insert", dest)
self.text.see("insert")
return "break"
def set_status_bar(self):
self.status_bar = self.MultiStatusBar(self.top)
if macosxSupport.runningAsOSXApp():
# Insert some padding to avoid obscuring some of the statusbar
# by the resize widget.
self.status_bar.set_label('_padding1', ' ', side=RIGHT)
self.status_bar.set_label('column', 'Col: ?', side=RIGHT)
self.status_bar.set_label('line', 'Ln: ?', side=RIGHT)
self.status_bar.pack(side=BOTTOM, fill=X)
self.text.bind("<<set-line-and-column>>", self.set_line_and_column)
self.text.event_add("<<set-line-and-column>>",
"<KeyRelease>", "<ButtonRelease>")
self.text.after_idle(self.set_line_and_column)
def set_line_and_column(self, event=None):
line, column = self.text.index(INSERT).split('.')
self.status_bar.set_label('column', 'Col: %s' % column)
self.status_bar.set_label('line', 'Ln: %s' % line)
menu_specs = [
("file", "_File"),
("edit", "_Edit"),
("format", "F_ormat"),
("run", "_Run"),
("options", "_Options"),
("windows", "_Windows"),
("help", "_Help"),
]
if macosxSupport.runningAsOSXApp():
del menu_specs[-3]
menu_specs[-2] = ("windows", "_Window")
def createmenubar(self):
mbar = self.menubar
self.menudict = menudict = {}
for name, label in self.menu_specs:
underline, label = prepstr(label)
menudict[name] = menu = Menu(mbar, name=name)
mbar.add_cascade(label=label, menu=menu, underline=underline)
if macosxSupport.isCarbonAquaTk(self.root):
# Insert the application menu
menudict['application'] = menu = Menu(mbar, name='apple')
mbar.add_cascade(label='IDLE', menu=menu)
self.fill_menus()
self.base_helpmenu_length = self.menudict['help'].index(END)
self.reset_help_menu_entries()
def postwindowsmenu(self):
# Only called when Windows menu exists
menu = self.menudict['windows']
end = menu.index("end")
if end is None:
end = -1
if end > self.wmenu_end:
menu.delete(self.wmenu_end+1, end)
WindowList.add_windows_to_menu(menu)
rmenu = None
def right_menu_event(self, event):
self.text.mark_set("insert", "@%d,%d" % (event.x, event.y))
if not self.rmenu:
self.make_rmenu()
rmenu = self.rmenu
self.event = event
iswin = sys.platform[:3] == 'win'
if iswin:
self.text.config(cursor="arrow")
for item in self.rmenu_specs:
try:
label, eventname, verify_state = item
except ValueError: # see issue1207589
continue
if verify_state is None:
continue
state = getattr(self, verify_state)()
rmenu.entryconfigure(label, state=state)
rmenu.tk_popup(event.x_root, event.y_root)
if iswin:
self.text.config(cursor="ibeam")
rmenu_specs = [
# ("Label", "<<virtual-event>>", "statefuncname"), ...
("Close", "<<close-window>>", None), # Example
]
def make_rmenu(self):
rmenu = Menu(self.text, tearoff=0)
for item in self.rmenu_specs:
label, eventname = item[0], item[1]
if label is not None:
def command(text=self.text, eventname=eventname):
text.event_generate(eventname)
rmenu.add_command(label=label, command=command)
else:
rmenu.add_separator()
self.rmenu = rmenu
def rmenu_check_cut(self):
return self.rmenu_check_copy()
def rmenu_check_copy(self):
try:
indx = self.text.index('sel.first')
except TclError:
return 'disabled'
else:
return 'normal' if indx else 'disabled'
def rmenu_check_paste(self):
try:
self.text.tk.call('tk::GetSelection', self.text, 'CLIPBOARD')
except TclError:
return 'disabled'
else:
return 'normal'
def about_dialog(self, event=None):
aboutDialog.AboutDialog(self.top,'About IDLE')
def config_dialog(self, event=None):
configDialog.ConfigDialog(self.top,'Settings')
def help_dialog(self, event=None):
if self.root:
parent = self.root
else:
parent = self.top
helpDialog.display(parent, near=self.top)
def python_docs(self, event=None):
if sys.platform[:3] == 'win':
try:
os.startfile(self.help_url)
except WindowsError as why:
tkMessageBox.showerror(title='Document Start Failure',
message=str(why), parent=self.text)
else:
webbrowser.open(self.help_url)
return "break"
def cut(self,event):
self.text.event_generate("<<Cut>>")
return "break"
def copy(self,event):
if not self.text.tag_ranges("sel"):
# There is no selection, so do nothing and maybe interrupt.
return
self.text.event_generate("<<Copy>>")
return "break"
def paste(self,event):
self.text.event_generate("<<Paste>>")
self.text.see("insert")
return "break"
def select_all(self, event=None):
self.text.tag_add("sel", "1.0", "end-1c")
self.text.mark_set("insert", "1.0")
self.text.see("insert")
return "break"
def remove_selection(self, event=None):
self.text.tag_remove("sel", "1.0", "end")
self.text.see("insert")
def move_at_edge_if_selection(self, edge_index):
"""Cursor move begins at start or end of selection
When a left/right cursor key is pressed create and return to Tkinter a
function which causes a cursor move from the associated edge of the
selection.
"""
self_text_index = self.text.index
self_text_mark_set = self.text.mark_set
edges_table = ("sel.first+1c", "sel.last-1c")
def move_at_edge(event):
if (event.state & 5) == 0: # no shift(==1) or control(==4) pressed
try:
self_text_index("sel.first")
self_text_mark_set("insert", edges_table[edge_index])
except TclError:
pass
return move_at_edge
def del_word_left(self, event):
self.text.event_generate('<Meta-Delete>')
return "break"
def del_word_right(self, event):
self.text.event_generate('<Meta-d>')
return "break"
def find_event(self, event):
SearchDialog.find(self.text)
return "break"
def find_again_event(self, event):
SearchDialog.find_again(self.text)
return "break"
def find_selection_event(self, event):
SearchDialog.find_selection(self.text)
return "break"
def find_in_files_event(self, event):
GrepDialog.grep(self.text, self.io, self.flist)
return "break"
def replace_event(self, event):
ReplaceDialog.replace(self.text)
return "break"
def goto_line_event(self, event):
text = self.text
lineno = tkSimpleDialog.askinteger("Goto",
"Go to line number:",parent=text)
if lineno is None:
return "break"
if lineno <= 0:
text.bell()
return "break"
text.mark_set("insert", "%d.0" % lineno)
text.see("insert")
def open_module(self, event=None):
# XXX Shouldn't this be in IOBinding or in FileList?
try:
name = self.text.get("sel.first", "sel.last")
except TclError:
name = ""
else:
name = name.strip()
name = tkSimpleDialog.askstring("Module",
"Enter the name of a Python module\n"
"to search on sys.path and open:",
parent=self.text, initialvalue=name)
if name:
name = name.strip()
if not name:
return
# XXX Ought to insert current file's directory in front of path
try:
(f, file, (suffix, mode, type)) = _find_module(name)
except (NameError, ImportError), msg:
tkMessageBox.showerror("Import error", str(msg), parent=self.text)
return
if type != imp.PY_SOURCE:
tkMessageBox.showerror("Unsupported type",
"%s is not a source module" % name, parent=self.text)
return
if f:
f.close()
if self.flist:
self.flist.open(file)
else:
self.io.loadfile(file)
    def open_class_browser(self, event=None):
        """Open the class browser on this window's file.

        Shows an error (and returns None) if the buffer has no filename.
        """
        filename = self.io.filename
        if not filename:
            tkMessageBox.showerror(
                "No filename",
                "This buffer has no associated filename",
                master=self.text)
            self.text.focus_set()
            return None
        head, tail = os.path.split(filename)
        base, ext = os.path.splitext(tail)
        from idlelib import ClassBrowser
        ClassBrowser.ClassBrowser(self.flist, base, [head])
    def open_path_browser(self, event=None):
        """Open the sys.path browser window."""
        from idlelib import PathBrowser
        PathBrowser.PathBrowser(self.flist)
    def gotoline(self, lineno):
        """Jump to *lineno*, select that line, and center it in the view.

        Invalid (None or non-positive) line numbers are silently ignored.
        """
        if lineno is not None and lineno > 0:
            self.text.mark_set("insert", "%d.0" % lineno)
            self.text.tag_remove("sel", "1.0", "end")
            self.text.tag_add("sel", "insert", "insert +1l")
            self.center()
def ispythonsource(self, filename):
if not filename or os.path.isdir(filename):
return True
base, ext = os.path.splitext(os.path.basename(filename))
if os.path.normcase(ext) in (".py", ".pyw"):
return True
try:
f = open(filename)
line = f.readline()
f.close()
except IOError:
return False
return line.startswith('#!') and line.find('python') >= 0
    def close_hook(self):
        """Default close hook: unregister from the file list (which may
        terminate the application if this was the last window)."""
        if self.flist:
            self.flist.unregister_maybe_terminate(self)
            self.flist = None
    def set_close_hook(self, close_hook):
        # NOTE: this shadows the close_hook *method* on this instance with
        # the supplied callable; _close() calls whichever is installed.
        self.close_hook = close_hook
    def filename_change_hook(self):
        """React to a filename change: notify the file list, refresh the
        title, the Windows menu registry, and the colorizer."""
        if self.flist:
            self.flist.filename_changed_edit(self)
        self.saved_change_hook()
        self.top.update_windowlist_registry(self)
        self.ResetColorizer()
    def _addcolorizer(self):
        """Install a colorizer filter (Python sources only), idempotently.

        The undo filter is removed and re-inserted so the colorizer ends up
        between the percolator and the undo delegator.
        """
        if self.color:
            return
        if self.ispythonsource(self.io.filename):
            self.color = self.ColorDelegator()
        # can add more colorizers here...
        if self.color:
            self.per.removefilter(self.undo)
            self.per.insertfilter(self.color)
            self.per.insertfilter(self.undo)
    def _rmcolorizer(self):
        """Remove the colorizer filter and its tags, if one is installed."""
        if not self.color:
            return
        self.color.removecolors()
        self.per.removefilter(self.color)
        self.color = None
    def ResetColorizer(self):
        "Update the colour theme"
        # Called from self.filename_change_hook and from configDialog.py
        # Rebuild the colorizer from scratch, then re-apply the configured
        # theme colours to the text widget itself.
        self._rmcolorizer()
        self._addcolorizer()
        theme = idleConf.GetOption('main','Theme','name')
        normal_colors = idleConf.GetHighlight(theme, 'normal')
        cursor_color = idleConf.GetHighlight(theme, 'cursor', fgBg='fg')
        select_colors = idleConf.GetHighlight(theme, 'hilite')
        self.text.config(
            foreground=normal_colors['foreground'],
            background=normal_colors['background'],
            insertbackground=cursor_color,
            selectforeground=select_colors['foreground'],
            selectbackground=select_colors['background'],
            )
    def ResetFont(self):
        "Update the text widgets' font if it is changed"
        # Called from configDialog.py
        fontWeight='normal'
        if idleConf.GetOption('main','EditorWindow','font-bold',type='bool'):
            fontWeight='bold'
        # Tk accepts a (family, size, weight) tuple as a font spec.
        self.text.config(font=(idleConf.GetOption('main','EditorWindow','font'),
                idleConf.GetOption('main','EditorWindow','font-size',
                                   type='int'),
                fontWeight))
    def RemoveKeybindings(self):
        "Remove the keybindings before they are changed."
        # Called from configDialog.py
        # Drop both the core bindings and every extension's bindings so
        # ApplyKeybindings() can re-add the new set cleanly.
        self.Bindings.default_keydefs = keydefs = idleConf.GetCurrentKeySet()
        for event, keylist in keydefs.items():
            self.text.event_delete(event, *keylist)
        for extensionName in self.get_standard_extension_names():
            xkeydefs = idleConf.GetExtensionBindings(extensionName)
            if xkeydefs:
                for event, keylist in xkeydefs.items():
                    self.text.event_delete(event, *keylist)
    def ApplyKeybindings(self):
        "Update the keybindings after they are changed"
        # Called from configDialog.py
        self.Bindings.default_keydefs = keydefs = idleConf.GetCurrentKeySet()
        self.apply_bindings()
        for extensionName in self.get_standard_extension_names():
            xkeydefs = idleConf.GetExtensionBindings(extensionName)
            if xkeydefs:
                self.apply_bindings(xkeydefs)
        #update menu accelerators
        # Build {menu name: {label: virtual event}} from the menu defs;
        # prepstr() strips the underscore marker so labels match entrycget.
        menuEventDict = {}
        for menu in self.Bindings.menudefs:
            menuEventDict[menu[0]] = {}
            for item in menu[1]:
                if item:
                    menuEventDict[menu[0]][prepstr(item[0])[1]] = item[1]
        # Walk every command entry of every menubar menu and refresh the
        # displayed accelerator from the new key set.
        for menubarItem in self.menudict.keys():
            menu = self.menudict[menubarItem]
            end = menu.index(END) + 1
            for index in range(0, end):
                if menu.type(index) == 'command':
                    accel = menu.entrycget(index, 'accelerator')
                    if accel:
                        itemName = menu.entrycget(index, 'label')
                        event = ''
                        if menubarItem in menuEventDict:
                            if itemName in menuEventDict[menubarItem]:
                                event = menuEventDict[menubarItem][itemName]
                        if event:
                            accel = get_accelerator(keydefs, event)
                            menu.entryconfig(index, accelerator=accel)
    def set_notabs_indentwidth(self):
        "Update the indentwidth if changed and not using tabs in this window"
        # Called from configDialog.py
        if not self.usetabs:
            self.indentwidth = idleConf.GetOption('main', 'Indent','num-spaces',
                                                  type='int')
    def reset_help_menu_entries(self):
        "Update the additional help entries on the Help menu"
        help_list = idleConf.GetAllExtraHelpSourcesList()
        helpmenu = self.menudict['help']
        # first delete the extra help entries, if any
        helpmenu_length = helpmenu.index(END)
        if helpmenu_length > self.base_helpmenu_length:
            helpmenu.delete((self.base_helpmenu_length + 1), helpmenu_length)
        # then rebuild them
        if help_list:
            helpmenu.add_separator()
            # entry is (label, helpfile-or-url); freeze the file per entry.
            for entry in help_list:
                cmd = self.__extra_help_callback(entry[1])
                helpmenu.add_command(label=entry[0], command=cmd)
        # and update the menu dictionary
        self.menudict['help'] = helpmenu
    def __extra_help_callback(self, helpfile):
        "Create a callback with the helpfile value frozen at definition time"
        # The default-argument trick avoids the late-binding-closure pitfall
        # when several callbacks are created in a loop.
        def display_extra_help(helpfile=helpfile):
            if not helpfile.startswith(('www', 'http')):
                helpfile = os.path.normpath(helpfile)
            if sys.platform[:3] == 'win':
                try:
                    os.startfile(helpfile)
                except WindowsError as why:
                    tkMessageBox.showerror(title='Document Start Failure',
                        message=str(why), parent=self.text)
            else:
                webbrowser.open(helpfile)
        return display_extra_help
    def update_recent_files_list(self, new_file=None):
        "Load and update the recent files list and menus"
        # Reads the on-disk list, optionally promotes *new_file* to the top,
        # drops dead/invalid paths, truncates to the number of available
        # shortcut characters, saves, and rebuilds every window's menu.
        rf_list = []
        if os.path.exists(self.recent_files_path):
            rf_list_file = open(self.recent_files_path,'r')
            try:
                rf_list = rf_list_file.readlines()
            finally:
                rf_list_file.close()
        if new_file:
            new_file = os.path.abspath(new_file) + '\n'
            if new_file in rf_list:
                rf_list.remove(new_file)  # move to top
            rf_list.insert(0, new_file)
        # clean and save the recent files list
        bad_paths = []
        for path in rf_list:
            # path[0:-1] strips the trailing newline before the exists check
            if '\0' in path or not os.path.exists(path[0:-1]):
                bad_paths.append(path)
        rf_list = [path for path in rf_list if path not in bad_paths]
        ulchars = "1234567890ABCDEFGHIJK"
        rf_list = rf_list[0:len(ulchars)]
        try:
            with open(self.recent_files_path, 'w') as rf_file:
                rf_file.writelines(rf_list)
        except IOError as err:
            # Warn only once per session to avoid a dialog on every save.
            if not getattr(self.root, "recentfilelist_error_displayed", False):
                self.root.recentfilelist_error_displayed = True
                tkMessageBox.showerror(title='IDLE Error',
                    message='Unable to update Recent Files list:\n%s'
                        % str(err),
                    parent=self.text)
        # for each edit window instance, construct the recent files menu
        for instance in self.top.instance_dict.keys():
            menu = instance.recent_files_menu
            menu.delete(0, END)  # clear, and rebuild:
            for i, file_name in enumerate(rf_list):
                file_name = file_name.rstrip()  # zap \n
                # make unicode string to display non-ASCII chars correctly
                ufile_name = self._filename_to_unicode(file_name)
                # name mangling resolves to _EditorWindow__recent_file_callback
                callback = instance.__recent_file_callback(file_name)
                menu.add_command(label=ulchars[i] + " " + ufile_name,
                                 command=callback,
                                 underline=0)
    def __recent_file_callback(self, file_name):
        """Return a menu callback that opens *file_name* (frozen via a
        default argument to avoid the late-binding-closure pitfall)."""
        def open_recent_file(fn_closure=file_name):
            self.io.open(editFile=fn_closure)
        return open_recent_file
def saved_change_hook(self):
short = self.short_title()
long = self.long_title()
if short and long:
title = short + " - " + long
elif short:
title = short
elif long:
title = long
else:
title = "Untitled"
icon = short or long or title
if not self.get_saved():
title = "*%s*" % title
icon = "*%s" % icon
self.top.wm_title(title)
self.top.wm_iconname(icon)
    def get_saved(self):
        """Return True if the buffer has no unsaved changes (per undo log)."""
        return self.undo.get_saved()
    def set_saved(self, flag):
        """Mark the buffer saved (True) or modified (False)."""
        self.undo.set_saved(flag)
    def reset_undo(self):
        """Clear the undo history."""
        self.undo.reset_undo()
    def short_title(self):
        """Return the basename of the current file, or None if unnamed."""
        filename = self.io.filename
        if filename:
            filename = os.path.basename(filename)
        # return unicode string to display non-ASCII chars correctly
        return self._filename_to_unicode(filename)
    def long_title(self):
        """Return the full path of the current file, or '' if unnamed."""
        # return unicode string to display non-ASCII chars correctly
        return self._filename_to_unicode(self.io.filename or "")
    def center_insert_event(self, event):
        """Scroll so the insert cursor's line is vertically centered."""
        self.center()
def center(self, mark="insert"):
text = self.text
top, bot = self.getwindowlines()
lineno = self.getlineno(mark)
height = bot - top
newtop = max(1, lineno - height//2)
text.yview(float(newtop))
    def getwindowlines(self):
        """Return (top, bot): line numbers currently visible in the widget."""
        text = self.text
        top = self.getlineno("@0,0")
        bot = self.getlineno("@0,65535")
        if top == bot and text.winfo_height() == 1:
            # Geometry manager hasn't run yet
            height = int(text['height'])
            bot = top + height - 1
        return top, bot
    def getlineno(self, mark="insert"):
        """Return the line number of *mark* ("line.col" -> int line)."""
        text = self.text
        return int(float(text.index(mark)))
def get_geometry(self):
"Return (width, height, x, y)"
geom = self.top.wm_geometry()
m = re.match(r"(\d+)x(\d+)\+(-?\d+)\+(-?\d+)", geom)
tuple = (map(int, m.groups()))
return tuple
    def close_event(self, event):
        """Event-handler wrapper around close()."""
        self.close()
    def maybesave(self):
        """Offer to save unsaved changes; return the IOBinding's reply
        (e.g. "yes"/"no"/"cancel"), raising the window first so the user
        can see which buffer the prompt refers to."""
        if self.io:
            if not self.get_saved():
                if self.top.state()!='normal':
                    self.top.deiconify()
                self.top.lower()
                self.top.lift()
            return self.io.maybesave()
    def close(self):
        """Close the window unless the user cancels the save prompt.
        Returns the maybesave() reply."""
        reply = self.maybesave()
        if str(reply) != "cancel":
            self._close()
        return reply
    def _close(self):
        """Tear the window down unconditionally.

        The teardown order matters: recent-files bookkeeping first, then
        callbacks/extensions, then the delegator chain, then Tk widgets,
        and the close hook last (it may terminate the application).
        """
        if self.io.filename:
            self.update_recent_files_list(new_file=self.io.filename)
        WindowList.unregister_callback(self.postwindowsmenu)
        self.unload_extensions()
        self.io.close()
        self.io = None
        self.undo = None
        if self.color:
            self.color.close(False)
            self.color = None
        self.text = None
        self.tkinter_vars = None
        self.per.close()
        self.per = None
        self.top.destroy()
        if self.close_hook:
            # unless override: unregister from flist, terminate if last window
            self.close_hook()
    def load_extensions(self):
        """Reset the extension registry and load all configured extensions."""
        self.extensions = {}
        self.load_standard_extensions()
    def unload_extensions(self):
        """Close every loaded extension (if it supports close()) and clear
        the registry."""
        for ins in self.extensions.values():
            if hasattr(ins, "close"):
                ins.close()
        self.extensions = {}
def load_standard_extensions(self):
for name in self.get_standard_extension_names():
try:
self.load_extension(name)
except:
print "Failed to load extension", repr(name)
import traceback
traceback.print_exc()
    def get_standard_extension_names(self):
        """Return the names of the extensions configured for the editor."""
        return idleConf.GetExtensions(editor_only=True)
    def load_extension(self, name):
        """Import and instantiate extension *name*, wiring up its menus and
        key bindings.

        The extension module must define a class of the same name; for each
        bound virtual event "<<some-event>>", a method "some_event_event"
        on the instance (if present) is bound to that event.
        """
        try:
            mod = __import__(name, globals(), locals(), [])
        except ImportError:
            print "\nFailed to import extension: ", name
            return
        cls = getattr(mod, name)
        keydefs = idleConf.GetExtensionBindings(name)
        if hasattr(cls, "menudefs"):
            self.fill_menus(cls.menudefs, keydefs)
        ins = cls(self)
        self.extensions[name] = ins
        if keydefs:
            self.apply_bindings(keydefs)
            for vevent in keydefs.keys():
                # "<<foo-bar>>" -> "foo_bar_event"
                methodname = vevent.replace("-", "_")
                while methodname[:1] == '<':
                    methodname = methodname[1:]
                while methodname[-1:] == '>':
                    methodname = methodname[:-1]
                methodname = methodname + "_event"
                if hasattr(ins, methodname):
                    self.text.bind(vevent, getattr(ins, methodname))
    def apply_bindings(self, keydefs=None):
        """Register key sequences for virtual events on the text widget.
        Defaults to the window's standard key definitions."""
        if keydefs is None:
            keydefs = self.Bindings.default_keydefs
        text = self.text
        text.keydefs = keydefs
        for event, keylist in keydefs.items():
            if keylist:
                text.event_add(event, *keylist)
    def fill_menus(self, menudefs=None, keydefs=None):
        """Add appropriate entries to the menus and submenus
        Menus that are absent or None in self.menudict are ignored.

        Each entry is None (separator) or (label, virtual-event); a '!'
        label prefix makes a checkbutton, and '_' marks the underlined
        accelerator character (extracted by prepstr).
        """
        if menudefs is None:
            menudefs = self.Bindings.menudefs
        if keydefs is None:
            keydefs = self.Bindings.default_keydefs
        menudict = self.menudict
        text = self.text
        for mname, entrylist in menudefs:
            menu = menudict.get(mname)
            if not menu:
                continue
            for entry in entrylist:
                if not entry:
                    menu.add_separator()
                else:
                    label, eventname = entry
                    checkbutton = (label[:1] == '!')
                    if checkbutton:
                        label = label[1:]
                    underline, label = prepstr(label)
                    accelerator = get_accelerator(keydefs, eventname)
                    # Default args freeze text/eventname per iteration
                    # (late-binding-closure pitfall).
                    def command(text=text, eventname=eventname):
                        text.event_generate(eventname)
                    if checkbutton:
                        var = self.get_var_obj(eventname, BooleanVar)
                        menu.add_checkbutton(label=label, underline=underline,
                            command=command, accelerator=accelerator,
                            variable=var)
                    else:
                        menu.add_command(label=label, underline=underline,
                                         command=command,
                                         accelerator=accelerator)
def getvar(self, name):
var = self.get_var_obj(name)
if var:
value = var.get()
return value
else:
raise NameError, name
def setvar(self, name, value, vartype=None):
var = self.get_var_obj(name, vartype)
if var:
var.set(value)
else:
raise NameError, name
    def get_var_obj(self, name, vartype=None):
        """Return the Tkinter variable object registered under *name*,
        creating (and caching) one of class *vartype* if absent and a
        type was supplied; otherwise return None for unknown names."""
        var = self.tkinter_vars.get(name)
        if not var and vartype:
            # create a Tkinter variable object with self.text as master:
            self.tkinter_vars[name] = var = vartype(self.text)
        return var
# Tk implementations of "virtual text methods" -- each platform
# reusing IDLE's support code needs to define these for its GUI's
# flavor of widget.
# Is character at text_index in a Python string? Return 0 for
# "guaranteed no", true for anything else. This info is expensive
# to compute ab initio, but is probably already known by the
# platform's colorizer.
    def is_char_in_string(self, text_index):
        """Return a true value if the char at *text_index* may be inside a
        string literal; 0 only when that is guaranteed false."""
        if self.color:
            # Return true iff colorizer hasn't (re)gotten this far
            # yet, or the character is tagged as being in a string
            return self.text.tag_prevrange("TODO", text_index) or \
                   "STRING" in self.text.tag_names(text_index)
        else:
            # The colorizer is missing: assume the worst
            return 1
# If a selection is defined in the text widget, return (start,
# end) as Tkinter text indices, otherwise return (None, None)
    def get_selection_indices(self):
        """Return (start, end) Tk indices of the selection, or (None, None)
        when nothing is selected (Tk raises TclError for 'sel' then)."""
        try:
            first = self.text.index("sel.first")
            last = self.text.index("sel.last")
            return first, last
        except TclError:
            return None, None
# Return the text widget's current view of what a tab stop means
# (equivalent width in spaces).
    def get_tabwidth(self):
        """Return the widget's tab stop width (in space-equivalents)."""
        current = self.text['tabs'] or TK_TABWIDTH_DEFAULT
        return int(current)
# Set the text widget's current view of what a tab stop means.
    def set_tabwidth(self, newtabwidth):
        """Set the widget's tab stops to *newtabwidth* character widths.

        Tk tab stops are in pixels, so the width of 'n' * newtabwidth in
        the current font is measured first.
        """
        text = self.text
        if self.get_tabwidth() != newtabwidth:
            pixels = text.tk.call("font", "measure", text["font"],
                                  "-displayof", text.master,
                                  "n" * newtabwidth)
            text.configure(tabs=pixels)
# If ispythonsource and guess are true, guess a good value for
# indentwidth based on file content (if possible), and if
# indentwidth != tabwidth set usetabs false.
# In any case, adjust the Text widget's view of what a tab
# character means.
    def set_indentation_params(self, ispythonsource, guess=True):
        """Configure indentwidth/usetabs for this buffer.

        For Python source (when *guess* is true) the indent width is
        inferred from the file content; only plausible guesses (2..8)
        are accepted.  If the indent width differs from the tab width,
        tabs cannot be used for indentation.
        """
        if guess and ispythonsource:
            i = self.guess_indent()
            if 2 <= i <= 8:
                self.indentwidth = i
            if self.indentwidth != self.tabwidth:
                self.usetabs = False
        self.set_tabwidth(self.tabwidth)
    def smart_backspace_event(self, event):
        """Backspace handler: delete the selection if any; otherwise delete
        back to the closest preceding virtual tab stop when only whitespace
        precedes the cursor, else delete a single character."""
        text = self.text
        first, last = self.get_selection_indices()
        if first and last:
            text.delete(first, last)
            text.mark_set("insert", first)
            return "break"
        # Delete whitespace left, until hitting a real char or closest
        # preceding virtual tab stop.
        chars = text.get("insert linestart", "insert")
        if chars == '':
            if text.compare("insert", ">", "1.0"):
                # easy: delete preceding newline
                text.delete("insert-1c")
            else:
                text.bell()     # at start of buffer
            return "break"
        if chars[-1] not in " \t":
            # easy: delete preceding real char
            text.delete("insert-1c")
            return "break"
        # Ick.  It may require *inserting* spaces if we back up over a
        # tab character!  This is written to be clear, not fast.
        tabwidth = self.tabwidth
        have = len(chars.expandtabs(tabwidth))
        assert have > 0
        want = ((have - 1) // self.indentwidth) * self.indentwidth
        # Debug prompt is multilined....
        if self.context_use_ps1:
            last_line_of_prompt = sys.ps1.split('\n')[-1]
        else:
            last_line_of_prompt = ''
        ncharsdeleted = 0
        while 1:
            if chars == last_line_of_prompt:
                break
            chars = chars[:-1]
            ncharsdeleted = ncharsdeleted + 1
            have = len(chars.expandtabs(tabwidth))
            if have <= want or chars[-1] not in " \t":
                break
        text.undo_block_start()
        text.delete("insert-%dc" % ncharsdeleted, "insert")
        if have < want:
            # Backed over a tab past the target stop: pad with spaces.
            text.insert("insert", ' ' * (want - have))
        text.undo_block_stop()
        return "break"
    def smart_indent_event(self, event):
        # if intraline selection:
        #     delete it
        # elif multiline selection:
        #     do indent-region
        # else:
        #     indent one level
        text = self.text
        first, last = self.get_selection_indices()
        text.undo_block_start()
        try:
            if first and last:
                if index2line(first) != index2line(last):
                    return self.indent_region_event(event)
                text.delete(first, last)
                text.mark_set("insert", first)
            prefix = text.get("insert linestart", "insert")
            raw, effective = classifyws(prefix, self.tabwidth)
            if raw == len(prefix):
                # only whitespace to the left
                self.reindent_to(effective + self.indentwidth)
            else:
                # tab to the next 'stop' within or to right of line's text:
                if self.usetabs:
                    pad = '\t'
                else:
                    effective = len(prefix.expandtabs(self.tabwidth))
                    n = self.indentwidth
                    pad = ' ' * (n - effective % n)
                text.insert("insert", pad)
            text.see("insert")
            return "break"
        finally:
            text.undo_block_stop()
    def newline_and_indent_event(self, event):
        """Insert a newline and auto-indent the new line.

        Uses PyParse on the preceding source to pick an indent: mimic the
        current indent inside strings, line up with open brackets, handle
        backslash continuations, and add/remove one level after block
        openers/closers.
        """
        text = self.text
        first, last = self.get_selection_indices()
        text.undo_block_start()
        try:
            if first and last:
                text.delete(first, last)
                text.mark_set("insert", first)
            line = text.get("insert linestart", "insert")
            i, n = 0, len(line)
            while i < n and line[i] in " \t":
                i = i+1
            if i == n:
                # the cursor is in or at leading indentation in a continuation
                # line; just inject an empty line at the start
                text.insert("insert linestart", '\n')
                return "break"
            indent = line[:i]
            # strip whitespace before insert point unless it's in the prompt
            i = 0
            last_line_of_prompt = sys.ps1.split('\n')[-1]
            while line and line[-1] in " \t" and line != last_line_of_prompt:
                line = line[:-1]
                i = i+1
            if i:
                text.delete("insert - %d chars" % i, "insert")
            # strip whitespace after insert point
            while text.get("insert") in " \t":
                text.delete("insert")
            # start new line
            text.insert("insert", '\n')
            # adjust indentation for continuations and block
            # open/close first need to find the last stmt
            lno = index2line(text.index('insert'))
            y = PyParse.Parser(self.indentwidth, self.tabwidth)
            if not self.context_use_ps1:
                # Editor window: scan back over a few candidate context
                # sizes until a good parse start is found.
                for context in self.num_context_lines:
                    startat = max(lno - context, 1)
                    startatindex = repr(startat) + ".0"
                    rawtext = text.get(startatindex, "insert")
                    y.set_str(rawtext)
                    bod = y.find_good_parse_start(
                              self.context_use_ps1,
                              self._build_char_in_string_func(startatindex))
                    if bod is not None or startat == 1:
                        break
                y.set_lo(bod or 0)
            else:
                # Shell window: statements start after the last prompt.
                r = text.tag_prevrange("console", "insert")
                if r:
                    startatindex = r[1]
                else:
                    startatindex = "1.0"
                rawtext = text.get(startatindex, "insert")
                y.set_str(rawtext)
                y.set_lo(0)
            c = y.get_continuation_type()
            if c != PyParse.C_NONE:
                # The current stmt hasn't ended yet.
                if c == PyParse.C_STRING_FIRST_LINE:
                    # after the first line of a string; do not indent at all
                    pass
                elif c == PyParse.C_STRING_NEXT_LINES:
                    # inside a string which started before this line;
                    # just mimic the current indent
                    text.insert("insert", indent)
                elif c == PyParse.C_BRACKET:
                    # line up with the first (if any) element of the
                    # last open bracket structure; else indent one
                    # level beyond the indent of the line with the
                    # last open bracket
                    self.reindent_to(y.compute_bracket_indent())
                elif c == PyParse.C_BACKSLASH:
                    # if more than one line in this stmt already, just
                    # mimic the current indent; else if initial line
                    # has a start on an assignment stmt, indent to
                    # beyond leftmost =; else to beyond first chunk of
                    # non-whitespace on initial line
                    if y.get_num_lines_in_stmt() > 1:
                        text.insert("insert", indent)
                    else:
                        self.reindent_to(y.compute_backslash_indent())
                else:
                    assert 0, "bogus continuation type %r" % (c,)
                return "break"
            # This line starts a brand new stmt; indent relative to
            # indentation of initial line of closest preceding
            # interesting stmt.
            indent = y.get_base_indent_string()
            text.insert("insert", indent)
            if y.is_block_opener():
                self.smart_indent_event(event)
            elif indent and y.is_block_closer():
                self.smart_backspace_event(event)
            return "break"
        finally:
            text.see("insert")
            text.undo_block_stop()
# Our editwin provides a is_char_in_string function that works
# with a Tk text index, but PyParse only knows about offsets into
# a string. This builds a function for PyParse that accepts an
# offset.
    def _build_char_in_string_func(self, startindex):
        """Adapt is_char_in_string (Tk-index based) to a PyParse callback
        that takes a character offset relative to *startindex*."""
        def inner(offset, _startindex=startindex,
                  _icis=self.is_char_in_string):
            return _icis(_startindex + "+%dc" % offset)
        return inner
    def indent_region_event(self, event):
        """Indent every non-blank line in the selected region one level."""
        head, tail, chars, lines = self.get_region()
        for pos in range(len(lines)):
            line = lines[pos]
            if line:
                raw, effective = classifyws(line, self.tabwidth)
                effective = effective + self.indentwidth
                lines[pos] = self._make_blanks(effective) + line[raw:]
        self.set_region(head, tail, chars, lines)
        return "break"
    def dedent_region_event(self, event):
        """Dedent every non-blank line in the selected region one level
        (never past column 0)."""
        head, tail, chars, lines = self.get_region()
        for pos in range(len(lines)):
            line = lines[pos]
            if line:
                raw, effective = classifyws(line, self.tabwidth)
                effective = max(effective - self.indentwidth, 0)
                lines[pos] = self._make_blanks(effective) + line[raw:]
        self.set_region(head, tail, chars, lines)
        return "break"
    def comment_region_event(self, event):
        """Prefix each line of the region with '##'.

        The last list element is skipped on purpose: get_region() splits
        on '\\n', so the final element is the (empty) text after the
        region's trailing newline, not a selected line.
        """
        head, tail, chars, lines = self.get_region()
        for pos in range(len(lines) - 1):
            line = lines[pos]
            lines[pos] = '##' + line
        self.set_region(head, tail, chars, lines)
def uncomment_region_event(self, event):
head, tail, chars, lines = self.get_region()
for pos in range(len(lines)):
line = lines[pos]
if not line:
continue
if line[:2] == '##':
line = line[2:]
elif line[:1] == '#':
line = line[1:]
lines[pos] = line
self.set_region(head, tail, chars, lines)
    def tabify_region_event(self, event):
        """Convert leading whitespace of each region line to tabs (plus a
        space remainder), using a user-supplied tab width."""
        head, tail, chars, lines = self.get_region()
        tabwidth = self._asktabwidth()
        if tabwidth is None: return
        for pos in range(len(lines)):
            line = lines[pos]
            if line:
                raw, effective = classifyws(line, tabwidth)
                ntabs, nspaces = divmod(effective, tabwidth)
                lines[pos] = '\t' * ntabs + ' ' * nspaces + line[raw:]
        self.set_region(head, tail, chars, lines)
def untabify_region_event(self, event):
head, tail, chars, lines = self.get_region()
tabwidth = self._asktabwidth()
if tabwidth is None: return
for pos in range(len(lines)):
lines[pos] = lines[pos].expandtabs(tabwidth)
self.set_region(head, tail, chars, lines)
    def toggle_tabs_event(self, event):
        """Ask the user, then toggle tab-based indentation for this window.
        Switching forces indentwidth back to 8 to keep indentation sane."""
        if self.askyesno(
              "Toggle tabs",
              "Turn tabs " + ("on", "off")[self.usetabs] +
              "?\nIndent width " +
              ("will be", "remains at")[self.usetabs] + " 8." +
              "\n Note: a tab is always 8 columns",
              parent=self.text):
            self.usetabs = not self.usetabs
            # Try to prevent inconsistent indentation.
            # User must change indent width manually after using tabs.
            self.indentwidth = 8
        return "break"
# XXX this isn't bound to anything -- see tabwidth comments
## def change_tabwidth_event(self, event):
## new = self._asktabwidth()
## if new != self.tabwidth:
## self.tabwidth = new
## self.set_indentation_params(0, guess=0)
## return "break"
    def change_indentwidth_event(self, event):
        """Ask for a new indent width (2-16) and apply it — but only when
        not using tabs (tab indentation is fixed at 8)."""
        new = self.askinteger(
                  "Indent width",
                  "New indent width (2-16)\n(Always use 8 when using tabs)",
                  parent=self.text,
                  initialvalue=self.indentwidth,
                  minvalue=2,
                  maxvalue=16)
        if new and new != self.indentwidth and not self.usetabs:
            self.indentwidth = new
        return "break"
    def get_region(self):
        """Return (head, tail, chars, lines) for the selection, expanded to
        whole lines; with no selection, the insert cursor's line is used.
        *chars* is the raw text, *lines* its '\\n' split (so the final
        element is the text after the trailing newline)."""
        text = self.text
        first, last = self.get_selection_indices()
        if first and last:
            head = text.index(first + " linestart")
            tail = text.index(last + "-1c lineend +1c")
        else:
            head = text.index("insert linestart")
            tail = text.index("insert lineend +1c")
        chars = text.get(head, tail)
        lines = chars.split("\n")
        return head, tail, chars, lines
    def set_region(self, head, tail, chars, lines):
        """Replace the region [head, tail) with *lines* (joined by '\\n'),
        as a single undo unit, and select the new text.  If nothing
        changed, just beep."""
        text = self.text
        newchars = "\n".join(lines)
        if newchars == chars:
            text.bell()
            return
        text.tag_remove("sel", "1.0", "end")
        text.mark_set("insert", head)
        text.undo_block_start()
        text.delete(head, tail)
        text.insert(head, newchars)
        text.undo_block_stop()
        text.tag_add("sel", head, "insert")
# Make string that displays as n leading blanks.
def _make_blanks(self, n):
if self.usetabs:
ntabs, nspaces = divmod(n, self.tabwidth)
return '\t' * ntabs + ' ' * nspaces
else:
return ' ' * n
# Delete from beginning of line to insert point, then reinsert
# column logical (meaning use tabs if appropriate) spaces.
    def reindent_to(self, column):
        """Replace the current line's leading whitespace so the insert
        point sits at *column*, as one undo unit (tabs if configured)."""
        text = self.text
        text.undo_block_start()
        if text.compare("insert linestart", "!=", "insert"):
            text.delete("insert linestart", "insert")
        if column:
            text.insert("insert", self._make_blanks(column))
        text.undo_block_stop()
    def _asktabwidth(self):
        """Prompt for a tab width (2-16); returns None if cancelled."""
        return self.askinteger(
            "Tab width",
            "Columns per tab? (2-16)",
            parent=self.text,
            initialvalue=self.indentwidth,
            minvalue=2,
            maxvalue=16)
# Guess indentwidth from text content.
# Return guessed indentwidth. This should not be believed unless
# it's in a reasonable range (e.g., it will be 0 if no indented
# blocks are found).
    def guess_indent(self):
        """Guess this buffer's indent width: the width difference between
        the first block opener and its first indented statement.  Returns
        0 when no indented block is found."""
        opener, indented = IndentSearcher(self.text, self.tabwidth).run()
        if opener and indented:
            raw, indentsmall = classifyws(opener, self.tabwidth)
            raw, indentlarge = classifyws(indented, self.tabwidth)
        else:
            indentsmall = indentlarge = 0
        return indentlarge - indentsmall
# "line.col" -> line, as an int
def index2line(index):
    """Return the line number of a Tk text index of the form "line.col"."""
    return int(float(index))
# Look at the leading whitespace in s.
# Return pair (# of leading ws characters,
# effective # of leading blanks after expanding
# tabs to width tabwidth)
def classifyws(s, tabwidth):
    """Examine the leading whitespace of *s*.

    Return (raw, effective): the count of leading whitespace characters,
    and the number of display columns they occupy once each tab expands
    to the next multiple of *tabwidth*.
    """
    raw = 0
    effective = 0
    for ch in s:
        if ch == ' ':
            effective += 1
        elif ch == '\t':
            effective = (effective // tabwidth + 1) * tabwidth
        else:
            break
        raw += 1
    return raw, effective
# Keep the tokenize module under a private alias and drop the public name,
# so a stray local 'tokenize' can't shadow it; IndentSearcher uses _tokenize.
import tokenize
_tokenize = tokenize
del tokenize
class IndentSearcher(object):

    # .run() chews over the Text widget, looking for a block opener
    # and the stmt following it.  Returns a pair,
    #     (line containing block opener, line containing stmt)
    # Either or both may be None.

    def __init__(self, text, tabwidth):
        # text: the Tk Text widget to scan; tabwidth: columns per tab.
        self.text = text
        self.tabwidth = tabwidth
        self.i = self.finished = 0
        self.blkopenline = self.indentedline = None

    def readline(self):
        """Feed the widget's lines one at a time to the tokenizer."""
        if self.finished:
            return ""
        i = self.i = self.i + 1
        mark = repr(i) + ".0"
        if self.text.compare(mark, ">=", "end"):
            return ""
        return self.text.get(mark, mark + " lineend+1c")

    def tokeneater(self, type, token, start, end, line,
                   INDENT=_tokenize.INDENT,
                   NAME=_tokenize.NAME,
                   OPENERS=('class', 'def', 'for', 'if', 'try', 'while')):
        # Remember the latest block-opener line; the first INDENT token
        # after one gives us the indented statement we want.
        if self.finished:
            pass
        elif type == NAME and token in OPENERS:
            self.blkopenline = line
        elif type == INDENT and self.blkopenline:
            self.indentedline = line
            self.finished = 1

    def run(self):
        # Temporarily impose this window's tab width on the (module-global,
        # Python 2) tokenizer state, restoring it afterwards.
        save_tabsize = _tokenize.tabsize
        _tokenize.tabsize = self.tabwidth
        try:
            try:
                _tokenize.tokenize(self.readline, self.tokeneater)
            except (_tokenize.TokenError, SyntaxError):
                # since we cut off the tokenizer early, we can trigger
                # spurious errors
                pass
        finally:
            _tokenize.tabsize = save_tabsize
        return self.blkopenline, self.indentedline
### end autoindent code ###
def prepstr(s):
    """Extract the menu-accelerator underscore from *s*.

    Returns (index, label): the position the first '_' occupied (-1 when
    absent) and the label with that underscore removed.
    e.g. prepstr("Co_py") -> (2, "Copy").
    """
    i = s.find('_')
    if i < 0:
        return i, s
    return i, s[:i] + s[i + 1:]
# Map Tk keysym names to the printable characters shown in accelerators.
keynames = {
 'bracketleft': '[',
 'bracketright': ']',
 'slash': '/',
}
def get_accelerator(keydefs, eventname):
    """Return a human-readable accelerator string (e.g. "Ctrl+Shift+S")
    for the first key sequence bound to *eventname*, or "" if none."""
    keylist = keydefs.get(eventname)
    # issue10940: temporary workaround to prevent hang with OS X Cocoa Tk 8.5
    # if not keylist:
    if (not keylist) or (macosxSupport.runningAsOSXApp() and eventname in {
                            "<<open-module>>",
                            "<<goto-line>>",
                            "<<change-indentwidth>>"}):
        return ""
    s = keylist[0]
    # Normalize the Tk sequence syntax into menu-accelerator style.
    s = re.sub(r"-[a-z]\b", lambda m: m.group().upper(), s)
    s = re.sub(r"\b\w+\b", lambda m: keynames.get(m.group(), m.group()), s)
    s = re.sub("Key-", "", s)
    s = re.sub("Cancel","Ctrl-Break",s)   # dscherer@cmu.edu
    s = re.sub("Control-", "Ctrl-", s)
    s = re.sub("-", "+", s)
    s = re.sub("><", " ", s)
    s = re.sub("<", "", s)
    s = re.sub(">", "", s)
    return s
def fixwordbreaks(root):
    # Make sure that Tk's double-click and next/previous word
    # operations use our definition of a word (i.e. an identifier)
    tk = root.tk
    tk.call('tcl_wordBreakAfter', 'a b', 0) # make sure word.tcl is loaded
    tk.call('set', 'tcl_wordchars', '[a-zA-Z0-9_]')
    tk.call('set', 'tcl_nonwordchars', '[^a-zA-Z0-9_]')
def test():
    """Manual smoke test: open one editor window (on sys.argv[1] if given)
    and run the Tk main loop until it is closed."""
    root = Tk()
    fixwordbreaks(root)
    root.withdraw()
    if sys.argv[1:]:
        filename = sys.argv[1]
    else:
        filename = None
    edit = EditorWindow(root=root, filename=filename)
    edit.set_close_hook(root.quit)
    edit.text.bind("<<close-all-windows>>", edit.close_event)
    root.mainloop()
    root.destroy()
# Run the manual smoke test when executed directly.
if __name__ == '__main__':
    test()
| gpl-2.0 |
lovasb/django-ajaxtables | ajaxtables/views/__init__.py | 2 | 3679 | from django.http import Http404
from django.core.exceptions import ImproperlyConfigured
from vanilla import ListView
class AjaxListView(ListView):
    """ListView variant for ajax-driven tables.

    Plain GET requests render the full page template (with an unbound
    filter form, when configured); ajax requests render only the data
    fragment with a paginated, sorted, column-filtered queryset.  POST
    applies ``filter_form_class`` to filter the queryset.

    ``template_names`` must hold exactly two entries:
    [full page template, ajax data-fragment template].
    """
    template_names = ['ajaxtables/object_list.html', 'ajaxtables/object_list_data.html']
    filter_form_class = None  # optional form class used to filter results
    page_size = 10
    page_kwarg = 'page'

    def get_page_from_request(self):
        """Return (page_size, current_page) from the 'pageSize'/'toPage'
        query parameters, falling back to the class defaults."""
        page_size = int(self.request.GET.get('pageSize', self.page_size))
        act_page = int(self.request.GET.get('toPage', 1))
        return page_size, act_page

    def get_context_data(self, **kwargs):
        """Add a {column_name: True} map of hidden columns to the context.

        Checked in POST first, then GET (GET intentionally wins when both
        carry 'hidden_cols').
        """
        context = super(AjaxListView, self).get_context_data(**kwargs)
        if 'hidden_cols' in self.request.POST:
            context['hidden'] = {k: True for k in self.request.POST.getlist('hidden_cols')}
        if 'hidden_cols' in self.request.GET:
            context['hidden'] = {k: True for k in self.request.GET.getlist('hidden_cols')}
        return context

    def get_template_names(self):
        """Pick the ajax fragment template for ajax requests, the full
        page template otherwise.

        Validated with an explicit check (not ``assert``, which is
        stripped under ``python -O``).
        """
        if len(self.template_names) != 2:
            msg = "'%s' must have two template names: one for viewing " \
                  "the table, and one for the ajax loaded data."
            raise ImproperlyConfigured(msg % self.__class__.__name__)
        if self.request.is_ajax():
            return [self.template_names[1]]
        return [self.template_names[0]]

    def form_to_filters(self, form_data):
        """Hook: map the filter form's cleaned_data to queryset filter
        kwargs.  Default: no filtering."""
        return {}

    def append_display_filters(self, queryset):
        """Apply ordering ('sort_by') and column deferral ('hidden_cols')
        taken from the request (POST takes precedence over GET)."""
        sort_by = self.request.POST.getlist('sort_by', None) or self.request.GET.getlist('sort_by', None)
        if sort_by:
            queryset = queryset.order_by(*sort_by)
        hidden_cols = self.request.POST.getlist('hidden_cols', None) or self.request.GET.getlist('hidden_cols', None)
        if hidden_cols:
            queryset = queryset.defer(*hidden_cols)
        return queryset

    def paginate_queryset(self, queryset):
        """Paginate with the request-supplied page size; fall back to the
        first page when the requested page is out of range."""
        page_size, act_page = self.get_page_from_request()
        try:
            return super(AjaxListView, self).paginate_queryset(queryset, page_size)
        except Http404:
            paginator = self.get_paginator(queryset, page_size)
            return paginator.page(1)

    def _render_page(self, queryset):
        """Paginate *queryset* and render the (ajax-selected) template."""
        page = self.paginate_queryset(queryset)
        self.object_list = page.object_list
        context = self.get_context_data(
            page_obj=page,
            is_paginated=page.has_other_pages(),
            paginator=page.paginator,
        )
        return self.render_to_response(context)

    def get(self, request, *args, **kwargs):
        """Ajax GET: return a page of table data.  Plain GET: render the
        page shell with an unbound filter form (when configured)."""
        if request.is_ajax():
            return self._render_page(self.append_display_filters(self.get_queryset()))
        form = self.filter_form_class(request.POST or None) if self.filter_form_class else None
        context = self.get_context_data(form=form, page_size=self.page_size)
        return self.render_to_response(context)

    def post(self, request, *args, **kwargs):
        """Apply the filter form and render the filtered results.

        BUG FIX: an invalid form previously fell through and returned
        None (a server error in Django); it now re-renders the page with
        the bound form so its errors can be displayed.
        """
        form = self.filter_form_class(request.POST or None)
        if form.is_valid():
            filters = self.form_to_filters(form.cleaned_data)
            queryset = self.append_display_filters(self.get_queryset().filter(**filters))
            return self._render_page(queryset)
        context = self.get_context_data(form=form, page_size=self.page_size)
        return self.render_to_response(context)
JVillella/tensorflow | tensorflow/examples/tutorials/mnist/mnist_with_summaries.py | 39 | 8550 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A simple MNIST classifier which displays summaries in TensorBoard.
This is an unimpressive MNIST model, but it is a good example of using
tf.name_scope to make a graph legible in the TensorBoard graph explorer, and of
naming summary tags so that they are grouped meaningfully in TensorBoard.
It demonstrates the functionality of every TensorBoard dashboard.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import os
import sys
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
FLAGS = None
def train():
  """Build the two-layer MNIST summary-demo network and train it.

  Reads its configuration from the module-level ``FLAGS`` namespace
  (``data_dir``, ``fake_data``, ``learning_rate``, ``dropout``,
  ``max_steps``, ``log_dir``) and writes merged TensorBoard summaries
  for the train and test splits under ``FLAGS.log_dir``.
  """
  # Import data
  mnist = input_data.read_data_sets(FLAGS.data_dir,
                                    one_hot=True,
                                    fake_data=FLAGS.fake_data)
  # Single global session; all sess.run calls below use it.
  sess = tf.InteractiveSession()
  # Create a multilayer model.
  # Input placeholders
  with tf.name_scope('input'):
    x = tf.placeholder(tf.float32, [None, 784], name='x-input')
    y_ = tf.placeholder(tf.float32, [None, 10], name='y-input')
  with tf.name_scope('input_reshape'):
    # Flat 784-vector back to 28x28x1 images purely for the image summary.
    image_shaped_input = tf.reshape(x, [-1, 28, 28, 1])
    tf.summary.image('input', image_shaped_input, 10)
  # We can't initialize these variables to 0 - the network will get stuck.
  def weight_variable(shape):
    """Create a weight variable with appropriate initialization."""
    initial = tf.truncated_normal(shape, stddev=0.1)
    return tf.Variable(initial)
  def bias_variable(shape):
    """Create a bias variable with appropriate initialization."""
    initial = tf.constant(0.1, shape=shape)
    return tf.Variable(initial)
  def variable_summaries(var):
    """Attach a lot of summaries to a Tensor (for TensorBoard visualization)."""
    with tf.name_scope('summaries'):
      mean = tf.reduce_mean(var)
      tf.summary.scalar('mean', mean)
      with tf.name_scope('stddev'):
        stddev = tf.sqrt(tf.reduce_mean(tf.square(var - mean)))
      tf.summary.scalar('stddev', stddev)
      tf.summary.scalar('max', tf.reduce_max(var))
      tf.summary.scalar('min', tf.reduce_min(var))
      tf.summary.histogram('histogram', var)
  def nn_layer(input_tensor, input_dim, output_dim, layer_name, act=tf.nn.relu):
    """Reusable code for making a simple neural net layer.
    It does a matrix multiply, bias add, and then uses ReLU to nonlinearize.
    It also sets up name scoping so that the resultant graph is easy to read,
    and adds a number of summary ops.
    """
    # Adding a name scope ensures logical grouping of the layers in the graph.
    with tf.name_scope(layer_name):
      # This Variable will hold the state of the weights for the layer
      with tf.name_scope('weights'):
        weights = weight_variable([input_dim, output_dim])
        variable_summaries(weights)
      with tf.name_scope('biases'):
        biases = bias_variable([output_dim])
        variable_summaries(biases)
      with tf.name_scope('Wx_plus_b'):
        preactivate = tf.matmul(input_tensor, weights) + biases
        tf.summary.histogram('pre_activations', preactivate)
      activations = act(preactivate, name='activation')
      tf.summary.histogram('activations', activations)
      return activations
  hidden1 = nn_layer(x, 784, 500, 'layer1')
  with tf.name_scope('dropout'):
    # keep_prob is fed per-step: FLAGS.dropout while training, 1.0 at test
    # time (see feed_dict below).
    keep_prob = tf.placeholder(tf.float32)
    tf.summary.scalar('dropout_keep_probability', keep_prob)
    dropped = tf.nn.dropout(hidden1, keep_prob)
  # Do not apply softmax activation yet, see below.
  y = nn_layer(dropped, 500, 10, 'layer2', act=tf.identity)
  with tf.name_scope('cross_entropy'):
    # The raw formulation of cross-entropy,
    #
    # tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(tf.softmax(y)),
    #                               reduction_indices=[1]))
    #
    # can be numerically unstable.
    #
    # So here we use tf.nn.softmax_cross_entropy_with_logits on the
    # raw outputs of the nn_layer above, and then average across
    # the batch.
    diff = tf.nn.softmax_cross_entropy_with_logits(labels=y_, logits=y)
    with tf.name_scope('total'):
      cross_entropy = tf.reduce_mean(diff)
  tf.summary.scalar('cross_entropy', cross_entropy)
  with tf.name_scope('train'):
    train_step = tf.train.AdamOptimizer(FLAGS.learning_rate).minimize(
        cross_entropy)
  with tf.name_scope('accuracy'):
    with tf.name_scope('correct_prediction'):
      correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
    with tf.name_scope('accuracy'):
      accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
  tf.summary.scalar('accuracy', accuracy)
  # Merge all the summaries and write them out to
  # /tmp/tensorflow/mnist/logs/mnist_with_summaries (by default)
  merged = tf.summary.merge_all()
  train_writer = tf.summary.FileWriter(FLAGS.log_dir + '/train', sess.graph)
  test_writer = tf.summary.FileWriter(FLAGS.log_dir + '/test')
  tf.global_variables_initializer().run()
  # Train the model, and also write summaries.
  # Every 10th step, measure test-set accuracy, and write test summaries
  # All other steps, run train_step on training data, & add training summaries
  def feed_dict(train):
    """Make a TensorFlow feed_dict: maps data onto Tensor placeholders."""
    if train or FLAGS.fake_data:
      xs, ys = mnist.train.next_batch(100, fake_data=FLAGS.fake_data)
      k = FLAGS.dropout
    else:
      xs, ys = mnist.test.images, mnist.test.labels
      k = 1.0
    return {x: xs, y_: ys, keep_prob: k}
  for i in range(FLAGS.max_steps):
    if i % 10 == 0:  # Record summaries and test-set accuracy
      summary, acc = sess.run([merged, accuracy], feed_dict=feed_dict(False))
      test_writer.add_summary(summary, i)
      print('Accuracy at step %s: %s' % (i, acc))
    else:  # Record train set summaries, and train
      if i % 100 == 99:  # Record execution stats
        run_options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
        run_metadata = tf.RunMetadata()
        summary, _ = sess.run([merged, train_step],
                              feed_dict=feed_dict(True),
                              options=run_options,
                              run_metadata=run_metadata)
        train_writer.add_run_metadata(run_metadata, 'step%03d' % i)
        train_writer.add_summary(summary, i)
        print('Adding run metadata for', i)
      else:  # Record a summary
        summary, _ = sess.run([merged, train_step], feed_dict=feed_dict(True))
        train_writer.add_summary(summary, i)
  train_writer.close()
  test_writer.close()
def main(_):
  """Reset FLAGS.log_dir to an empty directory, then run training."""
  log_dir = FLAGS.log_dir
  # Start from a clean slate so stale summaries don't mix with this run.
  if tf.gfile.Exists(log_dir):
    tf.gfile.DeleteRecursively(log_dir)
  tf.gfile.MakeDirs(log_dir)
  train()
if __name__ == '__main__':
  # Command-line flags; unrecognized arguments are forwarded untouched
  # to tf.app.run via parse_known_args.
  tmp_root = os.getenv('TEST_TMPDIR', '/tmp')
  parser = argparse.ArgumentParser()
  parser.add_argument(
      '--fake_data', nargs='?', const=True, type=bool, default=False,
      help='If true, uses fake data for unit testing.')
  parser.add_argument(
      '--max_steps', type=int, default=1000,
      help='Number of steps to run trainer.')
  parser.add_argument(
      '--learning_rate', type=float, default=0.001,
      help='Initial learning rate')
  parser.add_argument(
      '--dropout', type=float, default=0.9,
      help='Keep probability for training dropout.')
  parser.add_argument(
      '--data_dir', type=str,
      default=os.path.join(tmp_root, 'tensorflow/mnist/input_data'),
      help='Directory for storing input data')
  parser.add_argument(
      '--log_dir', type=str,
      default=os.path.join(tmp_root,
                           'tensorflow/mnist/logs/mnist_with_summaries'),
      help='Summaries log directory')
  FLAGS, unparsed = parser.parse_known_args()
  tf.app.run(main=main, argv=[sys.argv[0]] + unparsed)
| apache-2.0 |
katakumpo/niceredis | niceredis/client/server.py | 1 | 6771 | # -*- coding: utf-8 *-*
import warnings
from redis.connection import Token
from redis.exceptions import ConnectionError, RedisError
from .base import RedisBase
class ServerCommands(RedisBase):
    """Mixin providing the Redis server-administration commands.

    Each method is a thin wrapper that forwards a literal command name
    (plus any arguments) to ``execute_command``, which is supplied by
    :class:`RedisBase`.  Return values are whatever the installed
    response callbacks produce for the given command.
    """
    # SERVER INFORMATION
    def bgrewriteaof(self):
        "Tell the Redis server to rewrite the AOF file from data in memory."
        return self.execute_command('BGREWRITEAOF')
    def bgsave(self):
        """
        Tell the Redis server to save its data to disk. Unlike save(),
        this method is asynchronous and returns immediately.
        """
        return self.execute_command('BGSAVE')
    def client_kill(self, address):
        "Disconnects the client at ``address`` (ip:port)"
        return self.execute_command('CLIENT KILL', address)
    def client_list(self):
        "Returns a list of currently connected clients"
        return self.execute_command('CLIENT LIST')
    def client_getname(self):
        "Returns the current connection name"
        return self.execute_command('CLIENT GETNAME')
    def client_setname(self, name):
        "Sets the current connection name"
        return self.execute_command('CLIENT SETNAME', name)
    def config_get(self, pattern="*"):
        "Return a dictionary of configuration based on the ``pattern``"
        return self.execute_command('CONFIG GET', pattern)
    def config_set(self, name, value):
        "Set config item ``name`` with ``value``"
        return self.execute_command('CONFIG SET', name, value)
    def config_resetstat(self):
        "Reset runtime statistics"
        return self.execute_command('CONFIG RESETSTAT')
    def config_rewrite(self):
        "Rewrite config file with the minimal change to reflect running config"
        return self.execute_command('CONFIG REWRITE')
    def dbsize(self):
        "Returns the number of keys in the current database"
        return self.execute_command('DBSIZE')
    def debug_object(self, key):
        "Returns version specific meta information about a given key"
        return self.execute_command('DEBUG OBJECT', key)
    def echo(self, value):
        "Echo the string back from the server"
        return self.execute_command('ECHO', value)
    def flushall(self):
        "Delete all keys in all databases on the current host"
        return self.execute_command('FLUSHALL')
    def flushdb(self):
        "Delete all keys in the current database"
        return self.execute_command('FLUSHDB')
    def info(self, section=None):
        """
        Returns a dictionary containing information about the Redis server
        The ``section`` option can be used to select a specific section
        of information
        The section option is not supported by older versions of Redis Server,
        and will generate ResponseError
        """
        if section is None:
            return self.execute_command('INFO')
        else:
            return self.execute_command('INFO', section)
    def lastsave(self):
        """
        Return a Python datetime object representing the last time the
        Redis database was saved to disk
        """
        return self.execute_command('LASTSAVE')
    # NOTE: the method name intentionally mirrors the Redis OBJECT command
    # (and redis-py's API) even though it shadows the ``object`` builtin
    # inside this class body.
    def object(self, infotype, key):
        "Return the encoding, idletime, or refcount about the key"
        # ``infotype`` is passed once as a command argument and once as a
        # keyword option so the response parser knows which subcommand ran.
        return self.execute_command('OBJECT', infotype, key, infotype=infotype)
    def ping(self):
        "Ping the Redis server"
        return self.execute_command('PING')
    def save(self):
        """
        Tell the Redis server to save its data to disk,
        blocking until the save is complete
        """
        return self.execute_command('SAVE')
    def sentinel(self, *args):
        "Redis Sentinel's SENTINEL command."
        # Deprecated entry point: warns and performs no command.
        warnings.warn(
            DeprecationWarning('Use the individual sentinel_* methods'))
    def sentinel_get_master_addr_by_name(self, service_name):
        "Returns a (host, port) pair for the given ``service_name``"
        return self.execute_command('SENTINEL GET-MASTER-ADDR-BY-NAME',
                                    service_name)
    def sentinel_master(self, service_name):
        "Returns a dictionary containing the specified masters state."
        return self.execute_command('SENTINEL MASTER', service_name)
    def sentinel_masters(self):
        "Returns a list of dictionaries containing each master's state."
        return self.execute_command('SENTINEL MASTERS')
    def sentinel_monitor(self, name, ip, port, quorum):
        "Add a new master to Sentinel to be monitored"
        return self.execute_command('SENTINEL MONITOR', name, ip, port, quorum)
    def sentinel_remove(self, name):
        "Remove a master from Sentinel's monitoring"
        return self.execute_command('SENTINEL REMOVE', name)
    def sentinel_sentinels(self, service_name):
        "Returns a list of sentinels for ``service_name``"
        return self.execute_command('SENTINEL SENTINELS', service_name)
    def sentinel_set(self, name, option, value):
        "Set Sentinel monitoring parameters for a given master"
        return self.execute_command('SENTINEL SET', name, option, value)
    def sentinel_slaves(self, service_name):
        "Returns a list of slaves for ``service_name``"
        return self.execute_command('SENTINEL SLAVES', service_name)
    def shutdown(self):
        "Shutdown the server"
        try:
            self.execute_command('SHUTDOWN')
        except ConnectionError:
            # a ConnectionError here is expected
            return
        # If no ConnectionError was raised the server is still answering,
        # so the shutdown did not happen.
        raise RedisError("SHUTDOWN seems to have failed.")
    def slaveof(self, host=None, port=None):
        """
        Set the server to be a replicated slave of the instance identified
        by the ``host`` and ``port``. If called without arguments, the
        instance is promoted to a master instead.
        """
        if host is None and port is None:
            # Token objects send the literal words NO ONE unquoted, i.e.
            # the Redis command "SLAVEOF NO ONE".
            return self.execute_command('SLAVEOF', Token('NO'), Token('ONE'))
        return self.execute_command('SLAVEOF', host, port)
    def slowlog_get(self, num=None):
        """
        Get the entries from the slowlog. If ``num`` is specified, get the
        most recent ``num`` items.
        """
        args = ['SLOWLOG GET']
        if num is not None:
            args.append(num)
        return self.execute_command(*args)
    def slowlog_len(self):
        "Get the number of items in the slowlog"
        return self.execute_command('SLOWLOG LEN')
    def slowlog_reset(self):
        "Remove all items in the slowlog"
        return self.execute_command('SLOWLOG RESET')
    def time(self):
        """
        Returns the server time as a 2-item tuple of ints:
        (seconds since epoch, microseconds into this second).
        """
        return self.execute_command('TIME')
| mit |
chenjun0210/tensorflow | tensorflow/contrib/learn/python/learn/estimators/metric_key.py | 89 | 1569 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Enum for metric keys."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
class MetricKey(object):
  """Metric key strings.

  Constant names under which tf.contrib.learn estimators report their
  evaluation metrics.  Keys containing ``%d`` are per-class templates to
  be filled with a class index; keys containing ``%f`` are per-threshold
  templates to be filled with a decision threshold.
  """
  # Training objective.
  LOSS = "loss"
  # Area-under-curve metrics (ROC and precision-recall), overall and
  # per-class.
  AUC = "auc"
  AUC_PR = "auc_precision_recall"
  CLASS_AUC = "auc/class%d"
  CLASS_AUC_PR = "auc_precision_recall/class%d"
  # Mean statistics of predictions and labels, overall and per-class.
  PREDICTION_MEAN = "labels/prediction_mean"
  CLASS_PREDICTION_MEAN = "labels/prediction_mean/class%d"
  CLASS_LOGITS_MEAN = "labels/logits_mean/class%d"
  CLASS_PROBABILITY_MEAN = "labels/probability_mean/class%d"
  LABEL_MEAN = "labels/actual_label_mean"
  CLASS_LABEL_MEAN = "labels/actual_label_mean/class%d"
  # Accuracy, its label-mean baseline, and threshold-parameterized
  # accuracy/precision/recall.
  ACCURACY = "accuracy"
  ACCURACY_BASELINE = "accuracy/baseline_label_mean"
  ACCURACY_MEAN = "accuracy/threshold_%f_mean"
  PRECISION_MEAN = "precision/positive_threshold_%f_mean"
  RECALL_MEAN = "recall/positive_threshold_%f_mean"
| apache-2.0 |
tbombach/autorest | src/generator/AutoRest.Python.Tests/Expected/AcceptanceTests/BodyDateTime/autorestdatetimetestservice/operations/datetime_model_operations.py | 5 | 30734 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from .. import models
class DatetimeModelOperations(object):
"""DatetimeModelOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.config = config
def get_null(
self, custom_headers=None, raw=False, **operation_config):
"""Get null datetime value.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: datetime
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/datetime/null'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('iso-8601', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_invalid(
self, custom_headers=None, raw=False, **operation_config):
"""Get invalid datetime value.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: datetime
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/datetime/invalid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('iso-8601', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_overflow(
self, custom_headers=None, raw=False, **operation_config):
"""Get overflow datetime value.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: datetime
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/datetime/overflow'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('iso-8601', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_underflow(
self, custom_headers=None, raw=False, **operation_config):
"""Get underflow datetime value.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: datetime
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/datetime/underflow'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('iso-8601', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_utc_max_date_time(
self, datetime_body, custom_headers=None, raw=False, **operation_config):
"""Put max datetime value 9999-12-31T23:59:59.9999999Z.
:param datetime_body:
:type datetime_body: datetime
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/datetime/max/utc'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(datetime_body, 'iso-8601')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def get_utc_lowercase_max_date_time(
self, custom_headers=None, raw=False, **operation_config):
"""Get max datetime value 9999-12-31t23:59:59.9999999z.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: datetime
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/datetime/max/utc/lowercase'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('iso-8601', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_utc_uppercase_max_date_time(
self, custom_headers=None, raw=False, **operation_config):
"""Get max datetime value 9999-12-31T23:59:59.9999999Z.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: datetime
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/datetime/max/utc/uppercase'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('iso-8601', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_local_positive_offset_max_date_time(
self, datetime_body, custom_headers=None, raw=False, **operation_config):
"""Put max datetime value with positive numoffset
9999-12-31t23:59:59.9999999+14:00.
:param datetime_body:
:type datetime_body: datetime
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/datetime/max/localpositiveoffset'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(datetime_body, 'iso-8601')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def get_local_positive_offset_lowercase_max_date_time(
self, custom_headers=None, raw=False, **operation_config):
"""Get max datetime value with positive num offset
9999-12-31t23:59:59.9999999+14:00.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: datetime
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/datetime/max/localpositiveoffset/lowercase'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('iso-8601', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_local_positive_offset_uppercase_max_date_time(
self, custom_headers=None, raw=False, **operation_config):
"""Get max datetime value with positive num offset
9999-12-31T23:59:59.9999999+14:00.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: datetime
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/datetime/max/localpositiveoffset/uppercase'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('iso-8601', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_local_negative_offset_max_date_time(
self, datetime_body, custom_headers=None, raw=False, **operation_config):
"""Put max datetime value with positive numoffset
9999-12-31t23:59:59.9999999-14:00.
:param datetime_body:
:type datetime_body: datetime
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/datetime/max/localnegativeoffset'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(datetime_body, 'iso-8601')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def get_local_negative_offset_uppercase_max_date_time(
self, custom_headers=None, raw=False, **operation_config):
"""Get max datetime value with positive num offset
9999-12-31T23:59:59.9999999-14:00.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: datetime
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/datetime/max/localnegativeoffset/uppercase'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('iso-8601', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_local_negative_offset_lowercase_max_date_time(
self, custom_headers=None, raw=False, **operation_config):
"""Get max datetime value with positive num offset
9999-12-31t23:59:59.9999999-14:00.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: datetime
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/datetime/max/localnegativeoffset/lowercase'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('iso-8601', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_utc_min_date_time(
        self, datetime_body, custom_headers=None, raw=False, **operation_config):
    """Store the min datetime value 0001-01-01T00:00:00Z.

    :param datetime_body: value to send to the service
    :type datetime_body: datetime
    :param dict custom_headers: extra headers merged into the request
    :param bool raw: when True, return the ClientRawResponse wrapper
    :param operation_config: :ref:`Operation configuration
        overrides<msrest:optionsforoperations>`.
    :rtype: None
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
        if raw=true
    """
    url = '/datetime/min/utc'
    query_parameters = {}

    headers = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        headers.update(custom_headers)

    # The body travels as an ISO-8601 timestamp string.
    body_content = self._serialize.body(datetime_body, 'iso-8601')

    request = self._client.put(url, query_parameters)
    response = self._client.send(
        request, headers, body_content, **operation_config)

    if response.status_code != 200:
        raise models.ErrorException(self._deserialize, response)

    if raw:
        return ClientRawResponse(None, response)
def get_utc_min_date_time(
        self, custom_headers=None, raw=False, **operation_config):
    """Fetch the min datetime value 0001-01-01T00:00:00Z.

    :param dict custom_headers: extra headers merged into the request
    :param bool raw: when True, return the ClientRawResponse wrapper
        instead of only the deserialized value
    :param operation_config: :ref:`Operation configuration
        overrides<msrest:optionsforoperations>`.
    :rtype: datetime
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
        if raw=true
    """
    url = '/datetime/min/utc'
    query_parameters = {}

    headers = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        headers.update(custom_headers)

    request = self._client.get(url, query_parameters)
    response = self._client.send(request, headers, **operation_config)

    if response.status_code != 200:
        raise models.ErrorException(self._deserialize, response)

    deserialized = self._deserialize('iso-8601', response)
    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
def put_local_positive_offset_min_date_time(
        self, datetime_body, custom_headers=None, raw=False, **operation_config):
    """Store the min datetime value 0001-01-01T00:00:00+14:00.

    :param datetime_body: value to send to the service
    :type datetime_body: datetime
    :param dict custom_headers: extra headers merged into the request
    :param bool raw: when True, return the ClientRawResponse wrapper
    :param operation_config: :ref:`Operation configuration
        overrides<msrest:optionsforoperations>`.
    :rtype: None
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
        if raw=true
    """
    url = '/datetime/min/localpositiveoffset'
    query_parameters = {}

    headers = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        headers.update(custom_headers)

    # The body travels as an ISO-8601 timestamp string.
    body_content = self._serialize.body(datetime_body, 'iso-8601')

    request = self._client.put(url, query_parameters)
    response = self._client.send(
        request, headers, body_content, **operation_config)

    if response.status_code != 200:
        raise models.ErrorException(self._deserialize, response)

    if raw:
        return ClientRawResponse(None, response)
def get_local_positive_offset_min_date_time(
        self, custom_headers=None, raw=False, **operation_config):
    """Fetch the min datetime value 0001-01-01T00:00:00+14:00.

    :param dict custom_headers: extra headers merged into the request
    :param bool raw: when True, return the ClientRawResponse wrapper
        instead of only the deserialized value
    :param operation_config: :ref:`Operation configuration
        overrides<msrest:optionsforoperations>`.
    :rtype: datetime
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
        if raw=true
    """
    url = '/datetime/min/localpositiveoffset'
    query_parameters = {}

    headers = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        headers.update(custom_headers)

    request = self._client.get(url, query_parameters)
    response = self._client.send(request, headers, **operation_config)

    if response.status_code != 200:
        raise models.ErrorException(self._deserialize, response)

    deserialized = self._deserialize('iso-8601', response)
    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
def put_local_negative_offset_min_date_time(
        self, datetime_body, custom_headers=None, raw=False, **operation_config):
    """Store the min datetime value 0001-01-01T00:00:00-14:00.

    :param datetime_body: value to send to the service
    :type datetime_body: datetime
    :param dict custom_headers: extra headers merged into the request
    :param bool raw: when True, return the ClientRawResponse wrapper
    :param operation_config: :ref:`Operation configuration
        overrides<msrest:optionsforoperations>`.
    :rtype: None
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
        if raw=true
    """
    url = '/datetime/min/localnegativeoffset'
    query_parameters = {}

    headers = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        headers.update(custom_headers)

    # The body travels as an ISO-8601 timestamp string.
    body_content = self._serialize.body(datetime_body, 'iso-8601')

    request = self._client.put(url, query_parameters)
    response = self._client.send(
        request, headers, body_content, **operation_config)

    if response.status_code != 200:
        raise models.ErrorException(self._deserialize, response)

    if raw:
        return ClientRawResponse(None, response)
def get_local_negative_offset_min_date_time(
        self, custom_headers=None, raw=False, **operation_config):
    """Fetch the min datetime value 0001-01-01T00:00:00-14:00.

    :param dict custom_headers: extra headers merged into the request
    :param bool raw: when True, return the ClientRawResponse wrapper
        instead of only the deserialized value
    :param operation_config: :ref:`Operation configuration
        overrides<msrest:optionsforoperations>`.
    :rtype: datetime
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
        if raw=true
    """
    url = '/datetime/min/localnegativeoffset'
    query_parameters = {}

    headers = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        headers.update(custom_headers)

    request = self._client.get(url, query_parameters)
    response = self._client.send(request, headers, **operation_config)

    if response.status_code != 200:
        raise models.ErrorException(self._deserialize, response)

    deserialized = self._deserialize('iso-8601', response)
    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
| mit |
cchurch/ansible | lib/ansible/module_utils/facts/network/freebsd.py | 232 | 1190 | # This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.module_utils.facts.network.base import NetworkCollector
from ansible.module_utils.facts.network.generic_bsd import GenericBsdIfconfigNetwork
class FreeBSDNetwork(GenericBsdIfconfigNetwork):
    """
    This is the FreeBSD Network Class.
    It uses the GenericBsdIfconfigNetwork unchanged.
    """
    # Platform string matched against the running system to select this class.
    platform = 'FreeBSD'
class FreeBSDNetworkCollector(NetworkCollector):
    """Collector wiring: exposes FreeBSDNetwork facts on the FreeBSD platform."""
    _fact_class = FreeBSDNetwork  # network-fact implementation to instantiate
    _platform = 'FreeBSD'         # platform this collector applies to
| gpl-3.0 |
Tatsh-ansible/ansible | lib/ansible/modules/cloud/vmware/vca_nat.py | 19 | 6959 | #!/usr/bin/python
# Copyright (c) 2015 VMware, Inc. All Rights Reserved.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: vca_nat
short_description: add remove nat rules in a gateway in a vca
description:
- Adds or removes nat rules from a gateway in a vca environment
version_added: "2.0"
author: Peter Sprygada (@privateip)
options:
purge_rules:
description:
- If set to true, it will delete all rules in the gateway that are not given as parameter to this module.
required: false
default: false
nat_rules:
description:
- A list of rules to be added to the gateway, Please see examples on valid entries
required: True
default: false
extends_documentation_fragment: vca.documentation
'''
EXAMPLES = '''
#An example for a source nat
- hosts: localhost
connection: local
tasks:
- vca_nat:
instance_id: 'b15ff1e5-1024-4f55-889f-ea0209726282'
vdc_name: 'benz_ansible'
state: 'present'
nat_rules:
- rule_type: SNAT
original_ip: 192.0.2.42
translated_ip: 203.0.113.23
#example for a DNAT
- hosts: localhost
connection: local
tasks:
- vca_nat:
instance_id: 'b15ff1e5-1024-4f55-889f-ea0209726282'
vdc_name: 'benz_ansible'
state: 'present'
nat_rules:
- rule_type: DNAT
original_ip: 203.0.113.23
original_port: 22
translated_ip: 192.0.2.42
translated_port: 22
'''
import time
import xmltodict
# Complete set of keys a user may supply in a NAT rule dictionary.
VALID_RULE_KEYS = ['rule_type', 'original_ip', 'original_port',
                   'translated_ip', 'translated_port', 'protocol']


def validate_nat_rules(nat_rules):
    """Validate and normalize user-supplied NAT rules in place.

    Each rule must be a dict using only keys from VALID_RULE_KEYS.  Missing
    keys are filled with their defaults ('any', or 'DNAT' for rule_type) and
    every value is lower-cased, so that later comparisons against the
    gateway's current rules are case-insensitive.

    :param list nat_rules: list of rule dictionaries (mutated in place)
    :return: the same list, normalized
    :raises VcaError: if an entry is not a dict or uses an unknown key
    """
    for rule in nat_rules:
        if not isinstance(rule, dict):
            raise VcaError("nat rules must be a list of dictionaries, "
                           "Please check", valid_keys=VALID_RULE_KEYS)
        # Reject unknown keys before normalizing so typos are reported.
        # (Iterate the dict directly instead of the non-idiomatic .keys().)
        for key in rule:
            if key not in VALID_RULE_KEYS:
                raise VcaError("%s is not a valid key in nat rules, please "
                               "check above.." % key, valid_keys=VALID_RULE_KEYS)
        # Ports may be supplied as integers; coerce to string before lower().
        rule['original_port'] = str(rule.get('original_port', 'any')).lower()
        rule['original_ip'] = rule.get('original_ip', 'any').lower()
        rule['translated_ip'] = rule.get('translated_ip', 'any').lower()
        rule['translated_port'] = str(rule.get('translated_port', 'any')).lower()
        rule['protocol'] = rule.get('protocol', 'any').lower()
        rule['rule_type'] = rule.get('rule_type', 'DNAT').lower()
    return nat_rules
def nat_rules_to_dict(nat_rules):
    """Convert pyvcloud NAT rule objects into plain, lower-cased dicts.

    The resulting dictionaries use the same keys as user-supplied rules so
    that current and desired state can be compared with simple equality.
    """
    def _convert(nat_rule):
        gw = nat_rule.get_GatewayNatRule()
        # Empty port/protocol strings mean "any", mirroring the rule defaults.
        return {
            'rule_type': nat_rule.get_RuleType().lower(),
            'original_ip': gw.get_OriginalIp().lower(),
            'original_port': gw.get_OriginalPort().lower() or 'any',
            'translated_ip': gw.get_TranslatedIp().lower(),
            'translated_port': gw.get_TranslatedPort().lower() or 'any',
            'protocol': gw.get_Protocol().lower() or 'any',
        }

    return [_convert(rule) for rule in nat_rules]
def rule_to_string(rule):
    """Render a rule dict as 'key=value, key=value' for display purposes."""
    return ', '.join('%s=%s' % item for item in rule.items())
def main():
    """Module entry point: reconcile the gateway's NAT rules with the
    desired set supplied via module parameters, honoring check mode."""
    argument_spec = vca_argument_spec()
    argument_spec.update(
        dict(
            nat_rules = dict(type='list', default=[]),
            gateway_name = dict(default='gateway'),
            purge_rules = dict(default=False, type='bool'),
            state = dict(default='present', choices=['present', 'absent'])
        )
    )
    module = AnsibleModule(argument_spec, supports_check_mode=True)

    vdc_name = module.params.get('vdc_name')
    state = module.params['state']
    nat_rules = module.params['nat_rules']
    gateway_name = module.params['gateway_name']
    purge_rules = module.params['purge_rules']

    # Nothing to do unless the caller asked for a purge or supplied rules.
    if not purge_rules and not nat_rules:
        module.fail_json(msg='Must define purge_rules or nat_rules')

    vca = vca_login(module)
    gateway = vca.get_gateway(vdc_name, gateway_name)
    if not gateway:
        module.fail_json(msg="Not able to find the gateway %s, please check "
                             "the gateway_name param" % gateway_name)

    try:
        desired_rules = validate_nat_rules(nat_rules)
    except VcaError as e:
        module.fail_json(msg=e.message)

    rules = gateway.get_nat_rules()

    result = dict(changed=False, rules_purged=0)

    deletions = 0
    additions = 0

    # Purge first: wipe every existing rule, commit, then re-read the (now
    # empty) rule list; desired rules are re-added in the loop below.
    if purge_rules is True and len(rules) > 0:
        result['rules_purged'] = len(rules)
        deletions = result['rules_purged']
        rules = list()
        if not module.check_mode:
            gateway.del_all_nat_rules()
            task = gateway.save_services_configuration()
            vca.block_until_completed(task)
            rules = gateway.get_nat_rules()
        result['changed'] = True

    current_rules = nat_rules_to_dict(rules)

    result['current_rules'] = current_rules
    result['desired_rules'] = desired_rules

    # Add any desired rule that is not already present on the gateway.
    for rule in desired_rules:
        if rule not in current_rules:
            additions += 1
            if not module.check_mode:
                gateway.add_nat_rule(**rule)
            result['changed'] = True
    result['rules_added'] = additions

    result['delete_rule'] = list()
    result['delete_rule_rc'] = list()
    # Remove any existing rule that is not in the desired set.
    for rule in current_rules:
        if rule not in desired_rules:
            deletions += 1
            if not module.check_mode:
                result['delete_rule'].append(rule)
                rc = gateway.del_nat_rule(**rule)
                result['delete_rule_rc'].append(rc)
            result['changed'] = True
    result['rules_deleted'] = deletions

    # Commit gateway changes once, after all adds/deletes have been queued.
    if not module.check_mode and (additions > 0 or deletions > 0):
        task = gateway.save_services_configuration()
        vca.block_until_completed(task)

    module.exit_json(**result)
# import module snippets
# NOTE: bottom-of-file wildcard imports are the legacy (pre-Ansible-2.1)
# module boilerplate; they provide AnsibleModule, vca_argument_spec,
# vca_login and VcaError used above.
from ansible.module_utils.basic import *
from ansible.module_utils.vca import *

if __name__ == '__main__':
    main()
| gpl-3.0 |
steedos/odoo7 | openerp/addons/account/edi/invoice.py | 59 | 13975 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2011-2012 OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv, fields
from openerp.addons.edi import EDIMixin
from werkzeug import url_encode
# Field whitelists controlling which columns are serialized into an EDI
# document (see EDIMixin.edi_export).  A value of True exports the field
# as-is; nested dicts describe the sub-document structure of *2many fields.
INVOICE_LINE_EDI_STRUCT = {
    'name': True,
    'origin': True,
    'uos_id': True,
    'product_id': True,
    'price_unit': True,
    'quantity': True,
    'discount': True,
    # fields used for web preview only - discarded on import
    'price_subtotal': True,
}

INVOICE_TAX_LINE_EDI_STRUCT = {
    'name': True,
    'base': True,
    'amount': True,
    'manual': True,
    'sequence': True,
    'base_amount': True,
    'tax_amount': True,
}

INVOICE_EDI_STRUCT = {
    'name': True,
    'origin': True,
    'company_id': True, # -> to be changed into partner
    'type': True, # -> reversed at import
    'internal_number': True, # -> reference at import
    'comment': True,
    'date_invoice': True,
    'date_due': True,
    'partner_id': True,
    'payment_term': True,
    #custom: currency_id
    'invoice_line': INVOICE_LINE_EDI_STRUCT,
    'tax_line': INVOICE_TAX_LINE_EDI_STRUCT,
    # fields used for web preview only - discarded on import
    #custom: 'partner_ref'
    'amount_total': True,
    'amount_untaxed': True,
    'amount_tax': True,
}
class account_invoice(osv.osv, EDIMixin):
    """EDI import/export behaviour for customer and supplier invoices.

    Export embeds company/partner/currency sub-documents; import mirrors a
    received invoice from the other party's point of view (an out_invoice
    becomes an in_invoice, the sender's company becomes a local partner).
    """
    _inherit = 'account.invoice'

    def edi_export(self, cr, uid, records, edi_struct=None, context=None):
        """Exports a supplier or customer invoice"""
        edi_struct = dict(edi_struct or INVOICE_EDI_STRUCT)
        res_company = self.pool.get('res.company')
        res_partner = self.pool.get('res.partner')
        edi_doc_list = []
        for invoice in records:
            # generate the main report
            self._edi_generate_report_attachment(cr, uid, invoice, context=context)

            edi_doc = super(account_invoice,self).edi_export(cr, uid, [invoice], edi_struct, context)[0]
            # enrich the generic EDI document with invoice-specific data
            edi_doc.update({
                'company_address': res_company.edi_export_address(cr, uid, invoice.company_id, context=context),
                'company_paypal_account': invoice.company_id.paypal_account,
                'partner_address': res_partner.edi_export(cr, uid, [invoice.partner_id], context=context)[0],
                'currency': self.pool.get('res.currency').edi_export(cr, uid, [invoice.currency_id], context=context)[0],
                'partner_ref': invoice.reference or False,
            })
            edi_doc_list.append(edi_doc)
        return edi_doc_list

    def _edi_tax_account(self, cr, uid, invoice_type='out_invoice', context=None):
        """Pick a default account for imported (manual) tax lines."""
        #TODO/FIXME: should select proper Tax Account
        account_pool = self.pool.get('account.account')
        account_ids = account_pool.search(cr, uid, [('type','<>','view'),('type','<>','income'), ('type', '<>', 'closed')])
        tax_account = False
        if account_ids:
            # arbitrary: first matching non-view/non-income/non-closed account
            tax_account = account_pool.browse(cr, uid, account_ids[0])
        return tax_account

    def _edi_invoice_account(self, cr, uid, partner_id, invoice_type, context=None):
        """Return the partner's receivable or payable account depending on
        whether the invoice is a customer or supplier document."""
        res_partner = self.pool.get('res.partner')
        partner = res_partner.browse(cr, uid, partner_id, context=context)
        if invoice_type in ('out_invoice', 'out_refund'):
            invoice_account = partner.property_account_receivable
        else:
            invoice_account = partner.property_account_payable
        return invoice_account

    def _edi_product_account(self, cr, uid, product_id, invoice_type, context=None):
        """Return the income/expense account for a product, falling back to
        its category's default account."""
        product_pool = self.pool.get('product.product')
        product = product_pool.browse(cr, uid, product_id, context=context)
        if invoice_type in ('out_invoice','out_refund'):
            account = product.property_account_income or product.categ_id.property_account_income_categ
        else:
            account = product.property_account_expense or product.categ_id.property_account_expense_categ
        return account

    def _edi_import_company(self, cr, uid, edi_document, context=None):
        """Import the sending company as a local partner (customer or
        supplier, depending on document type) and rewire ``partner_id`` in
        the EDI document to point at it.  Returns the new partner's id."""
        # TODO: for multi-company setups, we currently import the document in the
        #       user's current company, but we should perhaps foresee a way to select
        #       the desired company among the user's allowed companies

        self._edi_requires_attributes(('company_id','company_address','type'), edi_document)
        res_partner = self.pool.get('res.partner')

        xid, company_name = edi_document.pop('company_id')
        # Retrofit address info into a unified partner info (changed in v7 - used to keep them separate)
        company_address_edi = edi_document.pop('company_address')
        company_address_edi['name'] = company_name
        company_address_edi['is_company'] = True
        company_address_edi['__import_model'] = 'res.partner'
        company_address_edi['__id'] = xid  # override address ID, as of v7 they should be the same anyway
        if company_address_edi.get('logo'):
            company_address_edi['image'] = company_address_edi.pop('logo')
        invoice_type = edi_document['type']
        # the sender of an outgoing invoice is our customer, and vice versa
        if invoice_type.startswith('out_'):
            company_address_edi['customer'] = True
        else:
            company_address_edi['supplier'] = True
        partner_id = res_partner.edi_import(cr, uid, company_address_edi, context=context)

        # modify edi_document to refer to new partner
        partner = res_partner.browse(cr, uid, partner_id, context=context)
        partner_edi_m2o = self.edi_m2o(cr, uid, partner, context=context)
        edi_document['partner_id'] = partner_edi_m2o
        edi_document.pop('partner_address', None) # ignored, that's supposed to be our own address!

        return partner_id

    def edi_import(self, cr, uid, edi_document, context=None):
        """ During import, invoices will import the company that is provided in the invoice as
            a new partner (e.g. supplier company for a customer invoice will be come a supplier
            record for the new invoice.
            Summary of tasks that need to be done:
                - import company as a new partner, if type==in then supplier=1, else customer=1
                - partner_id field is modified to point to the new partner
                - company_address data used to add address to new partner
                - change type: out_invoice'<->'in_invoice','out_refund'<->'in_refund'
                - reference: should contain the value of the 'internal_number'
                - reference_type: 'none'
                - internal number: reset to False, auto-generated
                - journal_id: should be selected based on type: simply put the 'type'
                    in the context when calling create(), will be selected correctly
                - payment_term: if set, create a default one based on name...
                - for invoice lines, the account_id value should be taken from the
                    product's default, i.e. from the default category, as it will not
                    be provided.
                - for tax lines, we disconnect from the invoice.line, so all tax lines
                    will be of type 'manual', and default accounts should be picked based
                    on the tax config of the DB where it is imported.
        """
        if context is None:
            context = {}
        self._edi_requires_attributes(('company_id','company_address','type','invoice_line','currency'), edi_document)

        # extract currency info
        res_currency = self.pool.get('res.currency')
        currency_info = edi_document.pop('currency')
        currency_id = res_currency.edi_import(cr, uid, currency_info, context=context)
        currency = res_currency.browse(cr, uid, currency_id)
        edi_document['currency_id'] = self.edi_m2o(cr, uid, currency, context=context)

        # change type: out_invoice'<->'in_invoice','out_refund'<->'in_refund'
        invoice_type = edi_document['type']
        invoice_type = invoice_type.startswith('in_') and invoice_type.replace('in_','out_') or invoice_type.replace('out_','in_')
        edi_document['type'] = invoice_type

        # import company as a new partner
        partner_id = self._edi_import_company(cr, uid, edi_document, context=context)

        # Set Account
        invoice_account = self._edi_invoice_account(cr, uid, partner_id, invoice_type, context=context)
        edi_document['account_id'] = invoice_account and self.edi_m2o(cr, uid, invoice_account, context=context) or False

        # reference: should contain the value of the 'internal_number'
        edi_document['reference'] = edi_document.get('internal_number', False)
        # reference_type: 'none'
        edi_document['reference_type'] = 'none'

        # internal number: reset to False, auto-generated
        edi_document['internal_number'] = False

        # discard web preview fields, if present
        edi_document.pop('partner_ref', None)

        # journal_id: should be selected based on type: simply put the 'type' in the context when calling create(), will be selected correctly
        context.update(type=invoice_type)

        # for invoice lines, the account_id value should be taken from the product's default, i.e. from the default category, as it will not be provided.
        for edi_invoice_line in edi_document['invoice_line']:
            product_info = edi_invoice_line['product_id']
            product_id = self.edi_import_relation(cr, uid, 'product.product', product_info[1],
                                                  product_info[0], context=context)
            account = self._edi_product_account(cr, uid, product_id, invoice_type, context=context)
            # TODO: could be improved with fiscal positions perhaps
            # account = fpos_obj.map_account(cr, uid, fiscal_position_id, account.id)
            edi_invoice_line['account_id'] = self.edi_m2o(cr, uid, account, context=context) if account else False

            # discard web preview fields, if present
            edi_invoice_line.pop('price_subtotal', None)

        # for tax lines, we disconnect from the invoice.line, so all tax lines will be of type 'manual', and default accounts should be picked based
        # on the tax config of the DB where it is imported.
        tax_account = self._edi_tax_account(cr, uid, context=context)
        tax_account_info = self.edi_m2o(cr, uid, tax_account, context=context)
        for edi_tax_line in edi_document.get('tax_line', []):
            edi_tax_line['account_id'] = tax_account_info
            edi_tax_line['manual'] = True

        return super(account_invoice,self).edi_import(cr, uid, edi_document, context=context)

    def _edi_record_display_action(self, cr, uid, id, context=None):
        """Returns an appropriate action definition dict for displaying
           the record with ID ``rec_id``.

           :param int id: database ID of record to display
           :return: action definition dict
        """
        action = super(account_invoice,self)._edi_record_display_action(cr, uid, id, context=context)
        try:
            invoice = self.browse(cr, uid, id, context=context)
            # choose the customer or supplier invoice form view accordingly
            if 'out_' in invoice.type:
                view_ext_id = 'invoice_form'
                journal_type = 'sale'
            else:
                view_ext_id = 'invoice_supplier_form'
                journal_type = 'purchase'
            ctx = "{'type': '%s', 'journal_type': '%s'}" % (invoice.type, journal_type)
            action.update(context=ctx)
            view_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'account', view_ext_id)[1]
            action.update(views=[(view_id,'form'), (False, 'tree')])
        except ValueError:
            # ignore if views are missing
            pass
        return action

    def _edi_paypal_url(self, cr, uid, ids, field, arg, context=None):
        """Function field: build a PayPal "pay now" URL for each customer
        invoice whose company has a configured PayPal account; False otherwise."""
        res = dict.fromkeys(ids, False)
        for inv in self.browse(cr, uid, ids, context=context):
            if inv.type == 'out_invoice' and inv.company_id.paypal_account:
                params = {
                    "cmd": "_xclick",
                    "business": inv.company_id.paypal_account,
                    "item_name": "%s Invoice %s" % (inv.company_id.name, inv.number or ''),
                    "invoice": inv.number,
                    "amount": inv.residual,
                    "currency_code": inv.currency_id.name,
                    "button_subtype": "services",
                    "no_note": "1",
                    "bn": "OpenERP_Invoice_PayNow_" + inv.currency_id.name,
                }
                res[inv.id] = "https://www.paypal.com/cgi-bin/webscr?" + url_encode(params)
        return res

    _columns = {
        'paypal_url': fields.function(_edi_paypal_url, type='char', string='Paypal Url'),
    }
class account_invoice_line(osv.osv, EDIMixin):
    """Make invoice lines EDI-capable with the default EDIMixin behaviour."""
    _inherit='account.invoice.line'
class account_invoice_tax(osv.osv, EDIMixin):
    """Make invoice tax lines EDI-capable with the default EDIMixin behaviour."""
    _inherit = "account.invoice.tax"
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
shashank971/edx-platform | docs/en_us/enrollment_api/source/conf.py | 39 | 2910 | # -*- coding: utf-8 -*-
# pylint: disable=invalid-name
# pylint: disable=redefined-builtin
# pylint: disable=protected-access
# pylint: disable=unused-argument
import os
from path import Path as path
import sys

import mock

# Modules that are unavailable (or undesirable to import) at doc-build time;
# they are replaced with Mock objects below so autodoc can import edx code.
MOCK_MODULES = [
    'ipware',
    'ip',
    'ipware.ip',
    'get_ip',
    'pygeoip',
    'ipaddr',
    'django_countries',
    'fields',
    'django_countries.fields',
    'opaque_keys',
    'opaque_keys.edx',
    'opaque_keys.edx.keys',
    'CourseKey',
    'UsageKey',
    'BlockTypeKey',
    'opaque_keys.edx.locations',
    'SlashSeparatedCourseKey',
    'Location',
    'opaque_keys.edx.locator',
    'Locator',
    'south',
    'modelsinspector',
    'south.modelsinspector',
    'add_introspection_rules'
]

for mod_name in MOCK_MODULES:
    sys.modules[mod_name] = mock.Mock()

# True when building on Read the Docs (it sets this environment variable).
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'

sys.path.append('../../../../')

# Pull in the settings shared by all edx doc builds.
from docs.shared.conf import *

# Add any paths that contain templates here, relative to this directory.
#templates_path.append('source/_templates')

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path.append('source/_static')

if not on_rtd:  # only import and set the theme if we're building docs locally
    import sphinx_rtd_theme
    html_theme = 'sphinx_rtd_theme'
    html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
root = path('../../../../').abspath()
sys.path.insert(0, root)
sys.path.append(root / "common/djangoapps")
sys.path.append('.')

#sys.path.insert(
#    0,
#    os.path.abspath(
#        os.path.normpath(
#            os.path.dirname(__file__) + '/../../../..'
#        )
#    )
#)

# django configuration - careful here
# NOTE(review): both branches assign the same value; the on_rtd distinction
# appears vestigial here -- confirm whether a different RTD settings module
# was intended.
if on_rtd:
    os.environ['DJANGO_SETTINGS_MODULE'] = 'lms'
else:
    os.environ['DJANGO_SETTINGS_MODULE'] = 'lms'

# -- General configuration -----------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
    'sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx',
    'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.pngmath',
    'sphinx.ext.mathjax', 'sphinx.ext.viewcode', 'sphinxcontrib.napoleon']

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['build', 'links.rst']

project = u'edX Enrollment API Version 1'
copyright = u'2015, edX'
eXistenZNL/SickRage | lib/hachoir_parser/network/ouid.py | 186 | 385658 | # -*- coding: utf-8 -*-
"""
List of registered IEEE 24-bit Organizationally Unique IDentifiers.
Original data file:
http://standards.ieee.org/regauth/oui/oui.txt
"""
REGISTERED_OUID = {
0x000000: u'XEROX CORPORATION',
0x000001: u'XEROX CORPORATION',
0x000002: u'XEROX CORPORATION',
0x000003: u'XEROX CORPORATION',
0x000004: u'XEROX CORPORATION',
0x000005: u'XEROX CORPORATION',
0x000006: u'XEROX CORPORATION',
0x000007: u'XEROX CORPORATION',
0x000008: u'XEROX CORPORATION',
0x000009: u'XEROX CORPORATION',
0x00000A: u'OMRON TATEISI ELECTRONICS CO.',
0x00000B: u'MATRIX CORPORATION',
0x00000C: u'CISCO SYSTEMS, INC.',
0x00000D: u'FIBRONICS LTD.',
0x00000E: u'FUJITSU LIMITED',
0x00000F: u'NEXT, INC.',
0x000010: u'SYTEK INC.',
0x000011: u'NORMEREL SYSTEMES',
0x000012: u'INFORMATION TECHNOLOGY LIMITED',
0x000013: u'CAMEX',
0x000014: u'NETRONIX',
0x000015: u'DATAPOINT CORPORATION',
0x000016: u'DU PONT PIXEL SYSTEMS.',
0x000017: u'TEKELEC',
0x000018: u'WEBSTER COMPUTER CORPORATION',
0x000019: u'APPLIED DYNAMICS INTERNATIONAL',
0x00001A: u'ADVANCED MICRO DEVICES',
0x00001B: u'NOVELL INC.',
0x00001C: u'BELL TECHNOLOGIES',
0x00001D: u'CABLETRON SYSTEMS, INC.',
0x00001E: u'TELSIST INDUSTRIA ELECTRONICA',
0x00001F: u'Telco Systems, Inc.',
0x000020: u'DATAINDUSTRIER DIAB AB',
0x000021: u'SUREMAN COMP. & COMMUN. CORP.',
0x000022: u'VISUAL TECHNOLOGY INC.',
0x000023: u'ABB INDUSTRIAL SYSTEMS AB',
0x000024: u'CONNECT AS',
0x000025: u'RAMTEK CORP.',
0x000026: u'SHA-KEN CO., LTD.',
0x000027: u'JAPAN RADIO COMPANY',
0x000028: u'PRODIGY SYSTEMS CORPORATION',
0x000029: u'IMC NETWORKS CORP.',
0x00002A: u'TRW - SEDD/INP',
0x00002B: u'CRISP AUTOMATION, INC',
0x00002C: u'AUTOTOTE LIMITED',
0x00002D: u'CHROMATICS INC',
0x00002E: u'SOCIETE EVIRA',
0x00002F: u'TIMEPLEX INC.',
0x000030: u'VG LABORATORY SYSTEMS LTD',
0x000031: u'QPSX COMMUNICATIONS PTY LTD',
0x000032: u'Marconi plc',
0x000033: u'EGAN MACHINERY COMPANY',
0x000034: u'NETWORK RESOURCES CORPORATION',
0x000035: u'SPECTRAGRAPHICS CORPORATION',
0x000036: u'ATARI CORPORATION',
0x000037: u'OXFORD METRICS LIMITED',
0x000038: u'CSS LABS',
0x000039: u'TOSHIBA CORPORATION',
0x00003A: u'CHYRON CORPORATION',
0x00003B: u'i Controls, Inc.',
0x00003C: u'AUSPEX SYSTEMS INC.',
0x00003D: u'UNISYS',
0x00003E: u'SIMPACT',
0x00003F: u'SYNTREX, INC.',
0x000040: u'APPLICON, INC.',
0x000041: u'ICE CORPORATION',
0x000042: u'METIER MANAGEMENT SYSTEMS LTD.',
0x000043: u'MICRO TECHNOLOGY',
0x000044: u'CASTELLE CORPORATION',
0x000045: u'FORD AEROSPACE & COMM. CORP.',
0x000046: u'OLIVETTI NORTH AMERICA',
0x000047: u'NICOLET INSTRUMENTS CORP.',
0x000048: u'SEIKO EPSON CORPORATION',
0x000049: u'APRICOT COMPUTERS, LTD',
0x00004A: u'ADC CODENOLL TECHNOLOGY CORP.',
0x00004B: u'ICL DATA OY',
0x00004C: u'NEC CORPORATION',
0x00004D: u'DCI CORPORATION',
0x00004E: u'AMPEX CORPORATION',
0x00004F: u'LOGICRAFT, INC.',
0x000050: u'RADISYS CORPORATION',
0x000051: u'HOB ELECTRONIC GMBH & CO. KG',
0x000052: u'Intrusion.com, Inc.',
0x000053: u'COMPUCORP',
0x000054: u'MODICON, INC.',
0x000055: u'COMMISSARIAT A L`ENERGIE ATOM.',
0x000056: u'DR. B. STRUCK',
0x000057: u'SCITEX CORPORATION LTD.',
0x000058: u'RACORE COMPUTER PRODUCTS INC.',
0x000059: u'HELLIGE GMBH',
0x00005A: u'SysKonnect GmbH',
0x00005B: u'ELTEC ELEKTRONIK AG',
0x00005C: u'TELEMATICS INTERNATIONAL INC.',
0x00005D: u'CS TELECOM',
0x00005E: u'USC INFORMATION SCIENCES INST',
0x00005F: u'SUMITOMO ELECTRIC IND., LTD.',
0x000060: u'KONTRON ELEKTRONIK GMBH',
0x000061: u'GATEWAY COMMUNICATIONS',
0x000062: u'BULL HN INFORMATION SYSTEMS',
0x000063: u'BARCO CONTROL ROOMS GMBH',
0x000064: u'YOKOGAWA DIGITAL COMPUTER CORP',
0x000065: u'Network General Corporation',
0x000066: u'TALARIS SYSTEMS, INC.',
0x000067: u'SOFT * RITE, INC.',
0x000068: u'ROSEMOUNT CONTROLS',
0x000069: u'CONCORD COMMUNICATIONS INC',
0x00006A: u'COMPUTER CONSOLES INC.',
0x00006B: u'SILICON GRAPHICS INC./MIPS',
0x00006C: u'PRIVATE',
0x00006D: u'CRAY COMMUNICATIONS, LTD.',
0x00006E: u'ARTISOFT, INC.',
0x00006F: u'Madge Ltd.',
0x000070: u'HCL LIMITED',
0x000071: u'ADRA SYSTEMS INC.',
0x000072: u'MINIWARE TECHNOLOGY',
0x000073: u'SIECOR CORPORATION',
0x000074: u'RICOH COMPANY LTD.',
0x000075: u'Nortel Networks',
0x000076: u'ABEKAS VIDEO SYSTEM',
0x000077: u'INTERPHASE CORPORATION',
0x000078: u'LABTAM LIMITED',
0x000079: u'NETWORTH INCORPORATED',
0x00007A: u'DANA COMPUTER INC.',
0x00007B: u'RESEARCH MACHINES',
0x00007C: u'AMPERE INCORPORATED',
0x00007D: u'SUN MICROSYSTEMS, INC.',
0x00007E: u'CLUSTRIX CORPORATION',
0x00007F: u'LINOTYPE-HELL AG',
0x000080: u'CRAY COMMUNICATIONS A/S',
0x000081: u'BAY NETWORKS',
0x000082: u'LECTRA SYSTEMES SA',
0x000083: u'TADPOLE TECHNOLOGY PLC',
0x000084: u'SUPERNET',
0x000085: u'CANON INC.',
0x000086: u'MEGAHERTZ CORPORATION',
0x000087: u'HITACHI, LTD.',
0x000088: u'COMPUTER NETWORK TECH. CORP.',
0x000089: u'CAYMAN SYSTEMS INC.',
0x00008A: u'DATAHOUSE INFORMATION SYSTEMS',
0x00008B: u'INFOTRON',
0x00008C: u'Alloy Computer Products (Australia) Pty Ltd',
0x00008D: u'VERDIX CORPORATION',
0x00008E: u'SOLBOURNE COMPUTER, INC.',
0x00008F: u'RAYTHEON COMPANY',
0x000090: u'MICROCOM',
0x000091: u'ANRITSU CORPORATION',
0x000092: u'COGENT DATA TECHNOLOGIES',
0x000093: u'PROTEON INC.',
0x000094: u'ASANTE TECHNOLOGIES',
0x000095: u'SONY TEKTRONIX CORP.',
0x000096: u'MARCONI ELECTRONICS LTD.',
0x000097: u'EPOCH SYSTEMS',
0x000098: u'CROSSCOMM CORPORATION',
0x000099: u'MTX, INC.',
0x00009A: u'RC COMPUTER A/S',
0x00009B: u'INFORMATION INTERNATIONAL, INC',
0x00009C: u'ROLM MIL-SPEC COMPUTERS',
0x00009D: u'LOCUS COMPUTING CORPORATION',
0x00009E: u'MARLI S.A.',
0x00009F: u'AMERISTAR TECHNOLOGIES INC.',
0x0000A0: u'SANYO Electric Co., Ltd.',
0x0000A1: u'MARQUETTE ELECTRIC CO.',
0x0000A2: u'BAY NETWORKS',
0x0000A3: u'NETWORK APPLICATION TECHNOLOGY',
0x0000A4: u'ACORN COMPUTERS LIMITED',
0x0000A5: u'COMPATIBLE SYSTEMS CORP.',
0x0000A6: u'NETWORK GENERAL CORPORATION',
0x0000A7: u'NETWORK COMPUTING DEVICES INC.',
0x0000A8: u'STRATUS COMPUTER INC.',
0x0000A9: u'NETWORK SYSTEMS CORP.',
0x0000AA: u'XEROX CORPORATION',
0x0000AB: u'LOGIC MODELING CORPORATION',
0x0000AC: u'CONWARE COMPUTER CONSULTING',
0x0000AD: u'BRUKER INSTRUMENTS INC.',
0x0000AE: u'DASSAULT ELECTRONIQUE',
0x0000AF: u'NUCLEAR DATA INSTRUMENTATION',
0x0000B0: u'RND-RAD NETWORK DEVICES',
0x0000B1: u'ALPHA MICROSYSTEMS INC.',
0x0000B2: u'TELEVIDEO SYSTEMS, INC.',
0x0000B3: u'CIMLINC INCORPORATED',
0x0000B4: u'EDIMAX COMPUTER COMPANY',
0x0000B5: u'DATABILITY SOFTWARE SYS. INC.',
0x0000B6: u'MICRO-MATIC RESEARCH',
0x0000B7: u'DOVE COMPUTER CORPORATION',
0x0000B8: u'SEIKOSHA CO., LTD.',
0x0000B9: u'MCDONNELL DOUGLAS COMPUTER SYS',
0x0000BA: u'SIIG, INC.',
0x0000BB: u'TRI-DATA',
0x0000BC: u'ALLEN-BRADLEY CO. INC.',
0x0000BD: u'MITSUBISHI CABLE COMPANY',
0x0000BE: u'THE NTI GROUP',
0x0000BF: u'SYMMETRIC COMPUTER SYSTEMS',
0x0000C0: u'WESTERN DIGITAL CORPORATION',
0x0000C1: u'Madge Ltd.',
0x0000C2: u'INFORMATION PRESENTATION TECH.',
0x0000C3: u'HARRIS CORP COMPUTER SYS DIV',
0x0000C4: u'WATERS DIV. OF MILLIPORE',
0x0000C5: u'FARALLON COMPUTING/NETOPIA',
0x0000C6: u'EON SYSTEMS',
0x0000C7: u'ARIX CORPORATION',
0x0000C8: u'ALTOS COMPUTER SYSTEMS',
0x0000C9: u'EMULEX CORPORATION',
0x0000CA: u'ARRIS International',
0x0000CB: u'COMPU-SHACK ELECTRONIC GMBH',
0x0000CC: u'DENSAN CO., LTD.',
0x0000CD: u'Allied Telesyn Research Ltd.',
0x0000CE: u'MEGADATA CORP.',
0x0000CF: u'HAYES MICROCOMPUTER PRODUCTS',
0x0000D0: u'DEVELCON ELECTRONICS LTD.',
0x0000D1: u'ADAPTEC INCORPORATED',
0x0000D2: u'SBE, INC.',
0x0000D3: u'WANG LABORATORIES INC.',
0x0000D4: u'PURE DATA LTD.',
0x0000D5: u'MICROGNOSIS INTERNATIONAL',
0x0000D6: u'PUNCH LINE HOLDING',
0x0000D7: u'DARTMOUTH COLLEGE',
0x0000D8: u'NOVELL, INC.',
0x0000D9: u'NIPPON TELEGRAPH & TELEPHONE',
0x0000DA: u'ATEX',
0x0000DB: u'BRITISH TELECOMMUNICATIONS PLC',
0x0000DC: u'HAYES MICROCOMPUTER PRODUCTS',
0x0000DD: u'TCL INCORPORATED',
0x0000DE: u'CETIA',
0x0000DF: u'BELL & HOWELL PUB SYS DIV',
0x0000E0: u'QUADRAM CORP.',
0x0000E1: u'GRID SYSTEMS',
0x0000E2: u'ACER TECHNOLOGIES CORP.',
0x0000E3: u'INTEGRATED MICRO PRODUCTS LTD',
0x0000E4: u'IN2 GROUPE INTERTECHNIQUE',
0x0000E5: u'SIGMEX LTD.',
0x0000E6: u'APTOR PRODUITS DE COMM INDUST',
0x0000E7: u'STAR GATE TECHNOLOGIES',
0x0000E8: u'ACCTON TECHNOLOGY CORP.',
0x0000E9: u'ISICAD, INC.',
0x0000EA: u'UPNOD AB',
0x0000EB: u'MATSUSHITA COMM. IND. CO. LTD.',
0x0000EC: u'MICROPROCESS',
0x0000ED: u'APRIL',
0x0000EE: u'NETWORK DESIGNERS, LTD.',
0x0000EF: u'KTI',
0x0000F0: u'SAMSUNG ELECTRONICS CO., LTD.',
0x0000F1: u'MAGNA COMPUTER CORPORATION',
0x0000F2: u'SPIDER COMMUNICATIONS',
0x0000F3: u'GANDALF DATA LIMITED',
0x0000F4: u'ALLIED TELESYN INTERNATIONAL',
0x0000F5: u'DIAMOND SALES LIMITED',
0x0000F6: u'APPLIED MICROSYSTEMS CORP.',
0x0000F7: u'YOUTH KEEP ENTERPRISE CO LTD',
0x0000F8: u'DIGITAL EQUIPMENT CORPORATION',
0x0000F9: u'QUOTRON SYSTEMS INC.',
0x0000FA: u'MICROSAGE COMPUTER SYSTEMS INC',
0x0000FB: u'RECHNER ZUR KOMMUNIKATION',
0x0000FC: u'MEIKO',
0x0000FD: u'HIGH LEVEL HARDWARE',
0x0000FE: u'ANNAPOLIS MICRO SYSTEMS',
0x0000FF: u'CAMTEC ELECTRONICS LTD.',
0x000100: u'EQUIP\'TRANS',
0x000101: u'PRIVATE',
0x000102: u'3COM CORPORATION',
0x000103: u'3COM CORPORATION',
0x000104: u'DVICO Co., Ltd.',
0x000105: u'BECKHOFF GmbH',
0x000106: u'Tews Datentechnik GmbH',
0x000107: u'Leiser GmbH',
0x000108: u'AVLAB Technology, Inc.',
0x000109: u'Nagano Japan Radio Co., Ltd.',
0x00010A: u'CIS TECHNOLOGY INC.',
0x00010B: u'Space CyberLink, Inc.',
0x00010C: u'System Talks Inc.',
0x00010D: u'CORECO, INC.',
0x00010E: u'Bri-Link Technologies Co., Ltd',
0x00010F: u'McDATA Corporation',
0x000110: u'Gotham Networks',
0x000111: u'iDigm Inc.',
0x000112: u'Shark Multimedia Inc.',
0x000113: u'OLYMPUS CORPORATION',
0x000114: u'KANDA TSUSHIN KOGYO CO., LTD.',
0x000115: u'EXTRATECH CORPORATION',
0x000116: u'Netspect Technologies, Inc.',
0x000117: u'CANAL +',
0x000118: u'EZ Digital Co., Ltd.',
0x000119: u'RTUnet (Australia)',
0x00011A: u'EEH DataLink GmbH',
0x00011B: u'Unizone Technologies, Inc.',
0x00011C: u'Universal Talkware Corporation',
0x00011D: u'Centillium Communications',
0x00011E: u'Precidia Technologies, Inc.',
0x00011F: u'RC Networks, Inc.',
0x000120: u'OSCILLOQUARTZ S.A.',
0x000121: u'Watchguard Technologies, Inc.',
0x000122: u'Trend Communications, Ltd.',
0x000123: u'DIGITAL ELECTRONICS CORP.',
0x000124: u'Acer Incorporated',
0x000125: u'YAESU MUSEN CO., LTD.',
0x000126: u'PAC Labs',
0x000127: u'OPEN Networks Pty Ltd',
0x000128: u'EnjoyWeb, Inc.',
0x000129: u'DFI Inc.',
0x00012A: u'Telematica Sistems Inteligente',
0x00012B: u'TELENET Co., Ltd.',
0x00012C: u'Aravox Technologies, Inc.',
0x00012D: u'Komodo Technology',
0x00012E: u'PC Partner Ltd.',
0x00012F: u'Twinhead International Corp',
0x000130: u'Extreme Networks',
0x000131: u'Detection Systems, Inc.',
0x000132: u'Dranetz - BMI',
0x000133: u'KYOWA Electronic Instruments C',
0x000134: u'SIG Positec Systems AG',
0x000135: u'KDC Corp.',
0x000136: u'CyberTAN Technology, Inc.',
0x000137: u'IT Farm Corporation',
0x000138: u'XAVi Technologies Corp.',
0x000139: u'Point Multimedia Systems',
0x00013A: u'SHELCAD COMMUNICATIONS, LTD.',
0x00013B: u'BNA SYSTEMS',
0x00013C: u'TIW SYSTEMS',
0x00013D: u'RiscStation Ltd.',
0x00013E: u'Ascom Tateco AB',
0x00013F: u'Neighbor World Co., Ltd.',
0x000140: u'Sendtek Corporation',
0x000141: u'CABLE PRINT',
0x000142: u'Cisco Systems, Inc.',
0x000143: u'Cisco Systems, Inc.',
0x000144: u'EMC Corporation',
0x000145: u'WINSYSTEMS, INC.',
0x000146: u'Tesco Controls, Inc.',
0x000147: u'Zhone Technologies',
0x000148: u'X-traWeb Inc.',
0x000149: u'T.D.T. Transfer Data Test GmbH',
0x00014A: u'Sony Corporation',
0x00014B: u'Ennovate Networks, Inc.',
0x00014C: u'Berkeley Process Control',
0x00014D: u'Shin Kin Enterprises Co., Ltd',
0x00014E: u'WIN Enterprises, Inc.',
0x00014F: u'ADTRAN INC',
0x000150: u'GILAT COMMUNICATIONS, LTD.',
0x000151: u'Ensemble Communications',
0x000152: u'CHROMATEK INC.',
0x000153: u'ARCHTEK TELECOM CORPORATION',
0x000154: u'G3M Corporation',
0x000155: u'Promise Technology, Inc.',
0x000156: u'FIREWIREDIRECT.COM, INC.',
0x000157: u'SYSWAVE CO., LTD',
0x000158: u'Electro Industries/Gauge Tech',
0x000159: u'S1 Corporation',
0x00015A: u'Digital Video Broadcasting',
0x00015B: u'ITALTEL S.p.A/RF-UP-I',
0x00015C: u'CADANT INC.',
0x00015D: u'Sun Microsystems, Inc',
0x00015E: u'BEST TECHNOLOGY CO., LTD.',
0x00015F: u'DIGITAL DESIGN GmbH',
0x000160: u'ELMEX Co., LTD.',
0x000161: u'Meta Machine Technology',
0x000162: u'Cygnet Technologies, Inc.',
0x000163: u'Cisco Systems, Inc.',
0x000164: u'Cisco Systems, Inc.',
0x000165: u'AirSwitch Corporation',
0x000166: u'TC GROUP A/S',
0x000167: u'HIOKI E.E. CORPORATION',
0x000168: u'VITANA CORPORATION',
0x000169: u'Celestix Networks Pte Ltd.',
0x00016A: u'ALITEC',
0x00016B: u'LightChip, Inc.',
0x00016C: u'FOXCONN',
0x00016D: u'CarrierComm Inc.',
0x00016E: u'Conklin Corporation',
0x00016F: u'HAITAI ELECTRONICS CO., LTD.',
0x000170: u'ESE Embedded System Engineer\'g',
0x000171: u'Allied Data Technologies',
0x000172: u'TechnoLand Co., LTD.',
0x000173: u'AMCC',
0x000174: u'CyberOptics Corporation',
0x000175: u'Radiant Communications Corp.',
0x000176: u'Orient Silver Enterprises',
0x000177: u'EDSL',
0x000178: u'MARGI Systems, Inc.',
0x000179: u'WIRELESS TECHNOLOGY, INC.',
0x00017A: u'Chengdu Maipu Electric Industrial Co., Ltd.',
0x00017B: u'Heidelberger Druckmaschinen AG',
0x00017C: u'AG-E GmbH',
0x00017D: u'ThermoQuest',
0x00017E: u'ADTEK System Science Co., Ltd.',
0x00017F: u'Experience Music Project',
0x000180: u'AOpen, Inc.',
0x000181: u'Nortel Networks',
0x000182: u'DICA TECHNOLOGIES AG',
0x000183: u'ANITE TELECOMS',
0x000184: u'SIEB & MEYER AG',
0x000185: u'Aloka Co., Ltd.',
0x000186: u'Uwe Disch',
0x000187: u'i2SE GmbH',
0x000188: u'LXCO Technologies ag',
0x000189: u'Refraction Technology, Inc.',
0x00018A: u'ROI COMPUTER AG',
0x00018B: u'NetLinks Co., Ltd.',
0x00018C: u'Mega Vision',
0x00018D: u'AudeSi Technologies',
0x00018E: u'Logitec Corporation',
0x00018F: u'Kenetec, Inc.',
0x000190: u'SMK-M',
0x000191: u'SYRED Data Systems',
0x000192: u'Texas Digital Systems',
0x000193: u'Hanbyul Telecom Co., Ltd.',
0x000194: u'Capital Equipment Corporation',
0x000195: u'Sena Technologies, Inc.',
0x000196: u'Cisco Systems, Inc.',
0x000197: u'Cisco Systems, Inc.',
0x000198: u'Darim Vision',
0x000199: u'HeiSei Electronics',
0x00019A: u'LEUNIG GmbH',
0x00019B: u'Kyoto Microcomputer Co., Ltd.',
0x00019C: u'JDS Uniphase Inc.',
0x00019D: u'E-Control Systems, Inc.',
0x00019E: u'ESS Technology, Inc.',
0x00019F: u'Phonex Broadband',
0x0001A0: u'Infinilink Corporation',
0x0001A1: u'Mag-Tek, Inc.',
0x0001A2: u'Logical Co., Ltd.',
0x0001A3: u'GENESYS LOGIC, INC.',
0x0001A4: u'Microlink Corporation',
0x0001A5: u'Nextcomm, Inc.',
0x0001A6: u'Scientific-Atlanta Arcodan A/S',
0x0001A7: u'UNEX TECHNOLOGY CORPORATION',
0x0001A8: u'Welltech Computer Co., Ltd.',
0x0001A9: u'BMW AG',
0x0001AA: u'Airspan Communications, Ltd.',
0x0001AB: u'Main Street Networks',
0x0001AC: u'Sitara Networks, Inc.',
0x0001AD: u'Coach Master International d.b.a. CMI Worldwide, Inc.',
0x0001AE: u'Trex Enterprises',
0x0001AF: u'Motorola Computer Group',
0x0001B0: u'Fulltek Technology Co., Ltd.',
0x0001B1: u'General Bandwidth',
0x0001B2: u'Digital Processing Systems, Inc.',
0x0001B3: u'Precision Electronic Manufacturing',
0x0001B4: u'Wayport, Inc.',
0x0001B5: u'Turin Networks, Inc.',
0x0001B6: u'SAEJIN T&M Co., Ltd.',
0x0001B7: u'Centos, Inc.',
0x0001B8: u'Netsensity, Inc.',
0x0001B9: u'SKF Condition Monitoring',
0x0001BA: u'IC-Net, Inc.',
0x0001BB: u'Frequentis',
0x0001BC: u'Brains Corporation',
0x0001BD: u'Peterson Electro-Musical Products, Inc.',
0x0001BE: u'Gigalink Co., Ltd.',
0x0001BF: u'Teleforce Co., Ltd.',
0x0001C0: u'CompuLab, Ltd.',
0x0001C1: u'Vitesse Semiconductor Corporation',
0x0001C2: u'ARK Research Corp.',
0x0001C3: u'Acromag, Inc.',
0x0001C4: u'NeoWave, Inc.',
0x0001C5: u'Simpler Networks',
0x0001C6: u'Quarry Technologies',
0x0001C7: u'Cisco Systems, Inc.',
0x0001C8: u'THOMAS CONRAD CORP.',
0x0001C8: u'CONRAD CORP.',
0x0001C9: u'Cisco Systems, Inc.',
0x0001CA: u'Geocast Network Systems, Inc.',
0x0001CB: u'EVR',
0x0001CC: u'Japan Total Design Communication Co., Ltd.',
0x0001CD: u'ARtem',
0x0001CE: u'Custom Micro Products, Ltd.',
0x0001CF: u'Alpha Data Parallel Systems, Ltd.',
0x0001D0: u'VitalPoint, Inc.',
0x0001D1: u'CoNet Communications, Inc.',
0x0001D2: u'MacPower Peripherals, Ltd.',
0x0001D3: u'PAXCOMM, Inc.',
0x0001D4: u'Leisure Time, Inc.',
0x0001D5: u'HAEDONG INFO & COMM CO., LTD',
0x0001D6: u'MAN Roland Druckmaschinen AG',
0x0001D7: u'F5 Networks, Inc.',
0x0001D8: u'Teltronics, Inc.',
0x0001D9: u'Sigma, Inc.',
0x0001DA: u'WINCOMM Corporation',
0x0001DB: u'Freecom Technologies GmbH',
0x0001DC: u'Activetelco',
0x0001DD: u'Avail Networks',
0x0001DE: u'Trango Systems, Inc.',
0x0001DF: u'ISDN Communications, Ltd.',
0x0001E0: u'Fast Systems, Inc.',
0x0001E1: u'Kinpo Electronics, Inc.',
0x0001E2: u'Ando Electric Corporation',
0x0001E3: u'Siemens AG',
0x0001E4: u'Sitera, Inc.',
0x0001E5: u'Supernet, Inc.',
0x0001E6: u'Hewlett-Packard Company',
0x0001E7: u'Hewlett-Packard Company',
0x0001E8: u'Force10 Networks, Inc.',
0x0001E9: u'Litton Marine Systems B.V.',
0x0001EA: u'Cirilium Corp.',
0x0001EB: u'C-COM Corporation',
0x0001EC: u'Ericsson Group',
0x0001ED: u'SETA Corp.',
0x0001EE: u'Comtrol Europe, Ltd.',
0x0001EF: u'Camtel Technology Corp.',
0x0001F0: u'Tridium, Inc.',
0x0001F1: u'Innovative Concepts, Inc.',
0x0001F2: u'Mark of the Unicorn, Inc.',
0x0001F3: u'QPS, Inc.',
0x0001F4: u'Enterasys Networks',
0x0001F5: u'ERIM S.A.',
0x0001F6: u'Association of Musical Electronics Industry',
0x0001F7: u'Image Display Systems, Inc.',
0x0001F8: u'Adherent Systems, Ltd.',
0x0001F9: u'TeraGlobal Communications Corp.',
0x0001FA: u'HOROSCAS',
0x0001FB: u'DoTop Technology, Inc.',
0x0001FC: u'Keyence Corporation',
0x0001FD: u'Digital Voice Systems, Inc.',
0x0001FE: u'DIGITAL EQUIPMENT CORPORATION',
0x0001FF: u'Data Direct Networks, Inc.',
0x000200: u'Net & Sys Co., Ltd.',
0x000201: u'IFM Electronic gmbh',
0x000202: u'Amino Communications, Ltd.',
0x000203: u'Woonsang Telecom, Inc.',
0x000204: u'Bodmann Industries Elektronik GmbH',
0x000205: u'Hitachi Denshi, Ltd.',
0x000206: u'Telital R&D Denmark A/S',
0x000207: u'VisionGlobal Network Corp.',
0x000208: u'Unify Networks, Inc.',
0x000209: u'Shenzhen SED Information Technology Co., Ltd.',
0x00020A: u'Gefran Spa',
0x00020B: u'Native Networks, Inc.',
0x00020C: u'Metro-Optix',
0x00020D: u'Micronpc.com',
0x00020E: u'Laurel Networks, Inc.',
0x00020F: u'AATR',
0x000210: u'Fenecom',
0x000211: u'Nature Worldwide Technology Corp.',
0x000212: u'SierraCom',
0x000213: u'S.D.E.L.',
0x000214: u'DTVRO',
0x000215: u'Cotas Computer Technology A/B',
0x000216: u'Cisco Systems, Inc.',
0x000217: u'Cisco Systems, Inc.',
0x000218: u'Advanced Scientific Corp',
0x000219: u'Paralon Technologies',
0x00021A: u'Zuma Networks',
0x00021B: u'Kollmorgen-Servotronix',
0x00021C: u'Network Elements, Inc.',
0x00021D: u'Data General Communication Ltd.',
0x00021E: u'SIMTEL S.R.L.',
0x00021F: u'Aculab PLC',
0x000220: u'Canon Aptex, Inc.',
0x000221: u'DSP Application, Ltd.',
0x000222: u'Chromisys, Inc.',
0x000223: u'ClickTV',
0x000224: u'C-COR',
0x000225: u'Certus Technology, Inc.',
0x000226: u'XESystems, Inc.',
0x000227: u'ESD GmbH',
0x000228: u'Necsom, Ltd.',
0x000229: u'Adtec Corporation',
0x00022A: u'Asound Electronic',
0x00022B: u'SAXA, Inc.',
0x00022C: u'ABB Bomem, Inc.',
0x00022D: u'Agere Systems',
0x00022E: u'TEAC Corp. R& D',
0x00022F: u'P-Cube, Ltd.',
0x000230: u'Intersoft Electronics',
0x000231: u'Ingersoll-Rand',
0x000232: u'Avision, Inc.',
0x000233: u'Mantra Communications, Inc.',
0x000234: u'Imperial Technology, Inc.',
0x000235: u'Paragon Networks International',
0x000236: u'INIT GmbH',
0x000237: u'Cosmo Research Corp.',
0x000238: u'Serome Technology, Inc.',
0x000239: u'Visicom',
0x00023A: u'ZSK Stickmaschinen GmbH',
0x00023B: u'Redback Networks',
0x00023C: u'Creative Technology, Ltd.',
0x00023D: u'NuSpeed, Inc.',
0x00023E: u'Selta Telematica S.p.a',
0x00023F: u'Compal Electronics, Inc.',
0x000240: u'Seedek Co., Ltd.',
0x000241: u'Amer.com',
0x000242: u'Videoframe Systems',
0x000243: u'Raysis Co., Ltd.',
0x000244: u'SURECOM Technology Co.',
0x000245: u'Lampus Co, Ltd.',
0x000246: u'All-Win Tech Co., Ltd.',
0x000247: u'Great Dragon Information Technology (Group) Co., Ltd.',
0x000248: u'Pilz GmbH & Co.',
0x000249: u'Aviv Infocom Co, Ltd.',
0x00024A: u'Cisco Systems, Inc.',
0x00024B: u'Cisco Systems, Inc.',
0x00024C: u'SiByte, Inc.',
0x00024D: u'Mannesman Dematic Colby Pty. Ltd.',
0x00024E: u'Datacard Group',
0x00024F: u'IPM Datacom S.R.L.',
0x000250: u'Geyser Networks, Inc.',
0x000251: u'Soma Networks, Inc.',
0x000252: u'Carrier Corporation',
0x000253: u'Televideo, Inc.',
0x000254: u'WorldGate',
0x000255: u'IBM Corporation',
0x000256: u'Alpha Processor, Inc.',
0x000257: u'Microcom Corp.',
0x000258: u'Flying Packets Communications',
0x000259: u'Tsann Kuen China (Shanghai)Enterprise Co., Ltd. IT Group',
0x00025A: u'Catena Networks',
0x00025B: u'Cambridge Silicon Radio',
0x00025C: u'SCI Systems (Kunshan) Co., Ltd.',
0x00025D: u'Calix Networks',
0x00025E: u'High Technology Ltd',
0x00025F: u'Nortel Networks',
0x000260: u'Accordion Networks, Inc.',
0x000261: u'Tilgin AB',
0x000262: u'Soyo Group Soyo Com Tech Co., Ltd',
0x000263: u'UPS Manufacturing SRL',
0x000264: u'AudioRamp.com',
0x000265: u'Virditech Co. Ltd.',
0x000266: u'Thermalogic Corporation',
0x000267: u'NODE RUNNER, INC.',
0x000268: u'Harris Government Communications',
0x000269: u'Nadatel Co., Ltd',
0x00026A: u'Cocess Telecom Co., Ltd.',
0x00026B: u'BCM Computers Co., Ltd.',
0x00026C: u'Philips CFT',
0x00026D: u'Adept Telecom',
0x00026E: u'NeGeN Access, Inc.',
0x00026F: u'Senao International Co., Ltd.',
0x000270: u'Crewave Co., Ltd.',
0x000271: u'Vpacket Communications',
0x000272: u'CC&C Technologies, Inc.',
0x000273: u'Coriolis Networks',
0x000274: u'Tommy Technologies Corp.',
0x000275: u'SMART Technologies, Inc.',
0x000276: u'Primax Electronics Ltd.',
0x000277: u'Cash Systemes Industrie',
0x000278: u'Samsung Electro-Mechanics Co., Ltd.',
0x000279: u'Control Applications, Ltd.',
0x00027A: u'IOI Technology Corporation',
0x00027B: u'Amplify Net, Inc.',
0x00027C: u'Trilithic, Inc.',
0x00027D: u'Cisco Systems, Inc.',
0x00027E: u'Cisco Systems, Inc.',
0x00027F: u'ask-technologies.com',
0x000280: u'Mu Net, Inc.',
0x000281: u'Madge Ltd.',
0x000282: u'ViaClix, Inc.',
0x000283: u'Spectrum Controls, Inc.',
0x000284: u'AREVA T&D',
0x000285: u'Riverstone Networks',
0x000286: u'Occam Networks',
0x000287: u'Adapcom',
0x000288: u'GLOBAL VILLAGE COMMUNICATION',
0x000289: u'DNE Technologies',
0x00028A: u'Ambit Microsystems Corporation',
0x00028B: u'VDSL Systems OY',
0x00028C: u'Micrel-Synergy Semiconductor',
0x00028D: u'Movita Technologies, Inc.',
0x00028E: u'Rapid 5 Networks, Inc.',
0x00028F: u'Globetek, Inc.',
0x000290: u'Woorigisool, Inc.',
0x000291: u'Open Network Co., Ltd.',
0x000292: u'Logic Innovations, Inc.',
0x000293: u'Solid Data Systems',
0x000294: u'Tokyo Sokushin Co., Ltd.',
0x000295: u'IP.Access Limited',
0x000296: u'Lectron Co,. Ltd.',
0x000297: u'C-COR.net',
0x000298: u'Broadframe Corporation',
0x000299: u'Apex, Inc.',
0x00029A: u'Storage Apps',
0x00029B: u'Kreatel Communications AB',
0x00029C: u'3COM',
0x00029D: u'Merix Corp.',
0x00029E: u'Information Equipment Co., Ltd.',
0x00029F: u'L-3 Communication Aviation Recorders',
0x0002A0: u'Flatstack Ltd.',
0x0002A1: u'World Wide Packets',
0x0002A2: u'Hilscher GmbH',
0x0002A3: u'ABB Power Automation',
0x0002A4: u'AddPac Technology Co., Ltd.',
0x0002A5: u'Compaq Computer Corporation',
0x0002A6: u'Effinet Systems Co., Ltd.',
0x0002A7: u'Vivace Networks',
0x0002A8: u'Air Link Technology',
0x0002A9: u'RACOM, s.r.o.',
0x0002AA: u'PLcom Co., Ltd.',
0x0002AB: u'CTC Union Technologies Co., Ltd.',
0x0002AC: u'3PAR data',
0x0002AD: u'Pentax Corpotation',
0x0002AE: u'Scannex Electronics Ltd.',
0x0002AF: u'TeleCruz Technology, Inc.',
0x0002B0: u'Hokubu Communication & Industrial Co., Ltd.',
0x0002B1: u'Anritsu, Ltd.',
0x0002B2: u'Cablevision',
0x0002B3: u'Intel Corporation',
0x0002B4: u'DAPHNE',
0x0002B5: u'Avnet, Inc.',
0x0002B6: u'Acrosser Technology Co., Ltd.',
0x0002B7: u'Watanabe Electric Industry Co., Ltd.',
0x0002B8: u'WHI KONSULT AB',
0x0002B9: u'Cisco Systems, Inc.',
0x0002BA: u'Cisco Systems, Inc.',
0x0002BB: u'Continuous Computing',
0x0002BC: u'LVL 7 Systems, Inc.',
0x0002BD: u'Bionet Co., Ltd.',
0x0002BE: u'Totsu Engineering, Inc.',
0x0002BF: u'dotRocket, Inc.',
0x0002C0: u'Bencent Tzeng Industry Co., Ltd.',
0x0002C1: u'Innovative Electronic Designs, Inc.',
0x0002C2: u'Net Vision Telecom',
0x0002C3: u'Arelnet Ltd.',
0x0002C4: u'Vector International BUBA',
0x0002C5: u'Evertz Microsystems Ltd.',
0x0002C6: u'Data Track Technology PLC',
0x0002C7: u'ALPS ELECTRIC Co., Ltd.',
0x0002C8: u'Technocom Communications Technology (pte) Ltd',
0x0002C9: u'Mellanox Technologies',
0x0002CA: u'EndPoints, Inc.',
0x0002CB: u'TriState Ltd.',
0x0002CC: u'M.C.C.I',
0x0002CD: u'TeleDream, Inc.',
0x0002CE: u'FoxJet, Inc.',
0x0002CF: u'ZyGate Communications, Inc.',
0x0002D0: u'Comdial Corporation',
0x0002D1: u'Vivotek, Inc.',
0x0002D2: u'Workstation AG',
0x0002D3: u'NetBotz, Inc.',
0x0002D4: u'PDA Peripherals, Inc.',
0x0002D5: u'ACR',
0x0002D6: u'NICE Systems',
0x0002D7: u'EMPEG Ltd',
0x0002D8: u'BRECIS Communications Corporation',
0x0002D9: u'Reliable Controls',
0x0002DA: u'ExiO Communications, Inc.',
0x0002DB: u'NETSEC',
0x0002DC: u'Fujitsu General Limited',
0x0002DD: u'Bromax Communications, Ltd.',
0x0002DE: u'Astrodesign, Inc.',
0x0002DF: u'Net Com Systems, Inc.',
0x0002E0: u'ETAS GmbH',
0x0002E1: u'Integrated Network Corporation',
0x0002E2: u'NDC Infared Engineering',
0x0002E3: u'LITE-ON Communications, Inc.',
0x0002E4: u'JC HYUN Systems, Inc.',
0x0002E5: u'Timeware Ltd.',
0x0002E6: u'Gould Instrument Systems, Inc.',
0x0002E7: u'CAB GmbH & Co KG',
0x0002E8: u'E.D.&A.',
0x0002E9: u'CS Systemes De Securite - C3S',
0x0002EA: u'Focus Enhancements',
0x0002EB: u'Pico Communications',
0x0002EC: u'Maschoff Design Engineering',
0x0002ED: u'DXO Telecom Co., Ltd.',
0x0002EE: u'Nokia Danmark A/S',
0x0002EF: u'CCC Network Systems Group Ltd.',
0x0002F0: u'AME Optimedia Technology Co., Ltd.',
0x0002F1: u'Pinetron Co., Ltd.',
0x0002F2: u'eDevice, Inc.',
0x0002F3: u'Media Serve Co., Ltd.',
0x0002F4: u'PCTEL, Inc.',
0x0002F5: u'VIVE Synergies, Inc.',
0x0002F6: u'Equipe Communications',
0x0002F7: u'ARM',
0x0002F8: u'SEAKR Engineering, Inc.',
0x0002F9: u'Mimos Semiconductor SDN BHD',
0x0002FA: u'DX Antenna Co., Ltd.',
0x0002FB: u'Baumuller Aulugen-Systemtechnik GmbH',
0x0002FC: u'Cisco Systems, Inc.',
0x0002FD: u'Cisco Systems, Inc.',
0x0002FE: u'Viditec, Inc.',
0x0002FF: u'Handan BroadInfoCom',
0x000300: u'NetContinuum, Inc.',
0x000301: u'Avantas Networks Corporation',
0x000302: u'Charles Industries, Ltd.',
0x000303: u'JAMA Electronics Co., Ltd.',
0x000304: u'Pacific Broadband Communications',
0x000305: u'Smart Network Devices GmbH',
0x000306: u'Fusion In Tech Co., Ltd.',
0x000307: u'Secure Works, Inc.',
0x000308: u'AM Communications, Inc.',
0x000309: u'Texcel Technology PLC',
0x00030A: u'Argus Technologies',
0x00030B: u'Hunter Technology, Inc.',
0x00030C: u'Telesoft Technologies Ltd.',
0x00030D: u'Uniwill Computer Corp.',
0x00030E: u'Core Communications Co., Ltd.',
0x00030F: u'Digital China (Shanghai) Networks Ltd.',
0x000310: u'Link Evolution Corp.',
0x000311: u'Micro Technology Co., Ltd.',
0x000312: u'TR-Systemtechnik GmbH',
0x000313: u'Access Media SPA',
0x000314: u'Teleware Network Systems',
0x000315: u'Cidco Incorporated',
0x000316: u'Nobell Communications, Inc.',
0x000317: u'Merlin Systems, Inc.',
0x000318: u'Cyras Systems, Inc.',
0x000319: u'Infineon AG',
0x00031A: u'Beijing Broad Telecom Ltd., China',
0x00031B: u'Cellvision Systems, Inc.',
0x00031C: u'Svenska Hardvarufabriken AB',
0x00031D: u'Taiwan Commate Computer, Inc.',
0x00031E: u'Optranet, Inc.',
0x00031F: u'Condev Ltd.',
0x000320: u'Xpeed, Inc.',
0x000321: u'Reco Research Co., Ltd.',
0x000322: u'IDIS Co., Ltd.',
0x000323: u'Cornet Technology, Inc.',
0x000324: u'SANYO Multimedia Tottori Co., Ltd.',
0x000325: u'Arima Computer Corp.',
0x000326: u'Iwasaki Information Systems Co., Ltd.',
0x000327: u'ACT\'L',
0x000328: u'Mace Group, Inc.',
0x000329: u'F3, Inc.',
0x00032A: u'UniData Communication Systems, Inc.',
0x00032B: u'GAI Datenfunksysteme GmbH',
0x00032C: u'ABB Industrie AG',
0x00032D: u'IBASE Technology, Inc.',
0x00032E: u'Scope Information Management, Ltd.',
0x00032F: u'Global Sun Technology, Inc.',
0x000330: u'Imagenics, Co., Ltd.',
0x000331: u'Cisco Systems, Inc.',
0x000332: u'Cisco Systems, Inc.',
0x000333: u'Digitel Co., Ltd.',
0x000334: u'Newport Electronics',
0x000335: u'Mirae Technology',
0x000336: u'Zetes Technologies',
0x000337: u'Vaone, Inc.',
0x000338: u'Oak Technology',
0x000339: u'Eurologic Systems, Ltd.',
0x00033A: u'Silicon Wave, Inc.',
0x00033B: u'TAMI Tech Co., Ltd.',
0x00033C: u'Daiden Co., Ltd.',
0x00033D: u'ILSHin Lab',
0x00033E: u'Tateyama System Laboratory Co., Ltd.',
0x00033F: u'BigBand Networks, Ltd.',
0x000340: u'Floware Wireless Systems, Ltd.',
0x000341: u'Axon Digital Design',
0x000342: u'Nortel Networks',
0x000343: u'Martin Professional A/S',
0x000344: u'Tietech.Co., Ltd.',
0x000345: u'Routrek Networks Corporation',
0x000346: u'Hitachi Kokusai Electric, Inc.',
0x000347: u'Intel Corporation',
0x000348: u'Norscan Instruments, Ltd.',
0x000349: u'Vidicode Datacommunicatie B.V.',
0x00034A: u'RIAS Corporation',
0x00034B: u'Nortel Networks',
0x00034C: u'Shanghai DigiVision Technology Co., Ltd.',
0x00034D: u'Chiaro Networks, Ltd.',
0x00034E: u'Pos Data Company, Ltd.',
0x00034F: u'Sur-Gard Security',
0x000350: u'BTICINO SPA',
0x000351: u'Diebold, Inc.',
0x000352: u'Colubris Networks',
0x000353: u'Mitac, Inc.',
0x000354: u'Fiber Logic Communications',
0x000355: u'TeraBeam Internet Systems',
0x000356: u'Wincor Nixdorf GmbH & Co KG',
0x000357: u'Intervoice-Brite, Inc.',
0x000358: u'Hanyang Digitech Co., Ltd.',
0x000359: u'DigitalSis',
0x00035A: u'Photron Limited',
0x00035B: u'BridgeWave Communications',
0x00035C: u'Saint Song Corp.',
0x00035D: u'Bosung Hi-Net Co., Ltd.',
0x00035E: u'Metropolitan Area Networks, Inc.',
0x00035F: u'Prueftechnik Condition Monitoring GmbH & Co. KG',
0x000360: u'PAC Interactive Technology, Inc.',
0x000361: u'Widcomm, Inc.',
0x000362: u'Vodtel Communications, Inc.',
0x000363: u'Miraesys Co., Ltd.',
0x000364: u'Scenix Semiconductor, Inc.',
0x000365: u'Kira Information & Communications, Ltd.',
0x000366: u'ASM Pacific Technology',
0x000367: u'Jasmine Networks, Inc.',
0x000368: u'Embedone Co., Ltd.',
0x000369: u'Nippon Antenna Co., Ltd.',
0x00036A: u'Mainnet, Ltd.',
0x00036B: u'Cisco Systems, Inc.',
0x00036C: u'Cisco Systems, Inc.',
0x00036D: u'Runtop, Inc.',
0x00036E: u'Nicon Systems (Pty) Limited',
0x00036F: u'Telsey SPA',
0x000370: u'NXTV, Inc.',
0x000371: u'Acomz Networks Corp.',
0x000372: u'ULAN',
0x000373: u'Aselsan A.S',
0x000374: u'Hunter Watertech',
0x000375: u'NetMedia, Inc.',
0x000376: u'Graphtec Technology, Inc.',
0x000377: u'Gigabit Wireless',
0x000378: u'HUMAX Co., Ltd.',
0x000379: u'Proscend Communications, Inc.',
0x00037A: u'Taiyo Yuden Co., Ltd.',
0x00037B: u'IDEC IZUMI Corporation',
0x00037C: u'Coax Media',
0x00037D: u'Stellcom',
0x00037E: u'PORTech Communications, Inc.',
0x00037F: u'Atheros Communications, Inc.',
0x000380: u'SSH Communications Security Corp.',
0x000381: u'Ingenico International',
0x000382: u'A-One Co., Ltd.',
0x000383: u'Metera Networks, Inc.',
0x000384: u'AETA',
0x000385: u'Actelis Networks, Inc.',
0x000386: u'Ho Net, Inc.',
0x000387: u'Blaze Network Products',
0x000388: u'Fastfame Technology Co., Ltd.',
0x000389: u'Plantronics',
0x00038A: u'America Online, Inc.',
0x00038B: u'PLUS-ONE I&T, Inc.',
0x00038C: u'Total Impact',
0x00038D: u'PCS Revenue Control Systems, Inc.',
0x00038E: u'Atoga Systems, Inc.',
0x00038F: u'Weinschel Corporation',
0x000390: u'Digital Video Communications, Inc.',
0x000391: u'Advanced Digital Broadcast, Ltd.',
0x000392: u'Hyundai Teletek Co., Ltd.',
0x000393: u'Apple Computer, Inc.',
0x000394: u'Connect One',
0x000395: u'California Amplifier',
0x000396: u'EZ Cast Co., Ltd.',
0x000397: u'Watchfront Electronics',
0x000398: u'WISI',
0x000399: u'Dongju Informations & Communications Co., Ltd.',
0x00039A: u'SiConnect',
0x00039B: u'NetChip Technology, Inc.',
0x00039C: u'OptiMight Communications, Inc.',
0x00039D: u'BENQ CORPORATION',
0x00039E: u'Tera System Co., Ltd.',
0x00039F: u'Cisco Systems, Inc.',
0x0003A0: u'Cisco Systems, Inc.',
0x0003A1: u'HIPER Information & Communication, Inc.',
0x0003A2: u'Catapult Communications',
0x0003A3: u'MAVIX, Ltd.',
0x0003A4: u'Data Storage and Information Management',
0x0003A5: u'Medea Corporation',
0x0003A6: u'Traxit Technology, Inc.',
0x0003A7: u'Unixtar Technology, Inc.',
0x0003A8: u'IDOT Computers, Inc.',
0x0003A9: u'AXCENT Media AG',
0x0003AA: u'Watlow',
0x0003AB: u'Bridge Information Systems',
0x0003AC: u'Fronius Schweissmaschinen',
0x0003AD: u'Emerson Energy Systems AB',
0x0003AE: u'Allied Advanced Manufacturing Pte, Ltd.',
0x0003AF: u'Paragea Communications',
0x0003B0: u'Xsense Technology Corp.',
0x0003B1: u'Hospira Inc.',
0x0003B2: u'Radware',
0x0003B3: u'IA Link Systems Co., Ltd.',
0x0003B4: u'Macrotek International Corp.',
0x0003B5: u'Entra Technology Co.',
0x0003B6: u'QSI Corporation',
0x0003B7: u'ZACCESS Systems',
0x0003B8: u'NetKit Solutions, LLC',
0x0003B9: u'Hualong Telecom Co., Ltd.',
0x0003BA: u'Sun Microsystems',
0x0003BB: u'Signal Communications Limited',
0x0003BC: u'COT GmbH',
0x0003BD: u'OmniCluster Technologies, Inc.',
0x0003BE: u'Netility',
0x0003BF: u'Centerpoint Broadband Technologies, Inc.',
0x0003C0: u'RFTNC Co., Ltd.',
0x0003C1: u'Packet Dynamics Ltd',
0x0003C2: u'Solphone K.K.',
0x0003C3: u'Micronik Multimedia',
0x0003C4: u'Tomra Systems ASA',
0x0003C5: u'Mobotix AG',
0x0003C6: u'ICUE Systems, Inc.',
0x0003C7: u'hopf Elektronik GmbH',
0x0003C8: u'CML Emergency Services',
0x0003C9: u'TECOM Co., Ltd.',
0x0003CA: u'MTS Systems Corp.',
0x0003CB: u'Nippon Systems Development Co., Ltd.',
0x0003CC: u'Momentum Computer, Inc.',
0x0003CD: u'Clovertech, Inc.',
0x0003CE: u'ETEN Technologies, Inc.',
0x0003CF: u'Muxcom, Inc.',
0x0003D0: u'KOANKEISO Co., Ltd.',
0x0003D1: u'Takaya Corporation',
0x0003D2: u'Crossbeam Systems, Inc.',
0x0003D3: u'Internet Energy Systems, Inc.',
0x0003D4: u'Alloptic, Inc.',
0x0003D5: u'Advanced Communications Co., Ltd.',
0x0003D6: u'RADVision, Ltd.',
0x0003D7: u'NextNet Wireless, Inc.',
0x0003D8: u'iMPath Networks, Inc.',
0x0003D9: u'Secheron SA',
0x0003DA: u'Takamisawa Cybernetics Co., Ltd.',
0x0003DB: u'Apogee Electronics Corp.',
0x0003DC: u'Lexar Media, Inc.',
0x0003DD: u'Comark Corp.',
0x0003DE: u'OTC Wireless',
0x0003DF: u'Desana Systems',
0x0003E0: u'RadioFrame Networks, Inc.',
0x0003E1: u'Winmate Communication, Inc.',
0x0003E2: u'Comspace Corporation',
0x0003E3: u'Cisco Systems, Inc.',
0x0003E4: u'Cisco Systems, Inc.',
0x0003E5: u'Hermstedt SG',
0x0003E6: u'Entone Technologies, Inc.',
0x0003E7: u'Logostek Co. Ltd.',
0x0003E8: u'Wavelength Digital Limited',
0x0003E9: u'Akara Canada, Inc.',
0x0003EA: u'Mega System Technologies, Inc.',
0x0003EB: u'Atrica',
0x0003EC: u'ICG Research, Inc.',
0x0003ED: u'Shinkawa Electric Co., Ltd.',
0x0003EE: u'MKNet Corporation',
0x0003EF: u'Oneline AG',
0x0003F0: u'Redfern Broadband Networks',
0x0003F1: u'Cicada Semiconductor, Inc.',
0x0003F2: u'Seneca Networks',
0x0003F3: u'Dazzle Multimedia, Inc.',
0x0003F4: u'NetBurner',
0x0003F5: u'Chip2Chip',
0x0003F6: u'Allegro Networks, Inc.',
0x0003F7: u'Plast-Control GmbH',
0x0003F8: u'SanCastle Technologies, Inc.',
0x0003F9: u'Pleiades Communications, Inc.',
0x0003FA: u'TiMetra Networks',
0x0003FB: u'Toko Seiki Company, Ltd.',
0x0003FC: u'Intertex Data AB',
0x0003FD: u'Cisco Systems, Inc.',
0x0003FE: u'Cisco Systems, Inc.',
0x0003FF: u'Microsoft Corporation',
0x000400: u'LEXMARK INTERNATIONAL, INC.',
0x000401: u'Osaki Electric Co., Ltd.',
0x000402: u'Nexsan Technologies, Ltd.',
0x000403: u'Nexsi Corporation',
0x000404: u'Makino Milling Machine Co., Ltd.',
0x000405: u'ACN Technologies',
0x000406: u'Fa. Metabox AG',
0x000407: u'Topcon Positioning Systems, Inc.',
0x000408: u'Sanko Electronics Co., Ltd.',
0x000409: u'Cratos Networks',
0x00040A: u'Sage Systems',
0x00040B: u'3com Europe Ltd.',
0x00040C: u'KANNO Work\'s Ltd.',
0x00040D: u'Avaya, Inc.',
0x00040E: u'AVM GmbH',
0x00040F: u'Asus Network Technologies, Inc.',
0x000410: u'Spinnaker Networks, Inc.',
0x000411: u'Inkra Networks, Inc.',
0x000412: u'WaveSmith Networks, Inc.',
0x000413: u'SNOM Technology AG',
0x000414: u'Umezawa Musen Denki Co., Ltd.',
0x000415: u'Rasteme Systems Co., Ltd.',
0x000416: u'Parks S/A Comunicacoes Digitais',
0x000417: u'ELAU AG',
0x000418: u'Teltronic S.A.U.',
0x000419: u'Fibercycle Networks, Inc.',
0x00041A: u'ines GmbH',
0x00041B: u'Digital Interfaces Ltd.',
0x00041C: u'ipDialog, Inc.',
0x00041D: u'Corega of America',
0x00041E: u'Shikoku Instrumentation Co., Ltd.',
0x00041F: u'Sony Computer Entertainment, Inc.',
0x000420: u'Slim Devices, Inc.',
0x000421: u'Ocular Networks',
0x000422: u'Gordon Kapes, Inc.',
0x000423: u'Intel Corporation',
0x000424: u'TMC s.r.l.',
0x000425: u'Atmel Corporation',
0x000426: u'Autosys',
0x000427: u'Cisco Systems, Inc.',
0x000428: u'Cisco Systems, Inc.',
0x000429: u'Pixord Corporation',
0x00042A: u'Wireless Networks, Inc.',
0x00042B: u'IT Access Co., Ltd.',
0x00042C: u'Minet, Inc.',
0x00042D: u'Sarian Systems, Ltd.',
0x00042E: u'Netous Technologies, Ltd.',
0x00042F: u'International Communications Products, Inc.',
0x000430: u'Netgem',
0x000431: u'GlobalStreams, Inc.',
0x000432: u'Voyetra Turtle Beach, Inc.',
0x000433: u'Cyberboard A/S',
0x000434: u'Accelent Systems, Inc.',
0x000435: u'Comptek International, Inc.',
0x000436: u'ELANsat Technologies, Inc.',
0x000437: u'Powin Information Technology, Inc.',
0x000438: u'Nortel Networks',
0x000439: u'Rosco Entertainment Technology, Inc.',
0x00043A: u'Intelligent Telecommunications, Inc.',
0x00043B: u'Lava Computer Mfg., Inc.',
0x00043C: u'SONOS Co., Ltd.',
0x00043D: u'INDEL AG',
0x00043E: u'Telencomm',
0x00043F: u'Electronic Systems Technology, Inc.',
0x000440: u'cyberPIXIE, Inc.',
0x000441: u'Half Dome Systems, Inc.',
0x000442: u'NACT',
0x000443: u'Agilent Technologies, Inc.',
0x000444: u'Western Multiplex Corporation',
0x000445: u'LMS Skalar Instruments GmbH',
0x000446: u'CYZENTECH Co., Ltd.',
0x000447: u'Acrowave Systems Co., Ltd.',
0x000448: u'Polaroid Professional Imaging',
0x000449: u'Mapletree Networks',
0x00044A: u'iPolicy Networks, Inc.',
0x00044B: u'NVIDIA',
0x00044C: u'JENOPTIK',
0x00044D: u'Cisco Systems, Inc.',
0x00044E: u'Cisco Systems, Inc.',
0x00044F: u'Leukhardt Systemelektronik GmbH',
0x000450: u'DMD Computers SRL',
0x000451: u'Medrad, Inc.',
0x000452: u'RocketLogix, Inc.',
0x000453: u'YottaYotta, Inc.',
0x000454: u'Quadriga UK',
0x000455: u'ANTARA.net',
0x000456: u'PipingHot Networks',
0x000457: u'Universal Access Technology, Inc.',
0x000458: u'Fusion X Co., Ltd.',
0x000459: u'Veristar Corporation',
0x00045A: u'The Linksys Group, Inc.',
0x00045B: u'Techsan Electronics Co., Ltd.',
0x00045C: u'Mobiwave Pte Ltd',
0x00045D: u'BEKA Elektronik',
0x00045E: u'PolyTrax Information Technology AG',
0x00045F: u'Evalue Technology, Inc.',
0x000460: u'Knilink Technology, Inc.',
0x000461: u'EPOX Computer Co., Ltd.',
0x000462: u'DAKOS Data & Communication Co., Ltd.',
0x000463: u'Bosch Security Systems',
0x000464: u'Fantasma Networks, Inc.',
0x000465: u'i.s.t isdn-support technik GmbH',
0x000466: u'ARMITEL Co.',
0x000467: u'Wuhan Research Institute of MII',
0x000468: u'Vivity, Inc.',
0x000469: u'Innocom, Inc.',
0x00046A: u'Navini Networks',
0x00046B: u'Palm Wireless, Inc.',
0x00046C: u'Cyber Technology Co., Ltd.',
0x00046D: u'Cisco Systems, Inc.',
0x00046E: u'Cisco Systems, Inc.',
0x00046F: u'Digitel S/A Industria Eletronica',
0x000470: u'ipUnplugged AB',
0x000471: u'IPrad',
0x000472: u'Telelynx, Inc.',
0x000473: u'Photonex Corporation',
0x000474: u'LEGRAND',
0x000475: u'3 Com Corporation',
0x000476: u'3 Com Corporation',
0x000477: u'Scalant Systems, Inc.',
0x000478: u'G. Star Technology Corporation',
0x000479: u'Radius Co., Ltd.',
0x00047A: u'AXXESSIT ASA',
0x00047B: u'Schlumberger',
0x00047C: u'Skidata AG',
0x00047D: u'Pelco',
0x00047E: u'Optelecom=NKF',
0x00047F: u'Chr. Mayr GmbH & Co. KG',
0x000480: u'Foundry Networks, Inc.',
0x000481: u'Econolite Control Products, Inc.',
0x000482: u'Medialogic Corp.',
0x000483: u'Deltron Technology, Inc.',
0x000484: u'Amann GmbH',
0x000485: u'PicoLight',
0x000486: u'ITTC, University of Kansas',
0x000487: u'Cogency Semiconductor, Inc.',
0x000488: u'Eurotherm Controls',
0x000489: u'YAFO Networks, Inc.',
0x00048A: u'Temia Vertriebs GmbH',
0x00048B: u'Poscon Corporation',
0x00048C: u'Nayna Networks, Inc.',
0x00048D: u'Tone Commander Systems, Inc.',
0x00048E: u'Ohm Tech Labs, Inc.',
0x00048F: u'TD Systems Corp.',
0x000490: u'Optical Access',
0x000491: u'Technovision, Inc.',
0x000492: u'Hive Internet, Ltd.',
0x000493: u'Tsinghua Unisplendour Co., Ltd.',
0x000494: u'Breezecom, Ltd.',
0x000495: u'Tejas Networks',
0x000496: u'Extreme Networks',
0x000497: u'MacroSystem Digital Video AG',
0x000498: u'Mahi Networks',
0x000499: u'Chino Corporation',
0x00049A: u'Cisco Systems, Inc.',
0x00049B: u'Cisco Systems, Inc.',
0x00049C: u'Surgient Networks, Inc.',
0x00049D: u'Ipanema Technologies',
0x00049E: u'Wirelink Co., Ltd.',
0x00049F: u'Freescale Semiconductor',
0x0004A0: u'Verity Instruments, Inc.',
0x0004A1: u'Pathway Connectivity',
0x0004A2: u'L.S.I. Japan Co., Ltd.',
0x0004A3: u'Microchip Technology, Inc.',
0x0004A4: u'NetEnabled, Inc.',
0x0004A5: u'Barco Projection Systems NV',
0x0004A6: u'SAF Tehnika Ltd.',
0x0004A7: u'FabiaTech Corporation',
0x0004A8: u'Broadmax Technologies, Inc.',
0x0004A9: u'SandStream Technologies, Inc.',
0x0004AA: u'Jetstream Communications',
0x0004AB: u'Comverse Network Systems, Inc.',
0x0004AC: u'IBM CORP.',
0x0004AD: u'Malibu Networks',
0x0004AE: u'Liquid Metronics',
0x0004AF: u'Digital Fountain, Inc.',
0x0004B0: u'ELESIGN Co., Ltd.',
0x0004B1: u'Signal Technology, Inc.',
0x0004B2: u'ESSEGI SRL',
0x0004B3: u'Videotek, Inc.',
0x0004B4: u'CIAC',
0x0004B5: u'Equitrac Corporation',
0x0004B6: u'Stratex Networks, Inc.',
0x0004B7: u'AMB i.t. Holding',
0x0004B8: u'Kumahira Co., Ltd.',
0x0004B9: u'S.I. Soubou, Inc.',
0x0004BA: u'KDD Media Will Corporation',
0x0004BB: u'Bardac Corporation',
0x0004BC: u'Giantec, Inc.',
0x0004BD: u'Motorola BCS',
0x0004BE: u'OptXCon, Inc.',
0x0004BF: u'VersaLogic Corp.',
0x0004C0: u'Cisco Systems, Inc.',
0x0004C1: u'Cisco Systems, Inc.',
0x0004C2: u'Magnipix, Inc.',
0x0004C3: u'CASTOR Informatique',
0x0004C4: u'Allen & Heath Limited',
0x0004C5: u'ASE Technologies, USA',
0x0004C6: u'Yamaha Motor Co., Ltd.',
0x0004C7: u'NetMount',
0x0004C8: u'LIBA Maschinenfabrik GmbH',
0x0004C9: u'Micro Electron Co., Ltd.',
0x0004CA: u'FreeMs Corp.',
0x0004CB: u'Tdsoft Communication, Ltd.',
0x0004CC: u'Peek Traffic B.V.',
0x0004CD: u'Informedia Research Group',
0x0004CE: u'Patria Ailon',
0x0004CF: u'Seagate Technology',
0x0004D0: u'Softlink s.r.o.',
0x0004D1: u'Drew Technologies, Inc.',
0x0004D2: u'Adcon Telemetry GmbH',
0x0004D3: u'Toyokeiki Co., Ltd.',
0x0004D4: u'Proview Electronics Co., Ltd.',
0x0004D5: u'Hitachi Communication Systems, Inc.',
0x0004D6: u'Takagi Industrial Co., Ltd.',
0x0004D7: u'Omitec Instrumentation Ltd.',
0x0004D8: u'IPWireless, Inc.',
0x0004D9: u'Titan Electronics, Inc.',
0x0004DA: u'Relax Technology, Inc.',
0x0004DB: u'Tellus Group Corp.',
0x0004DC: u'Nortel Networks',
0x0004DD: u'Cisco Systems, Inc.',
0x0004DE: u'Cisco Systems, Inc.',
0x0004DF: u'Teracom Telematica Ltda.',
0x0004E0: u'Procket Networks',
0x0004E1: u'Infinior Microsystems',
0x0004E2: u'SMC Networks, Inc.',
0x0004E3: u'Accton Technology Corp.',
0x0004E4: u'Daeryung Ind., Inc.',
0x0004E5: u'Glonet Systems, Inc.',
0x0004E6: u'Banyan Network Private Limited',
0x0004E7: u'Lightpointe Communications, Inc',
0x0004E8: u'IER, Inc.',
0x0004E9: u'Infiniswitch Corporation',
0x0004EA: u'Hewlett-Packard Company',
0x0004EB: u'Paxonet Communications, Inc.',
0x0004EC: u'Memobox SA',
0x0004ED: u'Billion Electric Co., Ltd.',
0x0004EE: u'Lincoln Electric Company',
0x0004EF: u'Polestar Corp.',
0x0004F0: u'International Computers, Ltd',
0x0004F1: u'WhereNet',
0x0004F2: u'Polycom',
0x0004F3: u'FS FORTH-SYSTEME GmbH',
0x0004F4: u'Infinite Electronics Inc.',
0x0004F5: u'SnowShore Networks, Inc.',
0x0004F6: u'Amphus',
0x0004F7: u'Omega Band, Inc.',
0x0004F8: u'QUALICABLE TV Industria E Com., Ltda',
0x0004F9: u'Xtera Communications, Inc.',
0x0004FA: u'NBS Technologies Inc.',
0x0004FB: u'Commtech, Inc.',
0x0004FC: u'Stratus Computer (DE), Inc.',
0x0004FD: u'Japan Control Engineering Co., Ltd.',
0x0004FE: u'Pelago Networks',
0x0004FF: u'Acronet Co., Ltd.',
0x000500: u'Cisco Systems, Inc.',
0x000501: u'Cisco Systems, Inc.',
0x000502: u'APPLE COMPUTER',
0x000503: u'ICONAG',
0x000504: u'Naray Information & Communication Enterprise',
0x000505: u'Systems Integration Solutions, Inc.',
0x000506: u'Reddo Networks AB',
0x000507: u'Fine Appliance Corp.',
0x000508: u'Inetcam, Inc.',
0x000509: u'AVOC Nishimura Ltd.',
0x00050A: u'ICS Spa',
0x00050B: u'SICOM Systems, Inc.',
0x00050C: u'Network Photonics, Inc.',
0x00050D: u'Midstream Technologies, Inc.',
0x00050E: u'3ware, Inc.',
0x00050F: u'Tanaka S/S Ltd.',
0x000510: u'Infinite Shanghai Communication Terminals Ltd.',
0x000511: u'Complementary Technologies Ltd',
0x000512: u'MeshNetworks, Inc.',
0x000513: u'VTLinx Multimedia Systems, Inc.',
0x000514: u'KDT Systems Co., Ltd.',
0x000515: u'Nuark Co., Ltd.',
0x000516: u'SMART Modular Technologies',
0x000517: u'Shellcomm, Inc.',
0x000518: u'Jupiters Technology',
0x000519: u'Siemens Building Technologies AG,',
0x00051A: u'3Com Europe Ltd.',
0x00051B: u'Magic Control Technology Corporation',
0x00051C: u'Xnet Technology Corp.',
0x00051D: u'Airocon, Inc.',
0x00051E: u'Brocade Communications Systems, Inc.',
0x00051F: u'Taijin Media Co., Ltd.',
0x000520: u'Smartronix, Inc.',
0x000521: u'Control Microsystems',
0x000522: u'LEA*D Corporation, Inc.',
0x000523: u'AVL List GmbH',
0x000524: u'BTL System (HK) Limited',
0x000525: u'Puretek Industrial Co., Ltd.',
0x000526: u'IPAS GmbH',
0x000527: u'SJ Tek Co. Ltd',
0x000528: u'New Focus, Inc.',
0x000529: u'Shanghai Broadan Communication Technology Co., Ltd',
0x00052A: u'Ikegami Tsushinki Co., Ltd.',
0x00052B: u'HORIBA, Ltd.',
0x00052C: u'Supreme Magic Corporation',
0x00052D: u'Zoltrix International Limited',
0x00052E: u'Cinta Networks',
0x00052F: u'Leviton Voice and Data',
0x000530: u'Andiamo Systems, Inc.',
0x000531: u'Cisco Systems, Inc.',
0x000532: u'Cisco Systems, Inc.',
0x000533: u'Sanera Systems, Inc.',
0x000534: u'Northstar Engineering Ltd.',
0x000535: u'Chip PC Ltd.',
0x000536: u'Danam Communications, Inc.',
0x000537: u'Nets Technology Co., Ltd.',
0x000538: u'Merilus, Inc.',
0x000539: u'A Brand New World in Sweden AB',
0x00053A: u'Willowglen Services Pte Ltd',
0x00053B: u'Harbour Networks Ltd., Co. Beijing',
0x00053C: u'Xircom',
0x00053D: u'Agere Systems',
0x00053E: u'KID Systeme GmbH',
0x00053F: u'VisionTek, Inc.',
0x000540: u'FAST Corporation',
0x000541: u'Advanced Systems Co., Ltd.',
0x000542: u'Otari, Inc.',
0x000543: u'IQ Wireless GmbH',
0x000544: u'Valley Technologies, Inc.',
0x000545: u'Internet Photonics',
0x000546: u'KDDI Network & Solultions Inc.',
0x000547: u'Starent Networks',
0x000548: u'Disco Corporation',
0x000549: u'Salira Optical Network Systems',
0x00054A: u'Ario Data Networks, Inc.',
0x00054B: u'Micro Innovation AG',
0x00054C: u'RF Innovations Pty Ltd',
0x00054D: u'Brans Technologies, Inc.',
0x00054E: u'Philips Components',
0x00054F: u'PRIVATE',
0x000550: u'Vcomms Limited',
0x000551: u'F & S Elektronik Systeme GmbH',
0x000552: u'Xycotec Computer GmbH',
0x000553: u'DVC Company, Inc.',
0x000554: u'Rangestar Wireless',
0x000555: u'Japan Cash Machine Co., Ltd.',
0x000556: u'360 Systems',
0x000557: u'Agile TV Corporation',
0x000558: u'Synchronous, Inc.',
0x000559: u'Intracom S.A.',
0x00055A: u'Power Dsine Ltd.',
0x00055B: u'Charles Industries, Ltd.',
0x00055C: u'Kowa Company, Ltd.',
0x00055D: u'D-Link Systems, Inc.',
0x00055E: u'Cisco Systems, Inc.',
0x00055F: u'Cisco Systems, Inc.',
0x000560: u'LEADER COMM.CO., LTD',
0x000561: u'nac Image Technology, Inc.',
0x000562: u'Digital View Limited',
0x000563: u'J-Works, Inc.',
0x000564: u'Tsinghua Bitway Co., Ltd.',
0x000565: u'Tailyn Communication Company Ltd.',
0x000566: u'Secui.com Corporation',
0x000567: u'Etymonic Design, Inc.',
0x000568: u'Piltofish Networks AB',
0x000569: u'VMWARE, Inc.',
0x00056A: u'Heuft Systemtechnik GmbH',
0x00056B: u'C.P. Technology Co., Ltd.',
0x00056C: u'Hung Chang Co., Ltd.',
0x00056D: u'Pacific Corporation',
0x00056E: u'National Enhance Technology, Inc.',
0x00056F: u'Innomedia Technologies Pvt. Ltd.',
0x000570: u'Baydel Ltd.',
0x000571: u'Seiwa Electronics Co.',
0x000572: u'Deonet Co., Ltd.',
0x000573: u'Cisco Systems, Inc.',
0x000574: u'Cisco Systems, Inc.',
0x000575: u'CDS-Electronics BV',
0x000576: u'NSM Technology Ltd.',
0x000577: u'SM Information & Communication',
0x000578: u'PRIVATE',
0x000579: u'Universal Control Solution Corp.',
0x00057A: u'Hatteras Networks',
0x00057B: u'Chung Nam Electronic Co., Ltd.',
0x00057C: u'RCO Security AB',
0x00057D: u'Sun Communications, Inc.',
0x00057E: u'Eckelmann Steuerungstechnik GmbH',
0x00057F: u'Acqis Technology',
0x000580: u'Fibrolan Ltd.',
0x000581: u'Snell & Wilcox Ltd.',
0x000582: u'ClearCube Technology',
0x000583: u'ImageCom Limited',
0x000584: u'AbsoluteValue Systems, Inc.',
0x000585: u'Juniper Networks, Inc.',
0x000586: u'Lucent Technologies',
0x000587: u'Locus, Incorporated',
0x000588: u'Sensoria Corp.',
0x000589: u'National Datacomputer',
0x00058A: u'Netcom Co., Ltd.',
0x00058B: u'IPmental, Inc.',
0x00058C: u'Opentech Inc.',
0x00058D: u'Lynx Photonic Networks, Inc.',
0x00058E: u'Flextronics International GmbH & Co. Nfg. KG',
0x00058F: u'CLCsoft co.',
0x000590: u'Swissvoice Ltd.',
0x000591: u'Active Silicon Ltd.',
0x000592: u'Pultek Corp.',
0x000593: u'Grammar Engine Inc.',
0x000594: u'IXXAT Automation GmbH',
0x000595: u'Alesis Corporation',
0x000596: u'Genotech Co., Ltd.',
0x000597: u'Eagle Traffic Control Systems',
0x000598: u'CRONOS S.r.l.',
0x000599: u'DRS Test and Energy Management or DRS-TEM',
0x00059A: u'Cisco Systems, Inc.',
0x00059B: u'Cisco Systems, Inc.',
0x00059C: u'Kleinknecht GmbH, Ing. Buero',
0x00059D: u'Daniel Computing Systems, Inc.',
0x00059E: u'Zinwell Corporation',
0x00059F: u'Yotta Networks, Inc.',
0x0005A0: u'MOBILINE Kft.',
0x0005A1: u'Zenocom',
0x0005A2: u'CELOX Networks',
0x0005A3: u'QEI, Inc.',
0x0005A4: u'Lucid Voice Ltd.',
0x0005A5: u'KOTT',
0x0005A6: u'Extron Electronics',
0x0005A7: u'Hyperchip, Inc.',
0x0005A8: u'WYLE ELECTRONICS',
0x0005A9: u'Princeton Networks, Inc.',
0x0005AA: u'Moore Industries International Inc.',
0x0005AB: u'Cyber Fone, Inc.',
0x0005AC: u'Northern Digital, Inc.',
0x0005AD: u'Topspin Communications, Inc.',
0x0005AE: u'Mediaport USA',
0x0005AF: u'InnoScan Computing A/S',
0x0005B0: u'Korea Computer Technology Co., Ltd.',
0x0005B1: u'ASB Technology BV',
0x0005B2: u'Medison Co., Ltd.',
0x0005B3: u'Asahi-Engineering Co., Ltd.',
0x0005B4: u'Aceex Corporation',
0x0005B5: u'Broadcom Technologies',
0x0005B6: u'INSYS Microelectronics GmbH',
0x0005B7: u'Arbor Technology Corp.',
0x0005B8: u'Electronic Design Associates, Inc.',
0x0005B9: u'Airvana, Inc.',
0x0005BA: u'Area Netwoeks, Inc.',
0x0005BB: u'Myspace AB',
0x0005BC: u'Resorsys Ltd.',
0x0005BD: u'ROAX BV',
0x0005BE: u'Kongsberg Seatex AS',
0x0005BF: u'JustEzy Technology, Inc.',
0x0005C0: u'Digital Network Alacarte Co., Ltd.',
0x0005C1: u'A-Kyung Motion, Inc.',
0x0005C2: u'Soronti, Inc.',
0x0005C3: u'Pacific Instruments, Inc.',
0x0005C4: u'Telect, Inc.',
0x0005C5: u'Flaga HF',
0x0005C6: u'Triz Communications',
0x0005C7: u'I/F-COM A/S',
0x0005C8: u'VERYTECH',
0x0005C9: u'LG Innotek',
0x0005CA: u'Hitron Technology, Inc.',
0x0005CB: u'ROIS Technologies, Inc.',
0x0005CC: u'Sumtel Communications, Inc.',
0x0005CD: u'Denon, Ltd.',
0x0005CE: u'Prolink Microsystems Corporation',
0x0005CF: u'Thunder River Technologies, Inc.',
0x0005D0: u'Solinet Systems',
0x0005D1: u'Metavector Technologies',
0x0005D2: u'DAP Technologies',
0x0005D3: u'eProduction Solutions, Inc.',
0x0005D4: u'FutureSmart Networks, Inc.',
0x0005D5: u'Speedcom Wireless',
0x0005D6: u'Titan Wireless',
0x0005D7: u'Vista Imaging, Inc.',
0x0005D8: u'Arescom, Inc.',
0x0005D9: u'Techno Valley, Inc.',
0x0005DA: u'Apex Automationstechnik',
0x0005DB: u'Nentec GmbH',
0x0005DC: u'Cisco Systems, Inc.',
0x0005DD: u'Cisco Systems, Inc.',
0x0005DE: u'Gi Fone Korea, Inc.',
0x0005DF: u'Electronic Innovation, Inc.',
0x0005E0: u'Empirix Corp.',
0x0005E1: u'Trellis Photonics, Ltd.',
0x0005E2: u'Creativ Network Technologies',
0x0005E3: u'LightSand Communications, Inc.',
0x0005E4: u'Red Lion Controls L.P.',
0x0005E5: u'Renishaw PLC',
0x0005E6: u'Egenera, Inc.',
0x0005E7: u'Netrake Corp.',
0x0005E8: u'TurboWave, Inc.',
0x0005E9: u'Unicess Network, Inc.',
0x0005EA: u'Rednix',
0x0005EB: u'Blue Ridge Networks, Inc.',
0x0005EC: u'Mosaic Systems Inc.',
0x0005ED: u'Technikum Joanneum GmbH',
0x0005EE: u'BEWATOR Group',
0x0005EF: u'ADOIR Digital Technology',
0x0005F0: u'SATEC',
0x0005F1: u'Vrcom, Inc.',
0x0005F2: u'Power R, Inc.',
0x0005F3: u'Weboyn',
0x0005F4: u'System Base Co., Ltd.',
0x0005F5: u'OYO Geospace Corp.',
0x0005F6: u'Young Chang Co. Ltd.',
0x0005F7: u'Analog Devices, Inc.',
0x0005F8: u'Real Time Access, Inc.',
0x0005F9: u'TOA Corporation',
0x0005FA: u'IPOptical, Inc.',
0x0005FB: u'ShareGate, Inc.',
0x0005FC: u'Schenck Pegasus Corp.',
0x0005FD: u'PacketLight Networks Ltd.',
0x0005FE: u'Traficon N.V.',
0x0005FF: u'SNS Solutions, Inc.',
0x000600: u'Toshiba Teli Corporation',
0x000601: u'Otanikeiki Co., Ltd.',
0x000602: u'Cirkitech Electronics Co.',
0x000603: u'Baker Hughes Inc.',
0x000604: u'@Track Communications, Inc.',
0x000605: u'Inncom International, Inc.',
0x000606: u'RapidWAN, Inc.',
0x000607: u'Omni Directional Control Technology Inc.',
0x000608: u'At-Sky SAS',
0x000609: u'Crossport Systems',
0x00060A: u'Blue2space',
0x00060B: u'Paceline Systems Corporation',
0x00060C: u'Melco Industries, Inc.',
0x00060D: u'Wave7 Optics',
0x00060E: u'IGYS Systems, Inc.',
0x00060F: u'Narad Networks Inc',
0x000610: u'Abeona Networks Inc',
0x000611: u'Zeus Wireless, Inc.',
0x000612: u'Accusys, Inc.',
0x000613: u'Kawasaki Microelectronics Incorporated',
0x000614: u'Prism Holdings',
0x000615: u'Kimoto Electric Co., Ltd.',
0x000616: u'Tel Net Co., Ltd.',
0x000617: u'Redswitch Inc.',
0x000618: u'DigiPower Manufacturing Inc.',
0x000619: u'Connection Technology Systems',
0x00061A: u'Zetari Inc.',
0x00061B: u'Portable Systems, IBM Japan Co, Ltd',
0x00061C: u'Hoshino Metal Industries, Ltd.',
0x00061D: u'MIP Telecom, Inc.',
0x00061E: u'Maxan Systems',
0x00061F: u'Vision Components GmbH',
0x000620: u'Serial System Ltd.',
0x000621: u'Hinox, Co., Ltd.',
0x000622: u'Chung Fu Chen Yeh Enterprise Corp.',
0x000623: u'MGE UPS Systems France',
0x000624: u'Gentner Communications Corp.',
0x000625: u'The Linksys Group, Inc.',
0x000626: u'MWE GmbH',
0x000627: u'Uniwide Technologies, Inc.',
0x000628: u'Cisco Systems, Inc.',
0x000629: u'IBM CORPORATION',
0x00062A: u'Cisco Systems, Inc.',
0x00062B: u'INTRASERVER TECHNOLOGY',
0x00062C: u'Network Robots, Inc.',
0x00062D: u'TouchStar Technologies, L.L.C.',
0x00062E: u'Aristos Logic Corp.',
0x00062F: u'Pivotech Systems Inc.',
0x000630: u'Adtranz Sweden',
0x000631: u'Optical Solutions, Inc.',
0x000632: u'Mesco Engineering GmbH',
0x000633: u'Smiths Heimann Biometric Systems',
0x000634: u'GTE Airfone Inc.',
0x000635: u'PacketAir Networks, Inc.',
0x000636: u'Jedai Broadband Networks',
0x000637: u'Toptrend-Meta Information (ShenZhen) Inc.',
0x000638: u'Sungjin C&C Co., Ltd.',
0x000639: u'Newtec',
0x00063A: u'Dura Micro, Inc.',
0x00063B: u'Arcturus Networks, Inc.',
0x00063C: u'NMI Electronics Ltd',
0x00063D: u'Microwave Data Systems Inc.',
0x00063E: u'Opthos Inc.',
0x00063F: u'Everex Communications Inc.',
0x000640: u'White Rock Networks',
0x000641: u'ITCN',
0x000642: u'Genetel Systems Inc.',
0x000643: u'SONO Computer Co., Ltd.',
0x000644: u'NEIX Inc.',
0x000645: u'Meisei Electric Co. Ltd.',
0x000646: u'ShenZhen XunBao Network Technology Co Ltd',
0x000647: u'Etrali S.A.',
0x000648: u'Seedsware, Inc.',
0x000649: u'Quante',
0x00064A: u'Honeywell Co., Ltd. (KOREA)',
0x00064B: u'Alexon Co., Ltd.',
0x00064C: u'Invicta Networks, Inc.',
0x00064D: u'Sencore',
0x00064E: u'Broad Net Technology Inc.',
0x00064F: u'PRO-NETS Technology Corporation',
0x000650: u'Tiburon Networks, Inc.',
0x000651: u'Aspen Networks Inc.',
0x000652: u'Cisco Systems, Inc.',
0x000653: u'Cisco Systems, Inc.',
0x000654: u'Maxxio Technologies',
0x000655: u'Yipee, Inc.',
0x000656: u'Tactel AB',
0x000657: u'Market Central, Inc.',
0x000658: u'Helmut Fischer GmbH & Co. KG',
0x000659: u'EAL (Apeldoorn) B.V.',
0x00065A: u'Strix Systems',
0x00065B: u'Dell Computer Corp.',
0x00065C: u'Malachite Technologies, Inc.',
0x00065D: u'Heidelberg Web Systems',
0x00065E: u'Photuris, Inc.',
0x00065F: u'ECI Telecom - NGTS Ltd.',
0x000660: u'NADEX Co., Ltd.',
0x000661: u'NIA Home Technologies Corp.',
0x000662: u'MBM Technology Ltd.',
0x000663: u'Human Technology Co., Ltd.',
0x000664: u'Fostex Corporation',
0x000665: u'Sunny Giken, Inc.',
0x000666: u'Roving Networks',
0x000667: u'Tripp Lite',
0x000668: u'Vicon Industries Inc.',
0x000669: u'Datasound Laboratories Ltd',
0x00066A: u'InfiniCon Systems, Inc.',
0x00066B: u'Sysmex Corporation',
0x00066C: u'Robinson Corporation',
0x00066D: u'Compuprint S.P.A.',
0x00066E: u'Delta Electronics, Inc.',
0x00066F: u'Korea Data Systems',
0x000670: u'Upponetti Oy',
0x000671: u'Softing AG',
0x000672: u'Netezza',
0x000673: u'Optelecom-nkf',
0x000674: u'Spectrum Control, Inc.',
0x000675: u'Banderacom, Inc.',
0x000676: u'Novra Technologies Inc.',
0x000677: u'SICK AG',
0x000678: u'Marantz Japan, Inc.',
0x000679: u'Konami Corporation',
0x00067A: u'JMP Systems',
0x00067B: u'Toplink C&C Corporation',
0x00067C: u'CISCO SYSTEMS, INC.',
0x00067D: u'Takasago Ltd.',
0x00067E: u'WinCom Systems, Inc.',
0x00067F: u'Rearden Steel Technologies',
0x000680: u'Card Access, Inc.',
0x000681: u'Goepel Electronic GmbH',
0x000682: u'Convedia',
0x000683: u'Bravara Communications, Inc.',
0x000684: u'Biacore AB',
0x000685: u'NetNearU Corporation',
0x000686: u'ZARDCOM Co., Ltd.',
0x000687: u'Omnitron Systems Technology, Inc.',
0x000688: u'Telways Communication Co., Ltd.',
0x000689: u'yLez Technologies Pte Ltd',
0x00068A: u'NeuronNet Co. Ltd. R&D Center',
0x00068B: u'AirRunner Technologies, Inc.',
0x00068C: u'3Com Corporation',
0x00068D: u'SEPATON, Inc.',
0x00068E: u'HID Corporation',
0x00068F: u'Telemonitor, Inc.',
0x000690: u'Euracom Communication GmbH',
0x000691: u'PT Inovacao',
0x000692: u'Intruvert Networks, Inc.',
0x000693: u'Flexus Computer Technology, Inc.',
0x000694: u'Mobillian Corporation',
0x000695: u'Ensure Technologies, Inc.',
0x000696: u'Advent Networks',
0x000697: u'R & D Center',
0x000698: u'egnite Software GmbH',
0x000699: u'Vida Design Co.',
0x00069A: u'e & Tel',
0x00069B: u'AVT Audio Video Technologies GmbH',
0x00069C: u'Transmode Systems AB',
0x00069D: u'Petards Mobile Intelligence',
0x00069E: u'UNIQA, Inc.',
0x00069F: u'Kuokoa Networks',
0x0006A0: u'Mx Imaging',
0x0006A1: u'Celsian Technologies, Inc.',
0x0006A2: u'Microtune, Inc.',
0x0006A3: u'Bitran Corporation',
0x0006A4: u'INNOWELL Corp.',
0x0006A5: u'PINON Corp.',
0x0006A6: u'Artistic Licence (UK) Ltd',
0x0006A7: u'Primarion',
0x0006A8: u'KC Technology, Inc.',
0x0006A9: u'Universal Instruments Corp.',
0x0006AA: u'Miltope Corporation',
0x0006AB: u'W-Link Systems, Inc.',
0x0006AC: u'Intersoft Co.',
0x0006AD: u'KB Electronics Ltd.',
0x0006AE: u'Himachal Futuristic Communications Ltd',
0x0006AF: u'PRIVATE',
0x0006B0: u'Comtech EF Data Corp.',
0x0006B1: u'Sonicwall',
0x0006B2: u'Linxtek Co.',
0x0006B3: u'Diagraph Corporation',
0x0006B4: u'Vorne Industries, Inc.',
0x0006B5: u'Luminent, Inc.',
0x0006B6: u'Nir-Or Israel Ltd.',
0x0006B7: u'TELEM GmbH',
0x0006B8: u'Bandspeed Pty Ltd',
0x0006B9: u'A5TEK Corp.',
0x0006BA: u'Westwave Communications',
0x0006BB: u'ATI Technologies Inc.',
0x0006BC: u'Macrolink, Inc.',
0x0006BD: u'BNTECHNOLOGY Co., Ltd.',
0x0006BE: u'Baumer Optronic GmbH',
0x0006BF: u'Accella Technologies Co., Ltd.',
0x0006C0: u'United Internetworks, Inc.',
0x0006C1: u'CISCO SYSTEMS, INC.',
0x0006C2: u'Smartmatic Corporation',
0x0006C3: u'Schindler Elevators Ltd.',
0x0006C4: u'Piolink Inc.',
0x0006C5: u'INNOVI Technologies Limited',
0x0006C6: u'lesswire AG',
0x0006C7: u'RFNET Technologies Pte Ltd (S)',
0x0006C8: u'Sumitomo Metal Micro Devices, Inc.',
0x0006C9: u'Technical Marketing Research, Inc.',
0x0006CA: u'American Computer & Digital Components, Inc. (ACDC)',
0x0006CB: u'Jotron Electronics A/S',
0x0006CC: u'JMI Electronics Co., Ltd.',
0x0006CD: u'Kodak IL Ltd.',
0x0006CE: u'DATENO',
0x0006CF: u'Thales Avionics In-Flight Systems, LLC',
0x0006D0: u'Elgar Electronics Corp.',
0x0006D1: u'Tahoe Networks, Inc.',
0x0006D2: u'Tundra Semiconductor Corp.',
0x0006D3: u'Alpha Telecom, Inc. U.S.A.',
0x0006D4: u'Interactive Objects, Inc.',
0x0006D5: u'Diamond Systems Corp.',
0x0006D6: u'Cisco Systems, Inc.',
0x0006D7: u'Cisco Systems, Inc.',
0x0006D8: u'Maple Optical Systems',
0x0006D9: u'IPM-Net S.p.A.',
0x0006DA: u'ITRAN Communications Ltd.',
0x0006DB: u'ICHIPS Co., Ltd.',
0x0006DC: u'Syabas Technology (Amquest)',
0x0006DD: u'AT & T Laboratories - Cambridge Ltd',
0x0006DE: u'Flash Technology',
0x0006DF: u'AIDONIC Corporation',
0x0006E0: u'MAT Co., Ltd.',
0x0006E1: u'Techno Trade s.a',
0x0006E2: u'Ceemax Technology Co., Ltd.',
0x0006E3: u'Quantitative Imaging Corporation',
0x0006E4: u'Citel Technologies Ltd.',
0x0006E5: u'Fujian Newland Computer Ltd. Co.',
0x0006E6: u'DongYang Telecom Co., Ltd.',
0x0006E7: u'Bit Blitz Communications Inc.',
0x0006E8: u'Optical Network Testing, Inc.',
0x0006E9: u'Intime Corp.',
0x0006EA: u'ELZET80 Mikrocomputer GmbH&Co. KG',
0x0006EB: u'Global Data',
0x0006EC: u'M/A COM Private Radio System Inc.',
0x0006ED: u'Inara Networks',
0x0006EE: u'Shenyang Neu-era Information & Technology Stock Co., Ltd',
0x0006EF: u'Maxxan Systems, Inc.',
0x0006F0: u'Digeo, Inc.',
0x0006F1: u'Optillion',
0x0006F2: u'Platys Communications',
0x0006F3: u'AcceLight Networks',
0x0006F4: u'Prime Electronics & Satellitics Inc.',
0x0006F8: u'CPU Technology, Inc.',
0x0006F9: u'Mitsui Zosen Systems Research Inc.',
0x0006FA: u'IP SQUARE Co, Ltd.',
0x0006FB: u'Hitachi Printing Solutions, Ltd.',
0x0006FC: u'Fnet Co., Ltd.',
0x0006FD: u'Comjet Information Systems Corp.',
0x0006FE: u'Celion Networks, Inc.',
0x0006FF: u'Sheba Systems Co., Ltd.',
0x000700: u'Zettamedia Korea',
0x000701: u'RACAL-DATACOM',
0x000702: u'Varian Medical Systems',
0x000703: u'CSEE Transport',
0x000705: u'Endress & Hauser GmbH & Co',
0x000706: u'Sanritz Corporation',
0x000707: u'Interalia Inc.',
0x000708: u'Bitrage Inc.',
0x000709: u'Westerstrand Urfabrik AB',
0x00070A: u'Unicom Automation Co., Ltd.',
0x00070B: u'Octal, SA',
0x00070C: u'SVA-Intrusion.com Co. Ltd.',
0x00070D: u'Cisco Systems Inc.',
0x00070E: u'Cisco Systems Inc.',
0x00070F: u'Fujant, Inc.',
0x000710: u'Adax, Inc.',
0x000711: u'Acterna',
0x000712: u'JAL Information Technology',
0x000713: u'IP One, Inc.',
0x000714: u'Brightcom',
0x000715: u'General Research of Electronics, Inc.',
0x000716: u'J & S Marine Ltd.',
0x000717: u'Wieland Electric GmbH',
0x000718: u'iCanTek Co., Ltd.',
0x000719: u'Mobiis Co., Ltd.',
0x00071A: u'Finedigital Inc.',
0x00071B: u'Position Technology Inc.',
0x00071C: u'AT&T Fixed Wireless Services',
0x00071D: u'Satelsa Sistemas Y Aplicaciones De Telecomunicaciones, S.A.',
0x00071E: u'Tri-M Engineering / Nupak Dev. Corp.',
0x00071F: u'European Systems Integration',
0x000720: u'Trutzschler GmbH & Co. KG',
0x000721: u'Formac Elektronik GmbH',
0x000722: u'Nielsen Media Research',
0x000723: u'ELCON Systemtechnik GmbH',
0x000724: u'Telemax Co., Ltd.',
0x000725: u'Bematech International Corp.',
0x000727: u'Zi Corporation (HK) Ltd.',
0x000728: u'Neo Telecom',
0x000729: u'Kistler Instrumente AG',
0x00072A: u'Innovance Networks',
0x00072B: u'Jung Myung Telecom Co., Ltd.',
0x00072C: u'Fabricom',
0x00072D: u'CNSystems',
0x00072E: u'North Node AB',
0x00072F: u'Intransa, Inc.',
0x000730: u'Hutchison OPTEL Telecom Technology Co., Ltd.',
0x000731: u'Spiricon, Inc.',
0x000732: u'AAEON Technology Inc.',
0x000733: u'DANCONTROL Engineering',
0x000734: u'ONStor, Inc.',
0x000735: u'Flarion Technologies, Inc.',
0x000736: u'Data Video Technologies Co., Ltd.',
0x000737: u'Soriya Co. Ltd.',
0x000738: u'Young Technology Co., Ltd.',
0x000739: u'Motion Media Technology Ltd.',
0x00073A: u'Inventel Systemes',
0x00073B: u'Tenovis GmbH & Co KG',
0x00073C: u'Telecom Design',
0x00073D: u'Nanjing Postel Telecommunications Co., Ltd.',
0x00073E: u'China Great-Wall Computer Shenzhen Co., Ltd.',
0x00073F: u'Woojyun Systec Co., Ltd.',
0x000740: u'Melco Inc.',
0x000741: u'Sierra Automated Systems',
0x000742: u'Current Technologies',
0x000743: u'Chelsio Communications',
0x000744: u'Unico, Inc.',
0x000745: u'Radlan Computer Communications Ltd.',
0x000746: u'TURCK, Inc.',
0x000747: u'Mecalc',
0x000748: u'The Imaging Source Europe',
0x000749: u'CENiX Inc.',
0x00074A: u'Carl Valentin GmbH',
0x00074B: u'Daihen Corporation',
0x00074C: u'Beicom Inc.',
0x00074D: u'Zebra Technologies Corp.',
0x00074E: u'Naughty boy co., Ltd.',
0x00074F: u'Cisco Systems, Inc.',
0x000750: u'Cisco Systems, Inc.',
0x000751: u'm.u.t. - GmbH',
0x000752: u'Rhythm Watch Co., Ltd.',
0x000753: u'Beijing Qxcomm Technology Co., Ltd.',
0x000754: u'Xyterra Computing, Inc.',
0x000755: u'Lafon SA',
0x000756: u'Juyoung Telecom',
0x000757: u'Topcall International AG',
0x000758: u'Dragonwave',
0x000759: u'Boris Manufacturing Corp.',
0x00075A: u'Air Products and Chemicals, Inc.',
0x00075B: u'Gibson Guitars',
0x00075C: u'Eastman Kodak Company',
0x00075D: u'Celleritas Inc.',
0x00075E: u'Ametek Power Instruments',
0x00075F: u'VCS Video Communication Systems AG',
0x000760: u'TOMIS Information & Telecom Corp.',
0x000761: u'Logitech SA',
0x000762: u'Group Sense Limited',
0x000763: u'Sunniwell Cyber Tech. Co., Ltd.',
0x000764: u'YoungWoo Telecom Co. Ltd.',
0x000765: u'Jade Quantum Technologies, Inc.',
0x000766: u'Chou Chin Industrial Co., Ltd.',
0x000767: u'Yuxing Electronics Company Limited',
0x000768: u'Danfoss A/S',
0x000769: u'Italiana Macchi SpA',
0x00076A: u'NEXTEYE Co., Ltd.',
0x00076B: u'Stralfors AB',
0x00076C: u'Daehanet, Inc.',
0x00076D: u'Flexlight Networks',
0x00076E: u'Sinetica Corporation Limited',
0x00076F: u'Synoptics Limited',
0x000770: u'Locusnetworks Corporation',
0x000771: u'Embedded System Corporation',
0x000772: u'Alcatel Shanghai Bell Co., Ltd.',
0x000773: u'Ascom Powerline Communications Ltd.',
0x000774: u'GuangZhou Thinker Technology Co. Ltd.',
0x000775: u'Valence Semiconductor, Inc.',
0x000776: u'Federal APD',
0x000777: u'Motah Ltd.',
0x000778: u'GERSTEL GmbH & Co. KG',
0x000779: u'Sungil Telecom Co., Ltd.',
0x00077A: u'Infoware System Co., Ltd.',
0x00077B: u'Millimetrix Broadband Networks',
0x00077C: u'OnTime Networks',
0x00077E: u'Elrest GmbH',
0x00077F: u'J Communications Co., Ltd.',
0x000780: u'Bluegiga Technologies OY',
0x000781: u'Itron Inc.',
0x000782: u'Nauticus Networks, Inc.',
0x000783: u'SynCom Network, Inc.',
0x000784: u'Cisco Systems Inc.',
0x000785: u'Cisco Systems Inc.',
0x000786: u'Wireless Networks Inc.',
0x000787: u'Idea System Co., Ltd.',
0x000788: u'Clipcomm, Inc.',
0x000789: u'Eastel Systems Corporation',
0x00078A: u'Mentor Data System Inc.',
0x00078B: u'Wegener Communications, Inc.',
0x00078C: u'Elektronikspecialisten i Borlange AB',
0x00078D: u'NetEngines Ltd.',
0x00078E: u'Garz & Friche GmbH',
0x00078F: u'Emkay Innovative Products',
0x000790: u'Tri-M Technologies (s) Limited',
0x000791: u'International Data Communications, Inc.',
0x000792: u'Suetron Electronic GmbH',
0x000793: u'Shin Satellite Public Company Limited',
0x000794: u'Simple Devices, Inc.',
0x000795: u'Elitegroup Computer System Co. (ECS)',
0x000796: u'LSI Systems, Inc.',
0x000797: u'Netpower Co., Ltd.',
0x000798: u'Selea SRL',
0x000799: u'Tipping Point Technologies, Inc.',
0x00079A: u'SmartSight Networks Inc.',
0x00079B: u'Aurora Networks',
0x00079C: u'Golden Electronics Technology Co., Ltd.',
0x00079D: u'Musashi Co., Ltd.',
0x00079E: u'Ilinx Co., Ltd.',
0x00079F: u'Action Digital Inc.',
0x0007A0: u'e-Watch Inc.',
0x0007A1: u'VIASYS Healthcare GmbH',
0x0007A2: u'Opteon Corporation',
0x0007A3: u'Ositis Software, Inc.',
0x0007A4: u'GN Netcom Ltd.',
0x0007A5: u'Y.D.K Co. Ltd.',
0x0007A6: u'Home Automation, Inc.',
0x0007A7: u'A-Z Inc.',
0x0007A8: u'Haier Group Technologies Ltd.',
0x0007A9: u'Novasonics',
0x0007AA: u'Quantum Data Inc.',
0x0007AC: u'Eolring',
0x0007AD: u'Pentacon GmbH Foto-und Feinwerktechnik',
0x0007AE: u'Britestream Networks, Inc.',
0x0007AF: u'N-Tron Corp.',
0x0007B0: u'Office Details, Inc.',
0x0007B1: u'Equator Technologies',
0x0007B2: u'Transaccess S.A.',
0x0007B3: u'Cisco Systems Inc.',
0x0007B4: u'Cisco Systems Inc.',
0x0007B5: u'Any One Wireless Ltd.',
0x0007B6: u'Telecom Technology Ltd.',
0x0007B7: u'Samurai Ind. Prods Eletronicos Ltda',
0x0007B8: u'American Predator Corp.',
0x0007B9: u'Ginganet Corporation',
0x0007BA: u'UTStarcom, Inc.',
0x0007BB: u'Candera Inc.',
0x0007BC: u'Identix Inc.',
0x0007BD: u'Radionet Ltd.',
0x0007BE: u'DataLogic SpA',
0x0007BF: u'Armillaire Technologies, Inc.',
0x0007C0: u'NetZerver Inc.',
0x0007C1: u'Overture Networks, Inc.',
0x0007C2: u'Netsys Telecom',
0x0007C3: u'Cirpack',
0x0007C4: u'JEAN Co. Ltd.',
0x0007C5: u'Gcom, Inc.',
0x0007C6: u'VDS Vosskuhler GmbH',
0x0007C7: u'Synectics Systems Limited',
0x0007C8: u'Brain21, Inc.',
0x0007C9: u'Technol Seven Co., Ltd.',
0x0007CA: u'Creatix Polymedia Ges Fur Kommunikaitonssysteme',
0x0007CB: u'Freebox SA',
0x0007CC: u'Kaba Benzing GmbH',
0x0007CD: u'NMTEL Co., Ltd.',
0x0007CE: u'Cabletime Limited',
0x0007CF: u'Anoto AB',
0x0007D0: u'Automat Engenharia de Automaoa Ltda.',
0x0007D1: u'Spectrum Signal Processing Inc.',
0x0007D2: u'Logopak Systeme',
0x0007D3: u'Stork Digital Imaging B.V.',
0x0007D4: u'Zhejiang Yutong Network Communication Co Ltd.',
0x0007D5: u'3e Technologies Int;., Inc.',
0x0007D6: u'Commil Ltd.',
0x0007D7: u'Caporis Networks AG',
0x0007D8: u'Hitron Systems Inc.',
0x0007D9: u'Splicecom',
0x0007DA: u'Neuro Telecom Co., Ltd.',
0x0007DB: u'Kirana Networks, Inc.',
0x0007DC: u'Atek Co, Ltd.',
0x0007DD: u'Cradle Technologies',
0x0007DE: u'eCopilt AB',
0x0007DF: u'Vbrick Systems Inc.',
0x0007E0: u'Palm Inc.',
0x0007E1: u'WIS Communications Co. Ltd.',
0x0007E2: u'Bitworks, Inc.',
0x0007E3: u'Navcom Technology, Inc.',
0x0007E4: u'SoftRadio Co., Ltd.',
0x0007E5: u'Coup Corporation',
0x0007E6: u'edgeflow Canada Inc.',
0x0007E7: u'FreeWave Technologies',
0x0007E8: u'St. Bernard Software',
0x0007E9: u'Intel Corporation',
0x0007EA: u'Massana, Inc.',
0x0007EB: u'Cisco Systems Inc.',
0x0007EC: u'Cisco Systems Inc.',
0x0007ED: u'Altera Corporation',
0x0007EE: u'telco Informationssysteme GmbH',
0x0007EF: u'Lockheed Martin Tactical Systems',
0x0007F0: u'LogiSync Corporation',
0x0007F1: u'TeraBurst Networks Inc.',
0x0007F2: u'IOA Corporation',
0x0007F3: u'Thinkengine Networks',
0x0007F4: u'Eletex Co., Ltd.',
0x0007F5: u'Bridgeco Co AG',
0x0007F6: u'Qqest Software Systems',
0x0007F7: u'Galtronics',
0x0007F8: u'ITDevices, Inc.',
0x0007F9: u'Phonetics, Inc.',
0x0007FA: u'ITT Co., Ltd.',
0x0007FB: u'Giga Stream UMTS Technologies GmbH',
0x0007FC: u'Adept Systems Inc.',
0x0007FD: u'LANergy Ltd.',
0x0007FE: u'Rigaku Corporation',
0x0007FF: u'Gluon Networks',
0x000800: u'MULTITECH SYSTEMS, INC.',
0x000801: u'HighSpeed Surfing Inc.',
0x000802: u'Compaq Computer Corporation',
0x000803: u'Cos Tron',
0x000804: u'ICA Inc.',
0x000805: u'Techno-Holon Corporation',
0x000806: u'Raonet Systems, Inc.',
0x000807: u'Access Devices Limited',
0x000808: u'PPT Vision, Inc.',
0x000809: u'Systemonic AG',
0x00080A: u'Espera-Werke GmbH',
0x00080B: u'Birka BPA Informationssystem AB',
0x00080C: u'VDA elettronica SrL',
0x00080D: u'Toshiba',
0x00080E: u'Motorola, BCS',
0x00080F: u'Proximion Fiber Optics AB',
0x000810: u'Key Technology, Inc.',
0x000811: u'VOIX Corporation',
0x000812: u'GM-2 Corporation',
0x000813: u'Diskbank, Inc.',
0x000814: u'TIL Technologies',
0x000815: u'CATS Co., Ltd.',
0x000816: u'Bluetags A/S',
0x000817: u'EmergeCore Networks LLC',
0x000818: u'Pixelworks, Inc.',
0x000819: u'Banksys',
0x00081A: u'Sanrad Intelligence Storage Communications (2000) Ltd.',
0x00081B: u'Windigo Systems',
0x00081C: u'@pos.com',
0x00081D: u'Ipsil, Incorporated',
0x00081E: u'Repeatit AB',
0x00081F: u'Pou Yuen Tech Corp. Ltd.',
0x000820: u'Cisco Systems Inc.',
0x000821: u'Cisco Systems Inc.',
0x000822: u'InPro Comm',
0x000823: u'Texa Corp.',
0x000824: u'Promatek Industries Ltd.',
0x000825: u'Acme Packet',
0x000826: u'Colorado Med Tech',
0x000827: u'Pirelli Broadband Solutions',
0x000828: u'Koei Engineering Ltd.',
0x000829: u'Aval Nagasaki Corporation',
0x00082A: u'Powerwallz Network Security',
0x00082B: u'Wooksung Electronics, Inc.',
0x00082C: u'Homag AG',
0x00082D: u'Indus Teqsite Private Limited',
0x00082E: u'Multitone Electronics PLC',
0x00084E: u'DivergeNet, Inc.',
0x00084F: u'Qualstar Corporation',
0x000850: u'Arizona Instrument Corp.',
0x000851: u'Canadian Bank Note Company, Ltd.',
0x000852: u'Davolink Co. Inc.',
0x000853: u'Schleicher GmbH & Co. Relaiswerke KG',
0x000854: u'Netronix, Inc.',
0x000855: u'NASA-Goddard Space Flight Center',
0x000856: u'Gamatronic Electronic Industries Ltd.',
0x000857: u'Polaris Networks, Inc.',
0x000858: u'Novatechnology Inc.',
0x000859: u'ShenZhen Unitone Electronics Co., Ltd.',
0x00085A: u'IntiGate Inc.',
0x00085B: u'Hanbit Electronics Co., Ltd.',
0x00085C: u'Shanghai Dare Technologies Co. Ltd.',
0x00085D: u'Aastra',
0x00085E: u'PCO AG',
0x00085F: u'Picanol N.V.',
0x000860: u'LodgeNet Entertainment Corp.',
0x000861: u'SoftEnergy Co., Ltd.',
0x000862: u'NEC Eluminant Technologies, Inc.',
0x000863: u'Entrisphere Inc.',
0x000864: u'Fasy S.p.A.',
0x000865: u'JASCOM CO., LTD',
0x000866: u'DSX Access Systems, Inc.',
0x000867: u'Uptime Devices',
0x000868: u'PurOptix',
0x000869: u'Command-e Technology Co.,Ltd.',
0x00086A: u'Industrie Technik IPS GmbH',
0x00086B: u'MIPSYS',
0x00086C: u'Plasmon LMS',
0x00086D: u'Missouri FreeNet',
0x00086E: u'Hyglo AB',
0x00086F: u'Resources Computer Network Ltd.',
0x000870: u'Rasvia Systems, Inc.',
0x000871: u'NORTHDATA Co., Ltd.',
0x000872: u'Sorenson Technologies, Inc.',
0x000873: u'DAP Design B.V.',
0x000874: u'Dell Computer Corp.',
0x000875: u'Acorp Electronics Corp.',
0x000876: u'SDSystem',
0x000877: u'Liebert HIROSS S.p.A.',
0x000878: u'Benchmark Storage Innovations',
0x000879: u'CEM Corporation',
0x00087A: u'Wipotec GmbH',
0x00087B: u'RTX Telecom A/S',
0x00087C: u'Cisco Systems, Inc.',
0x00087D: u'Cisco Systems Inc.',
0x00087E: u'Bon Electro-Telecom Inc.',
0x00087F: u'SPAUN electronic GmbH & Co. KG',
0x000880: u'BroadTel Canada Communications inc.',
0x000881: u'DIGITAL HANDS CO.,LTD.',
0x000882: u'SIGMA CORPORATION',
0x000883: u'Hewlett-Packard Company',
0x000884: u'Index Braille AB',
0x000885: u'EMS Dr. Thomas Wuensche',
0x000886: u'Hansung Teliann, Inc.',
0x000887: u'Maschinenfabrik Reinhausen GmbH',
0x000888: u'OULLIM Information Technology Inc,.',
0x000889: u'Echostar Technologies Corp',
0x00088A: u'Minds@Work',
0x00088B: u'Tropic Networks Inc.',
0x00088C: u'Quanta Network Systems Inc.',
0x00088D: u'Sigma-Links Inc.',
0x00088E: u'Nihon Computer Co., Ltd.',
0x00088F: u'ADVANCED DIGITAL TECHNOLOGY',
0x000890: u'AVILINKS SA',
0x000891: u'Lyan Inc.',
0x000892: u'EM Solutions',
0x000893: u'LE INFORMATION COMMUNICATION INC.',
0x000894: u'InnoVISION Multimedia Ltd.',
0x000895: u'DIRC Technologie GmbH & Co.KG',
0x000896: u'Printronix, Inc.',
0x000897: u'Quake Technologies',
0x000898: u'Gigabit Optics Corporation',
0x000899: u'Netbind, Inc.',
0x00089A: u'Alcatel Microelectronics',
0x00089B: u'ICP Electronics Inc.',
0x00089C: u'Elecs Industry Co., Ltd.',
0x00089D: u'UHD-Elektronik',
0x00089E: u'Beijing Enter-Net co.LTD',
0x00089F: u'EFM Networks',
0x0008A0: u'Stotz Feinmesstechnik GmbH',
0x0008A1: u'CNet Technology Inc.',
0x0008A2: u'ADI Engineering, Inc.',
0x0008A3: u'Cisco Systems',
0x0008A4: u'Cisco Systems',
0x0008A5: u'Peninsula Systems Inc.',
0x0008A6: u'Multiware & Image Co., Ltd.',
0x0008A7: u'iLogic Inc.',
0x0008A8: u'Systec Co., Ltd.',
0x0008A9: u'SangSang Technology, Inc.',
0x0008AA: u'KARAM',
0x0008AB: u'EnerLinx.com, Inc.',
0x0008AC: u'PRIVATE',
0x0008AD: u'Toyo-Linx Co., Ltd.',
0x0008AE: u'PacketFront Sweden AB',
0x0008AF: u'Novatec Corporation',
0x0008B0: u'BKtel communications GmbH',
0x0008B1: u'ProQuent Systems',
0x0008B2: u'SHENZHEN COMPASS TECHNOLOGY DEVELOPMENT CO.,LTD',
0x0008B3: u'Fastwel',
0x0008B4: u'SYSPOL',
0x0008B5: u'TAI GUEN ENTERPRISE CO., LTD',
0x0008B6: u'RouteFree, Inc.',
0x0008B7: u'HIT Incorporated',
0x0008B8: u'E.F. Johnson',
0x0008B9: u'KAON MEDIA Co., Ltd.',
0x0008BA: u'Erskine Systems Ltd',
0x0008BB: u'NetExcell',
0x0008BC: u'Ilevo AB',
0x0008BD: u'TEPG-US',
0x0008BE: u'XENPAK MSA Group',
0x0008BF: u'Aptus Elektronik AB',
0x0008C0: u'ASA SYSTEMS',
0x0008C1: u'Avistar Communications Corporation',
0x0008C2: u'Cisco Systems',
0x0008C3: u'Contex A/S',
0x0008C4: u'Hikari Co.,Ltd.',
0x0008C5: u'Liontech Co., Ltd.',
0x0008C6: u'Philips Consumer Communications',
0x0008C7: u'COMPAQ COMPUTER CORPORATION',
0x0008C8: u'Soneticom, Inc.',
0x0008C9: u'TechniSat Digital GmbH',
0x0008CA: u'TwinHan Technology Co.,Ltd',
0x0008CB: u'Zeta Broadband Inc.',
0x0008CC: u'Remotec, Inc.',
0x0008CD: u'With-Net Inc',
0x0008CE: u'IPMobileNet Inc.',
0x0008CF: u'Nippon Koei Power Systems Co., Ltd.',
0x0008D0: u'Musashi Engineering Co., LTD.',
0x0008D1: u'KAREL INC.',
0x0008D2: u'ZOOM Networks Inc.',
0x0008D3: u'Hercules Technologies S.A.',
0x0008D4: u'IneoQuest Technologies, Inc',
0x0008D5: u'Vanguard Managed Solutions',
0x0008D6: u'HASSNET Inc.',
0x0008D7: u'HOW CORPORATION',
0x0008D8: u'Dowkey Microwave',
0x0008D9: u'Mitadenshi Co.,LTD',
0x0008DA: u'SofaWare Technologies Ltd.',
0x0008DB: u'Corrigent Systems',
0x0008DC: u'Wiznet',
0x0008DD: u'Telena Communications, Inc.',
0x0008DE: u'3UP Systems',
0x0008DF: u'Alistel Inc.',
0x0008E0: u'ATO Technology Ltd.',
0x0008E1: u'Barix AG',
0x0008E2: u'Cisco Systems',
0x0008E3: u'Cisco Systems',
0x0008E4: u'Envenergy Inc',
0x0008E5: u'IDK Corporation',
0x0008E6: u'Littlefeet',
0x0008E7: u'SHI ControlSystems,Ltd.',
0x0008E8: u'Excel Master Ltd.',
0x0008E9: u'NextGig',
0x0008EA: u'Motion Control Engineering, Inc',
0x0008EB: u'ROMWin Co.,Ltd.',
0x0008EC: u'Zonu, Inc.',
0x0008ED: u'ST&T Instrument Corp.',
0x0008EE: u'Logic Product Development',
0x0008EF: u'DIBAL,S.A.',
0x0008F0: u'Next Generation Systems, Inc.',
0x0008F1: u'Voltaire',
0x0008F2: u'C&S Technology',
0x0008F3: u'WANY',
0x0008F4: u'Bluetake Technology Co., Ltd.',
0x0008F5: u'YESTECHNOLOGY Co.,Ltd.',
0x0008F6: u'SUMITOMO ELECTRIC HIGHTECHS.co.,ltd.',
0x0008F7: u'Hitachi Ltd, Semiconductor & Integrated Circuits Gr',
0x0008F8: u'Guardall Ltd',
0x0008F9: u'Padcom, Inc.',
0x0008FA: u'Karl E.Brinkmann GmbH',
0x0008FB: u'SonoSite, Inc.',
0x0008FC: u'Gigaphoton Inc.',
0x0008FD: u'BlueKorea Co., Ltd.',
0x0008FE: u'UNIK C&C Co.,Ltd.',
0x0008FF: u'Trilogy Communications Ltd',
0x000900: u'TMT',
0x000901: u'Shenzhen Shixuntong Information & Technoligy Co',
0x000902: u'Redline Communications Inc.',
0x000903: u'Panasas, Inc',
0x000904: u'MONDIAL electronic',
0x000905: u'iTEC Technologies Ltd.',
0x000906: u'Esteem Networks',
0x000907: u'Chrysalis Development',
0x000908: u'VTech Technology Corp.',
0x000909: u'Telenor Connect A/S',
0x00090A: u'SnedFar Technology Co., Ltd.',
0x00090B: u'MTL Instruments PLC',
0x00090C: u'Mayekawa Mfg. Co. Ltd.',
0x00090D: u'LEADER ELECTRONICS CORP.',
0x00090E: u'Helix Technology Inc.',
0x00090F: u'Fortinet Inc.',
0x000910: u'Simple Access Inc.',
0x000911: u'Cisco Systems',
0x000912: u'Cisco Systems',
0x000913: u'SystemK Corporation',
0x000914: u'COMPUTROLS INC.',
0x000915: u'CAS Corp.',
0x000916: u'Listman Home Technologies, Inc.',
0x000917: u'WEM Technology Inc',
0x000918: u'SAMSUNG TECHWIN CO.,LTD',
0x000919: u'MDS Gateways',
0x00091A: u'Macat Optics & Electronics Co., Ltd.',
0x00091B: u'Digital Generation Inc.',
0x00091C: u'CacheVision, Inc',
0x00091D: u'Proteam Computer Corporation',
0x00091E: u'Firstech Technology Corp.',
0x00091F: u'A&D Co., Ltd.',
0x000920: u'EpoX COMPUTER CO.,LTD.',
0x000921: u'Planmeca Oy',
0x000922: u'Touchless Sensor Technology AG',
0x000923: u'Heaman System Co., Ltd',
0x000924: u'Telebau GmbH',
0x000925: u'VSN Systemen BV',
0x000926: u'YODA COMMUNICATIONS, INC.',
0x000927: u'TOYOKEIKI CO.,LTD.',
0x000928: u'Telecore Inc',
0x000929: u'Sanyo Industries (UK) Limited',
0x00092A: u'MYTECS Co.,Ltd.',
0x00092B: u'iQstor Networks, Inc.',
0x00092C: u'Hitpoint Inc.',
0x00092D: u'High Tech Computer, Corp.',
0x00092E: u'B&Tech System Inc.',
0x00092F: u'Akom Technology Corporation',
0x000930: u'AeroConcierge Inc.',
0x000931: u'Future Internet, Inc.',
0x000932: u'Omnilux',
0x000933: u'OPTOVALLEY Co. Ltd.',
0x000934: u'Dream-Multimedia-Tv GmbH',
0x000935: u'Sandvine Incorporated',
0x000936: u'Ipetronik GmbH & Co.KG',
0x000937: u'Inventec Appliance Corp',
0x000938: u'Allot Communications',
0x000939: u'ShibaSoku Co.,Ltd.',
0x00093A: u'Molex Fiber Optics',
0x00093B: u'HYUNDAI NETWORKS INC.',
0x00093C: u'Jacques Technologies P/L',
0x00093D: u'Newisys,Inc.',
0x00093E: u'C&I Technologies',
0x00093F: u'Double-Win Enterpirse CO., LTD',
0x000940: u'AGFEO GmbH & Co. KG',
0x000941: u'Allied Telesis K.K.',
0x000942: u'CRESCO, LTD.',
0x000943: u'Cisco Systems',
0x000944: u'Cisco Systems',
0x000945: u'Palmmicro Communications Inc',
0x000946: u'Cluster Labs GmbH',
0x000947: u'Aztek, Inc.',
0x000948: u'Vista Control Systems, Corp.',
0x000949: u'Glyph Technologies Inc.',
0x00094A: u'Homenet Communications',
0x00094B: u'FillFactory NV',
0x00094C: u'Communication Weaver Co.,Ltd.',
0x00094D: u'Braintree Communications Pty Ltd',
0x00094E: u'BARTECH SYSTEMS INTERNATIONAL, INC',
0x00094F: u'elmegt GmbH & Co. KG',
0x000950: u'Independent Storage Corporation',
0x000951: u'Apogee Instruments, Inc',
0x000952: u'Auerswald GmbH & Co. KG',
0x000953: u'Linkage System Integration Co.Ltd.',
0x000954: u'AMiT spol. s. r. o.',
0x000955: u'Young Generation International Corp.',
0x000956: u'Network Systems Group, Ltd. (NSG)',
0x000957: u'Supercaller, Inc.',
0x000958: u'INTELNET S.A.',
0x000959: u'Sitecsoft',
0x00095A: u'RACEWOOD TECHNOLOGY',
0x00095B: u'Netgear, Inc.',
0x00095C: u'Philips Medical Systems - Cardiac and Monitoring Systems (CM',
0x00095D: u'Dialogue Technology Corp.',
0x00095E: u'Masstech Group Inc.',
0x00095F: u'Telebyte, Inc.',
0x000960: u'YOZAN Inc.',
0x000961: u'Switchgear and Instrumentation Ltd',
0x000962: u'Filetrac AS',
0x000963: u'Dominion Lasercom Inc.',
0x000964: u'Hi-Techniques',
0x000965: u'PRIVATE',
0x000966: u'Thales Navigation',
0x000967: u'Tachyon, Inc',
0x000968: u'TECHNOVENTURE, INC.',
0x000969: u'Meret Optical Communications',
0x00096A: u'Cloverleaf Communications Inc.',
0x00096B: u'IBM Corporation',
0x00096C: u'Imedia Semiconductor Corp.',
0x00096D: u'Powernet Technologies Corp.',
0x00096E: u'GIANT ELECTRONICS LTD.',
0x00096F: u'Beijing Zhongqing Elegant Tech. Corp.,Limited',
0x000970: u'Vibration Research Corporation',
0x000971: u'Time Management, Inc.',
0x000972: u'Securebase,Inc',
0x000973: u'Lenten Technology Co., Ltd.',
0x000974: u'Innopia Technologies, Inc.',
0x000975: u'fSONA Communications Corporation',
0x000976: u'Datasoft ISDN Systems GmbH',
0x000977: u'Brunner Elektronik AG',
0x000978: u'AIJI System Co., Ltd.',
0x000979: u'Advanced Television Systems Committee, Inc.',
0x00097A: u'Louis Design Labs.',
0x00097B: u'Cisco Systems',
0x00097C: u'Cisco Systems',
0x00097D: u'SecWell Networks Oy',
0x00097E: u'IMI TECHNOLOGY CO., LTD',
0x00097F: u'Vsecure 2000 LTD.',
0x000980: u'Power Zenith Inc.',
0x000981: u'Newport Networks',
0x000982: u'Loewe Opta GmbH',
0x000983: u'Gvision Incorporated',
0x000984: u'MyCasa Network Inc.',
0x000985: u'Auto Telecom Company',
0x000986: u'Metalink LTD.',
0x000987: u'NISHI NIPPON ELECTRIC WIRE & CABLE CO.,LTD.',
0x000988: u'Nudian Electron Co., Ltd.',
0x000989: u'VividLogic Inc.',
0x00098A: u'EqualLogic Inc',
0x00098B: u'Entropic Communications, Inc.',
0x00098C: u'Option Wireless Sweden',
0x00098D: u'Velocity Semiconductor',
0x00098E: u'ipcas GmbH',
0x00098F: u'Cetacean Networks',
0x000990: u'ACKSYS Communications & systems',
0x000991: u'GE Fanuc Automation Manufacturing, Inc.',
0x000992: u'InterEpoch Technology,INC.',
0x000993: u'Visteon Corporation',
0x000994: u'Cronyx Engineering',
0x000995: u'Castle Technology Ltd',
0x000996: u'RDI',
0x000997: u'Nortel Networks',
0x000998: u'Capinfo Company Limited',
0x000999: u'CP GEORGES RENAULT',
0x00099A: u'ELMO COMPANY, LIMITED',
0x00099B: u'Western Telematic Inc.',
0x00099C: u'Naval Research Laboratory',
0x00099D: u'Haliplex Communications',
0x00099E: u'Testech, Inc.',
0x00099F: u'VIDEX INC.',
0x0009A0: u'Microtechno Corporation',
0x0009A1: u'Telewise Communications, Inc.',
0x0009A2: u'Interface Co., Ltd.',
0x0009A3: u'Leadfly Techologies Corp. Ltd.',
0x0009A4: u'HARTEC Corporation',
0x0009A5: u'HANSUNG ELETRONIC INDUSTRIES DEVELOPMENT CO., LTD',
0x0009A6: u'Ignis Optics, Inc.',
0x0009A7: u'Bang & Olufsen A/S',
0x0009A8: u'Eastmode Pte Ltd',
0x0009A9: u'Ikanos Communications',
0x0009AA: u'Data Comm for Business, Inc.',
0x0009AB: u'Netcontrol Oy',
0x0009AC: u'LANVOICE',
0x0009AD: u'HYUNDAI SYSCOMM, INC.',
0x0009AE: u'OKANO ELECTRIC CO.,LTD',
0x0009AF: u'e-generis',
0x0009B0: u'Onkyo Corporation',
0x0009B1: u'Kanematsu Electronics, Ltd.',
0x0009B2: u'L&F Inc.',
0x0009B3: u'MCM Systems Ltd',
0x0009B4: u'KISAN TELECOM CO., LTD.',
0x0009B5: u'3J Tech. Co., Ltd.',
0x0009B6: u'Cisco Systems',
0x0009B7: u'Cisco Systems',
0x0009B8: u'Entise Systems',
0x0009B9: u'Action Imaging Solutions',
0x0009BA: u'MAKU Informationstechik GmbH',
0x0009BB: u'MathStar, Inc.',
0x0009BC: u'Integrian, Inc.',
0x0009BD: u'Epygi Technologies, Ltd.',
0x0009BE: u'Mamiya-OP Co.,Ltd.',
0x0009BF: u'Nintendo Co.,Ltd.',
0x0009C0: u'6WIND',
0x0009C1: u'PROCES-DATA A/S',
0x0009C2: u'PRIVATE',
0x0009C3: u'NETAS',
0x0009C4: u'Medicore Co., Ltd',
0x0009C5: u'KINGENE Technology Corporation',
0x0009C6: u'Visionics Corporation',
0x0009C7: u'Movistec',
0x0009C8: u'SINAGAWA TSUSHIN KEISOU SERVICE',
0x0009C9: u'BlueWINC Co., Ltd.',
0x0009CA: u'iMaxNetworks(Shenzhen)Limited.',
0x0009CB: u'HBrain',
0x0009CC: u'Moog GmbH',
0x0009CD: u'HUDSON SOFT CO.,LTD.',
0x0009CE: u'SpaceBridge Semiconductor Corp.',
0x0009CF: u'iAd GmbH',
0x0009D0: u'Versatel Networks',
0x0009D1: u'SERANOA NETWORKS INC',
0x0009D2: u'Mai Logic Inc.',
0x0009D3: u'Western DataCom Co., Inc.',
0x0009D4: u'Transtech Networks',
0x0009D5: u'Signal Communication, Inc.',
0x0009D6: u'KNC One GmbH',
0x0009D7: u'DC Security Products',
0x0009D8: u'PRIVATE',
0x0009D9: u'Neoscale Systems, Inc',
0x0009DA: u'Control Module Inc.',
0x0009DB: u'eSpace',
0x0009DC: u'Galaxis Technology AG',
0x0009DD: u'Mavin Technology Inc.',
0x0009DE: u'Samjin Information & Communications Co., Ltd.',
0x0009DF: u'Vestel Komunikasyon Sanayi ve Ticaret A.S.',
0x0009E0: u'XEMICS S.A.',
0x0009E1: u'Gemtek Technology Co., Ltd.',
0x0009E2: u'Sinbon Electronics Co., Ltd.',
0x0009E3: u'Angel Iglesias S.A.',
0x0009E4: u'K Tech Infosystem Inc.',
0x0009E5: u'Hottinger Baldwin Messtechnik GmbH',
0x0009E6: u'Cyber Switching Inc.',
0x0009E7: u'ADC Techonology',
0x0009E8: u'Cisco Systems',
0x0009E9: u'Cisco Systems',
0x0009EA: u'YEM Inc.',
0x0009EB: u'HuMANDATA LTD.',
0x0009EC: u'Daktronics, Inc.',
0x0009ED: u'CipherOptics',
0x0009EE: u'MEIKYO ELECTRIC CO.,LTD',
0x0009EF: u'Vocera Communications',
0x0009F0: u'Shimizu Technology Inc.',
0x0009F1: u'Yamaki Electric Corporation',
0x0009F2: u'Cohu, Inc., Electronics Division',
0x0009F3: u'WELL Communication Corp.',
0x0009F4: u'Alcon Laboratories, Inc.',
0x0009F5: u'Emerson Network Power Co.,Ltd',
0x0009F6: u'Shenzhen Eastern Digital Tech Ltd.',
0x0009F7: u'SED, a division of Calian',
0x0009F8: u'UNIMO TECHNOLOGY CO., LTD.',
0x0009F9: u'ART JAPAN CO., LTD.',
0x0009FB: u'Philips Medizinsysteme Boeblingen GmbH',
0x0009FC: u'IPFLEX Inc.',
0x0009FD: u'Ubinetics Limited',
0x0009FE: u'Daisy Technologies, Inc.',
0x0009FF: u'X.net 2000 GmbH',
0x000A00: u'Mediatek Corp.',
0x000A01: u'SOHOware, Inc.',
0x000A02: u'ANNSO CO., LTD.',
0x000A03: u'ENDESA SERVICIOS, S.L.',
0x000A04: u'3Com Europe Ltd',
0x000A05: u'Widax Corp.',
0x000A06: u'Teledex LLC',
0x000A07: u'WebWayOne Ltd',
0x000A08: u'ALPINE ELECTRONICS, INC.',
0x000A09: u'TaraCom Integrated Products, Inc.',
0x000A0A: u'SUNIX Co., Ltd.',
0x000A0B: u'Sealevel Systems, Inc.',
0x000A0C: u'Scientific Research Corporation',
0x000A0D: u'MergeOptics GmbH',
0x000A0E: u'Invivo Research Inc.',
0x000A0F: u'Ilryung Telesys, Inc',
0x000A10: u'FAST media integrations AG',
0x000A11: u'ExPet Technologies, Inc',
0x000A12: u'Azylex Technology, Inc',
0x000A13: u'Silent Witness',
0x000A14: u'TECO a.s.',
0x000A15: u'Silicon Data, Inc',
0x000A16: u'Lassen Research',
0x000A17: u'NESTAR COMMUNICATIONS, INC',
0x000A18: u'Vichel Inc.',
0x000A19: u'Valere Power, Inc.',
0x000A1A: u'Imerge Ltd',
0x000A1B: u'Stream Labs',
0x000A1C: u'Bridge Information Co., Ltd.',
0x000A1D: u'Optical Communications Products Inc.',
0x000A1E: u'Red-M Products Limited',
0x000A1F: u'ART WARE Telecommunication Co., Ltd.',
0x000A20: u'SVA Networks, Inc.',
0x000A21: u'Integra Telecom Co. Ltd',
0x000A22: u'Amperion Inc',
0x000A23: u'Parama Networks Inc',
0x000A24: u'Octave Communications',
0x000A25: u'CERAGON NETWORKS',
0x000A26: u'CEIA S.p.A.',
0x000A27: u'Apple Computer, Inc.',
0x000A28: u'Motorola',
0x000A29: u'Pan Dacom Networking AG',
0x000A2A: u'QSI Systems Inc.',
0x000A2B: u'Etherstuff',
0x000A2C: u'Active Tchnology Corporation',
0x000A2D: u'PRIVATE',
0x000A2E: u'MAPLE NETWORKS CO., LTD',
0x000A2F: u'Artnix Inc.',
0x000A30: u'Johnson Controls-ASG',
0x000A31: u'HCV Wireless',
0x000A32: u'Xsido Corporation',
0x000A33: u'Emulex Corporation',
0x000A34: u'Identicard Systems Incorporated',
0x000A35: u'Xilinx',
0x000A36: u'Synelec Telecom Multimedia',
0x000A37: u'Procera Networks, Inc.',
0x000A38: u'Netlock Technologies, Inc.',
0x000A39: u'LoPA Information Technology',
0x000A3A: u'J-THREE INTERNATIONAL Holding Co., Ltd.',
0x000A3B: u'GCT Semiconductor, Inc',
0x000A3C: u'Enerpoint Ltd.',
0x000A3D: u'Elo Sistemas Eletronicos S.A.',
0x000A3E: u'EADS Telecom',
0x000A3F: u'Data East Corporation',
0x000A40: u'Crown Audio',
0x000A41: u'Cisco Systems',
0x000A42: u'Cisco Systems',
0x000A43: u'Chunghwa Telecom Co., Ltd.',
0x000A44: u'Avery Dennison Deutschland GmbH',
0x000A45: u'Audio-Technica Corp.',
0x000A46: u'ARO Controls SAS',
0x000A47: u'Allied Vision Technologies',
0x000A48: u'Albatron Technology',
0x000A49: u'Acopia Networks',
0x000A4A: u'Targa Systems Ltd.',
0x000A4B: u'DataPower Technology, Inc.',
0x000A4C: u'Molecular Devices Corporation',
0x000A4D: u'Noritz Corporation',
0x000A4E: u'UNITEK Electronics INC.',
0x000A4F: u'Brain Boxes Limited',
0x000A50: u'REMOTEK CORPORATION',
0x000A51: u'GyroSignal Technology Co., Ltd.',
0x000A52: u'AsiaRF Ltd.',
0x000A53: u'Intronics, Incorporated',
0x000A54: u'Laguna Hills, Inc.',
0x000A55: u'MARKEM Corporation',
0x000A56: u'HITACHI Maxell Ltd.',
0x000A57: u'Hewlett-Packard Company - Standards',
0x000A58: u'Ingenieur-Buero Freyer & Siegel',
0x000A59: u'HW server',
0x000A5A: u'GreenNET Technologies Co.,Ltd.',
0x000A5B: u'Power-One as',
0x000A5C: u'Carel s.p.a.',
0x000A5D: u'PUC Founder (MSC) Berhad',
0x000A5E: u'3COM Corporation',
0x000A5F: u'almedio inc.',
0x000A60: u'Autostar Technology Pte Ltd',
0x000A61: u'Cellinx Systems Inc.',
0x000A62: u'Crinis Networks, Inc.',
0x000A63: u'DHD GmbH',
0x000A64: u'Eracom Technologies',
0x000A65: u'GentechMedia.co.,ltd.',
0x000A66: u'MITSUBISHI ELECTRIC SYSTEM & SERVICE CO.,LTD.',
0x000A67: u'OngCorp',
0x000A68: u'SolarFlare Communications, Inc.',
0x000A69: u'SUNNY bell Technology Co., Ltd.',
0x000A6A: u'SVM Microwaves s.r.o.',
0x000A6B: u'Tadiran Telecom Business Systems LTD',
0x000A6C: u'Walchem Corporation',
0x000A6D: u'EKS Elektronikservice GmbH',
0x000A6E: u'Broadcast Technology Limited',
0x000A6F: u'ZyFLEX Technologies Inc',
0x000A70: u'MPLS Forum',
0x000A71: u'Avrio Technologies, Inc',
0x000A72: u'SimpleTech, Inc.',
0x000A73: u'Scientific Atlanta',
0x000A74: u'Manticom Networks Inc.',
0x000A75: u'Cat Electronics',
0x000A76: u'Beida Jade Bird Huaguang Technology Co.,Ltd',
0x000A77: u'Bluewire Technologies LLC',
0x000A78: u'OLITEC',
0x000A79: u'corega K.K.',
0x000A7A: u'Kyoritsu Electric Co., Ltd.',
0x000A7B: u'Cornelius Consult',
0x000A7C: u'Tecton Ltd',
0x000A7D: u'Valo, Inc.',
0x000A7E: u'The Advantage Group',
0x000A7F: u'Teradon Industries, Inc',
0x000A80: u'Telkonet Inc.',
0x000A81: u'TEIMA Audiotex S.L.',
0x000A82: u'TATSUTA SYSTEM ELECTRONICS CO.,LTD.',
0x000A83: u'SALTO SYSTEMS S.L.',
0x000A84: u'Rainsun Enterprise Co., Ltd.',
0x000A85: u'PLAT\'C2,Inc',
0x000A86: u'Lenze',
0x000A87: u'Integrated Micromachines Inc.',
0x000A88: u'InCypher S.A.',
0x000A89: u'Creval Systems, Inc.',
0x000A8A: u'Cisco Systems',
0x000A8B: u'Cisco Systems',
0x000A8C: u'Guardware Systems Ltd.',
0x000A8D: u'EUROTHERM LIMITED',
0x000A8E: u'Invacom Ltd',
0x000A8F: u'Aska International Inc.',
0x000A90: u'Bayside Interactive, Inc.',
0x000A91: u'HemoCue AB',
0x000A92: u'Presonus Corporation',
0x000A93: u'W2 Networks, Inc.',
0x000A94: u'ShangHai cellink CO., LTD',
0x000A95: u'Apple Computer, Inc.',
0x000A96: u'MEWTEL TECHNOLOGY INC.',
0x000A97: u'SONICblue, Inc.',
0x000A98: u'M+F Gwinner GmbH & Co',
0x000A99: u'Dataradio Inc.',
0x000A9A: u'Aiptek International Inc',
0x000A9B: u'Towa Meccs Corporation',
0x000A9C: u'Server Technology, Inc.',
0x000A9D: u'King Young Technology Co. Ltd.',
0x000A9E: u'BroadWeb Corportation',
0x000A9F: u'Pannaway Technologies, Inc.',
0x000AA0: u'Cedar Point Communications',
0x000AA1: u'V V S Limited',
0x000AA2: u'SYSTEK INC.',
0x000AA3: u'SHIMAFUJI ELECTRIC CO.,LTD.',
0x000AA4: u'SHANGHAI SURVEILLANCE TECHNOLOGY CO,LTD',
0x000AA5: u'MAXLINK INDUSTRIES LIMITED',
0x000AA6: u'Hochiki Corporation',
0x000AA7: u'FEI Company',
0x000AA8: u'ePipe Pty. Ltd.',
0x000AA9: u'Brooks Automation GmbH',
0x000AAA: u'AltiGen Communications Inc.',
0x000AAB: u'TOYOTA MACS, INC.',
0x000AAC: u'TerraTec Electronic GmbH',
0x000AAD: u'Stargames Corporation',
0x000AAE: u'Rosemount Process Analytical',
0x000AAF: u'Pipal Systems',
0x000AB0: u'LOYTEC electronics GmbH',
0x000AB1: u'GENETEC Corporation',
0x000AB2: u'Fresnel Wireless Systems',
0x000AB3: u'Fa. GIRA',
0x000AB4: u'ETIC Telecommunications',
0x000AB5: u'Digital Electronic Network',
0x000AB6: u'COMPUNETIX, INC',
0x000AB7: u'Cisco Systems',
0x000AB8: u'Cisco Systems',
0x000AB9: u'Astera Technologies Corp.',
0x000ABA: u'Arcon Technology Limited',
0x000ABB: u'Taiwan Secom Co,. Ltd',
0x000ABC: u'Seabridge Ltd.',
0x000ABD: u'Rupprecht & Patashnick Co.',
0x000ABE: u'OPNET Technologies CO., LTD.',
0x000ABF: u'HIROTA SS',
0x000AC0: u'Fuyoh Video Industry CO., LTD.',
0x000AC1: u'Futuretel',
0x000AC2: u'FiberHome Telecommunication Technologies CO.,LTD',
0x000AC3: u'eM Technics Co., Ltd.',
0x000AC4: u'Daewoo Teletech Co., Ltd',
0x000AC5: u'Color Kinetics',
0x000AC6: u'Ceterus Networks, Inc.',
0x000AC7: u'Unication Group',
0x000AC8: u'ZPSYS CO.,LTD. (Planning&Management)',
0x000AC9: u'Zambeel Inc',
0x000ACA: u'YOKOYAMA SHOKAI CO.,Ltd.',
0x000ACB: u'XPAK MSA Group',
0x000ACC: u'Winnow Networks, Inc.',
0x000ACD: u'Sunrich Technology Limited',
0x000ACE: u'RADIANTECH, INC.',
0x000ACF: u'PROVIDEO Multimedia Co. Ltd.',
0x000AD0: u'Niigata Develoment Center, F.I.T. Co., Ltd.',
0x000AD1: u'MWS',
0x000AD2: u'JEPICO Corporation',
0x000AD3: u'INITECH Co., Ltd',
0x000AD4: u'CoreBell Systems Inc.',
0x000AD5: u'Brainchild Electronic Co., Ltd.',
0x000AD6: u'BeamReach Networks',
0x000AD7: u'Origin ELECTRIC CO.,LTD.',
0x000AD8: u'IPCserv Technology Corp.',
0x000AD9: u'Sony Ericsson Mobile Communications AB',
0x000ADA: u'PRIVATE',
0x000ADB: u'SkyPilot Network, Inc',
0x000ADC: u'RuggedCom Inc.',
0x000ADD: u'InSciTek Microsystems, Inc.',
0x000ADE: u'Happy Communication Co., Ltd.',
0x000ADF: u'Gennum Corporation',
0x000AE0: u'Fujitsu Softek',
0x000AE1: u'EG Technology',
0x000AE2: u'Binatone Electronics International, Ltd',
0x000AE3: u'YANG MEI TECHNOLOGY CO., LTD',
0x000AE4: u'Wistron Corp.',
0x000AE5: u'ScottCare Corporation',
0x000AE6: u'Elitegroup Computer System Co. (ECS)',
0x000AE7: u'ELIOP S.A.',
0x000AE8: u'Cathay Roxus Information Technology Co. LTD',
0x000AE9: u'AirVast Technology Inc.',
0x000AEA: u'ADAM ELEKTRONIK LTD.STI.',
0x000AEB: u'Shenzhen Tp-Link Technology Co; Ltd.',
0x000AEC: u'Koatsu Gas Kogyo Co., Ltd.',
0x000AED: u'HARTING Vending G.m.b.H. & CO KG',
0x000AEE: u'GCD Hard- & Software GmbH',
0x000AEF: u'OTRUM ASA',
0x000AF0: u'SHIN-OH ELECTRONICS CO., LTD. R&D',
0x000AF1: u'Clarity Design, Inc.',
0x000AF2: u'NeoAxiom Corp.',
0x000AF3: u'Cisco Systems',
0x000AF4: u'Cisco Systems',
0x000AF5: u'Airgo Networks, Inc.',
0x000AF6: u'Computer Process Controls',
0x000AF7: u'Broadcom Corp.',
0x000AF8: u'American Telecare Inc.',
0x000AF9: u'HiConnect, Inc.',
0x000AFA: u'Traverse Technologies Australia',
0x000AFB: u'Ambri Limited',
0x000AFC: u'Core Tec Communications, LLC',
0x000AFD: u'Viking Electronic Services',
0x000AFE: u'NovaPal Ltd',
0x000AFF: u'Kilchherr Elektronik AG',
0x000B00: u'FUJIAN START COMPUTER EQUIPMENT CO.,LTD',
0x000B01: u'DAIICHI ELECTRONICS CO., LTD.',
0x000B02: u'Dallmeier electronic',
0x000B03: u'Taekwang Industrial Co., Ltd',
0x000B04: u'Volktek Corporation',
0x000B05: u'Pacific Broadband Networks',
0x000B06: u'Motorola BCS',
0x000B07: u'Voxpath Networks',
0x000B08: u'Pillar Data Systems',
0x000B09: u'Ifoundry Systems Singapore',
0x000B0A: u'dBm Optics',
0x000B0B: u'Corrent Corporation',
0x000B0C: u'Agile Systems Inc.',
0x000B0D: u'Air2U, Inc.',
0x000B0E: u'Trapeze Networks',
0x000B0F: u'Nyquist Industrial Control BV',
0x000B10: u'11wave Technonlogy Co.,Ltd',
0x000B11: u'HIMEJI ABC TRADING CO.,LTD.',
0x000B12: u'NURI Telecom Co., Ltd.',
0x000B13: u'ZETRON INC',
0x000B14: u'ViewSonic Corporation',
0x000B15: u'Platypus Technology',
0x000B16: u'Communication Machinery Corporation',
0x000B17: u'MKS Instruments',
0x000B18: u'PRIVATE',
0x000B19: u'Vernier Networks, Inc.',
0x000B1A: u'Teltone Corporation',
0x000B1B: u'Systronix, Inc.',
0x000B1C: u'SIBCO bv',
0x000B1D: u'LayerZero Power Systems, Inc.',
0x000B1E: u'KAPPA opto-electronics GmbH',
0x000B1F: u'I CON Computer Co.',
0x000B20: u'Hirata corporation',
0x000B21: u'G-Star Communications Inc.',
0x000B22: u'Environmental Systems and Services',
0x000B23: u'Siemens Subscriber Networks',
0x000B24: u'AirLogic',
0x000B25: u'Aeluros',
0x000B26: u'Wetek Corporation',
0x000B27: u'Scion Corporation',
0x000B28: u'Quatech Inc.',
0x000B29: u'LG Industrial Systems Co.,Ltd.',
0x000B2A: u'HOWTEL Co., Ltd.',
0x000B2B: u'HOSTNET CORPORATION',
0x000B2C: u'Eiki Industrial Co. Ltd.',
0x000B2D: u'Danfoss Inc.',
0x000B2E: u'Cal-Comp Electronics (Thailand) Public Company Limited Taipe',
0x000B2F: u'bplan GmbH',
0x000B30: u'Beijing Gongye Science & Technology Co.,Ltd',
0x000B31: u'Yantai ZhiYang Scientific and technology industry CO., LTD',
0x000B32: u'VORMETRIC, INC.',
0x000B33: u'Vivato',
0x000B34: u'ShangHai Broadband Technologies CO.LTD',
0x000B35: u'Quad Bit System co., Ltd.',
0x000B36: u'Productivity Systems, Inc.',
0x000B37: u'MANUFACTURE DES MONTRES ROLEX SA',
0x000B38: u'Knuerr AG',
0x000B39: u'Keisoku Giken Co.,Ltd.',
0x000B3A: u'QuStream Corporation',
0x000B3B: u'devolo AG',
0x000B3C: u'Cygnal Integrated Products, Inc.',
0x000B3D: u'CONTAL OK Ltd.',
0x000B3E: u'BittWare, Inc',
0x000B3F: u'Anthology Solutions Inc.',
0x000B40: u'OpNext Inc.',
0x000B41: u'Ing. Buero Dr. Beutlhauser',
0x000B42: u'commax Co., Ltd.',
0x000B43: u'Microscan Systems, Inc.',
0x000B44: u'Concord IDea Corp.',
0x000B45: u'Cisco',
0x000B46: u'Cisco',
0x000B47: u'Advanced Energy',
0x000B48: u'sofrel',
0x000B49: u'RF-Link System Inc.',
0x000B4A: u'Visimetrics (UK) Ltd',
0x000B4B: u'VISIOWAVE SA',
0x000B4C: u'Clarion (M) Sdn Bhd',
0x000B4D: u'Emuzed',
0x000B4E: u'VertexRSI Antenna Products Division',
0x000B4F: u'Verifone, INC.',
0x000B50: u'Oxygnet',
0x000B51: u'Micetek International Inc.',
0x000B52: u'JOYMAX ELECTRONICS CORP.',
0x000B53: u'INITIUM Co., Ltd.',
0x000B54: u'BiTMICRO Networks, Inc.',
0x000B55: u'ADInstruments',
0x000B56: u'Cybernetics',
0x000B57: u'Silicon Laboratories',
0x000B58: u'Astronautics C.A LTD',
0x000B59: u'ScriptPro, LLC',
0x000B5A: u'HyperEdge',
0x000B5B: u'Rincon Research Corporation',
0x000B5C: u'Newtech Co.,Ltd',
0x000B5D: u'FUJITSU LIMITED',
0x000B5E: u'Audio Engineering Society Inc.',
0x000B5F: u'Cisco Systems',
0x000B60: u'Cisco Systems',
0x000B61: u'Friedrich Lütze GmbH &Co.',
0x000B62: u'Ingenieurbüro Ingo Mohnen',
0x000B63: u'Kaleidescape',
0x000B64: u'Kieback & Peter GmbH & Co KG',
0x000B65: u'Sy.A.C. srl',
0x000B66: u'Teralink Communications',
0x000B67: u'Topview Technology Corporation',
0x000B68: u'Addvalue Communications Pte Ltd',
0x000B69: u'Franke Finland Oy',
0x000B6A: u'Asiarock Incorporation',
0x000B6B: u'Wistron Neweb Corp.',
0x000B6C: u'Sychip Inc.',
0x000B6D: u'SOLECTRON JAPAN NAKANIIDA',
0x000B6E: u'Neff Instrument Corp.',
0x000B6F: u'Media Streaming Networks Inc',
0x000B70: u'Load Technology, Inc.',
0x000B71: u'Litchfield Communications Inc.',
0x000B72: u'Lawo AG',
0x000B73: u'Kodeos Communications',
0x000B74: u'Kingwave Technology Co., Ltd.',
0x000B75: u'Iosoft Ltd.',
0x000B76: u'ET&T Co. Ltd.',
0x000B77: u'Cogent Systems, Inc.',
0x000B78: u'TAIFATECH INC.',
0x000B79: u'X-COM, Inc.',
0x000B7A: u'Wave Science Inc.',
0x000B7B: u'Test-Um Inc.',
0x000B7C: u'Telex Communications',
0x000B7D: u'SOLOMON EXTREME INTERNATIONAL LTD.',
0x000B7E: u'SAGINOMIYA Seisakusho Inc.',
0x000B7F: u'OmniWerks',
0x000B80: u'Lycium Networks',
0x000B81: u'Kaparel Corporation',
0x000B82: u'Grandstream Networks, Inc.',
0x000B83: u'DATAWATT B.V.',
0x000B84: u'BODET',
0x000B85: u'Airespace, Inc.',
0x000B86: u'Aruba Networks',
0x000B87: u'American Reliance Inc.',
0x000B88: u'Vidisco ltd.',
0x000B89: u'Top Global Technology, Ltd.',
0x000B8A: u'MITEQ Inc.',
0x000B8B: u'KERAJET, S.A.',
0x000B8C: u'flextronics israel',
0x000B8D: u'Avvio Networks',
0x000B8E: u'Ascent Corporation',
0x000B8F: u'AKITA ELECTRONICS SYSTEMS CO.,LTD.',
0x000B90: u'Covaro Networks, Inc.',
0x000B91: u'Aglaia Gesellschaft für Bildverarbeitung und Kommunikation m',
0x000B92: u'Ascom Danmark A/S',
0x000B93: u'Barmag Electronic',
0x000B94: u'Digital Monitoring Products, Inc.',
0x000B95: u'eBet Gaming Systems Pty Ltd',
0x000B96: u'Innotrac Diagnostics Oy',
0x000B97: u'Matsushita Electric Industrial Co.,Ltd.',
0x000B98: u'NiceTechVision',
0x000B99: u'SensAble Technologies, Inc.',
0x000B9A: u'Shanghai Ulink Telecom Equipment Co. Ltd.',
0x000B9B: u'Sirius System Co, Ltd.',
0x000B9C: u'TriBeam Technologies, Inc.',
0x000B9D: u'TwinMOS Technologies Inc.',
0x000B9E: u'Yasing Technology Corp.',
0x000B9F: u'Neue ELSA GmbH',
0x000BA0: u'T&L Information Inc.',
0x000BA1: u'SYSCOM Ltd.',
0x000BA2: u'Sumitomo Electric Networks, Inc',
0x000BA3: u'Siemens AG, I&S',
0x000BA4: u'Shiron Satellite Communications Ltd. (1996)',
0x000BA5: u'Quasar Cipta Mandiri, PT',
0x000BA6: u'Miyakawa Electric Works Ltd.',
0x000BA7: u'Maranti Networks',
0x000BA8: u'HANBACK ELECTRONICS CO., LTD.',
0x000BA9: u'CloudShield Technologies, Inc.',
0x000BAA: u'Aiphone co.,Ltd',
0x000BAB: u'Advantech Technology (CHINA) Co., Ltd.',
0x000BAC: u'3Com Europe Ltd.',
0x000BAD: u'PC-PoS Inc.',
0x000BAE: u'Vitals System Inc.',
0x000BAF: u'WOOJU COMMUNICATIONS Co,.Ltd',
0x000BB0: u'Sysnet Telematica srl',
0x000BB1: u'Super Star Technology Co., Ltd.',
0x000BB2: u'SMALLBIG TECHNOLOGY',
0x000BB3: u'RiT technologies Ltd.',
0x000BB4: u'RDC Semiconductor Inc.,',
0x000BB5: u'nStor Technologies, Inc.',
0x000BB6: u'Mototech Inc.',
0x000BB7: u'Micro Systems Co.,Ltd.',
0x000BB8: u'Kihoku Electronic Co.',
0x000BB9: u'Imsys AB',
0x000BBA: u'Harmonic Broadband Access Networks',
0x000BBB: u'Etin Systems Co., Ltd',
0x000BBC: u'En Garde Systems, Inc.',
0x000BBD: u'Connexionz Limited',
0x000BBE: u'Cisco Systems',
0x000BBF: u'Cisco Systems',
0x000BC0: u'China IWNComm Co., Ltd.',
0x000BC1: u'Bay Microsystems, Inc.',
0x000BC2: u'Corinex Communication Corp.',
0x000BC3: u'Multiplex, Inc.',
0x000BC4: u'BIOTRONIK GmbH & Co',
0x000BC5: u'SMC Networks, Inc.',
0x000BC6: u'ISAC, Inc.',
0x000BC7: u'ICET S.p.A.',
0x000BC8: u'AirFlow Networks',
0x000BC9: u'Electroline Equipment',
0x000BCA: u'DATAVAN International Corporation',
0x000BCB: u'Fagor Automation , S. Coop',
0x000BCC: u'JUSAN, S.A.',
0x000BCD: u'Compaq (HP)',
0x000BCE: u'Free2move AB',
0x000BCF: u'AGFA NDT INC.',
0x000BD0: u'XiMeta Technology Americas Inc.',
0x000BD1: u'Aeronix, Inc.',
0x000BD2: u'Remopro Technology Inc.',
0x000BD3: u'cd3o',
0x000BD4: u'Beijing Wise Technology & Science Development Co.Ltd',
0x000BD5: u'Nvergence, Inc.',
0x000BD6: u'Paxton Access Ltd',
0x000BD7: u'MBB Gelma GmbH',
0x000BD8: u'Industrial Scientific Corp.',
0x000BD9: u'General Hydrogen',
0x000BDA: u'EyeCross Co.,Inc.',
0x000BDB: u'Dell ESG PCBA Test',
0x000BDC: u'AKCP',
0x000BDD: u'TOHOKU RICOH Co., LTD.',
0x000BDE: u'TELDIX GmbH',
0x000BDF: u'Shenzhen RouterD Networks Limited',
0x000BE0: u'SercoNet Ltd.',
0x000BE1: u'Nokia NET Product Operations',
0x000BE2: u'Lumenera Corporation',
0x000BE3: u'Key Stream Co., Ltd.',
0x000BE4: u'Hosiden Corporation',
0x000BE5: u'HIMS Korea Co., Ltd.',
0x000BE6: u'Datel Electronics',
0x000BE7: u'COMFLUX TECHNOLOGY INC.',
0x000BE8: u'AOIP',
0x000BE9: u'Actel Corporation',
0x000BEA: u'Zultys Technologies',
0x000BEB: u'Systegra AG',
0x000BEC: u'NIPPON ELECTRIC INSTRUMENT, INC.',
0x000BED: u'ELM Inc.',
0x000BEE: u'inc.jet, Incorporated',
0x000BEF: u'Code Corporation',
0x000BF0: u'MoTEX Products Co., Ltd.',
0x000BF1: u'LAP Laser Applikations',
0x000BF2: u'Chih-Kan Technology Co., Ltd.',
0x000BF3: u'BAE SYSTEMS',
0x000BF4: u'PRIVATE',
0x000BF5: u'Shanghai Sibo Telecom Technology Co.,Ltd',
0x000BF6: u'Nitgen Co., Ltd',
0x000BF7: u'NIDEK CO.,LTD',
0x000BF8: u'Infinera',
0x000BF9: u'Gemstone communications, Inc.',
0x000BFA: u'EXEMYS SRL',
0x000BFB: u'D-NET International Corporation',
0x000BFC: u'Cisco Systems',
0x000BFD: u'Cisco Systems',
0x000BFE: u'CASTEL Broadband Limited',
0x000BFF: u'Berkeley Camera Engineering',
0x000C00: u'BEB Industrie-Elektronik AG',
0x000C01: u'Abatron AG',
0x000C02: u'ABB Oy',
0x000C03: u'HDMI Licensing, LLC',
0x000C04: u'Tecnova',
0x000C05: u'RPA Reserch Co., Ltd.',
0x000C06: u'Nixvue Systems Pte Ltd',
0x000C07: u'Iftest AG',
0x000C08: u'HUMEX Technologies Corp.',
0x000C09: u'Hitachi IE Systems Co., Ltd',
0x000C0A: u'Guangdong Province Electronic Technology Research Institute',
0x000C0B: u'Broadbus Technologies',
0x000C0C: u'APPRO TECHNOLOGY INC.',
0x000C0D: u'Communications & Power Industries / Satcom Division',
0x000C0E: u'XtremeSpectrum, Inc.',
0x000C0F: u'Techno-One Co., Ltd',
0x000C10: u'PNI Corporation',
0x000C11: u'NIPPON DEMPA CO.,LTD.',
0x000C12: u'Micro-Optronic-Messtechnik GmbH',
0x000C13: u'MediaQ',
0x000C14: u'Diagnostic Instruments, Inc.',
0x000C15: u'CyberPower Systems, Inc.',
0x000C16: u'Concorde Microsystems Inc.',
0x000C17: u'AJA Video Systems Inc',
0x000C18: u'Zenisu Keisoku Inc.',
0x000C19: u'Telio Communications GmbH',
0x000C1A: u'Quest Technical Solutions Inc.',
0x000C1B: u'ORACOM Co, Ltd.',
0x000C1C: u'MicroWeb Co., Ltd.',
0x000C1D: u'Mettler & Fuchs AG',
0x000C1E: u'Global Cache',
0x000C1F: u'Glimmerglass Networks',
0x000C20: u'Fi WIn, Inc.',
0x000C21: u'Faculty of Science and Technology, Keio University',
0x000C22: u'Double D Electronics Ltd',
0x000C23: u'Beijing Lanchuan Tech. Co., Ltd.',
0x000C24: u'ANATOR',
0x000C25: u'Allied Telesyn Networks',
0x000C26: u'Weintek Labs. Inc.',
0x000C27: u'Sammy Corporation',
0x000C28: u'RIFATRON',
0x000C29: u'VMware, Inc.',
0x000C2A: u'OCTTEL Communication Co., Ltd.',
0x000C2B: u'ELIAS Technology, Inc.',
0x000C2C: u'Enwiser Inc.',
0x000C2D: u'FullWave Technology Co., Ltd.',
0x000C2E: u'Openet information technology(shenzhen) Co., Ltd.',
0x000C2F: u'SeorimTechnology Co.,Ltd.',
0x000C30: u'Cisco',
0x000C31: u'Cisco',
0x000C32: u'Avionic Design Development GmbH',
0x000C33: u'Compucase Enterprise Co. Ltd.',
0x000C34: u'Vixen Co., Ltd.',
0x000C35: u'KaVo Dental GmbH & Co. KG',
0x000C36: u'SHARP TAKAYA ELECTRONICS INDUSTRY CO.,LTD.',
0x000C37: u'Geomation, Inc.',
0x000C38: u'TelcoBridges Inc.',
0x000C39: u'Sentinel Wireless Inc.',
0x000C3A: u'Oxance',
0x000C3B: u'Orion Electric Co., Ltd.',
0x000C3C: u'MediaChorus, Inc.',
0x000C3D: u'Glsystech Co., Ltd.',
0x000C3E: u'Crest Audio',
0x000C3F: u'Cogent Defence & Security Networks,',
0x000C40: u'Altech Controls',
0x000C41: u'The Linksys Group, Inc.',
0x000C42: u'Routerboard.com',
0x000C43: u'Ralink Technology, Corp.',
0x000C44: u'Automated Interfaces, Inc.',
0x000C45: u'Animation Technologies Inc.',
0x000C46: u'Allied Telesyn Inc.',
0x000C47: u'SK Teletech(R&D Planning Team)',
0x000C48: u'QoStek Corporation',
0x000C49: u'Dangaard Telecom RTC Division A/S',
0x000C4A: u'Cygnus Microsystems Private Limited',
0x000C4B: u'Cheops Elektronik',
0x000C4C: u'Arcor AG&Co.',
0x000C4D: u'ACRA CONTROL',
0x000C4E: u'Winbest Technology CO,LT',
0x000C4F: u'UDTech Japan Corporation',
0x000C50: u'Seagate Technology',
0x000C51: u'Scientific Technologies Inc.',
0x000C52: u'Roll Systems Inc.',
0x000C53: u'PRIVATE',
0x000C54: u'Pedestal Networks, Inc',
0x000C55: u'Microlink Communications Inc.',
0x000C56: u'Megatel Computer (1986) Corp.',
0x000C57: u'MACKIE Engineering Services Belgium BVBA',
0x000C58: u'M&S Systems',
0x000C59: u'Indyme Electronics, Inc.',
0x000C5A: u'IBSmm Industrieelektronik Multimedia',
0x000C5B: u'HANWANG TECHNOLOGY CO.,LTD',
0x000C5C: u'GTN Systems B.V.',
0x000C5D: u'CHIC TECHNOLOGY (CHINA) CORP.',
0x000C5E: u'Calypso Medical',
0x000C5F: u'Avtec, Inc.',
0x000C60: u'ACM Systems',
0x000C61: u'AC Tech corporation DBA Advanced Digital',
0x000C62: u'ABB Automation Technology Products AB, Control',
0x000C63: u'Zenith Electronics Corporation',
0x000C64: u'X2 MSA Group',
0x000C65: u'Sunin Telecom',
0x000C66: u'Pronto Networks Inc',
0x000C67: u'OYO ELECTRIC CO.,LTD',
0x000C68: u'SigmaTel, Inc.',
0x000C69: u'National Radio Astronomy Observatory',
0x000C6A: u'MBARI',
0x000C6B: u'Kurz Industrie-Elektronik GmbH',
0x000C6C: u'Elgato Systems LLC',
0x000C6D: u'BOC Edwards',
0x000C6E: u'ASUSTEK COMPUTER INC.',
0x000C6F: u'Amtek system co.,LTD.',
0x000C70: u'ACC GmbH',
0x000C71: u'Wybron, Inc',
0x000C72: u'Tempearl Industrial Co., Ltd.',
0x000C73: u'TELSON ELECTRONICS CO., LTD',
0x000C74: u'RIVERTEC CORPORATION',
0x000C75: u'Oriental integrated electronics. LTD',
0x000C76: u'MICRO-STAR INTERNATIONAL CO., LTD.',
0x000C77: u'Life Racing Ltd',
0x000C78: u'In-Tech Electronics Limited',
0x000C79: u'Extel Communications P/L',
0x000C7A: u'DaTARIUS Technologies GmbH',
0x000C7B: u'ALPHA PROJECT Co.,Ltd.',
0x000C7C: u'Internet Information Image Inc.',
0x000C7D: u'TEIKOKU ELECTRIC MFG. CO., LTD',
0x000C7E: u'Tellium Incorporated',
0x000C7F: u'synertronixx GmbH',
0x000C80: u'Opelcomm Inc.',
0x000C81: u'Nulec Industries Pty Ltd',
0x000C82: u'NETWORK TECHNOLOGIES INC',
0x000C83: u'Logical Solutions',
0x000C84: u'Eazix, Inc.',
0x000C85: u'Cisco Systems',
0x000C86: u'Cisco Systems',
0x000C87: u'ATI',
0x000C88: u'Apache Micro Peripherals, Inc.',
0x000C89: u'AC Electric Vehicles, Ltd.',
0x000C8A: u'Bose Corporation',
0x000C8B: u'Connect Tech Inc',
0x000C8C: u'KODICOM CO.,LTD.',
0x000C8D: u'MATRIX VISION GmbH',
0x000C8E: u'Mentor Engineering Inc',
0x000C8F: u'Nergal s.r.l.',
0x000C90: u'Octasic Inc.',
0x000C91: u'Riverhead Networks Inc.',
0x000C92: u'WolfVision Gmbh',
0x000C93: u'Xeline Co., Ltd.',
0x000C94: u'United Electronic Industries, Inc.',
0x000C95: u'PrimeNet',
0x000C96: u'OQO, Inc.',
0x000C97: u'NV ADB TTV Technologies SA',
0x000C98: u'LETEK Communications Inc.',
0x000C99: u'HITEL LINK Co.,Ltd',
0x000C9A: u'Hitech Electronics Corp.',
0x000C9B: u'EE Solutions, Inc',
0x000C9C: u'Chongho information & communications',
0x000C9D: u'AirWalk Communications, Inc.',
0x000C9E: u'MemoryLink Corp.',
0x000C9F: u'NKE Corporation',
0x000CA0: u'StorCase Technology, Inc.',
0x000CA1: u'SIGMACOM Co., LTD.',
0x000CA2: u'Scopus Network Technologies Ltd',
0x000CA3: u'Rancho Technology, Inc.',
0x000CA4: u'Prompttec Product Management GmbH',
0x000CA5: u'Naman NZ LTd',
0x000CA6: u'Mintera Corporation',
0x000CA7: u'Metro (Suzhou) Technologies Co., Ltd.',
0x000CA8: u'Garuda Networks Corporation',
0x000CA9: u'Ebtron Inc.',
0x000CAA: u'Cubic Transportation Systems Inc',
0x000CAB: u'COMMEND International',
0x000CAC: u'Citizen Watch Co., Ltd.',
0x000CAD: u'BTU International',
0x000CAE: u'Ailocom Oy',
0x000CAF: u'TRI TERM CO.,LTD.',
0x000CB0: u'Star Semiconductor Corporation',
0x000CB1: u'Salland Engineering (Europe) BV',
0x000CB2: u'safei Co., Ltd.',
0x000CB3: u'ROUND Co.,Ltd.',
0x000CB4: u'AutoCell Laboratories, Inc.',
0x000CB5: u'Premier Technolgies, Inc',
0x000CB6: u'NANJING SEU MOBILE & INTERNET TECHNOLOGY CO.,LTD',
0x000CB7: u'Nanjing Huazhuo Electronics Co., Ltd.',
0x000CB8: u'MEDION AG',
0x000CB9: u'LEA',
0x000CBA: u'Jamex',
0x000CBB: u'ISKRAEMECO',
0x000CBC: u'Iscutum',
0x000CBD: u'Interface Masters, Inc',
0x000CBE: u'PRIVATE',
0x000CBF: u'Holy Stone Ent. Co., Ltd.',
0x000CC0: u'Genera Oy',
0x000CC1: u'Cooper Industries Inc.',
0x000CC2: u'PRIVATE',
0x000CC3: u'BeWAN systems',
0x000CC4: u'Tiptel AG',
0x000CC5: u'Nextlink Co., Ltd.',
0x000CC6: u'Ka-Ro electronics GmbH',
0x000CC7: u'Intelligent Computer Solutions Inc.',
0x000CC8: u'Xytronix Research & Design, Inc.',
0x000CC9: u'ILWOO DATA & TECHNOLOGY CO.,LTD',
0x000CCA: u'Hitachi Global Storage Technologies',
0x000CCB: u'Design Combus Ltd',
0x000CCC: u'Aeroscout Ltd.',
0x000CCD: u'IEC - TC57',
0x000CCE: u'Cisco Systems',
0x000CCF: u'Cisco Systems',
0x000CD0: u'Symetrix',
0x000CD1: u'SFOM Technology Corp.',
0x000CD2: u'Schaffner EMV AG',
0x000CD3: u'Prettl Elektronik Radeberg GmbH',
0x000CD4: u'Positron Public Safety Systems inc.',
0x000CD5: u'Passave Inc.',
0x000CD6: u'PARTNER TECH',
0x000CD7: u'Nallatech Ltd',
0x000CD8: u'M. K. Juchheim GmbH & Co',
0x000CD9: u'Itcare Co., Ltd',
0x000CDA: u'FreeHand Systems, Inc.',
0x000CDB: u'Foundry Networks',
0x000CDC: u'BECS Technology, Inc',
0x000CDD: u'AOS Technologies AG',
0x000CDE: u'ABB STOTZ-KONTAKT GmbH',
0x000CDF: u'PULNiX America, Inc',
0x000CE0: u'Trek Diagnostics Inc.',
0x000CE1: u'The Open Group',
0x000CE2: u'Rolls-Royce',
0x000CE3: u'Option International N.V.',
0x000CE4: u'NeuroCom International, Inc.',
0x000CE5: u'Motorola BCS',
0x000CE6: u'Meru Networks Inc',
0x000CE7: u'MediaTek Inc.',
0x000CE8: u'GuangZhou AnJuBao Co., Ltd',
0x000CE9: u'BLOOMBERG L.P.',
0x000CEA: u'aphona Kommunikationssysteme',
0x000CEB: u'CNMP Networks, Inc.',
0x000CEC: u'Spectracom Corp.',
0x000CED: u'Real Digital Media',
0x000CEE: u'jp-embedded',
0x000CEF: u'Open Networks Engineering Ltd',
0x000CF0: u'M & N GmbH',
0x000CF1: u'Intel Corporation',
0x000CF2: u'GAMESA EÓLICA',
0x000CF3: u'CALL IMAGE SA',
0x000CF4: u'AKATSUKI ELECTRIC MFG.CO.,LTD.',
0x000CF5: u'InfoExpress',
0x000CF6: u'Sitecom Europe BV',
0x000CF7: u'Nortel Networks',
0x000CF8: u'Nortel Networks',
0x000CF9: u'ITT Flygt AB',
0x000CFA: u'Digital Systems Corp',
0x000CFB: u'Korea Network Systems',
0x000CFC: u'S2io Technologies Corp',
0x000CFD: u'PRIVATE',
0x000CFE: u'Grand Electronic Co., Ltd',
0x000CFF: u'MRO-TEK LIMITED',
0x000D00: u'Seaway Networks Inc.',
0x000D01: u'P&E Microcomputer Systems, Inc.',
0x000D02: u'NEC AccessTechnica,Ltd',
0x000D03: u'Matrics, Inc.',
0x000D04: u'Foxboro Eckardt Development GmbH',
0x000D05: u'cybernet manufacturing inc.',
0x000D06: u'Compulogic Limited',
0x000D07: u'Calrec Audio Ltd',
0x000D08: u'AboveCable, Inc.',
0x000D09: u'Yuehua(Zhuhai) Electronic CO. LTD',
0x000D0A: u'Projectiondesign as',
0x000D0B: u'Buffalo Inc.',
0x000D0C: u'MDI Security Systems',
0x000D0D: u'ITSupported, LLC',
0x000D0E: u'Inqnet Systems, Inc.',
0x000D0F: u'Finlux Ltd',
0x000D10: u'Embedtronics Oy',
0x000D11: u'DENTSPLY - Gendex',
0x000D12: u'AXELL Corporation',
0x000D13: u'Wilhelm Rutenbeck GmbH&Co.',
0x000D14: u'Vtech Innovation LP dba Advanced American Telephones',
0x000D15: u'Voipac s.r.o.',
0x000D16: u'UHS Systems Pty Ltd',
0x000D17: u'Turbo Networks Co.Ltd',
0x000D18: u'Sunitec Enterprise Co., Ltd.',
0x000D19: u'ROBE Show lighting',
0x000D1A: u'Mustek System Inc.',
0x000D1B: u'Kyoto Electronics Manufacturing Co., Ltd.',
0x000D1C: u'I2E TELECOM',
0x000D1D: u'HIGH-TEK HARNESS ENT. CO., LTD.',
0x000D1E: u'Control Techniques',
0x000D1F: u'AV Digital',
0x000D20: u'ASAHIKASEI TECHNOSYSTEM CO.,LTD.',
0x000D21: u'WISCORE Inc.',
0x000D22: u'Unitronics',
0x000D23: u'Smart Solution, Inc',
0x000D24: u'SENTEC E&E CO., LTD.',
0x000D25: u'SANDEN CORPORATION',
0x000D26: u'Primagraphics Limited',
0x000D27: u'MICROPLEX Printware AG',
0x000D28: u'Cisco',
0x000D29: u'Cisco',
0x000D2A: u'Scanmatic AS',
0x000D2B: u'Racal Instruments',
0x000D2C: u'Patapsco Designs Ltd',
0x000D2D: u'NCT Deutschland GmbH',
0x000D2E: u'Matsushita Avionics Systems Corporation',
0x000D2F: u'AIN Comm.Tech.Co., LTD',
0x000D30: u'IceFyre Semiconductor',
0x000D31: u'Compellent Technologies, Inc.',
0x000D32: u'DispenseSource, Inc.',
0x000D33: u'Prediwave Corp.',
0x000D34: u'Shell International Exploration and Production, Inc.',
0x000D35: u'PAC International Ltd',
0x000D36: u'Wu Han Routon Electronic Co., Ltd',
0x000D37: u'WIPLUG',
0x000D38: u'NISSIN INC.',
0x000D39: u'Network Electronics',
0x000D3A: u'Microsoft Corp.',
0x000D3B: u'Microelectronics Technology Inc.',
0x000D3C: u'i.Tech Dynamic Ltd',
0x000D3D: u'Hammerhead Systems, Inc.',
0x000D3E: u'APLUX Communications Ltd.',
0x000D3F: u'VXI Technology',
0x000D40: u'Verint Loronix Video Solutions',
0x000D41: u'Siemens AG ICM MP UC RD IT KLF1',
0x000D42: u'Newbest Development Limited',
0x000D43: u'DRS Tactical Systems Inc.',
0x000D44: u'PRIVATE',
0x000D45: u'Tottori SANYO Electric Co., Ltd.',
0x000D46: u'SSD Drives, Inc.',
0x000D47: u'Collex',
0x000D48: u'AEWIN Technologies Co., Ltd.',
0x000D49: u'Triton Systems of Delaware, Inc.',
0x000D4A: u'Steag ETA-Optik',
0x000D4B: u'Roku, LLC',
0x000D4C: u'Outline Electronics Ltd.',
0x000D4D: u'Ninelanes',
0x000D4E: u'NDR Co.,LTD.',
0x000D4F: u'Kenwood Corporation',
0x000D50: u'Galazar Networks',
0x000D51: u'DIVR Systems, Inc.',
0x000D52: u'Comart system',
0x000D53: u'Beijing 5w Communication Corp.',
0x000D54: u'3Com Europe Ltd',
0x000D55: u'SANYCOM Technology Co.,Ltd',
0x000D56: u'Dell PCBA Test',
0x000D57: u'Fujitsu I-Network Systems Limited.',
0x000D58: u'PRIVATE',
0x000D59: u'Amity Systems, Inc.',
0x000D5A: u'Tiesse SpA',
0x000D5B: u'Smart Empire Investments Limited',
0x000D5C: u'Robert Bosch GmbH, VT-ATMO',
0x000D5D: u'Raritan Computer, Inc',
0x000D5E: u'NEC CustomTechnica, Ltd.',
0x000D5F: u'Minds Inc',
0x000D60: u'IBM Corporation',
0x000D61: u'Giga-Byte Technology Co., Ltd.',
0x000D62: u'Funkwerk Dabendorf GmbH',
0x000D63: u'DENT Instruments, Inc.',
0x000D64: u'COMAG Handels AG',
0x000D65: u'Cisco Systems',
0x000D66: u'Cisco Systems',
0x000D67: u'BelAir Networks Inc.',
0x000D68: u'Vinci Systems, Inc.',
0x000D69: u'TMT&D Corporation',
0x000D6A: u'Redwood Technologies LTD',
0x000D6B: u'Mita-Teknik A/S',
0x000D6C: u'M-Audio',
0x000D6D: u'K-Tech Devices Corp.',
0x000D6E: u'K-Patents Oy',
0x000D6F: u'Ember Corporation',
0x000D70: u'Datamax Corporation',
0x000D71: u'boca systems',
0x000D72: u'2Wire, Inc',
0x000D73: u'Technical Support, Inc.',
0x000D74: u'Sand Network Systems, Inc.',
0x000D75: u'Kobian Pte Ltd - Taiwan Branch',
0x000D76: u'Hokuto Denshi Co,. Ltd.',
0x000D77: u'FalconStor Software',
0x000D78: u'Engineering & Security',
0x000D79: u'Dynamic Solutions Co,.Ltd.',
0x000D7A: u'DiGATTO Asia Pacific Pte Ltd',
0x000D7B: u'Consensys Computers Inc.',
0x000D7C: u'Codian Ltd',
0x000D7D: u'Afco Systems',
0x000D7E: u'Axiowave Networks, Inc.',
0x000D7F: u'MIDAS COMMUNICATION TECHNOLOGIES PTE LTD ( Foreign Branch)',
0x000D80: u'Online Development Inc',
0x000D81: u'Pepperl+Fuchs GmbH',
0x000D82: u'PHS srl',
0x000D83: u'Sanmina-SCI Hungary Ltd.',
0x000D84: u'Makus Inc.',
0x000D85: u'Tapwave, Inc.',
0x000D86: u'Huber + Suhner AG',
0x000D87: u'Elitegroup Computer System Co. (ECS)',
0x000D88: u'D-Link Corporation',
0x000D89: u'Bils Technology Inc',
0x000D8A: u'Winners Electronics Co., Ltd.',
0x000D8B: u'T&D Corporation',
0x000D8C: u'Shanghai Wedone Digital Ltd. CO.',
0x000D8D: u'ProLinx Communication Gateways, Inc.',
0x000D8E: u'Koden Electronics Co., Ltd.',
0x000D8F: u'King Tsushin Kogyo Co., LTD.',
0x000D90: u'Factum Electronics AB',
0x000D91: u'Eclipse (HQ Espana) S.L.',
0x000D92: u'Arima Communication Corporation',
0x000D93: u'Apple Computer',
0x000D94: u'AFAR Communications,Inc',
0x000D95: u'Opti-cell, Inc.',
0x000D96: u'Vtera Technology Inc.',
0x000D97: u'Tropos Networks, Inc.',
0x000D98: u'S.W.A.C. Schmitt-Walter Automation Consult GmbH',
0x000D99: u'Orbital Sciences Corp.; Launch Systems Group',
0x000D9A: u'INFOTEC LTD',
0x000D9B: u'Heraeus Electro-Nite International N.V.',
0x000D9C: u'Elan GmbH & Co KG',
0x000D9D: u'Hewlett Packard',
0x000D9E: u'TOKUDEN OHIZUMI SEISAKUSYO Co.,Ltd.',
0x000D9F: u'RF Micro Devices',
0x000DA0: u'NEDAP N.V.',
0x000DA1: u'MIRAE ITS Co.,LTD.',
0x000DA2: u'Infrant Technologies, Inc.',
0x000DA3: u'Emerging Technologies Limited',
0x000DA4: u'DOSCH & AMAND SYSTEMS AG',
0x000DA5: u'Fabric7 Systems, Inc',
0x000DA6: u'Universal Switching Corporation',
0x000DA7: u'PRIVATE',
0x000DA8: u'Teletronics Technology Corporation',
0x000DA9: u'T.E.A.M. S.L.',
0x000DAA: u'S.A.Tehnology co.,Ltd.',
0x000DAB: u'Parker Hannifin GmbH Electromechanical Division Europe',
0x000DAC: u'Japan CBM Corporation',
0x000DAD: u'Dataprobe Inc',
0x000DAE: u'SAMSUNG HEAVY INDUSTRIES CO., LTD.',
0x000DAF: u'Plexus Corp (UK) Ltd',
0x000DB0: u'Olym-tech Co.,Ltd.',
0x000DB1: u'Japan Network Service Co., Ltd.',
0x000DB2: u'Ammasso, Inc.',
0x000DB3: u'SDO Communication Corperation',
0x000DB4: u'NETASQ',
0x000DB5: u'GLOBALSAT TECHNOLOGY CORPORATION',
0x000DB6: u'Teknovus, Inc.',
0x000DB7: u'SANKO ELECTRIC CO,.LTD',
0x000DB8: u'SCHILLER AG',
0x000DB9: u'PC Engines GmbH',
0x000DBA: u'Océ Document Technologies GmbH',
0x000DBB: u'Nippon Dentsu Co.,Ltd.',
0x000DBC: u'Cisco Systems',
0x000DBD: u'Cisco Systems',
0x000DBE: u'Bel Fuse Europe Ltd.,UK',
0x000DBF: u'TekTone Sound & Signal Mfg., Inc.',
0x000DC0: u'Spagat AS',
0x000DC1: u'SafeWeb Inc',
0x000DC2: u'PRIVATE',
0x000DC3: u'First Communication, Inc.',
0x000DC4: u'Emcore Corporation',
0x000DC5: u'EchoStar International Corporation',
0x000DC6: u'DigiRose Technology Co., Ltd.',
0x000DC7: u'COSMIC ENGINEERING INC.',
0x000DC8: u'AirMagnet, Inc',
0x000DC9: u'THALES Elektronik Systeme GmbH',
0x000DCA: u'Tait Electronics',
0x000DCB: u'Petcomkorea Co., Ltd.',
0x000DCC: u'NEOSMART Corp.',
0x000DCD: u'GROUPE TXCOM',
0x000DCE: u'Dynavac Technology Pte Ltd',
0x000DCF: u'Cidra Corp.',
0x000DD0: u'TetraTec Instruments GmbH',
0x000DD1: u'Stryker Corporation',
0x000DD2: u'Simrad Optronics ASA',
0x000DD3: u'SAMWOO Telecommunication Co.,Ltd.',
0x000DD4: u'Revivio Inc.',
0x000DD5: u'O\'RITE TECHNOLOGY CO.,LTD',
0x000DD6: u'ITI LTD',
0x000DD7: u'Bright',
0x000DD8: u'BBN',
0x000DD9: u'Anton Paar GmbH',
0x000DDA: u'ALLIED TELESIS K.K.',
0x000DDB: u'AIRWAVE TECHNOLOGIES INC.',
0x000DDC: u'VAC',
0x000DDD: u'PROFÝLO TELRA ELEKTRONÝK SANAYÝ VE TÝCARET A.Þ.',
0x000DDE: u'Joyteck Co., Ltd.',
0x000DDF: u'Japan Image & Network Inc.',
0x000DE0: u'ICPDAS Co.,LTD',
0x000DE1: u'Control Products, Inc.',
0x000DE2: u'CMZ Sistemi Elettronici',
0x000DE3: u'AT Sweden AB',
0x000DE4: u'DIGINICS, Inc.',
0x000DE5: u'Samsung Thales',
0x000DE6: u'YOUNGBO ENGINEERING CO.,LTD',
0x000DE7: u'Snap-on OEM Group',
0x000DE8: u'Nasaco Electronics Pte. Ltd',
0x000DE9: u'Napatech Aps',
0x000DEA: u'Kingtel Telecommunication Corp.',
0x000DEB: u'CompXs Limited',
0x000DEC: u'Cisco Systems',
0x000DED: u'Cisco Systems',
0x000DEE: u'Andrew RF Power Amplifier Group',
0x000DEF: u'Soc. Coop. Bilanciai',
0x000DF0: u'QCOM TECHNOLOGY INC.',
0x000DF1: u'IONIX INC.',
0x000DF2: u'PRIVATE',
0x000DF3: u'Asmax Solutions',
0x000DF4: u'Watertek Co.',
0x000DF5: u'Teletronics International Inc.',
0x000DF6: u'Technology Thesaurus Corp.',
0x000DF7: u'Space Dynamics Lab',
0x000DF8: u'ORGA Kartensysteme GmbH',
0x000DF9: u'NDS Limited',
0x000DFA: u'Micro Control Systems Ltd.',
0x000DFB: u'Komax AG',
0x000DFC: u'ITFOR Inc. resarch and development',
0x000DFD: u'Huges Hi-Tech Inc.,',
0x000DFE: u'Hauppauge Computer Works, Inc.',
0x000DFF: u'CHENMING MOLD INDUSTRY CORP.',
0x000E00: u'Atrie',
0x000E01: u'ASIP Technologies Inc.',
0x000E02: u'Advantech AMT Inc.',
0x000E03: u'Emulex',
0x000E04: u'CMA/Microdialysis AB',
0x000E05: u'WIRELESS MATRIX CORP.',
0x000E06: u'Team Simoco Ltd',
0x000E07: u'Sony Ericsson Mobile Communications AB',
0x000E08: u'Sipura Technology, Inc.',
0x000E09: u'Shenzhen Coship Software Co.,LTD.',
0x000E0A: u'SAKUMA DESIGN OFFICE',
0x000E0B: u'Netac Technology Co., Ltd.',
0x000E0C: u'Intel Corporation',
0x000E0D: u'HESCH Schröder GmbH',
0x000E0E: u'ESA elettronica S.P.A.',
0x000E0F: u'ERMME',
0x000E10: u'PRIVATE',
0x000E11: u'BDT Büro- und Datentechnik GmbH & Co. KG',
0x000E12: u'Adaptive Micro Systems Inc.',
0x000E13: u'Accu-Sort Systems inc.',
0x000E14: u'Visionary Solutions, Inc.',
0x000E15: u'Tadlys LTD',
0x000E16: u'SouthWing',
0x000E17: u'PRIVATE',
0x000E18: u'MyA Technology',
0x000E19: u'LogicaCMG Pty Ltd',
0x000E1A: u'JPS Communications',
0x000E1B: u'IAV GmbH',
0x000E1C: u'Hach Company',
0x000E1D: u'ARION Technology Inc.',
0x000E1E: u'PRIVATE',
0x000E1F: u'TCL Networks Equipment Co., Ltd.',
0x000E20: u'PalmSource, Inc.',
0x000E21: u'MTU Friedrichshafen GmbH',
0x000E22: u'PRIVATE',
0x000E23: u'Incipient, Inc.',
0x000E24: u'Huwell Technology Inc.',
0x000E25: u'Hannae Technology Co., Ltd',
0x000E26: u'Gincom Technology Corp.',
0x000E27: u'Crere Networks, Inc.',
0x000E28: u'Dynamic Ratings P/L',
0x000E29: u'Shester Communications Inc',
0x000E2A: u'PRIVATE',
0x000E2B: u'Safari Technologies',
0x000E2C: u'Netcodec co.',
0x000E2D: u'Hyundai Digital Technology Co.,Ltd.',
0x000E2E: u'Edimax Technology Co., Ltd.',
0x000E2F: u'Disetronic Medical Systems AG',
0x000E30: u'AERAS Networks, Inc.',
0x000E31: u'Olympus BioSystems GmbH',
0x000E32: u'Kontron Medical',
0x000E33: u'Shuko Electronics Co.,Ltd',
0x000E34: u'NexGen City, LP',
0x000E35: u'Intel Corp',
0x000E36: u'HEINESYS, Inc.',
0x000E37: u'Harms & Wende GmbH & Co.KG',
0x000E38: u'Cisco Systems',
0x000E39: u'Cisco Systems',
0x000E3A: u'Cirrus Logic',
0x000E3B: u'Hawking Technologies, Inc.',
0x000E3C: u'TransAct Technoloiges Inc.',
0x000E3D: u'Televic N.V.',
0x000E3E: u'Sun Optronics Inc',
0x000E3F: u'Soronti, Inc.',
0x000E40: u'Nortel Networks',
0x000E41: u'NIHON MECHATRONICS CO.,LTD.',
0x000E42: u'Motic Incoporation Ltd.',
0x000E43: u'G-Tek Electronics Sdn. Bhd.',
0x000E44: u'Digital 5, Inc.',
0x000E45: u'Beijing Newtry Electronic Technology Ltd',
0x000E46: u'Niigata Seimitsu Co.,Ltd.',
0x000E47: u'NCI System Co.,Ltd.',
0x000E48: u'Lipman TransAction Solutions',
0x000E49: u'Forsway Scandinavia AB',
0x000E4A: u'Changchun Huayu WEBPAD Co.,LTD',
0x000E4B: u'atrium c and i',
0x000E4C: u'Bermai Inc.',
0x000E4D: u'Numesa Inc.',
0x000E4E: u'Waveplus Technology Co., Ltd.',
0x000E4F: u'Trajet GmbH',
0x000E50: u'Thomson Telecom Belgium',
0x000E51: u'tecna elettronica srl',
0x000E52: u'Optium Corporation',
0x000E53: u'AV TECH CORPORATION',
0x000E54: u'AlphaCell Wireless Ltd.',
0x000E55: u'AUVITRAN',
0x000E56: u'4G Systems GmbH',
0x000E57: u'Iworld Networking, Inc.',
0x000E58: u'Sonos, Inc.',
0x000E59: u'SAGEM SA',
0x000E5A: u'TELEFIELD inc.',
0x000E5B: u'ParkerVision - Direct2Data',
0x000E5C: u'Motorola BCS',
0x000E5D: u'Triple Play Technologies A/S',
0x000E5E: u'Beijing Raisecom Science & Technology Development Co.,Ltd',
0x000E5F: u'activ-net GmbH & Co. KG',
0x000E60: u'360SUN Digital Broadband Corporation',
0x000E61: u'MICROTROL LIMITED',
0x000E62: u'Nortel Networks',
0x000E63: u'Lemke Diagnostics GmbH',
0x000E64: u'Elphel, Inc',
0x000E65: u'TransCore',
0x000E66: u'Hitachi Advanced Digital, Inc.',
0x000E67: u'Eltis Microelectronics Ltd.',
0x000E68: u'E-TOP Network Technology Inc.',
0x000E69: u'China Electric Power Research Institute',
0x000E6A: u'3COM EUROPE LTD',
0x000E6B: u'Janitza electronics GmbH',
0x000E6C: u'Device Drivers Limited',
0x000E6D: u'Murata Manufacturing Co., Ltd.',
0x000E6E: u'MICRELEC ELECTRONICS S.A',
0x000E6F: u'IRIS Corporation Berhad',
0x000E70: u'in2 Networks',
0x000E71: u'Gemstar Technology Development Ltd.',
0x000E72: u'CTS electronics',
0x000E73: u'Tpack A/S',
0x000E74: u'Solar Telecom. Tech',
0x000E75: u'New York Air Brake Corp.',
0x000E76: u'GEMSOC INNOVISION INC.',
0x000E77: u'Decru, Inc.',
0x000E78: u'Amtelco',
0x000E79: u'Ample Communications Inc.',
0x000E7A: u'GemWon Communications Co., Ltd.',
0x000E7B: u'Toshiba',
0x000E7C: u'Televes S.A.',
0x000E7D: u'Electronics Line 3000 Ltd.',
0x000E7E: u'Comprog Oy',
0x000E7F: u'Hewlett Packard',
0x000E80: u'Thomson Technology Inc',
0x000E81: u'Devicescape Software, Inc.',
0x000E82: u'Commtech Wireless',
0x000E83: u'Cisco Systems',
0x000E84: u'Cisco Systems',
0x000E85: u'Catalyst Enterprises, Inc.',
0x000E86: u'Alcatel North America',
0x000E87: u'adp Gauselmann GmbH',
0x000E88: u'VIDEOTRON CORP.',
0x000E89: u'CLEMATIC',
0x000E8A: u'Avara Technologies Pty. Ltd.',
0x000E8B: u'Astarte Technology Co, Ltd.',
0x000E8C: u'Siemens AG A&D ET',
0x000E8D: u'Systems in Progress Holding GmbH',
0x000E8E: u'SparkLAN Communications, Inc.',
0x000E8F: u'Sercomm Corp.',
0x000E90: u'PONICO CORP.',
0x000E91: u'Northstar Technologies',
0x000E92: u'Millinet Co., Ltd.',
0x000E93: u'Milénio 3 Sistemas Electrónicos, Lda.',
0x000E94: u'Maas International BV',
0x000E95: u'Fujiya Denki Seisakusho Co.,Ltd.',
0x000E96: u'Cubic Defense Applications, Inc.',
0x000E97: u'Ultracker Technology CO., Inc',
0x000E98: u'Vitec CC, INC.',
0x000E99: u'Spectrum Digital, Inc',
0x000E9A: u'BOE TECHNOLOGY GROUP CO.,LTD',
0x000E9B: u'Ambit Microsystems Corporation',
0x000E9C: u'Pemstar',
0x000E9D: u'Video Networks Ltd',
0x000E9E: u'Topfield Co., Ltd',
0x000E9F: u'TEMIC SDS GmbH',
0x000EA0: u'NetKlass Technology Inc.',
0x000EA1: u'Formosa Teletek Corporation',
0x000EA2: u'CyberGuard Corporation',
0x000EA3: u'CNCR-IT CO.,LTD,HangZhou P.R.CHINA',
0x000EA4: u'Certance Inc.',
0x000EA5: u'BLIP Systems',
0x000EA6: u'ASUSTEK COMPUTER INC.',
0x000EA7: u'Endace Inc Ltd.',
0x000EA8: u'United Technologists Europe Limited',
0x000EA9: u'Shanghai Xun Shi Communications Equipment Ltd. Co.',
0x000EAA: u'Scalent Systems, Inc.',
0x000EAB: u'OctigaBay Systems Corporation',
0x000EAC: u'MINTRON ENTERPRISE CO., LTD.',
0x000EAD: u'Metanoia Technologies, Inc.',
0x000EAE: u'GAWELL TECHNOLOGIES CORP.',
0x000EAF: u'CASTEL',
0x000EB0: u'Solutions Radio BV',
0x000EB1: u'Newcotech,Ltd',
0x000EB2: u'Micro-Research Finland Oy',
0x000EB3: u'LeftHand Networks',
0x000EB4: u'GUANGZHOU GAOKE COMMUNICATIONS TECHNOLOGY CO.LTD.',
0x000EB5: u'Ecastle Electronics Co., Ltd.',
0x000EB6: u'Riverbed Technology, Inc.',
0x000EB7: u'Knovative, Inc.',
0x000EB8: u'Iiga co.,Ltd',
0x000EB9: u'HASHIMOTO Electronics Industry Co.,Ltd.',
0x000EBA: u'HANMI SEMICONDUCTOR CO., LTD.',
0x000EBB: u'Everbee Networks',
0x000EBC: u'Cullmann GmbH',
0x000EBD: u'Burdick, a Quinton Compny',
0x000EBE: u'B&B Electronics Manufacturing Co.',
0x000EBF: u'Remsdaq Limited',
0x000EC0: u'Nortel Networks',
0x000EC1: u'MYNAH Technologies',
0x000EC2: u'Lowrance Electronics, Inc.',
0x000EC3: u'Logic Controls, Inc.',
0x000EC4: u'Iskra Transmission d.d.',
0x000EC5: u'Digital Multitools Inc',
0x000EC6: u'ASIX ELECTRONICS CORP.',
0x000EC7: u'Motorola Korea',
0x000EC8: u'Zoran Corporation',
0x000EC9: u'YOKO Technology Corp.',
0x000ECA: u'WTSS Inc',
0x000ECB: u'VineSys Technology',
0x000ECC: u'Tableau',
0x000ECD: u'SKOV A/S',
0x000ECE: u'S.I.T.T.I. S.p.A.',
0x000ECF: u'PROFIBUS Nutzerorganisation e.V.',
0x000ED0: u'Privaris, Inc.',
0x000ED1: u'Osaka Micro Computer.',
0x000ED2: u'Filtronic plc',
0x000ED3: u'Epicenter, Inc.',
0x000ED4: u'CRESITT INDUSTRIE',
0x000ED5: u'COPAN Systems Inc.',
0x000ED6: u'Cisco Systems',
0x000ED7: u'Cisco Systems',
0x000ED8: u'Aktino, Inc.',
0x000ED9: u'Aksys, Ltd.',
0x000EDA: u'C-TECH UNITED CORP.',
0x000EDB: u'XiNCOM Corp.',
0x000EDC: u'Tellion INC.',
0x000EDD: u'SHURE INCORPORATED',
0x000EDE: u'REMEC, Inc.',
0x000EDF: u'PLX Technology',
0x000EE0: u'Mcharge',
0x000EE1: u'ExtremeSpeed Inc.',
0x000EE2: u'Custom Engineering S.p.A.',
0x000EE3: u'Chiyu Technology Co.,Ltd',
0x000EE4: u'BOE TECHNOLOGY GROUP CO.,LTD',
0x000EE5: u'bitWallet, Inc.',
0x000EE6: u'Adimos Systems LTD',
0x000EE7: u'AAC ELECTRONICS CORP.',
0x000EE8: u'zioncom',
0x000EE9: u'WayTech Development, Inc.',
0x000EEA: u'Shadong Luneng Jicheng Electronics,Co.,Ltd',
0x000EEB: u'Sandmartin(zhong shan)Electronics Co.,Ltd',
0x000EEC: u'Orban',
0x000EED: u'Nokia Danmark A/S',
0x000EEE: u'Muco Industrie BV',
0x000EEF: u'PRIVATE',
0x000EF0: u'Festo AG & Co. KG',
0x000EF1: u'EZQUEST INC.',
0x000EF2: u'Infinico Corporation',
0x000EF3: u'Smarthome',
0x000EF4: u'Shenzhen Kasda Digital Technology Co.,Ltd',
0x000EF5: u'iPAC Technology Co., Ltd.',
0x000EF6: u'E-TEN Information Systems Co., Ltd.',
0x000EF7: u'Vulcan Portals Inc',
0x000EF8: u'SBC ASI',
0x000EF9: u'REA Elektronik GmbH',
0x000EFA: u'Optoway Technology Incorporation',
0x000EFB: u'Macey Enterprises',
0x000EFC: u'JTAG Technologies B.V.',
0x000EFD: u'FUJI PHOTO OPTICAL CO., LTD.',
0x000EFE: u'EndRun Technologies LLC',
0x000EFF: u'Megasolution,Inc.',
0x000F00: u'Legra Systems, Inc.',
0x000F01: u'DIGITALKS INC',
0x000F02: u'Digicube Technology Co., Ltd',
0x000F03: u'COM&C CO., LTD',
0x000F04: u'cim-usa inc',
0x000F05: u'3B SYSTEM INC.',
0x000F06: u'Nortel Networks',
0x000F07: u'Mangrove Systems, Inc.',
0x000F08: u'Indagon Oy',
0x000F09: u'PRIVATE',
0x000F0A: u'Clear Edge Networks',
0x000F0B: u'Kentima Technologies AB',
0x000F0C: u'SYNCHRONIC ENGINEERING',
0x000F0D: u'Hunt Electronic Co., Ltd.',
0x000F0E: u'WaveSplitter Technologies, Inc.',
0x000F0F: u'Real ID Technology Co., Ltd.',
0x000F10: u'RDM Corporation',
0x000F11: u'Prodrive B.V.',
0x000F12: u'Panasonic AVC Networks Germany GmbH',
0x000F13: u'Nisca corporation',
0x000F14: u'Mindray Co., Ltd.',
0x000F15: u'Kjaerulff1 A/S',
0x000F16: u'JAY HOW TECHNOLOGY CO.,',
0x000F17: u'Insta Elektro GmbH',
0x000F18: u'Industrial Control Systems',
0x000F19: u'Guidant Corporation',
0x000F1A: u'Gaming Support B.V.',
0x000F1B: u'Ego Systems Inc.',
0x000F1C: u'DigitAll World Co., Ltd',
0x000F1D: u'Cosmo Techs Co., Ltd.',
0x000F1E: u'Chengdu KT Electric Co.of High & New Technology',
0x000F1F: u'WW PCBA Test',
0x000F20: u'Hewlett Packard',
0x000F21: u'Scientific Atlanta, Inc',
0x000F22: u'Helius, Inc.',
0x000F23: u'Cisco Systems',
0x000F24: u'Cisco Systems',
0x000F25: u'AimValley B.V.',
0x000F26: u'WorldAccxx LLC',
0x000F27: u'TEAL Electronics, Inc.',
0x000F28: u'Itronix Corporation',
0x000F29: u'Augmentix Corporation',
0x000F2A: u'Cableware Electronics',
0x000F2B: u'GREENBELL SYSTEMS',
0x000F2C: u'Uplogix, Inc.',
0x000F2D: u'CHUNG-HSIN ELECTRIC & MACHINERY MFG.CORP.',
0x000F2E: u'Megapower International Corp.',
0x000F2F: u'W-LINX TECHNOLOGY CO., LTD.',
0x000F30: u'Raza Microelectronics Inc',
0x000F31: u'Prosilica',
0x000F32: u'LuTong Electronic Technology Co.,Ltd',
0x000F33: u'DUALi Inc.',
0x000F34: u'Cisco Systems',
0x000F35: u'Cisco Systems',
0x000F36: u'Accurate Techhnologies, Inc.',
0x000F37: u'Xambala Incorporated',
0x000F38: u'Netstar',
0x000F39: u'IRIS SENSORS',
0x000F3A: u'HISHARP',
0x000F3B: u'Fuji System Machines Co., Ltd.',
0x000F3C: u'Endeleo Limited',
0x000F3D: u'D-Link Corporation',
0x000F3E: u'CardioNet, Inc',
0x000F3F: u'Big Bear Networks',
0x000F40: u'Optical Internetworking Forum',
0x000F41: u'Zipher Ltd',
0x000F42: u'Xalyo Systems',
0x000F43: u'Wasabi Systems Inc.',
0x000F44: u'Tivella Inc.',
0x000F45: u'Stretch, Inc.',
0x000F46: u'SINAR AG',
0x000F47: u'ROBOX SPA',
0x000F48: u'Polypix Inc.',
0x000F49: u'Northover Solutions Limited',
0x000F4A: u'Kyushu-kyohan co.,ltd',
0x000F4B: u'Katana Technology',
0x000F4C: u'Elextech INC',
0x000F4D: u'Centrepoint Technologies Inc.',
0x000F4E: u'Cellink',
0x000F4F: u'Cadmus Technology Ltd',
0x000F50: u'Baxall Limited',
0x000F51: u'Azul Systems, Inc.',
0x000F52: u'YORK Refrigeration, Marine & Controls',
0x000F53: u'Solarflare Communications Inc',
0x000F54: u'Entrelogic Corporation',
0x000F55: u'Datawire Communication Networks Inc.',
0x000F56: u'Continuum Photonics Inc',
0x000F57: u'CABLELOGIC Co., Ltd.',
0x000F58: u'Adder Technology Limited',
0x000F59: u'Phonak Communications AG',
0x000F5A: u'Peribit Networks',
0x000F5B: u'Delta Information Systems, Inc.',
0x000F5C: u'Day One Digital Media Limited',
0x000F5D: u'42Networks AB',
0x000F5E: u'Veo',
0x000F5F: u'Nicety Technologies Inc. (NTS)',
0x000F60: u'Lifetron Co.,Ltd',
0x000F61: u'Kiwi Networks',
0x000F62: u'Alcatel Bell Space N.V.',
0x000F63: u'Obzerv Technologies',
0x000F64: u'D&R Electronica Weesp BV',
0x000F65: u'icube Corp.',
0x000F66: u'Cisco-Linksys',
0x000F67: u'West Instruments',
0x000F68: u'Vavic Network Technology, Inc.',
0x000F69: u'SEW Eurodrive GmbH & Co. KG',
0x000F6A: u'Nortel Networks',
0x000F6B: u'GateWare Communications GmbH',
0x000F6C: u'ADDI-DATA GmbH',
0x000F6D: u'Midas Engineering',
0x000F6E: u'BBox',
0x000F6F: u'FTA Communication Technologies',
0x000F70: u'Wintec Industries, inc.',
0x000F71: u'Sanmei Electronics Co.,Ltd',
0x000F72: u'Sandburst',
0x000F73: u'Rockwell Samsung Automation',
0x000F74: u'Qamcom Technology AB',
0x000F75: u'First Silicon Solutions',
0x000F76: u'Digital Keystone, Inc.',
0x000F77: u'DENTUM CO.,LTD',
0x000F78: u'Datacap Systems Inc',
0x000F79: u'Bluetooth Interest Group Inc.',
0x000F7A: u'BeiJing NuQX Technology CO.,LTD',
0x000F7B: u'Arce Sistemas, S.A.',
0x000F7C: u'ACTi Corporation',
0x000F7D: u'Xirrus',
0x000F7E: u'Ablerex Electronics Co., LTD',
0x000F7F: u'UBSTORAGE Co.,Ltd.',
0x000F80: u'Trinity Security Systems,Inc.',
0x000F81: u'Secure Info Imaging',
0x000F82: u'Mortara Instrument, Inc.',
0x000F83: u'Brainium Technologies Inc.',
0x000F84: u'Astute Networks, Inc.',
0x000F85: u'ADDO-Japan Corporation',
0x000F86: u'Research In Motion Limited',
0x000F87: u'Maxcess International',
0x000F88: u'AMETEK, Inc.',
0x000F89: u'Winnertec System Co., Ltd.',
0x000F8A: u'WideView',
0x000F8B: u'Orion MultiSystems Inc',
0x000F8C: u'Gigawavetech Pte Ltd',
0x000F8D: u'FAST TV-Server AG',
0x000F8E: u'DONGYANG TELECOM CO.,LTD.',
0x000F8F: u'Cisco Systems',
0x000F90: u'Cisco Systems',
0x000F91: u'Aerotelecom Co.,Ltd.',
0x000F92: u'Microhard Systems Inc.',
0x000F93: u'Landis+Gyr Ltd.',
0x000F94: u'Genexis',
0x000F95: u'ELECOM Co.,LTD Laneed Division',
0x000F96: u'Critical Telecom Corp.',
0x000F97: u'Avanex Corporation',
0x000F98: u'Avamax Co. Ltd.',
0x000F99: u'APAC opto Electronics Inc.',
0x000F9A: u'Synchrony, Inc.',
0x000F9B: u'Ross Video Limited',
0x000F9C: u'Panduit Corp',
0x000F9D: u'Newnham Research Ltd',
0x000F9E: u'Murrelektronik GmbH',
0x000F9F: u'Motorola BCS',
0x000FA0: u'CANON KOREA BUSINESS SOLUTIONS INC.',
0x000FA1: u'Gigabit Systems Inc.',
0x000FA2: u'Digital Path Networks',
0x000FA3: u'Alpha Networks Inc.',
0x000FA4: u'Sprecher Automation GmbH',
0x000FA5: u'SMP / BWA Technology GmbH',
0x000FA6: u'S2 Security Corporation',
0x000FA7: u'Raptor Networks Technology',
0x000FA8: u'Photometrics, Inc.',
0x000FA9: u'PC Fabrik',
0x000FAA: u'Nexus Technologies',
0x000FAB: u'Kyushu Electronics Systems Inc.',
0x000FAC: u'IEEE 802.11',
0x000FAD: u'FMN communications GmbH',
0x000FAE: u'E2O Communications',
0x000FAF: u'Dialog Inc.',
0x000FB0: u'Compal Electronics,INC.',
0x000FB1: u'Cognio Inc.',
0x000FB2: u'Broadband Pacenet (India) Pvt. Ltd.',
0x000FB3: u'Actiontec Electronics, Inc',
0x000FB4: u'Timespace Technology',
0x000FB5: u'NETGEAR Inc',
0x000FB6: u'Europlex Technologies',
0x000FB7: u'Cavium Networks',
0x000FB8: u'CallURL Inc.',
0x000FB9: u'Adaptive Instruments',
0x000FBA: u'Tevebox AB',
0x000FBB: u'Siemens Networks GmbH & Co. KG',
0x000FBC: u'Onkey Technologies, Inc.',
0x000FBD: u'MRV Communications (Networks) LTD',
0x000FBE: u'e-w/you Inc.',
0x000FBF: u'DGT Sp. z o.o.',
0x000FC0: u'DELCOMp',
0x000FC1: u'WAVE Corporation',
0x000FC2: u'Uniwell Corporation',
0x000FC3: u'PalmPalm Technology, Inc.',
0x000FC4: u'NST co.,LTD.',
0x000FC5: u'KeyMed Ltd',
0x000FC6: u'Eurocom Industries A/S',
0x000FC7: u'Dionica R&D Ltd.',
0x000FC8: u'Chantry Networks',
0x000FC9: u'Allnet GmbH',
0x000FCA: u'A-JIN TECHLINE CO, LTD',
0x000FCB: u'3COM EUROPE LTD',
0x000FCC: u'Netopia, Inc.',
0x000FCD: u'Nortel Networks',
0x000FCE: u'Kikusui Electronics Corp.',
0x000FCF: u'Datawind Research',
0x000FD0: u'ASTRI',
0x000FD1: u'Applied Wireless Identifications Group, Inc.',
0x000FD2: u'EWA Technologies, Inc.',
0x000FD3: u'Digium',
0x000FD4: u'Soundcraft',
0x000FD5: u'Schwechat - RISE',
0x000FD6: u'Sarotech Co., Ltd',
0x000FD7: u'Harman Music Group',
0x000FD8: u'Force, Inc.',
0x000FD9: u'FlexDSL Telecommunications AG',
0x000FDA: u'YAZAKI CORPORATION',
0x000FDB: u'Westell Technologies',
0x000FDC: u'Ueda Japan Radio Co., Ltd.',
0x000FDD: u'SORDIN AB',
0x000FDE: u'Sony Ericsson Mobile Communications AB',
0x000FDF: u'SOLOMON Technology Corp.',
0x000FE0: u'NComputing Co.,Ltd.',
0x000FE1: u'ID DIGITAL CORPORATION',
0x000FE2: u'Hangzhou Huawei-3Com Tech. Co., Ltd.',
0x000FE3: u'Damm Cellular Systems A/S',
0x000FE4: u'Pantech Co.,Ltd',
0x000FE5: u'MERCURY SECURITY CORPORATION',
0x000FE6: u'MBTech Systems, Inc.',
0x000FE7: u'Lutron Electronics Co., Inc.',
0x000FE8: u'Lobos, Inc.',
0x000FE9: u'GW TECHNOLOGIES CO.,LTD.',
0x000FEA: u'Giga-Byte Technology Co.,LTD.',
0x000FEB: u'Cylon Controls',
0x000FEC: u'Arkus Inc.',
0x000FED: u'Anam Electronics Co., Ltd',
0x000FEE: u'XTec, Incorporated',
0x000FEF: u'Thales e-Transactions GmbH',
0x000FF0: u'Sunray Enterprise',
0x000FF1: u'nex-G Systems Pte.Ltd',
0x000FF2: u'Loud Technologies Inc.',
0x000FF3: u'Jung Myoung Communications&Technology',
0x000FF4: u'Guntermann & Drunck GmbH',
0x000FF5: u'GN&S company',
0x000FF6: u'Darfon Electronics Corp.',
0x000FF7: u'Cisco Systems',
0x000FF8: u'Cisco Systems',
0x000FF9: u'Valcretec, Inc.',
0x000FFA: u'Optinel Systems, Inc.',
0x000FFB: u'Nippon Denso Industry Co., Ltd.',
0x000FFC: u'Merit Li-Lin Ent.',
0x000FFD: u'Glorytek Network Inc.',
0x000FFE: u'G-PRO COMPUTER',
0x000FFF: u'Control4',
0x001000: u'CABLE TELEVISION LABORATORIES, INC.',
0x001001: u'MCK COMMUNICATIONS',
0x001002: u'ACTIA',
0x001003: u'IMATRON, INC.',
0x001004: u'THE BRANTLEY COILE COMPANY,INC',
0x001005: u'UEC COMMERCIAL',
0x001006: u'Thales Contact Solutions Ltd.',
0x001007: u'CISCO SYSTEMS, INC.',
0x001008: u'VIENNA SYSTEMS CORPORATION',
0x001009: u'HORO QUARTZ',
0x00100A: u'WILLIAMS COMMUNICATIONS GROUP',
0x00100B: u'CISCO SYSTEMS, INC.',
0x00100C: u'ITO CO., LTD.',
0x00100D: u'CISCO SYSTEMS, INC.',
0x00100E: u'MICRO LINEAR COPORATION',
0x00100F: u'INDUSTRIAL CPU SYSTEMS',
0x001010: u'INITIO CORPORATION',
0x001011: u'CISCO SYSTEMS, INC.',
0x001012: u'PROCESSOR SYSTEMS (I) PVT LTD',
0x001013: u'Kontron',
0x001014: u'CISCO SYSTEMS, INC.',
0x001015: u'OOmon Inc.',
0x001016: u'T.SQWARE',
0x001017: u'MICOS GmbH',
0x001018: u'BROADCOM CORPORATION',
0x001019: u'SIRONA DENTAL SYSTEMS GmbH & Co. KG',
0x00101A: u'PictureTel Corp.',
0x00101B: u'CORNET TECHNOLOGY, INC.',
0x00101C: u'OHM TECHNOLOGIES INTL, LLC',
0x00101D: u'WINBOND ELECTRONICS CORP.',
0x00101E: u'MATSUSHITA ELECTRONIC INSTRUMENTS CORP.',
0x00101F: u'CISCO SYSTEMS, INC.',
0x001020: u'WELCH ALLYN, DATA COLLECTION',
0x001021: u'ENCANTO NETWORKS, INC.',
0x001022: u'SatCom Media Corporation',
0x001023: u'FLOWWISE NETWORKS, INC.',
0x001024: u'NAGOYA ELECTRIC WORKS CO., LTD',
0x001025: u'GRAYHILL INC.',
0x001026: u'ACCELERATED NETWORKS, INC.',
0x001027: u'L-3 COMMUNICATIONS EAST',
0x001028: u'COMPUTER TECHNICA, INC.',
0x001029: u'CISCO SYSTEMS, INC.',
0x00102A: u'ZF MICROSYSTEMS, INC.',
0x00102B: u'UMAX DATA SYSTEMS, INC.',
0x00102C: u'Lasat Networks A/S',
0x00102D: u'HITACHI SOFTWARE ENGINEERING',
0x00102E: u'NETWORK SYSTEMS & TECHNOLOGIES PVT. LTD.',
0x00102F: u'CISCO SYSTEMS, INC.',
0x001030: u'EION Inc.',
0x001031: u'OBJECTIVE COMMUNICATIONS, INC.',
0x001032: u'ALTA TECHNOLOGY',
0x001033: u'ACCESSLAN COMMUNICATIONS, INC.',
0x001034: u'GNP Computers',
0x001035: u'ELITEGROUP COMPUTER SYSTEMS CO., LTD',
0x001036: u'INTER-TEL INTEGRATED SYSTEMS',
0x001037: u'CYQ\'ve Technology Co., Ltd.',
0x001038: u'MICRO RESEARCH INSTITUTE, INC.',
0x001039: u'Vectron Systems AG',
0x00103A: u'DIAMOND NETWORK TECH',
0x00103B: u'HIPPI NETWORKING FORUM',
0x00103C: u'IC ENSEMBLE, INC.',
0x00103D: u'PHASECOM, LTD.',
0x00103E: u'NETSCHOOLS CORPORATION',
0x00103F: u'TOLLGRADE COMMUNICATIONS, INC.',
0x001040: u'INTERMEC CORPORATION',
0x001041: u'BRISTOL BABCOCK, INC.',
0x001042: u'AlacriTech',
0x001043: u'A2 CORPORATION',
0x001044: u'InnoLabs Corporation',
0x001045: u'Nortel Networks',
0x001046: u'ALCORN MCBRIDE INC.',
0x001047: u'ECHO ELETRIC CO. LTD.',
0x001048: u'HTRC AUTOMATION, INC.',
0x001049: u'SHORELINE TELEWORKS, INC.',
0x00104A: u'THE PARVUC CORPORATION',
0x00104B: u'3COM CORPORATION',
0x00104C: u'COMPUTER ACCESS TECHNOLOGY',
0x00104D: u'SURTEC INDUSTRIES, INC.',
0x00104E: u'CEOLOGIC',
0x00104F: u'STORAGE TECHNOLOGY CORPORATION',
0x001050: u'RION CO., LTD.',
0x001051: u'CMICRO CORPORATION',
0x001052: u'METTLER-TOLEDO (ALBSTADT) GMBH',
0x001053: u'COMPUTER TECHNOLOGY CORP.',
0x001054: u'CISCO SYSTEMS, INC.',
0x001055: u'FUJITSU MICROELECTRONICS, INC.',
0x001056: u'SODICK CO., LTD.',
0x001057: u'Rebel.com, Inc.',
0x001058: u'ArrowPoint Communications',
0x001059: u'DIABLO RESEARCH CO. LLC',
0x00105A: u'3COM CORPORATION',
0x00105B: u'NET INSIGHT AB',
0x00105C: u'QUANTUM DESIGNS (H.K.) LTD.',
0x00105D: u'Draeger Medical',
0x00105E: u'HEKIMIAN LABORATORIES, INC.',
0x00105F: u'IN-SNEC',
0x001060: u'BILLIONTON SYSTEMS, INC.',
0x001061: u'HOSTLINK CORP.',
0x001062: u'NX SERVER, ILNC.',
0x001063: u'STARGUIDE DIGITAL NETWORKS',
0x001064: u'DNPG, LLC',
0x001065: u'RADYNE CORPORATION',
0x001066: u'ADVANCED CONTROL SYSTEMS, INC.',
0x001067: u'REDBACK NETWORKS, INC.',
0x001068: u'COMOS TELECOM',
0x001069: u'HELIOSS COMMUNICATIONS, INC.',
0x00106A: u'DIGITAL MICROWAVE CORPORATION',
0x00106B: u'SONUS NETWORKS, INC.',
0x00106C: u'INFRATEC PLUS GmbH',
0x00106D: u'Axxcelera Broadband Wireless',
0x00106E: u'TADIRAN COM. LTD.',
0x00106F: u'TRENTON TECHNOLOGY INC.',
0x001070: u'CARADON TREND LTD.',
0x001071: u'ADVANET INC.',
0x001072: u'GVN TECHNOLOGIES, INC.',
0x001073: u'TECHNOBOX, INC.',
0x001074: u'ATEN INTERNATIONAL CO., LTD.',
0x001075: u'Maxtor Corporation',
0x001076: u'EUREM GmbH',
0x001077: u'SAF DRIVE SYSTEMS, LTD.',
0x001078: u'NUERA COMMUNICATIONS, INC.',
0x001079: u'CISCO SYSTEMS, INC.',
0x00107A: u'AmbiCom, Inc.',
0x00107B: u'CISCO SYSTEMS, INC.',
0x00107C: u'P-COM, INC.',
0x00107D: u'AURORA COMMUNICATIONS, LTD.',
0x00107E: u'BACHMANN ELECTRONIC GmbH',
0x00107F: u'CRESTRON ELECTRONICS, INC.',
0x001080: u'METAWAVE COMMUNICATIONS',
0x001081: u'DPS, INC.',
0x001082: u'JNA TELECOMMUNICATIONS LIMITED',
0x001083: u'HEWLETT-PACKARD COMPANY',
0x001084: u'K-BOT COMMUNICATIONS',
0x001085: u'POLARIS COMMUNICATIONS, INC.',
0x001086: u'ATTO TECHNOLOGY, INC.',
0x001087: u'Xstreamis PLC',
0x001088: u'AMERICAN NETWORKS INC.',
0x001089: u'WebSonic',
0x00108A: u'TeraLogic, Inc.',
0x00108B: u'LASERANIMATION SOLLINGER GmbH',
0x00108C: u'FUJITSU TELECOMMUNICATIONS EUROPE, LTD.',
0x00108D: u'JOHNSON CONTROLS, INC.',
0x00108E: u'HUGH SYMONS CONCEPT Technologies Ltd.',
0x00108F: u'RAPTOR SYSTEMS',
0x001090: u'CIMETRICS, INC.',
0x001091: u'NO WIRES NEEDED BV',
0x001092: u'NETCORE INC.',
0x001093: u'CMS COMPUTERS, LTD.',
0x001094: u'Performance Analysis Broadband, Spirent plc',
0x001095: u'Thomson Inc.',
0x001096: u'TRACEWELL SYSTEMS, INC.',
0x001097: u'WinNet Metropolitan Communications Systems, Inc.',
0x001098: u'STARNET TECHNOLOGIES, INC.',
0x001099: u'InnoMedia, Inc.',
0x00109A: u'NETLINE',
0x00109B: u'Emulex Corporation',
0x00109C: u'M-SYSTEM CO., LTD.',
0x00109D: u'CLARINET SYSTEMS, INC.',
0x00109E: u'AWARE, INC.',
0x00109F: u'PAVO, INC.',
0x0010A0: u'INNOVEX TECHNOLOGIES, INC.',
0x0010A1: u'KENDIN SEMICONDUCTOR, INC.',
0x0010A2: u'TNS',
0x0010A3: u'OMNITRONIX, INC.',
0x0010A4: u'XIRCOM',
0x0010A5: u'OXFORD INSTRUMENTS',
0x0010A6: u'CISCO SYSTEMS, INC.',
0x0010A7: u'UNEX TECHNOLOGY CORPORATION',
0x0010A8: u'RELIANCE COMPUTER CORP.',
0x0010A9: u'ADHOC TECHNOLOGIES',
0x0010AA: u'MEDIA4, INC.',
0x0010AB: u'KOITO INDUSTRIES, LTD.',
0x0010AC: u'IMCI TECHNOLOGIES',
0x0010AD: u'SOFTRONICS USB, INC.',
0x0010AE: u'SHINKO ELECTRIC INDUSTRIES CO.',
0x0010AF: u'TAC SYSTEMS, INC.',
0x0010B0: u'MERIDIAN TECHNOLOGY CORP.',
0x0010B1: u'FOR-A CO., LTD.',
0x0010B2: u'COACTIVE AESTHETICS',
0x0010B3: u'NOKIA MULTIMEDIA TERMINALS',
0x0010B4: u'ATMOSPHERE NETWORKS',
0x0010B5: u'ACCTON TECHNOLOGY CORPORATION',
0x0010B6: u'ENTRATA COMMUNICATIONS CORP.',
0x0010B7: u'COYOTE TECHNOLOGIES, LLC',
0x0010B8: u'ISHIGAKI COMPUTER SYSTEM CO.',
0x0010B9: u'MAXTOR CORP.',
0x0010BA: u'MARTINHO-DAVIS SYSTEMS, INC.',
0x0010BB: u'DATA & INFORMATION TECHNOLOGY',
0x0010BC: u'Aastra Telecom',
0x0010BD: u'THE TELECOMMUNICATION TECHNOLOGY COMMITTEE',
0x0010BE: u'TELEXIS CORP.',
0x0010BF: u'InterAir Wireless',
0x0010C0: u'ARMA, INC.',
0x0010C1: u'OI ELECTRIC CO., LTD.',
0x0010C2: u'WILLNET, INC.',
0x0010C3: u'CSI-CONTROL SYSTEMS',
0x0010C4: u'MEDIA LINKS CO., LTD.',
0x0010C5: u'PROTOCOL TECHNOLOGIES, INC.',
0x0010C6: u'USI',
0x0010C7: u'DATA TRANSMISSION NETWORK',
0x0010C8: u'COMMUNICATIONS ELECTRONICS SECURITY GROUP',
0x0010C9: u'MITSUBISHI ELECTRONICS LOGISTIC SUPPORT CO.',
0x0010CA: u'INTEGRAL ACCESS',
0x0010CB: u'FACIT K.K.',
0x0010CC: u'CLP COMPUTER LOGISTIK PLANUNG GmbH',
0x0010CD: u'INTERFACE CONCEPT',
0x0010CE: u'VOLAMP, LTD.',
0x0010CF: u'FIBERLANE COMMUNICATIONS',
0x0010D0: u'WITCOM, LTD.',
0x0010D1: u'Top Layer Networks, Inc.',
0x0010D2: u'NITTO TSUSHINKI CO., LTD',
0x0010D3: u'GRIPS ELECTRONIC GMBH',
0x0010D4: u'STORAGE COMPUTER CORPORATION',
0x0010D5: u'IMASDE CANARIAS, S.A.',
0x0010D6: u'ITT - A/CD',
0x0010D7: u'ARGOSY RESEARCH INC.',
0x0010D8: u'CALISTA',
0x0010D9: u'IBM JAPAN, FUJISAWA MT+D',
0x0010DA: u'MOTION ENGINEERING, INC.',
0x0010DB: u'Juniper Networks, Inc.',
0x0010DC: u'MICRO-STAR INTERNATIONAL CO., LTD.',
0x0010DD: u'ENABLE SEMICONDUCTOR, INC.',
0x0010DE: u'INTERNATIONAL DATACASTING CORPORATION',
0x0010DF: u'RISE COMPUTER INC.',
0x0010E0: u'COBALT MICROSERVER, INC.',
0x0010E1: u'S.I. TECH, INC.',
0x0010E2: u'ArrayComm, Inc.',
0x0010E3: u'COMPAQ COMPUTER CORPORATION',
0x0010E4: u'NSI CORPORATION',
0x0010E5: u'SOLECTRON TEXAS',
0x0010E6: u'APPLIED INTELLIGENT SYSTEMS, INC.',
0x0010E7: u'BreezeCom',
0x0010E8: u'TELOCITY, INCORPORATED',
0x0010E9: u'RAIDTEC LTD.',
0x0010EA: u'ADEPT TECHNOLOGY',
0x0010EB: u'SELSIUS SYSTEMS, INC.',
0x0010EC: u'RPCG, LLC',
0x0010ED: u'SUNDANCE TECHNOLOGY, INC.',
0x0010EE: u'CTI PRODUCTS, INC.',
0x0010EF: u'DBTEL INCORPORATED',
0x0010F1: u'I-O CORPORATION',
0x0010F2: u'ANTEC',
0x0010F3: u'Nexcom International Co., Ltd.',
0x0010F4: u'VERTICAL NETWORKS, INC.',
0x0010F5: u'AMHERST SYSTEMS, INC.',
0x0010F6: u'CISCO SYSTEMS, INC.',
0x0010F7: u'IRIICHI TECHNOLOGIES Inc.',
0x0010F8: u'TEXIO CORPORATION',
0x0010F9: u'UNIQUE SYSTEMS, INC.',
0x0010FA: u'ZAYANTE, INC.',
0x0010FB: u'ZIDA TECHNOLOGIES LIMITED',
0x0010FC: u'BROADBAND NETWORKS, INC.',
0x0010FD: u'COCOM A/S',
0x0010FE: u'DIGITAL EQUIPMENT CORPORATION',
0x0010FF: u'CISCO SYSTEMS, INC.',
0x001100: u'RAM Industries, LLC',
0x001101: u'CET Technologies Pte Ltd',
0x001102: u'Aurora Multimedia Corp.',
0x001103: u'kawamura electric inc.',
0x001104: u'TELEXY',
0x001105: u'Sunplus Technology Co., Ltd.',
0x001106: u'Siemens NV (Belgium)',
0x001107: u'RGB Networks Inc.',
0x001108: u'Orbital Data Corporation',
0x001109: u'Micro-Star International',
0x00110A: u'Hewlett Packard',
0x00110B: u'Franklin Technology Systems',
0x00110C: u'Atmark Techno, Inc.',
0x00110D: u'SANBlaze Technology, Inc.',
0x00110E: u'Tsurusaki Sealand Transportation Co. Ltd.',
0x00110F: u'netplat,Inc.',
0x001110: u'Maxanna Technology Co., Ltd.',
0x001111: u'Intel Corporation',
0x001112: u'Honeywell CMSS',
0x001113: u'Fraunhofer FOKUS',
0x001114: u'EverFocus Electronics Corp.',
0x001115: u'EPIN Technologies, Inc.',
0x001116: u'COTEAU VERT CO., LTD.',
0x001117: u'CESNET',
0x001118: u'BLX IC Design Corp., Ltd.',
0x001119: u'Solteras, Inc.',
0x00111A: u'Motorola BCS',
0x00111B: u'Targa Systems Div L-3 Communications Canada',
0x00111C: u'Pleora Technologies Inc.',
0x00111D: u'Hectrix Limited',
0x00111E: u'EPSG (Ethernet Powerlink Standardization Group)',
0x00111F: u'Doremi Labs, Inc.',
0x001120: u'Cisco Systems',
0x001121: u'Cisco Systems',
0x001122: u'CIMSYS Inc',
0x001123: u'Appointech, Inc.',
0x001124: u'Apple Computer',
0x001125: u'IBM Corporation',
0x001126: u'Venstar Inc.',
0x001127: u'TASI, Inc',
0x001128: u'Streamit',
0x001129: u'Paradise Datacom Ltd.',
0x00112A: u'Niko NV',
0x00112B: u'NetModule',
0x00112C: u'IZT GmbH',
0x00112D: u'Guys Without Ties',
0x00112E: u'CEICOM',
0x00112F: u'ASUSTek Computer Inc.',
0x001130: u'Allied Telesis (Hong Kong) Ltd.',
0x001131: u'UNATECH. CO.,LTD',
0x001132: u'Synology Incorporated',
0x001133: u'Siemens Austria SIMEA',
0x001134: u'MediaCell, Inc.',
0x001135: u'Grandeye Ltd',
0x001136: u'Goodrich Sensor Systems',
0x001137: u'AICHI ELECTRIC CO., LTD.',
0x001138: u'TAISHIN CO., LTD.',
0x001139: u'STOEBER ANTRIEBSTECHNIK GmbH + Co. KG.',
0x00113A: u'SHINBORAM',
0x00113B: u'Micronet Communications Inc.',
0x00113C: u'Micronas GmbH',
0x00113D: u'KN SOLTEC CO.,LTD.',
0x00113E: u'JL Corporation',
0x00113F: u'Alcatel DI',
0x001140: u'Nanometrics Inc.',
0x001141: u'GoodMan Corporation',
0x001142: u'e-SMARTCOM INC.',
0x001143: u'DELL INC.',
0x001144: u'Assurance Technology Corp',
0x001145: u'ValuePoint Networks',
0x001146: u'Telecard-Pribor Ltd',
0x001147: u'Secom-Industry co.LTD.',
0x001148: u'Prolon Control Systems',
0x001149: u'Proliphix LLC',
0x00114A: u'KAYABA INDUSTRY Co,.Ltd.',
0x00114B: u'Francotyp-Postalia AG & Co. KG',
0x00114C: u'caffeina applied research ltd.',
0x00114D: u'Atsumi Electric Co.,LTD.',
0x00114E: u'690885 Ontario Inc.',
0x00114F: u'US Digital Television, Inc',
0x001150: u'Belkin Corporation',
0x001151: u'Mykotronx',
0x001152: u'Eidsvoll Electronics AS',
0x001153: u'Trident Tek, Inc.',
0x001154: u'Webpro Technologies Inc.',
0x001155: u'Sevis Systems',
0x001156: u'Pharos Systems NZ',
0x001157: u'OF Networks Co., Ltd.',
0x001158: u'Nortel Networks',
0x001159: u'MATISSE NETWORKS INC',
0x00115A: u'Ivoclar Vivadent AG',
0x00115B: u'Elitegroup Computer System Co. (ECS)',
0x00115C: u'Cisco',
0x00115D: u'Cisco',
0x00115E: u'ProMinent Dosiertechnik GmbH',
0x00115F: u'Intellix Co., Ltd.',
0x001160: u'ARTDIO Company Co., LTD',
0x001161: u'NetStreams, LLC',
0x001162: u'STAR MICRONICS CO.,LTD.',
0x001163: u'SYSTEM SPA DEPT. ELECTRONICS',
0x001164: u'ACARD Technology Corp.',
0x001165: u'Znyx Networks',
0x001166: u'Taelim Electronics Co., Ltd.',
0x001167: u'Integrated System Solution Corp.',
0x001168: u'HomeLogic LLC',
0x001169: u'EMS Satcom',
0x00116A: u'Domo Ltd',
0x00116B: u'Digital Data Communications Asia Co.,Ltd',
0x00116C: u'Nanwang Multimedia Inc.,Ltd',
0x00116D: u'American Time and Signal',
0x00116E: u'PePLink Ltd.',
0x00116F: u'Netforyou Co., LTD.',
0x001170: u'GSC SRL',
0x001171: u'DEXTER Communications, Inc.',
0x001172: u'COTRON CORPORATION',
0x001173: u'Adtron Corporation',
0x001174: u'Wibhu Technologies, Inc.',
0x001175: u'PathScale, Inc.',
0x001176: u'Intellambda Systems, Inc.',
0x001177: u'COAXIAL NETWORKS, INC.',
0x001178: u'Chiron Technology Ltd',
0x001179: u'Singular Technology Co. Ltd.',
0x00117A: u'Singim International Corp.',
0x00117B: u'Büchi Labortechnik AG',
0x00117C: u'e-zy.net',
0x00117D: u'ZMD America, Inc.',
0x00117E: u'Progeny Inc.',
0x00117F: u'Neotune Information Technology Corporation,.LTD',
0x001180: u'Motorola BCS',
0x001181: u'InterEnergy Co.Ltd,',
0x001182: u'IMI Norgren Ltd',
0x001183: u'PSC Scanning, Inc',
0x001184: u'Humo Laboratory,Ltd.',
0x001185: u'Hewlett Packard',
0x001186: u'Prime Systems, Inc.',
0x001187: u'Category Solutions, Inc',
0x001188: u'Enterasys',
0x001189: u'Aerotech Inc',
0x00118A: u'Viewtran Technology Limited',
0x00118B: u'NetDevices Inc.',
0x00118C: u'Missouri Department of Transportation',
0x00118D: u'Hanchang System Corp.',
0x00118E: u'Halytech Mace',
0x00118F: u'EUTECH INSTRUMENTS PTE. LTD.',
0x001190: u'Digital Design Corporation',
0x001191: u'CTS-Clima Temperatur Systeme GmbH',
0x001192: u'Cisco Systems',
0x001193: u'Cisco Systems',
0x001194: u'Chi Mei Communication Systems, Inc.',
0x001195: u'D-Link Corporation',
0x001196: u'Actuality Systems, Inc.',
0x001197: u'Monitoring Technologies Limited',
0x001198: u'Prism Media Products Limited',
0x001199: u'2wcom GmbH',
0x00119A: u'Alkeria srl',
0x00119B: u'Telesynergy Research Inc.',
0x00119C: u'EP&T Energy',
0x00119D: u'Diginfo Technology Corporation',
0x00119E: u'Solectron Brazil',
0x00119F: u'Nokia Danmark A/S',
0x0011A0: u'Vtech Engineering Canada Ltd',
0x0011A1: u'VISION NETWARE CO.,LTD',
0x0011A2: u'Manufacturing Technology Inc',
0x0011A3: u'LanReady Technologies Inc.',
0x0011A4: u'JStream Technologies Inc.',
0x0011A5: u'Fortuna Electronic Corp.',
0x0011A6: u'Sypixx Networks',
0x0011A7: u'Infilco Degremont Inc.',
0x0011A8: u'Quest Technologies',
0x0011A9: u'MOIMSTONE Co., LTD',
0x0011AA: u'Uniclass Technology, Co., LTD',
0x0011AB: u'TRUSTABLE TECHNOLOGY CO.,LTD.',
0x0011AC: u'Simtec Electronics',
0x0011AD: u'Shanghai Ruijie Technology',
0x0011AE: u'Motorola BCS',
0x0011AF: u'Medialink-i,Inc',
0x0011B0: u'Fortelink Inc.',
0x0011B1: u'BlueExpert Technology Corp.',
0x0011B2: u'2001 Technology Inc.',
0x0011B3: u'YOSHIMIYA CO.,LTD.',
0x0011B4: u'Westermo Teleindustri AB',
0x0011B5: u'Shenzhen Powercom Co.,Ltd',
0x0011B6: u'Open Systems International',
0x0011B7: u'Melexis Nederland B.V.',
0x0011B8: u'Liebherr - Elektronik GmbH',
0x0011B9: u'Inner Range Pty. Ltd.',
0x0011BA: u'Elexol Pty Ltd',
0x0011BB: u'Cisco Systems',
0x0011BC: u'Cisco Systems',
0x0011BD: u'Bombardier Transportation',
0x0011BE: u'AGP Telecom Co. Ltd',
0x0011BF: u'AESYS S.p.A.',
0x0011C0: u'Aday Technology Inc',
0x0011C1: u'4P MOBILE DATA PROCESSING',
0x0011C2: u'United Fiber Optic Communication',
0x0011C3: u'Transceiving System Technology Corporation',
0x0011C4: u'Terminales de Telecomunicacion Terrestre, S.L.',
0x0011C5: u'TEN Technology',
0x0011C6: u'Seagate Technology LLC',
0x0011C7: u'RAYMARINE Group Ltd.',
0x0011C8: u'Powercom Co., Ltd.',
0x0011C9: u'MTT Corporation',
0x0011CA: u'Long Range Systems, Inc.',
0x0011CB: u'Jacobsons RKH AB',
0x0011CC: u'Guangzhou Jinpeng Group Co.,Ltd.',
0x0011CD: u'Axsun Technologies',
0x0011CE: u'Ubisense Limited',
0x0011CF: u'Thrane & Thrane A/S',
0x0011D0: u'Tandberg Data ASA',
0x0011D1: u'Soft Imaging System GmbH',
0x0011D2: u'Perception Digital Ltd',
0x0011D3: u'NextGenTel Holding ASA',
0x0011D4: u'NetEnrich, Inc',
0x0011D5: u'Hangzhou Sunyard System Engineering Co.,Ltd.',
0x0011D6: u'HandEra, Inc.',
0x0011D7: u'eWerks Inc',
0x0011D8: u'ASUSTek Computer Inc.',
0x0011D9: u'TiVo',
0x0011DA: u'Vivaas Technology Inc.',
0x0011DB: u'Land-Cellular Corporation',
0x0011DC: u'Glunz & Jensen',
0x0011DD: u'FROMUS TEC. Co., Ltd.',
0x0011DE: u'EURILOGIC',
0x0011DF: u'Arecont Systems',
0x0011E0: u'U-MEDIA Communications, Inc.',
0x0011E1: u'BEKO Electronics Co.',
0x0011E2: u'Hua Jung Components Co., Ltd.',
0x0011E3: u'Thomson, Inc.',
0x0011E4: u'Danelec Electronics A/S',
0x0011E5: u'KCodes Corporation',
0x0011E6: u'Scientific Atlanta',
0x0011E7: u'WORLDSAT - Texas de France',
0x0011E8: u'Tixi.Com',
0x0011E9: u'STARNEX CO., LTD.',
0x0011EA: u'IWICS Inc.',
0x0011EB: u'Innovative Integration',
0x0011EC: u'AVIX INC.',
0x0011ED: u'802 Global',
0x0011EE: u'Estari, Inc.',
0x0011EF: u'Conitec Datensysteme GmbH',
0x0011F0: u'Wideful Limited',
0x0011F1: u'QinetiQ Ltd',
0x0011F2: u'Institute of Network Technologies',
0x0011F3: u'Gavitec AG- mobile digit',
0x0011F4: u'woori-net',
0x0011F5: u'ASKEY COMPUTER CORP.',
0x0011F6: u'Asia Pacific Microsystems , Inc.',
0x0011F7: u'Shenzhen Forward Industry Co., Ltd',
0x0011F8: u'AIRAYA Corp',
0x0011F9: u'Nortel Networks',
0x0011FA: u'Rane Corporation',
0x0011FB: u'Heidelberg Engineering GmbH',
0x0011FC: u'HARTING Electric Gmbh & Co.KG',
0x0011FD: u'KORG INC.',
0x0011FE: u'Keiyo System Research, Inc.',
0x0011FF: u'Digitro Tecnologia Ltda',
0x001200: u'Cisco',
0x001201: u'Cisco',
0x001202: u'Audio International Inc.',
0x001203: u'Activ Networks',
0x001204: u'u10 Networks, Inc.',
0x001205: u'Terrasat Communications, Inc.',
0x001206: u'iQuest (NZ) Ltd',
0x001207: u'Head Strong International Limited',
0x001208: u'Gantner Electronic GmbH',
0x001209: u'Fastrax Ltd',
0x00120A: u'Emerson Electric GmbH & Co. OHG',
0x00120B: u'Chinasys Technologies Limited',
0x00120C: u'CE-Infosys Pte Ltd',
0x00120D: u'Advanced Telecommunication Technologies, Inc.',
0x00120E: u'AboCom',
0x00120F: u'IEEE 802.3',
0x001210: u'WideRay Corp',
0x001211: u'Protechna Herbst GmbH & Co. KG',
0x001212: u'PLUS Vision Corporation',
0x001213: u'Metrohm AG',
0x001214: u'Koenig & Bauer AG',
0x001215: u'iStor Networks, Inc.',
0x001216: u'ICP Internet Communication Payment AG',
0x001217: u'Cisco-Linksys, LLC',
0x001218: u'ARUZE Corporation',
0x001219: u'Ahead Communication Systems Inc',
0x00121A: u'Techno Soft Systemnics Inc.',
0x00121B: u'Sound Devices, LLC',
0x00121C: u'PARROT S.A.',
0x00121D: u'Netfabric Corporation',
0x00121E: u'Juniper Networks, Inc.',
0x00121F: u'Harding Intruments',
0x001220: u'Cadco Systems',
0x001221: u'B.Braun Melsungen AG',
0x001222: u'Skardin (UK) Ltd',
0x001223: u'Pixim',
0x001224: u'NexQL Corporation',
0x001225: u'Motorola BCS',
0x001226: u'Japan Direx Corporation',
0x001227: u'Franklin Electric Co., Inc.',
0x001228: u'Data Ltd.',
0x001229: u'BroadEasy Technologies Co.,Ltd',
0x00122A: u'VTech Telecommunications Ltd.',
0x00122B: u'Virbiage Pty Ltd',
0x00122C: u'Soenen Controls N.V.',
0x00122D: u'SiNett Corporation',
0x00122E: u'Signal Technology - AISD',
0x00122F: u'Sanei Electric Inc.',
0x001230: u'Picaso Infocommunication CO., LTD.',
0x001231: u'Motion Control Systems, Inc.',
0x001232: u'LeWiz Communications Inc.',
0x001233: u'JRC TOKKI Co.,Ltd.',
0x001234: u'Camille Bauer',
0x001235: u'Andrew Corporation',
0x001236: u'ConSentry Networks',
0x001237: u'Texas Instruments',
0x001238: u'SetaBox Technology Co., Ltd.',
0x001239: u'S Net Systems Inc.',
0x00123A: u'Posystech Inc., Co.',
0x00123B: u'KeRo Systems ApS',
0x00123C: u'IP3 Networks, Inc.',
0x00123D: u'GES',
0x00123E: u'ERUNE technology Co., Ltd.',
0x00123F: u'Dell Inc',
0x001240: u'AMOI ELECTRONICS CO.,LTD',
0x001241: u'a2i marketing center',
0x001242: u'Millennial Net',
0x001243: u'Cisco',
0x001244: u'Cisco',
0x001245: u'Zellweger Analytics, Inc.',
0x001246: u'T.O.M TECHNOLOGY INC..',
0x001247: u'Samsung Electronics Co., Ltd.',
0x001248: u'Kashya Inc.',
0x001249: u'Delta Elettronica S.p.A.',
0x00124A: u'Dedicated Devices, Inc.',
0x00124B: u'Chipcon AS',
0x00124C: u'BBWM Corporation',
0x00124D: u'Inducon BV',
0x00124E: u'XAC AUTOMATION CORP.',
0x00124F: u'Tyco Thermal Controls LLC.',
0x001250: u'Tokyo Aircaft Instrument Co., Ltd.',
0x001251: u'SILINK',
0x001252: u'Citronix, LLC',
0x001253: u'AudioDev AB',
0x001254: u'Spectra Technologies Holdings Company Ltd',
0x001255: u'NetEffect Incorporated',
0x001256: u'LG INFORMATION & COMM.',
0x001257: u'LeapComm Communication Technologies Inc.',
0x001258: u'Activis Polska',
0x001259: u'THERMO ELECTRON KARLSRUHE',
0x00125A: u'Microsoft Corporation',
0x00125B: u'KAIMEI ELECTRONI',
0x00125C: u'Green Hills Software, Inc.',
0x00125D: u'CyberNet Inc.',
0x00125E: u'CAEN',
0x00125F: u'AWIND Inc.',
0x001260: u'Stanton Magnetics,inc.',
0x001261: u'Adaptix, Inc',
0x001262: u'Nokia Danmark A/S',
0x001263: u'Data Voice Technologies GmbH',
0x001264: u'daum electronic gmbh',
0x001265: u'Enerdyne Technologies, Inc.',
0x001266: u'PRIVATE',
0x001267: u'Matsushita Electronic Components Co., Ltd.',
0x001268: u'IPS d.o.o.',
0x001269: u'Value Electronics',
0x00126A: u'OPTOELECTRONICS Co., Ltd.',
0x00126B: u'Ascalade Communications Limited',
0x00126C: u'Visonic Ltd.',
0x00126D: u'University of California, Berkeley',
0x00126E: u'Seidel Elektronik GmbH Nfg.KG',
0x00126F: u'Rayson Technology Co., Ltd.',
0x001270: u'NGES Denro Systems',
0x001271: u'Measurement Computing Corp',
0x001272: u'Redux Communications Ltd.',
0x001273: u'Stoke Inc',
0x001274: u'NIT lab',
0x001275: u'Moteiv Corporation',
0x001276: u'Microsol Holdings Ltd.',
0x001277: u'Korenix Technologies Co., Ltd.',
0x001278: u'International Bar Code',
0x001279: u'Hewlett Packard',
0x00127A: u'Sanyu Industry Co.,Ltd.',
0x00127B: u'VIA Networking Technologies, Inc.',
0x00127C: u'SWEGON AB',
0x00127D: u'MobileAria',
0x00127E: u'Digital Lifestyles Group, Inc.',
0x00127F: u'Cisco',
0x001280: u'Cisco',
0x001281: u'CIEFFE srl',
0x001282: u'Qovia',
0x001283: u'Nortel Networks',
0x001284: u'Lab33 Srl',
0x001285: u'Gizmondo Europe Ltd',
0x001286: u'ENDEVCO CORP',
0x001287: u'Digital Everywhere Unterhaltungselektronik GmbH',
0x001288: u'2Wire, Inc',
0x001289: u'Advance Sterilization Products',
0x00128A: u'Motorola PCS',
0x00128B: u'Sensory Networks Inc',
0x00128C: u'Woodward Governor',
0x00128D: u'STB Datenservice GmbH',
0x00128E: u'Q-Free ASA',
0x00128F: u'Montilio',
0x001290: u'KYOWA Electric & Machinery Corp.',
0x001291: u'KWS Computersysteme GmbH',
0x001292: u'Griffin Technology',
0x001293: u'GE Energy',
0x001294: u'Eudyna Devices Inc.',
0x001295: u'Aiware Inc.',
0x001296: u'Addlogix',
0x001297: u'O2Micro, Inc.',
0x001298: u'MICO ELECTRIC(SHENZHEN) LIMITED',
0x001299: u'Ktech Telecommunications Inc',
0x00129A: u'IRT Electronics Pty Ltd',
0x00129B: u'E2S Electronic Engineering Solutions, S.L.',
0x00129C: u'Yulinet',
0x00129D: u'FIRST INTERNATIONAL COMPUTER DO BRASIL LTDA',
0x00129E: u'Surf Communications Inc.',
0x00129F: u'RAE Systems, Inc.',
0x0012A0: u'NeoMeridian Sdn Bhd',
0x0012A1: u'BluePacket Communications Co., Ltd.',
0x0012A2: u'VITA',
0x0012A3: u'Trust International B.V.',
0x0012A4: u'ThingMagic, LLC',
0x0012A5: u'Stargen, Inc.',
0x0012A6: u'Lake Technology Ltd',
0x0012A7: u'ISR TECHNOLOGIES Inc',
0x0012A8: u'intec GmbH',
0x0012A9: u'3COM EUROPE LTD',
0x0012AA: u'IEE, Inc.',
0x0012AB: u'WiLife, Inc.',
0x0012AC: u'ONTIMETEK INC.',
0x0012AD: u'IDS GmbH',
0x0012AE: u'HLS HARD-LINE Solutions Inc.',
0x0012AF: u'ELPRO Technologies',
0x0012B0: u'Efore Oyj (Plc)',
0x0012B1: u'Dai Nippon Printing Co., Ltd',
0x0012B2: u'AVOLITES LTD.',
0x0012B3: u'Advance Wireless Technology Corp.',
0x0012B4: u'Work GmbH',
0x0012B5: u'Vialta, Inc.',
0x0012B6: u'Santa Barbara Infrared, Inc.',
0x0012B7: u'PTW Freiburg',
0x0012B8: u'G2 Microsystems',
0x0012B9: u'Fusion Digital Technology',
0x0012BA: u'FSI Systems, Inc.',
0x0012BB: u'Telecommunications Industry Association TR-41 Committee',
0x0012BC: u'Echolab LLC',
0x0012BD: u'Avantec Manufacturing Limited',
0x0012BE: u'Astek Corporation',
0x0012BF: u'Arcadyan Technology Corporation',
0x0012C0: u'HotLava Systems, Inc.',
0x0012C1: u'Check Point Software Technologies',
0x0012C2: u'Apex Electronics Factory',
0x0012C3: u'WIT S.A.',
0x0012C4: u'Viseon, Inc.',
0x0012C5: u'V-Show Technology Co.Ltd',
0x0012C6: u'TGC America, Inc',
0x0012C7: u'SECURAY Technologies Ltd.Co.',
0x0012C8: u'Perfect tech',
0x0012C9: u'Motorola BCS',
0x0012CA: u'Hansen Telecom',
0x0012CB: u'CSS Inc.',
0x0012CC: u'Bitatek CO., LTD',
0x0012CD: u'ASEM SpA',
0x0012CE: u'Advanced Cybernetics Group',
0x0012CF: u'Accton Technology Corporation',
0x0012D0: u'Gossen-Metrawatt-GmbH',
0x0012D1: u'Texas Instruments Inc',
0x0012D2: u'Texas Instruments',
0x0012D3: u'Zetta Systems, Inc.',
0x0012D4: u'Princeton Technology, Ltd',
0x0012D5: u'Motion Reality Inc.',
0x0012D6: u'Jiangsu Yitong High-Tech Co.,Ltd',
0x0012D7: u'Invento Networks, Inc.',
0x0012D8: u'International Games System Co., Ltd.',
0x0012D9: u'Cisco Systems',
0x0012DA: u'Cisco Systems',
0x0012DB: u'ZIEHL industrie-elektronik GmbH + Co KG',
0x0012DC: u'SunCorp Industrial Limited',
0x0012DD: u'Shengqu Information Technology (Shanghai) Co., Ltd.',
0x0012DE: u'Radio Components Sweden AB',
0x0012DF: u'Novomatic AG',
0x0012E0: u'Codan Limited',
0x0012E1: u'Alliant Networks, Inc',
0x0012E2: u'ALAXALA Networks Corporation',
0x0012E3: u'Agat-RT, Ltd.',
0x0012E4: u'ZIEHL industrie-electronik GmbH + Co KG',
0x0012E5: u'Time America, Inc.',
0x0012E6: u'SPECTEC COMPUTER CO., LTD.',
0x0012E7: u'Projectek Networking Electronics Corp.',
0x0012E8: u'Fraunhofer IMS',
0x0012E9: u'Abbey Systems Ltd',
0x0012EA: u'Trane',
0x0012EB: u'R2DI, LLC',
0x0012EC: u'Movacolor b.v.',
0x0012ED: u'AVG Advanced Technologies',
0x0012EE: u'Sony Ericsson Mobile Communications AB',
0x0012EF: u'OneAccess SA',
0x0012F0: u'Intel Corporate',
0x0012F1: u'IFOTEC',
0x0012F2: u'Foundry Networks',
0x0012F3: u'connectBlue AB',
0x0012F4: u'Belco International Co.,Ltd.',
0x0012F5: u'Prolificx Ltd',
0x0012F6: u'MDK CO.,LTD.',
0x0012F7: u'Xiamen Xinglian Electronics Co., Ltd.',
0x0012F8: u'WNI Resources, LLC',
0x0012F9: u'URYU SEISAKU, LTD.',
0x0012FA: u'THX LTD',
0x0012FB: u'Samsung Electronics',
0x0012FC: u'PLANET System Co.,LTD',
0x0012FD: u'OPTIMUS IC S.A.',
0x0012FE: u'Lenovo Mobile Communication Technology Ltd.',
0x0012FF: u'Lely Industries N.V.',
0x001300: u'IT-FACTORY, INC.',
0x001301: u'IronGate S.L.',
0x001302: u'Intel Corporate',
0x001303: u'GateConnect Technologies GmbH',
0x001304: u'Flaircomm Technologies Co. LTD',
0x001305: u'Epicom, Inc.',
0x001306: u'Always On Wireless',
0x001307: u'Paravirtual Corporation',
0x001308: u'Nuvera Fuel Cells',
0x001309: u'Ocean Broadband Networks',
0x00130A: u'Nortel',
0x00130B: u'Mextal B.V.',
0x00130C: u'HF System Corporation',
0x00130D: u'GALILEO AVIONICA',
0x00130E: u'Focusrite Audio Engineering Limited',
0x00130F: u'EGEMEN Bilgisayar Muh San ve Tic LTD STI',
0x001310: u'Cisco-Linksys, LLC',
0x001311: u'ARRIS International',
0x001312: u'Amedia Networks Inc.',
0x001313: u'GuangZhou Post & Telecom Equipment ltd',
0x001314: u'Asiamajor Inc.',
0x001315: u'SONY Computer Entertainment inc,',
0x001316: u'L-S-B GmbH',
0x001317: u'GN Netcom as',
0x001318: u'DGSTATION Co., Ltd.',
0x001319: u'Cisco Systems',
0x00131A: u'Cisco Systems',
0x00131B: u'BeCell Innovations Corp.',
0x00131C: u'LiteTouch, Inc.',
0x00131D: u'Scanvaegt International A/S',
0x00131E: u'Peiker acustic GmbH & Co. KG',
0x00131F: u'NxtPhase T&D, Corp.',
0x001320: u'Intel Corporate',
0x001321: u'Hewlett Packard',
0x001322: u'DAQ Electronics, Inc.',
0x001323: u'Cap Co., Ltd.',
0x001324: u'Schneider Electric Ultra Terminal',
0x001325: u'ImmenStar Inc.',
0x001326: u'ECM Systems Ltd',
0x001327: u'Data Acquisitions limited',
0x001328: u'Westech Korea Inc.,',
0x001329: u'VSST Co., LTD',
0x00132A: u'STROM telecom, s. r. o.',
0x00132B: u'Phoenix Digital',
0x00132C: u'MAZ Brandenburg GmbH',
0x00132D: u'iWise Communications Pty Ltd',
0x00132E: u'ITian Coporation',
0x00132F: u'Interactek',
0x001330: u'EURO PROTECTION SURVEILLANCE',
0x001331: u'CellPoint Connect',
0x001332: u'Beijing Topsec Network Security Technology Co., Ltd.',
0x001333: u'Baud Technology Inc.',
0x001334: u'Arkados, Inc.',
0x001335: u'VS Industry Berhad',
0x001336: u'Tianjin 712 Communication Broadcasting co., ltd.',
0x001337: u'Orient Power Home Network Ltd.',
0x001338: u'FRESENIUS-VIAL',
0x001339: u'EL-ME AG',
0x00133A: u'VadaTech Inc.',
0x00133B: u'Speed Dragon Multimedia Limited',
0x00133C: u'QUINTRON SYSTEMS INC.',
0x00133D: u'Micro Memory LLC',
0x00133E: u'MetaSwitch',
0x00133F: u'Eppendorf Instrumente GmbH',
0x001340: u'AD.EL s.r.l.',
0x001341: u'Shandong New Beiyang Information Technology Co.,Ltd',
0x001342: u'Vision Research, Inc.',
0x001343: u'Matsushita Electronic Components (Europe) GmbH',
0x001344: u'Fargo Electronics Inc.',
0x001345: u'Eaton Corporation',
0x001346: u'D-Link Corporation',
0x001347: u'BlueTree Wireless Data Inc.',
0x001348: u'Artila Electronics Co., Ltd.',
0x001349: u'ZyXEL Communications Corporation',
0x00134A: u'Engim, Inc.',
0x00134B: u'ToGoldenNet Technology Inc.',
0x00134C: u'YDT Technology International',
0x00134D: u'IPC systems',
0x00134E: u'Valox Systems, Inc.',
0x00134F: u'Tranzeo Wireless Technologies Inc.',
0x001350: u'Silver Spring Networks, Inc',
0x001351: u'Niles Audio Corporation',
0x001352: u'Naztec, Inc.',
0x001353: u'HYDAC Filtertechnik GMBH',
0x001354: u'Zcomax Technologies, Inc.',
0x001355: u'TOMEN Cyber-business Solutions, Inc.',
0x001356: u'target systemelectronic gmbh',
0x001357: u'Soyal Technology Co., Ltd.',
0x001358: u'Realm Systems, Inc.',
0x001359: u'ProTelevision Technologies A/S',
0x00135A: u'Project T&E Limited',
0x00135B: u'PanelLink Cinema, LLC',
0x00135C: u'OnSite Systems, Inc.',
0x00135D: u'NTTPC Communications, Inc.',
0x00135E: u'EAB/RWI/K',
0x00135F: u'Cisco Systems',
0x001360: u'Cisco Systems',
0x001361: u'Biospace Co., Ltd.',
0x001362: u'ShinHeung Precision Co., Ltd.',
0x001363: u'Verascape, Inc.',
0x001364: u'Paradigm Technology Inc..',
0x001365: u'Nortel',
0x001366: u'Neturity Technologies Inc.',
0x001367: u'Narayon. Co., Ltd.',
0x001368: u'Maersk Data Defence',
0x001369: u'Honda Electron Co., LED.',
0x00136A: u'Hach Ultra Analytics',
0x00136B: u'E-TEC',
0x00136C: u'PRIVATE',
0x00136D: u'Tentaculus AB',
0x00136E: u'Techmetro Corp.',
0x00136F: u'PacketMotion, Inc.',
0x001370: u'Nokia Danmark A/S',
0x001371: u'Motorola CHS',
0x001372: u'Dell Inc.',
0x001373: u'BLwave Electronics Co., Ltd',
0x001374: u'Attansic Technology Corp.',
0x001375: u'American Security Products Co.',
0x001376: u'Tabor Electronics Ltd.',
0x001377: u'Samsung Electronics CO., LTD',
0x001378: u'QSAN Technology, Inc.',
0x001379: u'PONDER INFORMATION INDUSTRIES LTD.',
0x00137A: u'Netvox Technology Co., Ltd.',
0x00137B: u'Movon Corporation',
0x00137C: u'Kaicom co., Ltd.',
0x00137D: u'Dynalab, Inc.',
0x00137E: u'CorEdge Networks, Inc.',
0x00137F: u'Cisco Systems',
0x001380: u'Cisco Systems',
0x001381: u'CHIPS & Systems, Inc.',
0x001382: u'Cetacea Networks Corporation',
0x001383: u'Application Technologies and Engineering Research Laboratory',
0x001384: u'Advanced Motion Controls',
0x001385: u'Add-On Technology Co., LTD.',
0x001386: u'ABB Inc./Totalflow',
0x001387: u'27M Technologies AB',
0x001388: u'WiMedia Alliance',
0x001389: u'Redes de Telefonía Móvil S.A.',
0x00138A: u'QINGDAO GOERTEK ELECTRONICS CO.,LTD.',
0x00138B: u'Phantom Technologies LLC',
0x00138C: u'Kumyoung.Co.Ltd',
0x00138D: u'Kinghold',
0x00138E: u'FOAB Elektronik AB',
0x00138F: u'Asiarock Incorporation',
0x001390: u'Termtek Computer Co., Ltd',
0x001391: u'OUEN CO.,LTD.',
0x001392: u'Ruckus Wireless',
0x001393: u'Panta Systems, Inc.',
0x001394: u'Infohand Co.,Ltd',
0x001395: u'congatec AG',
0x001396: u'Acbel Polytech Inc.',
0x001397: u'Xsigo Systems, Inc.',
0x001398: u'TrafficSim Co.,Ltd',
0x001399: u'STAC Corporation.',
0x00139A: u'K-ubique ID Corp.',
0x00139B: u'ioIMAGE Ltd.',
0x00139C: u'Exavera Technologies, Inc.',
0x00139D: u'Design of Systems on Silicon S.A.',
0x00139E: u'Ciara Technologies Inc.',
0x00139F: u'Electronics Design Services, Co., Ltd.',
0x0013A0: u'ALGOSYSTEM Co., Ltd.',
0x0013A1: u'Crow Electronic Engeneering',
0x0013A2: u'MaxStream, Inc',
0x0013A3: u'Siemens Com CPE Devices',
0x0013A4: u'KeyEye Communications',
0x0013A5: u'General Solutions, LTD.',
0x0013A6: u'Extricom Ltd',
0x0013A7: u'BATTELLE MEMORIAL INSTITUTE',
0x0013A8: u'Tanisys Technology',
0x0013A9: u'Sony Corporation',
0x0013AA: u'ALS & TEC Ltd.',
0x0013AB: u'Telemotive AG',
0x0013AC: u'Sunmyung Electronics Co., LTD',
0x0013AD: u'Sendo Ltd',
0x0013AE: u'Radiance Technologies',
0x0013AF: u'NUMA Technology,Inc.',
0x0013B0: u'Jablotron',
0x0013B1: u'Intelligent Control Systems (Asia) Pte Ltd',
0x0013B2: u'Carallon Limited',
0x0013B3: u'Beijing Ecom Communications Technology Co., Ltd.',
0x0013B4: u'Appear TV',
0x0013B5: u'Wavesat',
0x0013B6: u'Sling Media, Inc.',
0x0013B7: u'Scantech ID',
0x0013B8: u'RyCo Electronic Systems Limited',
0x0013B9: u'BM SPA',
0x0013BA: u'ReadyLinks Inc',
0x0013BB: u'PRIVATE',
0x0013BC: u'Artimi Ltd',
0x0013BD: u'HYMATOM SA',
0x0013BE: u'Virtual Conexions',
0x0013BF: u'Media System Planning Corp.',
0x0013C0: u'Trix Tecnologia Ltda.',
0x0013C1: u'Asoka USA Corporation',
0x0013C2: u'WACOM Co.,Ltd',
0x0013C3: u'Cisco Systems',
0x0013C4: u'Cisco Systems',
0x0013C5: u'LIGHTRON FIBER-OPTIC DEVICES INC.',
0x0013C6: u'OpenGear, Inc',
0x0013C7: u'IONOS Co.,Ltd.',
0x0013C8: u'PIRELLI BROADBAND SOLUTIONS S.P.A.',
0x0013C9: u'Beyond Achieve Enterprises Ltd.',
0x0013CA: u'X-Digital Systems, Inc.',
0x0013CB: u'Zenitel Norway AS',
0x0013CC: u'Tall Maple Systems',
0x0013CD: u'MTI co. LTD',
0x0013CE: u'Intel Corporate',
0x0013CF: u'4Access Communications',
0x0013D0: u'e-San Limited',
0x0013D1: u'KIRK telecom A/S',
0x0013D2: u'PAGE IBERICA, S.A.',
0x0013D3: u'MICRO-STAR INTERNATIONAL CO., LTD.',
0x0013D4: u'ASUSTek COMPUTER INC.',
0x0013D5: u'WiNetworks LTD',
0x0013D6: u'TII NETWORK TECHNOLOGIES, INC.',
0x0013D7: u'SPIDCOM Technologies SA',
0x0013D8: u'Princeton Instruments',
0x0013D9: u'Matrix Product Development, Inc.',
0x0013DA: u'Diskware Co., Ltd',
0x0013DB: u'SHOEI Electric Co.,Ltd',
0x0013DC: u'IBTEK INC.',
0x0013DD: u'Abbott Diagnostics',
0x0013DE: u'Adapt4',
0x0013DF: u'Ryvor Corp.',
0x0013E0: u'Murata Manufacturing Co., Ltd.',
0x0013E1: u'Iprobe',
0x0013E2: u'GeoVision Inc.',
0x0013E3: u'CoVi Technologies, Inc.',
0x0013E4: u'YANGJAE SYSTEMS CORP.',
0x0013E5: u'TENOSYS, INC.',
0x0013E6: u'Technolution',
0x0013E7: u'Minelab Electronics Pty Limited',
0x0013E8: u'Intel Corporate',
0x0013E9: u'VeriWave, Inc.',
0x0013EA: u'Kamstrup A/S',
0x0013EB: u'Sysmaster Corporation',
0x0013EC: u'Sunbay Software AG',
0x0013ED: u'PSIA',
0x0013EE: u'JBX Designs Inc.',
0x0013EF: u'Kingjon Digital Technology Co.,Ltd',
0x0013F0: u'Wavefront Semiconductor',
0x0013F1: u'AMOD Technology Co., Ltd.',
0x0013F2: u'Klas Ltd',
0x0013F3: u'Giga-byte Communications Inc.',
0x0013F4: u'Psitek (Pty) Ltd',
0x0013F5: u'Akimbi Systems',
0x0013F6: u'Cintech',
0x0013F7: u'SMC Networks, Inc.',
0x0013F8: u'Dex Security Solutions',
0x0013F9: u'Cavera Systems',
0x0013FA: u'LifeSize Communications, Inc',
0x0013FB: u'RKC INSTRUMENT INC.',
0x0013FC: u'SiCortex, Inc',
0x0013FD: u'Nokia Danmark A/S',
0x0013FE: u'GRANDTEC ELECTRONIC CORP.',
0x0013FF: u'Dage-MTI of MC, Inc.',
0x001400: u'MINERVA KOREA CO., LTD',
0x001401: u'Rivertree Networks Corp.',
0x001402: u'kk-electronic a/s',
0x001403: u'Renasis, LLC',
0x001404: u'Motorola CHS',
0x001405: u'OpenIB, Inc.',
0x001406: u'Go Networks',
0x001407: u'Biosystems',
0x001408: u'Eka Systems Inc.',
0x001409: u'MAGNETI MARELLI S.E. S.p.A.',
0x00140A: u'WEPIO Co., Ltd.',
0x00140B: u'FIRST INTERNATIONAL COMPUTER, INC.',
0x00140C: u'GKB CCTV CO., LTD.',
0x00140D: u'Nortel',
0x00140E: u'Nortel',
0x00140F: u'Federal State Unitary Enterprise Leningrad R&D Institute of',
0x001410: u'Suzhou Keda Technology CO.,Ltd',
0x001411: u'Deutschmann Automation GmbH & Co. KG',
0x001412: u'S-TEC electronics AG',
0x001413: u'Trebing & Himstedt Prozessautomation GmbH & Co. KG',
0x001414: u'Jumpnode Systems LLC.',
0x001415: u'Intec Automation Inc.',
0x001416: u'Scosche Industries, Inc.',
0x001417: u'RSE Informations Technologie GmbH',
0x001418: u'C4Line',
0x001419: u'SIDSA',
0x00141A: u'DEICY CORPORATION',
0x00141B: u'Cisco Systems',
0x00141C: u'Cisco Systems',
0x00141D: u'Lust Antriebstechnik GmbH',
0x00141E: u'P.A. Semi, Inc.',
0x00141F: u'SunKwang Electronics Co., Ltd',
0x001420: u'G-Links networking company',
0x001421: u'Total Wireless Technologies Pte. Ltd.',
0x001422: u'Dell Inc.',
0x001423: u'J-S Co. NEUROCOM',
0x001424: u'Merry Electrics CO., LTD.',
0x001425: u'Galactic Computing Corp.',
0x001426: u'NL Technology',
0x001427: u'JazzMutant',
0x001428: u'Vocollect, Inc',
0x001429: u'V Center Technologies Co., Ltd.',
0x00142A: u'Elitegroup Computer System Co., Ltd',
0x00142B: u'Edata Technologies Inc.',
0x00142C: u'Koncept International, Inc.',
0x00142D: u'Toradex AG',
0x00142E: u'77 Elektronika Kft.',
0x00142F: u'WildPackets',
0x001430: u'ViPowER, Inc',
0x001431: u'PDL Electronics Ltd',
0x001432: u'Tarallax Wireless, Inc.',
0x001433: u'Empower Technologies(Canada) Inc.',
0x001434: u'Keri Systems, Inc',
0x001435: u'CityCom Corp.',
0x001436: u'Qwerty Elektronik AB',
0x001437: u'GSTeletech Co.,Ltd.',
0x001438: u'Hewlett Packard',
0x001439: u'Blonder Tongue Laboratories, Inc.',
0x00143A: u'RAYTALK INTERNATIONAL SRL',
0x00143B: u'Sensovation AG',
0x00143C: u'Oerlikon Contraves Inc.',
0x00143D: u'Aevoe Inc.',
0x00143E: u'AirLink Communications, Inc.',
0x00143F: u'Hotway Technology Corporation',
0x001440: u'ATOMIC Corporation',
0x001441: u'Innovation Sound Technology Co., LTD.',
0x001442: u'ATTO CORPORATION',
0x001443: u'Consultronics Europe Ltd',
0x001444: u'Grundfos Electronics',
0x001445: u'Telefon-Gradnja d.o.o.',
0x001446: u'KidMapper, Inc.',
0x001447: u'BOAZ Inc.',
0x001448: u'Inventec Multimedia & Telecom Corporation',
0x001449: u'Sichuan Changhong Electric Ltd.',
0x00144A: u'Taiwan Thick-Film Ind. Corp.',
0x00144B: u'Hifn, Inc.',
0x00144C: u'General Meters Corp.',
0x00144D: u'Intelligent Systems',
0x00144E: u'SRISA',
0x00144F: u'Sun Microsystems, Inc.',
0x001450: u'Heim Systems GmbH',
0x001451: u'Apple Computer Inc.',
0x001452: u'CALCULEX,INC.',
0x001453: u'ADVANTECH TECHNOLOGIES CO.,LTD',
0x001454: u'Symwave',
0x001455: u'Coder Electronics Corporation',
0x001456: u'Edge Products',
0x001457: u'T-VIPS AS',
0x001458: u'HS Automatic ApS',
0x001459: u'Moram Co., Ltd.',
0x00145A: u'Elektrobit AG',
0x00145B: u'SeekerNet Inc.',
0x00145C: u'Intronics B.V.',
0x00145D: u'WJ Communications, Inc.',
0x00145E: u'IBM',
0x00145F: u'ADITEC CO. LTD',
0x001460: u'Kyocera Wireless Corp.',
0x001461: u'CORONA CORPORATION',
0x001462: u'Digiwell Technology, inc',
0x001463: u'IDCS N.V.',
0x001464: u'Cryptosoft',
0x001465: u'Novo Nordisk A/S',
0x001466: u'Kleinhenz Elektronik GmbH',
0x001467: u'ArrowSpan Inc.',
0x001468: u'CelPlan International, Inc.',
0x001469: u'Cisco Systems',
0x00146A: u'Cisco Systems',
0x00146B: u'Anagran, Inc.',
0x00146C: u'Netgear Inc.',
0x00146D: u'RF Technologies',
0x00146E: u'H. Stoll GmbH & Co. KG',
0x00146F: u'Kohler Co',
0x001470: u'Prokom Software SA',
0x001471: u'Eastern Asia Technology Limited',
0x001472: u'China Broadband Wireless IP Standard Group',
0x001473: u'Bookham Inc',
0x001474: u'K40 Electronics',
0x001475: u'Wiline Networks, Inc.',
0x001476: u'MultiCom Industries Limited',
0x001477: u'Nertec Inc.',
0x001478: u'ShenZhen TP-LINK Technologies Co., Ltd.',
0x001479: u'NEC Magnus Communications,Ltd.',
0x00147A: u'Eubus GmbH',
0x00147B: u'Iteris, Inc.',
0x00147C: u'3Com Europe Ltd',
0x00147D: u'Aeon Digital International',
0x00147E: u'PanGo Networks, Inc.',
0x00147F: u'Thomson Telecom Belgium',
0x001480: u'Hitachi-LG Data Storage Korea, Inc',
0x001481: u'Multilink Inc',
0x001482: u'GoBackTV, Inc',
0x001483: u'eXS Inc.',
0x001484: u'CERMATE TECHNOLOGIES INC',
0x001485: u'Giga-Byte',
0x001486: u'Echo Digital Audio Corporation',
0x001487: u'American Technology Integrators',
0x001488: u'Akorri Networks',
0x001489: u'B15402100 - JANDEI, S.L.',
0x00148A: u'Elin Ebg Traction Gmbh',
0x00148B: u'Globo Electronic GmbH & Co. KG',
0x00148C: u'Fortress Technologies',
0x00148D: u'Cubic Defense Simulation Systems',
0x00148E: u'Tele Power Inc.',
0x00148F: u'Protronic (Far East) Ltd.',
0x001490: u'ASP Corporation',
0x001491: u'Daniels Electronics Ltd.',
0x001492: u'Liteon, Mobile Media Solution SBU',
0x001493: u'Systimax Solutions',
0x001494: u'ESU AG',
0x001495: u'2Wire, Inc.',
0x001496: u'Phonic Corp.',
0x001497: u'ZHIYUAN Eletronics co.,ltd.',
0x001498: u'Viking Design Technology',
0x001499: u'Helicomm Inc',
0x00149A: u'Motorola Mobile Devices Business',
0x00149B: u'Nokota Communications, LLC',
0x00149C: u'HF Company',
0x00149D: u'Sound ID Inc.',
0x00149E: u'UbONE Co., Ltd',
0x00149F: u'System and Chips, Inc.',
0x0014A0: u'RFID Asset Track, Inc.',
0x0014A1: u'Synchronous Communication Corp',
0x0014A2: u'Core Micro Systems Inc.',
0x0014A3: u'Vitelec BV',
0x0014A4: u'Hon Hai Precision Ind. Co., Ltd.',
0x0014A5: u'Gemtek Technology Co., Ltd.',
0x0014A6: u'Teranetics, Inc.',
0x0014A7: u'Nokia Danmark A/S',
0x0014A8: u'Cisco Systems',
0x0014A9: u'Cisco Systems',
0x0014AA: u'Ashly Audio, Inc.',
0x0014AB: u'Senhai Electronic Technology Co., Ltd.',
0x0014AC: u'Bountiful WiFi',
0x0014AD: u'Gassner Wiege- u. Meßtechnik GmbH',
0x0014AE: u'Wizlogics Co., Ltd.',
0x0014AF: u'Datasym Inc.',
0x0014B0: u'Naeil Community',
0x0014B1: u'Avitec AB',
0x0014B2: u'mCubelogics Corporation',
0x0014B3: u'CoreStar International Corp',
0x0014B4: u'General Dynamics United Kingdom Ltd',
0x0014B5: u'PRIVATE',
0x0014B6: u'Enswer Technology Inc.',
0x0014B7: u'AR Infotek Inc.',
0x0014B8: u'Hill-Rom',
0x0014B9: u'STEPMIND',
0x0014BA: u'Carvers SA de CV',
0x0014BB: u'Open Interface North America',
0x0014BC: u'SYNECTIC TELECOM EXPORTS PVT. LTD.',
0x0014BD: u'incNETWORKS, Inc',
0x0014BE: u'Wink communication technology CO.LTD',
0x0014BF: u'Cisco-Linksys LLC',
0x0014C0: u'Symstream Technology Group Ltd',
0x0014C1: u'U.S. Robotics Corporation',
0x0014C2: u'Hewlett Packard',
0x0014C3: u'Seagate Technology LLC',
0x0014C4: u'Vitelcom Mobile Technology',
0x0014C5: u'Alive Technologies Pty Ltd',
0x0014C6: u'Quixant Ltd',
0x0014C7: u'Nortel',
0x0014C8: u'Contemporary Research Corp',
0x0014C9: u'Silverback Systems, Inc.',
0x0014CA: u'Key Radio Systems Limited',
0x0014CB: u'LifeSync Corporation',
0x0014CC: u'Zetec, Inc.',
0x0014CD: u'DigitalZone Co., Ltd.',
0x0014CE: u'NF CORPORATION',
0x0014CF: u'Nextlink.to A/S',
0x0014D0: u'BTI Photonics',
0x0014D1: u'TRENDware International, Inc.',
0x0014D2: u'KYUKI CORPORATION',
0x0014D3: u'SEPSA',
0x0014D4: u'K Technology Corporation',
0x0014D5: u'Datang Telecom Technology CO. , LCD,Optical Communication Br',
0x0014D6: u'Jeongmin Electronics Co.,Ltd.',
0x0014D7: u'DataStor Technology Inc.',
0x0014D8: u'bio-logic SA',
0x0014D9: u'IP Fabrics, Inc.',
0x0014DA: u'Huntleigh Healthcare',
0x0014DB: u'Elma Trenew Electronic GmbH',
0x0014DC: u'Communication System Design & Manufacturing (CSDM)',
0x0014DD: u'Covergence Inc.',
0x0014DE: u'Sage Instruments Inc.',
0x0014DF: u'HI-P Tech Corporation',
0x0014E0: u'LET\'S Corporation',
0x0014E1: u'Data Display AG',
0x0014E2: u'datacom systems inc.',
0x0014E3: u'mm-lab GmbH',
0x0014E4: u'Integral Technologies',
0x0014E5: u'Alticast',
0x0014E6: u'AIM Infrarotmodule GmbH',
0x0014E7: u'Stolinx,. Inc',
0x0014E8: u'Motorola CHS',
0x0014E9: u'Nortech International',
0x0014EA: u'S Digm Inc. (Safe Paradigm Inc.)',
0x0014EB: u'AwarePoint Corporation',
0x0014EC: u'Acro Telecom',
0x0014ED: u'Airak, Inc.',
0x0014EE: u'Western Digital Technologies, Inc.',
0x0014EF: u'TZero Technologies, Inc.',
0x0014F0: u'Business Security OL AB',
0x0014F1: u'Cisco Systems',
0x0014F2: u'Cisco Systems',
0x0014F3: u'ViXS Systems Inc',
0x0014F4: u'DekTec Digital Video B.V.',
0x0014F5: u'OSI Security Devices',
0x0014F6: u'Juniper Networks, Inc.',
0x0014F7: u'Crevis',
0x0014F8: u'Scientific Atlanta',
0x0014F9: u'Vantage Controls',
0x0014FA: u'AsGa S.A.',
0x0014FB: u'Technical Solutions Inc.',
0x0014FC: u'Extandon, Inc.',
0x0014FD: u'Thecus Technology Corp.',
0x0014FE: u'Artech Electronics',
0x0014FF: u'Precise Automation, LLC',
0x001500: u'Intel Corporate',
0x001501: u'LexBox',
0x001502: u'BETA tech',
0x001503: u'PROFIcomms s.r.o.',
0x001504: u'GAME PLUS CO., LTD.',
0x001505: u'Actiontec Electronics, Inc',
0x001506: u'BeamExpress, Inc',
0x001507: u'Renaissance Learning Inc',
0x001508: u'Global Target Enterprise Inc',
0x001509: u'Plus Technology Co., Ltd',
0x00150A: u'Sonoa Systems, Inc',
0x00150B: u'SAGE INFOTECH LTD.',
0x00150C: u'AVM GmbH',
0x00150D: u'Hoana Medical, Inc.',
0x00150E: u'OPENBRAIN TECHNOLOGIES CO., LTD.',
0x00150F: u'mingjong',
0x001510: u'Techsphere Co., Ltd',
0x001511: u'Data Center Systems',
0x001512: u'Zurich University of Applied Sciences',
0x001513: u'EFS sas',
0x001514: u'Hu Zhou NAVA Networks&Electronics Ltd.',
0x001515: u'Leipold+Co.GmbH',
0x001516: u'URIEL SYSTEMS INC.',
0x001517: u'Intel Corporate',
0x001518: u'Shenzhen 10MOONS Technology Development CO.,Ltd',
0x001519: u'StoreAge Networking Technologies',
0x00151A: u'Hunter Engineering Company',
0x00151B: u'Isilon Systems Inc.',
0x00151C: u'LENECO',
0x00151D: u'M2I CORPORATION',
0x00151E: u'Metaware Co., Ltd.',
0x00151F: u'Multivision Intelligent Surveillance (Hong Kong) Ltd',
0x001520: u'Radiocrafts AS',
0x001521: u'Horoquartz',
0x001522: u'Dea Security',
0x001523: u'Meteor Communications Corporation',
0x001524: u'Numatics, Inc.',
0x001525: u'PTI Integrated Systems, Inc.',
0x001526: u'Remote Technologies Inc',
0x001527: u'Balboa Instruments',
0x001528: u'Beacon Medical Products LLC d.b.a. BeaconMedaes',
0x001529: u'N3 Corporation',
0x00152A: u'Nokia GmbH',
0x00152B: u'Cisco Systems',
0x00152C: u'Cisco Systems',
0x00152D: u'TenX Networks, LLC',
0x00152E: u'PacketHop, Inc.',
0x00152F: u'Motorola CHS',
0x001530: u'Bus-Tech, Inc.',
0x001531: u'KOCOM',
0x001532: u'Consumer Technologies Group, LLC',
0x001533: u'NADAM.CO.,LTD',
0x001534: u'A BELTRÓNICA, Companhia de Comunicações, Lda',
0x001535: u'OTE Spa',
0x001536: u'Powertech co.,Ltd',
0x001537: u'Ventus Networks',
0x001538: u'RFID, Inc.',
0x001539: u'Technodrive SRL',
0x00153A: u'Shenzhen Syscan Technology Co.,Ltd.',
0x00153B: u'EMH Elektrizitätszähler GmbH & CoKG',
0x00153C: u'Kprotech Co., Ltd.',
0x00153D: u'ELIM PRODUCT CO.',
0x00153E: u'Q-Matic Sweden AB',
0x00153F: u'Alcatel Alenia Space Italia',
0x001540: u'Nortel',
0x001541: u'StrataLight Communications, Inc.',
0x001542: u'MICROHARD S.R.L.',
0x001543: u'Aberdeen Test Center',
0x001544: u'coM.s.a.t. AG',
0x001545: u'SEECODE Co., Ltd.',
0x001546: u'ITG Worldwide Sdn Bhd',
0x001547: u'AiZen Solutions Inc.',
0x001548: u'CUBE TECHNOLOGIES',
0x001549: u'Dixtal Biomedica Ind. Com. Ltda',
0x00154A: u'WANSHIH ELECTRONIC CO., LTD',
0x00154B: u'Wonde Proud Technology Co., Ltd',
0x00154C: u'Saunders Electronics',
0x00154D: u'Netronome Systems, Inc.',
0x00154E: u'Hirschmann Automation and Control GmbH',
0x00154F: u'one RF Technology',
0x001550: u'Nits Technology Inc',
0x001551: u'RadioPulse Inc.',
0x001552: u'Wi-Gear Inc.',
0x001553: u'Cytyc Corporation',
0x001554: u'Atalum Wireless S.A.',
0x001555: u'DFM GmbH',
0x001556: u'SAGEM SA',
0x001557: u'Olivetti',
0x001558: u'FOXCONN',
0x001559: u'Securaplane Technologies, Inc.',
0x00155A: u'DAINIPPON PHARMACEUTICAL CO., LTD.',
0x00155B: u'Sampo Corporation',
0x00155C: u'Dresser Wayne',
0x00155D: u'Microsoft Corporation',
0x00155E: u'Morgan Stanley',
0x00155F: u'Ubiwave',
0x001560: u'Hewlett Packard',
0x001561: u'JJPlus Corporation',
0x001562: u'Cisco Systems',
0x001563: u'Cisco Systems',
0x001564: u'BEHRINGER Spezielle Studiotechnik GmbH',
0x001565: u'XIAMEN YEALINK NETWORK TECHNOLOGY CO.,LTD',
0x001566: u'A-First Technology Co., Ltd.',
0x001567: u'RADWIN Inc.',
0x001568: u'Dilithium Networks',
0x001569: u'PECO II, Inc.',
0x00156A: u'DG2L Technologies Pvt. Ltd.',
0x00156B: u'Perfisans Networks Corp.',
0x00156C: u'SANE SYSTEM CO., LTD',
0x00156D: u'Ubiquiti Networks',
0x00156E: u'A. W. Communication Systems Ltd',
0x00156F: u'Xiranet Communications GmbH',
0x001570: u'Symbol Technologies',
0x001571: u'Nolan Systems',
0x001572: u'Red-Lemon',
0x001573: u'NewSoft Technology Corporation',
0x001574: u'Horizon Semiconductors Ltd.',
0x001575: u'Nevis Networks Inc.',
0x001576: u'scil animal care company GmbH',
0x001577: u'Allied Telesyn, Inc.',
0x001578: u'Audio / Video Innovations',
0x001579: u'Lunatone Industrielle Elektronik GmbH',
0x00157A: u'Telefin S.p.A.',
0x00157B: u'Leuze electronic GmbH + Co. KG',
0x00157C: u'Dave Networks, Inc.',
0x00157D: u'POSDATA CO., LTD.',
0x00157E: u'HEYFRA ELECTRONIC gmbH',
0x00157F: u'ChuanG International Holding CO.,LTD.',
0x001580: u'U-WAY CORPORATION',
0x001581: u'MAKUS Inc.',
0x001582: u'TVonics Ltd',
0x001583: u'IVT corporation',
0x001584: u'Schenck Process GmbH',
0x001585: u'Aonvision Technolopy Corp.',
0x001586: u'Xiamen Overseas Chinese Electronic Co., Ltd.',
0x001587: u'Takenaka Seisakusho Co.,Ltd',
0x001588: u'Balda-Thong Fook Solutions Sdn. Bhd.',
0x001589: u'D-MAX Technology Co.,Ltd',
0x00158A: u'SURECOM Technology Corp.',
0x00158B: u'Park Air Systems Ltd',
0x00158C: u'Liab ApS',
0x00158D: u'Jennic Ltd',
0x00158E: u'Plustek.INC',
0x00158F: u'NTT Advanced Technology Corporation',
0x001590: u'Hectronic GmbH',
0x001591: u'RLW Inc.',
0x001592: u'Facom UK Ltd (Melksham)',
0x001593: u'U4EA Technologies Inc.',
0x001594: u'BIXOLON CO.,LTD',
0x001595: u'Quester Tangent Corporation',
0x001596: u'ARRIS International',
0x001597: u'AETA AUDIO SYSTEMS',
0x001598: u'Kolektor group',
0x001599: u'Samsung Electronics Co., LTD',
0x00159A: u'Motorola CHS',
0x00159B: u'Nortel',
0x00159C: u'B-KYUNG SYSTEM Co.,Ltd.',
0x00159D: u'Minicom Advanced Systems ltd',
0x00159E: u'Saitek plc',
0x00159F: u'Terascala, Inc.',
0x0015A0: u'Nokia Danmark A/S',
0x0015A1: u'SINTERS SAS',
0x0015A2: u'ARRIS International',
0x0015A3: u'ARRIS International',
0x0015A4: u'ARRIS International',
0x0015A5: u'DCI Co., Ltd.',
0x0015A6: u'Digital Electronics Products Ltd.',
0x0015A7: u'Robatech AG',
0x0015A8: u'Motorola Mobile Devices',
0x0015A9: u'KWANG WOO I&C CO.,LTD',
0x0015AA: u'Rextechnik International Co.,',
0x0015AB: u'PRO CO SOUND INC',
0x0015AC: u'Capelon AB',
0x0015AD: u'Accedian Networks',
0x0015AE: u'kyung il',
0x0015AF: u'AzureWave Technologies, Inc.',
0x0015B0: u'AUTOTELENET CO.,LTD',
0x0015B1: u'Ambient Corporation',
0x0015B2: u'Advanced Industrial Computer, Inc.',
0x0015B3: u'Caretech AB',
0x0015B4: u'Polymap Wireless LLC',
0x0015B5: u'CI Network Corp.',
0x0015B6: u'ShinMaywa Industries, Ltd.',
0x0015B7: u'Toshiba',
0x0015B8: u'Tahoe',
0x0015B9: u'Samsung Electronics Co., Ltd.',
0x0015BA: u'iba AG',
0x0015BB: u'SMA Technologie AG',
0x0015BC: u'Develco',
0x0015BD: u'Group 4 Technology Ltd',
0x0015BE: u'Iqua Ltd.',
0x0015BF: u'technicob',
0x0015C0: u'DIGITAL TELEMEDIA CO.,LTD.',
0x0015C1: u'SONY Computer Entertainment inc,',
0x0015C2: u'3M Germany',
0x0015C3: u'Ruf Telematik AG',
0x0015C4: u'FLOVEL CO., LTD.',
0x0015C5: u'Dell Inc',
0x0015C6: u'Cisco Systems',
0x0015C7: u'Cisco Systems',
0x0015C8: u'FlexiPanel Ltd',
0x0015C9: u'Gumstix, Inc',
0x0015CA: u'TeraRecon, Inc.',
0x0015CB: u'Surf Communication Solutions Ltd.',
0x0015CC: u'TEPCO UQUEST, LTD.',
0x0015CD: u'Exartech International Corp.',
0x0015CE: u'ARRIS International',
0x0015CF: u'ARRIS International',
0x0015D0: u'ARRIS International',
0x0015D1: u'ARRIS International',
0x0015D2: u'Xantech Corporation',
0x0015D3: u'Pantech&Curitel Communications, Inc.',
0x0015D4: u'Emitor AB',
0x0015D5: u'NICEVT',
0x0015D6: u'OSLiNK Sp. z o.o.',
0x0015D7: u'Reti Corporation',
0x0015D8: u'Interlink Electronics',
0x0015D9: u'PKC Electronics Oy',
0x0015DA: u'IRITEL A.D.',
0x0015DB: u'Canesta Inc.',
0x0015DC: u'KT&C Co., Ltd.',
0x0015DD: u'IP Control Systems Ltd.',
0x0015DE: u'Nokia Danmark A/S',
0x0015DF: u'Clivet S.p.A.',
0x0015E0: u'Ericsson Mobile Platforms',
0x0015E1: u'picoChip Designs Ltd',
0x0015E2: u'Wissenschaftliche Geraetebau Dr. Ing. H. Knauer GmbH',
0x0015E3: u'Dream Technologies Corporation',
0x0015E4: u'Zimmer Elektromedizin',
0x0015E5: u'Cheertek Inc.',
0x0015E6: u'MOBILE TECHNIKA Inc.',
0x0015E7: u'Quantec ProAudio',
0x0015E8: u'Nortel',
0x0015E9: u'D-Link Corporation',
0x0015EA: u'Tellumat (Pty) Ltd',
0x0015EB: u'ZTE CORPORATION',
0x0015EC: u'Boca Devices LLC',
0x0015ED: u'Fulcrum Microsystems, Inc.',
0x0015EE: u'Omnex Control Systems',
0x0015EF: u'NEC TOKIN Corporation',
0x0015F0: u'EGO BV',
0x0015F1: u'KYLINK Communications Corp.',
0x0015F2: u'ASUSTek COMPUTER INC.',
0x0015F3: u'PELTOR AB',
0x0015F4: u'Eventide',
0x0015F5: u'Sustainable Energy Systems',
0x0015F6: u'SCIENCE AND ENGINEERING SERVICES, INC.',
0x0015F7: u'Wintecronics Ltd.',
0x0015F8: u'Kingtronics Industrial Co. Ltd.',
0x0015F9: u'Cisco Systems',
0x0015FA: u'Cisco Systems',
0x0015FB: u'setex schermuly textile computer gmbh',
0x0015FC: u'Startco Engineering Ltd.',
0x0015FD: u'Complete Media Systems',
0x0015FE: u'SCHILLING ROBOTICS LLC',
0x0015FF: u'Novatel Wireless, Inc.',
0x001600: u'CelleBrite Mobile Synchronization',
0x001601: u'Buffalo Inc.',
0x001602: u'CEYON TECHNOLOGY CO.,LTD.',
0x001603: u'PRIVATE',
0x001604: u'Sigpro',
0x001605: u'YORKVILLE SOUND INC.',
0x001606: u'Ideal Industries',
0x001607: u'Curves International Inc.',
0x001608: u'Sequans Communications',
0x001609: u'Unitech electronics co., ltd.',
0x00160A: u'SWEEX Europe BV',
0x00160B: u'TVWorks LLC',
0x00160C: u'LPL DEVELOPMENT S.A. DE C.V',
0x00160D: u'Be Here Corporation',
0x00160E: u'Optica Technologies Inc.',
0x00160F: u'BADGER METER INC',
0x001610: u'Carina Technology',
0x001611: u'Altecon Srl',
0x001612: u'Otsuka Electronics Co., Ltd.',
0x001613: u'LibreStream Technologies Inc.',
0x001614: u'Picosecond Pulse Labs',
0x001615: u'Nittan Company, Limited',
0x001616: u'BROWAN COMMUNICATION INC.',
0x001617: u'MSI',
0x001618: u'HIVION Co., Ltd.',
0x001619: u'La Factoría de Comunicaciones Aplicadas,S.L.',
0x00161A: u'Dametric AB',
0x00161B: u'Micronet Corporation',
0x00161C: u'e:cue',
0x00161D: u'Innovative Wireless Technologies, Inc.',
0x00161E: u'Woojinnet',
0x00161F: u'SUNWAVETEC Co., Ltd.',
0x001620: u'Sony Ericsson Mobile Communications AB',
0x001621: u'Colorado Vnet',
0x001622: u'BBH SYSTEMS GMBH',
0x001623: u'Interval Media',
0x001624: u'PRIVATE',
0x001625: u'Impinj, Inc.',
0x001626: u'Motorola CHS',
0x001627: u'embedded-logic DESIGN AND MORE GmbH',
0x001628: u'Ultra Electronics Manufacturing and Card Systems',
0x001629: u'Nivus GmbH',
0x00162A: u'Antik computers & communications s.r.o.',
0x00162B: u'Togami Electric Mfg.co.,Ltd.',
0x00162C: u'Xanboo',
0x00162D: u'STNet Co., Ltd.',
0x00162E: u'Space Shuttle Hi-Tech Co., Ltd.',
0x00162F: u'Geutebrück GmbH',
0x001630: u'Vativ Technologies',
0x001631: u'Xteam',
0x001632: u'SAMSUNG ELECTRONICS CO., LTD.',
0x001633: u'Oxford Diagnostics Ltd.',
0x001634: u'Mathtech, Inc.',
0x001635: u'Hewlett Packard',
0x001636: u'Quanta Computer Inc.',
0x001637: u'Citel Srl',
0x001638: u'TECOM Co., Ltd.',
0x001639: u'UBIQUAM Co.,Ltd',
0x00163A: u'YVES TECHNOLOGY CO., LTD.',
0x00163B: u'VertexRSI/General Dynamics',
0x00163C: u'Rebox B.V.',
0x00163D: u'Tsinghua Tongfang Legend Silicon Tech. Co., Ltd.',
0x00163E: u'Xensource, Inc.',
0x00163F: u'CReTE SYSTEMS Inc.',
0x001640: u'Asmobile Communication Inc.',
0x001641: u'USI',
0x001642: u'Pangolin',
0x001643: u'Sunhillo Corproation',
0x001644: u'LITE-ON Technology Corp.',
0x001645: u'Power Distribution, Inc.',
0x001646: u'Cisco Systems',
0x001647: u'Cisco Systems',
0x001648: u'SSD Company Limited',
0x001649: u'SetOne GmbH',
0x00164A: u'Vibration Technology Limited',
0x00164B: u'Quorion Data Systems GmbH',
0x00164C: u'PLANET INT Co., Ltd',
0x00164D: u'Alcatel North America IP Division',
0x00164E: u'Nokia Danmark A/S',
0x00164F: u'World Ethnic Broadcastin Inc.',
0x001650: u'EYAL MICROWAVE',
0x001651: u'PRIVATE',
0x001652: u'Hoatech Technologies, Inc.',
0x001653: u'LEGO System A/S IE Electronics Division',
0x001654: u'Flex-P Industries Sdn. Bhd.',
0x001655: u'FUHO TECHNOLOGY Co., LTD',
0x001656: u'Nintendo Co., Ltd.',
0x001657: u'Aegate Ltd',
0x001658: u'Fusiontech Technologies Inc.',
0x001659: u'Z.M.P. RADWAG',
0x00165A: u'Harman Specialty Group',
0x00165B: u'Grip Audio',
0x00165C: u'Trackflow Ltd',
0x00165D: u'AirDefense, Inc.',
0x00165E: u'Precision I/O',
0x00165F: u'Fairmount Automation',
0x001660: u'Nortel',
0x001661: u'Novatium Solutions (P) Ltd',
0x001662: u'Liyuh Technology Ltd.',
0x001663: u'KBT Mobile',
0x001664: u'Prod-El SpA',
0x001665: u'Cellon France',
0x001666: u'Quantier Communication Inc.',
0x001667: u'A-TEC Subsystem INC.',
0x001668: u'Eishin Electronics',
0x001669: u'MRV Communication (Networks) LTD',
0x00166A: u'TPS',
0x00166B: u'Samsung Electronics',
0x00166C: u'Samsung Electonics Digital Video System Division',
0x00166D: u'Yulong Computer Telecommunication Scientific(shenzhen)Co.,Lt',
0x00166E: u'Arbitron Inc.',
0x00166F: u'Intel Corporation',
0x001670: u'SKNET Corporation',
0x001671: u'Symphox Information Co.',
0x001672: u'Zenway enterprise ltd',
0x001673: u'PRIVATE',
0x001674: u'EuroCB (Phils.), Inc.',
0x001675: u'Motorola MDb',
0x001676: u'Intel Corporation',
0x001677: u'Bihl+Wiedemann GmbH',
0x001678: u'SHENZHEN BAOAN GAOKE ELECTRONICS CO., LTD',
0x001679: u'eOn Communications',
0x00167A: u'Skyworth Overseas Dvelopment Ltd.',
0x00167B: u'Haver&Boecker',
0x00167C: u'iRex Technologies BV',
0x00167D: u'Sky-Line',
0x00167E: u'DIBOSS.CO.,LTD',
0x00167F: u'Bluebird Soft Inc.',
0x001680: u'Bally Gaming + Systems',
0x001681: u'Vector Informatik GmbH',
0x001682: u'Pro Dex, Inc',
0x001683: u'WEBIO International Co.,.Ltd.',
0x001684: u'Donjin Co.,Ltd.',
0x001685: u'FRWD Technologies Ltd.',
0x001686: u'Karl Storz Imaging',
0x001687: u'Chubb CSC-Vendor AP',
0x001688: u'ServerEngines LLC',
0x001689: u'Pilkor Electronics Co., Ltd',
0x00168A: u'id-Confirm Inc',
0x00168B: u'Paralan Corporation',
0x00168C: u'DSL Partner AS',
0x00168D: u'KORWIN CO., Ltd.',
0x00168E: u'Vimicro corporation',
0x00168F: u'GN Netcom as',
0x001690: u'J-TEK INCORPORATION',
0x001691: u'Moser-Baer AG',
0x001692: u'Scientific-Atlanta, Inc.',
0x001693: u'PowerLink Technology Inc.',
0x001694: u'Sennheiser Communications A/S',
0x001695: u'AVC Technology Limited',
0x001696: u'QDI Technology (H.K.) Limited',
0x001697: u'NEC Corporation',
0x001698: u'T&A Mobile Phones SAS',
0x001699: u'PRIVATE',
0x00169A: u'Quadrics Ltd',
0x00169B: u'Alstom Transport',
0x00169C: u'Cisco Systems',
0x00169D: u'Cisco Systems',
0x00169E: u'TV One Ltd',
0x00169F: u'Vimtron Electronics Co., Ltd.',
0x0016A0: u'Auto-Maskin',
0x0016A1: u'3Leaf Networks',
0x0016A2: u'CentraLite Systems, Inc.',
0x0016A3: u'TEAM ARTECHE, S.A.',
0x0016A4: u'Ezurio Ltd',
0x0016A5: u'Tandberg Storage ASA',
0x0016A6: u'Dovado FZ-LLC',
0x0016A7: u'AWETA G&P',
0x0016A8: u'CWT CO., LTD.',
0x0016A9: u'2EI',
0x0016AA: u'Kei Communication Technology Inc.',
0x0016AB: u'PBI-Dansensor A/S',
0x0016AC: u'Toho Technology Corp.',
0x0016AD: u'BT-Links Company Limited',
0x0016AE: u'INVENTEL',
0x0016AF: u'Shenzhen Union Networks Equipment Co.,Ltd.',
0x0016B0: u'VK Corporation',
0x0016B1: u'KBS',
0x0016B2: u'DriveCam Inc',
0x0016B3: u'Photonicbridges (China) Co., Ltd.',
0x0016B4: u'PRIVATE',
0x0016B5: u'Motorola CHS',
0x0016B6: u'Cisco-Linksys',
0x0016B7: u'Seoul Commtech',
0x0016B8: u'Sony Ericsson Mobile Communications',
0x0016B9: u'ProCurve Networking',
0x0016BA: u'WEATHERNEWS INC.',
0x0016BB: u'Law-Chain Computer Technology Co Ltd',
0x0016BC: u'Nokia Danmark A/S',
0x0016BD: u'ATI Industrial Automation',
0x0016BE: u'INFRANET, Inc.',
0x0016BF: u'PaloDEx Group Oy',
0x0016C0: u'Semtech Corporation',
0x0016C1: u'Eleksen Ltd',
0x0016C2: u'Avtec Systems Inc',
0x0016C3: u'BA Systems Inc',
0x0016C4: u'SiRF Technology, Inc.',
0x0016C5: u'Shenzhen Xing Feng Industry Co.,Ltd',
0x0016C6: u'North Atlantic Industries',
0x0016C7: u'Cisco Systems',
0x0016C8: u'Cisco Systems',
0x0016C9: u'NAT Seattle, Inc.',
0x0016CA: u'Nortel',
0x0016CB: u'Apple Computer',
0x0016CC: u'Xcute Mobile Corp.',
0x0016CD: u'HIJI HIGH-TECH CO., LTD.',
0x0016CE: u'Hon Hai Precision Ind. Co., Ltd.',
0x0016CF: u'Hon Hai Precision Ind. Co., Ltd.',
0x0016D0: u'ATech elektronika d.o.o.',
0x0016D1: u'ZAT a.s.',
0x0016D2: u'Caspian',
0x0016D3: u'Wistron Corporation',
0x0016D4: u'Compal Communications, Inc.',
0x0016D5: u'Synccom Co., Ltd',
0x0016D6: u'TDA Tech Pty Ltd',
0x0016D7: u'Sunways AG',
0x0016D8: u'Senea AB',
0x0016D9: u'NINGBO BIRD CO.,LTD.',
0x0016DA: u'Futronic Technology Co. Ltd.',
0x0016DB: u'Samsung Electronics Co., Ltd.',
0x0016DC: u'ARCHOS',
0x0016DD: u'Gigabeam Corporation',
0x0016DE: u'FAST Inc',
0x0016DF: u'Lundinova AB',
0x0016E0: u'3Com Europe Ltd',
0x0016E1: u'SiliconStor, Inc.',
0x0016E2: u'American Fibertek, Inc.',
0x0016E3: u'ASKEY COMPUTER CORP.',
0x0016E4: u'VANGUARD SECURITY ENGINEERING CORP.',
0x0016E5: u'FORDLEY DEVELOPMENT LIMITED',
0x0016E6: u'GIGA-BYTE TECHNOLOGY CO.,LTD.',
0x0016E7: u'Dynamix Promotions Limited',
0x0016E8: u'Sigma Designs, Inc.',
0x0016E9: u'Tiba Medical Inc',
0x0016EA: u'Intel Corporation',
0x0016EB: u'Intel Corporation',
0x0016EC: u'Elitegroup Computer Systems Co., Ltd.',
0x0016ED: u'Integrian, Inc.',
0x0016EE: u'RoyalDigital Inc.',
0x0016EF: u'Koko Fitness, Inc.',
0x0016F0: u'Zermatt Systems, Inc',
0x0016F1: u'OmniSense, LLC',
0x0016F2: u'Dmobile System Co., Ltd.',
0x0016F3: u'CAST Information Co., Ltd',
0x0016F4: u'Eidicom Co., Ltd.',
0x0016F5: u'Dalian Golden Hualu Digital Technology Co.,Ltd',
0x0016F6: u'Video Products Group',
0x0016F7: u'L-3 Communications, Electrodynamics, Inc.',
0x0016F8: u'AVIQTECH TECHNOLOGY CO., LTD.',
0x0016F9: u'CETRTA POT, d.o.o., Kranj',
0x0016FA: u'ECI Telecom Ltd.',
0x0016FB: u'SHENZHEN MTC CO.,LTD.',
0x0016FC: u'TOHKEN CO.,LTD.',
0x0016FD: u'Jaty Electronics',
0x0016FE: u'Alps Electric Co., Ltd',
0x0016FF: u'Wamin Optocomm Mfg Corp',
0x001700: u'Motorola MDb',
0x001701: u'KDE, Inc.',
0x001702: u'Osung Midicom Co., Ltd',
0x001703: u'MOSDAN Internation Co.,Ltd',
0x001704: u'Shinco Electronics Group Co.,Ltd',
0x001705: u'Methode Electronics',
0x001706: u'Techfaith Wireless Communication Technology Limited.',
0x001707: u'InGrid, Inc',
0x001708: u'Hewlett Packard',
0x001709: u'Exalt Communications',
0x00170A: u'INEW DIGITAL COMPANY',
0x00170B: u'Contela, Inc.',
0x00170C: u'Benefon Oyj',
0x00170D: u'Dust Networks Inc.',
0x00170E: u'Cisco Systems',
0x00170F: u'Cisco Systems',
0x001710: u'Casa Systems Inc.',
0x001711: u'GE Healthcare Bio-Sciences AB',
0x001712: u'ISCO International',
0x001713: u'Tiger NetCom',
0x001714: u'BR Controls Nederland bv',
0x001715: u'Qstik',
0x001716: u'Qno Technology Inc.',
0x001717: u'Leica Geosystems AG',
0x001718: u'Vansco Electronics Oy',
0x001719: u'AudioCodes USA, Inc',
0x00171A: u'Winegard Company',
0x00171B: u'Innovation Lab Corp.',
0x00171C: u'NT MicroSystems, Inc.',
0x00171D: u'DIGIT',
0x00171E: u'Theo Benning GmbH & Co. KG',
0x00171F: u'IMV Corporation',
0x001720: u'Image Sensing Systems, Inc.',
0x001721: u'FITRE S.p.A.',
0x001722: u'Hanazeder Electronic GmbH',
0x001723: u'Summit Data Communications',
0x001724: u'Studer Professional Audio GmbH',
0x001725: u'Liquid Computing',
0x001726: u'm2c Electronic Technology Ltd.',
0x001727: u'Thermo Ramsey Italia s.r.l.',
0x001728: u'Selex Communications',
0x001729: u'Ubicod Co.LTD',
0x00172A: u'Proware Technology Corp.',
0x00172B: u'Global Technologies Inc.',
0x00172C: u'TAEJIN INFOTECH',
0x00172D: u'Axcen Photonics Corporation',
0x00172E: u'FXC Inc.',
0x00172F: u'NeuLion Incorporated',
0x001730: u'Automation Electronics',
0x001731: u'ASUSTek COMPUTER INC.',
0x001732: u'Science-Technical Center "RISSA"',
0x001733: u'neuf cegetel',
0x001734: u'LGC Wireless Inc.',
0x001735: u'PRIVATE',
0x001736: u'iiTron Inc.',
0x001737: u'Industrie Dial Face S.p.A.',
0x001738: u'XIV',
0x001739: u'Bright Headphone Electronics Company',
0x00173A: u'Edge Integration Systems Inc.',
0x00173B: u'Arched Rock Corporation',
0x00173C: u'Extreme Engineering Solutions',
0x00173D: u'Neology',
0x00173E: u'LeucotronEquipamentos Ltda.',
0x00173F: u'Belkin Corporation',
0x001740: u'Technologies Labtronix',
0x001741: u'DEFIDEV',
0x001742: u'FUJITSU LIMITED',
0x001743: u'Deck Srl',
0x001744: u'Araneo Ltd.',
0x001745: u'INNOTZ CO., Ltd',
0x001746: u'Freedom9 Inc.',
0x001747: u'Trimble',
0x001748: u'Neokoros Brasil Ltda',
0x001749: u'HYUNDAE YONG-O-SA CO.,LTD',
0x00174A: u'SOCOMEC',
0x00174B: u'Nokia Danmark A/S',
0x00174C: u'Millipore',
0x00174D: u'DYNAMIC NETWORK FACTORY, INC.',
0x00174E: u'Parama-tech Co.,Ltd.',
0x00174F: u'iCatch Inc.',
0x001750: u'GSI Group, MicroE Systems',
0x001751: u'Online Corporation',
0x001752: u'DAGS, Inc',
0x001753: u'nFore Technology Inc.',
0x001754: u'Arkino Corporation., Ltd',
0x001755: u'GE Security',
0x001756: u'Vinci Labs Oy',
0x001757: u'RIX TECHNOLOGY LIMITED',
0x001758: u'ThruVision Ltd',
0x001759: u'Cisco Systems',
0x00175A: u'Cisco Systems',
0x00175B: u'ACS Solutions Switzerland Ltd.',
0x00175C: u'SHARP CORPORATION',
0x00175D: u'Dongseo system.',
0x00175E: u'Anta Systems, Inc.',
0x00175F: u'XENOLINK Communications Co., Ltd.',
0x001760: u'Naito Densei Machida MFG.CO.,LTD',
0x001761: u'ZKSoftware Inc.',
0x001762: u'Solar Technology, Inc.',
0x001763: u'Essentia S.p.A.',
0x001764: u'ATMedia GmbH',
0x001765: u'Nortel',
0x001766: u'Accense Technology, Inc.',
0x001767: u'Earforce AS',
0x001768: u'Zinwave Ltd',
0x001769: u'Cymphonix Corp',
0x00176A: u'Avago Technologies',
0x00176B: u'Kiyon, Inc.',
0x00176C: u'Pivot3, Inc.',
0x00176D: u'CORE CORPORATION',
0x00176E: u'DUCATI SISTEMI',
0x00176F: u'PAX Computer Technology(Shenzhen) Ltd.',
0x001770: u'Arti Industrial Electronics Ltd.',
0x001771: u'APD Communications Ltd',
0x001772: u'ASTRO Strobel Kommunikationssysteme GmbH',
0x001773: u'Laketune Technologies Co. Ltd',
0x001774: u'Elesta GmbH',
0x001775: u'TTE Germany GmbH',
0x001776: u'Meso Scale Diagnostics, LLC',
0x001777: u'Obsidian Research Corporation',
0x001778: u'Central Music Co.',
0x001779: u'QuickTel',
0x00177A: u'ASSA ABLOY AB',
0x00177B: u'Azalea Networks inc',
0x00177C: u'D-Link India Ltd',
0x00177D: u'IDT International Limited',
0x00177E: u'Meshcom Technologies Inc.',
0x00177F: u'Worldsmart Retech',
0x001780: u'Applera Holding B.V. Singapore Operations',
0x001781: u'Greystone Data System, Inc.',
0x001782: u'LoBenn Inc.',
0x001783: u'Texas Instruments',
0x001784: u'Motorola Mobile Devices',
0x001785: u'Sparr Electronics Ltd',
0x001786: u'wisembed',
0x001787: u'Brother, Brother & Sons ApS',
0x001788: u'Philips Lighting BV',
0x001789: u'Zenitron Corporation',
0x00178A: u'DARTS TECHNOLOGIES CORP.',
0x00178B: u'Teledyne Technologies Incorporated',
0x00178C: u'Independent Witness, Inc',
0x00178D: u'Checkpoint Systems, Inc.',
0x00178E: u'Gunnebo Cash Automation AB',
0x00178F: u'NINGBO YIDONG ELECTRONIC CO.,LTD.',
0x001790: u'HYUNDAI DIGITECH Co, Ltd.',
0x001791: u'LinTech GmbH',
0x001792: u'Falcom Wireless Comunications Gmbh',
0x001793: u'Tigi Corporation',
0x001794: u'Cisco Systems',
0x001795: u'Cisco Systems',
0x001796: u'Rittmeyer AG',
0x001797: u'Telsy Elettronica S.p.A.',
0x001798: u'Azonic Technology Co., LTD',
0x001799: u'SmarTire Systems Inc.',
0x00179A: u'D-Link Corporation',
0x00179B: u'Chant Sincere CO., LTD.',
0x00179C: u'DEPRAG SCHULZ GMBH u. CO.',
0x00179D: u'Kelman Limited',
0x00179E: u'Sirit Inc',
0x00179F: u'Apricorn',
0x0017A0: u'RoboTech srl',
0x0017A1: u'3soft inc.',
0x0017A2: u'Camrivox Ltd.',
0x0017A3: u'MIX s.r.l.',
0x0017A4: u'Global Data Services',
0x0017A5: u'TrendChip Technologies Corp.',
0x0017A6: u'YOSIN ELECTRONICS CO., LTD.',
0x0017A7: u'Mobile Computing Promotion Consortium',
0x0017A8: u'EDM Corporation',
0x0017A9: u'Sentivision',
0x0017AA: u'elab-experience inc.',
0x0017AB: u'Nintendo Co., Ltd.',
0x0017AC: u'O\'Neil Product Development Inc.',
0x0017AD: u'AceNet Corporation',
0x0017AE: u'GAI-Tronics',
0x0017AF: u'Enermet',
0x0017B0: u'Nokia Danmark A/S',
0x0017B1: u'ACIST Medical Systems, Inc.',
0x0017B2: u'SK Telesys',
0x0017B3: u'Aftek Infosys Limited',
0x0017B4: u'Remote Security Systems, LLC',
0x0017B5: u'Peerless Systems Corporation',
0x0017B6: u'Aquantia',
0x0017B7: u'Tonze Technology Co.',
0x0017B8: u'NOVATRON CO., LTD.',
0x0017B9: u'Gambro Lundia AB',
0x0017BA: u'SEDO CO., LTD.',
0x0017BB: u'Syrinx Industrial Electronics',
0x0017BC: u'Touchtunes Music Corporation',
0x0017BD: u'Tibetsystem',
0x0017BE: u'Tratec Telecom B.V.',
0x0017BF: u'Coherent Research Limited',
0x0017C0: u'PureTech Systems, Inc.',
0x0017C1: u'CM Precision Technology LTD.',
0x0017C2: u'Pirelli Broadband Solutions',
0x0017C3: u'KTF Technologies Inc.',
0x0017C4: u'Quanta Microsystems, INC.',
0x0017C5: u'SonicWALL',
0x0017C6: u'Labcal Technologies',
0x0017C7: u'MARA Systems Consulting AB',
0x0017C8: u'Kyocera Mita Corporation',
0x0017C9: u'Samsung Electronics Co., Ltd.',
0x0017CA: u'BenQ Corporation',
0x0017CB: u'Juniper Networks',
0x0017CC: u'Alcatel USA Sourcing LP',
0x0017CD: u'CEC Wireless R&D Ltd.',
0x0017CE: u'MB International Telecom Labs srl',
0x0017CF: u'iMCA-GmbH',
0x0017D0: u'Opticom Communications, LLC',
0x0017D1: u'Nortel',
0x0017D2: u'THINLINX PTY LTD',
0x0017D3: u'Etymotic Research, Inc.',
0x0017D4: u'Monsoon Multimedia, Inc',
0x0017D5: u'Samsung Electronics Co., Ltd.',
0x0017D6: u'Bluechips Microhouse Co.,Ltd.',
0x0017D7: u'Input/Output Inc.',
0x0017D8: u'Magnum Semiconductor, Inc.',
0x0017D9: u'AAI Corporation',
0x0017DA: u'Spans Logic',
0x0017DB: u'PRIVATE',
0x0017DC: u'DAEMYUNG ZERO1',
0x0017DD: u'Clipsal Australia',
0x0017DE: u'Advantage Six Ltd',
0x0017DF: u'Cisco Systems',
0x0017E0: u'Cisco Systems',
0x0017E1: u'DACOS Technologies Co., Ltd.',
0x0017E2: u'Motorola Mobile Devices',
0x0017E3: u'Texas Instruments',
0x0017E4: u'Texas Instruments',
0x0017E5: u'Texas Instruments',
0x0017E6: u'Texas Instruments',
0x0017E7: u'Texas Instruments',
0x0017E8: u'Texas Instruments',
0x0017E9: u'Texas Instruments',
0x0017EA: u'Texas Instruments',
0x0017EB: u'Texas Instruments',
0x0017EC: u'Texas Instruments',
0x0017ED: u'WooJooIT Ltd.',
0x0017EE: u'Motorola CHS',
0x0017EF: u'Blade Network Technologies, Inc.',
0x0017F0: u'SZCOM Broadband Network Technology Co.,Ltd',
0x0017F1: u'Renu Electronics Pvt Ltd',
0x0017F2: u'Apple Computer',
0x0017F3: u'M/A-COM Wireless Systems',
0x0017F4: u'ZERON ALLIANCE',
0x0017F5: u'NEOPTEK',
0x0017F6: u'Pyramid Meriden Inc.',
0x0017F7: u'CEM Solutions Pvt Ltd',
0x0017F8: u'Motech Industries Inc.',
0x0017F9: u'Forcom Sp. z o.o.',
0x0017FA: u'Microsoft Corporation',
0x0017FB: u'FA',
0x0017FC: u'Suprema Inc.',
0x0017FD: u'Amulet Hotkey',
0x0017FE: u'TALOS SYSTEM INC.',
0x0017FF: u'PLAYLINE Co.,Ltd.',
0x001800: u'UNIGRAND LTD',
0x001801: u'Actiontec Electronics, Inc',
0x001802: u'Alpha Networks Inc.',
0x001803: u'ArcSoft Shanghai Co. LTD',
0x001804: u'E-TEK DIGITAL TECHNOLOGY LIMITED',
0x001805: u'Beijing InHand Networking',
0x001806: u'Hokkei Industries Co., Ltd.',
0x001807: u'Fanstel Corp.',
0x001808: u'SightLogix, Inc.',
0x001809: u'CRESYN',
0x00180A: u'Meraki Networks, Inc.',
0x00180B: u'Brilliant Telecommunications',
0x00180C: u'Optelian Access Networks Corporation',
0x00180D: u'Terabytes Server Storage Tech Corp',
0x00180E: u'Avega Systems',
0x00180F: u'Nokia Danmark A/S',
0x001810: u'IPTrade S.A.',
0x001811: u'Neuros Technology International, LLC.',
0x001812: u'Beijing Xinwei Telecom Technology Co., Ltd.',
0x001813: u'Sony Ericsson Mobile Communications',
0x001814: u'Mitutoyo Corporation',
0x001815: u'GZ Technologies, Inc.',
0x001816: u'Ubixon Co., Ltd.',
0x001817: u'D. E. Shaw Research, LLC',
0x001818: u'Cisco Systems',
0x001819: u'Cisco Systems',
0x00181A: u'AVerMedia Technologies Inc.',
0x00181B: u'TaiJin Metal Co., Ltd.',
0x00181C: u'Exterity Limited',
0x00181D: u'ASIA ELECTRONICS CO.,LTD',
0x00181E: u'GDX Technologies Ltd.',
0x00181F: u'Palmmicro Communications',
0x001820: u'w5networks',
0x001821: u'SINDORICOH',
0x001822: u'CEC TELECOM CO.,LTD.',
0x001823: u'Delta Electronics, Inc.',
0x001824: u'Kimaldi Electronics, S.L.',
0x001825: u'Wavion LTD',
0x001826: u'Cale Access AB',
0x001827: u'NEC PHILIPS UNIFIED SOLUTIONS NEDERLAND BV',
0x001828: u'e2v technologies (UK) ltd.',
0x001829: u'Gatsometer',
0x00182A: u'Taiwan Video & Monitor',
0x00182B: u'Softier',
0x00182C: u'Ascend Networks, Inc.',
0x00182D: u'Artec Group OÜ',
0x00182E: u'Wireless Ventures USA',
0x00182F: u'Texas Instruments',
0x001830: u'Texas Instruments',
0x001831: u'Texas Instruments',
0x001832: u'Texas Instruments',
0x001833: u'Texas Instruments',
0x001834: u'Texas Instruments',
0x001835: u'ITC',
0x001836: u'Reliance Electric Limited',
0x001837: u'Universal ABIT Co., Ltd.',
0x001838: u'PanAccess Communications,Inc.',
0x001839: u'Cisco-Linksys LLC',
0x00183A: u'Westell Technologies',
0x00183B: u'CENITS Co., Ltd.',
0x00183C: u'Encore Software Limited',
0x00183D: u'Vertex Link Corporation',
0x00183E: u'Digilent, Inc',
0x00183F: u'2Wire, Inc',
0x001840: u'3 Phoenix, Inc.',
0x001841: u'High Tech Computer Corp',
0x001842: u'Nokia Danmark A/S',
0x001843: u'Dawevision Ltd',
0x001844: u'Heads Up Technologies, Inc.',
0x001845: u'NPL Pulsar Ltd.',
0x001846: u'Crypto S.A.',
0x001847: u'AceNet Technology Inc.',
0x001848: u'Vecima Networks Inc.',
0x001849: u'Pigeon Point Systems',
0x00184A: u'Catcher, Inc.',
0x00184B: u'Las Vegas Gaming, Inc.',
0x00184C: u'Bogen Communications',
0x00184D: u'Netgear Inc.',
0x00184E: u'Lianhe Technologies, Inc.',
0x00184F: u'8 Ways Technology Corp.',
0x001850: u'Secfone Kft',
0x001851: u'SWsoft',
0x001852: u'StorLink Semiconductors, Inc.',
0x001853: u'Atera Networks LTD.',
0x001854: u'Argard Co., Ltd',
0x001855: u'Aeromaritime Systembau GmbH',
0x001856: u'EyeFi, Inc',
0x001857: u'Unilever R&D',
0x001858: u'TagMaster AB',
0x001859: u'Strawberry Linux Co.,Ltd.',
0x00185A: u'uControl, Inc.',
0x00185B: u'Network Chemistry, Inc',
0x00185C: u'EDS Lab Pte Ltd',
0x00185D: u'TAIGUEN TECHNOLOGY (SHEN-ZHEN) CO., LTD.',
0x00185E: u'Nexterm Inc.',
0x00185F: u'TAC Inc.',
0x001860: u'SIM Technology Group Shanghai Simcom Ltd.,',
0x001861: u'Ooma, Inc.',
0x001862: u'Seagate Technology',
0x001863: u'Veritech Electronics Limited',
0x001864: u'Cybectec Inc.',
0x001865: u'Bayer Diagnostics Sudbury Ltd',
0x001866: u'Leutron Vision',
0x001867: u'Evolution Robotics Retail',
0x001868: u'Scientific Atlanta, A Cisco Company',
0x001869: u'KINGJIM',
0x00186A: u'Global Link Digital Technology Co,.LTD',
0x00186B: u'Sambu Communics CO., LTD.',
0x00186C: u'Neonode AB',
0x00186D: u'Zhenjiang Sapphire Electronic Industry CO.',
0x00186E: u'3COM Europe Ltd',
0x00186F: u'Setha Industria Eletronica LTDA',
0x001870: u'E28 Shanghai Limited',
0x001871: u'Global Data Services',
0x001872: u'Expertise Engineering',
0x001873: u'Cisco Systems',
0x001874: u'Cisco Systems',
0x001875: u'AnaCise Testnology Pte Ltd',
0x001876: u'WowWee Ltd.',
0x001877: u'Amplex A/S',
0x001878: u'Mackware GmbH',
0x001879: u'dSys',
0x00187A: u'Wiremold',
0x00187B: u'4NSYS Co. Ltd.',
0x00187C: u'INTERCROSS, LLC',
0x00187D: u'Armorlink shanghai Co. Ltd',
0x00187E: u'RGB Spectrum',
0x00187F: u'ZODIANET',
0x001880: u'Mobilygen',
0x001881: u'Buyang Electronics Industrial Co., Ltd',
0x001882: u'Huawei Technologies Co., Ltd.',
0x001883: u'FORMOSA21 INC.',
0x001884: u'FON',
0x001885: u'Avigilon Corporation',
0x001886: u'EL-TECH, INC.',
0x001887: u'Metasystem SpA',
0x001888: u'GOTIVE a.s.',
0x001889: u'WinNet Solutions Limited',
0x00188A: u'Infinova LLC',
0x00188B: u'Dell',
0x00188C: u'Mobile Action Technology Inc.',
0x00188D: u'Nokia Danmark A/S',
0x00188E: u'Ekahau, Inc.',
0x00188F: u'Montgomery Technology, Inc.',
0x001890: u'RadioCOM, s.r.o.',
0x001891: u'Zhongshan General K-mate Electronics Co., Ltd',
0x001892: u'ads-tec GmbH',
0x001893: u'SHENZHEN PHOTON BROADBAND TECHNOLOGY CO.,LTD',
0x001894: u'zimocom',
0x001895: u'Hansun Technologies Inc.',
0x001896: u'Great Well Electronic LTD',
0x001897: u'JESS-LINK PRODUCTS Co., LTD',
0x001898: u'KINGSTATE ELECTRONICS CORPORATION',
0x001899: u'ShenZhen jieshun Science&Technology Industry CO,LTD.',
0x00189A: u'HANA Micron Inc.',
0x00189B: u'Thomson Inc.',
0x00189C: u'Weldex Corporation',
0x00189D: u'Navcast Inc.',
0x00189E: u'OMNIKEY GmbH.',
0x00189F: u'Lenntek Corporation',
0x0018A0: u'Cierma Ascenseurs',
0x0018A1: u'Tiqit Computers, Inc.',
0x0018A2: u'XIP Technology AB',
0x0018A3: u'ZIPPY TECHNOLOGY CORP.',
0x0018A4: u'Motorola Mobile Devices',
0x0018A5: u'ADigit Technologies Corp.',
0x0018A6: u'Persistent Systems, LLC',
0x0018A7: u'Yoggie Security Systems LTD.',
0x0018A8: u'AnNeal Technology Inc.',
0x0018A9: u'Ethernet Direct Corporation',
0x0018AA: u'PRIVATE',
0x0018AB: u'BEIJING LHWT MICROELECTRONICS INC.',
0x0018AC: u'Shanghai Jiao Da HISYS Technology Co. Ltd.',
0x0018AD: u'NIDEC SANKYO CORPORATION',
0x0018AE: u'Tongwei Video Technology CO.,LTD',
0x0018AF: u'Samsung Electronics Co., Ltd.',
0x0018B0: u'Nortel',
0x0018B1: u'Blade Network Technologies',
0x0018B2: u'ADEUNIS RF',
0x0018B3: u'TEC WizHome Co., Ltd.',
0x0018B4: u'Dawon Media Inc.',
0x0018B5: u'Magna Carta',
0x0018B6: u'S3C, Inc.',
0x0018B7: u'D3 LED, LLC',
0x0018B8: u'New Voice International AG',
0x0018B9: u'Cisco Systems',
0x0018BA: u'Cisco Systems',
0x0018BB: u'Eliwell Controls srl',
0x0018BC: u'ZAO NVP Bolid',
0x0018BD: u'SHENZHEN DVBWORLD TECHNOLOGY CO., LTD.',
0x0018BE: u'ANSA Corporation',
0x0018BF: u'Essence Technology Solution, Inc.',
0x0018C0: u'Motorola CHS',
0x0018C1: u'Almitec Informática e Comércio Ltda.',
0x0018C2: u'Firetide, Inc',
0x0018C3: u'C&S Microwave',
0x0018C4: u'Raba Technologies LLC',
0x0018C5: u'Nokia Danmark A/S',
0x0018C6: u'OPW Fuel Management Systems',
0x0018C7: u'Real Time Automation',
0x0018C8: u'ISONAS Inc.',
0x0018C9: u'EOps Technology Limited',
0x0018CA: u'Viprinet GmbH',
0x0018CB: u'Tecobest Technology Limited',
0x0018CC: u'AXIOHM SAS',
0x0018CD: u'Erae Electronics Industry Co., Ltd',
0x0018CE: u'Dreamtech Co., Ltd',
0x0018CF: u'Baldor Electric Company',
0x0018D0: u'@ROAD Inc',
0x0018D1: u'Siemens Home & Office Comm. Devices',
0x0018D2: u'High-Gain Antennas LLC',
0x0018D3: u'TEAMCAST',
0x0018D4: u'Unified Display Interface SIG',
0x0018D5: u'REIGNCOM',
0x0018D6: u'Swirlnet A/S',
0x0018D7: u'Javad Navigation Systems Inc.',
0x0018D8: u'ARCH METER Corporation',
0x0018D9: u'Santosha Internatonal, Inc',
0x0018DA: u'AMBER wireless GmbH',
0x0018DB: u'EPL Technology Ltd',
0x0018DC: u'Prostar Co., Ltd.',
0x0018DD: u'Silicondust Engineering Ltd',
0x0018DE: u'Intel Corporation',
0x0018DF: u'The Morey Corporation',
0x0018E0: u'ANAVEO',
0x0018E1: u'Verkerk Service Systemen',
0x0018E2: u'Topdata Sistemas de Automacao Ltda',
0x0018E3: u'Visualgate Systems, Inc.',
0x0018E4: u'YIGUANG',
0x0018E5: u'Adhoco AG',
0x0018E6: u'Computer Hardware Design SIA',
0x0018E7: u'Cameo Communications, INC.',
0x0018E8: u'Hacetron Corporation',
0x0018E9: u'Numata Corporation',
0x0018EA: u'Alltec GmbH',
0x0018EB: u'BroVis Wireless Networks',
0x0018EC: u'Welding Technology Corporation',
0x0018ED: u'ACCUTECH INTERNATIONAL CO., LTD.',
0x0018EE: u'Videology Imaging Solutions, Inc.',
0x0018EF: u'Escape Communications, Inc.',
0x0018F0: u'JOYTOTO Co., Ltd.',
0x0018F1: u'Chunichi Denshi Co.,LTD.',
0x0018F2: u'Beijing Tianyu Communication Equipment Co., Ltd',
0x0018F3: u'ASUSTek COMPUTER INC.',
0x0018F4: u'EO TECHNICS Co., Ltd.',
0x0018F5: u'Shenzhen Streaming Video Technology Company Limited',
0x0018F6: u'Thomson Telecom Belgium',
0x0018F7: u'Kameleon Technologies',
0x0018F8: u'Cisco-Linksys LLC',
0x0018F9: u'VVOND, Inc.',
0x0018FA: u'Yushin Precision Equipment Co.,Ltd.',
0x0018FB: u'Compro Technology',
0x0018FC: u'Altec Electronic AG',
0x0018FD: u'Optimal Technologies International Inc.',
0x0018FE: u'Hewlett Packard',
0x0018FF: u'PowerQuattro Co.',
0x001900: u'Intelliverese - DBA Voicecom',
0x001901: u'F1MEDIA',
0x001902: u'Cambridge Consultants Ltd',
0x001903: u'Bigfoot Networks Inc',
0x001904: u'WB Electronics Sp. z o.o.',
0x001905: u'SCHRACK Seconet AG',
0x001906: u'Cisco Systems',
0x001907: u'Cisco Systems',
0x001908: u'Duaxes Corporation',
0x001909: u'Devi A/S',
0x00190A: u'HASWARE INC.',
0x00190B: u'Southern Vision Systems, Inc.',
0x00190C: u'Encore Electronics, Inc.',
0x00190D: u'IEEE 1394c',
0x00190E: u'Atech Technology Co., Ltd.',
0x00190F: u'Advansus Corp.',
0x001910: u'Knick Elektronische Messgeraete GmbH & Co. KG',
0x001911: u'Just In Mobile Information Technologies (Shanghai) Co., Ltd.',
0x001912: u'Welcat Inc',
0x001913: u'Chuang-Yi Network Equipment Co.Ltd.',
0x001914: u'Winix Co., Ltd',
0x001915: u'TECOM Co., Ltd.',
0x001916: u'PayTec AG',
0x001917: u'Posiflex Inc.',
0x001918: u'Interactive Wear AG',
0x001919: u'ASTEL Inc.',
0x00191A: u'IRLINK',
0x00191B: u'Sputnik Engineering AG',
0x00191C: u'Sensicast Systems',
0x00191D: u'Nintendo Co.,Ltd.',
0x00191E: u'Beyondwiz Co., Ltd.',
0x00191F: u'Microlink communications Inc.',
0x001920: u'KUME electric Co.,Ltd.',
0x001921: u'Elitegroup Computer System Co.',
0x001922: u'CM Comandos Lineares',
0x001923: u'Phonex Korea Co., LTD.',
0x001924: u'LBNL Engineering',
0x001925: u'Intelicis Corporation',
0x001926: u'BitsGen Co., Ltd.',
0x001927: u'ImCoSys Ltd',
0x001928: u'Siemens AG, Transportation Systems',
0x001929: u'2M2B Montadora de Maquinas Bahia Brasil LTDA',
0x00192A: u'Antiope Associates',
0x00192B: u'Hexagram, Inc.',
0x00192C: u'Motorola Mobile Devices',
0x00192D: u'Nokia Corporation',
0x00192E: u'Spectral Instruments, Inc.',
0x00192F: u'Cisco Systems',
0x001930: u'Cisco Systems',
0x001931: u'Balluff GmbH',
0x001932: u'Gude Analog- und Digialsysteme GmbH',
0x001933: u'Strix Systems, Inc.',
0x001934: u'TRENDON TOUCH TECHNOLOGY CORP.',
0x001935: u'Duerr Dental GmbH & Co. KG',
0x001936: u'STERLITE OPTICAL TECHNOLOGIES LIMITED',
0x001937: u'CommerceGuard AB',
0x001938: u'UMB Communications Co., Ltd.',
0x001939: u'Gigamips',
0x00193A: u'OESOLUTIONS',
0x00193B: u'Deliberant LLC',
0x00193C: u'HighPoint Technologies Incorporated',
0x00193D: u'GMC Guardian Mobility Corp.',
0x00193E: u'PIRELLI BROADBAND SOLUTIONS',
0x00193F: u'RDI technology(Shenzhen) Co.,LTD',
0x001940: u'Rackable Systems',
0x001941: u'Pitney Bowes, Inc',
0x001942: u'ON SOFTWARE INTERNATIONAL LIMITED',
0x001943: u'Belden',
0x001944: u'Fossil Partners, L.P.',
0x001945: u'Ten-Tec Inc.',
0x001946: u'Cianet Industria e Comercio S/A',
0x001947: u'Scientific Atlanta, A Cisco Company',
0x001948: u'AireSpider Networks',
0x001949: u'TENTEL COMTECH CO., LTD.',
0x00194A: u'TESTO AG',
0x00194B: u'SAGEM COMMUNICATION',
0x00194C: u'Fujian Stelcom information & Technology CO.,Ltd',
0x00194D: u'Avago Technologies Sdn Bhd',
0x00194E: u'Ultra Electronics - TCS (Tactical Communication Systems)',
0x00194F: u'Nokia Danmark A/S',
0x001950: u'Harman Multimedia',
0x001951: u'NETCONS, s.r.o.',
0x001952: u'ACOGITO Co., Ltd',
0x001953: u'Chainleader Communications Corp.',
0x001954: u'Leaf Corporation.',
0x001955: u'Cisco Systems',
0x001956: u'Cisco Systems',
0x001957: u'Saafnet Canada Inc.',
0x001958: u'Bluetooth SIG, Inc.',
0x001959: u'Staccato Communications Inc.',
0x00195A: u'Jenaer Antriebstechnik GmbH',
0x00195B: u'D-Link Corporation',
0x00195C: u'Innotech Corporation',
0x00195D: u'ShenZhen XinHuaTong Opto Electronics Co.,Ltd',
0x00195E: u'Motorola CHS',
0x00195F: u'Valemount Networks Corporation',
0x001960: u'DoCoMo Systems, Inc.',
0x001961: u'Blaupunkt GmbH',
0x001962: u'Commerciant, LP',
0x001963: u'Sony Ericsson Mobile Communications AB',
0x001964: u'Doorking Inc.',
0x001965: u'YuHua TelTech (ShangHai) Co., Ltd.',
0x001966: u'Asiarock Technology Limited',
0x001967: u'TELDAT Sp.J.',
0x001968: u'Digital Video Networks(Shanghai) CO. LTD.',
0x001969: u'Nortel',
0x00196A: u'MikroM GmbH',
0x00196B: u'Danpex Corporation',
0x00196C: u'ETROVISION TECHNOLOGY',
0x00196D: u'Raybit Systems Korea, Inc',
0x00196E: u'Metacom (Pty) Ltd.',
0x00196F: u'SensoPart GmbH',
0x001970: u'Z-Com, Inc.',
0x001971: u'Guangzhou Unicomp Technology Co.,Ltd',
0x001972: u'Plexus (Xiamen) Co.,ltd',
0x001973: u'Zeugma Systems',
0x001974: u'AboCom Systems, Inc.',
0x001975: u'Beijing Huisen networks technology Inc',
0x001976: u'Xipher Technologies, LLC',
0x001977: u'Aerohive Networks, Inc.',
0x001978: u'Datum Systems, Inc.',
0x001979: u'Nokia Danmark A/S',
0x00197A: u'MAZeT GmbH',
0x00197B: u'Picotest Corp.',
0x00197C: u'Riedel Communications GmbH',
0x00197D: u'Hon Hai Precision Ind. Co., Ltd',
0x00197E: u'Hon Hai Precision Ind. Co., Ltd',
0x00197F: u'PLANTRONICS, INC.',
0x001980: u'Gridpoint Systems',
0x001981: u'Vivox Inc',
0x001982: u'SmarDTV',
0x001983: u'CCT R&D Limited',
0x001984: u'ESTIC Corporation',
0x001985: u'IT Watchdogs, Inc',
0x001986: u'Cheng Hongjian',
0x001987: u'Panasonic Mobile Communications Co., Ltd.',
0x001988: u'Wi2Wi, Inc',
0x001989: u'Sonitrol Corporation',
0x00198A: u'Northrop Grumman Systems Corp.',
0x00198B: u'Novera Optics Korea, Inc.',
0x00198C: u'iXSea',
0x00198D: u'Ocean Optics, Inc.',
0x00198E: u'Oticon A/S',
0x00198F: u'Alcatel Bell N.V.',
0x001990: u'ELM DATA Co., Ltd.',
0x001991: u'avinfo',
0x001992: u'Bluesocket, Inc',
0x001993: u'Changshu Switchgear MFG. Co.,Ltd. (Former Changshu Switchgea',
0x001994: u'Jorjin technologies inc.',
0x001995: u'Jurong Hi-Tech (Suzhou)Co.ltd',
0x001996: u'TurboChef Technologies Inc.',
0x001997: u'Soft Device Sdn Bhd',
0x001998: u'SATO CORPORATION',
0x001999: u'Fujitsu Siemens Computers',
0x00199A: u'EDO-EVI',
0x00199B: u'Diversified Technical Systems, Inc.',
0x00199C: u'CTRING',
0x00199D: u'V, Inc.',
0x00199E: u'SHOWADENSHI ELECTRONICS,INC.',
0x00199F: u'DKT A/S',
0x0019A0: u'NIHON DATA SYSTENS, INC.',
0x0019A1: u'LG INFORMATION & COMM.',
0x0019A2: u'ORION TELE-EQUIPMENTS PVT LTD',
0x0019A3: u'asteel electronique atlantique',
0x0019A4: u'Austar Technology (hang zhou) Co.,Ltd',
0x0019A5: u'RadarFind Corporation',
0x0019A6: u'Motorola CHS',
0x0019A7: u'ITU-T',
0x0019A8: u'WiQuest Communications, Inc',
0x0019A9: u'Cisco Systems',
0x0019AA: u'Cisco Systems',
0x0019AB: u'Raycom CO ., LTD',
0x0019AC: u'GSP SYSTEMS Inc.',
0x0019AD: u'BOBST SA',
0x0019AE: u'Hopling Technologies b.v.',
0x0019AF: u'Rigol Technologies, Inc.',
0x0019B0: u'HanYang System',
0x0019B1: u'Arrow7 Corporation',
0x0019B2: u'XYnetsoft Co.,Ltd',
0x0019B3: u'Stanford Research Systems',
0x0019B4: u'VideoCast Ltd.',
0x0019B5: u'Famar Fueguina S.A.',
0x0019B6: u'Euro Emme s.r.l.',
0x0019B7: u'Nokia Danmark A/S',
0x0019B8: u'Boundary Devices',
0x0019B9: u'Dell Inc.',
0x0019BA: u'Paradox Security Systems Ltd',
0x0019BB: u'Hewlett Packard',
0x0019BC: u'ELECTRO CHANCE SRL',
0x0019BD: u'New Media Life',
0x0019BE: u'Altai Technologies Limited',
0x0019BF: u'Citiway technology Co.,ltd',
0x0019C0: u'Motorola Mobile Devices',
0x0019C1: u'Alps Electric Co., Ltd',
0x0019C2: u'Equustek Solutions, Inc.',
0x0019C3: u'Qualitrol',
0x0019C4: u'Infocrypt Inc.',
0x0019C5: u'SONY Computer Entertainment inc,',
0x0019C6: u'ZTE Corporation',
0x0019C7: u'Cambridge Industries(Group) Co.,Ltd.',
0x0019C8: u'AnyDATA Corporation',
0x0019C9: u'S&C ELECTRIC COMPANY',
0x0019CA: u'Broadata Communications, Inc',
0x0019CB: u'ZyXEL Communications Corporation',
0x0019CC: u'RCG (HK) Ltd',
0x0019CD: u'Chengdu ethercom information technology Ltd.',
0x0019CE: u'Progressive Gaming International',
0x0019CF: u'SALICRU, S.A.',
0x0019D0: u'Cathexis',
0x0019D1: u'Intel Corporation',
0x0019D2: u'Intel Corporation',
0x0019D3: u'TRAK Microwave',
0x0019D4: u'ICX Technologies',
0x0019D5: u'IP Innovations, Inc.',
0x0019D6: u'LS Cable Ltd.',
0x0019D7: u'FORTUNETEK CO., LTD',
0x0019D8: u'MAXFOR',
0x0019D9: u'Zeutschel GmbH',
0x0019DA: u'Welltrans O&E Technology Co. , Ltd.',
0x0019DB: u'MICRO-STAR INTERNATIONAL CO., LTD.',
0x0019DC: u'ENENSYS Technologies',
0x0019DD: u'FEI-Zyfer, Inc.',
0x0019DE: u'MOBITEK',
0x0019DF: u'THOMSON APDG',
0x0019E0: u'TP-LINK Technologies Co., Ltd.',
0x0019E1: u'Nortel',
0x0019E2: u'Juniper Networks',
0x0019E3: u'Apple Computers',
0x0019E4: u'2Wire, Inc',
0x0019E5: u'Lynx Studio Technology, Inc.',
0x0019E6: u'TOYO MEDIC CO.,LTD.',
0x0019E7: u'Cisco Systems',
0x0019E8: u'Cisco Systems',
0x0019E9: u'S-Information Technolgy, Co., Ltd.',
0x0019EA: u'TeraMage Technologies Co., Ltd.',
0x0019EB: u'Pyronix Ltd',
0x0019EC: u'Sagamore Systems, Inc.',
0x0019ED: u'Axesstel Inc.',
0x0019EE: u'CARLO GAVAZZI CONTROLS SPA-Controls Division',
0x0019EF: u'SHENZHEN LINNKING ELECTRONICS CO.,LTD',
0x0019F0: u'UNIONMAN TECHNOLOGY CO.,LTD',
0x0019F1: u'Star Communication Network Technology Co.,Ltd',
0x0019F2: u'Teradyne K.K.',
0x0019F3: u'Telematrix, Inc',
0x0019F4: u'Convergens Oy Ltd',
0x0019F5: u'Imagination Technologies Ltd',
0x0019F6: u'Acconet (PTE) Ltd',
0x0019F7: u'Onset Computer Corporation',
0x0019F8: u'Embedded Systems Design, Inc.',
0x0019F9: u'Lambda',
0x0019FA: u'Cable Vision Electronics CO., LTD.',
0x0019FB: u'AMSTRAD PLC',
0x0019FC: u'PT. Ufoakses Sukses Luarbiasa',
0x0019FD: u'Nintendo Co., Ltd.',
0x0019FE: u'SHENZHEN SEECOMM TECHNOLOGY CO.,LTD.',
0x0019FF: u'Finnzymes',
0x001A00: u'MATRIX INC.',
0x001A01: u'Smiths Medical',
0x001A02: u'SECURE CARE PRODUCTS, INC',
0x001A03: u'Angel Electronics Co., Ltd.',
0x001A04: u'Interay Solutions BV',
0x001A05: u'OPTIBASE LTD',
0x001A06: u'OpVista, Inc.',
0x001A07: u'Arecont Vision',
0x001A08: u'Dalman Technical Services',
0x001A09: u'Wayfarer Transit Systems Ltd',
0x001A0A: u'Adaptive Micro-Ware Inc.',
0x001A0B: u'BONA TECHNOLOGY INC.',
0x001A0C: u'Swe-Dish Satellite Systems AB',
0x001A0D: u'HandHeld entertainment, Inc.',
0x001A0E: u'Cheng Uei Precision Industry Co.,Ltd',
0x001A0F: u'Sistemas Avanzados de Control, S.A.',
0x001A10: u'LUCENT TRANS ELECTRONICS CO.,LTD',
0x001A11: u'Google Inc.',
0x001A12: u'PRIVATE',
0x001A13: u'Wanlida Group Co., LTD',
0x001A14: u'Xin Hua Control Engineering Co.,Ltd.',
0x001A15: u'gemalto e-Payment',
0x001A16: u'Nokia Danmark A/S',
0x001A17: u'Teak Technologies, Inc.',
0x001A18: u'Advanced Simulation Technology inc.',
0x001A19: u'Computer Engineering Limited',
0x001A1A: u'Gentex Corporation/Electro-Acoustic Products',
0x001A1B: u'Motorola Mobile Devices',
0x001A1C: u'GT&T Engineering Pte Ltd',
0x001A1D: u'PChome Online Inc.',
0x001A1E: u'Aruba Networks',
0x001A1F: u'Coastal Environmental Systems',
0x001A20: u'CMOTECH Co. Ltd.',
0x001A21: u'Indac B.V.',
0x001A22: u'eq-3 GmbH',
0x001A23: u'Ice Qube, Inc',
0x001A24: u'Galaxy Telecom Technologies Ltd',
0x001A25: u'DELTA DORE',
0x001A26: u'Deltanode Solutions AB',
0x001A27: u'Ubistar',
0x001A28: u'ASWT Co., LTD. Taiwan Branch H.K.',
0x001A29: u'Techsonic Industries d/b/a Humminbird',
0x001A2A: u'Arcadyan Technology Corporation',
0x001A2B: u'Ayecom Technology Co., Ltd.',
0x001A2C: u'SATEC Co.,LTD',
0x001A2D: u'The Navvo Group',
0x001A2E: u'Ziova Coporation',
0x001A2F: u'Cisco Systems',
0x001A30: u'Cisco Systems',
0x001A31: u'SCAN COIN Industries AB',
0x001A32: u'ACTIVA MULTIMEDIA',
0x001A33: u'ASI Communications, Inc.',
0x001A34: u'Konka Group Co., Ltd.',
0x001A35: u'BARTEC GmbH',
0x001A36: u'Actimon GmbH & Co. KG',
0x001A37: u'Lear Corporation',
0x001A38: u'SCI Technology',
0x001A39: u'Merten GmbH&CoKG',
0x001A3A: u'Dongahelecomm',
0x001A3B: u'Doah Elecom Inc.',
0x001A3C: u'Technowave Ltd.',
0x001A3D: u'Ajin Vision Co.,Ltd',
0x001A3E: u'Faster Technology LLC',
0x001A3F: u'intelbras',
0x001A40: u'A-FOUR TECH CO., LTD.',
0x001A41: u'INOCOVA Co.,Ltd',
0x001A42: u'Techcity Technology co., Ltd.',
0x001A43: u'Logical Link Communications',
0x001A44: u'JWTrading Co., Ltd',
0x001A45: u'GN Netcom as',
0x001A46: u'Digital Multimedia Technology Co., Ltd',
0x001A47: u'Agami Systems, Inc.',
0x001A48: u'Takacom Corporation',
0x001A49: u'Micro Vision Co.,LTD',
0x001A4A: u'Qumranet Inc.',
0x001A4B: u'Hewlett Packard',
0x001A4C: u'Crossbow Technology, Inc',
0x001A4D: u'GIGABYTE TECHNOLOGY CO.,LTD.',
0x001A4E: u'NTI AG / LinMot',
0x001A4F: u'AVM GmbH',
0x001A50: u'PheeNet Technology Corp.',
0x001A51: u'Alfred Mann Foundation',
0x001A52: u'Meshlinx Wireless Inc.',
0x001A53: u'Zylaya',
0x001A54: u'Hip Shing Electronics Ltd.',
0x001A55: u'ACA-Digital Corporation',
0x001A56: u'ViewTel Co,. Ltd.',
0x001A57: u'Matrix Design Group, LLC',
0x001A58: u'Celectronic GmbH',
0x001A59: u'Ircona',
0x001A5A: u'Korea Electric Power Data Network (KDN) Co., Ltd',
0x001A5B: u'NetCare Service Co., Ltd.',
0x001A5C: u'Euchner GmbH+Co. KG',
0x001A5D: u'Mobinnova Corp.',
0x001A5E: u'Thincom Technology Co.,Ltd',
0x001A5F: u'KitWorks.fi Ltd.',
0x001A60: u'Wave Electronics Co.,Ltd.',
0x001A61: u'PacStar Corp.',
0x001A62: u'trusted data',
0x001A63: u'Elster Electricity, LLC',
0x001A64: u'IBM Corp.',
0x001A65: u'Seluxit',
0x001A66: u'Motorola CHS',
0x001A67: u'Infinite QL Sdn Bhd',
0x001A68: u'Weltec Enterprise Co., Ltd.',
0x001A69: u'Wuhan Yangtze Optical Technology CO.,Ltd.',
0x001A6A: u'Tranzas, Inc.',
0x001A6B: u'USI',
0x001A6C: u'Cisco Systems',
0x001A6D: u'Cisco Systems',
0x001A6E: u'Impro Technologies',
0x001A6F: u'MI.TEL s.r.l.',
0x001A70: u'Cisco-Linksys, LLC',
0x001A71: u'Diostech Co., Ltd.',
0x001A72: u'Mosart Semiconductor Corp.',
0x001A73: u'Gemtek Technology Co., Ltd.',
0x001A74: u'Procare International Co',
0x001A75: u'Sony Ericsson Mobile Communications',
0x001A76: u'SDT information Technology Co.,LTD.',
0x001A77: u'Motorola Mobile Devices',
0x001A78: u'ubtos',
0x001A79: u'TELECOMUNICATION TECHNOLOGIES LTD.',
0x001A7A: u'Lismore Instruments Limited',
0x001A7B: u'Teleco, Inc.',
0x001A7C: u'Hirschmann Automation and Control B.V.',
0x001A7D: u'cyber-blue(HK)Ltd',
0x001A7E: u'LN Srithai Comm Ltd.',
0x001A7F: u'GCI Science&Technology Co.,Ltd.',
0x001A80: u'Sony Corporation',
0x001A81: u'Zelax',
0x001A82: u'PROBA Building Automation Co.,LTD',
0x001A83: u'Pegasus Technologies Inc.',
0x001A84: u'V One Multimedia Pte Ltd',
0x001A85: u'NV Michel Van de Wiele',
0x001A86: u'AdvancedIO Systems Inc',
0x001A87: u'Canhold International Limited',
0x001A88: u'Venergy,Co,Ltd',
0x001A89: u'Nokia Danmark A/S',
0x001A8A: u'Samsung Electronics Co., Ltd.',
0x001A8B: u'CHUNIL ELECTRIC IND., CO.',
0x001A8C: u'Astaro AG',
0x001A8D: u'AVECS Bergen GmbH',
0x001A8E: u'3Way Networks Ltd',
0x001A8F: u'Nortel',
0x001A90: u'Trópico Sistemas e Telecomunicações da Amazônia LTDA.',
0x001A91: u'FusionDynamic Ltd.',
0x001A92: u'ASUSTek COMPUTER INC.',
0x001A93: u'ERCO Leuchten GmbH',
0x001A94: u'Votronic GmbH',
0x001A95: u'Hisense Mobile Communications Technoligy Co.,Ltd.',
0x001A96: u'ECLER S.A.',
0x001A97: u'fitivision technology Inc.',
0x001A98: u'Asotel Communication Limited Taiwan Branch',
0x001A99: u'Smarty (HZ) Information Electronics Co., Ltd',
0x001A9A: u'Skyworth Digital technology(shenzhen)co.ltd.',
0x001A9B: u'ADEC & Parter AG',
0x001A9C: u'RightHand Technologies, Inc.',
0x001A9D: u'Skipper Wireless, Inc.',
0x001A9E: u'ICON Digital International Limited',
0x001A9F: u'A-Link Europe Ltd',
0x001AA0: u'Dell Inc',
0x001AA1: u'Cisco Systems',
0x001AA2: u'Cisco Systems',
0x001AA3: u'DELORME',
0x001AA4: u'Future University-Hakodate',
0x001AA5: u'BRN Phoenix',
0x001AA6: u'Telefunken Radio Communication Systems GmbH &CO.KG',
0x001AA7: u'Torian Wireless',
0x001AA8: u'Mamiya Digital Imaging Co., Ltd.',
0x001AA9: u'FUJIAN STAR-NET COMMUNICATION CO.,LTD',
0x001AAA: u'Analogic Corp.',
0x001AAB: u'eWings s.r.l.',
0x001AAC: u'Corelatus AB',
0x001AAD: u'Motorola CHS',
0x001AAE: u'Savant Systems LLC',
0x001AAF: u'BLUSENS TECHNOLOGY',
0x001AB0: u'Signal Networks Pvt. Ltd.,',
0x001AB1: u'Asia Pacific Satellite Industries Co., Ltd.',
0x001AB2: u'Cyber Solutions Inc.',
0x001AB3: u'VISIONITE INC.',
0x001AB4: u'FFEI Ltd.',
0x001AB5: u'Home Network System',
0x001AB6: u'Luminary Micro Inc',
0x001AB7: u'Ethos Networks LTD.',
0x001AB8: u'Anseri Corporation',
0x001AB9: u'PMC',
0x001ABA: u'Caton Overseas Limited',
0x001ABB: u'Fontal Technology Incorporation',
0x001ABC: u'U4EA Technologies Ltd',
0x001ABD: u'Impatica Inc.',
0x001ABE: u'COMPUTER HI-TECH INC.',
0x001ABF: u'TRUMPF Laser Marking Systems AG',
0x001AC0: u'JOYBIEN TECHNOLOGIES CO., LTD.',
0x001AC1: u'3COM EUROPE',
0x001AC2: u'YEC Co.,Ltd.',
0x001AC3: u'Scientific-Atlanta, Inc',
0x001AC4: u'2Wire, Inc',
0x001AC5: u'BreakingPoint Systems, Inc.',
0x001AC6: u'Micro Control Designs',
0x001AC7: u'UNIPOINT',
0x001AC8: u'ISL (Instrumentation Scientifique de Laboratoire)',
0x001AC9: u'SUZUKEN CO.,LTD',
0x001ACA: u'Tilera Corporation',
0x001ACB: u'Autocom Products Ltd',
0x001ACC: u'Celestial Semiconductor, Ltd',
0x001ACD: u'Tidel Engineering LP',
0x001ACE: u'YUPITERU INDUSTRIES CO., LTD.',
0x001ACF: u'C.T. ELETTRONICA',
0x001AD0: u'Siemens Schweiz AG',
0x001AD1: u'FARGO CO., LTD.',
0x001AD2: u'Eletronica Nitron Ltda',
0x001AD3: u'Vamp Ltd.',
0x001AD4: u'iPOX Technology Co., Ltd.',
0x001AD5: u'KMC CHAIN INDUSTRIAL CO., LTD.',
0x001AD6: u'JIAGNSU AETNA ELECTRIC CO.,LTD',
0x001AD7: u'Christie Digital Systems, Inc.',
0x001AD8: u'AlsterAero GmbH',
0x001AD9: u'International Broadband Electric Communications, Inc.',
0x001ADA: u'Biz-2-Me Inc.',
0x001ADB: u'Motorola Mobile Devices',
0x001ADC: u'Nokia Danmark A/S',
0x001ADD: u'PePWave Ltd',
0x001ADE: u'Motorola CHS',
0x001ADF: u'Interactivetv Pty Limited',
0x001AE0: u'Mythology Tech Express Inc.',
0x001AE1: u'EDGE ACCESS INC',
0x001AE2: u'Cisco Systems',
0x001AE3: u'Cisco Systems',
0x001AE4: u'Liposonix Inc,',
0x001AE5: u'Mvox Technologies Inc.',
0x001AE6: u'Atlanta Advanced Communications Holdings Limited',
0x001AE7: u'Aztek Networks, Inc.',
0x001AE8: u'Siemens Enterprise Communications GmbH & Co. KG',
0x001AE9: u'Nintendo Co., Ltd.',
0x001AEA: u'Radio Terminal Systems Pty Ltd',
0x001AEB: u'Allied Telesis K.K.',
0x001AEC: u'Keumbee Electronics Co.,Ltd.',
0x001AED: u'INCOTEC GmbH',
0x001AEE: u'Shenztech Ltd',
0x001AEF: u'Loopcomm Technology, Inc.',
0x001AF0: u'Alcatel - IPD',
0x001AF1: u'Embedded Artists AB',
0x001AF2: u'Dynavisions GmbH',
0x001AF3: u'Samyoung Electronics',
0x001AF4: u'Handreamnet',
0x001AF5: u'PENTAONE. CO., LTD.',
0x001AF6: u'Woven Systems, Inc.',
0x001AF7: u'dataschalt e+a GmbH',
0x001AF8: u'Copley Controls Corporation',
0x001AF9: u'AeroVIronment (AV Inc)',
0x001AFA: u'Welch Allyn, Inc.',
0x001AFB: u'Joby Inc.',
0x001AFC: u'ModusLink Corporation',
0x001AFD: u'EVOLIS',
0x001AFE: u'SOFACREAL',
0x001AFF: u'Wizyoung Tech.',
0x001B00: u'Neopost Technologies',
0x001B01: u'Applied Radio Technologies',
0x001B02: u'ED Co.Ltd',
0x001B03: u'Action Technology (SZ) Co., Ltd',
0x001B04: u'Affinity International S.p.a',
0x001B05: u'Young Media Concepts GmbH',
0x001B06: u'Ateliers R. LAUMONIER',
0x001B07: u'Mendocino Software',
0x001B08: u'Danfoss Drives A/S',
0x001B09: u'Matrix Telecom Pvt. Ltd.',
0x001B0A: u'Intelligent Distributed Controls Ltd',
0x001B0B: u'Phidgets Inc.',
0x001B0C: u'Cisco Systems',
0x001B0D: u'Cisco Systems',
0x001B0E: u'InoTec GmbH Organisationssysteme',
0x001B0F: u'Petratec',
0x001B10: u'ShenZhen Kang Hui Technology Co.,ltd',
0x001B11: u'D-Link Corporation',
0x001B12: u'Apprion',
0x001B13: u'Icron Technologies Corporation',
0x001B14: u'Carex Lighting Equipment Factory',
0x001B15: u'Voxtel, Inc.',
0x001B16: u'Celtro Ltd.',
0x001B17: u'Palo Alto Networks',
0x001B18: u'Tsuken Electric Ind. Co.,Ltd',
0x001B19: u'IEEE 1588 Standard',
0x001B1A: u'e-trees Japan, Inc.',
0x001B1B: u'Siemens AG, A&D AS EWK PU1',
0x001B1C: u'Coherent',
0x001B1D: u'Phoenix International Co., Ltd',
0x001B1E: u'HART Communication Foundation',
0x001B1F: u'DELTA - Danish Electronics, Light & Acoustics',
0x001B20: u'TPine Technology',
0x001B21: u'Intel Corporate',
0x001B22: u'Palit Microsystems ( H.K.) Ltd.',
0x001B23: u'SimpleComTools',
0x001B24: u'Quanta Computer Inc.',
0x001B25: u'Nortel',
0x001B26: u'RON-Telecom ZAO',
0x001B27: u'Merlin CSI',
0x001B28: u'POLYGON, JSC',
0x001B29: u'Avantis.Co.,Ltd',
0x001B2A: u'Cisco Systems',
0x001B2B: u'Cisco Systems',
0x001B2C: u'ATRON electronic GmbH',
0x001B2D: u'PRIVATE',
0x001B2E: u'Sinkyo Electron Inc',
0x001B2F: u'NETGEAR Inc.',
0x001B30: u'Solitech Inc.',
0x001B31: u'Neural Image. Co. Ltd.',
0x001B32: u'QLogic Corporation',
0x001B33: u'Nokia Danmark A/S',
0x001B34: u'Focus System Inc.',
0x001B35: u'ChongQing JINOU Science & Technology Development CO.,Ltd',
0x001B36: u'Tsubata Engineering Co.,Ltd. (Head Office)',
0x001B37: u'Computec Oy',
0x001B38: u'COMPAL ELECTRONICS TECHNOLOGIC CO., LTD.',
0x001B39: u'Proxicast',
0x001B3A: u'SIMS Corp.',
0x001B3B: u'Yi-Qing CO., LTD',
0x001B3C: u'Software Technologies Group,Inc.',
0x001B3D: u'EuroTel Spa',
0x001B3E: u'Curtis, Inc.',
0x001B3F: u'ProCurve Networking by HP',
0x001B40: u'Network Automation mxc AB',
0x001B41: u'General Infinity Co.,Ltd.',
0x001B42: u'Wise & Blue',
0x001B43: u'Beijing DG Telecommunications equipment Co.,Ltd',
0x001B44: u'SanDisk Corporation',
0x001B45: u'ABB AS, Division Automation Products',
0x001B46: u'Blueone Technology Co.,Ltd',
0x001B47: u'Futarque A/S',
0x001B48: u'Shenzhen Lantech Electronics Co., Ltd.',
0x001B49: u'Roberts Radio limited',
0x001B4A: u'W&W Communications, Inc.',
0x001B4B: u'SANION Co., Ltd.',
0x001B4C: u'Signtech',
0x001B4D: u'Areca Technology Corporation',
0x001B4E: u'Navman New Zealand',
0x001B4F: u'Avaya Inc.',
0x001B50: u'Nizhny Novgorod Factory named after M.Frunze, FSUE (NZiF)',
0x001B51: u'Vector Technology Corp.',
0x001B52: u'Motorola Mobile Devices',
0x001B53: u'Cisco Systems',
0x001B54: u'Cisco Systems',
0x001B55: u'Hurco Automation Ltd.',
0x001B56: u'Tehuti Networks Ltd.',
0x001B57: u'SEMINDIA SYSTEMS PRIVATE LIMITED',
0x001B58: u'PRIVATE',
0x001B59: u'Sony Ericsson Mobile Communications AB',
0x001B5A: u'Apollo Imaging Technologies, Inc.',
0x001B5B: u'2Wire, Inc.',
0x001B5C: u'Azuretec Co., Ltd.',
0x001B5D: u'Vololink Pty Ltd',
0x001B5E: u'BPL Limited',
0x001B5F: u'Alien Technology',
0x001B60: u'NAVIGON AG',
0x001B61: u'Digital Acoustics, LLC',
0x001B62: u'JHT Optoelectronics Co.,Ltd.',
0x001B63: u'Apple Inc.',
0x001B64: u'IsaacLandKorea',
0x001B65: u'China Gridcom Co., Ltd',
0x001B66: u'Sennheiser electronic GmbH & Co. KG',
0x001B67: u'Ubiquisys Ltd',
0x001B68: u'Modnnet Co., Ltd',
0x001B69: u'Equaline Corporation',
0x001B6A: u'Powerwave UK Ltd',
0x001B6B: u'Swyx Solutions AG',
0x001B6C: u'LookX Digital Media BV',
0x001B6D: u'Midtronics, Inc.',
0x001B6E: u'Anue Systems, Inc.',
0x001B6F: u'Teletrak Ltd',
0x001B70: u'IRI Ubiteq, INC.',
0x001B71: u'Telular Corp.',
0x001B72: u'Sicep s.p.a.',
0x001B73: u'DTL Broadcast Ltd',
0x001B74: u'MiraLink Corporation',
0x001B75: u'Hypermedia Systems',
0x001B76: u'Ripcode, Inc.',
0x001B77: u'Intel Corporate',
0x001B78: u'Hewlett Packard',
0x001B79: u'FAIVELEY TRANSPORT',
0x001B7A: u'Nintendo Co., Ltd.',
0x001B7B: u'The Tintometer Ltd',
0x001B7C: u'A & R Cambridge',
0x001B7D: u'CXR Anderson Jacobson',
0x001B7E: u'Beckmann GmbH',
0x001B7F: u'TMN Technologies Telecomunicacoes Ltda',
0x001B80: u'LORD Corporation',
0x001B81: u'DATAQ Instruments, Inc.',
0x001B82: u'Taiwan Semiconductor Co., Ltd.',
0x001B83: u'Finsoft Ltd',
0x001B84: u'Scan Engineering Telecom',
0x001B85: u'MAN Diesel A/S',
0x001B86: u'Bosch Access Systems GmbH',
0x001B87: u'Deepsound Tech. Co., Ltd',
0x001B88: u'Divinet Access Technologies Ltd',
0x001B89: u'EMZA Visual Sense Ltd.',
0x001B8A: u'2M Electronic A/S',
0x001B8B: u'NEC AccessTechnica,Ltd.',
0x001B8C: u'JMicron Technology Corp.',
0x001B8D: u'Electronic Computer Systems, Inc.',
0x001B8E: u'Hulu Sweden AB',
0x001B8F: u'Cisco Systems',
0x001B90: u'Cisco Systems',
0x001B91: u'EFKON AG',
0x001B92: u'l-acoustics',
0x001B93: u'JC Decaux SA DNT',
0x001B94: u'T.E.M.A. S.p.A.',
0x001B95: u'VIDEO SYSTEMS SRL',
0x001B96: u'Snif Labs, Inc.',
0x001B97: u'Violin Technologies',
0x001B98: u'Samsung Electronics Co., Ltd.',
0x001B99: u'KS System GmbH',
0x001B9A: u'Apollo Fire Detectors Ltd',
0x001B9B: u'Hose-McCann Communications',
0x001B9C: u'SATEL sp. z o.o.',
0x001B9D: u'Novus Security Sp. z o.o.',
0x001B9E: u'ASKEY COMPUTER CORP',
0x001B9F: u'Calyptech Pty Ltd',
0x001BA0: u'Awox',
0x001BA1: u'Åmic AB',
0x001BA2: u'IDS Imaging Development Systems GmbH',
0x001BA3: u'Flexit Group GmbH',
0x001BA4: u'S.A.E Afikim',
0x001BA5: u'MyungMin Systems, Inc.',
0x001BA6: u'intotech inc.',
0x001BA7: u'Lorica Solutions',
0x001BA8: u'UBI&MOBI,.Inc',
0x001BA9: u'BROTHER INDUSTRIES, LTD. Printing & Solutions Company',
0x001BAA: u'XenICs nv',
0x001BAB: u'Telchemy, Incorporated',
0x001BAC: u'Curtiss Wright Controls Embedded Computing',
0x001BAD: u'iControl Incorporated',
0x001BAE: u'Micro Control Systems, Inc',
0x001BAF: u'Nokia Danmark A/S',
0x001BB0: u'BHARAT ELECTRONICS',
0x001BB1: u'Wistron Neweb Corp.',
0x001BB2: u'Intellect International NV',
0x001BB3: u'Condalo GmbH',
0x001BB4: u'Airvod Limited',
0x001BB5: u'Cherry GmbH',
0x001BB6: u'Bird Electronic Corp.',
0x001BB7: u'Alta Heights Technology Corp.',
0x001BB8: u'BLUEWAY ELECTRONIC CO;LTD',
0x001BB9: u'Elitegroup Computer System Co.',
0x001C7C: u'PERQ SYSTEMS CORPORATION',
0x002000: u'LEXMARK INTERNATIONAL, INC.',
0x002001: u'DSP SOLUTIONS, INC.',
0x002002: u'SERITECH ENTERPRISE CO., LTD.',
0x002003: u'PIXEL POWER LTD.',
0x002004: u'YAMATAKE-HONEYWELL CO., LTD.',
0x002005: u'SIMPLE TECHNOLOGY',
0x002006: u'GARRETT COMMUNICATIONS, INC.',
0x002007: u'SFA, INC.',
0x002008: u'CABLE & COMPUTER TECHNOLOGY',
0x002009: u'PACKARD BELL ELEC., INC.',
0x00200A: u'SOURCE-COMM CORP.',
0x00200B: u'OCTAGON SYSTEMS CORP.',
0x00200C: u'ADASTRA SYSTEMS CORP.',
0x00200D: u'CARL ZEISS',
0x00200E: u'SATELLITE TECHNOLOGY MGMT, INC',
0x00200F: u'TANBAC CO., LTD.',
0x002010: u'JEOL SYSTEM TECHNOLOGY CO. LTD',
0x002011: u'CANOPUS CO., LTD.',
0x002012: u'CAMTRONICS MEDICAL SYSTEMS',
0x002013: u'DIVERSIFIED TECHNOLOGY, INC.',
0x002014: u'GLOBAL VIEW CO., LTD.',
0x002015: u'ACTIS COMPUTER SA',
0x002016: u'SHOWA ELECTRIC WIRE & CABLE CO',
0x002017: u'ORBOTECH',
0x002018: u'CIS TECHNOLOGY INC.',
0x002019: u'OHLER GmbH',
0x00201A: u'MRV Communications, Inc.',
0x00201B: u'NORTHERN TELECOM/NETWORK',
0x00201C: u'EXCEL, INC.',
0x00201D: u'KATANA PRODUCTS',
0x00201E: u'NETQUEST CORPORATION',
0x00201F: u'BEST POWER TECHNOLOGY, INC.',
0x002020: u'MEGATRON COMPUTER INDUSTRIES PTY, LTD.',
0x002021: u'ALGORITHMS SOFTWARE PVT. LTD.',
0x002022: u'NMS Communications',
0x002023: u'T.C. TECHNOLOGIES PTY. LTD',
0x002024: u'PACIFIC COMMUNICATION SCIENCES',
0x002025: u'CONTROL TECHNOLOGY, INC.',
0x002026: u'AMKLY SYSTEMS, INC.',
0x002027: u'MING FORTUNE INDUSTRY CO., LTD',
0x002028: u'WEST EGG SYSTEMS, INC.',
0x002029: u'TELEPROCESSING PRODUCTS, INC.',
0x00202A: u'N.V. DZINE',
0x00202B: u'ADVANCED TELECOMMUNICATIONS MODULES, LTD.',
0x00202C: u'WELLTRONIX CO., LTD.',
0x00202D: u'TAIYO CORPORATION',
0x00202E: u'DAYSTAR DIGITAL',
0x00202F: u'ZETA COMMUNICATIONS, LTD.',
0x002030: u'ANALOG & DIGITAL SYSTEMS',
0x002031: u'ERTEC GmbH',
0x002032: u'ALCATEL TAISEL',
0x002033: u'SYNAPSE TECHNOLOGIES, INC.',
0x002034: u'ROTEC INDUSTRIEAUTOMATION GMBH',
0x002035: u'IBM CORPORATION',
0x002036: u'BMC SOFTWARE',
0x002037: u'SEAGATE TECHNOLOGY',
0x002038: u'VME MICROSYSTEMS INTERNATIONAL CORPORATION',
0x002039: u'SCINETS',
0x00203A: u'DIGITAL BI0METRICS INC.',
0x00203B: u'WISDM LTD.',
0x00203C: u'EUROTIME AB',
0x00203D: u'NOVAR ELECTRONICS CORPORATION',
0x00203E: u'LogiCan Technologies, Inc.',
0x00203F: u'JUKI CORPORATION',
0x002040: u'Motorola Broadband Communications Sector',
0x002041: u'DATA NET',
0x002042: u'DATAMETRICS CORP.',
0x002043: u'NEURON COMPANY LIMITED',
0x002044: u'GENITECH PTY LTD',
0x002045: u'ION Networks, Inc.',
0x002046: u'CIPRICO, INC.',
0x002047: u'STEINBRECHER CORP.',
0x002048: u'Marconi Communications',
0x002049: u'COMTRON, INC.',
0x00204A: u'PRONET GMBH',
0x00204B: u'AUTOCOMPUTER CO., LTD.',
0x00204C: u'MITRON COMPUTER PTE LTD.',
0x00204D: u'INOVIS GMBH',
0x00204E: u'NETWORK SECURITY SYSTEMS, INC.',
0x00204F: u'DEUTSCHE AEROSPACE AG',
0x002050: u'KOREA COMPUTER INC.',
0x002051: u'Verilink Corporation',
0x002052: u'RAGULA SYSTEMS',
0x002053: u'HUNTSVILLE MICROSYSTEMS, INC.',
0x002054: u'EASTERN RESEARCH, INC.',
0x002055: u'ALTECH CO., LTD.',
0x002056: u'NEOPRODUCTS',
0x002057: u'TITZE DATENTECHNIK GmbH',
0x002058: u'ALLIED SIGNAL INC.',
0x002059: u'MIRO COMPUTER PRODUCTS AG',
0x00205A: u'COMPUTER IDENTICS',
0x00205B: u'Kentrox, LLC',
0x00205C: u'InterNet Systems of Florida, Inc.',
0x00205D: u'NANOMATIC OY',
0x00205E: u'CASTLE ROCK, INC.',
0x00205F: u'GAMMADATA COMPUTER GMBH',
0x002060: u'ALCATEL ITALIA S.p.A.',
0x002061: u'DYNATECH COMMUNICATIONS, INC.',
0x002062: u'SCORPION LOGIC, LTD.',
0x002063: u'WIPRO INFOTECH LTD.',
0x002064: u'PROTEC MICROSYSTEMS, INC.',
0x002065: u'SUPERNET NETWORKING INC.',
0x002066: u'GENERAL MAGIC, INC.',
0x002067: u'PRIVATE',
0x002068: u'ISDYNE',
0x002069: u'ISDN SYSTEMS CORPORATION',
0x00206A: u'OSAKA COMPUTER CORP.',
0x00206B: u'KONICA MINOLTA HOLDINGS, INC.',
0x00206C: u'EVERGREEN TECHNOLOGY CORP.',
0x00206D: u'DATA RACE, INC.',
0x00206E: u'XACT, INC.',
0x00206F: u'FLOWPOINT CORPORATION',
0x002070: u'HYNET, LTD.',
0x002071: u'IBR GMBH',
0x002072: u'WORKLINK INNOVATIONS',
0x002073: u'FUSION SYSTEMS CORPORATION',
0x002074: u'SUNGWOON SYSTEMS',
0x002075: u'MOTOROLA COMMUNICATION ISRAEL',
0x002076: u'REUDO CORPORATION',
0x002077: u'KARDIOS SYSTEMS CORP.',
0x002078: u'RUNTOP, INC.',
0x002079: u'MIKRON GMBH',
0x00207A: u'WiSE Communications, Inc.',
0x00207B: u'Intel Corporation',
0x00207C: u'AUTEC GmbH',
0x00207D: u'ADVANCED COMPUTER APPLICATIONS',
0x00207E: u'FINECOM Co., Ltd.',
0x00207F: u'KYOEI SANGYO CO., LTD.',
0x002080: u'SYNERGY (UK) LTD.',
0x002081: u'TITAN ELECTRONICS',
0x002082: u'ONEAC CORPORATION',
0x002083: u'PRESTICOM INCORPORATED',
0x002084: u'OCE PRINTING SYSTEMS, GMBH',
0x002085: u'EXIDE ELECTRONICS',
0x002086: u'MICROTECH ELECTRONICS LIMITED',
0x002087: u'MEMOTEC COMMUNICATIONS CORP.',
0x002088: u'GLOBAL VILLAGE COMMUNICATION',
0x002089: u'T3PLUS NETWORKING, INC.',
0x00208A: u'SONIX COMMUNICATIONS, LTD.',
0x00208B: u'LAPIS TECHNOLOGIES, INC.',
0x00208C: u'GALAXY NETWORKS, INC.',
0x00208D: u'CMD TECHNOLOGY',
0x00208E: u'CHEVIN SOFTWARE ENG. LTD.',
0x00208F: u'ECI TELECOM LTD.',
0x002090: u'ADVANCED COMPRESSION TECHNOLOGY, INC.',
0x002091: u'J125, NATIONAL SECURITY AGENCY',
0x002092: u'CHESS ENGINEERING B.V.',
0x002093: u'LANDINGS TECHNOLOGY CORP.',
0x002094: u'CUBIX CORPORATION',
0x002095: u'RIVA ELECTRONICS',
0x002096: u'Invensys',
0x002097: u'APPLIED SIGNAL TECHNOLOGY',
0x002098: u'HECTRONIC AB',
0x002099: u'BON ELECTRIC CO., LTD.',
0x00209A: u'THE 3DO COMPANY',
0x00209B: u'ERSAT ELECTRONIC GMBH',
0x00209C: u'PRIMARY ACCESS CORP.',
0x00209D: u'LIPPERT AUTOMATIONSTECHNIK',
0x00209E: u'BROWN\'S OPERATING SYSTEM SERVICES, LTD.',
0x00209F: u'MERCURY COMPUTER SYSTEMS, INC.',
0x0020A0: u'OA LABORATORY CO., LTD.',
0x0020A1: u'DOVATRON',
0x0020A2: u'GALCOM NETWORKING LTD.',
0x0020A3: u'DIVICOM INC.',
0x0020A4: u'MULTIPOINT NETWORKS',
0x0020A5: u'API ENGINEERING',
0x0020A6: u'PROXIM, INC.',
0x0020A7: u'PAIRGAIN TECHNOLOGIES, INC.',
0x0020A8: u'SAST TECHNOLOGY CORP.',
0x0020A9: u'WHITE HORSE INDUSTRIAL',
0x0020AA: u'DIGIMEDIA VISION LTD.',
0x0020AB: u'MICRO INDUSTRIES CORP.',
0x0020AC: u'INTERFLEX DATENSYSTEME GMBH',
0x0020AD: u'LINQ SYSTEMS',
0x0020AE: u'ORNET DATA COMMUNICATION TECH.',
0x0020AF: u'3COM CORPORATION',
0x0020B0: u'GATEWAY DEVICES, INC.',
0x0020B1: u'COMTECH RESEARCH INC.',
0x0020B2: u'GKD Gesellschaft Fur Kommunikation Und Datentechnik',
0x0020B3: u'SCLTEC COMMUNICATIONS SYSTEMS',
0x0020B4: u'TERMA ELEKTRONIK AS',
0x0020B5: u'YASKAWA ELECTRIC CORPORATION',
0x0020B6: u'AGILE NETWORKS, INC.',
0x0020B7: u'NAMAQUA COMPUTERWARE',
0x0020B8: u'PRIME OPTION, INC.',
0x0020B9: u'METRICOM, INC.',
0x0020BA: u'CENTER FOR HIGH PERFORMANCE',
0x0020BB: u'ZAX CORPORATION',
0x0020BC: u'Long Reach Networks Pty Ltd',
0x0020BD: u'NIOBRARA R & D CORPORATION',
0x0020BE: u'LAN ACCESS CORP.',
0x0020BF: u'AEHR TEST SYSTEMS',
0x0020C0: u'PULSE ELECTRONICS, INC.',
0x0020C1: u'SAXA, Inc.',
0x0020C2: u'TEXAS MEMORY SYSTEMS, INC.',
0x0020C3: u'COUNTER SOLUTIONS LTD.',
0x0020C4: u'INET,INC.',
0x0020C5: u'EAGLE TECHNOLOGY',
0x0020C6: u'NECTEC',
0x0020C7: u'AKAI Professional M.I. Corp.',
0x0020C8: u'LARSCOM INCORPORATED',
0x0020C9: u'VICTRON BV',
0x0020CA: u'DIGITAL OCEAN',
0x0020CB: u'PRETEC ELECTRONICS CORP.',
0x0020CC: u'DIGITAL SERVICES, LTD.',
0x0020CD: u'HYBRID NETWORKS, INC.',
0x0020CE: u'LOGICAL DESIGN GROUP, INC.',
0x0020CF: u'TEST & MEASUREMENT SYSTEMS INC',
0x0020D0: u'VERSALYNX CORPORATION',
0x0020D1: u'MICROCOMPUTER SYSTEMS (M) SDN.',
0x0020D2: u'RAD DATA COMMUNICATIONS, LTD.',
0x0020D3: u'OST (OUEST STANDARD TELEMATIQU',
0x0020D4: u'CABLETRON - ZEITTNET INC.',
0x0020D5: u'VIPA GMBH',
0x0020D6: u'BREEZECOM',
0x0020D7: u'JAPAN MINICOMPUTER SYSTEMS CO., Ltd.',
0x0020D8: u'Nortel Networks',
0x0020D9: u'PANASONIC TECHNOLOGIES, INC./MIECO-US',
0x0020DA: u'Alcatel North America ESD',
0x0020DB: u'XNET TECHNOLOGY, INC.',
0x0020DC: u'DENSITRON TAIWAN LTD.',
0x0020DD: u'Cybertec Pty Ltd',
0x0020DE: u'JAPAN DIGITAL LABORAT\'Y CO.LTD',
0x0020DF: u'KYOSAN ELECTRIC MFG. CO., LTD.',
0x0020E0: u'Actiontec Electronics, Inc.',
0x0020E1: u'ALAMAR ELECTRONICS',
0x0020E2: u'INFORMATION RESOURCE ENGINEERING',
0x0020E3: u'MCD KENCOM CORPORATION',
0x0020E4: u'HSING TECH ENTERPRISE CO., LTD',
0x0020E5: u'APEX DATA, INC.',
0x0020E6: u'LIDKOPING MACHINE TOOLS AB',
0x0020E7: u'B&W NUCLEAR SERVICE COMPANY',
0x0020E8: u'DATATREK CORPORATION',
0x0020E9: u'DANTEL',
0x0020EA: u'EFFICIENT NETWORKS, INC.',
0x0020EB: u'CINCINNATI MICROWAVE, INC.',
0x0020EC: u'TECHWARE SYSTEMS CORP.',
0x0020ED: u'GIGA-BYTE TECHNOLOGY CO., LTD.',
0x0020EE: u'GTECH CORPORATION',
0x0020EF: u'USC CORPORATION',
0x0020F0: u'UNIVERSAL MICROELECTRONICS CO.',
0x0020F1: u'ALTOS INDIA LIMITED',
0x0020F2: u'SUN MICROSYSTEMS, INC.',
0x0020F3: u'RAYNET CORPORATION',
0x0020F4: u'SPECTRIX CORPORATION',
0x0020F5: u'PANDATEL AG',
0x0020F6: u'NET TEK AND KARLNET, INC.',
0x0020F7: u'CYBERDATA',
0x0020F8: u'CARRERA COMPUTERS, INC.',
0x0020F9: u'PARALINK NETWORKS, INC.',
0x0020FA: u'GDE SYSTEMS, INC.',
0x0020FB: u'OCTEL COMMUNICATIONS CORP.',
0x0020FC: u'MATROX',
0x0020FD: u'ITV TECHNOLOGIES, INC.',
0x0020FE: u'TOPWARE INC. / GRAND COMPUTER',
0x0020FF: u'SYMMETRICAL TECHNOLOGIES',
0x002654: u'3Com Corporation',
0x003000: u'ALLWELL TECHNOLOGY CORP.',
0x003001: u'SMP',
0x003002: u'Expand Networks',
0x003003: u'Phasys Ltd.',
0x003004: u'LEADTEK RESEARCH INC.',
0x003005: u'Fujitsu Siemens Computers',
0x003006: u'SUPERPOWER COMPUTER',
0x003007: u'OPTI, INC.',
0x003008: u'AVIO DIGITAL, INC.',
0x003009: u'Tachion Networks, Inc.',
0x00300A: u'AZTECH SYSTEMS LTD.',
0x00300B: u'mPHASE Technologies, Inc.',
0x00300C: u'CONGRUENCY, LTD.',
0x00300D: u'MMC Technology, Inc.',
0x00300E: u'Klotz Digital AG',
0x00300F: u'IMT - Information Management T',
0x003010: u'VISIONETICS INTERNATIONAL',
0x003011: u'HMS FIELDBUS SYSTEMS AB',
0x003012: u'DIGITAL ENGINEERING LTD.',
0x003013: u'NEC Corporation',
0x003014: u'DIVIO, INC.',
0x003015: u'CP CLARE CORP.',
0x003016: u'ISHIDA CO., LTD.',
0x003017: u'BlueArc UK Ltd',
0x003018: u'Jetway Information Co., Ltd.',
0x003019: u'CISCO SYSTEMS, INC.',
0x00301A: u'SMARTBRIDGES PTE. LTD.',
0x00301B: u'SHUTTLE, INC.',
0x00301C: u'ALTVATER AIRDATA SYSTEMS',
0x00301D: u'SKYSTREAM, INC.',
0x00301E: u'3COM Europe Ltd.',
0x00301F: u'OPTICAL NETWORKS, INC.',
0x003020: u'TSI, Inc..',
0x003021: u'HSING TECH. ENTERPRISE CO.,LTD',
0x003022: u'Fong Kai Industrial Co., Ltd.',
0x003023: u'COGENT COMPUTER SYSTEMS, INC.',
0x003024: u'CISCO SYSTEMS, INC.',
0x003025: u'CHECKOUT COMPUTER SYSTEMS, LTD',
0x003026: u'HeiTel Digital Video GmbH',
0x003027: u'KERBANGO, INC.',
0x003028: u'FASE Saldatura srl',
0x003029: u'OPICOM',
0x00302A: u'SOUTHERN INFORMATION',
0x00302B: u'INALP NETWORKS, INC.',
0x00302C: u'SYLANTRO SYSTEMS CORPORATION',
0x00302D: u'QUANTUM BRIDGE COMMUNICATIONS',
0x00302E: u'Hoft & Wessel AG',
0x00302F: u'Smiths Industries',
0x003030: u'HARMONIX CORPORATION',
0x003031: u'LIGHTWAVE COMMUNICATIONS, INC.',
0x003032: u'MagicRam, Inc.',
0x003033: u'ORIENT TELECOM CO., LTD.',
0x003034: u'SET ENGINEERING',
0x003035: u'Corning Incorporated',
0x003036: u'RMP ELEKTRONIKSYSTEME GMBH',
0x003037: u'Packard Bell Nec Services',
0x003038: u'XCP, INC.',
0x003039: u'SOFTBOOK PRESS',
0x00303A: u'MAATEL',
0x00303B: u'PowerCom Technology',
0x00303C: u'ONNTO CORP.',
0x00303D: u'IVA CORPORATION',
0x00303E: u'Radcom Ltd.',
0x00303F: u'TurboComm Tech Inc.',
0x003040: u'CISCO SYSTEMS, INC.',
0x003041: u'SAEJIN T & M CO., LTD.',
0x003042: u'DeTeWe-Deutsche Telephonwerke',
0x003043: u'IDREAM TECHNOLOGIES, PTE. LTD.',
0x003044: u'Portsmith LLC',
0x003045: u'Village Networks, Inc. (VNI)',
0x003046: u'Controlled Electronic Manageme',
0x003047: u'NISSEI ELECTRIC CO., LTD.',
0x003048: u'Supermicro Computer, Inc.',
0x003049: u'BRYANT TECHNOLOGY, LTD.',
0x00304A: u'Fraunhofer IPMS',
0x00304B: u'ORBACOM SYSTEMS, INC.',
0x00304C: u'APPIAN COMMUNICATIONS, INC.',
0x00304D: u'ESI',
0x00304E: u'BUSTEC PRODUCTION LTD.',
0x00304F: u'PLANET Technology Corporation',
0x003050: u'Versa Technology',
0x003051: u'ORBIT AVIONIC & COMMUNICATION',
0x003052: u'ELASTIC NETWORKS',
0x003053: u'Basler AG',
0x003054: u'CASTLENET TECHNOLOGY, INC.',
0x003055: u'Hitachi Semiconductor America,',
0x003056: u'Beck IPC GmbH',
0x003057: u'QTelNet, Inc.',
0x003058: u'API MOTION',
0x003059: u'DIGITAL-LOGIC AG',
0x00305A: u'TELGEN CORPORATION',
0x00305B: u'MODULE DEPARTMENT',
0x00305C: u'SMAR Laboratories Corp.',
0x00305D: u'DIGITRA SYSTEMS, INC.',
0x00305E: u'Abelko Innovation',
0x00305F: u'IMACON APS',
0x003060: u'Powerfile, Inc.',
0x003061: u'MobyTEL',
0x003062: u'PATH 1 NETWORK TECHNOL\'S INC.',
0x003063: u'SANTERA SYSTEMS, INC.',
0x003064: u'ADLINK TECHNOLOGY, INC.',
0x003065: u'APPLE COMPUTER, INC.',
0x003066: u'DIGITAL WIRELESS CORPORATION',
0x003067: u'BIOSTAR MICROTECH INT\'L CORP.',
0x003068: u'CYBERNETICS TECH. CO., LTD.',
0x003069: u'IMPACCT TECHNOLOGY CORP.',
0x00306A: u'PENTA MEDIA CO., LTD.',
0x00306B: u'CMOS SYSTEMS, INC.',
0x00306C: u'Hitex Holding GmbH',
0x00306D: u'LUCENT TECHNOLOGIES',
0x00306E: u'HEWLETT PACKARD',
0x00306F: u'SEYEON TECH. CO., LTD.',
0x003070: u'1Net Corporation',
0x003071: u'Cisco Systems, Inc.',
0x003072: u'INTELLIBYTE INC.',
0x003073: u'International Microsystems, In',
0x003074: u'EQUIINET LTD.',
0x003075: u'ADTECH',
0x003076: u'Akamba Corporation',
0x003077: u'ONPREM NETWORKS',
0x003078: u'Cisco Systems, Inc.',
0x003079: u'CQOS, INC.',
0x00307A: u'Advanced Technology & Systems',
0x00307B: u'Cisco Systems, Inc.',
0x00307C: u'ADID SA',
0x00307D: u'GRE AMERICA, INC.',
0x00307E: u'Redflex Communication Systems',
0x00307F: u'IRLAN LTD.',
0x003080: u'CISCO SYSTEMS, INC.',
0x003081: u'ALTOS C&C',
0x003082: u'TAIHAN ELECTRIC WIRE CO., LTD.',
0x003083: u'Ivron Systems',
0x003084: u'ALLIED TELESYN INTERNAIONAL',
0x003085: u'CISCO SYSTEMS, INC.',
0x003086: u'Transistor Devices, Inc.',
0x003087: u'VEGA GRIESHABER KG',
0x003088: u'Siara Systems, Inc.',
0x003089: u'Spectrapoint Wireless, LLC',
0x00308A: u'NICOTRA SISTEMI S.P.A',
0x00308B: u'Brix Networks',
0x00308C: u'ADVANCED DIGITAL INFORMATION',
0x00308D: u'PINNACLE SYSTEMS, INC.',
0x00308E: u'CROSS MATCH TECHNOLOGIES, INC.',
0x00308F: u'MICRILOR, Inc.',
0x003090: u'CYRA TECHNOLOGIES, INC.',
0x003091: u'TAIWAN FIRST LINE ELEC. CORP.',
0x003092: u'ModuNORM GmbH',
0x003093: u'SONNET TECHNOLOGIES, INC.',
0x003094: u'Cisco Systems, Inc.',
0x003095: u'Procomp Informatics, Ltd.',
0x003096: u'CISCO SYSTEMS, INC.',
0x003097: u'EXOMATIC AB',
0x003098: u'Global Converging Technologies',
0x003099: u'BOENIG UND KALLENBACH OHG',
0x00309A: u'ASTRO TERRA CORP.',
0x00309B: u'Smartware',
0x00309C: u'Timing Applications, Inc.',
0x00309D: u'Nimble Microsystems, Inc.',
0x00309E: u'WORKBIT CORPORATION.',
0x00309F: u'AMBER NETWORKS',
0x0030A0: u'TYCO SUBMARINE SYSTEMS, LTD.',
0x0030A1: u'WEBGATE Inc.',
0x0030A2: u'Lightner Engineering',
0x0030A3: u'CISCO SYSTEMS, INC.',
0x0030A4: u'Woodwind Communications System',
0x0030A5: u'ACTIVE POWER',
0x0030A6: u'VIANET TECHNOLOGIES, LTD.',
0x0030A7: u'SCHWEITZER ENGINEERING',
0x0030A8: u'OL\'E COMMUNICATIONS, INC.',
0x0030A9: u'Netiverse, Inc.',
0x0030AA: u'AXUS MICROSYSTEMS, INC.',
0x0030AB: u'DELTA NETWORKS, INC.',
0x0030AC: u'Systeme Lauer GmbH & Co., Ltd.',
0x0030AD: u'SHANGHAI COMMUNICATION',
0x0030AE: u'Times N System, Inc.',
0x0030AF: u'Honeywell GmbH',
0x0030B0: u'Convergenet Technologies',
0x0030B1: u'aXess-pro networks GmbH',
0x0030B2: u'L-3 Sonoma EO',
0x0030B3: u'San Valley Systems, Inc.',
0x0030B4: u'INTERSIL CORP.',
0x0030B5: u'Tadiran Microwave Networks',
0x0030B6: u'CISCO SYSTEMS, INC.',
0x0030B7: u'Teletrol Systems, Inc.',
0x0030B8: u'RiverDelta Networks',
0x0030B9: u'ECTEL',
0x0030BA: u'AC&T SYSTEM CO., LTD.',
0x0030BB: u'CacheFlow, Inc.',
0x0030BC: u'Optronic AG',
0x0030BD: u'BELKIN COMPONENTS',
0x0030BE: u'City-Net Technology, Inc.',
0x0030BF: u'MULTIDATA GMBH',
0x0030C0: u'Lara Technology, Inc.',
0x0030C1: u'HEWLETT-PACKARD',
0x0030C2: u'COMONE',
0x0030C3: u'FLUECKIGER ELEKTRONIK AG',
0x0030C4: u'Canon Imaging System Technologies, Inc.',
0x0030C5: u'CADENCE DESIGN SYSTEMS',
0x0030C6: u'CONTROL SOLUTIONS, INC.',
0x0030C7: u'MACROMATE CORP.',
0x0030C8: u'GAD LINE, LTD.',
0x0030C9: u'LuxN, N',
0x0030CA: u'Discovery Com',
0x0030CB: u'OMNI FLOW COMPUTERS, INC.',
0x0030CC: u'Tenor Networks, Inc.',
0x0030CD: u'CONEXANT SYSTEMS, INC.',
0x0030CE: u'Zaffire',
0x0030CF: u'TWO TECHNOLOGIES, INC.',
0x0030D0: u'Tellabs',
0x0030D1: u'INOVA CORPORATION',
0x0030D2: u'WIN TECHNOLOGIES, CO., LTD.',
0x0030D3: u'Agilent Technologies',
0x0030D4: u'AAE Systems, Inc',
0x0030D5: u'DResearch GmbH',
0x0030D6: u'MSC VERTRIEBS GMBH',
0x0030D7: u'Innovative Systems, L.L.C.',
0x0030D8: u'SITEK',
0x0030D9: u'DATACORE SOFTWARE CORP.',
0x0030DA: u'COMTREND CO.',
0x0030DB: u'Mindready Solutions, Inc.',
0x0030DC: u'RIGHTECH CORPORATION',
0x0030DD: u'INDIGITA CORPORATION',
0x0030DE: u'WAGO Kontakttechnik GmbH',
0x0030DF: u'KB/TEL TELECOMUNICACIONES',
0x0030E0: u'OXFORD SEMICONDUCTOR LTD.',
0x0030E1: u'ACROTRON SYSTEMS, INC.',
0x0030E2: u'GARNET SYSTEMS CO., LTD.',
0x0030E3: u'SEDONA NETWORKS CORP.',
0x0030E4: u'CHIYODA SYSTEM RIKEN',
0x0030E5: u'Amper Datos S.A.',
0x0030E6: u'Draeger Medical Systems, Inc.',
0x0030E7: u'CNF MOBILE SOLUTIONS, INC.',
0x0030E8: u'ENSIM CORP.',
0x0030E9: u'GMA COMMUNICATION MANUFACT\'G',
0x0030EA: u'TeraForce Technology Corporation',
0x0030EB: u'TURBONET COMMUNICATIONS, INC.',
0x0030EC: u'BORGARDT',
0x0030ED: u'Expert Magnetics Corp.',
0x0030EE: u'DSG Technology, Inc.',
0x0030EF: u'NEON TECHNOLOGY, INC.',
0x0030F0: u'Uniform Industrial Corp.',
0x0030F1: u'Accton Technology Corp.',
0x0030F2: u'CISCO SYSTEMS, INC.',
0x0030F3: u'At Work Computers',
0x0030F4: u'STARDOT TECHNOLOGIES',
0x0030F5: u'Wild Lab. Ltd.',
0x0030F6: u'SECURELOGIX CORPORATION',
0x0030F7: u'RAMIX INC.',
0x0030F8: u'Dynapro Systems, Inc.',
0x0030F9: u'Sollae Systems Co., Ltd.',
0x0030FA: u'TELICA, INC.',
0x0030FB: u'AZS Technology AG',
0x0030FC: u'Terawave Communications, Inc.',
0x0030FD: u'INTEGRATED SYSTEMS DESIGN',
0x0030FE: u'DSA GmbH',
0x0030FF: u'DATAFAB SYSTEMS, INC.',
0x004000: u'PCI COMPONENTES DA AMZONIA LTD',
0x004001: u'ZYXEL COMMUNICATIONS, INC.',
0x004002: u'PERLE SYSTEMS LIMITED',
0x004003: u'Emerson Process Management Power & Water Solutions, Inc.',
0x004004: u'ICM CO. LTD.',
0x004005: u'ANI COMMUNICATIONS INC.',
0x004006: u'SAMPO TECHNOLOGY CORPORATION',
0x004007: u'TELMAT INFORMATIQUE',
0x004008: u'A PLUS INFO CORPORATION',
0x004009: u'TACHIBANA TECTRON CO., LTD.',
0x00400A: u'PIVOTAL TECHNOLOGIES, INC.',
0x00400B: u'CISCO SYSTEMS, INC.',
0x00400C: u'GENERAL MICRO SYSTEMS, INC.',
0x00400D: u'LANNET DATA COMMUNICATIONS,LTD',
0x00400E: u'MEMOTEC COMMUNICATIONS, INC.',
0x00400F: u'DATACOM TECHNOLOGIES',
0x004010: u'SONIC SYSTEMS, INC.',
0x004011: u'ANDOVER CONTROLS CORPORATION',
0x004012: u'WINDATA, INC.',
0x004013: u'NTT DATA COMM. SYSTEMS CORP.',
0x004014: u'COMSOFT GMBH',
0x004015: u'ASCOM INFRASYS AG',
0x004016: u'HADAX ELECTRONICS, INC.',
0x004017: u'Silex Technology America',
0x004018: u'ADOBE SYSTEMS, INC.',
0x004019: u'AEON SYSTEMS, INC.',
0x00401A: u'FUJI ELECTRIC CO., LTD.',
0x00401B: u'PRINTER SYSTEMS CORP.',
0x00401C: u'AST RESEARCH, INC.',
0x00401D: u'INVISIBLE SOFTWARE, INC.',
0x00401E: u'ICC',
0x00401F: u'COLORGRAPH LTD',
0x004020: u'PINACL COMMUNICATION',
0x004021: u'RASTER GRAPHICS',
0x004022: u'KLEVER COMPUTERS, INC.',
0x004023: u'LOGIC CORPORATION',
0x004024: u'COMPAC INC.',
0x004025: u'MOLECULAR DYNAMICS',
0x004026: u'MELCO, INC.',
0x004027: u'SMC MASSACHUSETTS, INC.',
0x004028: u'NETCOMM LIMITED',
0x004029: u'COMPEX',
0x00402A: u'CANOGA-PERKINS',
0x00402B: u'TRIGEM COMPUTER, INC.',
0x00402C: u'ISIS DISTRIBUTED SYSTEMS, INC.',
0x00402D: u'HARRIS ADACOM CORPORATION',
0x00402E: u'PRECISION SOFTWARE, INC.',
0x00402F: u'XLNT DESIGNS INC.',
0x004030: u'GK COMPUTER',
0x004031: u'KOKUSAI ELECTRIC CO., LTD',
0x004032: u'DIGITAL COMMUNICATIONS',
0x004033: u'ADDTRON TECHNOLOGY CO., LTD.',
0x004034: u'BUSTEK CORPORATION',
0x004035: u'OPCOM',
0x004036: u'TRIBE COMPUTER WORKS, INC.',
0x004037: u'SEA-ILAN, INC.',
0x004038: u'TALENT ELECTRIC INCORPORATED',
0x004039: u'OPTEC DAIICHI DENKO CO., LTD.',
0x00403A: u'IMPACT TECHNOLOGIES',
0x00403B: u'SYNERJET INTERNATIONAL CORP.',
0x00403C: u'FORKS, INC.',
0x00403D: u'TERADATA',
0x00403E: u'RASTER OPS CORPORATION',
0x00403F: u'SSANGYONG COMPUTER SYSTEMS',
0x004040: u'RING ACCESS, INC.',
0x004041: u'FUJIKURA LTD.',
0x004042: u'N.A.T. GMBH',
0x004043: u'NOKIA TELECOMMUNICATIONS',
0x004044: u'QNIX COMPUTER CO., LTD.',
0x004045: u'TWINHEAD CORPORATION',
0x004046: u'UDC RESEARCH LIMITED',
0x004047: u'WIND RIVER SYSTEMS',
0x004048: u'SMD INFORMATICA S.A.',
0x004049: u'TEGIMENTA AG',
0x00404A: u'WEST AUSTRALIAN DEPARTMENT',
0x00404B: u'MAPLE COMPUTER SYSTEMS',
0x00404C: u'HYPERTEC PTY LTD.',
0x00404D: u'TELECOMMUNICATIONS TECHNIQUES',
0x00404E: u'FLUENT, INC.',
0x00404F: u'SPACE & NAVAL WARFARE SYSTEMS',
0x004050: u'IRONICS, INCORPORATED',
0x004051: u'GRACILIS, INC.',
0x004052: u'STAR TECHNOLOGIES, INC.',
0x004053: u'AMPRO COMPUTERS',
0x004054: u'CONNECTION MACHINES SERVICES',
0x004055: u'METRONIX GMBH',
0x004056: u'MCM JAPAN LTD.',
0x004057: u'LOCKHEED - SANDERS',
0x004058: u'KRONOS, INC.',
0x004059: u'YOSHIDA KOGYO K. K.',
0x00405A: u'GOLDSTAR INFORMATION & COMM.',
0x00405B: u'FUNASSET LIMITED',
0x00405C: u'FUTURE SYSTEMS, INC.',
0x00405D: u'STAR-TEK, INC.',
0x00405E: u'NORTH HILLS ISRAEL',
0x00405F: u'AFE COMPUTERS LTD.',
0x004060: u'COMENDEC LTD',
0x004061: u'DATATECH ENTERPRISES CO., LTD.',
0x004062: u'E-SYSTEMS, INC./GARLAND DIV.',
0x004063: u'VIA TECHNOLOGIES, INC.',
0x004064: u'KLA INSTRUMENTS CORPORATION',
0x004065: u'GTE SPACENET',
0x004066: u'HITACHI CABLE, LTD.',
0x004067: u'OMNIBYTE CORPORATION',
0x004068: u'EXTENDED SYSTEMS',
0x004069: u'LEMCOM SYSTEMS, INC.',
0x00406A: u'KENTEK INFORMATION SYSTEMS,INC',
0x00406B: u'SYSGEN',
0x00406C: u'COPERNIQUE',
0x00406D: u'LANCO, INC.',
0x00406E: u'COROLLARY, INC.',
0x00406F: u'SYNC RESEARCH INC.',
0x004070: u'INTERWARE CO., LTD.',
0x004071: u'ATM COMPUTER GMBH',
0x004072: u'Applied Innovation Inc.',
0x004073: u'BASS ASSOCIATES',
0x004074: u'CABLE AND WIRELESS',
0x004075: u'M-TRADE (UK) LTD',
0x004076: u'Sun Conversion Technologies',
0x004077: u'MAXTON TECHNOLOGY CORPORATION',
0x004078: u'WEARNES AUTOMATION PTE LTD',
0x004079: u'JUKO MANUFACTURE COMPANY, LTD.',
0x00407A: u'SOCIETE D\'EXPLOITATION DU CNIT',
0x00407B: u'SCIENTIFIC ATLANTA',
0x00407C: u'QUME CORPORATION',
0x00407D: u'EXTENSION TECHNOLOGY CORP.',
0x00407E: u'EVERGREEN SYSTEMS, INC.',
0x00407F: u'FLIR Systems',
0x004080: u'ATHENIX CORPORATION',
0x004081: u'MANNESMANN SCANGRAPHIC GMBH',
0x004082: u'LABORATORY EQUIPMENT CORP.',
0x004083: u'TDA INDUSTRIA DE PRODUTOS',
0x004084: u'HONEYWELL INC.',
0x004085: u'SAAB INSTRUMENTS AB',
0x004086: u'MICHELS & KLEBERHOFF COMPUTER',
0x004087: u'UBITREX CORPORATION',
0x004088: u'MOBIUS TECHNOLOGIES, INC.',
0x004089: u'MEIDENSHA CORPORATION',
0x00408A: u'TPS TELEPROCESSING SYS. GMBH',
0x00408B: u'RAYLAN CORPORATION',
0x00408C: u'AXIS COMMUNICATIONS AB',
0x00408D: u'THE GOODYEAR TIRE & RUBBER CO.',
0x00408E: u'DIGILOG, INC.',
0x00408F: u'WM-DATA MINFO AB',
0x004090: u'ANSEL COMMUNICATIONS',
0x004091: u'PROCOMP INDUSTRIA ELETRONICA',
0x004092: u'ASP COMPUTER PRODUCTS, INC.',
0x004093: u'PAXDATA NETWORKS LTD.',
0x004094: u'SHOGRAPHICS, INC.',
0x004095: u'R.P.T. INTERGROUPS INT\'L LTD.',
0x004096: u'Cisco Systems, Inc.',
0x004097: u'DATEX DIVISION OF',
0x004098: u'DRESSLER GMBH & CO.',
0x004099: u'NEWGEN SYSTEMS CORP.',
0x00409A: u'NETWORK EXPRESS, INC.',
0x00409B: u'HAL COMPUTER SYSTEMS INC.',
0x00409C: u'TRANSWARE',
0x00409D: u'DIGIBOARD, INC.',
0x00409E: u'CONCURRENT TECHNOLOGIES LTD.',
0x00409F: u'LANCAST/CASAT TECHNOLOGY, INC.',
0x0040A0: u'GOLDSTAR CO., LTD.',
0x0040A1: u'ERGO COMPUTING',
0x0040A2: u'KINGSTAR TECHNOLOGY INC.',
0x0040A3: u'MICROUNITY SYSTEMS ENGINEERING',
0x0040A4: u'ROSE ELECTRONICS',
0x0040A5: u'CLINICOMP INTL.',
0x0040A6: u'Cray, Inc.',
0x0040A7: u'ITAUTEC PHILCO S.A.',
0x0040A8: u'IMF INTERNATIONAL LTD.',
0x0040A9: u'DATACOM INC.',
0x0040AA: u'VALMET AUTOMATION INC.',
0x0040AB: u'ROLAND DG CORPORATION',
0x0040AC: u'SUPER WORKSTATION, INC.',
0x0040AD: u'SMA REGELSYSTEME GMBH',
0x0040AE: u'DELTA CONTROLS, INC.',
0x0040AF: u'DIGITAL PRODUCTS, INC.',
0x0040B0: u'BYTEX CORPORATION, ENGINEERING',
0x0040B1: u'CODONICS INC.',
0x0040B2: u'SYSTEMFORSCHUNG',
0x0040B3: u'PAR MICROSYSTEMS CORPORATION',
0x0040B4: u'NEXTCOM K.K.',
0x0040B5: u'VIDEO TECHNOLOGY COMPUTERS LTD',
0x0040B6: u'COMPUTERM CORPORATION',
0x0040B7: u'STEALTH COMPUTER SYSTEMS',
0x0040B8: u'IDEA ASSOCIATES',
0x0040B9: u'MACQ ELECTRONIQUE SA',
0x0040BA: u'ALLIANT COMPUTER SYSTEMS CORP.',
0x0040BB: u'GOLDSTAR CABLE CO., LTD.',
0x0040BC: u'ALGORITHMICS LTD.',
0x0040BD: u'STARLIGHT NETWORKS, INC.',
0x0040BE: u'BOEING DEFENSE & SPACE',
0x0040BF: u'CHANNEL SYSTEMS INTERN\'L INC.',
0x0040C0: u'VISTA CONTROLS CORPORATION',
0x0040C1: u'BIZERBA-WERKE WILHEIM KRAUT',
0x0040C2: u'APPLIED COMPUTING DEVICES',
0x0040C3: u'FISCHER AND PORTER CO.',
0x0040C4: u'KINKEI SYSTEM CORPORATION',
0x0040C5: u'MICOM COMMUNICATIONS INC.',
0x0040C6: u'FIBERNET RESEARCH, INC.',
0x0040C7: u'RUBY TECH CORPORATION',
0x0040C8: u'MILAN TECHNOLOGY CORPORATION',
0x0040C9: u'NCUBE',
0x0040CA: u'FIRST INTERNAT\'L COMPUTER, INC',
0x0040CB: u'LANWAN TECHNOLOGIES',
0x0040CC: u'SILCOM MANUF\'G TECHNOLOGY INC.',
0x0040CD: u'TERA MICROSYSTEMS, INC.',
0x0040CE: u'NET-SOURCE, INC.',
0x0040CF: u'STRAWBERRY TREE, INC.',
0x0040D0: u'MITAC INTERNATIONAL CORP.',
0x0040D1: u'FUKUDA DENSHI CO., LTD.',
0x0040D2: u'PAGINE CORPORATION',
0x0040D3: u'KIMPSION INTERNATIONAL CORP.',
0x0040D4: u'GAGE TALKER CORP.',
0x0040D5: u'SARTORIUS AG',
0x0040D6: u'LOCAMATION B.V.',
0x0040D7: u'STUDIO GEN INC.',
0x0040D8: u'OCEAN OFFICE AUTOMATION LTD.',
0x0040D9: u'AMERICAN MEGATRENDS INC.',
0x0040DA: u'TELSPEC LTD',
0x0040DB: u'ADVANCED TECHNICAL SOLUTIONS',
0x0040DC: u'TRITEC ELECTRONIC GMBH',
0x0040DD: u'HONG TECHNOLOGIES',
0x0040DE: u'ELETTRONICA SAN GIORGIO',
0x0040DF: u'DIGALOG SYSTEMS, INC.',
0x0040E0: u'ATOMWIDE LTD.',
0x0040E1: u'MARNER INTERNATIONAL, INC.',
0x0040E2: u'MESA RIDGE TECHNOLOGIES, INC.',
0x0040E3: u'QUIN SYSTEMS LTD',
0x0040E4: u'E-M TECHNOLOGY, INC.',
0x0040E5: u'SYBUS CORPORATION',
0x0040E6: u'C.A.E.N.',
0x0040E7: u'ARNOS INSTRUMENTS & COMPUTER',
0x0040E8: u'CHARLES RIVER DATA SYSTEMS,INC',
0x0040E9: u'ACCORD SYSTEMS, INC.',
0x0040EA: u'PLAIN TREE SYSTEMS INC',
0x0040EB: u'MARTIN MARIETTA CORPORATION',
0x0040EC: u'MIKASA SYSTEM ENGINEERING',
0x0040ED: u'NETWORK CONTROLS INT\'NATL INC.',
0x0040EE: u'OPTIMEM',
0x0040EF: u'HYPERCOM, INC.',
0x0040F0: u'MICRO SYSTEMS, INC.',
0x0040F1: u'CHUO ELECTRONICS CO., LTD.',
0x0040F2: u'JANICH & KLASS COMPUTERTECHNIK',
0x0040F3: u'NETCOR',
0x0040F4: u'CAMEO COMMUNICATIONS, INC.',
0x0040F5: u'OEM ENGINES',
0x0040F6: u'KATRON COMPUTERS INC.',
0x0040F7: u'POLAROID MEDICAL IMAGING SYS.',
0x0040F8: u'SYSTEMHAUS DISCOM',
0x0040F9: u'COMBINET',
0x0040FA: u'MICROBOARDS, INC.',
0x0040FB: u'CASCADE COMMUNICATIONS CORP.',
0x0040FC: u'IBR COMPUTER TECHNIK GMBH',
0x0040FD: u'LXE',
0x0040FE: u'SYMPLEX COMMUNICATIONS',
0x0040FF: u'TELEBIT CORPORATION',
0x004252: u'RLX Technologies',
0x004501: u'Versus Technology, Inc.',
0x005000: u'NEXO COMMUNICATIONS, INC.',
0x005001: u'YAMASHITA SYSTEMS CORP.',
0x005002: u'OMNISEC AG',
0x005003: u'GRETAG MACBETH AG',
0x005004: u'3COM CORPORATION',
0x005006: u'TAC AB',
0x005007: u'SIEMENS TELECOMMUNICATION SYSTEMS LIMITED',
0x005008: u'TIVA MICROCOMPUTER CORP. (TMC)',
0x005009: u'PHILIPS BROADBAND NETWORKS',
0x00500A: u'IRIS TECHNOLOGIES, INC.',
0x00500B: u'CISCO SYSTEMS, INC.',
0x00500C: u'e-Tek Labs, Inc.',
0x00500D: u'SATORI ELECTORIC CO., LTD.',
0x00500E: u'CHROMATIS NETWORKS, INC.',
0x00500F: u'CISCO SYSTEMS, INC.',
0x005010: u'NovaNET Learning, Inc.',
0x005012: u'CBL - GMBH',
0x005013: u'Chaparral Network Storage',
0x005014: u'CISCO SYSTEMS, INC.',
0x005015: u'BRIGHT STAR ENGINEERING',
0x005016: u'SST/WOODHEAD INDUSTRIES',
0x005017: u'RSR S.R.L.',
0x005018: u'AMIT, Inc.',
0x005019: u'SPRING TIDE NETWORKS, INC.',
0x00501A: u'UISIQN',
0x00501B: u'ABL CANADA, INC.',
0x00501C: u'JATOM SYSTEMS, INC.',
0x00501E: u'Miranda Technologies, Inc.',
0x00501F: u'MRG SYSTEMS, LTD.',
0x005020: u'MEDIASTAR CO., LTD.',
0x005021: u'EIS INTERNATIONAL, INC.',
0x005022: u'ZONET TECHNOLOGY, INC.',
0x005023: u'PG DESIGN ELECTRONICS, INC.',
0x005024: u'NAVIC SYSTEMS, INC.',
0x005026: u'COSYSTEMS, INC.',
0x005027: u'GENICOM CORPORATION',
0x005028: u'AVAL COMMUNICATIONS',
0x005029: u'1394 PRINTER WORKING GROUP',
0x00502A: u'CISCO SYSTEMS, INC.',
0x00502B: u'GENRAD LTD.',
0x00502C: u'SOYO COMPUTER, INC.',
0x00502D: u'ACCEL, INC.',
0x00502E: u'CAMBEX CORPORATION',
0x00502F: u'TollBridge Technologies, Inc.',
0x005030: u'FUTURE PLUS SYSTEMS',
0x005031: u'AEROFLEX LABORATORIES, INC.',
0x005032: u'PICAZO COMMUNICATIONS, INC.',
0x005033: u'MAYAN NETWORKS',
0x005036: u'NETCAM, LTD.',
0x005037: u'KOGA ELECTRONICS CO.',
0x005038: u'DAIN TELECOM CO., LTD.',
0x005039: u'MARINER NETWORKS',
0x00503A: u'DATONG ELECTRONICS LTD.',
0x00503B: u'MEDIAFIRE CORPORATION',
0x00503C: u'TSINGHUA NOVEL ELECTRONICS',
0x00503E: u'CISCO SYSTEMS, INC.',
0x00503F: u'ANCHOR GAMES',
0x005040: u'Matsushita Electric Works, Ltd.',
0x005041: u'Coretronic Corporation',
0x005042: u'SCI MANUFACTURING SINGAPORE PTE, LTD.',
0x005043: u'MARVELL SEMICONDUCTOR, INC.',
0x005044: u'ASACA CORPORATION',
0x005045: u'RIOWORKS SOLUTIONS, INC.',
0x005046: u'MENICX INTERNATIONAL CO., LTD.',
0x005047: u'PRIVATE',
0x005048: u'INFOLIBRIA',
0x005049: u'ELLACOYA NETWORKS, INC.',
0x00504A: u'ELTECO A.S.',
0x00504B: u'BARCONET N.V.',
0x00504C: u'GALIL MOTION CONTROL, INC.',
0x00504D: u'TOKYO ELECTRON DEVICE LTD.',
0x00504E: u'SIERRA MONITOR CORP.',
0x00504F: u'OLENCOM ELECTRONICS',
0x005050: u'CISCO SYSTEMS, INC.',
0x005051: u'IWATSU ELECTRIC CO., LTD.',
0x005052: u'TIARA NETWORKS, INC.',
0x005053: u'CISCO SYSTEMS, INC.',
0x005054: u'CISCO SYSTEMS, INC.',
0x005055: u'DOMS A/S',
0x005056: u'VMWare, Inc.',
0x005057: u'BROADBAND ACCESS SYSTEMS',
0x005058: u'VegaStream Limted',
0x005059: u'iBAHN',
0x00505A: u'NETWORK ALCHEMY, INC.',
0x00505B: u'KAWASAKI LSI U.S.A., INC.',
0x00505C: u'TUNDO CORPORATION',
0x00505E: u'DIGITEK MICROLOGIC S.A.',
0x00505F: u'BRAND INNOVATORS',
0x005060: u'TANDBERG TELECOM AS',
0x005062: u'KOUWELL ELECTRONICS CORP. **',
0x005063: u'OY COMSEL SYSTEM AB',
0x005064: u'CAE ELECTRONICS',
0x005065: u'DENSEI-LAMBAD Co., Ltd.',
0x005066: u'AtecoM GmbH advanced telecomunication modules',
0x005067: u'AEROCOMM, INC.',
0x005068: u'ELECTRONIC INDUSTRIES ASSOCIATION',
0x005069: u'PixStream Incorporated',
0x00506A: u'EDEVA, INC.',
0x00506B: u'SPX-ATEG',
0x00506C: u'G & L BEIJER ELECTRONICS AB',
0x00506D: u'VIDEOJET SYSTEMS',
0x00506E: u'CORDER ENGINEERING CORPORATION',
0x00506F: u'G-CONNECT',
0x005070: u'CHAINTECH COMPUTER CO., LTD.',
0x005071: u'AIWA CO., LTD.',
0x005072: u'CORVIS CORPORATION',
0x005073: u'CISCO SYSTEMS, INC.',
0x005074: u'ADVANCED HI-TECH CORP.',
0x005075: u'KESTREL SOLUTIONS',
0x005076: u'IBM',
0x005077: u'PROLIFIC TECHNOLOGY, INC.',
0x005078: u'MEGATON HOUSE, LTD.',
0x005079: u'PRIVATE',
0x00507A: u'XPEED, INC.',
0x00507B: u'MERLOT COMMUNICATIONS',
0x00507C: u'VIDEOCON AG',
0x00507D: u'IFP',
0x00507E: u'NEWER TECHNOLOGY',
0x00507F: u'DrayTek Corp.',
0x005080: u'CISCO SYSTEMS, INC.',
0x005081: u'MURATA MACHINERY, LTD.',
0x005082: u'FORESSON CORPORATION',
0x005083: u'GILBARCO, INC.',
0x005084: u'ATL PRODUCTS',
0x005086: u'TELKOM SA, LTD.',
0x005087: u'TERASAKI ELECTRIC CO., LTD.',
0x005088: u'AMANO CORPORATION',
0x005089: u'SAFETY MANAGEMENT SYSTEMS',
0x00508B: u'COMPAQ COMPUTER CORPORATION',
0x00508C: u'RSI SYSTEMS',
0x00508D: u'ABIT COMPUTER CORPORATION',
0x00508E: u'OPTIMATION, INC.',
0x00508F: u'ASITA TECHNOLOGIES INT\'L LTD.',
0x005090: u'DCTRI',
0x005091: u'NETACCESS, INC.',
0x005092: u'RIGAKU INDUSTRIAL CORPORATION',
0x005093: u'BOEING',
0x005094: u'PACE MICRO TECHNOLOGY PLC',
0x005095: u'PERACOM NETWORKS',
0x005096: u'SALIX TECHNOLOGIES, INC.',
0x005097: u'MMC-EMBEDDED COMPUTERTECHNIK GmbH',
0x005098: u'GLOBALOOP, LTD.',
0x005099: u'3COM EUROPE, LTD.',
0x00509A: u'TAG ELECTRONIC SYSTEMS',
0x00509B: u'SWITCHCORE AB',
0x00509C: u'BETA RESEARCH',
0x00509D: u'THE INDUSTREE B.V.',
0x00509E: u'Les Technologies SoftAcoustik Inc.',
0x00509F: u'HORIZON COMPUTER',
0x0050A0: u'DELTA COMPUTER SYSTEMS, INC.',
0x0050A1: u'CARLO GAVAZZI, INC.',
0x0050A2: u'CISCO SYSTEMS, INC.',
0x0050A3: u'TransMedia Communications, Inc.',
0x0050A4: u'IO TECH, INC.',
0x0050A5: u'CAPITOL BUSINESS SYSTEMS, LTD.',
0x0050A6: u'OPTRONICS',
0x0050A7: u'CISCO SYSTEMS, INC.',
0x0050A8: u'OpenCon Systems, Inc.',
0x0050A9: u'MOLDAT WIRELESS TECHNOLGIES',
0x0050AA: u'KONICA MINOLTA HOLDINGS, INC.',
0x0050AB: u'NALTEC, INC.',
0x0050AC: u'MAPLE COMPUTER CORPORATION',
0x0050AD: u'CommUnique Wireless Corp.',
0x0050AE: u'IWAKI ELECTRONICS CO., LTD.',
0x0050AF: u'INTERGON, INC.',
0x0050B0: u'TECHNOLOGY ATLANTA CORPORATION',
0x0050B1: u'GIDDINGS & LEWIS',
0x0050B2: u'BRODEL AUTOMATION',
0x0050B3: u'VOICEBOARD CORPORATION',
0x0050B4: u'SATCHWELL CONTROL SYSTEMS, LTD',
0x0050B5: u'FICHET-BAUCHE',
0x0050B6: u'GOOD WAY IND. CO., LTD.',
0x0050B7: u'BOSER TECHNOLOGY CO., LTD.',
0x0050B8: u'INOVA COMPUTERS GMBH & CO. KG',
0x0050B9: u'XITRON TECHNOLOGIES, INC.',
0x0050BA: u'D-LINK',
0x0050BB: u'CMS TECHNOLOGIES',
0x0050BC: u'HAMMER STORAGE SOLUTIONS',
0x0050BD: u'CISCO SYSTEMS, INC.',
0x0050BE: u'FAST MULTIMEDIA AG',
0x0050BF: u'MOTOTECH INC.',
0x0050C0: u'GATAN, INC.',
0x0050C1: u'GEMFLEX NETWORKS, LTD.',
0x0050C2: u'IEEE REGISTRATION AUTHORITY',
0x0050C4: u'IMD',
0x0050C5: u'ADS TECHNOLOGIES, INC.',
0x0050C6: u'LOOP TELECOMMUNICATION INTERNATIONAL, INC.',
0x0050C8: u'ADDONICS COMMUNICATIONS, INC.',
0x0050C9: u'MASPRO DENKOH CORP.',
0x0050CA: u'NET TO NET TECHNOLOGIES',
0x0050CB: u'JETTER',
0x0050CC: u'XYRATEX',
0x0050CD: u'DIGIANSWER A/S',
0x0050CE: u'LG INTERNATIONAL CORP.',
0x0050CF: u'VANLINK COMMUNICATION TECHNOLOGY RESEARCH INSTITUTE',
0x0050D0: u'MINERVA SYSTEMS',
0x0050D1: u'CISCO SYSTEMS, INC.',
0x0050D2: u'CMC Electronics Inc',
0x0050D3: u'DIGITAL AUDIO PROCESSING PTY. LTD.',
0x0050D4: u'JOOHONG INFORMATION &',
0x0050D5: u'AD SYSTEMS CORP.',
0x0050D6: u'ATLAS COPCO TOOLS AB',
0x0050D7: u'TELSTRAT',
0x0050D8: u'UNICORN COMPUTER CORP.',
0x0050D9: u'ENGETRON-ENGENHARIA ELETRONICA IND. e COM. LTDA',
0x0050DA: u'3COM CORPORATION',
0x0050DB: u'CONTEMPORARY CONTROL',
0x0050DC: u'TAS TELEFONBAU A. SCHWABE GMBH & CO. KG',
0x0050DD: u'SERRA SOLDADURA, S.A.',
0x0050DE: u'SIGNUM SYSTEMS CORP.',
0x0050DF: u'AirFiber, Inc.',
0x0050E1: u'NS TECH ELECTRONICS SDN BHD',
0x0050E2: u'CISCO SYSTEMS, INC.',
0x0050E3: u'Terayon Communications Systems',
0x0050E4: u'APPLE COMPUTER, INC.',
0x0050E6: u'HAKUSAN CORPORATION',
0x0050E7: u'PARADISE INNOVATIONS (ASIA)',
0x0050E8: u'NOMADIX INC.',
0x0050EA: u'XEL COMMUNICATIONS, INC.',
0x0050EB: u'ALPHA-TOP CORPORATION',
0x0050EC: u'OLICOM A/S',
0x0050ED: u'ANDA NETWORKS',
0x0050EE: u'TEK DIGITEL CORPORATION',
0x0050EF: u'SPE Systemhaus GmbH',
0x0050F0: u'CISCO SYSTEMS, INC.',
0x0050F1: u'LIBIT SIGNAL PROCESSING, LTD.',
0x0050F2: u'MICROSOFT CORP.',
0x0050F3: u'GLOBAL NET INFORMATION CO., Ltd.',
0x0050F4: u'SIGMATEK GMBH & CO. KG',
0x0050F6: u'PAN-INTERNATIONAL INDUSTRIAL CORP.',
0x0050F7: u'VENTURE MANUFACTURING (SINGAPORE) LTD.',
0x0050F8: u'ENTREGA TECHNOLOGIES, INC.',
0x0050F9: u'SENSORMATIC ACD',
0x0050FA: u'OXTEL, LTD.',
0x0050FB: u'VSK ELECTRONICS',
0x0050FC: u'EDIMAX TECHNOLOGY CO., LTD.',
0x0050FD: u'VISIONCOMM CO., LTD.',
0x0050FE: u'PCTVnet ASA',
0x0050FF: u'HAKKO ELECTRONICS CO., LTD.',
0x006000: u'XYCOM INC.',
0x006001: u'InnoSys, Inc.',
0x006002: u'SCREEN SUBTITLING SYSTEMS, LTD',
0x006003: u'TERAOKA WEIGH SYSTEM PTE, LTD.',
0x006004: u'COMPUTADORES MODULARES SA',
0x006005: u'FEEDBACK DATA LTD.',
0x006006: u'SOTEC CO., LTD',
0x006007: u'ACRES GAMING, INC.',
0x006008: u'3COM CORPORATION',
0x006009: u'CISCO SYSTEMS, INC.',
0x00600A: u'SORD COMPUTER CORPORATION',
0x00600B: u'LOGWARE GmbH',
0x00600C: u'APPLIED DATA SYSTEMS, INC.',
0x00600D: u'Digital Logic GmbH',
0x00600E: u'WAVENET INTERNATIONAL, INC.',
0x00600F: u'WESTELL, INC.',
0x006010: u'NETWORK MACHINES, INC.',
0x006011: u'CRYSTAL SEMICONDUCTOR CORP.',
0x006012: u'POWER COMPUTING CORPORATION',
0x006013: u'NETSTAL MASCHINEN AG',
0x006014: u'EDEC CO., LTD.',
0x006015: u'NET2NET CORPORATION',
0x006016: u'CLARIION',
0x006017: u'TOKIMEC INC.',
0x006018: u'STELLAR ONE CORPORATION',
0x006019: u'Roche Diagnostics',
0x00601A: u'KEITHLEY INSTRUMENTS',
0x00601B: u'MESA ELECTRONICS',
0x00601C: u'TELXON CORPORATION',
0x00601D: u'LUCENT TECHNOLOGIES',
0x00601E: u'SOFTLAB, INC.',
0x00601F: u'STALLION TECHNOLOGIES',
0x006020: u'PIVOTAL NETWORKING, INC.',
0x006021: u'DSC CORPORATION',
0x006022: u'VICOM SYSTEMS, INC.',
0x006023: u'PERICOM SEMICONDUCTOR CORP.',
0x006024: u'GRADIENT TECHNOLOGIES, INC.',
0x006025: u'ACTIVE IMAGING PLC',
0x006026: u'VIKING COMPONENTS, INC.',
0x006027: u'Superior Modular Products',
0x006028: u'MACROVISION CORPORATION',
0x006029: u'CARY PERIPHERALS INC.',
0x00602A: u'SYMICRON COMPUTER COMMUNICATIONS, LTD.',
0x00602B: u'PEAK AUDIO',
0x00602C: u'LINX Data Terminals, Inc.',
0x00602D: u'ALERTON TECHNOLOGIES, INC.',
0x00602E: u'CYCLADES CORPORATION',
0x00602F: u'CISCO SYSTEMS, INC.',
0x006030: u'VILLAGE TRONIC ENTWICKLUNG',
0x006031: u'HRK SYSTEMS',
0x006032: u'I-CUBE, INC.',
0x006033: u'ACUITY IMAGING, INC.',
0x006034: u'ROBERT BOSCH GmbH',
0x006035: u'DALLAS SEMICONDUCTOR, INC.',
0x006036: u'AUSTRIAN RESEARCH CENTER SEIBERSDORF',
0x006037: u'NXP Semiconductors',
0x006038: u'Nortel Networks',
0x006039: u'SanCom Technology, Inc.',
0x00603A: u'QUICK CONTROLS LTD.',
0x00603B: u'AMTEC spa',
0x00603C: u'HAGIWARA SYS-COM CO., LTD.',
0x00603D: u'3CX',
0x00603E: u'CISCO SYSTEMS, INC.',
0x00603F: u'PATAPSCO DESIGNS',
0x006040: u'NETRO CORP.',
0x006041: u'Yokogawa Electric Corporation',
0x006042: u'TKS (USA), INC.',
0x006043: u'ComSoft Systems, Inc.',
0x006044: u'LITTON/POLY-SCIENTIFIC',
0x006045: u'PATHLIGHT TECHNOLOGIES',
0x006046: u'VMETRO, INC.',
0x006047: u'CISCO SYSTEMS, INC.',
0x006048: u'EMC CORPORATION',
0x006049: u'VINA TECHNOLOGIES',
0x00604A: u'SAIC IDEAS GROUP',
0x00604B: u'Safe-com GmbH & Co. KG',
0x00604C: u'SAGEM SA',
0x00604D: u'MMC NETWORKS, INC.',
0x00604E: u'CYCLE COMPUTER CORPORATION, INC.',
0x00604F: u'SUZUKI MFG. CO., LTD.',
0x006050: u'INTERNIX INC.',
0x006051: u'QUALITY SEMICONDUCTOR',
0x006052: u'PERIPHERALS ENTERPRISE CO., Ltd.',
0x006053: u'TOYODA MACHINE WORKS, LTD.',
0x006054: u'CONTROLWARE GMBH',
0x006055: u'CORNELL UNIVERSITY',
0x006056: u'NETWORK TOOLS, INC.',
0x006057: u'MURATA MANUFACTURING CO., LTD.',
0x006058: u'COPPER MOUNTAIN COMMUNICATIONS, INC.',
0x006059: u'TECHNICAL COMMUNICATIONS CORP.',
0x00605A: u'CELCORE, INC.',
0x00605B: u'IntraServer Technology, Inc.',
0x00605C: u'CISCO SYSTEMS, INC.',
0x00605D: u'SCANIVALVE CORP.',
0x00605E: u'LIBERTY TECHNOLOGY NETWORKING',
0x00605F: u'NIPPON UNISOFT CORPORATION',
0x006060: u'DAWNING TECHNOLOGIES, INC.',
0x006061: u'WHISTLE COMMUNICATIONS CORP.',
0x006062: u'TELESYNC, INC.',
0x006063: u'PSION DACOM PLC.',
0x006064: u'NETCOMM LIMITED',
0x006065: u'BERNECKER & RAINER INDUSTRIE-ELEKTRONIC GmbH',
0x006066: u'LACROIX TECHNOLGIE',
0x006067: u'ACER NETXUS INC.',
0x006068: u'EICON TECHNOLOGY CORPORATION',
0x006069: u'BROCADE COMMUNICATIONS SYSTEMS, Inc.',
0x00606A: u'MITSUBISHI WIRELESS COMMUNICATIONS. INC.',
0x00606B: u'Synclayer Inc.',
0x00606C: u'ARESCOM',
0x00606D: u'DIGITAL EQUIPMENT CORP.',
0x00606E: u'DAVICOM SEMICONDUCTOR, INC.',
0x00606F: u'CLARION CORPORATION OF AMERICA',
0x006070: u'CISCO SYSTEMS, INC.',
0x006071: u'MIDAS LAB, INC.',
0x006072: u'VXL INSTRUMENTS, LIMITED',
0x006073: u'REDCREEK COMMUNICATIONS, INC.',
0x006074: u'QSC AUDIO PRODUCTS',
0x006075: u'PENTEK, INC.',
0x006076: u'SCHLUMBERGER TECHNOLOGIES RETAIL PETROLEUM SYSTEMS',
0x006077: u'PRISA NETWORKS',
0x006078: u'POWER MEASUREMENT LTD.',
0x006079: u'Mainstream Data, Inc.',
0x00607A: u'DVS GmbH',
0x00607B: u'FORE SYSTEMS, INC.',
0x00607C: u'WaveAccess, Ltd.',
0x00607D: u'SENTIENT NETWORKS INC.',
0x00607E: u'GIGALABS, INC.',
0x00607F: u'AURORA TECHNOLOGIES, INC.',
0x006080: u'MICROTRONIX DATACOM LTD.',
0x006081: u'TV/COM INTERNATIONAL',
0x006082: u'NOVALINK TECHNOLOGIES, INC.',
0x006083: u'CISCO SYSTEMS, INC.',
0x006084: u'DIGITAL VIDEO',
0x006085: u'Storage Concepts',
0x006086: u'LOGIC REPLACEMENT TECH. LTD.',
0x006087: u'KANSAI ELECTRIC CO., LTD.',
0x006088: u'WHITE MOUNTAIN DSP, INC.',
0x006089: u'XATA',
0x00608A: u'CITADEL COMPUTER',
0x00608B: u'ConferTech International',
0x00608C: u'3COM CORPORATION',
0x00608D: u'UNIPULSE CORP.',
0x00608E: u'HE ELECTRONICS, TECHNOLOGIE & SYSTEMTECHNIK GmbH',
0x00608F: u'TEKRAM TECHNOLOGY CO., LTD.',
0x006090: u'ABLE COMMUNICATIONS, INC.',
0x006091: u'FIRST PACIFIC NETWORKS, INC.',
0x006092: u'MICRO/SYS, INC.',
0x006093: u'VARIAN',
0x006094: u'IBM CORP.',
0x006095: u'ACCU-TIME SYSTEMS, INC.',
0x006096: u'T.S. MICROTECH INC.',
0x006097: u'3COM CORPORATION',
0x006098: u'HT COMMUNICATIONS',
0x006099: u'SBE, Inc.',
0x00609A: u'NJK TECHNO CO.',
0x00609B: u'ASTRO-MED, INC.',
0x00609C: u'Perkin-Elmer Incorporated',
0x00609D: u'PMI FOOD EQUIPMENT GROUP',
0x00609E: u'ASC X3 - INFORMATION TECHNOLOGY STANDARDS SECRETARIATS',
0x00609F: u'PHAST CORPORATION',
0x0060A0: u'SWITCHED NETWORK TECHNOLOGIES, INC.',
0x0060A1: u'VPNet, Inc.',
0x0060A2: u'NIHON UNISYS LIMITED CO.',
0x0060A3: u'CONTINUUM TECHNOLOGY CORP.',
0x0060A4: u'GRINAKER SYSTEM TECHNOLOGIES',
0x0060A5: u'PERFORMANCE TELECOM CORP.',
0x0060A6: u'PARTICLE MEASURING SYSTEMS',
0x0060A7: u'MICROSENS GmbH & CO. KG',
0x0060A8: u'TIDOMAT AB',
0x0060A9: u'GESYTEC MbH',
0x0060AA: u'INTELLIGENT DEVICES INC. (IDI)',
0x0060AB: u'LARSCOM INCORPORATED',
0x0060AC: u'RESILIENCE CORPORATION',
0x0060AD: u'MegaChips Corporation',
0x0060AE: u'TRIO INFORMATION SYSTEMS AB',
0x0060AF: u'PACIFIC MICRO DATA, INC.',
0x0060B0: u'HEWLETT-PACKARD CO.',
0x0060B1: u'INPUT/OUTPUT, INC.',
0x0060B2: u'PROCESS CONTROL CORP.',
0x0060B3: u'Z-COM, INC.',
0x0060B4: u'GLENAYRE R&D INC.',
0x0060B5: u'KEBA GmbH',
0x0060B6: u'LAND COMPUTER CO., LTD.',
0x0060B7: u'CHANNELMATIC, INC.',
0x0060B8: u'CORELIS INC.',
0x0060B9: u'NITSUKO CORPORATION',
0x0060BA: u'SAHARA NETWORKS, INC.',
0x0060BB: u'CABLETRON - NETLINK, INC.',
0x0060BC: u'KeunYoung Electronics & Communication Co., Ltd.',
0x0060BD: u'HUBBELL-PULSECOM',
0x0060BE: u'WEBTRONICS',
0x0060BF: u'MACRAIGOR SYSTEMS, INC.',
0x0060C0: u'NERA AS',
0x0060C1: u'WaveSpan Corporation',
0x0060C2: u'MPL AG',
0x0060C3: u'NETVISION CORPORATION',
0x0060C4: u'SOLITON SYSTEMS K.K.',
0x0060C5: u'ANCOT CORP.',
0x0060C6: u'DCS AG',
0x0060C7: u'AMATI COMMUNICATIONS CORP.',
0x0060C8: u'KUKA WELDING SYSTEMS & ROBOTS',
0x0060C9: u'ControlNet, Inc.',
0x0060CA: u'HARMONIC SYSTEMS INCORPORATED',
0x0060CB: u'HITACHI ZOSEN CORPORATION',
0x0060CC: u'EMTRAK, INCORPORATED',
0x0060CD: u'VideoServer, Inc.',
0x0060CE: u'ACCLAIM COMMUNICATIONS',
0x0060CF: u'ALTEON NETWORKS, INC.',
0x0060D0: u'SNMP RESEARCH INCORPORATED',
0x0060D1: u'CASCADE COMMUNICATIONS',
0x0060D2: u'LUCENT TECHNOLOGIES TAIWAN TELECOMMUNICATIONS CO., LTD.',
0x0060D3: u'AT&T',
0x0060D4: u'ELDAT COMMUNICATION LTD.',
0x0060D5: u'MIYACHI TECHNOS CORP.',
0x0060D6: u'NovAtel Wireless Technologies Ltd.',
0x0060D7: u'ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE (EPFL)',
0x0060D8: u'ELMIC SYSTEMS, INC.',
0x0060D9: u'TRANSYS NETWORKS INC.',
0x0060DA: u'JBM ELECTRONICS CO.',
0x0060DB: u'NTP ELEKTRONIK A/S',
0x0060DC: u'Toyo Network Systems Co, Ltd.',
0x0060DD: u'MYRICOM, INC.',
0x0060DE: u'KAYSER-THREDE GmbH',
0x0060DF: u'CNT Corporation',
0x0060E0: u'AXIOM TECHNOLOGY CO., LTD.',
0x0060E1: u'ORCKIT COMMUNICATIONS LTD.',
0x0060E2: u'QUEST ENGINEERING & DEVELOPMENT',
0x0060E3: u'ARBIN INSTRUMENTS',
0x0060E4: u'COMPUSERVE, INC.',
0x0060E5: u'FUJI AUTOMATION CO., LTD.',
0x0060E6: u'SHOMITI SYSTEMS INCORPORATED',
0x0060E7: u'RANDATA',
0x0060E8: u'HITACHI COMPUTER PRODUCTS (AMERICA), INC.',
0x0060E9: u'ATOP TECHNOLOGIES, INC.',
0x0060EA: u'StreamLogic',
0x0060EB: u'FOURTHTRACK SYSTEMS',
0x0060EC: u'HERMARY OPTO ELECTRONICS INC.',
0x0060ED: u'RICARDO TEST AUTOMATION LTD.',
0x0060EE: u'APOLLO',
0x0060EF: u'FLYTECH TECHNOLOGY CO., LTD.',
0x0060F0: u'JOHNSON & JOHNSON MEDICAL, INC',
0x0060F1: u'EXP COMPUTER, INC.',
0x0060F2: u'LASERGRAPHICS, INC.',
0x0060F3: u'Performance Analysis Broadband, Spirent plc',
0x0060F4: u'ADVANCED COMPUTER SOLUTIONS, Inc.',
0x0060F5: u'ICON WEST, INC.',
0x0060F6: u'NEXTEST COMMUNICATIONS PRODUCTS, INC.',
0x0060F7: u'DATAFUSION SYSTEMS',
0x0060F8: u'Loran International Technologies Inc.',
0x0060F9: u'DIAMOND LANE COMMUNICATIONS',
0x0060FA: u'EDUCATIONAL TECHNOLOGY RESOURCES, INC.',
0x0060FB: u'PACKETEER, INC.',
0x0060FC: u'CONSERVATION THROUGH INNOVATION LTD.',
0x0060FD: u'NetICs, Inc.',
0x0060FE: u'LYNX SYSTEM DEVELOPERS, INC.',
0x0060FF: u'QuVis, Inc.',
0x0070B0: u'M/A-COM INC. COMPANIES',
0x0070B3: u'DATA RECALL LTD.',
0x008000: u'MULTITECH SYSTEMS, INC.',
0x008001: u'PERIPHONICS CORPORATION',
0x008002: u'SATELCOM (UK) LTD',
0x008003: u'HYTEC ELECTRONICS LTD.',
0x008004: u'ANTLOW COMMUNICATIONS, LTD.',
0x008005: u'CACTUS COMPUTER INC.',
0x008006: u'COMPUADD CORPORATION',
0x008007: u'DLOG NC-SYSTEME',
0x008008: u'DYNATECH COMPUTER SYSTEMS',
0x008009: u'JUPITER SYSTEMS, INC.',
0x00800A: u'JAPAN COMPUTER CORP.',
0x00800B: u'CSK CORPORATION',
0x00800C: u'VIDECOM LIMITED',
0x00800D: u'VOSSWINKEL F.U.',
0x00800E: u'ATLANTIX CORPORATION',
0x00800F: u'STANDARD MICROSYSTEMS',
0x008010: u'COMMODORE INTERNATIONAL',
0x008011: u'DIGITAL SYSTEMS INT\'L. INC.',
0x008012: u'INTEGRATED MEASUREMENT SYSTEMS',
0x008013: u'THOMAS-CONRAD CORPORATION',
0x008014: u'ESPRIT SYSTEMS',
0x008015: u'SEIKO SYSTEMS, INC.',
0x008016: u'WANDEL AND GOLTERMANN',
0x008017: u'PFU LIMITED',
0x008018: u'KOBE STEEL, LTD.',
0x008019: u'DAYNA COMMUNICATIONS, INC.',
0x00801A: u'BELL ATLANTIC',
0x00801B: u'KODIAK TECHNOLOGY',
0x00801C: u'NEWPORT SYSTEMS SOLUTIONS',
0x00801D: u'INTEGRATED INFERENCE MACHINES',
0x00801E: u'XINETRON, INC.',
0x00801F: u'KRUPP ATLAS ELECTRONIK GMBH',
0x008020: u'NETWORK PRODUCTS',
0x008021: u'Alcatel Canada Inc.',
0x008022: u'SCAN-OPTICS',
0x008023: u'INTEGRATED BUSINESS NETWORKS',
0x008024: u'KALPANA, INC.',
0x008025: u'STOLLMANN GMBH',
0x008026: u'NETWORK PRODUCTS CORPORATION',
0x008027: u'ADAPTIVE SYSTEMS, INC.',
0x008028: u'TRADPOST (HK) LTD',
0x008029: u'EAGLE TECHNOLOGY, INC.',
0x00802A: u'TEST SYSTEMS & SIMULATIONS INC',
0x00802B: u'INTEGRATED MARKETING CO',
0x00802C: u'THE SAGE GROUP PLC',
0x00802D: u'XYLOGICS INC',
0x00802E: u'CASTLE ROCK COMPUTING',
0x00802F: u'NATIONAL INSTRUMENTS CORP.',
0x008030: u'NEXUS ELECTRONICS',
0x008031: u'BASYS, CORP.',
0x008032: u'ACCESS CO., LTD.',
0x008033: u'FORMATION, INC.',
0x008034: u'SMT GOUPIL',
0x008035: u'TECHNOLOGY WORKS, INC.',
0x008036: u'REFLEX MANUFACTURING SYSTEMS',
0x008037: u'Ericsson Group',
0x008038: u'DATA RESEARCH & APPLICATIONS',
0x008039: u'ALCATEL STC AUSTRALIA',
0x00803A: u'VARITYPER, INC.',
0x00803B: u'APT COMMUNICATIONS, INC.',
0x00803C: u'TVS ELECTRONICS LTD',
0x00803D: u'SURIGIKEN CO., LTD.',
0x00803E: u'SYNERNETICS',
0x00803F: u'TATUNG COMPANY',
0x008040: u'JOHN FLUKE MANUFACTURING CO.',
0x008041: u'VEB KOMBINAT ROBOTRON',
0x008042: u'FORCE COMPUTERS',
0x008043: u'NETWORLD, INC.',
0x008044: u'SYSTECH COMPUTER CORP.',
0x008045: u'MATSUSHITA ELECTRIC IND. CO',
0x008046: u'UNIVERSITY OF TORONTO',
0x008047: u'IN-NET CORP.',
0x008048: u'COMPEX INCORPORATED',
0x008049: u'NISSIN ELECTRIC CO., LTD.',
0x00804A: u'PRO-LOG',
0x00804B: u'EAGLE TECHNOLOGIES PTY.LTD.',
0x00804C: u'CONTEC CO., LTD.',
0x00804D: u'CYCLONE MICROSYSTEMS, INC.',
0x00804E: u'APEX COMPUTER COMPANY',
0x00804F: u'DAIKIN INDUSTRIES, LTD.',
0x008050: u'ZIATECH CORPORATION',
0x008051: u'FIBERMUX',
0x008052: u'TECHNICALLY ELITE CONCEPTS',
0x008053: u'INTELLICOM, INC.',
0x008054: u'FRONTIER TECHNOLOGIES CORP.',
0x008055: u'FERMILAB',
0x008056: u'SPHINX ELEKTRONIK GMBH',
0x008057: u'ADSOFT, LTD.',
0x008058: u'PRINTER SYSTEMS CORPORATION',
0x008059: u'STANLEY ELECTRIC CO., LTD',
0x00805A: u'TULIP COMPUTERS INTERNAT\'L B.V',
0x00805B: u'CONDOR SYSTEMS, INC.',
0x00805C: u'AGILIS CORPORATION',
0x00805D: u'CANSTAR',
0x00805E: u'LSI LOGIC CORPORATION',
0x00805F: u'COMPAQ COMPUTER CORPORATION',
0x008060: u'NETWORK INTERFACE CORPORATION',
0x008061: u'LITTON SYSTEMS, INC.',
0x008062: u'INTERFACE CO.',
0x008063: u'RICHARD HIRSCHMANN GMBH & CO.',
0x008064: u'WYSE TECHNOLOGY',
0x008065: u'CYBERGRAPHIC SYSTEMS PTY LTD.',
0x008066: u'ARCOM CONTROL SYSTEMS, LTD.',
0x008067: u'SQUARE D COMPANY',
0x008068: u'YAMATECH SCIENTIFIC LTD.',
0x008069: u'COMPUTONE SYSTEMS',
0x00806A: u'ERI (EMPAC RESEARCH INC.)',
0x00806B: u'SCHMID TELECOMMUNICATION',
0x00806C: u'CEGELEC PROJECTS LTD',
0x00806D: u'CENTURY SYSTEMS CORP.',
0x00806E: u'NIPPON STEEL CORPORATION',
0x00806F: u'ONELAN LTD.',
0x008070: u'COMPUTADORAS MICRON',
0x008071: u'SAI TECHNOLOGY',
0x008072: u'MICROPLEX SYSTEMS LTD.',
0x008073: u'DWB ASSOCIATES',
0x008074: u'FISHER CONTROLS',
0x008075: u'PARSYTEC GMBH',
0x008076: u'MCNC',
0x008077: u'BROTHER INDUSTRIES, LTD.',
0x008078: u'PRACTICAL PERIPHERALS, INC.',
0x008079: u'MICROBUS DESIGNS LTD.',
0x00807A: u'AITECH SYSTEMS LTD.',
0x00807B: u'ARTEL COMMUNICATIONS CORP.',
0x00807C: u'FIBERCOM, INC.',
0x00807D: u'EQUINOX SYSTEMS INC.',
0x00807E: u'SOUTHERN PACIFIC LTD.',
0x00807F: u'DY-4 INCORPORATED',
0x008080: u'DATAMEDIA CORPORATION',
0x008081: u'KENDALL SQUARE RESEARCH CORP.',
0x008082: u'PEP MODULAR COMPUTERS GMBH',
0x008083: u'AMDAHL',
0x008084: u'THE CLOUD INC.',
0x008085: u'H-THREE SYSTEMS CORPORATION',
0x008086: u'COMPUTER GENERATION INC.',
0x008087: u'OKI ELECTRIC INDUSTRY CO., LTD',
0x008088: u'VICTOR COMPANY OF JAPAN, LTD.',
0x008089: u'TECNETICS (PTY) LTD.',
0x00808A: u'SUMMIT MICROSYSTEMS CORP.',
0x00808B: u'DACOLL LIMITED',
0x00808C: u'NetScout Systems, Inc.',
0x00808D: u'WESTCOAST TECHNOLOGY B.V.',
0x00808E: u'RADSTONE TECHNOLOGY',
0x00808F: u'C. ITOH ELECTRONICS, INC.',
0x008090: u'MICROTEK INTERNATIONAL, INC.',
0x008091: u'TOKYO ELECTRIC CO.,LTD',
0x008092: u'JAPAN COMPUTER INDUSTRY, INC.',
0x008093: u'XYRON CORPORATION',
0x008094: u'ALFA LAVAL AUTOMATION AB',
0x008095: u'BASIC MERTON HANDELSGES.M.B.H.',
0x008096: u'HUMAN DESIGNED SYSTEMS, INC.',
0x008097: u'CENTRALP AUTOMATISMES',
0x008098: u'TDK CORPORATION',
0x008099: u'KLOCKNER MOELLER IPC',
0x00809A: u'NOVUS NETWORKS LTD',
0x00809B: u'JUSTSYSTEM CORPORATION',
0x00809C: u'LUXCOM, INC.',
0x00809D: u'Commscraft Ltd.',
0x00809E: u'DATUS GMBH',
0x00809F: u'ALCATEL BUSINESS SYSTEMS',
0x0080A0: u'EDISA HEWLETT PACKARD S/A',
0x0080A1: u'MICROTEST, INC.',
0x0080A2: u'CREATIVE ELECTRONIC SYSTEMS',
0x0080A3: u'LANTRONIX',
0x0080A4: u'LIBERTY ELECTRONICS',
0x0080A5: u'SPEED INTERNATIONAL',
0x0080A6: u'REPUBLIC TECHNOLOGY, INC.',
0x0080A7: u'MEASUREX CORP.',
0x0080A8: u'VITACOM CORPORATION',
0x0080A9: u'CLEARPOINT RESEARCH',
0x0080AA: u'MAXPEED',
0x0080AB: u'DUKANE NETWORK INTEGRATION',
0x0080AC: u'IMLOGIX, DIVISION OF GENESYS',
0x0080AD: u'CNET TECHNOLOGY, INC.',
0x0080AE: u'HUGHES NETWORK SYSTEMS',
0x0080AF: u'ALLUMER CO., LTD.',
0x0080B0: u'ADVANCED INFORMATION',
0x0080B1: u'SOFTCOM A/S',
0x0080B2: u'NETWORK EQUIPMENT TECHNOLOGIES',
0x0080B3: u'AVAL DATA CORPORATION',
0x0080B4: u'SOPHIA SYSTEMS',
0x0080B5: u'UNITED NETWORKS INC.',
0x0080B6: u'THEMIS COMPUTER',
0x0080B7: u'STELLAR COMPUTER',
0x0080B8: u'BUG, INCORPORATED',
0x0080B9: u'ARCHE TECHNOLIGIES INC.',
0x0080BA: u'SPECIALIX (ASIA) PTE, LTD',
0x0080BB: u'HUGHES LAN SYSTEMS',
0x0080BC: u'HITACHI ENGINEERING CO., LTD',
0x0080BD: u'THE FURUKAWA ELECTRIC CO., LTD',
0x0080BE: u'ARIES RESEARCH',
0x0080BF: u'TAKAOKA ELECTRIC MFG. CO. LTD.',
0x0080C0: u'PENRIL DATACOMM',
0x0080C1: u'LANEX CORPORATION',
0x0080C2: u'IEEE 802.1 COMMITTEE',
0x0080C3: u'BICC INFORMATION SYSTEMS & SVC',
0x0080C4: u'DOCUMENT TECHNOLOGIES, INC.',
0x0080C5: u'NOVELLCO DE MEXICO',
0x0080C6: u'NATIONAL DATACOMM CORPORATION',
0x0080C7: u'XIRCOM',
0x0080C8: u'D-LINK SYSTEMS, INC.',
0x0080C9: u'ALBERTA MICROELECTRONIC CENTRE',
0x0080CA: u'NETCOM RESEARCH INCORPORATED',
0x0080CB: u'FALCO DATA PRODUCTS',
0x0080CC: u'MICROWAVE BYPASS SYSTEMS',
0x0080CD: u'MICRONICS COMPUTER, INC.',
0x0080CE: u'BROADCAST TELEVISION SYSTEMS',
0x0080CF: u'EMBEDDED PERFORMANCE INC.',
0x0080D0: u'COMPUTER PERIPHERALS, INC.',
0x0080D1: u'KIMTRON CORPORATION',
0x0080D2: u'SHINNIHONDENKO CO., LTD.',
0x0080D3: u'SHIVA CORP.',
0x0080D4: u'CHASE RESEARCH LTD.',
0x0080D5: u'CADRE TECHNOLOGIES',
0x0080D6: u'NUVOTECH, INC.',
0x0080D7: u'Fantum Engineering',
0x0080D8: u'NETWORK PERIPHERALS INC.',
0x0080D9: u'EMK ELEKTRONIK',
0x0080DA: u'BRUEL & KJAER',
0x0080DB: u'GRAPHON CORPORATION',
0x0080DC: u'PICKER INTERNATIONAL',
0x0080DD: u'GMX INC/GIMIX',
0x0080DE: u'GIPSI S.A.',
0x0080DF: u'ADC CODENOLL TECHNOLOGY CORP.',
0x0080E0: u'XTP SYSTEMS, INC.',
0x0080E1: u'STMICROELECTRONICS',
0x0080E2: u'T.D.I. CO., LTD.',
0x0080E3: u'CORAL NETWORK CORPORATION',
0x0080E4: u'NORTHWEST DIGITAL SYSTEMS, INC',
0x0080E5: u'LSI Logic Corporation',
0x0080E6: u'PEER NETWORKS, INC.',
0x0080E7: u'LYNWOOD SCIENTIFIC DEV. LTD.',
0x0080E8: u'CUMULUS CORPORATIION',
0x0080E9: u'Madge Ltd.',
0x0080EA: u'ADVA Optical Networking Ltd.',
0x0080EB: u'COMPCONTROL B.V.',
0x0080EC: u'SUPERCOMPUTING SOLUTIONS, INC.',
0x0080ED: u'IQ TECHNOLOGIES, INC.',
0x0080EE: u'THOMSON CSF',
0x0080EF: u'RATIONAL',
0x0080F0: u'Panasonic Communications Co., Ltd.',
0x0080F1: u'OPUS SYSTEMS',
0x0080F2: u'RAYCOM SYSTEMS INC',
0x0080F3: u'SUN ELECTRONICS CORP.',
0x0080F4: u'TELEMECANIQUE ELECTRIQUE',
0x0080F5: u'QUANTEL LTD',
0x0080F6: u'SYNERGY MICROSYSTEMS',
0x0080F7: u'ZENITH ELECTRONICS',
0x0080F8: u'MIZAR, INC.',
0x0080F9: u'HEURIKON CORPORATION',
0x0080FA: u'RWT GMBH',
0x0080FB: u'BVM LIMITED',
0x0080FC: u'AVATAR CORPORATION',
0x0080FD: u'EXSCEED CORPRATION',
0x0080FE: u'AZURE TECHNOLOGIES, INC.',
0x0080FF: u'SOC. DE TELEINFORMATIQUE RTC',
0x009000: u'DIAMOND MULTIMEDIA',
0x009001: u'NISHIMU ELECTRONICS INDUSTRIES CO., LTD.',
0x009002: u'ALLGON AB',
0x009003: u'APLIO',
0x009004: u'3COM EUROPE LTD.',
0x009005: u'PROTECH SYSTEMS CO., LTD.',
0x009006: u'HAMAMATSU PHOTONICS K.K.',
0x009007: u'DOMEX TECHNOLOGY CORP.',
0x009008: u'HanA Systems Inc.',
0x009009: u'i Controls, Inc.',
0x00900A: u'PROTON ELECTRONIC INDUSTRIAL CO., LTD.',
0x00900B: u'LANNER ELECTRONICS, INC.',
0x00900C: u'CISCO SYSTEMS, INC.',
0x00900D: u'Overland Storage Inc.',
0x00900E: u'HANDLINK TECHNOLOGIES, INC.',
0x00900F: u'KAWASAKI HEAVY INDUSTRIES, LTD',
0x009010: u'SIMULATION LABORATORIES, INC.',
0x009011: u'WAVTrace, Inc.',
0x009012: u'GLOBESPAN SEMICONDUCTOR, INC.',
0x009013: u'SAMSAN CORP.',
0x009014: u'ROTORK INSTRUMENTS, LTD.',
0x009015: u'CENTIGRAM COMMUNICATIONS CORP.',
0x009016: u'ZAC',
0x009017: u'ZYPCOM, INC.',
0x009018: u'ITO ELECTRIC INDUSTRY CO, LTD.',
0x009019: u'HERMES ELECTRONICS CO., LTD.',
0x00901A: u'UNISPHERE SOLUTIONS',
0x00901B: u'DIGITAL CONTROLS',
0x00901C: u'mps Software Gmbh',
0x00901D: u'PEC (NZ) LTD.',
0x00901E: u'SELESTA INGEGNE RIA S.P.A.',
0x00901F: u'ADTEC PRODUCTIONS, INC.',
0x009020: u'PHILIPS ANALYTICAL X-RAY B.V.',
0x009021: u'CISCO SYSTEMS, INC.',
0x009022: u'IVEX',
0x009023: u'ZILOG INC.',
0x009024: u'PIPELINKS, INC.',
0x009025: u'VISION SYSTEMS LTD. PTY',
0x009026: u'ADVANCED SWITCHING COMMUNICATIONS, INC.',
0x009027: u'INTEL CORPORATION',
0x009028: u'NIPPON SIGNAL CO., LTD.',
0x009029: u'CRYPTO AG',
0x00902A: u'COMMUNICATION DEVICES, INC.',
0x00902B: u'CISCO SYSTEMS, INC.',
0x00902C: u'DATA & CONTROL EQUIPMENT LTD.',
0x00902D: u'DATA ELECTRONICS (AUST.) PTY, LTD.',
0x00902E: u'NAMCO LIMITED',
0x00902F: u'NETCORE SYSTEMS, INC.',
0x009030: u'HONEYWELL-DATING',
0x009031: u'MYSTICOM, LTD.',
0x009032: u'PELCOMBE GROUP LTD.',
0x009033: u'INNOVAPHONE AG',
0x009034: u'IMAGIC, INC.',
0x009035: u'ALPHA TELECOM, INC.',
0x009036: u'ens, inc.',
0x009037: u'ACUCOMM, INC.',
0x009038: u'FOUNTAIN TECHNOLOGIES, INC.',
0x009039: u'SHASTA NETWORKS',
0x00903A: u'NIHON MEDIA TOOL INC.',
0x00903B: u'TriEMS Research Lab, Inc.',
0x00903C: u'ATLANTIC NETWORK SYSTEMS',
0x00903D: u'BIOPAC SYSTEMS, INC.',
0x00903E: u'N.V. PHILIPS INDUSTRIAL ACTIVITIES',
0x00903F: u'AZTEC RADIOMEDIA',
0x009040: u'Siemens Network Convergence LLC',
0x009041: u'APPLIED DIGITAL ACCESS',
0x009042: u'ECCS, Inc.',
0x009043: u'NICHIBEI DENSHI CO., LTD.',
0x009044: u'ASSURED DIGITAL, INC.',
0x009045: u'Marconi Communications',
0x009046: u'DEXDYNE, LTD.',
0x009047: u'GIGA FAST E. LTD.',
0x009048: u'ZEAL CORPORATION',
0x009049: u'ENTRIDIA CORPORATION',
0x00904A: u'CONCUR SYSTEM TECHNOLOGIES',
0x00904B: u'GemTek Technology Co., Ltd.',
0x00904C: u'EPIGRAM, INC.',
0x00904D: u'SPEC S.A.',
0x00904E: u'DELEM BV',
0x00904F: u'ABB POWER T&D COMPANY, INC.',
0x009050: u'TELESTE OY',
0x009051: u'ULTIMATE TECHNOLOGY CORP.',
0x009052: u'SELCOM ELETTRONICA S.R.L.',
0x009053: u'DAEWOO ELECTRONICS CO., LTD.',
0x009054: u'INNOVATIVE SEMICONDUCTORS, INC',
0x009055: u'PARKER HANNIFIN CORPORATION COMPUMOTOR DIVISION',
0x009056: u'TELESTREAM, INC.',
0x009057: u'AANetcom, Inc.',
0x009058: u'Ultra Electronics Ltd., Command and Control Systems',
0x009059: u'TELECOM DEVICE K.K.',
0x00905A: u'DEARBORN GROUP, INC.',
0x00905B: u'RAYMOND AND LAE ENGINEERING',
0x00905C: u'EDMI',
0x00905D: u'NETCOM SICHERHEITSTECHNIK GmbH',
0x00905E: u'RAULAND-BORG CORPORATION',
0x00905F: u'CISCO SYSTEMS, INC.',
0x009060: u'SYSTEM CREATE CORP.',
0x009061: u'PACIFIC RESEARCH & ENGINEERING CORPORATION',
0x009062: u'ICP VORTEX COMPUTERSYSTEME GmbH',
0x009063: u'COHERENT COMMUNICATIONS SYSTEMS CORPORATION',
0x009064: u'THOMSON BROADCAST SYSTEMS',
0x009065: u'FINISAR CORPORATION',
0x009066: u'Troika Networks, Inc.',
0x009067: u'WalkAbout Computers, Inc.',
0x009068: u'DVT CORP.',
0x009069: u'JUNIPER NETWORKS, INC.',
0x00906A: u'TURNSTONE SYSTEMS, INC.',
0x00906B: u'APPLIED RESOURCES, INC.',
0x00906C: u'Sartorius Hamburg GmbH',
0x00906D: u'CISCO SYSTEMS, INC.',
0x00906E: u'PRAXON, INC.',
0x00906F: u'CISCO SYSTEMS, INC.',
0x009070: u'NEO NETWORKS, INC.',
0x009071: u'Applied Innovation Inc.',
0x009072: u'SIMRAD AS',
0x009073: u'GAIO TECHNOLOGY',
0x009074: u'ARGON NETWORKS, INC.',
0x009075: u'NEC DO BRASIL S.A.',
0x009076: u'FMT AIRCRAFT GATE SUPPORT SYSTEMS AB',
0x009077: u'ADVANCED FIBRE COMMUNICATIONS',
0x009078: u'MER TELEMANAGEMENT SOLUTIONS, LTD.',
0x009079: u'ClearOne, Inc.',
0x00907A: u'SPECTRALINK CORP.',
0x00907B: u'E-TECH, INC.',
0x00907C: u'DIGITALCAST, INC.',
0x00907D: u'Lake Communications',
0x00907E: u'VETRONIX CORP.',
0x00907F: u'WatchGuard Technologies, Inc.',
0x009080: u'NOT LIMITED, INC.',
0x009081: u'ALOHA NETWORKS, INC.',
0x009082: u'FORCE INSTITUTE',
0x009083: u'TURBO COMMUNICATION, INC.',
0x009084: u'ATECH SYSTEM',
0x009085: u'GOLDEN ENTERPRISES, INC.',
0x009086: u'CISCO SYSTEMS, INC.',
0x009087: u'ITIS',
0x009088: u'BAXALL SECURITY LTD.',
0x009089: u'SOFTCOM MICROSYSTEMS, INC.',
0x00908A: u'BAYLY COMMUNICATIONS, INC.',
0x00908B: u'PFU Systems, Inc.',
0x00908C: u'ETREND ELECTRONICS, INC.',
0x00908D: u'VICKERS ELECTRONICS SYSTEMS',
0x00908E: u'Nortel Networks Broadband Access',
0x00908F: u'AUDIO CODES LTD.',
0x009090: u'I-BUS',
0x009091: u'DigitalScape, Inc.',
0x009092: u'CISCO SYSTEMS, INC.',
0x009093: u'NANAO CORPORATION',
0x009094: u'OSPREY TECHNOLOGIES, INC.',
0x009095: u'UNIVERSAL AVIONICS',
0x009096: u'ASKEY COMPUTER CORP.',
0x009097: u'SYCAMORE NETWORKS',
0x009098: u'SBC DESIGNS, INC.',
0x009099: u'ALLIED TELESIS, K.K.',
0x00909A: u'ONE WORLD SYSTEMS, INC.',
0x00909B: u'MARKPOINT AB',
0x00909C: u'Terayon Communications Systems',
0x00909D: u'NovaTech Process Solutions, LLC',
0x00909E: u'Critical IO, LLC',
0x00909F: u'DIGI-DATA CORPORATION',
0x0090A0: u'8X8 INC.',
0x0090A1: u'FLYING PIG SYSTEMS, LTD.',
0x0090A2: u'CYBERTAN TECHNOLOGY, INC.',
0x0090A3: u'Corecess Inc.',
0x0090A4: u'ALTIGA NETWORKS',
0x0090A5: u'SPECTRA LOGIC',
0x0090A6: u'CISCO SYSTEMS, INC.',
0x0090A7: u'CLIENTEC CORPORATION',
0x0090A8: u'NineTiles Networks, Ltd.',
0x0090A9: u'WESTERN DIGITAL',
0x0090AA: u'INDIGO ACTIVE VISION SYSTEMS LIMITED',
0x0090AB: u'CISCO SYSTEMS, INC.',
0x0090AC: u'OPTIVISION, INC.',
0x0090AD: u'ASPECT ELECTRONICS, INC.',
0x0090AE: u'ITALTEL S.p.A.',
0x0090AF: u'J. MORITA MFG. CORP.',
0x0090B0: u'VADEM',
0x0090B1: u'CISCO SYSTEMS, INC.',
0x0090B2: u'AVICI SYSTEMS INC.',
0x0090B3: u'AGRANAT SYSTEMS',
0x0090B4: u'WILLOWBROOK TECHNOLOGIES',
0x0090B5: u'NIKON CORPORATION',
0x0090B6: u'FIBEX SYSTEMS',
0x0090B7: u'DIGITAL LIGHTWAVE, INC.',
0x0090B8: u'ROHDE & SCHWARZ GMBH & CO. KG',
0x0090B9: u'BERAN INSTRUMENTS LTD.',
0x0090BA: u'VALID NETWORKS, INC.',
0x0090BB: u'TAINET COMMUNICATION SYSTEM Corp.',
0x0090BC: u'TELEMANN CO., LTD.',
0x0090BD: u'OMNIA COMMUNICATIONS, INC.',
0x0090BE: u'IBC/INTEGRATED BUSINESS COMPUTERS',
0x0090BF: u'CISCO SYSTEMS, INC.',
0x0090C0: u'K.J. LAW ENGINEERS, INC.',
0x0090C1: u'Peco II, Inc.',
0x0090C2: u'JK microsystems, Inc.',
0x0090C3: u'TOPIC SEMICONDUCTOR CORP.',
0x0090C4: u'JAVELIN SYSTEMS, INC.',
0x0090C5: u'INTERNET MAGIC, INC.',
0x0090C6: u'OPTIM SYSTEMS, INC.',
0x0090C7: u'ICOM INC.',
0x0090C8: u'WAVERIDER COMMUNICATIONS (CANADA) INC.',
0x0090C9: u'DPAC Technologies',
0x0090CA: u'ACCORD VIDEO TELECOMMUNICATIONS, LTD.',
0x0090CB: u'Wireless OnLine, Inc.',
0x0090CC: u'PLANET COMMUNICATIONS, INC.',
0x0090CD: u'ENT-EMPRESA NACIONAL DE TELECOMMUNICACOES, S.A.',
0x0090CE: u'TETRA GmbH',
0x0090CF: u'NORTEL',
0x0090D0: u'Thomson Telecom Belgium',
0x0090D1: u'LEICHU ENTERPRISE CO., LTD.',
0x0090D2: u'ARTEL VIDEO SYSTEMS',
0x0090D3: u'GIESECKE & DEVRIENT GmbH',
0x0090D4: u'BindView Development Corp.',
0x0090D5: u'EUPHONIX, INC.',
0x0090D6: u'CRYSTAL GROUP',
0x0090D7: u'NetBoost Corp.',
0x0090D8: u'WHITECROSS SYSTEMS',
0x0090D9: u'CISCO SYSTEMS, INC.',
0x0090DA: u'DYNARC, INC.',
0x0090DB: u'NEXT LEVEL COMMUNICATIONS',
0x0090DC: u'TECO INFORMATION SYSTEMS',
0x0090DD: u'THE MIHARU COMMUNICATIONS CO., LTD.',
0x0090DE: u'CARDKEY SYSTEMS, INC.',
0x0090DF: u'MITSUBISHI CHEMICAL AMERICA, INC.',
0x0090E0: u'SYSTRAN CORP.',
0x0090E1: u'TELENA S.P.A.',
0x0090E2: u'DISTRIBUTED PROCESSING TECHNOLOGY',
0x0090E3: u'AVEX ELECTRONICS INC.',
0x0090E4: u'NEC AMERICA, INC.',
0x0090E5: u'TEKNEMA, INC.',
0x0090E6: u'ACER LABORATORIES, INC.',
0x0090E7: u'HORSCH ELEKTRONIK AG',
0x0090E8: u'MOXA TECHNOLOGIES CORP., LTD.',
0x0090E9: u'JANZ COMPUTER AG',
0x0090EA: u'ALPHA TECHNOLOGIES, INC.',
0x0090EB: u'SENTRY TELECOM SYSTEMS',
0x0090EC: u'PYRESCOM',
0x0090ED: u'CENTRAL SYSTEM RESEARCH CO., LTD.',
0x0090EE: u'PERSONAL COMMUNICATIONS TECHNOLOGIES',
0x0090EF: u'INTEGRIX, INC.',
0x0090F0: u'Harmonic Video Systems Ltd.',
0x0090F1: u'DOT HILL SYSTEMS CORPORATION',
0x0090F2: u'CISCO SYSTEMS, INC.',
0x0090F3: u'ASPECT COMMUNICATIONS',
0x0090F4: u'LIGHTNING INSTRUMENTATION',
0x0090F5: u'CLEVO CO.',
0x0090F6: u'ESCALATE NETWORKS, INC.',
0x0090F7: u'NBASE COMMUNICATIONS LTD.',
0x0090F8: u'MEDIATRIX TELECOM',
0x0090F9: u'LEITCH',
0x0090FA: u'EMULEX Corp',
0x0090FB: u'PORTWELL, INC.',
0x0090FC: u'NETWORK COMPUTING DEVICES',
0x0090FD: u'CopperCom, Inc.',
0x0090FE: u'ELECOM CO., LTD. (LANEED DIV.)',
0x0090FF: u'TELLUS TECHNOLOGY INC.',
0x0091D6: u'Crystal Group, Inc.',
0x009D8E: u'CARDIAC RECORDERS, INC.',
0x00A000: u'CENTILLION NETWORKS, INC.',
0x00A001: u'DRS Signal Solutions',
0x00A002: u'LEEDS & NORTHRUP AUSTRALIA PTY LTD',
0x00A003: u'STAEFA CONTROL SYSTEM',
0x00A004: u'NETPOWER, INC.',
0x00A005: u'DANIEL INSTRUMENTS, LTD.',
0x00A006: u'IMAGE DATA PROCESSING SYSTEM GROUP',
0x00A007: u'APEXX TECHNOLOGY, INC.',
0x00A008: u'NETCORP',
0x00A009: u'WHITETREE NETWORK',
0x00A00A: u'Airspan',
0x00A00B: u'COMPUTEX CO., LTD.',
0x00A00C: u'KINGMAX TECHNOLOGY, INC.',
0x00A00D: u'THE PANDA PROJECT',
0x00A00E: u'VISUAL NETWORKS, INC.',
0x00A00F: u'Broadband Technologies',
0x00A010: u'SYSLOGIC DATENTECHNIK AG',
0x00A011: u'MUTOH INDUSTRIES LTD.',
0x00A012: u'B.A.T.M. ADVANCED TECHNOLOGIES',
0x00A013: u'TELTREND LTD.',
0x00A014: u'CSIR',
0x00A015: u'WYLE',
0x00A016: u'MICROPOLIS CORP.',
0x00A017: u'J B M CORPORATION',
0x00A018: u'CREATIVE CONTROLLERS, INC.',
0x00A019: u'NEBULA CONSULTANTS, INC.',
0x00A01A: u'BINAR ELEKTRONIK AB',
0x00A01B: u'PREMISYS COMMUNICATIONS, INC.',
0x00A01C: u'NASCENT NETWORKS CORPORATION',
0x00A01D: u'SIXNET',
0x00A01E: u'EST CORPORATION',
0x00A01F: u'TRICORD SYSTEMS, INC.',
0x00A020: u'CITICORP/TTI',
0x00A021: u'General Dynamics',
0x00A022: u'CENTRE FOR DEVELOPMENT OF ADVANCED COMPUTING',
0x00A023: u'APPLIED CREATIVE TECHNOLOGY, INC.',
0x00A024: u'3COM CORPORATION',
0x00A025: u'REDCOM LABS INC.',
0x00A026: u'TELDAT, S.A.',
0x00A027: u'FIREPOWER SYSTEMS, INC.',
0x00A028: u'CONNER PERIPHERALS',
0x00A029: u'COULTER CORPORATION',
0x00A02A: u'TRANCELL SYSTEMS',
0x00A02B: u'TRANSITIONS RESEARCH CORP.',
0x00A02C: u'interWAVE Communications',
0x00A02D: u'1394 Trade Association',
0x00A02E: u'BRAND COMMUNICATIONS, LTD.',
0x00A02F: u'PIRELLI CAVI',
0x00A030: u'CAPTOR NV/SA',
0x00A031: u'HAZELTINE CORPORATION, MS 1-17',
0x00A032: u'GES SINGAPORE PTE. LTD.',
0x00A033: u'imc MeBsysteme GmbH',
0x00A034: u'AXEL',
0x00A035: u'CYLINK CORPORATION',
0x00A036: u'APPLIED NETWORK TECHNOLOGY',
0x00A037: u'DATASCOPE CORPORATION',
0x00A038: u'EMAIL ELECTRONICS',
0x00A039: u'ROSS TECHNOLOGY, INC.',
0x00A03A: u'KUBOTEK CORPORATION',
0x00A03B: u'TOSHIN ELECTRIC CO., LTD.',
0x00A03C: u'EG&G NUCLEAR INSTRUMENTS',
0x00A03D: u'OPTO-22',
0x00A03E: u'ATM FORUM',
0x00A03F: u'COMPUTER SOCIETY MICROPROCESSOR & MICROPROCESSOR STANDARDS C',
0x00A040: u'APPLE COMPUTER',
0x00A041: u'INFICON',
0x00A042: u'SPUR PRODUCTS CORP.',
0x00A043: u'AMERICAN TECHNOLOGY LABS, INC.',
0x00A044: u'NTT IT CO., LTD.',
0x00A045: u'PHOENIX CONTACT GMBH & CO.',
0x00A046: u'SCITEX CORP. LTD.',
0x00A047: u'INTEGRATED FITNESS CORP.',
0x00A048: u'QUESTECH, LTD.',
0x00A049: u'DIGITECH INDUSTRIES, INC.',
0x00A04A: u'NISSHIN ELECTRIC CO., LTD.',
0x00A04B: u'TFL LAN INC.',
0x00A04C: u'INNOVATIVE SYSTEMS & TECHNOLOGIES, INC.',
0x00A04D: u'EDA INSTRUMENTS, INC.',
0x00A04E: u'VOELKER TECHNOLOGIES, INC.',
0x00A04F: u'AMERITEC CORP.',
0x00A050: u'CYPRESS SEMICONDUCTOR',
0x00A051: u'ANGIA COMMUNICATIONS. INC.',
0x00A052: u'STANILITE ELECTRONICS PTY. LTD',
0x00A053: u'COMPACT DEVICES, INC.',
0x00A054: u'PRIVATE',
0x00A055: u'Data Device Corporation',
0x00A056: u'MICROPROSS',
0x00A057: u'LANCOM Systems GmbH',
0x00A058: u'GLORY, LTD.',
0x00A059: u'HAMILTON HALLMARK',
0x00A05A: u'KOFAX IMAGE PRODUCTS',
0x00A05B: u'MARQUIP, INC.',
0x00A05C: u'INVENTORY CONVERSION, INC./',
0x00A05D: u'CS COMPUTER SYSTEME GmbH',
0x00A05E: u'MYRIAD LOGIC INC.',
0x00A05F: u'BTG ENGINEERING BV',
0x00A060: u'ACER PERIPHERALS, INC.',
0x00A061: u'PURITAN BENNETT',
0x00A062: u'AES PRODATA',
0x00A063: u'JRL SYSTEMS, INC.',
0x00A064: u'KVB/ANALECT',
0x00A065: u'Symantec Corporation',
0x00A066: u'ISA CO., LTD.',
0x00A067: u'NETWORK SERVICES GROUP',
0x00A068: u'BHP LIMITED',
0x00A069: u'Symmetricom, Inc.',
0x00A06A: u'Verilink Corporation',
0x00A06B: u'DMS DORSCH MIKROSYSTEM GMBH',
0x00A06C: u'SHINDENGEN ELECTRIC MFG. CO., LTD.',
0x00A06D: u'MANNESMANN TALLY CORPORATION',
0x00A06E: u'AUSTRON, INC.',
0x00A06F: u'THE APPCON GROUP, INC.',
0x00A070: u'COASTCOM',
0x00A071: u'VIDEO LOTTERY TECHNOLOGIES,INC',
0x00A072: u'OVATION SYSTEMS LTD.',
0x00A073: u'COM21, INC.',
0x00A074: u'PERCEPTION TECHNOLOGY',
0x00A075: u'MICRON TECHNOLOGY, INC.',
0x00A076: u'CARDWARE LAB, INC.',
0x00A077: u'FUJITSU NEXION, INC.',
0x00A078: u'Marconi Communications',
0x00A079: u'ALPS ELECTRIC (USA), INC.',
0x00A07A: u'ADVANCED PERIPHERALS TECHNOLOGIES, INC.',
0x00A07B: u'DAWN COMPUTER INCORPORATION',
0x00A07C: u'TONYANG NYLON CO., LTD.',
0x00A07D: u'SEEQ TECHNOLOGY, INC.',
0x00A07E: u'AVID TECHNOLOGY, INC.',
0x00A07F: u'GSM-SYNTEL, LTD.',
0x00A080: u'SBE, Inc.',
0x00A081: u'ALCATEL DATA NETWORKS',
0x00A082: u'NKT ELEKTRONIK A/S',
0x00A083: u'ASIMMPHONY TURKEY',
0x00A084: u'DATAPLEX PTY. LTD.',
0x00A085: u'PRIVATE',
0x00A086: u'AMBER WAVE SYSTEMS, INC.',
0x00A087: u'Zarlink Semiconductor Ltd.',
0x00A088: u'ESSENTIAL COMMUNICATIONS',
0x00A089: u'XPOINT TECHNOLOGIES, INC.',
0x00A08A: u'BROOKTROUT TECHNOLOGY, INC.',
0x00A08B: u'ASTON ELECTRONIC DESIGNS LTD.',
0x00A08C: u'MultiMedia LANs, Inc.',
0x00A08D: u'JACOMO CORPORATION',
0x00A08E: u'Nokia Internet Communications',
0x00A08F: u'DESKNET SYSTEMS, INC.',
0x00A090: u'TimeStep Corporation',
0x00A091: u'APPLICOM INTERNATIONAL',
0x00A092: u'H. BOLLMANN MANUFACTURERS, LTD',
0x00A093: u'B/E AEROSPACE, Inc.',
0x00A094: u'COMSAT CORPORATION',
0x00A095: u'ACACIA NETWORKS, INC.',
0x00A096: u'MITUMI ELECTRIC CO., LTD.',
0x00A097: u'JC INFORMATION SYSTEMS',
0x00A098: u'NETWORK APPLIANCE CORP.',
0x00A099: u'K-NET LTD.',
0x00A09A: u'NIHON KOHDEN AMERICA',
0x00A09B: u'QPSX COMMUNICATIONS, LTD.',
0x00A09C: u'Xyplex, Inc.',
0x00A09D: u'JOHNATHON FREEMAN TECHNOLOGIES',
0x00A09E: u'ICTV',
0x00A09F: u'COMMVISION CORP.',
0x00A0A0: u'COMPACT DATA, LTD.',
0x00A0A1: u'EPIC DATA INC.',
0x00A0A2: u'DIGICOM S.P.A.',
0x00A0A3: u'RELIABLE POWER METERS',
0x00A0A4: u'MICROS SYSTEMS, INC.',
0x00A0A5: u'TEKNOR MICROSYSTEME, INC.',
0x00A0A6: u'M.I. SYSTEMS, K.K.',
0x00A0A7: u'VORAX CORPORATION',
0x00A0A8: u'RENEX CORPORATION',
0x00A0A9: u'NAVTEL COMMUNICATIONS INC.',
0x00A0AA: u'SPACELABS MEDICAL',
0x00A0AB: u'NETCS INFORMATIONSTECHNIK GMBH',
0x00A0AC: u'GILAT SATELLITE NETWORKS, LTD.',
0x00A0AD: u'MARCONI SPA',
0x00A0AE: u'NUCOM SYSTEMS, INC.',
0x00A0AF: u'WMS INDUSTRIES',
0x00A0B0: u'I-O DATA DEVICE, INC.',
0x00A0B1: u'FIRST VIRTUAL CORPORATION',
0x00A0B2: u'SHIMA SEIKI',
0x00A0B3: u'ZYKRONIX',
0x00A0B4: u'TEXAS MICROSYSTEMS, INC.',
0x00A0B5: u'3H TECHNOLOGY',
0x00A0B6: u'SANRITZ AUTOMATION CO., LTD.',
0x00A0B7: u'CORDANT, INC.',
0x00A0B8: u'SYMBIOS LOGIC INC.',
0x00A0B9: u'EAGLE TECHNOLOGY, INC.',
0x00A0BA: u'PATTON ELECTRONICS CO.',
0x00A0BB: u'HILAN GMBH',
0x00A0BC: u'VIASAT, INCORPORATED',
0x00A0BD: u'I-TECH CORP.',
0x00A0BE: u'INTEGRATED CIRCUIT SYSTEMS, INC. COMMUNICATIONS GROUP',
0x00A0BF: u'WIRELESS DATA GROUP MOTOROLA',
0x00A0C0: u'DIGITAL LINK CORP.',
0x00A0C1: u'ORTIVUS MEDICAL AB',
0x00A0C2: u'R.A. SYSTEMS CO., LTD.',
0x00A0C3: u'UNICOMPUTER GMBH',
0x00A0C4: u'CRISTIE ELECTRONICS LTD.',
0x00A0C5: u'ZYXEL COMMUNICATION',
0x00A0C6: u'QUALCOMM INCORPORATED',
0x00A0C7: u'TADIRAN TELECOMMUNICATIONS',
0x00A0C8: u'ADTRAN INC.',
0x00A0C9: u'INTEL CORPORATION - HF1-06',
0x00A0CA: u'FUJITSU DENSO LTD.',
0x00A0CB: u'ARK TELECOMMUNICATIONS, INC.',
0x00A0CC: u'LITE-ON COMMUNICATIONS, INC.',
0x00A0CD: u'DR. JOHANNES HEIDENHAIN GmbH',
0x00A0CE: u'ASTROCOM CORPORATION',
0x00A0CF: u'SOTAS, INC.',
0x00A0D0: u'TEN X TECHNOLOGY, INC.',
0x00A0D1: u'INVENTEC CORPORATION',
0x00A0D2: u'ALLIED TELESIS INTERNATIONAL CORPORATION',
0x00A0D3: u'INSTEM COMPUTER SYSTEMS, LTD.',
0x00A0D4: u'RADIOLAN, INC.',
0x00A0D5: u'SIERRA WIRELESS INC.',
0x00A0D6: u'SBE, INC.',
0x00A0D7: u'KASTEN CHASE APPLIED RESEARCH',
0x00A0D8: u'SPECTRA - TEK',
0x00A0D9: u'CONVEX COMPUTER CORPORATION',
0x00A0DA: u'INTEGRATED SYSTEMS Technology, Inc.',
0x00A0DB: u'FISHER & PAYKEL PRODUCTION',
0x00A0DC: u'O.N. ELECTRONIC CO., LTD.',
0x00A0DD: u'AZONIX CORPORATION',
0x00A0DE: u'YAMAHA CORPORATION',
0x00A0DF: u'STS TECHNOLOGIES, INC.',
0x00A0E0: u'TENNYSON TECHNOLOGIES PTY LTD',
0x00A0E1: u'WESTPORT RESEARCH ASSOCIATES, INC.',
0x00A0E2: u'KEISOKU GIKEN CORP.',
0x00A0E3: u'XKL SYSTEMS CORP.',
0x00A0E4: u'OPTIQUEST',
0x00A0E5: u'NHC COMMUNICATIONS',
0x00A0E6: u'DIALOGIC CORPORATION',
0x00A0E7: u'CENTRAL DATA CORPORATION',
0x00A0E8: u'REUTERS HOLDINGS PLC',
0x00A0E9: u'ELECTRONIC RETAILING SYSTEMS INTERNATIONAL',
0x00A0EA: u'ETHERCOM CORP.',
0x00A0EB: u'Encore Networks',
0x00A0EC: u'TRANSMITTON LTD.',
0x00A0ED: u'Brooks Automation, Inc.',
0x00A0EE: u'NASHOBA NETWORKS',
0x00A0EF: u'LUCIDATA LTD.',
0x00A0F0: u'TORONTO MICROELECTRONICS INC.',
0x00A0F1: u'MTI',
0x00A0F2: u'INFOTEK COMMUNICATIONS, INC.',
0x00A0F3: u'STAUBLI',
0x00A0F4: u'GE',
0x00A0F5: u'RADGUARD LTD.',
0x00A0F6: u'AutoGas Systems Inc.',
0x00A0F7: u'V.I COMPUTER CORP.',
0x00A0F8: u'SYMBOL TECHNOLOGIES, INC.',
0x00A0F9: u'BINTEC COMMUNICATIONS GMBH',
0x00A0FA: u'Marconi Communication GmbH',
0x00A0FB: u'TORAY ENGINEERING CO., LTD.',
0x00A0FC: u'IMAGE SCIENCES, INC.',
0x00A0FD: u'SCITEX DIGITAL PRINTING, INC.',
0x00A0FE: u'BOSTON TECHNOLOGY, INC.',
0x00A0FF: u'TELLABS OPERATIONS, INC.',
0x00AA00: u'INTEL CORPORATION',
0x00AA01: u'INTEL CORPORATION',
0x00AA02: u'INTEL CORPORATION',
0x00AA3C: u'OLIVETTI TELECOM SPA (OLTECO)',
0x00B009: u'Grass Valley Group',
0x00B017: u'InfoGear Technology Corp.',
0x00B019: u'Casi-Rusco',
0x00B01C: u'Westport Technologies',
0x00B01E: u'Rantic Labs, Inc.',
0x00B02A: u'ORSYS GmbH',
0x00B02D: u'ViaGate Technologies, Inc.',
0x00B03B: u'HiQ Networks',
0x00B048: u'Marconi Communications Inc.',
0x00B04A: u'Cisco Systems, Inc.',
0x00B052: u'Intellon Corporation',
0x00B064: u'Cisco Systems, Inc.',
0x00B069: u'Honewell Oy',
0x00B06D: u'Jones Futurex Inc.',
0x00B080: u'Mannesmann Ipulsys B.V.',
0x00B086: u'LocSoft Limited',
0x00B08E: u'Cisco Systems, Inc.',
0x00B091: u'Transmeta Corp.',
0x00B094: u'Alaris, Inc.',
0x00B09A: u'Morrow Technologies Corp.',
0x00B09D: u'Point Grey Research Inc.',
0x00B0AC: u'SIAE-Microelettronica S.p.A.',
0x00B0AE: u'Symmetricom',
0x00B0B3: u'Xstreamis PLC',
0x00B0C2: u'Cisco Systems, Inc.',
0x00B0C7: u'Tellabs Operations, Inc.',
0x00B0CE: u'TECHNOLOGY RESCUE',
0x00B0D0: u'Dell Computer Corp.',
0x00B0DB: u'Nextcell, Inc.',
0x00B0DF: u'Reliable Data Technology, Inc.',
0x00B0E7: u'British Federal Ltd.',
0x00B0EC: u'EACEM',
0x00B0EE: u'Ajile Systems, Inc.',
0x00B0F0: u'CALY NETWORKS',
0x00B0F5: u'NetWorth Technologies, Inc.',
0x00BAC0: u'Biometric Access Company',
0x00BB01: u'OCTOTHORPE CORP.',
0x00BBF0: u'UNGERMANN-BASS INC.',
0x00C000: u'LANOPTICS, LTD.',
0x00C001: u'DIATEK PATIENT MANAGMENT',
0x00C002: u'SERCOMM CORPORATION',
0x00C003: u'GLOBALNET COMMUNICATIONS',
0x00C004: u'JAPAN BUSINESS COMPUTER CO.LTD',
0x00C005: u'LIVINGSTON ENTERPRISES, INC.',
0x00C006: u'NIPPON AVIONICS CO., LTD.',
0x00C007: u'PINNACLE DATA SYSTEMS, INC.',
0x00C008: u'SECO SRL',
0x00C009: u'KT TECHNOLOGY (S) PTE LTD',
0x00C00A: u'MICRO CRAFT',
0x00C00B: u'NORCONTROL A.S.',
0x00C00C: u'RELIA TECHNOLGIES',
0x00C00D: u'ADVANCED LOGIC RESEARCH, INC.',
0x00C00E: u'PSITECH, INC.',
0x00C00F: u'QUANTUM SOFTWARE SYSTEMS LTD.',
0x00C010: u'HIRAKAWA HEWTECH CORP.',
0x00C011: u'INTERACTIVE COMPUTING DEVICES',
0x00C012: u'NETSPAN CORPORATION',
0x00C013: u'NETRIX',
0x00C014: u'TELEMATICS CALABASAS INT\'L,INC',
0x00C015: u'NEW MEDIA CORPORATION',
0x00C016: u'ELECTRONIC THEATRE CONTROLS',
0x00C017: u'FORTE NETWORKS',
0x00C018: u'LANART CORPORATION',
0x00C019: u'LEAP TECHNOLOGY, INC.',
0x00C01A: u'COROMETRICS MEDICAL SYSTEMS',
0x00C01B: u'SOCKET COMMUNICATIONS, INC.',
0x00C01C: u'INTERLINK COMMUNICATIONS LTD.',
0x00C01D: u'GRAND JUNCTION NETWORKS, INC.',
0x00C01E: u'LA FRANCAISE DES JEUX',
0x00C01F: u'S.E.R.C.E.L.',
0x00C020: u'ARCO ELECTRONIC, CONTROL LTD.',
0x00C021: u'NETEXPRESS',
0x00C022: u'LASERMASTER TECHNOLOGIES, INC.',
0x00C023: u'TUTANKHAMON ELECTRONICS',
0x00C024: u'EDEN SISTEMAS DE COMPUTACAO SA',
0x00C025: u'DATAPRODUCTS CORPORATION',
0x00C026: u'LANS TECHNOLOGY CO., LTD.',
0x00C027: u'CIPHER SYSTEMS, INC.',
0x00C028: u'JASCO CORPORATION',
0x00C029: u'Nexans Deutschland AG - ANS',
0x00C02A: u'OHKURA ELECTRIC CO., LTD.',
0x00C02B: u'GERLOFF GESELLSCHAFT FUR',
0x00C02C: u'CENTRUM COMMUNICATIONS, INC.',
0x00C02D: u'FUJI PHOTO FILM CO., LTD.',
0x00C02E: u'NETWIZ',
0x00C02F: u'OKUMA CORPORATION',
0x00C030: u'INTEGRATED ENGINEERING B. V.',
0x00C031: u'DESIGN RESEARCH SYSTEMS, INC.',
0x00C032: u'I-CUBED LIMITED',
0x00C033: u'TELEBIT COMMUNICATIONS APS',
0x00C034: u'TRANSACTION NETWORK',
0x00C035: u'QUINTAR COMPANY',
0x00C036: u'RAYTECH ELECTRONIC CORP.',
0x00C037: u'DYNATEM',
0x00C038: u'RASTER IMAGE PROCESSING SYSTEM',
0x00C039: u'Teridian Semiconductor Corporation',
0x00C03A: u'MEN-MIKRO ELEKTRONIK GMBH',
0x00C03B: u'MULTIACCESS COMPUTING CORP.',
0x00C03C: u'TOWER TECH S.R.L.',
0x00C03D: u'WIESEMANN & THEIS GMBH',
0x00C03E: u'FA. GEBR. HELLER GMBH',
0x00C03F: u'STORES AUTOMATED SYSTEMS, INC.',
0x00C040: u'ECCI',
0x00C041: u'DIGITAL TRANSMISSION SYSTEMS',
0x00C042: u'DATALUX CORP.',
0x00C043: u'STRATACOM',
0x00C044: u'EMCOM CORPORATION',
0x00C045: u'ISOLATION SYSTEMS, LTD.',
0x00C046: u'KEMITRON LTD.',
0x00C047: u'UNIMICRO SYSTEMS, INC.',
0x00C048: u'BAY TECHNICAL ASSOCIATES',
0x00C049: u'U.S. ROBOTICS, INC.',
0x00C04A: u'GROUP 2000 AG',
0x00C04B: u'CREATIVE MICROSYSTEMS',
0x00C04C: u'DEPARTMENT OF FOREIGN AFFAIRS',
0x00C04D: u'MITEC, INC.',
0x00C04E: u'COMTROL CORPORATION',
0x00C04F: u'DELL COMPUTER CORPORATION',
0x00C050: u'TOYO DENKI SEIZO K.K.',
0x00C051: u'ADVANCED INTEGRATION RESEARCH',
0x00C052: u'BURR-BROWN',
0x00C053: u'Concerto Software',
0x00C054: u'NETWORK PERIPHERALS, LTD.',
0x00C055: u'MODULAR COMPUTING TECHNOLOGIES',
0x00C056: u'SOMELEC',
0x00C057: u'MYCO ELECTRONICS',
0x00C058: u'DATAEXPERT CORP.',
0x00C059: u'NIPPON DENSO CO., LTD.',
0x00C05A: u'SEMAPHORE COMMUNICATIONS CORP.',
0x00C05B: u'NETWORKS NORTHWEST, INC.',
0x00C05C: u'ELONEX PLC',
0x00C05D: u'L&N TECHNOLOGIES',
0x00C05E: u'VARI-LITE, INC.',
0x00C05F: u'FINE-PAL COMPANY LIMITED',
0x00C060: u'ID SCANDINAVIA AS',
0x00C061: u'SOLECTEK CORPORATION',
0x00C062: u'IMPULSE TECHNOLOGY',
0x00C063: u'MORNING STAR TECHNOLOGIES, INC',
0x00C064: u'GENERAL DATACOMM IND. INC.',
0x00C065: u'SCOPE COMMUNICATIONS, INC.',
0x00C066: u'DOCUPOINT, INC.',
0x00C067: u'UNITED BARCODE INDUSTRIES',
0x00C068: u'PHILIP DRAKE ELECTRONICS LTD.',
0x00C069: u'Axxcelera Broadband Wireless',
0x00C06A: u'ZAHNER-ELEKTRIK GMBH & CO. KG',
0x00C06B: u'OSI PLUS CORPORATION',
0x00C06C: u'SVEC COMPUTER CORP.',
0x00C06D: u'BOCA RESEARCH, INC.',
0x00C06E: u'HAFT TECHNOLOGY, INC.',
0x00C06F: u'KOMATSU LTD.',
0x00C070: u'SECTRA SECURE-TRANSMISSION AB',
0x00C071: u'AREANEX COMMUNICATIONS, INC.',
0x00C072: u'KNX LTD.',
0x00C073: u'XEDIA CORPORATION',
0x00C074: u'TOYODA AUTOMATIC LOOM',
0x00C075: u'XANTE CORPORATION',
0x00C076: u'I-DATA INTERNATIONAL A-S',
0x00C077: u'DAEWOO TELECOM LTD.',
0x00C078: u'COMPUTER SYSTEMS ENGINEERING',
0x00C079: u'FONSYS CO.,LTD.',
0x00C07A: u'PRIVA B.V.',
0x00C07B: u'ASCEND COMMUNICATIONS, INC.',
0x00C07C: u'HIGHTECH INFORMATION',
0x00C07D: u'RISC DEVELOPMENTS LTD.',
0x00C07E: u'KUBOTA CORPORATION ELECTRONIC',
0x00C07F: u'NUPON COMPUTING CORP.',
0x00C080: u'NETSTAR, INC.',
0x00C081: u'METRODATA LTD.',
0x00C082: u'MOORE PRODUCTS CO.',
0x00C083: u'TRACE MOUNTAIN PRODUCTS, INC.',
0x00C084: u'DATA LINK CORP. LTD.',
0x00C085: u'ELECTRONICS FOR IMAGING, INC.',
0x00C086: u'THE LYNK CORPORATION',
0x00C087: u'UUNET TECHNOLOGIES, INC.',
0x00C088: u'EKF ELEKTRONIK GMBH',
0x00C089: u'TELINDUS DISTRIBUTION',
0x00C08A: u'LAUTERBACH DATENTECHNIK GMBH',
0x00C08B: u'RISQ MODULAR SYSTEMS, INC.',
0x00C08C: u'PERFORMANCE TECHNOLOGIES, INC.',
0x00C08D: u'TRONIX PRODUCT DEVELOPMENT',
0x00C08E: u'NETWORK INFORMATION TECHNOLOGY',
0x00C08F: u'Matsushita Electric Works, Ltd.',
0x00C090: u'PRAIM S.R.L.',
0x00C091: u'JABIL CIRCUIT, INC.',
0x00C092: u'MENNEN MEDICAL INC.',
0x00C093: u'ALTA RESEARCH CORP.',
0x00C094: u'VMX INC.',
0x00C095: u'ZNYX',
0x00C096: u'TAMURA CORPORATION',
0x00C097: u'ARCHIPEL SA',
0x00C098: u'CHUNTEX ELECTRONIC CO., LTD.',
0x00C099: u'YOSHIKI INDUSTRIAL CO.,LTD.',
0x00C09A: u'PHOTONICS CORPORATION',
0x00C09B: u'RELIANCE COMM/TEC, R-TEC',
0x00C09C: u'TOA ELECTRONIC LTD.',
0x00C09D: u'DISTRIBUTED SYSTEMS INT\'L, INC',
0x00C09E: u'CACHE COMPUTERS, INC.',
0x00C09F: u'QUANTA COMPUTER, INC.',
0x00C0A0: u'ADVANCE MICRO RESEARCH, INC.',
0x00C0A1: u'TOKYO DENSHI SEKEI CO.',
0x00C0A2: u'INTERMEDIUM A/S',
0x00C0A3: u'DUAL ENTERPRISES CORPORATION',
0x00C0A4: u'UNIGRAF OY',
0x00C0A5: u'DICKENS DATA SYSTEMS',
0x00C0A6: u'EXICOM AUSTRALIA PTY. LTD',
0x00C0A7: u'SEEL LTD.',
0x00C0A8: u'GVC CORPORATION',
0x00C0A9: u'BARRON MCCANN LTD.',
0x00C0AA: u'SILICON VALLEY COMPUTER',
0x00C0AB: u'Telco Systems, Inc.',
0x00C0AC: u'GAMBIT COMPUTER COMMUNICATIONS',
0x00C0AD: u'MARBEN COMMUNICATION SYSTEMS',
0x00C0AE: u'TOWERCOM CO. INC. DBA PC HOUSE',
0x00C0AF: u'TEKLOGIX INC.',
0x00C0B0: u'GCC TECHNOLOGIES,INC.',
0x00C0B1: u'GENIUS NET CO.',
0x00C0B2: u'NORAND CORPORATION',
0x00C0B3: u'COMSTAT DATACOMM CORPORATION',
0x00C0B4: u'MYSON TECHNOLOGY, INC.',
0x00C0B5: u'CORPORATE NETWORK SYSTEMS,INC.',
0x00C0B6: u'Adaptec, Inc.',
0x00C0B7: u'AMERICAN POWER CONVERSION CORP',
0x00C0B8: u'FRASER\'S HILL LTD.',
0x00C0B9: u'FUNK SOFTWARE, INC.',
0x00C0BA: u'NETVANTAGE',
0x00C0BB: u'FORVAL CREATIVE, INC.',
0x00C0BC: u'TELECOM AUSTRALIA/CSSC',
0x00C0BD: u'INEX TECHNOLOGIES, INC.',
0x00C0BE: u'ALCATEL - SEL',
0x00C0BF: u'TECHNOLOGY CONCEPTS, LTD.',
0x00C0C0: u'SHORE MICROSYSTEMS, INC.',
0x00C0C1: u'QUAD/GRAPHICS, INC.',
0x00C0C2: u'INFINITE NETWORKS LTD.',
0x00C0C3: u'ACUSON COMPUTED SONOGRAPHY',
0x00C0C4: u'COMPUTER OPERATIONAL',
0x00C0C5: u'SID INFORMATICA',
0x00C0C6: u'PERSONAL MEDIA CORP.',
0x00C0C7: u'SPARKTRUM MICROSYSTEMS, INC.',
0x00C0C8: u'MICRO BYTE PTY. LTD.',
0x00C0C9: u'ELSAG BAILEY PROCESS',
0x00C0CA: u'ALFA, INC.',
0x00C0CB: u'CONTROL TECHNOLOGY CORPORATION',
0x00C0CC: u'TELESCIENCES CO SYSTEMS, INC.',
0x00C0CD: u'COMELTA, S.A.',
0x00C0CE: u'CEI SYSTEMS & ENGINEERING PTE',
0x00C0CF: u'IMATRAN VOIMA OY',
0x00C0D0: u'RATOC SYSTEM INC.',
0x00C0D1: u'COMTREE TECHNOLOGY CORPORATION',
0x00C0D2: u'SYNTELLECT, INC.',
0x00C0D3: u'OLYMPUS IMAGE SYSTEMS, INC.',
0x00C0D4: u'AXON NETWORKS, INC.',
0x00C0D5: u'QUANCOM ELECTRONIC GMBH',
0x00C0D6: u'J1 SYSTEMS, INC.',
0x00C0D7: u'TAIWAN TRADING CENTER DBA',
0x00C0D8: u'UNIVERSAL DATA SYSTEMS',
0x00C0D9: u'QUINTE NETWORK CONFIDENTIALITY',
0x00C0DA: u'NICE SYSTEMS LTD.',
0x00C0DB: u'IPC CORPORATION (PTE) LTD.',
0x00C0DC: u'EOS TECHNOLOGIES, INC.',
0x00C0DD: u'QLogic Corporation',
0x00C0DE: u'ZCOMM, INC.',
0x00C0DF: u'KYE Systems Corp.',
0x00C0E0: u'DSC COMMUNICATION CORP.',
0x00C0E1: u'SONIC SOLUTIONS',
0x00C0E2: u'CALCOMP, INC.',
0x00C0E3: u'OSITECH COMMUNICATIONS, INC.',
0x00C0E4: u'SIEMENS BUILDING',
0x00C0E5: u'GESPAC, S.A.',
0x00C0E6: u'Verilink Corporation',
0x00C0E7: u'FIBERDATA AB',
0x00C0E8: u'PLEXCOM, INC.',
0x00C0E9: u'OAK SOLUTIONS, LTD.',
0x00C0EA: u'ARRAY TECHNOLOGY LTD.',
0x00C0EB: u'SEH COMPUTERTECHNIK GMBH',
0x00C0EC: u'DAUPHIN TECHNOLOGY',
0x00C0ED: u'US ARMY ELECTRONIC',
0x00C0EE: u'KYOCERA CORPORATION',
0x00C0EF: u'ABIT CORPORATION',
0x00C0F0: u'KINGSTON TECHNOLOGY CORP.',
0x00C0F1: u'SHINKO ELECTRIC CO., LTD.',
0x00C0F2: u'TRANSITION NETWORKS',
0x00C0F3: u'NETWORK COMMUNICATIONS CORP.',
0x00C0F4: u'INTERLINK SYSTEM CO., LTD.',
0x00C0F5: u'METACOMP, INC.',
0x00C0F6: u'CELAN TECHNOLOGY INC.',
0x00C0F7: u'ENGAGE COMMUNICATION, INC.',
0x00C0F8: u'ABOUT COMPUTING INC.',
0x00C0F9: u'Motorola Embedded Computing Group',
0x00C0FA: u'CANARY COMMUNICATIONS, INC.',
0x00C0FB: u'ADVANCED TECHNOLOGY LABS',
0x00C0FC: u'ELASTIC REALITY, INC.',
0x00C0FD: u'PROSUM',
0x00C0FE: u'APTEC COMPUTER SYSTEMS, INC.',
0x00C0FF: u'DOT HILL SYSTEMS CORPORATION',
0x00CBBD: u'Cambridge Broadband Ltd.',
0x00CF1C: u'COMMUNICATION MACHINERY CORP.',
0x00D000: u'FERRAN SCIENTIFIC, INC.',
0x00D001: u'VST TECHNOLOGIES, INC.',
0x00D002: u'DITECH CORPORATION',
0x00D003: u'COMDA ENTERPRISES CORP.',
0x00D004: u'PENTACOM LTD.',
0x00D005: u'ZHS ZEITMANAGEMENTSYSTEME',
0x00D006: u'CISCO SYSTEMS, INC.',
0x00D007: u'MIC ASSOCIATES, INC.',
0x00D008: u'MACTELL CORPORATION',
0x00D009: u'HSING TECH. ENTERPRISE CO. LTD',
0x00D00A: u'LANACCESS TELECOM S.A.',
0x00D00B: u'RHK TECHNOLOGY, INC.',
0x00D00C: u'SNIJDER MICRO SYSTEMS',
0x00D00D: u'MICROMERITICS INSTRUMENT',
0x00D00E: u'PLURIS, INC.',
0x00D00F: u'SPEECH DESIGN GMBH',
0x00D010: u'CONVERGENT NETWORKS, INC.',
0x00D011: u'PRISM VIDEO, INC.',
0x00D012: u'GATEWORKS CORP.',
0x00D013: u'PRIMEX AEROSPACE COMPANY',
0x00D014: u'ROOT, INC.',
0x00D015: u'UNIVEX MICROTECHNOLOGY CORP.',
0x00D016: u'SCM MICROSYSTEMS, INC.',
0x00D017: u'SYNTECH INFORMATION CO., LTD.',
0x00D018: u'QWES. COM, INC.',
0x00D019: u'DAINIPPON SCREEN CORPORATE',
0x00D01A: u'URMET TLC S.P.A.',
0x00D01B: u'MIMAKI ENGINEERING CO., LTD.',
0x00D01C: u'SBS TECHNOLOGIES,',
0x00D01D: u'FURUNO ELECTRIC CO., LTD.',
0x00D01E: u'PINGTEL CORP.',
0x00D01F: u'CTAM PTY. LTD.',
0x00D020: u'AIM SYSTEM, INC.',
0x00D021: u'REGENT ELECTRONICS CORP.',
0x00D022: u'INCREDIBLE TECHNOLOGIES, INC.',
0x00D023: u'INFORTREND TECHNOLOGY, INC.',
0x00D024: u'Cognex Corporation',
0x00D025: u'XROSSTECH, INC.',
0x00D026: u'HIRSCHMANN AUSTRIA GMBH',
0x00D027: u'APPLIED AUTOMATION, INC.',
0x00D028: u'OMNEON VIDEO NETWORKS',
0x00D029: u'WAKEFERN FOOD CORPORATION',
0x00D02A: u'Voxent Systems Ltd.',
0x00D02B: u'JETCELL, INC.',
0x00D02C: u'CAMPBELL SCIENTIFIC, INC.',
0x00D02D: u'ADEMCO',
0x00D02E: u'COMMUNICATION AUTOMATION CORP.',
0x00D02F: u'VLSI TECHNOLOGY INC.',
0x00D030: u'SAFETRAN SYSTEMS CORP.',
0x00D031: u'INDUSTRIAL LOGIC CORPORATION',
0x00D032: u'YANO ELECTRIC CO., LTD.',
0x00D033: u'DALIAN DAXIAN NETWORK',
0x00D034: u'ORMEC SYSTEMS CORP.',
0x00D035: u'BEHAVIOR TECH. COMPUTER CORP.',
0x00D036: u'TECHNOLOGY ATLANTA CORP.',
0x00D037: u'PHILIPS-DVS-LO BDR',
0x00D038: u'FIVEMERE, LTD.',
0x00D039: u'UTILICOM, INC.',
0x00D03A: u'ZONEWORX, INC.',
0x00D03B: u'VISION PRODUCTS PTY. LTD.',
0x00D03C: u'Vieo, Inc.',
0x00D03D: u'GALILEO TECHNOLOGY, LTD.',
0x00D03E: u'ROCKETCHIPS, INC.',
0x00D03F: u'AMERICAN COMMUNICATION',
0x00D040: u'SYSMATE CO., LTD.',
0x00D041: u'AMIGO TECHNOLOGY CO., LTD.',
0x00D042: u'MAHLO GMBH & CO. UG',
0x00D043: u'ZONAL RETAIL DATA SYSTEMS',
0x00D044: u'ALIDIAN NETWORKS, INC.',
0x00D045: u'KVASER AB',
0x00D046: u'DOLBY LABORATORIES, INC.',
0x00D047: u'XN TECHNOLOGIES',
0x00D048: u'ECTON, INC.',
0x00D049: u'IMPRESSTEK CO., LTD.',
0x00D04A: u'PRESENCE TECHNOLOGY GMBH',
0x00D04B: u'LA CIE GROUP S.A.',
0x00D04C: u'EUROTEL TELECOM LTD.',
0x00D04D: u'DIV OF RESEARCH & STATISTICS',
0x00D04E: u'LOGIBAG',
0x00D04F: u'BITRONICS, INC.',
0x00D050: u'ISKRATEL',
0x00D051: u'O2 MICRO, INC.',
0x00D052: u'ASCEND COMMUNICATIONS, INC.',
0x00D053: u'CONNECTED SYSTEMS',
0x00D054: u'SAS INSTITUTE INC.',
0x00D055: u'KATHREIN-WERKE KG',
0x00D056: u'SOMAT CORPORATION',
0x00D057: u'ULTRAK, INC.',
0x00D058: u'CISCO SYSTEMS, INC.',
0x00D059: u'AMBIT MICROSYSTEMS CORP.',
0x00D05A: u'SYMBIONICS, LTD.',
0x00D05B: u'ACROLOOP MOTION CONTROL',
0x00D05C: u'TECHNOTREND SYSTEMTECHNIK GMBH',
0x00D05D: u'INTELLIWORXX, INC.',
0x00D05E: u'STRATABEAM TECHNOLOGY, INC.',
0x00D05F: u'VALCOM, INC.',
0x00D060: u'PANASONIC EUROPEAN',
0x00D061: u'TREMON ENTERPRISES CO., LTD.',
0x00D062: u'DIGIGRAM',
0x00D063: u'CISCO SYSTEMS, INC.',
0x00D064: u'MULTITEL',
0x00D065: u'TOKO ELECTRIC',
0x00D066: u'WINTRISS ENGINEERING CORP.',
0x00D067: u'CAMPIO COMMUNICATIONS',
0x00D068: u'IWILL CORPORATION',
0x00D069: u'TECHNOLOGIC SYSTEMS',
0x00D06A: u'LINKUP SYSTEMS CORPORATION',
0x00D06B: u'SR TELECOM INC.',
0x00D06C: u'SHAREWAVE, INC.',
0x00D06D: u'ACRISON, INC.',
0x00D06E: u'TRENDVIEW RECORDERS LTD.',
0x00D06F: u'KMC CONTROLS',
0x00D070: u'LONG WELL ELECTRONICS CORP.',
0x00D071: u'ECHELON CORP.',
0x00D072: u'BROADLOGIC',
0x00D073: u'ACN ADVANCED COMMUNICATIONS',
0x00D074: u'TAQUA SYSTEMS, INC.',
0x00D075: u'ALARIS MEDICAL SYSTEMS, INC.',
0x00D076: u'Merrill Lynch & Co., Inc.',
0x00D077: u'LUCENT TECHNOLOGIES',
0x00D078: u'ELTEX OF SWEDEN AB',
0x00D079: u'CISCO SYSTEMS, INC.',
0x00D07A: u'AMAQUEST COMPUTER CORP.',
0x00D07B: u'COMCAM INTERNATIONAL LTD.',
0x00D07C: u'KOYO ELECTRONICS INC. CO.,LTD.',
0x00D07D: u'COSINE COMMUNICATIONS',
0x00D07E: u'KEYCORP LTD.',
0x00D07F: u'STRATEGY & TECHNOLOGY, LIMITED',
0x00D080: u'EXABYTE CORPORATION',
0x00D081: u'REAL TIME DEVICES USA, INC.',
0x00D082: u'IOWAVE INC.',
0x00D083: u'INVERTEX, INC.',
0x00D084: u'NEXCOMM SYSTEMS, INC.',
0x00D085: u'OTIS ELEVATOR COMPANY',
0x00D086: u'FOVEON, INC.',
0x00D087: u'MICROFIRST INC.',
0x00D088: u'Terayon Communications Systems',
0x00D089: u'DYNACOLOR, INC.',
0x00D08A: u'PHOTRON USA',
0x00D08B: u'ADVA Limited',
0x00D08C: u'GENOA TECHNOLOGY, INC.',
0x00D08D: u'PHOENIX GROUP, INC.',
0x00D08E: u'NVISION INC.',
0x00D08F: u'ARDENT TECHNOLOGIES, INC.',
0x00D090: u'CISCO SYSTEMS, INC.',
0x00D091: u'SMARTSAN SYSTEMS, INC.',
0x00D092: u'GLENAYRE WESTERN MULTIPLEX',
0x00D093: u'TQ - COMPONENTS GMBH',
0x00D094: u'TIMELINE VISTA, INC.',
0x00D095: u'Alcatel North America ESD',
0x00D096: u'3COM EUROPE LTD.',
0x00D097: u'CISCO SYSTEMS, INC.',
0x00D098: u'Photon Dynamics Canada Inc.',
0x00D099: u'ELCARD OY',
0x00D09A: u'FILANET CORPORATION',
0x00D09B: u'SPECTEL LTD.',
0x00D09C: u'KAPADIA COMMUNICATIONS',
0x00D09D: u'VERIS INDUSTRIES',
0x00D09E: u'2WIRE, INC.',
0x00D09F: u'NOVTEK TEST SYSTEMS',
0x00D0A0: u'MIPS DENMARK',
0x00D0A1: u'OSKAR VIERLING GMBH + CO. KG',
0x00D0A2: u'INTEGRATED DEVICE',
0x00D0A3: u'VOCAL DATA, INC.',
0x00D0A4: u'ALANTRO COMMUNICATIONS',
0x00D0A5: u'AMERICAN ARIUM',
0x00D0A6: u'LANBIRD TECHNOLOGY CO., LTD.',
0x00D0A7: u'TOKYO SOKKI KENKYUJO CO., LTD.',
0x00D0A8: u'NETWORK ENGINES, INC.',
0x00D0A9: u'SHINANO KENSHI CO., LTD.',
0x00D0AA: u'CHASE COMMUNICATIONS',
0x00D0AB: u'DELTAKABEL TELECOM CV',
0x00D0AC: u'GRAYSON WIRELESS',
0x00D0AD: u'TL INDUSTRIES',
0x00D0AE: u'ORESIS COMMUNICATIONS, INC.',
0x00D0AF: u'CUTLER-HAMMER, INC.',
0x00D0B0: u'BITSWITCH LTD.',
0x00D0B1: u'OMEGA ELECTRONICS SA',
0x00D0B2: u'XIOTECH CORPORATION',
0x00D0B3: u'DRS FLIGHT SAFETY AND',
0x00D0B4: u'KATSUJIMA CO., LTD.',
0x00D0B5: u'IPricot formerly DotCom',
0x00D0B6: u'CRESCENT NETWORKS, INC.',
0x00D0B7: u'INTEL CORPORATION',
0x00D0B8: u'Iomega Corporation',
0x00D0B9: u'MICROTEK INTERNATIONAL, INC.',
0x00D0BA: u'CISCO SYSTEMS, INC.',
0x00D0BB: u'CISCO SYSTEMS, INC.',
0x00D0BC: u'CISCO SYSTEMS, INC.',
0x00D0BD: u'SICAN GMBH',
0x00D0BE: u'EMUTEC INC.',
0x00D0BF: u'PIVOTAL TECHNOLOGIES',
0x00D0C0: u'CISCO SYSTEMS, INC.',
0x00D0C1: u'HARMONIC DATA SYSTEMS, LTD.',
0x00D0C2: u'BALTHAZAR TECHNOLOGY AB',
0x00D0C3: u'VIVID TECHNOLOGY PTE, LTD.',
0x00D0C4: u'TERATECH CORPORATION',
0x00D0C5: u'COMPUTATIONAL SYSTEMS, INC.',
0x00D0C6: u'THOMAS & BETTS CORP.',
0x00D0C7: u'PATHWAY, INC.',
0x00D0C8: u'I/O CONSULTING A/S',
0x00D0C9: u'ADVANTECH CO., LTD.',
0x00D0CA: u'INTRINSYC SOFTWARE INC.',
0x00D0CB: u'DASAN CO., LTD.',
0x00D0CC: u'TECHNOLOGIES LYRE INC.',
0x00D0CD: u'ATAN TECHNOLOGY INC.',
0x00D0CE: u'ASYST ELECTRONIC',
0x00D0CF: u'MORETON BAY',
0x00D0D0: u'ZHONGXING TELECOM LTD.',
0x00D0D1: u'SIROCCO SYSTEMS, INC.',
0x00D0D2: u'EPILOG CORPORATION',
0x00D0D3: u'CISCO SYSTEMS, INC.',
0x00D0D4: u'V-BITS, INC.',
0x00D0D5: u'GRUNDIG AG',
0x00D0D6: u'AETHRA TELECOMUNICAZIONI',
0x00D0D7: u'B2C2, INC.',
0x00D0D8: u'3Com Corporation',
0x00D0D9: u'DEDICATED MICROCOMPUTERS',
0x00D0DA: u'TAICOM DATA SYSTEMS CO., LTD.',
0x00D0DB: u'MCQUAY INTERNATIONAL',
0x00D0DC: u'MODULAR MINING SYSTEMS, INC.',
0x00D0DD: u'SUNRISE TELECOM, INC.',
0x00D0DE: u'PHILIPS MULTIMEDIA NETWORK',
0x00D0DF: u'KUZUMI ELECTRONICS, INC.',
0x00D0E0: u'DOOIN ELECTRONICS CO.',
0x00D0E1: u'AVIONITEK ISRAEL INC.',
0x00D0E2: u'MRT MICRO, INC.',
0x00D0E3: u'ELE-CHEM ENGINEERING CO., LTD.',
0x00D0E4: u'CISCO SYSTEMS, INC.',
0x00D0E5: u'SOLIDUM SYSTEMS CORP.',
0x00D0E6: u'IBOND INC.',
0x00D0E7: u'VCON TELECOMMUNICATION LTD.',
0x00D0E8: u'MAC SYSTEM CO., LTD.',
0x00D0E9: u'ADVANTAGE CENTURY',
0x00D0EA: u'NEXTONE COMMUNICATIONS, INC.',
0x00D0EB: u'LIGHTERA NETWORKS, INC.',
0x00D0EC: u'NAKAYO TELECOMMUNICATIONS, INC',
0x00D0ED: u'XIOX',
0x00D0EE: u'DICTAPHONE CORPORATION',
0x00D0EF: u'IGT',
0x00D0F0: u'CONVISION TECHNOLOGY GMBH',
0x00D0F1: u'SEGA ENTERPRISES, LTD.',
0x00D0F2: u'MONTEREY NETWORKS',
0x00D0F3: u'SOLARI DI UDINE SPA',
0x00D0F4: u'CARINTHIAN TECH INSTITUTE',
0x00D0F5: u'ORANGE MICRO, INC.',
0x00D0F6: u'Alcatel Canada',
0x00D0F7: u'NEXT NETS CORPORATION',
0x00D0F8: u'FUJIAN STAR TERMINAL',
0x00D0F9: u'ACUTE COMMUNICATIONS CORP.',
0x00D0FA: u'RACAL GUARDATA',
0x00D0FB: u'TEK MICROSYSTEMS, INCORPORATED',
0x00D0FC: u'GRANITE MICROSYSTEMS',
0x00D0FD: u'OPTIMA TELE.COM, INC.',
0x00D0FE: u'ASTRAL POINT',
0x00D0FF: u'CISCO SYSTEMS, INC.',
0x00DD00: u'UNGERMANN-BASS INC.',
0x00DD01: u'UNGERMANN-BASS INC.',
0x00DD02: u'UNGERMANN-BASS INC.',
0x00DD03: u'UNGERMANN-BASS INC.',
0x00DD04: u'UNGERMANN-BASS INC.',
0x00DD05: u'UNGERMANN-BASS INC.',
0x00DD06: u'UNGERMANN-BASS INC.',
0x00DD07: u'UNGERMANN-BASS INC.',
0x00DD08: u'UNGERMANN-BASS INC.',
0x00DD09: u'UNGERMANN-BASS INC.',
0x00DD0A: u'UNGERMANN-BASS INC.',
0x00DD0B: u'UNGERMANN-BASS INC.',
0x00DD0C: u'UNGERMANN-BASS INC.',
0x00DD0D: u'UNGERMANN-BASS INC.',
0x00DD0E: u'UNGERMANN-BASS INC.',
0x00DD0F: u'UNGERMANN-BASS INC.',
0x00E000: u'FUJITSU, LTD',
0x00E001: u'STRAND LIGHTING LIMITED',
0x00E002: u'CROSSROADS SYSTEMS, INC.',
0x00E003: u'NOKIA WIRELESS BUSINESS COMMUN',
0x00E004: u'PMC-SIERRA, INC.',
0x00E005: u'TECHNICAL CORP.',
0x00E006: u'SILICON INTEGRATED SYS. CORP.',
0x00E007: u'NETWORK ALCHEMY LTD.',
0x00E008: u'AMAZING CONTROLS! INC.',
0x00E009: u'MARATHON TECHNOLOGIES CORP.',
0x00E00A: u'DIBA, INC.',
0x00E00B: u'ROOFTOP COMMUNICATIONS CORP.',
0x00E00C: u'MOTOROLA',
0x00E00D: u'RADIANT SYSTEMS',
0x00E00E: u'AVALON IMAGING SYSTEMS, INC.',
0x00E00F: u'SHANGHAI BAUD DATA',
0x00E010: u'HESS SB-AUTOMATENBAU GmbH',
0x00E011: u'UNIDEN SAN DIEGO R&D CENTER, INC.',
0x00E012: u'PLUTO TECHNOLOGIES INTERNATIONAL INC.',
0x00E013: u'EASTERN ELECTRONIC CO., LTD.',
0x00E014: u'CISCO SYSTEMS, INC.',
0x00E015: u'HEIWA CORPORATION',
0x00E016: u'RAPID CITY COMMUNICATIONS',
0x00E017: u'EXXACT GmbH',
0x00E018: u'ASUSTEK COMPUTER INC.',
0x00E019: u'ING. GIORDANO ELETTRONICA',
0x00E01A: u'COMTEC SYSTEMS. CO., LTD.',
0x00E01B: u'SPHERE COMMUNICATIONS, INC.',
0x00E01C: u'MOBILITY ELECTRONICSY',
0x00E01D: u'WebTV NETWORKS, INC.',
0x00E01E: u'CISCO SYSTEMS, INC.',
0x00E01F: u'AVIDIA Systems, Inc.',
0x00E020: u'TECNOMEN OY',
0x00E021: u'FREEGATE CORP.',
0x00E022: u'Analog Devices Inc.',
0x00E023: u'TELRAD',
0x00E024: u'GADZOOX NETWORKS',
0x00E025: u'dit CO., LTD.',
0x00E026: u'Redlake MASD LLC',
0x00E027: u'DUX, INC.',
0x00E028: u'APTIX CORPORATION',
0x00E029: u'STANDARD MICROSYSTEMS CORP.',
0x00E02A: u'TANDBERG TELEVISION AS',
0x00E02B: u'EXTREME NETWORKS',
0x00E02C: u'AST COMPUTER',
0x00E02D: u'InnoMediaLogic, Inc.',
0x00E02E: u'SPC ELECTRONICS CORPORATION',
0x00E02F: u'MCNS HOLDINGS, L.P.',
0x00E030: u'MELITA INTERNATIONAL CORP.',
0x00E031: u'HAGIWARA ELECTRIC CO., LTD.',
0x00E032: u'MISYS FINANCIAL SYSTEMS, LTD.',
0x00E033: u'E.E.P.D. GmbH',
0x00E034: u'CISCO SYSTEMS, INC.',
0x00E035: u'LOUGHBOROUGH SOUND IMAGES, PLC',
0x00E036: u'PIONEER CORPORATION',
0x00E037: u'CENTURY CORPORATION',
0x00E038: u'PROXIMA CORPORATION',
0x00E039: u'PARADYNE CORP.',
0x00E03A: u'CABLETRON SYSTEMS, INC.',
0x00E03B: u'PROMINET CORPORATION',
0x00E03C: u'AdvanSys',
0x00E03D: u'FOCON ELECTRONIC SYSTEMS A/S',
0x00E03E: u'ALFATECH, INC.',
0x00E03F: u'JATON CORPORATION',
0x00E040: u'DeskStation Technology, Inc.',
0x00E041: u'CSPI',
0x00E042: u'Pacom Systems Ltd.',
0x00E043: u'VitalCom',
0x00E044: u'LSICS CORPORATION',
0x00E045: u'TOUCHWAVE, INC.',
0x00E046: u'BENTLY NEVADA CORP.',
0x00E047: u'INFOCUS SYSTEMS',
0x00E048: u'SDL COMMUNICATIONS, INC.',
0x00E049: u'MICROWI ELECTRONIC GmbH',
0x00E04A: u'ENHANCED MESSAGING SYSTEMS, INC',
0x00E04B: u'JUMP INDUSTRIELLE COMPUTERTECHNIK GmbH',
0x00E04C: u'REALTEK SEMICONDUCTOR CORP.',
0x00E04D: u'INTERNET INITIATIVE JAPAN, INC',
0x00E04E: u'SANYO DENKI CO., LTD.',
0x00E04F: u'CISCO SYSTEMS, INC.',
0x00E050: u'EXECUTONE INFORMATION SYSTEMS, INC.',
0x00E051: u'TALX CORPORATION',
0x00E052: u'FOUNDRY NETWORKS, INC.',
0x00E053: u'CELLPORT LABS, INC.',
0x00E054: u'KODAI HITEC CO., LTD.',
0x00E055: u'INGENIERIA ELECTRONICA COMERCIAL INELCOM S.A.',
0x00E056: u'HOLONTECH CORPORATION',
0x00E057: u'HAN MICROTELECOM. CO., LTD.',
0x00E058: u'PHASE ONE DENMARK A/S',
0x00E059: u'CONTROLLED ENVIRONMENTS, LTD.',
0x00E05A: u'GALEA NETWORK SECURITY',
0x00E05B: u'WEST END SYSTEMS CORP.',
0x00E05C: u'MATSUSHITA KOTOBUKI ELECTRONICS INDUSTRIES, LTD.',
0x00E05D: u'UNITEC CO., LTD.',
0x00E05E: u'JAPAN AVIATION ELECTRONICS INDUSTRY, LTD.',
0x00E05F: u'e-Net, Inc.',
0x00E060: u'SHERWOOD',
0x00E061: u'EdgePoint Networks, Inc.',
0x00E062: u'HOST ENGINEERING',
0x00E063: u'CABLETRON - YAGO SYSTEMS, INC.',
0x00E064: u'SAMSUNG ELECTRONICS',
0x00E065: u'OPTICAL ACCESS INTERNATIONAL',
0x00E066: u'ProMax Systems, Inc.',
0x00E067: u'eac AUTOMATION-CONSULTING GmbH',
0x00E068: u'MERRIMAC SYSTEMS INC.',
0x00E069: u'JAYCOR',
0x00E06A: u'KAPSCH AG',
0x00E06B: u'W&G SPECIAL PRODUCTS',
0x00E06C: u'AEP Systems International Ltd',
0x00E06D: u'COMPUWARE CORPORATION',
0x00E06E: u'FAR SYSTEMS S.p.A.',
0x00E06F: u'Terayon Communications Systems',
0x00E070: u'DH TECHNOLOGY',
0x00E071: u'EPIS MICROCOMPUTER',
0x00E072: u'LYNK',
0x00E073: u'NATIONAL AMUSEMENT NETWORK, INC.',
0x00E074: u'TIERNAN COMMUNICATIONS, INC.',
0x00E075: u'Verilink Corporation',
0x00E076: u'DEVELOPMENT CONCEPTS, INC.',
0x00E077: u'WEBGEAR, INC.',
0x00E078: u'BERKELEY NETWORKS',
0x00E079: u'A.T.N.R.',
0x00E07A: u'MIKRODIDAKT AB',
0x00E07B: u'BAY NETWORKS',
0x00E07C: u'METTLER-TOLEDO, INC.',
0x00E07D: u'NETRONIX, INC.',
0x00E07E: u'WALT DISNEY IMAGINEERING',
0x00E07F: u'LOGISTISTEM s.r.l.',
0x00E080: u'CONTROL RESOURCES CORPORATION',
0x00E081: u'TYAN COMPUTER CORP.',
0x00E082: u'ANERMA',
0x00E083: u'JATO TECHNOLOGIES, INC.',
0x00E084: u'COMPULITE R&D',
0x00E085: u'GLOBAL MAINTECH, INC.',
0x00E086: u'CYBEX COMPUTER PRODUCTS',
0x00E087: u'LeCroy - Networking Productions Division',
0x00E088: u'LTX CORPORATION',
0x00E089: u'ION Networks, Inc.',
0x00E08A: u'GEC AVERY, LTD.',
0x00E08B: u'QLogic Corp.',
0x00E08C: u'NEOPARADIGM LABS, INC.',
0x00E08D: u'PRESSURE SYSTEMS, INC.',
0x00E08E: u'UTSTARCOM',
0x00E08F: u'CISCO SYSTEMS, INC.',
0x00E090: u'BECKMAN LAB. AUTOMATION DIV.',
0x00E091: u'LG ELECTRONICS, INC.',
0x00E092: u'ADMTEK INCORPORATED',
0x00E093: u'ACKFIN NETWORKS',
0x00E094: u'OSAI SRL',
0x00E095: u'ADVANCED-VISION TECHNOLGIES CORP.',
0x00E096: u'SHIMADZU CORPORATION',
0x00E097: u'CARRIER ACCESS CORPORATION',
0x00E098: u'AboCom Systems, Inc.',
0x00E099: u'SAMSON AG',
0x00E09A: u'POSITRON INDUSTRIES, INC.',
0x00E09B: u'ENGAGE NETWORKS, INC.',
0x00E09C: u'MII',
0x00E09D: u'SARNOFF CORPORATION',
0x00E09E: u'QUANTUM CORPORATION',
0x00E09F: u'PIXEL VISION',
0x00E0A0: u'WILTRON CO.',
0x00E0A1: u'HIMA PAUL HILDEBRANDT GmbH Co. KG',
0x00E0A2: u'MICROSLATE INC.',
0x00E0A3: u'CISCO SYSTEMS, INC.',
0x00E0A4: u'ESAOTE S.p.A.',
0x00E0A5: u'ComCore Semiconductor, Inc.',
0x00E0A6: u'TELOGY NETWORKS, INC.',
0x00E0A7: u'IPC INFORMATION SYSTEMS, INC.',
0x00E0A8: u'SAT GmbH & Co.',
0x00E0A9: u'FUNAI ELECTRIC CO., LTD.',
0x00E0AA: u'ELECTROSONIC LTD.',
0x00E0AB: u'DIMAT S.A.',
0x00E0AC: u'MIDSCO, INC.',
0x00E0AD: u'EES TECHNOLOGY, LTD.',
0x00E0AE: u'XAQTI CORPORATION',
0x00E0AF: u'GENERAL DYNAMICS INFORMATION SYSTEMS',
0x00E0B0: u'CISCO SYSTEMS, INC.',
0x00E0B1: u'Alcatel North America ESD',
0x00E0B2: u'TELMAX COMMUNICATIONS CORP.',
0x00E0B3: u'EtherWAN Systems, Inc.',
0x00E0B4: u'TECHNO SCOPE CO., LTD.',
0x00E0B5: u'ARDENT COMMUNICATIONS CORP.',
0x00E0B6: u'Entrada Networks',
0x00E0B7: u'PI GROUP, LTD.',
0x00E0B8: u'GATEWAY 2000',
0x00E0B9: u'BYAS SYSTEMS',
0x00E0BA: u'BERGHOF AUTOMATIONSTECHNIK GmbH',
0x00E0BB: u'NBX CORPORATION',
0x00E0BC: u'SYMON COMMUNICATIONS, INC.',
0x00E0BD: u'INTERFACE SYSTEMS, INC.',
0x00E0BE: u'GENROCO INTERNATIONAL, INC.',
0x00E0BF: u'TORRENT NETWORKING TECHNOLOGIES CORP.',
0x00E0C0: u'SEIWA ELECTRIC MFG. CO., LTD.',
0x00E0C1: u'MEMOREX TELEX JAPAN, LTD.',
0x00E0C2: u'NECSY S.p.A.',
0x00E0C3: u'SAKAI SYSTEM DEVELOPMENT CORP.',
0x00E0C4: u'HORNER ELECTRIC, INC.',
0x00E0C5: u'BCOM ELECTRONICS INC.',
0x00E0C6: u'LINK2IT, L.L.C.',
0x00E0C7: u'EUROTECH SRL',
0x00E0C8: u'VIRTUAL ACCESS, LTD.',
0x00E0C9: u'AutomatedLogic Corporation',
0x00E0CA: u'BEST DATA PRODUCTS',
0x00E0CB: u'RESON, INC.',
0x00E0CC: u'HERO SYSTEMS, LTD.',
0x00E0CD: u'SENSIS CORPORATION',
0x00E0CE: u'ARN',
0x00E0CF: u'INTEGRATED DEVICE TECHNOLOGY, INC.',
0x00E0D0: u'NETSPEED, INC.',
0x00E0D1: u'TELSIS LIMITED',
0x00E0D2: u'VERSANET COMMUNICATIONS, INC.',
0x00E0D3: u'DATENTECHNIK GmbH',
0x00E0D4: u'EXCELLENT COMPUTER',
0x00E0D5: u'ARCXEL TECHNOLOGIES, INC.',
0x00E0D6: u'COMPUTER & COMMUNICATION RESEARCH LAB.',
0x00E0D7: u'SUNSHINE ELECTRONICS, INC.',
0x00E0D8: u'LANBit Computer, Inc.',
0x00E0D9: u'TAZMO CO., LTD.',
0x00E0DA: u'Alcatel North America ESD',
0x00E0DB: u'ViaVideo Communications, Inc.',
0x00E0DC: u'NEXWARE CORP.',
0x00E0DD: u'ZENITH ELECTRONICS CORPORATION',
0x00E0DE: u'DATAX NV',
0x00E0DF: u'KE KOMMUNIKATIONS-ELECTRONIK',
0x00E0E0: u'SI ELECTRONICS, LTD.',
0x00E0E1: u'G2 NETWORKS, INC.',
0x00E0E2: u'INNOVA CORP.',
0x00E0E3: u'SK-ELEKTRONIK GmbH',
0x00E0E4: u'FANUC ROBOTICS NORTH AMERICA, Inc.',
0x00E0E5: u'CINCO NETWORKS, INC.',
0x00E0E6: u'INCAA DATACOM B.V.',
0x00E0E7: u'RAYTHEON E-SYSTEMS, INC.',
0x00E0E8: u'GRETACODER Data Systems AG',
0x00E0E9: u'DATA LABS, INC.',
0x00E0EA: u'INNOVAT COMMUNICATIONS, INC.',
0x00E0EB: u'DIGICOM SYSTEMS, INCORPORATED',
0x00E0EC: u'CELESTICA INC.',
0x00E0ED: u'SILICOM, LTD.',
0x00E0EE: u'MAREL HF',
0x00E0EF: u'DIONEX',
0x00E0F0: u'ABLER TECHNOLOGY, INC.',
0x00E0F1: u'THAT CORPORATION',
0x00E0F2: u'ARLOTTO COMNET, INC.',
0x00E0F3: u'WebSprint Communications, Inc.',
0x00E0F4: u'INSIDE Technology A/S',
0x00E0F5: u'TELES AG',
0x00E0F6: u'DECISION EUROPE',
0x00E0F7: u'CISCO SYSTEMS, INC.',
0x00E0F8: u'DICNA CONTROL AB',
0x00E0F9: u'CISCO SYSTEMS, INC.',
0x00E0FA: u'TRL TECHNOLOGY, LTD.',
0x00E0FB: u'LEIGHTRONIX, INC.',
0x00E0FC: u'HUAWEI TECHNOLOGIES CO., LTD.',
0x00E0FD: u'A-TREND TECHNOLOGY CO., LTD.',
0x00E0FE: u'CISCO SYSTEMS, INC.',
0x00E0FF: u'SECURITY DYNAMICS TECHNOLOGIES, Inc.',
0x00E6D3: u'NIXDORF COMPUTER CORP.',
0x020701: u'RACAL-DATACOM',
0x021C7C: u'PERQ SYSTEMS CORPORATION',
0x026086: u'LOGIC REPLACEMENT TECH. LTD.',
0x02608C: u'3COM CORPORATION',
0x027001: u'RACAL-DATACOM',
0x0270B0: u'M/A-COM INC. COMPANIES',
0x0270B3: u'DATA RECALL LTD',
0x029D8E: u'CARDIAC RECORDERS INC.',
0x02AA3C: u'OLIVETTI TELECOMM SPA (OLTECO)',
0x02BB01: u'OCTOTHORPE CORP.',
0x02C08C: u'3COM CORPORATION',
0x02CF1C: u'COMMUNICATION MACHINERY CORP.',
0x02E6D3: u'NIXDORF COMPUTER CORPORATION',
0x040AE0: u'XMIT AG COMPUTER NETWORKS',
0x04E0C4: u'TRIUMPH-ADLER AG',
0x080001: u'COMPUTERVISION CORPORATION',
0x080002: u'BRIDGE COMMUNICATIONS INC.',
0x080003: u'ADVANCED COMPUTER COMM.',
0x080004: u'CROMEMCO INCORPORATED',
0x080005: u'SYMBOLICS INC.',
0x080006: u'SIEMENS AG',
0x080007: u'APPLE COMPUTER INC.',
0x080008: u'BOLT BERANEK AND NEWMAN INC.',
0x080009: u'HEWLETT PACKARD',
0x08000A: u'NESTAR SYSTEMS INCORPORATED',
0x08000B: u'UNISYS CORPORATION',
0x08000C: u'MIKLYN DEVELOPMENT CO.',
0x08000D: u'INTERNATIONAL COMPUTERS LTD.',
0x08000E: u'NCR CORPORATION',
0x08000F: u'MITEL CORPORATION',
0x080011: u'TEKTRONIX INC.',
0x080012: u'BELL ATLANTIC INTEGRATED SYST.',
0x080013: u'EXXON',
0x080014: u'EXCELAN',
0x080015: u'STC BUSINESS SYSTEMS',
0x080016: u'BARRISTER INFO SYS CORP',
0x080017: u'NATIONAL SEMICONDUCTOR',
0x080018: u'PIRELLI FOCOM NETWORKS',
0x080019: u'GENERAL ELECTRIC CORPORATION',
0x08001A: u'TIARA/ 10NET',
0x08001B: u'DATA GENERAL',
0x08001C: u'KDD-KOKUSAI DEBNSIN DENWA CO.',
0x08001D: u'ABLE COMMUNICATIONS INC.',
0x08001E: u'APOLLO COMPUTER INC.',
0x08001F: u'SHARP CORPORATION',
0x080020: u'SUN MICROSYSTEMS INC.',
0x080021: u'3M COMPANY',
0x080022: u'NBI INC.',
0x080023: u'Panasonic Communications Co., Ltd.',
0x080024: u'10NET COMMUNICATIONS/DCA',
0x080025: u'CONTROL DATA',
0x080026: u'NORSK DATA A.S.',
0x080027: u'CADMUS COMPUTER SYSTEMS',
0x080028: u'Texas Instruments',
0x080029: u'MEGATEK CORPORATION',
0x08002A: u'MOSAIC TECHNOLOGIES INC.',
0x08002B: u'DIGITAL EQUIPMENT CORPORATION',
0x08002C: u'BRITTON LEE INC.',
0x08002D: u'LAN-TEC INC.',
0x08002E: u'METAPHOR COMPUTER SYSTEMS',
0x08002F: u'PRIME COMPUTER INC.',
0x080030: u'NETWORK RESEARCH CORPORATION',
0x080030: u'CERN',
0x080030: u'ROYAL MELBOURNE INST OF TECH',
0x080031: u'LITTLE MACHINES INC.',
0x080032: u'TIGAN INCORPORATED',
0x080033: u'BAUSCH & LOMB',
0x080034: u'FILENET CORPORATION',
0x080035: u'MICROFIVE CORPORATION',
0x080036: u'INTERGRAPH CORPORATION',
0x080037: u'FUJI-XEROX CO. LTD.',
0x080038: u'CII HONEYWELL BULL',
0x080039: u'SPIDER SYSTEMS LIMITED',
0x08003A: u'ORCATECH INC.',
0x08003B: u'TORUS SYSTEMS LIMITED',
0x08003C: u'SCHLUMBERGER WELL SERVICES',
0x08003D: u'CADNETIX CORPORATIONS',
0x08003E: u'CODEX CORPORATION',
0x08003F: u'FRED KOSCHARA ENTERPRISES',
0x080040: u'FERRANTI COMPUTER SYS. LIMITED',
0x080041: u'RACAL-MILGO INFORMATION SYS..',
0x080042: u'JAPAN MACNICS CORP.',
0x080043: u'PIXEL COMPUTER INC.',
0x080044: u'DAVID SYSTEMS INC.',
0x080045: u'CONCURRENT COMPUTER CORP.',
0x080046: u'SONY CORPORATION LTD.',
0x080047: u'SEQUENT COMPUTER SYSTEMS INC.',
0x080048: u'EUROTHERM GAUGING SYSTEMS',
0x080049: u'UNIVATION',
0x08004A: u'BANYAN SYSTEMS INC.',
0x08004B: u'PLANNING RESEARCH CORP.',
0x08004C: u'HYDRA COMPUTER SYSTEMS INC.',
0x08004D: u'CORVUS SYSTEMS INC.',
0x08004E: u'3COM EUROPE LTD.',
0x08004F: u'CYGNET SYSTEMS',
0x080050: u'DAISY SYSTEMS CORP.',
0x080051: u'EXPERDATA',
0x080052: u'INSYSTEC',
0x080053: u'MIDDLE EAST TECH. UNIVERSITY',
0x080055: u'STANFORD TELECOMM. INC.',
0x080056: u'STANFORD LINEAR ACCEL. CENTER',
0x080057: u'EVANS & SUTHERLAND',
0x080058: u'SYSTEMS CONCEPTS',
0x080059: u'A/S MYCRON',
0x08005A: u'IBM CORPORATION',
0x08005B: u'VTA TECHNOLOGIES INC.',
0x08005C: u'FOUR PHASE SYSTEMS',
0x08005D: u'GOULD INC.',
0x08005E: u'COUNTERPOINT COMPUTER INC.',
0x08005F: u'SABER TECHNOLOGY CORP.',
0x080060: u'INDUSTRIAL NETWORKING INC.',
0x080061: u'JAROGATE LTD.',
0x080062: u'GENERAL DYNAMICS',
0x080063: u'PLESSEY',
0x080064: u'AUTOPHON AG',
0x080065: u'GENRAD INC.',
0x080066: u'AGFA CORPORATION',
0x080067: u'COMDESIGN',
0x080068: u'RIDGE COMPUTERS',
0x080069: u'SILICON GRAPHICS INC.',
0x08006A: u'ATT BELL LABORATORIES',
0x08006B: u'ACCEL TECHNOLOGIES INC.',
0x08006C: u'SUNTEK TECHNOLOGY INT\'L',
0x08006D: u'WHITECHAPEL COMPUTER WORKS',
0x08006E: u'MASSCOMP',
0x08006F: u'PHILIPS APELDOORN B.V.',
0x080070: u'MITSUBISHI ELECTRIC CORP.',
0x080071: u'MATRA (DSIE)',
0x080072: u'XEROX CORP UNIV GRANT PROGRAM',
0x080073: u'TECMAR INC.',
0x080074: u'CASIO COMPUTER CO. LTD.',
0x080075: u'DANSK DATA ELECTRONIK',
0x080076: u'PC LAN TECHNOLOGIES',
0x080077: u'TSL COMMUNICATIONS LTD.',
0x080078: u'ACCELL CORPORATION',
0x080079: u'THE DROID WORKS',
0x08007A: u'INDATA',
0x08007B: u'SANYO ELECTRIC CO. LTD.',
0x08007C: u'VITALINK COMMUNICATIONS CORP.',
0x08007E: u'AMALGAMATED WIRELESS(AUS) LTD',
0x08007F: u'CARNEGIE-MELLON UNIVERSITY',
0x080080: u'AES DATA INC.',
0x080081: u'ASTECH INC.',
0x080082: u'VERITAS SOFTWARE',
0x080083: u'Seiko Instruments Inc.',
0x080084: u'TOMEN ELECTRONICS CORP.',
0x080085: u'ELXSI',
0x080086: u'KONICA MINOLTA HOLDINGS, INC.',
0x080087: u'XYPLEX',
0x080088: u'MCDATA CORPORATION',
0x080089: u'KINETICS',
0x08008A: u'PERFORMANCE TECHNOLOGY',
0x08008B: u'PYRAMID TECHNOLOGY CORP.',
0x08008C: u'NETWORK RESEARCH CORPORATION',
0x08008D: u'XYVISION INC.',
0x08008E: u'TANDEM COMPUTERS',
0x08008F: u'CHIPCOM CORPORATION',
0x080090: u'SONOMA SYSTEMS',
0x081443: u'UNIBRAIN S.A.',
0x08BBCC: u'AK-NORD EDV VERTRIEBSGES. mbH',
0x100000: u'PRIVATE',
0x10005A: u'IBM CORPORATION',
0x1000E8: u'NATIONAL SEMICONDUCTOR',
0x1100AA: u'PRIVATE',
0x800010: u'ATT BELL LABORATORIES',
0xA06A00: u'Verilink Corporation',
0xAA0000: u'DIGITAL EQUIPMENT CORPORATION',
0xAA0001: u'DIGITAL EQUIPMENT CORPORATION',
0xAA0002: u'DIGITAL EQUIPMENT CORPORATION',
0xAA0003: u'DIGITAL EQUIPMENT CORPORATION',
0xAA0004: u'DIGITAL EQUIPMENT CORPORATION',
0xACDE48: u'PRIVATE',
}
| gpl-3.0 |
hugs/django | tests/regressiontests/templates/unicode.py | 33 | 1131 | # -*- coding: utf-8 -*-
# Doctest suite (run via Django's test runner) exercising unicode handling in
# the template engine: templates and contexts may be built from unicode
# strings or UTF-8 bytestrings, and rendering yields unicode "safe" output.
# NOTE: the ``ur"""`` prefix makes this Python-2-only source.
unicode_tests = ur"""
Templates can be created from unicode strings.

>>> from django.template import *
>>> from django.utils.safestring import SafeData
>>> t1 = Template(u'ŠĐĆŽćžšđ {{ var }}')

Templates can also be created from bytestrings. These are assumed by encoded
using UTF-8.

>>> s = '\xc5\xa0\xc4\x90\xc4\x86\xc5\xbd\xc4\x87\xc5\xbe\xc5\xa1\xc4\x91 {{ var }}'
>>> t2 = Template(s)
>>> s = '\x80\xc5\xc0'
>>> Template(s)
Traceback (most recent call last):
    ...
TemplateEncodingError: Templates can only be constructed from unicode or UTF-8 strings.

Contexts can be constructed from unicode or UTF-8 bytestrings.

>>> c1 = Context({'var': 'foo'})
>>> c2 = Context({u'var': 'foo'})
>>> c3 = Context({'var': u'Đđ'})
>>> c4 = Context({u'var': '\xc4\x90\xc4\x91'})

Since both templates and all four contexts represent the same thing, they all
render the same (and are returned as unicode objects and "safe" objects as
well, for auto-escaping purposes).

>>> t1.render(c3) == t2.render(c3)
True
>>> isinstance(t1.render(c3), unicode)
True
>>> isinstance(t1.render(c3), SafeData)
True
"""
| bsd-3-clause |
asdil12/pywikibase | things.py | 1 | 4025 | #!/usr/bin/python2
import re
class BaseValue(object):
    """Common base class for wikibase datavalue wrappers."""

    def __init__(self, value):
        # Raw value as provided by / sent to the wikibase API.
        self.value = value

    def __str__(self):
        # BUGFIX: the original returned the bare name ``value`` (a NameError
        # at call time); the instance attribute is what is meant here.
        return self.value

    def __repr__(self):
        return "<%s object: %s>" % (self.__class__.__name__, self.__str__())

    def to_value(self):
        """Return the value in the form expected by set_claim."""
        return self.__str__()
# Note:
# to_value: generates value as expected by set_claim (py obj)
# from_value: expects datavalue.value as provided by get_claims (py obj)
class Property(BaseValue):
    """A wikibase property ("P<number>") datavalue."""

    def __init__(self, id):
        # Accept either a numeric id or a string such as "P31" / "p31".
        self.id = int(id.upper().replace("P", "")) if isinstance(id, str) else id

    def __str__(self):
        return "P%i" % self.id

    def to_value(self):
        """Serialize to the dict form expected by set_claim."""
        return {"entity-type": "property", "numeric-id": self.id}

    @classmethod
    def from_value(cls, value):
        """Build a Property from a ``datavalue.value`` dict."""
        assert value["entity-type"] == "property"
        return cls(value["numeric-id"])
class Item(BaseValue):
    """A wikibase item ("Q<number>") datavalue."""

    def __init__(self, id):
        # Accept either a numeric id or a string such as "Q42" / "q42".
        if isinstance(id, str):
            id = int(id.upper().replace("Q", ""))
        self.id = id

    def __str__(self):
        return "Q%i" % self.id

    def to_value(self):
        """Serialize to the dict form expected by set_claim."""
        return {"entity-type": "item", "numeric-id": self.id}

    @classmethod
    def from_value(cls, value):
        """Build an Item (or, despite the name, a Property) from a dict."""
        # ok this is ugly... a "property" entity can appear where an item is
        # expected, so delegate to Property in that case.
        if value["entity-type"] == "property":
            return Property.from_value(value)
        assert value["entity-type"] == "item"
        return cls(value["numeric-id"])
class String(BaseValue):
    # Plain string datavalue: the wire format and the Python value are the
    # same object, so all three conversions are pass-throughs.
    def __str__(self):
        return self.value

    def to_value(self):
        return self.value

    @classmethod
    def from_value(cls, value):
        return cls(value)
class Time(BaseValue):
    """Wikibase time datavalue.

    wikibase uses a datetime format based on ISO8601,
    e.g. "+00000002013-01-01T00:00:00Z" (explicit sign, 11-digit year).
    """

    # Parses the signed, zero-padded timestamp format shown above.
    iso8601_re = re.compile(r"(?P<ysign>[\+\-])(?P<year>\d+)-(?P<month>\d+)-(?P<day>\d+)T(?P<hour>\d+):(?P<minute>\d+):(?P<second>\d+)Z")

    def __init__(self, time, timezone=0, before=0, after=0, precision=11, calendarmodel="http://www.wikidata.org/entity/Q1985727"):
        # ``time`` is a dict with year/month/day/hour/minute/second ints.
        self.time = time
        self.timezone = timezone
        self.before = before
        self.after = after
        self.precision = precision  # FIXME: allow string input
        self.calendarmodel = calendarmodel

    def __str__(self):
        return self.to_value()["time"]

    def to_value(self):
        """Serialize to the dict form expected by set_claim."""
        ysign = '+' if self.time["year"] >= 0 else '-'
        # BUGFIX: interpolate the *absolute* year -- the sign is already
        # carried by ``ysign``.  Formatting the raw negative year produced a
        # malformed double sign (e.g. "--0000000044" for 44 BCE) which the
        # from_value() regex cannot parse back.
        fields = dict(self.time, year=abs(self.time["year"]))
        value_out = {
            "time": ysign + "%(year)011i-%(month)02i-%(day)02iT%(hour)02i:%(minute)02i:%(second)02iZ" % fields,
            "timezone": self.timezone,
            "before": self.before,
            "after": self.after,
            "precision": self.precision,
            "calendarmodel": self.calendarmodel,
        }
        return value_out

    @classmethod
    def from_value(cls, value):
        """Build a Time from a ``datavalue.value`` dict (inverse of to_value)."""
        # FIXME: catch error exception when match is empty - raise proper error
        time_raw = Time.iso8601_re.match(value["time"]).groupdict()
        value_in = {
            "time": {
                # Re-attach the sign so BCE years come back negative.
                "year": int("%(ysign)s%(year)s" % time_raw),
                "month": int(time_raw["month"]),
                "day": int(time_raw["day"]),
                "hour": int(time_raw["hour"]),
                "minute": int(time_raw["minute"]),
                "second": int(time_raw["second"]),
            },
            "timezone": value["timezone"],
            "before": value["before"],
            "after": value["after"],
            "precision": value["precision"],
            "calendarmodel": value["calendarmodel"],
        }
        return cls(**value_in)
class GlobeCoordinate(BaseValue):
    """Latitude/longitude pair on a globe (default globe: Earth, Q2)."""

    def __init__(self, latitude, longitude, precision=0.000001, globe="http://www.wikidata.org/entity/Q2"):
        self.latitude = latitude
        self.longitude = longitude
        self.precision = precision  # in degrees (or fractions of)
        self.globe = globe

    def __str__(self):
        return "%f, %f" % (self.latitude, self.longitude)

    def to_value(self):
        """Serialize to the dict form expected by set_claim."""
        return {
            "latitude": self.latitude,
            "longitude": self.longitude,
            "precision": self.precision,
            "globe": self.globe,
        }

    @classmethod
    def from_value(cls, value):
        """Build a GlobeCoordinate from a ``datavalue.value`` dict."""
        # The API may include an 'altitude' entry that __init__ does not
        # accept; discard it (mutating the caller's dict, as before).
        value.pop('altitude', None)
        return cls(**value)
# datavalue.type -> type class
types = {
    "wikibase-entityid": Item,  # or Property
    "string": String,
    "time": Time,
    "globecoordinate": GlobeCoordinate,
}


def thing_from_datavalue(datavalue):
    """Instantiate the wrapper class matching ``datavalue["type"]``.

    ``datavalue`` is a dict as returned by get_claims, carrying "type"
    and "value" keys; dispatch happens through the ``types`` table above.
    """
    return types[datavalue["type"]].from_value(datavalue["value"])
| gpl-3.0 |
shtrom/gtg | GTG/core/config.py | 3 | 7452 | # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Getting Things GNOME! - a personal organizer for the GNOME desktop
# Copyright (c) 2008-2015 - Lionel Dricot & Bertrand Rousseau
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
# -----------------------------------------------------------------------------
"""
Classes responsible for handling user configuration
"""
import configparser
import os
import re
from GTG.core.dirs import CONFIG_DIR
from GTG.tools.logger import Log
# Default configuration values, grouped by config-file section.  Each key of
# the outer dict is a section name; the inner dicts map option names to the
# value used when the user's config file does not provide one (see
# SectionConfig.get below).
DEFAULTS = {
    # TODO: Remove toolbar and quick_add options from configuration
    # They won't be used in GTG 0.4
    'browser': {
        "bg_color_enable": True,
        "contents_preview_enable": False,
        'tag_pane': False,
        "sidebar_width": 120,
        "closed_task_pane": False,
        'bottom_pane_position': 300,
        'toolbar': True,
        'quick_add': True,
        'collapsed_tasks': [],
        'expanded_tags': [],
        'view': 'default',
        "opened_tasks": [],
        'width': 400,
        'height': 400,
        'max': False,
        'x_pos': 10,
        'y_pos': 10,
        'tasklist_sort_column': 5,
        'tasklist_sort_order': 1,
        "font_name": "",
        'hour': "00",
        'min': "00",
    },
    'tag_editor': {
        "custom_colors": [],
    },
    'plugins': {
        "enabled": [],
        "disabled": [],
    },
    'task': {
        'position': [],
        'size': [],
    },
}
def open_config_file(config_file):
    """Open ``config_file``, creating it (and its directory) when missing.

    Access rights are verified up front so the user is not surprised by a
    save failure when exiting the application.  Returns a ConfigParser
    loaded with whatever could be parsed from the file.
    """
    parent = os.path.dirname(config_file)
    if not os.path.exists(parent):
        os.makedirs(parent)
    if not os.path.exists(config_file):
        open(config_file, "w").close()
    if not os.access(config_file, os.R_OK | os.W_OK):
        raise Exception("File " + config_file + " is a configuration file "
                        "for gtg, but it cannot be read or written. "
                        "Please check it")
    parser = configparser.ConfigParser()
    try:
        parser.read(config_file)
    except configparser.Error as e:
        Log.warning("Problem with opening file %s: %s", config_file, e)
    return parser
class SectionConfig(object):
    """ Configuration only for a section (system or a task) """

    def __init__(self, section_name, section, defaults, save_function):
        """ Initializes section config:

        - section_name: name for writing error logs
        - section: section of the config handled by this object
        - defaults: dictionary of default values
        - save_function: function to be called to save changes (this function
          needs to save the whole config)
        """
        self._section_name = section_name
        self._section = section
        self._defaults = defaults
        self._save_function = save_function

    def _getlist(self, option):
        """ Parses string representation of list from configuration

        List can't contain an empty value as those are skipped over,
        e.g. "a, ,b" is parsed as ['a', 'b'].

        Accepted formats:
         - "('a', 'b'),('c','d','e')" => ["('a', 'b')", "('c','d','e')"]
         - "a, b" => ['a', 'b']
        """
        raw = self._section.get(option)
        if not raw:
            return None

        # Match tuples in format "('string1', 'string2', ...)"
        values = re.findall(r'\(.*?\)', raw)
        if not values:
            # It is only a normal, comma separated list
            values = raw.split(',')
        # BUGFIX: filter on the *stripped* item so whitespace-only entries
        # are skipped, as the docstring promises ("a, ,b" -> ['a', 'b']).
        # The original filtered before stripping and returned '' entries.
        return [item.strip() for item in values if item.strip()]

    def _type_function(self, default_value):
        """ Returns function that returns correct type of value """
        default_type = type(default_value)
        if default_type in (list, tuple):
            return self._getlist
        elif default_type == int:
            return self._section.getint
        elif default_type == bool:
            return self._section.getboolean
        else:
            return self._section.get

    def get(self, option):
        """ Get option from configuration.

        If the option is not specified in the configuration or is of invalid
        type, return default value. If there is no default value,
        None is returned
        """
        default_value = self._defaults.get(option)
        if default_value is None:
            Log.warning(
                'No default value for %s in %s', option, self._section_name)

        get_function = self._type_function(default_value)
        try:
            value = get_function(option)
        except ValueError as e:
            value = None
            Log.warning(
                'Invalid configuration value "%s" for %s in %s: %s',
                self._section.get(option), option, self._section_name, e)

        if value is None and default_value is None:
            # BUGFIX: the original mixed %-style placeholders with
            # str.format(), so the option name was never interpolated.
            raise ValueError(
                'No valid configuration value or default value was '
                'found for {} in {}'.format(option, self._section_name))
        elif value is None:
            return default_value
        else:
            return value

    def set(self, option, value):
        """ Store ``value`` under ``option`` and persist the whole config.

        Lists and tuples are serialized as comma separated strings;
        everything else is stored via str().
        """
        if type(value) in (list, tuple):
            value = ','.join(str(item) for item in value)
        else:
            value = str(value)
        self._section[option] = value
        # Immediately save the configuration
        self.save()

    def save(self):
        """ Persist changes via the save_function given at construction. """
        self._save_function()
class CoreConfig(object):
    """ Class holding configuration to all systems and tasks """

    def __init__(self):
        self._conf_path = os.path.join(CONFIG_DIR, 'gtg.conf')
        self._conf = open_config_file(self._conf_path)
        self._task_conf_path = os.path.join(CONFIG_DIR, 'tasks.conf')
        self._task_conf = open_config_file(self._task_conf_path)

    def save_gtg_config(self):
        """ Write the system configuration back to disk. """
        # BUGFIX: write through a context manager so the file handle is
        # closed (and buffers flushed) deterministically; the original
        # leaked the handle returned by open().
        with open(self._conf_path, 'w') as conf_file:
            self._conf.write(conf_file)

    def save_task_config(self):
        """ Write the per-task configuration back to disk. """
        with open(self._task_conf_path, 'w') as task_conf_file:
            self._task_conf.write(task_conf_file)

    def get_subconfig(self, name):
        """ Returns configuration object for special section of config """
        if name not in self._conf:
            self._conf.add_section(name)
        defaults = DEFAULTS.get(name, dict())
        return SectionConfig(
            name, self._conf[name], defaults, self.save_gtg_config)

    def get_task_config(self, task_id):
        """ Returns the configuration section for one task (created lazily). """
        if task_id not in self._task_conf:
            self._task_conf.add_section(task_id)
        return SectionConfig(
            'Task {}'.format(task_id),
            self._task_conf[task_id],
            DEFAULTS['task'],
            self.save_task_config)
| gpl-3.0 |
mikekestemont/keras | keras/preprocessing/sequence.py | 76 | 4441 | from __future__ import absolute_import
# -*- coding: utf-8 -*-
import numpy as np
import random
from six.moves import range
def pad_sequences(sequences, maxlen=None, dtype='int32', padding='pre', truncating='pre', value=0.):
    """
    Pad each sequence to the same length:
    the length of the longest sequence.

    If maxlen is provided, any sequence longer
    than maxlen is truncated to maxlen. Truncation happens off either the
    beginning (default) or the end of the sequence.

    Supports post-padding and pre-padding (default).
    """
    lengths = [len(s) for s in sequences]

    nb_samples = len(sequences)
    if maxlen is None:
        maxlen = np.max(lengths)

    # Fill with the pad value first, then overwrite the (truncated) data.
    x = (np.ones((nb_samples, maxlen)) * value).astype(dtype)
    for idx, s in enumerate(sequences):
        if truncating == 'pre':
            trunc = s[-maxlen:]
        elif truncating == 'post':
            trunc = s[:maxlen]
        else:
            # BUGFIX: report the offending *truncating* argument; the
            # original interpolated ``padding`` here by mistake.
            raise ValueError("Truncating type '%s' not understood" % truncating)

        if padding == 'post':
            x[idx, :len(trunc)] = trunc
        elif padding == 'pre':
            x[idx, -len(trunc):] = trunc
        else:
            raise ValueError("Padding type '%s' not understood" % padding)
    return x
def make_sampling_table(size, sampling_factor=1e-5):
    '''
    Build an array whose i-th entry is the probability that a word of rank i
    should be sampled, following the word2vec subsampling formula:

        p(word) = min(1, sqrt(word.frequency/sampling_factor) / (word.frequency/sampling_factor))

    Word frequencies are assumed to follow Zipf's law (s=1), which gives the
    numerical approximation:

        frequency(rank) ~ 1/(rank * (log(rank) + gamma) + 1/2 - 1/(12*rank))

    where gamma is the Euler-Mascheroni constant.
    '''
    euler_gamma = 0.577
    ranks = np.arange(size)
    # Rank 0 would hit log(0); treat it like rank 1.
    ranks[0] = 1
    zipf_inv_freq = ranks * (np.log(ranks) + euler_gamma) + 0.5 - 1. / (12. * ranks)
    scaled = sampling_factor * zipf_inv_freq
    return np.minimum(1., scaled / np.sqrt(scaled))
def skipgrams(sequence, vocabulary_size,
              window_size=4, negative_samples=1., shuffle=True,
              categorical=False, sampling_table=None):
    '''
    Take a sequence (list of indexes of words),
    returns couples of [word_index, other_word index] and labels (1s or 0s),
    where label = 1 if 'other_word' belongs to the context of 'word',
    and label=0 if 'other_word' is randomly sampled

    @param vocabulary_size: int. maximum possible word index + 1
    @param window_size: int. actually half-window. The window of a word wi will be [i-window_size, i+window_size+1]
    @param negative_samples: float >= 0. 0 for no negative (=random) samples. 1 for same number as positive samples. etc.
    @param categorical: bool. if False, labels will be integers (eg. [0, 1, 1 .. ]),
        if True labels will be categorical eg. [[1,0],[0,1],[0,1] .. ]

    Note: by convention, index 0 in the vocabulary is a non-word and will be skipped.
    '''
    couples = []
    labels = []
    for i, wi in enumerate(sequence):
        if not wi:
            continue
        if sampling_table is not None:
            if sampling_table[wi] < random.random():
                continue

        window_start = max(0, i - window_size)
        window_end = min(len(sequence), i + window_size + 1)
        for j in range(window_start, window_end):
            if j != i:
                wj = sequence[j]
                if not wj:
                    continue
                couples.append([wi, wj])
                if categorical:
                    labels.append([0, 1])
                else:
                    labels.append(1)

    if negative_samples > 0:
        nb_negative_samples = int(len(labels) * negative_samples)
        words = [c[0] for c in couples]
        random.shuffle(words)

        couples += [[words[i % len(words)], random.randint(1, vocabulary_size - 1)] for i in range(nb_negative_samples)]
        if categorical:
            labels += [[1, 0]] * nb_negative_samples
        else:
            labels += [0] * nb_negative_samples

    if shuffle:
        # BUGFIX: random.randint requires integer bounds on Python 3; the
        # original passed the float literal 10e6 and raised ValueError.
        seed = random.randint(0, int(10e6))
        # Shuffling twice with the same seed keeps couples/labels aligned.
        random.seed(seed)
        random.shuffle(couples)
        random.seed(seed)
        random.shuffle(labels)

    return couples, labels
| mit |
adrgerez/ardublockly | package/build_pyinstaller.py | 1 | 9203 | #!/usr/bin/env python2
# -*- coding: utf-8 -*- #
#
# Builds the Ardublockly Python portion of the app for Linux or OS X.
#
# Copyright (c) 2015 carlosperate https://github.com/carlosperate/
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# IMPORTANT: This script is designed to be located one directory level under the
# project root folder.
#
# This script file uses PyInstaller to create a self contained executable
# build of the Ardublockly application.
# It will remove the build folders left from PyInstaller and move the folder
# with the executable application into the project root folder.
#
# Due to all the debugging steps required to get a CI serve running properly
# this script ended up being quite verbose. In might be updated in the future
# to include a -v flag to select a verbose mode.
from __future__ import unicode_literals, print_function
import os
import sys
import shutil
import platform
import subprocess
from glob import glob
# Name of the COLLECT bundle defined in the PyInstaller spec file.
spec_coll_name = "server"

# Folder (inside the project root) that receives the final executable.
# On OS X it must be a .app bundle so Finder treats it as an application.
if platform.system() == "Darwin":
    exec_folder = "arduexec.app"
else:
    exec_folder = "arduexec"
py_exec_folder = os.path.join(exec_folder, "server")

# Prefixes used to tag this script's console output.
script_tag = "[Ardublockly build] "
script_tab = "    "

# The project_root_dir depends on the location of this file, so it cannot be
# moved without updating this line
project_root_dir = \
    os.path.dirname(  # going up 1 level
        os.path.dirname(os.path.realpath(__file__)))  # folder dir of this

# verbose_print = print if verbose else lambda *a, **k: None
def remove_directory(dir_to_remove):
    """Recursively delete ``dir_to_remove``, logging either outcome."""
    if not os.path.exists(dir_to_remove):
        print(script_tab + "Directory %s was not found." % dir_to_remove)
        return
    print(script_tab + "Removing directory %s" % dir_to_remove)
    shutil.rmtree(dir_to_remove)
def get_os():
    """
    Gets the OS to based on the command line argument of the platform info.
    Only possibilities are: "windows", "mac", "linux"
    """
    valid_os = ["windows", "linux", "mac"]

    # A valid first command line argument overrides platform detection.
    print(script_tab + "Checking for command line argument indicated OS:")
    if len(sys.argv) > 1:
        if sys.argv[1] in valid_os:
            # Take the first argument and use it as the os
            print(script_tab + "Valid command line argument found: %s" %
                  sys.argv[1])
            return "%s" % sys.argv[1]
        else:
            # Invalid argument: fall through to platform detection below.
            print(script_tab + "Invalid command line argument found: %s\n" %
                  sys.argv[1] + script_tab + "Options available: %s" % valid_os)

    print(script_tab + "Valid command line arg not found, checking system.")

    os_found = platform.system()
    if os_found == "Windows":
        # Windows builds use a different script; abort with a message.
        raise SystemExit(script_tab + "OS found is: %s\n" % valid_os[0] +
                         "Exit: This script is not design to run on Windows.")
    elif os_found == "Linux":
        print(script_tab + "OS found is: %s" % valid_os[1])
        return valid_os[1]
    elif os_found == "Darwin":
        print(script_tab + "OS found is: %s" % valid_os[2])
        return valid_os[2]
    else:
        raise SystemExit("Exit: OS data found is invalid '%s'" % os_found)
def remove_pyinstaller_temps():
    """Delete the 'dist' and 'build' folders PyInstaller leaves behind."""
    working_dir = os.getcwd()
    for leftover in ("dist", "build"):
        remove_directory(os.path.join(working_dir, leftover))
def pyinstaller_build():
    """
    Launch PyInstaller in a subprocess using the spec file from the package
    folder, wait for it, and report whether it exited cleanly.

    :return: Boolean indicating the success state of the operation.
    """
    command = [
        "python",
        "%s" % os.path.join("package", "pyinstaller", "pyinstaller.py"),
        "%s" % os.path.join("package", "pyinstaller.spec")]
    print(script_tab + "Command: %s" % command)

    builder = subprocess.Popen(command)
    builder.communicate()

    if builder.returncode != 0:
        print(script_tab + "ERROR: PyInstaller returned with exit code: %s" %
              builder.returncode)
        return False
    return True
def move_executable_folder():
    """
    Move the PyInstaller output folder from dist into the project root.

    :return: Boolean indicating the success state of the operation.
    """
    source_dir = os.path.join(project_root_dir, "dist", spec_coll_name)
    if not os.path.exists(source_dir):
        print(script_tab + "ERROR: PyInstaller executable output folder '%s' " %
              source_dir + "not found!")
        return False

    destination_dir = os.path.join(project_root_dir, py_exec_folder)
    print(script_tab + "Moving exec files from %s \n" % source_dir +
          script_tab + "to %s" % destination_dir)
    shutil.move(source_dir, destination_dir)
    return True
def copy_data_files(os_type):
    """ At the moment there are no additional data files required to copy """
    # Kept as a no-op hook so build_ardublockly() has a stable step where
    # per-OS data copying can be added later; ``os_type`` is currently unused.
    pass
def create_shell_file(os_type):
    """
    Creates a shell script file into the project root to be able to easily
    launch the Ardublockly application.

    The Mac OS X build runs directly from clicking the .app folder, so it no
    longer needs a shell script.
    """
    shell_text = ""
    shell_location = ""
    # The script depends on platform
    if os_type == "mac":
        # There is no need for a shell file in Mac OS X
        print(script_tab + "There is no need to create shell file in Mac OS X.")
        return
    elif os_type == "linux":
        shell_text = '#!/bin/bash\n' \
                     'DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )\n' \
                     'echo "[Shell Launch Script] Executing from: $DIR"\n' \
                     './%s' % os.path.join(exec_folder, "ardublockly")
        shell_location = os.path.join(
            project_root_dir, "ardublockly_run.sh")
    else:
        # No other OS expected, so just return. This should never happen
        return

    try:
        print(script_tab + "Creating shell file into %s" % shell_location)
        # BUGFIX: write through a context manager so the file handle is
        # closed even when write() raises; the original open/write/close
        # sequence leaked the handle on an exception.
        with open(shell_location, "w") as bash_file:
            bash_file.write(shell_text)
    except Exception as e:
        print(script_tab + "%s" % e)
        print(script_tab + "ERROR: Shell file to launch the Ardublockly "
                           "application could not be created.")

    # Make shell script executable by launching a subprocess
    process_args = ["chmod", "+x", "%s" % shell_location]
    print(script_tab + "Command to make executable: %s" % process_args)
    try:
        chmod_process = subprocess.Popen(process_args)
        chmod_process.communicate()
    except Exception as e:
        print(script_tab + "%s" % e)
        print(script_tab + "ERROR: Could not make Shell file executable.")
def build_ardublockly():
    """Run the full build: PyInstaller, move output, data files, launcher."""
    print(script_tag + "Build procedure started.")
    print(script_tag + "Checking for OS.")
    os_type = get_os()
    print(script_tag + "Building Ardublockly for %s." % os_type)
    print(script_tag + "Project directory is:     %s" % project_root_dir)
    print(script_tag + "Script working directory: %s" % os.getcwd())

    # Start from a clean slate: drop any dist/build leftovers.
    print(script_tag + "Removing PyInstaller old temp directories.")
    remove_pyinstaller_temps()

    print(script_tag + "Running PyInstaller process.")
    success = pyinstaller_build()
    if not success:
        # Clean up before aborting so a failed run leaves no residue.
        print(script_tab + "Removing PyInstaller recent temp directories.")
        remove_pyinstaller_temps()
        raise SystemExit(script_tab + "Exiting as there was an error in the "
                                      "PyInstaller execution.")

    print(script_tag + "Removing old ardublockly executable directory.")
    remove_directory(os.path.join(project_root_dir, py_exec_folder))

    print(script_tag + "Moving executable folder to project root.")
    success = move_executable_folder()
    if not success:
        print(script_tab + "Removing PyInstaller recent temp directories.")
        remove_pyinstaller_temps()
        raise SystemExit(script_tab + "Exiting now as there was an error in "
                                      "the PyInstaller execution.")

    print(script_tag + "Coping data files into executable directory.")
    copy_data_files(os_type)

    print(script_tag + "Removing PyInstaller recent temp directories.")
    remove_pyinstaller_temps()

    print(script_tag + "Creating shell file to easily execute Ardublockly.")
    create_shell_file(os_type)


if __name__ == "__main__":
    build_ardublockly()
| apache-2.0 |
qewerty/moto.old | tools/scons/engine/SCons/Variables/PathVariable.py | 5 | 5658 | """SCons.Variables.PathVariable
This file defines an option type for SCons implementing path settings.
To be used whenever a user-specified path override should be allowed.
Arguments to PathVariable are:
option-name = name of this option on the command line (e.g. "prefix")
option-help = help string for option
option-dflt = default value for this option
validator = [optional] validator for option value. Predefined
validators are:
PathAccept -- accepts any path setting; no validation
PathIsDir -- path must be an existing directory
PathIsDirCreate -- path must be a dir; will create
PathIsFile -- path must be a file
PathExists -- path must exist (any type) [default]
The validator is a function that is called and which
should return True or False to indicate if the path
is valid. The arguments to the validator function
are: (key, val, env). The key is the name of the
option, the val is the path specified for the option,
and the env is the env to which the Options have been
added.
Usage example:
Examples:
prefix=/usr/local
opts = Variables()
opts = Variables()
opts.Add(PathVariable('qtdir',
'where the root of Qt is installed',
qtdir, PathIsDir))
opts.Add(PathVariable('qt_includes',
'where the Qt includes are installed',
'$qtdir/includes', PathIsDirCreate))
opts.Add(PathVariable('qt_libraries',
'where the Qt library is installed',
'$qtdir/lib'))
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# Release metadata stamped by the SCons build, and the module's public API.
__revision__ = "src/engine/SCons/Variables/PathVariable.py 4720 2010/03/24 03:14:11 jars"

__all__ = ['PathVariable',]
import os
import os.path
import SCons.Errors
class _PathVariableClass:
    """Factory for path-typed SCons Variables, exposing the predefined
    path validators as methods; instantiated once below as ``PathVariable``."""

    def PathAccept(self, key, val, env):
        """Accepts any path, no checking done."""
        pass

    def PathIsDir(self, key, val, env):
        """Validator to check if Path is a directory."""
        if not os.path.isdir(val):
            # Distinguish "it's a file" from "it doesn't exist" in the error.
            if os.path.isfile(val):
                m = 'Directory path for option %s is a file: %s'
            else:
                m = 'Directory path for option %s does not exist: %s'
            raise SCons.Errors.UserError(m % (key, val))

    def PathIsDirCreate(self, key, val, env):
        """Validator to check if Path is a directory,
           creating it if it does not exist."""
        if os.path.isfile(val):
            m = 'Path for option %s is a file, not a directory: %s'
            raise SCons.Errors.UserError(m % (key, val))
        if not os.path.isdir(val):
            os.makedirs(val)

    def PathIsFile(self, key, val, env):
        """validator to check if Path is a file"""
        if not os.path.isfile(val):
            if os.path.isdir(val):
                m = 'File path for option %s is a directory: %s'
            else:
                m = 'File path for option %s does not exist: %s'
            raise SCons.Errors.UserError(m % (key, val))

    def PathExists(self, key, val, env):
        """validator to check if Path exists"""
        if not os.path.exists(val):
            m = 'Path for option %s does not exist: %s'
            raise SCons.Errors.UserError(m % (key, val))

    def __call__(self, key, help, default, validator=None):
        # NB: searchfunc is currently undocumented and unsupported
        """
        The input parameters describe a 'path list' option, thus they
        are returned with the correct converter and validator appended. The
        result is usable for input to opts.Add() .

        The 'default' option specifies the default path to use if the
        user does not specify an override with this option.

        validator is a validator, see this file for examples
        """
        if validator is None:
            validator = self.PathExists
        # NOTE(review): SCons.Util is used here but only SCons.Errors is
        # imported at the top; presumably the SCons package loads Util as a
        # side effect -- confirm before relying on this module standalone.
        if SCons.Util.is_List(key) or SCons.Util.is_Tuple(key):
            # For a list/tuple key, the first entry names the path in help.
            return (key, '%s ( /path/to/%s )' % (help, key[0]), default,
                    validator, None)
        else:
            return (key, '%s ( /path/to/%s )' % (help, key), default,
                    validator, None)

PathVariable = _PathVariableClass()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| gpl-2.0 |
tedelhourani/ansible | lib/ansible/modules/cloud/google/gcp_url_map.py | 99 | 17159 | #!/usr/bin/python
# Copyright 2017 Google Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: gcp_url_map
version_added: "2.4"
short_description: Create, Update or Destroy a Url_Map.
description:
  - Create, Update or Destroy a Url_Map. See
U(https://cloud.google.com/compute/docs/load-balancing/http/url-map) for an overview.
More details on the Url_Map API can be found at
U(https://cloud.google.com/compute/docs/reference/latest/urlMaps#resource).
requirements:
- "python >= 2.6"
- "google-api-python-client >= 1.6.2"
- "google-auth >= 0.9.0"
- "google-auth-httplib2 >= 0.0.2"
notes:
- Only supports global Backend Services.
- Url_Map tests are not currently supported.
author:
- "Tom Melendez (@supertom) <tom@supertom.com>"
options:
url_map_name:
description:
- Name of the Url_Map.
required: true
default_service:
description:
- Default Backend Service if no host rules match.
required: true
host_rules:
description:
- The list of HostRules to use against the URL. Contains
a list of hosts and an associated path_matcher.
- The 'hosts' parameter is a list of host patterns to match. They
must be valid hostnames, except * will match any string of
([a-z0-9-.]*). In that case, * must be the first character
and must be followed in the pattern by either - or ..
- The 'path_matcher' parameter is name of the PathMatcher to use
to match the path portion of the URL if the hostRule matches the URL's
host portion.
required: false
path_matchers:
description:
- The list of named PathMatchers to use against the URL. Contains
path_rules, which is a list of paths and an associated service. A
default_service can also be specified for each path_matcher.
- The 'name' parameter to which this path_matcher is referred by the
host_rule.
- The 'default_service' parameter is the name of the
BackendService resource. This will be used if none of the path_rules
defined by this path_matcher is matched by the URL's path portion.
- The 'path_rules' parameter is a list of dictionaries containing a
list of paths and a service to direct traffic to. Each path item must
start with / and the only place a * is allowed is at the end following
a /. The string fed to the path matcher does not include any text after
the first ? or #, and those chars are not allowed here.
required: false
'''
EXAMPLES = '''
- name: Create Minimal Url_Map
gcp_url_map:
service_account_email: "{{ service_account_email }}"
credentials_file: "{{ credentials_file }}"
project_id: "{{ project_id }}"
url_map_name: my-url_map
default_service: my-backend-service
state: present
- name: Create UrlMap with pathmatcher
gcp_url_map:
service_account_email: "{{ service_account_email }}"
credentials_file: "{{ credentials_file }}"
project_id: "{{ project_id }}"
url_map_name: my-url-map-pm
default_service: default-backend-service
path_matchers:
- name: 'path-matcher-one'
description: 'path matcher one'
default_service: 'bes-pathmatcher-one-default'
path_rules:
- service: 'my-one-bes'
paths:
- '/data'
- '/aboutus'
host_rules:
- hosts:
- '*.'
path_matcher: 'path-matcher-one'
state: "present"
'''
RETURN = '''
host_rules:
description: List of HostRules.
returned: If specified.
type: dict
sample: [ { hosts: ["*."], "path_matcher": "my-pm" } ]
path_matchers:
description: The list of named PathMatchers to use against the URL.
returned: If specified.
type: dict
sample: [ { "name": "my-pm", "path_rules": [ { "paths": [ "/data" ] } ], "service": "my-service" } ]
state:
description: state of the Url_Map
returned: Always.
type: str
sample: present
updated_url_map:
description: True if the url_map has been updated. Will not appear on
initial url_map creation.
returned: if the url_map has been updated.
type: bool
sample: true
url_map_name:
description: Name of the Url_Map
returned: Always
type: str
sample: my-url-map
url_map:
description: GCP Url_Map dictionary
returned: Always. Refer to GCP documentation for detailed field descriptions.
type: dict
sample: { "name": "my-url-map", "hostRules": [...], "pathMatchers": [...] }
'''
try:
from ast import literal_eval
HAS_PYTHON26 = True
except ImportError:
HAS_PYTHON26 = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.gcp import check_params, get_google_api_client, GCPUtils
from ansible.module_utils.six import string_types
USER_AGENT_PRODUCT = 'ansible-url_map'
USER_AGENT_VERSION = '0.0.1'
def _validate_params(params):
    """Validate url_map params.

    Top-level fields are checked via check_params(); the nested
    'path_matchers' and 'host_rules' structures are validated by their
    dedicated helpers.  Validation errors propagate to the caller.

    :param params: Ansible dictionary containing configuration.
    :type params: ``dict``
    :return: (True, '') on success; raises ValueError otherwise.
    :rtype: ``tuple`` or `class:ValueError`
    """
    fields = [
        {'name': 'default_service', 'type': str, 'required': True},
        {'name': 'host_rules', 'type': list},
        {'name': 'path_matchers', 'type': list},
    ]
    # The original code wrapped the calls below in a bare
    # ``try/except: raise`` which only re-raised; it was removed as a
    # no-op (bare except is also flagged by linters, E722).
    check_params(params, fields)
    if params.get('path_matchers') is not None:
        _validate_path_matcher_params(params['path_matchers'])
    if params.get('host_rules') is not None:
        _validate_host_rules_params(params['host_rules'])
    return (True, '')
def _validate_path_matcher_params(path_matchers):
    """Validate configuration for path_matchers.

    :param path_matchers: list of path_matcher dictionaries (the original
                          docstring said ``dict``; the code treats it as a
                          list).
    :type path_matchers: ``list`` of ``dict``
    :return: (True, '') on success; raises ValueError otherwise.
    :rtype: ``tuple`` or `class:ValueError`
    """
    fields = [
        {'name': 'name', 'type': str, 'required': True},
        {'name': 'default_service', 'type': str, 'required': True},
        {'name': 'path_rules', 'type': list, 'required': True},
        {'name': 'max_rate', 'type': int},
        {'name': 'max_rate_per_instance', 'type': float},
    ]
    pr_fields = [
        {'name': 'service', 'type': str, 'required': True},
        {'name': 'paths', 'type': list, 'required': True},
    ]
    if not path_matchers:
        raise ValueError(('path_matchers should be a list. %s (%s) provided'
                          % (path_matchers, type(path_matchers))))
    # The no-op bare ``try/except: raise`` wrapper was removed; errors
    # propagate unchanged.
    for pm in path_matchers:
        check_params(pm, fields)
        for pr in pm['path_rules']:
            check_params(pr, pr_fields)
            for path in pr['paths']:
                # Every GCP path rule must be an absolute path.
                if not path.startswith('/'):
                    raise ValueError("path for %s must start with /" % (
                        pm['name']))
    return (True, '')
def _validate_host_rules_params(host_rules):
    """Validate configuration for host_rules.

    :param host_rules: list of host_rule dictionaries.
    :type host_rules: ``list`` of ``dict``
    :return: (True, '') on success; raises ValueError otherwise.
    :rtype: ``tuple`` or `class:ValueError`
    """
    fields = [
        {'name': 'path_matcher', 'type': str, 'required': True},
    ]
    if not host_rules:
        raise ValueError('host_rules should be a list.')
    # The no-op bare ``try/except: raise`` wrapper was removed; errors
    # propagate unchanged.
    for hr in host_rules:
        check_params(hr, fields)
        for host in hr['hosts']:
            if not isinstance(host, string_types):
                raise ValueError("host in hostrules must be a string")
            if '*' in host:
                if host.index('*') != 0:
                    raise ValueError("wildcard must be first char in host, %s" % (
                        host))
                # BUGFIX: a bare '*' used to raise an IndexError at
                # host[1]; report it as the same validation failure as any
                # other invalid wildcard suffix instead.
                if len(host) < 2 or host[1] not in ['.', '-', ]:
                    raise ValueError("wildcard must be followed by a '.' or '-', %s" % (
                        host))
    return (True, '')
def _build_path_matchers(path_matcher_list, project_id):
    """
    Reformat services in path matchers list.
    Specifically, builds out URLs.
    :param path_matcher_list: PathMatcher dictionaries (camelCase form)
                              whose service names get expanded into full
                              backend-service URLs.
    :type path_matcher_list: ``list`` of ``dict``
    :param project_id: The GCP project ID.
    :type project_id: ``str``
    :return: list suitable for submission to GCP
             UrlMap API Path Matchers list.
    :rtype ``list`` of ``dict``
    """
    url = ''
    if project_id:
        url = GCPUtils.build_googleapi_url(project_id)
    # NOTE: the input list is mutated in place and also returned.
    for pm in path_matcher_list:
        if 'defaultService' in pm:
            pm['defaultService'] = '%s/global/backendServices/%s' % (url,
                                                                     pm['defaultService'])
        if 'pathRules' in pm:
            for rule in pm['pathRules']:
                if 'service' in rule:
                    rule['service'] = '%s/global/backendServices/%s' % (url,
                                                                        rule['service'])
    return path_matcher_list
def _build_url_map_dict(params, project_id=None):
    """Translate Ansible params into a GCP UrlMap API dictionary.

    The default service name is expanded into a full backend-service URL
    and nested path matchers are rewritten via _build_path_matchers().

    :param params: Params from AnsibleModule object
    :type params: ``dict``
    :param project_id: The GCP project ID.
    :type project_id: ``str``
    :return: dictionary suitable for submission to GCP UrlMap API.
    :rtype ``dict``
    """
    base_url = GCPUtils.build_googleapi_url(project_id) if project_id else ''
    gcp_dict = GCPUtils.params_to_gcp_dict(params, 'url_map_name')
    if 'defaultService' in gcp_dict:
        gcp_dict['defaultService'] = '%s/global/backendServices/%s' % (
            base_url, gcp_dict['defaultService'])
    if 'pathMatchers' in gcp_dict:
        gcp_dict['pathMatchers'] = _build_path_matchers(
            gcp_dict['pathMatchers'], project_id)
    return gcp_dict
def get_url_map(client, name, project_id=None):
    """
    Get a Url_Map from GCP.
    :param client: An initialized GCE Compute Discovery resource.
    :type client: :class: `googleapiclient.discovery.Resource`
    :param name: Name of the Url Map.
    :type name: ``str``
    :param project_id: The GCP project ID.
    :type project_id: ``str``
    :return: A dict resp from the respective GCP 'get' request.
             raise_404=False suppresses 404 errors (presumably yielding a
             falsy result for a missing map — see GCPUtils).
    :rtype: ``dict``
    """
    # The no-op bare ``try/except: raise`` wrapper was removed; API
    # errors propagate to the caller unchanged.
    req = client.urlMaps().get(project=project_id, urlMap=name)
    return GCPUtils.execute_api_client_req(req, raise_404=False)
def create_url_map(client, params, project_id):
    """
    Create a new Url_Map.
    :param client: An initialized GCE Compute Discovery resource.
    :type client: :class: `googleapiclient.discovery.Resource`
    :param params: Dictionary of arguments from AnsibleModule.
    :type params: ``dict``
    :param project_id: The GCP project ID.
    :type project_id: ``str``
    :return: Tuple with changed status and response dict
    :rtype: ``tuple`` in the format of (bool, dict)
    """
    gcp_dict = _build_url_map_dict(params, project_id)
    # The no-op bare ``try/except: raise`` wrapper was removed; API
    # errors propagate to the caller unchanged.
    req = client.urlMaps().insert(project=project_id, body=gcp_dict)
    return_data = GCPUtils.execute_api_client_req(req, client, raw=False)
    if not return_data:
        # Some insert responses come back empty; re-read the resource so
        # the caller always receives the created UrlMap.
        return_data = get_url_map(client,
                                  name=params['url_map_name'],
                                  project_id=project_id)
    return (True, return_data)
def delete_url_map(client, name, project_id):
    """
    Delete a Url_Map.
    :param client: An initialized GCE Compute Discovery resource.
    :type client: :class: `googleapiclient.discovery.Resource`
    :param name: Name of the Url Map.
    :type name: ``str``
    :param project_id: The GCP project ID.
    :type project_id: ``str``
    :return: Tuple with changed status and response dict
    :rtype: ``tuple`` in the format of (bool, dict)
    """
    # The no-op bare ``try/except: raise`` wrapper was removed; API
    # errors propagate to the caller unchanged.
    req = client.urlMaps().delete(project=project_id, urlMap=name)
    return_data = GCPUtils.execute_api_client_req(req, client)
    return (True, return_data)
def update_url_map(client, url_map, params, name, project_id):
    """
    Update a Url_Map.
    If the url_map has not changed, the update will not occur.
    :param client: An initialized GCE Compute Discovery resource.
    :type client: :class: `googleapiclient.discovery.Resource`
    :param url_map: Current UrlMap resource as returned by get_url_map()
                    (the original docstring mislabelled this as a name).
    :type url_map: ``dict``
    :param params: Dictionary of arguments from AnsibleModule.
    :type params: ``dict``
    :param name: Name of the Url Map.
    :type name: ``str``
    :param project_id: The GCP project ID.
    :type project_id: ``str``
    :return: Tuple with changed status and response dict (or a message
             string when no update is necessary)
    :rtype: ``tuple`` in the format of (bool, dict)
    """
    gcp_dict = _build_url_map_dict(params, project_id)
    if GCPUtils.are_params_equal(url_map, gcp_dict):
        return (False, 'no update necessary')
    # The fingerprint from the current resource is required by the API
    # for optimistic locking of the update.
    gcp_dict['fingerprint'] = url_map['fingerprint']
    # The no-op bare ``try/except: raise`` wrapper was removed; API
    # errors propagate to the caller unchanged.
    req = client.urlMaps().update(project=project_id,
                                  urlMap=name, body=gcp_dict)
    return_data = GCPUtils.execute_api_client_req(req, client=client, raw=False)
    return (True, return_data)
def main():
    """Ansible entry point: create, update or delete a GCP UrlMap."""
    module = AnsibleModule(argument_spec=dict(
        url_map_name=dict(required=True),
        state=dict(choices=['absent', 'present'], default='present'),
        default_service=dict(required=True),
        path_matchers=dict(type='list', required=False),
        host_rules=dict(type='list', required=False),
        service_account_email=dict(),
        service_account_permissions=dict(type='list'),
        pem_file=dict(),
        credentials_file=dict(),
        project_id=dict(), ), required_together=[
        ['path_matchers', 'host_rules'], ])
    client, conn_params = get_google_api_client(module, 'compute', user_agent_product=USER_AGENT_PRODUCT,
                                                user_agent_version=USER_AGENT_VERSION)
    # Collect only the parameters that were actually supplied.
    params = {}
    params['state'] = module.params.get('state')
    params['url_map_name'] = module.params.get('url_map_name')
    params['default_service'] = module.params.get('default_service')
    if module.params.get('path_matchers'):
        params['path_matchers'] = module.params.get('path_matchers')
    if module.params.get('host_rules'):
        params['host_rules'] = module.params.get('host_rules')
    try:
        _validate_params(params)
    except Exception as e:
        # NOTE(review): e.message exists on Python 2 only; fine for the
        # Python 2 runtimes this module targets — confirm before porting.
        module.fail_json(msg=e.message, changed=False)
    changed = False
    json_output = {'state': params['state']}
    # Fetch the current resource (falsy when the map does not exist).
    url_map = get_url_map(client,
                          name=params['url_map_name'],
                          project_id=conn_params['project_id'])
    if not url_map:
        if params['state'] == 'absent':
            # Doesn't exist in GCE, and state==absent.
            changed = False
            module.fail_json(
                msg="Cannot delete unknown url_map: %s" %
                (params['url_map_name']))
        else:
            # Create
            changed, json_output['url_map'] = create_url_map(client,
                                                             params=params,
                                                             project_id=conn_params['project_id'])
    elif params['state'] == 'absent':
        # Delete
        changed, json_output['url_map'] = delete_url_map(client,
                                                         name=params['url_map_name'],
                                                         project_id=conn_params['project_id'])
    else:
        # Exists and state==present: update only if the config differs.
        changed, json_output['url_map'] = update_url_map(client,
                                                         url_map=url_map,
                                                         params=params,
                                                         name=params['url_map_name'],
                                                         project_id=conn_params['project_id'])
    json_output['updated_url_map'] = changed
    json_output['changed'] = changed
    json_output.update(params)
    module.exit_json(**json_output)
# Standard Ansible module entry point.
if __name__ == '__main__':
    main()
| gpl-3.0 |
iulian787/spack | var/spack/repos/builtin/packages/cctools/package.py | 2 | 4409 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Cctools(AutotoolsPackage):
    """The Cooperative Computing Tools (cctools) enable large scale
    distributed computations to harness hundreds to thousands of
    machines from clusters, clouds, and grids.
    """

    homepage = "https://cctools.readthedocs.io"
    url = "https://ccl.cse.nd.edu/software/files/cctools-7.1.5-source.tar.gz"

    version('7.1.7', sha256='63cbfabe52591d41a1b27040bf27700d2a11b2f30cb2e25132e0016fb1aade03')
    version('7.1.5', sha256='c01415fd47a1d9626b6c556e0dc0a6b0d3cd67224fa060cabd44ff78eede1d8a')
    version('7.1.3', sha256='b937878ab429dda31bc692e5d9ffb402b9eb44bb674c07a934bb769cee4165ba')
    version('7.1.2', sha256='ca871e9fe245d047d4c701271cf2b868e6e3a170e8834c1887157ed855985131')
    version('7.1.0', sha256='84748245db10ff26c0c0a7b9fd3ec20fbbb849dd4aadc5e8531fd1671abe7a81')
    version('7.0.18', sha256='5b6f3c87ae68dd247534a5c073eb68cb1a60176a7f04d82699fbc05e649a91c2')
    version('6.1.1', sha256='97f073350c970d6157f80891b3bf6d4f3eedb5f031fea386dc33e22f22b8af9d')

    depends_on('openssl')
    depends_on('perl+shared', type=('build', 'run'))
    depends_on('python', type=('build', 'run'))
    depends_on('readline')
    depends_on('gettext')  # Corrects python linking of -lintl flag.
    depends_on('swig')
    # depends_on('xrootd')
    depends_on('zlib')

    patch('arm.patch', when='target=aarch64:')
    patch('cctools_7.0.18.python.patch', when='@7.0.18')
    patch('cctools_6.1.1.python.patch', when='@6.1.1')

    # Generally SYS_foo is defined to __NR_foo (sys/syscall.h) which
    # is then defined to a syscall number (asm/unistd_64.h). Certain
    # CentOS systems have SYS_memfd_create defined to
    # __NR_memfd_create but are missing the second definition.
    # This is a belt and suspenders solution to the problem.
    def patch(self):
        """Patch the source tree before building.

        - Guard the memfd_create code path against CentOS headers that
          define SYS_memfd_create without __NR_memfd_create (see the
          comment above).
        - Drop -fstack-protector-all from two Makefiles when compiling
          with the Fujitsu compiler (%fj), which rejects that flag.
        """
        before = '#if defined(__linux__) && defined(SYS_memfd_create)'
        after = '#if defined(__linux__) && defined(SYS_memfd_create) && defined(__NR_memfd_create)'  # noqa: E501
        f = 'dttools/src/memfdexe.c'
        # ignore_absent=False: fail loudly if the file moved upstream.
        kwargs = {'ignore_absent': False, 'backup': True, 'string': True}
        filter_file(before, after, f, **kwargs)
        if self.spec.satisfies('%fj'):
            makefiles = ['chirp/src/Makefile', 'grow/src/Makefile']
            for m in makefiles:
                filter_file('-fstack-protector-all', '', m)

    def configure_args(self):
        """Return the ./configure arguments.

        Pins interpreter and library paths to the Spack-provided
        installs and disables optional backends.  The configure flag
        naming for Python changed between cctools 6.x and 7.x, hence
        the version split below.
        """
        args = []
        # make sure we do not pick a python outside spack:
        if self.spec.satisfies('@6.1.1'):
            # 6.x spelling: --with-python-path means python2,
            # --with-python3-path means python3.
            if self.spec.satisfies('^python@3:'):
                args.extend([
                    '--with-python3-path', self.spec['python'].prefix,
                    '--with-python-path', 'no'
                ])
            elif self.spec.satisfies('^python@:2.9'):
                args.extend([
                    '--with-python-path', self.spec['python'].prefix,
                    '--with-python3-path', 'no'
                ])
            else:
                args.extend([
                    '--with-python-path', 'no',
                    '--with-python3-path', 'no'
                ])
        else:
            # versions 7 and above, where --with-python-path recognized the
            # python version:
            if self.spec.satisfies('^python@3:'):
                args.extend([
                    '--with-python-path', self.spec['python'].prefix,
                    '--with-python2-path', 'no'
                ])
            elif self.spec.satisfies('^python@:2.9'):
                args.extend([
                    '--with-python-path', self.spec['python'].prefix,
                    '--with-python3-path', 'no'
                ])
            else:
                args.extend([
                    '--with-python2-path', 'no',
                    '--with-python3-path', 'no'
                ])
        # disable these bits
        for p in ['mysql', 'xrootd']:
            args.append('--with-{0}-path=no'.format(p))
        # point these bits at the Spack installations
        for p in ['openssl', 'perl', 'readline', 'swig', 'zlib']:
            args.append('--with-{0}-path={1}'.format(p, self.spec[p].prefix))
        return args
| lgpl-2.1 |
mkudlej/usmqe-tests | usmqe/web/grafana/hosts/models.py | 2 | 1367 | """
Common page model for Grafana hosts page
"""
from usmqe.web.grafana.auxiliary.models import GenericChartModel, \
SingleStatModel, GenericDropDownListModel
class ClusterListModel(GenericDropDownListModel):
    """
    DropDown list of clusters
    """
    # Widget title; presumably matched against the page by the base model.
    _title = "Cluster Id"
class HostListModel(GenericDropDownListModel):
    """
    DropDown list of hosts
    """
    # Widget title; presumably matched against the page by the base model.
    _title = "Host Name"
class MemoryFreeModel(SingleStatModel):
    """
    Memory Free model
    """
    # Single-stat panel title used by the base model to find the widget.
    _title = "Memory Free"
class MemoryUsedModel(SingleStatModel):
    """
    Memory Used model
    """
    # Single-stat panel title used by the base model to find the widget.
    _title = "Memory Used"
class StorageFreeModel(SingleStatModel):
    """
    Storage Free model
    """
    # Single-stat panel title used by the base model to find the widget.
    _title = "Storage Free"
class StorageUsedModel(SingleStatModel):
    """
    Storage Used model
    """
    # Single-stat panel title used by the base model to find the widget.
    _title = "Storage Used"
class MemoryModel(GenericChartModel):
    """
    Memory Model
    """
    # Chart panel title used by the base model to find the widget.
    _title = "Memory"
class StorageModel(GenericChartModel):
    """
    Storage Model
    """
    # Chart panel title used by the base model to find the widget.
    _title = "Storage"
class SwapModel(GenericChartModel):
    """
    Swap Model
    """
    # Chart panel title used by the base model to find the widget.
    _title = "Swap"
class CPUModel(GenericChartModel):
    """
    CPU Model
    """
    # Chart panel title used by the base model to find the widget.
    _title = "CPU"
class ThroughputClusterNetworkModel(GenericChartModel):
    """
    Throughput - Cluster Network Model
    """
    # Chart panel title used by the base model to find the widget.
    _title = "Throughput - Cluster Network"
| gpl-3.0 |
marcusmueller/measurement_toolbox | gr-mtb/python/qa_task_frontend.py | 2 | 2935 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2014 Marcus Müller.
#
# This is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
import remote_agent
import task_frontend
import helpers
import benchmarking_task
from gnuradio import gr, gr_unittest
try:
import mtb_swig as mtb
except ImportError:
pass
import gc
import json
import numpy
import os
import tempfile
import time
try:
import cStringIO as StringIO
except ImportError:
import StringIO
from PyQt4 import QtGui
from PyQt4 import QtCore
class MyApplicationClass(QtGui.QApplication):
    """QApplication subclass that emits ``started`` right before entering
    the Qt event loop, so tests can attach setup work to loop start."""

    # Signal fired exactly once, immediately before the event loop runs.
    started = QtCore.pyqtSignal()

    def exec_(self):
        # Emit first so listeners have run by the time exec_() blocks.
        self.started.emit()
        return QtGui.QApplication.exec_()
class qa_task_frontend (gr_unittest.TestCase):
    """GUI test: TaskFrontend must load a task description JSON file."""

    def setUp(self):
        # Reference task description the frontend is expected to load.
        self.taskstring = ""
        self.task = []
        self.range_spec = (0,1,100)
        self.ref_task_grc = {
            "class_name":"class",
            "module_name":"module",
            "instruction":"run_grc",
            "attributes": {
                "value": {
                    "param_type": "LIN_RANGE",
                    "value": list(self.range_spec),
                    "value_type": "float64"
                },
                "length": {
                    "param_type": "LIST",
                    "value": [10,20,30],
                    "value_type": "int64"
                },
            },
            "sinks": [ "blocks_vector_sink_x_0" ]
        }
        # Embed the GRC flowgraph XML that lives next to this test file.
        self.xml_file = open(os.path.join(os.path.dirname(__file__), "extraction_test_topblock.grc"), "r")
        self.ref_task_grc["grcxml"] = self.xml_file.read()
        self.xml_file.close()
        # delete=False keeps the file on disk after close(); tearDown
        # removes it explicitly.
        self.jsonfile = tempfile.NamedTemporaryFile(suffix=".json", delete=False)
        self.jsonfilename = self.jsonfile.name
        # NOTE(review): NamedTemporaryFile defaults to 'w+b'; json.dump
        # writing str into it works on Python 2 only — revisit when porting.
        json.dump(self.ref_task_grc, self.jsonfile)
        self.jsonfile.close()
        self.qapp = MyApplicationClass([])

    def tearDown(self):
        # Remove the temporary JSON created in setUp.
        os.unlink(self.jsonfilename)

    def test_001_load_json_file(self):
        # Loading the JSON task file directly must not raise.
        self.my_ui = task_frontend.TaskFrontend()
        self.my_ui._load_json_file_direct(self.jsonfilename)
# GNU Radio unit-test entry point.
if __name__ == '__main__':
    gr_unittest.run(qa_task_frontend)#, "qa_task_frontend.xml")
| gpl-3.0 |
ted-gould/nova | nova/api/openstack/compute/legacy_v2/contrib/rescue.py | 59 | 3896 | # Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The rescue mode extension."""
from oslo_config import cfg
import webob
from webob import exc
from nova.api.openstack import common
from nova.api.openstack import extensions as exts
from nova.api.openstack import wsgi
from nova import compute
from nova import exception
from nova import utils
CONF = cfg.CONF
# Policy-enforcement hook for the 'compute:rescue' extension.
authorize = exts.extension_authorizer('compute', 'rescue')
class RescueController(wsgi.Controller):
    """WSGI controller exposing the 'rescue' and 'unrescue' server actions."""

    def __init__(self, ext_mgr, *args, **kwargs):
        super(RescueController, self).__init__(*args, **kwargs)
        self.compute_api = compute.API()
        self.ext_mgr = ext_mgr

    @wsgi.action('rescue')
    def _rescue(self, req, id, body):
        """Rescue an instance.

        Returns the admin password that was set (either caller-supplied
        via 'adminPass' or freshly generated).
        """
        context = req.environ["nova.context"]
        authorize(context)
        # Honour a caller-supplied admin password, otherwise generate one.
        if body['rescue'] and 'adminPass' in body['rescue']:
            password = body['rescue']['adminPass']
        else:
            password = utils.generate_password()
        instance = common.get_instance(self.compute_api, context, id)
        try:
            rescue_image_ref = None
            # A custom rescue image is only honoured when the optional
            # os-extended-rescue-with-image extension is loaded.
            if self.ext_mgr.is_loaded("os-extended-rescue-with-image"):
                if body['rescue'] and 'rescue_image_ref' in body['rescue']:
                    rescue_image_ref = body['rescue']['rescue_image_ref']
            self.compute_api.rescue(context, instance,
                rescue_password=password, rescue_image_ref=rescue_image_ref)
        # Map compute-layer failures onto the appropriate HTTP errors.
        except exception.InstanceIsLocked as e:
            raise exc.HTTPConflict(explanation=e.format_message())
        except exception.InstanceInvalidState as state_error:
            common.raise_http_conflict_for_instance_invalid_state(state_error,
                    'rescue', id)
        except exception.InvalidVolume as volume_error:
            raise exc.HTTPConflict(explanation=volume_error.format_message())
        except exception.InstanceNotRescuable as non_rescuable:
            raise exc.HTTPBadRequest(
                explanation=non_rescuable.format_message())
        return {'adminPass': password}

    @wsgi.action('unrescue')
    def _unrescue(self, req, id, body):
        """Unrescue an instance.

        Responds with 202 Accepted; the unrescue proceeds asynchronously.
        """
        context = req.environ["nova.context"]
        authorize(context)
        instance = common.get_instance(self.compute_api, context, id)
        try:
            self.compute_api.unrescue(context, instance)
        except exception.InstanceIsLocked as e:
            raise exc.HTTPConflict(explanation=e.format_message())
        except exception.InstanceInvalidState as state_error:
            common.raise_http_conflict_for_instance_invalid_state(state_error,
                                                                  'unrescue',
                                                                  id)
        return webob.Response(status_int=202)
class Rescue(exts.ExtensionDescriptor):
    """Instance rescue mode."""

    name = "Rescue"
    alias = "os-rescue"
    namespace = "http://docs.openstack.org/compute/ext/rescue/api/v1.1"
    updated = "2011-08-18T00:00:00Z"

    def get_controller_extensions(self):
        """Attach the rescue/unrescue actions to the 'servers' resource."""
        return [exts.ControllerExtension(
            self, 'servers', RescueController(self.ext_mgr))]
| apache-2.0 |
RafaelTorrealba/odoo | addons/crm/wizard/crm_lead_to_opportunity.py | 146 | 13701 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
import re
class crm_lead2opportunity_partner(osv.osv_memory):
    """Wizard converting one or more CRM leads into opportunities,
    optionally merging them with existing opportunities of the same
    partner/email."""
    _name = 'crm.lead2opportunity.partner'
    _description = 'Lead To Opportunity Partner'
    _inherit = 'crm.partner.binding'

    _columns = {
        'name': fields.selection([
            ('convert', 'Convert to opportunity'),
            ('merge', 'Merge with existing opportunities')
        ], 'Conversion Action', required=True),
        'opportunity_ids': fields.many2many('crm.lead', string='Opportunities'),
        'user_id': fields.many2one('res.users', 'Salesperson', select=True),
        'section_id': fields.many2one('crm.case.section', 'Sales Team', select=True),
    }

    def onchange_action(self, cr, uid, ids, action, context=None):
        # Keep a partner only when the user chose to link an existing one.
        return {'value': {'partner_id': False if action != 'exist' else self._find_matching_partner(cr, uid, context=context)}}

    def _get_duplicated_leads(self, cr, uid, partner_id, email, include_lost=False, context=None):
        """
        Search for opportunities that have the same partner and that aren't
        done or cancelled.
        """
        return self.pool.get('crm.lead')._get_duplicated_leads_by_emails(cr, uid, partner_id, email, include_lost=include_lost, context=context)

    def default_get(self, cr, uid, fields, context=None):
        """
        Default get for name, opportunity_ids.
        If there is an existing partner linked to the lead, find all existing
        opportunities linked with this partner to merge all information
        together.
        """
        if context is None:
            # BUGFIX: context.get() below crashed with AttributeError when
            # the wizard was opened without a context; view_init() already
            # guarded against a None context, default_get() now does too.
            context = {}
        lead_obj = self.pool.get('crm.lead')

        res = super(crm_lead2opportunity_partner, self).default_get(cr, uid, fields, context=context)
        if context.get('active_id'):
            tomerge = [int(context['active_id'])]

            partner_id = res.get('partner_id')
            lead = lead_obj.browse(cr, uid, int(context['active_id']), context=context)
            email = lead.partner_id and lead.partner_id.email or lead.email_from

            # Candidate duplicates share the partner or the email address.
            tomerge.extend(self._get_duplicated_leads(cr, uid, partner_id, email, include_lost=True, context=context))
            tomerge = list(set(tomerge))

            if 'action' in fields and not res.get('action'):
                res.update({'action': partner_id and 'exist' or 'create'})
            if 'partner_id' in fields:
                res.update({'partner_id': partner_id})
            if 'name' in fields:
                # Default to merging as soon as at least one duplicate exists.
                res.update({'name': len(tomerge) >= 2 and 'merge' or 'convert'})
            if 'opportunity_ids' in fields and len(tomerge) >= 2:
                res.update({'opportunity_ids': tomerge})
            if lead.user_id:
                res.update({'user_id': lead.user_id.id})
            if lead.section_id:
                res.update({'section_id': lead.section_id.id})

        return res

    def on_change_user(self, cr, uid, ids, user_id, section_id, context=None):
        """ When changing the user, also set a section_id or restrict section id
        to the ones user_id is member of. """
        if user_id:
            if section_id:
                user_in_section = self.pool.get('crm.case.section').search(cr, uid, [('id', '=', section_id), '|', ('user_id', '=', user_id), ('member_ids', '=', user_id)], context=context, count=True)
            else:
                user_in_section = False
            if not user_in_section:
                # Fall back to the default team of the selected salesman.
                result = self.pool['crm.lead'].on_change_user(cr, uid, ids, user_id, context=context)
                section_id = result.get('value') and result['value'].get('section_id') and result['value']['section_id'] or False
        return {'value': {'section_id': section_id}}

    def view_init(self, cr, uid, fields, context=None):
        """
        Check some preconditions before the wizard executes.
        """
        if context is None:
            context = {}
        lead_obj = self.pool.get('crm.lead')
        for lead in lead_obj.browse(cr, uid, context.get('active_ids', []), context=context):
            # probability == 100 marks a closed (won/lost) lead.
            if lead.probability == 100:
                raise osv.except_osv(_("Warning!"), _("Closed/Dead leads cannot be converted into opportunities."))
        return False

    def _convert_opportunity(self, cr, uid, ids, vals, context=None):
        """Convert the leads in vals['lead_ids'] and assign salesmen.

        :param vals: dict with 'lead_ids', optional 'section_id',
                     'partner_id' and 'user_ids'.
        :return: result of the last crm.lead.convert_opportunity() call.
        """
        if context is None:
            context = {}
        lead = self.pool.get('crm.lead')
        res = False
        lead_ids = vals.get('lead_ids', [])
        team_id = vals.get('section_id', False)
        partner_id = vals.get('partner_id')
        data = self.browse(cr, uid, ids, context=context)[0]
        leads = lead.browse(cr, uid, lead_ids, context=context)
        for lead_id in leads:
            partner_id = self._create_partner(cr, uid, lead_id.id, data.action, partner_id or lead_id.partner_id.id, context=context)
            res = lead.convert_opportunity(cr, uid, [lead_id.id], partner_id, [], False, context=context)
        user_ids = vals.get('user_ids', False)
        if context.get('no_force_assignation'):
            # Only assign leads that do not already have a salesman.
            leads_to_allocate = [lead_id.id for lead_id in leads if not lead_id.user_id]
        else:
            leads_to_allocate = lead_ids
        if user_ids:
            lead.allocate_salesman(cr, uid, leads_to_allocate, user_ids, team_id=team_id, context=context)
        return res

    def action_apply(self, cr, uid, ids, context=None):
        """
        Convert lead to opportunity or merge lead and opportunity and open
        the freshly created opportunity view.
        """
        if context is None:
            context = {}
        lead_obj = self.pool['crm.lead']

        w = self.browse(cr, uid, ids, context=context)[0]
        opp_ids = [o.id for o in w.opportunity_ids]
        vals = {
            'section_id': w.section_id.id,
        }
        if w.partner_id:
            vals['partner_id'] = w.partner_id.id
        if w.name == 'merge':
            lead_id = lead_obj.merge_opportunity(cr, uid, opp_ids, context=context)
            lead_ids = [lead_id]
            lead = lead_obj.read(cr, uid, lead_id, ['type', 'user_id'], context=context)
            if lead['type'] == "lead":
                # The merge result is still a lead: convert it as well.
                context = dict(context, active_ids=lead_ids)
                vals.update({'lead_ids': lead_ids, 'user_ids': [w.user_id.id]})
                self._convert_opportunity(cr, uid, ids, vals, context=context)
            elif not context.get('no_force_assignation') or not lead['user_id']:
                vals.update({'user_id': w.user_id.id})
                lead_obj.write(cr, uid, lead_id, vals, context=context)
        else:
            lead_ids = context.get('active_ids', [])
            vals.update({'lead_ids': lead_ids, 'user_ids': [w.user_id.id]})
            self._convert_opportunity(cr, uid, ids, vals, context=context)

        return self.pool.get('crm.lead').redirect_opportunity_view(cr, uid, lead_ids[0], context=context)

    def _create_partner(self, cr, uid, lead_id, action, partner_id, context=None):
        """
        Create partner based on action.
        :return: the partner id assigned to *lead_id*.
        """
        # TODO this method is only called by the crm_lead2opportunity_partner
        # wizard and would probably deserve to be refactored or at least
        # moved to a better place
        if context is None:
            context = {}
        lead = self.pool.get('crm.lead')
        if action == 'each_exist_or_create':
            # Resolve per-lead: reuse a matching partner, else create one.
            ctx = dict(context)
            ctx['active_id'] = lead_id
            partner_id = self._find_matching_partner(cr, uid, context=ctx)
            action = 'create'
        res = lead.handle_partner_assignation(cr, uid, [lead_id], action, partner_id, context=context)
        return res.get(lead_id)
class crm_lead2opportunity_mass_convert(osv.osv_memory):
    """Wizard converting several leads to opportunities in one pass, with
    optional per-partner deduplication (merging duplicate leads first)."""
    _name = 'crm.lead2opportunity.partner.mass'
    _description = 'Mass Lead To Opportunity Partner'
    _inherit = 'crm.lead2opportunity.partner'

    _columns = {
        'user_ids': fields.many2many('res.users', string='Salesmen'),
        'section_id': fields.many2one('crm.case.section', 'Sales Team', select=True),
        'deduplicate': fields.boolean('Apply deduplication', help='Merge with existing leads/opportunities of each partner'),
        'action': fields.selection([
            ('each_exist_or_create', 'Use existing partner or create'),
            ('nothing', 'Do not link to a customer')
        ], 'Related Customer', required=True),
        'force_assignation': fields.boolean('Force assignation', help='If unchecked, this will leave the salesman of duplicated opportunities'),
    }

    _defaults = {
        'deduplicate': True,
    }

    def default_get(self, cr, uid, fields, context=None):
        """Override defaults for mass conversion: never preselect the first
        lead's partner, and default to 'convert' with partner lookup/creation."""
        res = super(crm_lead2opportunity_mass_convert, self).default_get(cr, uid, fields, context)
        if 'partner_id' in fields:
            # avoid forcing the partner of the first lead as default
            res['partner_id'] = False
        if 'action' in fields:
            res['action'] = 'each_exist_or_create'
        if 'name' in fields:
            res['name'] = 'convert'
        if 'opportunity_ids' in fields:
            res['opportunity_ids'] = False
        return res

    def on_change_action(self, cr, uid, ids, action, context=None):
        # Clear the partner field whenever the action is not an explicit
        # link to an existing partner.
        vals = {}
        if action != 'exist':
            vals = {'value': {'partner_id': False}}
        return vals

    def on_change_deduplicate(self, cr, uid, ids, deduplicate, context=None):
        """Recompute ``opportunity_ids`` as the subset of the active leads
        that have duplicates (same partner, or same email when no partner)."""
        if context is None:
            context = {}
        # NOTE(review): assumes the client always passes 'active_ids' in the
        # context; a bare context would raise KeyError here -- verify callers.
        active_leads = self.pool['crm.lead'].browse(cr, uid, context['active_ids'], context=context)
        partner_ids = [(lead.partner_id.id, lead.partner_id and lead.partner_id.email or lead.email_from) for lead in active_leads]
        partners_duplicated_leads = {}
        for partner_id, email in partner_ids:
            # _get_duplicated_leads is provided by the inherited wizard.
            duplicated_leads = self._get_duplicated_leads(cr, uid, partner_id, email)
            if len(duplicated_leads) > 1:
                partners_duplicated_leads.setdefault((partner_id, email), []).extend(duplicated_leads)
        leads_with_duplicates = []
        for lead in active_leads:
            lead_tuple = (lead.partner_id.id, lead.partner_id.email if lead.partner_id else lead.email_from)
            if len(partners_duplicated_leads.get(lead_tuple, [])) > 1:
                leads_with_duplicates.append(lead.id)
        return {'value': {'opportunity_ids': leads_with_duplicates}}

    def _convert_opportunity(self, cr, uid, ids, vals, context=None):
        """
        When "massively" (more than one at a time) converting leads to
        opportunities, check the salesteam_id and salesmen_ids and update
        the values before calling super.
        """
        if context is None:
            context = {}
        data = self.browse(cr, uid, ids, context=context)[0]
        salesteam_id = data.section_id and data.section_id.id or False
        salesmen_ids = []
        if data.user_ids:
            salesmen_ids = [x.id for x in data.user_ids]
        vals.update({'user_ids': salesmen_ids, 'section_id': salesteam_id})
        return super(crm_lead2opportunity_mass_convert, self)._convert_opportunity(cr, uid, ids, vals, context=context)

    def mass_convert(self, cr, uid, ids, context=None):
        """Entry point: optionally merge duplicated leads, then delegate the
        actual conversion to :meth:`action_apply`."""
        # BUGFIX: guard against a None context before copying it below --
        # dict(None) raises TypeError. Every sibling method applies the same
        # 'if context is None' guard.
        if context is None:
            context = {}
        data = self.browse(cr, uid, ids, context=context)[0]
        ctx = dict(context)
        if data.name == 'convert' and data.deduplicate:
            merged_lead_ids = []
            remaining_lead_ids = []
            lead_selected = context.get('active_ids', [])
            for lead_id in lead_selected:
                # Skip leads already swallowed by an earlier merge.
                if lead_id not in merged_lead_ids:
                    lead = self.pool['crm.lead'].browse(cr, uid, lead_id, context=context)
                    duplicated_lead_ids = self._get_duplicated_leads(cr, uid, lead.partner_id.id, lead.partner_id and lead.partner_id.email or lead.email_from)
                    if len(duplicated_lead_ids) > 1:
                        # merge_opportunity returns the id of the surviving lead.
                        lead_id = self.pool.get('crm.lead').merge_opportunity(cr, uid, duplicated_lead_ids, False, False, context=context)
                        merged_lead_ids.extend(duplicated_lead_ids)
                        remaining_lead_ids.append(lead_id)
            # Replace the merged leads by their surviving counterparts in the
            # active selection passed on to the conversion step.
            active_ids = set(context.get('active_ids', []))
            active_ids = active_ids.difference(merged_lead_ids)
            active_ids = active_ids.union(remaining_lead_ids)
            ctx['active_ids'] = list(active_ids)
        ctx['no_force_assignation'] = context.get('no_force_assignation', not data.force_assignation)
        return self.action_apply(cr, uid, ids, context=ctx)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
autotest/virt-test | virttest/rss_client.py | 21 | 19481 | #!/usr/bin/python
"""
Client for file transfer services offered by RSS (Remote Shell Server).
:author: Michael Goldish (mgoldish@redhat.com)
:copyright: 2008-2010 Red Hat Inc.
"""
import socket
import struct
import time
import sys
import os
import glob
# Globals
CHUNKSIZE = 65536  # size (bytes) of each file-data packet sent over the wire

# Protocol message constants (4-byte values exchanged with the server)
RSS_MAGIC = 0x525353  # bytes 0x52,0x53,0x53 == "R","S","S"; sent by the server on connect
RSS_OK = 1            # server: operation completed successfully
RSS_ERROR = 2         # followed by a packet holding the error message
RSS_UPLOAD = 3        # client: switch the session to upload mode
RSS_DOWNLOAD = 4      # client: switch the session to download mode
RSS_SET_PATH = 5      # followed by a packet holding the path/pattern
RSS_CREATE_FILE = 6   # followed by filename packet + file-chunk packets
RSS_CREATE_DIR = 7    # followed by a dirname packet; enters the directory
RSS_LEAVE_DIR = 8     # return to the parent directory
RSS_DONE = 9          # end of the transfer

# See rss.cpp for protocol details.
class FileTransferError(Exception):
    """Base class for file transfer failures.

    Carries an optional underlying error (``e``) and the filename being
    transferred when the failure occurred (``filename``).
    """

    def __init__(self, msg, e=None, filename=None):
        Exception.__init__(self, msg, e, filename)
        self.msg = msg
        self.e = e
        self.filename = filename

    def __str__(self):
        # Append whichever details are available to the base message.
        if self.e and self.filename:
            suffix = " (error: %s, filename: %s)" % (self.e, self.filename)
        elif self.e:
            suffix = " (%s)" % self.e
        elif self.filename:
            suffix = " (filename: %s)" % self.filename
        else:
            suffix = ""
        return self.msg + suffix
class FileTransferConnectError(FileTransferError):
    # Raised when connecting or handshaking with the server fails.
    pass


class FileTransferTimeoutError(FileTransferError):
    # Raised when a send/receive does not complete before its deadline.
    pass


class FileTransferProtocolError(FileTransferError):
    # Raised when the peer sends data that violates the RSS protocol.
    pass


class FileTransferSocketError(FileTransferError):
    # Raised when a low-level socket error occurs mid-transfer.
    pass


class FileTransferServerError(FileTransferError):
    # Raised when the server explicitly reports an error (RSS_ERROR message).

    def __init__(self, errmsg):
        # The server's message becomes the underlying error (self.e);
        # there is no separate client-side message.
        FileTransferError.__init__(self, None, errmsg)

    def __str__(self):
        s = "Server said: %r" % self.e
        if self.filename:
            s += " (filename: %s)" % self.filename
        return s


class FileTransferNotFoundError(FileTransferError):
    # Raised when the given pattern matched no files or directories.
    pass
class FileTransferClient(object):
    """
    Connect to a RSS (remote shell server) and transfer files.

    Base class holding the socket plumbing shared by FileUploadClient and
    FileDownloadClient: framed packet send/receive, protocol messages,
    chunked file streaming and transfer statistics reporting.
    """

    def __init__(self, address, port, log_func=None, timeout=20):
        """
        Connect to a server.

        :param address: The server's address
        :param port: The server's port
        :param log_func: If provided, transfer stats will be passed to this
                function during the transfer
        :param timeout: Time duration to wait for connection to succeed
        :raise FileTransferConnectError: Raised if the connection fails
        """
        # Use IPv6 when the address contains a colon, IPv4 otherwise.
        family = ":" in address and socket.AF_INET6 or socket.AF_INET
        self._socket = socket.socket(family, socket.SOCK_STREAM)
        self._socket.settimeout(timeout)
        try:
            addrinfo = socket.getaddrinfo(address, port, family,
                                          socket.SOCK_STREAM,
                                          socket.IPPROTO_TCP)
            self._socket.connect(addrinfo[0][4])
        except socket.error, e:
            raise FileTransferConnectError("Cannot connect to server at "
                                           "%s:%s" % (address, port), e)
        try:
            # The server identifies itself by sending a magic number first.
            if self._receive_msg(timeout) != RSS_MAGIC:
                raise FileTransferConnectError("Received wrong magic number")
        except FileTransferTimeoutError:
            raise FileTransferConnectError("Timeout expired while waiting to "
                                           "receive magic number")
        # Announce the chunk size we will use for file data packets.
        self._send(struct.pack("=i", CHUNKSIZE))
        self._log_func = log_func
        self._last_time = time.time()  # time of the last stats report
        self._last_transferred = 0     # byte count at the last stats report
        self.transferred = 0           # total bytes sent and received

    def __del__(self):
        self.close()

    def close(self):
        """
        Close the connection.
        """
        self._socket.close()

    def _send(self, sr, timeout=60):
        # Send raw bytes, mapping socket failures onto FileTransferError
        # subclasses.
        try:
            if timeout <= 0:
                raise socket.timeout
            self._socket.settimeout(timeout)
            self._socket.sendall(sr)
        except socket.timeout:
            raise FileTransferTimeoutError("Timeout expired while sending "
                                           "data to server")
        except socket.error, e:
            raise FileTransferSocketError("Could not send data to server", e)

    def _receive(self, size, timeout=60):
        # Receive exactly `size` bytes, honoring an overall deadline across
        # multiple recv() calls.
        strs = []
        end_time = time.time() + timeout
        try:
            while size > 0:
                timeout = end_time - time.time()
                if timeout <= 0:
                    raise socket.timeout
                self._socket.settimeout(timeout)
                data = self._socket.recv(size)
                if not data:
                    # Peer closed the connection before sending everything.
                    raise FileTransferProtocolError("Connection closed "
                                                    "unexpectedly while "
                                                    "receiving data from "
                                                    "server")
                strs.append(data)
                size -= len(data)
        except socket.timeout:
            raise FileTransferTimeoutError("Timeout expired while receiving "
                                           "data from server")
        except socket.error, e:
            raise FileTransferSocketError("Error receiving data from server",
                                          e)
        return "".join(strs)

    def _report_stats(self, sr):
        # Report transfer progress through log_func, at most once per second.
        if self._log_func:
            dt = time.time() - self._last_time
            if dt >= 1:
                transferred = self.transferred / 1048576.
                speed = (self.transferred - self._last_transferred) / dt
                speed /= 1048576.
                self._log_func("%s %.3f MB (%.3f MB/sec)" %
                               (sr, transferred, speed))
                self._last_time = time.time()
                self._last_transferred = self.transferred

    def _send_packet(self, sr, timeout=60):
        # A packet is a 4-byte length header followed by the payload.
        self._send(struct.pack("=I", len(sr)))
        self._send(sr, timeout)
        self.transferred += len(sr) + 4
        self._report_stats("Sent")

    def _receive_packet(self, timeout=60):
        # Read the 4-byte length header, then the payload itself.
        size = struct.unpack("=I", self._receive(4))[0]
        sr = self._receive(size, timeout)
        self.transferred += len(sr) + 4
        self._report_stats("Received")
        return sr

    def _send_file_chunks(self, filename, timeout=60):
        # Stream a local file as CHUNKSIZE-sized packets; a short (possibly
        # empty) final packet marks end of file.
        if self._log_func:
            self._log_func("Sending file %s" % filename)
        f = open(filename, "rb")
        try:
            try:
                end_time = time.time() + timeout
                while True:
                    data = f.read(CHUNKSIZE)
                    self._send_packet(data, end_time - time.time())
                    if len(data) < CHUNKSIZE:
                        break
            except FileTransferError, e:
                # Tag the error with the file involved for better reporting.
                e.filename = filename
                raise
        finally:
            f.close()

    def _receive_file_chunks(self, filename, timeout=60):
        # Receive packets into a local file until a short packet arrives.
        if self._log_func:
            self._log_func("Receiving file %s" % filename)
        f = open(filename, "wb")
        try:
            try:
                end_time = time.time() + timeout
                while True:
                    data = self._receive_packet(end_time - time.time())
                    f.write(data)
                    if len(data) < CHUNKSIZE:
                        break
            except FileTransferError, e:
                e.filename = filename
                raise
        finally:
            f.close()

    def _send_msg(self, msg, timeout=60):
        # Send a 4-byte protocol message (the timeout parameter is unused).
        self._send(struct.pack("=I", msg))

    def _receive_msg(self, timeout=60):
        # Receive a 4-byte protocol message.
        s = self._receive(4, timeout)
        return struct.unpack("=I", s)[0]

    def _handle_transfer_error(self):
        # Save original exception
        e = sys.exc_info()
        try:
            # See if we can get an error message
            msg = self._receive_msg()
        except FileTransferError:
            # No error message -- re-raise original exception
            raise e[0], e[1], e[2]
        if msg == RSS_ERROR:
            # Prefer the server's own description of the failure.
            errmsg = self._receive_packet()
            raise FileTransferServerError(errmsg)
        raise e[0], e[1], e[2]
class FileUploadClient(FileTransferClient):
    """
    Connect to a RSS (remote shell server) and upload files or directory trees.
    """

    def __init__(self, address, port, log_func=None, timeout=20):
        """
        Connect to a server.

        :param address: The server's address
        :param port: The server's port
        :param log_func: If provided, transfer stats will be passed to this
                function during the transfer
        :param timeout: Time duration to wait for connection to succeed
        :raise FileTransferConnectError: Raised if the connection fails
        :raise FileTransferProtocolError: Raised if an incorrect magic number
                is received
        :raise FileTransferSocketError: Raised if the RSS_UPLOAD message cannot
                be sent to the server
        """
        super(FileUploadClient, self).__init__(
            address, port, log_func, timeout)
        # Switch the freshly opened session into upload mode.
        self._send_msg(RSS_UPLOAD)

    def _upload_file(self, path, end_time):
        # Recursively send the file or directory tree rooted at `path`.
        if os.path.isfile(path):
            self._send_msg(RSS_CREATE_FILE)
            self._send_packet(os.path.basename(path))
            self._send_file_chunks(path, end_time - time.time())
        elif os.path.isdir(path):
            # Enter the directory on the server side, send its contents,
            # then leave it again.
            self._send_msg(RSS_CREATE_DIR)
            self._send_packet(os.path.basename(path))
            for filename in os.listdir(path):
                self._upload_file(os.path.join(path, filename), end_time)
            self._send_msg(RSS_LEAVE_DIR)

    def upload(self, src_pattern, dst_path, timeout=600):
        """
        Send files or directory trees to the server.

        The semantics of src_pattern and dst_path are similar to those of scp.
        For example, the following are OK:

        ::

            src_pattern='/tmp/foo.txt', dst_path='C:\\'
                (uploads a single file)
            src_pattern='/usr/', dst_path='C:\\Windows\\'
                (uploads a directory tree recursively)
            src_pattern='/usr/*', dst_path='C:\\Windows\\'
                (uploads all files and directory trees under /usr/)

        The following is not OK:

        ::

            src_pattern='/tmp/foo.txt', dst_path='C:\\Windows\\*'
                (wildcards are only allowed in src_pattern)

        :param src_pattern: A path or wildcard pattern specifying the files or
                            directories to send to the server
        :param dst_path: A path in the server's filesystem where the files will
                         be saved
        :param timeout: Time duration in seconds to wait for the transfer to
                        complete
        :raise FileTransferTimeoutError: Raised if timeout expires
        :raise FileTransferServerError: Raised if something goes wrong and the
                                        server sends an informative error
                                        message to the client
        :note: Other exceptions can be raised.
        """
        end_time = time.time() + timeout
        try:
            try:
                self._send_msg(RSS_SET_PATH)
                self._send_packet(dst_path)
                matches = glob.glob(src_pattern)
                for filename in matches:
                    self._upload_file(os.path.abspath(filename), end_time)
                self._send_msg(RSS_DONE)
            except FileTransferTimeoutError:
                raise
            except FileTransferError:
                # Try to replace the generic error with the server's own
                # error message, if one is pending on the wire.
                self._handle_transfer_error()
            else:
                # If nothing was transferred, raise an exception
                if not matches:
                    raise FileTransferNotFoundError("Pattern %s does not "
                                                    "match any files or "
                                                    "directories" %
                                                    src_pattern)
                # Look for RSS_OK or RSS_ERROR
                msg = self._receive_msg(end_time - time.time())
                if msg == RSS_OK:
                    return
                elif msg == RSS_ERROR:
                    errmsg = self._receive_packet()
                    raise FileTransferServerError(errmsg)
                else:
                    # Neither RSS_OK nor RSS_ERROR found
                    raise FileTransferProtocolError("Received unexpected msg")
        except Exception:
            # In any case, if the transfer failed, close the connection
            self.close()
            raise
class FileDownloadClient(FileTransferClient):
    """
    Connect to a RSS (remote shell server) and download files or directory trees.
    """

    def __init__(self, address, port, log_func=None, timeout=20):
        """
        Connect to a server.

        :param address: The server's address
        :param port: The server's port
        :param log_func: If provided, transfer stats will be passed to this
                function during the transfer
        :param timeout: Time duration to wait for connection to succeed
        :raise FileTransferConnectError: Raised if the connection fails
        :raise FileTransferProtocolError: Raised if an incorrect magic number
                is received
        :raise FileTransferSendError: Raised if the RSS_UPLOAD message cannot
                be sent to the server
        """
        super(FileDownloadClient, self).__init__(
            address, port, log_func, timeout)
        # Switch the freshly opened session into download mode.
        self._send_msg(RSS_DOWNLOAD)

    def download(self, src_pattern, dst_path, timeout=600):
        """
        Receive files or directory trees from the server.

        The semantics of src_pattern and dst_path are similar to those of scp.
        For example, the following are OK:

        ::

            src_pattern='C:\\foo.txt', dst_path='/tmp'
                (downloads a single file)
            src_pattern='C:\\Windows', dst_path='/tmp'
                (downloads a directory tree recursively)
            src_pattern='C:\\Windows\\*', dst_path='/tmp'
                (downloads all files and directory trees under C:\\Windows)

        The following is not OK:

        ::

            src_pattern='C:\\Windows', dst_path='/tmp/*'
                (wildcards are only allowed in src_pattern)

        :param src_pattern: A path or wildcard pattern specifying the files or
                            directories, in the server's filesystem, that will
                            be sent to the client
        :param dst_path: A path in the local filesystem where the files will
                         be saved
        :param timeout: Time duration in seconds to wait for the transfer to
                        complete
        :raise FileTransferTimeoutError: Raised if timeout expires
        :raise FileTransferServerError: Raised if something goes wrong and the
                                        server sends an informative error
                                        message to the client
        :note: Other exceptions can be raised.
        """
        dst_path = os.path.abspath(dst_path)
        end_time = time.time() + timeout
        # Counters used to detect a transfer that matched nothing.
        file_count = 0
        dir_count = 0
        try:
            try:
                self._send_msg(RSS_SET_PATH)
                self._send_packet(src_pattern)
            except FileTransferError:
                self._handle_transfer_error()
            # Process server messages until RSS_DONE (or an error) arrives.
            # dst_path tracks the current local directory as the server
            # walks into (RSS_CREATE_DIR) and out of (RSS_LEAVE_DIR) dirs.
            while True:
                msg = self._receive_msg()
                if msg == RSS_CREATE_FILE:
                    # Receive filename and file contents
                    filename = self._receive_packet()
                    if os.path.isdir(dst_path):
                        dst_path = os.path.join(dst_path, filename)
                    self._receive_file_chunks(dst_path, end_time - time.time())
                    dst_path = os.path.dirname(dst_path)
                    file_count += 1
                elif msg == RSS_CREATE_DIR:
                    # Receive dirname and create the directory
                    dirname = self._receive_packet()
                    if os.path.isdir(dst_path):
                        dst_path = os.path.join(dst_path, dirname)
                    if not os.path.isdir(dst_path):
                        os.mkdir(dst_path)
                    dir_count += 1
                elif msg == RSS_LEAVE_DIR:
                    # Return to parent dir
                    dst_path = os.path.dirname(dst_path)
                elif msg == RSS_DONE:
                    # Transfer complete
                    if not file_count and not dir_count:
                        raise FileTransferNotFoundError("Pattern %s does not "
                                                        "match any files or "
                                                        "directories that "
                                                        "could be downloaded" %
                                                        src_pattern)
                    break
                elif msg == RSS_ERROR:
                    # Receive error message and abort
                    errmsg = self._receive_packet()
                    raise FileTransferServerError(errmsg)
                else:
                    # Unexpected msg
                    raise FileTransferProtocolError("Received unexpected msg")
        except Exception:
            # In any case, if the transfer failed, close the connection
            self.close()
            raise
def upload(address, port, src_pattern, dst_path, log_func=None, timeout=60,
           connect_timeout=20):
    """
    Connect to server and upload files.

    Convenience wrapper: opens a FileUploadClient, uploads and closes it.

    :see:: FileUploadClient
    """
    client = FileUploadClient(address, port, log_func, connect_timeout)
    client.upload(src_pattern, dst_path, timeout)
    client.close()
def download(address, port, src_pattern, dst_path, log_func=None, timeout=60,
             connect_timeout=20):
    """
    Connect to server and download files.

    Convenience wrapper: opens a FileDownloadClient, downloads and closes it.

    :see:: FileDownloadClient
    """
    client = FileDownloadClient(address, port, log_func, connect_timeout)
    client.download(src_pattern, dst_path, timeout)
    client.close()
def main():
    """Command-line entry point: upload files to or download files from an
    RSS server, according to -u/-d."""
    import optparse
    usage = "usage: %prog [options] address port src_pattern dst_path"
    parser = optparse.OptionParser(usage=usage)
    parser.add_option("-d", "--download",
                      action="store_true", dest="download",
                      help="download files from server")
    parser.add_option("-u", "--upload",
                      action="store_true", dest="upload",
                      help="upload files to server")
    parser.add_option("-v", "--verbose",
                      action="store_true", dest="verbose",
                      help="be verbose")
    parser.add_option("-t", "--timeout",
                      type="int", dest="timeout", default=3600,
                      help="transfer timeout")
    options, args = parser.parse_args()
    if options.download == options.upload:
        # Exactly one of -d/-u must be given (both set, or neither, is an error).
        parser.error("you must specify either -d or -u")
    if len(args) != 4:
        parser.error("incorrect number of arguments")
    address, port, src_pattern, dst_path = args
    port = int(port)
    logger = None
    if options.verbose:
        # Progress callback that prints transfer stats to stdout.
        def p(s):
            print s
        logger = p
    if options.download:
        download(address, port, src_pattern, dst_path, logger, options.timeout)
    elif options.upload:
        upload(address, port, src_pattern, dst_path, logger, options.timeout)


if __name__ == "__main__":
    main()
| gpl-2.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.