"""`Factory of Factories` pattern."""
from dependency_injector import containers, providers
class SqlAlchemyDatabaseService:
def __init__(self, session, base_class):
self.session = session
self.base_class = base_class
class TokensService:
def __init__(self, id_generator, database):
self.id_generator = id_generator
self.database = database
class Token:
...
class UsersService:
def __init__(self, id_generator, database):
self.id_generator = id_generator
self.database = database
class User:
...
# Sample objects
session = object()
id_generator = object()
class Container(containers.DeclarativeContainer):
database_factory = providers.Factory(
providers.Factory,
SqlAlchemyDatabaseService,
session=session,
)
    token_service = providers.Factory(
TokensService,
id_generator=id_generator,
database=database_factory(base_class=Token),
)
user_service = providers.Factory(
UsersService,
id_generator=id_generator,
database=database_factory(base_class=User),
)
if __name__ == '__main__':
container = Container()
token_service = container.token_service()
assert token_service.database.base_class is Token
user_service = container.user_service()
assert user_service.database.base_class is User
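    # Illustration (added sketch, not part of the original sample): calling
    # database_factory directly returns a new inner providers.Factory bound to
    # the given base_class; invoking that factory builds the actual service.
    token_db_factory = container.database_factory(base_class=Token)
    assert token_db_factory().base_class is Token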
"""Local Media Source Implementation."""
from __future__ import annotations
import mimetypes
from pathlib import Path
from aiohttp import web
from homeassistant.components.http import HomeAssistantView
from homeassistant.components.media_player.const import MEDIA_CLASS_DIRECTORY
from homeassistant.components.media_player.errors import BrowseError
from homeassistant.core import HomeAssistant, callback
from homeassistant.util import raise_if_invalid_path
from .const import DOMAIN, MEDIA_CLASS_MAP, MEDIA_MIME_TYPES
from .error import Unresolvable
from .models import BrowseMediaSource, MediaSource, MediaSourceItem, PlayMedia
@callback
def async_setup(hass: HomeAssistant) -> None:
"""Set up local media source."""
source = LocalSource(hass)
hass.data[DOMAIN][DOMAIN] = source
hass.http.register_view(LocalMediaView(hass, source))
class LocalSource(MediaSource):
"""Provide local directories as media sources."""
name: str = "Local Media"
def __init__(self, hass: HomeAssistant) -> None:
"""Initialize local source."""
super().__init__(DOMAIN)
self.hass = hass
@callback
def async_full_path(self, source_dir_id: str, location: str) -> Path:
"""Return full path."""
return Path(self.hass.config.media_dirs[source_dir_id], location)
@callback
def async_parse_identifier(self, item: MediaSourceItem) -> tuple[str, str]:
"""Parse identifier."""
if not item.identifier:
# Empty source_dir_id and location
return "", ""
source_dir_id, location = item.identifier.split("/", 1)
if source_dir_id not in self.hass.config.media_dirs:
raise Unresolvable("Unknown source directory.")
try:
raise_if_invalid_path(location)
except ValueError as err:
raise Unresolvable("Invalid path.") from err
return source_dir_id, location
async def async_resolve_media(self, item: MediaSourceItem) -> PlayMedia:
"""Resolve media to a url."""
source_dir_id, location = self.async_parse_identifier(item)
if source_dir_id == "" or source_dir_id not in self.hass.config.media_dirs:
raise Unresolvable("Unknown source directory.")
mime_type, _ = mimetypes.guess_type(
str(self.async_full_path(source_dir_id, location))
)
assert isinstance(mime_type, str)
return PlayMedia(f"/media/{item.identifier}", mime_type)
async def async_browse_media(self, item: MediaSourceItem) -> BrowseMediaSource:
"""Return media."""
try:
source_dir_id, location = self.async_parse_identifier(item)
except Unresolvable as err:
raise BrowseError(str(err)) from err
result = await self.hass.async_add_executor_job(
self._browse_media, source_dir_id, location
)
return result
def _browse_media(self, source_dir_id: str, location: str) -> BrowseMediaSource:
"""Browse media."""
# If only one media dir is configured, use that as the local media root
if source_dir_id == "" and len(self.hass.config.media_dirs) == 1:
source_dir_id = list(self.hass.config.media_dirs)[0]
        # Multiple folders configured, root is requested
if source_dir_id == "":
if location:
raise BrowseError("Folder not found.")
base = BrowseMediaSource(
domain=DOMAIN,
identifier="",
media_class=MEDIA_CLASS_DIRECTORY,
media_content_type=None,
title=self.name,
can_play=False,
can_expand=True,
children_media_class=MEDIA_CLASS_DIRECTORY,
)
base.children = [
self._browse_media(source_dir_id, "")
for source_dir_id in self.hass.config.media_dirs
]
return base
full_path = Path(self.hass.config.media_dirs[source_dir_id], location)
if not full_path.exists():
if location == "":
raise BrowseError("Media directory does not exist.")
raise BrowseError("Path does not exist.")
if not full_path.is_dir():
raise BrowseError("Path is not a directory.")
result = self._build_item_response(source_dir_id, full_path)
if not result:
raise BrowseError("Unknown source directory.")
return result
def _build_item_response(
self, source_dir_id: str, path: Path, is_child: bool = False
) -> BrowseMediaSource | None:
mime_type, _ = mimetypes.guess_type(str(path))
is_file = path.is_file()
is_dir = path.is_dir()
# Make sure it's a file or directory
if not is_file and not is_dir:
return None
# Check that it's a media file
if is_file and (
not mime_type or mime_type.split("/")[0] not in MEDIA_MIME_TYPES
):
return None
title = path.name
if is_dir:
title += "/"
media_class = MEDIA_CLASS_DIRECTORY
if mime_type:
media_class = MEDIA_CLASS_MAP.get(
mime_type.split("/")[0], MEDIA_CLASS_DIRECTORY
)
media = BrowseMediaSource(
domain=DOMAIN,
identifier=f"{source_dir_id}/{path.relative_to(self.hass.config.media_dirs[source_dir_id])}",
media_class=media_class,
media_content_type=mime_type or "",
title=title,
can_play=is_file,
can_expand=is_dir,
)
if is_file or is_child:
return media
# Append first level children
media.children = []
for child_path in path.iterdir():
child = self._build_item_response(source_dir_id, child_path, True)
if child:
media.children.append(child)
# Sort children showing directories first, then by name
media.children.sort(key=lambda child: (child.can_play, child.title))
return media
class LocalMediaView(HomeAssistantView):
"""
Local Media Finder View.
Returns media files in config/media.
"""
url = "/media/{source_dir_id}/{location:.*}"
name = "media"
def __init__(self, hass: HomeAssistant, source: LocalSource) -> None:
"""Initialize the media view."""
self.hass = hass
self.source = source
async def get(
self, request: web.Request, source_dir_id: str, location: str
) -> web.FileResponse:
"""Start a GET request."""
try:
raise_if_invalid_path(location)
except ValueError as err:
raise web.HTTPBadRequest() from err
if source_dir_id not in self.hass.config.media_dirs:
            raise web.HTTPNotFound()
media_path = self.source.async_full_path(source_dir_id, location)
# Check that the file exists
if not media_path.is_file():
raise web.HTTPNotFound()
        # Check that it's a media file
mime_type, _ = mimetypes.guess_type(str(media_path))
if not mime_type or mime_type.split("/")[0] not in MEDIA_MIME_TYPES:
raise web.HTTPNotFound()
return web.FileResponse(media_path)
import django.dispatch
# Whenever a permission object is saved, it sends out the signal. This allows
# models to keep their permissions in sync.
permission_changed = django.dispatch.Signal(providing_args=('to_whom', 'to_what'))
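# Usage sketch (added for illustration; the receiver below is hypothetical and
# only demonstrates the subscription pattern):
from django.dispatch import receiver

@receiver(permission_changed)
def _example_permission_sync(sender, to_whom=None, to_what=None, **kwargs):
    # A subscribing model would refresh its cached permissions here.
    pass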
# Copyright 2022 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import subprocess
from setup_product.setup_cleanup import (
create_bq_dataset,
create_bq_table,
delete_bq_table,
upload_data_to_bq_table,
)
def test_import_products_bq(table_id_prefix):
dataset = "products"
valid_products_table = f"{table_id_prefix}products"
product_schema = "../resources/product_schema.json"
valid_products_source_file = "../resources/products.json"
create_bq_dataset(dataset)
    create_bq_table(dataset, valid_products_table, product_schema)
upload_data_to_bq_table(
dataset, valid_products_table, valid_products_source_file, product_schema
)
output = str(
subprocess.check_output(
f"python import_products_big_query_table.py {dataset} {valid_products_table}",
shell=True,
)
)
delete_bq_table(dataset, valid_products_table)
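    # Note (added): `output` is the str() of a bytes object, so newlines in the
    # subprocess output appear as the literal two-character escape "\n" and the
    # ".*" patterns below can span them even without re.DOTALL.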
assert re.match(".*import products from big query table request.*", output)
assert re.match(".*the operation was started.*", output)
assert re.match(
".*projects/.*/locations/global/catalogs/default_catalog/branches/0/operations/import-products.*",
output,
)
assert re.match(".*number of successfully imported products.*?316.*", output)
assert re.match(".*number of failures during the importing.*?0.*", output)
import pytest
from conftest import DeSECAPIV1Client
@pytest.mark.parametrize("init_rrsets", [
{
('www', 'A'): (3600, {'1.2.3.4'}),
('www', 'AAAA'): (3600, {'::1'}),
        ('one', 'CNAME'): (3600, {'some.example.net.'}),
('other', 'TXT'): (3600, {'"foo" "bar"', '"bar" "foo"'}),
}
])
@pytest.mark.parametrize("rrsets", [
{ # create three RRsets
('a' * 63, 'A'): (7000, {'4.3.2.1', '7.6.5.4'}),
('b', 'PTR'): (7000, {'1.foo.bar.com.', '2.bar.foo.net.'}),
('c.' + 'a' * 63, 'MX'): (7000, {'10 mail.something.net.'}),
},
{ # update three RRsets
('www', 'A'): None, # ensure value from init_rrset is still there
('www', 'AAAA'): (7000, {'6666::6666', '7777::7777'}),
('one', 'CNAME'): (7000, {'other.example.net.'}),
('other', 'TXT'): (7000, {'"foobar"'}),
},
{ # delete three RRsets
('www', 'A'): (7000, {}),
('www', 'AAAA'): None, # ensure value from init_rrset is still there
('one', 'CNAME'): (7000, {}),
('other', 'TXT'): (7000, {}),
},
{ # create, update, delete
('a' * 63, 'A'): (7000, {'4.3.2.1', '7.6.5.4'}),
('www', 'A'): None, # ensure value from init_rrset is still there
('www', 'AAAA'): (7000, {'6666::6666', '7777::7777'}),
('one', 'CNAME'): None, # ensure value from init_rrset is still there
('other', 'TXT'): (7000, {}),
},
    { # complex use case
        ('', 'A'): (3600, {'1.2.3.4', '255.254.253.252'}), # create apex record
('*', 'MX'): (3601, {'0 mx.example.net.'}), # create wildcard record
('www', 'AAAA'): (3602, {}), # remove existing record
('www', 'A'): (7000, {'4.3.2.1', '7.6.5.4'}), # update existing record
('one', 'A'): (3603, {'1.1.1.1'}), # configure A instead of ...
('one', 'CNAME'): (3603, {}), # ... CNAME
('other', 'CNAME'): (3603, {'cname.example.com.'}), # configure CNAME instead of ...
('other', 'TXT'): (3600, {}), # ... TXT
('nonexistent', 'DNAME'): (3600, {}), # delete something that doesn't exist
('sub', 'CDNSKEY'): (3600, {'257 3 15 l02Woi0iS8Aa25FQkUd9RMzZHJpBoRQwAQEX1SxZJA4='}), # non-apex DNSSEC
        ('sub', 'CDS'): (3600, {'35217 15 2 401781b934e392de492ec77ae2e15d70f6575a1c0bc59c5275c04ebe80c6614c'}), # ditto
# ('sub', 'DNSKEY'): (3600, {'257 3 15 l02Woi0iS8Aa25FQkUd9RMzZHJpBoRQwAQEX1SxZJA4='}) # no pdns support >= 4.6
},
])
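# Note on the encoding above (added): each key is a (subname, type) pair and
# each value is (ttl, set of record contents). An empty record set deletes the
# RRset; None means "not part of this patch, but assert afterwards that the
# value from init_rrsets is unchanged".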
def test(api_user_domain_rrsets: DeSECAPIV1Client, rrsets: dict):
api_user_domain_rrsets.patch(f"/domains/{api_user_domain_rrsets.domain}/rrsets/", data=[
{"subname": k[0], "type": k[1], "ttl": v[0], "records": list(v[1])}
for k, v in rrsets.items()
if v is not None
])
api_user_domain_rrsets.assert_rrsets(rrsets)
'''
t4_adm.py - this file is part of S3QL (http://s3ql.googlecode.com)
Copyright (C) 2008-2009 Nikolaus Rath <Nikolaus@rath.org>
This program can be distributed under the terms of the GNU GPLv3.
'''
from __future__ import division, print_function, absolute_import
from s3ql.backends import local
from s3ql.backends.common import BetterBackend
import shutil
import sys
import tempfile
import unittest2 as unittest
import subprocess
import os.path
if __name__ == '__main__':
mypath = sys.argv[0]
else:
mypath = __file__
BASEDIR = os.path.abspath(os.path.join(os.path.dirname(mypath), '..'))
class AdmTests(unittest.TestCase):
def setUp(self):
self.cache_dir = tempfile.mkdtemp()
self.backend_dir = tempfile.mkdtemp()
self.storage_url = 'local://' + self.backend_dir
self.passphrase = 'oeut3d'
def tearDown(self):
shutil.rmtree(self.cache_dir)
shutil.rmtree(self.backend_dir)
def mkfs(self):
proc = subprocess.Popen([sys.executable, os.path.join(BASEDIR, 'bin', 'mkfs.s3ql'),
'-L', 'test fs', '--max-obj-size', '500',
'--cachedir', self.cache_dir, '--quiet',
self.storage_url ], stdin=subprocess.PIPE)
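        # mkfs.s3ql reads the passphrase twice from stdin (presumably entry
        # plus confirmation), hence the two identical writes below.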
print(self.passphrase, file=proc.stdin)
print(self.passphrase, file=proc.stdin)
proc.stdin.close()
self.assertEqual(proc.wait(), 0)
def test_passphrase(self):
self.mkfs()
passphrase_new = 'sd982jhd'
        proc = subprocess.Popen([sys.executable, os.path.join(BASEDIR, 'bin', 's3qladm'),
'--quiet', 'passphrase',
self.storage_url ], stdin=subprocess.PIPE)
print(self.passphrase, file=proc.stdin)
print(passphrase_new, file=proc.stdin)
print(passphrase_new, file=proc.stdin)
proc.stdin.close()
self.assertEqual(proc.wait(), 0)
plain_backend = local.Backend(self.storage_url, None, None)
backend = BetterBackend(passphrase_new, 'bzip2', plain_backend)
self.assertTrue(isinstance(backend['s3ql_passphrase'], str))
# Somehow important according to pyunit documentation
def suite():
return unittest.makeSuite(AdmTests)
# Allow calling from command line
if __name__ == "__main__":
unittest.main()
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.shortcuts import render
from django.http import HttpResponse
def hello_world_view(request):
return HttpResponse("hello world", conten | t_type="text/plain")
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import contents.models
class Migration(migrations.Migration):
dependencies = [
('contents', '0017_auto_20170329_1504'),
]
operations = [
migrations.AlterField(
model_name='frontpageimage',
name='image',
            field=models.ImageField(null=True, upload_to=contents.models.get_front_page_image_path),
),
]
# coding: utf-8
#
# This file is part of Progdupeupl.
#
# Progdupeupl is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Progdupeupl is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Progdupeupl. If not, see <http://www.gnu.org/licenses/>.
"""Tests for utils app."""
import unittest
import hashlib
from django.contrib.auth.models import User
from django_dynamic_fixture import G
from pdp.member.models import Profile, ActivationToken
from pdp.utils.templatetags.profile import profile
from pdp.utils.templatetags.interventions import interventions_topics
from pdp.utils.paginator import paginator_range
from pdp.utils import mail
class TemplateTagsTests(unittest.TestCase):
"""Test for the custom template tags about users."""
def setUp(self):
self.user = G(User)
def test_profile_none(self):
"""Test the output of profile templatetag if profile does not exist."""
self.assertEqual(None, profile(self.user))
def test_profile_existing(self):
"""Test the output of profile templatetag if profile does exist."""
p = G(Profile, user=self.user)
self.assertEqual(p, profile(self.user))
def test_interventions_none(self):
"""Test templatetags when no topic should match."""
self.assertEqual(interventions_topics(self.user), {'unread': [],
'read': []})
class PaginatorRangeTests(unittest.TestCase):
"""Tests for the paginator_range function."""
def test_out_of_range(self):
self.assertRaises(ValueError, lambda: paginator_range(3, 2))
def test_one(self):
result = paginator_range(1, 1)
self.assertEqual(result, [1])
def test_small(self):
result = paginator_range(2, 3)
self.assertEqual(result, [1, 2, 3])
def test_small_limit(self):
result = paginator_range(1, 4)
self.assertEqual(result, [1, 2, 3, 4])
def test_big_start(self):
result = paginator_range(1, 10)
self.assertEqual(result, [1, 2, None, 10])
def test_big_start_limit(self):
result = paginator_range(3, 10)
self.assertEqual(result, [1, 2, 3, 4, None, 10])
def test_big_middle(self):
result = paginator_range(5, 10)
self.assertEqual(result, [1, None, 4, 5, 6, None, 10])
def test_big_end(self):
result = paginator_range(10, 10)
self.assertEqual(result, [1, None, 9, 10])
def test_big_end_limit(self):
result = paginator_range(7, 10)
self.assertEqual(result, [1, None, 6, 7, 8, 9, 10])
class MailTests(unittest.TestCase):
"""Tests for the mail utilities."""
def test_send_templated_mail(self):
recipients = ['test1@localhost']
result = mail.send_templated_mail(
subject='Fake subject',
template='base.txt',
context={},
recipients=recipients
)
self.assertEqual(result, 1)
def test_send_mail_to_confirm_registration(self):
user = G(User, username='Blaireau1', email='test1@localhost')
link = hashlib.sha1('blbl'.encode('ascii')).hexdigest()
token = G(ActivationToken, user=user, token=link)
result = mail.send_mail_to_confirm_registration(token)
self.assertEqual(result, 1)
',
'UCSR1C_UMSEL1': '$C0',
'UCSR1C_UPM1': '$30',
'UCSR1C_USBS1': '$08',
'UCSR1C_UCSZ1': '$06',
'UCSR1C_UCPOL1': '$01',
'UBRR1': '&204',
'SPMCSR': '&87',
'SPMCSR_SPMIE': '$80',
'SPMCSR_RWWSB': '$40',
'SPMCSR_SIGRD': '$20',
'SPMCSR_RWWSRE': '$10',
'SPMCSR_BLBSET': '$08',
'SPMCSR_PGWRT': '$04',
'SPMCSR_PGERS': '$02',
'SPMCSR_SPMEN': '$01',
'EEAR': '&65',
'EEDR': '&64',
'EECR': '&63',
'EECR_EEPM': '$30',
'EECR_EERIE': '$08',
'EECR_EEMPE': '$04',
'EECR_EEPE': '$02',
'EECR_EERE': '$01',
'OCR0B': '&72',
'OCR0A': '&71',
'TCNT0': '&70',
'TCCR0B': '&69',
'TCCR0B_FOC0A': '$80',
'TCCR0B_FOC0B': '$40',
'TCCR0B_WGM02': '$08',
'TCCR0B_CS0': '$07',
'TCCR0A': '&68',
'TCCR0A_COM0A': '$C0',
'TCCR0A_COM0B': '$30',
'TCCR0A_WGM0': '$03',
'TIMSK0': '&110',
'TIMSK0_OCIE0B': '$04',
'TIMSK0_OCIE0A': '$02',
'TIMSK0_TOIE0': '$01',
'TIFR0': '&53',
'TIFR0_OCF0B': '$04',
'TIFR0_OCF0A': '$02',
'TIFR0_TOV0': '$01',
'GTCCR': '&67',
'GTCCR_TSM': '$80',
'GTCCR_PSRSYNC': '$01',
'TCCR3A': '&144',
'TCCR3A_COM3A': '$C0',
'TCCR3A_COM3B': '$30',
'TCCR3A_COM3C': '$0C',
'TCCR3A_WGM3': '$03',
'TCCR3B': '&145',
'TCCR3B_ICNC3': '$80',
'TCCR3B_ICES3': '$40',
'TCCR3B_WGM3': '$18',
'TCCR3B_CS3': '$07',
'TCCR3C': '&146',
'TCCR3C_FOC3A': '$80',
'TCCR3C_FOC3B': '$40',
'TCCR3C_FOC3C': '$20',
'TCNT3': '&148',
'OCR3A': '&152',
'OCR3B': '&154',
'OCR3C': '&156',
'ICR3': '&150',
'TIMSK3': '&113',
'TIMSK3_ICIE3': '$20',
'TIMSK3_OCIE3C': '$08',
'TIMSK3_OCIE3B': '$04',
'TIMSK3_OCIE3A': '$02',
'TIMSK3_TOIE3': '$01',
'TIFR3': '&56',
'TIFR3_ICF3': '$20',
'TIFR3_OCF3C': '$08',
'TIFR3_OCF3B': '$04',
'TIFR3_OCF3A': '$02',
'TIFR3_TOV3': '$01',
'TCCR1A': '&128',
'TCCR1A_COM1A': '$C0',
'TCCR1A_COM1B': '$30',
'TCCR1A_COM1C': '$0C',
'TCCR1A_WGM1': '$03',
'TCCR1B': '&129',
'TCCR1B_ICNC1': '$80',
'TCCR1B_ICES1': '$40',
'TCCR1B_WGM1': '$18',
'TCCR1B_CS1': '$07',
'TCCR1C': '&130',
'TCCR1C_FOC1A': '$80',
'TCCR1C_FOC1B': '$40',
'TCCR1C_FOC1C': '$20',
'TCNT1': '&132',
'OCR1A': '&136',
'OCR1B': '&138',
'OCR1C': '&140',
'ICR1': '&134',
'TIMSK1': '&111',
'TIMSK1_ICIE1': '$20',
'TIMSK1_OCIE1C': '$08',
'TIMSK1_OCIE1B': '$04',
'TIMSK1_OCIE1A': '$02',
'TIMSK1_TOIE1': '$01',
'TIFR1': '&54',
'TIFR1_ICF1': '$20',
'TIFR1_OCF1C': '$08',
'TIFR1_OCF1B': '$04',
'TIFR1_OCF1A': '$02',
'TIFR1_TOV1': '$01',
'OCDR': '&81',
'MCUCR': '&85',
'MCUCR_JTD': '$80',
'MCUSR': '&84',
'MCUSR_JTRF': '$10',
'EICRA': '&105',
'EICRA_ISC3': '$C0',
'EICRA_ISC2': '$30',
'EICRA_ISC1': '$0C',
'EICRA_ISC0': '$03',
'EICRB': '&106',
'EICRB_ISC7': '$C0',
'EICRB_ISC6': '$30',
'EICRB_ISC5': '$0C',
'EICRB_ISC4': '$03',
'EIMSK': '&61',
'EIMSK_INT': '$FF',
'EIFR': '&60',
'EIFR_INTF': '$FF',
'PCMSK0': '&107',
'PCIFR': '&59',
'PCIFR_PCIF0': '$01',
'PCICR': '&104',
'PCICR_PCIE0': '$01',
'TCCR4A': '&192',
'TCCR4A_COM4A': '$C0',
'TCCR4A_COM4B': '$30',
'TCCR4A_FOC4A': '$08',
'TCCR4A_FOC4B': '$04',
'TCCR4A_PWM4A': '$02',
'TCCR4A_PWM4B': '$01',
'TCCR4B': '&193',
'TCCR4B_PWM4X': '$80',
'TCCR4B_PSR4': '$40',
'TCCR4B_DTPS4': '$30',
'TCCR4B_CS4': '$0F',
'TCCR4C': '&194',
'TCCR4C_COM4A1S': '$80',
'TCCR4C_COM4A0S': '$40',
'TCCR4C_COM4B1S': '$20',
'TCCR4C_COM4B0S': '$10',
'TCCR4C_COM4D': '$0C',
'TCCR4C_FOC4D': '$02',
'TCCR4C_PWM4D': '$01',
'TCCR4D': '&195',
'TCCR4D_FPIE4': '$80',
'TCCR4D_FPEN4': '$40',
'TCCR4D_FPNC4': '$20',
'TCCR4D_FPES4': '$10',
'TCCR4D_FPAC4': '$08',
'TCCR4D_FPF4': '$04',
'TCCR4D_WGM4': '$03',
'TCCR4E': '&196',
'TCCR4E_TLOCK4': '$80',
'TCCR4E_ENHC4': '$40',
'TCCR4E_OC4OE': '$3F',
'TCNT4': '&190',
'TC4H': '&191',
'OCR4A': '&207',
'OCR4B': '&208',
'OCR4C': '&209',
'OCR4D': '&210',
'TIMSK4': '&114',
'TIMSK4_OCIE4D': '$80',
'TIMSK4_OCIE4A': '$40',
'TIMSK4_OCIE4B': '$20',
'TIMSK4_TOIE4': '$04',
'TIFR4': '&57',
'TIFR4_OCF4D': '$80',
'TIFR4_OCF4A': '$40',
'TIFR4_OCF4B': '$20',
'TIFR4_TOV4': '$04',
'DT4': '&212',
'DT4_DT4L': '$FF',
'PORTB': '&37',
'DDRB': '&36',
'PINB': '&35',
'PORTC': '&40',
'DDRC': '&39',
'PINC': '&38',
'PORTE': '&46',
'DDRE': '&45',
'PINE': '&44',
'PORTF': '&49',
'DDRF': '&48',
'PINF': '&47',
'ADMUX': '&124',
'ADMUX_REFS': '$C0',
'ADMUX_ADLAR': '$20',
'ADMUX_MUX': '$1F',
'ADCSRA': '&122',
'ADCSRA_ADEN': '$80',
'ADCSRA_ADSC': '$40',
'ADCSRA_ADATE': '$20',
'ADCSRA_ADIF': '$10',
'ADCSRA_ADIE': '$08',
'ADCSRA_ADPS': '$07',
'ADC': '&120',
'ADCSRB': '&123',
'ADCSRB_ADHSM': '$80',
'ADCSRB_MUX5': '$20',
'ADCSRB_ADTS': '$17',
'DIDR0': '&126',
'DIDR0_ADC7D': '$80',
'DIDR0_ADC6D': '$40',
'DIDR0_ADC5D': '$20',
'DIDR0_ADC4D': '$10',
'DIDR0_ADC3D': '$08',
'DIDR0_ADC2D': '$04',
'DIDR0_ADC1D': '$02',
'DIDR0_ADC0D': '$01',
'DIDR2': '&125',
'DIDR2_ADC13D': '$20',
'DIDR2_ADC12D': '$10',
'DIDR2_ADC11D': '$08',
'DIDR2_ADC10D': '$04',
'DIDR2_ADC9D': '$02',
'DIDR2_ADC8D': '$01',
'ACSR': '&80',
'ACSR_ACD': '$80',
'ACSR_ACBG': '$40',
'ACSR_ACO': '$20',
'ACSR_ACI': '$10',
'ACSR_ACIE': '$08',
'ACSR_ACIC': '$04',
'ACSR_ACIS': '$03',
'DIDR1': '&127',
'DIDR1_AIN1D': '$02',
'DIDR1_AIN0D': '$01',
'SREG': '&95',
'SREG_I': '$80',
'SREG_T': '$40',
'SREG_H': '$20',
'SREG_S': '$10',
'SREG_V': '$08',
'SREG_N': '$04',
'SREG_Z': '$02',
'SREG_C': '$01',
'SP': '&93',
'OSCCAL': '&102',
'RCCTRL': '&103',
'RCCTRL_RCFREQ': '$01',
'CLKPR': '&97',
'CLKPR_CLKPCE': '$80',
'CLKPR_CLKPS': '$0F',
'SMCR': '&83',
'SMCR_SM': '$0E',
'SMCR_SE': '$01',
'EIND': '&92',
'GPIOR2': '&75',
'GPIOR2_GPIOR': '$FF',
'GPIOR1': '&74',
'GPIOR1_GPIOR': '$FF',
'GPIOR0': '&62',
'GPIOR0_GPIOR07': '$80',
'GPIOR0_GPIOR06': '$40',
'GPIOR0_GPIOR05': '$20',
'GPIOR0_GPIOR04': '$10',
'GPIOR0_GPIOR03': '$08',
'GPIOR0_GPIOR02': '$04',
'GPIOR0_GPIOR01': '$02',
'GPIOR0_GPIOR00': '$01',
'PRR1': '&101',
'PRR1_PRUSB': '$80',
'PRR1_PRTIM3': '$08',
'PRR1_PRUSART1': '$01',
'PRR0': '&100',
'PRR0_PRTWI': '$80',
'PRR0_PRTIM2': '$40',
'PRR0_PRTIM0': '$20',
'PRR0_PRTIM1': '$08',
'PRR0_PRSPI': '$04',
'PRR0_PRUSART0': '$02',
'PRR0_PRADC': '$01',
'CLKSTA': '&199',
'CLKSTA_RCON': '$02',
'CLKSTA_EXTON': '$01',
'CLKSEL1': '&198',
'CLKSEL1_RCCKSEL': '$F0',
'CLKSEL1_EXCKSEL': '$0F',
'CLKSEL0': '&197',
'CLKSEL0_RCSUT': '$C0',
'CLKSEL0_EXSUT': '$30',
'CLKSEL0_RCE': '$08',
'CLKSEL0_EXTE': '$04',
'CLKSEL0_CLKS': '$01',
'PLLCSR': '&73',
'PLLCSR_PINDIV': '$10',
'PLLCSR_PLLE': '$02',
'PLLCSR_PLOCK': '$01',
'PLLFRQ': '&82',
'PLLFRQ_PINMUX': '$80',
'PLLFRQ_PLLUSB': '$40',
'PLLFRQ_PLLTM': '$30',
'PLLFRQ_PDIV': '$0F',
'UEINT': '&244',
'UEBCHX': '&243',
'UEBCLX': '&242',
'UEDATX': '&241',
'UEDATX_DAT': '$FF',
'UEIENX': '&240',
'UEIENX_FLERRE': '$80',
'UEIENX_NAKINE': '$40',
'UEIENX_NAKOUTE': '$10',
'UEIENX_RXSTPE': '$08',
'UEIENX_RXOUTE': '$04',
'UEIENX_STALLEDE': '$02',
'UEIENX_TXINE': '$01',
'UESTA1X': '&239',
'UESTA1X_CTRLDIR': '$04',
'UESTA1X_CURRBK': '$03',
'UESTA0X': '&238',
'UESTA0X_CFGOK': '$80',
'UESTA0X_OVERFI': '$40',
'UESTA0X_UNDERFI': '$20',
'UESTA0X_DTSEQ': '$0C',
'UESTA0X_NBUSYBK': '$03',
'UECFG1X': '&237',
'UECFG1X_EPSIZE': '$70',
'UECFG1X_EPBK': '$0C',
'UECFG1X_ALLOC': '$02',
'UECFG0X': '&236',
'UECFG0X_EPTYPE': '$C0',
'UECFG0X_EPDIR': '$01',
'UECONX': '&235',
'UECONX_STALLRQ': '$20',
'UECONX_STALLRQC': '$10',
'UECONX_RSTDT': '$08',
'UECONX_EPEN': '$01',
'UERST': '&234',
'UERST_EPRST': '$7F',
'UENUM': '&233',
'UEINTX': '&232',
'UEINTX_FIFOCON': '$80',
'UEINTX_NAKINI': '$40',
'UEINTX_RWAL': '$20',
'UEINTX_NAKOUTI': '$10',
'UEINTX_RXSTPI': '$08',
'UEINTX_RXOUTI': '$04',
'UEINTX_STALLEDI': '$02',
'UEINTX_TXINI': '$01',
'UDMFN': '&230',
'UDMFN_FNCERR': '$10',
'UDFNUM': '&228',
'UDADDR': '&227',
'UDADDR_ADDEN': '
    if io_loop is None:
io_loop = IOLoop.current()
request = httpclient.HTTPRequest(url, connect_timeout=connect_timeout,
validate_cert=kwargs.get("validate_cert", True))
request = httpclient._RequestProxy(request, options)
conn = WebSocketClientConnection(io_loop, request)
if callback is not None:
io_loop.add_future(conn.connect_future, callback)
return conn.connect_future
class WebSocketProxy(TCPServer):
"""
    Listen on a port and delegate each accepted connection to a WebSocketProxyConnection
"""
def __init__(self, port, ws_url, **kwargs):
super(WebSocketProxy, self).__init__(kwargs.get("io_loop"),
                                             kwargs.get("ssl_options"))
self.bind(port,
kwargs.get("address", ''),
kwargs.get("family", socket.AF_UNSPEC),
kwargs.get("backlog", 128))
self.ws_url = ws_url
self.ws_options = kwargs.get("ws_options", {})
self.filters = kwargs.get("filters", [])
self.serving = False
        self.ws_conn = None
self._address_list = []
@property
def address_list(self):
return self._address_list
def handle_stream(self, stream, address):
"""
Handle a new client connection with a proxy over websocket
"""
logger.info("Got connection from %s on %s" % (tuple_to_address(stream.socket.getpeername()),
tuple_to_address(stream.socket.getsockname())))
self.ws_conn = WebSocketProxyConnection(self.ws_url, stream, address,
filters=self.filters,
ws_options=self.ws_options)
self.ws_conn.connect()
def start(self, num_processes=1):
super(WebSocketProxy, self).start(num_processes)
self._address_list = [(s.getsockname()[0], s.getsockname()[1]) for s in self._sockets.values()]
self.serving = True
def stop(self):
super(WebSocketProxy, self).stop()
self.serving = False
def __str__(self):
return "WebSocketProxy %s" % (" | ".join(["%s --> %s" %
("%s:%d" % (a, p), self.ws_url) for (a, p) in self.address_list]))
class WebSocketProxyConnection(object):
"""
Handles the client connection and works as a proxy over a websocket connection
"""
def __init__(self, url, io_stream, address, ws_options=None, **kwargs):
self.url = url
self.io_loop = kwargs.get("io_loop")
self.connect_timeout = kwargs.get("connect_timeout", None)
self.keep_alive = kwargs.get("keep_alive", None)
self.ws_options = ws_options
self.io_stream, self.address = io_stream, address
self.filters = kwargs.get("filters", [])
self.io_stream.set_close_callback(self.on_close)
self.ws_conn = None
def connect(self):
logger.info("Connecting WebSocket at url %s" % self.url)
websocket_connect(self.url,
self.io_loop,
callback=self.on_open,
connect_timeout=self.connect_timeout,
**self.ws_options)
def on_open(self, ws_conn):
"""
When the websocket connection is handshaked, start reading for data over the client socket
connection
"""
try:
self.ws_conn = ws_conn.result()
except httpclient.HTTPError as e:
#TODO: change with raise EndpointNotAvailableException(message="The server endpoint is not available") from e
raise EndpointNotAvailableException("The server endpoint is not available", cause=e)
self.ws_conn.on_message = self.on_message
self.ws_conn.release_callback = self.on_close
self.io_stream.read_until_close(self.on_close, streaming_callback=self.on_peer_message)
def on_message(self, message):
"""
On a message received from websocket, send back to client peer
"""
try:
data = None if message is None else bytes(message)
for filtr in self.filters:
data = filtr.ws_to_socket(data=data)
if data:
self.io_stream.write(data)
except FilterException as e:
logger.exception(e)
self.on_close()
def on_close(self, *args, **kwargs):
"""
Handles the close event from the client socket
"""
logger.info("Closing connection with client at {0}:{1}".format(*self.address))
logger.debug("Received args %s and %s", args, kwargs)
if not self.io_stream.closed():
self.io_stream.close()
def on_peer_message(self, message):
"""
On data received from client peer, forward through WebSocket
"""
try:
data = None if message is None else bytes(message)
for filtr in self.filters:
data = filtr.socket_to_ws(data=data)
if data:
self.ws_conn.write_message(data, binary=True)
except FilterException as e:
logger.exception(e)
self.on_close()
class WSTunnelClient(object):
"""
Manages redirects from local ports to remote websocket servers
"""
def __init__(self, proxies=None, address='', family=socket.AF_UNSPEC, io_loop=None, ssl_options=None,
ws_options=None):
self.stream_options = {
"address": address,
"family": family,
"io_loop": io_loop,
"ssl_options": ssl_options,
}
self.ws_options = ws_options or {}
self.proxies = proxies or {}
self.serving = False
self._num_proc = 1
if proxies:
for port, ws_url in proxies.items():
self.add_proxy(port, WebSocketProxy(port=port,
ws_url=ws_url,
ws_options=self.ws_options,
**self.stream_options))
def add_proxy(self, key, ws_proxy):
"""
Adds a proxy to the list.
        If the tunnel is serving connections, the proxy gets started.
"""
self.proxies[key] = ws_proxy
if self.serving:
ws_proxy.start(self._num_proc)
logger.info("Started %s" % ws_proxy)
def remove_proxy(self, key):
"""
Removes a proxy from the list.
        If the tunnel is serving connections, the proxy gets stopped.
"""
ws_proxy = self.proxies.get(key)
if ws_proxy:
if self.serving:
ws_proxy.stop()
logger.info("Removing %s" % ws_proxy)
del self.proxies[key]
def get_proxy(self, key):
"""
Return the proxy associated to the given name.
"""
return self.proxies.get(key)
@property
def address_list(self):
"""
        Return the list of (<host>, <port>) address tuples currently in use
"""
l = []
for service in self.proxies.values():
l.extend(service.address_list)
return l
def install_filter(self, filtr):
"""
Install the given filter to all the current mapped services
"""
for ws_proxy in self.proxies.values():
ws_proxy.filters.append(filtr)
def uninstall_filter(self, filtr):
"""
Uninstall the given filter from all the current mapped services
"""
for ws_proxy in self.proxies.values():
ws_proxy.filters.remove(filtr)
def start(self, num_processes=1):
"""
Start the client tunnel service by starting each configured proxy
"""
logger.info("Starting %d %s processes" % (num_processes, self.__class__.__name__))
        self._num_proc = num_processes
for key, ws_proxy in self.proxies.items():
ws_proxy.start(num_processes)
            logger.info("Started %s" % ws_proxy)
import collections
import six
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.saved_model import signature_constants
from tensorflow.python.training import monitored_session
from tensorflow.python.training import session_run_hook
from tensorflow.python.util import nest
class ModeKeys(object):
"""Standard names for model modes.
The following standard keys are defined:
* `TRAIN`: training mode.
* `EVAL`: evaluation mode.
* `PREDICT`: inference mode.
"""
TRAIN = 'train'
EVAL = 'eval'
PREDICT = 'infer'
LOSS_METRIC_KEY = 'loss'
AVERAGE_LOSS_METRIC_KEY = 'average_loss'
class EstimatorSpec(
collections.namedtuple('EstimatorSpec', [
'predictions', 'loss', 'train_op', 'eval_metric_ops',
'export_outputs', 'training_chief_hooks', 'training_hooks',
'scaffold', 'evaluation_hooks'
])):
"""Ops and objects returned from a `model_fn` and passed to `Estimator`.
`EstimatorSpec` fully defines the model to be run by `Estimator`.
"""
def __new__(cls,
mode,
predictions=None,
loss=None,
train_op=None,
eval_metric_ops=None,
export_outputs=None,
training_chief_hooks=None,
training_hooks=None,
scaffold=None,
evaluation_hooks=None):
"""Creates a validated `EstimatorSpec` instance.
Depending on the value of `mode`, different arguments are required. Namely
    * For `mode == ModeKeys.TRAIN`: required fields are `loss` and `train_op`.
* For `mode == ModeKeys.EVAL`: required field is `loss`.
    * For `mode == ModeKeys.PREDICT`: required fields are `predictions`.
model_fn can populate all arguments independent of mode. In this case, some
arguments will be ignored by `Estimator`. E.g. `train_op` will be ignored
in eval and infer modes. Example:
```python
def my_model_fn(mode, features, labels):
predictions = ...
loss = ...
train_op = ...
return tf.estimator.EstimatorSpec(
mode=mode,
predictions=predictions,
loss=loss,
train_op=train_op)
```
Alternatively, model_fn can just populate the arguments appropriate to the
given mode. Example:
```python
def my_model_fn(mode, features, labels):
if (mode == tf.estimator.ModeKeys.TRAIN or
mode == tf.estimator.ModeKeys.EVAL):
loss = ...
else:
loss = None
if mode == tf.estimator.ModeKeys.TRAIN:
train_op = ...
else:
train_op = None
if mode == tf.estimator.ModeKeys.PREDICT:
predictions = ...
else:
predictions = None
return tf.estimator.EstimatorSpec(
mode=mode,
predictions=predictions,
loss=loss,
train_op=train_op)
```
Args:
mode: A `ModeKeys`. Specifies if this is training, evaluation or
prediction.
predictions: Predictions `Tensor` or dict of `Tensor`.
loss: Training loss `Tensor`. Must be either scalar, or with shape `[1]`.
train_op: Op for the training step.
eval_metric_ops: Dict of metric results keyed by name. The values of the
dict are the results of calling a metric function, namely a
`(metric_tensor, update_op)` tuple.
export_outputs: Describes the output signatures to be exported to
`SavedModel` and used during serving.
A dict `{name: output}` where:
* name: An arbitrary name for this output.
* output: an `ExportOutput` object such as `ClassificationOutput`,
`RegressionOutput`, or `PredictOutput`.
Single-headed models only need to specify one entry in this dictionary.
Multi-headed models should specify one entry for each head, one of
which must be named using
signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY.
training_chief_hooks: Iterable of `tf.train.SessionRunHook` objects to
run on the chief worker during training.
training_hooks: Iterable of `tf.train.SessionRunHook` objects to run
on all workers during training.
scaffold: A `tf.train.Scaffold` object that can be used to set
initialization, saver, and more to be used in training.
evaluation_hooks: Iterable of `tf.train.SessionRunHook` objects to
run during evaluation.
Returns:
A validated `EstimatorSpec` object.
Raises:
ValueError: If validation fails.
TypeError: If any of the arguments is not the expected type.
"""
# Validate train_op.
if train_op is None:
if mode == ModeKeys.TRAIN:
raise ValueError('Missing train_op.')
else:
_check_is_tensor_or_operation(train_op, 'train_op')
# Validate loss.
if loss is None:
if mode in (ModeKeys.TRAIN, ModeKeys.EVAL):
raise ValueError('Missing loss.')
else:
loss = _check_is_tensor(loss, 'loss')
loss_shape = loss.get_shape()
if loss_shape.num_elements() not in (None, 1):
raise ValueError('Loss must be scalar, given: {}'.format(loss))
if not loss_shape.is_compatible_with(tensor_shape.scalar()):
loss = array_ops.reshape(loss, [])
# Validate predictions.
if predictions is None:
if mode == ModeKeys.PREDICT:
raise ValueError('Missing predictions.')
predictions = {}
else:
if isinstance(predictions, dict):
predictions = {
k: _check_is_tensor(v, 'predictions[{}]'.format(k))
for k, v in six.iteritems(predictions)
}
else:
predictions = _check_is_tensor(predictions, 'predictions')
# Validate eval_metric_ops.
if eval_metric_ops is None:
eval_metric_ops = {}
else:
if not isinstance(eval_metric_ops, dict):
raise TypeError(
'eval_metric_ops must be a dict, given: {}'.format(eval_metric_ops))
for key, metric_value_and_update in six.iteritems(eval_metric_ops):
if (not isinstance(metric_value_and_update, tuple) or
len(metric_value_and_update) != 2):
raise TypeError(
'Values of eval_metric_ops must be (metric_value, update_op) '
'tuples, given: {} for key: {}'.format(
metric_value_and_update, key))
metric_value, metric_update = metric_value_and_update
for metric_value_member in nest.flatten(metric_value):
# Allow (possibly nested) tuples for metric values, but require that
# each of them be Tensors or Operations.
_check_is_tensor_or_operation(metric_value_member,
'eval_metric_ops[{}]'.format(key))
_check_is_tensor_or_operation(metric_update,
'eval_metric_ops[{}]'.format(key))
# Validate export_outputs.
if export_outputs is not None:
if not isinstance(export_outputs, dict):
raise TypeError('export_outputs must be dict, given: {}'.format(
export_outputs))
for v in six.itervalues(export_outputs):
if not isinstance(v, ExportOutput):
raise TypeError(
'Values in export_outputs must be ExportOutput objects. '
'Given: {}'.format(export_outputs))
# Note export_outputs is allowed to be empty.
if len(export_outputs) == 1:
(key, value), = export_outputs.items()
if key != signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
export_outputs[
signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY] = value
if len(export_outputs) > 1:
if (signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY
not in export_outputs):
raise ValueError(
'Multiple export_outputs were provided, but none of them is '
'specified as the default. Do this by naming one of them with '
'signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY.')
# Validate that all tensors and ops are from the default graph.
default_graph = ops.get_default_graph()
# We enumerate possible error causes here to aid in debugging.
error_message_template = (
'{0} wit |
import logging
from airflow.contrib.hooks.bigquery_hook import BigQueryHook
from airflow.models import BaseOperator
from airflow.utils import apply_defaults
class BigQueryOperator(BaseOperator):
"""
Executes BigQuery SQL queries in a specific BigQuery database
"""
template_fields = ('bql', 'destination_dataset_table')
template_ext = ('.sql',)
ui_color = '#e4f0e8'
@apply_defaults
def __init__(self,
bql,
                 destination_dataset_table=False,
                 write_disposition='WRITE_EMPTY',
bigquery_conn_id='bigquery_default',
delegate_to=None,
*args,
**kwargs):
"""
Create a new BigQueryOperator.
:param bql: the sql code to be executed
:type bql: Can receive a str representing a sql statement,
a list of str (sql statements), or reference to a template file.
            Template references are recognized by str ending in '.sql'.
:param destination_dataset_table: A dotted dataset.table that, if set,
will store the results of the query.
:type destination_dataset_table: string
:param bigquery_conn_id: reference to a specific BigQuery hook.
:type bigquery_conn_id: string
:param delegate_to: The account to impersonate, if any.
            For this to work, the service account making the request must have domain-wide delegation enabled.
        :type delegate_to: string
"""
super(BigQueryOperator, self).__init__(*args, **kwargs)
self.bql = bql
self.destination_dataset_table = destination_dataset_table
self.write_disposition = write_disposition
self.bigquery_conn_id = bigquery_conn_id
self.delegate_to = delegate_to
def execute(self, context):
logging.info('Executing: %s', str(self.bql))
hook = BigQueryHook(bigquery_conn_id=self.bigquery_conn_id, delegate_to=self.delegate_to)
conn = hook.get_conn()
cursor = conn.cursor()
cursor.run_query(self.bql, self.destination_dataset_table, self.write_disposition)
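# Usage sketch (added for illustration; the DAG, task id and query below are
# hypothetical):
#
#     run_query = BigQueryOperator(
#         task_id='run_query',
#         bql='SELECT COUNT(*) FROM mydataset.mytable',
#         destination_dataset_table='mydataset.result_table',
#         write_disposition='WRITE_TRUNCATE',
#         dag=dag,
#     )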
on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import subprocess, time, sys
import json
import datetime
from optparse import OptionParser
SKIP_TEST="-DskipTests"
AMBARI_AUTH_HEADERS = "--header 'Authorization:Basic YWRtaW46YWRtaW4=' --header 'X-Requested-By: PIVOTAL'"
AMBARI_BUILD_DOCKER_ROOT = "/tmp/ambari-build-docker"
NO_EXIT_SLEEP_TIME=60
RETRY_MAX=20
def git_deep_cleaning():
proc = subprocess.Popen("git clean -xdf",
shell=True,
cwd="/tmp/ambari")
return proc.wait()
def ambariUnitTest():
proc = subprocess.Popen("mvn -fae clean install",
shell=True,
cwd="/tmp/ambari")
return proc.wait()
def buildAmbari(stack_distribution, supplemental_distribution=None):
stack_distribution_param = ("-Dstack.distribution=" + stack_distribution) if stack_distribution is not None else ""
supplemental_distribution_param = ("-Dsupplemental.distribution=" + supplemental_distribution) if supplemental_distribution is not None else ""
proc = subprocess.Popen("mvn -B clean install package rpm:rpm -Dmaven.clover.skip=true -Dfindbugs.skip=true "
+ SKIP_TEST + " "
+ stack_distribution_param + " "
+ supplemental_distribution_param + " "
+ " -Dpython.ver=\"python >= 2.6\"",
shell=True,
cwd="/tmp/ambari")
return proc.wait()
def install_ambari_server():
proc = subprocess.Popen("sudo yum install -y ambari-server-*.x86_64.rpm",
shell=True,
cwd="/tmp/ambari/ambari-server/target/rpm/ambari-server/RPMS/x86_64")
return proc.wait()
def install_ambari_agent():
proc = subprocess.Popen("sudo yum install -y ambari-agent-*.x86_64.rpm",
shell=True,
cwd="/tmp/ambari/ambari-agent/target/rpm/ambari-agent/RPMS/x86_64")
return proc.wait()
def setup_ambari_server():
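    # Piping blank lines into "ambari-server setup" accepts the default answer
    # for every interactive prompt.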
proc = subprocess.Popen("echo -e '\n\n\n\n' | sudo ambari-server setup",
shell=True)
return proc.wait()
def start_ambari_server(debug=False):
proc = subprocess.Popen("sudo ambari-server start" + (" --debug" if debug else ""),
shell=True)
return proc.wait()
def start_dependant_services():
retcode = 0
proc = subprocess.Popen("sudo service sshd start", shell=True)
retcode += proc.wait()
proc = subprocess.Popen("sudo service ntpd start", shell=True)
retcode += proc.wait()
return retcode
def configure_ambari_agent():
proc = subprocess.Popen("hostname -f", stdout=subprocess.PIPE, shell=True)
hostname = proc.stdout.read().rstrip()
proc = subprocess.Popen("sudo sed -i 's/hostname=localhost/hostname=" + hostname + "/g' /etc/ambari-agent/conf/ambari-agent.ini",
shell=True)
return proc.wait()
def start_ambari_agent(wait_until_registered = True):
retcode = 0
proc = subprocess.Popen("service ambari-agent start",
shell=True)
retcode += proc.wait()
if wait_until_registered:
if not wait_until_ambari_agent_registered():
print "ERROR: ambari-agent was not registered."
sys.exit(1)
return retcode
def wait_until_ambari_agent_registered():
    '''
    Return True if the ambari agent is found registered.
    Return False on timeout.
    '''
count = 0
while count < RETRY_MAX:
count += 1
proc = subprocess.Popen("curl " +
"http://localhost:8080/api/v1/hosts " +
AMBARI_AUTH_HEADERS,
stdout=subprocess.PIPE,
shell=True)
hosts_result_string = proc.stdout.read()
hosts_result_json = json.loads(hosts_result_string)
if len(hosts_result_json["items"]) != 0:
return True
time.sleep(5)
return False
def post_blueprint():
proc = subprocess.Popen("curl -X POST -D - " +
"-d @single-node-HDP-2.1-blueprint1.json http://localhost:8080/api/v1/blueprints/myblueprint1 " +
AMBARI_AUTH_HEADERS ,
cwd=AMBARI_BUILD_DOCKER_ROOT + "/blueprints",
shell=True)
return proc.wait()
def create_cluster():
proc = subprocess.Popen("curl -X POST -D - " +
"-d @single-node-hostmapping1.json http://localhost:8080/api/v1/clusters/mycluster1 " +
AMBARI_AUTH_HEADERS ,
cwd=AMBARI_BUILD_DOCKER_ROOT + "/blueprints",
shell=True)
return proc.wait()
# Loop to keep the Docker container from exiting
def no_exit():
    print ""
    print "looping to keep the docker container alive..."
    print ""
while True:
time.sleep(NO_EXIT_SLEEP_TIME)
class ParseResult:
is_deep_clean = False
is_rebuild = False
stack_distribution = None
supplemental_distribution = None
is_test = False
is_install_server = False
is_install_agent = False
is_deploy = False
is_server_debug = False
def parse(argv):
result = ParseResult()
if len(argv) >=2:
parser = OptionParser()
parser.add_option("-c", "--clean",
dest="is_deep_clean",
action="store_true",
default=False,
help="if this option is set, git clean -xdf is executed for the ambari local git repo")
parser.add_option("-b", "--rebuild",
dest="is_rebuild",
action="store_true",
default=False,
help="set this flag if you want to rebuild Ambari code")
parser.add_option("-s", "--stack_distribution",
dest="stack_distribution",
help="set a stack distribution. [HDP|PHD|BIGTOP]. Make sure -b is also set when you set a stack distribution")
parser.add_option("-x", "--supplemental_distribution",
dest="supplemental_distribution",
help="set a supplement stack distribution in addition to the primary one. [BigInsights]. Make sure -b is also set when you set a supplement stack distribution")
parser.add_option("-d", "--server_debug",
dest="is_server_debug",
action="store_true",
default=False,
help="set a debug option for ambari-server")
(options, args) = parser.parse_args(argv[1:])
if options.is_deep_clean:
result.is_deep_clean = True
if options.is_rebuild:
result.is_rebuild = True
if options.stack_distribution:
result.stack_distribution = options.stack_distribution
if options.supplemental_distribution:
result.supplemental_distribution = options.supplemental_distribution
if options.is_server_debug:
result.is_server_debug = True
if argv[0] == "test":
result.is_test = True
if argv[0] == "server":
result.is_install_server = True
if argv[0] == "agent":
result.is_install_server = True
result.is_install_agent = True
if argv[0] == "deploy":
result.is_install_server = True
result.is_install_agent = True
result.is_deploy = True
return result
if __name__ == "__main__":
if len(sys.argv) == 1:
print "specify one of test, server, agent or deploy"
sys.exit(1)
start = datetime.datetime.utcnow()
# test: execute unit test
# server: install ambari-server
# with or without rebuild
# agent: install ambari-server and ambari-agent
# with or without rebuild
# deploy: install ambari-server, ambari-agent and deploy Hadoop
# with or without rebuild
parsed_args = parse(sys.argv[1:])
if parsed_args.is_deep_clean:
retcode = git_deep_cleaning()
if retcode != 0: sys.exit(retcode)
if parsed_args.is_test:
retcode = ambariUnitTest()
end = datetime.datetime.utcnow()
print ""
print "Duration: " + str((end-start).seconds) + " seconds"
sys.exit(retcode)
if parsed_args.is_rebuild:
retcode = buildAmbari(parsed_args.stack_distribution, supplemental_distribution=parsed_args.supplemental_distribution)
if retcode != 0: sys.exit(retcode)
if parsed_args.is_install_server:
retcode = install_ambari_server()
if retcode != 0: sys.exit(retcode)
retcode = setup_ambari_server()
if retcode != 0: sys.exit(retcode)
        retcode = start_ambari_server(parsed_args.is_server_debug)
if retcode != 0: sys.exit(retcode)
retcode = start_dependant_services()
if retcode != 0: sys.exit(retcode)
if parsed_args.is_install_agent:
retcode = install_ambari_agent()
if retcode != 0: sys.exit(retcode)
retcode = configure_ambari_agent()
if retcode != 0: sys.exit(retcode)
retcode = start_ambari_agent()
if retcode != 0: sys.exit(retcode)
if parsed_args.is_deploy:
retcode = post_blueprint()
        if retcode != 0: sys.exit(retcode)
retcode = create_cluster()
if retcode != 0: sys.exit(retcode)
end = datetime.datetime.utcnow()
print ""
print "Duration: " + str((end-start).seconds) + " seconds"
print "Parameters: " + str(sys.argv |
#!/usr/bin/env python
from setuptools import setup, find_packages
with open('pypd/version.py') as version_file:
exec(compile(version_file.read(), version_file.name, 'exec'))
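# Executing version.py defines __version__ in this namespace without importing
# the pypd package itself (which could fail before dependencies are installed).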
options = {
'name': 'pypd',
    'version': __version__,
'packages': find_packages(),
'scripts': [],
'description': 'A python client for PagerDuty API',
'author': 'JD Cumpson',
'author_email': 'jdc@pagerduty.com',
'maintainer_email': 'jdc@pagerduty.com',
'license': 'MIT',
'url': 'https://github.com/PagerDuty/pypd',
'download_url': 'https://github.com/PagerDuty/pypd/archive/master.tar.gz',
'classifiers': [
        'Programming Language :: Python',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
],
'install_requires': ['ujson', 'requests'],
'tests_require': [],
'cmdclass': {}
}
setup(**options)
import unittest
from autosklearn.pipeline.components.classification.extra_trees import \
ExtraTreesClassifier
from autosklearn.pipeline.util import _test_classifier, \
_test_classifier_iterative_fit, _test_classifier_predict_proba
import numpy as np
import sklearn.metrics
import sklearn.ensemble
class ExtraTreesComponentTest(unittest.TestCase):
def test_default_configuration(self):
for i in range(10):
predictions, targets = \
_test_classifier(ExtraTreesClassifier)
self.assertAlmostEqual(0.95999999999999996,
sklearn.metrics.accuracy_score(targets, predictions))
def test_default_configuration_predict_proba(self):
for i in range(10):
predictions, targets = \
_test_classifier_predict_proba(ExtraTreesClassifier)
self.assertAlmostEqual(0.12052046298054782,
sklearn.metrics.log_loss(
targets, predictions))
def test_default_configuration_sparse(self):
for i in range(10):
predictions, targets = \
_test_classifier(ExtraTreesClassifier, sparse=True)
self.assertAlmostEqual(0.71999999999999997,
sklearn.metrics.accuracy_score(targets,
predictions))
def test_default_configuration_iterative_fit(self):
for i in range(10):
predictions, targets = \
_test_classifier_iterative_fit(ExtraTreesClassifier)
self.assertAlmostEqual(0.93999999999999995,
sklearn.metrics.accuracy_score(targets,
predictions))
def test_default_configuration_binary(self):
for i in range(10):
predictions, targets = \
                _test_classifier(ExtraTreesClassifier, make_binary=True)
self.assertAlmostEqual(1,
sklearn.metrics.accuracy_score(targets,
predictions))
def test_default_configuration_multilabel(self):
for i in range(10):
predictions, targets = \
_test_classifier(ExtraTreesClassifier, make_multilabel=True)
            self.assertAlmostEqual(0.97060428849902536,
sklearn.metrics.average_precision_score(
targets, predictions))
def test_default_configuration_predict_proba_multilabel(self):
for i in range(10):
predictions, targets = \
_test_classifier_predict_proba(ExtraTreesClassifier,
make_multilabel=True)
self.assertEqual(predictions.shape, ((50, 3)))
self.assertAlmostEqual(0.98976738180772728,
sklearn.metrics.average_precision_score(
targets, predictions))
def test_target_algorithm_multioutput_multiclass_support(self):
cls = sklearn.ensemble.ExtraTreesClassifier()
X = np.random.random((10, 10))
y = np.random.randint(0, 1, size=(10, 10))
# Running this without an exception is the purpose of this test!
        cls.fit(X, y)
import unittest
from os import mkdir
from os.path import exists
from shutil import rmtree
from whoosh import fields, index, qparser, store, writing
class TestIndexing(unittest.TestCase):
def make_index(self, dirname, schema):
if not exists(dirname):
mkdir(dirname)
st = store.FileStorage(dirname)
ix = index.Index(st, schema, create = True)
return ix
def destroy_index(self, dirname):
if exists(dirname):
rmtree(dirname)
def test_creation(self):
s = fields.Schema()
s.add("content", fields.TEXT(phrase = True))
s.add("title", fields.TEXT(stored = True))
s.add("path", fields.ID(stored = True))
s.add("tags", fields.KEYWORD(stored = True))
s.add("quick", fields.NGRAM)
s.add("note", fields.STORED)
st = store.RamStorage()
ix = index.Index(st, s, create = True)
w = writing.IndexWriter(ix)
w.add_document(title = u"First", content = u"This is the first document", path = u"/a",
tags = u"first second third", quick = u"First document", note = u"This is the first document")
w.start_document()
w.add_field("content", u"Let's try this again")
w.add_field("title", u"Second")
w.add_field("path", u"/b")
w.add_field("tags", u"Uno Dos Tres")
w.add_field("quick", u"Second document")
w.add_field("note", u"This is the second document")
w.end_document()
w.commit()
def test_integrity(self):
s = fields.Schema(name = fields.TEXT, value = fields.TEXT)
st = store.RamStorage()
ix = index.Index(st, s, create = True)
w = writing.IndexWriter(ix)
w.add_document(name = u"Yellow brown", value = u"Blue red green purple?")
w.add_document(name = u"Alpha beta", value = u"Gamma delta epsilon omega.")
w.commit()
w = writing.IndexWriter(ix)
w.add_document(name = u"One two", value = u"Three four five.")
w.commit()
tr = ix.term_reader()
self.assertEqual(ix.doc_count_all(), 3)
self.assertEqual(list(tr.lexicon("name")), ["alpha", "beta", "brown", "one", "two", "yellow"])
def test_lengths(self):
s = fields.Schema(f1 = fields.KEYWORD(stored = True, scorable = True),
f2 = fields.KEYWORD(stored = True, scorable = True))
ix = self.make_index("testindex", s)
try:
w = ix.writer()
tokens = u"ABCDEFG"
from itertools import cycle, islice
lengths = [10, 20, 2, 102, 45, 3, 420, 2]
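            # Each added document's f2 field cycles through the tokens so that
            # it contains exactly `length` terms; f1 is left empty on purpose,
            # which the ls1 assertion below relies on.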
for length in lengths:
w.add_document(f2 = u" ".join(islice(cycle(tokens), length)))
w.commit()
dr = ix.doc_reader()
ls1 = [dr.doc_field_length(i, "f1") for i in xrange(0, len(lengths))]
ls2 = [dr.doc_field_length(i, "f2") for i in xrange(0, len(lengths))]
self.assertEqual(ls1, [0]*len(lengths))
self.assertEqual(ls2, lengths)
dr.close()
ix.close()
finally:
self.destroy_index("testindex")
def test_lengths_ram(self):
s = fields.Schema(f1 = fields.KEYWORD(stored = True, scorable = True),
f2 = fields.KEYWORD(stored = True, scorable = True))
st = store.RamStorage()
ix = index.Index(st, s, create = True)
w = writing.IndexWriter(ix)
w.add_document(f1 = u"A B C D E", f2 = u"X Y Z")
w.add_document(f1 = u"B B B B C D D Q", f2 = u"Q R S T")
w.add_document(f1 = u"D E F", f2 = u"U V A B C D E")
w.commit()
dr = ix.doc_reader()
ls1 = [dr.doc_field_length(i, "f1") for i in xrange(0, 3)]
ls2 = [dr.doc_field_length(i, "f2") for i in xrange(0, 3)]
self.assertEqual(dr[0]["f1"], "A B C D E")
self.assertEqual(dr.doc_field_length(0, "f1"), 5)
self.assertEqual(dr.doc_field_length(1, "f1"), 8)
self.assertEqual(dr.doc_field_length(2, "f1"), 3)
self.assertEqual(dr.doc_field_length(0, "f2"), 3)
self.assertEqual(dr.doc_field_length(1, "f2"), 4)
self.assertEqual(dr.doc_field_length(2, "f2"), 7)
self.assertEqual(ix.field_length("f1"), 16)
self.assertEqual(ix.field_length("f2"), 14)
def test_merged_lengths(self):
s = fields.Schema(f1 = fields.KEYWORD(stored = True, scorable = True),
f2 = fields.KEYWORD(stored = True, scorable = True))
st = store.RamStorage()
ix = index.Index(st, s, create = True)
w = writing.IndexWriter(ix)
w.add_document(f1 = u"A B C", f2 = u"X")
w.add_document(f1 = u"B C D E", f2 = u"Y Z")
w.commit()
w = writing.IndexWriter(ix)
w.add_document(f1 = u"A", f2 = u"B C D E X Y")
w.add_document(f1 = u"B C", f2 = u"X")
w.commit(writing.NO_MERGE)
w = writing.IndexWriter(ix)
w.add_document(f1 = u"A B X Y Z", f2 = u"B C")
w.add_document(f1 = u"Y X", f2 = u"A B")
w.commit(writing.NO_MERGE)
dr = ix.doc_reader()
self.assertEqual(dr[0]["f1"], u"A B C")
self.assertEqual(dr.doc_field_length(0, "f1"), 3)
self.assertEqual(dr.doc_field_length(2, "f2"), 6)
self.assertEqual(dr.doc_field_length(4, "f1"), 5)
def test_frequency_keyword(self):
s = fields.Schema(content = fields.KEYWORD)
st = store.RamStorage()
ix = index.Index(st, s, create = True)
w = ix.writer()
w.add_document(content = u"A B C D E")
w.add_document(content = u"B B B B C D D")
w.add_document(content = u"D E F")
w.commit()
tr = ix.term_reader()
self.assertEqual(tr.doc_frequency("content", u"B"), 2)
self.assertEqual(tr.frequency("content", u"B"), 5)
self.assertEqual(tr.doc_frequency("content", u"E"), 2)
self.assertEqual(tr.frequency("content", u"E"), 2)
self.assertEqual(tr.doc_frequency("content", u"A"), 1)
self.assertEqual(tr.frequency("content", u"A"), 1)
self.assertEqual(tr.doc_frequency("content", u"D"), 3)
self.assertEqual(tr.frequency("content", u"D"), 4)
self.assertEqual(tr.doc_frequency("content", u"F"), 1)
self.assertEqual(tr.frequency("content", u"F"), 1)
self.assertEqual(tr.doc_frequency("content", u"Z"), 0)
self.assertEqual(tr.frequency("content", u"Z"), 0)
self.assertEqual(list(tr), [(0, u"A", 1, 1), (0, u"B", 2, 5),
(0, u"C", 2, 2), (0, u"D", 3, 4),
(0, u"E", 2, 2), (0, u"F", 1, 1)])
def test_frequency_text(self):
s = fields.Schema(content = fields.KEYWORD)
st = store.RamStorage()
ix = index.Index(st, s, create = True)
w = ix.writer()
w.add_document(content = u"alfa bravo charlie delta echo")
w.add_document(content = u"bravo bravo bravo bravo charlie delta delta")
w.add_document(content = u"delta echo foxtrot")
w.commit()
tr = ix.term_reader()
        self.assertEqual(tr.doc_frequency("content", u"bravo"), 2)
self.assertEqual(tr.frequency("content", u"bravo"), 5)
self.assertEqual(tr.doc_frequency("content", u"echo"), 2)
self.assertEqual(tr.frequency("content", u"echo"), 2)
self.assertEqual(tr.doc_frequency("content", u"alfa"), 1)
self.assertEqual(tr.frequency("content", u"alfa"), 1)
self.assertEqual(tr.doc_frequency("content", u"delta"), 3)
self.assertEqual(tr.frequency("content", u"delta"), 4)
self.assertEqual(tr.doc_frequency("content", u"foxtrot"), 1)
self.assertEqual(tr.frequency("content", u"foxtrot"), 1)
self.assertEqual(tr.doc_frequency("content", u"zulu"), 0)
self.assertEqual(tr.frequency("content", u"zulu"), 0)
self.assertEqual(list(tr), [(0, u"alfa", 1, 1), (0, u"bravo", 2, 5),
|
from django.conf import settings
from datetime import timedelta
# Endpoint settings
OAI_BASE_URL="http"
if settings.HTTPS == "on":
OAI_BASE_URL="https"
OAI_BASE_URL=OAI_B | ASE_URL+"://"+settings.SITE_NAME
REPOSITORY_NAME = settings.PLATFORM_NAME
ADMIN_EMAIL = settings.TECH_SUPPORT_EMAIL
OAI_ENDPOINT_NAME = 'oai'
RESULTS_LIMIT = 100
RESUMPTION_TOKEN_VALIDITY = timedelta(hours=6)
METADATA_FORMAT = 'oai_dc'
OWN_SET_PREFIX = settings.PLATFORM_NAME
DISABLE_PRINT_OWN_SET_PREFIX = True
RESUMPTION_TOKEN_SALT = 'change_me' # salt used to generate resumption tokens
if hasattr(settings, 'OAI_SETTINGS'):
    OAI_ENDPOINT_NAME = settings.OAI_SETTINGS.get('OAI_ENDPOINT_NAME') or OAI_ENDPOINT_NAME
    RESULTS_LIMIT = settings.OAI_SETTINGS.get('RESULTS_LIMIT') or RESULTS_LIMIT
    METADATA_FORMAT = settings.OAI_SETTINGS.get('METADATA_FORMAT') or METADATA_FORMAT
    RESUMPTION_TOKEN_SALT = settings.OAI_SETTINGS.get('RESUMPTION_TOKEN_SALT') or RESUMPTION_TOKEN_SALT
    # Use get() with a default rather than `or` so an explicit False is honoured.
    DISABLE_PRINT_OWN_SET_PREFIX = settings.OAI_SETTINGS.get('DISABLE_PRINT_OWN_SET_PREFIX', DISABLE_PRINT_OWN_SET_PREFIX)
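# A minimal sketch (an assumption, not taken from this project) of how a host
# project might override these defaults from its Django settings:
#
#   OAI_SETTINGS = {
#       'OAI_ENDPOINT_NAME': 'oai',
#       'RESULTS_LIMIT': 200,
#       'METADATA_FORMAT': 'oai_dc',
#       'RESUMPTION_TOKEN_SALT': 'a-long-random-string',
#   }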
|
from djoser.conf import settings
__all__ = ['settings']
def get_user_email(user):
email_field_name = get_user_email_field_name(user)
return getattr(user, email_field_name, None)
def get_user_email_field_name(user):
    return user.get_email_field_name()
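# Illustrative usage (the user instance here is hypothetical):
#   from django.contrib.auth import get_user_model
#   user = get_user_model().objects.first()
#   get_user_email(user)  # value of the model's email field, or None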
|
from trackers import *
|
# -*- coding: utf-8 -*-
# This file is part of emesene.
#
# emesene is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# emesene is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with emesene; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
''' This module contains the tray icon's class'''
import sys
import PyQt4.QtGui as QtGui
import gui
import extension
from e3 import status
class TrayIcon (QtGui.QSystemTrayIcon, gui.BaseTray):
'''A class that implements the tray icon of emesene for Qt4'''
NAME = 'TrayIcon'
DESCRIPTION = 'Qt4 Tray Icon'
AUTHOR = 'Gabriele "Whisky" Visconti'
WEBSITE = ''
def __init__(self, handler, main_window=None):
'''
constructor
handler -- a e3common.Handler.TrayIconHandler object
'''
gui.BaseTray.__init__(self, handler)
QtGui.QSystemTrayIcon.__init__(self)
self._main_window = main_window
self.menu = None
self._conversations = None
self.setIcon(QtGui.QIcon(gui.theme.image_theme.logo))
self.activated.connect(self._on_tray_icon_clicked)
self.set_login()
# TODO: this is for mac os, and should be changed in the
# future (probably no tray icon at all, just the dock icon)
if sys.platform == 'darwin':
icon = QtGui.QIcon(gui.theme.image_theme.logo)
qt_app = QtGui.QApplication.instance()
qt_app.setWindowIcon(icon)
qt_app.setApplicationName('BHAWH')
else:
self.show()
def set_login(self):
'''Called when the login window is shown. Sets a proper
context menu in the Tray Icon.'''
tray_login_menu_cls = extension.get_default('tray login menu')
self.menu = tray_login_menu_cls(self.handler, self._main_window)
self.setIcon(QtGui.QIcon(gui.theme.image_theme.logo_panel))
self.setToolTip("emesene")
if sys.platform == 'darwin':
QtGui.qt_mac_set_dock_menu(self.menu)
else:
self.setContextMenu(self.menu)
def set_main(self, session):
'''Called when the main window is shown. Stores the contact list
and registers the callback for the status_change_succeed event'''
gui.BaseTray.set_main(self, session)
if self.menu:
self.menu.unsubscribe()
tray_main_menu_cls = extension.get_default('tray main menu')
self.menu = tray_main_menu_cls(self.handler, self._main_window)
self.setToolTip("emesene - " + self.handler.session.account.account)
self._on_status_change_succeed(self.handler.session.account.status)
if sys.platform == 'darwin':
QtGui.qt_mac_set_dock_menu(self.menu)
else:
self.setContextMenu(self.menu)
def set_conversations(self, conversations):
'''Store a reference to the conversation page'''
self._conversations = conversations
def set_visible(self, visible):
'''Changes icon's visibility'''
self.setVisible(visible)
def _on_tray_icon_clicked(self, reason):
'''This slot is called when the user clicks the tray icon.
Toggles main window's visibility'''
if not self._main_window:
return
if reason == QtGui.QSystemTrayIcon.Trigger:
if not self._main_window.isVisible():
self._main_window.show()
self._main_window.activateWindow()
self._main_window.raise_()
else: # visible
if self._main_window.isActiveWindow():
self._main_window.hide()
else:
self._main_window.activateWindow()
self._main_window.raise_()
elif reason == QtGui.QSystemTrayIcon.Context:
if self.menu:
self.menu.show()
def _on_contact_attr_changed(self, *args):
"""
This is called when a contact changes something
"""
        self.menu.list._on_contact_change_something(*args)
def _on_status_change_succeed(self, stat):
"""
This is called when status is successfully changed
"""
if stat not in status.ALL or stat == -1:
return
self.setIcon(QtGui.QIcon(
            gui.theme.image_theme.status_icons_panel[stat]))
def hide(self):
self.unsubscribe()
QtGui.QSystemTrayIcon.setVisible(self, False)
def unsubscribe(self):
self.disconnect_signals()
if self.menu:
self.menu.unsubscribe()
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import datetime
import pytz
def now():
return datetime.datetime.utcnow().replace(tzinfo=pytz.UTC)
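# Illustrative check (not part of the original module): the result is
# timezone-aware, so it compares safely against other aware datetimes.
#   assert now().tzinfo is pytz.UTC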
|
import smbus
from time import sleep
def delay(time):
sleep(time/1000.0)
def delayMicroseconds(time):
sleep(time/1000000.0)
from hd44780 import HD44780
class Screen(HD44780):
"""A driver for MCP23008-based I2C LCD backpacks. The one tested had "WIDE.HK" written on it."""
def __init__(self, bus=1, addr=0x27, debug=False, **kwargs):
"""Initialises the ``Screen`` object.
Kwargs:
* ``bus``: I2C bus number.
* ``addr``: I2C address of the board.
* ``debug``: enables printing out LCD commands.
* ``**kwargs``: all the other arguments, get passed further to HD44780 constructor
"""
self.bus_num = bus
self.bus = smbus.SMBus(self.bus_num)
if type(addr) in [str, unicode]:
addr = int(addr, 16)
self.addr = addr
self.debug = debug
self.i2c_init()
HD44780.__init__(self, debug=self.debug, **kwargs)
def i2c_init(self):
"""Inits the MCP23017 IC for desired operation."""
self.setMCPreg(0x05, 0x0c)
self.setMCPreg(0x00, 0x00)
def write_byte(self, byte, char_mode=False):
"""Takes a byte and sends the high nibble, then the low nibble (as per HD44780 doc). Passes ``char_mode`` to ``self.write4bits``."""
if self.debug and not char_mode:
print(hex(byte))
self.write4bits(byte >> 4, char_mode)
self.write4bits(byte & 0x0F, char_mode)
def write4bits(self, data, char_mode=False):
"""Writes a nibble to the display. If ``char_mode`` is set, holds the RS line high."""
if char_mode:
data |= 0x10
        self.setMCPreg(0x0a, data)
data ^= 0x80
delayMicroseconds(1.0)
self.setMCPreg(0x0a, data)
data ^= 0x80
        delayMicroseconds(1.0)
self.setMCPreg(0x0a, data)
delay(1.0)
def setMCPreg(self, reg, val):
"""Sets the MCP23017 register."""
self.bus.write_byte_data(self.addr, reg, val)
if __name__ == "__main__":
screen = Screen(bus=1, addr=0x27, cols=16, rows=2, debug=True, autoscroll=False)
line = "0123456789012345"
if True:
screen.display_data(line, line[::-1])
sleep(1)
screen.display_data(line[::-1], line)
sleep(1)
screen.clear()
|
# Copyright (c) 2014 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_log import log as logging
from blazar import exceptions
from blazar.i18n import _
LOG = logging.getLogger(__name__)
class BlazarDBException(exceptions.BlazarException):
msg_fmt = _('An unknown database exception occurred')
class BlazarDBDuplicateEntry(BlazarDBException):
msg_fmt = _('Duplicate entry for %(columns)s in %(model)s model was found')
class BlazarDBNotFound(BlazarDBException):
msg_fmt = _('%(id)s %(model)s was not found')
class BlazarDBInvalidFilter(BlazarDBException):
    msg_fmt = _('%(query_filter)s is invalid')
class BlazarDBInvalidFilterOperator(BlazarDBException):
msg_fmt = _('%(filter_operator)s is invalid')
class BlazarDBExtraCapabilitiesNotEnabled(BlazarDBException):
    msg_fmt = _('%(resource_type)s does not have extra capabilities enabled.')
class BlazarDBInvalidExtraCapability(BlazarDBException):
msg_fmt = _('%(property_name)s does not exist for resource type '
'%(resource_type)s.')
class BlazarDBForbiddenExtraCapability(BlazarDBException):
msg_fmt = _('%(property_name)s cannot be set as an extra capability')
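# Illustrative use, assuming BlazarException interpolates keyword arguments
# into msg_fmt in the usual oslo style (the values are hypothetical):
#   raise BlazarDBNotFound(id='42', model='Lease')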
|
from urlparse import urljoin
from os.path import dirname, basename
from xml.etree import ElementTree
from mimetypes import guess_type
from StringIO import StringIO
import requests
def update_print(apibase, password, print_id, progress):
"""
"""
params = {'id': print_id}
data = dict(progress=progress, password=password)
res = requests.post(urljoin(apibase, '/update-atlas.php'), params=params, data=data)
    assert res.status_code == 200, 'POST to update-atlas.php resulted in status %s instead of 200' % res.status_code
def finish_print(apibase, password, print_id, print_info):
"""
"""
params = {'id': print_id}
print_info.update(dict(password=password))
res = requests.post(urljoin(apibase, '/finish-atlas.php'), params=params, data=print_info)
    assert res.status_code == 200, 'POST to finish-atlas.php resulted in status %s instead of 200' % res.status_code
def update_scan(apibase, password, scan_id, progress):
"""
"""
params = {'id': scan_id}
data = {'password': password,
'progress': progress}
res = requests.post(urljoin(apibase, '/update-scan.php'), params=params, data=data)
    assert res.status_code == 200, 'POST to update-scan.php resulted in status %s instead of 200' % res.status_code
def finish_scan(apibase, password, scan_id, uploaded_file, print_id, print_page_number, print_href, min_coord, max_coord, geojpeg_bounds):
"""
"""
params = {'id': scan_id}
data = {
'print_id': print_id,
'print_page_number': print_page_number,
'print_href': print_href,
'password': password,
'uploaded_file': uploaded_file,
'has_geotiff': 'yes',
'has_geojpeg': 'yes',
'has_stickers': 'no',
'min_row': min_coord.row, 'max_row': max_coord.row,
'min_column': min_coord.column, 'max_column': max_coord.column,
'min_zoom': min_coord.zoom, 'max_zoom': max_coord.zoom,
'geojpeg_bounds': '%.8f,%.8f,%.8f,%.8f' % geojpeg_bounds
}
res = requests.post(urljoin(apibase, '/finish-scan.php'), params=params, data=data)
    assert res.status_code == 200, 'POST to finish-scan.php resulted in status %s instead of 200' % res.status_code
def fail_scan(apibase, password, scan_id):
"""
"""
params = {'id': scan_id}
data = {'password': password}
res = requests.post(urljoin(apibase, '/fail-scan.php'), params=params, data=data)
# TODO when does this fail? this failing shouldn't be fatal
    assert res.status_code == 200, 'POST to fail-scan.php resulted in status %s instead of 200' % res.status_code
def finish_form(apibase, password, form_id, action_url, http_method, title, fields):
"""
"""
data = dict(password=password, action_url=action_url, http_method=http_method, title=title)
for (index, field) in enumerate(fields):
data['fields[%d][name]' % index] = field['name']
data['fields[%d][label]' % index] = field['label']
data['fields[%d][type]' % index] = field['type']
params = {'id': form_id}
res = requests.post(urljoin(apibase, '/finish-form.php'), params=params, data=data)
    assert res.status_code == 200, 'POST to finish-form.php resulted in status %s instead of 200' % res.status_code
def fail_form(apibase, password, form_id):
"""
"""
params = {'id': form_id}
data = {'password': password}
res = requests.post(urljoin(apibase, '/fail-form.php'), params=params, data=data)
    assert res.status_code == 200, 'POST to fail-form.php resulted in status %s instead of 200' % res.status_code
def upload(params, file_path, file_contents, apibase, password):
""" Upload a file via the API append.php form input provision thingie.
This allows uploads to either target S3 or the app itself.
"""
params.update(dict(password=password,
dirname=dirname(file_path),
                       mimetype=(guess_type(file_path)[0] or '')))
res = requests.get(urljoin(apibase, '/append.php'), params=params, headers=dict(Accept='application/paperwalking+xml'))
    form = ElementTree.parse(StringIO(res.text)).getroot()
if form.tag == 'form':
form_action = form.attrib['action']
inputs = form.findall('.//input')
fields = {}
files = {}
for input in inputs:
if input.attrib['type'] != 'file' and 'name' in input.attrib:
fields[input.attrib['name']] = input.attrib['value']
elif input.attrib['type'] == 'file':
files[input.attrib['name']] = (basename(file_path), file_contents)
if len(files) == 1:
base_url = [el.text for el in form.findall(".//*") if el.get('id', '') == 'base-url'][0]
resource_url = urljoin(base_url, file_path)
res = requests.post(urljoin(apibase, form_action), data=fields, files=files)
assert res.status_code in range(200, 308), 'POST of file to %s resulting in status %s instead of 2XX/3XX' % (form_action, res.status_code)
return resource_url
raise Exception('Did not find a form with a file input, why is that?')
def append_print_file(print_id, file_path, file_contents, apibase, password):
""" Upload a print.
"""
params = {
"print": print_id,
}
return upload(params, file_path, file_contents, apibase, password)
def append_scan_file(scan_id, file_path, file_contents, apibase, password):
""" Upload a scan.
"""
params = {
"scan": scan_id,
}
return upload(params, file_path, file_contents, apibase, password)
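# Illustrative call (every value here is hypothetical):
#   url = append_print_file(1234, 'prints/1234/atlas.pdf', pdf_bytes,
#                           'https://example.org/api/', 's3cret')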
def get_print_info(print_url):
"""
"""
print print_url
res = requests.get(print_url, headers=dict(Accept='application/paperwalking+xml'))
if res.status_code == 404:
raise Exception("No such atlas: %s" % print_url)
print_ = ElementTree.parse(StringIO(res.text)).getroot()
print_id = print_.attrib['id']
paper = print_.find('paper').attrib['size']
orientation = print_.find('paper').attrib['orientation']
layout = print_.find('paper').attrib.get('layout', 'full-page')
north = float(print_.find('bounds').find('north').text)
south = float(print_.find('bounds').find('south').text)
east = float(print_.find('bounds').find('east').text)
west = float(print_.find('bounds').find('west').text)
print print_id, north, west, south, east, paper, orientation, layout
return print_id, north, west, south, east, paper, orientation, layout
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Provides utilities to preprocess images in CIFAR-10.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
_PADDING = 4
slim = tf.contrib.slim
def preprocess_for_train(image,
output_height,
output_width,
padding=_PADDING):
"""Preprocesses the given image for training.
Note that the actual resizing scale is sampled from
[`resize_size_min`, `resize_size_max`].
Args:
image: A `Tensor` representing an image of arbitrary size.
output_height: The height of the image after preprocessing.
output_width: The width of the image after preprocessing.
    padding: The amount of padding before and after each dimension of the image.
Returns:
A preprocessed image.
"""
tf.summary.image('image', tf.expand_dims(image, 0))
# Transform the image to floats.
image = tf.to_float(image)
if padding > 0:
image = tf.pad(image, [[padding, padding], [padding, padding], [0, 0]])
# image = tf.image.resize_images(image,(output_height,output_width))
# Randomly crop a [height, width] section of the image.
distorted_image = tf.random_crop(image,
[32, 32, 3])
# Randomly flip the image horizontally.
distorted_image = tf.image.random_flip_left_right(distorted_image)
tf.summary.image('distorted_image', tf.expand_dims(distorted_image, 0))
# Because these operations are not commutative, consider randomizing
# the order their operation.
distorted_image = tf.image.random_brightness(distorted_image,
max_delta=63)
distorted_image = tf.image.random_contrast(distorted_image,
lower=0.2, upper=1.8)
# Subtract off the mean and divide by the variance of the pixels.
return tf.image.per_image_standardization(distorted_image)
def preprocess_for_eval(image, output_height, output_width):
"""Preprocesses the given image for evaluation.
Args:
image: A `Tensor` representing an image of arbitrary size.
output_height: The height of the image after preprocessing.
output_width: The width of the image after preprocessing.
Returns:
A preprocessed image.
"""
tf.summary.image('image', tf.expand_dims(image, 0))
# Transform the image to floats.
image = tf.to_float(image)
  # image = tf.image.resize_images(image, (output_height, output_width))
# Resize and crop if needed.
  resized_image = tf.image.resize_image_with_crop_or_pad(image,
                                                         output_height,
                                                         output_width)
  tf.summary.image('resized_image', tf.expand_dims(resized_image, 0))
# Subtract off the mean and divide by the variance of the pixels.
return tf.image.per_image_standardization(resized_image)
def preprocess_image(image, output_height, output_width, is_training=False):
"""Preprocesses the given image.
Args:
image: A `Tensor` representing an image of arbitrary size.
output_height: The height of the image after preprocessing.
output_width: The width of the image after preprocessing.
is_training: `True` if we're preprocessing the image for training and
`False` otherwise.
Returns:
A preprocessed image.
"""
if is_training:
return preprocess_for_train(image, output_height, output_width)
else:
return preprocess_for_eval(image, output_height, output_width)
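# Minimal usage sketch (TF 1.x graph mode; the uint8 input tensor is illustrative):
#   image = tf.zeros([32, 32, 3], dtype=tf.uint8)
#   train_image = preprocess_image(image, 32, 32, is_training=True)
#   eval_image = preprocess_image(image, 32, 32, is_training=False)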
|
#!/usr/bin/env python
import time
from nicfit.aio import Application
async def _main(args):
print(args)
print("Sleeping 2...")
time.sleep(2)
print("Sleeping 0...")
return 0
def atexit():
print("atexit")
app = Application(_main, atexit=atexit)
app.arg_parser.add_argument("--example", help="Example cli")
app.run()
assert not"will not execute"
|
from compare import expect
from django.contrib.auth.models import User
from django.test import TestCase
from django.test.client import Client
from django.core.management import call_command
import sys
from tardis.tardis_portal.models import \
Experiment, Dataset, Dataset_File, ExperimentACL, License, UserProfile, \
ExperimentParameterSet, ExperimentParameter, DatasetParameterSet, DatafileParameterSet
def _create_test_user():
user_ = User(username='tom',
first_name='Thomas',
last_name='Atkins',
email='tommy@atkins.net')
user_.save()
UserProfile(user=user_).save()
return user_
def _create_license():
license_ = License(name='Creative Commons Attribution-NoDerivs 2.5 Australia',
url='http://creativecommons.org/licenses/by-nd/2.5/au/',
                       internal_description='CC BY 2.5 AU',
allows_distribution=True)
license_.save()
return license_
def _create_test_experiment(user, license_):
experiment = Experiment(title='Norwegian Blue',
description='Parrot + 40kV',
created_by=user)
experiment.public_access = Experiment.PUBLIC_ACCESS_FULL
experiment.license = license_
    experiment.save()
experiment.author_experiment_set.create(order=0,
author="John Cleese",
url="http://nla.gov.au/nla.party-1")
experiment.author_experiment_set.create(order=1,
author="Michael Palin",
url="http://nla.gov.au/nla.party-2")
acl = ExperimentACL(experiment=experiment,
pluginId='django_user',
entityId=str(user.id),
isOwner=True,
canRead=True,
canWrite=True,
canDelete=True,
aclOwnershipType=ExperimentACL.OWNER_OWNED)
acl.save()
return experiment
def _create_test_dataset(nosDatafiles):
ds_ = Dataset(description='happy snaps of plumage')
ds_.save()
    for i in range(0, nosDatafiles):
df_ = Dataset_File(dataset=ds_, url='http://planet-python.org/' + str(_next_id()))
df_.save()
ds_.save()
return ds_
def _create_test_data():
# Create 2 experiments with 3 datasets, one of which is in both experiments.
user_ = _create_test_user()
license_ = _create_license()
exp1_ = _create_test_experiment(user_, license_)
exp2_ = _create_test_experiment(user_, license_)
ds1_ = _create_test_dataset(1)
ds2_ = _create_test_dataset(2)
ds3_ = _create_test_dataset(3)
    ds1_.experiments.add(exp1_)
    ds2_.experiments.add(exp1_)
    ds2_.experiments.add(exp2_)
    ds3_.experiments.add(exp2_)
ds1_.save()
ds2_.save()
ds3_.save()
exp1_.save()
exp2_.save()
return (exp1_, exp2_)
_counter = 1
def _next_id():
global _counter
res = _counter
_counter += 1
return res
class RmExperimentTestCase(TestCase):
def setUp(self):
pass
def testList(self):
(exp1_, exp2_) = _create_test_data()
expect(Dataset_File.objects.all().count()).to_be(6)
expect(len(exp1_.get_datafiles())).to_be(3)
expect(len(exp2_.get_datafiles())).to_be(5)
# Check that --list doesn't remove anything
call_command('rmexperiment', exp1_.pk, list=True)
expect(Dataset_File.objects.all().count()).to_be(6)
expect(len(exp1_.get_datafiles())).to_be(3)
expect(len(exp2_.get_datafiles())).to_be(5)
def testRemove(self):
(exp1_, exp2_) = _create_test_data()
expect(Dataset_File.objects.all().count()).to_be(6)
expect(len(exp1_.get_datafiles())).to_be(3)
expect(len(exp2_.get_datafiles())).to_be(5)
# Remove first experiment and check that the shared dataset hasn't been removed
call_command('rmexperiment', exp1_.pk, confirmed=True)
expect(Dataset_File.objects.all().count()).to_be(5)
expect(len(exp2_.get_datafiles())).to_be(5)
#Remove second experiment
call_command('rmexperiment', exp2_.pk, confirmed=True)
expect(Dataset_File.objects.all().count()).to_be(0)
#Check that everything else has been removed too
expect(ExperimentACL.objects.all().count()).to_be(0)
expect(ExperimentParameterSet.objects.all().count()).to_be(0)
expect(ExperimentParameter.objects.all().count()).to_be(0)
expect(DatasetParameterSet.objects.all().count()).to_be(0)
expect(DatafileParameterSet.objects.all().count()).to_be(0)
def tearDown(self):
pass
|
import subprocess
from pkg_resources import resource_filename
def playit(file):
"""
Function used to play a sound file
"""
    filepath = resource_filename(__name__, 'sound/' + file)
subprocess.Popen(["pa | play", filepath])
|
"""revert: add plugin event acl to the admin backend
Revision ID: 97e2d9949db
Revises: 1e5140290977
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '97e2d9949db'
down_revision = '1e5140290977'
POLICY_NAME = 'wazo_default_admin_policy'
ACL_TEMPLATES = ['events.plugin.#']
policy_table = sa.sql.table(
'auth_policy', sa.Column('uuid', sa.String(38)), sa.Column('name', sa.String(80))
)
acl_template_table = sa.sql.table(
'auth_acl_template', sa.Column('id', sa.Integer), sa.Column('template', sa.Text)
)
policy_template = sa.sql.table(
'auth_policy_template',
sa.Column('policy_uuid', sa.String(38)),
sa.Column('template_id', sa.Integer),
)
def _find_acl_template(conn, acl_template):
query = (
sa.sql.select([acl_template_table.c.id])
.where(acl_template_table.c.template == acl_template)
.limit(1)
)
return conn.execute(query).scalar()
def _find_acl_templates(conn, acl_templates):
acl_template_ids = []
for acl_template in acl_templates:
acl_template_id = _find_acl_template(conn, acl_template)
if acl_template_id:
acl_template_ids.append(acl_template_id)
return acl_template_ids
def _get_policy_uuid(conn, policy_name):
policy_query = sa.sql.select([policy_table.c.uuid]).where(
policy_table.c.name == policy_name
)
for policy in conn.execute(policy_query).fetchall():
return policy[0]
def _insert_acl_template(conn, acl_templates):
acl_template_ids = []
for acl_template in acl_templates:
acl_template_id = _find_acl_template(conn, acl_template)
if not acl_template_id:
query = (
acl_template_table.insert()
.returning(acl_template_table.c.id)
.values(template=acl_template)
)
acl_template_id = conn.execute(query).scalar()
acl_template_ids.append(acl_template_id)
return acl_template_ids
def downgrade():
conn = op.get_bind()
policy_uuid = _get_policy_uuid(conn, POLICY_NAME)
acl_template_ids = _insert_acl_template(conn, ACL_TEMPLATES)
op.bulk_insert(
policy_template,
[
{'policy_uuid': policy_uuid, 'template_id': template_id}
for template_id in acl_template_ids
],
)
def upgrade():
conn = op.get_bind()
    acl_template_ids = _find_acl_templates(conn, ACL_TEMPLATES)
    if acl_template_ids:
policy_uuid = _get_policy_uuid(conn, POLICY_NAME)
delete_query = policy_template.delete().where(
sa.sql.and_(
policy_template.c.policy_uuid == policy_uuid,
policy_template.c.template_id.in_(acl_template_ids),
)
)
op.execute(delete_query)
|
from . import minic_ast
class PrettyGenerator(object):
def __init__(self):
# Statements start with indentation of self.indent_level spaces, using
# the _make_indent method
#
self.indent_level = 0
def _make_indent(self):
return ' ' * self.indent_level
def visit(self, node):
method = 'visit_' + node.__class__.__name__
return getattr(self, method, self.generic_visit)(node)
def generic_visit(self, node):
#~ print('generic:', type(node))
        if node is None:
return ''
else:
return ''.join(self.visit(c) for c_name, c in node.children())
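# Hypothetical usage (assumes `ast_root` is a parsed minic_ast node):
#   generator = PrettyGenerator()
#   text = generator.visit(ast_root)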
|
from node_view import NodeGraphView
from node_scene import NodeGraphScene
from items.node_item import NodeItem
from items.connection_item import ConnectionItem
from items.connector_item import BaseConnectorItem, IOConnectorItem, InputConnectorItem, OutputConnectorItem
import node_utils
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db.models import F, Func, Value
class AlphabeticalPaginationMixin(object):
alphabetical_pagination_field = 'name'
def get_alphabetical_pagination_field(self):
return self.alphabetical_pagination_field
def get_selected_letter(self):
        return self.request.GET.get('letter', 'a')
def get_base_queryset(self):
"""
Queryset before applying pagination filters.
"""
qs = super(AlphabeticalPaginationMixin, self).get_queryset().exclude(
**{self.get_alphabetical_pagination_field(): ''}
)
return qs
def get_queryset(self):
qs = self.get_base_queryset()
# FIXME Select Umlauts (using downgrade and also downgrade sort_name field?)
# FIXME Select on TRIM/LEFT as in get_letter_choices
        letter_filter = {
            "{}__istartswith".format(self.get_alphabetical_pagination_field()):
                self.get_selected_letter()}
        return qs.filter(**letter_filter).order_by(self.alphabetical_pagination_field)
def get_letter_choices(self):
return self.get_base_queryset().annotate(name_lower=Func(
Func(
Func(
F(self.get_alphabetical_pagination_field()), function='LOWER'),
function='TRIM'),
Value("1"), function='LEFT')).order_by(
'name_lower').distinct('name_lower').values_list('name_lower', flat=True)
def get_context_data(self, **kwargs):
context = super(AlphabeticalPaginationMixin, self).get_context_data(**kwargs)
context['selected_letter'] = self.get_selected_letter()
context['alphabet'] = self.get_letter_choices()
return context
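# A minimal wiring sketch (the model and view names are assumptions):
#   from django.views.generic import ListView
#
#   class AuthorList(AlphabeticalPaginationMixin, ListView):
#       model = Author
#       alphabetical_pagination_field = 'name'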
|
"""add graphql ACL to users
Revision ID: 2d4882d39dbb
Revises: c4d0e9ec46a9
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '2d4882d39dbb'
down_revision = 'dc2848563b53'
POLICY_NAME = 'wazo_default_user_policy'
ACL_TEMPLATES = ['dird.graphql.me']
policy_table = sa.sql.table(
'auth_policy', sa.Column('uuid', sa.String(38)), sa.Column('name', sa.String(80))
)
acl_template_table = sa.sql.table(
'auth_acl_template', sa.Column('id', sa.Integer), sa.Column('template', sa.Text)
)
policy_template = sa.sql.table(
'auth_policy_template',
sa.Column('policy_uuid', sa.String(38)),
sa.Column('template_id', sa.Integer),
)
def _find_acl_template(conn, acl_template):
query = (
sa.sql.select([acl_template_table.c.id])
.where(acl_template_table.c.template == acl_template)
.limit(1)
)
return conn.execute(query).scalar()
def _find_acl_templates(conn, acl_templates):
acl_template_ids = []
for acl_template in acl_templates:
acl_template_id = _find_acl_template(conn, acl_template)
if acl_template_id:
acl_template_ids.append(acl_template_id)
return acl_template_ids
def _get_policy_uuid(conn, policy_name):
policy_query = sa.sql.select([policy_table.c.uuid]).where(
        policy_table.c.name == policy_name
)
for policy in conn.execute(policy_query).fetchall():
return policy[0]
def _insert_acl_template(conn, acl_templates):
acl_template_ids = []
for acl_template in acl_templates:
acl_template_id = _find_acl_template(conn, acl_template)
if not acl_template_id:
query = (
acl_template_table.insert()
.returning(acl_template_table.c.id)
.values(template=acl_template)
)
acl_template_id = conn.execute(query).scalar()
acl_template_ids.append(acl_template_id)
return acl_template_ids
def _get_acl_template_ids(conn, policy_uuid):
query = sa.sql.select([policy_template.c.template_id]).where(
policy_template.c.policy_uuid == policy_uuid
)
return [acl_template_id for (acl_template_id,) in conn.execute(query).fetchall()]
def upgrade():
conn = op.get_bind()
policy_uuid = _get_policy_uuid(conn, POLICY_NAME)
if not policy_uuid:
return
acl_template_ids = _insert_acl_template(conn, ACL_TEMPLATES)
acl_template_ids_already_associated = _get_acl_template_ids(conn, policy_uuid)
for template_id in set(acl_template_ids) - set(acl_template_ids_already_associated):
query = policy_template.insert().values(
policy_uuid=policy_uuid, template_id=template_id
)
conn.execute(query)
def downgrade():
conn = op.get_bind()
acl_template_ids = _find_acl_templates(conn, ACL_TEMPLATES)
if not acl_template_ids:
return
policy_uuid = _get_policy_uuid(conn, POLICY_NAME)
if not policy_uuid:
return
delete_query = policy_template.delete().where(
sa.sql.and_(
policy_template.c.policy_uuid == policy_uuid,
policy_template.c.template_id.in_(acl_template_ids),
)
)
op.execute(delete_query)
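# Illustrative invocation through the standard Alembic CLI (not part of this file):
#   alembic upgrade head    # runs upgrade() and associates the new ACL template
#   alembic downgrade -1    # runs downgrade() and removes the association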
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# ndvi_test.py
#
# Copyright 2015 rob <rob@Novu>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
'''just an annoying dummy to get rid of Gtk2 and Gtk3 incompatibilities'''
from infrapix import infrapix
import sys
infrapix.ndvi(sys.argv[1], sys.argv[2], show_histogram=True)
|
lt behavior; C(no) does not install recommended packages. Suggested packages are never installed.
required: false
default: yes
choices: [ "yes", "no" ]
force:
description:
- If C(yes), force installs/removes.
required: false
default: "no"
choices: [ "yes", "no" ]
upgrade:
description:
- 'If yes or safe, performs an aptitude safe-upgrade.'
- 'If full, performs an aptitude full-upgrade.'
- 'If dist, performs an apt-get dist-upgrade.'
- 'Note: This does not upgrade a specific package, use state=latest for that.'
version_added: "1.1"
required: false
default: "yes"
choices: [ "yes", "safe", "full", "dist"]
dpkg_options:
description:
- Add dpkg options to apt command. Defaults to '-o "Dpkg::Options::=--force-confdef" -o "Dpkg::Options::=--force-confold"'
- Options should be supplied as comma separated list
required: false
default: 'force-confdef,force-confold'
deb:
description:
- Path to a .deb package on the remote machine.
required: false
version_added: "1.6"
requirements: [ python-apt, aptitude ]
author: Matthew Williams
notes:
- Three of the upgrade modes (C(full), C(safe) and its alias C(yes)) require C(aptitude), otherwise
C(apt-get) suffices.
'''
EXAMPLES = '''
# Update repositories cache and install "foo" package
- apt: name=foo update_cache=yes
# Remove "foo" package
- apt: name=foo state=absent
# Install the package "foo"
- apt: name=foo state=present
# Install the version '1.00' of package "foo"
- apt: name=foo=1.00 state=present
# Update the repository cache and update package "nginx" to latest version using default release squeeze-backport
- apt: name=nginx state=latest default_release=squeeze-backports update_cache=yes
# Install latest version of "openjdk-6-jdk" ignoring "install-recommends"
- apt: name=openjdk-6-jdk state=latest install_recommends=no
# Update all packages to the latest version
- apt: upgrade=dist
# Run the equivalent of "apt-get update" as a separate step
- apt: update_cache=yes
# Only run "update_cache=yes" if the last one is more than 3600 seconds ago
- apt: update_cache=yes cache_valid_time=3600
# Pass options to dpkg on run
- apt: upgrade=dist update_cache=yes dpkg_options='force-confold,force-confdef'
# Install a .deb package
- apt: deb=/tmp/mypackage.deb
# Install the build dependencies for package "foo"
- apt: pkg=foo state=build-dep
'''
import traceback
# added to stave off future warnings about apt api
import warnings
warnings.filterwarnings('ignore', "apt API not stable yet", FutureWarning)
import os
import datetime
import fnmatch
import itertools
# APT related constants
APT_ENV_VARS = dict(
DEBIAN_FRONTEND = 'noninteractive',
DEBIAN_PRIORITY = 'critical',
LANG = 'C'
)
DPKG_OPTIONS = 'force-confdef,force-confold'
APT_GET_ZERO = "0 upgraded, 0 newly installed"
APTITUDE_ZERO = "0 packages upgraded, 0 newly installed"
APT_LISTS_PATH = "/var/lib/apt/lists"
APT_UPDATE_SUCCESS_STAMP_PATH = "/var/lib/apt/periodic/update-success-stamp"
HAS_PYTHON_APT = True
try:
import apt
import apt.debfile
import apt_pkg
except ImportError:
HAS_PYTHON_APT = False
def package_split(pkgspec):
parts = pkgspec.split('=', 1)
if len(parts) > 1:
return parts[0], parts[1]
else:
return parts[0], None
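# For example: package_split('foo=1.00') returns ('foo', '1.00'),
# while package_split('foo') returns ('foo', None).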
def package_versions(pkgname, pkg, pkg_cache):
try:
versions = set(p.version for p in pkg.versions)
except AttributeError:
# assume older version of python-apt is installed
# apt.package.Package#versions require python-apt >= 0.7.9.
pkg_cache_list = (p for p in pkg_cache.Packages if p.Name == pkgname)
pkg_versions = (p.VersionList for p in pkg_cache_list)
versions = set(p.VerStr for p in itertools.chain(*pkg_versions))
return versions
def package_version_compare(version, other_version):
try:
return apt_pkg.version_compare(version, other_version)
except AttributeError:
        return apt_pkg.VersionCompare(version, other_version)
def package_status(m, pkgname, version, cache, state):
try:
        # get the package from the cache, as well as the
        # low-level apt_pkg.Package object which contains
        # state fields not directly accessible from the
# higher-level apt.package.Package object.
pkg = cache[pkgname]
ll_pkg = cache._cache[pkgname] # the low-level package object
except KeyError:
if state == 'install':
try:
if cache.get_providing_packages(pkgname):
return False, True, False
m.fail_json(msg="No package matching '%s' is available" % pkgname)
except AttributeError:
# python-apt version too old to detect virtual packages
# mark as upgradable and let apt-get install deal with it
return False, True, False
else:
return False, False, False
try:
has_files = len(pkg.installed_files) > 0
except UnicodeDecodeError:
has_files = True
except AttributeError:
has_files = False # older python-apt cannot be used to determine non-purged
try:
package_is_installed = ll_pkg.current_state == apt_pkg.CURSTATE_INSTALLED
except AttributeError: # python-apt 0.7.X has very weak low-level object
try:
# might not be necessary as python-apt post-0.7.X should have current_state property
package_is_installed = pkg.is_installed
except AttributeError:
# assume older version of python-apt is installed
package_is_installed = pkg.isInstalled
if version:
versions = package_versions(pkgname, pkg, cache._cache)
avail_upgrades = fnmatch.filter(versions, version)
if package_is_installed:
try:
installed_version = pkg.installed.version
except AttributeError:
installed_version = pkg.installedVersion
# Only claim the package is installed if the version is matched as well
package_is_installed = fnmatch.fnmatch(installed_version, version)
# Only claim the package is upgradable if a candidate matches the version
package_is_upgradable = False
for candidate in avail_upgrades:
if package_version_compare(candidate, installed_version) > 0:
package_is_upgradable = True
break
else:
package_is_upgradable = bool(avail_upgrades)
else:
try:
package_is_upgradable = pkg.is_upgradable
except AttributeError:
# assume older version of python-apt is installed
package_is_upgradable = pkg.isUpgradable
return package_is_installed, package_is_upgradable, has_files
def expand_dpkg_options(dpkg_options_compressed):
options_list = dpkg_options_compressed.split(',')
dpkg_options = ""
for dpkg_option in options_list:
dpkg_options = '%s -o "Dpkg::Options::=--%s"' \
% (dpkg_options, dpkg_option)
return dpkg_options.strip()
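# For example, expand_dpkg_options('force-confdef,force-confold') returns:
#   -o "Dpkg::Options::=--force-confdef" -o "Dpkg::Options::=--force-confold"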
def expand_pkgspec_from_fnmatches(m, pkgspec, cache):
new_pkgspec = []
for pkgspec_pattern in pkgspec:
pkgname_pattern, version = package_split(pkgspec_pattern)
# note that none of these chars is allowed in a (debian) pkgname
if frozenset('*?[]!').intersection(pkgname_pattern):
# handle multiarch pkgnames, the idea is that "apt*" should
# only select native packages. But "apt*:i386" should still work
if not ":" in pkgname_pattern:
try:
pkg_name_cache = _non_multiarch
except NameError:
pkg_name_cache = _non_multiarch = [pkg.name for pkg in cache if not ':' in pkg.name]
else:
try:
pkg_name_cache = _all_pkg_names
except NameError:
pkg_name_cache = _all_pkg_names = [pkg.name for pkg in cache]
matches = fnmatch.filter(pkg_name_ca |
#!/usr/bin/env python
###############################################################################
# $Id: gdal2grd.py 27044 2014-03-16 23:41:27Z rouault $
#
# Project: GDAL Python samples
# Purpose: Script to write out ASCII GRD rasters (used in Golden Software
# Surfer)
# from any source supported by GDAL.
# Author: Andrey Kiselev, dron@remotesensing.org
#
###############################################################################
# Copyright (c) 2003, Andrey Kiselev <dron@remotesensing.org>
# Copyright (c) 2009, Even Rouault <even dot rouault at mines-paris dot org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################
try:
from osgeo import gdal
from osgeo.gdalconst import *
gdal.TermProgress = gdal.TermProgress_nocb
except ImportError:
import gdal
from gdalconst import *
try:
import numpy as Numeric
Numeric.arrayrange = Numeric.arange
except ImportError:
import Numeric
import sys
# =============================================================================
def Usage():
print('Usage: gdal2grd.py [-b band] [-quiet] infile outfile')
print('Write out ASCII GRD rasters (used in Golden Software Surfer)')
print('')
print(' -b band Select a band number to convert (1 based)')
print(' -quiet Do not report any diagnostic information')
print(' infile Name of the input GDAL supported file')
print(' outfile Name of the output GRD file')
print('')
sys.exit(1)
# =============================================================================
infile = None
outfile = None
iBand = 1
quiet = 0
# Parse command line arguments.
i = 1
while i < len(sys.argv):
arg = sys.argv[i]
if arg == '-b':
i = i + 1
iBand = int(sys.argv[i])
elif arg == '-quiet':
quiet = 1
elif infile is None:
infile = arg
elif outfile is None:
outfile = arg
else:
Usage()
i = i + 1
if infile is None:
Usage()
if outfile is None:
Usage()
indataset = gdal.Open(infile, GA_ReadOnly)
if indataset is None:
print('Cannot open', infile)
sys.exit(2)
geotransform = indataset.GetGeoTransform()
band = indataset.GetRasterBand(iBand)
if band is None:
print('Cannot load band', iBand, 'from the', infile)
sys.exit(2)
if not quiet:
print('Size is ',indataset.RasterXSize,'x',indataset.RasterYSize,'x',indataset.RasterCount)
print('Projection is ',indataset.GetProjection())
print('Origin = (',geotransform[0], ',',geotransform[3],')')
print('Pixel Size = (',geotransform[1], ',',geotransform[5],')')
print('Converting band number',iBand,'with type',gdal.GetDataTypeName(band.DataType))
# Header printing
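# A Surfer ASCII GRD header consists of the "DSAA" magic, the grid size
# (nx ny), then the X, Y and Z min/max ranges, one pair per line; the X/Y
# extents below are computed at cell centres.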
fpout = open(outfile, "wt")
fpout.write("DSAA\n")
fpout.write(str(band.XSize) + " " + str(band.YSize) + "\n")
fpout.write(str(geotransform[0] + geotransform[1] / 2) + " " +
str(geotransform[0] + geotransform[1] * (band.XSize - 0.5)) + "\n")
if geotransform[5] < 0:
fpout.write(str(geotransform[3] + geotransform[5] * (band.YSize - 0.5)) + " " +
str(geotransform[3] + geotransform[5] / 2) + "\n")
else:
fpout.write(str(geotransform[3] + geotransform[5] / 2) + " " +
str(geotransform[3] + geotransform[5] * (band.YSize - 0.5)) + "\n")
fpout.write(str(band.ComputeRasterMinMax(0)[0]) + " " +
str(band.ComputeRasterMinMax(0)[1]) + "\n")
for i in range(band.YSize - 1, -1, -1):
scanline = band.ReadAsArray(0, i, band.XSize, 1, band.XSize, 1)
j = 0
while j < band.XSize:
fpout.write(str(scanline[0, j]))
j = j + 1
if j % 10: # Print no more than 10 values per line
fpout.write(" ")
else:
fpout.write("\n")
fpout.write("\n")
# Display progress report on terminal
if not quiet:
gdal.TermProgress(float(band.YSize - i) / band.YSize)
|
ename)
return
else:
model.api.devlog("loading file %s" % filename)
infilepath = os.path.join(self._path, filename)
host_dict = {}
try:
with open(infilepath) as infile:
host_dict = json.load(infile)
except Exception, e:
model.api.log("An error ocurred while parsing file %s\n%s" %
(filename, str(e)), "ERROR")
return mockito.mock()
try:
newHost = Host(name=None, dic=host_dict)
return newHost
except Exception, e:
model.api.log("Could not load host from file %s" % filename, "ERROR")
model.api.devlog(str(e))
return None
class WorkspaceOnCouch(Workspace):
"""A Workspace that is syncronized in couchdb"""
def __init__(self, name, manager, *args):
super(WorkspaceOnCouch, self).__init__(name, manager)
        self._is_replicated = CONF.getCouchIsReplicated()
self.cdm = self._dmanager = manager.couchdbmanager
if not self.cdm.workspaceExists(name):
self.cdm.addWorkspace(name)
if self.is_replicated():
self.cdm.replicate(self.name, *self.validate_replic_urls(CONF.getCouchReplics()), create_target = True)
self.cdm.syncWorkspaceViews(name)
self.container = CouchedModelObjectContainer(name, self.cdm)
def syncFiles(self):
self.load()
@staticmethod
def isAvailable():
return CouchdbManager.testCouch(CONF.getCouchURI())
def is_replicated(self):
return self._is_replicated
def validate_replic_urls(self, urlsString):
urls = urlsString.split(";") if urlsString is not None else ""
valid_replics = []
for url in urls:
try:
self.cdm.testCouchUrl(url)
valid_replics.append(url)
except:
pass
return valid_replics
def saveObj(self, obj):
self.cdm.saveDocument(self.name, obj._toDict())
self.cdm.compactDatabase(self.name)
def delObj(self, obj):
obj_id = obj.ancestors_path()
if self._dmanager.checkDocument(self.name, obj_id):
self._dmanager.remove(self.name, obj_id)
def save(self):
model.api.devlog("Saving workspaces")
for host in self.getContainee().itervalues():
host_as_dict = host.toDict()
for obj_dic in host_as_dict:
self.cdm.saveDocument(self.name, obj_dic)
def load(self):
self._model_controller.setSavingModel(True)
hosts = {}
def find_leaf(path, sub_graph = hosts):
for i in path:
if len(path) > 1:
return find_leaf(path[1:], sub_graph['subs'][i])
else:
return sub_graph
try:
t = time.time()
model.api.devlog("load start: %s" % str(t))
docs = [i["doc"] for i in self.cdm.workspaceDocumentsIterator(self.name)]
model.api.devlog("time to get docs: %s" % str(time.time() - t))
t = time.time()
for d in docs:
id_path = d['_id'].split('.')
if d['type'] == "Host":
hosts[d['_id']] = d
subs = hosts.get('subs', {})
subs[d['_id']] = d
hosts['subs'] = subs
continue
leaf = {}
try:
leaf = find_leaf(id_path)
except Exception as e:
model.api.devlog('Object parent not found, skipping: %s' % '.'.join(id_path))
continue
subs = leaf.get('subs', {})
subs[d['obj_id']] = d
leaf['subs'] = subs
key = "%s" % d['type']
key = key.lower()
sub = leaf.get(key, {})
sub[d['obj_id']] = d
leaf[key] = sub
model.api.devlog("time to reconstruct: %s" % str(time.time() - t))
t = time.time()
self.container.clear()
for k, v in hosts.items():
if k is not "subs":
h = Host(name=None, dic=v)
self.container[k] = h
model.api.devlog("time to fill container: %s" % str(time.time() - t))
t = time.time()
except Exception, e:
model.api.devlog("Exception during load: %s" % e)
finally:
self._model_controller.setSavingModel(False)
notifier.workspaceLoad(self.getAllHosts())
class WorkspaceManager(object):
"""
This handles all workspaces. It checks for existing workspaces inside
the persistence directory.
It is in charge of starting the WorkspacesAutoSaver to persist each workspace.
This class stores information in $HOME/.faraday/config/workspacemanager.xml file
    to keep track of created workspaces to be able to load them
"""
    def __init__(self, model_controller, plugin_controller):
self.active_workspace = None
self._couchAvailable = False
self.report_manager = ReportManager(10, plugin_controller)
self.couchdbmanager = PersistenceManagerFactory().getInstance()
self.fsmanager = FSManager()
self._workspaces = {}
self._workspaces_types = {}
self._model_controller = model_controller
self._excluded_directories = [".svn"]
self.workspace_persister = WorkspacePersister()
def couchAvailable(self, isit):
self._couchAvailable = isit
def _notifyWorkspaceNoConnection(self):
notifier.showPopup("Couchdb Connection lost. Defaulting to memory. Fix network and try again in 5 minutes.")
def reconnect(self):
if not self.reconnectCouchManager():
self._notifyWorkspaceNoConnection()
def getCouchManager(self):
return self.couchdbmanager
def setCouchManager(self, cm):
self.couchdbmanager = cm
@staticmethod
def getAvailableWorkspaceTypes():
        av = [w.__name__ for w in Workspace.__subclasses__() if w.isAvailable()]
        model.api.devlog("Available workspaces: %s" % ", ".join(av))
return av
    def reconnectCouchManager(self):
        if not self.couchdbmanager.reconnect():
            return False
        WorkspacePersister.reExecutePendingActions()
        return True
def startAutoLoader(self):
pass
def stopAutoLoader(self):
pass
def startReportManager(self):
self.report_manager.start()
def stopReportManager(self):
self.report_manager.stop()
self.report_manager.join()
def getActiveWorkspace(self):
return self.active_workspace
def saveWorkspaces(self):
pass
def addWorkspace(self, workspace):
self._workspaces[workspace.name] = workspace
def createVisualizations(self):
stat = False
url = ""
if self.couchdbmanager.isAvailable():
stat = True
url = self.couchdbmanager.pushReports()
else:
self._notifyNoVisualizationAvailable()
return stat, url
def _notifyNoVisualizationAvailable(self):
notifier.showPopup("No visualizations available, please install and configure CouchDB")
def createWorkspace(self, name, description="", workspaceClass = None, shared=CONF.getAutoShareWorkspace(),
customer="", sdate=None, fdate=None):
model.api.devlog("Creating Workspace")
|
ll always eat "
"the data away from the other."
)
read_all = "Read as many bytes as are currently available"
read_some = "Read exactly this number of bytes:"
read_time = "and wait this maximum number of milliseconds for them:"
import wx
import threading
import win32event
import win32file
import codecs
import binascii
BAUDRATES = [
'110', '300', '600', '1200', '2400', '4800', '9600', '14400', '19200',
'38400', '57600', '115200', '128000', '256000'
]
def MyHexDecoder(input):
return (binascii.b2a_hex(input).upper(), len(input))
DECODING_FUNCS = [
codecs.getdecoder(eg.systemEncoding),
MyHexDecoder,
codecs.getdecoder("latin1"),
codecs.getdecoder("utf8"),
codecs.getdecoder("utf16"),
codecs.getencoder("string_escape"),
]
class Serial(eg.RawReceiverPlugin):
text = Text
def __init__(self):
eg.RawReceiverPlugin.__init__(self)
self.AddAction(Write)
self.AddAction(Read)
self.serial = None
self.buffer = ""
def __start__(
self,
port,
baudrate,
bytesize=8,
parity=0,
stopbits=0,
handshake=0,
generateEvents=False,
terminator="",
prefix="Serial",
encodingNum=0,
):
xonxoff = 0
rtscts = 0
if handshake == 1:
xonxoff = 1
elif handshake == 2:
rtscts = 1
try:
self.serial = eg.SerialPort(
                port,
baudrate=baudrate,
bytesize=(5, 6, 7, 8)[bytesize],
stopbits=(1, 2)[stopbits],
                parity=('N', 'O', 'E')[parity],
xonxoff=xonxoff,
rtscts=rtscts,
)
except:
self.serial = None
raise self.Exceptions.SerialOpenFailed
self.serial.timeout = 1.0
self.serial.setRTS()
if generateEvents:
self.decoder = DECODING_FUNCS[encodingNum]
self.terminator = eg.ParseString(terminator).decode('string_escape')
self.info.eventPrefix = prefix
self.stopEvent = win32event.CreateEvent(None, 1, 0, None)
self.receiveThread = threading.Thread(target=self.ReceiveThread, name="SerialThread")
self.receiveThread.start()
else:
self.receiveThread = None
def __stop__(self):
if self.serial is not None:
if self.receiveThread:
win32event.SetEvent(self.stopEvent)
self.receiveThread.join(1.0)
self.serial.close()
self.serial = None
def HandleChar(self, ch):
self.buffer += ch
pos = self.buffer.find(self.terminator)
if pos != -1:
eventstring = self.buffer[:pos]
if eventstring:
self.TriggerEvent(self.decoder(eventstring)[0])
self.buffer = self.buffer[pos+len(self.terminator):]
def ReceiveThread(self):
from win32event import (
ResetEvent,
MsgWaitForMultipleObjects,
QS_ALLINPUT,
WAIT_OBJECT_0,
WAIT_TIMEOUT,
)
from win32file import ReadFile, AllocateReadBuffer, GetOverlappedResult
from win32api import GetLastError
continueLoop = True
overlapped = self.serial._overlappedRead
hComPort = self.serial.hComPort
hEvent = overlapped.hEvent
stopEvent = self.stopEvent
n = 1
waitingOnRead = False
buf = AllocateReadBuffer(n)
while continueLoop:
if not waitingOnRead:
ResetEvent(hEvent)
hr, _ = ReadFile(hComPort, buf, overlapped)
if hr == 997:
waitingOnRead = True
elif hr == 0:
pass
#n = GetOverlappedResult(hComPort, overlapped, 1)
#self.HandleChar(str(buf))
else:
self.PrintError("error")
raise
rc = MsgWaitForMultipleObjects(
(hEvent, stopEvent),
0,
1000,
QS_ALLINPUT
)
if rc == WAIT_OBJECT_0:
n = GetOverlappedResult(hComPort, overlapped, 1)
if n:
self.HandleChar(str(buf))
#else:
# print "WAIT_OBJECT_0", n, str(buf[:n])
waitingOnRead = False
elif rc == WAIT_OBJECT_0+1:
continueLoop = False
elif rc == WAIT_TIMEOUT:
pass
else:
self.PrintError("unknown message")
def Configure(
self,
port=0,
baudrate=9600,
bytesize=3,
parity=0,
stopbits=0,
handshake=0,
generateEvents=False,
terminator="\\r",
prefix="Serial",
encodingNum=0,
):
text = self.text
panel = eg.ConfigPanel()
portCtrl = panel.SerialPortChoice(port)
baudrateCtrl = panel.ComboBox(
str(baudrate),
BAUDRATES,
style=wx.CB_DROPDOWN,
validator=eg.DigitOnlyValidator()
)
bytesizeCtrl = panel.Choice(bytesize, ['5', '6', '7', '8'])
parityCtrl = panel.Choice(parity, text.parities)
stopbitsCtrl = panel.Choice(stopbits, ['1', '2'])
handshakeCtrl = panel.Choice(handshake, text.handshakes)
generateEventsCtrl = panel.CheckBox(generateEvents, text.generateEvents)
terminatorCtrl = panel.TextCtrl(terminator)
terminatorCtrl.Enable(generateEvents)
prefixCtrl = panel.TextCtrl(prefix)
prefixCtrl.Enable(generateEvents)
encodingCtrl = panel.Choice(encodingNum, text.codecChoices)
encodingCtrl.Enable(generateEvents)
def OnCheckBox(event):
flag = generateEventsCtrl.GetValue()
terminatorCtrl.Enable(flag)
prefixCtrl.Enable(flag)
encodingCtrl.Enable(flag)
event.Skip()
generateEventsCtrl.Bind(wx.EVT_CHECKBOX, OnCheckBox)
panel.SetColumnFlags(1, wx.EXPAND)
portSettingsBox = panel.BoxedGroup(
"Port settings",
(text.port, portCtrl),
(text.baudrate, baudrateCtrl),
(text.bytesize, bytesizeCtrl),
(text.parity, parityCtrl),
(text.stopbits, stopbitsCtrl),
(text.flowcontrol, handshakeCtrl),
)
eventSettingsBox = panel.BoxedGroup(
"Event generation",
(generateEventsCtrl),
(text.terminator, terminatorCtrl),
(text.eventPrefix, prefixCtrl),
(text.encoding, encodingCtrl),
)
eg.EqualizeWidths(portSettingsBox.GetColumnItems(0))
eg.EqualizeWidths(portSettingsBox.GetColumnItems(1))
eg.EqualizeWidths(eventSettingsBox.GetColumnItems(0)[1:])
eg.EqualizeWidths(eventSettingsBox.GetColumnItems(1))
panel.sizer.Add(eg.HBoxSizer(portSettingsBox, (10, 10), eventSettingsBox))
while panel.Affirmed():
panel.SetResult(
portCtrl.GetValue(),
int(baudrateCtrl.GetValue()),
bytesizeCtrl.GetValue(),
parityCtrl.GetValue(),
stopbitsCtrl.GetValue(),
handshakeCtrl.GetValue(),
generateEventsCtrl.GetValue(),
terminatorCtrl.GetValue(),
prefixCtrl.GetValue(),
encodingCtrl.GetValue(),
)
class Write(eg.ActionWithStringParameter):
def __call__(self, data):
data = eg.ParseString(data, self.replaceFunc)
data = data.decode('string_escape')
self.plugin.serial.write(str(data))
return self.plugin.serial
    def replaceFun
|
ngs_override)s
'''
SCRIPT_TEMPLATES = {
'wsgi': easy_install.script_header + '''
%(relative_paths_setup)s
import sys
sys.path[0:0] = [
%(path)s,
]
%(initialization)s
import os
try:
from django.core.wsgi import get_wsgi_application
IS_14_PLUS = True
except ImportError:
from django.core.handlers.wsgi import WSGIHandler
IS_14_PLUS = False
os.environ['DJANGO_SETTINGS_MODULE'] = "%(module_name)s%(attrs)s"
def app_factory(global_config, **local_config):
"""This function wraps our simple WSGI app so it
can be used with paste.deploy"""
if IS_14_PLUS:
return get_wsgi_application()
else:
return WSGIHandler()
application = app_factory(%(arguments)s)
''',
'manage': easy_install.script_header + '''
%(relative_paths_setup)s
import sys
sys.path[0:0] = [
%(path)s,
]
%(initialization)s
import os
try:
from django.core.management import execute_from_command_line
IS_14_PLUS = True
except ImportError:
from django.core.management import ManagementUtility
IS_14_PLUS = False
os.environ['DJANGO_SETTINGS_MODULE'] = "%(module_name)s%(attrs)s"
if IS_14_PLUS:
execute_from_command_line(%(arguments)s)
else:
utility = ManagementUtility(%(arguments)s)
utility.execute()
'''
}
class Recipe(object):
wsgi_file = 'wsgi.py'
settings_file = 'settings.py'
sites_default = 'sites'
site_settings_template = '%(name)s_site_config'
secret_cfg = '.secret.cfg'
def __init__(self, buildout, name, options):
self.buildout, self.name, self.options = buildout, name, options
self.logger = logging.getLogger(name)
self.options['location'] = os.path.join(
self.buildout['buildout']['parts-directory'], self.name
)
self.options.setdefault('extra-paths', '')
self.options.setdefault('environment-vars', '')
self.options.setdefault('sites-directory', self.sites_default)
self.options.setdefault('settings-override', '')
self.options.setdefault('settings-file', self.settings_file)
self.options.setdefault('wsgi-file', self.wsgi_file)
self.options.setdefault('manage-py-file', 'django')
self.eggs = [ ]
if 'eggs' in self.buildout['buildout']:
self.eggs.extend(self.buildout['buildout']['eggs'].split())
if 'eggs' in self.options:
self.eggs.extend(self.options['eggs'].split())
self.working_set = None
self.extra_paths = [ self.options['location'] ]
sites_path = os.path.join(
self.buildout['buildout']['directory'],
self.options['sites-directory']
)
if os.path.isdir(sites_path):
self.extra_paths.append(sites_path)
if os.path.isdir(sites_path) and 'settings-module' not in self.options:
# Check if the user has created a module %(name)s_site_config
settings_module = self.site_settings_template % {
'name': self.name
}
settings_module_path = os.path.join(sites_path, settings_module)
initpy = os.path.join(settings_module_path, '__init__.py')
settingspy = os.path.join(settings_module_path, 'settings.py')
if os.path.isdir(settings_module_path) and \
os.path.isfile(initpy) and os.path.isfile(settingspy):
self.options.setdefault('settings-module',
'%s.settings' % settings_module)
self.extra_paths.extend(self.options['extra-paths'].split())
self.secret_key = None
def setup_working_set(self):
egg = Egg(
self.buildout, 'Django', self.options
| )
self.working_set = egg.working_set(self.eggs)
def setup_secret(self):
secret_file = os.path.join(
self.buildout['buildout']['directory'],
self.secret_cfg
)
if os.path.isfile(secret_file):
stream | = open(secret_file, 'rb')
data = stream.read().decode('utf-8').strip()
stream.close()
self.logger.debug("Read secret: %s" % data)
else:
stream = open(secret_file, 'wb')
chars = u'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
data = u''.join([random.choice(chars) for __ in range(50)])
stream.write(data.encode('utf-8') + b"\n")
stream.close()
self.logger.debug(
"Generated secret: %s (and written to %s)" % (data, secret_file)
)
self.secret_key = data
return secret_file
def setup_module_file(self, module, name, data):
with open(os.path.join(module, name), 'wb') as stream:
stream.write(data)
def get_settings(self, static_directory=None, media_directory=None):
if 'settings-module' not in self.options:
raise UserError(
("You should specify 'settings-module' in %(name)s "
"or create a module named '"+self.site_settings_template+"' "
"in '%(sites)s' with a 'settings.py' file in it") % {
'name': self.name,
'sites': self.options['sites-directory']
}
)
settings_override = self.options['settings-override']
if static_directory is not None:
settings_override += '\nSTATIC_ROOT = "%s"\n' % (
static_directory,
)
if media_directory is not None:
settings_override += '\nMEDIA_ROOT = "%s"\n' % (
media_directory,
)
return SETTINGS_TEMPLATE % {
'settings_module': self.options['settings-module'],
'secret': self.secret_key,
'settings_override': settings_override
}
def setup_directories(self):
result = []
for directory in [ 'static-directory', 'media-directory' ]:
result.append(None)
if directory in self.options:
path = os.path.join(
self.buildout['buildout']['directory'],
self.options[directory]
)
if not os.path.isdir(path):
os.makedirs(path)
result[-1] = path
return result
def get_initialization(self):
# The initialization code is expressed as a list of lines
initialization = []
# Gets the initialization code: the tricky part here is to preserve
# indentation.
# Since buildout strips all leading whitespace, anyone who wants to
# preserve indentation must prefix the lines with '>>> ' or '... '
raw_value = self.options.get('initialization', '')
is_indented = False
indentations = ('>>> ', '... ')
for line in raw_value.splitlines():
if line != "":
if len(initialization) == 0:
if line.startswith(indentations[0]):
is_indented = True
else:
if is_indented and not line.startswith(indentations[1]):
raise UserError(
("Line '%s' should be indented "
"properly but is not") % line
)
if is_indented:
line = line[4:]
initialization.append(line)
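# Illustrative example: an 'initialization' option written in buildout as
#   >>> import logging
#   ... logging.basicConfig()
# survives buildout's whitespace stripping and is de-indented here to
#   import logging
#   logging.basicConfig()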
# Gets the environment-vars option and generates code to set the
# environment variables via os.environ
environment_vars = []
for line in self.options.get('environment-vars', '').splitlines():
line = line.strip()
if len(line) > 0:
try:
var_name, raw_value = line.split(' ', 1)
except ValueError:
raise RuntimeError(
"Bad djc.recipe2 environment-vars contents: %s" % line
)
environment_vars.append(
'os.environ["%s"] = r"%s"' % (
var_name,
raw_value.strip()
)
)
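# Illustrative example: an 'environment-vars' option such as
#   DJANGO_SETTINGS_MODULE mysite.settings
#   TMPDIR /tmp/django
# is turned by the loop above into
#   os.environ["DJANGO_SETTINGS_MODULE"] = r"mysite.settings"
#   os.environ["TMPDIR"] = r"/tmp/django"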
|
from distutils.core import setup
setup(
name = 'mirobot',
packages = ['mirobot'],
version = '1.0.3',
description = 'A Python library to control Mirobot (http://mirobot.io)',
author = 'Ben Pirt',
author_email = 'ben@pirt.co.uk',
url = 'https://github.com/mirobot/mirobot-py',
download_url = 'https://github.com/mirobot/mirobot-py/tarball/v1.0.2', |
keywords = ['robotics', 'control', 'mirobot'],
classifiers = ['Programming Language :: Python :: 2', 'Programming Language :: Python :: 3', ' | Topic :: Education', 'License :: OSI Approved :: MIT License'],
install_requires=[
"websocket-client",
],
)
|
"""Base rop | e package
This package contains rope core modules that are used by other modules
and packages.
"""
__all__ = ["project", " | libutils", "exceptions"]
|
import scipy.sparse as ss
import warnings
warnings.simplefilter('ignore', ss.SparseEfficiencyWarning)
from sparray import FlatSparray
class Operations(object):
params = [['FlatSparray', 'csr_matrix']]
param_names = ['arr_type']
def setup(self, arr_type):
mat = ss.rand(3000, 4000, density=0.1, format='csr')
if arr_type == 'FlatSparray':
self.arr = FlatSparray.from_spmatrix(mat)
else:
self.arr = mat
def time_scalar_multiplication(self, arr_type):
self.arr * 3
def time_sum(self, arr_type):
self.arr.sum()
def time_getitem_scalar(self, arr_type):
self.arr[154, 145]
def time_getitem_subarray(self, arr_type):
self.arr[:5, :5]
def time_getitem_row(self, arr_type):
self.arr[876]
def time_getitem_col(self, arr_type):
self. | arr[:,273]
def time_diagonal(self, arr_type):
self.arr.diagonal()
class Imp | ureOperations(object):
params = [['FlatSparray', 'csr_matrix']]
param_names = ['arr_type']
number = 1 # make sure we re-run setup() before each timing
def setup(self, arr_type):
mat = ss.rand(3000, 4000, density=0.1, format='csr')
if arr_type == 'FlatSparray':
self.arr = FlatSparray.from_spmatrix(mat)
else:
self.arr = mat
def time_setdiag(self, arr_type):
self.arr.setdiag(99)
|
from pyspark import SparkConf, SparkContext
from jsonrpc.authproxy import AuthServiceProxy
import json
import sys
#This program batch-processes bitcoind (locally run bitcoin daemon)
#RPC (Remote Procedure Call) block JSON stored in HDFS.
#Currently 187,990 blocks' JSON representations are stored in HDFS;
#the HDFS file size is around 6.5GB.
#The output of this program is the block_number and the corresponding
#transaction fee in units of Satoshi. This data is written to an HBASE
#table.
#The program takes only 69 minutes to run, while the streaming version
#of the program takes 177 minutes.
#It is a good illustration of the time-space (memory) tradeoff.
conf = SparkConf().setMaster("local").setAppName("bitcoin_TransactionFee_calculator")
sc = SparkContext(conf=conf)
rpcuser="bitcoinrpc"
rpcpassword="5C3Y6So6sCRPgBao8KyWV2bYpTHZt5RCVAiAg5JmTnHr"
rpcip = "127.0.0.1"
bitcoinrpc = AuthServiceProxy("http://"+rpcuser+":"+rpcpassword+"@"+rpcip+":8332")
#function SaveRecord: saves tx_fee for a block to hbase database
def SaveRecord(tx_fee_rdd):
host = 'localhost' #sys.argv[1]
table = 'tx_fee_table_sp_batch' #needs to be created before hand in hbase shell
conf = {"hbase.zookeeper.quorum": host,
"hbase.mapred.outputtable": table,
"mapreduce.outputformat.class": "org.apache.hadoop.hbase.mapreduce.TableOutputFormat",
"mapreduce.job.ou | tput.key.class": "org.apache.hadoop.hbase.io.ImmutableBytesWritable",
"mapreduce.job.output.value.class": "org.apache.hadoop.io.Writable"}
keyConv = "org.apache.spark.examples.pythonconverters.StringToImmutableBytesWritableConverter"
valueConv = "org.apache.spark.examples.pythonconverters.StringListToPutConverter"
#row key id,id, cfam | ily=tx_fee_col,column_name = tx_fee, column_value=x
#datamap = tx_fee_rdd.map(lambda x: ("tx_fee",x) )
#( rowkey , [ row key , column family , column name , value ] )
datamap = tx_fee_rdd.map(lambda x: (str(x[0]),
[str(x[0]),"tx_fee_col","tx_fee",str(x[1])])
)
datamap.saveAsNewAPIHadoopDataset(conf=conf,
keyConverter=keyConv,
valueConverter=valueConv)
def get_tx_fee(gen_tx):
gen_tx_json = bitcoinrpc.decoderawtransaction(bitcoinrpc.getrawtransaction(gen_tx))
return gen_tx_json
content_rdd = sc.textFile("hdfs://ec2-52-21-47-235.compute-1.amazonaws.com:9000/bitcoin/block_chain_full.txt")
#The file below is for testing purposes
#content_rdd = sc.textFile("file:///home/ubuntu/unix_practice/bitcoin/2_blocks.txt")
dump_rdd = content_rdd.map(lambda x: json.dumps(x)).map(lambda x : x.decode('unicode_escape').encode('ascii','ignore'))
#print dump_rdd.take(2)
load_rdd = dump_rdd.map(lambda x: json.loads(x))
#print load_rdd.take(2)
split_blk_rdd = load_rdd.map(lambda x: x.split(":"))
#tx = load_rdd.filter(lambda x: "tx" in x)
#print split_blk_rdd.take(split_blk_rdd.count())
gen_tx_rdd = split_blk_rdd.map(lambda x : (x[8][1:7],x[6][4:68]) ) #this gets generation transactions
#print "*************HERE***************"
#print gen_tx_rdd.take(gen_tx_rdd.count()) #from the blocks
tx_json_rdd = gen_tx_rdd.map(lambda x : (x[0],get_tx_fee(x[1])) ) #function call
#print tx_json_rdd.take(tx_json_rdd.count())
tx_fee_rdd = tx_json_rdd.map(lambda x : (x[0],x[1].items()
[3][1][0]["value"]-25) )#.filter(lambda x : "value" in x)
#print tx_fee_rdd.take(tx_fee_rdd.count())
SaveRecord(tx_fee_rdd) #function call
#just to display values for debugging
#val_lst = tx_fee_rdd.take(tx_fee_rdd.count()) #use [3][1]
#print val_lst
|
# encoding: utf-8
"""Provides collection of events emitters"""
import time
from . import EndPoint
def container_count(host_fqdn, docker_client, statistics):
"""
Emit events providing:
- number of containers
- number of running containers
- number of crashed containers
:param host_fqdn: FQDN of the host where the docker-zabbix-daemon is running, for instance docker.acme.com
:type host_fqdn: string
:param docker_client: instance of docker.Client see http://docker-py.readthedocs.org/en/latest/api/
:type docker_client: docker.Client
:param statistics: List of dicts providing collected container statistics. see Docker stats API c | all on https://docs.docker.com/reference/api/docker_remote_api_v1.17/#get-container-stats-based-on-resource-usage
:return: list of dicts providing additional events to push to Zabbix.
Each dict is composed of 4 keys:
- hostname
- timestamp
- key
- value
"""
running = 0
crashed = 0
now = int(time.time())
containers = docker_client.containers(all=True)
for container in containers:
| status = container['Status']
if status.startswith('Up'):
running += 1
elif not status.startswith('Exited (0)'):
crashed += 1
data = {
'all': len(containers),
'running': running,
'crashed': crashed,
}
return [
{
'hostname': '-',
'timestamp': now,
'key': EndPoint.EVENT_KEY_PREFIX + 'count.' + key,
'value': value
}
for key, value in data.items()
]
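# Illustrative example: with 4 containers of which 2 are up and 1 exited
# non-zero, this returns entries such as
#   {'hostname': '-', 'timestamp': 1400000000,
#    'key': EndPoint.EVENT_KEY_PREFIX + 'count.running', 'value': 2}
# plus the matching 'count.all' (4) and 'count.crashed' (1) events.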
def container_ip(host_fqdn, docker_client, statistics):
"""Emit the ip addresses of containers.
"""
for stat in statistics:
containerId = stat['id']
details = docker_client.inspect_container(containerId)
yield {
'hostname': EndPoint.container_hostname(host_fqdn, stat['name']),
'timestamp': stat['timestamp'],
'key': EndPoint.EVENT_KEY_PREFIX + 'ip',
'value': details['NetworkSettings']['IPAddress']
}
def cpu_count(host_fqdn, docker_client, statistics):
"""Emit the number of CPU available for each container.
"""
for stat in statistics:
yield {
'hostname': EndPoint.container_hostname(host_fqdn, stat['name']),
'timestamp': stat['timestamp'],
'key': EndPoint.EVENT_KEY_PREFIX + 'cpu.count',
'value': len(stat['cpu_stats']['cpu_usage']['percpu_usage'])
}
|
# From Python 3.6 func | tools.py
# Bug was in detecting "nonlocal" access
def not_bug():
cache_token = 5
def register():
nonlocal cache_token
return ca | che_token == 5
return register()
assert not_bug()
|
# codi | ng: utf-8
from flask import Blueprint
__author__ = 'Jux.Liu'
user = Blueprint('user', __name__)
from . | import views
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.apps import apps
from django.forms import widgets
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from django.utils.safestring import mark_safe
from cmsplugin_cascade.fields import PartialFormField
from cmsplugin_cascade.plugin_base import CascadePluginBase
from .forms import LinkForm
class LinkPluginBase(CascadePluginBase):
text_enabled = True
allow_children = False
parent_classes = []
require_parent = False
glossary_fields = (
PartialFormField('target',
widgets.RadioSelect(choices=(('', _("Same Window")), ('_blank', _("New Window")),
('_parent', _("Parent Window")), ('_top', _("Topmost Frame")),)),
initi | al='',
label=_("Link Target"),
help_text=_("Open Link in other target.")
),
PartialFormField('title',
widgets.T | extInput(),
label=_("Title"),
help_text=_("Link's Title")
),
)
html_tag_attributes = {'title': 'title', 'target': 'target'}
# map field from glossary to these form fields
glossary_field_map = {'link': ('link_type', 'cms_page', 'ext_url', 'mail_to',)}
@classmethod
def get_link(cls, obj):
link = obj.glossary.get('link', {})
linktype = link.get('type')
if linktype == 'exturl':
return '{url}'.format(**link)
if linktype == 'email':
return 'mailto:{email}'.format(**link)
# otherwise try to resolve by model
if 'model' in link and 'pk' in link:
if not hasattr(obj, '_link_model'):
Model = apps.get_model(*link['model'].split('.'))
try:
obj._link_model = Model.objects.get(pk=link['pk'])
except Model.DoesNotExist:
obj._link_model = None
if obj._link_model:
return obj._link_model.get_absolute_url()
def get_ring_bases(self):
bases = super(LinkPluginBase, self).get_ring_bases()
bases.append('LinkPluginBase')
return bases
def get_form(self, request, obj=None, **kwargs):
kwargs.setdefault('form', LinkForm.get_form_class())
return super(LinkPluginBase, self).get_form(request, obj, **kwargs)
@python_2_unicode_compatible
class LinkElementMixin(object):
"""
A mixin class to convert a CascadeElement into a proxy model for rendering the ``<a>`` element.
Note that a Link inside the Text Editor Plugin is rendered using ``str(instance)`` rather
than ``instance.content``.
"""
def __str__(self):
return self.content
@property
def link(self):
return self.plugin_class.get_link(self)
@property
def content(self):
return mark_safe(self.glossary.get('link_content', ''))
|
__all__ = ['scraper', 'local_scraper', 'pw_scraper', 'uflix_scraper', 'watchseries_scraper', 'movie25_scraper', 'merdb_scraper', '2movies_scraper', 'icefilms_scraper',
'movieshd_scraper', 'yifytv_scraper', 'viooz_scraper', 'filmstreaming_scraper', 'myvideolinks_scraper', 'filmikz_scraper', 'clickplay_scraper', 'nitertv_scraper',
'iwatch_scraper', 'ororotv_scraper', 'view47_scraper', 'vidics_scraper', 'oneclickwatch_scraper', 'istreamhd_scraper', 'losmovies_scraper', 'movie4k_scraper',
'noobroom_scraper', 'solar_scraper', 'vkbox_scraper', 'directdl_scraper', 'movietv_scraper', 'moviesonline7_scraper', 'streamallthis_scraper', 'afdah_scraper',
'streamtv_scraper', 'moviestorm_scraper', 'wmo_scraper', 'zumvo_scraper', 'wso_scraper', 'tvrelease_scraper', 'hdmz_scraper', 'ch131_scraper', 'watchfree_scraper',
'pftv_scraper', 'flixanity_scraper', 'cmz_scraper', 'movienight_scraper', 'gvcenter_scraper', 'alluc_scraper', 'afdahorg_scraper', 'xmovies8_scraper',
'yifystreaming_scraper', 'mintmovies_scraper', 'playbox_scraper', 'shush_proxy', 'mvsnap_scraper', 'pubfilm_scraper', 'pctf_scraper', 'rlssource_scraper',
'couchtunerv1_scraper', 'couchtunerv2_scraper', 'tunemovie_scraper', 'watch8now_scraper', 'megabox_scraper', 'dizilab_scraper', 'beinmovie_scraper',
'dizimag_scraper', 'ayyex_scraper']
import re
import os
import xbmcaddon
import xbmc
import datetime
import time
from salts_lib import log_utils
from salts_lib.constants import VIDEO_TYPES
from . import scraper # just to avoid editor warning
from . import *
class ScraperVideo:
def __init__(self, video_type, title, year, trakt_id, season='', episode='', ep_title='', ep_airdate=''):
assert(video_type in (VIDEO_TYPES.__dict__[k] for k in VIDEO_TYPES.__dict__ if not k.startswith('__')))
self.video_type = video_type
self.title = title
self.year = year
self.season = season
self.episode = episode
self.ep_title = ep_title
self.trakt_id = trakt_id
self.ep_airdate = None
if ep_airdate:
try: self.ep_airdate = datetime.datetime.strptime(ep_airdate, "%Y-%m-%d").date()
except (TypeError, ImportError): self.ep_airdate = datetime.date(*(time.strptime(ep_airdate, '%Y-%m-%d')[0:3]))
def __str__(self):
return '|%s|%s|%s|%s|%s|%s|%s|' % (self.video_type, self.title, self.year, self.season, self.episode, self.ep_title, self.ep_airdate)
def update_xml(xml, new_settings, cat_count):
new_settings.insert(0, '<category label="Scrapers %s">' % (cat_count))
new_settings.append(' </category>')
new_str = '\n'.join(new_settings)
match = re.search('(<category label="Scrapers %s">.*?</category>)' % (cat_count), xml, re.DOTALL | re.I)
if match:
old_settings = match.group(1)
if old_settings != new_str:  # compare against the rendered string, not the list
xml = xml.replace(old_settings, new_str)
else:
log_utils.log('Unable to match category: %s' % (cat_count), xbmc.LOGWARNING)
return xml
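# Illustrative example: update_xml(xml, ['        <setting id="foo" />'], 2)
# wraps the new settings in '<category label="Scrapers 2"> ... </category>'
# and substitutes that block for the existing category of the same label.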
def update_settings():
path = xbmcaddon.Addon().getAddonInfo('path')
full_path = os.path.join(path, 'resources', 'settings.xml')
try:
with open(full_p | ath, 'r') as f:
xml = f.read()
except:
raise
new_settings = []
cat_count = 1
old_xml = xml
classes = scraper.Scraper.__class__.__subclasses__(scraper.Scraper)
for cls in sorted(classes, key=lambda x: x.get_name().upper()):
new_settings += cls.get_settings()
if len(new_settings) > 90:
xml = update_xml(xml, new_settings, cat_count)
new_settings = []
cat_count += 1
if n | ew_settings:
xml = update_xml(xml, new_settings, cat_count)
if xml != old_xml:
try:
with open(full_path, 'w') as f:
f.write(xml)
except:
raise
else:
log_utils.log('No Settings Update Needed', xbmc.LOGDEBUG)
update_settings()
|
x")
self.similarity = similarity
if 'gap_open' not in self.similarity:
raise ValueError(
"No gap_open penalty in alignment scoring matrix.")
if 'gap_extend' not in self.similarity:
raise ValueError(
"No gap_extend penalty in alignment scoring matrix.")
self.matrix = None
self.seq1 = None
self.seq2 = None
self.calls = 0
# TODO: uncomment aligner backend
# global _AMBIVERT
# if backend == 'ambivert' and _AMBIVERT:
# self.align = self.align_ambivert
# log_message(
# logging_callback=logging.info,
# msg="Using ambivert alignment backend.",
# extra={'oname': 'Aligner'}
# )
# else:
# self.align = self.align_enrich2
# log_message(
# logging_callback=logging.info,
# msg="Using enrich2 alignment backend.",
# extra={'oname': 'Aligner'}
# )
self.align = self.align_enrich2
log_message(
logging_callback=logging.info,
msg="Using enrich2 alignment backend.",
extra={'oname': 'Aligner'}
)
def align_ambivert(self, seq1, seq2):
"""
Aligns the two sequences, *seq1* and *seq2* and returns a list of
tuples describing the differences between the sequences.
The tuple format is ``(i, j, type, length)``, where ``i`` and ``j``
are the positions in *seq1* and *seq2*, respectively, and type is one
of ``"match"``, ``"mismatch"``, ``"insertion"``, or ``"deletion"``.
For indels, the ``length`` value is the number of bases inserted or
deleted with respect to *seq1* starting at ``i``.
Parameters
----------
seq1 : `str`
Reference sequence.
seq2 : `str`
The sequence that is to be aligned.
Returns
-------
`list`
list of tuples describing the differences between the sequences.
"""
if not isinstance(seq1, str):
raise TypeError("First sequence must be a str type")
if not isinstance(seq2, str):
raise TypeError("Second sequence must be a str type")
if not seq1:
raise ValueError("First sequence must not be empty.")
if not seq2:
raise ValueError("Second sequence must not be empty.")
self.matrix = np.ndarray(
shape=(len(seq1) + 1, len(seq2) + 1),
dtype=np.dtype([('score', np.int), ('trace', np.byte)])
)
seq1 = seq1.upper()
seq2 = seq2.upper()
a1, a2, *_ = self.needleman_wunsch(
seq1, seq2,
gap_open=self.similarity['gap_open'],
gap_extend=self.similarity['gap_extend']
)
backtrace = cigar_to_backtrace(
seq1, seq2,
gapped_alignment_to_cigar(a1, a2)[0]
)
return backtrace
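# Illustrative example of the backtrace format returned above: aligning
# "ACGT" (reference) against "AGGT" would give
#   [(0, 0, "match", None), (1, 1, "mismatch", None),
#    (2, 2, "match", None), (3, 3, "match", None)]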
def align_enrich2(self, seq1, seq2):
"""
Aligns the two sequences, *seq1* and *seq2* and returns a list of
tuples describing the differences between the sequences.
The tuple format is ``(i, j, type, length)``, where ``i`` and ``j``
are the positions in *seq1* and *seq2*, respectively, and type is one
of ``"match"``, ``"mismatch"``, ``"insertion"``, or ``"deletion"``.
For indels, the ``length`` value is the number of bases inserted or
deleted with respect to *seq1* starting at ``i``.
Parameters
----------
seq1 : `str`
Reference sequence.
seq2 : `str`
The sequence that is to be aligned.
Returns
-------
`list`
list of tuples describing the differences between the sequences.
"""
if not isinstance(seq1, str):
raise TypeError("First sequence must be a str type")
if not isinstance(seq2, str):
raise TypeError("Second sequence must be a str type")
if not seq1:
raise ValueError("First sequence must not be empty.")
if not seq2:
raise ValueError("Second sequence must not be empty.")
self.matrix = np.ndarray(
shape=(len(seq1) + 1, len(seq2) + 1),
dtype=np.dtype([('score', np.int), ('trace', np.byte)])
)
seq1 = seq1.upper()
seq2 = seq2.upper()
# build matrix of scores/traceback information
for i in range(len(seq1) + 1):
self.matrix[i, 0] = (self.similarity['gap_open'] * i, Aligner._DEL)
for j in range(len(seq2) + 1):
self.matrix[0, j] = (self.similarity['gap_open'] * j, Aligner._INS)
for i in range(1, len(seq1) + 1):
for j in range(1, len(seq2) + 1):
match = (self.matrix[i - 1, j - 1]['score'] +
self.similarity[seq1[i - 1]][seq2[j - 1]],
Aligner._MAT)
delete = (self.matrix[i - 1, j]['score'] +
self.similarity['gap_open'], Aligner._DEL)
insert = (self.matrix[i, j - 1]['score'] +
self.similarity['gap_open'], Aligner._INS)
# traces = [delete, insert, match]
# max_score = max(delete, insert, match, key=lambda x: x[0])[0]
# possible_traces = [t for t in traces if t[0] == max_score]
# priority_move = sorted(possible_traces, key=lambda x: x[1])[0]
# self.matrix[i, j] = priority_move
# def dotype(lol):
# if lol == self._MAT:
# return 'match'
# if lol == self._INS:
# return 'insertion'
# if lol == self._DEL:
# return 'deletion'
# print(i, j)
# print("Possible Scores: {}".format([t[0] for t in possible_traces]))
# print("Possible Tracebacks: {}".format([dotype(t[1]) for t in possible_traces]))
# print("Chosen Traceback: {}".format(dotype(priority_move[1])))
max_score = max(delete, insert, match, key=lambda x: x[0])
self.matrix[i, j] = max_score
self.matrix[0, 0] = (0, Aligner._END)
# calculate alignment from the traceback
i = len(seq1)
j = len(seq2)
traceback = list()
while i > 0 or j > 0:
if self.matrix[i, j]['trace'] == Aligner._MAT:
if seq1[i - 1] == seq2[j - 1]:
traceback.append((i - 1, j - 1, "match", None))
else:
traceback.append((i - | 1, j - 1, "mismatch", None))
i -= 1
j -= 1
elif self.matrix[i, j]['trace'] == Aligner._INS:
pos_1 = 0 if (i - 1) < 0 else (i - 1)
tracebac | k.append((pos_1, j - 1, "insertion", 1))
j -= 1
elif self.matrix[i, j]['trace'] == Aligner._DEL:
pos_2 = 0 if (j - 1) < 0 else (j - 1)
traceback.append((i - 1, pos_2, "deletion", 1))
i -= 1
elif self.matrix[i, j]['trace'] == Aligner._END:
pass
else:
raise RuntimeError("Invalid value in alignment traceback.")
traceback.reverse()
# combine indels
indel = None
traceback_combined = list()
for t in traceback:
if t[2] == "insertion" or t[2] == "deletion":
if indel is not None:
if t[2] == indel[2]:
indel[3] += t[3]
else:
raise RuntimeError("Aligner failed to combine indels. "
"Check 'gap_open' penalty.")
else:
indel = list(t)
else:
if indel is not None:
traceback_combined.append(tuple(indel))
|
#!/usr/bin/env python
from distutils.core import setup, run_setup, Command
import zmq.auth
import shutil
import os
OSAD2_PATH = os.path.dirname(os.path.realpath(__file__))
OSAD2_SERVER_CERTS_DIR = "/etc/rhn/osad2-server/certs/"
OSAD2_SERVER_PUB_KEY = os.path.join(OSAD2_SERVER_CERTS_DIR, "public_keys/server.key")
OSAD2_SERVER_PRIVATE_KEY = os.path.join(OSAD2_SERVER_CERTS_DIR, "private_keys/server.key_secret")
OSAD2_CLIENT_SETUP_FILE = os.path.join(OSAD2_PATH, "setup_client.py")
PKGNAME_FILE = os.path.join(OSAD2_PATH, "PKGNAME")
class OSAD2Command(Command):
def _create_curve_certs(self, name):
print "Creating CURVE certificates for '%s'..." % name
pk_file, sk_file = zmq.auth.create_certificates(OSAD2_SERVER_CERTS_DIR,
name)
# OSAD2 certificates storage
pk_dst = os.path.join(OSAD2_SERVER_CERTS_DIR, "public_keys")
sk_dst = os.path.join(OSAD2 | _SERVER_CERTS_DIR, "private_keys")
shutil.move(pk_file, pk_dst)
shutil.move(sk_file, sk_dst)
pk_dst = os.path.join(pk_dst, name + ".key")
sk_dst = os.path.join(sk_dst, name + ".key_secret")
print pk_dst
print sk_dst
return pk_dst, sk_dst
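# Illustrative example: _create_curve_certs("server") asks pyzmq to generate
# a CURVE keypair, moves the files into the storage tree above, and returns
#   /etc/rhn/osad2-server/certs/public_keys/server.key
#   /etc/rhn/osad2-server/certs/private_keys/server.key_secret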
class CreateServerCommand(OSAD2Command):
description = "Create and install CURVE server key"
use | r_options = []
def initialize_options(self):
self.name = None
def finalize_options(self):
assert os.path.isdir(OSAD2_SERVER_CERTS_DIR), \
'Certificates storage dir doesn\'t exist: %s' % OSAD2_SERVER_CERTS_DIR
server_keyfile = os.path.join(OSAD2_SERVER_CERTS_DIR, 'private_keys/server.key_secret')
assert not os.path.isfile(server_keyfile), 'Server key already exists'
def run(self):
self._create_curve_certs("server")
class CreateClientCommand(OSAD2Command):
description = "Create a new client. Generate a RPM package"
user_options = [
('name=', None, 'Specify the new client name.'),
]
def initialize_options(self):
self.name = None
def finalize_options(self):
assert self.name, 'You must specify a client name'
assert os.path.isdir(OSAD2_SERVER_CERTS_DIR), \
'Certificates storage dir doesn\'t exist: %s' % OSAD2_SERVER_CERTS_DIR
keyfile = os.path.join(OSAD2_SERVER_CERTS_DIR, "public_keys/" + self.name + '.key')
server_keyfile = os.path.join(OSAD2_SERVER_CERTS_DIR, 'private_keys/server.key_secret')
assert os.path.isfile(server_keyfile), 'Server key doesn\'t exist'
assert not os.path.isfile(keyfile), 'Client name already exists'
def run(self):
pk_file, sk_file = self._create_curve_certs(self.name)
# Temporary key storage for RPM build
shutil.copy(pk_file, "etc/client.key_secret")
shutil.copy(OSAD2_SERVER_PUB_KEY, "etc/")
self._build_client_rpm()
def _build_client_rpm(self):
print "Creating RPM package for '%s'..." % self.name
open(PKGNAME_FILE, "w").write(self.name)
run_setup(OSAD2_CLIENT_SETUP_FILE, script_args=["bdist_rpm", "--quiet"])
os.remove(PKGNAME_FILE)
os.remove("etc/client.key_secret")
os.remove("etc/server.key")
setup(name='spacewalk-osad2-server',
version='alpha',
license='GPLv2',
description='An alternative OSA dispatcher module for Spacewalk',
long_description='This is an experiment to improve osad, a service '
'that simulates instant execution of actions in a '
'Spacewalk environment.',
platforms=['All'],
packages=['osad2', 'osad2.server'],
scripts=['bin/osad2_server.py'],
data_files=[
('/etc/rhn/osad2-server/', ['etc/osad_server.prod.cfg']),
('/etc/rhn/osad2-server/certs/private_keys/', []),
('/etc/rhn/osad2-server/certs/public_keys/', []),
],
cmdclass={'createclient': CreateClientCommand,
'createserver': CreateServerCommand})
|
import pylab as pl
import scipy as sp
from serpentine import *
from elements import *
import visualize
class AtfExt :
def __init__(self) :
print 'AtfExt:__init__'
# set twiss parameters
mytwiss = Twiss()
mytw | iss.betax = 6.85338806855804
mytwiss.alphax = 1.11230788371885
mytwiss.etax = 3.89188697330735e-012
mytwiss.etaxp = 63.1945125619190e-015
mytwiss.betay = 2.94129410712918
mytwiss.alphay = -1.91105724003646
mytwiss.etay = 0
mytwiss.etayp = 0
mytwiss.nemitx = 5.08807339588144e-006
mytwiss.nemity = 50.8807339588144e-009
mytwiss.sigz = 8.00000000000000e-003
mytwiss. | sigP = 1.03999991965541e-003
mytwiss.pz_cor = 0
# load beam line
self.atfFull = Serpentine(line='newATF2lat.aml',twiss=mytwiss)
self.atfExt = Serpentine(line=beamline.Line(self.atfFull.beamline[947:]),twiss=mytwiss)
# zero zero cors
self.atfExt.beamline.ZeroCors()
# Track
self.atfExt.Track()
readings = self.atfExt.GetBPMReadings()
# Visualisation
self.v = visualize.Visualize()
def moverCalibration(self, mag, bpms) :
pass
def correctorCalibration(self, corr, bpms) :
pass
def bba(self, mag, bpm) :
pass
def magMoverCalibration(self, mag, bpm) :
pass
def setMagnet(self,name, value) :
ei = self.atfExt.beamline.FindEleByName(name)
print ei
e = self.atfExt.beamline[ei[0]]
e.B = value
def plotOrbit(self) :
self.v.PlotBPMReadings(self.atfExt)
def plotTwiss(self) :
self.v.PlotTwiss(self.atfExt)
def run(self) :
self.atfExt.Track()
def jitterBeam(self) :
r = 1+sp.random.standard_normal()
# self.s.beam_in.x[5,:] = (1+r/3e4)*self.nominalE
# print r,self.s.BeamIn.x[5,:]
|
port noPadding
from binascii import a2b_hex
import unittest
class Rijndael_TestVectors(unittest.TestCase):
""" Test Rijndael algorithm using know values."""
def testGladman_dev_vec(self):
""" All 25 combinations of block and key size.
These test vectors were generated by Dr Brian Gladman
using the program aes_vec.cpp <brg@gladman.uk.net> 24th May 2001.
vectors in file: dev_vec.txt
http://fp.gladman.plus.com/cryptography_technology/rijndael/index.htm
"""
def RijndaelTestVec(i, key, pt, ct):
""" Run single AES test vector with any legal blockSize
and any legal key size. """
bkey, plainText, cipherText = a2b_hex(key), a2b_hex(pt), a2b_hex(ct)
kSize = len(bkey)
bSize = len(cipherText) # set block size to length of block
alg = Rijndael(bkey, keySize=kSize, blockSize=bSize, padding=noPadding())
self.assertEqual( alg.encrypt(plainText), cipherText )
self.assertEqual( alg.decrypt(cipherText), plainText )
RijndaelTestVec( i = 'dev_vec.txt 16 byte block, 16 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c',
pt = '3243f6a8885a308d313198a2e0370734',
ct = '3925841d02dc09fbdc118597196a0b32')
RijndaelTestVec( i = 'dev_vec.txt 16 byte block, 20 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160',
pt = '3243f6a8885a308d313198a2e0 | 370734',
ct = '231d844639b31b412211cfe93712b880')
| RijndaelTestVec( i = 'dev_vec.txt 16 byte block, 24 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da5',
pt = '3243f6a8885a308d313198a2e0370734',
ct = 'f9fb29aefc384a250340d833b87ebc00')
RijndaelTestVec( i = 'dev_vec.txt 16 byte block, 28 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da56a784d90',
pt = '3243f6a8885a308d313198a2e0370734',
ct = '8faa8fe4dee9eb17caa4797502fc9d3f')
RijndaelTestVec( i = 'dev_vec.txt 16 byte block, 32 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da56a784d9045190cfe',
pt = '3243f6a8885a308d313198a2e0370734',
ct = '1a6e6c2c662e7da6501ffb62bc9e93f3')
RijndaelTestVec( i = 'dev_vec.txt 20 byte block, 16 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c',
pt = '3243f6a8885a308d313198a2e03707344a409382',
ct = '16e73aec921314c29df905432bc8968ab64b1f51')
RijndaelTestVec( i = 'dev_vec.txt 20 byte block, 20 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160',
pt = '3243f6a8885a308d313198a2e03707344a409382',
ct = '0553eb691670dd8a5a5b5addf1aa7450f7a0e587')
RijndaelTestVec( i = 'dev_vec.txt 20 byte block, 24 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da5',
pt = '3243f6a8885a308d313198a2e03707344a409382',
ct = '73cd6f3423036790463aa9e19cfcde894ea16623')
RijndaelTestVec( i = 'dev_vec.txt 20 byte block, 28 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da56a784d90',
pt = '3243f6a8885a308d313198a2e03707344a409382',
ct = '601b5dcd1cf4ece954c740445340bf0afdc048df')
RijndaelTestVec( i = 'dev_vec.txt 20 byte block, 32 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da56a784d9045190cfe',
pt = '3243f6a8885a308d313198a2e03707344a409382',
ct = '579e930b36c1529aa3e86628bacfe146942882cf')
RijndaelTestVec( i = 'dev_vec.txt 24 byte block, 16 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d',
ct = 'b24d275489e82bb8f7375e0d5fcdb1f481757c538b65148a')
RijndaelTestVec( i = 'dev_vec.txt 24 byte block, 20 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d',
ct = '738dae25620d3d3beff4a037a04290d73eb33521a63ea568')
RijndaelTestVec( i = 'dev_vec.txt 24 byte block, 24 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da5',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d',
ct = '725ae43b5f3161de806a7c93e0bca93c967ec1ae1b71e1cf')
RijndaelTestVec( i = 'dev_vec.txt 24 byte block, 28 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da56a784d90',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d',
ct = 'bbfc14180afbf6a36382a061843f0b63e769acdc98769130')
RijndaelTestVec( i = 'dev_vec.txt 24 byte block, 32 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da56a784d9045190cfe',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d',
ct = '0ebacf199e3315c2e34b24fcc7c46ef4388aa475d66c194c')
RijndaelTestVec( i = 'dev_vec.txt 28 byte block, 16 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d0082efa9',
ct = 'b0a8f78f6b3c66213f792ffd2a61631f79331407a5e5c8d3793aceb1')
RijndaelTestVec( i = 'dev_vec.txt 28 byte block, 20 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d0082efa9',
ct = '08b99944edfce33a2acb131183ab0168446b2d15e958480010f545e3')
RijndaelTestVec( i = 'dev_vec.txt 28 byte block, 24 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da5',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d0082efa9',
ct = 'be4c597d8f7efe22a2f7e5b1938e2564d452a5bfe72399c7af1101e2')
RijndaelTestVec( i = 'dev_vec.txt 28 byte block, 28 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da56a784d90',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d0082efa9',
ct = 'ef529598ecbce297811b49bbed2c33bbe1241d6e1a833dbe119569e8')
RijndaelTestVec( i = 'dev_vec.txt 28 byte block, 32 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da56a784d9045190cfe',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d0082efa9',
ct = '02fafc200176ed05deb8edb82a3555b0b10d47a388dfd59cab2f6c11')
RijndaelTestVec( i = 'dev_vec.txt 32 byte block, 16 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d0082efa98ec4e6c8',
ct = '7d15479076b69a46ffb3b3beae97ad8313f622f67fedb487de9f06b9ed9c8f19')
RijndaelTestVec( i = 'dev_vec.txt 32 byte block, 20 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d0082efa98ec4e6c8',
ct = '514f93fb296b5ad16aa7df8b577abcbd484decacccc7fb1f18dc567309ceeffd')
RijndaelTestVec( i = 'dev_vec.txt 32 byte block, 24 byte key',
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Plugin's Base Class
"""
import sys
import cli
from cli.docopt import docopt
PLUGIN_NAME = "base-plugin"
PLUGIN_CLASS = "PluginBase"
VERSION = "Mesos Plugin Base 1.0"
SHORT_HELP = "This is the base plugin from which all other plugins inherit."
USAGE = \
"""
{short_help}
Usage:
mesos {plugin} (-h | --help)
mesos {plugin} --version
mesos {plugin} <command> (-h | --help)
mesos {plugin} [options] <command> [<args>...]
Options:
-h --help Show this screen.
--version Show version info.
Commands:
{commands}
"""
SUBCOMMAND_USAGE = \
"""{short_help}
Usage:
mesos {plugin} {command} (-h | --help)
mesos {plugin} {command} --version
mesos {plugin} {command} [options] {arguments}
Options:
{flags}
Description:
{long_help}
"""
class PluginBase():
"""
Base class from which all CLI plugins should inherit.
"""
# pylint: disable=too-few-public-methods
COMMANDS = {}
def __setup__(self, command, argv):
pass
def __module_reference__(self):
return sys.modules[self.__module__]
def __init__(self, settings, config):
# pylint: disable=invalid-name
self.PLUGIN_NAME = PLUGIN_NAME
self.PLUGIN_CLASS = PLUGIN_CLASS
self.VERSION = VERSION
self.SHORT_HELP = SHORT_HELP
self.USAGE = USAGE
module = self.__module_reference__()
if hasattr(module, "PLUGIN_NAME"):
self.PLUGIN_NAME = getattr(module, "PLUGIN_NAME")
if hasattr(module, "PLUGIN_CLASS"):
self.PLUGIN_CLASS = getattr(module, "PLUGIN_CLASS")
if hasattr(module, "VERSION"):
self.VERSION = getattr(module, "VERSION")
if hasattr(module, "SHORT_HELP"):
self.SHORT_HELP = getattr(mod | ule, "SHORT_HELP")
if hasattr(module, "USAGE"):
self.USAGE = getattr(module, "USAGE")
self.settings = settings
self.config = config
def __autocomplete__(self, command, current_word, argv):
# pylint: disable=unused-variable,unused-argument,
# attribute-defined-outside-init
return ("default", [])
def __autocomplete_base__(self, current_word, argv):
option = "default"
| # <command>
comp_words = list(self.COMMANDS.keys())
comp_words = cli.util.completions(comp_words, current_word, argv)
if comp_words is not None:
return (option, comp_words)
# <args>...
comp_words = self.__autocomplete__(argv[0], current_word, argv[1:])
# In general, we expect a tuple to be returned from __autocomplete__,
# with the first element being a valid autocomplete option, and the
# second being a list of completion words. However, in the common
# case we usually use the default option, so it's OK for a plugin to
# just return a list. We will add the "default" option for them.
if isinstance(comp_words, tuple):
option, comp_words = comp_words
return (option, comp_words)
def main(self, argv):
"""
Main method takes argument from top level mesos and parses them
to call the appropriate method.
"""
command_strings = cli.util.format_commands_help(self.COMMANDS)
usage = self.USAGE.format(
plugin=self.PLUGIN_NAME,
short_help=self.SHORT_HELP,
commands=command_strings)
arguments = docopt(
usage,
argv=argv,
version=self.VERSION,
program="mesos " + self.PLUGIN_NAME,
options_first=True)
cmd = arguments["<command>"]
argv = arguments["<args>"]
if cmd in self.COMMANDS.keys():
if "external" not in self.COMMANDS[cmd]:
argument_format, short_help, long_help, flag_format = \
cli.util.format_subcommands_help(self.COMMANDS[cmd])
usage = SUBCOMMAND_USAGE.format(
plugin=self.PLUGIN_NAME,
command=cmd,
arguments=argument_format,
flags=flag_format,
short_help=short_help,
long_help=long_help)
arguments = docopt(
usage,
argv=argv,
program="mesos " + self.PLUGIN_NAME + " " + cmd,
version=self.VERSION,
options_first=True)
if "alias" in self.COMMANDS[cmd]:
cmd = self.COMMANDS[cmd]["alias"]
self.__setup__(cmd, argv)
return getattr(self, cmd.replace("-", "_"))(arguments)
return self.main(["--help"])
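# A minimal sketch (hypothetical command) of the COMMANDS dict main() expects;
# each key becomes a subcommand name and maps to a method of the same name
# (with '-' replaced by '_'):
# COMMANDS = {
#     'list': {
#         'arguments': '[<task>]',
#         'flags': {'-a --all': 'list every task'},
#         'short_help': 'List tasks.',
#         'long_help': 'List the tasks running on a cluster.',
#     },
# }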
|
"""
Django settings for dts_test_project project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
import sys
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
TENANT_APPS_DIR = os.path.join(BASE_DIR, os.pardir)
sys.path.insert(0, TENANT_APPS_DIR)
sys.path.insert(0, BASE_DIR)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'cl1)b#c&xmm36z3e(quna-vb@ab#&gpjtdjtpyzh!qn%bc^xxn'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
SHARED_APPS = (
'django_tenants', # mandatory
'customers', # you must list the app where your tenant model resides in
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
)
TENANT_APPS = (
'dts_test_app',
)
TENANT_MODEL = "customers.Client" # app.Model
TENANT_DOMAIN_MODEL = "customers.Domain" # app.Model
TEST_RUNNER = 'django.test.runner.DiscoverRunner'
INSTALLED_APPS = list(SHARED_APPS) + [app for app in TENAN | T_APPS if app not in SHARED_APPS]
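# Illustrative example: with the tuples above this evaluates to
#   ['django_tenants', 'customers', 'django.contrib.auth', ...,
#    'django.contrib.staticfiles', 'dts_test_app']
# i.e. shared apps first, then any tenant apps not already listed.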
ROOT_URLCONF = 'dts_test_project.urls'
WSGI_APPLICATION = 'dts_test_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django_tenants. | postgresql_backend',
'NAME': 'dts_test_project',
'USER': 'postgres',
'PASSWORD': os.environ.get('DATABASE_PASSWORD', 'root'),
'HOST': os.environ.get('DATABASE_HOST', 'localhost'),
'PORT': '',
}
}
DATABASE_ROUTERS = (
'django_tenants.routers.TenantSyncRouter',
)
MIDDLEWARE = (
'tenant_tutorial.middleware.TenantTutorialMiddleware',
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
TEMPLATE_CONTEXT_PROCESSORS = (
'django.core.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.core.context_processors.debug',
'django.core.context_processors.media',
'django.core.context_processors.static',
'django.contrib.messages.context_processors.messages',
)
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
|
#! /usr/bin/env python3
import argparse
import logging
import os
from utils import run
logging.basicConfig(level=logging.INFO)
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('dist_dir')
parser.add_argument('version')
| return parser.parse_args()
args = parse_args()
DIR = os.path.dirname(os.path.abspath(__file__))
BASE_DIR = os.path.dirname(DIR)
DIST_DIR = os.path.abspath(args.dist_dir)
DRIVE_C = os.p | ath.join(DIST_DIR, 'drive_c')
WINE_RN_DIR = os.path.join(DRIVE_C, 'rednotebook')
WINE_RN_WIN_DIR = os.path.join(WINE_RN_DIR, 'win')
os.environ['WINEPREFIX'] = DIST_DIR
ISCC = os.path.join(DRIVE_C, 'Program Files (x86)', 'Inno Setup 5', 'ISCC.exe')
VERSION_PARAM = '/dREDNOTEBOOK_VERSION=%s' % args.version
run(['wine', ISCC, VERSION_PARAM, 'rednotebook.iss'], cwd=WINE_RN_WIN_DIR)
|
from django.contrib import admin
from common.admin import AutoUserMixin
from licenses.models import License
class LicenseAdmin(AutoUserMixin, admin.ModelAdmin):
fieldsets = [
(None, {
'fields': ['added', 'name', 'url', 'creative_commons',
| 'cc_attribution', 'cc_noncommercial',
'cc_no_deriv', 'cc_share_alike']
}),
]
# fields
readonly_fields = ['added']
list_display = ['name', 'url']
# field display
list_filter = ['name', 'added']
search_fields = ['name', 'url']
admin.site.reg | ister(License, LicenseAdmin)
|
.dot(V_uM.T.conj(), V_uM)
U_ow, U_lw, U_Ml = get_rot(F_MM, V_oM, Nw - No)
self.U_nw = np.vstack((U_ow, dots(V_uM, U_Ml, U_lw)))
# stop here ?? XXX
self.S_ww = self.rotate_matrix(np.ones(1))
if ortho:
lowdin(self.U_nw, self.S_ww)
self.S_ww = np.identity(Nw)
self.norms_n = np.dot(self.U_nw, np.linalg.solve(
self.S_ww, self.U_nw.T.conj())).diagonal()
def rotate_matrix(self, A_nn):
if A_nn.ndim == 1:
return np.dot(self.U_nw.T.conj() * A_nn, self.U_nw)
else:
return dots(self.U_nw.T.conj(), A_nn, self.U_nw)
def rotate_projections(self, P_ani):
P_awi = {}
for a, P_ni in P_ani.items():
P_awi[a] = np.tensordot(self.U_nw, P_ni, axes=[[0], [0]])
return P_awi
def rotate_function(self, psit_nG):
return np.tensordot(self.U_nw, psit_nG, axes=[[0], [0]])
class ProjectedWannierFunctionsIBL:
"""PWF in the infinite band limit.
::
--No --Nw
|w_w> = > |psi_o> U_ow + > |f_M> U_Mw
--o=1 --M=1
"""
def | __init__(self, V_n | M, S_MM, No, lcaoindices=None):
Nw = V_nM.shape[1]
assert No <= Nw
self.V_oM, V_uM = V_nM[:No], V_nM[No:]
F_MM = S_MM - np.dot(self.V_oM.T.conj(), self.V_oM)
U_ow, U_lw, U_Ml = get_rot(F_MM, self.V_oM, Nw - No)
self.U_Mw = np.dot(U_Ml, U_lw)
self.U_ow = U_ow - np.dot(self.V_oM, self.U_Mw)
if lcaoindices is not None:
for i in lcaoindices:
self.U_ow[:, i] = 0.0
self.U_Mw[:, i] = 0.0
self.U_Mw[i, i] = 1.0
# stop here ?? XXX
self.S_ww = self.rotate_matrix(np.ones(1), S_MM)
P_uw = np.dot(V_uM, self.U_Mw)
self.norms_n = np.hstack((
np.dot(U_ow, np.linalg.solve(self.S_ww, U_ow.T.conj())).diagonal(),
np.dot(P_uw, np.linalg.solve(self.S_ww, P_uw.T.conj())).diagonal()))
def rotate_matrix(self, A_o, A_MM):
assert A_o.ndim == 1
A_ww = dots(self.U_ow.T.conj() * A_o, self.V_oM, self.U_Mw)
A_ww += np.conj(A_ww.T)
A_ww += np.dot(self.U_ow.T.conj() * A_o, self.U_ow)
A_ww += dots(self.U_Mw.T.conj(), A_MM, self.U_Mw)
return A_ww
def rotate_projections(self, P_aoi, P_aMi, indices=None):
if indices is None:
U_ow = self.U_ow
U_Mw = self.U_Mw
else:
U_ow = self.U_ow[:, indices]
U_Mw = self.U_Mw[:, indices]
P_awi = {}
for a, P_oi in P_aoi.items():
P_awi[a] = np.tensordot(U_Mw, P_aMi[a], axes=[[0], [0]])
if len(U_ow) > 0:
P_awi[a] += np.tensordot(U_ow, P_oi, axes=[[0], [0]])
return P_awi
def rotate_function(self, psit_oG, bfs, q=-1, indices=None):
if indices is None:
U_ow = self.U_ow
U_Mw = self.U_Mw
else:
U_ow = self.U_ow[:, indices]
U_Mw = self.U_Mw[:, indices]
w_wG = np.zeros((U_ow.shape[1],) + psit_oG.shape[1:])
if len(U_ow) > 0:
gemm(1., psit_oG, U_ow.T.copy(), 0., w_wG)
bfs.lcao_to_grid(U_Mw.T.copy(), w_wG, q)
return w_wG
class PWFplusLCAO(ProjectedWannierFunctionsIBL):
def __init__(self, V_nM, S_MM, No, pwfmask, lcaoindices=None):
Nw = V_nM.shape[1]
self.V_oM = V_nM[:No]
dtype = V_nM.dtype
# Do PWF optimization for pwfbasis submatrix only!
Npwf = len(pwfmask.nonzero()[0])
pwfmask2 = np.outer(pwfmask, pwfmask)
s_MM = S_MM[pwfmask2].reshape(Npwf, Npwf)
v_oM = self.V_oM[:, pwfmask]
f_MM = s_MM - np.dot(v_oM.T.conj(), v_oM)
nw = len(s_MM)
assert No <= nw
u_ow, u_lw, u_Ml = get_rot(f_MM, v_oM, nw - No)
u_Mw = np.dot(u_Ml, u_lw)
u_ow = u_ow - np.dot(v_oM, u_Mw)
# Determine U for full lcao basis
self.U_ow = np.zeros((No, Nw), dtype)
for U_w, u_w in zip(self.U_ow, u_ow):
np.place(U_w, pwfmask, u_w)
self.U_Mw = np.identity(Nw, dtype)
np.place(self.U_Mw, pwfmask2, u_Mw.flat)
if lcaoindices is not None:
for i in lcaoindices:
self.U_ow[:, i] = 0.0
self.U_Mw[:, i] = 0.0
self.U_Mw[i, i] = 1.0
self.S_ww = self.rotate_matrix(np.ones(1), S_MM)
self.norms_n = None
def set_lcaoatoms(calc, pwf, lcaoatoms):
ind = get_bfi(calc, lcaoatoms)
for i in ind:
pwf.U_ow[:, i] = 0.0
pwf.U_Mw[:, i] = 0.0
pwf.U_Mw[i, i] = 1.0
class PWF2:
def __init__(self, gpwfilename, fixedenergy=0., spin=0, ibl=True,
basis='sz', zero_fermi=False, pwfbasis=None, lcaoatoms=None,
projection_data=None):
calc = GPAW(gpwfilename, txt=None, basis=basis)
assert calc.wfs.gd.comm.size == 1
assert calc.wfs.kpt_comm.size == 1
assert calc.wfs.band_comm.size == 1
if zero_fermi:
try:
Ef = calc.get_fermi_level()
except NotImplementedError:
Ef = calc.get_homo_lumo().mean()
else:
Ef = 0.0
self.ibzk_kc = calc.get_ibz_k_points()
self.nk = len(self.ibzk_kc)
self.eps_kn = [calc.get_eigenvalues(kpt=q, spin=spin) - Ef
for q in range(self.nk)]
self.M_k = [sum(eps_n <= fixedenergy) for eps_n in self.eps_kn]
print 'Fixed states:', self.M_k
self.calc = calc
self.dtype = self.calc.wfs.dtype
self.spin = spin
self.ibl = ibl
self.pwf_q = []
self.norms_qn = []
self.S_qww = []
self.H_qww = []
if ibl:
if pwfbasis is not None:
pwfmask = basis_subset2(calc.atoms.get_chemical_symbols(),
basis, pwfbasis)
if lcaoatoms is not None:
lcaoindices = get_bfi2(calc.atoms.get_chemical_symbols(),
basis,
lcaoatoms)
else:
lcaoindices = None
self.bfs = get_bfs(calc)
if projection_data is None:
V_qnM, H_qMM, S_qMM, self.P_aqMi = get_lcao_projections_HSP(
calc, bfs=self.bfs, spin=spin, projectionsonly=False)
else:
V_qnM, H_qMM, S_qMM, self.P_aqMi = projection_data
H_qMM -= Ef * S_qMM
for q, M in enumerate(self.M_k):
if pwfbasis is None:
pwf = ProjectedWannierFunctionsIBL(V_qnM[q], S_qMM[q], M,
lcaoindices)
else:
pwf = PWFplusLCAO(V_qnM[q], S_qMM[q], M, pwfmask,
lcaoindices)
self.pwf_q.append(pwf)
self.norms_qn.append(pwf.norms_n)
self.S_qww.append(pwf.S_ww)
self.H_qww.append(pwf.rotate_matrix(self.eps_kn[q][:M],
H_qMM[q]))
else:
if projection_data is None:
V_qnM = get_lcao_projections_HSP(calc, spin=spin)
else:
V_qnM = projection_data
for q, M in enumerate(self.M_k):
pwf = ProjectedWannierFunctionsFBL(V_qnM[q], M, ortho=False)
self.pwf_q.append(pwf)
self.norms_qn.append(pwf.norms_n)
self.S_qww.append(pwf.S_ww)
self.H_qww.append(pwf.rotate_matrix(self.eps_kn[q]))
for S in self.S_qww:
print 'Condition number: %0.1e' % condition_number(S)
def get_hamiltonian(self, q=0, indices=None):
if indices is None:
return self.H_qww[q]
else:
return self.H_qww[q].take(indices, 0).take(indices, 1)
def get_overlap(self, q=0, indices=None):
if indices is None:
return self.S_qww[q]
else:
|
import unittest, uuid
from nixie.core import Nixie, KeyError
class NixieErrorsTestCase(unittest.TestCase):
def test_read_mi | ssing(self):
nx = Nixie()
self.assertIsNone(nx.read('missing'))
def test_update_m | issing(self):
nx = Nixie()
with self.assertRaises(KeyError):
nx.update('missing')
def test_update_with_wrong_value(self):
nx = Nixie()
key = nx.create()
with self.assertRaises(ValueError):
nx.update(key, 'a')
def test_delete_missing(self):
nx = Nixie()
with self.assertRaises(KeyError):
nx.delete('missing')
|
from django.shortcuts import render_to_resp | onse
from django.core.context_processors import csrf
from django.conf import settings
def my_render(request, template, context={}):
context.update(csrf(request))
context['STATIC_URL'] = settings.STATIC_URL
context['flash'] = request.get_flash()
context['user'] = request.user
context['user_perfil'] = request.get_perfil()
context['credenciales'] = set(request.get_credenciales())
context['settings'] = settings
return render_to_response(template, context)
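# Hypothetical usage from a view:
#   def home(request):
#       return my_render(request, 'home.html', {'titulo': 'Inicio'})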
| |
# -*- coding: utf-8 -*-
from django import forms
from cmskit.articles.models import Index, Article
from cms.plugin_pool import plugin_pool
from cms.plugins.text.widgets.wymeditor_widget import WYMEditor
from cms.plugins.text.settings import USE_TINYMCE
from django.conf import settings  # needed for the INSTALLED_APPS check in get_editor_widget()
def get_editor_widget():
"""
Returns the Django form Widget to be used for
the text area
"""
#plugins = plugin_pool.get_text_enabled_plugins(self.placeholder, self.page)
if USE_TINYMCE and "tinymce" in settings.INSTALLED_APPS:
from cms.plugins.text.widgets.tinymce_widget import TinyMCEEditor
return TinyMCEEditor()
else:
return WYMEditor()
class IndexForm(forms.ModelForm):
class Meta:
model = Index
def __init__(self, *args, **kwargs):
super(IndexForm, self).__init__(*args, **kwargs)
choices = [self.fields['page'].choices.__iter__().next()]
| for page in self.fields['page'].querys | et:
choices.append(
(page.id, ''.join(['- '*page.level, page.__unicode__()]))
)
self.fields['page'].choices = choices
class ArticleForm(forms.ModelForm):
body = forms.CharField(widget=get_editor_widget())
class Meta:
model = Article
|
# get IPs that aren't the loopback
unwrap00 = [ip for ip in socket.gethostbyname_ex(socket.gethostname())[2] if not ip.startswith("127.")][:1]
# connect() on a UDP socket sends no packets; it just selects a route,
# so getsockname() reveals the outward-facing IP
unwrap01 = [[(s.connect(('8.8.8.8', 53)), s.getsockname()[0], s.close()) for s in [socket.socket(socket.AF_INET, socket.SOCK_DGRAM)]][0][1]]
unwrap2 = [l for l in (unwrap00,unwrap01) if l][0][0]
return unwrap2
def handshake(self):
"""
Send the terminal our name and IP t | o signal that we are alive
"""
# send the terminal some information about ourselves
# TODO: Report any calibrations that we have
hello = {'pilot':self.name, 'ip':self.ip, 'state':self.state}
self.node.send(self.parentid, 'HANDSHAKE', value=hello)
def update_state(self):
"""
Send our current state to the Terminal,
our Station object will cache this and | will handle any
future requests.
"""
self.node.send(self.parentid, 'STATE', self.state, flags={'NOLOG':True})
def l_start(self, value):
"""
Start running a task.
Get the task object by using `value['task_type']` to select from
:data:`.tasks.TASK_LIST` , then feed the rest of `value` as kwargs
into the task object.
Calls :meth:`.autopilot.run_task` in a new thread
Args:
value (dict): A dictionary of task parameters
"""
# TODO: If any of the sounds are 'file,' make sure we have them. If not, request them.
# Value should be a dict of protocol params
# The networking object should have already checked that we have all the files we need
if self.state == "RUNNING" or self.running.is_set():
self.logger.warning("Asked to a run a task when already running")
return
self.state = 'RUNNING'
self.running.set()
try:
# Get the task object by its type
if 'child' in value.keys():
task_class = tasks.CHILDREN_LIST[value['task_type']]
else:
task_class = tasks.TASK_LIST[value['task_type']]
# Instantiate the task
self.stage_block.clear()
# Make a group for this subject if we don't already have one
self.subject = value['subject']
prefs.set('SUBJECT', self.subject)
# Run the task and tell the terminal we have
# self.running.set()
threading.Thread(target=self.run_task, args=(task_class, value)).start()
self.update_state()
except Exception as e:
self.state = "IDLE"
self.logger.exception("couldn't start task: {}".format(e))
# TODO: Send a message back to the terminal with the runtime if there is one so it can handle timed stops
def l_stop(self, value):
"""
Stop the task.
Clear the running event, set the stage block.
TODO:
Do a coherence check between our local file and the Terminal's data.
Args:
value: ignored
"""
# Let the terminal know we're stopping
# (not stopped yet because we'll still have to sync data, etc.)
self.state = 'STOPPING'
self.update_state()
# We just clear the stage block and reset the running flag here
# and call the cleanup routine from run_task so it can exit cleanly
self.running.clear()
self.stage_block.set()
# TODO: Cohere here before closing file
if hasattr(self, 'h5f'):
self.h5f.close()
self.state = 'IDLE'
self.update_state()
def l_param(self, value):
"""
Change a task parameter mid-run
Warning:
Not Implemented
Args:
value:
"""
pass
def l_cal_port(self, value):
"""
Initiate the :meth:`.calibrate_port` routine.
Args:
value (dict): Dictionary of values defining the port calibration to be run, including
- ``port`` - which port to calibrate
- ``n_clicks`` - how many openings should be performed
- ``dur`` - how long the valve should be open
- ``click_iti`` - 'inter-trial interval', or how long we should wait between valve openings.
"""
port = value['port']
n_clicks = value['n_clicks']
open_dur = value['dur']
iti = value['click_iti']
threading.Thread(target=self.calibrate_port,args=(port, n_clicks, open_dur, iti)).start()
def calibrate_port(self, port_name, n_clicks, open_dur, iti):
"""
Run port calibration routine
Open a :class:`.hardware.gpio.Solenoid` repeatedly,
measure volume of water dispersed, compute lookup table mapping
valve open times to volume.
Continuously sends progress of test with ``CAL_PROGRESS`` messages
Args:
port_name (str): Port name as specified in ``prefs``
n_clicks (int): number of times the valve should be opened
open_dur (int, float): how long the valve should be opened for in ms
iti (int, float): how long we should :func:`~time.sleep` between openings
"""
pin_num = prefs.get('HARDWARE')['PORTS'][port_name]
port = gpio.Solenoid(pin_num, duration=int(open_dur))
msg = {'click_num': 0,
'pilot': self.name,
'port': port_name
}
iti = float(iti)/1000.0
cal_name = "Cal_{}".format(self.name)
for i in range(int(n_clicks)):
port.open()
msg['click_num'] = i + 1
self.node.send(to=cal_name, key='CAL_PROGRESS',
value= msg)
time.sleep(iti)
port.release()
def l_cal_result(self, value):
"""
Save the results of a port calibration
"""
# files for storing raw and fit calibration results
cal_fn = os.path.join(prefs.get('BASEDIR'), 'port_calibration.json')
if os.path.exists(cal_fn):
try:
with open(cal_fn, 'r') as cal_file:
calibration = json.load(cal_file)
except ValueError:
# usually means no JSON could be decoded; that's fine, calibrations aren't expensive
calibration = {}
else:
calibration = {}
for port, results in value.items():
if port in calibration:
calibration[port].extend(results)
else:
calibration[port] = results
with open(cal_fn, 'w+') as cal_file:
json.dump(calibration, cal_file)
def l_bandwidth(self, value):
"""
Send messages with a Poisson process according to the settings in value
"""
# turn off logging for now
self.networking.logger.setLevel(logging.ERROR)
self.node.logger.setLevel(logging.ERROR)
n_msg = int(value['n_msg'])
rate = float(value['rate'])
payload = int(value['payload'])
confirm = bool(value['confirm'])
payload = np.zeros(payload * 1024, dtype=bool)  # np.bool is deprecated; builtin bool is equivalent
payload_size = sys.getsizeof(payload)
message = {
'pilot': self.name,
'payload': payload,
}
# make a fake message to test how large the serialized message is
test_msg = Message(to='bandwith', key='BANDWIDTH_MSG', value=message, repeat=confirm, flags={'MINPRINT':True},
id="test_message", sender="test_sender")
msg_size = sys.getsizeof(test_msg.serialize())
message['message_size'] = msg_size
message['payload_size'] = payload_size
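# e.g. payload=64 yields a 64 * 1024-element boolean array (~64 KiB at one
# byte per element); recording payload_size and message_size lets the
# receiver separate payload cost from serialization overhead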
if rate > 0:
spacing = 1.0/rate
else:
spacing = 0
# wait a quarter of a second to let the terminal get messages out
time.sleep(0.25)
if spacing > 0:
last_message = time.perf_counter()
for i in range(n_msg):
message['n_msg'] = i
message['timestamp'] = dat
# -*- coding: ascii -*-
r"""
:Copyright:
Copyright 2014 - 2016
Andr\xe9 Malo or his licensors, as applicable
:License:
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
======
Jobs
======
Jobs have been entered into the scheduler once. They may even be finished
already.
"""
if __doc__: # pragma: no cover
# pylint: disable = redefined-builtin
__doc__ = __doc__.encode('ascii').decode('unicode_escape')
__author__ = r"Andr\xe9 Malo".encode('ascii').decode('unicode_escape')
__docformat__ = "restructuredtext en"
import collections as _collections
import itertools as _it
from .. import _graph
from .. import interfaces as _interfaces
from .. import _lock
#: Exception raised on cycles, when a todo DAG is resolved
DependencyCycle = _graph.DependencyCycle
#: Job ID sequence
#:
#: :Type: callable
_gen_id = _it.count(1).next
def last_job_id():
"""
Determine the largest job ID assigned until now
:Return: The ID. It's ``0`` if no job ID was assigned until now (job IDs
start with ``1``)
:Rtype: ``int``
"""
# this inspects the counter iterable by calling pickling methods and
# retrieving the next value from there and then subtracting one.
# __reduce__ returns the factory ('count') and the argument tuple
# containing the initial value (advanced with each call to next())
# pylint: disable = no-member
return _gen_id.__self__.__reduce__()[1][0] - 1
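# A minimal standalone sketch of the __reduce__ trick used above (standard
# library only, Python 2 as in this module; independent of this code):
#
#     import itertools
#     counter = itertools.count(1)
#     counter.next(); counter.next()        # hand out IDs 1 and 2
#     factory, args = counter.__reduce__()  # -> (itertools.count, (3,))
#     assert args[0] - 1 == 2               # last ID handed out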
class Job(object):
"""
Job after it has been scheduled.
:See: `JobInterface`
"""
__implements__ = [_interfaces.JobInterface]
def __init__(self, job_id, desc, group, locks, importance, not_before,
extra, predecessors, attempts):
"""
Initialization
:Parameters:
`job_id` : ``int``
Job ID
`desc` : `TodoDescription`
Job description
`group` : ``str``
Job Group
`locks` : iterable
List of locks that need to be acquired (``(`LockInterface`, ...)``)
`importance` : ``int``
Job importance
`not_before` : various
execute job not before this time. Special formats are allowed:
``int``
Number of seconds from now (delay)
``datetime.datetime``
a specific point in time (server time). Use UTC if you can. For
naive date times, UTC is assumed.
If omitted or ``None``, ``0`` is assumed.
`extra` : ``dict``
Extra job data
`predecessors` : iterable
List of jobs to be run successfully before this one
(``(int, ...)``)
`attempts` : ``list``
execution attempts (``[ExecutionAttemptInterface, ...]``)
"""
self.id = job_id
self.desc = desc
self.group = group
self.locks = _lock.validate(locks)
self.locks_waiting = None
self.importance = importance
self.extra = extra
self.predecessors = set()
self.predecessors_waiting = None
self.attempts = attempts
self.not_before = not_before
for item in predecessors or ():
self.depend_on(item)
def depend_on(self, job_id):
"""
Add predecessor job ID
Duplicates are silently ignored.
:See: `interfaces.JobInterface.depend_on`
"""
assert self.predecessors_waiting is None
try:
job_id = int(job_id)
except (TypeError, ValueError):
raise ValueError("Invalid job_id: %r" % (job_id,))
if job_id < 1 or job_id >= self.id:
raise ValueError("Invalid job_id: %r" % (job_id,))
self.predecessors.add(job_id)
def job_from_todo(todo):
"""
Construct Job from Todo
:Parameters:
`todo` : `Todo`
Todo to construct from
:Return: New job instance
:Rtype: `JobInterface`
"""
return Job(
_gen_id(), todo.desc, todo.group, todo.locks, todo.importance,
todo.not_before, {}, set(), []
)
def joblist_from_todo(todo):
"""
Construct a list of jobs from Todo graph
:Parameters:
`todo` : `Todo`
todo to be inspected.
:Return: List of jobs (``[JobInterface, ...]``)
:Rtype: ``list``
"""
jobs, todos, virtuals = [], {}, {}
toinspect = _collections.deque([(todo, None)])
graph = _graph.DependencyGraph()
# 1) fill the dependency graph with the todo nodes (detects cycles, too)
try:
while toinspect:
todo, parent = toinspect.pop()
todo_id = id(todo)
if todo_id in todos:
virtual_id, pre, _ = todos[todo_id]
else:
pre = []
virtual_id = len(virtuals)
todos[todo_id] = virtual_id, pre, todo
virtuals[virtual_id] = todo_id
for parent_id in todo.predecessors():
graph.add((False, parent_id), (True, virtual_id))
pre.append((False, parent_id))
for succ in todo.successors():
toinspect.appendleft((succ, (True, virtual_id)))
if parent is not None:
graph.add(parent, (True, virtual_id))
pre.append(parent)
else:
graph.add((False, None), (True, virtual_id))
except DependencyCycle as e:
# remap to our input (todos and not some weird virtual IDs)
raise DependencyCycle([
todos[virtuals[tup[1]]][2] for tup in e.args[0]
])
# 2) resolve the graph (create topological order)
id_mapping = {}
for is_virtual, virtual_id in graph.resolve():
if is_virtual:
_, pres, todo = todos[virtuals[virtual_id]]
job = job_from_todo(todo)
for is_virtual, pre in pres:
if is_virtual:
pre = id_mapping[pre]
job.depend_on(pre)
id_mapping[virtual_id] = job.id
jobs.append(job)
return jobs
from builtins import str
from builtins import object
import smtplib
import email.utils
from biomaj.workflow import Workflow
import logging
import sys
if sys.version < '3':
from email.MIMEText import MIMEText
else:
from email.mime.text import MIMEText
class Notify(object):
"""
Send notifications
"""
@staticmethod
def notifyBankAction(bank):
if not bank.config.get('mail.smtp.host') or bank.session is None:
logging.info('Notify:none')
return
admins = bank.config.get('mail.admin')
if not admins:
logging.info('Notify: no mail.admin defined')
return
admin_list = admins.split(',')
logging.info('Notify:' + bank.config.get('mail.admin'))
mfrom = bank.config.get('mail.from')
log_file = bank.config.log_file
msg = MIMEText('')
if log_file:
fp = None
if sys.version < '3':
fp = open(log_file, 'rb')
else:
fp = open(log_file, 'r')
msg = MIMEText(fp.read(2000000))
fp.close()
msg['From'] = email.utils.formataddr(('Author', mfrom))
msg['Subject'] = 'BANK[' + bank.name + '] - STATUS[' + str(bank.session.get_status(Workflow.FLOW_OVER)) + '] - UPDATE[' + str(bank.session.get('update')) + '] - REMOVE[' + str(bank.session.get('remove')) + ']' + ' - RELEASE[' + str(bank.session.get('release')) + ']'
logging.info(msg['subject'])
server = None
for mto in admin_list:
# email.Message.__setitem__ appends rather than replaces headers, so
# drop any previous 'To' before addressing the next recipient
del msg['To']
msg['To'] = email.utils.formataddr(('Recipient', mto))
try:
server = smtplib.SMTP(bank.config.get('mail.smtp.host'))
if bank.config.get('mail.tls') is not None and str(bank.config.get('mail.tls')) == 'true':
server.starttls()
if bank.config.get('mail.user') is not None and str(bank.config.get('mail.user')) != '':
server.login(bank.config.get('mail.user'), bank.config.get('mail.password'))
server.sendmail(mfrom, [mto], msg.as_string())
except Exception as e:
logging.error('Could not send email: ' + str(e))
finally:
if server is not None:
server.quit()
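# A hedged sketch of the bank properties this method reads (names taken
# from the config.get() calls above; values are illustrative):
#
#   mail.smtp.host = smtp.example.org
#   mail.admin     = admin1@example.org,admin2@example.org
#   mail.from      = biomaj@example.org
#   mail.tls       = true          # optional, enables STARTTLS
#   mail.user      = smtp-user     # optional, triggers login
#   mail.password  = secret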
from __future__ import print_function
import os
twyg = ximport('twyg')
# reload(twyg)
datafiles = list(filelist( os.path.abspath('example-data')))
datafile = choice(datafiles)
configs = [ 'boxes', 'bubbles', 'edge', 'flowchart', 'hive', 'ios', 'jellyfish',
'junction1', 'junction2', 'modern', 'nazca', 'rounded', 'square',
'synapse', 'tron']
colorschemes = [ 'aqua', 'azure', 'bordeaux', 'clay', 'cmyk', 'cobalt', 'colors21',
'crayons', 'earth', 'forest', 'grape', 'honey', 'inca', 'jelly', 'kelp',
'mango', 'mellow', 'merlot', 'milkshake', 'mint-gray', 'mint', 'moon',
'mustard', 'neo', 'orbit', 'pastels', 'quartz', 'salmon', 'tentacle',
'terracotta', 'turquoise', 'violet']
config = choice(configs)
colorscheme = choice(colorschemes)
margins = ['10%', '5%']
print( config )
print( colorscheme )
print( os.path.basename(datafile) )
print()
twyg.generate_output_nodebox(datafile, config, colorscheme=colorscheme, margins=margins)
"clsmethod"
@staticmethod
def staticmethod():
return "staticmethod"
@attr.s(init=False, eq=False, order=False, hash=False, repr=False)
class C1Bare(object):
x = attr.ib(validator=attr.validators.instance_of(int))
y = attr.ib()
def method(self):
return self.x
@classmethod
def classmethod(cls):
return "clsmethod"
@staticmethod
def staticmethod():
return "staticmethod"
@attr.s(slots=True, hash=True)
class C2Slots(C1BareSlots):
z = attr.ib()
@attr.s(slots=True, hash=True)
class C2(C1Bare):
z = attr.ib()
c2 = C2Slots(x=1, y=2, z="test")
assert 1 == c2.x
assert 2 == c2.y
assert "test" == c2.z
assert 1 == c2.method()
assert "clsmethod" == c2.classmethod()
assert "staticmethod" == c2.staticmethod()
with pytest.raises(AttributeError):
c2.t = "test"
non_slot_instance = C2(x=1, y=2, z="test")
if has_pympler:
assert asizeof(c2) < asizeof(non_slot_instance)
c3 = C2Slots(x=1, y=3, z="test")
assert c3 > c2
c2_ = C2Slots(x=1, y=2, z="test")
assert c2 == c2_
assert "C2Slots(x=1, y=2, z='test')" == repr(c2)
hash(c2) # Just to assert it doesn't raise.
assert {"x": 1, "y": 2, "z": "test"} == attr.asdict(c2)
@pytest.mark.skipif(PY2, reason="closure cell rewriting is PY3-only.")
class TestClosureCellRewriting(object):
def test_closure_cell_rewriting(self):
"""
Slotted classes support proper closure cell rewriting.
This affects features like `__class__` and the no-arg super().
"""
non_slot_instance = C1(x=1, y="test")
slot_instance = C1Slots(x=1, y="test")
assert non_slot_instance.my_class() is C1
assert slot_instance.my_class() is C1Slots
# Just assert they return something, and not an exception.
assert non_slot_instance.my_super()
assert slot_instance.my_super()
def test_inheritance(self):
"""
Slotted classes support proper closure cell rewriting when inheriting.
This affects features like `__class__` and the no-arg super().
"""
@attr.s
class C2(C1):
def my_subclass(self):
return __class__
@attr.s
class C2Slots(C1Slots):
def my_subclass(self):
return __class__
non_slot_instance = C2(x=1, y="test")
slot_instance = C2Slots(x=1, y="test")
assert non_slot_instance.my_class() is C1
assert slot_instance.my_class() is C1Slots
# Just assert they return something, and not an exception.
assert non_slot_instance.my_super()
assert slot_instance.my_super()
assert non_slot_instance.my_subclass() is C2
assert slot_instance.my_subclass() is C2Slots
@pytest.mark.parametrize("slots", [True, False])
def test_cls_static(self, slots):
"""
Slotted classes support proper closure cell rewriting for class- and
static methods.
"""
# Python can reuse closure cells, so we create new classes just for
# this test.
@attr.s(slots=slots)
class C:
@classmethod
def clsmethod(cls):
return __class__
assert C.clsmethod() is C
@attr.s(slots=slots)
class D:
@staticmethod
def statmethod():
return __class__
assert D.statmethod() is D
@pytest.mark.skipif(PYPY, reason="set_closure_cell always works on PyPy")
@pytest.mark.skipif(
sys.version_info >= (3, 8),
reason="can't break CodeType.replace() via monkeypatch",
)
def test_code_hack_failure(self, monkeypatch):
"""
Keeps working if function/code object introspection doesn't work
on this (nonstandard) interpreter.
A warning is emitted that points to the actual code.
"""
# This is a pretty good approximation of the behavior of
# the actual types.CodeType on Brython.
monkeypatch.setattr(types, "CodeType", lambda: None)
func = make_set_closure_cell()
with pytest.warns(RuntimeWarning) as wr:
func()
w = wr.pop()
assert __file__ == w.filename
assert (
"Running interpreter doesn't sufficiently support code object "
"introspection. Some features like bare super() or accessing "
"__class__ will not work with slotted classes.",
) == w.message.args
assert just_warn is func
@pytest.mark.skipif(PYPY, reason="__slots__ only block weakref on CPython")
def test_not_weakrefable():
"""
Instance is not weak-referenceable when `weakref_slot=False` in CPython.
"""
@attr.s(slots=True, weakref_slot=False)
class C(object):
pass
c = C()
with pytest.raises(TypeError):
weakref.ref(c)
@pytest.mark.skipif(
not PYPY, reason="slots without weakref_slot should only work on PyPy"
)
def test_implicitly_weakrefable():
"""
Instance is weak-referenceable even when `weakref_slot=False` in PyPy.
"""
@attr.s(slots=True, weakref_slot=False)
class C(object):
pass
c = C()
w = weakref.ref(c)
assert c is w()
def test_weakrefable():
"""
Instance is weak-referenceable when `weakref_slot=True`.
"""
@attr.s(slots=True, weakref_slot=True)
class C(object):
pass
c = C()
w = weakref.ref(c)
assert c is w()
def test_weakref_does_not_add_a_field():
"""
`weakref_slot=True` does not add a field to the class.
"""
@attr.s(slots=True, weakref_slot=True)
class C(object):
field = attr.ib()
assert [f.name for f in attr.fields(C)] == ["field"]
def tests_weakref_does_not_add_when_inheriting_with_weakref():
"""
`weakref_slot=True` does not add a new __weakref__ slot when inheriting
one.
"""
@attr.s(slots=True, weakref_slot=True)
class C(object):
pass
@attr.s(slots=True, weakref_slot=True)
class D(C):
pass
d = D()
w = weakref.ref(d)
assert d is w()
def tests_weakref_does_not_add_with_weakref_attribute():
"""
`weakref_slot=True` does not add a new __weakref__ slot when an attribute
of that name exists.
"""
@attr.s(slots=True, weakref_slot=True)
class C(object):
__weakref__ = attr.ib(
init=False, hash=False, repr=False, eq=False, order=False
)
c = C()
w = weakref.ref(c)
assert c is w()
def test_slots_empty_cell():
"""
Tests that no `ValueError: Cell is empty` exception is raised when
closure cells are present with no contents in a `slots=True` class.
(issue https://github.com/python-attrs/attrs/issues/589)
On Python 3, if a method mentions `__class__` or uses the no-arg `super()`,
the compiler will bake a reference to the class in the method itself as
`method.__closure__`. Since `attrs` replaces the class with a clone,
`_ClassBuilder._create_slots_class(self)` will rewrite these references so
it keeps working. This method was not properly covering the edge case where
the closure cell was empty; we fixed it, and this is the non-regression
test.
"""
@attr.s(slots=True)
class C(object):
field = attr.ib()
def f(self, a):
super(C, self).__init__()
C(field=1)
@attr.s(getstate_setstate=True)
class C2(object):
x = attr.ib()
@attr.s(slots=True, getstate_setstate=True)
class C2Slots(object):
x = attr.ib()
class TestPickle(object):
@pytest.mark.parametrize("protocol", range(pickle.HIGHEST_PROTOCOL))
def test_pickleable_by_default(self, protocol):
"""
If nothing else is passed, slotted classes can be pickled and
unpickled with all supported protocols.
"""
i1 = C1Slots(1, 2)
i2 = pickle.loads(pickle.dumps(i1, protocol))
assert i1 == i2
assert i1 is not i2
def test_no_getstate_setstate_for_dict_c
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
################################################################################
import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from daklib.database.all import Base
Session = sessionmaker()
@pytest.fixture(scope='session')
def engine():
engine = create_engine('sqlite://', echo=True)
Base.metadata.create_all(engine)
return engine
@pytest.fixture
def session(engine):
connection = engine.connect()
trans = connection.begin()
session = Session(bind=connection)
yield session
session.close()
trans.rollback()
connection.close()
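# A hedged usage sketch of the session fixture above (the model name is
# hypothetical; real mapped classes come from daklib.database.all):
#
#     def test_insert_is_rolled_back(session):
#         session.add(SomeModel())      # hypothetical mapped class
#         session.flush()               # hits the connection, not a commit
#         # the outer transaction is rolled back after the test, so each
#         # test starts from an empty schema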
in the provenance
doc_id = provenance[0].get('document', {}).get('@id')
if doc_id:
title = self.doc.documents.get(doc_id, {}).get('title')
if title:
provenance[0]['document']['title'] = title
annotations = {'found_by': relation.get('rule'),
'provenance': provenance}
if self.doc.dct is not None:
annotations['document_creation_time'] = self.doc.dct.to_json()
epistemics = {}
negations = self.get_negation(relation)
hedgings = self.get_hedging(relation)
if hedgings:
epistemics['hedgings'] = hedgings
if negations:
# This is the INDRA standard to show negation
epistemics['negated'] = True
# But we can also save the texts associated with the negation
# under annotations, just in case it's needed
annotations['negated_texts'] = negations
# If that fails, we can still get the text of the relation
if text is None:
text = _sanitize(relation.get('text'))
ev = Evidence(source_api='eidos', text=text, annotations=annotations,
context=context, epistemics=epistemics)
return ev
@staticmethod
def get_negation(event):
"""Return negation attached to an event.
Example: "states": [{"@type": "State", "type": "NEGATION",
"text": "n't"}]
"""
states = event.get('states', [])
if not states:
return []
negs = [state for state in states
if state.get('type') == 'NEGATION']
neg_texts = [neg['text'] for neg in negs]
return neg_texts
@staticmethod
def get_hedging(event):
"""Return hedging markers attached to an event.
Example: "states": [{"@type": "State", "type": "HEDGE",
"text": "could"}
"""
states = event.get('states', [])
if not states:
return []
hedgings = [state for state in states
if state.get('type') == 'HEDGE']
hedging_texts = [hedging['text'] for hedging in hedgings]
return hedging_texts
def extract_entity_states(self, states):
if states is None:
return {'polarity': None, 'adjectives': []}
polarity = None
adjectives = []
time_context = None
geo_context = None
for state in states:
if polarity is None:
if state['type'] == 'DEC':
polarity = -1
# Handle None entry here
mods = state.get('modifiers') or []
adjectives += [mod['text'] for mod in mods]
elif state['type'] == 'INC':
polarity = 1
mods = state.get('modifiers') or []
adjectives += [mod['text'] for mod in mods]
elif state['type'] == 'QUANT':
adjectives.append(state['text'])
if state['type'] == 'TIMEX':
time_context = self.time_context_from_ref(state)
elif state['type'] == 'LocationExp':
# TODO: here we take only the first geo_context occurrence.
# Eidos sometimes provides a list of locations, it may
# make sense to break those up into multiple statements
# each with one location
if not geo_context:
geo_context = self.geo_context_from_ref(state)
return {'polarity': polarity, 'adjectives': adjectives,
'time_context': time_context, 'geo_context': geo_context}
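# A hedged sketch of a `states` list this method understands (type names
# taken from the branches above; the texts are illustrative):
#   [{'type': 'DEC', 'text': 'decreased',
#     'modifiers': [{'text': 'significantly'}]},
#    {'type': 'TIMEX', 'value': {'@id': '_:DCT_1'}}]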
def get_groundings(self, entity):
"""Return groundings as db_refs for an entity."""
def get_grounding_entries(grounding):
if not grounding:
return None
entries = []
values = grounding.get('values', [])
# Values could still have been a None entry here
if values:
for entry in values:
ont_concept = entry.get('ontologyConcept')
value = entry.get('value')
if ont_concept is None or value is None:
continue
entries.append((ont_concept, value))
return entries
# Save raw text and Eidos scored groundings as db_refs
db_refs = {'TEXT': entity['text']}
groundings = entity.get('groundings')
if not groundings:
return db_refs
for g in groundings:
entries = get_grounding_entries(g)
# Only add these groundings if there are actual values listed
if entries:
key = g['name'].upper()
if self.grounding_ns is not None and \
key not in self.grounding_ns:
continue
if key == 'UN':
db_refs[key] = [(s[0].replace(' ', '_'), s[1])
for s in entries]
elif key == 'WM_FLATTENED' or key == 'WM':
db_refs['WM'] = [(s[0].strip('/'), s[1])
for s in entries]
else:
db_refs[key] = entries
return db_refs
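# A hedged sketch of the Eidos grounding structure consumed above (key
# names from the code; the concept path and score are illustrative):
#   entity['groundings'] = [{
#       'name': 'wm',
#       'values': [{'ontologyConcept': 'wm/concept/agriculture/crop',
#                   'value': 0.91}],
#   }]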
def get_concept(self, entity):
"""Return Concept from an Eidos entity."""
# Use the canonical name as the name of the Concept
name = entity['canonicalName']
db_refs = self.get_groundings(entity)
concept = Concept(name, db_refs=db_refs)
return concept
def time_context_from_ref(self, timex):
"""Return a time context object given a timex reference entry."""
# If the timex has a value set, it means that it refers to a DCT or
# a TimeExpression e.g. "value": {"@id": "_:DCT_1"} and the parameters
# need to be taken from there
value = timex.get('value')
if value:
# Here we get the TimeContext directly from the stashed DCT
# dictionary
tc = self.doc.timexes.get(value['@id'])
return tc
return None
def geo_context_from_ref(self, ref):
"""Return a ref context object given a location reference entry."""
value = ref.get('value')
if value:
# Here we get the RefContext from the stashed geoloc dictionary
rc = self.doc.geolocs.get(value['@id'])
return rc
return None
def get_all_events(self):
"""Return a list of all standalone events from a list
of statements."""
events = []
for stmt in self.statements:
stmt = copy.deepcopy(stmt)
if isinstance(stmt, Influence):
for member in [stmt.subj, stmt.obj]:
member.evidence = stmt.evidence[:]
# Remove the context since it may be for the other member
for ev in member.evidence:
ev.context = None
events.append(member)
elif isinstance(stmt, Association):
for member in stmt.members:
member.evidence = stmt.evidence[:]
# Remove the context since it may be for the other member
for ev in member.evidence:
ev.context = None
events.append(member)
elif isinstance(stmt, Event):
events.append(stmt)
return events
class EidosDocument(object):
def __init__(self, json_dict):
self.tree = objectpath.Tree(json_dict)
self.extractions = []
self.sentences = {}
self.entities = {}
self.documents = {}
self.coreferences = {}
self.timexes = {}
self.geolocs = {}
self.dct = None
self._preprocess_extractions()
def _preprocess_extractions(self):
extra
def create_uuid():
return str(uuid.uuid4().int)
##
# GID is a tuple:
# (uuid, urn, public_key)
#
# UUID is a unique identifier and is created by the python uuid module
# (or the utility function create_uuid() in gid.py).
#
# HRN is a human readable name. It is a dotted form similar to a backward domain
# name. For example, planetlab.us.arizona.bakers.
#
# URN is a human readable identifier of form:
# "urn:publicid:IDN+toplevelauthority[:sub-auth.]*[\res. type]\ +object name"
# For example, urn:publicid:IDN+planetlab:us:arizona+user+bakers
#
# PUBLIC_KEY is the public key of the principal identified by the UUID/HRN.
# It is a Keypair object as defined in the cert.py module.
#
# It is expected that there is a one-to-one pairing between UUIDs and HRN,
# but it is uncertain how this would be enforced or if it needs to be enforced.
#
# These fields are encoded using xmlrpc into the subjectAltName field of the
# x509 certificate. Note: Call encode() once the fields have been filled in
# to perform this encoding.
class GID(Certificate):
uuid = None
hrn = None
urn = None
email = None # for adding to the SubjectAltName
##
# Create a new GID object
#
# @param create If true, create the X509 certificate
# @param subject If subject!=None, create the X509 cert and set the subject name
# @param string If string!=None, load the GID from a string
# @param filename If filename!=None, load the GID from a file
# @param lifeDays life of GID in days - default is 1825==5 years
def __init__(self, create=False, subject=None, string=None, filename=None, uuid=None, hrn=None, urn=None, lifeDays=1825):
Certificate.__init__(self, lifeDays, create, subject, string, filename)
if subject:
logger.debug("Creating GID for subject: %s" % subject)
if uuid:
self.uuid = int(uuid)
if hrn:
self.hrn = hrn
self.urn = hrn_to_urn(hrn, 'unknown')
if urn:
self.urn = urn
self.hrn, type = urn_to_hrn(urn)
def set_uuid(self, uuid):
if isinstance(uuid, str):
self.uuid = int(uuid)
else:
self.uuid = uuid
def get_uuid(self):
if not self.uuid:
self.decode()
return self.uuid
def set_hrn(self, hrn):
self.hrn = hrn
def get_hrn(self):
if not self.hrn:
self.decode()
return self.hrn
def set_urn(self, urn):
self.urn = urn
self.hrn, type = urn_to_hrn(urn)
def get_urn(self):
if not self.urn:
self.decode()
return self.urn
# Will be stuffed into subjectAltName
def set_email(self, email):
self.email = email
def get_email(self):
if not self.email:
self.decode()
return self.email
def get_type(self):
if not self.urn:
self.decode()
_, t = urn_to_hrn(self.urn)
return t
##
# Encode the GID fields and package them into the subject-alt-name field
# of the X509 certificate. This must be called prior to signing the
# certificate. It may only be called once per certificate.
def encode(self):
if self.urn:
urn = self.urn
else:
urn = hrn_to_urn(self.hrn, None)
str = "URI:" + urn
if self.uuid:
str += ", " + "URI:" + uuid.UUID(int=self.uuid).urn
if self.email:
str += ", " + "email:" + self.email
self.set_data(str, 'subjectAltName')
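# Hedged example of the resulting subjectAltName payload (format follows
# the concatenation above; the identifiers are illustrative):
#   URI:urn:publicid:IDN+planetlab:us:arizona+user+bakers,
#   URI:urn:uuid:12345678-1234-5678-1234-567812345678, email:bakers@example.edu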
##
# Decode the subject-alt-name field of the X509 certificate into the
# fields of the GID. This is automatically called by the various get_*()
# functions in this class.
def decode(self):
data = self.get_data('subjectAltName')
dict = {}
if data:
if data.lower().startswith('uri:http://<params>'):
dict = xmlrpclib.loads(data[11:])[0][0]
else:
spl = data.split(', ')
for val in spl:
if val.lower().startswith('uri:urn:uuid:'):
dict['uuid'] = uuid.UUID(val[4:]).int
elif val.lower().startswith('uri:urn:publicid:idn+'):
dict['urn'] = val[4:]
elif val.lower().startswith('email:'):
# FIXME: Ensure there isn't cruft in that address...
# EG look for email:copy,....
dict['email'] = val[6:]
self.uuid = dict.get("uuid", None)
self.urn = dict.get("urn", None)
self.hrn = dict.get("hrn", None)
self.email = dict.get("email", None)
if self.urn:
self.hrn = urn_to_hrn(self.urn)[0]
##
# Dump the credential to stdout.
#
# @param indent specifies a number of spaces to indent the output
# @param dump_parents If true, also dump the parents of the GID
def dump(self, *args, **kwargs):
print self.dump_string(*args,**kwargs)
def dump_string(self, indent=0, dump_parents=False):
result=" "*(indent-2) + "GID\n"
result += " "*indent + "hrn:" + str(self.get_hrn()) +"\n"
result += " "*indent + "urn:" + str(self.get_urn()) +"\n"
result += " "*indent + "uuid:" + str(self.get_uuid()) + "\n"
if self.get_email() is not None:
result += " "*indent + "email:" + str(self.get_email()) + "\n"
filename=self.get_filename()
if filename: result += "Filename %s\n"%filename
if self.parent and dump_parents:
result += " "*indent + "parent:\n"
result += self.parent.dump_string(indent+4, dump_parents)
return result
##
# Verify the chain of authenticity of the GID. First perform the checks
# of the certificate class (verifying that each parent signs the child,
# etc). In addition, GIDs also confirm that the parent's HRN is a prefix
# of the child's HRN, and the parent is of type 'authority'.
#
# Verifying these prefixes prevents a rogue authority from signing a GID
# for a principal that is not a member of that authority. For example,
# planetlab.us.arizona cannot sign a GID for planetlab.us.princeton.foo.
def verify_chain(self, trusted_certs = None):
# do the normal certificate verification stuff
trusted_root = Certificate.verify_chain(self, trusted_certs)
if self.parent:
# make sure the parent's hrn is a prefix of the child's hrn
if not hrn_authfor_hrn(self.parent.get_hrn(), self.get_hrn()):
raise GidParentHrn("This cert HR | N %s isn't in the namespace for parent HRN %s" % (self.get_hrn(), self.parent.get_hrn()))
# Parent must also be an authority (of some type) to sign a GID
# There are multiple types of authority - accept them all here
if not self.parent.get_type().find('authority') == 0:
raise GidInvalidParentHrn("This cert %s's parent %s is not an authority (is a %s)" % (self.get_hrn(), self.parent.get_hrn(), self.parent.get_t | ype()))
# Then recurse up the chain - ensure the parent is a trusted
# root or is in the namespace of a trusted root
self.parent.verify_chain(trusted_certs)
else:
# make sure that the trusted root's hrn is a prefix of the child's
trusted_gid = GID(string=trusted_root.save_to_string())
trusted_type = trusted_gid.get_type()
trusted_hrn = trusted_gid.get_hrn()
#if trusted_type == 'authority':
# trusted_hrn = trusted_hrn[:trusted_hrn.rindex('.')]
cur_hrn = self.get_hrn()
if not hrn_authfor_hrn(trusted_hrn, cur_hrn):
raise GidParentHrn("Trusted root with HRN %s isn't a namespace authority for this cert: %s" % (trusted_hrn, cur_hrn))
# There are multiple types of authority - accept them all here
if not trusted_type.find('authority')
"""
Django settings for DocumentsFlow project.
Generated by 'django-admin startproject' using Django 1.10.3.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '-3v-w43(q0sg$!%e+i@#f#=w(j40i=afhjrmyedj-+x36+z'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'DocumentsFlow.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'DocumentsFlow.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
# -*- coding: utf-8
from __future__ import absolute_import, unicode_literals
import django
DEBUG = True
USE_TZ = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = "o0fy)a6pmew*fe9b+^wf)96)2j8)%6oz555d7by7_(*i!b8wj8"
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": ":memory:",
}
}
ROOT_URLCONF = "tests.urls"
INSTALLED_APPS = [
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.sites",
"django.contrib.staticfiles",
"django.contrib.admin",
"dja | ngo.contrib.messages",
"lock_tokens.apps.LockTokensConfig",
"tests",
]
SITE_ID = 1
STATIC_URL = '/static/'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.contrib.auth.context_processors.auth',
]
}
},
]
if django.VERSION >= (1, 10):
MIDDLEWARE = (
"django.contrib.sessions.middleware.SessionMiddleware",
| "django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
)
else:
MIDDLEWARE_CLASSES = (
"django.contrib.sessions.middleware.SessionMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
)
rvice not in sd.services
def test_unregisteringAnythingElseFails(sd):
item = 34
try:
sd.unregister(item)
assert False
except TypeError:
assert True
assert item not in sd.services
def test_unregisteringWhenRunningThrowsError(dns_sd, sd):
service = dns_sd.Service()
def dummy():
pass
sd._start_discovery = dummy
sd.start()
try:
sd.unregister(service)
assert False
except RuntimeError:
assert True
assert service not in sd.services
class ServiceInfoFactory(object):
def create(
self,
base_type='machinekit',
domain='local',
sd_protocol='tcp',
name='Hugo on Franz',
service=b'halrcomp',
uuid=b'12345678',
host='127.0.0.1',
protocol='tcp',
port=12345,
version=0,
properties=None,
server='127.0.0.1',
address=None,
):
from zeroconf import ServiceInfo
typestring = '_%s._%s.%s.' % (base_type, sd_protocol, domain)
dsn = b'%s://%s:%i' % (protocol.encode(), host.encode(), port)
if properties is None:
properties = {
b'uuid': uuid,
b'service': service,
b'dsn': dsn,
b'version': version,
}
return ServiceInfo(
type_=typestring,
name='%s %s.%s' % (name, host, typestring),
properties=properties,
address=(address or host).encode(),
port=port,
server=server,
)
@pytest.fixture
def zeroconf(mocker):
from zeroconf import Zeroconf
service_info = ServiceInfoFactory().create()
zeroconf_stub = mocker.stub(name='get_service_info')
zeroconf_stub.return_value = service_info
stub_object = Zeroconf()
stub_object.get_service_info = zeroconf_stub
return stub_object
@pytest.fixture
def zeroconf_without_service_info(mocker):
from zeroconf import Zeroconf
zeroconf_stub = mocker.stub(name='get_service_info')
zeroconf_stub.return_value = None
stub_object = Zeroconf()
stub_object.get_service_info = zeroconf_stub
return stub_object
def test_serviceDiscoveredUpdatesRegisteredServices(dns_sd, sd, zeroconf):
service = dns_sd.Service(type_='halrcomp')
sd.register(service)
sd.add_service(
zeroconf,
'_machinekit._tcp.local.',
'Foo on Bar 127.0.0.1._machinekit._tcp.local.',
)
assert service.ready is True
def test_serviceDisappearedUpdatesRegisteredServices(dns_sd, sd, zeroconf):
service = dns_sd.Service(type_='halrcomp')
sd.register(service)
sd.add_service(
zeroconf,
'_machinekit._tcp.local.',
'Foo on Bar 127.0.0.1._machinekit._tcp.local.',
)
sd.remove_service(
zeroconf,
'_machinekit._tcp.local.',
'Foo on Bar 127.0.0.1._machinekit._tcp.local.',
)
assert service.ready is False
def test_stoppingServiceDiscoveryResetsAllServices(dns_sd, sd, zeroconf):
service1 = dns_sd.Service(type_='halrcomp')
sd.register(service1)
service2 = dns_sd.Service(type_='halrcmd')
sd.register(service2)
sd.browser = object() # dummy
sd.add_service(
zeroconf,
'_machinekit._tcp.local.',
'Foo on Bar 127.0.0.1._machinekit._tcp.local.',
)
sd.stop()
assert service1.ready is False
assert service2.ready is False
def test_serviceDiscoveredWithoutServiceInfoDoesNotUpdateRegisteredServices(
dns_sd, sd, zeroconf_without_service_info
):
service = dns_sd.Service(type_='halrcomp')
sd.register(service)
sd.add_service(
zeroconf_without_service_info,
'_machinekit._tcp.local.',
'Foo on Bar 127.0.0.1._machinekit._tcp.local.',
)
assert service.ready is False
def test_serviceDisappearedWithoutServiceInfoDoesNotUpdateRegisteredServices(
dns_sd, sd, zeroconf_without_service_info
):
service = dns_sd.Service(type_='halrcomp')
sd.register(service)
service.ready = True
sd.remove_service(
zeroconf_without_service_info,
'_machinekit._tcp.local.',
'Foo on Bar 127.0.0.1._machinekit._tcp.local.',
)
assert service.ready is True
def test_serviceInfoSetsAllRelevantValuesOfService(dns_sd):
service = dns_sd.Service(type_='halrcomp')
service_info = ServiceInfoFactory().create(
name='Foo on Bar',
uuid=b'987654321',
version=5,
host='10.0.0.10',
protocol='tcp',
port=12456,
server='sandybox.local',
)
service.add_service_info(service_info)
assert service.uri == 'tcp://10.0.0.10:12456'
assert service.name == service_info.name
assert service.uuid == '987654321'
assert service.version == 5
assert service.host_name == 'sandybox.local'
assert service.host_address == '10.0.0.10'
def test_serviceInfoResolvesLocalHostnameIfMatched(dns_sd):
service = dns_sd.Service(type_='halrcomp')
service_info = ServiceInfoFactory().create(
host='sandybox.local',
protocol='tcp',
port=12456,
server='sandybox.local',
address='10.0.0.10',
)
service.add_service_info(service_info)
assert service.uri == 'tcp://10.0.0.10:12456'
def test_serviceInfoRetursRawUriIfHostnameIsNotMatched(dns_sd):
service = dns_sd.Service(type_='halrcomp')
service_info = ServiceInfoFactory().create(
host='thinkpad.local',
protocol='tcp',
port=12456,
server='sandybox.local',
address='10.0.0.10',
)
service.add_service_info(service_info)
assert service.uri == 'tcp://thinkpad.local:12456'
def test_serviceInfoWithIncompleteValuesIsIgnoredByService(dns_sd):
service = dns_sd.Service(type_='launcher')
service_info = ServiceInfoFactory().create(properties={})
service.add_service_info(service_info)
assert service.uri == ''
assert service.uuid == ''
assert service.version == b''
def test_removingServiceInfoResetsAllRelevantValuesOfService(dns_sd):
service = dns_sd.Service(type_='blahus')
service_info = ServiceInfoFactory().create()
service.add_service_info(service_info)
service.remove_service_info(service_info)
assert service.uri == ''
assert service.name == ''
assert service.uuid == ''
assert service.version == 0
assert service.host_name == ''
assert service.host_address == ''
def test_clearingServiceInfosResetsValuesOfService(dns_sd):
service = dns_sd.Service(type_='foobar')
service.add_service_info(ServiceInfoFactory().create())
service.add_service_info(ServiceInfoFactory().create())
service.clear_service_infos()
assert service.ready is False
assert service.uri == ''
def test_settingReadyPropertyOfServiceTriggersCallback(dns_sd):
cb_called = [False]
def cb(_):
cb_called[0] = True
service = dns_sd.Service(type_='halrcomp')
service.on_ready_changed.append(cb)
service_info = ServiceInfoFactory().create()
service.add_service_info(service_info)
assert cb_called[0] is True
def test_discoverableAddingServiceWorks(dns_sd):
discoverable = dns_sd.ServiceContainer()
service = dns_sd.Service(type_='foo')
discoverable.add_service(service)
assert service in discoverable.services
def test_discoverableAddingAnythingElseFails(dns_sd):
discoverable = dns_sd.ServiceContainer()
item = object()
try:
discoverable.add_service(item)
assert False
except TypeError:
assert True
assert item not in discoverable.services
def test_discoverableRemovingServiceWorks(dns_sd):
discoverable = dns_sd.ServiceContainer()
service = dns_sd.Service(type_='foo')
discoverable.add_service(service)
discoverable.remove_service(service)
assert service not in discoverable.services
def test_discoverableRemovingAnythingElseFails(dns_sd):
discoverable = dns_sd.ServiceContainer()
item = object()
try:
discoverable.remove_service(item)
assert False
except TypeError:
assert True
assert item not in discoverable.services
from discord.ext import commands
class Github:
def __init__(self, bot):
self.bot = bot
@commands.command(pass_context=True)
async def permrole(self, ctx, argument:str):
await self.bot.say(';py for perm in discord.utils.get(ctx.message.server.roles, name="{}").permissions: print(perm)'.format(argument))
def setup(bot):
bot.add_cog(Github(bot))
from .command_line_mixins import CommandLineMixins
from .module import Module
from .console_app import ConsoleApp
__all__ = ['CommandLineMixins', 'Module', 'ConsoleApp']
# -*- coding: utf-8 -*-
'''Caution:
For Python 2.7, an `__init__.py` file in folders is necessary.
'''
# Copyright 2015 Nicta
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
"""
Module for storing default and static values
"""
local_config_dir = '~/.clusterous'
local_session_data_dir = local_config_dir + '/' + 'session'
local_environment_dir = local_config_dir + '/' + 'environment'
cached_cluster_file = 'cluster_spec.yml'
cached_environment_file = 'environment.yml'
cached_cluster_file_path = local_environment_dir + '/' + cached_cluster_file
cached_environment_file_path = local_environment_dir + '/' + cached_environment_file
remote_environment_dir = '/home/ubuntu/environment'
current_nat_ip_file = local_config_dir + '/' + 'current_controller'
cluster_info_file = local_config_dir + '/' + 'cluster_info.yml'
taggable_name_re = re.compile(r'^[\w-]+$') # For user supplied strings such as cluster name
taggable_name_max_length = 64 # Arbitrary but ample, keeping in mind AWS keys can be max 127 chars
nat_name_format = '{0}-nat'
nat_name_tag_value = 'nat'
nat_instance_type = 't2.micro'
controller_name_format = '{0}-controller'
controller_name_tag_value = 'controller'
controller_instance_type = 't2.small'
node_name_format = '{0}-node-{1}'
instance_tag_key = '@clusterous'
instance_node_type_tag_key = 'NodeType'
registry_s3_path = '/docker-registry'
central_logging_name_format = '{0}-central-logging'
central_logging_name_tag_value = 'central-logging'
central_logging_instance_type = 't2.small'
default_zone = 'a'
controller_root_volume_size = 50 # GB
cluster_username = 'ubuntu'
cluster_user_home_dir = '/home/ubuntu'
shared_volume_path = '/home/data/'
shared_volume_size = 20 # GB
remote_scripts_dir = 'ansible/remote'
default_cluster_def_filename = 'default_cluster.yml'
remote_host_scripts_dir = 'clusterous'
remote_host_key_file = 'key.pem'
remote_host_vars_file = 'vars.yml'
container_id_script_file = 'container_id.sh'
mesos_port = 5050
marathon_port = 8080
central_logging_port = 8081
nat_ssh_port_forwarding = 22000
# How many seconds to wait for all Marathon applications to reach "started" state
# Currently 30 minutes
app_launch_start_timeout = 1800
app_destroy_timeout = 60
def get_script(filename):
"""
Takes script relative filename, returns absolute path
Assumes this file is in Clusterous source root, uses __file__
"""
return '{0}/{1}/{2}'.format(os.path.dirname(__file__), 'scripts', filename)
def get_remote_dir():
"""
Return full path of remote scripts directory
"""
return '{0}/{1}/{2}'.format(os.path.dirname(__file__), 'scripts', remote_scripts_dir)
tion
"""
result = {}
# Process the property to list hook by
if list_by == 'priority':
if show_info:
def _append_hook(d, priority, name, path):
# Use the priority as key and a dict of hooks names
# with their info as value
value = { 'path': path }
try:
d[priority][name] = value
except KeyError:
d[priority] = { name: value }
else:
def _append_hook(d, priority, name, path):
# Use the priority as key and the name as value
try:
d[priority].add(name)
except KeyError:
d[priority] = set([name])
elif list_by == 'name' or list_by == 'folder':
if show_info:
def _append_hook(d, priority, name, path):
# Use the name as key and a list of hooks info - the
# executed ones with this name - as value
l = d.get(name, list())
for h in l:
# Only one priority for the hook is accepted
if h['priority'] == priority:
# Custom hooks overwrite system ones and they
# are appended at the end - so overwite it
if h['path'] != path:
h['path'] = path
return
l.append({ 'priority': priority, 'path': path })
d[name] = l
else:
if list_by == 'name':
result = set()
def _append_hook(d, priority, name, path):
# Add only the name
d.add(name)
else:
raise MoulinetteError(errno.EINVAL, m18n.n('hook_list_by_invalid'))
def _append_folder(d, folder):
# Iterate over and add hook from a folder
for f in os.listdir(folder + action):
path = '%s%s/%s' % (folder, action, f)
priority, name = _extract_filename_parts(f)
_append_hook(d, priority, name, path)
try:
# Append system hooks first
if list_by == 'folder':
result['system'] = dict() if show_info else set()
_append_folder(result['system'], hook_folder)
else:
_append_folder(result, hook_folder)
except OSError:
logger.debug("system hook folder not found for action '%s' in %s",
action, hook_folder)
try:
# Append custom hooks
if list_by == 'folder':
result['custom'] = dict() if show_info else set()
_append_folder(result['custom'], custom_hook_folder)
else:
_append_folder(result, custom_hook_folder)
except OSError:
logger.debug("custom hook folder not found for action '%s' in %s",
action, custom_hook_folder)
return { 'hooks': result }
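# Hedged sketches of hook_list() return shapes (derived from the _append_hook
# variants above; hook names and paths are illustrative):
#   list_by='priority', show_info=True:
#       {'hooks': {'50': {'conf_regen': {'path': '/usr/share/yunohost/hooks/...'}}}}
#   list_by='name', show_info=False:
#       {'hooks': set(['conf_regen', 'post_app_install'])}
#   list_by='folder':
#       {'hooks': {'system': {...}, 'custom': {...}}}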
def hook_callback(action, hooks=[], args=None):
"""
Execute all scripts bound to an action
Keyword argument:
action -- Action name
hooks -- List of hooks names to execute
args -- Ordered list of arguments to pass to the script
"""
result = { 'succeed': list(), 'failed': list() }
hooks_dict = {}
# Retrieve hooks
if not hooks:
hooks_dict = hook_list(action, list_by='priority',
show_info=True)['hooks']
else:
hooks_names = hook_list(action, list_by='name',
show_info=True)['hooks']
# Iterate over given hooks names list
for n in hooks:
try:
hl = hooks_names[n]
except KeyError:
raise MoulinetteError(errno.EINVAL,
m18n.n('hook_name_unknown', n))
# Iterate over hooks with this name
for h in hl:
# Update hooks dict
d = hooks_dict.get(h['priority'], dict())
d.update({ n: { 'path': h['path'] }})
hooks_dict[h['priority']] = d
if not hooks_dict:
return result
# Format arguments
if args is None:
args = []
elif not isinstance(args, list):
args = [args]
# Iterate over hooks and execute them
for priority in sorted(hooks_dict):
for name, info in iter(hooks_dict[priority].items()):
filename = '%s-%s' % (priority, name)
try:
hook_exec(info['path'], args=args)
except:
logger.exception("error while executing hook '%s'",
info['path'])
result['failed'].append(filename)
else:
result['succeed'].append(filename)
return result
def hook_check(file):
"""
Parse the script file and get arguments
Keyword argument:
file -- File to check
"""
try:
with open(file[:file.index('scripts/')] + 'manifest.json') as f:
manifest = json.loads(str(f.read()))
except:
raise MoulinetteError(errno.EIO, m18n.n('app_manifest_invalid'))
action = file[file.index('scripts/') + 8:]
if 'arguments' in manifest and action in manifest['arguments']:
return manifest['arguments'][action]
else:
return {}
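# A hedged sketch of the manifest fragment hook_check() reads (key names
# taken from hook_check/hook_exec above; the values are illustrative):
#
#     {
#         "arguments": {
#             "install": [
#                 {"name": "domain", "ask": {"en": "Choose a domain"},
#                  "choices": ["example.org"], "default": "example.org"}
#             ]
#         }
#     }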
def hook_exec(file, args=None):
"""
Execute hook from a file with arguments
Keyword argument:
file -- Script to execute
args -- Arguments to pass to the script
"""
from moulinette.utils.stream import NonBlockingStreamReader
from yunohost.app import _value_for_locale
if isinstance(args, list):
arg_list = args
else:
required_args = hook_check(file)
if args is None:
args = {}
arg_list = []
for arg in required_args:
if arg['name'] in args:
if 'choices' in arg and args[arg['name']] not in arg['choices']:
raise MoulinetteError(errno.EINVAL,
m18n.n('hook_choice_invalid', args[arg['name']]))
arg_list.append(args[arg['name']])
else:
if os.isatty(1) and 'ask' in arg:
# Retrieve proper ask string
ask_string = _value_for_locale(arg['ask'])
# Append extra strings
if 'choices' in arg:
ask_string += ' ({:s})'.format('|'.join(arg['choices']))
if 'default' in arg:
ask_string += ' (default: {:s})'.format(arg['default'])
input_string = msignals.prompt(ask_string)
if input_string == '' and 'default' in arg:
input_string = arg['default']
arg_list.append(input_string)
elif 'default' in arg:
arg_list.append(arg['default'])
else:
raise MoulinetteError(errno.EINVAL,
m18n.n('hook_argument_missing', arg['name']))
file_path = "./"
if "/" in file and file[0:2] != file_path:
file_path = os.path.dirname(file)
file = file.replace(file_path +"/", "")
#TODO: Allow python script
arg_str = ''
if arg_list:
# Concatenate arguments and escape them with double quotes to prevent
# bash related issue if an argument is empty and is not the last
arg_str = '\\"{:s}\\"'.format('\\" \\"'.join(arg_list))
msignals.display(m18n.n('executing_script'))
p = subprocess.Popen(
arg_split('su - admin -c "cd \\"{:s}\\" && ' \
'/bin/bash -x \\"{:s}\\" {:s}"'.format(
file_path, file, arg_str)),
stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
shell=False)
# Wrap and get process output
stream = NonBlockingStreamReader(p.stdout)
while True:
line = stream.readline(True, 0.1)
if not line:
# Check if process has terminated
returncode = p.poll()
if returncode is not None
import sys # this allows you to read the user input from keyboard also called "stdin"
import classOne # This imports all the classOne functions
import classTwo # This imports all the classTwo functions
import classThree # This imports all the classThree functions
import classFour # This imports all the classFour functions
TIMEOUT=10 # this is the amount of time you will wait for an answer in Seconds. 10 means 10 seconds
MAX_CLASS=5
QUIZ_INSTRUCTIONS = """
Get ready for the quiz. You will have 10 questions out of which you
will need 8 right to win the prize. You will have """ + str(TIMEOUT) + """ seconds
to answer each question. Press Enter to start."""
def getUsersClass(): #main
''' This function will get the user's class. It will compare the class with MAX_CLASS and
will return False if it is more than the MAX_CLASS. Class also has to be a natural number '''
print("Please tell me which Class you are in? ")
try:
usersClass = int(sys.stdin.readline().strip())
if (usersClass < 1 or usersClass > MAX_CLASS) :
print("No Quiz available for Class " + str(usersClass))
return False
else :
return usersClass
except:
print("Exception")
return False
if __name__ == '__main__':
while(True) :
usersClass = getUsersClass()
if (usersClass != False) :
break
print(QUIZ_INSTRUCTIONS)
sys.stdin.readline()
if (usersClass == 1) :
classOne.classOneQuiz()
elif (usersClass == 2) :
classTwo.classTwoQuiz()
elif(usersClass == 3):
classThree.classThreeQuiz()
elif(usersClass == 4):
classFour.classFourQuiz()
self.setAccess(queryLoadUserData.value(queryLoadUserData.record().indexOf("access")))
def setAccess(self,access):
self.access = access
def setName(self,name):
self.name = name
def getUsername(self):
return self.username.lower()
def getAccess(self):
return self.access
def getName(self):
return self.name
def __checkUsername(self):
pass
def checkAccessControlIsActive(self):
# use os.path.join instead of a literal backslash path (portable and avoids '\c' escape issues)
if os.path.isfile(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'config.json')):
with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'config.json')) as config_file:
config = json.load(config_file)
accessControl = config['accessControl']
pass
else:
settings = QSettings("PostNAS", "PostNAS-Suche")
accessControl = settings.value("accessControl")
if(accessControl == 1):
if (self.checkAccessTable() == False):
accessControl = 0
else:
if (self.checkAccessTable() == True):
accessControl = 1
if os.path.isfile(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'config.json')):
config['accessControl'] = accessControl
with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'config.json'), 'w') as config_file:
json.dump(config, config_file)
else:
settings.setValue("accessControl", accessControl)
if(accessControl == 1):
return True
else:
return False
def checkAccessTable(self):
sql = "SELECT table_name FROM information_schema.tables WHERE table_name = 'postnas_search_access_control'";
self.__openDB()
query = QSqlQuery(self.db)
query.exec_(sql)
if(query.size() > 0):
return True
else:
return False
def createAccessTable(self):
file_path = os.path.dirname(os.path.realpath(__file__)) + "/create_accesstable/create_table.sql"
sql = open(file_path).read()
self.__openDB()
query = QSqlQuery(self.db)
if (self.dbSchema.lower() != "public"):
sql = sql.replace("public.", self.dbSchema + ".")
query.exec_(sql)
if(query.lastError().number() == -1):
return True
else:
return False
def checkAccessTableHasAdmin(self):
sql = "SELECT lower(username) FROM public.postnas_search_access_control WHERE access = 0";
self.__openDB()
query = QSqlQuery(self.db)
if (self.dbSchema.lower() != "public"):
sql = sql.replace("public.", self.dbSchema + ".")
query.exec_(sql)
if(query.size() > 0):
return True
else:
return False
def insertUser(self):
if(self.getUsername() != None):
self.__openDB()
sql = "INSERT INTO public.postnas_search_access_control (username,name,access) VALUES (:username,:name,:access)"
query = QSqlQuery(self.db)
if (self.dbSchema.lower() != "public"):
sql = sql.replace("public.", self.dbSchema + ".")
query.prepare(sql)
query.bindValue(":username",self.getUsername().lower())
query.bindValue(":name",self.name)
query.bindValue(":access",self.access)
query.exec_()
if(query.lastError().number() == -1):
return True
else:
return False
else:
return False
def insertAdminUser(self):
self.access = 0
return self.insertUser()
def updateUser(self,username_old):
if(self.getUsername() != None):
self.__openDB()
sql = "UPDATE public.postnas_search_access_control SET username = :username, name = :name, access = :access WHERE username = :username_old"
query = QSqlQuery(self.db)
if (self.dbSchema.lower() != "public"):
sql = sql.replace("public.", self.dbSchema + ".")
query.prepare(sql)
query.bindValue(":username",self.getUsername().lower())
query.bindValue(":username_old",username_old)
query.bindValue(":name",self.name)
query.bindValue(":access",self.access)
query.exec_()
if(query.lastError().number() == -1):
return True
else:
QgsMessageLog.logMessage("Datenbankfehler beim Update: " + query.lastError().text(),'PostNAS-Suche', Qgis.Critical)
return False
else:
return False
def checkUserIsAdmin(self):
if(self.getUsername() != None):
self.__openDB()
sql = "SELECT lower(username) as username FROM public.postnas_search_access_control WHERE access = 0 AND lower(username) = :username"
query = QSqlQuery(self.db)
if (self.dbSchema.lower() != "public"):
sql = sql.replace("public.", self.dbSchema + ".")
query.prepare(sql)
query.bindValue(":username",self.getUsername())
query.exec_()
if(query.lastError().number() == -1):
if(query.size() > 0):
return True
else:
return False
else:
return False
else:
return False
def checkUserHasEigentuemerAccess(self):
if(self.getUsername() != None):
self.__openDB()
sql = "SELECT lower(username) as username FROM public.postnas_search_access_control WHERE access IN (0,1) AND lower(username) = :username"
queryEigentuemerAccess = QSqlQuery(self.db)
if (self.dbSchema.lower() != "public"):
sql = sql.replace("public.", self.dbSchema + ".")
queryEigentuemerAccess.prepare(sql)
queryEigentuemerAccess.bindValue(":username",self.getUsername())
queryEigentuemerAccess.exec_()
if(queryEigentuemerAccess.lastError().number() == -1):
if(queryEigentuemerAccess.size() > 0):
return True
else:
return False
else:
return False
else:
return False
def loadUserAccessTable(self):
sql = "SELECT lower(username) as username,name,bezeichnung FROM public.postnas_search_access_control LEFT JOIN public.postnas_search_accessmode ON postnas_search_access_control.access = postnas_search_accessmode.id";
if (self.dbSchema.lower() != "public"):
sql = sql.replace("public.", self.dbSchema + ".")
self.__openDB()
queryLoadAccessTable = QSqlQuery(self.db)
queryLoadAccessTable.prepare(sql)
queryLoadAccessTable.exec_()
results = []
if(queryLoadAccessTable.size() > 0):
while(queryLoadAccessTable.next()):
row = {'username': queryLoadAccessTable.value(queryLoadAccessTable.record().indexOf("username")),
'name': queryLoadAccessTable.value(queryLoadAccessTable.record().indexOf("name")),
'access': queryLoadAccessTable.value(queryLoadAccessTable.record().indexOf("bezeichnung"))}
results.append(row)
return results
def deleteUser(self):
sql = "DELETE FROM public.postnas_search_access_control WHERE lower(username) = :username"
self.__openDB()
queryDeleteUser = QSqlQuery(self.db)
if (self.dbSchema.lower() != "public"):
sql = sql.replace("public.", self.dbSchema + ".")
queryDeleteUser.prepare(sql)
queryDeleteUser.bindValue(":username",self.getUsername())
queryDeleteUser.exec_()
if(queryDeleteUser.lastError().number() == -1):
return True
else:
QgsMessageLog.logMessage("Datenbankfehler beim Löschen: " + queryDeleteUser.lastError().text(), 'PostNAS-Suche',Qgis.Critical)
return False
def getAccessModes(self):
sql = "SELECT id,bezeichnung FROM p |
from behave import given, when, then
from slackrest.app import SlackrestApp
from slackrest.command import Visibility, Method
import json
class GiveMeAReply:
pattern = '!givemeareply'
url_format = '/reply'
visibility = Visibility.Any
body = None
method = Method.GET
class GiveMeANotification:
pattern = '!givemeanotification'
url_format = '/notify'
visibility = Visibility.Any
body = None
method = Method.GET
class MakeAPost:
pattern = '!makeapost'
url_format = '/makeapost'
visibility = Visibility.Any
method = Method.POST
@classmethod
def body(cls, **kwargs):
return json.dumps({'param': 'value'})
commands = [GiveMeAReply, GiveMeANotification, MakeAPost]
@given(u'Slackrest is connected to Slack')
def step_impl(context):
context.app = SlackrestApp(context.chat_url, commands, context.notification_channel_id)
context.app.run_async()
context.slack_events.await_event(event_type='login')
@when(u'I send "{message}" from channel "{channel_id}"')
def step_impl(context, message, channel_id):
user_id = 'U123456'
msg = {'type': 'message', 'text': message, 'channel': channel_id, 'user': user_id}
context.slack_events.send_message(msg)
@then(u'I should get a message in channel "{channel_id}"')
def step_impl(context, channel_id):
event = context.slack_events.await_event(event_type='message')
assert event['message']['channel'] == channel_id
@then(u'I should get a message containing "{msg}"')
def step_impl(context, msg):
event = context.slack_events.await_event(event_type='message')
print("Got message containing '{}'".format(event['message']['text']))
print("Got message containing '{}'".format(event['message']['text']))
assert msg in event['message']['text']
@given(u'I set the notification channel to "{notification_channel_id}"')
def step_impl(context, notification_channel_id):
context.notification_channel_id = notification_channel_id
@given(u'I map "!givemeareply" to /reply')
def step_impl(context):
pass
@given(u'I map "!givemeanotification" to /notify')
def step_impl(context):
pass
@given(u'the chat bot is at {url}')
def step_impl(context, url):
context.chat_url = url |
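# Hedged sketch (illustrative, not from the source): a further command following the
# shape above -- each command class carries pattern, url_format, visibility, method,
# and a body (None for GET, or a classmethod returning a JSON string for POST).
class EchoSomething:
    pattern = '!echosomething'
    url_format = '/echo'
    visibility = Visibility.Any
    method = Method.POST

    @classmethod
    def body(cls, **kwargs):
        # kwargs are whatever Slackrest passes to body builders; 'message' is assumed
        return json.dumps({'message': kwargs.get('message', '')})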
# with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import sys
import django
import getpaid
sys.path.append(
os.path.join(os.path.join(os.path.dirname(__file__), os.pardir), os.pardir)
)
sys.path.append(
os.path.join(
os.path.join(os.path.join(os.path.dirname(__file__), os.pardir), os.pardir),
"example",
)
)
sys.path.append(
os.path.join(
os.path.join(os.path.join(os.path.dirname(__file__), os.pardir), os.pardir),
"django-getpaid",
)
)
|
sys.path.insert(0, os.path.abspath("../example"))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "example.settings")
django.setup()
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx_rtd_theme",
"sphinx.ext.autosectionlabel",
]
autodoc_member_order = "bysource"
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix of source filenames.
source_suffix = ".rst"
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = "index"
# General information about the project.
project = "django-getpaid"
copyright = "2012-2013 Krzysztof Dorosz, 2013-2020 Dominik Kozaczko"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = "2.2"
# The full version, including alpha/beta/rc tags.
release = getpaid.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "sphinx_rtd_theme"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = "django-getpaiddoc"
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
(
"index",
"django-getpaid.tex",
"django-getpaid Documentation",
"Sunscrapers",
"manual",
)
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
("index", "django-getpaid", "django-getpaid Documentation", ["Sunscrapers"], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
"index",
"django-getpaid",
"django-getpaid Documentation",
"Sunscrapers",
"django-get |
# Script loads 3d data from text file (after Gwyddion text importing of AFM file)
import re
import numpy as np
def ReadData(file_name):
'''
Load 3d data array from a text file. The text file is imported from Gwyddion (free SPM data analysis software).
Parameters
----------
file_name : str
Relative path to a text file
Returns
-------
data : ndarray
MxM matrix of SPM data
width : float
Width of image (in meters)
height : float
Height of image (in meters)
pixel_height : float
Height of one pixel (in meters)
height_unit : float
Measurement unit coefficient (in unit/meter)
'''
comments = [] # List of comments in text file
f = open(file_name)
for line in f:
if line.startswith('#'):
comments.append(line)
else:
break
f.close()
rex = r"(\d+[.]\d+)\s(\S+)" # regular expre | ssion for image size searching
width_match = re.search(rex, comments[1])
height_match = re.search(rex, comments[2])
if (width_match.group(2) == 'µm') and (height_match.group(2) == 'µm'):
width_unit = 1e-6
height_unit = 1e-6
else:
raise ValueError("Attention! The measurement units aren't micrometers!") # My data was only in micrometers :)
width = float(width_match.group(1)) * width_unit
height = float(height_match.group(1)) * height_unit
data = np.genfromtxt(file_name) # NumPy function for data importing
M = np.shape(data)[0] # ---!!--- Needs to add rectangular area ---!!---
pixel_height = height/M
return data, width, height, pixel_height, height_unit
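# Minimal usage sketch (assumes 'scan.txt' is a Gwyddion text export of the form
# described above; the file name is illustrative):
if __name__ == '__main__':
    data, width, height, pixel_height, height_unit = ReadData('scan.txt')
    print('Image: {:.2e} x {:.2e} m, {} px, pixel height {:.2e} m'.format(
        width, height, data.shape, pixel_height))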
|
# -------------------------------- Database models----------------------------------------------------------------------
import sys, os
import sqlalchemy
from sqlalchemy import create_engine
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
import secrets
import settings
MYSQL_USERNAME = secrets.MYSQL_USERNAME
MYSQL_PASSWORD = secrets.MYSQL_PASSWORD
MYSQL_HOSTNAME = secrets.MYSQL_HOSTNAME
MYSQL_DATABASE_NAME = secrets.MYSQL_DATABASE_NAME
MYSQL_HOST_PORT = secrets.MYSQL_HOST_PORT
MAX_MESSAGE_SIZE = settings.MAX_MESSAGE_SIZE
database_url = 'mysql://{}:{}@{}:{}/{}'.format(MYSQL_USERNAME, MYSQL_PASSWORD, MYSQL_HOSTNAME, MYSQL_HOST_PORT,
MYSQL_DATABASE_NAME)
engine = create_engine(database_url)
from sqlalchemy import Column, Integer, String, Boolean, DateTime, Text
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy import ForeignKey
from sqlalchemy.orm import sessionmaker
Base = declarative_base()
class ModelManager(object):
"""
Model manager
"""
@classmethod
def create_session(cls, engine):
"""
create a session based on the given engine
:param engine: engine object
:return: returns the created session object
"""
Session = sessionmaker(bind=engine)
session = Session()
return session
@classmethod
def add_to_session(cls, session, obj):
"""
add the object to the session
:param obj:
:param session: session object
:return:
"""
session.add(obj)
@classmethod
def commit_session(cls, session):
"""
commit to session
:param session:
:return:
"""
session.commit()
@classmethod
def delete_from_session(cls, session, obj):
"""
delete the object from the session
:param session:
:return:
"""
session.delete(obj)
@classmethod
def rollback_session(cls, session):
"""
rollback the current session
:param session:
:return:
"""
session.rollback()
@classmethod
def close_session(cls, session):
"""
close the current session
:param session:
:return:
"""
session.close()
class Queue(Base):
"""
Queues model class
"""
__tablename__ = "Queue"
id = Column(Integer, primary_key=True)
name = Column(String(20), unique=True)
created_timestamp = Column(DateTime)
message = relationship("Message", back_populates="queue")
def __repr__(self):
"""
representation of the Queue class
:return:
"""
return "<Queue (name: {}, created_timestamp: {})>".format(self.name, self.created_timestamp)
class Message(Base):
"""
Message model class
"""
__tablename__ = "Message"
id = Column(Integer, primary_key=True)
queue_id = Column(Integer, ForeignKey('Queue.id'))
is_fetched = Column(Boolean, default=False)
content = Column(Text)
publish_timestamp = Column(DateTime)
consumed_timestamp = Column(DateTime)
queue = relationship("Queue", back_populates="message")
# The consumed_timestamp should ideally have a null value for default but that is not feasible here so
# for checking we will first check whether the is_fetched value is true, if so we consider the consumed_timestamp
# as the date and time when the message was dequeued.
def __repr__(self):
"""
representation of the Message class
:return:
"""
return "<Message (queue_id: {}, is_fetched: {}, content: {}...{}, publish_timestamp: {}, " \
"consumed_timestamp: {})>".format(self.queue_id, self.is_fetched, self.content[:10],self.content[10:],
self.publish_timestamp, self.consumed_timestamp) |
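# Hedged usage sketch: create the schema and enqueue one message using only the
# helpers defined above (queue name and content are illustrative).
if __name__ == '__main__':
    from datetime import datetime
    Base.metadata.create_all(engine)  # create the Queue/Message tables if missing
    session = ModelManager.create_session(engine)
    q = Queue(name='default', created_timestamp=datetime.utcnow())
    ModelManager.add_to_session(session, q)
    ModelManager.commit_session(session)
    msg = Message(queue_id=q.id, content='hello', publish_timestamp=datetime.utcnow())
    ModelManager.add_to_session(session, msg)
    ModelManager.commit_session(session)
    ModelManager.close_session(session)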
"""Module for parsing and testing package version predicate strings.
"""
import re
import distutils.version
import operator
re_validPackage = re.compile(r"(?i)^\s*([a-z_]\w*(?:\.[a-z_]\w*)*)(.*)",
re.ASCII)
# (package) (rest)
re_paren = re.compile(r"^\s*\((.*)\)\s*$") # (list) inside of parentheses
re_splitComparison = re.compile(r"^\s*(<=|>=|<|>|!=|==)\s*([^\s,]+)\s*$")
# (comp) (version)
def splitUp(pred):
"""Parse a single version comparison.
Return (comparison string, StrictVersion)
"""
res = re_splitComparison.match(pred)
if not res:
raise ValueError("bad package restriction syntax: %r" % pred)
comp, verStr = res.groups()
return (comp, distutils.version.StrictVersion(verStr))
compmap = {"<": operator.lt, "<=": operator.le, "==": operator.eq,
">": operator.gt, ">=": operator.ge, "!=": operator.ne}
class VersionPredicate:
"""Parse and test package version predicates.
>>> v = VersionPredicate('pyepat.abc (>1.0, <3333.3a1, !=1555.1b3)')
The `name` attribute provides the full dotted name that is given::
>>> v.name
'pyepat.abc'
The str() of a `VersionPredicate` provides a normalized
human-readable version of the expression::
>>> print(v)
pyepat.abc (> 1.0, < 3333.3a1, != 1555.1b3)
The `satisfied_by()` method can be used to determine with a given
version number is included in the set described by the version
restrictions::
>>> v.satisfied_by('1.1')
True
>>> v.satisfied_by('1.4')
True
>>> v.satisfied_by('1.0')
False
>>> v.satisfied_by('4444.4')
False
>>> v.satisfied_by('1555.1b3')
False
`VersionPredicate` is flexible in accepting extra whitespace::
>>> v = VersionPredicate(' pat( == 0.1 ) ')
>>> v.name
'pat'
>>> v.satisfied_by('0.1')
True
>>> v.satisfied_by('0.2')
False
If any version numbers passed in do not conform to the
restrictions of `StrictVersion`, a `ValueError` is raised::
>>> v = VersionPredicate('p1.p2.p3.p4(>=1.0, <=1.3a1, !=1.2zb3)')
Traceback (most recent call last):
...
ValueError: invalid version number '1.2zb3'
It the module or package name given does not conform to what's
allowed as a legal module or package name, `ValueError` is
raised::
>>> v = VersionPredicate('foo-bar')
Traceback (most recent call last):
...
ValueError: expected parenthesized list: '-bar'
>>> v = VersionPredicate('foo bar (12.21)')
Traceback (most recent call last):
...
ValueError: expected parenthesized list: 'bar (12.21)'
"""
def __init__(self, versionPredicateStr):
"""Parse a version predicate string.
"""
# Fields:
# name: package name
# pred: list of (comparison string, StrictVersion)
versionPredicateStr = versionPredicateStr.strip()
if not versionPredicateStr:
raise ValueError("empty package restriction")
match = re_validPackage.match(versionPredicateStr)
if not match:
raise ValueError("bad package name in %r" % versionPredicateStr)
self.name, paren = match.groups()
paren = paren.strip()
if paren:
match = re_paren.match(paren)
if not match:
raise ValueError("expected parenthesized list: %r" % paren)
predicates = match.groups()[0]  # renamed from `str` to avoid shadowing the builtin
self.pred = [splitUp(aPred) for aPred in predicates.split(",")]
if not self.pred:
raise ValueError("empty parenthesized list in %r"
% versionPredicateStr)
else:
self.pred = []
def __str__(self):
if self.pred:
seq = [cond + " " + str(ver) for cond, ver in self.pred]
return self.name + " (" + ", ".join(seq) + ")"
else:
return self.name
def satisfied_by(self, version):
"""True if version is compatible with all the predicates in self.
The parameter version must be acceptable to the StrictVersion
constructor. It may be either a string or StrictVersion.
"""
for cond, ver in self.pred:
if not compmap[cond](version, ver):
return False
return True
_provision_rx = None
def split_provision(value):
"""Return the name and optional version number of a provision.
The version number, if given, will be returned as a `StrictVersion`
instance, otherwise it will be `None`.
>>> split_provision('mypkg')
('mypkg', None)
>>> split_provision(' mypkg( 1.2 ) ')
('mypkg', StrictVersion ('1.2'))
"""
global _provision_rx
if _provision_rx is None:
_provision_rx = re.compile(
"([a-zA-Z_]\w*(?:\.[a-zA-Z_]\w*)*)(?:\s*\(\s*([^)\s]+)\s*\))?$",
re.ASCII)
value = value.strip()
m = _provision_rx.match(value)
if not m:
raise ValueError("illegal provides specification: %r" % value)
ver = m.group(2) or None
if ver:
ver = distutils.version.StrictVersion(ver)
return m.group(1), ver
|
import ckan.model as model
from ckanext.archiver.model import init_tables
init_tables(model.meta.engine)
self.log.info('Archiver tables are initialized')
elif cmd == 'migrate-archive-dirs':
self.migrate_archive_dirs()
elif cmd == 'migrate':
self.migrate()
else:
self.log.error('Command %s not recognized' % (cmd,))
def update(self):
from ckan import model
from ckanext.archiver import lib
packages = []
resources = []
if len(self.args) > 1:
for arg in self.args[1:]:
# try arg as a group id/name
group = model.Group.get(arg)
if group:
if group.is_organization:
packages.extend(
model.Session.query(model.Package)
.filter_by(owner_org=group.id))
else:
packages.extend(group.packages(with_private=True))
if not self.options.queue:
self.options.queue = 'bulk'
continue
# try arg as a package id/name
pkg = model.Package.get(arg)
if pkg:
packages.append(pkg)
if not self.options.queue:
self.options.queue = 'priority'
continue
# try arg as a resource id
res = model.Resource.get(arg)
if res:
resources.append(res)
if not self.options.queue:
self.options.queue = 'priority'
continue
else:
self.log.error('Could not recognize as a group, package '
'or resource: %r', arg)
sys.exit(1)
else:
# all packages
pkgs = model.Session.query(model.Package)\
.filter_by(state='active')\
.order_by('name').all()
packages.extend(pkgs)
if not self.options.queue:
self.options.queue = 'bulk'
if packages:
self.log.info('Datasets to archive: %d', len(packages))
if resources:
self.log.info('Resources to archive: %d', len(resources))
if not (packages or resources):
self.log.error('No datasets or resources to process')
sys.exit(1)
self.log.info('Queue: %s', self.options.queue)
for package in packages:
if p.toolkit.check_ckan_version(max_version='2.2.99'):
# earlier CKANs had ResourceGroup
pkg_resources = \
[res for res in
itertools.chain.from_iterable(
(rg.resources_all
for rg in package.resource_groups_all)
)
if res.state == 'active']
else:
pkg_resources = \
[res for res in package.resources_all
if res.state == 'active']
self.log.info('Queuing dataset %s (%s resources)',
package.name, len(pkg_resources))
lib.create_archiver_package_task(package, self.options.queue)
time.sleep(0.1) # to try to avoid Redis getting overloaded
for resource in resources:
if p.toolkit.check_ckan_version(max_version='2.2.99'):
package = resource.resource_group.package
else:
package = resource.package
self.log.info('Queuing resource %s/%s', package.name, resource.id)
lib.create_archiver_resource_task(resource, self.options.queue)
time.sleep(0.05) # to try to avoid Redis getting overloaded
self.log.info('Completed queueing')
def view(self, package_ref=None):
from ckan import model
from ckanext.archiver.model import Archival
r_q = model.Session.query(model.Resource).filter_by(state='active')
print 'Resources: %i total' % r_q.count()
a_q = model.Session.query(Archival)
print 'Archived resources: %i total' % a_q.count()
num_with_cache_url = a_q.filter(Archival.cache_url!='').count()
print ' %i with cache_url' % num_with_cache_url
last_updated_res = a_q.order_by(Archival.updated.desc()).first()
print 'Latest archival: %s' % (last_updated_res.updated.strftime('%Y-%m-%d %H:%M') if last_updated_res else '(no)')
if package_ref:
pkg = model.Package.get(package_ref)
print 'Package %s %s' % (pkg.name, pkg.id)
for res in pkg.resources:
print 'Resource %s' % res.id
for archival in a_q.filter_by(resource_id=res.id):
print '* %r' % archival
def clean_status(self):
from ckan import model
from ckanext.archiver.model import Archival
print 'Before:'
self.view()
q = model.Session.query(Archival)
q.delete()
model.Session.commit()
print 'After:'
self.view()
def clean_cached_resources(self):
from ckan import model
from ckanext.archiver.model import Archival
print 'Before:'
self.view()
q = model.Session.query(Archival).filter(Archival.cache_url != '')
archivals = q.all()
num_archivals = len(archivals)
progress = 0
for archival in archivals:
archival.cache_url = None
archival.cache_filepath = None
archival.size = None
archival.mimetype = None
archival.hash = None
progress += 1
if progress % 1000 == 0:
print 'Done %i/%i' % (progress, num_archivals)
model.Session.commit()
model.Session.commit()
model.Session.remove()
print 'After:'
self.view()
def report(self, output_file, delete=False):
"""
Generates a report containing orphans (either files or resources)
"""
import csv
from ckan import model
archive_root = config.get('ckanext-archiver.archive_dir')
if not archive_root:
self.log.error("Could not find archiver root")
return
# We'll use this to match the UUID part of the path
uuid_re = re.compile(".*([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}).*")
not_cached_active = 0
not_cached_deleted = 0
file_not_found_active = 0
file_not_found_deleted = 0
perm_error = 0
file_no_resource = 0
with open(output_file, "w") as f:
writer = csv.writer(f)
writer.writerow(["Resource ID", "Filepath", "Problem"])
resources = {}
for resource in model.Session.query(model.Resource).all():
resources[resource.id] = True
# Check the resource's cached_filepath
fp = resource.extras.get('cache_filepath')
if fp is None:
if resource.state == 'active':
not_cached_active += 1
else:
not_cached_deleted += 1
writer.writerow([resource.id, str(resource.extras), "Resource not cached: {0}".format(resource.state)])
continue
# Check that the cached file is there and readable
if not os.path.exists(fp):
if resource.state == 'active':
file_not_found_active += 1
else:
file_not_found_deleted += 1
writer.writerow([resource.id, fp.encode('utf-8'), "File not found: {0}".format(resource.state)])
continue
try:
os.stat(fp)
except OSError:
perm_error += 1
|
from common import bounty, peers, settings
from common.safeprint import safeprint
from multiprocessing import Queue, Value
from time import sleep, time
import pickle
def sync():
from multiprocessing import Manager
man = Manager()
items = {'config': man.dict(),
'peerList': man.list(),
'bountyList': man.list(),
'bountyLock': bounty.bountyLock,
'keyList': man.list()}
items['config'].update(settings.config)
items['peerList'].extend(peers.peerlist)
items['bountyList'].extend(bounty.bountyList)
safeprint(items)
peers.sync(items)
return items
def initParallels():
queue = Queue()
live = Value('b', True)
ear = peers.listener(settings.config['port'], settings.config['outbound'], queue, live, settings.config['server'])
ear.daemon = True
ear.items = sync()
ear.start()
mouth = peers.propagator(settings.config['port'] + 1, live)
mouth.daemon = True
mouth.items = ear.items
mouth.start()
feedback = []
stamp = time()
while queue.empty():
if time() - 15 > stamp:
break
global ext_ip, ext_port
ext_ip = ""
ext_port = -1
try:
feedback = queue.get(False)
settings.outbound = feedback[0]
if settings.outbound is not True:
ext_ip, ext_port = feedback[1:3]
except Exception:
safeprint("No feedback received from listener")
return live
def main():
# Begin Init
settings.setup()
try:
import miniupnpc
except ImportError:
safeprint("Miniupnpc is not installed. Running in outbound only mode")
settings.config['outbound'] = True
safeprint("settings are:")
safeprint(settings.config)
live = initParallels()
global ext_ip, ext_port
peers.initializePeerConnections(settings.config['port'], ext_ip, ext_port)
# End Init
# Begin main loop
if settings.config.get('seed'):
safeprint("Seed mode activated")
try:
while not settings.config.get('test'):
sleep(0.1)
except KeyboardInterrupt:
safeprint("Keyboard Interrupt")
elif settings.config.get('server'):
safeprint("Server mode activated")
else:
safeprint("Client mode activated")
# End main loop
# Begin shutdown
safeprint("Beginning exit process")
live.value = False
settings.saveSettings()
peers.saveToFile()
bounty.saveToFile()
# End shutdown
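# Hedged sketch (not in the original): the shape of the feedback a listener is
# expected to put on the queue, inferred from how initParallels() consumes it --
# feedback[0] is the outbound flag, feedback[1:3] the external (ip, port).
def report_feedback(queue, outbound, ext_ip="", ext_port=-1):
    queue.put([outbound, ext_ip, ext_port])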
if __name__ == "__main__":
main()
|
globals()[old] = type(native_str(old),
(_Deprecated,),
{"old": old, "new": new})
class AdminLoginInterfaceSelectorMiddleware(object):
"""
Checks for a POST from the admin login view and if authentication is
successful and the "site" interface is selected, redirect to the site.
"""
def process_view(self, request, view_func, view_args, view_kwargs):
login_type = request.POST.get("mezzanine_login_interface")
if login_type and not request.user.is_authenticated():
response = view_func(request, *view_args, **view_kwargs)
if request.user.is_authenticated():
if login_type == "admin":
next = request.get_full_path()
username = request.user.get_username()
if (username == DEFAULT_USERNAME and
request.user.check_password(DEFAULT_PASSWORD)):
error(request, mark_safe(_(
"Your account is using the default password, "
"please <a href='%s'>change it</a> immediately.")
% reverse("user_change_password",
args=(request.user.id,))))
else:
next = next_url(request) or "/"
return HttpResponseRedirect(next)
else:
return response
return None
class SitePermissionMiddleware(object):
"""
Marks the current user with a ``has_site_permission`` which is
used in place of ``user.is_staff`` to achieve per-site staff
access.
"""
def process_view(self, request, view_func, view_args, view_kwargs):
has_site_permission = False
if request.user.is_superuser:
has_site_permission = True
elif request.user.is_staff:
lookup = {"user": request.user, "sites": current_site_id()}
try:
SitePermission.objects.get(**lookup)
except SitePermission.DoesNotExist:
admin_index = reverse("admin:index")
if request.path.startswith(admin_index):
logout(request)
view_func = admin.site.login
extra_context = {"no_site_permission": True}
return view_func(request, extra_context=extra_context)
else:
has_site_permission = True
request.user.has_site_permission = has_site_permission
class TemplateForDeviceMiddleware(object):
"""
Inserts device-specific templates to the template list.
"""
def process_template_response(self, request, response):
if hasattr(response, "template_name"):
if not isinstance(response.template_name, Template):
templates = templates_for_device(request,
response.template_name)
response.template_name = templates
return response
class TemplateForHostMiddleware(object):
"""
Inserts host-specific templates to the template list.
"""
def process_template_response(self, request, response):
if hasattr(response, "template_name"):
if not isinstance(response.template_name, Template):
templates = templates_for_host(request,
response.template_name)
response.template_name = templates
return response
class UpdateCacheMiddleware(object):
"""
Response phase for Mezzanine's cache middleware. Handles caching
the response, and then performing the second phase of rendering,
for content enclosed by the ``nevercache`` tag.
"""
def process_response(self, request, response):
# Caching is only applicable for text-based, non-streaming
# responses. We also skip it for non-200 statuses during
# development, so that stack traces are correctly rendered.
is_text = response.get("content-type", "").startswith("text")
valid_status = response.status_code == 200
streaming = getattr(response, "streaming", False)
if not is_text or streaming or (settings.DEBUG and not valid_status):
return response
# Cache the response if all the required conditions are met.
# Response must be marked for updating by the
# ``FetchFromCacheMiddleware`` having a cache get miss, the
# user must not be authenticated, the HTTP status must be OK
# and the response mustn't include an expiry age, indicating it
# shouldn't be cached.
marked_for_update = getattr(request, "_update_cache", False)
anon = hasattr(request, "user") and not request.user.is_authenticated()
timeout = get_max_age(response)
if timeout is None:
timeout = settings.CACHE_MIDDLEWARE_SECONDS
if anon and valid_status and marked_for_update and timeout:
cache_key = cache_key_prefix(request) + request.get_full_path()
_cache_set = lambda r: cache_set(cache_key, r.content, timeout)
if callable(getattr(response, "render", None)):
response.add_post_render_callback(_cache_set)
else:
_cache_set(response)
# Second phase rendering for non-cached template code and
# content. Split on the delimiter the ``nevercache`` tag
# wrapped its contents in, and render only the content
# enclosed by it, to avoid possible template code injection.
token = nevercache_token()
try:
token = token.encode('utf-8')
except AttributeError:
pass
parts = response.content.split(token)
# Restore csrf token from cookie - check the response
# first as it may be being set for the first time.
csrf_token = None
try:
csrf_token = response.cookies[settings.CSRF_COOKIE_NAME].value
except KeyError:
try:
csrf_token = request.COOKIES[settings.CSRF_COOKIE_NAME]
except KeyError:
pass
if csrf_token:
request.META["CSRF_COOKIE"] = csrf_token
context = RequestContext(request)
for i, part in enumerate(parts):
if i % 2:
part = Template(part).render(context).encode("utf-8")
parts[i] = part
response.content = b"".join(parts)
response["Content-Length"] = len(response.content)
if hasattr(request, '_messages'):
# Required to clear out user messages.
request._messages.update(response)
return response
class FetchFromCacheMiddleware(object):
"""
Request phase for Mezzanine cache middleware. Return a response
from cache if found, otherwise mark the request for updating
the cache in ``UpdateCacheMiddleware``.
"""
def process_request(self, request):
if (cache_installed() and request.method == "GET" and
not request.user.is_authenticated()):
cache_key = cache_key_prefix(request) + request.get_full_path()
response = cache_get(cache_key)
# We need to force a csrf token here, as new sessions
# won't receive one on their first request, with cache
# middleware running.
csrf_mw_name = "django.middleware.csrf.CsrfViewMiddleware"
if csrf_mw_name in settings.MIDDLEWARE_CLASSES:
csrf_mw = CsrfViewMiddleware()
csrf_mw.process_view(request, lambda x: None, None, None)
get_token(request)
if response is None:
request._update_cache = True
else:
return HttpResponse(response)
class SSLRedirectMiddleware(object):
"""
Handles redirections required for SSL when ``SSL_ENABLED`` is ``True``.
If ``SSL_FORCE_HOST`` is set and is not the current host,
redirect to it.
Also ensure URLs defined by ``SSL_FORCE_URL_PREFIXES`` are redirected
to HTTPS, and redirect all other URLs back to HTTP when on HTTPS.
"""
def process_ |
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
print("no suitable tags, using unknown + full revision id")
return {"version": "0+unknown",
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": "no suitable tags"}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
"""Get version from 'git describe' in the root of the source tree.
This only gets called if the git-archive 'subst' keywords were *not*
expanded, and _version.py hasn't already been rewritten with a short
version string, meaning we're inside a checked out source tree.
"""
if not os.path.exists(os.path.join(root, ".git")):
if verbose:
print("no .git in %s" % root)
raise NotThisMethod("no .git directory")
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
describe_out = run_command(GITS, ["describe", "--tags", "--dirty",
"--always", "--long",
"--match", "%s*" % tag_prefix],
cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
full_out = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
pieces = {}
pieces["long"] = full_out
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
# look for -dirty suffix
dirty = git_describe.endswith("-dirty")
pieces["dirty"] = dirty
if dirty:
git_describe = git_describe[:git_describe.rindex("-dirty")]
# now we have TAG-NUM-gHEX or HEX
if "-" in git_describe:
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
# unparseable. Maybe git-describe is misbehaving?
pieces["error"] = ("unable to parse git-describe output: '%s'"
% describe_out)
return pieces
# tag
full_tag = mo.group(1)
if not full_tag.startswith(tag_prefix):
if verbose:
fmt = "tag '%s' doesn't start with prefix '%s'"
print(fmt % (full_tag, tag_prefix))
pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
% (full_tag, tag_prefix))
return pieces
pieces["closest-tag"] = full_tag[len(tag_prefix):]
# distance: number of commits since tag
pieces["distance"] = int(mo.group(2))
# commit: short hex revision ID
pieces["short"] = mo.group(3)
else:
# HEX: no tags
pieces["closest-tag"] = None
count_out = run_command(GITS, ["rev-list", "HEAD", "--count"],
cwd=root)
pieces["distance"] = int(count_out) # total number of commits
return pieces
def plus_or_dot(pieces):
"""Return a + if we don't already have one, else return a ."""
if "+" in pieces.get("closest-tag", ""):
return "."
return "+"
def render_pep440(pieces):
"""Build up version string, with post-release "local version identifier".
Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
Exceptions:
1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += plus_or_dot(pieces)
rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
else:
# exception #1
rendered = "0+untagged.%d.g%s" % (pieces["distance"],
pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
return rendered
def render_pep440_pre(pieces):
"""TAG[.post.devDISTANCE] -- No -dirty.
Exceptions:
1: no tags. 0.post.devDISTANCE
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += ".post.dev%d" % pieces["distance"]
else:
# exception #1
rendered = "0.post.dev%d" % pieces["distance"]
return rendered
def render_pep440_post(pieces):
"""TAG[.postDISTANCE[.dev0]+gHEX] .
The ".dev0" means dirty. Note that .dev0 sorts backwards
(a dirty tree will appear "older" than the corresponding clean one),
but you shouldn't be releasing software with -dirty anyways.
Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-ta | g"]
| if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += plus_or_dot(pieces)
rendered += "g%s" % pieces["short"]
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += "+g%s" % pieces["short"]
return rendered
def render_pep440_old(pieces):
"""TAG[.postDISTANCE[.dev0]] .
The ".dev0" means dirty.
Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
return rendered
def render_git_describe(pieces):
"""TAG[-DISTANCE-gHEX][-dirty].
Like 'git describe --tags --dirty --always'.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render_git_describe_long(pieces):
"""TAG-DISTANCE-gHEX[-dirty].
Like 'git describe --tags --dirty --always --long'.
The distance/hash is unconditional.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
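# Hedged worked example (illustrative values, not from a real repository): feeding a
# synthetic `pieces` dict through the pep440 renderer above. A tag of 1.2, three
# commits of distance and a clean tree yield "1.2+3.gabcdef0".
def _demo_render_pep440():
    example = {"closest-tag": "1.2", "distance": 3, "short": "abcdef0",
               "dirty": False, "error": None}
    assert render_pep440(example) == "1.2+3.gabcdef0"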
def render(pieces, style):
"""Render the given version pieces into the requested style."""
if pieces["error"]:
return {"version": "unknown",
"full-revisionid": pieces.get("long"),
"dirty": None,
"error": pieces["error"]}
if not style or style == "default":
style = "pep440" # the default
if style == "pep440":
rendered = render_pep440(pieces)
elif style == "pep440-pre":
rendered = render_pep440_pre(pieces)
elif style == "pep440-post":
rendered = render_pep440_post(pieces)
elif style == "pep440-old":
rendered = render_pep440_old(pieces)
elif style == "git-describe":
rendered = render_git_describe(pieces)
elif style == "git-describe-long":
rendered = render_git_describe_long(pieces)
else:
|
# Utility functions for OpenMORA scripts
#
# Part of OpenMora - https://github.com/OpenMORA
import os, sys, string
import platform
import yaml
def get_mora_paths():
""" Returns a list of paths with MORA modules, from the env var MORA_PATH
"""
if not 'MORA_PATH' in os.environ:
print('**ERROR** Environment variable MORA_PATH not set')
sys.exit(1)
sMoraPaths = os.environ['MORA_PATH']
if platform.system()=="Windows":
sPathDelim = ";"
else:
sPathDelim = ":"
morabase_dir="";
return sMoraPaths.split(sPathDelim)
def get_morabase_dir():
""" Returns the path of "mora-base" pkg
"""
mora_paths = get_mora_paths() # Get env vars
morabase_dir = ""  # initialize before the search loop so the check below can't hit a NameError
for p in mora_paths:
tstPath = os.path.normpath(p + "/mora-base")
if os.path.exists(tstPath):
morabase_dir = tstPath
if (len(morabase_dir)==0) or (not os.path.exists(morabase_dir)):
print("Couldn't detect mora-base in MORA_PATH!!")
sys.exit(1)
return morabase_dir
import math
def progress(percent):
''' source: http://gunslingerc0de.wordpress.com/2010/08/13/python-command-line-progress-bar/ '''
width = 74
marks = math.floor(width * (percent / 100.0))
spaces = math.floor(width - marks)
loader = '[' + ('=' * int(marks)) + (' ' * int(spaces)) + ']'
if percent >= 100:
percent = 100
sys.stdout.write("%s %d%%\r" % (loader, percent))
if percent >= 100:
    sys.stdout.write("\n")  # finish the bar with a newline only once complete
sys.stdout.flush()
def get_pkgs_root():
'''Returns the path to the parent directory of mora-base'''
morabase_dir = get_morabase_dir()
pkgs_root = os.path.dirname(morabase_dir)
return pkgs_root
def read_distro_file():
'''Returns the yaml contents of the distro file'''
morabase_dir = get_morabase_dir()
pkgs_root = os.path.dirname(morabase_dir)
sDistroFile = os.path.normpath( morabase_dir + "/distro/openmora-pkgs.yaml")
assert os.path.exists(sDistroFile)
assert os.path.exists(pkgs_root + "/mora-base")
# Parse distro file:
fil = open(sDistroFile, 'r')
distro = yaml.safe_load(fil)  # safe_load: avoid constructing arbitrary objects from the distro file
fil.close()
#print distro
return distro
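# Hedged usage sketch: locate mora-base and list the distro file's top-level keys.
# Relies only on the helpers above; requires MORA_PATH to be set in the environment.
if __name__ == '__main__':
    print('mora-base found at: ' + get_morabase_dir())
    distro = read_distro_file()
    for key in distro:
        print(' distro entry: ' + str(key))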
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
from foo_receiver import FooReceiver
from foo_listener_bf import FooListenerBfHelper
from PyCFFIlib_cffi import ffi, lib
import gc
class FooListenerBfImpl:
def delete_fl_in_fl(self):
print ("Not to be used")
def on_string_change(self, prs):
print ("FooListenerImpl.py: on_string_change prs", prs)
self._prs = prs
return self._prs
def get_string(self):
return self._prs
def set_listener_bf(self,fl):
self._fl = fl
def get_listener_bf(self):
return self._fl
def set_binary(self,b):
print ("setting Binary in FooListenerBfImpl ", b)
self._b = b
def get_binary(self):
return self._b
def send_return(self,fl):
return fl
def create():
# TODO: decide whether to keep this here, or to relax the helper's fromPy check
# that all methods exist as attributes on the class
print ("Unused in practice, but +p +c together with fromPy's all-methods check requires this to be defined")
def fr_set_get(fr, fl, s):
fr.add_listener_bf(fl)
assert fr.set_private_bf_string(s) == s, "test_interface_back_forth failed"
# assert fl._prs == s, "test_interface_back_forth failed"
assert fr.get_listener_bf_string() == s, "test_interface_back_forth failed"
# back and forth via regular calls from python to cpp
def test_interface_back_forth():
print ("start test len ", len(FooListenerBfHelper.c_data_set))
fr = FooReceiver.create()
fl = FooListenerBfImpl() # python implementation of listener
fl_cpp = fr.get_foo_listener_bf() # cpp implementation of listener
# both direct and indirect test for python impl of FooListenerBf
fr_set_get(fr, fl, "Hello world!")
# both direct and indirect test for cpp impl of FooListenerBf
fr_set_get(fr, fl_cpp, "Goodbye world!")
fr_set_get(fr, fl_cpp, "Goodbye world!")
# send python implementation back and forth and see that it can still be used, and that no wrapper was added
fl_1 = fr.send_return(fl)
fl_2 = fr.send_return(fl_1)
fr_set_get(fr, fl_2, "Hello")
assert fl == fl_1 and fl_1 == fl_2, "test_interface_back_forth failed"
# send cpp implementation back and forth and see that it can still be used, and that handles hold the same implementation
fl_cpp_1 = fr.send_return(fl_cpp)
fl_cpp_2 = fr.send_return(fl_cpp_1)
fr_set_get(fr, fl_cpp_2, "Goodbye")
assert lib.equal_handles_cw__foo_listener_bf(fl_cpp._cpp_impl, fl_cpp_1._cpp_impl) and \
lib.equal_handles_cw__foo_listener_bf(fl_cpp_1._cpp_impl, fl_cpp_2._cpp_impl)
fl = fl_1 = fl_2 = fl_cpp = fl_cpp_1 = fl_cpp_2 = None
gc.collect()
fr = None
gc.collect()
assert 0 == len(FooListenerBfHelper.c_data_set)
def fr_fl_set_get(fr, fl_in_fl, b):
fr.set_listener_bf_in_listener_bf(fl_in_fl)
fr.set_binary_in_listener_bf_in_listener_bf(b)
assert b == fr.get_binary_in_listener_bf_in_listener_bf(), "test_interface_back_forth failed"
# back and forth via callbacks cpp to python
def test_interface_callback_back_forth():
fr = FooReceiver.create()
fl = FooListenerBfImpl()
fr.add_listener_bf(fl)
fl_in_fl = FooListenerBfImpl()
b = b'Some Binary 11'
fr_fl_set_get(fr, fl_in_fl, b) # listener 1 in python, listener 2 in python
fl_in_fl_1 = fr.in_listener_bf_send_return(fl_in_fl)
fl_in_fl_2 = fr.in_listener_bf_send_return(fl_in_fl_1)
assert fl_in_fl == fl_in_fl_1 and fl_in_fl_1 == fl_in_fl_2, "test_interface_back_forth failed"
fr_fl_set_get(fr, fl_in_fl_2, b) # listener 1 in python, listener 2 in python after back&forth
fl_in_fl = fr.get_foo_listener_bf()
b = b'Some Other Binary 12'
fr_fl_set_get(fr, fl_in_fl, b) # listener 1 in python, listener 2 in cpp
fl_in_fl_1 = fr.in_listener_bf_send_return(fl_in_fl)
fl_in_fl_2 = fr.in_listener_bf_send_return(fl_in_fl_1)
assert lib.equal_handles_cw__foo_listener_bf(fl_in_fl._cpp_impl, fl_in_fl_1._cpp_impl) and \
lib.equal_handles_cw__foo_listener_bf(fl_in_fl_1._cpp_impl, fl_in_fl_2._cpp_impl)
fr_fl_set_get(fr, fl_in_fl_2, b) # listener 1 in python, listener 2 in cpp after back&forth
fl = fr.get_foo_listener_bf()
fr.add_listener_bf(fl)
fl_in_fl = FooListenerBfImpl()
b = b'Some Binary 21'
fr_fl_set_get(fr, fl_in_fl, b) # listener 1 in cpp, listener 2 in python
fl_in_fl_1 = fr.in_listener_bf_send_return(fl_in_fl)
fl_in_fl_2 = fr.in_listener_bf_send_return(fl_in_fl_1)
assert fl_in_fl == fl_in_fl_1 and fl_in_fl_1 == fl_in_fl_2, "test_interface_back_forth failed"
fr_fl_set_get(fr, fl_in_fl_2, b) # listener 1 in cpp, listener 2 in python after back&forth
fl_in_fl = fr.get_foo_listener_bf()
b = b'Some Other Binary 22'
fr_fl_set_get(fr, fl_in_fl, b) # listener 1 in cpp, listener 2 in cpp
fl_in_fl_1 = fr.in_listener_bf_send_return(fl_in_fl)
fl_in_fl_2 = fr.in_listener_bf_send_return(fl_in_fl_1)
assert lib.equal_handles_cw__foo_listener_bf(fl_in_fl._cpp_impl, fl_in_fl_1._cpp_impl) and \
lib.equal_handles_cw__foo_listener_bf(fl_in_fl_1._cpp_impl, fl_in_fl_2._cpp_impl)
fr_fl_set_get(fr, fl_in_fl_2, b) # listener 1 in cpp, listener 2 in cpp after back&forth
fl = fl_in_fl = fl_in_fl_1 = fl_in_fl_2 = None
gc.collect()
fr = None
gc.collect()
assert 0 == len(FooListenerBfHelper.c_data_set)
|
# Tests for source4/libnet/py_net_dckeytab.c
#
# Copyright (C) David Mulder <dmulder@suse.com> 2018
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import os
import sys
import string
from samba.net import Net
from samba import enable_net_export_keytab
from samba import tests
from samba.param import LoadParm
enable_net_export_keytab()
def open_bytes(filename):
if sys.version_info[0] == 3:
return open(filename, errors='ignore')
else:
return open(filename, 'rb')
class DCKeytabTests(tests.TestCase):
def setUp(self):
super(DCKeytabTests, self).setUp()
self.lp = LoadParm()
self.lp.load_default()
self.creds = self.insta_creds(template=self.get_credentials())
self.ktfile = os.path.join(self.lp.get('private dir'), 'test.keytab')
self.principal = self.creds.get_principal()
def tearDown(self):
super(DCKeytabTests, self).tearDown()
os.remove(self.ktfile)
def test_export_keytab(self):
net = Net(None, self.lp)
net.export_keytab(keytab=self.ktfile, principal=self.principal)
assert os.path.exists(self.ktfile), 'keytab was not created'
with open_bytes(self.ktfile) as bytes_kt:
result = ''
for c in bytes_kt.read():
if c in string.printable:
result += c
principal_parts = self.principal.split('@')
assert principal_parts[0] in result and \
principal_parts[1] in result, \
'Principal not found in generated keytab'
|
self.subscribe(method='self.get_names')
other_ws._send(method='self.change_name_then_error')
self.assert_incoming()
def test_triggered_error(self):
with self.open_ws() as other_ws:
self.subscribe(method='self.get_names')
self.names.append(object())
other_ws._send(method='self.change_name_then_error')
self.names[:] = ['Hello'] * 2
other_ws._send(method='self.change_name')
self.assert_incoming()
def test_multiple_subscriptions(self):
self.subscribe(method='self.get_names')
self.subscribe(method='self.get_places')
self.assert_no_response()
with self.open_ws() as other_ws:
other_ws._send(method='self.change_name')
self.assert_incoming()
other_ws._send(method='self.change_place')
self.assert_incoming()
other_ws._send(method='self.echo', params='Hello')
self.assert_no_response()
def test_multiple_triggers(self):
self.subscribe(method='self.get_names_and_places')
self.assert_no_response()
with self.open_ws() as other_ws:
other_ws._send(method='self.change_name')
self.assert_incoming()
other_ws._send(method='self.change_place')
self.assert_incoming()
other_ws._send(method='self.echo', params='Hello')
self.assert_no_response()
def test_multiple_clients(self):
self.subscribe(method='self.get_names', client='client1')
self.subscribe(method='self.get_names', client='client2')
self.assert_no_response()
with self.open_ws() as other_ws:
other_ws._send(method='self.change_name')
assert {'client1', 'client2'} == {self.next()['client'], self.next()['client']}
def test_nonlocking_echo(self):
self.ws._send(method='self.slow_echo', params=['foo'],
client='client1', callback='cb11')
sleep(1)
self.ws._send(method='self.echo', params=['bar'], client='client2',
callback='cb22')
self.assert_incoming(data='bar', client='client2')
self.assert_incoming(data='foo', client='client1', timeout=2)
def test_client_locking(self):
self.ws._send(method='self.slow_echo', params=['foo'],
client=self.client, callback='cb1')
sleep(1)
self.ws._send(method='self.echo', params=['bar'],
client=self.client, callback='cb2')
self.assert_incoming(data='foo', timeout=2)
self.assert_incoming(data='bar')
def test_jsonrpc_notification(self):
self.subscribe(method='self.get_names')
self.jsonrpc.self.change_name()
self.assert_incoming()
def test_jsonrpc_websocket_client(self):
self.addCleanup(setattr, self.jsonrpc, "_prepare_request",
self.jsonrpc._prepare_request)
self.jsonrpc._prepare_request = lambda data, headers: data.update(
{'websocket_client': self.client})
self.jsonrpc.self.change_name()
self.assert_no_response()
class TestWebsocketCall(SideboardServerTest):
@pytest.fixture(autouse=True)
def override(self, service_patcher, config_patcher):
config_patcher(1, 'ws.call_timeout')
service_patcher('test', self)
def fast(self):
return 'fast'
def slow(self):
sleep(2)
return 'slow'
def test_fast(self):
assert self.ws.call('test.fast') == 'fast'
def test_slow(self):
pytest.raises(Exception, self.ws.call, 'test.slow')
class TestWebsocketsCrudSubscriptions(SideboardServerTest):
@pytest.fixture(autouse=True)
def override(self, service_patcher):
class MockCrud: pass
mr = self.mr = MockCrud()
for name in ['create', 'update', 'delete']:
setattr(mr, name, Session.crud.crud_notifies(self.make_crud_method(name), delay=0.5))
for name in ['read', 'count']:
setattr(mr, name, Session.crud.crud_subscribes(self.make_crud_method(name)))
service_patcher('crud', mr)
def setUp(self):
SideboardServerTest.setUp(self)
self.ws.close()
self.ws = self.open_ws()
self.client = self._testMethodName
def make_crud_method(self, name):
def crud_method(*args, **kwargs):
log.debug('mocked crud.{}'.format(name))
assert not getattr(self.mr, name + '_error', False)
return uuid4().hex
crud_method.__name__ = name.encode('utf-8')
return crud_method
def models(self, *models):
return [{'_model': model} for model in models]
def read(self, *models):
self.ws._send(method='crud.read', client=self.client, params=self.models(*models))
self.assert_incoming(trigger='subscribe')
def update(self, *models, **kwargs):
client = kwargs.get('client', 'unique_client_' + uuid4().hex)
self.ws._send(method='crud.update', client=client, params=self.models(*models))
self.assert_incoming(client=client)
def test_read(self):
self.read('User')
self.assert_no_response()
def test_triggered_read(self):
self.read('User')
self.update('User')
self.assert_incoming(trigger='update')
def test_unsubscribe(self):
self.test_triggered_read()
self.unsubscribe()
self.update('User')
self.assert_no_response()
def test_triggered_error(self):
self.mr.update_error = True
with self.open_ws() as other_ws:
other_ws._send(method='crud.read', client='other_tte', params=self.models('User'))
self.assert_incoming(other_ws, client='other_tte')
self.update('User')
self.ws._send(method='crud.update', client=self.client, params=self.models('User'))
assert 'error' in self.next()
self.assert_incoming(other_ws, client='other_tte', trigger='update')
def test_indirect_trigger(self):
def account(*attrs):
if len(attrs) == 1:
return {'_model': 'Account', 'field': attrs[0]}
else:
return {'_model': 'Account',
'or': [{'field': attr} for attr in attrs]}
def call(*attrs):
self.call(method='crud.read', client=self.client, params=account(*attrs))
def assert_update_triggers(model):
self.update(model)
self.assert_incoming()
call('xxx')
assert_update_triggers('Account')
self.unsubscribe()
call('user.xxx')
assert_update_triggers('User')
assert_update_triggers('Account')
self.unsubscribe()
call('user.xxx', 'boss.xxx')
assert_update_triggers('Account')
assert_update_triggers('User')
assert_update_triggers('Account')
self.unsubscribe()
call('user.tags.xxx')
assert_update_triggers('Account')
assert_update_triggers('User')
assert_update_triggers('Tag')
self.update('Boss')
self.assert_no_response()
def test_trigger_and_callback(self):
result = self.call(method='crud.read', params=self.models('User'), client='ds_ttac')
self.assert_no_response()
def test_multiple_triggers(self):
self.read('User', 'Boss')
self.update('User')
self.assert_incoming()
self.update('Boss')
self.assert_incoming()
self.update('Account')
self.assert_no_response()
def test_trigger_changed(self):
self.read('User')
self.read('Boss')
self.update('User')
self.assert_no_response()
self.update('Boss')
self.assert_incoming()
self.assert_no_response()
def test_multiple_clients(self):
self.read('Boss')
self.ws._send(method='crud.read', client='other_tmc', params=self.models('Boss'))
self.assert_incoming(client='other_tmc')
self.update('User')
self.assert_no_response()
self.read('Boss')
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
""" astropy.cosmology contains classes and functions for cosmological
distance measures and other cosmology-related calculations.
See the `Astropy documentation
<https://docs.astropy.org/en/latest/cosmology/index.html>`_ for more
detailed usage examples and references.
"""
from . import core, flrw, funcs, parameter, units, utils
from . import io # needed before 'realizations' # isort: split
from . import realizations
from .core import *
from .flrw import *
from .funcs import *
from .parameter import *
from .realizations import *
from .utils import *
__all__ = (core.__all__ + flrw.__all__ # cosmology classes
+ realizations.__all__ # instances thereof
+ funcs.__all__ + parameter.__all__ + utils.__all__) # utils
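# Illustrative note (not part of the original module): the wildcard imports
# above make the bundled realizations importable straight from the package,
# e.g. ``from astropy.cosmology import Planck18``.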
()
aset1 = amcattest.create_test_set(4, project=project)
aset2 = amcattest.create_test_set(5, project=project)
aset3 = amcattest.create_test_set(0)
# Creates a codingjob for each articleset, as handle_split should account
# for "codedarticlesets" as well.
cj1 = amcattest.create_test_job(articleset=aset1)
cj2 = amcattest.create_test_job(articleset=aset2)
cj3 = amcattest.create_test_job(articleset=aset3)
for _set in [aset1, aset2]:
for _article in _set.articles.all():
sbd.create_sentences(_article)
a1, a2 = aset1.articles.all()[0], aset2.articles.all()[0]
aset1.add_articles([article])
aset3.add_articles([a1])
form = partial(navigator.forms.SplitArticleForm, project, article, initial={
"remove_from_sets": False
})
# Test form defaults (should do nothing!)
f = form(dict())
self.assertTrue(f.is_valid())
handle_split(f, project, article, Sentence.objects.none())
self.assertEquals(5, aset1.articles.all().count())
self.assertEquals(5, aset2.articles.all().count())
self.assertEquals(1, aset3.articles.all().count())
self.assertTrue(self.article_in(cj1, aset1, article))
self.assertFalse(self.article_in(cj2, aset2, article))
self.assertFalse(self.article_in(cj3, aset3, article))
# Passing invalid form should raise exception
f = form(dict(add_to_sets=[-1]))
self.assertFalse(f.is_valid())
self.assertRaises(ValueError, handle_split, f, project, article, Sentence.objects.none())
# Test add_to_new_set
f = form(dict(add_to_new_set="New Set 1"))
self.assertTrue(f.is_valid())
handle_split(f, project, article, Sentence.objects.none())
aset = project.all_articlesets().filter(name="New Set 1")
self.assertTrue(aset.exists())
self.assertEquals(project, aset[0].project)
# Test add_to_sets
f = form(dict(add_to_sets=[aset3.id]))
self.assertFalse(f.is_valid())
f = form(dict(add_to_sets=[aset2.id]))
self.assertTrue(f.is_valid())
handle_split(f, project, article, Sentence.objects.none())
self.assertTrue(self.article_in(cj2, aset2, article))
# Test add_splitted_to_new_set
f = form(dict(add_splitted_to_new_set="New Set 2"))
self.assertTrue(f.is_valid())
handle_split(f, project, article, Sentence.objects.none())
aset = project.all_articlesets().filter(name="New Set 2")
self.assertTrue(aset.exists())
self.assertEquals(project, aset[0].project)
self.assertEquals(1, aset[0].articles.count())
self.assertFalse(self.article_in(None, aset[0], article))
# Test add_splitted_to_sets
f = form(dict(add_splitted_to_sets=[aset2.id]))
self.assertTrue(f.is_valid())
handle_split(f, project, article, Sentence.objects.none())
self.assertTrue(article in aset2.articles.all())
# Test remove_from_sets
f = form(dict(remove_from_sets=[aset1.id]))
self.assertTrue(f.is_valid())
handle_split(f, project, article, Sentence.objects.none())
self.assertTrue(article not in aset1.articles.all())
# Test remove_from_all_sets
aset1.add_articles([article])
aset2.add_articles([article])
aset3.add_articles([article])
f = form(dict(remove_from_all_sets=True))
self.assertTrue(f.is_valid())
handle_split(f, project, article, Sentence.objects.none())
self.assertTrue(aset1 in project.all_articlesets())
self.assertTrue(aset2 in project.all_articlesets())
self.assertFalse(aset3 in project.all_articlesets())
self.assertFalse(self.article_in(cj1, aset1, article))
self.assertFalse(self.article_in(cj2, aset2, article))
self.assertTrue(self.article_in(cj3, aset3, article))
def article_in(self, codingjob, articleset, article):
from amcat.tools.amcates import ES
ES().refresh()
if codingjob is not None:
if not codingjob.coded_articles.filter(article=article):
return False
return article.id in (articleset.get_article_ids() | articleset.get_article_ids(use_elastic=True))
class TestArticleViews(amcattest.AmCATTestCase):
@amcattest.use_elastic
def create_test_sentences(self):
article = amcattest.create_test_article(text="foo\n\nDit is. Tekst.\n\n"*3 + "Einde.")
sbd.create_sentences(article)
return article, article.sentences.all()
@amcattest.use_elastic
def test_get_articles(self):
from amcat.models import Sentence
_get_articles = lambda a, s : list(get_articles(a, s))
# Should raise exception if sentences not in article
article, sentences = self.create_test_sentences()
s1 = Sentence.objects.filter(id=amcattest.create_test_sentence().id)
self.assertRaises(ValueError, _get_articles, article, s1)
# Should raise an exception if we try to split on title
self.assertRaises(ValueError, _get_articles, article, sentences.filter(parnr=1))
# Should return a "copy", with byline in "text" property
arts = _get_articles(article, Sentence.objects.none())
Article.create_articles(arts)
self.assertEquals(len(arts), 1)
sbd.create_sentences(arts[0])
self.assertEquals(
[s.sentence for s in sentences[1:]],
[s.sentence for s in arts[0].sentences.all()[1:]]
)
self.assertTrue("foo" in arts[0].text)
# Should be able to split on byline
self.assertEquals(2, len(_get_articles(article, sentences[1:2])))
a, b = _get_articles(article, sentences[4:5])
# Check if text on splitted articles contains expected
self.assertTrue("Einde" not in a.text)
self.assertTrue("Einde" in b.text)
@amcattest.use_elastic
def test_permissions(self):
# articles should be visible if any of the sets it is in has the correct permissions
role_metareader = Role.objects.get(label="metareader")
role_reader = Role.objects.get(label="reader")
user = amcattest.create_test_user(username="fred", password="secret")
p1 = amcattest.create_test_project(name="p1")
p2 = amcattest.create_test_project(name="p2", owner=user)
s1 = amcattest.create_test_set(project=p1)
a1 = amcattest.create_test_article(project=p1, articleset=s1, text="Dit is de tekst", title="hoofdlijn")
client = Client()
client.login(username="fred", password="secret")
url = reverse("navigator:" + ArticleDetailsView.get_view_name(), args=[p1.id, s1.id, a1.id])
def test(url, can_view=True, can_read_article=True):
response = client.get(url)
self.assertEqual(response.status_code, 200 if can_view else 403)
if can_view:
self.assertEqual(response.context['can_view_text'], can_read_article)
return response
# fred can read it if p1 is reader
p1.guest_role = role_reader
p1.save()
response = test(url)
self.assertIn(b"Dit is de tekst", response.content)
# but not if guest role is metareader
p1.guest_role = role_metareader
p1.save()
response = test(url, can_read_article=False)
self.assertNotIn(b"Dit is de tekst", response.content)
self.assertIn(b"hoofdlijn", response.content)
# and an error if there is no guest role at all
p1.guest_role = None
p1.save()
test(url, can_view=False)
# Unless the article set is added to project 2 (where Fred is owner)
p2.articlesets.add(s1)
test(url)
# Also if project 1 has metareader as guest role
p1.guest_role = role_metareader
p1.save()
test(url)
#TODO: Test that you can only link a set on which you have read rights (i.e.
import logging
from discord.ext import commands
from bot.cooldowns import CooldownMapping, Cooldown
from bot.globals import Auth
from utils.utilities import is_owner, check_blacklist, no_dm
terminal = logging.getLogger('terminal')
def command(*args, **attrs):
if 'cls' not in attrs:
attrs['cls'] = Command
return commands.command(*args, **attrs)
def group(name=None, **attrs):
"""Uses custom Group class"""
if 'cls' not in attrs:
attrs['cls'] = Group
return commands.command(name=name, **attrs)
def cooldown(rate, per, type=commands.BucketType.default):
"""See `commands.cooldown` docs"""
def decorator(func):
if isinstance(func, Command):
func._buckets = CooldownMapping(Cooldown(rate, per, type))
else:
func.__commands_cooldown__ = Cooldown(rate, per, type)
return func
return decorator
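# Usage sketch (illustrative, not part of the original file). Either decorator
# ordering works: cooldown() handles both a bare coroutine (it sets
# __commands_cooldown__) and an already-built Command (it swaps _buckets).
#
#     @command(name='ping')
#     @cooldown(1, 5, commands.BucketType.user)   # one use per user per 5 s
#     async def ping(ctx):
#         await ctx.send('pong')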
class Command(commands.Command):
def __init__(self, func, **kwargs):
# Init called twice because commands are copied
super(Command, self).__init__(func, **kwargs)
self._buckets = CooldownMapping(self._buckets._cooldown)
self.owner_only = kwargs.pop('owner_only', False)
self.auth = kwargs.pop('auth', Auth.NONE)
self.checks.insert(0, check_blacklist)
if self.owner_only:
terminal.info(f'registered owner_only command {self.name}')
self.checks.insert(0, is_owner)
if 'no_pm' in kwargs or 'no_dm' in kwargs:
self.checks.insert(0, no_dm)
def undo_use(self, ctx):
"""Undoes one use of command"""
if self._buckets.valid:
bucket = self._buckets.get_bucket(ctx.message)
bucket.undo_one()
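# Usage sketch (illustrative, not part of the original file): undo_use is
# typically called from an error handler to refund the cooldown charge when
# a command fails, e.g.
#
#     async def on_command_error(ctx, error):
#         if isinstance(error, commands.CheckFailure) and isinstance(ctx.command, Command):
#             ctx.command.undo_use(ctx)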
class Group(Command, commands.Group):
def __init__(self, *args, **attrs): # skipcq: PYL-W0231
Command.__init__(self, *args, **attrs)
self.invoke_without_command = attrs.pop('invoke_without_command', False)
def group(self, *args, **kwargs):
def decorator(func):
kwargs.setdefault('parent', self)
result = group(*args, **kwargs)(func)
self.add_command(result)
return result
return decorator
def command(self, *args, **kwargs):
def decorator(func):
if 'owner_only' not in kwargs:
kwargs['owner_only'] = self.owner_only
kwargs.setdefault('parent', self)
result = command(*args, **kwargs)(func)
self.add_command(result)
return result
return decorator
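# Usage sketch (illustrative, not part of the original file): subcommands
# registered through Group.command() inherit owner_only from their parent
# via the kwargs defaulting above.
#
#     @group(name='admin', owner_only=True, invoke_without_command=True)
#     async def admin(ctx):
#         await ctx.send('admin root')
#
#     @admin.command(name='reload')
#     async def reload(ctx):
#         ...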
#!/usr/bin/python
################################################################################
# Bus Supervisor Interface
#
# - interfaces to the MCP23017 and PCF8574 IO expander chips
#
# The logic for this was ported from Dr Scott M. Baker's project:
# http://www.smbaker.com/z80-retrocomputing-4-bus-supervisor
#
################################################################################
from libArdySer import ArdySer
from lib_GenericChip import GenericChip
from GS_Timing import delay
from lib_MCP23017_IOExpander16 import MCP23017_IOExpander16, IOALLINPUT, IOALLOUTPUT  # direction constants assumed to be exported by this module
from lib_PCF8574_IOExpander8 import PCF8574_IOExpander8
class RC2014_BusSupervisor:
##################################
# class variables
ardy = None
cpuIoData = None
# A0-A7 - Data byte
# B0-B7 - Bus control
M1 = 0x01 # B0
CLK = 0x02 # B1
INT = 0x04 # B2
MREQ = 0x08 # B3
WR = 0x10 # B4
RD = 0x20 # B5
IORQ = 0x40 # B6
BUSACK = 0x80 # B7
cpuControl = None
# 0x0F - control, clock, etc
BUSREQ = 0x01
RESET = 0x02
CLKEN = 0x04
CLKOUT = 0x08
# 0xF0 - unused
cpuAddress = None
# A0-A7, B0-B7 - Address lines (reversed)
# our mirror values here
data = 0
dataControl = 0
dataAddress = 0
##############################
def bitReverse( self, data ):
retval = 0
if( (data & 0x80) == 0x80 ): retval = retval | 0x01
if( (data & 0x40) == 0x40 ): retval = retval | 0x02
if( (data & 0x20) == 0x20 ): retval = retval | 0x04
if( (data & 0x10) == 0x10 ): retval = retval | 0x08
if( (data & 0x08) == 0x08 ): retval = retval | 0x10
if( (data & 0x04) == 0x04 ): retval = retval | 0x20
if( (data & 0x02) == 0x02 ): retval = retval | 0x40
if( (data & 0x01) == 0x01 ): retval = retval | 0x80
return retval
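# Equivalent one-liner, for reference (not in the original):
#     int( '{:08b}'.format( data )[::-1], 2 )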
##################################
# Initialization
def __init__( self, _ardy, _i2cAddr8 = None ):
# set the arduino object
baseAddr = _i2cAddr8
if _i2cAddr8 is None:
baseAddr = 0x21
self.data = 0
self.dataControl = 0
self.dataAddress = 0
self.ardy = _ardy
self.cpuIoData = MCP23017_IOExpander16( _ardy, baseAddr + 0 )
self.cpuControl = PCF8574_IOExpander8( _ardy, baseAddr + 1 )
self.cpuAddress = MCP23017_IOExpander16( _ardy, baseAddr + 2 )
self.ClearAllExpanders()
def ClearAllExpanders( self ):
# clear data register
self.cpuIoData.DirectionA( IOALLINPUT )
self.cpuIoData.SetA( 0x00 )
self.cpuIoData.DirectionB( IOALLINPUT )
self.cpuIoData.SetB( 0x00 )
# clear control register
self.cpuControl.Set( 0x00 )
# clear address register
self.cpuAddress.DirectionA( IOALLINPUT )
self.cpuAddress.SetA( 0x00 )
self.cpuAddress.DirectionB( IOALLINPUT )
self.cpuAddress.SetB( 0x00 )
##################################
# Low-level commands
##################################
# Package commands
def SupervisorDelay( self ):
delay( 1 )
def Reset( self ):
# RESET = 0
value = 0x00
self.cpuControl.Set( value )
self.SupervisorDelay()
# RESET = 1
value = self.RESET
self.cpuControl.Set( value )
return
def TakeBus( self ):
    # /BUSREQ and /BUSACK are active low (inferred from the wait loops in
    # the original sketch): pull /BUSREQ low, keep /RESET inactive, then
    # wait for the CPU to acknowledge with /BUSACK low.
    self.dataControl = self.RESET
    self.cpuControl.Set( self.dataControl )
    while True:
        value = self.cpuIoData.GetB()
        if (value & self.BUSACK) == 0:
            break
    # the bus is ours: drive the address lines...
    self.cpuAddress.DirectionA( IOALLOUTPUT )
    self.cpuAddress.DirectionB( IOALLOUTPUT )
    # ...and on the data expander's B port keep M1, CLK, INT and BUSACK
    # as inputs while driving MREQ, WR, RD and IORQ high (inactive).
    self.cpuIoData.DirectionB( self.M1 | self.CLK | self.INT | self.BUSACK )
    self.cpuIoData.SetB( self.MREQ | self.WR | self.RD | self.IORQ )
    return
def ReleaseBus( self, reset = False ):
    # float the address and data lines again (inputs are high-z)
    self.cpuAddress.DirectionA( IOALLINPUT )
    self.cpuAddress.DirectionB( IOALLINPUT )
    self.cpuIoData.DirectionA( IOALLINPUT )
    self.cpuIoData.DirectionB( IOALLINPUT )
    if reset:
        self.SupervisorDelay()
    # raise /BUSREQ (keeping /RESET inactive) and wait for /BUSACK high
    self.dataControl = self.BUSREQ | self.RESET
    self.cpuControl.Set( self.dataControl )
    while True:
        value = self.cpuIoData.GetB()
        if (value & self.BUSACK) != 0:
            break
    return
def SlowClock( self, rate ):
    # hand the clock to the supervisor: CLKEN low selects CLKOUT as the CPU
    # clock. delay() is assumed to take milliseconds, as in SupervisorDelay().
    period = 1.0 / float( rate ) / 2.0
    # preserve the current /BUSREQ state, keep /RESET inactive
    base = ( self.dataControl | self.RESET ) & ~( self.CLKEN | self.CLKOUT )
    while True:
        self.cpuControl.Set( base )                   # CLKOUT = 0
        delay( period * 1000.0 )
        self.cpuControl.Set( base | self.CLKOUT )     # CLKOUT = 1
        delay( period * 1000.0 )
def NormalClock( self ):
    # CLKEN high hands the clock back to the on-board oscillator
    self.dataControl = self.dataControl | self.RESET | self.CLKEN
    self.cpuControl.Set( self.dataControl )
    return
def SetAddress( self, addr ):
    # the address lines are wired bit-reversed (see cpuAddress above);
    # high byte on port A is an assumption carried over from the sketch
    self.cpuAddress.SetA( self.bitReverse( (addr >> 8) & 0xff ) )
    self.cpuAddress.SetB( self.bitReverse( addr & 0xff ) )
    return
##############################
def MemRead( self, addr ):
    self.SetAddress( addr )
    # pull /RD and /MREQ low, latch the data bus, then release both
    self.cpuIoData.SetB( self.WR | self.IORQ )
    result = self.cpuIoData.GetA()
    self.cpuIoData.SetB( self.MREQ | self.WR | self.RD | self.IORQ )
    return result
def MemWrite( self, addr, data ):
    self.SetAddress( addr )
    # drive the data bus with the value to write
    self.cpuIoData.DirectionA( IOALLOUTPUT )
    self.cpuIoData.SetA( data & 0xff )
    # pulse /WR and /MREQ low, then back high
    self.cpuIoData.SetB( self.RD | self.IORQ )
    self.cpuIoData.SetB( self.MREQ | self.WR | self.RD | self.IORQ )
    # float the data bus again
    self.cpuIoData.DirectionA( IOALLINPUT )
    return
def IORead( self, addr ):
    self.SetAddress( addr )
    # pull /RD and /IORQ low, latch the data bus, then release both
    self.cpuIoData.SetB( self.WR | self.MREQ )
    result = self.cpuIoData.GetA()
    self.cpuIoData.SetB( self.MREQ | self.WR | self.RD | self.IORQ )
    return result
def IOWrite( self, addr, data ):
    self.SetAddress( addr )
    self.cpuIoData.DirectionA( IOALLOUTPUT )
    self.cpuIoData.SetA( data & 0xff )
    # pulse /WR and /IORQ low, then back high
    self.cpuIoData.SetB( self.RD | self.MREQ )
    self.cpuIoData.SetB( self.MREQ | self.WR | self.RD | self.IORQ )
    self.cpuIoData.DirectionA( IOALLINPUT )
    return
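##################################
# Usage sketch (illustrative, not part of the original file; ArdySer's
# constructor arguments are an assumption -- see libArdySer for the real
# signature):
if __name__ == '__main__':
    ardy = ArdySer()
    bus = RC2014_BusSupervisor( ardy )          # expanders at 0x21..0x23
    bus.Reset()
    bus.TakeBus()                               # halt the Z80, own the bus
    print( hex( bus.MemRead( 0x0000 ) ) )       # peek at the reset vector
    bus.ReleaseBus()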
import sys, os
sys.path.insert(1, "../../../")
import h2o
from tests import pyunit_utils
from random import randint
import tempfile
def glm_gamma_offset_mojo():
train = h2o.import_file(path=pyunit_utils.locate("smalldata/prostate/prostate_complete.csv.zip"))
y = "DPROS"
x = ["AGE","RACE","CAPSULE","DCAPS","PSA","VOL"]
x_offset = ["AGE","RACE","CAPSULE","DCAPS","PSA","VOL", "C1"]
params = {'family':"gamma", 'offset_column':"C1"}
offset = pyunit_utils.random_dataset_real_only(train.nrow, 1, realR=3, misFrac=0, randSeed=12345)
train = train.cbind(offset)
tmpdir = tempfile.mkdtemp()
glm_gamma_model = pyunit_utils.build_save_model_generic(params, x, train, y, "glm", tmpdir) # build and save mojo model
MOJONAME = pyunit_utils.getMojoName(glm_gamma_model._id)
h2o.download_csv(train[x_offset], os.path.join(tmpdir, 'in.csv')) # save test file, h2o predict/mojo use same file
pred_h2o, pred_mojo = pyunit_utils.mojo_predict(glm_gamma_model, tmpdir, MOJONAME) # load model and perform predict
h2o.download_csv(pred_h2o, os.path.join(tmpdir, "h2oPred.csv"))
print("Comparing mojo predict and h2o predict...")
pyunit_utils.compare_frames_local(pred_h2o, pred_mojo, 0.1, tol=1e-10) # compare mojo and model predict
if __name__ == "__main__":
pyunit_utils.standalone_test(glm_gamma_offset_mojo)
else:
glm_gamma_offset_mojo()
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "timberwyck.settings")
os.environ.setdefault("DJANGO_CONFIGURATION", "Dev")
from configurations.management import execute_from_command_line
execute_from_command_line(sys.argv)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import paramiko
import threading
import sys
import re
import time
import os
def start_shell(h, u, p):
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(h, 22, u, p)
s = ssh.invoke_shell()
w = threading.Thread(target=write_chanel, args=(s, ))
# r = threading.Thread(target=read_chanel, args=(s, ))
w.daemon = True
w.start()
# w.start()
read_chanel(s)
# w.join()
try:
s.close()
ssh.close()
except:
pass
def read_chanel(s):
while True:
d = s.recv(4096)
if not d:
break
# for i in ['\x1b.*?m','\x0f','\x1b\[6;1H','\x1b\[K','\x1b25;1H']:
# d=re.sub(str(i),"",d)
sys.stdout.write(d.decode('utf-8', 'replace'))  # recv returns bytes
sys.stdout.flush()
# time.sleep(0.1)
try:
s.close()
except:
pass
# os.kill(os.getpid(), 15)
# sys.exit(0)
def write_chanel(s):
try:
while True:
c = sys.stdin.read(1)
if not c:
s.close()
break
a = s.send(c.encode())  # the channel expects bytes
if a == 0:
s.close()
break
except:
pass
if __name__ == '__main__':
import sys
if len(sys.argv) < 4:
print('usage:%s host user passwd' % sys.argv[0])
sys.exit(1)
(host, user, passwd) = sys.argv[1:4]
start_shell(host, user, passwd)
#!/usr/bin/python3
import sys, subprocess
def main(argv=None):
if argv is None:
argv = sys.argv
experiments = {
1 : ('Continuous', 'COPD'),
2 : ('Binary', 'COPD'),
3 : ('Continuous', 'EmphysemaExtentLung'),
4 : ('Binary', 'EmphysemaExtentLung'),
}
try:
experiment = experiments[ int(argv[1]) ]
except Exception as e:
print( 'usage: prog <experiment number>' )
return 1
prog = '../../Build/Classification/PredictClusterModel'
labels = {
'COPD' : '../../Data/Training/Labels/COPD.csv',
'EmphysemaExtentLung' : '../../Data/Training/Labels/EmphysemaExtentLung.csv',
}
instances = '../../Data/Training/Instances.csv'
bagMembership = '../../Data/Training/BagMembership.csv'
modelPattern = "Out/Training/MaxIterations1000/%s_%s_k%s_1.model"
numberOfClusters = ['5', '10', '20', ]#'15', '20', ]#'25', '30']
params = {
'histograms' : '24',
}
for k in numberOfClusters:
out = 'Out/Training/MaxIterations1000/%s_%s_k%s_' % (experiment + (k,))
cmd = [
prog,
"--instances", instances,
'--bag-membership', bagMembership,
'--model', modelPattern % (experiment + (k,)),
"--histograms", params['histograms'],
"--output", out,
]
print( ' '.join( cmd ) )
if subprocess.call( cmd ) != 0:
print( 'Error running %s : %s : k = %s' % ( experiment + (k,)) )
return 1
return 0
if __name__ == '__main__':
sys.exit( main() )