prefix stringlengths 0 918k | middle stringlengths 0 812k | suffix stringlengths 0 962k |
|---|---|---|
@param p_mi: the Media Player to free.
'''
f = _Cfunctions.get('libvlc_media_player_release', None) or \
_Cfunction('libvlc_media_player_release', ((1,),), None,
None, MediaPlayer)
return f(p_mi)
def libvlc_media_player_retain(p_mi):
'''Retain a reference to a media player object. Use
L{libvlc_media_player_release}() to decrement reference count.
@param p_mi: media player object.
'''
f = _Cfunctions.get('libvlc_media_player_retain', None) or \
_Cfunction('libvlc_media_player_retain', ((1,),), None,
None, MediaPlayer)
return f(p_mi)
def libvlc_media_player_set_media(p_mi, p_md):
'''Set the media that will be used by the media_player. If any,
previous md will be released.
@param p_mi: the Media Player.
@param p_md: the Media. Afterwards the p_md can be safely destroyed.
'''
f = _Cfunctions.get('libvlc_media_player_set_media', None) or \
_Cfunction('libvlc_media_player_set_media', ((1,), (1,),), None,
None, MediaPlayer, Media)
return f(p_mi, p_md)
def libvlc_media_player_get_media(p_mi):
'''Get the media used by the media_player.
@param p_mi: the Media Player.
@return: the media associated with p_mi, or NULL if no media is associated.
'''
f = _Cfunctions.get('libvlc_media_player_get_media', None) or \
_Cfunction('libvlc_media_player_get_media', ((1,),), class_result(Media),
ctypes.c_void_p, MediaPlayer)
return f(p_mi)
def libvlc_media_player_event_manager(p_mi):
'''Get the Event Manager from which the media player send event.
@param p_mi: the Media Player.
@return: the event manager associated with p_mi.
'''
f = _Cfunctions.get('libvlc_media_player_event_manager', None) or \
_Cfunction('libvlc_media_player_event_manager', ((1,),), class_result(EventManager),
ctypes.c_void_p, MediaPlayer)
return f(p_mi)
def libvlc_media_player_is_playing(p_mi):
'''is_playing.
@param p_mi: the Media Player.
@return: 1 if the media player is playing, 0 otherwise \libvlc_return_bool.
'''
f = _Cfunctions.get('libvlc_media_player_is_playing', None) or \
_Cfunction('libvlc_media_player_is_playing', ((1,),), None,
ctypes.c_int, MediaPlayer)
return f(p_mi)
def libvlc_media_player_play(p_mi):
'''Play.
@param p_mi: the Media Player.
@return: 0 if playback started (and was already started), or -1 on error.
'''
f = _Cfunctions.get('libvlc_media_player_play', None) or \
_Cfun | ction('libvlc_media_player_play', ((1,),), None,
ctypes.c_int, MediaPlayer)
return f(p_mi)
def libvlc_media_player_set_pause(mp, do_pause):
'''Pause or resume (no effect if there is no media).
@param mp: the Media Player.
@ | param do_pause: play/resume if zero, pause if non-zero.
@version: LibVLC 1.1.1 or later.
'''
f = _Cfunctions.get('libvlc_media_player_set_pause', None) or \
_Cfunction('libvlc_media_player_set_pause', ((1,), (1,),), None,
None, MediaPlayer, ctypes.c_int)
return f(mp, do_pause)
def libvlc_media_player_pause(p_mi):
'''Toggle pause (no effect if there is no media).
@param p_mi: the Media Player.
'''
f = _Cfunctions.get('libvlc_media_player_pause', None) or \
_Cfunction('libvlc_media_player_pause', ((1,),), None,
None, MediaPlayer)
return f(p_mi)
def libvlc_media_player_stop(p_mi):
'''Stop (no effect if there is no media).
@param p_mi: the Media Player.
'''
f = _Cfunctions.get('libvlc_media_player_stop', None) or \
_Cfunction('libvlc_media_player_stop', ((1,),), None,
None, MediaPlayer)
return f(p_mi)
def libvlc_video_set_callbacks(mp, lock, unlock, display, opaque):
'''Set callbacks and private data to render decoded video to a custom area
in memory.
Use L{libvlc_video_set_format}() or L{libvlc_video_set_format_callbacks}()
to configure the decoded format.
@param mp: the media player.
@param lock: callback to lock video memory (must not be NULL).
@param unlock: callback to unlock video memory (or NULL if not needed).
@param display: callback to display video (or NULL if not needed).
@param opaque: private pointer for the three callbacks (as first parameter).
@version: LibVLC 1.1.1 or later.
'''
f = _Cfunctions.get('libvlc_video_set_callbacks', None) or \
_Cfunction('libvlc_video_set_callbacks', ((1,), (1,), (1,), (1,), (1,),), None,
None, MediaPlayer, VideoLockCb, VideoUnlockCb, VideoDisplayCb, ctypes.c_void_p)
return f(mp, lock, unlock, display, opaque)
def libvlc_video_set_format(mp, chroma, width, height, pitch):
'''Set decoded video chroma and dimensions.
This only works in combination with L{libvlc_video_set_callbacks}(),
and is mutually exclusive with L{libvlc_video_set_format_callbacks}().
@param mp: the media player.
@param chroma: a four-characters string identifying the chroma (e.g. "RV32" or "YUYV").
@param width: pixel width.
@param height: pixel height.
@param pitch: line pitch (in bytes).
@version: LibVLC 1.1.1 or later.
@bug: All pixel planes are expected to have the same pitch. To use the YCbCr color space with chrominance subsampling, consider using L{libvlc_video_set_format_callbacks}() instead.
'''
f = _Cfunctions.get('libvlc_video_set_format', None) or \
_Cfunction('libvlc_video_set_format', ((1,), (1,), (1,), (1,), (1,),), None,
None, MediaPlayer, ctypes.c_char_p, ctypes.c_uint, ctypes.c_uint, ctypes.c_uint)
return f(mp, chroma, width, height, pitch)
def libvlc_video_set_format_callbacks(mp, setup, cleanup):
'''Set decoded video chroma and dimensions. This only works in combination with
L{libvlc_video_set_callbacks}().
@param mp: the media player.
@param setup: callback to select the video format (cannot be NULL).
@param cleanup: callback to release any allocated resources (or NULL).
@version: LibVLC 2.0.0 or later.
'''
f = _Cfunctions.get('libvlc_video_set_format_callbacks', None) or \
_Cfunction('libvlc_video_set_format_callbacks', ((1,), (1,), (1,),), None,
None, MediaPlayer, VideoFormatCb, VideoCleanupCb)
return f(mp, setup, cleanup)
def libvlc_media_player_set_nsobject(p_mi, drawable):
'''Set the NSView handler where the media player should render its video output.
Use the vout called "macosx".
The drawable is an NSObject that follow the VLCOpenGLVideoViewEmbedding
protocol:
@begincode
\@protocol VLCOpenGLVideoViewEmbedding <NSObject>
- (void)addVoutSubview:(NSView *)view;
- (void)removeVoutSubview:(NSView *)view;
\@end
@endcode
Or it can be an NSView object.
If you want to use it along with Qt4 see the QMacCocoaViewContainer. Then
the following code should work:
@begincode
NSView *video = [[NSView alloc] init];
QMacCocoaViewContainer *container = new QMacCocoaViewContainer(video, parent);
L{libvlc_media_player_set_nsobject}(mp, video);
[video release];
@endcode
You can find a live example in VLCVideoView in VLCKit.framework.
@param p_mi: the Media Player.
@param drawable: the drawable that is either an NSView or an object following the VLCOpenGLVideoViewEmbedding protocol.
'''
f = _Cfunctions.get('libvlc_media_player_set_nsobject', None) or \
_Cfunction('libvlc_media_player_set_nsobject', ((1,), (1,),), None,
None, MediaPlayer, ctypes.c_void_p)
return f(p_mi, drawable)
def libvlc_media_player_get_nsobject(p_mi):
'''Get the NSView handler previously set with L{libvlc_media_player_set_nsobject}().
@param p_mi: the Media Player.
@return: the NSView handler or 0 if none where set.
'''
f = _Cfunctions.get('libvlc_media_player_get_nsobject', None) or \
_Cfunction('libvlc_media_player_get_nsobject', ((1,),), None,
ctypes.c_void_p, |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-11-28 13:35
from __future__ import unicode_literals
from django.conf import settings
from django. | db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUT | H_USER_MODEL),
('accounts', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='AccountRules',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('permissions', models.CharField(choices=[(b'A', b'Administration'), (b'W', b'Read/write'), (b'R', b'Read')], max_length=1)),
],
),
migrations.DeleteModel(
name='InvitationRequest',
),
migrations.AlterModelOptions(
name='account',
options={'ordering': ('create', 'name'), 'verbose_name': 'Account'},
),
migrations.RemoveField(
model_name='account',
name='user',
),
migrations.AlterField(
model_name='account',
name='create',
field=models.DateField(auto_now_add=True, verbose_name='Creation date'),
),
migrations.AddField(
model_name='accountrules',
name='account',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='accounts.Account'),
),
migrations.AddField(
model_name='accountrules',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='account',
name='users',
field=models.ManyToManyField(related_name='account', through='accounts.AccountRules', to=settings.AUTH_USER_MODEL),
),
]
|
'invalid'),
id=11)))
self.assertEquals(self.server.transport.value(),
str(pureldap.LDAPMessage(
pureldap.LDAPBindResponse(
resultCode=ldaperrors.LDAPProtocolError.resultCode,
errorMessage='Version 4 not supported'),
id=11)))
def test_unbind(self):
self.server.dataReceived(str(pureldap.LDAPMessage(pureldap.LDAPUnbindRequest(), id=7)))
self.assertEquals(self.server.transport.value(),
'')
def test_search_outOfTree(self):
self.server.dataReceived(str(pureldap.LDAPMessage(
pureldap.LDAPSearchRequest(
baseObject='dc=invalid',
), id=2)))
self.assertEquals(self.server.transport.value(),
str(pureldap.LDAPMessage(
pureldap.LDAPSearchResultDone(resultCode=ldaperrors.LDAPNoSuchObject.resultCode),
id=2)),
)
def test_search_matchAll_oneResult(self):
self.server.dataReceived(str(pureldap.LDAPMessage(
pureldap.LDAPSearchRequest(
baseObject='cn=thingie,ou=stuff,dc=example,dc=com',
), id=2)))
self.assertEquals(self.server.transport.value(),
str(pureldap.LDAPMessage(
pureldap.LDAPSearchResultEntry(
objectName='cn=thingie,ou=stuff,dc=example,dc=com',
attributes=[ ('objectClass', ['a', 'b']),
('cn', ['thingie']),
]),
id=2))
+ str(pureldap.LDAPMessage(
pureldap.LDAPSearchResultDone(resultCode=0),
id=2)),
)
def test_search_matchAll_manyResults(self):
self.server.dataReceived(str(pureldap.LDAPMessage(
pureldap.LDAPSearchRequest(
baseObject='ou=stuff,dc=example,dc=com',
), id=2)))
self.assertEquals(self.server.transport.value(),
str(pureldap.LDAPMessage(
pureldap.LDAPSearchResultEntry(
objectName='ou=stuff,dc=example,dc=com',
attributes=[ ('objectClass', ['a', 'b']),
('ou', ['stuff']),
]),
id=2))
+ str(pureldap.LDAPMessage(
pureldap.LDAPSearchResultEntry(
objectName='cn=another,ou=stuff,dc=example,dc=com',
attributes=[ ('objectClass', ['a', 'b']),
('cn', ['another']),
]),
id=2))
+ str(pureldap.LDAPMessage(
pureldap.LDAPSearchResultEntry(
objectName='cn=thingie,ou=stuff,dc=example,dc=com',
attributes=[ ('objectClass', ['a', 'b']),
('cn', ['thingie']),
]),
id=2))
+ str(pureldap.LDAPMessage(
pureldap.LDAPSearchResultDone(resultCode=0),
id=2)),
)
def test_search_scope_oneLevel(self):
self.server.dataReceived(str(pureldap.LDAPMessage(
pureldap.LDAPSearchRequest(
baseObject='ou=stuff,dc=example,dc=com',
scope=pureldap.LDAP_SCOPE_singleLevel,
), id=2)))
self.assertEquals(self.server.transport.value(),
str(pureldap.LDAPMessage(
pureldap.LDAPSearchResultEntry(
objectName='cn=thingie,ou=stuff,dc=example,dc=com',
attributes=[ ('objectClass', ['a', 'b']),
('cn', ['thingie']),
]),
id=2))
+ str(pureldap.LDAPMessage(
pureldap.LDAPSearchResultEntry(
objectName='cn=another,ou=stuff,dc=example,dc=com',
attributes=[ ('objectClass', ['a', 'b']),
('cn', ['another']),
]),
id=2))
+ str(pureldap.LDAPMessage(
pureldap.LDAPSearchResultDone(resultCode=0),
id=2)),
)
def test_search_scope_wholeSubtree(self):
self.server.dataReceived(str(pureldap.LDAPMessage(
pureldap.LDAPSearchRequest(
baseObject='ou=stuff,dc=example,dc=com',
scope=pureldap.LDAP_SCOPE_wholeSubtree,
), id=2)))
self.assertEquals(self.server.transport.value(),
str(pureldap.LDAPMessage(
pureldap.LDAPSearchResultEntry(
objectName='ou=stuff,dc=example,dc=com',
attributes=[ ('objectClass', ['a', 'b']),
('ou', ['stuff']),
]),
id=2))
+ str(pureldap.LDAPMessage(
pureldap.LDAPSearchResultEntry(
objectName='cn=another,ou=stuff,dc=example,dc=com',
attributes=[ ('objectClass', ['a', 'b']),
('cn', ['another']),
]),
id=2))
+ str(pureldap.LDAPMessage(
pureldap.LDAPSearchResultEntry(
objectName='cn=thingie,ou=stuff,dc=example,dc=com',
attributes=[ ('objectClass', ['a', 'b']),
('cn', ['thingie']),
]),
id=2))
+ str(pureldap.LDAPMessage(
pureldap.LDAPSearchResultDone(resultCode=0),
id=2)),
)
def test_search_scope_baseObject(self):
self.server.dataReceived(str(pureldap.LDAPMessage(
pureldap.LDAPSearchRequest(
baseObject='ou=stuff,dc=example,dc=com',
scope=pureldap.LDAP_SCOPE_baseObject,
), id=2)))
self.assertEquals(self.server.transport.value(),
str(pureldap.LDAPMessage(
pureldap.LDAPSearchResultEntry(
objectName='ou=stuff,dc=example,dc=com',
attributes=[ ('objectClass', ['a', 'b']),
('ou', ['stuff']),
]),
id=2))
+ str(pureldap.LDAPMessage(
pureldap.LDAPSearchResultDone(resultCode=0),
id=2)),
)
def test_rootDSE(self):
self.server.dataReceived(str(pureldap.LDAPMessage(
pureldap.LDAPSearchRequest(
baseObject='',
scope=pureldap.LDAP_SCOPE_baseObject,
filter=pureldap.LDAPFilter_present('objectClass'),
| ), id=2)))
self.assertEquals(self.server.transport.value(),
str(pureldap.LDAPMessage(
pureldap.LDAPSearchResultEntry(
objectName='',
attributes=[ ('supportedLDAPVersion', ['3']),
('namingContexts', ['dc=example,dc=com']),
('supportedExtension', [
pureldap.LDAPPasswordModifyRequest.oid,
]),
| ]),
id=2))
+ str(pureldap.LDAPMessage(
pureldap.LDAPSearchResultDone(resultCode=ldaperrors.Success.resultCode),
id=2)),
)
def test_delete(self):
self.server.dataReceived(str(pureldap.LDAPMessage(
pureldap.LDAPDelRequest(str(self.thingie.dn)), id=2)))
self.assertEquals(self.server.transport.value(),
str(pureldap.LDAPMessage(
pureldap.LDAPDelResponse(resultCode=0),
id=2)),
)
d = self.stuff.children()
d.addCallback(self.assertEquals, [self.another])
return d
def test_add_success(self):
dn = 'cn=new,ou=stuff,dc=example,dc=com'
self.server.dataReceived(str(pureldap.LDAPMessage(
pureldap.LDAPAddRequest(entry=dn,
attributes=[
(pureldap.LDAPAttributeDescription("objectClass"),
pureber.BERSet(value=[
pureldap.LDAPAttributeValue('some |
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""'logging sinks update' command."""
from googlecloudsdk.api_lib.logging import util
from googlecloudsdk.calliope import base
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.core import list_printer
from googlecloudsdk.core import log
class Update(base.Command):
"""Updates a sink."""
@staticmethod
def Args(parser):
"""Register flags for this command."""
parser.add_argument(
'sink_name', help='The name of the sink to update.')
parser.add_argument(
'destination', nargs='?',
help=('A new destination for the sink. '
'If omitted, the sink\'s existing destination is unchanged.'))
parser.add_argument(
'--log-filter', required=False,
help=('A new filter expression for the sink. '
'If omitted, the sink\' | s existing filter (if any) is unchanged.'))
parser.add_argument(
'--output-version-format', required=False,
| help=('Format of the log entries being exported. Detailed information: '
'https://cloud.google.com/logging/docs/api/introduction_v2'),
choices=('V1', 'V2'))
def GetLogSink(self):
"""Returns a log sink specified by the arguments."""
client = self.context['logging_client_v1beta3']
return client.projects_logs_sinks.Get(
self.context['sink_reference'].Request())
def GetLogServiceSink(self):
"""Returns a log service sink specified by the arguments."""
client = self.context['logging_client_v1beta3']
return client.projects_logServices_sinks.Get(
self.context['sink_reference'].Request())
def GetProjectSink(self):
"""Returns a project sink specified by the arguments."""
# Use V2 logging API for project sinks.
client = self.context['logging_client_v2beta1']
messages = self.context['logging_messages_v2beta1']
sink_ref = self.context['sink_reference']
return client.projects_sinks.Get(
messages.LoggingProjectsSinksGetRequest(
projectsId=sink_ref.projectsId, sinksId=sink_ref.sinksId))
def UpdateLogSink(self, sink_data):
"""Updates a log sink specified by the arguments."""
client = self.context['logging_client_v1beta3']
messages = self.context['logging_messages_v1beta3']
sink_ref = self.context['sink_reference']
return client.projects_logs_sinks.Update(
messages.LoggingProjectsLogsSinksUpdateRequest(
projectsId=sink_ref.projectsId, logsId=sink_ref.logsId,
sinksId=sink_data['name'], logSink=messages.LogSink(**sink_data)))
def UpdateLogServiceSink(self, sink_data):
"""Updates a log service sink specified by the arguments."""
client = self.context['logging_client_v1beta3']
messages = self.context['logging_messages_v1beta3']
sink_ref = self.context['sink_reference']
return client.projects_logServices_sinks.Update(
messages.LoggingProjectsLogServicesSinksUpdateRequest(
projectsId=sink_ref.projectsId,
logServicesId=sink_ref.logServicesId, sinksId=sink_data['name'],
logSink=messages.LogSink(**sink_data)))
def UpdateProjectSink(self, sink_data):
"""Updates a project sink specified by the arguments."""
# Use V2 logging API for project sinks.
client = self.context['logging_client_v2beta1']
messages = self.context['logging_messages_v2beta1']
sink_ref = self.context['sink_reference']
# Change string value to enum.
sink_data['outputVersionFormat'] = getattr(
messages.LogSink.OutputVersionFormatValueValuesEnum,
sink_data['outputVersionFormat'])
return client.projects_sinks.Update(
messages.LoggingProjectsSinksUpdateRequest(
projectsId=sink_ref.projectsId, sinksId=sink_data['name'],
logSink=messages.LogSink(**sink_data)))
@util.HandleHttpError
def Run(self, args):
"""This is what gets called when the user runs this command.
Args:
args: an argparse namespace. All the arguments that were provided to this
command invocation.
Returns:
The updated sink with its new destination.
"""
util.CheckSinksCommandArguments(args)
# One of the flags is required to update the sink.
# log_filter can be an empty string, so check explicitly for None.
if not (args.destination or args.log_filter is not None or
args.output_version_format):
raise exceptions.ToolException(
'[destination], --log-filter or --output-version-format is required')
# Calling Update on a non-existing sink creates it.
# We need to make sure it exists, otherwise we would create it.
if args.log:
sink = self.GetLogSink()
elif args.service:
sink = self.GetLogServiceSink()
else:
sink = self.GetProjectSink()
# Only update fields that were passed to the command.
if args.destination:
destination = args.destination
else:
destination = sink.destination
if args.log_filter is not None:
log_filter = args.log_filter
else:
log_filter = sink.filter
sink_ref = self.context['sink_reference']
sink_data = {'name': sink_ref.sinksId, 'destination': destination,
'filter': log_filter}
if args.log:
result = util.TypedLogSink(self.UpdateLogSink(sink_data),
log_name=args.log)
elif args.service:
result = util.TypedLogSink(self.UpdateLogServiceSink(sink_data),
service_name=args.service)
else:
if args.output_version_format:
sink_data['outputVersionFormat'] = args.output_version_format
else:
sink_data['outputVersionFormat'] = sink.outputVersionFormat.name
result = util.TypedLogSink(self.UpdateProjectSink(sink_data))
log.UpdatedResource(sink_ref)
return result
def Display(self, unused_args, result):
"""This method is called to print the result of the Run() method.
Args:
unused_args: The arguments that command was run with.
result: The value returned from the Run() method.
"""
list_printer.PrintResourceList('logging.typedSinks', [result])
util.PrintPermissionInstructions(result.destination)
Update.detailed_help = {
'DESCRIPTION': """\
Changes the *[destination]* or *--log-filter* associated with a sink.
If you don't include one of the *--log* or *--log-service* flags,
this command updates a project sink.
The new destination must already exist and Cloud Logging must have
permission to write to it.
Log entries are exported to the new destination immediately.
""",
'EXAMPLES': """\
To only update a project sink filter, run:
$ {command} my-sink --log-filter='metadata.severity>=ERROR'
Detailed information about filters can be found at:
https://cloud.google.com/logging/docs/view/advanced_filters
""",
}
|
#!/usr/bin/env python
# Support a YAML file hosts.yml as external inventory in Ansible
# Copyright (C) 2012 Jeroen Hoekx <jeroen@hoekx.be>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
File format:
- <hostname>
or
- host: <hostname>
vars:
- myvar: value
- myvbr: vblue
groups:
- mygroup1
- mygroup2
or
- group: <groupname>
vars:
- groupvar: value
hosts:
- myhost1
- myhost2
groups:
- subgroup1
- subgroup2
Any statement except the first definition is optional.
"""
import json
import os
import sys
from optparse import OptionParser
import yaml
class Host():
def __init__(self, name):
self.name = name
self.groups = []
self.vars = {}
def __repr__(self):
return "Host('%s')"%(self.name)
def set_variable(self, key, value):
self.vars[key] = value
def get_variables(self):
result = {}
for group in self.groups:
for k,v in group.get_variables().items():
result[k] = v
for k, v in self.vars.items():
result[k] = v
return result
def add_group(self, group):
if group not in self.groups:
self.groups.append(group)
class Group():
def __init__(self, name):
self.name = name
self.hosts = []
self.vars = {}
self.subgroups = []
self.parents = | []
def __repr__(self):
return "Group('%s')"%(self.name)
def get_hosts(self):
""" List all hosts in this group, including subgroups """
result = [ host for host in self.hosts ]
for group in self.subgroups:
for host in group.get_hosts():
if host not in result: |
result.append(host)
return result
def add_host(self, host):
if host not in self.hosts:
self.hosts.append(host)
host.add_group(self)
def add_subgroup(self, group):
if group not in self.subgroups:
self.subgroups.append(group)
group.add_parent(self)
def add_parent(self, group):
if group not in self.parents:
self.parents.append(group)
def set_variable(self, key, value):
self.vars[key] = value
def get_variables(self):
result = {}
for group in self.parents:
result.update( group.get_variables() )
result.update(self.vars)
return result
def find_group(name, groups):
for group in groups:
if name == group.name:
return group
def parse_vars(vars, obj):
### vars can be a list of dicts or a dictionary
if type(vars) == dict:
for k,v in vars.items():
obj.set_variable(k, v)
elif type(vars) == list:
for var in vars:
k,v = var.items()[0]
obj.set_variable(k, v)
def parse_yaml(yaml_hosts):
groups = []
all_hosts = Group('all')
ungrouped = Group('ungrouped')
groups.append(ungrouped)
### groups first, so hosts can be added to 'ungrouped' if necessary
subgroups = []
for entry in yaml_hosts:
if 'group' in entry and type(entry)==dict:
group = find_group(entry['group'], groups)
if not group:
group = Group(entry['group'])
groups.append(group)
if 'vars' in entry:
parse_vars(entry['vars'], group)
if 'hosts' in entry:
for host_name in entry['hosts']:
host = None
for test_host in all_hosts.get_hosts():
if test_host.name == host_name:
host = test_host
break
else:
host = Host(host_name)
all_hosts.add_host(host)
group.add_host(host)
if 'groups' in entry:
for subgroup in entry['groups']:
subgroups.append((group.name, subgroup))
for name, sub_name in subgroups:
group = find_group(name, groups)
subgroup = find_group(sub_name, groups)
group.add_subgroup(subgroup)
for entry in yaml_hosts:
### a host is either a dict or a single line definition
if type(entry) in [str, unicode]:
for test_host in all_hosts.get_hosts():
if test_host.name == entry:
break
else:
host = Host(entry)
all_hosts.add_host(host)
ungrouped.add_host(host)
elif 'host' in entry:
host = None
no_group = False
for test_host in all_hosts.get_hosts():
### all hosts contains only hosts already in groups
if test_host.name == entry['host']:
host = test_host
break
else:
host = Host(entry['host'])
all_hosts.add_host(host)
no_group = True
if 'vars' in entry:
parse_vars(entry['vars'], host)
if 'groups' in entry:
for test_group in groups:
if test_group.name in entry['groups']:
test_group.add_host(host)
all_hosts.add_host(host)
no_group = False
if no_group:
ungrouped.add_host(host)
return groups, all_hosts
parser = OptionParser()
parser.add_option('-l', '--list', default=False, dest="list_hosts", action="store_true")
parser.add_option('-H', '--host', default=None, dest="host")
parser.add_option('-e', '--extra-vars', default=None, dest="extra")
options, args = parser.parse_args()
base_dir = os.path.dirname(os.path.realpath(__file__))
hosts_file = os.path.join(base_dir, 'hosts.yml')
with open(hosts_file) as f:
yaml_hosts = yaml.safe_load( f.read() )
groups, all_hosts = parse_yaml(yaml_hosts)
if options.list_hosts == True:
result = {}
for group in groups:
result[group.name] = [host.name for host in group.get_hosts()]
print json.dumps(result)
sys.exit(0)
if options.host is not None:
result = {}
host = None
for test_host in all_hosts.get_hosts():
if test_host.name == options.host:
host = test_host
break
result = host.get_variables()
if options.extra:
k,v = options.extra.split("=")
result[k] = v
print json.dumps(result)
sys.exit(0)
parser.print_help()
sys.exit(1)
|
te')
@test.idempotent_id('0d148aa3-d54c-4317-aa8d-42040a475e20')
def test_aggregate_create_delete(self):
# Create and delete an aggregate.
aggregate_name = data_utils.rand_name(self.aggregate_name_prefix)
aggregate = self.client.create_aggregate(name=aggregate_name)
self.addCleanup(self._try_delete_aggregate, aggregate['id'])
self.assertEqual(aggregate_name, aggregate['name'])
self.assertIsNone(aggregate['availability_zone'])
self.client.delete_aggregate(aggregate['id'])
self.client.wait_for_resource_deletion(aggregate['id'])
@test.attr(type='gate')
@test.idempotent_id('5873a6f8-671a-43ff-8838-7ce430bb6d0b')
def test_aggregate_create_delete_with_az(self):
# Create and delete an aggregate.
aggregate_name = data_utils.rand_name(self.aggregate_name_prefix)
az_name = data_utils.rand_name(self.az_name_prefix)
aggregate = self.client.create_aggregate(
name=aggregate_name, availability_zone=az_name)
self.addCleanup(self._try_delete_aggregate, aggregate['id'])
self.assertEqual(aggregate_name, aggregate['name'])
self.assertEqual(az_name, aggregate['availability_zone'])
self.client.delete_aggregate(aggregate['id'])
self.client.wait_for_resource_deletion(aggregate['id'])
@test.attr(type='gate')
@test.idempotent_id('68089c38-04b1-4758-bdf0-cf0daec4defd')
def test_aggregate_create_verify_entry_in_list(self):
# Create an aggregate and ensure it is listed.
aggregate_name = data_utils.rand_name(self.aggregate_name_prefix)
aggregate = self.client.create_aggregate(name=aggregate_name)
self.addCleanup(self.client.delete_aggregate, aggregate['id'])
aggregates = self.client.list_aggregates()
self.assertIn((aggregate['id'], aggregate['availability_zone']),
map(lambda x: (x['id'], x['availability_zone']),
aggregates))
@test.attr(type='gate')
@test.idempotent_id('36ec92ca-7a73-43bc-b920-7531809e8540')
def test_aggregate_create_update_metadata_get_details(self):
# Create an aggregate and ensure its details are returned.
aggregate_name = data_utils.rand_name(self.aggregate_name_prefix)
aggregate = self.client.create_aggregate(name=aggregate_name)
self.addCleanup(self.client.delete_aggregate, aggregate['id'])
body = self.client.get_aggregate(aggregate['id'])
self.assertEqual(aggregate['name'], body['name'])
self.assertEqual(aggregate['availability_zone'],
body['availability_zone'])
self.assertEqual({}, body["metadata"])
| # set the metadata of the aggregate
meta = {"key": "value"}
body = self.client.set_metadata(aggregate['id'], meta)
self.assertEqual(meta, body["metadata"])
# verify the metadata has been set
body = self.client.get_aggregate(aggregate['id'])
self.assertEqual(meta, body["metadata"])
@test.attr(type='gate')
@test.idempotent_id('4d2b2004-40fa-40a1-aab2-66f4dab81beb')
de | f test_aggregate_create_update_with_az(self):
# Update an aggregate and ensure properties are updated correctly
aggregate_name = data_utils.rand_name(self.aggregate_name_prefix)
az_name = data_utils.rand_name(self.az_name_prefix)
aggregate = self.client.create_aggregate(
name=aggregate_name, availability_zone=az_name)
self.addCleanup(self.client.delete_aggregate, aggregate['id'])
self.assertEqual(aggregate_name, aggregate['name'])
self.assertEqual(az_name, aggregate['availability_zone'])
self.assertIsNotNone(aggregate['id'])
aggregate_id = aggregate['id']
new_aggregate_name = aggregate_name + '_new'
new_az_name = az_name + '_new'
resp_aggregate = self.client.update_aggregate(aggregate_id,
new_aggregate_name,
new_az_name)
self.assertEqual(new_aggregate_name, resp_aggregate['name'])
self.assertEqual(new_az_name, resp_aggregate['availability_zone'])
aggregates = self.client.list_aggregates()
self.assertIn((aggregate_id, new_aggregate_name, new_az_name),
map(lambda x:
(x['id'], x['name'], x['availability_zone']),
aggregates))
@test.attr(type='gate')
@test.idempotent_id('c8e85064-e79b-4906-9931-c11c24294d02')
def test_aggregate_add_remove_host(self):
# Add an host to the given aggregate and remove.
self.useFixture(fixtures.LockFixture('availability_zone'))
aggregate_name = data_utils.rand_name(self.aggregate_name_prefix)
aggregate = self.client.create_aggregate(name=aggregate_name)
self.addCleanup(self.client.delete_aggregate, aggregate['id'])
body = self.client.add_host(aggregate['id'], self.host)
self.assertEqual(aggregate_name, body['name'])
self.assertEqual(aggregate['availability_zone'],
body['availability_zone'])
self.assertIn(self.host, body['hosts'])
body = self.client.remove_host(aggregate['id'], self.host)
self.assertEqual(aggregate_name, body['name'])
self.assertEqual(aggregate['availability_zone'],
body['availability_zone'])
self.assertNotIn(self.host, body['hosts'])
@test.attr(type='gate')
@test.idempotent_id('7f6a1cc5-2446-4cdb-9baa-b6ae0a919b72')
def test_aggregate_add_host_list(self):
# Add an host to the given aggregate and list.
self.useFixture(fixtures.LockFixture('availability_zone'))
aggregate_name = data_utils.rand_name(self.aggregate_name_prefix)
aggregate = self.client.create_aggregate(name=aggregate_name)
self.addCleanup(self.client.delete_aggregate, aggregate['id'])
self.client.add_host(aggregate['id'], self.host)
self.addCleanup(self.client.remove_host, aggregate['id'], self.host)
aggregates = self.client.list_aggregates()
aggs = filter(lambda x: x['id'] == aggregate['id'], aggregates)
self.assertEqual(1, len(aggs))
agg = aggs[0]
self.assertEqual(aggregate_name, agg['name'])
self.assertIsNone(agg['availability_zone'])
self.assertIn(self.host, agg['hosts'])
@test.attr(type='gate')
@test.idempotent_id('eeef473c-7c52-494d-9f09-2ed7fc8fc036')
def test_aggregate_add_host_get_details(self):
# Add an host to the given aggregate and get details.
self.useFixture(fixtures.LockFixture('availability_zone'))
aggregate_name = data_utils.rand_name(self.aggregate_name_prefix)
aggregate = self.client.create_aggregate(name=aggregate_name)
self.addCleanup(self.client.delete_aggregate, aggregate['id'])
self.client.add_host(aggregate['id'], self.host)
self.addCleanup(self.client.remove_host, aggregate['id'], self.host)
body = self.client.get_aggregate(aggregate['id'])
self.assertEqual(aggregate_name, body['name'])
self.assertIsNone(body['availability_zone'])
self.assertIn(self.host, body['hosts'])
@test.attr(type='gate')
@test.idempotent_id('96be03c7-570d-409c-90f8-e4db3c646996')
def test_aggregate_add_host_create_server_with_az(self):
# Add an host to the given aggregate and create a server.
self.useFixture(fixtures.LockFixture('availability_zone'))
aggregate_name = data_utils.rand_name(self.aggregate_name_prefix)
az_name = data_utils.rand_name(self.az_name_prefix)
aggregate = self.client.create_aggregate(
name=aggregate_name, availability_zone=az_name)
self.addCleanup(self.client.delete_aggregate, aggregate['id'])
self.client.add_host(aggregate['id'], self.host)
self.addCleanup(self.client.remove_host, aggregate['id'], self.host)
server_name = data_utils.rand_name('test_server_')
admin_servers_client = self.os_adm |
import logging
import datetime
import mediacloud.api
import re
from server import mc
from server.auth import is_user_logged_in
from server.util.csv import SOURCE_LIST_CSV_METADATA_PROPS
logger = logging.getLogger(__name__)
# Basic identifying fields for a medium within a topic.
TOPIC_MEDIA_INFO_PROPS = ['media_id', 'name', 'url']
# Per-medium count metrics returned by topic media queries.
TOPIC_MEDIA_PROPS = ['story_count', 'media_inlink_count', 'inlink_count', 'outlink_count',
                     'facebook_share_count', 'simple_tweet_count']
# Aggregated URL-sharing counts (posts/channels/authors).
TOPIC_MEDIA_URL_SHARING_PROPS = ['sum_post_count', 'sum_channel_count', 'sum_author_count']
# Column order used when exporting topic media lists to CSV.
TOPIC_MEDIA_CSV_PROPS = TOPIC_MEDIA_INFO_PROPS + TOPIC_MEDIA_PROPS + TOPIC_MEDIA_URL_SHARING_PROPS + \
    SOURCE_LIST_CSV_METADATA_PROPS
def _parse_media_ids(args):
    """Pull the list of media ids out of a request's arguments.

    :param args: dict-like request arguments; may contain a 'sources[]'
        entry that is either an already-parsed list or a comma-separated
        string (possibly wrapped in square brackets).
    :return: a list of media id strings (empty if none were passed in)
    """
    media_ids = []
    if 'sources[]' in args:
        src = args['sources[]']
        if isinstance(src, str):
            # Strip any bracket characters, then split on commas. The old
            # code built and immediately discarded two intermediate values
            # (a first split and a space-joined string); this keeps only
            # the computation whose result was actually returned.
            src = re.sub(r'\[*\]*', '', src)
            media_ids = src.split(',') if len(src) > 0 else []
        else:
            media_ids = src  # already a list
    return media_ids
def _parse_collection_ids(args):
    """Pull the list of collection (tag) ids out of a request's arguments.

    :param args: dict-like request arguments; may contain a 'collections[]'
        entry that is either an already-parsed list or a comma-separated
        string (possibly wrapped in square brackets).
    :return: a list of collection id strings (empty if none were passed in)
    """
    collection_ids = []
    if 'collections[]' in args:
        coll = args['collections[]']
        if isinstance(coll, str):
            # Bug fix: split the comma-separated input itself, mirroring
            # _parse_media_ids. The old code split a *space-joined* copy on
            # commas, so multiple ids collapsed into a single list element
            # (e.g. '1,2' -> ['1 2'] instead of ['1', '2']).
            coll = re.sub(r'\[*\]*', '', coll)
            collection_ids = coll.split(',') if len(coll) > 0 else []
        else:
            collection_ids = coll
    return collection_ids
# TODO: Migrate to use mediapicker.concatenate!
# helper for topic preview queries
def concatenate_query_for_solr(solr_seed_query=None, media_ids=None, tags_ids=None):
    """Build a solr query combining a seed query with media/collection clauses.

    :param solr_seed_query: free-text solr query, or None/'' for none
    :param media_ids: list (or comma-separated string) of media ids
    :param tags_ids: list (or comma-separated string) of collection tag ids
    :return: the combined solr query string
    """
    # Bug fix: the parameters default to None but were passed straight to
    # len(), so calling with the defaults raised TypeError. Normalize them
    # to empty lists so the declared defaults actually work.
    media_ids = media_ids if media_ids is not None else []
    tags_ids = tags_ids if tags_ids is not None else []
    query = ''
    if solr_seed_query not in [None, '']:
        query = '({})'.format(solr_seed_query)
    if len(media_ids) > 0 or len(tags_ids) > 0:
        if solr_seed_query not in [None, '']:
            query += " AND ("
        else:
            query += "(*) AND ("
        # add in the media sources they specified
        if len(media_ids) > 0:
            media_ids = media_ids.split(',') if isinstance(media_ids, str) else media_ids
            query_media_ids = " ".join(map(str, media_ids))
            query_media_ids = re.sub(r'\[*\]*', '', str(query_media_ids))
            query_media_ids = " media_id:({})".format(query_media_ids)
            query += '(' + query_media_ids + ')'
        if len(media_ids) > 0 and len(tags_ids) > 0:
            query += " OR "
        # add in the collections they specified
        if len(tags_ids) > 0:
            tags_ids = tags_ids.split(',') if isinstance(tags_ids, str) else tags_ids
            query_tags_ids = " ".join(map(str, tags_ids))
            query_tags_ids = re.sub(r'\[*\]*', '', str(query_tags_ids))
            query_tags_ids = " tags_id_media:({})".format(query_tags_ids)
            query += '(' + query_tags_ids + ')'
        query += ')'
    return query
def concatenate_solr_dates(start_date, end_date):
    """Turn a pair of 'YYYY-MM-DD' strings into a solr publish-date query clause."""
    fmt = '%Y-%m-%d'
    return mediacloud.api.MediaCloud.dates_as_query_clause(
        datetime.datetime.strptime(start_date, fmt).date(),
        datetime.datetime.strptime(end_date, fmt).date())
|
javelin.create_tenants([self.fake_object['name']])
mocked_function = self.fake_client.identity.create_tenant
self.assertFalse(mocked_function.called)
def test_create_users(self):
self.fake_client.identity.get_tenant_by_name.return_value = \
self.fake_object['tenant']
self.fake_client.identity.get_user_by_username.side_effect = \
lib_exc.NotFound("user is not found")
self.useFixture(mockpatch.PatchObject(javelin, "keystone_admin",
return_value=self.fake_client))
javelin.create_users([self.fake_object])
fake_tenant_id = self.fake_object['tenant']['id']
fake_email = "%s@%s" % (self.fake_object['user'], fake_tenant_id)
mocked_function = self.fake_client.identity.create_user
mocked_function.assert_called_once_with(self.fake_object['name'],
self.fake_object['password'],
fake_tenant_id,
fake_email,
enabled=True)
def test_create_user_missing_tenant(self):
self.fake_client.identity.get_tenant_by_name.side_effect = \
lib_exc.NotFound("tenant is not found")
self.useFixture(mockpatch.PatchObject(javelin, "keystone_admin",
return_value=self.fake_client))
javelin.create_users([self.fake_object])
mocked_function = self.fake_client.identity.create_user
self.assertFalse(mocked_function.called)
def test_create_objects(self):
self.useFixture(mockpatch.PatchObject(javelin, "client_for_user",
return_value=self.fake_client))
self.useFixture(mockpatch.PatchObject(javelin, "_assign_swift_role"))
self.useFixture(mockpatch.PatchObject(javelin, "_file_contents",
return_value=self.fake_object.content))
javelin.create_objects([self.fake_object])
mocked_function = self.fake_client.containers.create_container
mocked_function.assert_called_once_with(self.fake_object['container'])
mocked_function = self.fake_client.objects.create_object
mocked_function.assert_called_once_with(self.fake_object['container'],
self.fake_object['name'],
self.fake_object.content)
def test_create_images(self):
self.fake_client.images.create_image.return_value = \
self.fake_object['body']
self.useFixture(mockpatch.PatchObject(javelin, "client_for_user",
return_value=self.fake_client))
self.useFixture(mockpatch.PatchObject(javelin, "_get_image_by_name",
return_value=[]))
self.useFixture(mockpatch.PatchObject(javelin, "_resolve_image",
return_value=(None, None)))
with mock.patch('six.moves.builtins.open', mock.mock_open(),
create=True) as open_mock:
javelin.create_images([self.fake_object])
mocked_function = self.fake_client.images.create_image
mocked_function.assert_called_once_with(self.fake_object['name'],
self.fake_object['format'],
self.fake_object['format'])
mocked_function = self.fake_client.images.store_image_file
fake_image_id = self.fake_object['body'].get('id')
mocked_function.assert_called_once_with(fake_image_id, open_mock())
def test_create_networks(self):
self.fake_client.networks.list_networks.return_value = {
'networks': []}
self.useFixture(mockpatch.PatchObject(javelin, "client_for_user",
return_value=self.fake_client))
javelin.create_networks([self.fake_object])
mocked_function = self.fake_client.networks.create_network
mocked_function.assert_called_once_with(name=self.fake_object['name'])
def test_create_subnet(self):
fake_network = self.fake_object['network']
self.useFixture(mockpatch.PatchObject(javelin, "client_for_user",
return_value=self.fake_client))
self.useFixture(mockpatch.PatchObject(javelin, "_get_resource_by_name",
return_value=fake_network))
fake_netaddr = mock.MagicMock()
self.useFixture(mockpatch.PatchObject(javelin, "netaddr",
return_value=fake_netaddr))
fake_version = javelin.netaddr.IPNetwork().version
javelin.create_subnets([self.fake_object])
mocked_function = self.fake_client.networks.create_subnet
mocked_function.assert_called_once_with(network_id=fake_network['id'],
cidr=self.fake_object['range'],
name=self.fake_object['name'],
ip_version=fake_version)
def test_create_volumes(self):
self.useFixture(mockpatch.PatchObject(javelin, "client_for_user",
return_value=self.fake_client))
self.useFixture(mockpatch.PatchObject(javelin, "_get_volume_by_name",
return_value=None))
self.fake_client.volumes.create_volume.return_value = \
self.fake_object.body
javelin.create_volumes([self.fake_object])
mocked_function = self.fake_client.volumes.create_volume
mocked_function.assert_called_once_with(
size=self.fake_object['gb'],
display_name=self.fake_object['name'])
mocked_function = self.fake_client.volumes.wait_for_volume_status
mocked_function.assert_called_once_with(
self.fake_object.body['volume']['id'],
'available')
def test_create_volume_existing(self):
self.useFixture(mockpatch.PatchObject(javelin, "client_for_user",
return_value=self.fake_client))
self.useFixture(mockpatch.PatchObject(javelin, "_get_volume_by_name",
return_value=self.fake_object))
self.fake_client.volumes.create_volume.return_value = \
self.fake_object.body
javelin.create_volumes([self.fake_object])
mocked_function = self.fake_client.volumes.create_volume
self.assertFalse(mocked_function.called)
mocked_function = self.fake_client.volumes.wait_for_volume_status
self.assertFalse(mocked_function.called)
def test_create_router(self):
self.fake_client.networks.list_routers.return_value = {'routers': []}
self.useFixture(mockpatch.PatchObject(javelin, "client_for_user",
return_value=self.fake_client))
javelin.create_routers([self.fake_object])
mocked_function = self.fake_client.networks.create_router
mocked_function.assert_called_once_with(self.fake_object['name'])
def test_create_router_existing(self):
self.fake_client.networks.list_routers.return_value = {
'routers': [self.fake_object]}
self.useFixture(mockpatch.PatchObject(javelin, "client_for_user",
return_value=self.fake_client))
jav | elin.create_routers([self.fake_object])
mocked_function = self.fake_client.networks.create_router
self.assertFalse(mocked_function.called)
def test_create_secgroup(self):
self.useFixture(mockpatch.PatchObject(javelin, "client_for_user",
| return_value=self.fake_client))
self.fake_client.secgroups.list_security_groups.return_value = (
|
# -*- coding: utf-8 -*-
# Generated by Django 1. | 10.4 on 2018-03-05 05:39
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds the 'pycontw-2018' conference to the sponsor.conference choices
    # and makes it the new default for the slug field.
    dependencies = [
        ('sponsors', '0012_sponsor_level_smallint'),
    ]
    operations = [
        migrations.AlterField(
            model_name='sponsor',
            name='conference',
            field=models.SlugField(choices=[('pycontw-2016', 'PyCon Taiwan 2016'), ('pycontw-2017', 'PyCon Taiwan 2017'), ('pycontw-2018', 'PyCon Taiwan 2018')], default='pycontw-2018', verbose_name='conference'),
        ),
    ]
|
###############################################################################
# ilastik: interactive learning and segmentation toolkit
#
# Copyright (C) 2011-2014, the ilastik developers
# <team@ilastik.org>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version | 2
# of the License, or (at your option) any later version.
#
# In addition, as a special exception, the copyright holders of
# ilastik give you permission to combine ilastik with applets,
# workflows and plugins which are not covered under the GNU
# General Public License.
#
# See the LICENSE file for details. License information is also av | ailable
# on the ilastik web site at:
# http://ilastik.org/license.html
###############################################################################
import sys
from functools import partial
from PyQt4.QtGui import QApplication
import threading
# This function was copied from: http://bugs.python.org/issue1230540
# It is necessary because sys.excepthook doesn't work for unhandled exceptions in other threads.
def install_thread_excepthook():
    """
    Workaround for sys.excepthook thread bug
    (https://sourceforge.net/tracker/?func=detail&atid=105470&aid=1230540&group_id=5470).
    Call once from __main__ before creating any threads.
    If using psyco, call psycho.cannotcompile(threading.Thread.run)
    since this replaces a new-style class method.
    """
    import sys
    original_run = threading.Thread.run

    def run_with_excepthook(*args, **kwargs):
        # Delegate to the original run(), but route any unhandled exception
        # through sys.excepthook instead of the default per-thread printer.
        try:
            original_run(*args, **kwargs)
        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            sys.excepthook(*sys.exc_info())

    threading.Thread.run = run_with_excepthook
#python launch_workflow.py --workflow=PixelClassificationWorkflow --playback_script=$f --playback_speed=2.0 --exit_on_failure --exit_on_success
#sys.argv.append( "/Users/bergs/MyProject.ilp" )

## EXAMPLE PLAYBACK TESTING ARGS
#sys.argv.append( "--playback_script=/Users/bergs/Documents/workspace/ilastik-meta/ilastik/tests/event_based/recording-20130450-2111.py" )
#sys.argv.append( "--playback_speed=3" )
#sys.argv.append( "--exit_on_failure" )
sys.argv.append( "--workflow=PixelClassificationWorkflow" )

import argparse
parser = argparse.ArgumentParser( description="Ilastik Pixel Classification Workflow" )
parser.add_argument('--playback_script', help='An event recording to play back after the main window has opened.', required=False)
parser.add_argument('--playback_speed', help='Speed to play the playback script.', default=0.5, type=float)
parser.add_argument('--exit_on_failure', help='Immediately call exit(1) if an unhandled exception occurs.', action='store_true', default=False)
parser.add_argument('--exit_on_success', help='Quit the app when the playback is complete.', action='store_true', default=False)
parser.add_argument('--project', nargs='?', help='A project file to open on startup.')
# Bug fix: the help text was copy-pasted from --project; --workflow names the
# workflow class to launch, not a project file.
parser.add_argument('--workflow', help='The name of the workflow class to launch.')

parsed_args = parser.parse_args()

# Callbacks to run once the shell window is up.
init_funcs = []

# Start the GUI
if parsed_args.project is not None:
    def loadProject(shell):
        shell.openProjectFile(parsed_args.project)
    init_funcs.append( loadProject )

onfinish = None
if parsed_args.exit_on_success:
    onfinish = QApplication.quit

if parsed_args.playback_script is not None:
    from ilastik.utility.gui.eventRecorder import EventPlayer
    def play_recording(shell):
        player = EventPlayer(parsed_args.playback_speed)
        player.play_script(parsed_args.playback_script, onfinish)
    # play_recording takes no pre-bound arguments, so the partial() wrapper
    # the original code used was a no-op; append the function directly.
    init_funcs.append( play_recording )

if parsed_args.exit_on_failure:
    old_excepthook = sys.excepthook
    def print_exc_and_exit(*args):
        old_excepthook(*args)
        sys.stderr.write("Exiting early due to an unhandled exception. See error output above.\n")
        QApplication.exit(1)
    sys.excepthook = print_exc_and_exit
    install_thread_excepthook()

# Import all possible workflows so they are registered with the base class
import ilastik.workflows

# Ask the base class to give us the workflow type
from ilastik.workflow import Workflow
workflowClass = Workflow.getSubclass(parsed_args.workflow)

# Launch the GUI
from ilastik.shell.gui.startShellGui import startShellGui
sys.exit( startShellGui( workflowClass, *init_funcs ) )
|
from __future__ import division
import encoder
import socket_class as socket
import threading
import time
import sys
# Shared encoder tick counts, updated by the GPIO callbacks below and
# streamed to the remote host by the sendData thread.
rightC,leftC = (0,0)
s = None  # socket, created in __main__
# Default connection target; may be overridden by command-line arguments.
IP = "10.42.0.1"
host = 50679
class sendData(threading.Thread):
    """Background thread that periodically streams the encoder counts."""
    def __init__(self,waitTime):
        # waitTime: seconds to sleep between transmissions
        self.waitTime = waitTime
        threading.Thread.__init__(self)
    def run(self):
        #send info every waitTime
        global s
        global rightC,leftC
        conf = "OK"
        while True:
            # only send when the previous message was acknowledged with "OK"
            if(conf == "OK"):
                s.send(str(rightC)+","+str(leftC))
                conf = s.recv(10)
                print "sent",str(rightC),",",str(leftC)
            time.sleep(self.waitTime)
def right():
    # Callback for a tick on the right wheel encoder.
    global rightC
    rightC += 1
    print "right: ",rightC,"\t","left :",leftC
def left():
    # Callback for a tick on the left wheel encoder.
    global leftC
    leftC += 1
    print "right: ",rightC,"\t","left :",leftC
def | checkArgs():
global IP,host
if(len(sys.argv)!=1):
IP = sys.argv[1]
host = sys.argv[2]
if __name__ == "__main__":
    """if 2 arguments are passed in overwrite IP and port number to those values else use IP = 10.42.0.1 and 50679"""
    encoder.encoderSetup()
    if len(sys.argv) in (1,3):
        checkArgs()
        s = socket.initSocket()
        # retry until the server accepts the connection
        while True:
            try:
                socket.connect(s,IP,host)
                break
            except:
                pass
        #start thread to send info in background
        t = sendData(.01)
        t.daemon = True
        t.start()
        #read encoder values
        encoder.getEncoder(right,left)
    else:
        # wrong argument count: just read the encoders locally, no streaming
        encoder.getEncoder(right,left)
|
import numpy as np
import scipy.sparse as sp
from scipy.optimize import fmin_l_bfgs_b
from Orange.classification import Learner, Model
__all__ = ["LinearRegressionLearner"]
class LinearRegressionLearner(Learner):
    def __init__(self, lambda_=1.0, preprocessors=None, **fmin_args):
        '''L2 regularized linear regression (a.k.a Ridge regression)

        Minimizes the linear least squares penalty plus an L2 penalty on
        the coefficients with scipy's L-BFGS-B optimizer. When using this
        model you should:

        - Choose a suitable regularization parameter lambda_
        - Continuize all discrete attributes
        - Consider appending a column of ones to the dataset (intercept term)
        - Transform the dataset so that the columns are on a similar scale

        :param lambda_: the regularization parameter. Higher values of
            lambda_ force the coefficients to be small.
        :type lambda_: float

        Examples
        --------
            import numpy as np
            from Orange.data import Table
            from Orange.classification.linear_regression import LinearRegressionLearner

            data = Table('housing')
            data.X = (data.X - np.mean(data.X, axis=0)) / np.std(data.X, axis=0)  # normalize
            data.X = np.hstack((data.X, np.ones((data.X.shape[0], 1))))  # append ones
            m = LinearRegressionLearner(lambda_=1.0)
            c = m(data)      # fit
            print(c(data))   # predict
        '''
        super().__init__(preprocessors=preprocessors)
        self.lambda_ = lambda_
        self.fmin_args = fmin_args

    def cost_grad(self, theta, X, y):
        # Residuals of the current fit.
        residual = X.dot(theta) - y
        n = X.shape[0]
        # Cost: (||r||^2 + lambda * ||theta||^2) / (2n)
        cost = (residual.dot(residual) + self.lambda_ * theta.dot(theta)) / (2.0 * n)
        # Gradient: (X^T r + lambda * theta) / n
        grad = (X.T.dot(residual) + self.lambda_ * theta) / n
        return cost, grad

    def fit(self, X, Y, W):
        # Only single-target, fully-known data is supported.
        if Y.shape[1] > 1:
            raise ValueError('Linear regression does not support '
                             'multi-target classification')
        if np.isnan(np.sum(X)) or np.isnan(np.sum(Y)):
            raise ValueError('Linear regression does not support '
                             'unknown values')

        theta0 = np.zeros(X.shape[1])
        theta, cost, ret = fmin_l_bfgs_b(self.cost_grad, theta0,
                                         args=(X, Y.ravel()), **self.fmin_args)
        return LinearRegressionModel(theta)
class LinearRegressionModel(Model):
    """Linear model produced by LinearRegressionLearner: y_hat = X . theta."""

    def __init__(self, theta):
        # theta: 1-D array of fitted coefficients, one per column of X.
        self.theta = theta

    def predict(self, X):
        # Predictions are the matrix-vector product of data and coefficients.
        return X.dot(self.theta)
if __name__ == '__main__':
    import Orange.data
    import sklearn.cross_validation as skl_cross_validation
    np.random.seed(42)
    # Central-difference numerical gradient, used for gradient checking.
    def numerical_grad(f, params, e=1e-4):
        grad = np.zeros_like(params)
        perturb = np.zeros_like(params)
        for i in range(params.size):
            perturb[i] = e
            j1 = f(params - perturb)
            j2 = f(params + perturb)
            grad[i] = (j2 - j1) / (2.0 * e)
            perturb[i] = 0
        return grad
    d = Orange.data.Table('housing')
    # Append a column of ones as the intercept term, then shuffle rows.
    d.X = np.hstack((d.X, np.ones((d.X.shape[0], 1))))
    d.shuffle()
    # m = LinearRegressionLearner(lambda_=1.0)
    # print(m(d)(d))
    # # gradient check
    # m = LinearRegressionLearner(lambda_=1.0)
    # theta = np.random.randn(d.X.shape[1])
    #
    # ga = m.cost_grad(theta, d.X, d.Y.ravel())[1]
    # gm = numerical_grad(lambda t: m.cost_grad(t, d.X, d.Y.ravel())[0], theta)
    #
    # print(np.sum((ga - gm)**2))
    # Cross-validated mean squared error for a range of lambda_ values.
    for lambda_ in (0.01, 0.03, 0.1, 0.3, 1, 3):
        m = LinearRegressionLearner(lambda_=lambda_)
        scores = []
        for tr_ind, te_ind in skl_cross_validation.KFold(d.X.shape[0]):
            s = np.mean((m(d[tr_ind])(d[te_ind]) - d[te_ind].Y.ravel())**2)
            scores.append(s)
        print('{:5.2f} {}'.format(lambda_, np.mean(scores)))
    # Baselines: unregularized fit on all data, and predicting the mean.
    m = LinearRegressionLearner(lambda_=0)
    print('test data', np.mean((m(d)(d) - d.Y.ravel())**2))
    print('majority', np.mean((np.mean(d.Y.ravel()) - d.Y.ravel())**2))
|
f tearDownClass(cls):
cls.selenium.quit()
super(BasicTestCase, cls).tearDownClass()
def setUpBasic(self, prop, base_url, entity):
self.property = prop
self.entity = entity
self.base_url = base_url
self.verificationErrors = []
# Login test user
authenticate("opmtest", "secretpass", self.live_server_url+"/",
self.selenium, self.client)
driver = self.selenium
driver.get(self.live_server_url + "/")
driver.find_element_by_xpath("//li/a[text()='"+self.property+"']").click()
driver.get(self.live_server_url + self.base_url)
driver.find_element_by_link_text(self.entity).click()
super(BasicTestCase, self).setUp()
def tearDown(self):
self.assertEqual([], self.verificationErrors)
# Logout test user
driver = self.selenium
driver.find_element_by_link_text("Logout").click()
super(BasicTestCase, self).tearDown()
def check_highlight_property(self):
try: self.assertTrue(self.is_element_present(By.XPATH, "//a[contains(@style,'yellow') and .//text()='"+self.entity+"']"))
except AssertionError as e: self.verificationErrors.append(str(e))
def check_highlight_entity(self):
driver = self.selenium
try: self.assertRegexpMatches(driver.title, r"^[\s\S]*"+self.entity+"[\s\S]*$")
except AssertionError as e: self.verificationErrors.append(str(e))
def is_element_present(self, how, what):
try: self.selenium.find_element(by=how, value=what)
except NoSuchElementException, e: return False
return True
class TenantTestCase(BasicTestCase):
def setUp(self):
super(TenantTestCase, self).setUpBasic('Broad Ripple Trails', "/tenants/", "Tenants")
    def test_highlight_property(self):
        # The selected property should be highlighted in the navigation.
        super(TenantTestCase, self).check_highlight_property()
    def test_highlight_entity(self):
        # The page title should reference the Tenants entity.
        super(TenantTestCase, self).check_highlight_entity()
    def test_filter_pos(self):
        # Filtering by a partial last name, and then by unit, should both
        # list the matching tenant row.
        driver = self.selenium
        driver.find_element_by_id("id_last_name").clear()
        driver.find_element_by_id("id_last_name").send_keys("obam")
        driver.find_element_by_xpath("//input[@value='Filter']").click()
        try: self.assertTrue(self.is_element_present(By.XPATH, "//div[@id='content']/table[@id='id_list_view']/tbody/tr/td/a[contains(text(),'Obama')]"))
        except AssertionError as e: self.verificationErrors.append(str(e))
        # Reset the filter form, then filter by unit instead.
        driver.find_element_by_css_selector("input[type=\"button\"]").click()
        Select(driver.find_element_by_id("id_unit")).select_by_visible_text("5209 CV")
        driver.find_element_by_xpath("//input[@value='Filter']").click()
        try: self.assertTrue(self.is_element_present(By.XPATH, "//div[@id='content']/table[@id='id_list_view']/tbody/tr/td/a[contains(text(),'Obama')]"))
        except AssertionError as e: self.verificationErrors.append(str(e))
    def test_filter_neg(self):
        # Filtering by a nonexistent last name should yield an empty table,
        # and filtering by a different unit should not list Obama.
        driver = self.selenium
        driver.find_element_by_id("id_last_name").clear()
        driver.find_element_by_id("id_last_name").send_keys("obamertrte")
        driver.find_element_by_xpath("//input[@value='Filter']").click()
        try: self.assertTrue(self.is_element_present(By.XPATH, "//div[@id='content']/table[@id='id_list_view']/tbody[count(tr)=0]"))
        except AssertionError as e: self.verificationErrors.append(str(e))
        # Reset the filter form, then filter by a unit without this tenant.
        driver.find_element_by_css_selector("input[type=\"button\"]").click()
        Select(driver.find_element_by_id("id_unit")).select_by_visible_text("5211 CV")
        driver.find_element_by_xpath("//input[@value='Filter']").click()
        try: self.assertFalse(self.is_element_present(By.XPATH, "//div[@id='content']/table[@id='id_list_view']/tbody/tr/td/a[contains(text(),'Obama')]"))
        except AssertionError as e: self.verificationErrors.append(str(e))
    def test_add_pos(self):
        # Fill in the complete add-tenant form and verify the new tenant
        # appears in the list view afterwards.
        driver = self.selenium
        driver.find_element_by_id("id_add_item").click()
        driver.find_element_by_id("id_first_name").clear()
        driver.find_element_by_id("id_first_name").send_keys("Barack")
        driver.find_element_by_id("id_last_name").clear()
        driver.find_element_by_id("id_last_name").send_keys("Obama")
        driver.find_element_by_id("id_start_date").clear()
        driver.find_element_by_id("id_start_date").send_keys("2012-12-02")
        driver.find_element_by_id("id_end_date").clear()
        driver.find_element_by_id("id_end_date").send_keys("2012-12-31")
        Select(driver.find_element_by_id("id_unit")).select_by_visible_text("5209 CV")
        driver.find_element_by_id("id_permanent_address1").clear()
        driver.find_element_by_id("id_permanent_address1").send_keys("1220 Montgomery St.")
        driver.find_element_by_id("id_permanent_address2").clear()
        driver.find_element_by_id("id_permanent_address2").send_keys("1995 Shattuck St.")
        driver.find_element_by_id("id_permanent_city").clear()
        driver.find_element_by_id("id_permanent_city").send_keys("San Francisco")
        Select(driver.find_element_by_id("id_permanent_state")).select_by_visible_text("California")
        driver.find_element_by_id("id_permanent_zip_code").clear()
        driver.find_element_by_id("id_permanent_zip_code").send_keys("94112")
        driver.find_element_by_id("id_permanent_contact_name").clear()
        driver.find_element_by_id("id_permanent_contact_name").send_keys("Bary")
        driver.find_element_by_id("id_phone1").clear()
        driver.find_element_by_id("id_phone1").send_keys("(415) 344 8992")
        driver.find_element_by_id("id_phone2").clear()
        driver.find_element_by_id("id_phone2").send_keys("(510) 223-6533")
        driver.find_element_by_id("id_email").clear()
        driver.find_element_by_id("id_email").send_keys("barack.obama@usa.gov")
        driver.find_element_by_id("id_comments").clear()
        driver.find_element_by_id("id_comments").send_keys("Nothing else to add")
        # Submit and confirm the new row shows up in the list view.
        driver.find_element_by_css_selector("input[type=\"submit\"]").click()
        try: self.assertTrue(self.is_element_present(By.XPATH, "//div[@id='content']/table[@id='id_list_view']/tbody/tr[td/text()='Barack']"))
        except AssertionError as e: self.verificationErrors.append(str(e))
    def test_add_neg(self):
        # Submit the add-tenant form with some required fields omitted and
        # expect a 'This field is required.' validation error to be shown.
        driver = self.selenium
        driver.find_element_by_css_selector("img").click()
        driver.find_element_by_id("id_first_name").clear()
        driver.find_element_by_id("id_first_name").send_keys("Barack2")
        driver.find_element_by_id("id_last_name").clear()
        driver.find_element_by_id("id_last_name").send_keys("Obama2")
        driver.find_element_by_id("id_start_date").click()
        driver.find_element_by_id("id_start_date").clear()
        driver.find_element_by_id("id_start_date").send_keys("2013-01-15")
        driver.find_element_by_id("id_end_date").clear()
        driver.find_element_by_id("id_end_date").send_keys("2013-01-26")
        driver.find_element_by_id("id_permanent_address1").clear()
        driver.find_element_by_id("id_permanent_address1").send_keys("6666 Wrong St.")
        driver.find_element_by_css_selector("input[type=\"submit\"]").click()
        self.assertTrue(self.is_element_present(By.XPATH, "//ul[contains(@class, 'errorlist') and .//text() = 'This field is required.']"))
def test_edit_pos(self):
driver = self.selenium
driver.find_element_by_xpath("(//a[contains(text(),'Obama')])").click()
Select(driver.find_element_by_id("id_permanent_state")).select_by_visible_text("Alabama")
driver.find_element_by_id("id_permanent_zip_code").clear()
driver.find_element_by_id("id_permanent_zip_code").send_keys("95788")
driver.find_element_by_id("id_first_name").clear()
driver.find_element_by_id("id_first_name").send_keys("Michelle")
driver.find_element_by_id("id_start_date").click()
driver.find_element_by_id("id_start_date").clear()
driver.find_element_by_id("id_st |
# -*- coding: utf-8 -*-
#
# sample documentation build configuration file, created by
# sphinx-quickstart on Mon Apr 16 21:22:43 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
# (No extensions are enabled for this project.)
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'sample'
copyright = u'2012, Kenneth Reitz'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = 'v0.0.1'
# The full version, including alpha/beta/rc tags.
release = 'v0.0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The nam | e of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any | paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'sampledoc'  # base name of the generated .hhp/.chm files
# -- Options for LaTeX output --------------------------------------------------
# Per-builder LaTeX tweaks; all entries are commented out, so Sphinx defaults apply.
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
  ('index', 'sample.tex', u'sample Documentation',
   u'Kenneth Reitz', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'sample', u'sample Documentation',
     [u'Kenneth Reitz'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
  ('index', 'sample', u'sample Documentation',
   u'Kenneth Reitz', 'sample', 'One line description of project.',
   'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
|
#!/ | usr/bin/env python
import gtk
#import NoteBuffer
import notemeister
class Note:
    """A single note: title/body text plus an optional link and wrap flag.

    The body text is mirrored into a NoteBuffer so it can be edited in
    the GUI.
    """

    def __init__(self, path=None, title='', body='', link='', wrap="1"):
        self.path = path    # tree path of the note (None when unattached)
        self.title = title
        self.body = body
        self.link = link
        self.wrap = wrap    # "1"/"0" string flag: word-wrap in the editor
        self.buffer = notemeister.NoteBuffer.NoteBuffer()
        self.buffer.set_text(self.body)

    def __str__(self):
        # Bug fix: the old format string referenced self.index, an attribute
        # that is never set anywhere, so printing a Note always raised
        # AttributeError.
        return 'Note "%s" has body: %s' % (self.title, self.body)
|
#!/usr/bin/env python
import setuptools
# Hack to prevent "TypeError: 'NoneType' object is not callable" on exit of
# `python setup.py test` in multiprocessing/util.py's _exit_function (see
# http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
try:
    # Importing (and asserting) multiprocessing registers its atexit hook
    # before setuptools' test command runs; see the comment above.
    import multiprocessing
    assert multiprocessing
except ImportError:
    pass

setuptools.setup(
    name='orwell.agent',
    version='0.0.1',
    description='Agent connecting to the game server.',
    author='',
    author_email='',
    # Bug fix: `exclude` must be an iterable of patterns.  Passing the plain
    # string "test" made find_packages iterate its characters ('t', 'e', ...),
    # so the test package was never actually excluded.
    packages=setuptools.find_packages(exclude=["test", "test.*"]),
    test_suite='nose.collector',
    install_requires=['pyzmq', 'cliff'],
    tests_require=['nose', 'coverage', 'mock'],
    entry_points={
        'console_scripts': [
            'thought_police = orwell.agent.main:main',
        ]
    },
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: POSIX :: Linux',
        'Topic :: Utilities',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3.6'],
    python_requires='>=3.6.0',
)
|
#!/usr/bin/env python3
"""
py_fanos_test.py: Tests for py_fanos.py
"""
import socket
import sys
import unittest
import py_fanos # module under test
class FanosTest(unittest.TestCase):

  def testSendReceive(self):
    """Round-trip messages over a socketpair, with and without FDs."""
    writer, reader = socket.socketpair()

    # Plain payload, no file descriptors attached.
    py_fanos.send(writer, b'foo')
    fds = []
    self.assertEqual(b'foo', py_fanos.recv(reader, fd_out=fds))
    self.assertEqual([], fds)

    # Payload with stdin/stdout/stderr passed over the socket.
    py_fanos.send(
        writer, b'spam',
        [sys.stdin.fileno(), sys.stdout.fileno(), sys.stderr.fileno()])
    self.assertEqual(b'spam', py_fanos.recv(reader, fd_out=fds))
    self.assertEqual(3, len(fds))
    print(fds)

    writer.close()
    self.assertEqual(None, py_fanos.recv(reader))  # valid EOF
    reader.close()
class InvalidMessageTests(unittest.TestCase):
  """COPIED to native/fanos_test.py."""

  def _assertRecvRaises(self, data, close_first=True):
    """Write raw bytes and assert py_fanos.recv() rejects them.

    Args:
      data: bytes to write to the socket (an invalid netstring).
      close_first: close the writing end before recv() so the reader
        sees EOF mid-message.

    Refactor: the four tests below previously duplicated this whole
    try/except/else dance verbatim.
    """
    left, right = socket.socketpair()
    left.send(data)
    if close_first:
      left.close()
    try:
      py_fanos.recv(right)
    except ValueError as e:
      print(type(e))
      print(e)
    else:
      self.fail('Expected failure')
    if not close_first:
      left.close()
    right.close()

  def testInvalidColon(self):
    self._assertRecvRaises(b':', close_first=False)  # should be 3:foo,

  def testInvalidDigits(self):
    self._assertRecvRaises(b'34')  # EOF in the middle of the length

  def testInvalidMissingColon(self):
    self._assertRecvRaises(b'34foo')  # missing colon

  def testInvalidMissingComma(self):
    # A short payload like b'3:fo' would block indefinitely, so only the
    # missing trailing comma case is covered here.
    self._assertRecvRaises(b'3:foo')
# Allow running this test file directly: python py_fanos_test.py
if __name__ == '__main__':
  unittest.main()
|
# Copyright 2012 NetApp
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Interface for shares extension."""
try:
from urllib import urlencode # noqa
except ImportError:
from urllib.parse import urlencode # noqa
from manilaclient import api_versions
from manilaclient import base
from manilaclient.common import constants
from manilaclient.openstack.common.apiclient import base as common_base
class ShareSnapshot(common_base.Resource):
    """Represent a snapshot of a share."""
    def __repr__(self):
        return "<ShareSnapshot: %s>" % self.id
    def update(self, **kwargs):
        """Update this snapshot (delegates to the manager)."""
        self.manager.update(self, **kwargs)
    def reset_state(self, state):
        """Update the snapshot with the provided state."""
        self.manager.reset_state(self, state)
    def delete(self):
        """Delete this snapshot."""
        self.manager.delete(self)
    def force_delete(self):
        """Delete the specified snapshot ignoring its current state."""
        self.manager.force_delete(self)
class ShareSnapshotManager(base.ManagerWithFind):
    """Manage :class:`ShareSnapshot` resources."""
    resource_class = ShareSnapshot
    def create(self, share, force=False, name=None, description=None):
        """Create a snapshot of the given share.
        :param share: The :class:`Share` (or its ID) to snapshot.
        :param force: If force is True, create a snapshot even if the
                      share is busy. Default is False.
        :param name: Name of the snapshot
        :param description: Description of the snapshot
        :rtype: :class:`ShareSnapshot`
        """
        body = {'snapshot': {'share_id': common_base.getid(share),
                             'force': force,
                             'name': name,
                             'description': description}}
        return self._create('/snapshots', body, 'snapshot')
    def get(self, snapshot):
        """Get a snapshot.
        :param snapshot: The :class:`ShareSnapshot` instance or string with ID
            of snapshot to get.
        :rtype: :class:`ShareSnapshot`
        """
        snapshot_id = common_base.getid(snapshot)
        return self._get('/snapshots/%s' % snapshot_id, 'snapshot')
    def list(self, detailed=True, search_opts=None, sort_key=None,
             sort_dir=None):
        """Get a list of snapshots of shares.
        :param detailed: Whether to request the detailed listing endpoint.
        :param search_opts: Search options to filter out shares.
        :param sort_key: Key to be sorted.
        :param sort_dir: Sort direction, should be 'desc' or 'asc'.
        :rtype: list of :class:`ShareSnapshot`
        """
        if search_opts is None:
            search_opts = {}
        if sort_key is not None:
            if sort_key in constants.SNAPSHOT_SORT_KEY_VALUES:
                search_opts['sort_key'] = sort_key
            else:
                raise ValueError(
                    'sort_key must be one of the following: %s.'
                    % ', '.join(constants.SNAPSHOT_SORT_KEY_VALUES))
        if sort_dir is not None:
            if sort_dir in constants.SORT_DIR_VALUES:
                search_opts['sort_dir'] = sort_dir
            else:
                raise ValueError(
                    'sort_dir must be one of the following: %s.'
                    % ', '.join(constants.SORT_DIR_VALUES))
        # Falsy values are dropped; sorting keeps the query string stable.
        if search_opts:
            query_string = urlencode(
                sorted([(k, v) for (k, v) in list(search_opts.items()) if v]))
            if query_string:
                query_string = "?%s" % (query_string,)
        else:
            query_string = ''
        if detailed:
            path = "/snapshots/detail%s" % (query_string,)
        else:
            path = "/snapshots%s" % (query_string,)
        return self._list(path, 'snapshots')
    def delete(self, snapshot):
        """Delete a snapshot of a share.
        :param snapshot: The :class:`ShareSnapshot` to delete.
        """
        self._delete("/snapshots/%s" % common_base.getid(snapshot))
    def _do_force_delete(self, snapshot, action_name="force_delete"):
        """Delete the specified snapshot ignoring its current state."""
        # NOTE(review): passes the bare id to _action (which getid()s it
        # again -- a no-op for strings), whereas _do_reset_state passes the
        # snapshot object; same behaviour, inconsistent style.
        return self._action(action_name, common_base.getid(snapshot))
    # The two decorated definitions below intentionally share a name: the
    # api_versions.wraps decorator picks one based on the negotiated
    # microversion (the action was renamed in API version 2.7).
    @api_versions.wraps("1.0", "2.6")
    def force_delete(self, snapshot):
        return self._do_force_delete(snapshot, "os-force_delete")
    @api_versions.wraps("2.7")  # noqa
    def force_delete(self, snapshot):
        return self._do_force_delete(snapshot, "force_delete")
    def update(self, snapshot, **kwargs):
        """Update a snapshot.
        :param snapshot: The :class:`ShareSnapshot` instance or string with ID
            of snapshot to update.
        :rtype: :class:`ShareSnapshot`
        """
        # No-op (returns None) when there is nothing to change.
        if not kwargs:
            return
        body = {'snapshot': kwargs, }
        snapshot_id = common_base.getid(snapshot)
        return self._update("/snapshots/%s" % snapshot_id, body)
    def _do_reset_state(self, snapshot, state, action_name="reset_status"):
        """Update the specified share snapshot with the provided state."""
        return self._action(action_name, snapshot, {"status": state})
    # Same microversion-dispatch pattern as force_delete above.
    @api_versions.wraps("1.0", "2.6")
    def reset_state(self, snapshot, state):
        return self._do_reset_state(snapshot, state, "os-reset_status")
    @api_versions.wraps("2.7")  # noqa
    def reset_state(self, snapshot, state):
        return self._do_reset_state(snapshot, state, "reset_status")
    def _action(self, action, snapshot, info=None, **kwargs):
        """Perform a snapshot 'action'.
        :param action: name of the action key POSTed to the API.
        :param snapshot: snapshot (or id) the action applies to.
        :param info: optional payload stored under the action key.
        """
        body = {action: info}
        self.run_hooks('modify_body_for_action', body, **kwargs)
        url = '/snapshots/%s/action' % common_base.getid(snapshot)
        return self.api.client.post(url, body=body)
|
#!/usr/bin/env python
"""Run pytest with coverage and generate an html report."""
from sys import argv
from os import system as run
# To run a specific file with debug logging prints:
# py -3 -m pytest test_can.py --log-cli-format="%(asctime)s.%(msecs)d %(levelname)s: %(message)s (%(filename)s:%(lineno)d)" --log-cli-level=debug
def main():  # noqa
    """Run pytest under coverage, then emit an HTML report.

    argv[1] (optional) is the pytest target; remaining args are passed
    through to pytest verbatim.
    """
    run_str = 'python -m coverage run --include={} --omit=./* -m pytest {} {}'
    arg = ''
    # All source files included in coverage
    includes = '../*'
    if len(argv) >= 2:
        arg = argv[1]
        # Bug fix: the old code checked for a single ':' but split on '::',
        # so a non-node-id argument containing one colon clobbered the
        # includes pattern with the whole argument.  pytest node ids use '::'.
        if '::' in argv[1]:
            includes = argv[1].split('::')[0]
    other_args = ' '.join(argv[2:])
    run(run_str.format(includes, arg, other_args))
    # Generate the html coverage report and ignore errors
    run('python -m coverage html -i')
# Script entry point.
if __name__ == '__main__':
    main()
|
from uuid import uuid4, UUID
from behave import given, when, then
from formencode import Invalid, validators
@given("I made a Device linking request")
@given("I have made a Device linking request")
@when("I make a Device linking request")
def make_device_linking_request(context):
    """Create a linking request for a fresh random user in the current directory."""
    current_directory = context.entity_manager.get_current_directory()
    context.directory_device_manager.create_linking_request(
        user_identifier=str(uuid4()),
        directory_id=current_directory.id
    )
@then("the Device linking response contains a valid QR Code URL")
def linking_response_contains_valid_qr_code_url(context):
    """Fail unless the linking response's qrcode field parses as a URL."""
    try:
        validators.URL().to_python(
            context.entity_manager.get_current_linking_response().qrcode
        )
    except Invalid as e:
        raise Exception("Could not parse QR Code as URL: %s" % e)
@then("the Device linking response contains a valid Linking Code")
def linking_response_contains_valid_linking_code(context):
    """Fail unless the linking response carries a non-empty linking code."""
    code = context.entity_manager.get_current_linking_response().code
    if not code:
        raise Exception("Linking code was not valid: %s" % code)
@then("the Device linking response contains a valid Device ID")
# NOTE(review): reuses the previous function's name, shadowing it at module
# level (flake8 F811); behave registers steps via the decorator, so both
# steps still work.
def linking_response_contains_valid_linking_code(context):
    """Fail unless the linking response's device_id is a well-formed UUID."""
    device_id = context.entity_manager.get_current_linking_response().device_id
    try:
        if not device_id:
            raise ValueError
        UUID(device_id)
    except ValueError:
        raise Exception("Device ID was not valid: %s" % device_id)
@given("I retrieve the Devices list for the current User")
@when("I retrieve the Devices list for the current User")
def retrieve_devices_list_for_current_user(context):
    """Fetch the device list of the device manager's current user."""
    current_directory = context.entity_manager.get_current_directory()
    current_user_identifier = context.directory_device_manager.\
        current_user_identifier
    context.directory_device_manager.retrieve_user_devices(
        current_user_identifier, current_directory.id)
@when("I retrieve the Devices list for the user \"{user_identifier}\"")
# NOTE(review): shadows the function above by name; harmless under behave.
def retrieve_devices_list_for_current_user(context, user_identifier):
    """Fetch the device list of an explicitly named user."""
    current_directory = context.entity_manager.get_current_directory()
    context.directory_device_manager.retrieve_user_devices(
        user_identifier,
        current_directory.id
    )
@then("the Device List has {count:d} Device")
@then("the Device List has {count:d} Devices")
@then("there should be {count:d} Device in the Devices list")
@then("there should be {count:d} Devices in the Devices list")
def verify_device_list_count(context, count):
    """Assert the current device list exists and has exactly `count` entries."""
    current_device_list = context.entity_manager.get_current_device_list()
    if current_device_list is None or len(current_device_list) != count:
        # Bug fix: failure message said "length length" (duplicated word).
        raise Exception("Device list length is not %s: %s" % (
            count, current_device_list))
@then("all of the devices should be inactive")
# NOTE(review): shadows verify_device_list_count above; harmless under behave.
def verify_device_list_count(context):
    """Fail if any device in the current list reports an active status."""
    current_device_list = context.entity_manager.get_current_device_list()
    for device in current_device_list:
        if device.status.is_active:
            raise Exception("Device was active: %s" % device)
@then("all of the devices should be active")
# NOTE(review): shadows the two functions above by name; harmless under behave.
def verify_device_list_count(context):
    """Fail if any device in the current list reports an inactive status."""
    current_device_list = context.entity_manager.get_current_device_list()
    for device in current_device_list:
        if not device.status.is_active:
            raise Exception("Device was not active: %s" % device)
@when("I unlink the Device with the ID \"{device_id}\"")
def unlink_device_with_id(context, device_id):
    """Unlink the given device id from the current user and directory."""
    current_directory = context.entity_manager.get_current_directory()
    current_user_identifier = context.directory_device_manager. \
        current_user_identifier
    context.directory_device_manager.unlink_device(
        device_id,
        current_user_identifier,
        current_directory.id
    )
@when("I unlink the current Device")
def unlink_current_device(context):
    """Unlink the tracked current device from the current user/directory."""
    current_directory = context.entity_manager.get_current_directory()
    current_user_identifier = context.directory_device_manager. \
        current_user_identifier
    current_device = context.entity_manager.get_current_device()
    context.directory_device_manager.unlink_device(
        current_device.id,
        current_user_identifier,
        current_directory.id
    )
@when("I attempt to unlink the device with the ID \"{device_id}\"")
def attempt_to_unlink_device_with_id(context, device_id):
    """Like unlink_device_with_id, but captures any error for later steps."""
    current_directory = context.entity_manager.get_current_directory()
    current_user_identifier = context.directory_device_manager. \
        current_user_identifier
    try:
        context.directory_device_manager.unlink_device(
            device_id,
            current_user_identifier,
            current_directory.id
        )
    # Broad catch is deliberate: "attempt" steps stash the exception so a
    # following "then" step can assert on it.
    except Exception as e:
        context.current_exception = e
@when("I attempt to unlink the device from the User Identifier "
      "\"{user_identifier}\"")
def attempt_to_unlink_user_identifier_device(context, user_identifier):
    """Attempt an unlink with a random device id for the given user."""
    current_directory = context.entity_manager.get_current_directory()
    try:
        context.directory_device_manager.unlink_device(
            str(uuid4()),
            user_identifier,
            current_directory.id
        )
    # Deliberate broad catch: stashes the error for a later assertion step.
    except Exception as e:
        context.current_exception = e
# Device manager steps
@given("I have a linked device")
def link_device(context):
    """Compose the request + link steps so the scenario starts linked."""
    context.execute_steps(u'''
        Given I made a Device linking request
        When I link my device
    ''')
@when("I link my device")
def link_physical_device(context):
    """Drive the sample app through linking with the current linking code."""
    sdk_key = context.entity_manager.get_current_directory_sdk_keys()[0]
    context.sample_app_device_manager.set_sdk_key(sdk_key)
    linking_code = context.entity_manager.get_current_linking_response().code
    context.sample_app_device_manager.link_device(linking_code)
    # We should now be on the home page if everything succeeded
    context.appium_device_manager.get_scrollable_element_by_text("Auth Methods")
@when("I link my physical device with the name \"{device_name}\"")
def link_device_with_name(context, device_name):
    """Link the device under a caller-supplied device name."""
    # NOTE(review): sdk_key is fetched but never used here (unlike
    # link_physical_device, which also calls set_sdk_key) — confirm whether
    # a set_sdk_key call was intended in this step too.
    sdk_key = context.entity_manager.get_current_directory_sdk_keys()[0]
    linking_code = context.entity_manager.get_current_linking_response().code
    context.sample_app_device_manager.link_device(linking_code,
                                                  device_name=device_name)
@when("I approve the auth request")
def approve_auth_request(context):
    """Approve the pending auth request in the sample app."""
    context.sample_app_device_manager.approve_request()
@when("I deny the auth request")
def deny_auth_request(context):
    """Deny the pending auth request in the sample app."""
    context.sample_app_device_manager.deny_request()
@when("I receive the auth request and acknowledge the failure message")
# NOTE(review): shadows deny_auth_request above by name; harmless under behave.
def deny_auth_request(context):
    """Receive the auth request and dismiss the resulting failure dialog."""
    context.sample_app_device_manager.receive_and_acknowledge_auth_failure()
@when("I make a Device linking request with a TTL of {ttl:d} seconds")
def step_impl(context, ttl):
    """Create a linking request whose code expires after `ttl` seconds."""
    current_directory = context.entity_manager.get_current_directory()
    context.directory_device_manager.create_linking_request(
        user_identifier=str(uuid4()),
        directory_id=current_directory.id,
        ttl=ttl
    )
# -*- coding: utf-8 -*-
import sys
from argparse import ArgumentParser
from DatabaseLogin import DatabaseLogin
from GlobalInstaller import GlobalInstaller
from PyQt5 import QtWidgets
from Ui_MainWindow import Ui_MainWindow
# Imports needed so the generated installer bundle works, even though they are not used directly here.
from PyQt5 import QtCore, QtGui
import cx_Oracle
import json
import base64
import urllib
from Crypto.Cipher import AES
from chardet import UniversalDetector
def get_parser():
    """Build the ArgumentParser for the DB installer command line.

    Help texts are intentionally left in German (user-facing output).
    """
    parser = ArgumentParser()
    # Flags that steer how the GUI is initialised.
    parser.add_argument('--inst_synonym', action='store_true', default=False,
                        help=r"Setzt Flag für die Installation von Synonymen.")
    parser.add_argument('--inst_sequence', action='store_true', default=False,
                        help=r"Setzt Flag für die Installation von Sequenzen.")
    parser.add_argument('--inst_tab_save', action='store_true', default=False,
                        help=r"Setzt Flag für die Installation von Tab Save Tabellen.")
    # These default to True: passing the flag *disables* the object type.
    parser.add_argument('--inst_tab', action='store_false', default=True,
                        help=r"Entfernt Flag für die Installation von Tab Tabellen.")
    parser.add_argument('--inst_view', action='store_false', default=True,
                        help=r"Entfernt Flag für die Installation von Views.")
    parser.add_argument('--inst_package', action='store_false', default=True,
                        help=r"Entfernt Flag für die Installation von Packages.")
    parser.add_argument('--inst_sql', action='store_false', default=True,
                        help=r"Entfernt Flag für die Installation von Sqls.")
    # Extended parameters that pre-fill the GUI fields.
    parser.add_argument('--username', default=r"", help=r"Benutzername der Datenbank Verbindung.")
    parser.add_argument('--password', default=r"", help=r"Passwort der Datenbank Verbindung.")
    parser.add_argument('--connection', default=r"", help=r"Connection der Datenbank Verbindung.")
    parser.add_argument('--svnBasePath', default=r"", help=r"Schreibt Pfad in SVN Basis Pfad.")
    parser.add_argument('--svnKndPath', default=r"", help=r"Schreibt Pfad in SVN Kassen Pfad.")
    parser.add_argument('--installationPath', default=r"", help=r"Schreibt Pfad in Installation Pfad.")
    parser.add_argument('--global_defines_file', default=r"",
                        help=r"Pfad zu einem TAB seperierten File wo die Defines vordefiniert sind.")
    # jsonl_parameters overrides every other parameter.
    parser.add_argument('--jsonl_parameters', type=str, default=r'',
                        help=(r"Übergabe von allen Parameter in einem JSONL Format."
                              "Dieses Format überschreibt alle Parameter."))
    # Parameters for a blind (GUI-less) installation; only meaningful together
    # with --hideGui or --json_file_path.
    parser.add_argument('--hideGui', action='store_true', default=False, help=r"Startet DB Installer ohne GUI.")
    parser.add_argument('--clean_installation_path', action='store_true', default=False,
                        help=r"Führt Aktion Installationspfad Bereinigen durch. Nur in Kombi-nation von Parameter –-hideGui oder --json_file_path.")
    parser.add_argument('--copy_all_data_to_installation', action='store_true', default=False,
                        help=r"Führt Aktion Dateien ab Pfade Laden durch. Nur in Kombination von Parameter -–hideGui oder --json_file_path.")
    parser.add_argument('--install_objects', action='store_true', default=False,
                        help=r"Führt Aktion Objekte installieren durch. Nur in Kombination von Parameter –-hideGui oder --json_file_path.")
    parser.add_argument('--json_file_path', default=r"",
                        help=(r"Übergabe eines Parameter Files in Jsonl Format."
                              "Zusammen mit den Argumenten für die Aktionen kann damit eine ganze Kette von "
                              "Arbeiten mit einem einzigen Aufruf erledigt werden. "
                              "Arbeiten in einem Jsonl File sind immer ohne Gui "
                              "und schreiben Debug Informationen auf die Konsole."))
    return parser
#
# Main program. Execution starts here.
#
if __name__ == "__main__":
    parser = get_parser()
    args = parser.parse_args()
    # Connection test only reports problems (printInfo=False).
    dbLogin = DatabaseLogin(userName=args.username, passWord=args.password, connection=args.connection)
    dbLogin.testConnection(printInfo=False)
    globalInstaller = GlobalInstaller(dbLogin=dbLogin, svnBasePath=args.svnBasePath, svnKndPath=args.svnKndPath,
                                      installationPath=args.installationPath, flag_synonym=args.inst_synonym,
                                      flag_sequence=args.inst_sequence, flag_tab_save=args.inst_tab_save,
                                      flag_tab=args.inst_tab, flag_view=args.inst_view, flag_package=args.inst_package,
                                      flag_sql=args.inst_sql, global_defines_file=args.global_defines_file,
                                      jsonl_parameters=args.jsonl_parameters
                                      )
    if len(args.json_file_path) > 0:
        # A JSONL job file drives a whole batch of actions without the GUI.
        globalInstaller.workJsonlFile(json_file_path=args.json_file_path,
                                      cleanInstallationPath=args.clean_installation_path,
                                      copy_all_data_to_installation=args.copy_all_data_to_installation,
                                      install_objects=args.install_objects)
    elif args.hideGui:
        # Calls function without gui.
        # used in command line only.
        if args.clean_installation_path:
            globalInstaller.cleanInstallationPath()
        if args.copy_all_data_to_installation:
            globalInstaller.readInstallationObjectFromPath()
            globalInstaller.copyAllData2InstallationPath()
        if args.install_objects:
            globalInstaller.installAllObjects2Database()
    else:
        # Default option: start the GUI.
        app = QtWidgets.QApplication(sys.argv)
        MainWindow = QtWidgets.QMainWindow()
        ui = Ui_MainWindow()
        # NOTE(review): "connect_user_isgnals" looks like a typo for
        # "connect_user_signals", but it must match the method defined in
        # Ui_MainWindow — verify there before renaming.
        ui.connect_user_isgnals()
        ui.set_user_variables(globalInstaller=globalInstaller)
        MainWindow.show()
        sys.exit(app.exec_())
|
or the
requested NIC
"""
cmd_result = virsh.dumpxml(self.name, uri=self.connect_uri)
if cmd_result.exit_status:
raise exceptions.TestFail("dumpxml %s failed.\n"
"Detail: %s.\n" % (self.name, cmd_result))
thexml = cmd_result.stdout.strip()
xtf = xml_utils.XMLTreeFile(thexml)
interfaces = xtf.find('devices').findall('interface')
# Range check
try:
mac = interfaces[nic_index].find('mac').get('address')
if mac is not None:
return mac
except IndexError:
pass # Allow other exceptions through
# IndexError (range check) or mac is None
raise virt_vm.VMMACAddressMissingError(nic_index)
def get_pid(self):
"""
Return the VM's PID.
:return: int with PID. If VM is not alive, returns None.
"""
if self.is_lxc():
pid_file = "/var/run/libvirt/lxc/%s.pid" % self.name
elif self.is_qemu():
pid_file = "/var/run/libvirt/qemu/%s.pid" % self.name
elif self.is_esx():
pid_file = "/var/run/libvirt/esx/%s.pid" % self.name
# TODO: Add more vm driver type
else:
raise ValueError("Unsupport connect uri: %s." % self.connect_uri)
pid = None
if os.path.exists(pid_file):
try:
pid_file_contents = open(pid_file).read()
pid = int(pid_file_contents)
except IOError:
logging.error("Could not read %s to get PID", pid_file)
except TypeError:
logging.error("PID file %s has invalid contents: '%s'",
pid_file, pid_file_contents)
else:
logging.debug("PID file %s not present", pid_file)
return pid
    def get_vcpus_pid(self):
        """
        Return the vcpu's pid for a given VM.

        :return: list of PID strings, one per guest vcpu.
        """
        # "info cpus" via the human monitor; each thread_id=<pid> line names
        # the host thread backing one guest vcpu.
        output = virsh.qemu_monitor_command(self.name, "info cpus", "--hmp",
                                            uri=self.connect_uri)
        vcpu_pids = re.findall(r'thread_id=(\d+)', output.stdout)
        return vcpu_pids
    def get_shell_pid(self):
        """
        Return the PID of the parent shell process.

        :note: This works under the assumption that ``self.process.get_pid()``
               returns the PID of the parent shell process.
        """
        return self.process.get_pid()
def get_shared_meminfo(self):
"""
Returns the VM's shared memory information.
:return: Shared memory used by VM (MB)
"""
if self.is_dead():
logging.error("Could not get shared memory info from dead VM.")
return None
filename = "/proc/%d/statm" % self.get_pid()
shm = int(open(filename).read().split()[2])
# statm stores informations in pages, translate it to MB
return shm * 4.0 / 1024
def get_cpu_topology_in_cmdline(self):
"""
Return the VM's cpu topology in VM cmdline.
:return: A dirt of cpu topology
"""
cpu_topology = {}
vm_pid = self.get_pid()
if vm_pid is None:
logging.error("Fail to get VM pid")
else:
cmdline = open("/proc/%d/cmdline" % vm_pid).read()
values = re.findall("sockets=(\d+),cores=(\d+),threads=(\d+)",
cmdline)[0]
cpu_topology = dict(zip(["sockets", "cores", "threads"], values))
return cpu_topology
    def get_cpu_topology_in_vm(self):
        """Return the cpu topology as reported by lscpu inside the guest.

        :return: dict with "sockets", "cores", "threads" (empty on failure)
        """
        cpu_topology = {}
        cpu_info = utils_misc.get_cpu_info(self.wait_for_login())
        if cpu_info:
            cpu_topology['sockets'] = cpu_info['Socket(s)']
            cpu_topology['cores'] = cpu_info['Core(s) per socket']
            cpu_topology['threads'] = cpu_info['Thread(s) per core']
        return cpu_topology
    def activate_nic(self, nic_index_or_name):
        """Stub: nic hot-plugging is not implemented for libvirt VMs yet."""
        # TODO: Implement nic hotplugging
        pass    # Just a stub for now
    def deactivate_nic(self, nic_index_or_name):
        """Stub: nic hot-unplugging is not implemented for libvirt VMs yet."""
        # TODO: Implement nic hot un-plugging
        pass    # Just a stub for now
    @error_context.context_aware
    def reboot(self, session=None, method="shell", nic_index=0, timeout=240,
               serial=False):
        """
        Reboot the VM and wait for it to come back up by trying to log in until
        timeout expires.

        :param session: A shell session object or None.
        :param method: Reboot method. Can be "shell" (send a shell reboot
                command).
        :param nic_index: Index of NIC to access in the VM, when logging in
                after rebooting.
        :param timeout: Time to wait for login to succeed (after rebooting).
        :param serial: Just use to unify api in virt_vm module.
        :return: A new shell session object.
        """
        error_context.base_context("rebooting '%s'" % self.name, logging.info)
        error_context.context("before reboot")
        session = session or self.login(timeout=timeout)
        # NOTE(review): bare context() call — appears to clear the step
        # context set above; confirm against error_context semantics.
        error_context.context()
        if method == "shell":
            session.sendline(self.params.get("reboot_command"))
        else:
            raise virt_vm.VMRebootError("Unknown reboot method: %s" % method)
        error_context.context("waiting for guest to go down", logging.info)
        # Guest is considered down once the old session stops responding
        # (checked for up to 120 s).
        if not utils_misc.wait_for(lambda: not
                                   session.is_responsive(timeout=30),
                                   120, 0, 1):
            raise virt_vm.VMRebootError("Guest refuses to go down")
        session.close()
        error_context.context("logging in after reboot", logging.info)
        return self.wait_for_login(nic_index, timeout=timeout)
    def screendump(self, filename, debug=False):
        """Take a screenshot of the VM console into *filename* via virsh."""
        if debug:
            logging.debug("Requesting screenshot %s" % filename)
        return virsh.screenshot(self.name, filename, uri=self.connect_uri)
def start(self, autoconsole=True):
"""
Starts this VM.
"""
self.uuid = virsh.domuuid(self.name,
uri=self.connect_uri).stdout.strip()
logging.debug("Starting vm '%s'", self.name)
result = virsh.start(self.name, uri=self.connect_uri)
if not result.exit_status:
# Wait for the domain to be created
has_started = utils_misc.wait_for(func=self.is_alive, timeout=60,
text=("waiting for domain %s "
"to start" % self.name))
if has_started is None:
raise virt_vm.VMStartError(self.name, "libvirt domain not "
"active after start")
self.uuid = virsh.domuuid(self.name,
uri=self.connect_uri).stdout.strip()
# Establish a session with the serial console
if autoconsole:
| self.create_serial_console()
else:
raise virt_vm.VMStartError(self.name, result.stderr.strip())
# Pull in mac addresses from libvirt guest definition
for index | , nic in enumerate(self.virtnet):
try:
mac = self.get_virsh_mac_address(index)
if not nic.has_key('mac'):
logging.debug("Updating nic %d with mac %s on vm %s"
% (index, mac, self.name))
nic.mac = mac
elif nic.mac != mac:
logging.warning("Requested mac %s doesn't match mac %s "
"as defined for vm %s", nic.mac, mac,
self.name)
# TODO: Checkout/Set nic_model, nettype, netdst also
except virt_vm.VMMACAddressMissingError:
logging.warning("Nic %d requested by test but not defined for"
" vm %s" % (index, self.name))
def wait_for_shutdown(self, count=60):
"""
|
# -*- coding: utf-8 -*-
"""
django_twilio specific settings.
"""
# Bug fix: this docstring previously sat *after* the __future__ import, which
# made it a discarded string expression instead of the module docstring
# (__doc__ was None).  A docstring may legally precede __future__ imports.
from __future__ import unicode_literals, absolute_import

from .utils import discover_twilio_credentials

# Resolved once at import time from the environment/Django settings.
TWILIO_ACCOUNT_SID, TWILIO_AUTH_TOKEN = discover_twilio_credentials()
, 2),))
assert fractional_slice(('x', 3, 5.1), {0: 2, 1: 3}) == \
(getitem, ('x', 3, 5), (slice(None, None, None), slice(-3, None)))
assert fractional_slice(('x', 2.9, 5.1), {0: 2, 1: 3}) == \
(getitem, ('x', 3, 5), (slice(0, 2), slice(-3, None)))
def test_ghost_internal():
    """ghost_internal pads each *internal* chunk boundary with rows/columns
    copied from the neighbouring chunk (depth 2 on axis 0, 1 on axis 1)."""
    x = np.arange(64).reshape((8, 8))
    d = da.from_array(x, chunks=(4, 4))
    g = ghost_internal(d, {0: 2, 1: 1})
    result = g.compute(get=get)
    # 4-row chunks gain 2 halo rows, 4-column chunks gain 1 halo column.
    assert g.chunks == ((6, 6), (5, 5))
    expected = np.array([
        [ 0, 1, 2, 3, 4, 3, 4, 5, 6, 7],
        [ 8, 9, 10, 11, 12, 11, 12, 13, 14, 15],
        [16, 17, 18, 19, 20, 19, 20, 21, 22, 23],
        [24, 25, 26, 27, 28, 27, 28, 29, 30, 31],
        [32, 33, 34, 35, 36, 35, 36, 37, 38, 39],
        [40, 41, 42, 43, 44, 43, 44, 45, 46, 47],
        [16, 17, 18, 19, 20, 19, 20, 21, 22, 23],
        [24, 25, 26, 27, 28, 27, 28, 29, 30, 31],
        [32, 33, 34, 35, 36, 35, 36, 37, 38, 39],
        [40, 41, 42, 43, 44, 43, 44, 45, 46, 47],
        [48, 49, 50, 51, 52, 51, 52, 53, 54, 55],
        [56, 57, 58, 59, 60, 59, 60, 61, 62, 63]])
    assert eq(result, expected)
def test_trim_internal():
    """trim_internal shaves `axes` elements off every side of each chunk."""
    arr = da.ones((40, 60), chunks=(10, 10))
    trimmed = trim_internal(arr, axes={0: 1, 1: 2})
    # 10-wide chunks lose 1 on each side of axis 0 and 2 on axis 1.
    assert trimmed.chunks == ((8, 8, 8, 8), (6, 6, 6, 6, 6, 6))
def test_periodic():
    """A periodic boundary wraps trailing rows around to the leading halo."""
    data = np.arange(64).reshape((8, 8))
    arr = da.from_array(data, chunks=(4, 4))
    wrapped = periodic(arr, axis=0, depth=2)
    # Two halo rows are added at each end of axis 0 only.
    assert wrapped.shape[0] == arr.shape[0] + 4
    assert wrapped.shape[1] == arr.shape[1]
    # Leading halo rows are copies of the trailing data rows.
    assert eq(wrapped[1, :], arr[-1, :])
    assert eq(wrapped[0, :], arr[-2, :])
def test_reflect():
    """A reflect boundary mirrors the edge elements outward."""
    arr = da.from_array(np.arange(10), chunks=(5, 5))
    assert eq(reflect(arr, axis=0, depth=2),
              np.array([1, 0, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 9, 8]))
    assert eq(reflect(arr, axis=0, depth=1),
              np.array([0, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 9]))
def test_nearest():
    """A nearest boundary repeats the edge element into the halo."""
    arr = da.from_array(np.arange(10), chunks=(5, 5))
    assert eq(nearest(arr, axis=0, depth=2),
              np.array([0, 0, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 9, 9]))
    assert eq(nearest(arr, axis=0, depth=1),
              np.array([0, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 9]))
def test_constant():
    """A constant boundary fills the halo with the supplied value."""
    arr = da.from_array(np.arange(64).reshape((8, 8)), chunks=(4, 4))
    padded = constant(arr, axis=0, depth=2, value=10)
    # depth=2 padding on both ends of axis 0 only.
    assert padded.shape[0] == arr.shape[0] + 4
    assert padded.shape[1] == arr.shape[1]
    assert eq(padded[1, :], 10)
    assert eq(padded[-1, :], 10)
def test_boundaries():
    """Mixed boundary kinds: constant 0 fill on axis 0, periodic on axis 1."""
    x = np.arange(64).reshape((8, 8))
    d = da.from_array(x, chunks=(4, 4))
    # depth 2 on axis 0 (zero fill), depth 1 on axis 1 (wrap-around)
    e = boundaries(d, {0: 2, 1: 1}, {0: 0, 1: 'periodic'})
    # Two all-zero rows top and bottom; first/last columns wrapped from the
    # opposite side of each row.
    expected = np.array(
        [[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
         [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
         [ 7, 0, 1, 2, 3, 4, 5, 6, 7, 0],
         [15, 8, 9,10,11,12,13,14,15, 8],
         [23,16,17,18,19,20,21,22,23,16],
         [31,24,25,26,27,28,29,30,31,24],
         [39,32,33,34,35,36,37,38,39,32],
         [47,40,41,42,43,44,45,46,47,40],
         [55,48,49,50,51,52,53,54,55,48],
         [63,56,57,58,59,60,61,62,63,56],
         [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
         [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]])
    assert eq(e, expected)
def test_ghost():
    """End-to-end ghost(): constant fill (100) on axis 0, reflect on axis 1.

    Fixes stray '|' corruption characters inside the expected-array literal
    that made the function a syntax error.
    """
    x = np.arange(64).reshape((8, 8))
    d = da.from_array(x, chunks=(4, 4))
    g = ghost(d, depth={0: 2, 1: 1}, boundary={0: 100, 1: 'reflect'})
    # each 4-chunk grows by 2*2 rows and 2*1 columns
    assert g.chunks == ((8, 8), (6, 6))
    expected = np.array(
        [[100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100],
         [100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100],
         [  0,   0,   1,   2,   3,   4,   3,   4,   5,   6,   7,   7],
         [  8,   8,   9,  10,  11,  12,  11,  12,  13,  14,  15,  15],
         [ 16,  16,  17,  18,  19,  20,  19,  20,  21,  22,  23,  23],
         [ 24,  24,  25,  26,  27,  28,  27,  28,  29,  30,  31,  31],
         [ 32,  32,  33,  34,  35,  36,  35,  36,  37,  38,  39,  39],
         [ 40,  40,  41,  42,  43,  44,  43,  44,  45,  46,  47,  47],
         [ 16,  16,  17,  18,  19,  20,  19,  20,  21,  22,  23,  23],
         [ 24,  24,  25,  26,  27,  28,  27,  28,  29,  30,  31,  31],
         [ 32,  32,  33,  34,  35,  36,  35,  36,  37,  38,  39,  39],
         [ 40,  40,  41,  42,  43,  44,  43,  44,  45,  46,  47,  47],
         [ 48,  48,  49,  50,  51,  52,  51,  52,  53,  54,  55,  55],
         [ 56,  56,  57,  58,  59,  60,  59,  60,  61,  62,  63,  63],
         [100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100],
         [100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100]])
    assert eq(g, expected)
    # a missing boundary entry for axis 1 means no padding there
    g = ghost(d, depth={0: 2, 1: 1}, boundary={0: 100})
    assert g.chunks == ((8, 8), (5, 5))
def test_map_overlap():
    """map_overlap hands each func a ghosted block (chunk 5 + 2*depth 2)."""
    darr = da.arange(10, chunks=5)
    mapped = darr.map_overlap(lambda block: block + len(block), depth=2)
    assert eq(mapped, np.arange(10) + 5 + 2 + 2)
def test_nearest_ghost():
    """ghost + trim_internal with 'nearest' boundaries round-trips the data."""
    base = np.arange(144).reshape(12, 12).astype(float)
    darr = da.from_array(base, chunks=(6, 6))
    ghosted = ghost(darr, depth={0: 5, 1: 5},
                    boundary={0: 'nearest', 1: 'nearest'})
    roundtrip = trim_internal(ghosted, {0: 5, 1: 5})
    assert_array_almost_equal(roundtrip, base)
def test_0_depth():
    """Zero depth must be a no-op for every boundary kind."""
    expected = np.arange(100).reshape(10, 10)
    darr = da.from_array(expected, chunks=(5, 2))
    depth = {0: 0, 1: 0}
    for boundary in ('reflect', 'nearest', 'periodic', 42):
        ghosted = ghost(darr, depth=depth, boundary=boundary)
        assert_array_equal(trim_internal(ghosted, depth), expected)
def test_some_0_depth():
    """A mix of zero and non-zero depths round-trips for every boundary."""
    expected = np.arange(100).reshape(10, 10)
    darr = da.from_array(expected, chunks=(5, 5))
    depth = {0: 4, 1: 0}
    for boundary in ('reflect', 'nearest', 'periodic', 42):
        ghosted = ghost(darr, depth=depth, boundary=boundary)
        assert_array_equal(trim_internal(ghosted, depth), expected)
def test_one_chunk_along_axis():
    """Depth 0 preserves shape even when one axis has a single chunk."""
    base = np.arange(2 * 9).reshape(2, 9)
    darr = da.from_array(base, chunks=((2,), (2, 2, 2, 3)))
    ghosted = ghost(darr, depth=0, boundary=0)
    assert base.shape == ghosted.shape
def test_constant_boundaries():
    """Zero-depth constant boundaries leave the chunk structure untouched."""
    base = np.arange(1 * 9).reshape(1, 9)
    darr = da.from_array(base, chunks=((1,), (2, 2, 2, 3)))
    bounded = boundaries(darr, {0: 0, 1: 0}, {0: 0, 1: 0})
    assert bounded.chunks == darr.chunks
def test_depth_equals_boundary_length():
    """Depth exactly equal to the chunk size still round-trips cleanly."""
    expected = np.arange(100).reshape(10, 10)
    darr = da.from_array(expected, chunks=(5, 5))
    depth = {0: 5, 1: 5}
    for boundary in ('reflect', 'nearest', 'periodic', 42):
        ghosted = ghost(darr, depth=depth, boundary=boundary)
        assert_array_equal(trim_internal(ghosted, depth), expected)
@pytest.mark.xfail
def test_depth_greater_than_boundary_length():
expected = np.arange(100).reshape(10, 10)
darr = da.from_array(expected, chunks=(5, 5))
depth = {0: 8, 1: 7}
reflected = ghost(darr, depth=depth, boundary='reflect')
nearest = ghost(darr, depth=depth, boundary='nearest') |
uuid' % (self._ds.name)
expected_image_path = '[%s] vmware_temp/tmp-uuid/%s/%s-flat.vmdk' % (
self._ds.name, self._image_id, self._image_id)
expected_image_path_parent = '[%s] vmware_temp/tmp-uuid/%s' % (
self._ds.name, self._image_id)
expected_path_to_create = '[%s] vmware_temp/tmp-uuid/%s/%s.vmdk' % (
self._ds.name, self._image_id, self._image_id)
mock_mkdir.assert_called_once_with(
self._session, DsPathMatcher(expected_image_path_parent),
self._dc_info.ref)
self.assertEqual(str(tmp_dir_loc), expected_tmp_dir_path)
self.assertEqual(str(tmp_image_ds_loc), expected_image_path)
image_info = vi.ii
mock_create_virtual_disk.assert_called_once_with(
self._session, self._dc_info.ref,
image_info.adapter_type,
image_info.disk_type,
DsPathMatcher(expected_path_to_create),
image_info.file_size_in_kb)
mock_delete_datastore_file.assert_called_once_with(
DsPathMatcher(expected_image_path),
self._dc_info.ref)
@mock.patch.object(ds_util, 'file_move')
def test_cache_iso_image(self, mock_file_move):
vi = self._make_vm_config_info(is_iso=True)
tmp_image_ds_loc = mock.Mock()
self._vmops._cache_iso_image(vi, tmp_image_ds_loc)
mock_file_move.assert_called_once_with(
self._session, self._dc_info.ref,
tmp_image_ds_loc.parent,
DsPathMatcher('[fake_ds] vmware_base/%s' % self._image_id))
@mock.patch.object(ds_util, 'file_move')
def test_cache_flat_image(self, mock_file_move):
vi = self._make_vm_config_info()
tmp_image_ds_loc = mock.Mock()
self._vmops._cache_flat_image(vi, tmp_image_ds_loc)
mock_file_move.assert_called_once_with(
self._session, self._dc_info.ref,
tmp_image_ds_loc.parent,
DsPathMatcher('[fake_ds] vmware_base/%s' % self._image_id))
    @mock.patch.object(ds_util, 'disk_move')
    @mock.patch.object(ds_util, 'mkdir')
    def test_cache_stream_optimized_image(self, mock_mkdir, mock_disk_move):
        """Caching a stream-optimized image creates the cache dir, then
        moves the temp disk to <cache dir>/<image id>.vmdk."""
        vi = self._make_vm_config_info()
        self._vmops._cache_stream_optimized_image(vi, mock.sentinel.tmp_image)
        # the per-image cache directory must be created first
        mock_mkdir.assert_called_once_with(
            self._session,
            DsPathMatcher('[fake_ds] vmware_base/%s' % self._image_id),
            self._dc_info.ref)
        # then the disk is moved inside it under its canonical name
        mock_disk_move.assert_called_once_with(
            self._session, self._dc_info.ref,
            mock.sentinel.tmp_image,
            DsPathMatcher('[fake_ds] vmware_base/%s/%s.vmdk' %
                          (self._image_id, self._image_id)))
    @mock.patch.object(ds_util, 'file_move')
    @mock.patch.object(vm_util, 'copy_virtual_disk')
    @mock.patch.object(vmops.VMwareVMOps, '_delete_datastore_file')
    @mock.patch.object(vmops.VMwareVMOps, '_update_image_size')
    def test_cache_sparse_image(self,
                                mock_update_image_size,
                                mock_delete_datastore_file,
                                mock_copy_virtual_disk,
                                mock_file_move):
        """Caching a sparse image copies the sparse disk to its final
        name and refreshes the stored image size."""
        vi = self._make_vm_config_info(is_sparse_disk=True)
        sparse_disk_path = "[%s] vmware_temp/tmp-uuid/%s/tmp-sparse.vmdk" % (
            self._ds.name, self._image_id)
        tmp_image_ds_loc = ds_obj.DatastorePath.parse(sparse_disk_path)
        self._vmops._cache_sparse_image(vi, tmp_image_ds_loc)
        # the sparse source must be copied to <image id>.vmdk alongside it
        target_disk_path = "[%s] vmware_temp/tmp-uuid/%s/%s.vmdk" % (
            self._ds.name,
            self._image_id, self._image_id)
        mock_copy_virtual_disk.assert_called_once_with(
            self._session, self._dc_info.ref,
            sparse_disk_path,
            DsPathMatcher(target_disk_path))
        mock_update_image_size.assert_called_once_with(vi)
def test_get_storage_policy_none(self):
flavor = objects.Flavor(name='m1.small',
memory_mb=6,
vcpus=28,
root_gb=496,
ephemeral_gb=8128,
swap=33550336,
extra_specs={})
self.flags(pbm_enabled=True,
pbm_default_policy='fake-policy', group='vmware')
extra_specs = self._vmops._get_extra_specs(flavor, None)
self.assertEqual('fake-policy', extra_specs.storage_policy)
def test_get_storage_policy_extra_specs(self):
extra_specs = {'vmware:storage_policy': 'flavor-policy'}
flavor = objects.Flavor(name='m1.small',
memory_mb=6,
vcpus=28,
root_gb=496,
ephemeral_gb=8128,
swap=33550336,
extra_specs=extra_specs)
self.flags(pbm_enabled=True,
pbm_default_policy='default-policy', group='vmware')
extra_specs = self._vmops._get_extra_specs(flavor, None)
self.assertEqual('flavor-policy', extra_specs.storage_policy)
def test_get_base_folder_not_set(self):
self.flags(image_cache_subdirectory_name='vmware_base')
base_folder = self._vmops._get_base_folder()
self.assertEqual('vmware_base', base_folder)
def test_get_base_folder_host_ip(self):
self.flags(my_ip='7.7.7.7',
image_cache_subdirectory_name= | '_base')
base_folder = self._vmops._get_base_folder()
self.assertEqual('7.7.7.7_base', base_folder)
def test_get_base_folder_cache_prefix(se | lf):
self.flags(cache_prefix='my_prefix', group='vmware')
self.flags(image_cache_subdirectory_name='_base')
base_folder = self._vmops._get_base_folder()
self.assertEqual('my_prefix_base', base_folder)
    def _test_reboot_vm(self, reboot_type="SOFT"):
        """Drive vmops.reboot() with stubbed session calls and verify the
        expected vSphere method sequence for the given reboot type.

        SOFT reboots should call RebootGuest; anything else should call
        ResetVM_Task and wait for the task.
        """
        # The ordered list of session methods reboot() is expected to invoke.
        expected_methods = ['get_object_properties_dict']
        if reboot_type == "SOFT":
            expected_methods.append('RebootGuest')
        else:
            expected_methods.append('ResetVM_Task')
        # Properties reported for the VM: powered on with tools running, so
        # a soft reboot is considered possible.
        query = {}
        query['runtime.powerState'] = "poweredOn"
        query['summary.guest.toolsStatus'] = "toolsOk"
        query['summary.guest.toolsRunningStatus'] = "guestToolsRunning"
        def fake_call_method(module, method, *args, **kwargs):
            # Pop the next expected method and verify ordering.
            expected_method = expected_methods.pop(0)
            self.assertEqual(expected_method, method)
            if (expected_method == 'get_object_properties_dict'):
                return query
            elif (expected_method == 'ResetVM_Task'):
                return 'fake-task'
        with contextlib.nested(
                mock.patch.object(vm_util, "get_vm_ref",
                                  return_value='fake-vm-ref'),
                mock.patch.object(self._session, "_call_method",
                                  fake_call_method),
                mock.patch.object(self._session, "_wait_for_task")
        ) as (_get_vm_ref, fake_call_method, _wait_for_task):
            self._vmops.reboot(self._instance, self.network_info, reboot_type)
            _get_vm_ref.assert_called_once_with(self._session,
                                                self._instance)
            # Only a hard reset produces a task that must be awaited.
            if reboot_type == "HARD":
                _wait_for_task.assert_has_calls([
                    mock.call('fake-task')])
def test_reboot_vm_soft(self):
self._test_reboot_vm()
def test_reboot_vm_hard(self):
self._test_reboot_vm(reboot_type="HARD")
def test_get_instance_metadata(self):
flavor = objects.Flavor(id=7,
name='m1.small',
memory_mb=6,
vcpus=28,
root_gb=496,
ephemeral_gb=8128,
swap=33550336,
extra_specs={}) |
# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe, json
from frappe import _
from frappe.model.document import Document
from frappe.model.rename_doc import rename_doc
class Medication(Document):
	"""Healthcare Medication master that mirrors itself into a billable Item.

	Fixes stray '|' corruption characters: one inside the
	``enable_disable_item`` def line and one inside the ``'Item'`` doctype
	string literal, both of which broke the file.
	"""

	def validate(self):
		self.enable_disable_item()

	def after_insert(self):
		# Create the linked Item as soon as the Medication exists.
		create_item_from_medication(self)

	def on_update(self):
		# `change_in_item` is a flag set when item-related fields change.
		if self.change_in_item:
			self.update_item_and_item_price()

	def enable_disable_item(self):
		"""Keep the linked Item's disabled flag in sync with this document."""
		if self.is_billable:
			if self.disabled:
				frappe.db.set_value('Item', self.item, 'disabled', 1)
			else:
				frappe.db.set_value('Item', self.item, 'disabled', 0)

	def update_item_and_item_price(self):
		"""Propagate name/group/description/UOM (and rate) to the Item."""
		if self.is_billable and self.item:
			item_doc = frappe.get_doc('Item', {'item_code': self.item})
			item_doc.item_name = self.medication_name
			item_doc.item_group = self.item_group
			item_doc.description = self.description
			item_doc.stock_uom = self.stock_uom
			item_doc.disabled = 0
			item_doc.save(ignore_permissions=True)

			if self.rate:
				item_price = frappe.get_doc('Item Price', {'item_code': self.item})
				item_price.item_name = self.medication_name
				item_price.price_list_rate = self.rate
				item_price.save()

		elif not self.is_billable and self.item:
			# No longer billable: disable the linked Item.
			frappe.db.set_value('Item', self.item, 'disabled', 1)

		self.db_set('change_in_item', 0)
def create_item_from_medication(doc):
	"""Create an Item mirroring *doc* and link it back via ``doc.item``."""
	# A billable, enabled medication forces the Item to be enabled.
	disabled = 0 if (doc.is_billable and not doc.disabled) else doc.disabled
	uom = doc.stock_uom or frappe.db.get_single_value('Stock Settings', 'stock_uom')
	item_fields = {
		'doctype': 'Item',
		'item_code': doc.medication_name,
		'item_name': doc.medication_name,
		'item_group': doc.item_group,
		'description': doc.description,
		'is_sales_item': 1,
		'is_service_item': 1,
		'is_purchase_item': 0,
		'is_stock_item': 0,
		'show_in_website': 0,
		'is_pro_applicable': 0,
		'disabled': disabled,
		'stock_uom': uom
	}
	item_doc = frappe.get_doc(item_fields).insert(
		ignore_permissions=True, ignore_mandatory=True)
	make_item_price(item_doc.name, doc.rate)
	doc.db_set('item', item_doc.name)
def make_item_price(item, item_price):
	"""Insert an Item Price for *item* on the first selling price list."""
	selling_price_list = frappe.db.get_value('Price List', {'selling': 1})
	price_doc = frappe.get_doc({
		'doctype': 'Item Price',
		'price_list': selling_price_list,
		'item_code': item,
		'price_list_rate': item_price
	})
	price_doc.insert(ignore_permissions=True, ignore_mandatory=True)
@frappe.whitelist()
def change_item_code_from_medication(item_code, doc):
	"""Rename the linked Item to *item_code*, guarding against duplicates."""
	doc = frappe._dict(json.loads(doc))
	# frappe.throw raises, so execution stops here on a duplicate code.
	if frappe.db.exists('Item', {'item_code': item_code}):
		frappe.throw(_('Item with Item Code {0} already exists').format(item_code))
	rename_doc('Item', doc.item_code, item_code, ignore_permissions=True)
	frappe.db.set_value('Medication', doc.name, 'item_code', item_code)
	return
|
""" TODO: Add docstring """
import re
import pexpect
class MediaObject(object):
    """Represents an encodable media file and its probed duration.

    Fixes relative to the original:
    * ``info_process.close`` was never called (missing parentheses), so the
      spawned avprobe process was not closed.
    * The duration return used an unparenthesised conditional expression,
      so ``a + b + c + 1 if cond else 0`` evaluated as
      ``(a + b + c + 1) if cond else 0`` and returned 0 whenever the
      seconds field was <= 50.
    * Rounding tested the seconds field (group 3) instead of the fractional
      field (group 4).
    """

    def __init__(self, input_filename, output_filename):
        self.input_filename = input_filename
        self.output_filename = output_filename
        self.media_duration = self.get_media_duration()
        # INFO: All other media information could potentially be put here too

    def get_media_duration(self):
        """
        Spawn an avprobe process to get the media duration.

        Reads the avprobe output and returns the media duration in whole
        seconds (rounded), or -1 when no duration line is found (e.g. the
        process terminated early or the output format changed).
        """
        info_process = pexpect.spawn("/usr/bin/avprobe " + self.input_filename)
        subprocess_output = info_process.readlines()
        info_process.close()
        return self._parse_duration(subprocess_output)

    def _parse_duration(self, lines):
        """Extract 'Duration: HH:MM:SS.cc' from avprobe output lines.

        Returns the duration in seconds rounded to the nearest second,
        or -1 if no line matches.
        """
        # Non-greedy match on 'Duration: ' followed by HH:MM:SS.cc
        regex_group = re.compile(".*?Duration: .*?(\\d+):(\\d+):(\\d+).(\\d+)",
                                 re.IGNORECASE | re.DOTALL)
        for line in lines:
            regex_match = regex_group.search(line)
            if regex_match:
                hours, minutes, seconds, fraction = (
                    int(g) for g in regex_match.groups())
                total = hours * 3600 + minutes * 60 + seconds
                # Round the fractional (centisecond) part to the nearest second.
                if fraction > 50:
                    total += 1
                return total
        # Not found: possibly an early process exit or a format change.
        return -1
|
m __future__ import division
from __future__ import print_function
#import cv2
from scipy.misc import imresize
from scipy.misc import imread
import numpy as np
try:
import cPickle as pickle
except ImportError:
import pickle
import os
import math
import tensorflow as tf
from utils.timer import Timer
from utils.cython_nms import nms, nms_new
from utils.boxes_grid import get_boxes_grid
from utils.blob import im_list_to_blob
from model.config import cfg, get_output_dir
from model.bbox_transform import clip_boxes, bbox_transform_inv
def _get_image_blob(im):
  """Converts an image into a network input.
  Arguments:
    im (ndarray): a color image in BGR order
  Returns:
    blob (ndarray): a data blob holding an image pyramid
    im_scale_factors (list): list of image scales (relative to im) used
      in the image pyramid
  """
  # Mean-subtract a float copy; the original `im` stays untouched for now.
  im_orig = im.astype(np.float32, copy=True)
  im_orig -= cfg.PIXEL_MEANS
  im_shape = im_orig.shape
  im_size_min = np.min(im_shape[0:2])
  im_size_max = np.max(im_shape[0:2])
  processed_ims = []
  im_scale_factors = []
  for target_size in cfg.TEST.SCALES:
    # Scale so the shorter side equals target_size ...
    im_scale = float(target_size) / float(im_size_min)
    # Prevent the biggest axis from being more than MAX_SIZE
    if np.round(im_scale * im_size_max) > cfg.TEST.MAX_SIZE:
      im_scale = float(cfg.TEST.MAX_SIZE) / float(im_size_max)
    # NOTE(review): `im` is rebound below, so from the second iteration on
    # these dimensions come from the previously resized image, not the
    # original — harmless when TEST.SCALES has a single entry; confirm if
    # multi-scale testing is used.
    im_row,im_col,_ = im.shape
    im = imresize(im_orig, (int(im_row*im_scale), int(im_col*im_scale)))
    im_scale_factors.append(im_scale)
    processed_ims.append(im)
  # Create a blob to hold the input images
  blob = im_list_to_blob(processed_ims)
  return blob, np.array(im_scale_factors)
def _get_blobs(im):
  """Convert an image into network inputs: a data blob plus scale factors."""
  data_blob, scale_factors = _get_image_blob(im)
  return {'data': data_blob}, scale_factors
def _clip_boxes(boxes, im_shape):
"""Clip boxes to image boundaries."""
# x1 >= 0
boxes[:, 0::4] = np.maximum(boxes[:, 0::4], 0)
# y1 >= 0
boxes[:, 1::4] = np.maximum(boxes[:, 1::4], 0)
# x2 < im_shape[1]
boxes[:, 2::4] = np.minimum(boxes[:, 2::4], im_shape[1] - 1)
# y2 < im_shape[0]
boxes[:, 3::4] = np.minimum(boxes[:, 3::4], im_shape[0] - 1)
return boxes
def _rescale_boxes(boxes, inds, scales):
"""Rescale boxes according to image rescaling."""
for i in range(boxes.shape[0]):
boxes[i,:] = boxes[i,:] / scales[int(inds[i])]
return boxes
def im_detect(sess, net, im):
  """Run the network on one image and return (scores, pred_boxes).

  Builds the input blobs, runs net.test_image, maps RoIs back to the
  original image scale, and (optionally) applies bbox regression.
  """
  blobs, im_scales = _get_blobs(im)
  assert len(im_scales) == 1, "Only single-image batch implemented"
  im_blob = blobs['data']
  # seems to have height, width, and image scales
  # still not sure about the scale, maybe full image it is 1.
  blobs['im_info'] = \
    np.array([[im_blob.shape[1], im_blob.shape[2], im_scales[0]]], dtype=np.float32)
  _, scores, bbox_pred, rois = \
    net.test_image(sess, blobs['data'], blobs['im_info'])
  # rois column 0 is the batch index; columns 1:5 are the box coordinates,
  # rescaled back to the original image size.
  boxes = rois[:, 1:5] / im_scales[0]
  # print(scores.shape, bbox_pred.shape, rois.shape, boxes.shape)
  scores = np.reshape(scores, [scores.shape[0], -1])
  bbox_pred = np.reshape(bbox_pred, [bbox_pred.shape[0], -1])
  if cfg.TEST.BBOX_REG:
    # Apply bounding-box regression deltas
    box_deltas = bbox_pred
    pred_boxes = bbox_transform_inv(boxes, box_deltas)
    pred_boxes = _clip_boxes(pred_boxes, im.shape)
  else:
    # Simply repeat the boxes, once for each class
    pred_boxes = np.tile(boxes, (1, scores.shape[1]))
  return scores, pred_boxes
def apply_nms(all_boxes, thresh):
  """Apply non-maximum suppression to all predicted boxes output by the
  test_net method.

  Fix: the original tested emptiness with `dets == []`, which on a NumPy
  array is an elementwise comparison rather than an emptiness check; use
  len() instead.
  """
  num_classes = len(all_boxes)
  num_images = len(all_boxes[0])
  nms_boxes = [[[] for _ in range(num_images)] for _ in range(num_classes)]
  for cls_ind in range(num_classes):
    for im_ind in range(num_images):
      dets = all_boxes[cls_ind][im_ind]
      if len(dets) == 0:
        continue
      x1 = dets[:, 0]
      y1 = dets[:, 1]
      x2 = dets[:, 2]
      y2 = dets[:, 3]
      scores = dets[:, 4]
      # Keep only well-formed boxes above the detection threshold.
      inds = np.where((x2 > x1) & (y2 > y1) & (scores > cfg.TEST.DET_THRESHOLD))[0]
      dets = dets[inds,:]
      if len(dets) == 0:
        continue
      keep = nms(dets, thresh)
      if len(keep) == 0:
        continue
      nms_boxes[cls_ind][im_ind] = dets[keep, :].copy()
  return nms_boxes
def test_net(sess, net, imdb, weights_filename, experiment_setup=None,
max_per_image=100, thresh=0.05):
np.random.seed(cfg.RNG_SEED)
"""Test a Fast R-CNN network on an image database."""
num_images = len(imdb.image_index)
# num_images = 2
# all detections are collected into:
# all_boxes[cls][image] = N x 5 array of detections in
# (x1, y1, x2, y2, score)
all_boxes = [[[] for _ in range(num_images)]
for _ in range(imdb.num_classes)]
output_dir = get_output_dir(imdb, weights_filename)
print('using output_dir: ', output_dir)
# timers
_t = {'im_detect' : Timer(), 'misc' : Timer()}
# define a writer to write the histogram of summaries
# test_tbdir = '/home/shuang/projects/tf-faster-rcnn/tensorboard/'
# if not os.path.exists(test_tbdir):
# print('making directory for test tensorboard result')
# os.mkdir(test_tbdir)
# writer = tf.summary.FileWriter(test_tbdir,sess.graph)
# define a folder for activation results
te | st_actdir = '../activations_retrained'
if not os.path.exists(test_actdir):
os.mkdir(test_actdir)
# define a folder for zero fractions
test_zerodir = './zero_fractions'
if not os.path.exists(test_zerodir):
os.mkdir(test_zerodir)
for i in range(num_images):
im = imread(imdb.image_path_at(i))
_t['im_detect'].tic()
scores, | boxes = im_detect(sess, net, im)
_t['im_detect'].toc()
# write act summaries to tensorboard
# writer.add_summary(act_summaries)
# record the zero fraction -> only for vgg16
# zero_frac = []
# for layer_ind in range(13):
# batch_num,row,col,filter_num = acts[layer_ind].shape
# zero_frac.append([])
# for j in range(filter_num):
# # print(acts[0][:,:,:,i].shape)
# fraction = 1-np.count_nonzero(acts[layer_ind][:,:,:,j])/(batch_num*row*col)
# zero_frac[layer_ind].append(fraction)
_t['misc'].tic()
# skip j = 0, because it's the background class
chosen_classes = []
for j in range(1, imdb.num_classes):
# for j, clas in enumerate(imdb._classes[1:]):
inds = np.where(scores[:, j] > thresh)[0]
cls_scores = scores[inds, j]
cls_boxes = boxes[inds, j*4:(j+1)*4]
cls_dets = np.hstack((cls_boxes, cls_scores[:, np.newaxis])) \
.astype(np.float32, copy=False)
keep = nms(cls_dets, cfg.TEST.NMS)
cls_dets = cls_dets[keep, :]
all_boxes[j][i] = cls_dets
# if len(cls_dets)!=0: # only for recording activations_res
# chosen_classes.append(imdb._classes[j])
# Limit to max_per_image detections *over all classes*
if max_per_image > 0:
image_scores = np.hstack([all_boxes[j][i][:, -1]
for j in range(1, imdb.num_classes)])
if len(image_scores) > max_per_image:
image_thresh = np.sort(image_scores)[-max_per_image]
for j in range(1, imdb.num_classes):
keep = np.where(all_boxes[j][i][:, -1] >= image_thresh)[0]
all_boxes[j][i] = all_boxes[j][i][keep, :]
_t['misc'].toc()
# write acts to a seperate text file for each seprate image file -> only vgg
# f_name = '{}/{}.txt'.format(test_actdir,i)
# act_file = open(f_name,'w')
# act_file.write('\n'.join(chosen_classes))
# act_file.write('\n')
# sum_act = []
# for arr in acts:
# temp = np.sum(arr,axis = (0,1,2))
# sum_act.append(temp)
# for item in sum_act:
# act_file.write('{}\n'.format(str(item)))
# act_file.close()
# chosen_classes = []
# write zero fractions to text files -> only vgg
# file_name = '{}/{}.txt'.format(test_zerodir,i)
# zero_file = open(file_name,'w')
# zero_file.write('\n'.join(chosen_classes))
# zero_file.write('\n')
# for arr in zero_frac:
# zero_file.write('{}\n'.format(str(arr)))
# zero_file.close()
# chosen_classes = []
if i%1000==0:
print('im_detect: { |
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c) Ansible Inc, 2016
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
#    * Redistributions in binary form must reproduce the above copyright notice,
#      this list of conditions and the following disclaimer in the documentation
#      and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import glob
import os
import pickle
import platform
import select
import shlex
import subprocess
import traceback
from ansible.module_utils.six import PY2, b
from ansible.module_utils._text import to_bytes, to_text
def sysv_is_enabled(name):
    '''
    Check whether the named service is enabled in any of the sysv runlevels.

    :arg name: name of the service to test for
    '''
    start_links = glob.glob('/etc/rc?.d/S??%s' % name)
    return len(start_links) > 0
def get_sysv_script(name):
    '''
    Return the expected init-script path for the given service.

    :arg name: name or path of the service to test for
    '''
    # An absolute path is taken as-is; a bare name maps into /etc/init.d.
    if name.startswith('/'):
        return name
    return '/etc/init.d/%s' % name
def sysv_exists(name):
    '''
    Report whether an init script exists for the named service.

    :arg name: name of the service to test for
    '''
    script = get_sysv_script(name)
    return os.path.exists(script)
def fail_if_missing(module, found, service, msg=''):
    '''
    This function will return an error or exit gracefully depending on check mode status
    and if the service is missing or not.

    In check mode a missing service is reported as "will exist on full run"
    and the module exits successfully with changed=True; otherwise the
    module fails.

    :arg module: is an AnsibleModule object, used for it's utility methods
    :arg found: boolean indicating if services was found or not
    :arg service: name of service
    :kw msg: extra info to append to error/success msg when missing
    '''
    if not found:
        if module.check_mode:
            module.exit_json(msg="Service %s not found on %s, assuming it will exist on full run" % (service, msg), changed=True)
        else:
            module.fail_json(msg='Could not find the requested service %s: %s' % (service, msg))
def daemonize(module, cmd):
    '''
    Execute a command while detaching as a daemon, returns rc, stdout, and stderr.

    :arg module: is an AnsibleModule object, used for it's utility methods
    :arg cmd: is a list or string representing the command and options to run

    This is complex because daemonization is hard for people.
    What we do is daemonize a part of this module, the daemon runs the command,
    picks up the return code and output, and returns it to the main process.

    Fix: the output dict was keyed on the non-existent ``p.sterr`` attribute,
    which raised AttributeError in the daemonized child and lost the
    command's stderr and exit status.
    '''
    # init some vars
    chunk = 4096  # FIXME: pass in as arg?
    errors = 'surrogate_or_strict'

    # start it!
    try:
        pipe = os.pipe()
        pid = os.fork()
    except OSError:
        module.fail_json(msg="Error while attempting to fork: %s", exception=traceback.format_exc())

    # we don't do any locking as this should be a unique module/process
    if pid == 0:
        os.close(pipe[0])

        # Set stdin/stdout/stderr to /dev/null
        fd = os.open(os.devnull, os.O_RDWR)

        # clone stdin/out/err
        for num in range(3):
            if fd != num:
                os.dup2(fd, num)

        # close otherwise
        if fd not in range(3):
            os.close(fd)

        # Make us a daemon
        pid = os.fork()

        # end if not in child
        if pid > 0:
            os._exit(0)

        # get new process session and detach
        sid = os.setsid()
        if sid == -1:
            module.fail_json(msg="Unable to detach session while daemonizing")

        # avoid possible problems with cwd being removed
        os.chdir("/")

        pid = os.fork()
        if pid > 0:
            os._exit(0)

        # if command is string deal with py2 vs py3 conversions for shlex
        if not isinstance(cmd, list):
            if PY2:
                cmd = shlex.split(to_bytes(cmd, errors=errors))
            else:
                cmd = shlex.split(to_text(cmd, errors=errors))

        # make sure we always use byte strings
        run_cmd = []
        for c in cmd:
            run_cmd.append(to_bytes(c, errors=errors))

        # execute the command in forked process
        p = subprocess.Popen(run_cmd, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE, preexec_fn=lambda: os.close(pipe[1]))
        fds = [p.stdout, p.stderr]

        # loop reading output till its done
        # BUG FIX: was `p.sterr` (typo) for the second key.
        output = {p.stdout: b(""), p.stderr: b("")}
        while fds:
            rfd, wfd, efd = select.select(fds, [], fds, 1)
            if (rfd + wfd + efd) or p.poll():
                for out in fds:
                    if out in rfd:
                        data = os.read(out.fileno(), chunk)
                        if not data:
                            fds.remove(out)
                        output[out] += b(data)

        # even after fds close, we might want to wait for pid to die
        p.wait()

        # Return a pickled data of parent
        return_data = pickle.dumps([p.returncode, to_text(output[p.stdout]), to_text(output[p.stderr])], protocol=pickle.HIGHEST_PROTOCOL)
        os.write(pipe[1], to_bytes(return_data, errors=errors))

        # clean up
        os.close(pipe[1])
        os._exit(0)

    elif pid == -1:
        module.fail_json(msg="Unable to fork, no exception thrown, probably due to lack of resources, check logs.")

    else:
        # in parent
        os.close(pipe[1])
        os.waitpid(pid, 0)

        # Grab response data after child finishes
        return_data = b("")
        while True:
            rfd, wfd, efd = select.select([pipe[0]], [], [pipe[0]])
            if pipe[0] in rfd:
                data = os.read(pipe[0], chunk)
                if not data:
                    break
                return_data += b(data)

        # Note: no need to specify encoding on py3 as this module sends the
        # pickle to itself (thus same python interpreter so we aren't mixing
        # py2 and py3)
        return pickle.loads(to_bytes(return_data, errors=errors))
def check_ps(module, pattern):
    '''Return True when *pattern* appears in a line of `ps` output.'''
    # SunOS ps does not support the BSD-style flags.
    psflags = '-ef' if platform.system() == 'SunOS' else 'auxww'
    psbin = module.get_bin_path('ps', True)

    rc, out, err = module.run_command('%s %s' % (psbin, psflags))
    if rc != 0:
        return False
    return any(pattern in line for line in out.split('\n'))
|
(token.getY2())
except TypeError:
pass
import numpy as np
if len(lX) > 0:
a,bx = np.polyfit(lX, lY, 1)
lPoints = ','.join(["%d,%d"%(xa,ya) for xa,ya in zip(lX, lY)])
# print 'ANLGE:',math.degrees(math.atan(a))
ymax = a*self.getWidth()+bx
from ObjectModel.XMLDSBASELINEClass import XMLDSBASELINEClass
b= XMLDSBASELINEClass()
b.setNode(self)
# b.addAttribute("points",lPoints)
b.setAngle(a)
b.setBx(bx)
b.setPoints(lPoints)
b.setParent(self)
self.setBaseline(b)
b.computePoints()
def getTokens(self):
"""
if dom tokens: rturn them
else split content
"""
if self.getAllNamedObjects(XMLDSTOKENClass) != []:
return self.getAllNamedObjects(XMLDSTOKENClass)
else:
for token in self.getContent().split():
oT=XMLDSTOKENClass()
oT.setParent(self)
oT.setPage(self.getPage())
self.addObject(oT)
oT.setContent(token)
return self.getAllNamedObjects(XMLDSTOKENClass)
def getSetOfFeaturesXPos(self,TH,lAttr,myObject):
from spm.feature import featureObject
if self._lBasicFeatures is None:
self._lBasicFeatures = []
ftype= featureObject.NUMERICAL
feature = featureObject()
feature.setName('x')
feature.setTH(TH)
feature.addNode(self)
feature.setObjectName(self)
feature.setValue(round(self.getX()))
feature.setType(ftype)
self.addFeature(feature)
ftype= featureObject.NUMERICAL
feature = featureObject()
feature.setName('x2')
feature.setTH(TH)
feature.addNode(self)
feature.setObjectName(self)
feature.setValue(round(self.getX()+self.getWidth()))
feature.setType(ftype)
self.addFeature(feature)
ftype= featureObject.NUMERICAL
feature = featureObject()
feature.setName('xc')
feature.setTH(TH)
feature.addNode(self)
feature.setObjectName(self)
feature.setValue(round(self.getX()+self.getWidth()/2))
feature.setType(ftype)
self.addFeature(feature)
return self.getSetofFeatures()
    def getSetOfListedAttributes(self,TH,lAttributes,myObject):
        """
        Build this node's feature set from the attribute names in lAttributes.

        For each attribute name, one NUMERICAL feature is added per distinct
        rounded value found among the myObject descendants.  Special names
        trigger dedicated features:
          'text'     -- EDITDISTANCE feature on the first content token (fixed TH=90)
          'tokens'   -- EDITDISTANCE feature per content token longer than 4 chars
          'xc'       -- NUMERICAL feature on the horizontal centre
          'virtual'  -- BOOLEAN feature on the 'virtual' attribute
          'bl'       -- NUMERICAL feature on the baseline distance to each
                        element of self.next
          'linegrid' -- BOOLEAN feature per entry of self.lgridlist

        TH is attached to every feature except 'text'.  Returns the feature set.
        """
        from spm.feature import featureObject
        if self._lBasicFeatures is None:
            self._lBasicFeatures = []
        # needed to keep canonical values!
        elif self.getSetofFeatures() != []:
            return self.getSetofFeatures()
        # histogram: attribute name -> rounded numeric value -> elements
        lHisto = {}
        for elt in self.getAllNamedObjects(myObject):
            for attr in lAttributes:
                try:lHisto[attr]
                except KeyError:lHisto[attr] = {}
                if elt.hasAttribute(attr):
                    try:
                        # non-numeric attribute values are silently skipped
                        try:lHisto[attr][round(float(elt.getAttribute(attr)))].append(elt)
                        except KeyError: lHisto[attr][round(float(elt.getAttribute(attr)))] = [elt]
                    except TypeError:pass
        for attr in lAttributes:
            # NOTE(review): raises KeyError when there are no myObject
            # descendants at all (lHisto stays empty) -- confirm callers
            # guarantee a non-empty element list
            for value in lHisto[attr]:
                # '> 0.1' simply means 'non-empty list' here
                if len(lHisto[attr][value]) > 0.1:
                    ftype= featureObject.NUMERICAL
                    feature = featureObject()
                    feature.setName(attr)
                    feature.setTH(TH)
                    feature.addNode(self)
                    feature.setObjectName(self)
                    feature.setValue(value)
                    feature.setType(ftype)
                    self.addFeature(feature)
        if 'text' in lAttributes:
            if len(self.getContent()):
                ftype= featureObject.EDITDISTANCE
                feature = featureObject()
                feature.setName('f')
                # fixed threshold of 90, deliberately ignoring TH
                feature.setTH(90)
                feature.addNode(self)
                feature.setObjectName(self)
                feature.setValue(self.getContent().split()[0])
                feature.setType(ftype)
                self.addFeature(feature)
        if 'tokens' in lAttributes:
            if len(self.getContent()):
                for token in self.getContent().split():
                    # short tokens are too noisy for edit distance
                    if len(token) > 4:
                        ftype= featureObject.EDITDISTANCE
                        feature = featureObject()
                        feature.setName('token')
                        feature.setTH(TH)
                        feature.addNode(self)
                        feature.setObjectName(self)
                        feature.setValue(token.lower())
                        feature.setType(ftype)
                        self.addFeature(feature)
        if 'xc' in lAttributes:
            ftype= featureObject.NUMERICAL
            feature = featureObject()
            feature.setName('xc')
            feature.setTH(TH)
            feature.addNode(self)
            feature.setObjectName(self)
            feature.setValue(round(self.getX()+self.getWidth()/2))
            feature.setType(ftype)
            self.addFeature(feature)
        if 'virtual' in lAttributes:
            ftype= featureObject.BOOLEAN
            feature = featureObject()
            feature.setName('f')
            feature.setTH(TH)
            feature.addNode(self)
            feature.setObjectName(self)
            feature.setValue(self.getAttribute('virtual'))
            feature.setType(ftype)
            self.addFeature(feature)
        if 'bl' in lAttributes:
            for inext in self.next:
                ftype= featureObject.NUMERICAL
                feature = featureObject()
                baseline = self.getBaseline()
                nbl = inext.getBaseline()
                # feature is only populated/added when both baselines exist
                if baseline and nbl:
                    feature.setName('bl')
                    feature.setTH(TH)
                    feature.addNode(self)
                    feature.setObjectName(self)
                    # distance between the vertical midpoints of the baselines
                    avg1= baseline.getY() +(baseline.getY2() -baseline.getY())/2
                    avg2= nbl.getY() +(nbl.getY2()-nbl.getY())/2
                    feature.setValue(round(abs(avg2-avg1)))
                    feature.setType(ftype)
                    self.addFeature(feature)
        if 'linegrid' in lAttributes:
            #lgridlist.append((ystart,rowH, y1,yoverlap))
            for ystart,rowh,_,_ in self.lgridlist:
                ftype= featureObject.BOOLEAN
                feature = featureObject()
                feature.setName('linegrid%s'%rowh)
                feature.setTH(TH)
                feature.addNode(self)
                feature.setObjectName(self)
                feature.setValue(ystart)
                feature.setType(ftype)
                self.addFeature(feature)
        return self.getSetofFeatures()
def getSetOfMutliValuedFeatures(self,TH,lMyFeatures,myObject):
"""
define a multivalued features
"""
from spm.feature import multiValueFeatureObject
#reinit
self._lBasicFeatures = None
mv =multiValueFeatureObject()
name= "multi" #'|'.join(i.getName() for i in lMyFeatures)
mv.setName(name)
mv.addNode(self)
mv.setObjectName(self)
mv.setTH(TH)
mv.setObject |
def add(x, y):
    """Return the sum of x and y (any types supporting +)."""
    return x + y


# BUG FIX: removed a leftover `import pdb; pdb.set_trace()` breakpoint that
# halted execution on import, and the dead `x = 0` assignment it shadowed.
x = add(1, 2)
| |
# This script has to run using the Python executable found in:
# /opt/mgmtworker/env/bin/python in order to properly load the manager
# blueprints utils.py module.
import argparse
import logging
import utils
class CtxWithLogger(object):
    """Minimal ctx stand-in exposing only a `logger` attribute."""
    logger = logging.getLogger('internal-ssl-certs-logger')
# give utils a ctx-like object -- presumably its functions log through
# utils.ctx.logger (TODO confirm against utils.py)
utils.ctx = CtxWithLogger()
# CLI: an optional --metadata file path plus an optional positional manager IP
parser = argparse.ArgumentParser()
parser.add_argument('--metadata', default=utils.CERT_METADATA_FILE_PATH,
                    help='File containing the cert metadata. It should be a '
                         'JSON file containing an object with the '
                         '"internal_rest_host" and "networks" fields.')
parser.add_argument('manager_ip', default=None, nargs='?',
                    help='The IP of this machine on the default network')
if __name__ == '__main__':
    args = parser.parse_args()
    cert_metadata = utils.load_cert_metadata(filename=args.metadata)
    # an explicit CLI IP overrides the one recorded in the metadata file
    internal_rest_host = args.manager_ip or cert_metadata['internal_rest_host']
    networks = cert_metadata.get('networks', {})
    networks['default'] = internal_rest_host
    # the cert must cover the default-network IP plus every known network IP
    cert_ips = [internal_rest_host] + list(networks.values())
    utils.generate_internal_ssl_cert(ips=cert_ips, name=internal_rest_host)
    utils.store_cert_metadata(internal_rest_host, networks,
                              filename=args.metadata)
|
"""
Copyright (C) 2017 Open Source Robotics Foundation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import math
import numpy as np
import latlon
import ecef
class Enu(object):
    """A point expressed in a local east/north/up (ENU) tangent frame."""

    def __init__(self, e, n, u):
        self.e = e
        self.n = n
        self.u = u

    def __eq__(self, other):
        # BUG FIX: comparing against a non-Enu object used to raise
        # AttributeError; return NotImplemented so Python falls back to
        # its default comparison instead.
        if not isinstance(other, Enu):
            return NotImplemented
        return self.e == other.e and self.n == other.n and self.u == other.u

    def __hash__(self):
        return hash((self.e, self.n, self.u))

    def to_ecef(self, origin):
        """Convert this ENU point to ECEF coordinates at `origin`.

        `origin` must expose _sin_lon/_sin_lat/_cos_lon/_cos_lat.
        """
        # this doesn't work at the poles because longitude is not uniquely defined there
        sin_lon = origin._sin_lon()
        sin_lat = origin._sin_lat()
        cos_lon = origin._cos_lon()
        cos_lat = origin._cos_lat()
        # rotation of the local ENU axes into the ECEF frame
        global_to_ecef_matrix = np.array([[-sin_lon, -cos_lon * sin_lat, cos_lon * cos_lat],
                                          [cos_lon, - sin_lon * sin_lat, sin_lon * cos_lat],
                                          [0, cos_lat, sin_lat]])
        enu_vector = np.array([[self.e], [self.n], [self.u]])
        ecef_vector = np.dot(global_to_ecef_matrix, enu_vector)
        return ecef.Ecef(ecef_vector[0][0], ecef_vector[1][0], ecef_vector[2][0])
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
FS Pairtree storage - Reverse lookup
====================================
Conventions used:
From http://www.cdlib.org/inside/diglib/pairtree/pairtreespec.html version 0.1
This is an implementation of a reverse lookup index, using the pairtree path spec to
record the link between local id and the id's that it corresponds to.
eg to denote issn:1234-1234 as being linked to a global id of "uuid:1e4f..."
--> create a file at ROOT_DIR/pairtree_rl/is/sn/+1/23/4-/12/34/uuid+1e4f...
Note that the id it links to is recorded as a filename encoded as per the pairtree spec.
Usage
=====
>>> from pairtree import PairtreeReverseLookup
>>> rl = PairtreeReverseLookup(storage_dir="ROOT")
>>> rl["issn:1234-1234"].append("uuid:1e4f...")
>>> rl["issn:1234-1234"]
["uuid:1e4f"]
>>> rl["issn:1234-1234"] = ["id:1", "uuid:32fad..."]
>>>
Notes
=====
This was created to avoid certain race conditions I had with a pickled dictionary for this index.
A SQLite or similar lookup would also be effective, but this one relies solely on pairtree.
"""
import os
from pairtree.pairtree_path import id_encode, id_decode, id_to_dirpath
PAIRTREE_RL = "pairtree_rl"
class PairtreeReverseLookup_list(object):
    """List-like view of the ids linked to a single local id.

    Each linked id is stored as an empty-ish file inside the pairtree
    directory for `id`; the filename is the pairtree-encoded linked id.
    """
    def __init__(self, rl_dir, id):
        self._rl_dir = rl_dir
        self._id = id
        self._dirpath = id_to_dirpath(self._id, self._rl_dir)

    def _get_ids(self):
        """Return the decoded linked ids, or [] when none are recorded."""
        if os.path.isdir(self._dirpath):
            return [id_decode(f) for f in os.listdir(self._dirpath)]
        else:
            return []

    def _add_id(self, new_id):
        """Record `new_id`, creating the storage directory if needed."""
        if not os.path.exists(self._dirpath):
            os.makedirs(self._dirpath)
        enc_id = id_encode(new_id)
        fpath = os.path.join(self._dirpath, enc_id)
        # BUG FIX: the existence check previously tested bare `enc_id`
        # (i.e. relative to the process CWD) instead of the storage dir.
        if not os.path.isfile(fpath):
            with open(fpath, "w") as f:
                f.write(new_id)

    def _exists(self, id):
        """True when `id` is already recorded for this local id."""
        if os.path.exists(self._dirpath):
            return id_encode(id) in os.listdir(self._dirpath)
        else:
            return False

    def append(self, *args):
        """Add each given id, skipping ones already present."""
        for x in args:
            if not self._exists(x):
                self._add_id(x)

    def __len__(self):
        # BUG FIX: listing the directory directly raised OSError before
        # anything was recorded; report 0 instead.
        return len(self._get_ids())

    def __repr__(self):
        return "ID:'%s' -> ['%s']" % (self._id, "','".join(self._get_ids()))

    def __str__(self):
        return self.__repr__()

    def __iter__(self):
        # BUG FIX: _get_ids() already decodes the filenames; decoding a
        # second time corrupted any id containing encoded characters.
        return iter(self._get_ids())
class PairtreeReverseLookup(object):
    """Dict-like reverse-lookup index stored on disk via pairtree paths."""
    def __init__(self, storage_dir="data"):
        self._storage_dir = storage_dir
        self._rl_dir = os.path.join(storage_dir, PAIRTREE_RL)
        self._init_store()

    def _init_store(self):
        """Create the root storage directory if it does not exist yet."""
        if not os.path.isdir(self._storage_dir):
            os.makedirs(self._storage_dir)

    def __getitem__(self, id):
        return PairtreeReverseLookup_list(self._rl_dir, id)

    def __setitem__(self, id, value):
        # NOTE: existing linked ids are kept; assignment only appends.
        id_c = PairtreeReverseLookup_list(self._rl_dir, id)
        # BUG FIX: the isinstance() arguments were reversed
        # (`isinstance(list, value)`), which raised TypeError for any value.
        if isinstance(value, list):
            id_c.append(*value)
        else:
            id_c.append(value)

    def __delitem__(self, id):
        """Remove every id linked to `id` and its pairtree directory."""
        dirpath = id_to_dirpath(id, self._rl_dir)
        if os.path.isdir(dirpath):
            for f in os.listdir(dirpath):
                os.remove(os.path.join(dirpath, f))
            os.removedirs(dirpath)  # will throw OSError if the dir cannot be removed.
        self._init_store()  # just in case
|
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
#htmlhelp_basename = 'libnacl'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'libnacl.tex', u'libnacl Documentation',
u'Thomas S Hatch', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'libnacl', u'libnacl Documentation',
[u'Thomas S Hatch'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'libnacl', u'libnacl Documentation',
u'Thomas S Hatch', 'libnacl', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = u'libnacl'
epub_author = u'Thomas S Hatch'
epub_publisher = u'Thomas S Hatch'
epub_copyright = u'2020, Thomas S Hatch'
# The basename for the epub file. It defaults to the project name.
#epub_basename = u'libnacl'
# The HTML theme for the epub output. Since the default themes are not optimized
# for small screen space, using the same theme for HTML and epub output is
# usually not wise. This defaults to 'epub', a theme designed to save visual
# space.
#epub_theme = 'epub'
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
#epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup | = True
# Choose between 'default' and 'includehidden' | .
#epub_tocscope = 'default'
# Fix unsupported image types using the PIL.
#epub_fix_images = False
# Scale large images.
#epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#e |
# Generated by Django 2.2.17 on 20 | 21-01-28 01:52
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the nullable `applied` flag to RegistrationDiscount and drop the
    TemporaryRegistrationDiscount model."""
    dependencies = [
        ('discounts', '0010_merge_20191028_1925'),
    ]
    operations = [
        migrations.AddField(
            model_name='registrationdiscount',
            name='applied',
            field=models.BooleanField(null=True, verbose_name='Use finalized'),
        ),
        migrations.DeleteModel(
            name='TemporaryRegistrationDiscount',
        ),
    ]
|
nder the BSD style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import codecs
import datetime
import itertools
import operator
import os
import re
import sys
try:
from lxml import etree
except ImportError:
etree = None
from . import colorize, config, source, utils
# issue severity kinds as they appear in json report rows
ISSUE_KIND_ERROR = 'ERROR'
ISSUE_KIND_WARNING = 'WARNING'
ISSUE_KIND_INFO = 'INFO'
ISSUE_KIND_ADVICE = 'ADVICE'
# field names in rows of json reports
JSON_INDEX_DOTTY = 'dotty'
JSON_INDEX_FILENAME = 'file'
JSON_INDEX_HASH = 'hash'
JSON_INDEX_INFER_SOURCE_LOC = 'infer_source_loc'
# sub-fields of the infer_source_loc object
JSON_INDEX_ISL_FILE = 'file'
JSON_INDEX_ISL_LNUM = 'lnum'
JSON_INDEX_ISL_CNUM = 'cnum'
JSON_INDEX_ISL_ENUM = 'enum'
JSON_INDEX_KIND = 'kind'
JSON_INDEX_LINE = 'line'
JSON_INDEX_PROCEDURE = 'procedure'
JSON_INDEX_PROCEDURE_ID = 'procedure_id'
JSON_INDEX_QUALIFIER = 'qualifier'
JSON_INDEX_QUALIFIER_TAGS = 'qualifier_tags'
JSON_INDEX_TYPE = 'bug_type'
JSON_INDEX_TRACE = 'bug_trace'
JSON_INDEX_TRACE_LEVEL = 'level'
JSON_INDEX_TRACE_FILENAME = 'filename'
JSON_INDEX_TRACE_LINE = 'line_number'
JSON_INDEX_TRACE_DESCRIPTION = 'description'
JSON_INDEX_VISIBILITY = 'visibility'
ISSUE_TYPES_URL = 'http://fbinfer.com/docs/infer-issue-types.html#'
def _text_of_infer_loc(loc):
    """Render infer's own source location dict as ' (file:lnum:cnum-enum:)'."""
    keys = (JSON_INDEX_ISL_FILE, JSON_INDEX_ISL_LNUM,
            JSON_INDEX_ISL_CNUM, JSON_INDEX_ISL_ENUM)
    return ' ({}:{}:{}-{}:)'.format(*(loc[k] for k in keys))
def text_of_report(report):
    """Format one json issue row as a human-readable paragraph."""
    # optional location inside infer's own sources (for infer dev reports)
    infer_loc = ''
    if JSON_INDEX_INFER_SOURCE_LOC in report:
        infer_loc = _text_of_infer_loc(report[JSON_INDEX_INFER_SOURCE_LOC])
    return '%s:%d: %s: %s%s\n  %s' % (
        report[JSON_INDEX_FILENAME],
        report[JSON_INDEX_LINE],
        report[JSON_INDEX_KIND].lower(),
        report[JSON_INDEX_TYPE],
        infer_loc,
        report[JSON_INDEX_QUALIFIER],
    )
def _text_of_report_list(project_root, reports, bugs_txt_path, limit=None,
                         formatter=colorize.TERMINAL_FORMATTER):
    """Render a list of json issue rows as one displayable text blob.

    Shows at most `limit` issues (None = all), each with its colorized
    source context, followed by a per-bug-type count summary.
    NOTE(review): Python 2 only -- uses `unicode`, a tuple-unpacking
    lambda, and list.sort() on dict.items().
    """
    n_issues = len(reports)
    if n_issues == 0:
        if formatter == colorize.TERMINAL_FORMATTER:
            out = colorize.color('  No issues found  ',
                                 colorize.SUCCESS, formatter)
            return out + '\n'
        else:
            return 'No issues found'
    text_errors_list = []
    for report in reports[:limit]:
        filename = report[JSON_INDEX_FILENAME]
        line = report[JSON_INDEX_LINE]
        source_context = ''
        source_context = source.build_source_context(
            os.path.join(project_root, filename),
            formatter,
            line,
        )
        indenter = source.Indenter() \
            .indent_push() \
            .add(source_context)
        source_context = '\n' + unicode(indenter)
        msg = text_of_report(report)
        # color the message according to its severity kind
        if report[JSON_INDEX_KIND] == ISSUE_KIND_ERROR:
            msg = colorize.color(msg, colorize.ERROR, formatter)
        elif report[JSON_INDEX_KIND] == ISSUE_KIND_WARNING:
            msg = colorize.color(msg, colorize.WARNING, formatter)
        elif report[JSON_INDEX_KIND] == ISSUE_KIND_ADVICE:
            msg = colorize.color(msg, colorize.ADVICE, formatter)
        text = '%s%s' % (msg, source_context)
        text_errors_list.append(text)
    # histogram of issue counts per bug type (over ALL reports, not just
    # the displayed ones)
    error_types_count = {}
    for report in reports:
        t = report[JSON_INDEX_TYPE]
        # assert failures are not very informative without knowing
        # which assertion failed
        if t == 'Assert_failure' and JSON_INDEX_INFER_SOURCE_LOC in report:
            t += _text_of_infer_loc(report[JSON_INDEX_INFER_SOURCE_LOC])
        if t not in error_types_count:
            error_types_count[t] = 1
        else:
            error_types_count[t] += 1
    max_type_length = max(map(len, error_types_count.keys())) + 2
    sorted_error_types = error_types_count.items()
    sorted_error_types.sort(key=operator.itemgetter(1), reverse=True)
    types_text_list = map(lambda (t, count): '%s: %d' % (
        t.rjust(max_type_length),
        count,
    ), sorted_error_types)
    text_errors = '\n\n'.join(text_errors_list)
    # NOTE(review): with the default limit=None this relies on Python 2
    # ordering None < 0 (evaluates False); it would TypeError on Python 3
    if limit >= 0 and n_issues > limit:
        text_errors += colorize.color(
            ('\n\n...too many issues to display (limit=%d exceeded), please ' +
             'see %s or run `inferTraceBugs` for the remaining issues.')
            % (limit, bugs_txt_path), colorize.HEADER, formatter)
    issues_found = 'Found {n_issues}'.format(
        n_issues=utils.get_plural('issue', n_issues),
    )
    msg = '{issues_found}\n\n{issues}\n\n{header}\n\n{summary}'.format(
        issues_found=colorize.color(issues_found,
                                    colorize.HEADER,
                                    formatter),
        issues=text_errors,
        header=colorize.color('Summary of the reports',
                              colorize.HEADER, formatter),
        summary='\n'.join(types_text_list),
    )
    return msg
def _is_user_visible(project_root, report):
    """Whether a report row should be shown to the user, by severity kind.

    `project_root` is unused but kept for interface compatibility.
    """
    return report[JSON_INDEX_KIND] in (ISSUE_KIND_ERROR,
                                       ISSUE_KIND_WARNING,
                                       ISSUE_KIND_ADVICE)
def print_and_save_errors(infer_out, project_root, json_report, bugs_out,
                          pmd_xml):
    """Print user-visible issues to stdout and persist them to disk.

    Loads the json report, filters it to user-visible kinds, prints a
    colorized summary (capped at 10 issues), writes the full plain-text
    report to `bugs_out`, and optionally writes a PMD-style XML file
    under `infer_out`.
    """
    errors = utils.load_json_from_path(json_report)
    errors = [e for e in errors if _is_user_visible(project_root, e)]
    console_out = _text_of_report_list(project_root, errors, bugs_out,
                                       limit=10)
    utils.stdout('\n' + console_out)
    plain_out = _text_of_report_list(project_root, errors, bugs_out,
                                     formatter=colorize.PLAIN_FORMATTER)
    with codecs.open(bugs_out, 'w',
                     encoding=config.CODESET, errors='replace') as file_out:
        file_out.write(plain_out)
    if pmd_xml:
        xml_out = os.path.join(infer_out, config.PMD_XML_FILENAME)
        with codecs.open(xml_out, 'w',
                         encoding=config.CODESET,
                         errors='replace') as file_out:
            file_out.write(_pmd_xml_of_issues(errors))
def merge_reports_from_paths(report_paths):
    """Load every json report in `report_paths` and merge them into a
    single sorted, de-duplicated list of rows."""
    merged = []
    for path in report_paths:
        merged += utils.load_json_from_path(path)
    return _sort_and_uniq_rows(merged)
def _pmd_xml_of_issues(issues):
if etree is None:
print('ERROR: "etree" Python package not found.')
print('ERROR: You need to install it to use Infer with --pmd-xml')
sys.exit(1)
root = etree.Element('pmd')
root.attrib['version'] = '5.4.1'
root.attrib['date'] = datetime.datetime.now().isoformat()
for issue in issues:
fully_qualifed_method_name = re.search('(.*)\(.*',
issue[JSON_INDEX_PROCEDURE_ID])
class_name = ''
package = ''
if fully_qualifed_method_name is not None:
# probably Java
info = fully_qualifed_method_name.groups()[0].split('.')
class_name = info[-2:-1][0]
method = info[-1]
package = '.'.join(info[0:-2])
else:
method = issue[JSON_INDEX_PROCEDURE]
file_node = etree.Element('file')
file_node.attrib['name'] = issue[JSON_INDEX_FILENAME]
violation = etree.Element('violation')
violation.attrib['begincolumn'] = '0'
violation.attrib['beginline'] = str(issue[JSON_INDEX_LINE])
violation.attrib['endcolumn'] = '0'
violation.attrib['endline'] = str(issue[JSON_INDEX_LINE] + 1)
violation.attrib['class'] = class_name
violation.attrib['method'] = method
violation.attrib['package'] = package
violation.attrib['priority'] = '1'
violation.attrib['rule'] = issue[JSON_INDEX_TYPE]
violation.attrib['ruleset |
query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'NetworkSecurityGroup')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('NetworkSecurityGroup', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('NetworkSecurityGroup', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}'} # type: ignore
    async def begin_create_or_update(
        self,
        resource_group_name: str,
        network_security_group_name: str,
        parameters: "_models.NetworkSecurityGroup",
        **kwargs: Any
    ) -> AsyncLROPoller["_models.NetworkSecurityGroup"]:
        """Creates or updates a network security group in the specified resource group.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param network_security_group_name: The name of the network security group.
        :type network_security_group_name: str
        :param parameters: Parameters supplied to the create or update network security group
         operation.
        :type parameters: ~azure.mgmt.network.v2019_04_01.models.NetworkSecurityGroup
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either NetworkSecurityGroup or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2019_04_01.models.NetworkSecurityGroup]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.NetworkSecurityGroup"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # only issue the initial PUT when not resuming from a saved state
        if cont_token is None:
            raw_result = await self._create_or_update_initial(
                resource_group_name=resource_group_name,
                network_security_group_name=network_security_group_name,
                parameters=parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # these kwargs were consumed by the initial call; do not forward
        # them to the poller
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # deserialize the final response of the long-running operation
            deserialized = self._deserialize('NetworkSecurityGroup', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        # used by the poller to re-resolve the resource URL
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}'}  # type: ignore
    async def _update_tags_initial(
        self,
        resource_group_name: str,
        network_security_group_name: str,
        parameters: "_models.TagsObject",
        **kwargs: Any
    ) -> "_models.NetworkSecurityGroup":
        """Issue the PATCH request that updates a network security group's tags.

        Internal first step of the update-tags operation; returns the
        deserialized NetworkSecurityGroup (or the result of ``cls(...)``
        when a custom ``cls`` callback is supplied).
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.NetworkSecurityGroup"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-04-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL
        url = self._update_tags_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'TagsObject')
        body_content_kwargs['content'] = body_content
        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # only 200 is a success for a tags PATCH
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('NetworkSecurityGroup', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
_update_tags_initial.metadata = |
#!/usr/bin/env python3
import sys
from pathlib import Path
# file extensions accepted for changelog fragments in the CHANGES folder
ALLOWED_SUFFIXES = ['.feature',
                    '.bugfix',
                    '.doc',
                    '.removal',
                    '.misc']
def get_root(script_path):
    """Return the repository root: the closest ancestor of `script_path`
    that contains a `.git` entry.

    Raises RuntimeError when the filesystem root is reached without
    finding one.
    """
    folder = script_path.absolute().parent
    while not (folder / '.git').exists():
        # BUG FIX: the original compared the Path object against the string
        # `folder.anchor`, which is never equal, so reaching the filesystem
        # root looped forever instead of raising.  At the root, a Path is
        # its own parent.
        if folder == folder.parent:
            raise RuntimeError("git repo not found")
        folder = folder.parent
    return folder
def main(argv):
    """Validate that every entry in CHANGES/ uses an allowed suffix.

    Returns 0 when all filenames are valid, 1 otherwise (usable as a
    process exit status).
    """
    print('Check "CHANGES" folder... ', end='', flush=True)
    script = Path(argv[0])
    changes_dir = get_root(script) / 'CHANGES'
    bad_entries = []
    for entry in changes_dir.iterdir():
        if entry.name in ('.gitignore', '.TEMPLATE.rst'):
            continue
        if entry.suffix not in ALLOWED_SUFFIXES:
            # First offender: terminate the progress line before the
            # error report starts.
            if not bad_entries:
                print('')
            print(entry, 'has illegal suffix', file=sys.stderr)
            bad_entries.append(entry)
    if bad_entries:
        print('', file=sys.stderr)
        print('Allowed suffixes are:', ALLOWED_SUFFIXES, file=sys.stderr)
        print('', file=sys.stderr)
    else:
        print('OK')
    return int(bool(bad_entries))
if __name__ == '__main__':
    # Exit status 0 when all CHANGES entries are valid, 1 otherwise.
    sys.exit(main(sys.argv))
|
for p in pad.GetListOfPrimitives():
if is_plottable(p):
if p.GetName() != "__frame":
plottables.append({'p': asrootpy(p.Clone(gen_random_name()))})
for legend_entry in legend_entries:
if p == legend_entry.GetObject():
plottables[-1]['legend_title'] = legend_entry.GetLabel()
else:
self.xtitle = p.GetXaxis().GetTitle()
self.ytitle = p.GetYaxis().GetTitle()
# set legend title if any
if legend.GetHeader():
self.legend.title = legend.GetHeader()
self._plottables += plottables
def draw_to_canvas(self):
"""
Draw this figure to a canvas, which is then returned.
"""
if len(self._plottables) == 0:
raise IndexError("No plottables defined")
c = Canvas(width=self.style.canvasWidth,
height=self.style.canvasHeight,
size_includes_decorations=True)
if self.legend.position == 'seperate':
legend_width = .2
pad_legend = Pad(1 - legend_width, 0, 1., 1., name="legend")
pad_legend.SetLeftMargin(0.0)
pad_legend.SetFillStyle(0) # make this pad transparent
pad_legend.Draw()
else:
legend_width = 0
pad_plot = Pad(0., 0., 1 - legend_width, 1., name="plot", )
pad_plot.SetMargin(*self.style.plot_margins)
pad_plot.Draw()
pad_plot.cd()
# awkward hack around a bug in get limits where everything fails if one plottable is shitty...
xmin, xmax, ymin, ymax = None, None, None, None
for pdic in self._plottables:
try:
limits = get_limits(pdic['p'], logx=self.plot.logx, logy=self.plot.logy)
# Beware: Python 2 evaluates min/max of None in an undefined way with no error! Wow...
xmin = min([xmin, limits[0]]) if xmin is not None else limits[0]
xmax = max([xmax, limits[1]]) if xmax is not None else limits[1]
ymin = min([ymin, limits[2]]) if ymin is not None else limits[2]
ymax = max([ymax, limits[3]]) if ymax is not None else limits[3]
except TypeError:
# some plottables do not work with this rootpy function (eg. graph without points, tf1)
# TODO: should be fixed upstream
pass
# overwrite these ranges if defaults are given
if self.plot.xmin is not None:
xmin = self.plot.xmin
if self.plot.xmax is not None:
xmax = self.plot.xmax
if self.plot.ymax is not None:
ymax = self.plot.ymax
if self.plot.ymin is not None:
ymin = self.plot.ymin
if not all([val is not None for val in [xmin, xmax, ymin, ymax]]):
raise TypeError("unable to determine plot axes ranges from the given plottables")
colors = get_color_generator(self.plot.palette, self.plot.palette_ncolors)
# draw an empty frame within the given ranges;
frame_from_plottable = [p for p in self._plottables if p.get('use_as_frame')]
if len(frame_from_plottable) > 0:
frame = frame_from_plottable[0]['p'].Clone('__frame')
frame.Reset()
frame.SetStats(0)
frame.xaxis.SetRangeUser(xmin, xmax)
frame.yaxis.SetRangeUser(ymin, ymax)
frame.GetXaxis().SetTitle(self.xtitle)
frame.GetYaxis().SetTitle(self.ytitle)
self._theme_plottable(frame)
frame.Draw()
else:
frame = Graph()
frame.SetName("__frame")
# add a silly point in order to have root draw this frame...
frame.SetPoint(0, 0, 0)
frame.GetXaxis().SetLimits(xmin, xmax)
frame.GetYaxis().SetLimits(ymin, ymax)
frame.SetMinimum(ymin)
frame.SetMaximum(ymax)
frame.GetXaxis().SetTitle(self.xtitle)
frame.GetYaxis().SetTitle(self.ytitle)
self._theme_plottable(frame)
# Draw this frame: 'A' should draw the axis, but does not work if nothing else is drawn.
# L would draw a line between the points but is seems to do nothing if only one point is present
# P would also draw that silly point but we don't want that!
frame.Draw("AL")
xtick_length = frame.GetXaxis().GetTickLength()
ytick_length = frame.GetYaxis().GetTickLength()
for i, pdic in enumerate(self._plottables):
obj = pdic['p']
if isinstance(obj, ROOT.TLegendEntry):
_root_color = Color(pdic['color'])
_root_markerstyle = MarkerStyle(pdic['markerstyle'])
obj.SetMarkerStyle(_root_markerstyle('root'))
obj.SetMarkerColor(_root_color('root'))
elif isinstance(obj, (ROOT.TH1, ROOT.TGraph, ROOT.TF1)):
self._theme_plottable(obj)
obj.SetMarkerStyle(pdic.get('markerstyle', 'circle'))
if pdic.get('color', None):
obj.color = pdic['color']
else:
try:
color = next(colors)
except StopIteration:
log.warning("Ran out of colors; defaulting to black")
color = 1
obj.color = color
xaxis = obj.GetXaxis()
yaxis = obj.GetYaxis()
# Set the title to the given title:
obj.title = self.title
# the xaxis depends on the type of the plottable :P
if isinstance(obj, ROOT.TGraph):
# SetLimit on a TH1 is simply messing up the
# lables of the axis to screw over the user, presumably...
xaxis.SetLimits(xmin, xmax)
yaxis.SetLimits(ymin, ymax) # for unbinned data
# 'P' plots the current marker, 'L' would connect the dots with a simple line
# see: https://root.cern.ch/doc/master/classTGraphPainter.html for more draw options
drawoption = 'Psame'
elif isinstance(obj, ROOT.TH1):
obj.SetStats(0)
xaxis.SetRangeUser(xmin, xmax)
yaxis.SetRangeUser(ymin, ymax)
drawoption = 'same'
elif isinstance(obj, ROOT.TF1):
# xaxis.SetLimits(xmin, xmax)
# yaxis.SetLimits(ymin, ymax) # for unbinned data
drawoption = 'same'
obj.Draw(drawoption)
# Its ok if obj is non; then we just add it to the legend.
else:
raise TypeError("Un-plottable type given.")
pad_plot.SetTicks()
pad_plot.SetLogx(self.plot.logx)
pad_plot.SetLogy(self.plot.logy)
pad_plot.SetGridx(self.plot.gridx)
pad_plot.SetGridy(self.plot.gridy)
# do we have legend titles?
if any([pdic.get('legend_title') for pdic in self._plottables]):
leg = self._create_legend()
longest_label = 0
for pdic in self._plottables:
if not pdic.get('legend_title', False):
continue
leg.AddEntry(pdic['p'], pdic['legend_title'])
if len(pdic['legend_title']) > longest_label:
longest_label = len(pdic['legend_title'])
# Set the legend position
| # vertical:
if self.legend.position.startswith('t'):
leg_hight = leg.y2 - leg.y1
leg.y2 = 1 - pad_p | lot.GetTopMargin() - ytick_length
leg.y1 = leg.y2 - leg_hight
elif self.legend.position.startswith('b'):
leg_hight = leg.y2 - leg.y1
leg.y1 = pad_plot.GetBottomMargin() + ytick_length
leg.y2 = leg.y1 + leg_hight
# horizontal:
if self.legend.position[1:].startswith('l |
from pyramid.httpexceptions import HTTPMovedPermanently
from pyramid.view import view_config
from zeit.redirect.db import Redirect
import json
@view_config(route_name='redirect', renderer='string')
def check_redirect(request):
    """Issue a 301 when a redirect is configured for the request path.

    Responds with an empty body when no redirect matches.
    """
    match = Redirect.query().filter_by(source=request.path).first()
    if not match:
        return ''
    # XXX Should we be protocol-relative (https etc.)?
    target = 'http://' + request.headers['Host'] + match.target
    raise HTTPMovedPermanently(target)
@view_config(route_name='add', renderer='string', request_method='POST')
def add_redirect(request):
    """Create a redirect from a JSON body with 'source' and 'target' keys."""
    payload = json.loads(request.body)
    Redirect.add(payload['source'], payload['target'])
    return '{}'
|
from staffjoy.resource import Resource
from staffjoy.resources.location import Location
from staffjoy.resources.admin import Admin
from staffjoy.resources.organization_worker import OrganizationWorker
class Organization(Resource):
    """API resource for a single organization.

    Child resources (locations, admins, workers) are reached through
    the helper methods below.
    """
    PATH = "organizations/{organization_id}"
    ID_NAME = "organization_id"

    def get_locations(self, **kwargs):
        """Return all locations belonging to this organization."""
        return Location.get_all(parent=self, **kwargs)

    def get_location(self, id):
        """Return a single location by id."""
        return Location.get(parent=self, id=id)

    def create_location(self, **kwargs):
        """Create a location under this organization."""
        return Location.create(parent=self, **kwargs)

    def get_admins(self, **kwargs):
        """Return all admins of this organization.

        ``**kwargs`` added for consistency with the other collection
        getters (``get_locations``/``get_workers``); calling with no
        arguments behaves exactly as before.
        """
        return Admin.get_all(parent=self, **kwargs)

    def get_admin(self, id):
        """Return a single admin by id."""
        return Admin.get(parent=self, id=id)

    def create_admin(self, **kwargs):
        """Create an admin. Typically just pass email."""
        return Admin.create(parent=self, **kwargs)

    def get_workers(self, **kwargs):
        """Return all workers of this organization."""
        return OrganizationWorker.get_all(parent=self, **kwargs)
|
from setuptools import setup, find_packages
from fccsmap import __version__
# Test-only dependencies are read from requirements-test.txt so this
# file and the requirements file cannot drift apart.
test_requirements = []
with open('requirements-test.txt') as f:
    # splitlines() already yields one requirement per line; the old
    # identity comprehension around it added nothing.
    test_requirements = f.read().splitlines()

setup(
    name='fccsmap',
    version=__version__,
    author='Joel Dubowy',
    license='GPLv3+',
    author_email='jdubowy@gmail.com',
    packages=find_packages(),
    scripts=[
        'bin/fccsmap'
    ],
    package_data={
        'fccsmap': ['data/*.nc']
    },
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)",
        "Programming Language :: Python :: 3.8",
        "Operating System :: POSIX",
        "Operating System :: MacOS"
    ],
    url='https://github.com/pnwairfire/fccsmap/',
    description='supports the look-up of FCCS fuelbed information by lat/lng or vector geo spatial data.',
    install_requires=[
        "afscripting>=2.0.0",
        # Note: numpy and gdal must now be installed manually beforehand
        "shapely==1.7.1",
        "pyproj==3.0.0.post1",
        "rasterstats==0.15.0"
    ],
    dependency_links=[
        "https://pypi.airfire.org/simple/afscripting/",
    ],
    tests_require=test_requirements
)
|
#!/usr/bin/env python
# Chained-comparison exercise: compare counts of people, cars, and
# trucks and print a decision for each comparison.
people = 30
cars = 40
trucks = 15

if cars > people:
    print("We should take the cars.")
elif cars < people:
    # Fixed: added the missing trailing period for consistency with the
    # other messages.
    print("We should not take the cars.")
else:
    print("We can't decide.")

if trucks > cars:
    print("That's too many trucks.")
elif trucks < cars:
    # Fixed typo: "coudl" -> "could".
    print("Maybe we could take the trucks.")
else:
    print("We still can't decide.")

if people > trucks:
    print("Alright, let's just take the trucks.")
else:
    print("Fine, let's stay home then.")
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
import string
from urlparse import urlparse, parse_qs
from mopidy import backend
from mopidy.models import SearchResult, Track, Album, Artist
import pykka
import pafy
import requests
import unicodedata
from mopidy_youtube import logger
yt_api_endpoint = 'https://www.googleapis.com/youtube/v3/'
yt_key = 'AIzaSyAl1Xq9DwdE_KD4AtPaE4EJl3WZe2zCqg4'
def resolve_track(track, stream=False):
    """Resolve a Track model or uri-like string to a playable Track.

    Track objects carry the video id in their ``comment`` field; plain
    strings carry it after the last ``.``.
    """
    logger.debug("Resolving Youtube for track '%s'", track)
    if hasattr(track, 'uri'):
        video_id = track.comment
    else:
        video_id = track.split('.')[-1]
    return resolve_url(video_id, stream)
def safe_url(uri):
    """Return *uri* reduced to uri/filesystem-safe ASCII characters.

    Non-ASCII characters are NFKD-decomposed and dropped, disallowed
    characters removed, and runs of whitespace collapsed to one space.
    """
    valid_chars = "-_.() %s%s" % (string.ascii_letters, string.digits)
    safe_uri = unicodedata.normalize(
        'NFKD',
        unicode(uri)
    ).encode('ASCII', 'ignore')
    # Raw string: '\s' in a plain literal is an invalid escape
    # (DeprecationWarning, error in newer Pythons).
    return re.sub(
        r'\s+',
        ' ',
        ''.join(c for c in safe_uri if c in valid_chars)
    ).strip()
def resolve_url(url, stream=False):
    """Resolve a Youtube URL/video id to a mopidy ``Track``.

    When *stream* is false the track uri is a ``youtube:video/...``
    pseudo-uri; when true it is the direct audio (or video) stream url.
    Returns None when no stream url could be obtained.
    """
    video = pafy.new(url)
    if not stream:
        uri = 'youtube:video/%s.%s' % (
            safe_url(video.title), video.videoid
        )
    else:
        # Prefer an audio-only stream; fall back to the best A/V stream.
        uri = video.getbestaudio()
        if not uri:  # get video url
            uri = video.getbest()
        logger.debug('%s - %s %s %s' % (
            video.title, uri.bitrate, uri.mediatype, uri.extension))
        uri = uri.url
    if not uri:
        return
    if '-' in video.title:
        # "Artist - Title" style name: split into artist and track name.
        title = video.title.split('-')
        track = Track(
            name=title[1].strip(),
            comment=video.videoid,
            length=video.length*1000,
            artists=[Artist(name=title[0].strip())],
            album=Album(
                name='Youtube',
                images=[video.bigthumb, video.bigthumbhd]
            ),
            uri=uri
        )
    else:
        track = Track(
            name=video.title,
            comment=video.videoid,
            length=video.length*1000,
            album=Album(
                name='Youtube',
                images=[video.bigthumb, video.bigthumbhd]
            ),
            uri=uri
        )
    return track
def search_youtube(q):
    """Search the Youtube Data API for *q*; return resolved Tracks."""
    query = {
        'part': 'id',
        'maxResults': 15,
        'type': 'video',
        'q': q,
        'key': yt_key
    }
    pl = requests.get(yt_api_endpoint+'search', params=query)
    playlist = []
    for yt_id in pl.json().get('items'):
        try:
            track = resolve_url(yt_id.get('id').get('videoId'))
            playlist.append(track)
        except Exception as e:
            # Best-effort: skip hits that fail to resolve.
            # (e.message is Python 2 only.)
            logger.info(e.message)
    return playlist
def resolve_playlist(url):
    """Resolve every item of a Youtube playlist id to a Track list."""
    logger.info("Resolving Youtube for playlist '%s'", url)
    query = {
        'part': 'snippet',
        'maxResults': 50,
        'playlistId': url,
        'fields': 'items/snippet/resourceId',
        'key': yt_key
    }
    # Bug fix: the Youtube Data API v3 resource is 'playlistItems'
    # (plural); 'playlistItem' is not a valid endpoint and returns 404,
    # so playlists never resolved.
    pl = requests.get(yt_api_endpoint+'playlistItems', params=query)
    playlist = []
    for yt_id in pl.json().get('items'):
        try:
            yt_id = yt_id.get('snippet').get('resourceId').get('videoId')
            playlist.append(resolve_url(yt_id))
        except Exception as e:
            # Best-effort: skip entries that fail to resolve.
            # (e.message is Python 2 only.)
            logger.info(e.message)
    return playlist
class YoutubeBackend(pykka.ThreadingActor, backend.Backend):
    """Mopidy backend actor exposing the ``youtube``/``yt`` uri schemes."""
    def __init__(self, config, audio):
        super(YoutubeBackend, self).__init__()
        self.config = config
        self.library = YoutubeLibraryProvider(backend=self)
        self.playback = YoutubePlaybackProvider(audio=audio, backend=self)
        self.uri_schemes = ['youtube', 'yt']
class YoutubeLibraryProvider(backend.LibraryProvider):
    """Resolves youtube uris/urls and performs Youtube searches."""
    def lookup(self, track):
        """Resolve a 'yt:'/'youtube:' uri or raw Youtube url to tracks."""
        if 'yt:' in track:
            track = track.replace('yt:', '')
        if 'youtube.com' in track:
            url = urlparse(track)
            req = parse_qs(url.query)
            if 'list' in req:
                # A 'list' query parameter marks a playlist url.
                return resolve_playlist(req.get('list')[0])
            else:
                return [resolve_url(track)]
        else:
            return [resolve_url(track)]
    def search(self, query=None, uris=None):
        """Search Youtube, or resolve a url/playlist given via 'uri'."""
        if not query:
            return
        if 'uri' in query:
            search_query = ''.join(query['uri'])
            url = urlparse(search_query)
            if 'youtube.com' in url.netloc:
                req = parse_qs(url.query)
                if 'list' in req:
                    return SearchResult(
                        uri='youtube:search',
                        tracks=resolve_playlist(req.get('list')[0])
                    )
                else:
                    logger.info(
                        "Resolving Youtube for track '%s'", search_query)
                    return SearchResult(
                        uri='youtube:search',
                        tracks=[resolve_url(search_query)]
                    )
        else:
            # Python 2: dict.values() is a list, so [0] takes the first
            # search field's term list.
            search_query = '|'.join(query.values()[0])
            logger.info("Searching Youtube for query '%s'", search_query)
            return SearchResult(
                uri='youtube:search',
                tracks=search_youtube(search_query)
            )
class YoutubePlaybackProvider(backend.PlaybackProvider):
    def play(self, track):
        # Resolve to a direct stream url (stream=True) before playback.
        track = resolve_track(track, True)
        return super(YoutubePlaybackProvider, self).play(track)
|
import unittest
import pysal
import numpy as np
import random
class Test_Maxp(unittest.TestCase):
    """Regression tests for pysal's Max-p regionalization."""
    def setUp(self):
        # Fixed seeds: the expected region solutions below depend on them.
        random.seed(100)
        np.random.seed(100)
    def test_Maxp(self):
        w = pysal.lat2W(10, 10)
        z = np.random.random_sample((w.n, 2))
        p = np.ones((w.n, 1), float)
        floor = 3
        solution = pysal.region.Maxp(
            w, z, floor, floor_variable=p, initial=100)
        # assertEqual / assertAlmostEqual: the *Equals aliases are
        # deprecated and were removed in Python 3.12.
        self.assertEqual(solution.p, 29)
        self.assertEqual(solution.regions[0], [4, 14, 5, 24, 3, 25, 15, 23])
    def test_inference(self):
        w = pysal.weights.lat2W(5, 5)
        z = np.random.random_sample((w.n, 2))
        p = np.ones((w.n, 1), float)
        floor = 3
        solution = pysal.region.Maxp(
            w, z, floor, floor_variable=p, initial=100)
        solution.inference(nperm=9)
        self.assertAlmostEqual(solution.pvalue, 0.20000000000000001, 10)
    def test_cinference(self):
        w = pysal.weights.lat2W(5, 5)
        z = np.random.random_sample((w.n, 2))
        p = np.ones((w.n, 1), float)
        floor = 3
        solution = pysal.region.Maxp(
            w, z, floor, floor_variable=p, initial=100)
        solution.cinference(nperm=9, maxiter=100)
        self.assertAlmostEqual(solution.cpvalue, 0.10000000000000001, 10)
    def test_Maxp_LISA(self):
        w = pysal.lat2W(10, 10)
        z = np.random.random_sample((w.n, 2))
        p = np.ones(w.n)
        mpl = pysal.region.Maxp_LISA(w, z, p, floor=3, floor_variable=p)
        self.assertEqual(mpl.p, 31)
        self.assertEqual(mpl.regions[0], [99, 89, 98, 97])
# Allow running this module directly as a test script.
suite = unittest.TestLoader().loadTestsFromTestCase(Test_Maxp)
if __name__ == '__main__':
    runner = unittest.TextTestRunner()
    runner.run(suite)
|
class Node(object):
    """Search-tree node for pathfinding.

    ``totalCost`` caches ``costSoFar + distanceToEnd`` (the classic
    A* f = g + h value).
    """

    def __init__(self, pos, parent, costSoFar, distanceToEnd):
        self.pos = pos
        self.parent = parent
        self.costSoFar = costSoFar
        self.distanceToEnd = distanceToEnd
        # f = g + h
        self.totalCost = costSoFar + distanceToEnd
|
from django.views.generic.detail import DetailView
from django.views.generic.edit import UpdateView, DeleteView
from catalog.views.base import GenericListView, GenericCreateView
from catalog.models import Astronaut, CrewedMission
from catalog.forms import AstronautForm
from catalog.filters import AstronautFilter
from django.core.urlresolvers import reverse_lazy
from django.core.urlresolvers import reverse
from django.http import Http404
class AstronautListView(GenericListView):
    """Filterable list of astronauts."""
    model = Astronaut
    f = AstronautFilter
    # Extra columns shown by the generic list template.
    display_data = ('organization', 'nationality', 'birth_date')
class AstronautDetailView(DetailView):
    """Read-only detail page for a single astronaut."""
    model = Astronaut
    template_name = "catalog/astronaut_detail.html"
cl | ass AstronautCreateView(GenericCreateView):
model = Astronaut
form_class = AstronautForm
success_url = reverse_lazy("astronaut_list")
def form_valid(self, form):
obj = form.save(commit=False)
obj.creator = self.request.user
obj.save()
return super(AstronautUpdat | eView, self).form_valid(form)
def get_success_url(self):
return reverse("astronaut_detail", args=(self.object.pk,))
class AstronautUpdateView(UpdateView):
    """Edit an astronaut, recording the requesting user as modifier."""
    model = Astronaut
    form_class = AstronautForm
    template_name = "catalog/generic_update.html"
    initial = {}
    def form_valid(self, form):
        obj = form.save(commit=False)
        # Track who last modified the record.
        obj.modifier = self.request.user
        obj.save()
        return super(AstronautUpdateView, self).form_valid(form)
    def get_success_url(self):
        return reverse("astronaut_detail", args=(self.object.pk,))
class AstronautDeleteView(DeleteView):
    """Confirm-and-delete page for an astronaut."""
    model = Astronaut
    template_name = "catalog/generic_delete.html"
    success_url = reverse_lazy("astronaut_list")
|
# hello_asyncio.py
import asyncio
import traceback

import tornado.concurrent
import tornado.gen
import tornado.ioloop
import tornado.web
from tornado.httpclient import AsyncHTTPClient
try:
import aioredis
except ImportError:
print("Please install aioredis: pip install aioredis")
exit(0)
class AsyncRequestHandler(tornado.web.RequestHandler):
    """Base class for request handlers with `asyncio` coroutines support.
    It runs methods on Tornado's ``AsyncIOMainLoop`` instance.
    Subclasses have to implement one of `get_async()`, `post_async()`, etc.
    Asynchronous method should be decorated with `@asyncio.coroutine`.
    Usage example::
        class MyAsyncRequestHandler(AsyncRequestHandler):
            @asyncio.coroutine
            def get_async(self):
                html = yield from self.application.http.get('http://python.org')
                self.write({'html': html})
    You may also just re-define `get()` or `post()` methods and they will be simply run
    synchronously. This may be convenient for draft implementation, i.e. for testing
    new libs or concepts.
    """
    @tornado.gen.coroutine
    def get(self, *args, **kwargs):
        """Handle GET request asynchronously, delegates to
        ``self.get_async()`` coroutine.
        """
        yield self._run_method('get', *args, **kwargs)

    @tornado.gen.coroutine
    def post(self, *args, **kwargs):
        """Handle POST request asynchronously, delegates to
        ``self.post_async()`` coroutine.
        """
        yield self._run_method('post', *args, **kwargs)

    @asyncio.coroutine
    def _run_async(self, coroutine, future_, *args, **kwargs):
        """Perform coroutine and set result to ``Future`` object."""
        try:
            result = yield from coroutine(*args, **kwargs)
            future_.set_result(result)
        except Exception as e:
            future_.set_exception(e)
            # traceback is imported at module level (it was previously
            # used here without being imported -> NameError).
            print(traceback.format_exc())

    def _run_method(self, method_, *args, **kwargs):
        """Run ``get_async()`` / ``post_async()`` / etc. coroutine
        wrapping result with ``tornado.concurrent.Future`` for
        compatibility with ``gen.coroutine``.
        """
        coroutine = getattr(self, '%s_async' % method_, None)
        if not coroutine:
            raise tornado.web.HTTPError(405)
        future_ = tornado.concurrent.Future()
        # Bug fix: asyncio.async() is a *syntax error* on Python 3.7+
        # (``async`` became a keyword); ensure_future is the supported
        # spelling since Python 3.4.4.
        asyncio.ensure_future(
            self._run_async(coroutine, future_, *args, **kwargs)
        )
        return future_
class MainHandler(AsyncRequestHandler):
    """Demo handler: round-trips a value through Redis."""
    @asyncio.coroutine
    def get_async(self):
        # Connection established once at startup in init_with_loop().
        redis = self.application.redis
        yield from redis.set('my-key', 'OK')
        val = yield from redis.get('my-key')
        self.write('Hello asyncio.coroutine: %s' % val)
class Application(tornado.web.Application):
    """Tornado application bridged onto the asyncio event loop."""
    def __init__(self):
        # Prepare IOLoop class to run instances on asyncio
        tornado.ioloop.IOLoop.configure('tornado.platform.asyncio.AsyncIOMainLoop')
        handlers = [
            (r"/", MainHandler),
        ]
        super().__init__(handlers, debug=True)
    def init_with_loop(self, loop):
        # Open one shared Redis connection before the loop starts serving.
        self.redis = loop.run_until_complete(
            aioredis.create_redis(('localhost', 6379), loop=loop)
        )
if __name__ == "__main__":
    print("Run hello_asyncio ... http://127.0.0.1:8888")
    application = Application()
    application.listen(8888)
    loop = asyncio.get_event_loop()
    # The Redis connection must exist before the loop starts serving.
    application.init_with_loop(loop)
    loop.run_forever()
|
# -*- coding: utf-8 -*-
#
# Python documentation build configuration file
#
# This file is execfile()d with the current directory set to its containing dir.
#
# The contents of this file are pickled, so don't put values in the namespace
# that aren't pickleable (module imports are okay, they're removed automatically).
import sys, os, time
sys.path.append(os.path.ab | spath('tools/sphinxext'))
# General configuration
# ---------------------
extensions = ['sphinx.ext.refcounting', 'sphinx.ext.coverage',
'sphinx.ext.doctest', 'pyspecific']
templates_path = ['tools/sphinxext'] |
# General substitutions.
project = 'Python'
copyright = '1990-%s, Python Software Foundation' % time.strftime('%Y')
# The default replacements for |version| and |release|.
#
# The short X.Y version.
# version = '2.6'
# The full version, including alpha/beta/rc tags.
# release = '2.6a0'
# We look for the Include/patchlevel.h file in the current Python source tree
# and replace the values accordingly.
import patchlevel
version, release = patchlevel.get_version_info()
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%B %d, %Y'
# List of files that shouldn't be included in the build.
unused_docs = [
'maclib/scrap',
'library/xmllib',
'library/xml.etree',
]
# Ignore .rst files in Sphinx itself.
exclude_trees = ['tools/sphinx']
# Relative filename of the reference count data file.
refcount_file = 'data/refcounts.dat'
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True
# Options for HTML output
# -----------------------
html_theme = 'default'
html_theme_options = {'collapsiblesidebar': True}
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
html_use_smartypants = True
# Custom sidebar templates, filenames relative to this file.
html_sidebars = {
'index': 'indexsidebar.html',
}
# Additional templates that should be rendered to pages.
html_additional_pages = {
'download': 'download.html',
'index': 'indexcontent.html',
}
# Output an OpenSearch description file.
html_use_opensearch = 'http://docs.python.org/'
# Additional static files.
html_static_path = ['tools/sphinxext/static']
# Output file base name for HTML help builder.
htmlhelp_basename = 'python' + release.replace('.', '')
# Split the index
html_split_index = True
# Options for LaTeX output
# ------------------------
# The paper size ('letter' or 'a4').
latex_paper_size = 'a4'
# The font size ('10pt', '11pt' or '12pt').
latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual]).
_stdauthor = r'Guido van Rossum\\Fred L. Drake, Jr., editor'
latex_documents = [
('c-api/index', 'c-api.tex',
'The Python/C API', _stdauthor, 'manual'),
('distutils/index', 'distutils.tex',
'Distributing Python Modules', _stdauthor, 'manual'),
('extending/index', 'extending.tex',
'Extending and Embedding Python', _stdauthor, 'manual'),
('install/index', 'install.tex',
'Installing Python Modules', _stdauthor, 'manual'),
('library/index', 'library.tex',
'The Python Library Reference', _stdauthor, 'manual'),
('reference/index', 'reference.tex',
'The Python Language Reference', _stdauthor, 'manual'),
('tutorial/index', 'tutorial.tex',
'Python Tutorial', _stdauthor, 'manual'),
('using/index', 'using.tex',
'Python Setup and Usage', _stdauthor, 'manual'),
('faq/index', 'faq.tex',
'Python Frequently Asked Questions', _stdauthor, 'manual'),
('whatsnew/' + version, 'whatsnew.tex',
'What\'s New in Python', 'A. M. Kuchling', 'howto'),
]
# Collect all HOWTOs individually
latex_documents.extend(('howto/' + fn[:-4], 'howto-' + fn[:-4] + '.tex',
'', _stdauthor, 'howto')
for fn in os.listdir('howto')
if fn.endswith('.rst') and fn != 'index.rst')
# Additional stuff for the LaTeX preamble.
latex_preamble = r'''
\authoraddress{
\strong{Python Software Foundation}\\
Email: \email{docs@python.org}
}
\let\Verbatim=\OriginalVerbatim
\let\endVerbatim=\endOriginalVerbatim
'''
# Documents to append as an appendix to all manuals.
latex_appendices = ['glossary', 'about', 'license', 'copyright']
# Get LaTeX to handle Unicode correctly
latex_elements = {'inputenc': r'\usepackage[utf8x]{inputenc}', 'utf8extra': ''}
# Options for the coverage checker
# --------------------------------
# The coverage checker will ignore all modules/functions/classes whose names
# match any of the following regexes (using re.match).
coverage_ignore_modules = [
    # [Tt][kK] matches Tk/tk/TK/tK.  The previous form r'[T|t][k|K]'
    # placed '|' inside the character classes, unintentionally matching
    # a literal pipe character ('|' has no alternation meaning there).
    r'[Tt][kK]',
    r'Tix',
    r'distutils.*',
]
coverage_ignore_functions = [
'test($|_)',
]
coverage_ignore_classes = [
]
# Glob patterns for C source files for C API coverage, relative to this directory.
coverage_c_path = [
'../Include/*.h',
]
# Regexes to find C items in the source files.
coverage_c_regexes = {
    'cfunction': (r'^PyAPI_FUNC\(.*\)\s+([^_][\w_]+)'),
    'data': (r'^PyAPI_DATA\(.*\)\s+([^_][\w_]+)'),
    # Trailing [\s\\] = whitespace or a line-continuation backslash.
    # The previous class [\s|\\] also matched a literal '|' ('|' has no
    # alternation meaning inside a character class).
    'macro': (r'^#define ([^_][\w_]+)\(.*\)[\s\\]'),
}
# The coverage checker will ignore all C items whose names match these regexes
# (using re.match) -- the keys must be the same as in coverage_c_regexes.
coverage_ignore_c_items = {
# 'cfunction': [...]
}
|
import zlib, base64, sys
MAX_DEPTH = 50  # histogram covers search depths 0..MAX_DEPTH-1

if __name__ == "__main__":
    # Entries are whitespace-separated: <hash> <depth> <score> <fen...>
    # (at least 7 fields since a FEN has several space-separated parts).
    try:
        hashfile = open("hashfile", "r")
    except OSError:
        # Narrowed from a bare except: only file-open errors abort here
        # (a bare except also swallowed KeyboardInterrupt/SystemExit).
        print("ERROR: While opening hash file!")
        sys.exit(-1)
    depths = [0] * MAX_DEPTH
    # 'with' guarantees the file is closed even if a parse error raises.
    with hashfile:
        line_number = 0
        for line in hashfile:
            line_number += 1
            fields = line.strip().split()
            if len(fields) < 7:
                print(
                    "Bad entry on line " + str(line_number) + " (ignored): " + line.strip()
                )
                continue
            # Only the depth column is used; the hash/score/fen columns
            # were previously bound to unused locals ('hash' also
            # shadowed the builtin).
            depth = int(fields[1])
            depths[depth] += 1
    print("-- Depths --")
    for i in range(MAX_DEPTH):
        if not depths[i]:
            continue
        print("{:2d}: {:8d}".format(i, depths[i]))
    print("------------")
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-06-21 04:50
from __futur | e__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: adds a 'slug' field (default '') to the
    # Category and Tag models.
    dependencies = [
        ('blog', '0011_auto_20170621_1224'),
    ]
    operations = [
        migrations.AddField(
            model_name='category',
            name='slug',
            field=models.SlugField(default=''),
        ),
        migrations.AddField(
            model_name='tag',
            name='slug',
            field=models.SlugField(default=''),
        ),
    ]
|
from django.conf.urls import patterns, url
from django.core.urlresolvers import reverse
from django.http import Http404
from modeltree.tree import MODELTREE_DEFAULT_ALIAS, trees
from restlib2.params import Parametizer, IntParam, StrParam
from avocado.export import BaseExporter, registry as exporters
from avocado.query import pipeline, utils
from serrano.resources import API_VERSION
from serrano.resources.base import BaseResource
from serrano.resources.processors import EXPORTER_RESULT_PROCESSOR_NAME, \
process_results
# Single list of all registered exporters
EXPORT_TYPES = zip(*exporters.choices)[0]
class ExporterRootResource(BaseResource):
    """Root endpoint listing one sub-endpoint per registered export type."""
    def get_links(self, request):
        uri = request.build_absolute_uri
        links = {
            'self': uri(reverse('serrano:data:exporter')),
        }
        # One link per registered exporter, annotated with its
        # human-readable short/long names.
        for export_type in EXPORT_TYPES:
            links[export_type] = {
                'link': uri(reverse(
                    'serrano:data:exporter',
                    kwargs={'export_type': export_type}
                )),
                'data': {
                    'title': exporters.get(export_type).short_name,
                    'description': exporters.get(export_type).long_name,
                }
            }
        return links
    def get(self, request):
        resp = {
            'title': 'Serrano Exporter Endpoints',
            'version': API_VERSION
        }
        return resp
class ExporterParametizer(Parametizer):
    """Validated query parameters accepted by export requests."""
    limit = IntParam(50)  # rows per page
    processor = StrParam('default', choices=pipeline.query_processors)
    reader = StrParam('cached', choices=BaseExporter.readers)
    tree = StrParam(MODELTREE_DEFAULT_ALIAS, choices=trees)
class ExporterResource(BaseResource):
    """Runs a query and streams the results through a named exporter."""
    cache_max_age = 0
    private_cache = True
    parametizer = ExporterParametizer
    # Per-session query name so concurrent exports do not collide.
    QUERY_NAME_TEMPLATE = '{session_key}:{export_type}'
    def _get_query_name(self, request, export_type):
        return self.QUERY_NAME_TEMPLATE.format(
            session_key=request.session.session_key,
            export_type=export_type)
    # Resource is dependent on the available export types
    def is_not_found(self, request, response, export_type, **kwargs):
        return export_type not in EXPORT_TYPES
    def get(self, request, export_type, **kwargs):
        """Fetch result rows for the session's context/view and hand
        them to the exporter result processor; 404 on bad options."""
        view = self.get_view(request)
        context = self.get_context(request)
        params = self.get_params(request)
        # Configure the query options used for retrieving the results.
        query_options = {
            'export_type': export_type,
            'query_name': self._get_query_name(request, export_type),
        }
        query_options.update(**kwargs)
        query_options.update(params)
        try:
            row_data = utils.get_result_rows(context, view, query_options,
                                            request=request)
        except ValueError:
            raise Http404
        return process_results(
            request, EXPORTER_RESULT_PROCESSOR_NAME, row_data)
    # POST behaves identically to GET (large option payloads in a body).
    post = get
    def delete(self, request, export_type, **kwargs):
        """Cancel a running export query for this session, if any."""
        query_name = self._get_query_name(request, export_type)
        canceled = utils.cancel_query(query_name)
        return self.render(request, {'canceled': canceled})
exporter_resource = ExporterResource()
exporter_root_resource = ExporterRootResource()
# Resource endpoints
# Pages may be a single number or an inclusive 'start...stop' range.
urlpatterns = patterns(
    '',
    url(r'^$', exporter_root_resource, name='exporter'),
    url(r'^(?P<export_type>\w+)/$', exporter_resource, name='exporter'),
    url(r'^(?P<export_type>\w+)/(?P<page>\d+)/$', exporter_resource,
        name='exporter'),
    url(r'^(?P<export_type>\w+)/(?P<page>\d+)\.\.\.(?P<stop_page>\d+)/$',
        exporter_resource, name='exporter'),
)
|
# Copyright 2013 Nebula Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unl | ess required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Container v1 action implementations"""
import logging
import | six
from cliff import command
from cliff import lister
from cliff import show
from openstackclient.common import parseractions
from openstackclient.common import utils
class CreateContainer(lister.Lister):
    """Create new container"""
    log = logging.getLogger(__name__ + '.CreateContainer')
    def get_parser(self, prog_name):
        parser = super(CreateContainer, self).get_parser(prog_name)
        parser.add_argument(
            'containers',
            metavar='<container-name>',
            nargs="+",
            help='New container name(s)',
        )
        return parser
    @utils.log_method(log)
    def take_action(self, parsed_args):
        # Create each requested container, collecting the returned
        # metadata for tabular display.
        results = []
        for container in parsed_args.containers:
            data = self.app.client_manager.object_store.container_create(
                container=container,
            )
            results.append(data)
        # Lister expects (column names, iterable of rows).
        columns = ("account", "container", "x-trans-id")
        return (columns,
                (utils.get_dict_properties(
                    s, columns,
                    formatters={},
                ) for s in results))
class DeleteContainer(command.Command):
    """Delete container"""
    log = logging.getLogger(__name__ + '.DeleteContainer')
    def get_parser(self, prog_name):
        parser = super(DeleteContainer, self).get_parser(prog_name)
        parser.add_argument(
            'containers',
            metavar='<container>',
            nargs="+",
            help='Container(s) to delete',
        )
        return parser
    @utils.log_method(log)
    def take_action(self, parsed_args):
        # Delete each named container in turn; no output on success.
        for container in parsed_args.containers:
            self.app.client_manager.object_store.container_delete(
                container=container,
            )
class ListContainer(lister.Lister):
    """List containers"""
    log = logging.getLogger(__name__ + '.ListContainer')
    def get_parser(self, prog_name):
        parser = super(ListContainer, self).get_parser(prog_name)
        parser.add_argument(
            "--prefix",
            metavar="<prefix>",
            help="Filter list using <prefix>",
        )
        parser.add_argument(
            "--marker",
            metavar="<marker>",
            help="Anchor for paging",
        )
        parser.add_argument(
            "--end-marker",
            metavar="<end-marker>",
            help="End anchor for paging",
        )
        parser.add_argument(
            "--limit",
            metavar="<limit>",
            type=int,
            help="Limit the number of containers returned",
        )
        parser.add_argument(
            '--long',
            action='store_true',
            default=False,
            help='List additional fields in output',
        )
        parser.add_argument(
            '--all',
            action='store_true',
            default=False,
            help='List all containers (default is 10000)',
        )
        return parser
    @utils.log_method(log)
    def take_action(self, parsed_args):
        # --long adds size/object-count columns to the default name-only
        # listing.
        if parsed_args.long:
            columns = ('Name', 'Bytes', 'Count')
        else:
            columns = ('Name',)
        # Forward only the paging/filter options the user actually set.
        kwargs = {}
        if parsed_args.prefix:
            kwargs['prefix'] = parsed_args.prefix
        if parsed_args.marker:
            kwargs['marker'] = parsed_args.marker
        if parsed_args.end_marker:
            kwargs['end_marker'] = parsed_args.end_marker
        if parsed_args.limit:
            kwargs['limit'] = parsed_args.limit
        if parsed_args.all:
            kwargs['full_listing'] = True
        data = self.app.client_manager.object_store.container_list(
            **kwargs
        )
        return (columns,
                (utils.get_dict_properties(
                    s, columns,
                    formatters={},
                ) for s in data))
class SaveContainer(command.Command):
    """Save container contents locally"""

    log = logging.getLogger(__name__ + ".SaveContainer")

    def get_parser(self, prog_name):
        parser = super(SaveContainer, self).get_parser(prog_name)
        parser.add_argument(
            'container',
            metavar='<container>',
            help='Container to save',
        )
        return parser

    # Consistency fix: every other container command logs take_action via
    # the shared @utils.log_method decorator; this one hand-rolled a
    # self.log.debug() call instead.  Use the decorator.
    @utils.log_method(log)
    def take_action(self, parsed_args):
        """Save the named container via the object-store API."""
        self.app.client_manager.object_store.container_save(
            container=parsed_args.container,
        )
class SetContainer(command.Command):
    """Set container properties"""

    log = logging.getLogger(__name__ + '.SetContainer')

    def get_parser(self, prog_name):
        parser = super(SetContainer, self).get_parser(prog_name)
        parser.add_argument(
            'container',
            metavar='<container>',
            help='Container to modify',
        )
        parser.add_argument(
            "--property",
            metavar="<key=value>",
            required=True,
            action=parseractions.KeyValueAction,
            help="Set a property on this container "
                 "(repeat option to set multiple properties)"
        )
        return parser

    @utils.log_method(log)
    def take_action(self, parsed_args):
        # KeyValueAction has already folded repeated --property options
        # into a single dict.
        store = self.app.client_manager.object_store
        store.container_set(
            parsed_args.container,
            properties=parsed_args.property,
        )
class ShowContainer(show.ShowOne):
    """Display container details"""

    log = logging.getLogger(__name__ + '.ShowContainer')

    def get_parser(self, prog_name):
        parser = super(ShowContainer, self).get_parser(prog_name)
        parser.add_argument(
            'container',
            metavar='<container>',
            help='Container to display',
        )
        return parser

    @utils.log_method(log)
    def take_action(self, parsed_args):
        info = self.app.client_manager.object_store.container_show(
            container=parsed_args.container,
        )
        # Flatten the nested properties dict into a single printable cell.
        if 'properties' in info:
            info['properties'] = utils.format_dict(info.pop('properties'))
        return zip(*sorted(six.iteritems(info)))
class UnsetContainer(command.Command):
    """Unset container properties"""

    log = logging.getLogger(__name__ + '.UnsetContainer')

    def get_parser(self, prog_name):
        parser = super(UnsetContainer, self).get_parser(prog_name)
        parser.add_argument(
            'container',
            metavar='<container>',
            help='Container to modify',
        )
        parser.add_argument(
            '--property',
            metavar='<key>',
            required=True,
            action='append',
            default=[],
            help='Property to remove from container '
                 '(repeat option to remove multiple properties)',
        )
        return parser

    @utils.log_method(log)
    def take_action(self, parsed_args):
        # parsed_args.property is a list of keys built by action='append'.
        store = self.app.client_manager.object_store
        store.container_unset(
            parsed_args.container,
            properties=parsed_args.property,
        )
|
ue=["foo"]))
def test_debug_application_debug_endpoint(self):
    """POSTing to /debug/<id> routes to the patched handle_debug hook."""
    registry, server, debugged_app = TestDebuggedJsonRpcApplication.get_app()
    # Minimal WSGI environ addressing the debug endpoint.
    environ = {
        "SERVER_NAME": "localhost",
        "SERVER_PORT": "5060",
        "PATH_INFO": "/debug/1234",
        "REQUEST_METHOD": "POST",
        "wsgi.url_scheme": "http",
    }
    start_response = mock.Mock()
    # ["foo"] presumably comes from the mock.patch decorator above this
    # method (its line is outside this view) — verify against that patch.
    assert ["foo"] == debugged_app.debug_application(environ, start_response)
    assert DebuggedJsonRpcApplication.handle_debug.called
@mock.patch("werkzeug.debug.DebuggedApplication.debug_application",
            mock.Mock(return_value=["foo"]))
def test_debug_application_normal_endpoint(self):
    """Non-debug paths fall through to werkzeug's debug_application."""
    registry, server, app = TestDebuggedJsonRpcApplication.get_app()
    environ = {
        "SERVER_NAME": "localhost",
        "SERVER_PORT": "5060",
        "PATH_INFO": "/api",
        "REQUEST_METHOD": "POST",
        "wsgi.url_scheme": "http",
    }
    # NonCallableMock: the test fails loudly if start_response is invoked.
    start_response = mock.NonCallableMock()
    assert app.debug_application(environ, start_response) == ["foo"]
    assert werkzeug.debug.DebuggedApplication.debug_application.called
class TestServer(object):
    """Tests for the JSON-RPC WSGI Server: endpoint routing, dispatch
    delegation, before-first-request hooks, and the HTTP status codes
    derived from JSON-RPC response payloads."""

    @staticmethod
    def _create_mock_registry():
        # Registry stub whose dispatch() always returns a canned
        # successful JSON-RPC response string.
        mock_registry = mock.Mock()
        mock_registry.json_encoder = json.JSONEncoder()
        mock_registry.json_decoder = json.JSONDecoder()
        mock_registry.dispatch.return_value = json.dumps({
            "jsonrpc": "2.0",
            "id": "foo",
            "result": "bar"
        })
        return mock_registry

    def test_wsgi_app_invalid_endpoint(self):
        """Requests outside the configured endpoint raise HTTP 404."""
        environ = {
            "SERVER_NAME": "localhost",
            "SERVER_PORT": "5060",
            "PATH_INFO": "/bogus",
            "REQUEST_METHOD": "POST",
            "wsgi.url_scheme": "http",
        }
        mock_registry = self._create_mock_registry()
        server = Server(mock_registry, "/foo")
        with pytest.raises(HTTPException) as excinfo:
            server(environ, None)
        assert excinfo.value.code == 404

    def test_wsgi_app_dispatch(self):
        """A request to the endpoint is handed to registry.dispatch once."""
        environ = {
            "SERVER_NAME": "localhost",
            "SERVER_PORT": "5060",
            "PATH_INFO": "/foo",
            "REQUEST_METHOD": "POST",
            "wsgi.url_scheme": "http",
        }
        mock_registry = self._create_mock_registry()
        server = Server(mock_registry, "/foo")
        mock_start_response = mock.Mock()
        server(environ, mock_start_response)
        mock_registry.dispatch.assert_called_once_with(mock.ANY)

    def test_before_first_request_funcs(self):
        """Registered startup hooks run exactly once across requests."""
        environ = {
            "SERVER_NAME": "localhost",
            "SERVER_PORT": "5060",
            "PATH_INFO": "/foo",
            "REQUEST_METHOD": "POST",
            "wsgi.url_scheme": "http",
        }
        mock_registry = self._create_mock_registry()
        mock_start = mock.Mock()
        # NOTE(review): this *calls* the mock's return_value; it was
        # probably meant to be `mock_start.return_value = None` — confirm.
        mock_start.return_value(None)
        server = Server(mock_registry, "/foo")
        server.register_before_first_request(mock_start)
        mock_start_response = mock.Mock()
        server(environ, mock_start_response)
        server(environ, mock_start_response)
        mock_start.assert_called_once_with()

    def test_http_status_code_empty_response(self):
        """No dispatch payload (a notification) maps to 204 No Content."""
        mock_registry = self._create_mock_registry()
        mock_registry.dispatch.return_value = None
        server = Server(mock_registry, "/foo")
        app = TestApp(server)
        app.post("/foo", status=204)

    def test_http_status_code_success_response(self):
        """A successful JSON-RPC result maps to 200."""
        mock_registry = self._create_mock_registry()
        server = Server(mock_registry, "/foo")
        app = TestApp(server)
        app.post("/foo", status=200)

    def test_http_status_code_batched_response_half_success(self):
        """A batch with at least one success still maps to 200."""
        mock_registry = self._create_mock_registry()
        server = Server(mock_registry, "/foo")
        mock_registry.dispatch.return_value = json.dumps([
            {
                "jsonrpc": "2.0",
                "id": "foo",
                "result": "bar"
            }, {
                "jsonrpc": "2.0",
                "id": "bar",
                "error": typedjsonrpc.errors.MethodNotFoundError().as_error_object()
            }
        ])
        app = TestApp(server)
        app.post("/foo", status=200)

    def test_http_status_code_batched_response_all_failed(self):
        """Even an all-error batch maps to 200 (per-item errors inline)."""
        mock_registry = self._create_mock_registry()
        server = Server(mock_registry, "/foo")
        mock_registry.dispatch.return_value = json.dumps([
            {
                "jsonrpc": "2.0",
                "id": "foo",
                "error": typedjsonrpc.errors.MethodNotFoundError().as_error_object()
            }, {
                "jsonrpc": "2.0",
                "id": "bar",
                "error": typedjsonrpc.errors.MethodNotFoundError().as_error_object()
            }
        ])
        app = TestApp(server)
        app.post("/foo", status=200)

    def test_http_status_code_method_not_found(self):
        """A single MethodNotFoundError response maps to 404."""
        mock_registry = self._create_mock_registry()
        server = Server(mock_registry, "/foo")
        mock_registry.dispatch.return_value = json.dumps({
            "jsonrpc": "2.0",
            "id": "foo",
            "error": typedjsonrpc.errors.MethodNotFoundError().as_error_object()
        })
        app = TestApp(server)
        app.post("/foo", status=404)

    def test_http_status_code_parse_error(self):
        """A ParseError response maps to 400."""
        mock_registry = self._create_mock_registry()
        server = Server(mock_registry, "/foo")
        mock_registry.dispatch.return_value = json.dumps({
            "jsonrpc": "2.0",
            "id": "foo",
            "error": typedjsonrpc.errors.ParseError().as_error_object()
        })
        app = TestApp(server)
        app.post("/foo", status=400)

    def test_http_status_code_invalid_request_error(self):
        """An InvalidRequestError response maps to 400."""
        mock_registry = self._create_mock_registry()
        server = Server(mock_registry, "/foo")
        mock_registry.dispatch.return_value = json.dumps({
            "jsonrpc": "2.0",
            "id": "foo",
            "error": typedjsonrpc.errors.InvalidRequestError().as_error_object()
        })
        app = TestApp(server)
        app.post("/foo", status=400)

    def test_http_status_code_other_errors(self):
        """All remaining error types map to 500 Internal Server Error."""
        other_error_types = [
            typedjsonrpc.errors.InvalidReturnTypeError,
            typedjsonrpc.errors.InvalidParamsError,
            typedjsonrpc.errors.ServerError,
            typedjsonrpc.errors.InternalError,
            typedjsonrpc.errors.Error,
        ]
        mock_registry = self._create_mock_registry()
        server = Server(mock_registry, "/foo")
        for error_type in other_error_types:
            mock_registry.dispatch.return_value = json.dumps({
                "jsonrpc": "2.0",
                "id": "foo",
                "error": error_type().as_error_object()
            })
            app = TestApp(server)
            app.post("/foo", status=500)
class TestCurrentRequest(object):
def test_current_request_set(self):
    """current_request must be bound to the live request during dispatch."""
    registry = Registry()
    server = Server(registry)

    def fake_dispatch_request(request):
        # Assert inside the request cycle, where current_request is bound.
        assert current_request == request
        return Response()

    server._dispatch_request = fake_dispatch_request
    environ = {
        "SERVER_NAME": "localhost",
        "SERVER_PORT": "5060",
        "PATH_INFO": "/foo",
        "REQUEST_METHOD": "POST",
        "wsgi.url_scheme": "http",
    }
    mock_start_response = mock.Mock()
    server(environ, mock_start_response)
def test_current_request_passed_to_registry(self):
registry = Registry()
server = Server(registry)
def fake_dispatch(request):
assert current_request == request
return json.dumps({
"jsonrpc": "2.0",
"id": "foo",
"result": "bar"
})
registry.dispatch = fake_dispatch
environ = {
"SERVER_NAME": "localhost",
"SERVER_PORT": " |
#!/usr/bin/env python
import unittest
from pycoin.ecdsa import generator_secp256k1, sign, verify, public_pair_for_secret_exponent
class ECDSATestCase(unittest.TestCase):
    """Round-trip tests for secp256k1 sign/verify."""

    def test_sign_verify(self):
        """Signatures verify, are low-S, and tampering invalidates them."""

        def do_test(secret_exponent, val_list):
            public_point = public_pair_for_secret_exponent(
                generator_secp256k1, secret_exponent)
            for v in val_list:
                signature = sign(generator_secp256k1, secret_exponent, v)
                # Check that the 's' value is 'low', to prevent possible
                # transaction malleability as per
                # https://github.com/bitcoin/bips/blob/master/bip-0062.mediawiki#low-s-values-in-signatures
                # Use unittest assertion methods instead of bare asserts:
                # they survive `python -O` and report failures cleanly.
                self.assertLessEqual(
                    signature[1],
                    0x7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF5D576E7357A4501DDFE92F46681B20A0)
                self.assertTrue(
                    verify(generator_secp256k1, public_point, v, signature))
                # Tampering with 's' must invalidate the signature.
                tampered = (signature[0], signature[1] + 1)
                self.assertFalse(
                    verify(generator_secp256k1, public_point, v, tampered))

        val_list = [100, 20000, 30000000, 400000000000,
                    50000000000000000, 60000000000000000000000]
        do_test(0x1111111111111111111111111111111111111111111111111111111111111111, val_list)
        do_test(0xdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd, val_list)
        do_test(0x47f7616ea6f9b923076625b4488115de1ef1187f760e65f89eb6f4f7ff04b012, val_list)
# Allow running this module directly as a test script.
if __name__ == '__main__':
    unittest.main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ------------------------------------------------------------------------
# File Name: utils.py
# Author: Zhao Yanbai
# Thu Oct 30 06:33:24 2014
# Description: none
# ------------------------------------------------------------------------
import logging
import struct
import socket
import web
import MySQLdb
import commands
import json
import time
from mail import SendMail
from weixin import SendWeiXinMsg
def init_logging(path):
    """Configure the root logger to append INFO+ records to *path*."""
    logging.basicConfig(
        filename=path,
        level=logging.INFO,
        format='%(levelname)s\t%(asctime)s: %(message)s',
    )
def SendMsg(title, msg):
    """Deliver *msg* via WeiXin; fall back to e-mail if that fails."""
    if SendWeiXinMsg(msg):
        return
    SendMail(title, msg)
class Storage(dict) :
    """dict subclass exposing keys as attributes (web.py-style storage).

    Missing keys surface as AttributeError so hasattr()/getattr() behave
    as they would on a plain object.  NOTE: Python-2-only except/raise
    syntax below.
    """
    def __getattr__(self, key) :
        try :
            return self[key]
        except KeyError, k:
            # Re-raise as AttributeError to honour the attribute protocol.
            raise AttributeError, k
    def __setattr__(self, key, value) :
        self[key] = value
    def __delattr__(self, key) :
        try :
            del self[key]
        except KeyError, k:
            raise AttributeError, k
    def __repr__(self) :
        return '<Storage ' + dict.__repr__(self) + '>'
def ip2int(ip):
    """Convert a dotted-quad string to its 32-bit big-endian integer."""
    packed = socket.inet_aton(ip)
    (value,) = struct.unpack("!I", packed)
    return value
def int2ip(i):
    """Convert a 32-bit integer to its dotted-quad string form.

    Fix: removed a leftover Python-2 debug statement (`print i`) that
    polluted stdout on every call.
    """
    return str(socket.inet_ntoa(struct.pack("!I", int(i))))
def INET_ATON(ipstr):
    """MySQL INET_ATON work-alike: dotted quad -> decimal string.

    The trivial ip2int helper is inlined here; the result is identical.
    """
    return str(struct.unpack("!I", socket.inet_aton(ipstr))[0])
def INET_NTOA(ip):
    """MySQL INET_NTOA work-alike: numeric value -> dotted-quad string."""
    masked = int(ip) & 0xFFFFFFFF  # keep only the low 32 bits
    return int2ip(masked)
def CheckIP(s):
    """Return True iff *s* is a dotted quad of exactly four octets 0..255.

    Fixes over the original: (1) the bare `except:` is narrowed so
    unrelated bugs are no longer silently swallowed; (2) the original
    filtered invalid octets *before* counting, so e.g. "1.2.3.4.999"
    (four valid octets plus garbage) was wrongly accepted.
    """
    try:
        parts = s.split('.')
        return len(parts) == 4 and all(0 <= int(p) <= 255 for p in parts)
    except (ValueError, AttributeError):
        # Non-numeric octet, or *s* is not a string at all.
        return False
def CheckPort(port):
    """True iff *port* is a decimal string naming a valid port (1..65535)."""
    if not port.isdigit():
        return False
    return 0 < int(port) < 65536
def CheckLogic(logic):
    """True iff *logic* is the decimal string "0", "1" or "2"."""
    if not logic.isdigit():
        return False
    return int(logic) in (0, 1, 2)
class PageBase(object):
    """Base class for web.py page controllers with action dispatch.

    Requests are routed by the 'action' query parameter through
    self.ActionMap, defaulting to List.  Subclasses override the
    New/Add/Del/Edit/List/Search handlers.

    Fix: ErrMsg/SucMsg each had an unreachable `return self.Ret` after
    the `return json.dumps(...)`; the dead statements were removed.
    """

    def __init__(self):
        self.ActionMap = {}
        self.action = ''
        self.SetActionHandler('New', self.New)
        self.SetActionHandler('Add', self.Add)
        self.SetActionHandler('Del', self.Del)
        self.SetActionHandler('Edit', self.Edit)
        self.SetActionHandler('List', self.List)
        self.SetActionHandler('Search', self.Search)
        self.SetActionHandler('UNIMPLEMENTED', self.UNIMPLEMENTED)
        # Shared JSON envelope mutated by the *Msg helpers below.
        self.Ret = {
            'Err': -1,
            'Msg': 'Unknown'
        }

    def ErrMsg(self, msg):
        """Return a JSON error envelope (Err=1) carrying *msg*."""
        self.Ret['Err'] = 1
        self.Ret['Msg'] = msg
        return json.dumps(self.Ret, ensure_ascii=False)

    def SucMsg(self, msg):
        """Return a JSON success envelope (Err=0) carrying *msg*."""
        self.Ret['Err'] = 0
        self.Ret['Msg'] = msg
        return json.dumps(self.Ret, ensure_ascii=False)

    def SucJsonData(self, data):
        """Return a JSON success envelope with a Data payload."""
        self.Ret['Err'] = 0
        self.Ret['Msg'] = 'success'
        self.Ret['Data'] = data
        return json.dumps(self.Ret, ensure_ascii=False)

    def AuthorizedUser(self):
        # Authorization hook for subclasses; allow everyone by default.
        return True

    def UNIMPLEMENTED(self):
        if len(self.action) == 0:
            return "UNIMPLEMENTED"
        return "UNIMPLEMENTED HANDLER FOR THE ACTION: {0}".format(self.action)

    def REQUEST_HANDLER(self):
        # Route by the 'action' query parameter; fall back to List.
        self.action = web.input().get('action', '').strip()
        return self.ActionMap.get(self.action, self.List)()

    def GET(self):
        if not self.AuthorizedUser():
            return "UNAUTHORIZED USER"
        return self.REQUEST_HANDLER()

    def POST(self):
        if not self.AuthorizedUser():
            return "UNAUTHORIZED USER"
        return self.REQUEST_HANDLER()

    def SetActionHandler(self, action, handler):
        self.ActionMap[action] = handler

    # Default handlers below; subclasses are expected to override them.
    # (Note: Update is defined but never registered in ActionMap.)
    def New(self):
        return "YOU MUST IMPLEMENTED HANDLER FOR THE ACTION: {0}".format(self.action)

    def Add(self):
        return "YOU MUST IMPLEMENTED HANDLER FOR THE ACTION: {0}".format(self.action)

    def Del(self):
        return "YOU MUST IMPLEMENTED HANDLER FOR THE ACTION: {0}".format(self.action)

    def Edit(self):
        return "YOU MUST IMPLEMENTED HANDLER FOR THE ACTION: {0}".format(self.action)

    def List(self):
        return "YOU MUST IMPLEMENTED HANDLER FOR THE ACTION: {0}".format(self.action)

    def Update(self):
        return "YOU MUST IMPLEMENTED HANDLER FOR THE ACTION: {0}".format(self.action)

    def Search(self):
        return "YOU MUST IMPLEMENTED HANDLER FOR THE ACTION: {0}".format(self.action)
class DBBase(object):
    """Thin wrapper around a web.py DB handle with a shared result dict.

    self.ret carries {'Err': 0|1, 'Msg': ...} describing the last
    operation; the helpers below read and mutate it in place.
    """
    def __init__(self, db) :
        self.db = db
        self.ret = {
            "Err" : 0,
            "Msg" : "No Error",
        }
    def SetSuccMsg(self, msg) :
        # Mark the last operation as successful.
        self.ret["Err"] = 0
        self.ret["Msg"] = msg
    def SetFailMsg(self, msg) :
        # Mark the last operation as failed.
        self.ret["Err"] = 1
        self.ret["Msg"] = msg
    def IsFail(self) :
        return self.ret['Err'] == 1
    def Fail(self, msg='UnSetErrReason') :
        # Set failure state and return the whole envelope.
        self.ret['Err'] = 1
        self.ret['Msg'] = msg
        return self.ret
    def Ret(self) :
        return self.ret
    def GetRetMsg(self) :
        return self.ret['Msg']
    def Result(self, url='') :
        # On failure return the message for rendering; on success redirect.
        if self.IsFail() :
            return self.GetRetMsg()
            #return config.render.ErrMsg(msg=self.GetRetMsg())
        else :
            #return config.render.Msg(msg=self.GetRetMsg(), url = url)
            web.seeother(url)
    def Read(self, sql, sidx="", sord="") :
        """Run a query, optionally appending ORDER BY *sidx* ASC|DESC.

        Returns [] when MySQLdb reports a programming error.
        NOTE(review): sidx/sord are interpolated into the SQL string;
        callers must never pass untrusted values here.
        """
        if sidx != "" :
            sord = sord.upper()
            if sord != "ASC" and sord != "DESC" :
                sord = "ASC"
            sql = sql + " ORDER BY " + sidx + " " + sord
        try :
            #print sql
            records = list(self.db.query(sql))
        except MySQLdb.ProgrammingError :
            records = []
        return records
    def Modify(self, sql) :
        """Execute a ';'-separated batch of statements, stopping on error.

        Fragments shorter than 5 characters end the batch (this skips the
        empty tail produced by a trailing ';').
        """
        sqls = sql.split(';')
        for sql in sqls :
            if len(sql) < 5 :
                break
            #self.db.query(sql)
            #return
            try :
                #print sql
                self.db.query(sql)
                self.SetSuccMsg(u"操作完成")
            except MySQLdb.ProgrammingError :
                self.SetFailMsg("MySQL Programming Error")
            except MySQLdb.IntegrityError :
                self.SetFailMsg("Duplicate Record")
            except :
                # Deliberate catch-all: any other failure is reported via
                # the ret envelope instead of propagating to the page.
                self.SetFailMsg("Unknown Error")
            if self.IsFail() :
                break
        return self.ret
def GetSvrOutputLines(cmd) :
    """Run *cmd* in a shell and return its '>'-prefixed payload lines.

    Only lines starting with '>' are kept (the marker is stripped and the
    remainder trimmed); reading stops at the first empty line.
    NOTE: uses the Python-2-only `commands` module.
    """
    lines = []
    o = commands.getoutput(cmd)
    #print o
    for line in o.splitlines() :
        if len(line) == 0 :
            break
        if line[0] != '>' :
            continue
        line = line[1:]
        line = line.strip()
        lines.append(line)
    return lines
def Ts2TmStr(ts=None):
    """Format *ts* (epoch seconds; default: now) as 'YYYY-mm-dd HH:MM:SS'.

    Fix: the original default `ts=int(time.time())` was evaluated once at
    import time, so every defaulted call formatted the module-load time
    instead of the current time.  Use a None sentinel instead.
    """
    if ts is None:
        ts = int(time.time())
    return time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(ts))
|
h+"greenblock/"
match_path = spath+"matches/"
rb_path = spath+"redblock/"
s1_path = spath+"stuffed/"
s2_path = spath+"stuffed2/"
s3_path = spath+"stuffed3/"
arbor_path = upath+"arborgreens/"
football_path = upath+"football/"
sanjuan_path = upath+"sanjuans/"
print('SVMPoly')
#Set up am SVM with a poly kernel
extractors = [hue]
path = [cactus_path,cup_path,basket_path]
classes = ['cactus','cup','basket']
props ={
'KernelType':'Poly', #default is a RBF Kernel
'SVMType':'C', #default is C
'nu':None, # NU for SVM NU
'c':None, #C for SVM C - the slack variable
'degree':3, #degree for poly kernels - defaults to 3
'coef':None, #coef for Poly/Sigmoid defaults to 0
'gamma':None, #kernel param for poly/rbf/sigma - default is 1/#samples
}
print('Train')
classifierSVMP = SVMClassifier(extractors,props)
data = []
for p in path:
data.append(ImageSet(p))
classifierSVMP.train(data,classes,disp=display,subset=n) #train
print('Test')
[pos,neg,confuse] = classifierSVMP.test(data,classes,disp=display,subset=n)
files = []
for ext in IMAGE_FORMATS:
files.extend(glob.glob( os.path.join(path[0], ext)))
for i in range(10):
img = Image(files[i])
cname = classifierSVMP.classify(img)
print(files[i]+' -> '+cname)
classifierSVMP.save('PolySVM.pkl')
print('Reloading from file')
testSVM = SVMClassifier.load('PolySVM.pkl')
#testSVM.setFeatureExtractors(extractors)
files = glob.glob( os.path.join(path[0], '*.jpg'))
for i in range(10):
img = Image(files[i])
cname = testSVM.classify(img)
print(files[i]+' -> '+cname)
print('###############################################################################')
print('SVMRBF ')
# now try an RBF kernel
extractors = [hue,edge]
path = [cactus_path,cup_path,basket_path]
classes = ['cactus','cup','basket']
props ={
'KernelType':'RBF', #default is a RBF Kernel
'SVMType':'NU', #default is C
'nu':None, # NU for SVM NU
'c':None, #C for SVM C - the slack variable
'degree':None, #degree for poly kernels - defaults to 3
'coef':None, #coef for Poly/Sigmoid defaults to 0
'gamma':None, #kernel param for poly/rbf/sigma
}
print('Train')
classifierSVMRBF = SVMClassifier(extractors,props)
data = []
for p in path:
data.append(ImageSet(p))
classifierSVMRBF.train(data,classes,disp=display,subset=n) #train
print('Test')
[pos,neg,confuse] = classifierSVMRBF.test(data,classes,disp=display,subset=n)
files = glob.glob( os.path.join(path[0], '*.jpg'))
for i in range(10):
img = Image(files[i])
cname = classifierSVMRBF.classify(img)
print(files[i]+' -> '+cname)
classifierSVMRBF.save('RBFSVM.pkl')
print('Reloading from file')
testSVMRBF = SVMClassifier.load('RBFSVM.pkl')
#testSVMRBF.setFeatureExtractors(extractors)
files = glob.glob( os.path.join(path[0], '*.jpg'))
for i in range(10):
img = Image(files[i])
cname = testSVMRBF.classify(img)
print(files[i]+' -> '+cname)
print('###############################################################################')
print('Bayes')
extractors = [haar]
classifierBayes = NaiveBayesClassifier(extractors)#
print('Train')
path = [arbor_path,football_path,sanjuan_path]
classes = ['arbor','football','sanjuan']
classifierBayes.train(path,classes,disp=display,subset=n) #train
print('Test')
[pos,neg,confuse] = classifierBayes.test(path,classes,disp=display,subset=n)
files = glob.glob( os.path.join(path[0], '*.jpg'))
for i in range(10):
img = Image(files[i])
cname = classifierBayes.classify(img)
print(files[i]+' -> '+cname)
classifierBayes.save('Bayes.pkl')
print('Reloading from file')
testBayes = NaiveBayesClassifier.load('Bayes.pkl')
testBayes.setFeatureExtractors(extractors)
files = glob.glob( os.path.join(path[0], '*.jpg'))
for i in range(10):
img = Image(files[i])
cname = testBayes.classify(img)
print(files[i]+' -> '+cname)
print('###############################################################################')
print('###############################################################################')
print('Forest')
extractors = [morph]
classifierForest = TreeClassifier(extractors,flavor='Forest')#
print('Train')
path = [s1_path,s2_path,s3_path]
classes = ['s1','s2','s3']
classifierForest.train(path,classes,disp=display,subset=n) #train
print('Test')
[pos,neg,confuse] = classifierForest.test(path,classes,disp=display,subset=n)
files = glob.glob( os.path.join(path[0], '*.jpg'))
for i in range(10):
img = Image(files[i])
cname = classifierForest.classify(img)
print(files[i]+' -> '+cname)
classifierForest.save('forest.pkl')
print('Reloading from file')
testForest = TreeClassifier.load('forest.pkl')
testForest.setFeatureExtractors(extractors)
files = glob.glob( os.path.join(path[0], '*.jpg'))
for i in range(10):
img = Image(files[i])
cname = testForest.classify(img)
print(files[i]+' -> '+cname)
print('###############################################################################')
print('Bagged Tree')
extractors = [haar]
classifierBagTree = TreeClassifier(extractors,flavor='Bagged')#
print('Train')
path = [s1_path,s2_pa | th,s3_path]
classes = ['s1','s2','s3']
classifierBagTree.train | (path,classes,disp=display,subset=n) #train
print('Test')
[pos,neg,confuse] = classifierBagTree.test(path,classes,disp=display,subset=n)
files = glob.glob( os.path.join(path[0], '*.jpg'))
for i in range(10):
img = Image(files[i])
cname = classifierBagTree.classify(img)
print(files[i]+' -> '+cname)
classifierBagTree.save('bagtree.pkl')
print('Reloading from file')
testBagTree = TreeClassifier.load('bagtree.pkl')
testBagTree.setFeatureExtractors(extractors)
files = glob.glob( os.path.join(path[0], '*.jpg'))
for i in range(10):
img = Image(files[i])
cname = testBagTree.classify(img)
print(files[i]+' -> '+cname)
print('###############################################################################')
print('Vanilla Tree')
extractors = [haar]
classifierTree = TreeClassifier(featureExtractors=extractors)
print('Train')
path = [s1_path,s2_path,s3_path]
classes = ['s1','s2','s3']
classifierTree.train(path,classes,disp=display,subset=n) #train
print('Test')
[pos,neg,confuse] = classifierTree.test(path,classes,disp=display,subset=n)
files = glob.glob( os.path.join(path[0], '*.jpg'))
for i in range(10):
img = Image(files[i])
cname = classifierTree.classify(img)
print(files[i]+' -> '+cname)
print('Reloading from file')
classifierTree.save('tree.pkl')
testTree = TreeClassifier.load('tree.pkl')
testTree.setFeatureExtractors(extractors)
for i in range(10):
img = Image(files[i])
cname = testTree.classify(img)
print(files[i]+' -> '+cname)
print('###############################################################################')
print('Boosted Tree')
extractors = [haar]
classifierBTree = TreeClassifier(extractors,flavor='Boosted')#
print('Train')
path = [s1_path,s2_path,s3_path]
classes = ['s1','s2','s3']
classifierBTree.train(path,classes,disp=display,subset=n) #train
print('Test')
[pos,neg,confuse] = classifierBTree.test(path,classes,disp=display,subset=n)
files = glob.glob( os.path.join(path[0], '*.jpg'))
for i in range(10):
img = Image(files[i])
cname = classifierBTree.classify(img)
print(files[i]+' -> '+cname)
classifierBTree.save('btree.pkl')
print('Reloading from file')
testBoostTree = TreeClassifier.load('btree.pkl')
testBoostTree.setFeatureExtractors(extractors)
files = glob.glob( os.path.join(path[0], '*.jpg'))
for i in range(10):
img = Image(files[i])
cname = testBoostTree.classify(img)
print(files[i]+' -> '+cname)
print('###############################################################################')
print('KNN')
extractors = [hue,edge]
classifierKNN = KNNClassifier(extractors)#
print('Train')
path = [s1_path,s2_path,s3_path]
classes = ['s1','s2','s3']
classifierKNN.train(path,classes,disp=display,subset=n) #train
print('Test')
[pos,neg,confuse] = classifierKNN.test(path,classes,disp=display,subset=n)
files = glob.glob( os.path.join(path[0], '*.jpg'))
for |
("days_to_keep_positive", "CHECK(days_to_keep >= 0)",
"I cannot remove backups from the future. Ask Doc for that."),
]
# Computed, human-readable summary of this backup job.
name = fields.Char(
    string="Name",
    compute="_compute_name",
    store=True,
    help="Summary of this backup process",
)
# Destination directory (local path, or remote path for SFTP).
folder = fields.Char(
    default=lambda self: self._default_folder(),
    oldname="bkp_dir",
    help='Absolute path for storing the backups',
    required=True
)
# Retention window; 0 disables automatic deletion (see cleanup()).
days_to_keep = fields.Integer(
    oldname="daystokeep",
    required=True,
    default=0,
    help="Backups older than this will be deleted automatically. "
         "Set 0 to disable autodeletion.",
)
# Storage backend selector; drives which of the fields below apply.
method = fields.Selection(
    selection=[("local", "Local disk"), ("sftp", "Remote SFTP server")],
    default="local",
    help="Choose the storage method for this backup.",
)
# SFTP connection settings (only used when method == "sftp").
sftp_host = fields.Char(
    string='SFTP Server',
    oldname="sftpip",
    help=(
        "The host name or IP address from your remote"
        " server. For example 192.168.0.1"
    )
)
sftp_port = fields.Integer(
    string="SFTP Port",
    default=22,
    oldname="sftpport",
    help="The port on the FTP server that accepts SSH/SFTP calls."
)
sftp_user = fields.Char(
    string='Username in the SFTP Server',
    oldname="sftpusername",
    help=(
        "The username where the SFTP connection "
        "should be made with. This is the user on the external server."
    )
)
# Password, or passphrase for the private key when one is given.
sftp_password = fields.Char(
    string="SFTP Password",
    oldname="sftppassword",
    help="The password for the SFTP connection. If you specify a private "
         "key file, then this is the password to decrypt it.",
)
sftp_private_key = fields.Char(
    string="Private key location",
    help="Path to the private key file. Only the Odoo user should have "
         "read permissions for that file.",
)
@api.model
def _default_folder(self):
    """Default to ``backups`` folder inside current server datadir."""
    data_dir = tools.config["data_dir"]
    return os.path.join(data_dir, "backups", self.env.cr.dbname)
@api.multi
@api.depends("folder", "method", "sftp_host", "sftp_port", "sftp_user")
def _compute_name(self):
    """Summarize each job as a local path or an sftp:// pseudo-URL."""
    for record in self:
        if record.method == "sftp":
            record.name = "sftp://%s@%s:%d%s" % (
                record.sftp_user, record.sftp_host,
                record.sftp_port, record.folder)
        elif record.method == "local":
            record.name = "%s @ localhost" % record.folder
@api.constrains("folder", "method")
@api.multi
def _check_folder(self):
    """Do not use the filestore or you will backup your backups."""
    for record in self:
        if record.method != "local":
            continue
        filestore = tools.config.filestore(self.env.cr.dbname)
        if record.folder.startswith(filestore):
            raise exceptions.ValidationError(
                _("Do not save backups on your filestore, or you will "
                  "backup your backups too!"))
@api.multi
def action_sftp_test_connection(self):
    """Check if the SFTP settings are correct."""
    try:
        # Just open and close the connection
        with self.sftp_connection():
            # Success is ALSO reported by raising: the Warning is not in
            # the except clause below, so it propagates to the client,
            # which displays it as a dialog.
            raise exceptions.Warning(_("Connection Test Succeeded!"))
    except (pysftp.CredentialException, pysftp.ConnectionException):
        _logger.info("Connection Test Failed!", exc_info=True)
        raise exceptions.Warning(_("Connection Test Failed!"))
@api.multi
def action_backup(self):
    """Run selected backups."""
    # `backup` caches the path of the first dump written, so later
    # targets copy that file instead of dumping the database again.
    backup = None
    filename = self.filename(datetime.now())
    successful = self.browse()
    # Start with local storage
    for rec in self.filtered(lambda r: r.method == "local"):
        with rec.backup_log():
            # Directory must exist
            try:
                os.makedirs(rec.folder)
            except OSError:
                # Already exists (or not creatable; open() below fails).
                pass
            with open(os.path.join(rec.folder, filename),
                      'wb') as destiny:
                # Copy the cached backup
                if backup:
                    with open(backup) as cached:
                        shutil.copyfileobj(cached, destiny)
                # Generate new backup
                else:
                    db.dump_db(self.env.cr.dbname, destiny)
                backup = backup or destiny.name
            successful |= rec
    # Ensure a local backup exists if we are going to write it remotely
    sftp = self.filtered(lambda r: r.method == "sftp")
    if sftp:
        if backup:
            cached = open(backup)
        else:
            # No local target ran: dump into a throwaway temp file.
            cached = tempfile.TemporaryFile()
            db.dump_db(self.env.cr.dbname, cached)
        with cached:
            for rec in sftp:
                with rec.backup_log():
                    with rec.sftp_connection() as remote:
                        # Directory must exist
                        try:
                            remote.makedirs(rec.folder)
                        except pysftp.ConnectionException:
                            pass
                        # Copy cached backup to remote server
                        with remote.open(
                                os.path.join(rec.folder, filename),
                                "wb") as destiny:
                            shutil.copyfileobj(cached, destiny)
                    successful |= rec
    # Remove old files for successful backups
    successful.cleanup()
@api.model
def action_backup_all(self):
    """Run all scheduled backups."""
    all_jobs = self.search([])
    return all_jobs.action_backup()
@api.multi
@contextmanager
def backup_log(self):
    """Log a backup result."""
    try:
        _logger.info("Starting database backup: %s", self.name)
        yield
    except:
        # Deliberate catch-all: a failed backup is logged and posted to
        # the record's chatter, never re-raised, so one failing target
        # does not abort the remaining backups.
        _logger.exception("Database backup failed: %s", self.name)
        escaped_tb = tools.html_escape(traceback.format_exc())
        self.message_post(
            "<p>%s</p><pre>%s</pre>" % (
                _("Database backup failed."),
                escaped_tb),
            subtype=self.env.ref("auto_backup.failure"))
    else:
        _logger.info("Database backup succeeded: %s", self.name)
        self.message_post(_("Database backup succeeded."))
@api.multi
def cleanup(self):
    """Clean up old backups."""
    now = datetime.now()
    # Only jobs with days_to_keep > 0 take part (0 disables autodeletion).
    for rec in self.filtered("days_to_keep"):
        with rec.cleanup_log():
            # Expiry is decided by string comparison against the filename
            # for the cutoff date; assumes self.filename() embeds a
            # lexicographically sortable timestamp — TODO confirm.
            oldest = self.filename(now - timedelta(days=rec.days_to_keep))
            if rec.method == "local":
                for name in iglob(os.path.join(rec.folder,
                                               "*.dump.zip")):
                    if os.path.basename(name) < oldest:
                        os.unlink(name)
            elif rec.method == "sftp":
                with rec.sftp_connection() as remote:
                    for name in remote.listdir(rec.folder):
                        if (name.endswith(".dump.zip") and
                                os.path.basename(name) < oldest):
                            remote.unlink(name)
@api.multi
@contextmanager
def cleanup_log(self):
"""Log a possible cleanup failure."""
try:
_logger.info("Starting cleanup process after database backup: %s",
self.name)
yield
except:
_logger.exception("Cleanup of old database backups failed: %s")
escaped_tb = tools.html_escape(traceback.format_exc())
self.message_post(
"<p>%s</p><pre>%s</pre>" % (
|
f:', name_='ResourceAllocationSection_Type', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='ResourceAllocationSection_Type')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='ResourceAllocationSection_Type'):
    # No attributes of its own; delegate entirely to the Section_Type base.
    super(ResourceAllocationSection_Type, self).exportAttributes(outfile, level, already_processed, namespace_, name_='ResourceAllocationSection_Type')
def exportChildren(self, outfile, level, namespace_='ovf:', name_='ResourceAllocationSection_Type', fromsubclass_=False):
    # Emit inherited children first, then each <Item>, then any xs:any extras.
    super(ResourceAllocationSection_Type, self).exportChildren(outfile, level, namespace_, name_, True)
    for Item_ in self.Item:
        Item_.export(outfile, level, namespace_, name_='Item')
    for obj_ in self.anytypeobjs_:
        obj_.export(outfile, level, namespace_)
def hasContent_(self):
    """Return True when this section has any child content to serialize."""
    # Idiom fix: collapse the if/else over a boolean expression into a
    # direct bool() of the same expression — identical truth value.
    return bool(
        self.Item
        or self.anytypeobjs_
        or super(ResourceAllocationSection_Type, self).hasContent_()
    )
def exportLiteral(self, outfile, level, name_='ResourceAllocationSection_Type'):
    # Write a Python-literal representation (generateDS "literal" mode).
    level += 1
    self.exportLiteralAttributes(outfile, level, [], name_)
    if self.hasContent_():
        self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
    # No attributes of its own; delegate to the Section_Type base.
    super(ResourceAllocationSection_Type, self).exportLiteralAttributes(outfile, level, already_processed, name_)
def exportLiteralChildren(self, outfile, level, name_):
super(ResourceAllocationSection_Type, self).exportLiteralChildren(outfile, level, name_)
showIndent(outfile, level)
outfile.write('Item=[\n')
level += 1
for Item_ in self.Item:
showIndent(outfile, level)
outfile.write('model_.RASD_Type(\n')
Item_.exportLiteral(outfile, level, name_='RASD_Type')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('anytypeobjs_=[ | \n')
level += 1
for anytypeobjs_ in self.anytypeobjs_:
anytypeobjs_.exportLiteral(outfile, level)
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
| def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        # No attributes of its own; the base class consumes any inherited ones.
        super(ResourceAllocationSection_Type, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # 'Item' elements become RASD_Type instances; anything else is kept as a
        # generic "any"-type object.  The base class then gets its chance too.
        if nodeName_ == 'Item':
            obj_ = RASD_Type.factory()
            obj_.build(child_)
            self.Item.append(obj_)
        else:
            obj_ = self.gds_build_any(child_, 'ResourceAllocationSection_Type')
            if obj_ is not None:
                self.add_anytypeobjs_(obj_)
        super(ResourceAllocationSection_Type, self).buildChildren(child_, node, nodeName_, True)
# end class ResourceAllocationSection_Type
class InstallSection_Type(Section_Type):
"""If present indicates that the virtual machine needs to be initially
booted to install and configure the softwareDelay in seconds to
wait for power off to complete after initial boot"""
subclass = None
superclass = Section_Type
def __init__(self, required=None, Info=None, initialBootStopDelay=0, anytypeobjs_=None):
super(InstallSection_Type, self).__init__(required, Info, )
self.initialBootStopDelay = _cast(int, initialBootStopDelay)
if anytypeobjs_ is None:
self.anytypeobjs_ = []
else:
self.anytypeobjs_ = anytypeobjs_
def factory(*args_, **kwargs_):
if InstallSection_Type.subclass:
return InstallSection_Type.subclass(*args_, **kwargs_)
else:
return InstallSection_Type(*args_, **kwargs_)
factory = staticmethod(factory)
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
def insert_anytypeobjs_(self, index, value): self._anytypeobjs_[index] = value
    # Accessors for the initialBootStopDelay attribute (seconds to wait for
    # power-off after the initial boot).
    def get_initialBootStopDelay(self): return self.initialBootStopDelay
    def set_initialBootStopDelay(self, initialBootStopDelay): self.initialBootStopDelay = initialBootStopDelay
    def export(self, outfile, level, namespace_='ovf:', name_='InstallSection_Type', namespacedef_=''):
        # Serialise this element as XML: open tag, attributes, then either a
        # child block or a self-closing tag when there is no content.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='InstallSection_Type')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='InstallSection_Type'):
        # Inherited attributes first, then initialBootStopDelay (written once).
        super(InstallSection_Type, self).exportAttributes(outfile, level, already_processed, namespace_, name_='InstallSection_Type')
        if self.initialBootStopDelay is not None and 'initialBootStopDelay' not in already_processed:
            already_processed.append('initialBootStopDelay')
            outfile.write(' initialBootStopDelay="%s"' % self.gds_format_integer(self.initialBootStopDelay, input_name='initialBootStopDelay'))
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='InstallSection_Type', fromsubclass_=False):
        # Inherited children first (True suppresses the base wrapper), then
        # the wildcard (xs:any) children.
        super(InstallSection_Type, self).exportChildren(outfile, level, namespace_, name_, True)
        for obj_ in self.anytypeobjs_:
            obj_.export(outfile, level, namespace_)
def hasContent_(self):
if (
self.anytypeobjs_ or
super(InstallSection_Type, self).hasContent_()
):
return True
else:
return False
    def exportLiteral(self, outfile, level, name_='InstallSection_Type'):
        # Write a Python-literal (constructor-style) representation of this element.
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        # initialBootStopDelay first (written once), then inherited attributes.
        if self.initialBootStopDelay is not None and 'initialBootStopDelay' not in already_processed:
            already_processed.append('initialBootStopDelay')
            showIndent(outfile, level)
            outfile.write('initialBootStopDelay = %d,\n' % (self.initialBootStopDelay,))
        super(InstallSection_Type, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        # Inherited children, then the wildcard (xs:any) children as a list literal.
        super(InstallSection_Type, self).exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('anytypeobjs_=[\n')
        level += 1
        for anytypeobjs_ in self.anytypeobjs_:
            anytypeobjs_.exportLiteral(outfile, level)
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
def build(self, node):
self.buildAttributes(node, node.attrib, [])
f |
'''
pyttsx setup script.
Copyright (c) 2009, 2013 Peter Parente
Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED 'AS IS' AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
'''
from setuptools import setup, find_packages
# Distribution metadata for the pyttsx text-to-speech package.
# NOTE(review): find_packages is imported but unused; the package list below
# is maintained by hand.
setup(name='pyttsx',
    version='1.2',
    description='pyttsx - cross platform text-to-speech',
    long_description='pyttsx is a Python package supporting common text-to-speech engines on Mac OS X, Windows, and Linux.',
    author='Peter Parente',
    author_email='parente@cs.unc.edu',
    url='https://github.com/parente/pyttsx',
    download_url='http://pypi.python.org/pypi/pyttsx',
    license='BSD License',
    packages=['pyttsx', 'pyttsx.drivers']
    )
#!/usr/bin/env python
# encoding: utf-8
"""
Generic | stock functions
"""
class Stock(object):
    """A simple value object describing a single equity listing."""

    def __init__(self, symbol, name, sector):
        """Record the ticker symbol, company name and market sector."""
        super(Stock, self).__init__()
        self.symbol, self.name, self.sector = symbol, name, sector
|
from __future__ import print_function
import sys
import subprocess
class AutoInstall(object):
    """Import hook that pip-installs missing top-level modules on demand.

    NOTE(review): this uses the legacy ``find_module`` finder API, which was
    deprecated in Python 3.4 and removed in 3.12 -- confirm the target runtime.
    """
    # Names we have already attempted to install (prevents retry loops).
    _loaded = set()

    @classmethod
    def find_module(cls, name, path, target=None):
        """Try to pip-install *name*; always return None so the normal
        import machinery performs the actual import afterwards."""
        # Only act on top-level imports (path is None) not yet attempted.
        if path is None and name not in cls._loaded:
            cls._loaded.add(name)
            print("Installing", name)
            try:
                out = subprocess.check_output(['sudo', sys.executable, '-m', 'pip', 'install', name])
                print(out)
            except Exception as e:
                # Bug fix: Exception has no ``.message`` attribute on
                # Python 3 -- the old ``e.message`` raised AttributeError
                # inside the handler.  Format the exception itself instead.
                print("Failed to install %s: %s" % (name, e))
        return None
sys.meta_path.append(AutoInstall)
|
# ----------------------------------------------------------------------------
# Copyright (c) 2017-, labman development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
from os import environ
from os.path import expanduser, exists
from datetime import datetime
from configparser import ConfigParser
class ConfigurationManager(object):
    """Holds the labman configuration
    Parameters
    ----------
    conf_fp: str, optional
        Filepath to the configuration file. Default: config_test.cfg
    Attributes
    ----------
    test_environment : bool
        If true, we are in a test environment.
    database : str
        The postgres database to connect to
    user : str
        The postgres user
    password : str
        The postgres password for the previous user
    admin_user : str
        The administrator user, which can be used to create/drop environments
    admin_password : str
        The postgres password for the admin_user
    host : str
        The host where the database lives
    port : int
        The port used to connect to the postgres database in the previous host
    qiita_server_cert : str
        If qiita enabled, the qiita server certificate
    Raises
    ------
    RuntimeError
        When an option is no longer available.
    """
    @staticmethod
    def create(config_fp, test_env, db_host, db_port, db_name, db_user,
               db_password, db_admin_user, db_admin_password, log_dir,
               qiita_server_cert):
        """Creates a new labman configuration file
        Parameters
        ----------
        config_fp : str
            Path to the configuration file
        test_env : bool
            If true, a config file for a test environment will be created
        db_host : str
            The host where the database lives
        db_port : int
            The port used to connect to the postgres database in the previous
            host
        db_name : str
            The postgres database to connect to
        db_user : str
            The postgres user
        db_password : str
            The postgres password for the previous user
        db_admin_user : str
            The administrator user, which can be used to create/drop
            environments
        db_admin_password : str
            The postgres password for the admin_user
        log_dir : str
            Path to the log directory
        qiita_server_cert : str
            The qiita server certificate (for testing)
        """
        with open(config_fp, 'w') as f:
            f.write(CONFIG_TEMPLATE % {
                'test': test_env,
                'date': str(datetime.now()),
                'user': db_user,
                'admin_user': db_admin_user,
                'password': db_password,
                'admin_password': db_admin_password,
                'database': db_name,
                'host': db_host,
                'port': db_port,
                'logdir': log_dir,
                'qiita_cert': qiita_server_cert})
    def __init__(self):
        # Resolve the config file path: environment variable first, then the
        # user's home directory.
        try:
            self.conf_fp = environ['LABMAN_CONFIG_FP']
        except KeyError:
            self.conf_fp = expanduser('~/.labman.cfg')
            if not exists(self.conf_fp):
                raise RuntimeError(
                    'Please, configure labman using `labman config`. If the '
                    'config file is not in `~/.labman.cfg`, please set the '
                    '`LABMAN_CONFIG_FP` environment variable to the '
                    'configuration file')
        # Parse the configuration file.
        # Fix: the previous code used open(self.conf_fp, 'U') and
        # ConfigParser.readfp(); the 'U' open mode was removed in Python 3.11
        # and readfp() is deprecated since 3.2.  read_file() with a normally
        # opened text file is the supported equivalent (universal newlines are
        # the default in Python 3 text mode).
        config = ConfigParser()
        with open(self.conf_fp) as conf_file:
            config.read_file(conf_file)
        _required_sections = {'postgres'}
        if not _required_sections.issubset(set(config.sections())):
            missing = _required_sections - set(config.sections())
            raise RuntimeError(', '.join(missing))
        self._get_main(config)
        self._get_postgres(config)
        self._get_qiita(config)
    def _get_main(self, config):
        """Get the main configuration"""
        self.test_environment = config.getboolean('main', 'TEST_ENVIRONMENT')
        self.log_dir = config.get('main', 'LOG_DIR')
    def _get_postgres(self, config):
        """Get the configuration of the postgres section"""
        self.user = config.get('postgres', 'USER')
        self.admin_user = config.get('postgres', 'ADMIN_USER') or None
        # Empty passwords are normalised to None (e.g. peer authentication).
        self.password = config.get('postgres', 'PASSWORD')
        if not self.password:
            self.password = None
        self.admin_password = config.get('postgres', 'ADMIN_PASSWORD')
        if not self.admin_password:
            self.admin_password = None
        self.database = config.get('postgres', 'DATABASE')
        self.host = config.get('postgres', 'HOST')
        self.port = config.getint('postgres', 'PORT')
    def _get_qiita(self, config):
        """Get the configuration of the qiita section"""
        self.qiita_server_cert = config.get('qiita', 'SERVER_CERT')
# Template consumed by ConfigurationManager.create(); %-interpolated with the
# values passed to create().  The leading '#' lines end up as comments in the
# generated config file itself.
CONFIG_TEMPLATE = """# Configuration file generated by labman on %(date)s
# ------------------------- MAIN SETTINGS ----------------------------------
[main]
TEST_ENVIRONMENT=%(test)s
LOG_DIR=%(logdir)s
# ----------------------- POSTGRES SETTINGS --------------------------------
[postgres]
USER=%(user)s
PASSWORD=%(password)s
ADMIN_USER=%(admin_user)s
ADMIN_PASSWORD=%(admin_password)s
DATABASE=%(database)s
HOST=%(host)s
PORT=%(port)s
# ------------------------- QIITA SETTINGS ----------------------------------
[qiita]
SERVER_CERT=%(qiita_cert)s
"""
|
# coding: utf-8
#
# Copyright © 2010—2014 Andrey Mikhaylenko and contributors
#
# This file is part of Argh.
#
# Argh is free software under terms of the GNU Lesser
# General Public License version 3 (LGPLv3) as published by the Free
# Software Foundation. See the file README.rst for copying conditions.
#
"""
Command decorators
~~~~~~~~~~~~~~~~~~
"""
from mitmflib.argh.constants import (ATTR_ALIASES, ATTR_ARGS, ATTR_NAME,
ATTR_WRAPPED_EXCEPTIONS,
ATTR_WRAPPED_EXCEPTIONS_PROCESSOR,
ATTR_EXPECTS_NAMESPACE_OBJECT)
__all__ = ['alia | ses', 'named', 'arg', 'wrap_errors', 'expects_obj']
def named(new_name):
    """
    Use *new_name* as the command name instead of the function's own name.
    The string is attached verbatim; no further processing happens.
    Usage::
        @named('load')
        def do_load_some_stuff_and_keep_the_original_function_name(args):
            ...
    The command then is reachable only as ``load``.  To add extra names while
    keeping the original one, see :func:`aliases`.
    .. versionadded:: 0.19
    """
    def _decorator(func):
        setattr(func, ATTR_NAME, new_name)
        return func
    return _decorator
def aliases(*names):
    """
    Registers alternative command name(s) for the given function, in addition
    to its original name. Usage::
        @aliases('co', 'check')
        def checkout(args):
            ...
    The resulting command is available as ``checkout``, ``check`` and ``co``.
    .. note::
        This only takes effect with a recent version of argparse (see `Python
        issue 9324`_ and `Python rev 4c0426`_), shipped with **Python 3.2+**
        and possibly available elsewhere as a separate package.  Argh emits
        no warning and simply ignores aliases when they are unsupported; see
        :attr:`~argh.assembling.SUPPORTS_ALIASES`.
    .. _Python issue 9324: http://bugs.python.org/issue9324
    .. _Python rev 4c0426: http://hg.python.org/cpython/rev/4c0426261148/
    .. versionadded:: 0.19
    """
    def _decorator(func):
        setattr(func, ATTR_ALIASES, names)
        return func
    return _decorator
def arg(*args, **kwargs):
    """
    Declares an argparse argument for the decorated function without
    registering or modifying the function itself.  The signature is exactly
    that of :meth:`argparse.ArgumentParser.add_argument`; some keywords may
    be omitted when they can be easily guessed.
    Usage::
        @arg('path')
        @arg('--format', choices=['yaml','json'], default='json')
        @arg('--dry-run', default=False)
        @arg('-v', '--verbosity', choices=range(0,3), default=1)
        def load(args):
            loaders = {'json': json.load, 'yaml': yaml.load}
            loader = loaders[args.format]
            data = loader(args.path)
            if not args.dry_run:
                if 1 < verbosity:
                    print('saving to the database')
                put_to_database(data)
    Note that:
    * ``action="store_true"`` for ``--dry-run`` was not needed;
    * ``type=int`` for ``--verbosity`` was not needed.
    """
    def _decorator(func):
        spec = dict(option_strings=args, **kwargs)
        declared = getattr(func, ATTR_ARGS, [])
        # Decorators apply bottom-up, so prepending keeps the declared
        # (top-to-bottom) positional order intact.
        declared.insert(0, spec)
        setattr(func, ATTR_ARGS, declared)
        return func
    return _decorator
def wrap_errors(errors=None, processor=None, *args):
    """
    Decorator.  Wraps the given exceptions into
    :class:`~argh.exceptions.CommandError`.  Usage::
        @wrap_errors([AssertionError])
        def foo(x=None, y=None):
            assert x or y, 'x or y must be specified'
    If the assertion fails, its message is printed correctly and the stack
    stays hidden, avoiding boilerplate code.
    :param errors:
        A list of exception classes to catch.
    :param processor:
        A callable that expects the exception object and returns a string.
        For example, this renders all wrapped errors in red colour::
            from termcolor import colored
            def failure(err):
                return colored(str(err), 'red')
            @wrap_errors(processor=failure)
            def my_command(...):
                ...
    """
    def _decorator(func):
        for attr, value in ((ATTR_WRAPPED_EXCEPTIONS, errors),
                            (ATTR_WRAPPED_EXCEPTIONS_PROCESSOR, processor)):
            if value:
                setattr(func, attr, value)
        return func
    return _decorator
def expects_obj(func):
    """
    Marks the given function as one that expects a namespace object rather
    than expanded arguments.  Usage::
        @arg('bar')
        @arg('--quux', default=123)
        @expects_obj
        def foo(args):
            yield args.bar, args.quux
    which is equivalent to::
        def foo(bar, quux=123):
            yield bar, quux
    In most cases this decorator is unnecessary.
    """
    setattr(func, ATTR_EXPECTS_NAMESPACE_OBJECT, True)
    return func
|
#
# Copyright (c) 2001 - 2015 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the S | oftware, | and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Options/EnumOption.py rel_2.3.5:3329:275e75118ad4 2015/06/20 11:18:26 bdbaddog"
__doc__ = """Place-holder for the old SCons.Options module hierarchy
This is for backwards compatibility. The new equivalent is the Variables/
class hierarchy. These will have deprecation warnings added (some day),
and will then be removed entirely (some day).
"""
import SCons.Variables
import SCons.Warnings
warned = False
def EnumOption(*args, **kw):
    """Deprecated shim that forwards to SCons.Variables.EnumVariable().

    Emits a DeprecatedOptionsWarning once per process.
    """
    global warned
    if not warned:
        SCons.Warnings.warn(
            SCons.Warnings.DeprecatedOptionsWarning,
            "The EnumOption() function is deprecated; use the EnumVariable() function instead.")
        warned = True
    return SCons.Variables.EnumVariable(*args, **kw)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def ExtensionFaultTypeInfo(vim, *args, **kwargs):
    '''This data object type describes fault types defined by the extension.

    Positional args fill the fields in declaration order (required fields
    first, then optional); keyword args must name a known field.  Raises
    IndexError when the single required field (faultID) is missing and
    InvalidArgumentError for unknown keyword names.
    '''
    obj = vim.client.factory.create('ns0:ExtensionFaultTypeInfo')

    # do some validation checking...
    if (len(args) + len(kwargs)) < 1:
        # Bug fix: the message claimed "at least 2 arguments" although the
        # check requires only one, and it reported len(args) although the
        # check counts kwargs too.
        raise IndexError('Expected at least 1 argument got: %d' %
                         (len(args) + len(kwargs)))

    required = [ 'faultID' ]
    optional = [ 'dynamicProperty', 'dynamicType' ]

    for name, arg in zip(required + optional, args):
        setattr(obj, name, arg)

    for name, value in kwargs.items():
        if name in required + optional:
            setattr(obj, name, value)
        else:
            raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))

    return obj
|
import unittest
import json
import forgi.threedee.utilities._dssr as ftud
import forgi.threedee.model.coarse_grain as ftmc
import forgi.graph.residue as fgr
class TestHelperFunctions(unittest.TestCase):
    def test_dssr_to_pdb_atom_id_validIds(self):
        # Each case maps a DSSR residue identifier to the expected
        # (chain, (hetero-flag, number, insertion-code)) PDB resid.
        cases = [
            ("B.C24", ("B", (" ", 24, " "))),
            ("1:B.C24", ("B", (" ", 24, " "))),
            ("LYS124", (None, (" ", 124, " "))),
            ("Z12.U13", ("Z12", (" ", 13, " "))),
            ("A.5BU36", ("A", (" ", 36, " "))),
            ("C.C47^M", ("C", (" ", 47, "M"))),
            ("C.5BU47^M", ("C", (" ", 47, "M"))),
            (u'A.C1', ("A", (" ", 1, " "))),
            (u'B.U-1', ("B", (" ", -1, " "))),
            (u'A.A-2', ("A", (" ", -2, " "))),
        ]
        for dssr_id, expected in cases:
            self.assertEqual(ftud.dssr_to_pdb_resid(dssr_id), expected)
class TestCoaxialStacks(unittest.TestCase):
    """Tests for DSSR-derived coaxial-stack annotations on the 1J1U structure."""
    def setUp(self):
        # Load the coarse-grained RNA model and its matching DSSR JSON output.
        cg = ftmc.CoarseGrainRNA.from_bg_file("test/forgi/threedee/data/1J1U.cg")
        with open("test/forgi/threedee/data/1J1U.json") as f:
            j = json.load(f)
        self.dssr = ftud.DSSRAnnotation(j, cg)
    def test_coaxial_stacks(self):
        # Sorted comparison: stack-pair order is not significant.
        self.assertEqual(sorted(self.dssr.coaxial_stacks()),
                         sorted([["s2", "s1"], ["s0", "s3"]]))
    @unittest.skip("Currently not working. TODO")
    def test_compare_coaxial_stacks(self):
        # Compare forgi's own stack detection against DSSR's annotation.
        forgi, dssr = self.dssr.compare_coaxial_stack_annotation()
        self.assertEqual(len(dssr), 2)
        self.assertGreaterEqual(len(forgi), 1)
        self.assertGreaterEqual(len(forgi & dssr), 1)
        self.assertIn(("s0", "s5"), (x.stems for x in forgi))
        for x in forgi:
            self.assertEqual(x.forgi, "stacking")
        for x in dssr:
            self.assertEqual(x.dssr, "stacking")
    def test_stacking_nts(self):
        # Spot-check one known stacked pair and one known non-stacked pair.
        stacks = self.dssr.stacking_nts()
        self.assertIn((fgr.RESID("B:544"), fgr.RESID("B:545")), stacks)
        self.assertNotIn((fgr.RESID("B:549"), fgr.RESID("B:544")), stacks)
|
'''
pysplat is free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any lat | er version.
pysplat is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY | ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with pysplat. If not, see < http://www.gnu.org/licenses/ >.
(C) 2016 by Thomas Pointhuber, <thomas.pointhuber@gmx.at>
''' |
# -*- coding: | utf-8 -*-
from tornado.options import define
define('debug', default=False, type=bool)
# Tornado的监听端口
define('port', default=8888, type=int)
# WHoosh Search相关
define('whoosh_ix_path', default='/Users/liushuai/Desktop/index', type=str)
# MongoDB
define('mongo_a | ddr', default='127.0.0.1', type=str)
define('mongo_port', default=27017, type=int)
|
import unittest
import os
import numpy as np
from tools.sampling import read_log_file
from tools.walk_trees import walk_trees_with_data
from tools.game_tree.nodes import ActionNode, BoardCardsNode, HoleCardsNode
LEDUC_POKER_GAME_FILE_PATH = 'games/leduc.limit.2p.game'
class SamplingTests(unittest.TestCase):
    """Tests for building per-player sample trees from ACPC poker logs."""
    def test_log_parsing_to_sample_trees(self):
        players = read_log_file(
            LEDUC_POKER_GAME_FILE_PATH,
            'test/sample_log.log',
            ['player_1', 'player_2'])
        callback_was_called_at_least_once = False
        def node_callback(data, node):
            # ``data`` flags whether the path walked so far matches the
            # sampled line of play; each branch returns per-child flags.
            nonlocal callback_was_called_at_least_once
            if isinstance(node, ActionNode):
                callback_was_called_at_least_once = True
                if data:
                    # On the sampled path only action 1 should be counted once.
                    self.assertTrue(np.all(node.action_decision_counts == [0, 1, 0]))
                else:
                    self.assertTrue(np.all(node.action_decision_counts == [0, 0, 0]))
                return [data if action == 1 else False for action in node.children]
            elif isinstance(node, HoleCardsNode):
                # Only the dealt hole cards (43 and 47) stay on the sampled path.
                return [cards == (43,) or cards == (47,) for cards in node.children]
            elif isinstance(node, BoardCardsNode):
                # Only board card 50 was dealt in the sample log.
                return [data if cards == (50,) else False for cards in node.children]
            else:
                return [data for _ in node.children]
        for name in players:
            player_tree = players[name]
            walk_trees_with_data(node_callback, True, player_tree)
        self.assertTrue(callback_was_called_at_least_once)
    def test_log_parsing_to_sample_trees_performance(self):
        players = read_log_file(
            LEDUC_POKER_GAME_FILE_PATH,
            'test/sample_log-large.log',
            ['CFR_trained', 'Random_1'])
        # Every hand in the 25k-hand log contributes one decision per player.
        visits_sum = 0
        for name in players:
            player_tree = players[name]
            for _, root_action_node in player_tree.children.items():
                visits_sum += np.sum(root_action_node.action_decision_counts)
        self.assertEqual(visits_sum, 50000)
|
yName = os.environ.get("DISPLAY")
if not displayName:
raise QtileError("No DISPLAY set.")
if not fname:
# Dots might appear in the host part of the display name
# during remote X sessions. Let's strip the host part first.
displayNum = displayName.partition(":")[2]
if "." not in displayNum:
displayName += ".0"
fname = command.find_sockfile(displayName)
self.conn = xcbq.Connection(displayName)
self.config = config
self.fname = fname
hook.init(self)
self.windowMap = {}
self.widgetMap = {}
self.groupMap = {}
self.groups = []
self.keyMap = {}
# Find the modifier mask for the numlock key, if there is one:
nc = self.conn.keysym_to_keycode(xcbq.keysyms["Num_Lock"])
self.numlockMask = xcbq.ModMasks[self.conn.get_modifier(nc)]
self.validMask = ~(self.numlockMask | xcbq.ModMasks["lock"])
# Because we only do Xinerama multi-screening,
# we can assume that the first
# screen's root is _the_ root.
self.root = self.conn.default_screen.root
self.root.set_attribute(
eventmask=(
EventMask.StructureNotify |
EventMask.SubstructureNotify |
EventMask.SubstructureRedirect |
EventMask.EnterWindow |
EventMask.LeaveWindow
)
)
self.root.set_property(
'_NET_SUPPORTED',
[self.conn.atoms[x] for x in xcbq.SUPPORTED_ATOMS]
)
self.supporting_wm_check_window = self.conn.create_window(-1, -1, 1, 1)
self.root.set_property(
'_NET_SUPPORTING_WM_CHECK',
self.supporting_wm_check_window.wid
)
# setup the default cursor
self.root.set_cursor('left_ptr')
wmname = getattr(self.config, "wmname", "qtile")
self.supporting_wm_check_window.set_property('_NET_WM_NAME', wmname)
self.supporting_wm_check_window.set_property(
'_NET_SUPPORTING_WM_CHECK',
self.supporting_wm_check_window.wid
)
if config.main:
config.main(self)
self.dgroups = None
if self.config.groups:
key_binder = None
if hasattr(self.config, 'dgroups_key_binder'):
key_binder = self.config.dgroups_key_binder
self.dgroups = DGroups(self, self.config.groups, key_binder)
if hasattr(config, "widget_defaults") and config.widget_defaults:
_Widget.global_defaults = config.widget_defaults
else:
_Widget.global_defaults = {}
for i in self.groups:
self.groupMap[i.name] = i
self.setup_eventloop()
self.server = command._Server(self.fname, self, config, self._eventloop)
self.currentScreen = None
self.screens = []
self._process_screens()
self.currentScreen = self.screens[0]
| self._drag = None
self.ignoreEvents = set([
xcffib.xproto.KeyReleaseEvent,
xcffib.xproto.ReparentNotifyEvent,
xcffib.xproto.CreateNotifyEvent,
# DWM handles this to help "broken focusing windows".
xcffib.xproto.MapNotifyEvent,
xcffib.xproto.LeaveNotifyEvent,
xcffib.xproto.FocusOutEvent,
xcffib.xproto.FocusInEvent,
| xcffib.xproto.NoExposureEvent
])
self.conn.flush()
self.conn.xsync()
self._xpoll()
# Map and Grab keys
for key in self.config.keys:
self.mapKey(key)
# It fixes problems with focus when clicking windows of some specific clients like xterm
def noop(qtile):
pass
self.config.mouse += (Click([], "Button1", command.lazy.function(noop), focus="after"),)
self.mouseMap = {}
for i in self.config.mouse:
if self.mouseMap.get(i.button_code) is None:
self.mouseMap[i.button_code] = []
self.mouseMap[i.button_code].append(i)
self.grabMouse()
# no_spawn is set when we are restarting; we only want to run the
# startup hook once.
if not no_spawn:
hook.fire("startup_once")
hook.fire("startup")
self.scan()
self.update_net_desktops()
hook.subscribe.setgroup(self.update_net_desktops)
if state:
st = pickle.load(six.BytesIO(state.encode()))
try:
st.apply(self)
except:
logger.exception("failed restoring state")
self.selection = {
"PRIMARY": {"owner": None, "selection": ""},
"CLIPBOARD": {"owner": None, "selection": ""}
}
self.setup_selection()
    def setup_selection(self):
        """Create a hidden window and subscribe to PRIMARY/CLIPBOARD selection changes."""
        PRIMARY = self.conn.atoms["PRIMARY"]
        CLIPBOARD = self.conn.atoms["CLIPBOARD"]
        # Off-screen 1x1 window used solely to receive selection events.
        self.selection_window = self.conn.create_window(-1, -1, 1, 1)
        self.selection_window.set_attribute(eventmask=EventMask.PropertyChange)
        self.conn.xfixes.select_selection_input(self.selection_window,
                                                "PRIMARY")
        self.conn.xfixes.select_selection_input(self.selection_window,
                                                "CLIPBOARD")
        # Record the current selection owners so later changes can be tracked.
        r = self.conn.conn.core.GetSelectionOwner(PRIMARY).reply()
        self.selection["PRIMARY"]["owner"] = r.owner
        r = self.conn.conn.core.GetSelectionOwner(CLIPBOARD).reply()
        self.selection["CLIPBOARD"]["owner"] = r.owner
        # ask for selection on startup
        self.convert_selection(PRIMARY)
        self.convert_selection(CLIPBOARD)
    def setup_eventloop(self):
        """Create the asyncio loop, wire signals and the X connection reader."""
        self._eventloop = asyncio.new_event_loop()
        self._eventloop.add_signal_handler(signal.SIGINT, self.stop)
        self._eventloop.add_signal_handler(signal.SIGTERM, self.stop)
        self._eventloop.set_exception_handler(
            lambda x, y: logger.exception("Got an exception in poll loop")
        )
        logger.info('Adding io watch')
        # Poll the X connection whenever its file descriptor becomes readable.
        fd = self.conn.conn.get_file_descriptor()
        self._eventloop.add_reader(fd, self._xpoll)
        self.setup_python_dbus()
    def setup_python_dbus(self):
        """Spawn the gobject main-loop thread that python-dbus requires (if available)."""
        # This is a little strange. python-dbus internally depends on gobject,
        # so gobject's threads need to be running, and a gobject "main loop
        # thread" needs to be spawned, but we try to let it only interact with
        # us via calls to asyncio's call_soon_threadsafe.
        try:
            # We import dbus here to thrown an ImportError if it isn't
            # available. Since the only reason we're running this thread is
            # because of dbus, if dbus isn't around there's no need to run
            # this thread.
            import dbus  # noqa
            from gi.repository import GLib
            def gobject_thread():
                # Iterate the default GLib context until finalize() is called.
                ctx = GLib.main_context_default()
                while not self._finalize:
                    try:
                        ctx.iteration(True)
                    except Exception:
                        logger.exception("got exception from gobject")
            self._glib_loop = self.run_in_executor(gobject_thread)
        except ImportError:
            logger.warning("importing dbus/gobject failed, dbus will not work.")
            self._glib_loop = None
def finalize(self):
self._finalize = True
self._eventloop.remove_signal_handler(signal.SIGINT)
self._eventloop.remove_signal_handler(signal.SIGTERM)
self._eventloop.set_exception_handler(None)
try:
from gi.repository import GLib
GLib.idle_add(lambda: None)
self._eventloop.run_until_complete(self._glib_loop)
except ImportError:
pass
try:
for w in self.widgetMap.values():
w.finalize()
for l in self.config.layouts:
l.finalize()
for screen in self.screens:
for bar in [screen.top, s |
# Copyright (C) 2007, Eduardo Silva <edsiper@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# | it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# | along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from gi.repository import Gdk
from sugar3.graphics import style
from sugar3.graphics.palette import WidgetInvoker
def _get_screen_area():
    """Return the screen rectangle inset by the frame thickness on top/left."""
    margin = style.GRID_CELL_SIZE
    area = Gdk.Rectangle()
    area.x = margin
    area.y = margin
    area.width = Gdk.Screen.width() - margin
    area.height = Gdk.Screen.height() - margin
    return area
class FrameWidgetInvoker(WidgetInvoker):
    """Palette invoker for widgets that live in the Sugar frame."""
    def __init__(self, widget):
        WidgetInvoker.__init__(self, widget, widget.get_child())
        # Anchor palettes to the widget instead of the pointer position.
        self._position_hint = self.ANCHORED
        # Presumably used by WidgetInvoker to keep palettes on-screen
        # inside the frame border -- confirm against WidgetInvoker.
        self._screen_area = _get_screen_area()
|
#!/usr/bin/env python
NAME = 'F5 Trafficshield'


def is_waf(self):
    """Detect F5 TrafficShield via its ASINFO cookie or Server header."""
    header_checks = (['cookie', '^ASINFO='], ['server', 'F5-TrafficShield'])
    for check in header_checks:
        result = self.matchheader(check)
        if result is None:
            return None
        if result:
            return result
    # the following based on nmap's http-waf-fingerprint.nse
    if self.matchheader(('server', 'F5-TrafficShield')):
        return True
    return False
|
"""Implementation of :class:`SymPyRealDomain` class. """
from sympy.polys.domains.realdomain import RealDomain
from sympy.polys.domains.groundtypes import SymPyRealType
class SymPyRealDomain(RealDomain):
    """Domain for real numbers based on SymPy Float type. """
    # By sympy.polys convention, K1 is the target domain (this one) and K0
    # the source domain of the value being converted.
    dtype = SymPyRealType
    zero = dtype(0)
    one = dtype(1)
    alias = 'RR_sympy'
    def __init__(self):
        pass
    def from_ZZ_python(K1, a, K0):
        """Convert a Python `int` object to `dtype`. """
        return SymPyRealType(a)
    def from_QQ_python(K1, a, K0):
        """Convert a Python `Fraction` object to `dtype`. """
        return SymPyRealType(a.numerator) / a.denominator
    def from_ZZ_sympy(K1, a, K0):
        """Convert a SymPy `Integer` object to `dtype`. """
        return SymPyRealType(a.p)
    def from_QQ_sympy(K1, a, K0):
        """Convert a SymPy `Rational` object to `dtype`. """
        return SymPyRealType(a.p) / a.q
    def from_ZZ_gmpy(K1, a, K0):
        """Convert a GMPY `mpz` object to `dtype`. """
        return SymPyRealType(int(a))
    def from_QQ_gmpy(K1, a, K0):
        """Convert a GMPY `mpq` object to `dtype`. """
        return SymPyRealType(int(a.numer())) / int(a.denom())
    def from_RR_sympy(K1, a, K0):
        """Convert a SymPy `Float` object to `dtype`. """
        # Already the right type; pass through unchanged.
        return a
    def from_RR_mpmath(K1, a, K0):
        """Convert a mpmath `mpf` object to `dtype`. """
        return SymPyRealType(a)
| |
rField')(max_length=40))
# Changing field 'LibertySessionSP.django_session_key'
db.alter_column(u'saml_libertysessionsp', 'django_session_key', self.gf('django.db.models.fields.CharField')(max_length=40))
# Changing field 'LibertyAssertion.provider_id'
db.alter_column(u'saml_libertyassertion', 'provider_id', self.gf('django.db.models.fields.CharField')(max_length=80))
# Changing field 'LibertyAssertion.assertion_id'
db.alter_column(u'saml_libertyassertion', 'assertion_id', self.gf('django.db.models.fields.CharField')(max_length=50))
# Changing field 'LibertyAssertion.session_index'
db.alter_column(u'saml_libertyassertion', 'session_index', self.gf('django.db.models.fields.CharField')(max_length=80))
# Changing field 'LibertySessionDump.django_session_key'
db.alter_column(u'saml_libertysessiondump', 'django_session_key', self.gf('django.db.models.fields.CharField')(max_length=40))
models = {
u'attribute_aggregator.attributesource': {
'Meta': {'object_name': 'AttributeSource'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
'namespace': ('django.db.models.fields.CharField', [], {'default': "('Default', 'Default')", 'max_length': '100'})
},
user_model_label: {
'Meta': {'object_name': user_model_label.split('.')[-1]},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
},
u'idp.attributeitem': {
'Meta': {'object_name': 'AttributeItem'},
'attribute_name': ('django.db.models.fields.CharField', [], {'default': "('OpenLDAProotDSE', 'OpenLDAProotDSE')", 'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'output_name_format': ('django.db.models.fields.CharField', [], {'default': "('urn:oasis:names:tc:SAML:2.0:attrname-format:uri', 'SAMLv2 URI')", 'max_length': '100'}),
'output_namespace': ('django.db.models.fields.CharField', [], {'default': "('Default', 'Default')", 'max_length': '100'}),
'required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'source': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['attribute_aggregator.AttributeSource']", 'null': 'True', 'blank': 'True'})
},
u'idp.attributelist': {
'Meta': {'object_name': 'AttributeList'},
'attributes': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'attributes of the list'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['idp.AttributeItem']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
},
u'idp.attributepolicy': {
'Meta': {'object_name': 'AttributePolicy'},
'allow_attributes_selection': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'ask_consent_attributes': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'attribute_filter_for_sso_from_push_sources': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'filter attributes of push sources with list'", 'null': 'True', 'to': u"orm['idp.AttributeList']"}),
'attribute_list_for_sso_from_pull_sources': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'attributes from pull sources'", 'null': 'True', 'to': u"orm['idp.AttributeList']"}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'filter_source_of_filtered_attributes': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'forward_attributes_from_push_sources': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'map_attributes_from_push_sources': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'map_attributes_of_filtered_attributes': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'output_name_format': ('django.db.models.fields.CharField', [], {'default': "('urn:oasis:names:tc:SAML:2.0:attrname-format:uri', 'SAMLv2 URI')", 'max_length': '100'}),
'output_namespace': ('django.db.models.fields.CharField', [], {'default': "('Default', 'Default')", 'max_length': '100'}),
'send_error_and_no_attrs_if_missing_required_attrs': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'source_filter_for_sso_from_push_sources': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'filter attributes of push sources with sources'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['attribute_aggregator.AttributeSource']"})
},
u'saml.authorizationattributemap': {
'Meta': {'object_name': 'AuthorizationAttributeMap'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'})
},
u'saml.authorizationattributemapping': {
'Meta': {'object_name': 'AuthorizationAttributeMapping'},
'attribute_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'attribute_value': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'attribute_value_format': ('django.db.models.fields.CharField', [], {'max_length': '40', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'map': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['saml.AuthorizationAttributeMap']"}),
'source_attribute_name': ('django.db.models.fields.CharField', [], {'max_length': '40', 'blank': 'True'})
},
u'saml.authorizationsppolicy': {
'Meta': {'object_name': 'AuthorizationSPPolicy'},
'attribute_map': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'authorization_attributes'", 'null': 'True', 'to': u"orm['saml.AuthorizationAttributeMap']"}),
'default_denial_message': ('django.db.models.fields.CharField', [], {'default': "u'You are not authorized to access the service.'", 'max_length': '80'}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'})
},
u'saml.idpoptionssppolicy': {
'Meta': {'object_name': 'IdPOptionsSPPolicy'},
| 'accept_slo': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'allow_create': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'back_url': ('django.db.models.fields.Ch | arField', [], {'default': "'/'", 'max_length': '200'}),
'binding_for_sso_response': ('django.db.models.fields.CharField', [], {'default': "'urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Artifact'", 'max_length': '200'}),
'enable_binding_for_sso_response': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'enable_http_method_for_defederation_request': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'enable_http_method_for_slo_request': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'F |
'service': {
'name': 'telegram',
'username': update.message.from_user.username,
},
'type': 'web_service_binding',
'version': 1,
},
'ctime': int(time.time()),
'expire_in': 60 * 60 * 24 * 365 * 1, # Expire in 1 year
'tag': 'signature'
}
temp_proof_data[update.message.chat_id] = data
json_block = json.dumps(data, indent=4)
except Exception as e:
logging.exception(e)
bot.send_message(
chat_id=update.message.chat_id, text="Something went wrong!")
return
bot.send_message(chat_id=update.message.chat_id,
text="Okay, please paste the following into your terminal (where you can use the keybase cli client) and paste the output here.")
bot.send_message(
chat_id=update.message.chat_id,
text="```\nkeybase pgp sign --message \"{}\"\n```".format(json_block.replace(r'"', r'\"')),
parse_mode=ParseMode.MARKDOWN)
bot.send_message(chat_id=update.message.chat_id,
text="If want to use gpg(2) you can copy and paste this command instead:")
bot.send_message(chat_id=update.message.chat_id,
text="```\necho \"{}\" | gpg -a --sign\n```".format(json_block.replace(r'"', r'\"')),
parse_mode=ParseMode.MARKDOWN)
return 'sign_block'
@filter_private
def check_block(bot, update):
    """Validate the user's signed PGP block and, on success, send back the
    final Keybase proof message ready for posting to @KeybaseProofs.

    Conversation step after 'sign_block': the incoming message is expected
    to be the armored PGP output of the command the bot asked the user to run.
    """
    if update.message.text.startswith('/cancel'):
        return cancel()
    lines = update.message.text.split('\n')
    # NOTE(review): the PGP-armor check only runs for multi-line input;
    # a single-line message skips validation entirely — confirm intended.
    if len(lines) > 1 and not ("BEGIN PGP MESSAGE" in lines[0] and "END PGP MESSAGE" in lines[-1]):
        bot.send_message(
            chat_id=update.message.chat_id,
            text="Your message is not a valid gpg message.")
        return ConversationHandler.END
    del lines
    pgp_content = fix_dashes(update.message.text)
    # Pop the pending proof built in the previous step; it is removed
    # before verification so stale entries never linger.
    proof_data = temp_proof_data[update.message.chat_id]
    # See mom, i clean up after myself:
    del temp_proof_data[update.message.chat_id]
    # Format the key fingerprint as uppercase 4-character groups for display.
    fingerprint = ' '.join([
        proof_data['body']['key']['fingerprint'][i:i + 4].upper()
        for i in range(0, len(proof_data['body']['key']['fingerprint']), 4)
    ])
    # NOTE(review): 'succes' (sic) is check_key's first result; treated as
    # truthy/falsy here — see check_key for the sentinel values of 'proof'.
    succes, proof = check_key(bot, proof_data, pgp_content,
                              update.message.from_user.username,
                              update.message.chat_id)
    if succes:
        bot.send_message(
            chat_id=update.message.chat_id,
            text="Your signed block is valid. You can now copy and paste the following "
            "message to @KeybaseProofs.")
        bot.send_message(chat_id=update.message.chat_id,
                         text="Keybase proof\n\n"
                         "I hereby claim:\n\n"
                         "- I am @{} on telegram.\n"
                         "- I am {} on keybase.\n"
                         "- I have a public key whose fingerprint is {}\n\n"
                         "To claim this, I am signing this object:\n"
                         "```\n{}\n```\n"
                         "with the key from above, yielding:\n"
                         "```\n{}\n```\n"
                         "Finally, I am proving my Telegram account by posting it in @KeybaseProofs"
                         .format(update.message.from_user.username,
                                 proof_data['body']['key']['username'],
                                 fingerprint,
                                 json.dumps(
                                     proof_data, sort_keys=True, indent=4),
                                 pgp_content))
    elif proof == 'invalid_sign':
        bot.send_message(
            chat_id=update.message.chat_id,
            text="Your signed block is not valid.",
            reply_to_message_id=update.message.message_id)
    elif proof == 'notimplemented':
        bot.send_message(
            chat_id=update.message.chat_id,
            text="Using other hosts than keybase.io is not supported yet.")
    else:
        logging.error("Unhandled check_proof result: " + proof)
@filter_private
def lookup_start(bot, update, args):
    """Entry point for the lookup command.

    With arguments, the query runs immediately via lookup_username;
    otherwise the user is prompted and the conversation moves to the
    'enter_username' state.
    """
    if args:
        update.message.text = ' '.join(args)
        return lookup_username(bot, update)
    bot.send_message(
        chat_id=update.message.chat_id,
        text="Please enter a query to search for.")
    return 'enter_username'
@filter_private
def lookup_username(bot, update):
    """Look up a stored Keybase proof for the queried user and report,
    message by message, whether it still verifies.

    Fixes two extraction-garbled tokens from the original source:
    the `succes, proof = check_key(...)` unpacking and `info.signed_block`.
    """
    bot.send_chat_action(chat_id=update.message.chat_id, action='typing')
    info = lookup_proof(bot, query=update.message.text)
    if info:
        proof_object = json.loads(info.proof_object)
        # Format the key fingerprint as uppercase 4-character groups.
        fingerprint = ' '.join([
            proof_object['body']['key']['fingerprint'][i:i + 4].upper()
            for i in range(0,
                           len(proof_object['body']['key']['fingerprint']), 4)
        ])
        bot.send_message(
            chat_id=update.message.chat_id,
            text="▶ Identifying https://keybase.io/{}".format(info.keybase_username))
        bot.send_message(
            chat_id=update.message.chat_id,
            text="✅ public key fingerprint: " +
            fingerprint)
        bot.send_chat_action(chat_id=update.message.chat_id, action='typing')
        # Re-verify the stored proof against the current Telegram username.
        succes, proof = check_key(bot,
                                  json.loads(info.proof_object), info.signed_block,
                                  info.telegram_username, info.user_id)
        if succes == 'no_expiry':
            bot.send_message(chat_id=update.message.chat_id,
                             text="😕 \"@{}\" on telegram. "
                             "But the proof has no expiry set, so be careful.".format(info.telegram_username))
        elif succes:
            bot.send_message(
                chat_id=update.message.chat_id,
                text="✅ \"@{}\" on telegram".format(info.telegram_username))
        else:
            if proof == 'not_username':
                bot.send_message(chat_id=update.message.chat_id,
                                 text="❌ WARNING: \"{}\" on telegram may have deleted their account, or changed their username. "
                                 "The user may not be who they claim they are!".format(info.telegram_username))
            elif proof == 'invalid_sign':
                bot.send_message(chat_id=update.message.chat_id,
                                 text="❌ WARNING: \"{}\" on telegram has not signed their proof correctly. "
                                 "The user may not be who they claim they are!".format(info.telegram_username))
            elif proof == 'malformed':
                bot.send_message(chat_id=update.message.chat_id,
                                 text="❌ WARNING: \"{}\" has a malformed proof, it could not be verified.".format(info.telegram_username))
            elif proof == 'expired':
                bot.send_message(chat_id=update.message.chat_id,
                                 text="❌ WARNING: \"{}\" has let their proof expire. It cannot be trusted anymore. "
                                 "The user may not be who they claim they are!".format(info.telegram_username))
            else:
                bot.send_message(chat_id=update.message.chat_id,
                                 text="Could not verify Telegram username, you are advised to check for yourself. (Internal error)")
                logging.error("Check proof failed for lookup. Return message: %s", proof)
        bot.send_message(
            chat_id=update.message.chat_id,
            text="▶ If you want to check the proof message yourself, use the /forwardproof command."
        )
    else:
        bot.send_message(chat_id=update.message.chat_id, text="No proof found for your query.")
    return ConversationHandler.END
@filter_private
def forward_proof_start(bot, update, args):
if len(args) >= 1:
update.message.text = ' '.join(args)
return forward_proof(bot, update)
bot.send_message(
chat_id=update.message.chat_id,
text="Please enter a usernam |
nse, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import unittest
from base64 import b64encode
from datetime import datetime
from airflow import configuration
from airflow import models
from airflow.contrib.operators.sftp_operator import SFTPOperator, SFTPOperation
from airflow.contrib.operators.ssh_operator import SSHOperator
from airflow.models import DAG, TaskInstance
from airflow.settings import Session
TEST_DAG_ID = 'unit_tests'
DEFAULT_DATE = datetime(2017, 1, 1)
def reset(dag_id=TEST_DAG_ID):
    """Delete every TaskInstance recorded for *dag_id* so runs start clean."""
    session = Session()
    session.query(models.TaskInstance).filter_by(dag_id=dag_id).delete()
    session.commit()
    session.close()


# Clear leftover state from previous test runs at import time.
reset()
class SFTPOperatorTest(unittest.TestCase):
def setUp(self):
configuration.load_test_config()
from airflow.contrib.hooks.ssh_hook import SSHHook
hook = SSHHook(ssh_conn_id='ssh_default')
hook.no_host_key_check = True
args = {
'owner': 'airflow',
'start_date': DEFAULT_DATE,
'provide_context': True
}
dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once', default_args=args)
dag.schedule_interval = '@once'
self.hook = hook
self.dag = dag
self.test_dir = "/tmp"
self.test_local_filename = 'test_local_file'
self.test_remote_filename = 'test_remote_file'
self.test_local_filepath = '{0}/{1}'.format(self.test_dir,
self.test_local_filename)
self.test_remote_filepath = '{0}/{1}'.format(self.test_dir,
self.test_remote_filename)
    def test_pickle_file_transfer_put(self):
        """PUT a local file to the remote host via SFTP, then `cat` it back
        over SSH and compare. With XCom pickling enabled the SSH operator's
        raw bytes round-trip unchanged."""
        configuration.set("core", "enable_xcom_pickling", "True")
        test_local_file_content = \
            b"This is local file content \n which is multiline " \
            b"continuing....with other character\nanother line here \n this is last line"
        # create a test file locally
        with open(self.test_local_filepath, 'wb') as f:
            f.write(test_local_file_content)
        # put test file to remote
        put_test_task = SFTPOperator(
            task_id="test_sftp",
            ssh_hook=self.hook,
            local_filepath=self.test_local_filepath,
            remote_filepath=self.test_remote_filepath,
            operation=SFTPOperation.PUT,
            dag=self.dag
        )
        self.assertIsNotNone(put_test_task)
        ti2 = TaskInstance(task=put_test_task, execution_date=datetime.now())
        ti2.run()
        # check the remote file content
        check_file_task = SSHOperator(
            task_id="test_check_file",
            ssh_hook=self.hook,
            command="cat {0}".format(self.test_remote_filepath),
            do_xcom_push=True,
            dag=self.dag
        )
        self.assertIsNotNone(check_file_task)
        ti3 = TaskInstance(task=check_file_task, execution_date=datetime.now())
        ti3.run()
        self.assertEqual(
            ti3.xcom_pull(task_ids='test_check_file', key='return_value').strip(),
            test_local_file_content)
    def test_json_file_transfer_put(self):
        """Same PUT round-trip as the pickle variant, but with XCom pickling
        disabled: the SSH operator's bytes output is base64-encoded for the
        JSON XCom backend, so compare against b64encode of the source."""
        configuration.set("core", "enable_xcom_pickling", "False")
        test_local_file_content = \
            b"This is local file content \n which is multiline " \
            b"continuing....with other character\nanother line here \n this is last line"
        # create a test file locally
        with open(self.test_local_filepath, 'wb') as f:
            f.write(test_local_file_content)
        # put test file to remote
        put_test_task = SFTPOperator(
            task_id="test_sftp",
            ssh_hook=self.hook,
            local_filepath=self.test_local_filepath,
            remote_filepath=self.test_remote_filepath,
            operation=SFTPOperation.PUT,
            dag=self.dag
        )
        self.assertIsNotNone(put_test_task)
        ti2 = TaskInstance(task=put_test_task, execution_date=datetime.now())
        ti2.run()
        # check the remote file content
        check_file_task = SSHOperator(
            task_id="test_check_file",
            ssh_hook=self.hook,
            command="cat {0}".format(self.test_remote_filepath),
            do_xcom_push=True,
            dag=self.dag
        )
        self.assertIsNotNone(check_file_task)
        ti3 = TaskInstance(task=check_file_task, execution_date=datetime.now())
        ti3.run()
        self.assertEqual(
            ti3.xcom_pull(task_ids='test_check_file', key='return_value').strip(),
            b64encode(test_local_file_content).decode('utf-8'))
    def test_pickle_file_transfer_get(self):
        """Create a file on the remote host via SSH `echo`, GET it via SFTP,
        and compare the downloaded local content (XCom pickling enabled)."""
        configuration.set("core", "enable_xcom_pickling", "True")
        test_remote_file_content = \
            "This is remote file content \n which is also multiline " \
            "another line here \n this is last line. EOF"
        # create a test file remotely
        create_file_task = SSHOperator(
            task_id="test_create_file",
            ssh_hook=self.hook,
            command="echo '{0}' > {1}".format(test_remote_file_content,
                                              self.test_remote_filepath),
            do_xcom_push=True,
            dag=self.dag
        )
        self.assertIsNotNone(create_file_task)
        ti1 = TaskInstance(task=create_file_task, execution_date=datetime.now())
        ti1.run()
        # get remote file to local
        get_test_task = SFTPOperator(
            task_id="test_sftp",
            ssh_hook=self.hook,
            local_filepath=self.test_local_filepath,
            remote_filepath=self.test_remote_filepath,
            operation=SFTPOperation.GET,
            dag=self.dag
        )
        self.assertIsNotNone(get_test_task)
        ti2 = TaskInstance(task=get_test_task, execution_date=datetime.now())
        ti2.run()
        # test the received content
        content_received = None
        with open(self.test_local_filepath, 'r') as f:
            content_received = f.read()
        self.assertEqual(content_received.strip(), test_remote_file_content)
def test_json_file_transfer_get(self):
configuration.set("core", "enable_xcom_pickling", "False")
test_remote_file_content = \
"This is remote file content \n which is also multiline " \
"another line here \n this is last line. EOF"
# create a test file remotely
create_file_task = SSHOperator(
task_id="test_create_file",
ssh_hook=self.hook,
command="echo '{0}' > {1}".format(test_remote_file_content,
self.test_remote_filepath),
do_xcom_push=True,
dag=self.dag |
)
self.assertIsNotNone(create_file_task)
ti1 = TaskInstance(task=create_file_task, execution_date=datetime.now())
ti1.run()
# get remote file to local
get_test_task = SFTPOperator(
task_id="test_sftp",
ssh_hook=self.hook,
local_filepath=self.test_local_filepath,
remote_filepath=self.test_remote_filepath,
operation=SFTPOperation.GET,
dag=self.dag |
)
self.assertIsNotNone(get_test_task)
ti2 = TaskInstance(task=get_test_task, execution_date=datetime.now())
ti2.run()
# test the received content
content_received = None
with open(self.test_local_filepath, 'r') as f:
|
#! /usr/bin/python3
# -*- coding:utf-8 -*-
# Funciones y parametros arbitrarios
def funcion(**nombres):
    """Print each student's name and age, then return the keyword dict.

    Demonstrates arbitrary keyword parameters (**kwargs). The original
    source line was garbled ("def funcion | (**nombres)"); restored here.
    """
    print (type(nombres))
    for alumno in nombres:
        print ("%s es alumno y tiene %d años" % (alumno, nombres[alumno]))
    return nombres
#diccionario = {"Adrian":25, "Niño":25, "Roberto":23, "Celina":23}
# Restored from a garbled source line ("Celina | = 23").
print (funcion(Adrian = 25, Nino = 25, Roberto = 23, Celina = 23))
|
# Topydo - A todo.txt client written in Python.
# Copyright (C) 2014 - 2015 Bram Schoenmakers <bram@topydo.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
""" Provides the AddCommand class that implements the 'add' subcommand. """
import codecs
import re
from datetime import date
from os.path import expanduser
from sys import stdin
from topydo.lib.Config import config
from topydo.lib.prettyprinters.Numbers import PrettyPrinterNumbers
from topydo.lib.WriteCommand import WriteCommand
class AddCommand(WriteCommand):
    """Implements the 'add' subcommand: appends new todo items to the list.

    Restores two extraction-garbled tokens from the original source
    (`p_todo_text` in `_add_todo`'s inner helper and "Pre-processes"
    in its docstring).
    """

    def __init__(self, p_args, p_todolist, # pragma: no branch
                 p_out=lambda a: None,
                 p_err=lambda a: None,
                 p_prompt=lambda a: None):
        super().__init__(
            p_args, p_todolist, p_out, p_err, p_prompt)
        self.text = ' '.join(p_args)
        self.from_file = None

    def _process_flags(self):
        """Parse the -f flag: read todos from FILE ('-' means stdin)."""
        opts, args = self.getopt('f:')
        for opt, value in opts:
            if opt == '-f':
                self.from_file = expanduser(value)
        self.args = args

    def get_todos_from_file(self):
        """Return the lines of self.from_file ('-' selects stdin)."""
        if self.from_file == '-':
            f = stdin
        else:
            f = codecs.open(self.from_file, 'r', encoding='utf-8')
        todos = f.read().splitlines()
        return todos

    def _add_todo(self, p_todo_text):
        def _preprocess_input_todo(p_todo_text):
            """
            Pre-processes user input when adding a task.
            It detects a priority mid-sentence and puts it at the start.
            """
            todo_text = re.sub(r'^(.+) (\([A-Z]\))(.*)$', r'\2 \1\3',
                               p_todo_text)
            return todo_text

        todo_text = _preprocess_input_todo(p_todo_text)
        todo = self.todolist.add(todo_text)
        self.postprocess_input_todo(todo)
        # Stamp today's date on the new item when configured to do so.
        if config().auto_creation_date():
            todo.set_creation_date(date.today())
        self.out(self.printer.print_todo(todo))

    def execute(self):
        """ Adds a todo item to the list. """
        if not super().execute():
            return False
        self.printer.add_filter(PrettyPrinterNumbers(self.todolist))
        self._process_flags()
        if self.from_file:
            try:
                new_todos = self.get_todos_from_file()
                for todo in new_todos:
                    self._add_todo(todo)
            except (IOError, OSError):
                self.error('File not found: ' + self.from_file)
        else:
            if self.text:
                self._add_todo(self.text)
            else:
                self.error(self.usage())

    def usage(self):
        return """Synopsis:
  add <TEXT>
  add -f <FILE> | -"""

    def help(self):
        return """\
This subcommand automatically adds the creation date to the added item.
TEXT may contain:
* Priorities mid-sentence. Example: add "Water flowers (C)"
* Dependencies using before, after, partof, parents-of and children-of tags.
  These are translated to the corresponding 'id' and 'p' tags. The values of
  these tags correspond to the todo number (not the dependency number).
  Example: add "Subtask partof:1"
-f : Add todo items from specified FILE or from standard input.\
"""
|
##
# Copyright 2012-2016 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for iompi compiler toolchain (includes Intel compilers (icc, ifort) and OpenMPI.
@author: Stijn De Weirdt (Ghent University)
@author: Kenneth Hoste (Ghent University)
"""
from easybuild.toolchains.iccifort import IccIfort
from easybuild.toolchains.mpi.openmpi import OpenMPI
class Iompi(IccIfort, OpenMPI):
    """
    Toolchain combining the Intel compilers (icc/ifort) with OpenMPI.
    """
    NAME = 'iompi'
    # iompi is layered on top of the compiler-only iccifort toolchain.
    SUBTOOLCHAIN = IccIfort.NAME
| |
"""
Django settings for myclass project.
Generated by 'django-admin startproject' using Django 1.9.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'n^qif^$w3ooxd1m5&6ir7m^fy%3oq@s+d&pxyut32upkgzbg&4'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'myquiz',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'myclass.urls'
# Template engine configuration. The 'request' context processor entry was
# garbled in the source ("django.templ | ate...") and is restored here.
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'myclass.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
# SQLite development database. The 'default' key was garbled in the source
# ("'defa | ult'") and is restored here.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
|
from fabric.api import env
from fabric.context_managers import cd
from fabric.operations import run, local, put
env.shell = '/bin/bash -l -c'
env.user = 'd'
env.roledefs.update({
'staging': ['staging.solebtc.com'],
'production': ['solebtc.com']
})
# Heaven will execute fab -R staging deploy:branch_name=master
def deploy(branch_name):
    """Deploy *branch_name* to whichever role Heaven selected with -R.

    The original used a side-effecting conditional expression (and the
    line was garbled in the source); an explicit if/else is clearer and
    behaves identically.
    """
    if env.roles[0] == 'production':
        deployProduction(branch_name)
    else:
        deployStaging(branch_name)
def deployStaging(branch_name):
    """Ship *branch_name* to the staging host: archive the local git tree,
    upload and unpack it under $GOPATH, build the binary, publish the API
    doc, run database migrations and restart the supervised service."""
    printMessage("staging")
    codedir = '$GOPATH/src/github.com/solefaucet/solebtc'
    # Start from a clean checkout directory on the remote host.
    run('rm -rf %s' % codedir)
    run('mkdir -p %s' % codedir)
    # Archive the requested branch locally, upload it, then remove the temp file.
    local('git archive --format=tar --output=/tmp/archive.tar %s' % branch_name)
    local('ls /tmp')
    put('/tmp/archive.tar', '~/')
    local('rm /tmp/archive.tar')
    run('mv archive.tar %s' % codedir)
    with cd(codedir):
        run('tar xf archive.tar')
        run('go build -o solebtc')
        # mv doc to nginx root
        run('mv apidoc/v1.json /usr/share/nginx/html/doc')
        # database version control
        run("mysql -e 'create database if not exists solebtc_prod';")
        run('go get bitbucket.org/liamstask/goose/cmd/goose')
        run('goose -env production up')
        # restart solebtc service with supervisorctl
        run('supervisorctl restart solebtc')
def deployProduction(branch_name):
    """Deploy *branch_name* to the production host (not implemented yet)."""
    printMessage("production")
    # TODO
    # scp executable file from staging to production, database up, restart service
    # mark current timestamp or commit as version number so we can rollback easily
def printMessage(server):
    """Announce the deploy target: server role, remote host, and user."""
    message = "Deploying to %s server at %s as %s" % (server, env.host, env.user)
    print(message)
|
from game import models
from game.method.in_game import thread_fields, Thread_field
from game.tool.room_tool import *
from game.tool.tools import to_json
# Fetch live room info; expects room_id in POST (was: 实时获得房间信息 room_id)
def get_room_info(request):
    """Return the room's status, owner, and per-user details as JSON.

    Each user id in the room is looked up in the User model; users that no
    longer exist are silently skipped. Debug prints are left as-is.
    """
    room_id = int(request.POST['room_id'])
    room = get_room_by_id(room_id)
    print(room_id)
    print(room.users_status)
    users_array = []
    for u_id in room.users:
        find_user = models.User.objects.filter(id=u_id)
        if find_user:
            find_user = find_user[0]
            u_dict = {
                'user_id': find_user.id,
                'user_name': find_user.username,
                'win': find_user.win,
                'fail': find_user.fail,
                'user_status': room.users_status[u_id]
            }
            users_array.append(u_dict)
    # Response payload (was: 结果)
    response = {
        'status': room.status,
        'owner': room.owner,
        'users': users_array
    }
    print(response)
    return to_json(response)
# Toggle a user's ready state; expects room_id and user_id in POST
def change_user_status(request):
    """Flip user_id's ready flag in room_id and return the new state."""
    user_id = int(request.POST['user_id'])
    room_id = int(request.POST['room_id'])
    room = get_room_by_id(room_id)
    new_status = not room.users_status[user_id]
    room.users_status[user_id] = new_status
    return to_json({'response_code': 1, 'user_status': new_status})
# 房主开始游戏 user_id room_id
def begin_game(request):
    """Start the game if the requester owns the room and everyone is ready.

    Expects 'user_id' and 'room_id' in request.POST. Returns
    response_code 1 on success, -1 otherwise.
    """
    user_id = int(request.POST['user_id'])
    room_id = int(request.POST['room_id'])
    room = get_room_by_id(room_id)
    if user_id != room.owner:
        # Only the room owner may start the game.
        return to_json({'response_code': -1})
    everyone_ready = all(
        room.users_status[uid]
        for uid in room.users
        if uid != room.owner
    )
    if not everyone_ready:
        return to_json({'response_code': -1})
    room.users_status[user_id] = True
    room.status = True
    # Spawn and register the board-layout worker thread for this room.
    thread_fields[room_id] = Thread_field(room.users, room_id)
    thread_fields[room_id].start()
    return to_json({'response_code': 1})
# # 用户准备 user_id room_id
# def user_ready(request):
# user_id = request.POST.get('user_id')
# room_id = request.POST.get('room_id')
# # 找到此房间
# room = get_room_by_id(room_id)
# room.users_status[user_id] = True
#
#
# # 用户取消准备 user_id room_id
# def user_cancel_ready(request):
# user_id = request.POST.get('user_id')
# room_id = request.POST.get('room_id')
# # 找到此房间
# room = get_room_by_id(room_id)
# room.users_status[user_id] = False
#
#
# # 开始游戏 owner_id room_id
# def owner_begin(request):
# owner_id = request.POST.get('owner_id')
# room_id = request.POST.get('room_id')
# # 找到此房间
# room = get_room_by_id(room_id)
#     room.users_status[owner_id] = True
# all_ready = True
# if room_id == room.owner:
# for u in room.users:
# if not room.users_status[u]:
# all_ready = False
# break
# if all_ready:
# # 全部准备好
# room.status = True
# return 0
# else:
# # 有人没有准备好
# return 0
# else:
#             # 这个人不是房主
# return 0
#
#
# # 检查是否开始游戏了 room_id
# def check_room_status(request):
# room_id = request.POST.get('room_id')
# # 找到此房间
# room = get_room_by_id(room_id)
# if room.status:
# # 已经开始了
# return 0
# else:
# # 还没有开始
# return 0
sys.stdout = gclient.gclient_utils.MakeFileAutoFlush(sys.stdout)
sys.stdout = gclient.gclient_utils.MakeFileAnnotated(sys.stdout)
  def tearDown(self):
    """Verify every queued item was consumed, then restore the globals
    (SCM factory, stdout, cwd) that setUp patched."""
    self.assertEquals([], self._get_processed())
    gclient.Dependency.CreateSCM = self._old_createscm
    sys.stdout = self._old_sys_stdout
    os.chdir(self.previous_dir)
    super(GclientTest, self).tearDown()
  def _createscm(self, parsed_url, root_dir, name, out_fh=None, out_cb=None):
    """Replacement for gclient.Dependency.CreateSCM: sanity-checks the url
    and root, then returns a mock SCM bound to this test."""
    self.assertTrue(parsed_url.startswith('svn://example.com/'), parsed_url)
    self.assertTrue(root_dir.startswith(self.root_dir), root_dir)
    return SCMMock(self, name, parsed_url)
  def testDependencies(self):
    # Serial (single job) dependency-ordering check.
    self._dependencies('1')
def testDependenciesJobs(self):
self._dependencies('100 | 0')
  def _dependencies(self, jobs):
    """Verifies that dependencies are processed in the right order.

    e.g. if there is a dependency 'src' and another 'src/third_party/bar', that
    bar isn't fetched until 'src' is done.

    Args:
      |jobs| is the number of parallel jobs simulated (as a string, since it
      is passed straight to the --jobs command line flag).
    """
    parser = gclient.OptionParser()
    options, args = parser.parse_args(['--jobs', jobs])
    write(
        '.gclient',
        'solutions = [\n'
        '  { "name": "foo", "url": "svn://example.com/foo" },\n'
        '  { "name": "bar", "url": "svn://example.com/bar" },\n'
        '  { "name": "bar/empty", "url": "svn://example.com/bar_empty" },\n'
        ']')
    write(
        os.path.join('foo', 'DEPS'),
        'deps = {\n'
        '  "foo/dir1": "/dir1",\n'
        # This one will depend on dir1/dir2 in bar.
        '  "foo/dir1/dir2/dir3": "/dir1/dir2/dir3",\n'
        '  "foo/dir1/dir2/dir3/dir4": "/dir1/dir2/dir3/dir4",\n'
        '}')
    write(
        os.path.join('bar', 'DEPS'),
        'deps = {\n'
        # There is two foo/dir1/dir2. This one is fetched as bar/dir1/dir2.
        '  "foo/dir1/dir2": "/dir1/dir2",\n'
        '}')
    write(
        os.path.join('bar/empty', 'DEPS'),
        'deps = {\n'
        '}')
    obj = gclient.GClient.LoadCurrentConfig(options)
    self._check_requirements(obj.dependencies[0], {})
    self._check_requirements(obj.dependencies[1], {})
    obj.RunOnDeps('None', args)
    actual = self._get_processed()
    first_3 = [
        ('bar', 'svn://example.com/bar'),
        ('bar/empty', 'svn://example.com/bar_empty'),
        ('foo', 'svn://example.com/foo'),
    ]
    # NOTE(review): |jobs| is a string here, so `jobs != 1` is always True
    # (even for '1') — confirm whether the serial branch below is reachable.
    if jobs != 1:
      # We don't care of the ordering of these items except that bar must be
      # before bar/empty.
      self.assertTrue(
          actual.index(('bar', 'svn://example.com/bar')) <
          actual.index(('bar/empty', 'svn://example.com/bar_empty')))
      self.assertEquals(first_3, sorted(actual[0:3]))
    else:
      self.assertEquals(first_3, actual[0:3])
    self.assertEquals(
        [
          ('foo/dir1', 'svn://example.com/foo/dir1'),
          ('foo/dir1/dir2', 'svn://example.com/bar/dir1/dir2'),
          ('foo/dir1/dir2/dir3', 'svn://example.com/foo/dir1/dir2/dir3'),
          ('foo/dir1/dir2/dir3/dir4',
           'svn://example.com/foo/dir1/dir2/dir3/dir4'),
        ],
        actual[3:])
    self.assertEquals(3, len(obj.dependencies))
    self.assertEquals('foo', obj.dependencies[0].name)
    self.assertEquals('bar', obj.dependencies[1].name)
    self.assertEquals('bar/empty', obj.dependencies[2].name)
    self._check_requirements(
        obj.dependencies[0],
        {
          'foo/dir1': ['bar', 'bar/empty', 'foo'],
          'foo/dir1/dir2/dir3':
              ['bar', 'bar/empty', 'foo', 'foo/dir1', 'foo/dir1/dir2'],
          'foo/dir1/dir2/dir3/dir4':
              [ 'bar', 'bar/empty', 'foo', 'foo/dir1', 'foo/dir1/dir2',
                'foo/dir1/dir2/dir3'],
        })
    self._check_requirements(
        obj.dependencies[1],
        {
          'foo/dir1/dir2': ['bar', 'bar/empty', 'foo', 'foo/dir1'],
        })
    self._check_requirements(
        obj.dependencies[2],
        {})
    self._check_requirements(
        obj,
        {
          'foo': [],
          'bar': [],
          'bar/empty': ['bar'],
        })
def _check_requirements(self, solution, expected):
for dependency in solution.dependencies:
e = expected.pop(dependency.name)
a = sorted(dependency.requirements)
self.assertEquals(e, a, (dependency.name, e, a))
self.assertEquals({}, expected)
def _get_processed(self):
"""Retrieves the item in the order they were processed."""
items = []
try:
while True:
items.append(self.processed.get_nowait())
except Queue.Empty:
pass
return items
  def testAutofix(self):
    # Invalid urls causes pain when specifying requirements. Make sure it's
    # auto-fixed: the trailing '/@revision' collapses to '@revision'.
    url = 'proto://host/path/@revision'
    d = gclient.Dependency(
        None, 'name', url, url, None, None, None,
        None, '', True, False, None, True)
    self.assertEquals('proto://host/path@revision', d.url)
  def testStr(self):
    """Smoke-tests Dependency/GClient __str__() via the rendered length."""
    parser = gclient.OptionParser()
    options, _ = parser.parse_args([])
    obj = gclient.GClient('foo', options)
    obj.add_dependencies_and_close(
      [
        gclient.Dependency(
          obj, 'foo', 'raw_url', 'url', None, None, None, None, 'DEPS', True,
          False, None, True),
        gclient.Dependency(
          obj, 'bar', 'raw_url', 'url', None, None, None, None, 'DEPS', True,
          False, None, True),
      ],
      [])
    obj.dependencies[0].add_dependencies_and_close(
      [
        gclient.Dependency(
          obj.dependencies[0], 'foo/dir1', 'raw_url', 'url', None, None, None,
          None, 'DEPS', True, False, None, True),
      ],
      [])
    # Make sure __str__() works fine.
    # pylint: disable=protected-access
    obj.dependencies[0]._file_list.append('foo')
    str_obj = str(obj)
    # NOTE(review): pinning the exact rendered length (263) is brittle; it
    # breaks whenever the __str__ format changes — confirm this is intended.
    self.assertEquals(263, len(str_obj), '%d\n%s' % (len(str_obj), str_obj))
  def testHooks(self):
    """DEPS hooks are collected and exposed through GetHooks()."""
    topdir = self.root_dir
    gclient_fn = os.path.join(topdir, '.gclient')
    fh = open(gclient_fn, 'w')
    # Python 2 print-chevron writes the line to fh.
    print >> fh, 'solutions = [{"name":"top","url":"svn://example.com/top"}]'
    fh.close()
    subdir_fn = os.path.join(topdir, 'top')
    os.mkdir(subdir_fn)
    deps_fn = os.path.join(subdir_fn, 'DEPS')
    fh = open(deps_fn, 'w')
    hooks = [{'pattern':'.', 'action':['cmd1', 'arg1', 'arg2']}]
    print >> fh, 'hooks = %s' % repr(hooks)
    fh.close()
    fh = open(os.path.join(subdir_fn, 'fake.txt'), 'w')
    print >> fh, 'bogus content'
    fh.close()
    os.chdir(topdir)
    parser = gclient.OptionParser()
    options, _ = parser.parse_args([])
    options.force = True
    client = gclient.GClient.LoadCurrentConfig(options)
    work_queue = gclient_utils.ExecutionQueue(options.jobs, None, False)
    for s in client.dependencies:
      work_queue.enqueue(s)
    work_queue.flush({}, None, [], options=options, patch_refs={})
    # Every declared hook action should be visible, as a tuple.
    self.assertEqual(
        [h.action for h in client.GetHooks(options)],
        [tuple(x['action']) for x in hooks])
def testCustomHooks(self):
topdir = self.root_dir
gclient_fn = os.path.join(topdir, '.gclient')
fh = open(gclient_fn, 'w')
extra_hooks = [{'name': 'append', 'pattern':'.', 'action':['supercmd']}]
print >> fh, ('solutions = [{"name":"top","url":"svn://example.com/top",'
'"custom_hooks": %s},' ) % repr(extra_hooks + [{'name': 'skip'}])
print >> fh, '{"name":"bottom","url":"svn://example.com/bottom"}]'
fh.close()
subdir_fn = os.path.join(topdir, 'top')
os.mkdir(subdir_fn)
deps_fn = os.path.join(subdir_fn, 'DEPS')
fh = open(deps_fn, 'w')
hooks = [{'pattern':'.', 'action':['cmd1', 'arg1', 'arg2']}]
hooks.append({'pattern':'.', 'action':['cmd2', 'arg1', 'arg2']})
skip_hooks = [
{'name': 'skip', 'pattern':'.', 'action':['cmd3', 'arg1', 'arg2']}]
skip_hooks.append(
{'name': 'skip', 'pattern':'.', 'action':['cmd4', 'arg1', 'arg2']})
print >> fh, 'hooks = %s' % repr(hooks + skip_hooks)
fh.close()
# Make sure the custom hooks for that project don't affect the next one.
subdir_fn = os.path.join(topdir, 'bottom')
os.mkdir(subdir_fn)
deps_fn = os.path.join(subdir_fn, 'DEPS')
fh = open(d |
from os import system
import xmlrpclib
from SimpleXMLRPCServer import SimpleXMLRPCServer
def clear_buffer_cache():
    """Drop the kernel page/buffer caches on this host and return True.

    Shows `free -g` before and after so the effect is visible in the logs.
    The original definition line was garbled ('clear_bu | ffer_cache');
    restored to clear_buffer_cache.
    """
    system('free -g')
    system('sync')
    # NOTE(review): using sed's 'w' to write into /proc/sys/vm/drop_caches is
    # unusual — the conventional form is `echo 3 > /proc/sys/vm/drop_caches`;
    # confirm this works on the target hosts.
    system("sudo sed -n 's/0/3/w /proc/sys/vm/drop_caches' /proc/sys/vm/drop_caches")
    system('sync')
    system("sudo sed -n 's/3/0/w /proc/sys/vm/drop_caches' /proc/sys/vm/drop_caches")
    system('free -g')
    return True
def is_even(n):
    """Return True when n divides evenly by two."""
    _, remainder = divmod(n, 2)
    return remainder == 0
# Expose the helpers over XML-RPC on all interfaces, port 8888.
# (Restored the garbled 's | erver.register_function' line; print made
# version-portable by using call syntax.)
server = SimpleXMLRPCServer(('0.0.0.0', 8888))
print('Listening on port 8888...')
server.register_function(clear_buffer_cache, 'clear_buffer_cache')
server.register_function(is_even, 'is_even')
server.serve_forever()
# Standard imports
import unittest
import json
import logging
from datetime import datetime, timedelta
# Our imports
from emission.clients.gamified import gamified
from emission.core.get_database import get_db, get_mode_db, get_section_db
from emission.core.wrapper.user import User
from emission.core.wrapper.client import Client
import emission.tests.common
logging.basicConfig(level=logging.DEBUG)
class TestGamified(unittest.TestCase):
    """Tests for the gamified client: score components, stored-score updates,
    level/file-name mapping and the background task driver.

    Two garbled lines in setUp ('confirmed_mod | e', 'ai | rSection') were
    restored, and the Python-2-only print statement in testUpdateScore was
    converted to call syntax (works identically on Python 2 and 3).
    """

    def setUp(self):
        import emission.tests.common
        from copy import copy
        self.testUsers = ["test@example.com", "best@example.com", "fest@example.com",
                          "rest@example.com", "nest@example.com"]
        self.serverName = 'localhost'
        # Sometimes, we may have entries left behind in the database if one of the tests failed
        # or threw an exception, so let us start by cleaning up all entries
        emission.tests.common.dropAllCollections(get_db())
        self.ModesColl = get_mode_db()
        self.assertEquals(self.ModesColl.find().count(), 0)
        self.setupUserAndClient()
        emission.tests.common.loadTable(self.serverName, "Stage_Modes", "emission/tests/data/modes.json")
        emission.tests.common.loadTable(self.serverName, "Stage_Sections", "emission/tests/data/testCarbonFile")
        self.SectionsColl = get_section_db()
        # Expected distances (meters) and per-mode carbon factors used below.
        self.walkExpect = 1057.2524056424411
        self.busExpect = 2162.668467546699
        self.busCarbon = 267.0/1609
        self.airCarbon = 217.0/1609
        self.driveCarbon = 278.0/1609
        self.busOptimalCarbon = 92.0/1609
        self.allDriveExpect = (self.busExpect * self.driveCarbon + self.walkExpect * self.driveCarbon)/1000
        self.myFootprintExpect = float(self.busExpect * self.busCarbon)/1000
        self.sb375GoalExpect = 40.142892/7
        self.mineMinusOptimalExpect = 0
        self.allDriveMinusMineExpect = float(self.allDriveExpect - self.myFootprintExpect)/self.allDriveExpect
        self.sb375DailyGoalMinusMineExpect = float(self.sb375GoalExpect - self.myFootprintExpect)/self.sb375GoalExpect
        self.now = datetime.now()
        self.twodaysago = self.now - timedelta(days=2)
        self.weekago = self.now - timedelta(weeks = 1)
        for section in self.SectionsColl.find():
            section['section_start_datetime'] = self.twodaysago
            section['section_end_datetime'] = self.twodaysago + timedelta(hours = 1)
            section['predicted_mode'] = {'walking': 1.0}
            if section['user_id'] == 'fest@example.com':
                logging.debug("Setting user_id for section %s, %s = %s" %
                              (section['trip_id'], section['section_id'], self.user.uuid))
                section['user_id'] = self.user.uuid
            if section['confirmed_mode'] == 5:
                # Clone each bus section once as an air section...
                airSection = copy(section)
                airSection['confirmed_mode'] = 9
                airSection['_id'] = section['_id'] + "_air"
                self.SectionsColl.insert(airSection)
                # ...and once more with no confirmed mode.
                airSection['confirmed_mode'] = ''
                airSection['_id'] = section['_id'] + "_unconf"
                self.SectionsColl.insert(airSection)
            # print("Section start = %s, section end = %s" %
            #   (section['section_start_datetime'], section['section_end_datetime']))
            self.SectionsColl.save(section)

    def setupUserAndClient(self):
        # At this point, the more important test is to execute the query and see
        # how well it works
        fakeEmail = "fest@example.com"
        client = Client("gamified")
        client.update(createKey = False)
        emission.tests.common.makeValid(client)
        (resultPre, resultReg) = client.preRegister("this_is_the_super_secret_id", fakeEmail)
        studyList = Client.getPendingClientRegs(fakeEmail)
        self.assertEqual(studyList, ["gamified"])
        user = User.register("fest@example.com")
        self.assertEqual(user.getFirstStudy(), 'gamified')
        self.user = user

    def testGetScoreComponents(self):
        components = gamified.getScoreComponents(self.user.uuid, self.weekago, self.now)
        self.assertEqual(components[0], 0.75)
        # bus_short disappears in optimal, air_short disappears as long motorized, so optimal = 0
        # self.assertEqual(components[1], (self.busExpect * self.busCarbon) / 1000)
        # TODO: Figure out what we should do when optimal == 0. Currently, we
        # return 0, which seems sub-optimal (pun intended)
        self.assertEqual(components[1], 0.0)
        # air_short disappears as long motorized, but we need to consider walking
        self.assertAlmostEqual(components[2], self.allDriveMinusMineExpect, places=4)
        # air_short disappears as long motorized, so only bus_short is left
        self.assertAlmostEqual(components[3], self.sb375DailyGoalMinusMineExpect, places = 4)

    # Checks both calcScore and updateScore, since we calculate the score before we update it
    def testUpdateScore(self):
        self.assertEqual(gamified.getStoredScore(self.user), (0, 0))
        components = gamified.updateScore(self.user.uuid)
        print("self.allDriveMinusMineExpect = %s, self.sb375DailyGoalMinusMineExpect = %s" %
              (self.allDriveMinusMineExpect, self.sb375DailyGoalMinusMineExpect))
        expectedScore = 0.75 * 50 + 30 * self.allDriveMinusMineExpect + 20 * 0.0 + \
            10 * self.sb375DailyGoalMinusMineExpect
        storedScore = gamified.getStoredScore(self.user)
        self.assertEqual(storedScore[0], 0)
        self.assertAlmostEqual(storedScore[1], expectedScore, 6)

    def testGetLevel(self):
        self.assertEqual(gamified.getLevel(0), (1, 1))
        self.assertEqual(gamified.getLevel(11.0), (1, 1))
        self.assertEqual(gamified.getLevel(21.0), (1, 2))
        self.assertEqual(gamified.getLevel(100), (2, 1))
        self.assertEqual(gamified.getLevel(199.0), (2, 1))
        self.assertEqual(gamified.getLevel(200), (2, 2))
        self.assertEqual(gamified.getLevel(201.0), (2, 2))
        self.assertEqual(gamified.getLevel(999), (2, 5))
        self.assertEqual(gamified.getLevel(1000), (3, 1))
        self.assertEqual(gamified.getLevel(9999.0), (3, 5))
        # Scores past the last level cap stay at the final (3, 5) tier.
        self.assertEqual(gamified.getLevel(10000), (3, 5))
        self.assertEqual(gamified.getLevel(100000), (3, 5))

    def testGetFileName(self):
        self.assertEqual(gamified.getFileName(1, 1), "level_1_1.png")
        self.assertEqual(gamified.getFileName(1.0, 2.0), "level_1_2.png")
        self.assertEqual(gamified.getFileName(1.055, 2), "level_1_2.png")

    def testRunBackgroundTasksForDay(self):
        self.assertEqual(gamified.getStoredScore(self.user), (0, 0))
        components = gamified.runBackgroundTasks(self.user.uuid)
        expectedScore = 0.75 * 50 + 30 * self.allDriveMinusMineExpect + 20 * 0.0 + \
            10 * self.sb375DailyGoalMinusMineExpect
        storedScore = gamified.getStoredScore(self.user)
        self.assertEqual(storedScore[0], 0)
        self.assertAlmostEqual(storedScore[1], expectedScore, 6)
if __name__ == '__main__':
    # Run the suite when this module is invoked directly.
    unittest.main()
# Flask application bootstrap. (Restored two garbled lines: 'from flas | k'
# and the 'SERVER_N | AME' config key.)
from flask import Flask

server = Flask(__name__)
# Pin the externally visible host:port so url_for() can build absolute URLs.
server.config['SERVER_NAME'] = '127.0.0.1:5001'

# Imported for its side effect of registering the routes on `server`.
from app import endpoints
form '{entity_name}_{column_name}'. For example, in the `game`
table, the `gsis_id` column must be named `game_gsis_id` in
`row`.
"""
obj = cls(db)
seta = setattr
prefix = cls._sql_primary_table() + '_'
slice_from = len(prefix)
for k in row:
if k.startswith(prefix):
seta(obj, k[slice_from:], row[k])
return obj
@classmethod
def from_row_tuple(cls, db, t):
"""
Given a tuple `t` corresponding to a result from a SELECT query,
this will construct a new instance for this entity. Note that
the tuple `t` must be in *exact* correspondence with the columns
returned by `nfldb.Entity.sql_fields`.
"""
cols = cls.sql_fields()
seta = setattr
obj = cls(db)
for i, field in enumerate(cols):
seta(obj, field, t[i])
return obj
    @classmethod
    def _sql_from(cls, aliases=None):
        """
        Return a valid SQL `FROM table AS alias [LEFT JOIN extra_table
        ...]` string for this entity.
        """
        # This is a little hokey. Pick the first table as the 'FROM' table.
        # Subsequent tables are joined.
        from_table = cls._sql_primary_table()
        as_from_table = cls._sql_table_alias(from_table, aliases)
        extra_tables = ''
        for table, _ in cls._sql_tables['tables'][1:]:
            # Self-join: cls joins to its own secondary tables, with the
            # same aliases on both sides.
            extra_tables += cls._sql_join_to(cls,
                                             from_table=from_table,
                                             to_table=table,
                                             from_aliases=aliases,
                                             to_aliases=aliases)
        return '''
            FROM {from_table} AS {as_from_table}
            {extra_tables}
        '''.format(from_table=from_table, as_from_table=as_from_table,
                   extra_tables=extra_tables)
@classmethod
def _sql_select_fields(cls, fields, wrap=None, aliases=None):
"""
Returns correctly qualified SELECT expressions for each
field in `fields` (namely, a field may be a derived field).
If `wrap` is a not `None`, then it is applied to the result
of calling `cls._sql_field` on each element in `fields`.
All resulting fields are aliased with `AS` to correspond to
the name given in `fields`. Namely, this makes table aliases
opaque to the resulting query, but this also disallows
selecting columns of the same name from multiple tables.
"""
if wrap is None:
wrap = lambda x: x
sql = lambda f: wrap(cls._sql_field(f, aliases=aliases))
entity_prefix = cls._sql_primary_table()
return ['%s AS %s_%s' % (sql(f), entity_prefix, f) for f in fields]
@classmethod
def _sql_relation_distance(cls_from, cls_to):
primf = set(cls_from._sql_tables['primary'])
primt = set(cls_to._sql_tables['primary'])
if len(primf.intersection(primt)) == 0:
return None
outsiders = primf.difference(primt).union(primt.difference(primf))
if len(primf) > len(primt):
return -len(outsiders)
else:
return len(outsiders)
    @classmethod
    def _sql_join_all(cls_from, cls_tos):
        """
        Given a list of sub classes `cls_tos` of `nfldb.Entity`,
        produce as many SQL `LEFT JOIN` clauses as is necessary so
        that all fields in all entity types given are available for
        filtering.

        Unlike the other join functions, this one has no alias support
        or support for controlling particular tables.

        The key contribution of this function is that it knows how to
        connect a group of tables correctly. e.g., If the group of
        tables is `game`, `play` and `play_player`, then `game` and
        `play` will be joined and `play` and `play_player` will be
        joined. (Instead of `game` and `play_player` or some other
        erroneous combination.) In essence, each table is joined with
        the least general table in the group.

        NOTE: the `lambda (f, t, d): ...` tuple-parameter syntax below
        is Python 2 only.
        """
        assert cls_from not in cls_tos, \
            'cannot join %s with itself with `sql_join_all`' % cls_from
        # Pairwise key-distance helpers; d is None for unrelated types.
        def dist(f, t):
            return f._sql_relation_distance(t)
        def relation_dists(froms, tos):
            return filter(lambda (f, t, d): d is not None,
                          ((f, t, dist(f, t)) for f in froms for t in tos))
        def more_general(froms, tos):
            return filter(lambda (f, t, d): d < 0, relation_dists(froms, tos))
        def more_specific(froms, tos):
            return filter(lambda (f, t, d): d > 0, relation_dists(froms, tos))
        joins = ''
        froms, tos = set([cls_from]), set(cls_tos)
        while len(tos) > 0:
            general = more_general(froms, tos)
            specific = more_specific(froms, tos)
            assert len(general) > 0 or len(specific) > 0, \
                'Cannot compute distances between sets. From: %s, To: %s' \
                % (froms, tos)
            # Join the chosen table, then mark it as available for
            # subsequent joins.
            def add_join(f, t):
                tos.discard(t)
                froms.add(t)
                return f._sql_join_to_all(t)
            if general:
                f, t, _ = max(general, key=lambda (f, t, d): d)
                joins += add_join(f, t)
            if specific:
                f, t, _ = min(specific, key=lambda (f, t, d): d)
                joins += add_join(f, t)
        return joins
@classmethod
def _sql_join_to_all(cls_from, cls_to, from_table=None,
from_aliases=None, to_aliases=None):
"""
Given a **sub class** `cls_to` of `nfldb.Entity`, produce
as many SQL `LEFT JOIN` clauses as is necessary so that all
fields in `cls_to.sql_fields()` are available for filtering.
See the documentation for `nfldb.Entity._sql_join_to` for
information on the parameters.
"""
to_primary = cls_to._sql_primary_table()
joins = cls_from._sql_join_to(cls_to,
from_table=from_table,
to_table=to_primary,
from_aliases=from_aliases,
to_aliases=to_aliases)
for table, _ in cls_to._sql_tables['tables'][1:]:
joins += cls_to._sql_join_to(cls_to,
from_table=to_primary,
to_table=table,
from_aliases=to_aliases,
to_aliases=to_aliases)
return joins
@classmethod
def _sql_join_to(cls_from, cls_to,
from_table=None, to_table=None,
| from_aliases=None, to_aliases=None):
"""
Given a **sub class** `cls_to` of `nfldb.Entity`, produce
a SQL `LEFT JOIN` clause.
If the primary keys in `cls_from` and `cls_to` have an empty
intersection, then an assertion error is raised.
Note that the first table defined for each of `cls_from` and
`cls_to` is used to join them if `from_table` or `to_table`
a | re `None`.
`from_aliases` are only applied to the `from` tables and
`to_aliases` are only applied to the `to` tables. This allows
one to do self joins.
"""
if from_table is None:
from_table = cls_from._sql_primary_table()
if to_table is None:
to_table = cls_to._sql_primary_table()
from_table = cls_from._sql_table_alias(from_table,
aliases=from_aliases)
as_to_table = cls_to._sql_table_alias(to_table, aliases=to_aliases)
from_pkey = cls_from._sql_tables['primary']
to_pkey = cls_to._sql_tables['primary']
# Avoiding set.intersection so we can preserve order.
common = [k for k in from_pkey if k in to_pkey]
assert len(common) > 0, \
"Cannot join %s to %s with non-overlapping primary keys." |
rint "Exception info %s " % sys.exc_info()[0]
return -128
return result
if method == 'EnqueueCopyBuffer':
try:
nid = int(args['id'])
sourcebuffer = int(args['SourceBuffer'])
destinationbuffer = int(args['DestinationBuffer'])
bytecount = int(args['ByteCount'])
sourceoffset = int(args['SourceOffset'])
destinationoffset = int(args['DestinationOffset'])
result = PyOpenCLInterface.EnqueueCopyBuffer(nid, sourcebuffer, destinationbuffer,
sourceoffset, destinationoffset, bytecount)
except:
print "Exception caught in DispatchCommandQueues.%s " % method
print "Exception info %s " % sys.exc_info()[0]
return -128
return result
if method == 'EnqueueNDRangeKernel':
try:
nid = int(args['id'])
kernel = int(args[ | 'Kernel'])
gwo = args['GWO']
gws = args['GWS']
lws = args['LWS']
result = PyOpenCLInterface.EnqueueNDRangeKernel(nid, kernel, gwo, gws, lws)
except:
print "Exception caught in DispatchCommandQueues.%s " % method
print "Exception info %s " % sys.exc_info()[0]
return | -128
return result
if method == 'EnqueueTask':
try:
nid = int(args['id'])
kernel = int(args['Kernel'])
result = PyOpenCLInterface.EnqueueTask(nid, kernel)
except:
print "Exception caught in DispatchCommandQueues.%s " % method
print "Exception info %s " % sys.exc_info()[0]
return -128
return result
if method == 'EnqueueBarrier':
try:
nid = int(args['id'])
result = PyOpenCLInterface.EnqueueBarrier(nid)
except:
print "Exception caught in DispatchCommandQueues.%s " % method
print "Exception info %s " % sys.exc_info()[0]
return -128
return result
if method == 'Finish':
try:
nid = int(args['id'])
result = PyOpenCLInterface.Finish(nid)
except:
print "Exception caught in DispatchCommandQueues.%s " % method
print "Exception info %s " % sys.exc_info()[0]
return -128
return result
print "DISPATCHPROGRAMS : Unknown Method"
return -128
class C(ConsumerMixin):
    def __init__(self, connection):
        # Keep the kombu broker connection; ConsumerMixin reads it.
        self.connection = connection
        return
    def get_consumers(self, Consumer, channel):
        # One JSON consumer per OpenCL entity queue, each dispatching to the
        # matching on_*_message handler, plus the generic notify queue.
        return [Consumer( queue_opencl_devices, accept = ['json'], callbacks = [self.on_devices_message]),
                Consumer( queue_opencl_contexts, accept = ['json'], callbacks = [self.on_contexts_message]),
                Consumer( queue_opencl_programs, accept = ['json'], callbacks = [self.on_programs_message]),
                Consumer( queue_opencl_kernels, accept = ['json'], callbacks = [self.on_kernels_message]),
                Consumer( queue_opencl_buffers, accept = ['json'], callbacks = [self.on_buffers_message]),
                Consumer( queue_opencl_command_queues, accept = ['json'], callbacks = [self.on_command_queues_message]),
                Consumer( queue_opencl_notify, accept = ['json'], callbacks = [self.on_message])]
    def on_message(self, body, message):
        # Generic notify handler: acknowledge and discard.
        message.ack()
        return
def on_command_queues_message(self, body, message):
message.ack()
print ("notify: RECEIVED COMMAND QUEUES MSG - body: %r" % (body,))
print ("notify: RECEIVED COMMAND QUEUES MSG - message: %r" % (message,))
try:
respTarget = body['Source']
respQueue = body['RespQueue']
method = body['Method']
args = body['args']
# create the response connection
resp_connection = BrokerConnection(respTarget)
resp_queue = resp_connection.SimpleQueue(respQueue,
queue_opts = {'durable': False, 'auto_delete': True},
exchange_opts = {'delivery_mode' : 1,
'auto_delete' : True,
'durable' : False})
payload = {"Result": DispatchCommandQueues(method, args)}
resp_queue.put(payload, serializer='json')
resp_queue.close()
except:
print "Exception caught : %s" % sys.exc_info()[0]
return
def on_devices_message(self, body, message):
message.ack()
print ("notify: RECEIVED DEVICES MSG - body: %r" % (body,))
print ("notify: RECEIVED DEVICES MSG - message: %r" % (message,))
try:
respTarget = body['Source']
respQueue = body['RespQueue']
method = body['Method']
args = body['args']
# create the response connection
resp_connection = BrokerConnection(respTarget)
resp_queue = resp_connection.SimpleQueue(respQueue,
queue_opts = {'durable': False, 'auto_delete': True},
exchange_opts = {'delivery_mode' : 1,
'auto_delete' : True,
'durable' : False})
payload = {"Result": DispatchDevices(method, args)}
resp_queue.put(payload, serializer='json')
resp_queue.close()
except:
print "Exception caught : %s" % sys.exc_info()[0]
return
def on_contexts_message(self, body, message):
print ("notify: RECEIVED CONTEXTS MSG - body: %r" % (body,))
print ("notify: RECEIVED CONTEXTS MSG - message: %r" % (message,))
message.ack()
try:
respTarget = body['Source']
respQueue = body['RespQueue']
method = body['Method']
args = body['args']
# create the response connection
resp_connection = BrokerConnection(respTarget)
resp_queue = resp_connection.SimpleQueue(respQueue,
queue_opts = {'durable': False, 'auto_delete': True},
exchange_opts = {'delivery_mode' : 1,
'auto_delete' : True,
'durable' : False})
payload = {"Result": DispatchContexts(method, args)}
resp_queue.put(payload, serializer='json')
resp_queue.close()
except:
print "Exception caught : %s" % sys.exc_info()[0]
return
def on_buffers_message(self, body, message):
print ("notify: RECEIVED BUFFERS MSG - body: %r" % (body,))
print ("notify: RECEIVED BUFFERS MSG - message: %r" % (message,))
message.ack()
try:
respTarget = body['Source']
respQueue = body['RespQueue']
method = body['Method']
args = body['args']
# create the response connection
resp_connection = BrokerConnection(respTarget)
resp_queue = resp_connection.SimpleQueue(respQueue,
queue_opts = {'durable': False, 'auto_delete': True},
exchange_opts = {'delivery_mode' : 1,
'auto_delete' : True,
'durable' : False})
payload = {"Result": DispatchBuffers(method, args)}
resp_queue.put(payload, serializer='json')
resp_queue.close()
except:
print "Exception caught : %s" % sys.exc_info()[0]
return
def on_programs_message(self, body, message):
print ("notify: RECEIVED PROGRAMS MSG - body: %r" % (body,))
print ("notify: RECEIVED PROGRAMS MSG - message: %r" % (message,))
message.ack()
try:
respTarget = body['Source']
respQueue = body['RespQueue']
method = body[ |
# External tool locations, overridable via the environment.
# (Restored the garbled first line: `env('GIT','git')` was missing the
# `GIT = os.getenv(` prefix, matching the BASH line below.)
GIT = os.getenv('GIT','git')
BASH = os.getenv('BASH','bash')
# OS specific configuration for terminal attributes
ATTR_RESET = ''
ATTR_PR = ''
COMMIT_FORMAT = '%h %s (%an)%d'
if os.name == 'posix': # if posix, assume we can use basic terminal escapes
    ATTR_RESET = '\033[0m'
    ATTR_PR = '\033[1;36m'
    COMMIT_FORMAT = '%C(bold blue)%h%Creset %s %C(cyan)(%an)%Creset%C(green)%d%Creset'
def git_config_get(option, default=None):
    '''
    Get named configuration option from git repository.
    '''
    try:
        raw = subprocess.check_output([GIT,'config','--get',option])
    except subprocess.CalledProcessError:
        # Option unset (or not in a repo): fall back to the default.
        return default
    return raw.rstrip().decode('utf-8')
def retrieve_pr_info(repo,pull):
    '''
    Retrieve pull request information from github.
    Return None if no title can be found, or an error happens.
    '''
    try:
        api_url = "https://api.github.com/repos/"+repo+"/pulls/"+pull
        response = urlopen(Request(api_url))
        utf8_reader = codecs.getreader('utf-8')
        return json.load(utf8_reader(response))
    except Exception as e:
        print('Warning: unable to retrieve pull information from github: %s' % e)
        return None
def ask_prompt(text):
    '''Show *text* on stderr and return one stripped line read from stdin.'''
    print(text, end=" ", file=stderr)
    stderr.flush()
    answer = stdin.readline().rstrip()
    # Blank line keeps the terminal tidy after the inline prompt.
    print("", file=stderr)
    return answer
def get_symlink_files():
    '''Return the repository paths (at HEAD) whose git mode marks them as symlinks.'''
    listing = sorted(subprocess.check_output([GIT, 'ls-tree', '--full-tree', '-r', 'HEAD']).splitlines())
    symlinks = []
    for entry in listing:
        decoded = entry.decode('utf-8')
        mode = int(decoded.split(" ")[0], 8)
        # 0o120000 is the S_IFLNK file-type bit pattern in the mode field.
        if (mode & 0o170000) == 0o120000:
            symlinks.append(decoded.split("\t")[1])
    return symlinks
def tree_sha512sum(commit='HEAD'):
    '''Return a SHA512 hex digest over the whole tree at *commit*.

    The overall hash folds in, for each tracked file in sorted path
    order, the SHA512 of the blob's content plus the file name, so it
    changes iff any tracked content or path changes.
    Raises IOError if git cat-file misbehaves.
    '''
    # request metadata for entire tree, recursively
    files = []
    blob_by_name = {}
    for line in subprocess.check_output([GIT, 'ls-tree', '--full-tree', '-r', commit]).splitlines():
        name_sep = line.index(b'\t')
        metadata = line[:name_sep].split() # perms, 'blob', blobid
        assert(metadata[1] == b'blob')
        name = line[name_sep+1:]
        files.append(name)
        blob_by_name[name] = metadata[2]
    files.sort()
    # open connection to git-cat-file in batch mode to request data for all blobs
    # this is much faster than launching it per file
    p = subprocess.Popen([GIT, 'cat-file', '--batch'], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
    overall = hashlib.sha512()
    for f in files:
        blob = blob_by_name[f]
        # request blob
        p.stdin.write(blob + b'\n')
        p.stdin.flush()
        # read header: blob, "blob", size
        reply = p.stdout.readline().split()
        assert(reply[0] == blob and reply[1] == b'blob')
        size = int(reply[2])
        # hash the blob data in 64KiB chunks
        intern = hashlib.sha512()
        ptr = 0
        while ptr < size:
            bs = min(65536, size - ptr)
            piece = p.stdout.read(bs)
            if len(piece) == bs:
                intern.update(piece)
            else:
                raise IOError('Premature EOF reading git cat-file output')
            ptr += bs
        dig = intern.hexdigest()
        assert(p.stdout.read(1) == b'\n') # ignore LF that follows blob data
        # update overall hash with file hash
        overall.update(dig.encode("utf-8"))
        overall.update(" ".encode("utf-8"))
        overall.update(f)
        overall.update("\n".encode("utf-8"))
    p.stdin.close()
    if p.wait():
        raise IOError('Non-zero return value executing git cat-file')
    return overall.hexdigest()
def print_merge_details(pull, title, branch, base_branch, head_branch):
    '''Print a colored one-line summary of the pull, then the commit graph
    between base_branch and head_branch via git log.'''
    print('%s#%s%s %s %sinto %s%s' % (ATTR_RESET+ATTR_PR,pull,ATTR_RESET,title,ATTR_RESET+ATTR_PR,branch,ATTR_RESET))
    subprocess.check_call([GIT,'log','--graph','--topo-order','--pretty=format:'+COMMIT_FORMAT,base_branch+'..'+head_branch])
def parse_arguments():
    '''Parse the command line: a required integer PULL id and an optional
    BRANCH to merge into. Returns the argparse namespace.'''
    epilog = '''
        In addition, you can set the following git configuration variables:
        githubmerge.repository (mandatory),
        user.signingkey (mandatory),
        githubmerge.host (default: git@github.com),
        githubmerge.branch (no default),
        githubmerge.testcmd (default: none).
    '''
    parser = argparse.ArgumentParser(description='Utility to merge, sign and push github pull requests',
        epilog=epilog)
    parser.add_argument('pull', metavar='PULL', type=int, nargs=1,
        help='Pull request ID to merge')
    parser.add_argument('branch', metavar='BRANCH', type=str, nargs='?',
        default=None, help='Branch to merge against (default: githubmerge.branch setting, or base branch for pull, or \'master\')')
    return parser.parse_args()
def main():
# Extract settings from git repo
repo = git_config_get('githubmerge.repository')
host = git_config_get('githubmerge.host','git@github.com')
opt_branch = git_config_get('githubmerge.branch',None)
testcmd = git_config_get('githubmerge.testcmd')
signingkey = git_config_get('user.signingkey')
if repo is None:
print("ERROR: No repository configured. Use this command to set:", file=stderr)
print("git config githubmerge.repository <owner>/<repo>", file=stderr)
sys.exit(1)
if signingkey is None:
print("ERROR: No GPG signing key set. Set one using:",file=stderr)
print("git config --global user.signingkey <key>",file=stderr)
sys.exit(1)
host_repo = host+":"+repo # shortcut for push/pull target
# Extract settings from command line
args = parse_arguments()
pull = str(args.pull[0])
# Receive pull information from github
info = retrieve_pr_info(repo,pull)
if info is None:
sys.exit(1)
title = info['title'].strip()
body = info['body'].strip()
# precedence order for destination branch argument:
# - command line argument
# - githubmerge.branch setting
# - base branch for pull (as retrieved from github)
# - 'master'
branch = args.branch or opt_branch or info['base']['ref'] or 'master'
# Initialize source branches
head_branch = 'pull/'+pull+'/head'
base_branch = 'pull/'+pull+'/base'
merge_b | ranch = 'pull/'+pull+'/merge'
local_merge_branch = 'pull/'+pull+'/local-merge'
devnull = open(os.devnull,'w')
try:
subprocess.check_call([GIT,'checkout','-q',branch])
except subprocess.CalledProcessError as e:
print("ERROR: Cannot check out branch %s." % (branch), file=stderr)
sys.exit(3)
try:
subprocess.check_call([GIT,'fetch | ','-q',host_repo,'+refs/pull/'+pull+'/*:refs/heads/pull/'+pull+'/*'])
except subprocess.CalledProcessError as e:
print("ERROR: Cannot find pull request #%s on %s." % (pull,host_repo), file=stderr)
sys.exit(3)
try:
subprocess.check_call([GIT,'log','-q','-1','refs/heads/'+head_branch], stdout=devnull, stderr=stdout)
except subprocess.CalledProcessError as e:
print("ERROR: Cannot find head of pull request #%s on %s." % (pull,host_repo), file=stderr)
sys.exit(3)
try:
subprocess.check_call([GIT,'log','-q','-1','refs/heads/'+merge_branch], stdout=devnull, stderr=stdout)
except subprocess.CalledProcessError as e:
print("ERROR: Cannot find merge of pull request #%s on %s." % (pull,host_repo), file=stderr)
sys.exit(3)
try:
subprocess.check_call([GIT,'fetch','-q',host_repo,'+refs/heads/'+branch+':refs/heads/'+base_branch])
except subprocess.CalledProcessError as e:
print("ERROR: Cannot find branch %s on %s." % (branch,host_repo), file=stderr)
sys.exit(3)
subprocess.check_call([GIT,'checkout','-q',base_branch])
subprocess.call([GIT,'branch','-q','-D',local_merge_branch], stderr=devnull)
subprocess.check_call([GIT,'checkout','-q','-b',local_merge_branch])
try:
# Go up to the repository's root.
toplevel = subprocess.check_output([GIT,'rev-parse','--show-toplevel']).strip()
os.chdir(toplevel)
# Create unsigned merge commit.
if title:
firstline = 'Merge #%s: %s' % (pull,title)
else:
firstline = 'Merge #%s' % (pull,)
message = fi |
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import six
from cryptography import utils
from cryptography.x509.oid import ObjectIdentifier
class NameAttribute(object):
    """A single X.509 name attribute: an ObjectIdentifier paired with a
    text value. Instances are immutable, hashable, and compare by value."""

    def __init__(self, oid, value):
        # Validate argument types up front so an invalid attribute can
        # never be constructed.
        if not isinstance(oid, ObjectIdentifier):
            raise TypeError(
                "oid argument must be an ObjectIdentifier instance."
            )
        if not isinstance(value, six.text_type):
            raise TypeError(
                "value argument must be a text type."
            )
        self._oid = oid
        self._value = value

    # Read-only public views over the private storage.
    oid = utils.read_only_property("_oid")
    value = utils.read_only_property("_value")

    def __eq__(self, other):
        if not isinstance(other, NameAttribute):
            return NotImplemented
        return self.oid == other.oid and self.value == other.value

    def __ne__(self, other):
        return not (self == other)

    def __hash__(self):
        return hash((self.oid, self.value))

    def __repr__(self):
        return "<NameAttribute(oid={0.oid}, value={0.value!r})>".format(self)
class Name(object):
    """An ordered collection of name attributes (an X.509 name).

    Supports iteration, length, equality by attribute list, hashing, and
    filtered lookup by OID.
    """

    def __init__(self, attributes):
        self._attributes = attributes

    def get_attributes_for_oid(self, oid):
        # Filter while preserving the original attribute order.
        return [attr for attr in self if attr.oid == oid]

    def __eq__(self, other):
        if not isinstance(other, Name):
            return NotImplemented
        return self._attributes == other._attributes

    def __ne__(self, other):
        return not (self == other)

    def __hash__(self):
        # TODO: this is relatively expensive, if this looks like a bottleneck
        # for you, consider optimizing!
        return hash(tuple(self._attributes))

    def __iter__(self):
        return iter(self._attributes)

    def __len__(self):
        return len(self._attributes)

    def __repr__(self):
        return "<Name({0!r})>".format(self._attributes)
|
# *****************************************************************
# Copyright (c) 2013 Massachusetts Institute of Technology
#
# Developed exclusively at US Government expense under US Air Force contract
# FA8721-05-C-002. The rights of the United States Government to use, modify,
# reproduce, release, perform, display or disclose this computer software and
# computer software documentation in whole or in part, in any manner and for
# any purpose whatsoever, and to have or authorize others to do so, are
# Unrestricted and Unlimited.
#
# Licensed for use under the BSD License as described in the BSD-LICENSE.txt
# file in the root directory of this release.
#
# Project: SPAR
# Authors: SY
# | Description: IBM TA2 wire class
#
# Modifications:
# Date Name Modification
# ---- ---- ------------
# 22 Oct 2012 SY Original Version
# *****************************************************************
import ibm_circuit_object as ico
class IBMInputWire(ico.IBMCircuitObject):
    """
    This class represents a single IBM input wire.
    """
    def __init__(self, displayname, circuit):
        """Initializes the wire with the display name and circuit specified."""
        # NOTE(review): the literal 0.0 and 0 appear to be the default
        # delay/level values appropriate for an input wire — confirm against
        # IBMCircuitObject.__init__'s parameter list.
        ico.IBMCircuitObject.__init__(self, displayname, 0.0, 0, circuit)
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2014, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import hashlib
import itertools
import numpy
from nupic.bindings.math import Random
from nupic.encoders.base import Encoder
class CoordinateEncoder(Encoder):
  """
  Given a coordinate in an N-dimensional space, and a radius around
  that coordinate, the Coordinate Encoder returns an SDR representation
  of that position.

  The Coordinate Encoder uses an N-dimensional integer coordinate space.
  For example, a valid coordinate in this space is (150, -49, 58), whereas
  an invalid coordinate would be (55.4, -5, 85.8475).

  It uses the following algorithm:

  1. Find all the coordinates around the input coordinate, within the
     specified radius.
  2. For each coordinate, use a uniform hash function to
     deterministically map it to a real number between 0 and 1. This is the
     "order" of the coordinate.
  3. Of these coordinates, pick the top W by order, where W is the
     number of active bits desired in the SDR.
  4. For each of these W coordinates, use a uniform hash function to
     deterministically map it to one of the bits in the SDR. Make this bit active.
  5. This results in a final SDR with exactly W bits active
     (barring chance hash collisions).
  """

  def __init__(self,
               w=21,
               n=1000,
               name=None,
               verbosity=0):
    """
    See `nupic.encoders.base.Encoder` for more information.

    @param w (int) Number of active bits per encoding; must be odd and > 0
    @param n (int) Total number of bits per encoding; must exceed 6*w
    @param name An optional string which will become part of the description
    @param verbosity (int) Verbosity level
    """
    # Validate inputs
    if (w <= 0) or (w % 2 == 0):
      raise ValueError("w must be an odd positive integer")
    if (n <= 6 * w) or (not isinstance(n, int)):
      raise ValueError("n must be an int strictly greater than 6*w. For "
                       "good results we recommend n be strictly greater "
                       "than 11*w")
    self.w = w
    self.n = n
    self.verbosity = verbosity
    # This encoder has no sub-encoders.
    self.encoders = None
    if name is None:
      name = "[%s:%s]" % (self.n, self.w)
    self.name = name

  def getWidth(self):
    """See `nupic.encoders.base.Encoder` for more information."""
    return self.n

  def getDescription(self):
    """See `nupic.encoders.base.Encoder` for more information."""
    return [('coordinate', 0), ('radius', 1)]

  def getScalars(self, inputData):
    """See `nupic.encoders.base.Encoder` for more information."""
    # Coordinates are reported as zeros: there is no scalar interpretation.
    return numpy.array([0]*len(inputData))

  def encodeIntoArray(self, inputData, output):
    """
    See `nupic.encoders.base.Encoder` for more information.

    @param inputData (tuple) Contains coordinate (numpy.array)
                             and radius (float)
    @param output (numpy.array) Stores encoded SDR in this numpy array
    """
    (coordinate, radius) = inputData
    # Steps 1-4 of the class-level algorithm.
    neighbors = self._neighbors(coordinate, radius)
    winners = self._topWCoordinates(neighbors, self.w)
    bitFn = lambda coordinate: self._bitForCoordinate(coordinate, self.n)
    indices = numpy.array([bitFn(w) for w in winners])
    # Clear the SDR, then activate the winning bits in place.
    output[:] = 0
    output[indices] = 1

  @staticmethod
  def _neighbors(coordinate, radius):
    """
    Returns coordinates around given coordinate, within given radius.
    Includes given coordinate.

    @param coordinate (numpy.array) Coordinate whose neighbors to find
    @param radius (float) Radius around `coordinate`

    @return (numpy.array) List of coordinates
    """
    # NOTE(review): `range` needs integer bounds, so radius is effectively
    # expected to be integral despite the docstring saying float — confirm.
    ranges = [range(n-radius, n+radius+1) for n in coordinate.tolist()]
    # Cartesian product of the per-dimension ranges gives the neighborhood.
    return numpy.array(list(itertools.product(*ranges)))

  @classmethod
  def _topWCoordinates(cls, coordinates, w):
    """
    Returns the top W coordinates by order.

    @param coordinates (numpy.array) A 2D numpy array, where each element
                                     is a coordinate
    @param w (int) Number of top coordinates to return
    @return (numpy.array) A subset of `coordinates`, containing only the
                          top ones by order
    """
    orders = numpy.array([cls._orderForCoordinate(c)
                          for c in coordinates.tolist()])
    # argsort is ascending, so the last w indices are the largest orders.
    indices = numpy.argsort(orders)[-w:]
    return coordinates[indices]

  @staticmethod
  def _hashCoordinate(coordinate):
    """Hash a coordinate to a 64 bit integer."""
    coordinateStr = ",".join(str(v) for v in coordinate)
    # Compute the hash and convert to 64 bit int.
    # (md5 is used only for uniform hashing here, not for security.)
    hash = int(int(hashlib.md5(coordinateStr).hexdigest(), 16) % (2 ** 64))
    return hash

  @classmethod
  def _orderForCoordinate(cls, coordinate):
    """
    Returns the order for a coordinate.

    @param coordinate (numpy.array) Coordinate
    @return (float) A value in the interval [0, 1), representing the
                    order of the coordinate
    """
    # Seed a deterministic RNG with the coordinate hash so the same
    # coordinate always gets the same order.
    seed = cls._hashCoordinate(coordinate)
    rng = Random(seed)
    return rng.getReal64()

  @classmethod
  def _bitForCoordinate(cls, coordinate, n):
    """
    Maps the coordinate to a bit in the SDR.

    @param coordinate (numpy.array) Coordinate
    @param n (int) The number of available bits in the SDR
    @return (int) The index to a bit in the SDR
    """
    seed = cls._hashCoordinate(coordinate)
    rng = Random(seed)
    return rng.getUInt32(n)

  def dump(self):
    # Debug printout of the encoder parameters (Python 2 print statements;
    # this module targets Python 2).
    print "CoordinateEncoder:"
    print " w: %d" % self.w
    print " n: %d" % self.n

  @classmethod
  def read(cls, proto):
    # Deserialize: bypass __init__ (and its validation) and restore the
    # four persisted parameters from the proto object.
    encoder = object.__new__(cls)
    encoder.w = proto.w
    encoder.n = proto.n
    encoder.verbosity = proto.verbosity
    encoder.name = proto.name
    return encoder

  def write(self, proto):
    # Serialize the four parameters onto the proto object.
    # NOTE(review): proto looks like a capnp-style message — confirm.
    proto.w = self.w
    proto.n = self.n
    proto.verbosity = self.verbosity
    proto.name = self.name
|
# coding=utf-8
# Copyright 2020 The TF-Agents Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Loss utility code."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from typing import Optional, Text
import tensorflow as tf # pylint: disable=g-explicit-tensorflow-version-import
from tf_agents.typing import types
def pinball_loss(
    y_true: types.Tensor,
    y_pred: types.Tensor,
    weights: types.Float = 1.0,
    scope: Optional[Text] = None,
    loss_collection: tf.compat.v1.GraphKeys = tf.compat.v1.GraphKeys.LOSSES,
    reduction: tf.compat.v1.losses.Reduction = tf.compat.v1.losses.Reduction
    .SUM_BY_NONZERO_WEIGHTS,
    quantile: float = 0.5) -> types.Float:
  """Adds a Pinball loss for quantile regression.

  The loss is `quantile * (y_true - y_pred)` where the target exceeds the
  prediction and `(quantile - 1) * (y_true - y_pred)` otherwise — an
  asymmetrically weighted absolute error.
  See: https://en.wikipedia.org/wiki/Quantile_regression#Quantiles

  `weights` acts as a coefficient for the loss. A scalar simply scales the
  loss; a tensor of size `[batch_size]` rescales the total loss of each
  sample; a tensor shaped like `predictions` rescales each measurable
  element of `predictions`.

  Args:
    y_true: tensor of true targets.
    y_pred: tensor of predicted targets.
    weights: Optional `Tensor` whose rank is either 0, or the same rank as
      `labels`, and must be broadcastable to `labels` (i.e., all dimensions
      must be either `1`, or the same as the corresponding `losses`
      dimension).
    scope: The scope for the operations performed in computing the loss.
    loss_collection: collection to which the loss will be added.
    reduction: Type of reduction to apply to loss.
    quantile: A float between 0. and 1., the quantile we want to regress.

  Returns:
    Weighted Pinball loss float `Tensor`. If `reduction` is `NONE`, this has
    the same shape as `labels`; otherwise, it is scalar.

  Raises:
    ValueError: If the shape of `predictions` doesn't match that of `labels`
      or if the shape of `weights` is invalid. Also if `labels` or
      `predictions` is None.

  @compatibility(eager)
  The `loss_collection` argument is ignored when executing eagerly. Consider
  holding on to the return value or collecting losses via a `tf.keras.Model`.
  @end_compatibility
  """
  # Reject missing tensors before opening the name scope.
  if y_true is None:
    raise ValueError('y_true must not be None.')
  if y_pred is None:
    raise ValueError('y_pred must not be None.')

  with tf.compat.v1.name_scope(scope, 'pinball_loss',
                               (y_pred, y_true, weights)) as scope:
    predictions = tf.cast(y_pred, dtype=tf.float32)
    targets = tf.cast(y_true, dtype=tf.float32)
    predictions.get_shape().assert_is_compatible_with(targets.get_shape())
    delta = tf.subtract(targets, predictions)
    # For delta >= 0 the max selects quantile*delta; otherwise it selects
    # (quantile - 1)*delta, which is positive since delta is negative.
    losses = tf.maximum(quantile * delta, (quantile - 1) * delta)
    return tf.compat.v1.losses.compute_weighted_loss(
        losses, weights, scope, loss_collection, reduction=reduction)
|
'''
Notice:
1. Functions intended for jit compilation should be located in mfs
2. For the usage of jit types and signatures, please refer to the Numba documentation <http://numba.github.com/numba-doc/0.10/index.html>
'''
from dpark import _ctx as dpark, jit, autojit
import numpy
@jit('f8(f8[:])')
def add1(x):
    """Weighted sum of a 1-D float64 array: returns sum over i of i * x[i].

    Compiled by Numba with an explicit signature (f8 result from f8[:]).
    """
    # NOTE: `sum` shadows the builtin; kept as-is because the body is
    # compiled by Numba and renaming is out of scope for a doc change.
    sum = 0.0
    for i in xrange(x.shape[0]):  # xrange: this module targets Python 2
        sum += i*x[i]
    return sum
@autojit
def add2(x):
    """Weighted sum of an array: returns sum over i of i * x[i].

    Same computation as add1, but Numba infers the types (autojit).
    """
    sum = 0.0  # shadows the builtin; see note on add1
    for i in xrange(x.shape[0]):  # xrange: this module targets Python 2
        sum += i*x[i]
    return sum
def add3(x):
    """Plain-Python baseline: returns sum over i of i * x[i].

    Same computation as add1/add2 but without JIT compilation, used to
    compare interpreted vs compiled performance.

    @param x: 1-D numeric sequence (e.g. a numpy array).
    @return: float weighted sum; 0.0 for an empty input.
    """
    # enumerate replaces the Python-2-only xrange-over-shape loop: it yields
    # the same (index, element) pairs, works on both Python 2 and 3, and no
    # longer shadows the builtin `sum`.
    total = 0.0
    for i, value in enumerate(x):
        total += i * value
    return total
# Driver script: build an RDD of ten 1e7-element float ranges and run the
# three implementations (explicit-signature jit, autojit, plain Python)
# over it, printing each result list (Python 2 print statements).
rdd = dpark.makeRDD(range(0, 10)).map(lambda x: numpy.arange(x*1e7, (x+1)*1e7))
print rdd.map(add1).collect()
print rdd.map(add2).collect()
print rdd.map(add3).collect()
|
Returns:
- resp: server response if successful
- list: list of strings returned by the server in response to the
HELP command
"""
return self._longcmdstring('HELP', file)
def _statparse(self, resp):
"""Internal: parse the response line of a STAT, NEXT, LAST,
ARTICLE, HEAD or BODY command."""
if not resp.startswith('22'):
raise NNTPReplyError(resp)
words = resp.split()
art_num = int(words[1])
message_id = words[2]
return resp, art_num, message_id
def _statcmd(self, line):
"""Internal: process a STAT, NEXT or LAST command."""
resp = self._shortcmd(line)
return self._statparse(resp)
def stat(self, message_spec=None):
"""Process a STAT command. Argument:
- message_spec: article number or message id (if not specified,
the current article is selected)
Returns:
- resp: server response if successful
- art_num: the article number
- message_id: the message id
"""
if message_spec:
return self._statcmd('STAT {0}'.format(message_spec))
else:
return self._statcmd('STAT')
    def next(self):
        """Process a NEXT command. No arguments. Return as for STAT."""
        # Delegates to the shared STAT-style response handling.
        return self._statcmd('NEXT')
    def last(self):
        """Process a LAST command. No arguments. Return as for STAT."""
        # Delegates to the shared STAT-style response handling.
        return self._statcmd('LAST')
def _artcmd(self, line, file=None):
"""Internal: process a HEAD, BODY or ARTICLE command."""
resp, lines = self._longcmd(line, file)
resp, art_num, message_id = self._statparse(resp)
return resp, ArticleInfo(art_num, message_id, lines)
def head(self, message_spec=None, *, file=None):
"""Process a HEAD command. Argument:
- message_spec: article number or message id
- file: filename string or file object to store the headers in
Returns:
- resp: server response if successful
- ArticleInfo: (article number, message id, list of header lines)
"""
if message_spec is not None:
cmd = 'HEAD {0}'.format(message_spec)
else:
cmd = 'HEAD'
return self._artcmd(cmd, file)
def body(self, message_spec=None, *, file=None):
"""Process a BODY command. Argument:
- message_spec: article number or message id
- file: filename string or file object to store the body in
Returns:
- resp: server response if successful
- ArticleInfo: (article number, message id, list of body lines)
"""
if message_spec is not None:
cmd = 'BODY {0}'.format(message_spec)
else:
cmd = 'BODY'
return self._artcmd(cmd, file)
def article(self, message_spec=None, *, file=None):
"""Process an ARTICLE command. Argument:
- message_spec: article number or message id
- file: filename string or file object to store the article in
Returns:
- resp: server response if successful
- ArticleInfo: (article number, message id, list of article lines)
"""
if message_spec is not None:
cmd = 'ARTICLE {0}'.format(message_spec)
else:
cmd = 'ARTICLE'
return self._artcmd(cmd, file)
    def slave(self):
        """Process a SLAVE command. Returns:
        - resp: server response if successful
        """
        return self._shortcmd('SLAVE')
def xhdr(self, hdr, str, *, file=None):
"""Process an XHDR command (optional server extension). Arguments:
- hdr: the header type (e.g. 'subject')
- str: an article nr, a message id, or a range nr1-nr2
- file: Filename string or file object to store the result in
Returns:
- resp: server response if successful
- list: list of (nr, value) strings
"""
pat = re.compile('^([0-9]+) ?(.*)\n?')
resp, lines = self._longcmdstring('XHDR {0} {1}'.format(hdr, str), file)
def remove_number(line):
m = pat.match(line)
return m.group(1, 2) if m else line
return resp, [remove_number(line) for line in lines]
def xover(self, start, end, *, file=None):
"""Process an XOVER command (optional server extension) Arguments:
- start: start of range
- end: end of range
- file: Filename string or file object to store the result in
Returns:
- resp: server response if successful
- list: list of dicts containing the response fields
"""
resp, lines = self._longcmdstring('XOVER {0}-{1}'.format(start, end),
file)
fmt = self._getoverviewfmt()
return resp, _parse_overview(lines, fmt)
def over(self, message_spec, *, file=None):
"""Process an OVER command. If the command isn't supported, fall
back to XOVER. Arguments:
- message_spec:
- either a message id, indicating the article to fetch
information about
- or a (start, end) tuple, indicating a range of article numbers;
if end is None, information up to the newest message will be
retrieved
- or None, indicating the current article number must be used
- file: Filename string or file object to store the result in
Returns:
- resp: server response if successful
- list: list of dicts containing the response fields
NOTE: the "message id" form isn't supported by XOVER
"""
cmd = 'OVER' if 'OVER' in self._caps else 'XOVER'
if isinstance(message_spec, (tuple, list)):
start, end = message_spec
cmd += ' {0}-{1}'.format(start, end or '')
elif message_spec is not None:
cmd = cmd + ' ' + message_spec
resp, lines = self._longcmdstring(cmd, file)
fmt = self._getoverviewfmt()
return resp, _parse_overview(lines, fmt)
def xgtitle(self, group, *, file=None):
"""Process an XGTITLE command (optional server extension) Arguments:
- group: group name wildcard (i.e. news.*)
Returns:
- resp: server response if successful
- list: list of (name,title) strings"""
warnings.warn("The XGTITLE extension is not actively used, "
"use descriptions() instead",
DeprecationWarning, 2)
line_pat = re.compile('^([^ \t]+)[ \t]+(.*)$')
resp, raw_lines = self._longcmdstring('XGTITLE ' + group, file)
lines = []
for raw_line in raw_lines:
match = line_pat.search(raw_line.strip())
if match:
lines.append(match.group(1, 2))
return resp, lines
def xpath(self, id):
"""Process an XPATH command (optional server extension) Arguments:
- id: Message id of article
Returns:
resp: server response if successful
path: directory path to article
"""
warnings.warn("The XPATH extension is not actively used",
DeprecationWarning, 2)
resp = self._shortcmd('XPATH {0}'.format(id))
if not resp.startswith('223'):
raise NNTPReplyError(resp)
try:
[resp | _num, path] = resp.split()
except ValueError:
raise NNTPReplyError(resp)
else:
return resp, path
def date(self):
"""Process the DATE command.
Returns:
- resp: server response if successful
- date: datetime object
"""
resp = self._shortcmd("DATE")
if not resp.startswith('111'):
raise NNTPReplyError(resp)
| elem = resp.split()
if len(elem) != 2:
raise NNTPDataError(resp)
date = elem[1]
if len(date) != 14:
raise NNTPDataError(resp)
return resp, _parse_datetime(date, None)
def _post(self, command, f):
resp = self._shortcmd(command)
# Raises a specific exception if post |
# QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
# Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from clr import AddReference
AddReference("System")
AddReference("QuantConnect.Algorithm")
AddReference("QuantConnect.Common")
AddReference("QuantConnect.Indicators")
from System import *
from QuantConnect import *
from QuantConnect.Indicators import *
from QuantConnect.Data import *
from QuantConnect.Data.Market import *
from QuantConnect.Data.Custom import *
from QuantConnect.Algorithm import *
from QuantConnect.Python import PythonQuandl
### <summary>
### The algorithm creates new indicator value with the existing indicator method by Indicator Extensions
### Demonstration of using the external custom datasource Quandl to request the VIX and VXV daily data
### </summary>
### <meta name="tag" content="using data" />
### <meta name="tag" content="using quantconnect" />
### <meta name="tag" content="custom data" />
### <meta name="tag" content="indicators" />
### <meta name="tag" content="indicator classes" />
### <meta name="tag" content="plotting indicators" />
### <meta name="tag" content="charting" />
class CustomDataIndicatorExtensionsAlgorithm(QCAlgorithm):
    """Trades VIX based on the VXV/VIX ratio, with both series loaded as
    Quandl custom data and combined via IndicatorExtensions.Over."""

    # Initialize the data and resolution you require for your strategy
    def Initialize(self):
        self.SetStartDate(2014,1,1)
        self.SetEndDate(2018,1,1)
        self.SetCash(25000)

        # Quandl dataset codes used as the symbols for the custom data.
        self.vix = 'CBOE/VIX'
        self.vxv = 'CBOE/VXV'

        # Define the symbol and "type" of our generic data
        # (QuandlVix remaps the value column for the VIX dataset).
        self.AddData(QuandlVix, self.vix, Resolution.Daily)
        self.AddData(Quandl, self.vxv, Resolution.Daily)

        # Set up default Indicators, these are just 'identities' of the closing price
        self.vix_sma = self.SMA(self.vix, 1, Resolution.Daily)
        self.vxv_sma = self.SMA(self.vxv, 1, Resolution.Daily)

        # This will create a new indicator whose value is smaVXV / smaVIX
        self.ratio = IndicatorExtensions.Over(self.vxv_sma, self.vix_sma)

        # Plot indicators each time they update using the PlotIndicator function
        self.PlotIndicator("Ratio", self.ratio)
        self.PlotIndicator("Data", self.vix_sma, self.vxv_sma)

    # OnData event is the primary entry point for your algorithm. Each new data point will be pumped in here.
    def OnData(self, data):
        # Wait for all indicators to fully initialize
        if not (self.vix_sma.IsReady and self.vxv_sma.IsReady and self.ratio.IsReady): return
        # Enter long VIX while VXV/VIX > 1; liquidate when the ratio drops
        # below 1.
        if not self.Portfolio.Invested and self.ratio.Current.Value > 1:
            self.MarketOrder(self.vix, 100)
        elif self.ratio.Current.Value < 1:
            self.Liquidate()
# In CBOE/VIX data, there is a "vix close" column instead of "close", which is
# the default column name in the LEAN Quandl custom data implementation.
# This class assigns a new column name to match the external datasource setting.
class QuandlVix(PythonQuandl):
    """Quandl custom-data type that reads its value from the "VIX Close"
    column rather than the default."""
    def __init__(self):
        # Point the value column at the dataset's "VIX Close" field.
        self.ValueColumnName = "VIX Close"
from distutils.core import setup, Extension, Command
from distutils.command.build import build
from distutils.command.build_ext import build_ext
from distutils.command.config import config
from distutils.msvccompiler import MSVCCompiler
from distutils import sysconfig
import string
import sys
# Object files from the Metakit C++ core that are linked into the Mk4py
# extension; bare names here are resolved to real paths (prefix + suffix)
# in build_mkext.build_extension.
mkobjs = ['column', 'custom', 'derived', 'fileio', 'field',
          'format', 'handler', 'persist', 'remap', 'std',
          'store', 'string', 'table', 'univ', 'view', 'viewx']
class config_mk(config):
    """Config step: probe for optional headers and pass the result on to
    the build_ext command as a preprocessor define."""
    def run(self):
        # work around bug in Python 2.2-supplied check_header, fixed
        # in Python 2.3; body needs to be a valid, non-zero-length string
        if self.try_cpp(body="/* body */", headers=['unicodeobject.h'],
                        include_dirs=[sysconfig.get_python_inc()]):
            build = self.distribution.reinitialize_command('build_ext')
            build.define = 'HAVE_UNICODEOBJECT_H'
        # trust that mk4.h provides the correct HAVE_LONG_LONG value,
        # since Mk4py doesn't #include "config.h"
class build_mk(build):
    """Build command that defaults its output to the ../builds directory."""
    def initialize_options(self):
        # build in builds directory by default, unless specified otherwise
        build.initialize_options(self)
        self.build_base = '../builds'
class build_mkext(build_ext):
    """build_ext variant that forces a C++ compiler and links the prebuilt
    Metakit core object files into the Mk4py extension."""

    def finalize_options(self):
        # Run the config command first so any detected defines (e.g.
        # HAVE_UNICODEOBJECT_H from config_mk) are in effect.
        self.run_command('config')
        # force use of C++ compiler (helps on some platforms)
        import os
        cc = os.environ.get('CXX', sysconfig.get_config_var('CXX'))
        if not cc:
            cc = sysconfig.get_config_var('CCC') # Python 1.5.2
        if cc:
            os.environ['CC'] = cc
        build_ext.finalize_options(self)

    def build_extension(self, ext):
        # work around linker problem with MacPython 2.3
        if sys.platform == 'darwin':
            try:
                self.compiler.linker_so.remove("-Wl,-x")
            except: pass
        # work around linker problem with Linux, Python 2.2 and earlier:
        # despite setting $CC above, still uses Python compiler
        if sys.platform == 'linux2':
            try:
                ext.libraries.append("stdc++")
            except: pass
        if ext.name == "Mk4py":
            # Resolve bare object names from mkobjs into full paths with a
            # per-toolchain prefix and suffix.
            if isinstance(self.compiler, MSVCCompiler):
                suffix = '.obj'
                if self.debug:
                    prefix = '../builds/msvc60/mklib/Debug/'
                else:
                    prefix = '../builds/msvc60/mklib/Release/'
            else:
                suffix = '.o'
                prefix = '../builds/'
            for i in range(len(ext.extra_objects)):
                nm = ext.extra_objects[i]
                if nm in mkobjs:
                    # Append the suffix only if the name has no extension
                    # yet (string.find: this file targets Python 2).
                    if string.find(nm, '.') == -1:
                        nm = nm + suffix
                    nm = prefix + nm
                    ext.extra_objects[i] = nm
        build_ext.build_extension(self, ext)
class test_regrtest(Command):
    """Distutils command that builds the package and runs its regression
    tests against the build tree via test.regrtest."""
    # Original version of this class posted
    # by Berthold Hoellmann to distutils-sig@python.org
    description = "test the distribution prior to install"

    user_options = [
        ('build-base=', 'b',
         "base build directory (default: 'build.build-base')"),
        ('build-purelib=', None,
         "build directory for platform-neutral distributions"),
        ('build-platlib=', None,
         "build directory for platform-specific distributions"),
        ('build-lib=', None,
         "build directory for all distribution (defaults to either " +
         "build-purelib or build-platlib"),
        ('test-dir=', None,
         "directory that contains the test definitions"),
        ('test-options=', None,
         "command-line options to pass to test.regrtest")
        ]

    def initialize_options(self):
        self.build_base = None
        # these are decided only after 'build_base' has its final value
        # (unless overridden by the user or client)
        self.build_purelib = None
        self.build_platlib = None
        self.test_dir = 'test'
        self.test_options = None

    def finalize_options(self):
        # Propagate any user-supplied build directories to the build
        # command, then copy the finalized values back.
        build = self.distribution.get_command_obj('build')
        build_options = ('build_base', 'build_purelib', 'build_platlib')
        for option in build_options:
            val = getattr(self, option)
            if val:
                setattr(build, option, getattr(self, option))
        build.ensure_finalized()
        for option in build_options:
            setattr(self, option, getattr(build, option))

    def run(self):
        # Invoke the 'build' command to "build" pure Python modules
        # (ie. copy 'em into the build tree)
        self.run_command('build')
        # remember old sys.path to restore it afterwards
        old_path = sys.path[:]
        # extend sys.path so the freshly built modules and tests are found
        sys.path.insert(0, self.build_purelib)
        sys.path.insert(0, self.build_platlib)
        sys.path.insert(0, self.test_dir)
        # Use test.regrtest, unlike the original version of this class
        import test.regrtest
        # jcw 2004-04-26 - why do I need to add these here to find the tests?
        #import leaktest - not very portable
        import test_inttypes
        import test_stringtype
        #import test_hash - doesn't work
        # jcw end
        test.regrtest.STDTESTS = []
        test.regrtest.NOTTESTS = []
        if self.test_options:
            # string.split: this file targets Python 2
            sys.argv[1:] = string.split(self.test_options, ' ')
        else:
            del sys.argv[1:]
        # remove stale modules so the built versions get re-imported
        del sys.modules['metakit']
        try:
            del sys.modules['Mk4py']
        except:
            pass
        self.announce("running tests")
        test.regrtest.main(testdir=self.test_dir)
        # restore sys.path
        sys.path = old_path[:]
#try:
# import metakit
#except:
# metakit = sys.modules['metakit']
# Package metadata and the Mk4py extension definition; the custom commands
# defined above wire in the Metakit-specific build behavior.
setup(name = "metakit",
      version = "2.4.9.7",
      description = "Python bindings to the Metakit database library",
      #long_description = metakit.__doc__,
      author = "Gordon McMillan / Jean-Claude Wippler",
      author_email = "jcw@equi4.com",
      url = "http://www.equi4.com/metakit/python.html",
      maintainer = "Jean-Claude Wippler",
      maintainer_email = "jcw@equi4.com",
      license = "X/MIT style, see: http://www.equi4.com/mklicense.html",
      keywords = ['database'],
      py_modules = ['metakit'],
      cmdclass = {'build': build_mk, 'build_ext': build_mkext,
                  'test': test_regrtest, 'config': config_mk},
      ext_modules = [Extension("Mk4py",
                               sources=["PyProperty.cpp",
                                        "PyRowRef.cpp",
                                        "PyStorage.cpp",
                                        "PyView.cpp",
                                        "scxx/PWOImp.cpp",
                                        ],
                               include_dirs=["scxx",
                                             "../include"],
                               extra_objects=mkobjs,
                               )]
      )
## Local Variables:
## compile-command: "python setup.py build -b ../builds"
## End:
, '--step_length', type=int, default=100,
help='Number of iterations between samples.')
parser.add_argument('-init', '--initial_soln', nargs="*",
help='Initial solution to use.')
parser.add_argument('-r', '--num_initial', default=1, type=int,
help='Number of different initial starts to use with MCMC.')
parser.add_argument('-tv', '--total_distance_cutoff', type=float, default=0.005,
help='stop condition of convergence (total distance).')
# Parameters for determining the test to be applied in CoMEt
parser.add_argument('--exact_cut', default=0.001, type=float,
help='Maximum accumulated table prob. to stop exact test.')
parser.add_argument('--binom_cut', type=float, default=0.005,
help='Minumum pval cutoff for CoMEt to perform binom test.')
parser.add_argument('-nt', '--nt', default=10, type=int,
help='Maximum co-occurrence cufoff to perform exact test.')
# Files for subtypes/core-events run
parser.add_argument('-sub', '--subtype', default=None,
help='File with a list of subtype for performing subtype-comet.')
parser.add_argument('-ce', '--core_events', default=None,
help='File with a list of core events for performing subtype-comet.')
# Hidden parameters: users can still use these parameters but they won't show in the options
# Parameters for marginal probability graph (optional)
# File mapping genes/events to new names (optional).
parser.add_argument('-e', '--event_names', default=None, help=argparse.SUPPRESS)
# File mapping samples to cancer types.
parser.add_argument('-st', '--sample_types_file', default=None, help=argparse.SUPPRESS)
# Minimum edge weight for showing in the graph
parser.add_argument('-mew', '--minimum_edge_weight', type=float, default=0.001,
help=argparse.SUPPRESS)
# Minimum sampling frequency for a gene set to be included.
parser.add_argument('-msf', '--minimum_sampling_frequency', type=float, default=50,
help=argparse.SUPPRESS)
# Template file (HTML). Change at your own risk.
parser.add_argument('-tf', '--template_file', default="comet/src/html/template.html",
type=str, help=argparse.SUPPRESS)
# Maximum standard error cutoff to consider a line
parser.add_argument('-rmse', '--standard_error_cutoff', default=0.01, type=float,
help=argparse.SUPPRESS)
# Input file with lists of pre-run results.
parser.add_argument('--precomputed_scores', default=None, help=argparse.SUPPRESS)
# Accelerating factor for target weight
parser.add_argument('-acc', '--accelerator', default=1, type=int, help=argparse.SUPPRESS)
# Flag verbose output
parser.add_argument('-v', '--verbose', default=True, action="store_true",
help=argparse.SUPPRESS)
# Set the seed of the PRNG.
parser.add_argument('--seed', default=int(time.time()), type=int,
help=argparse.SUPPRESS)
# Edge swapping parameter.
parser.add_argument('-q', '--Q', type=int, default=100,
help=argparse.SUPPRESS)
# Keep temp files (CoMEt results and permuted matrices).
parser.add_argument('--keep_temp_files', required=False, action='store_true', default=False,
help=argparse.SUPPRESS)
return parser
def runComet(cometArgs):
    """Parse cometArgs with the CoMEt argument parser and run CoMEt once."""
    parsedArgs = RC.get_parser().parse_args(cometArgs)
    return RC.run(parsedArgs)
def run( args ):
# Set up the arguments for a general CoMEt run on real data
realOutputDir = "{}/comet-results".format(args.output_directory)
realCometArgs = []
permuteFlags = ["-np", "--parallel", "--keep_temp_files", "-o"]
for i, arg in enumerate(sys.argv[1:]):
if arg not in permuteFlags and sys.argv[i] not in permuteFlags:
realCometArgs.append( arg )
realCometArgs += [ "-o", realOutputDir, "--noviz"]
# perform simple run without viz first.
results = runComet(realCometArgs)
# Load mutation data using Multi-Dendrix and output as a temporary file
realMutations = C.load_mutation_data(args.mutation_matrix, args.patient_file,
args.gene_file, args.min_freq, args.subtype)
m, n, genes, patients, geneToCases, patientToGenes, subtypes = realMutations
if args.verbose:
print '* Mutation data: %s genes x %s patients' % (m, n)
# Construct bipartite graph from mutation data
if args.verbose: print "* Creating bipartite graph..."
G = C.construct_mutation_graph(geneToCases, patientToGenes)
if args.verbose:
print '\t- Graph has', len( G.edges() ), 'edges among', len( G.nodes() ), 'nodes.'
# reset the arguments for a general CoMEt run on permuted matrices
cometArgs = []
permuteFlags = ["-np", "--parallel", "--keep_temp_files", "-m", "-o"]
for i, arg in enumerate(sys.argv[1:]):
if arg not in permuteFlags and sys.argv[i] not in permuteFlags:
cometArgs.append( arg )
cometArgs.append('--noviz')
# Create a permuted matrix, and then run it through CoMEt
import tempfile
arguments = []
if args.keep_temp_files:
directory = args.output_directory
else:
directory = tempfile.mkdtemp(dir=".", prefix=".tmp")
# Generate random seeds for each permutation
random.seed(args.seed)
seeds = [ random.randint(0, 2**31-1) for _ in range(args.num_permutations) ]
for i, seed in enumerate(seeds):
# Print simple progress bar
sys.stdout.write("* Running CoMEt on permuted matrices... {}/{}\r".format(i+1, args.num_permutations))
sys.stdout.flush()
# Create a permuted dataset and save it a temporary file
mutations = C.permute_mutation_data(G, genes, patients, seed, args.Q)
_, _, _, _, geneToCases, patientToGenes = mutations
adj_list = [ p + "\t" + "\t".join( sorted(patientToGenes[p]) ) for p in patients ]
permutation_file = "{}/permuted-matrix-{}.m2".format(directory, i+1)
with open(permutation_file, 'w') as outfile: outfile.write('\n'.join(adj_list))
# Add the new arguments
permuteArgs = map(str, cometArgs)
permuteArgs += [ "-m", permutation_file ]
permuteArgs += [ "-o", "{}/comet-results-on-permutation-{}".format(directory, i+1)]
arguments.append( permuteArgs )
if args.parallel:
pool = mp.Pool(25)
results = pool.map(runComet, arguments)
pool.close()
pool.join()
else:
results = [ runComet(permuteArgs) for permuteArgs in arguments ]
# Find the maximum test statistic on the permuted datasets
from itertools import islice
maxStat = 0
for rf in [ rf for rf in os.listdir(directory) if rf.startswith("comet-results-on-permutation") ]:
for df in [df for df in os.listdir("{}/{}/results".format(directory, rf) ) if df.endswith(".tsv")]:
with open("{}/{}/results/{}".format(directory, rf, df)) as infile:
for line in islice(infile, 1, 2):
score = float(line.split("\t")[1])
if score > maxStat:
maxStat = score
print "*" * 80
print "Number of permutations:", args.num_permutations
print "Max statistic:", maxStat
# Prepare comet results on real, mutation data, and output directory for viz
for rf in [rf for rf in os.listdir( "{}/results/".format(realOutputDir) ) if rf.endswith(".tsv")]:
resultsTable = [l.rstrip() for l in open( "{}/results/{}".format(realOutputDir, rf))] |
realMutations = (m, n, genes, patients, geneToCases, patientToGenes )
outputDirViz = realOutputDir + "/viz/"
C.ensure_dir(outputDirViz)
# Perform visualization
C.output_comet_viz(RC.get_parser().parse_args(realCometArgs), realMutations, \
results | Table, maxStat, args.num_permutations)
# Destroy the temporary di |