| commit (stringlengths 40 to 40) | subject (stringlengths 1 to 3.25k) | old_file (stringlengths 4 to 311) | new_file (stringlengths 4 to 311) | old_contents (stringlengths 0 to 26.3k) | lang (stringclasses, 3 values) | proba (float64, 0 to 1) | diff (stringlengths 0 to 7.82k) |
|---|---|---|---|---|---|---|---|
cf36f9792886c6dd67b37c29af4a5d510b924902
|
Use UTF-8 by default instead of locale encoding.
|
pybtex/io.py
|
pybtex/io.py
|
# Copyright (c) 2009, 2010, 2011, 2012 Andrey Golovizin
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""Unicode-aware IO routines."""
from __future__ import absolute_import
import io
import sys
import locale
from os import path, environ
from pybtex.exceptions import PybtexError
from pybtex.kpathsea import kpsewhich

def get_default_encoding():
    try:
        locale_encoding = locale.getpreferredencoding()
    except locale.Error:
        locale_encoding = None
    return locale_encoding or 'UTF-8'

def get_stream_encoding(stream):
    stream_encoding = getattr(stream, 'encoding', None)
    return stream_encoding or get_default_encoding()

def _open_existing(opener, filename, mode, locate, **kwargs):
    if not path.isfile(filename):
        found = locate(filename)
        if found:
            filename = found
    return opener(filename, mode, **kwargs)

def _open_or_create(opener, filename, mode, environ, **kwargs):
    try:
        return opener(filename, mode, **kwargs)
    except EnvironmentError, error:
        if 'TEXMFOUTPUT' in environ:
            new_filename = path.join(environ['TEXMFOUTPUT'], filename)
            try:
                return opener(new_filename, mode, **kwargs)
            except EnvironmentError:
                pass
        raise error

def _open(opener, filename, mode, **kwargs):
    write_mode = 'w' in mode
    try:
        if write_mode:
            return _open_or_create(opener, filename, mode, environ, **kwargs)
        else:
            return _open_existing(opener, filename, mode, locate=kpsewhich, **kwargs)
    except EnvironmentError, error:
        raise PybtexError("unable to open %s. %s" % (filename, error.strerror))

def open_raw(filename, mode='rb', encoding=None):
    return _open(io.open, filename, mode)

def open_unicode(filename, mode='r', encoding=None):
    if encoding is None:
        encoding = get_default_encoding()
    return _open(io.open, filename, mode, encoding=encoding)

def reader(stream, encoding=None, errors='strict'):
    if encoding is None:
        encoding = get_stream_encoding(stream)
    return io.TextIOWrapper(stream, encoding=encoding, errors=errors)
stdout = sys.stdout
stderr = sys.stderr
|
Python
| 0
|
@@ -1210,22 +1210,8 @@
sys%0A
-import locale%0A
from
@@ -1354,154 +1354,14 @@
-try:%0A locale_encoding = locale.getpreferredencoding()%0A except locale.Error:%0A locale_encoding = None%0A return locale_encoding or
+return
'UT
|
f3ca8021f9fd4b6d00a6502ede414b1712c2a1ac
|
remove comment explaining where imports are from in hvc/extract.py
|
hvc/extract.py
|
hvc/extract.py
|
"""
feature extraction
"""
# from hvc
from .parseconfig import parse_config
from . import features
from .parse.extract import _validate_feature_group_and_convert_to_list
from .parse.ref_spect_params import refs_dict

def extract(config_file=None,
            data_dirs=None,
            file_format=None,
            annotation_file=None,
            labels_to_use=None,
            feature_group=None,
            feature_list=None,
            output_dir=None,
            spect_params=None,
            return_features=None):
    """high-level function for feature extraction.

    Accepts either a config file or a set of parameters and
    uses them to extract features from audio files
    that are then used to train machine learning classifiers.
    Returns features and/or saves them to a file.

    Parameters
    ----------
    config_file : string
        filename of YAML file that configures feature extraction
    data_dirs : list
        of str, directories that contain audio files from which features should be extracted.
        hvc.extract attempts to create an annotation.csv file based on the audio file types in
        the directories.
    file_format : str
        format of audio files. One of the following: {'cbin','wav'}
    annotation_file : str
        filename of an annotation.csv file
    labels_to_use : str
        either
            a string representing unique set of labels which, if
            a syllable/segment is annotated with that label, then features
            will be calculated for that syllable
            e.g., 'iabcdef' or '012345'
        or
            'all'
            in which case features are extracted from all syllable segments
    feature_group : str
        One of the following set: {'knn', 'svm', 'flatwindow'}
        Shorthand way of specifying a list of features to extract, see docs for more detail.
    feature_list : list
        list of features to extract
    output_dir : str
        absolute path to directory in which to save extracted features
    spect_params : dict
        parameters to compute spectrograms,
        as defined for hvc.audiofileIO.Spectrogram.
        Please consult docstring for that class to see valid parameters.
    return_features : bool
        if True, returns features and labels.
        If a config file is used, defaults to False.
        Otherwise, default is True.
    """
    if config_file and (data_dirs or file_format or annotation_file or labels_to_use
                        or feature_group or feature_list or output_dir or spect_params):
        raise ValueError('Cannot specify config_file and other parameters '
                         'when calling hvc.extract, '
                         'please specify either config_file or all other '
                         'parameters ')

    if config_file and data_dirs:
        raise ValueError('Please specify either config_file or data_dirs, '
                         'not clear which to use when both are specified')

    if config_file and annotation_file:
        raise ValueError('Please specify either config_file or annotation_file, '
                         'not clear which to use when both are specified')

    if config_file:
        extract_config = parse_config(config_file, 'extract')
        print('Parsed extract config.')

        todo_list = extract_config['todo_list']
        for ind, todo in enumerate(todo_list):
            print('Completing item {} of {} in to-do list'.format(ind + 1, len(todo_list)))

            extract_init_params = {'feature_list': todo['feature_list']}
            if 'feature_list_group_ID' in todo:
                extract_init_params['feature_list_group_ID'] = todo['feature_list_group_ID']
                extract_init_params['feature_group_ID_dict'] = todo['feature_group_ID_dict']

            # segment_params defined for todo_list item takes precedence over any default
            # defined for `extract` config
            if 'segment_params' in todo:
                extract_init_params['segment_params'] = todo['segment_params']
            else:
                extract_init_params['segment_params'] = extract_config['segment_params']

            if 'spect_params' in todo:
                extract_init_params['spect_params'] = todo['spect_params']
            else:
                extract_init_params['spect_params'] = extract_config['spect_params']

            fe = features.extract.FeatureExtractor(**extract_init_params)

            extract_params = {
                'output_dir': todo['output_dir'],
                'labels_to_use': todo['labels_to_use'],
                'file_format': todo['file_format']
            }
            if 'return_features' not in extract_params:
                if return_features is None:
                    extract_params['return_features'] = False
                else:
                    extract_params['return_features'] = return_features

            if 'data_dirs' in todo:
                extract_params['data_dirs'] = todo['data_dirs']
                extract_params['data_dirs_validated'] = True
            elif 'annotation_file' in todo:
                extract_params['annotation_file'] = todo['annotation_file']

            fe.extract(**extract_params)

    elif data_dirs or annotation_file:
        if data_dirs and annotation_file:
            raise ValueError('hvc.extract received values for both data_dirs and '
                             'annotation_file arguments, unclear which to use. '
                             'Please only specify one or the other.')

        if feature_group and feature_list:
            raise ValueError('hvc.extract received values for both feature_group and '
                             'feature_list arguments, unclear which to use. '
                             'Please only specify one or the other.')

        extract_init_params = {}
        if spect_params is None:
            spect_params = refs_dict['evsonganaly']
        extract_init_params['spect_params'] = spect_params

        if feature_group:
            if type(feature_group) != str and type(feature_group) != list:
                raise TypeError('feature_group must be str or list but instead was {}'
                                .format(type(feature_group)))
            if type(feature_group) == str:
                feature_list, _, _ = _validate_feature_group_and_convert_to_list(feature_group)
            elif type(feature_group) == list:
                (feature_list,
                 feature_list_group_ID,
                 feature_group_ID_dict) = _validate_feature_group_and_convert_to_list(feature_group)
                extract_init_params['feature_list_group_ID'] = feature_list_group_ID
                extract_init_params['feature_group_ID_dict'] = feature_group_ID_dict
        extract_init_params['feature_list'] = feature_list

        fe = features.extract.FeatureExtractor(**extract_init_params)

        extract_params = {
            'file_format': file_format,
            'labels_to_use': labels_to_use,
            'output_dir': output_dir,
            'return_features': return_features
        }
        if data_dirs:
            extract_params['data_dirs'] = data_dirs
        elif annotation_file:
            extract_params['annotation_file'] = annotation_file

        if return_features:
            ftrs = fe.extract(**extract_params)
            return ftrs
        else:
            fe.extract(**extract_params)
|
Python
| 0
|
@@ -25,19 +25,8 @@
%22%22%0A%0A
-# from hvc%0A
from
|
e1f45cba287d7964f7ed01e7ddae61db173d9c25
|
Fix binary type issues on Python 2.
|
fernet_fields/fields.py
|
fernet_fields/fields.py
|
from hashlib import sha256
from cryptography.fernet import Fernet, MultiFernet
from django.conf import settings
from django.core.exceptions import FieldError, ImproperlyConfigured
from django.db import models
from django.db.models import lookups
from django.utils.encoding import force_bytes, force_text
from django.utils.functional import cached_property
from . import hkdf

__all__ = [
    'EncryptedFieldMixin',
    'EncryptedTextField',
    'EncryptedCharField',
    'EncryptedEmailField',
    'EncryptedIntegerField',
    'EncryptedDateField',
    'EncryptedDateTimeField',
]


class EncryptedFieldMixin(models.Field):
    """A field mixin to encrypt values using Fernet symmetric encryption."""

    def __init__(self, *args, **kwargs):
        if kwargs.get('primary_key'):
            raise ImproperlyConfigured(
                "EncryptedFieldMixin does not support primary_key=True."
            )
        key = kwargs.pop('key', None)
        keys = kwargs.pop('keys', None)
        self.use_hkdf = kwargs.pop(
            'use_hkdf', getattr(settings, 'FERNET_USE_HKDF', True))
        if (key is not None) and (keys is not None):
            raise ImproperlyConfigured(
                "Cannot pass both `key` and `keys` to encrypted field.")
        if keys is None:
            if key is not None:
                keys = [key]
            else:
                keys = getattr(settings, 'FERNET_KEYS', None)
        if keys is None:
            keys = [settings.SECRET_KEY]
        self.keys = keys
        super(EncryptedFieldMixin, self).__init__(*args, **kwargs)
        self.prepend_hash = None
        if self.unique:
            self.prepend_hash = 'unique'
        elif self.db_index:
            self.prepend_hash = 'index'

    @cached_property
    def fernet_keys(self):
        if self.use_hkdf:
            return [hkdf.derive_fernet_key(k) for k in self.keys]
        return self.keys

    @cached_property
    def fernet(self):
        if len(self.fernet_keys) == 1:
            return Fernet(self.fernet_keys[0])
        return MultiFernet([Fernet(k) for k in self.fernet_keys])

    def db_type(self, connection):
        # PostgreSQL and SQLite both support the BYTEA type.
        return 'bytea'

    def get_internal_type(self):
        """Prevent Django attempting type conversions on encrypted data."""
        return None

    def get_db_prep_save(self, *args, **kwargs):
        value = super(
            EncryptedFieldMixin, self
        ).get_db_prep_value(*args, **kwargs)
        if value is not None:
            value = force_bytes(value)
            retval = self.fernet.encrypt(value)
            if self.prepend_hash:
                retval = sha256(value).digest() + retval
            return retval

    def get_prep_lookup(self, lookup_type, value):
        if self.prepend_hash and lookup_type in {'exact', 'in'}:
            if lookup_type == 'in':
                return [self.get_prep_lookup('exact', v) for v in value]
            return sha256(force_bytes(value)).digest()
        raise FieldError(
            "Encrypted field '%s' only supports exact and __in lookups, "
            "and only if field has db_index=True or unique=True." % self.name
        )

    def from_db_value(self, value, expression, connection, context):
        if value is not None:
            value = bytes(value)
            if self.prepend_hash:
                value = value[32:]
            return self.to_python(force_text(self.fernet.decrypt(value)))

    def deconstruct(self):
        name, path, args, kwargs = super(
            EncryptedFieldMixin, self
        ).deconstruct()
        kwargs['keys'] = self.keys
        return name, path, args, kwargs


class HashPrefixLookupMixin(object):
    def process_lhs(self, compiler, connection):
        lhs, params = super(
            HashPrefixLookupMixin, self
        ).process_lhs(compiler, connection)
        if connection.vendor == 'postgresql':
            return 'SUBSTRING(%s for 32)' % lhs, params
        elif connection.vendor == 'sqlite':
            return 'SUBSTR(%s, 0, 33)' % lhs, params
        else:
            raise ImproperlyConfigured(
                "Unsupported database vendor (not postgres or sqlite)"
                ": %s" % connection.vendor
            )


class HashPrefixExact(HashPrefixLookupMixin, lookups.Exact):
    pass


EncryptedFieldMixin.register_lookup(HashPrefixExact)


class HashPrefixIn(HashPrefixLookupMixin, lookups.In):
    pass


EncryptedFieldMixin.register_lookup(HashPrefixIn)


class EncryptedTextField(EncryptedFieldMixin, models.TextField):
    pass


class EncryptedCharField(EncryptedFieldMixin, models.CharField):
    pass


class EncryptedEmailField(EncryptedFieldMixin, models.EmailField):
    pass


class EncryptedIntegerField(EncryptedFieldMixin, models.IntegerField):
    pass


class EncryptedDateField(EncryptedFieldMixin, models.DateField):
    pass


class EncryptedDateTimeField(EncryptedFieldMixin, models.DateTimeField):
    pass
|
Python
| 0.000002
|
@@ -2370,16 +2370,94 @@
n None%0A%0A
+ def get_hashed_value(self, value):%0A return sha256(value).digest()%0A%0A
def
@@ -2475,39 +2475,41 @@
_save(self,
-*args, **kwargs
+value, connection
):%0A v
@@ -2586,29 +2586,30 @@
rep_
-value(*args, **kwargs
+save(value, connection
)%0A
@@ -2783,36 +2783,42 @@
tval = s
-ha256(value).digest(
+elf.get_hashed_value(value
) + retv
@@ -2839,22 +2839,50 @@
return
+connection.Database.Binary(
retval
+)
%0A%0A de
@@ -2883,24 +2883,27 @@
def get_
+db_
prep_lookup(
@@ -2922,24 +2922,46 @@
_type, value
+, connection, *a, **kw
):%0A i
@@ -3032,65 +3032,87 @@
-if lookup_type == 'in':%0A return %5Bself
+values = super(%0A EncryptedFieldMixin, self%0A )
.get_
+db_
prep
@@ -3123,90 +3123,219 @@
kup(
-'exact', v) for v in value%5D%0A return sha256(force_bytes(value)).digest()
+lookup_type, value, connection, *a, **kw)%0A return %5B%0A connection.Database.Binary(%0A self.get_hashed_value(force_bytes(v)))%0A for v in values%0A %5D
%0A
|
4283aaf601482ee2512c642101f587ffe3515ef9
|
raise if user doesn't exist in forgotten password form
|
authentification/forms.py
|
authentification/forms.py
|
from django import forms

class ForgottenPasswordForm(forms.Form):
    username = forms.CharField(label="Identifiant")
    email = forms.EmailField(label="Votre adresse e-mail")
|
Python
| 0.000001
|
@@ -19,16 +19,61 @@
forms%0A%0A
+from django.contrib.auth.models import User%0A%0A
%0Aclass F
@@ -217,8 +217,250 @@
-mail%22)%0A
+%0A def clean_username(self):%0A username = self.cleaned_data%5B'username'%5D%0A%0A if not User.objects.filter(username=username).exists():%0A raise forms.ValidationError(%22Cet utilisateur n'existe pas%22)%0A%0A return username%0A
|
f55c0bd8db7850668582bb7b47da4d0acafabc46
|
Optimize imports
|
digitalmanifesto/urls.py
|
digitalmanifesto/urls.py
|
from __future__ import absolute_import
from __future__ import unicode_literals
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import TemplateView
from . import views

urlpatterns = [
    # Admin
    url(r'^jet/', include('jet.urls', 'jet')),  # Django JET URLS
    url(r'^admin/', include(admin.site.urls)),

    url(r'^$', views.IndexView.as_view(), name='index'),

    # Simple template views
    url(r'^about/$', TemplateView.as_view(template_name='about.html'), name='about'),
    url(r'^contact/$', TemplateView.as_view(template_name='contact.html'), name='contact'),
    url(r'^news/$', TemplateView.as_view(template_name='news.html'), name='news'),
    url(r'^projects-we-like/$', TemplateView.as_view(template_name='projects_we_like.html'), name='projects'),
    url(r'^resources/$', TemplateView.as_view(template_name='resources.html'), name='resources'),
    url(r'^twitterbot/$', TemplateView.as_view(template_name='twitterbot.html'), name='twitterbot'),

    url(r'^manifestos/', include('manifestos.urls', namespace='manifestos')),
    url(r'^annotations/', include('annotations.urls', namespace='annotations')),

    # Let's Encrypt challenge
    url(r'^\.well-known/acme-challenge/(?P<key>.*)/', views.acme_challenge),

    # allauth
    url(r'^accounts/', include('allauth.urls')),
]
|
Python
| 0.000002
|
@@ -35,31 +35,9 @@
port
-%0Afrom __future__ import
+,
uni
|
a5f3ad5700aa766fec99a184bae1d732d0754491
|
Support of HACluster added
|
src/reactive/murano_handlers.py
|
src/reactive/murano_handlers.py
|
# Copyright 2016 Canonical Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import charms_openstack.charm as charm
import charms.reactive as reactive
import charmhelpers.core.hookenv as hookenv
# This charm's library contains all of the handler code associated with
# sdn_charm
import charm.openstack.murano as murano # noqa

charm.use_defaults(
    'charm.installed',
    'amqp.connected',
    'shared-db.connected',
    'identity-service.connected',
    'identity-service.available',  # enables SSL support
    'config.changed',
    'update-status')


COMPLETE_INTERFACE_STATES = [
    'shared-db.available',
    'identity-service.available',
    'amqp.available',
]


@reactive.when(*COMPLETE_INTERFACE_STATES)
def render_config(*args):
    """Render the configuration for charm when all the interfaces are
    available.
    """
    with charm.provide_charm_instance() as charm_class:
        charm_class.render_with_interfaces(args)
        charm_class.assess_status()
    murano.render_novarc_config(args)
    reactive.set_state('config.rendered')


# db_sync checks if sync has been done so rerunning is a noop
@reactive.when('config.rendered')
def init_db():
    with charm.provide_charm_instance() as charm_class:
        charm_class.db_sync()


@reactive.when_not('io-murano.imported')
@reactive.when(*COMPLETE_INTERFACE_STATES)
@reactive.when('config.rendered')
def import_io_murano(*args):
    murano.import_io_murano()
    reactive.set_state('io-murano.imported')
|
Python
| 0
|
@@ -1547,16 +1547,17 @@
ered')%0A%0A
+%0A
# db_syn
@@ -1746,16 +1746,17 @@
sync()%0A%0A
+%0A
@reactiv
@@ -1924,16 +1924,16 @@
urano()%0A
-
reac
@@ -1949,28 +1949,243 @@
state('io-murano.imported')%0A
+%0A%0A@reactive.when('ha.connected')%0Adef cluster_connected(hacluster):%0A murano.configure_ha_resources(hacluster)%0A murano.assess_status()%0A%0A%0A@reactive.hook('upgrade-charm')%0Adef upgrade_charm():%0A murano.install()%0A
|
837aea7b39662a8285df01522461c51ce0f91de5
|
fix suppressions - don't overwrite config setting with super() - be explict in debug log what is going on with filters/suppression
|
nymms/reactor/handlers/Handler.py
|
nymms/reactor/handlers/Handler.py
|
import logging
logger = logging.getLogger(__name__)
from nymms.utils import load_object_from_string

class Handler(object):
    def __init__(self, config=None):
        self.config = config
        self._filters = []
        self._suppressions_enabled = self.config.pop('suppressions_enabled',
                                                     False)

    def _load_filters(self):
        filters = self.config.get('filters', [])
        if filters:
            for filter_string in filters:
                logging.debug("Adding Filter %s to Handler %s.", filter_string,
                              self.__class__.__name__)
                f = load_object_from_string(filter_string)
                self._filters.append(f)
        else:
            logger.debug("No filters configured for Handler %s.",
                         self.__class__.__name__)

    def _filter(self, result, previous_state):
        """ Runs the result & previous state through all the configured
        filters. A filter should be a callable that accepts two arguments:
        the result and the previous state. It should return either True or
        False regarding whether the message should be allowed through the
        handler.
        """
        if not self._filters:
            self._load_filters()
        # Assume that no filters means just that - that the result is
        # not to be filtered for the handler.
        if not self._filters:
            return True
        results = {}
        for f in self._filters:
            try:
                results[f.__name__] = f(result, previous_state)
            except Exception as e:
                logger.exception("Filter %s on Handler %s had an unhandled "
                                 "exception. Ignoring:",
                                 f.__name__, self.__class__.__name__)
                continue
        logger.debug("Handler %s filter results: %s", self.__class__.__name__,
                     results)
        return all(results.values())

    def _process(self, result, previous_state, suppressor_check_method):
        """First checks to see if the given event should be filtered and
        then sees if it passes the suppressor (if enabled). If pass, then
        call the subclass's process() method"""
        if self._filter(result, previous_state):
            if self.suppressions_enabled and suppressor_check_method(result):
                logger.debug("Handler %s filters & suppressors returned true" +
                             " for %s, reacting.", self.__class__.__name__, result.id)
                return self.process(result, previous_state)
        logger.debug("Handler %s filters returned false for %s, skipping.",
                     self.__class__.__name__, result.id)

    def process(self, result, previous_state):
        """ Meant to be overridden by subclasses - should handle the actual
        process of reacting to a result.
        """
        raise NotImplementedError

    @property
    def suppressions_enabled(self):
        """Are suppressions enabled for this handler?"""
        return self._suppressions_enabled
|
Python
| 0.000001
|
@@ -230,33 +230,32 @@
elf._suppression
-s
_enabled = self.
@@ -265,16 +265,29 @@
fig.pop(
+%0A
'suppres
@@ -282,33 +282,32 @@
'suppression
-s
_enabled',%0A
@@ -313,25 +313,168 @@
-
False
+)%0A logger.debug(%22%25s suppression enabled is %25s%22,%0A self.__class__.__name__,%0A self._suppression_enabled
)%0A%0A d
@@ -1880,16 +1880,19 @@
gnoring:
+ %25s
%22,%0A
@@ -1946,32 +1946,35 @@
class__.__name__
+, e
)%0A
@@ -2460,24 +2460,28 @@
if
+not
self.suppres
@@ -2476,33 +2476,277 @@
self.suppression
-s
+_enabled:%0A logger.debug(%22Handler %25s filters returned true%22 +%0A %22 for %25s%22, self.__class__.__name__, result.id)%0A return self.process(result, previous_state)%0A elif self.suppression
_enabled and sup
@@ -2836,17 +2836,16 @@
ppressor
-s
returne
@@ -2866,32 +2866,41 @@
+
+
%22 for %25s, reacti
@@ -2904,16 +2904,45 @@
cting.%22,
+%0A
self.__
@@ -3022,32 +3022,254 @@
previous_state)%0A
+ else:%0A logger.debug(%22Handler %25s suppressor returned false%22 +%0A %22 for %25s, skipping.%22,%0A self.__class__.__name__, result.id)%0A else:%0A
logger.d
@@ -3320,32 +3320,35 @@
%25s, skipping.%22,%0A
+
@@ -3731,26 +3731,25 @@
elf._suppression
-s
_enabled%0A
|
f1d76955dad59c7456b570b0e94329e146a816b3
|
maintain original exception stack trace for raise_and_report PyxlException
|
pyxl/base.py
|
pyxl/base.py
|
#!/usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
import random
from pyxl.utils import escape
from metaserver.common import exclog2_util
from metaserver.common.util import raise_and_report

class PyxlException(Exception):
    pass


class x_base_metaclass(type):
    def __init__(self, name, parents, attrs):
        super(x_base_metaclass, self).__init__(name, parents, attrs)
        x_base_parents = [parent for parent in parents if hasattr(parent, '__attrs__')]
        parent_attrs = x_base_parents[0].__attrs__ if len(x_base_parents) else {}
        self_attrs = self.__dict__.get('__attrs__', {})

        # Dont allow '_' in attr names
        for attr_name in self_attrs:
            assert '_' not in attr_name, (
                "%s: '_' not allowed in attr names, use '-' instead" % attr_name)

        combined_attrs = dict(parent_attrs)
        combined_attrs.update(self_attrs)
        setattr(self, '__attrs__', combined_attrs)
        setattr(self, '__tag__', name[2:])


class x_base(object):

    __metaclass__ = x_base_metaclass

    __attrs__ = {
        # HTML attributes
        'accesskey': unicode,
        'class': unicode,
        'dir': unicode,
        'id': unicode,
        'lang': unicode,
        'style': unicode,
        'tabindex': int,
        'title': unicode,
        'xml:lang': unicode,

        # JS attributes
        'onabort': unicode,
        'onblur': unicode,
        'onchange': unicode,
        'onclick': unicode,
        'ondblclick': unicode,
        'onerror': unicode,
        'onfocus': unicode,
        'onkeydown': unicode,
        'onkeypress': unicode,
        'onkeyup': unicode,
        'onload': unicode,
        'onmousedown': unicode,
        'onmouseenter': unicode,
        'onmouseleave': unicode,
        'onmousemove': unicode,
        'onmouseout': unicode,
        'onmouseover': unicode,
        'onmouseup': unicode,
        'onreset': unicode,
        'onresize': unicode,
        'onselect': unicode,
        'onsubmit': unicode,
        'onunload': unicode,
    }

    def __init__(self, **kwargs):
        self.__attributes__ = {}
        self.__children__ = []

        for name, value in kwargs.iteritems():
            self.set_attr(x_base._fix_attribute_name(name), value)

    def __call__(self, *children):
        self.append_children(children)
        return self

    def get_id(self):
        eid = self.attr('id')
        if not eid:
            eid = 'pyxl%d' % random.randint(0, sys.maxint)
            self.set_attr('id', eid)
        return eid

    def children(self, selector=None):
        if not selector:
            return self.__children__

        # filter by class
        if selector[0] == '.':
            class_name = selector[1:]
            return [c for c in self.__children__
                    if class_name in c.get_class()]

        # filter by id
        if selector[0] == '#':
            id_name = selector[1:]
            return [c for c in self.__children__
                    if c.get_id() == id_name]

        # filter by tag name
        tag_name = 'x_%s' % selector
        return [c for c in self.__children__
                if c.__class__.__name__ == tag_name]

    def append(self, child):
        if type(child) in (list, tuple):
            self.__children__.extend(c for c in child if c is not None and c is not False)
        elif child is not None and child is not False:
            self.__children__.append(child)

    def prepend(self, child):
        if child is not None and child is not False:
            self.__children__.insert(0, child)

    def __getattr__(self, name):
        return self.attr(name.replace('_', '-'))

    def attr(self, name, default=None):
        # this check is fairly expensive (~8% of cost)
        if not self.allows_attribute(name):
            raise PyxlException('<%s> has no attr named "%s"' % (self.__tag__, name))
        return self.__attributes__.get(name, default)

    def set_attr(self, name, value):
        # this check is fairly expensive (~8% of cost)
        if not self.allows_attribute(name):
            raise PyxlException('<%s> has no attr named "%s"' % (self.__tag__, name))

        if value is not None:
            attr_type = self.__attrs__.get(name, unicode)
            try:
                # Validate type of attr and cast to correct type if possible
                value = value if isinstance(value, attr_type) else attr_type(value)
            except Exception:
                exc_type, exc_obj, exc_tb = sys.exc_info()
                exception = PyxlException('incorrect type for "%s" in <%s>. expected %s, got %s' % (
                    name, self.__tag__, attr_type, type(value)))
                if exc_type == UnicodeDecodeError:
                    # special casing unicode errors till we've fixed them all in our logs
                    value = unicode(value, 'utf8')
                    raise_and_report(exception, severity2=exclog2_util.SeverityType.CRITICAL)
                else:
                    raise exception

            self.__attributes__[name] = value
        elif name in self.__attributes__:
            del self.__attributes__[name]

    def get_class(self):
        return self.attr('class', '')

    def add_class(self, xclass):
        if not xclass: return
        current_class = self.attr('class')
        if current_class: current_class += ' ' + xclass
        else: current_class = xclass
        self.set_attr('class', current_class)

    def append_children(self, children):
        for child in children:
            self.append(child)

    def attributes(self):
        return self.__attributes__

    def set_attributes(self, attrs_dict):
        for name, value in attrs_dict.iteritems():
            self.set_attr(name, value)

    def allows_attribute(self, name):
        return (name in self.__attrs__ or name.startswith('data-') or name.startswith('aria-'))

    def to_string(self):
        raise NotImplementedError()

    def __str__(self):
        return self.to_string()

    def __unicode__(self):
        return self.to_string()

    @staticmethod
    def render_child(child):
        if isinstance(child, x_base): return child.to_string()
        if child is None: return u''
        return escape(child)

    @staticmethod
    def _fix_attribute_name(name):
        if name == 'xclass': return 'class'
        if name == 'xfor': return 'for'
        return name.replace('_', '-').replace('COLON', ':')
|
Python
| 0
|
@@ -5068,34 +5068,14 @@
-exception = PyxlException(
+msg =
'inc
@@ -5192,16 +5192,62 @@
(value))
+%0A exception = PyxlException(msg
)%0A%0A
@@ -5433,16 +5433,82 @@
'utf8')%0A
+ severity = exclog2_util.SeverityType.CRITICAL%0A
@@ -5561,42 +5561,31 @@
ty2=
-exclog2_util.SeverityType.CRITICAL
+severity, exc_tb=exc_tb
)%0A
@@ -5639,16 +5639,30 @@
xception
+, None, exc_tb
%0A%0A
|
a839626fbb4374bcf8c3e23f97941bdbc7b15d61
|
fix conflict
|
qiniu/rpc.py
|
qiniu/rpc.py
|
# -*- coding: utf-8 -*-
import httplib
import json

class Client(object):
    _conn = None
    _header = None

    def __init__(self, host):
        self._conn = httplib.HTTPConnection(host)
        self._header = {}

    def round_tripper(self, method, path, body):
        self._conn.request(method, path, body, self._header)
        resp = self._conn.getresponse()
        return resp

    def call(self, path):
        return self.call_with(path, None)

    def call_with(self, path, body, content_type=None, content_length=None):
        ret = None

        self.set_header("User-Agent", config.USER_AGENT)
        if content_type is not None:
            self.set_header("Content-Type", content_type)
        if content_length is not None:
            self.set_header("Content-Length", content_length)
        resp = self.round_tripper("POST", path, body)
        try:
            ret = resp.read()
            ret = json.loads(ret)
        except IOError, e:
            return None, e
        except ValueError:
            pass

        if resp.status / 100 != 2:
            err_msg = ret if "error" not in ret else ret["error"]
            detail = resp.getheader("x-log", None)
            if detail is not None:
                err_msg += ", detail:%s" % detail
            return None, err_msg
        return ret, None

    def call_with_multipart(self, path, fields=None, files=None):
        """
         * fields => [(key, value)]
         * files => [(key, filename, value)]
        """
        content_type, body = self.encode_multipart_formdata(fields, files)
        return self.call_with(path, body, content_type, len(body))

    def call_with_form(self, path, ops):
        """
         * ops => {"key": value/list()}
        """
        body = []
        for i in ops:
            if isinstance(ops[i], (list, tuple)):
                data = ('&%s=' % i).join(ops[i])
            else:
                data = ops[i]
            body.append('%s=%s' % (i, data))
        body = '&'.join(body)

        content_type = "application/x-www-form-urlencoded"
        return self.call_with(path, body, content_type, len(body))

    def set_header(self, field, value):
        self._header[field] = value

    def set_headers(self, headers):
        self._header.update(headers)

    def encode_multipart_formdata(self, fields, files):
        """
         * fields => [(key, value)]
         * files => [(key, filename, value)]
         * return content_type, body
        """
        if files is None:
            files = []
        if fields is None:
            fields = []

        BOUNDARY = '----------ThIs_Is_tHe_bouNdaRY_$'
        CRLF = '\r\n'
        L = []
        for (key, value) in fields:
            L.append('--' + BOUNDARY)
            L.append('Content-Disposition: form-data; name="%s"' % key)
            L.append('')
            L.append(value)
        for (key, filename, value) in files:
            L.append('--' + BOUNDARY)
            disposition = "Content-Disposition: form-data;"
            L.append('%s name="%s"; filename="%s"' % (disposition, key, filename))
            L.append('Content-Type: application/octet-stream')
            L.append('')
            L.append(value)
        L.append('--' + BOUNDARY + '--')
        L.append('')
        body = CRLF.join(L)
        content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
        return content_type, body


# def sign(secret, data):
#     hashed = hmac.new(secret, data, sha1)
#     return urlsafe_b64encode(hashed.digest())

# def sign_json(access, secret, data):
#     data = urlsafe_b64encode(json.dumps(data, separators=(',',':')))
#     return '%s:%s:%s' % (access, sign(secret, data), data)
|
Python
| 0.031708
|
@@ -43,16 +43,30 @@
ort json
+%0Aimport config
%0A%0Aclass
|
6280dbff42c47350ae88719c3a876ea03e7ebb6e
|
Add reuse_backend argument to pre-made editor constructors
|
pyqode/python/widgets/code_edit.py
|
pyqode/python/widgets/code_edit.py
|
# -*- coding: utf-8 -*-
"""
This package contains the python code editor widget
"""
import sys
from pyqode.core.api import ColorScheme
from pyqode.python.backend import server
from pyqode.qt import QtCore, QtGui
from pyqode.core import api
from pyqode.core import modes
from pyqode.core import panels
from pyqode.python import managers as pymanagers
from pyqode.python import modes as pymodes
from pyqode.python import panels as pypanels
from pyqode.python.folding import PythonFoldDetector

class PyCodeEditBase(api.CodeEdit):
    """
    Base class for creating a python code editor widget. The base class
    takes care of setting up the syntax highlighter.

    .. note:: This code editor widget use PEP 0263 to detect file encoding.
              If the opened file does not respects the PEP 0263,
              :py:func:`locale.getpreferredencoding` is used as the default
              encoding.
    """
    def __init__(self, parent=None, create_default_actions=True,
                 color_scheme='qt'):
        super(PyCodeEditBase, self).__init__(parent, create_default_actions)
        self.file = pymanagers.PyFileManager(self)

    def setPlainText(self, txt, mimetype='text/x-python', encoding='utf-8'):
        """
        Extends QCodeEdit.setPlainText to allow user to setPlainText without
        mimetype (since the python syntax highlighter does not use it).
        """
        self.syntax_highlighter.docstrings[:] = []
        self.syntax_highlighter.import_statements[:] = []
        super(PyCodeEditBase, self).setPlainText(txt, mimetype, encoding)


class PyCodeEdit(PyCodeEditBase):
    """
    Extends PyCodeEditBase with a set of hardcoded modes and panels specifics
    to a python code editor widget.
    """
    DARK_STYLE = 0
    LIGHT_STYLE = 1

    mimetypes = ['text/x-python']

    def __init__(self, parent=None, server_script=server.__file__,
                 interpreter=sys.executable, args=None,
                 create_default_actions=True, color_scheme='qt'):
        super(PyCodeEdit, self).__init__(
            parent=parent, create_default_actions=create_default_actions,
            color_scheme=color_scheme)
        self.backend.start(server_script, interpreter, args)
        self.setLineWrapMode(self.NoWrap)
        self.setWindowTitle("pyQode - Python Editor")

        # install those modes first as they are required by other modes/panels
        self.modes.append(pymodes.DocumentAnalyserMode())

        # panels
        self.panels.append(panels.FoldingPanel())
        self.panels.append(panels.LineNumberPanel())
        self.panels.append(panels.CheckerPanel())
        self.panels.append(panels.GlobalCheckerPanel(),
                           panels.GlobalCheckerPanel.Position.RIGHT)
        self.panels.append(panels.SearchAndReplacePanel(),
                           panels.SearchAndReplacePanel.Position.BOTTOM)
        self.panels.append(panels.EncodingPanel(), api.Panel.Position.TOP)
        self.add_separator()
        self.panels.append(pypanels.QuickDocPanel(), api.Panel.Position.BOTTOM)

        # modes
        # generic
        self.modes.append(modes.CaretLineHighlighterMode())
        self.modes.append(modes.FileWatcherMode())
        self.modes.append(modes.RightMarginMode())
        self.modes.append(modes.ZoomMode())
        self.modes.append(modes.SymbolMatcherMode())
        self.modes.append(modes.CodeCompletionMode())
        self.modes.append(modes.OccurrencesHighlighterMode())
        self.modes.append(modes.SmartBackSpaceMode())
        self.modes.append(modes.ExtendedSelectionMode())
        # python specifics
        self.modes.append(pymodes.PyAutoIndentMode())
        self.modes.append(pymodes.PyAutoCompleteMode())
        self.modes.append(pymodes.FrostedCheckerMode())
        self.modes.append(pymodes.PEP8CheckerMode())
        self.modes.append(pymodes.CalltipsMode())
        self.modes.append(pymodes.PyIndenterMode())
        self.modes.append(pymodes.GoToAssignmentsMode())
        self.modes.append(pymodes.CommentsMode())
        self.modes.append(pymodes.PythonSH(
            self.document(), color_scheme=ColorScheme(color_scheme)))
        self.syntax_highlighter.fold_detector = PythonFoldDetector()

    def clone(self):
        clone = self.__class__(
            parent=self.parent(), server_script=self.backend.server_script,
            interpreter=self.backend.interpreter, args=self.backend.args,
            color_scheme=self.syntax_highlighter.color_scheme.name)
        return clone

    def __repr__(self):
        return 'PyCodeEdit(path=%r)' % self.file.path
|
Python
| 0
|
@@ -973,44 +973,8 @@
True
-,%0A color_scheme='qt'
):%0A
@@ -1961,19 +1961,57 @@
eme='qt'
+,%0A reuse_backend=False
):%0A
-
@@ -2120,47 +2120,8 @@
ions
-,%0A color_scheme=color_scheme
)%0A
@@ -2177,16 +2177,64 @@
er, args
+,%0A reuse=reuse_backend
)%0A
|
f5d948c159a4d398a1347220a4fcd4315c725b04
|
Fix issue handling Image as a paint source
|
pyrtist/pyrtist/lib2d/primitive.py
|
pyrtist/pyrtist/lib2d/primitive.py
|
__all__ = ('Primitive',)
from .core_types import Point
from .style import Color, Stroke, Fill, StrokeStyle, Style
from .path import Path
from .base import Taker, combination
from .cmd_stream import CmdStream, Cmd
from .window import Window
from .bbox import BBox

class Primitive(Taker):
    def __init__(self, *args):
        super(Primitive, self).__init__()
        self.style = Style()
        self.take(*args)

    def build_path(self):
        return []


@combination(Color, Primitive)
@combination(StrokeStyle, Primitive)
@combination(Style, Primitive)
def style_at_primitive(style, primitive):
    primitive.style.take(style)


@combination(Primitive, Path)
def primitive_at_path(primitive, path):
    path.cmd_stream.take(*primitive.build_path())


@combination(Primitive, CmdStream)
def primitive_at_cmd_stream(primitive, cmd_stream):
    cmd_stream.take(Path(primitive), primitive.style)


@combination(Primitive, Window)
def primitive_at_window(primitive, window):
    window.take(CmdStream(primitive))


@combination(Primitive, Stroke)
def primitive_at_stroke(primitive, stroke):
    stroke.take(Path(primitive))


@combination(Primitive, Fill)
def primitive_at_fill(primitive, fill):
    fill.take(Path(primitive))


@combination(Primitive, BBox)
def primitive_at_bbox(primitive, bbox):
    bbox.take(Window(primitive))
|
Python
| 0.000001
|
@@ -71,15 +71,8 @@
port
- Color,
Str
@@ -101,16 +101,45 @@
, Style%0A
+from .pattern import Pattern%0A
from .pa
@@ -495,13 +495,15 @@
ion(
-Color
+Pattern
, Pr
|
91064ed8d7c6b6ab7eb8bb9da94136ba34e8a2e5
|
use length validator on description
|
abilian/sbe/apps/communities/forms.py
|
abilian/sbe/apps/communities/forms.py
|
import imghdr
from string import strip
import PIL
from flask import request
from flask.ext.babel import lazy_gettext as _l, gettext as _
from wtforms.fields import BooleanField, TextField, TextAreaField
from wtforms.validators import ValidationError, required
from abilian.web.forms import Form
from abilian.web.forms.fields import Select2Field, FileField
from abilian.web.forms.widgets import TextArea, ImageInput, BooleanWidget
from .models import Community

class CommunityForm(Form):
    name = TextField(label=_l(u"Name"), validators=[required()])
    description = TextAreaField(label=_l(u"Description"), validators=[required()],
                                widget=TextArea(resizeable="vertical"))
    image = FileField(label=_l('Image'), widget=ImageInput(width=65, height=65),
                      allow_delete=False)
    type = Select2Field(label=_(u"Type"), validators=[required()],
                        filters=(strip,),
                        choices=[(_l(u'informative'), 'informative'),
                                 (_l(u'participative'), 'participative')])
    has_documents = BooleanField(label=_l(u"Has documents"), widget=BooleanWidget(on_off_mode=True))
    has_wiki = BooleanField(label=_l(u"Has a wiki"), widget=BooleanWidget(on_off_mode=True))
    has_forum = BooleanField(label=_l(u"Has a forum"), widget=BooleanWidget(on_off_mode=True))
    is_crm_visible = BooleanField(label=_l(u'visible in CRM'), widget=BooleanWidget(on_off_mode=True))

    def validate_name(self, field):
        name = field.data = field.data.strip()
        if name and field.object_data:
            # form is bound to an existing object, name is not empty
            if name != field.object_data:
                # name changed: check for duplicates
                if len(list(Community.query.filter(Community.name==name).values('id'))) > 0:
                    raise ValidationError(_(u"A community with this name already exists"))

    def validate_description(self, field):
        field.data = field.data.strip()

    # FIXME: code duplicated from the user edit form (UserProfileForm).
    # Needs to be refactored.
    def validate_image(self, field):
        data = request.files.get('image')
        if not data:
            return

        filename = data.filename
        valid = any(map(filename.lower().endswith, ('.png', '.jpg', '.jpeg')))

        if not valid:
            raise ValidationError(_(u'Only PNG or JPG image files are accepted'))

        img_type = imghdr.what('ignored', data.read())

        if not img_type in ('png', 'jpeg'):
            raise ValidationError(_(u'Only PNG or JPG image files are accepted'))

        data.stream.seek(0)

        try:
            # check this is actually an image file
            im = PIL.Image.open(data.stream)
            im.load()
        except:
            raise ValidationError(_(u'Could not decode image file'))

        data.stream.seek(0)
        field.data = data
|
Python
| 0.000002
|
@@ -424,16 +424,64 @@
anWidget
+%0Afrom abilian.web.forms.validators import length
%0A%0Afrom .
@@ -622,24 +622,31 @@
xtAreaField(
+%0A
label=_l(u%22D
@@ -658,16 +658,22 @@
ption%22),
+%0A
validat
@@ -691,35 +691,28 @@
ed()
-%5D,%0A
+, length(max=500)%5D,%0A
@@ -751,16 +751,17 @@
rtical%22)
+,
)%0A%0A ima
|
848e12dde9685cf1c6e44178bb0f3eff9d4203be
|
Fix migrations
|
actistream/migrations/0001_initial.py
|
actistream/migrations/0001_initial.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2016-09-15 20:40
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion

class Migration(migrations.Migration):

    initial = True

    dependencies = [
        ('contenttypes', '0002_remove_content_type_name'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Activity',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('actor_id', models.PositiveIntegerField()),
                ('target_id', models.PositiveIntegerField()),
                ('action_object_id', models.PositiveIntegerField()),
                ('type', models.CharField(max_length=100, verbose_name='type')),
                ('flags', models.BigIntegerField(default=0)),
                ('extra_data', models.TextField(blank=True, verbose_name='additional data')),
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='created at')),
                ('action_object_ct', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='contenttypes.ContentType')),
                ('actor_ct', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='contenttypes.ContentType')),
                ('target_ct', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='contenttypes.ContentType')),
            ],
        ),
        migrations.CreateModel(
            name='Notice',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(verbose_name='created at')),
                ('read_at', models.DateTimeField(blank=True, null=True, verbose_name='read at')),
                ('archived_at', models.DateTimeField(blank=True, null=True, verbose_name='archived at')),
                ('activity', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='actistream.Activity')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL, verbose_name='user')),
            ],
        ),
    ]
|
Python
| 0.000006
|
@@ -509,16 +509,103 @@
ivity',%0A
+ options=%7B'verbose_name': 'activity', 'verbose_name_plural': 'activities'%7D,%0A
@@ -1853,16 +1853,98 @@
otice',%0A
+ options=%7B'verbose_name': 'notice', 'verbose_name_plural': 'notices'%7D,%0A
|
9783844b1597598fad833794b4b291fce49438d4
|
Send alerts as one mail
|
app/hr/tasks.py
|
app/hr/tasks.py
|
from django.conf import settings
import logging
from datetime import datetime, timedelta
from celery.decorators import task
from hr.utils import blacklist_values
from django.contrib.auth.models import User
from django.core.mail import send_mail

@task(ignore_result=True)
def blacklist_check():
    log = blacklist_check.get_logger()

    users = User.objects.filter(is_active=True)

    for u in users:
        if u.groups.count() > 0:
            # Has groups
            val = blacklist_values(u)
            if len(val) > 0:
                # Report possible issue
                log.warning("Suspect User: %s, %s entries found: %s" % (u.username, len(val), val))
                blstr = ""
                for i in val:
                    blstr = "%s%s - %s - %s\n" % (blstr, i.get_type_display(), i.value, i.reason)
                msg = "Suspect User found: %s\nGroups: %s\nBlacklist Items:\n\n%s" % (u.username, ", ".join(u.groups.all().values_list('name', flat=True)), blstr)
                send_mail('Automated blacklist checker alert - %s' % u.username, msg, 'blacklist@pleaseignore.com', ['abuse@pleaseignore.com'])
|
Python
| 0
|
@@ -377,16 +377,45 @@
=True)%0A%0A
+ alerts = 0%0A msg = %22%22%0A%0A
for
@@ -426,16 +426,16 @@
users:%0A
-
@@ -547,24 +547,52 @@
n(val) %3E 0:%0A
+ alerts += 1%0A
@@ -875,16 +875,17 @@
reason)%0A
+%0A
@@ -896,16 +896,56 @@
msg
++= %22%5Cn%5Cn-----%5Cn%5Cn%22%0A msg +
= %22Suspe
@@ -1079,24 +1079,32 @@
blstr)%0A
+%0A
-
+if alerts:%0A
@@ -1151,27 +1151,10 @@
lert
- - %25s' %25 u.username
+s'
, ms
|
0dfd0ec2beb069d56d7b81911bb468199565672a
|
remove print
|
python/ccxtpro/base/fast_client.py
|
python/ccxtpro/base/fast_client.py
|
"""A faster version of aiohttp's websocket client that uses select and other optimizations"""
import asyncio
import collections
from ccxt import NetworkError
from ccxtpro.base.aiohttp_client import AiohttpClient

class FastClient(AiohttpClient):
    transport = None

    def __init__(self, url, on_message_callback, on_error_callback, on_close_callback, config={}):
        super(FastClient, self).__init__(url, on_message_callback, on_error_callback, on_close_callback, config)
        # instead of using the deque in aiohttp we implement our own for speed
        # https://github.com/aio-libs/aiohttp/blob/1d296d549050aa335ef542421b8b7dad788246d5/aiohttp/streams.py#L534
        self.stack = collections.deque()

    def receive_loop(self):
        def handler():
            if not self.stack:
                return
            message = self.stack.popleft()
            self.handle_message(message)
            self.asyncio_loop.call_soon(handler)

        def feed_data(message, size):
            if not self.stack:
                self.asyncio_loop.call_soon(handler)
            self.stack.append(message)

        def feed_eof():
            self.on_error(NetworkError(1006))

        def wrapper(func):
            def parse_frame(buf):
                while len(self.stack) > 1:
                    self.handle_message(self.stack.popleft())
                return func(buf)
            return parse_frame

        connection = self.connection._conn
        if connection.closed:
            # connection got terminated after the connection was made and before the receive loop ran
            self.on_close(1006)
            return
        self.transport = connection.transport
        ws_reader = connection.protocol._payload_parser
        ws_reader.parse_frame = wrapper(ws_reader.parse_frame)
        ws_reader.queue.feed_data = feed_data
        ws_reader.queue.feed_eof = feed_eof
        # return a future so super class won't complain
        return asyncio.sleep(0)

    def reset(self, error):
        super(FastClient, self).reset(error)
        self.stack.clear()
        if self.transport:
            self.transport.abort()

    def resolve(self, result, message_hash=None):
        super(FastClient, self).resolve(result, message_hash)
        print('resolved', message_hash)
|
Python
| 0.000793
|
@@ -2151,157 +2151,4 @@
t()%0A
-%0A def resolve(self, result, message_hash=None):%0A super(FastClient, self).resolve(result, message_hash)%0A print('resolved', message_hash)%0A
|
d51adea3d19578da9165202696d80c44949c43f6
|
remove debug level logging from i2tun.py
|
i2tun/i2tun.py
|
i2tun/i2tun.py
|
#!/usr/bin/env python3.4
from i2p.i2cp import client as i2cp
import pytun
import threading
import logging
import struct
import select

class IPV4Handler(i2cp.I2CPHandler):

    def __init__(self, remote_dest, our_addr, their_addr, mtu):
        self._them = remote_dest
        self._iface = pytun.TunTapDevice()
        self._iface.addr = our_addr
        self._iface.dstaddr = their_addr
        self._iface.mtu = mtu
        self._iface.up()

    def session_made(self, con):
        print ('we are {}'.format(con.dest.base32()))
        self.con = con
        threading.Thread(target=self.mainloop, args=(con,)).start()

    def mainloop(self, con):
        while True:
            print ('read')
            buff = self._iface.read(self._iface.mtu)
            print ('send')
            self.con.send_dgram(self._them, buff)

    def got_dgram(self, dest, data, srcport, dstport):
        if dest.base32() == self._them:
            self._iface.write(data)


def main():
    import argparse
    import logging
    logging.basicConfig(level=logging.DEBUG)
    ap = argparse.ArgumentParser()
    ap.add_argument('--remote', required=True, type=str)
    ap.add_argument('--our-addr', required=True, type=str)
    ap.add_argument('--their-addr', required=True, type=str)
    ap.add_argument('--mtu', default=3600 ,type=int)
    ap.add_argument('--i2cp-host', default='127.0.0.1', type=str)
    ap.add_argument('--i2cp-port', default=7654, type=int)
    args = ap.parse_args()
    handler = IPV4Handler(args.remote, args.our_addr, args.their_addr, args.mtu)
    con = i2cp.Connection(handler, i2cp_host=args.i2cp_host, i2cp_port=args.i2cp_port)
    con.open()
    con.start()


if __name__ == '__main__':
    main()
|
Python
| 0.000001
|
@@ -800,16 +800,20 @@
on.send_
+dsa_
dgram(se
@@ -1006,72 +1006,8 @@
rse%0A
- import logging%0A logging.basicConfig(level=logging.DEBUG)%0A
|
a33ee32ec3d1ac8f239a36ac2459c7985ead2a50
|
Remove some unnecessary usage of `pytype: skip-file`.
|
tensorflow_federated/python/tensorflow_libs/variable_utils.py
|
tensorflow_federated/python/tensorflow_libs/variable_utils.py
|
# Copyright 2020, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# # pytype: skip-file
# This modules disables the Pytype analyzer, see
# https://github.com/tensorflow/federated/blob/main/docs/pytype.md for more
# information.
"""Library of helper functions for working with TensorFlow `tf.Variable`."""
import contextlib
import operator
import numpy as np
import tensorflow as tf

@contextlib.contextmanager
def record_variable_creation_scope():
  """Creates a single use contextmanager for capture variable creation calls."""
  variable_list = []

  def logging_variable_creator(next_creator, **kwargs):
    variable = next_creator(**kwargs)
    variable_list.append(variable)
    return variable

  with contextlib.ExitStack() as stack:
    stack.enter_context(tf.variable_creator_scope(logging_variable_creator))
    yield variable_list


class TensorVariable:
  """A class that is duck-typed to `tf.Variable` but only uses `tf.Tensor`.

  This class implements the interface contract of `tf.Variable`, for
  documentation see http://www.tensorflow.org/api_docs/python/tf/Variable. To
  be true to the API sometimes arguments are ignored (e.g. `use_locking`).

  This is intended for creating `tff.learning.models.FunctionalModel` and is
  *not* compatible with `tf.distribute` strategies.

  IMPORTANT: this class behaves as if
  `tf.autograph.experimental.Feature.AUTO_CONTROL_DEPS` (ACD) was applied, which
  is the same behavior as inside a `tf.function`. This may have surprising
  side-effects if code authors were not expecting it, but also is more similar
  to standard Python code where the line ordering implies execution ordering.
  """

  def __init__(self,
               initial_value,
               dtype=None,
               validate_shape=True,
               shape=None,
               **kwargs):
    """For details see https://www.tensorflow.org/api_docs/python/tf/Variable#args_1."""
    del kwargs  # Unused.
    if callable(initial_value):
      if dtype is None:
        raise ValueError('When `initial_value` is a callable, `dtype` must be '
                         'specified.')
      initial_value = initial_value()
    if tf.is_tensor(initial_value):
      self._initial_value = initial_value
    else:
      if dtype is not None:
        self._initial_value = tf.convert_to_tensor(initial_value, dtype)
      else:
        self._initial_value = tf.convert_to_tensor(initial_value)
    self._tensor = self._initial_value
    self._validate_shape = validate_shape
    if shape is None:
      self._shape = self._initial_value.shape
    else:
      if not isinstance(shape, tf.TensorShape):
        shape = tf.TensorShape(shape)
      self._shape = shape
    self._check_shape(self._tensor)

  @property
  def shape(self):
    return self._tensor.shape

  @property
  def dtype(self):
    return self._tensor.dtype

  def assign(self, value, use_locking=False, name=None, read_value=True):
    del use_locking  # Unused.
    value = tf.convert_to_tensor(value)
    self._check_shape(value)
    self._tensor = value
    if tf.executing_eagerly() and not read_value:
      return None
    else:
      return tf.identity(self._tensor, name)

  def _check_shape(self, value):
    if not self._validate_shape:
      return
    if not self._shape.is_compatible_with(tf.TensorShape(value.shape)):
      raise ValueError(
          'Cannot assign value to variable {self!r}. The TensorVariable shape '
          f'{self._shape}, and the value shape {value.shape} are incompatible.')

  def assign_add(self, value, use_locking=False, name=None, read_value=True):
    del use_locking  # Unused.
    value = tf.convert_to_tensor(value)
    self._check_shape(value)
    self._tensor = tf.math.add(self._tensor, value, name=name)
    if tf.executing_eagerly() and not read_value:
      return None
    else:
      return self._tensor

  def assign_sub(self, value, use_locking=False, name=None, read_value=True):
    del use_locking  # Unused.
    value = tf.convert_to_tensor(value)
    self._check_shape(value)
    self._tensor = tf.math.subtract(self._tensor, value, name=name)
    if tf.executing_eagerly() and not read_value:
      return None
    else:
      return self._tensor

  def get_shape(self):
    return self._tensor.shape

  def read_value(self):
    return self._tensor

  def value(self):
    return self._tensor

  def ref(self):
    return self._tensor.ref()

  def __abs__(self):
    return operator.__abs__(self._tensor)

  def __add__(self, value):
    return operator.__add__(self._tensor, value)

  def __sub__(self, value):
    return operator.__sub__(self._tensor, value)

  def __eq__(self, value):
    return operator.__eq__(self._tensor, value)

  def __ne__(self, value):
    return operator.__ne__(self._tensor, value)

  def __ge__(self, value):
    return operator.__ge__(self._tensor, value)

  def __gt__(self, value):
    return operator.__gt__(self._tensor, value)

  def __le__(self, value):
    return operator.__le__(self._tensor, value)

  def __lt__(self, value):
    return operator.__lt__(self._tensor, value)

  def __getitem__(self, slice_spec):
    return self._tensor[slice_spec]

  def __invert__(self):
    return operator.__invert__(self._tensor)

  def __mul__(self, value):
    return operator.__mul__(self._tensor, value)

  def __neg__(self):
    return operator.__neg__(self._tensor)

  def __truediv__(self, value):
    return operator.__truediv__(self._tensor, value)

  def __floordiv__(self, value):
    return operator.__floordiv__(self._tensor, value)

  def __pow__(self, value):
    return operator.__pow__(self._tensor, value)

  def __hash__(self):
    raise TypeError(f'TensorVariable {self!r} is unhashable. Instead, use '
                    'tensorvariable.ref() as the key.')

  def __repr__(self) -> str:
    return f'<TensorVariable: {self._tensor}>'

  def __array__(self):
    return np.array(self._tensor)


def create_tensor_variable(next_creator_fn, **kwargs):
  del next_creator_fn  # Unused.
  initial_value = kwargs.pop('initial_value')
  return TensorVariable(initial_value, **kwargs)


def _convert_tensor_variable_to_tensor(value, *args, **kwargs):
  del args  # unused
  del kwargs  # unused
  return value.read_value()


tf.register_tensor_conversion_function(TensorVariable,
                                       _convert_tensor_variable_to_tensor)
|
Python
| 0.999987
|
@@ -596,170 +596,8 @@
se.%0A
-# # pytype: skip-file%0A# This modules disables the Pytype analyzer, see%0A# https://github.com/tensorflow/federated/blob/main/docs/pytype.md for more%0A# information.%0A
%22%22%22L
|
abe4f0577baef3dbbceb06fc6d569d2bec69257e
|
Fix internal import
|
tensorflow_probability/python/internal/backend/jax/rewrite.py
|
tensorflow_probability/python/internal/backend/jax/rewrite.py
|
# Copyright 2019 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Rewrite script for NP->JAX."""
from __future__ import absolute_import
from __future__ import division
# [internal] enable type annotations
from __future__ import print_function
from absl import app
def main(argv):
contents = open(argv[1]).read()
contents = contents.replace(
"tensorflow_probability.python.internal.backend.numpy",
"tensorflow_probability.python.internal.backend.jax")
contents = contents.replace(
"from tensorflow_probability.python.internal.backend import numpy",
"from tensorflow_probability.python.internal.backend import jax")
contents = contents.replace("scipy.linalg", "jax.scipy.linalg")
contents = contents.replace("scipy.special", "jax.scipy.special")
contents = contents.replace(
"MODE_JAX = False",
"MODE_JAX = True\n"
"from jax.config import config; config.update('jax_enable_x64', True)")
contents = contents.replace("\nimport numpy as np",
"\nimport numpy as onp\nimport jax.numpy as np")
contents = contents.replace("np.bool", "onp.bool")
contents = contents.replace("np.dtype", "onp.dtype")
contents = contents.replace("np.generic", "onp.generic")
contents = contents.replace("np.broadcast", "onp.broadcast")
contents = contents.replace("JAX_MODE = False", "JAX_MODE = True")
print(contents)
if __name__ == "__main__":
app.run(main)
|
Python
| 0.000012
|
@@ -853,16 +853,38 @@
nction%0A%0A
+# Dependency imports%0A%0A
from abs
|
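The rewrite script above is pure textual substitution; here is a small illustrative run of two of its replacements (the input string is made up). The numpy import is split so that np becomes jax.numpy, while onp keeps real NumPy for the np.dtype-style attribute lookups the script rewrites to onp.*.

# Illustrative input, not a real backend file.
src = "x = 1\nimport numpy as np\nJAX_MODE = False\n"
src = src.replace("\nimport numpy as np",
                  "\nimport numpy as onp\nimport jax.numpy as np")
src = src.replace("JAX_MODE = False", "JAX_MODE = True")
print(src)
# x = 1
# import numpy as onp
# import jax.numpy as np
# JAX_MODE = True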
695e171d1eca459075ad03adf0712f5b7427cac4
|
Add get_or_404() to __all__
|
flask_simon/__init__.py
|
flask_simon/__init__.py
|
__all__ = ('Simon',)
import simon.connection
from flask import abort
from pymongo import uri_parser
class Simon(object):
def __init__(self, app=None):
if app is not None:
self.init_app(app)
def init_app(self, app):
if 'simon' not in app.extensions:
app.extensions['simon'] = {}
if 'MONGO_URI' in app.config:
parsed = uri_parser.parse_uri(app.config['MONGO_URI'])
if not parsed.get('database'):
raise ValueError('MONGO_URI does not contain a database name.')
app.config['MONGO_DBNAME'] = parsed['database']
app.config['MONGO_USERNAME'] = parsed['username']
app.config['MONGO_PASSWORD'] = parsed['password']
app.config['REPLICA_SET'] = parsed['options'].get('replica_set')
host = app.config['MONGO_URI']
name = app.config['MONGO_DBNAME']
username = app.config['MONGO_USERNAME']
password = app.config['MONGO_PASSWORD']
replica_set = app.config['REPLICA_SET']
simon.connection.connect(host_or_uri=host, name=name,
username=username, password=password,
replica_set=replica_set)
else:
host = app.config['HOST'] = 'localhost'
name = app.config['MONGO_DBNAME'] = app.name
simon.connection.connect(host=host, name=name)
def get_or_404(model, *qs, **fields):
try:
return model.get(*qs, **fields)
except (model.NoDocumentFound, model.MultipleDocumentsFound):
abort(404)
|
Python
| 0
|
@@ -1,51 +1,4 @@
-__all__ = ('Simon',)%0A%0Aimport simon.connection%0A%0A
from
@@ -48,16 +48,75 @@
_parser%0A
+import simon.connection%0A%0A__all__ = ('Simon', 'get_or_404')%0A
%0A%0Aclass
|
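A hedged usage sketch for the get_or_404 helper exported above; the User model, its Meta layout, and the Mongo URI are illustrative, not taken from the record.

import simon
from flask import Flask
from flask_simon import Simon, get_or_404

app = Flask(__name__)
app.config['MONGO_URI'] = 'mongodb://localhost/example'  # illustrative URI
Simon(app)

class User(simon.Model):            # hypothetical model for the example
    class Meta:
        collection = 'users'

@app.route('/users/<username>')
def user_detail(username):
    # Aborts with 404 on NoDocumentFound or MultipleDocumentsFound.
    user = get_or_404(User, username=username)
    return user.username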
acdb6bbba1d6114f6ccf9dfc3307905fc88e17bb
|
Put the updated format into the Cryomagnetics device test.
|
tests/unit/test_devices/test_abstract_cryomagnetics_device.py
|
tests/unit/test_devices/test_abstract_cryomagnetics_device.py
|
"""
Contains unit tests for :mod:`mr_freeze.devices.abstract_cryomagnetics_device`
"""
import unittest
from mr_freeze.devices.abstract_cryomagnetics_device import \
AbstractCryomagneticsDevice
class ConcreteCryomagneticsDevice(AbstractCryomagneticsDevice):
was_read_called = False
data_to_read = None
written_message = None
was_write_called = False
def __init__(self):
pass
@property
def terminator(self):
return self._terminator
@terminator.setter
def terminator(self, terminator):
self._terminator = terminator
def read(self, *args, **kwargs):
self.was_read_called = True
return self.data_to_read
def write(self, message):
self.written_message = message
self.was_write_called = True
def reset(self):
self.was_read_called = False
self.data_to_read = None
self.written_message = None
self.was_write_called = False
class TestAbstractCryomagneticsDevice(unittest.TestCase):
def setUp(self):
self.device = ConcreteCryomagneticsDevice()
def tearDown(self):
self.device.reset()
class TestQuery(TestAbstractCryomagneticsDevice):
command = "enter"
expected_response = "data"
data_to_read = "%s\r\n%s" % (
command, expected_response
)
def setUp(self):
TestAbstractCryomagneticsDevice.setUp(self)
self.device.data_to_read = self.data_to_read
def test_query(self):
self.assertEqual(
self.expected_response,
self.device.query(self.command)
)
self.assertTrue(
self.device.was_write_called
)
self.assertTrue(
self.device.was_read_called
)
class TestParseQuery(TestAbstractCryomagneticsDevice):
command = "Testing"
data_format = "%s\r\n%s"
def test_command_not_echoed_command(self):
bad_echo = "String1"
self.assertNotEqual(self.command, bad_echo)
data_to_return = self.data_format % (bad_echo, "Response")
with self.assertRaises(RuntimeError):
self.device.parse_query(self.command, data_to_return)
def test_command_bad_response(self):
data_to_read = "%s%s" % (self.command, "response")
with self.assertRaises(RuntimeError):
self.device.parse_query(self.command, data_to_read)
|
Python
| 0
|
@@ -1276,16 +1276,20 @@
%25s%5Cr%5Cn%25s
+%5Cr%5Cn
%22 %25 (%0A
@@ -1856,16 +1856,20 @@
%25s%5Cr%5Cn%25s
+%5Cr%5Cn
%22%0A%0A d
|
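The diff appends a trailing \r\n to both fixtures, so the assumed reply framing becomes command echo plus response, each CRLF-terminated. A standalone sketch of parsing that framing, using the payload strings from the test above:

# Reconstructed framing, an assumption from the diff: "<echo>\r\n<data>\r\n".
raw = "enter\r\ndata\r\n"
echo, response = raw.rstrip("\r\n").split("\r\n", 1)
assert echo == "enter"
assert response == "data"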
24ee61ecf5767d10b2fb92acc5d0217ffbfb3834
|
Update get_branches.py
|
Group8/get_branches.py
|
Group8/get_branches.py
|
from pyfbsdk import *
import math
'''
This file is to read all branches of both target and source skeleton
This should be using motion-builder
I used People.FBX as a testcase
'''
def get_banch(parents, children, index, branches):
parents.append(children.Name)
# if there is no children, append this branch to branches
if len(children.Children) == 0:
branches.append(parents)
# if there is a children, then go to the child
elif len(children.Children) == 1:
parents = get_banch(parents, children.Children[0], index+1, branches)
# if there are several leaves, then search each leaf
else:
for i in range(len(children.Children)):
new = []
new = get_banch(parents[:index+1], children.Children[i], index+1, branches)
return parents
def get_branches(root):
branches = []
if len(root.Children) > 0:
# you need to check len(root.Children)
for i in range(len(root.Children)): # this is to stop the loop
branch = []
branch.append(root.Name) # skeleton[0] -> root
# initialize the node and get its children
parents = branch[:len(branch)]
children = root.Children[i]
# start the loop to find all leaves
# the initial index may be wrong, you'd better check it.
branch = get_banch(parents, children, 1, branches)
#print()
#print("\n\n\n\n")
return branches
def get_branches_posi(branches):
branches_posi = []
node = FBVector3d()
for b in branches:
bran_posi = []
for name in b:
n = FBFindModelByLabelName(name)
n.GetVector(node, FBModelTransformationType.kModelTranslation)
bran_posi.append(node)
branches_posi.append(bran_posi)
return branches_posi
# Choose the node that has the highest betweenness
root = FBFindModelByLabelName('Bip01')
branches = get_branches(root)
branches_posi = get_branches_posi(branches)
#print(len(branches)) # this tells you how many branches we have
#print(branches) # this shows all branches in a list
#print(branches_posi)
|
Python
| 0
|
@@ -1,8 +1,107 @@
+ny branches we have%0A#print(branches) # this shows all branches in a list%0A#print(branches_posi)%0A
from pyf
@@ -1513,25 +1513,24 @@
, branches)%0A
-%0A
@@ -1533,40 +1533,8 @@
-#print()%0A #print(%22%5Cn%5Cn%5Cn%5Cn%22)%0A
%0A
@@ -1578,26 +1578,67 @@
osi(branches
+, file_name):%0A out = open(file_name,%22w%22
)
-:
%0A branche
@@ -1897,16 +1897,223 @@
d(node)%0A
+ %0A out.write(repr(node%5B0%5D) + %22 %22)%0A out.write(repr(node%5B1%5D) + %22 %22)%0A out.write(repr(node%5B2%5D) + '%5Cn') %0A out.write('------------------------------------%5Cn')%0A
@@ -2144,16 +2144,32 @@
n_posi)%0A
+ out.close()%0A
retu
@@ -2235,16 +2235,72 @@
weeness%0A
+#root = FBFindModelByLabelName('PMD_Kristoff__summer_')%0A
root = F
@@ -2407,12 +2407,19 @@
ches
+, %221.txt%22
)%0A
-%0A#
prin
@@ -2535,30 +2535,175 @@
a list%0A
-#print(branches_posi
+%0Aroot2 = FBFindModelByLabelName('PMD_Kristoff__summer_') %0Abranches2 = get_branches(root2)%0Abranches_posi2 = get_branches_posi(branches2, %222.txt%22)%0Aprint(len(branches2)
)%0A
|
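The diff adds plain-text output of joint coordinates; below is a MotionBuilder-free sketch of the same file format (the function name is mine, the separator string is taken from the diff).

def write_branch_positions(branches_posi, file_name):
    # One "x y z" line per joint, each followed by a separator, as in the diff.
    with open(file_name, "w") as out:
        for branch in branches_posi:
            for node in branch:
                out.write("%r %r %r\n" % (node[0], node[1], node[2]))
                out.write("------------------------------------\n")

write_branch_positions([[(0.0, 1.0, 0.0), (0.0, 2.0, 0.5)]], "1.txt")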
c9df16f35af2cf51a4612eb76fab59819a32df64
|
Handle TypeError in is_float
|
src/sentry/utils/__init__.py
|
src/sentry/utils/__init__.py
|
"""
sentry.utils
~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from django.utils.encoding import force_unicode
import six
def to_unicode(value):
try:
value = six.text_type(force_unicode(value))
except (UnicodeEncodeError, UnicodeDecodeError):
value = '(Error decoding value)'
except Exception: # in some cases we get a different exception
try:
value = str(repr(type(value)))
except Exception:
value = '(Error decoding value)'
return value
def is_float(var):
try:
float(var)
except ValueError:
return False
return True
|
Python
| 0.003037
|
@@ -700,16 +700,28 @@
except
+ (TypeError,
ValueEr
@@ -723,16 +723,17 @@
lueError
+)
:%0A
|
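The patched helper, reconstructed by applying the diff above, now reports False for values float() cannot accept at all (e.g. None) instead of raising:

def is_float(var):
    try:
        float(var)
    except (TypeError, ValueError):
        return False
    return True

assert is_float('3.14')
assert not is_float('abc')   # ValueError path
assert not is_float(None)    # TypeError path, previously uncaught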
1dff7ff24903f470bf1e1d325c6eb88590b9fa0f
|
Make generator to get last bot utterance
|
rasa/core/channels/twilio_voice.py
|
rasa/core/channels/twilio_voice.py
|
import inspect
from sanic import Blueprint, response
from sanic.request import Request
from sanic.response import HTTPResponse
from twilio.twiml.voice_response import VoiceResponse, Gather
from typing import Text, Callable, Awaitable, List
from rasa.core.channels.channel import (
InputChannel,
CollectingOutputChannel,
UserMessage,
)
class TwilioVoiceInput(InputChannel):
@classmethod
def name(cls) -> Text:
"""Name of your custom channel."""
return "twilio_voice"
def blueprint(
self, on_new_message: Callable[[UserMessage], Awaitable[None]]
) -> Blueprint:
twilio_voice_webhook = Blueprint(
"custom_webhook_{}".format(type(self).__name__),
inspect.getmodule(self).__name__,
)
@twilio_voice_webhook.route("/", methods=["GET"])
async def health(request: Request) -> HTTPResponse:
return response.json({"status": "ok"})
@twilio_voice_webhook.route("/webhook", methods=["POST"])
async def receive(request: Request):
sender_id = request.form.get("From") # method to get sender_id
text = request.form.get("SpeechResult") # method to fetch text
input_channel = self.name() # method to fetch input channel
call_status = request.form.get("CallStatus") # method to fetch call status
call_sid = request.form.get("CallSid") # Get the call identifier
collector = TwilioVoiceCollectingOutputChannel()
# Provide an initial greeting to answer the user's call.
if (text is None) & (call_status == "ringing"):
text = "hello"
# If the user doesn't respond to the previous message resend the last message.
elif text is None:
# Get last user utterance from tracker.
tracker = request.app.agent.tracker_store.retrieve(sender_id)
text = tracker.current_state()["latest_message"]["text"]
# determine the response.
if text is not None:
await on_new_message(
UserMessage(
text,
collector,
sender_id,
input_channel=input_channel,
)
)
# Parse the text responses and build the Twilio VoiceResponse.
respond_segments = []
for message in collector.messages:
respond_segments.append(message["text"])
if "buttons" in message:
for button in message["buttons"]:
respond_segments.append(button["title"])
twilio_response = build_twilio_voice_response(respond_segments)
return response.text(str(twilio_response), content_type="text/xml")
return twilio_voice_webhook
class TwilioVoiceCollectingOutputChannel(CollectingOutputChannel):
"""Output channel that collects send messages in a list
(doesn't send them anywhere, just collects them)."""
@classmethod
def name(cls) -> Text:
return "twilio_voice"
def build_twilio_voice_response(messages: List[Text]) -> VoiceResponse:
"""Builds the Twilio Voice Response object."""
vr = VoiceResponse()
gather = Gather(
input="speech",
action="/webhooks/twilio_voice/webhook",
actionOnEmptyResult=True,
speechTimeout="auto",
)
# Add pauses between messages.
# Add a listener to the last message to listen for user response.
for i, message in enumerate(messages):
if i + 1 == len(messages):
gather.say(message)
vr.append(gather)
else:
vr.say(message)
vr.pause(length=1)
return vr
|
Python
| 0.000011
|
@@ -234,16 +234,63 @@
, List%0A%0A
+from rasa.shared.core.events import BotUttered%0A
from ras
@@ -322,16 +322,16 @@
mport (%0A
-
Inpu
@@ -1715,337 +1715,8 @@
llo%22
-%0A # If the user doesn't respond to the previous message resend the last message.%0A elif text is None:%0A # Get last user utterance from tracker.%0A tracker = request.app.agent.tracker_store.retrieve(sender_id)%0A text = tracker.current_state()%5B%22latest_message%22%5D%5B%22text%22%5D
%0A%0A
@@ -2446,24 +2446,24 @@
n%5B%22title%22%5D)%0A
-
@@ -2613,16 +2613,768 @@
xt/xml%22)
+%0A # If the user doesn't respond to the previous message resend the last message.%0A elif text is None:%0A # Get last user utterance from tracker.%0A tracker = request.app.agent.tracker_store.retrieve(sender_id)%0A last_response = next((e for e in reversed(tracker.events) if isinstance(e, BotUttered)), None)%0A%0A # If no previous utterance found say something generic.%0A if last_response is None:%0A last_response = %22I didn't get that.%22%0A%09%09else:%0A%09%09 last_response = last_response.text%0A%0A twilio_response = build_twilio_voice_response(%5Blast_response%5D)%0A return response.text(str(twilio_response), content_type=%22text/xml%22)
%0A%0A
|
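The generator expression the commit title refers to, reconstructed from the diff: walk the tracker's events newest-first, take the first BotUttered, and fall back to a generic reprompt when none exists.

from rasa.shared.core.events import BotUttered

def last_bot_utterance(tracker, fallback="I didn't get that."):
    # tracker.events is ordered oldest-first, so reversed() yields most recent.
    event = next(
        (e for e in reversed(tracker.events) if isinstance(e, BotUttered)),
        None)
    return event.text if event is not None else fallback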
7fb89e4dbe2cbed4ef37e13073d4fa3f2a650049
|
Check for missing part thumbnails when the server first runs
|
InvenTree/part/apps.py
|
InvenTree/part/apps.py
|
from __future__ import unicode_literals
from django.apps import AppConfig
class PartConfig(AppConfig):
name = 'part'
|
Python
| 0
|
@@ -34,16 +34,90 @@
terals%0A%0A
+import os%0A%0Afrom django.db.utils import OperationalError, ProgrammingError%0A
from dja
@@ -142,16 +142,49 @@
pConfig%0A
+from django.conf import settings%0A
%0A%0Aclass
@@ -224,8 +224,898 @@
'part'%0A
+%0A def ready(self):%0A %22%22%22%0A This function is called whenever the Part app is loaded.%0A %22%22%22%0A%0A self.generate_part_thumbnails()%0A%0A def generate_part_thumbnails(self):%0A from .models import Part%0A%0A print(%22Checking Part image thumbnails%22)%0A%0A try:%0A for part in Part.objects.all():%0A if part.image:%0A url = part.image.thumbnail.name%0A #if url.startswith('/'):%0A # url = url%5B1:%5D%0A loc = os.path.join(settings.MEDIA_ROOT, url)%0A if not os.path.exists(loc):%0A print(%22InvenTree: Generating thumbnail for Part '%7Bp%7D'%22.format(p=part.name))%0A part.image.render_variations(replace=False)%0A except (OperationalError, ProgrammingError):%0A print(%22Could not generate Part thumbnails%22)%0A
|
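A general sketch of the pattern the diff depends on (the app and model names here are illustrative, not InvenTree's): AppConfig.ready() runs on every startup, including before migrations have been applied, so any database access inside it must swallow OperationalError/ProgrammingError rather than crash the server.

from django.apps import AppConfig
from django.db.utils import OperationalError, ProgrammingError

class ExampleConfig(AppConfig):          # hypothetical app
    name = 'example'

    def ready(self):
        try:
            from .models import Widget   # hypothetical model
            Widget.objects.exists()      # any query fails pre-migration
        except (OperationalError, ProgrammingError):
            pass                         # startup must not crash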
b7c72b9c1d63e9b7881e931cae00a0d4f0fd9e08
|
version bump to 0.8.3
|
namebench.py
|
namebench.py
|
#!/usr/bin/env python
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Simple DNS server comparison benchmarking tool.
Designed to assist system administrators in selection and prioritization.
"""
__author__ = 'tstromberg@google.com (Thomas Stromberg)'
import datetime
import optparse
import sys
import tempfile
# Make it easy to import 3rd party utilities without editing their imports.
sys.path.append('lib/third_party')
from lib import benchmark
from lib import config
from lib import history_parser
from lib import nameserver_list
from lib import util
VERSION = '0.8.2'
# Detect congestion problems early!
EXPECTED_DURATION = 120.0
SEVERE_CONGESTION_MULTIPLIER = 3
if __name__ == '__main__':
parser = optparse.OptionParser()
parser.add_option('-r', '--runs', dest='run_count', default=1, type='int',
help='Number of test runs to perform on each nameserver.')
parser.add_option('-c', '--config', dest='config', default='namebench.cfg',
help='Config file to use.')
parser.add_option('-o', '--output', dest='output_file', default='output.csv',
help='Filename to write query results to (CSV format).')
parser.add_option('-j', '--threads', dest='thread_count',
help='# of threads to use')
parser.add_option('-y', '--timeout', dest='timeout', type='float',
help='# of seconds general requests timeout in.')
parser.add_option('-Y', '--health_timeout', dest='health_timeout',
type='float', help='health check timeout (in seconds)')
parser.add_option('-f', '--filename', dest='data_file',
default='data/alexa-top-10000-global.txt',
help='File containing a list of domain names to query.')
parser.add_option('-i', '--import', dest='import_file',
help=('Import history from safari, google_chrome, '
'internet_explorer, opera, squid, or a file path.'))
parser.add_option('-t', '--tests', dest='test_count', type='int',
help='Number of queries per run.')
parser.add_option('-x', '--select_mode', dest='select_mode',
default='weighted',
help='Selection algorithm to use (weighted, random, chunk)')
parser.add_option('-s', '--num_servers', dest='num_servers',
type='int', help='Number of nameservers to include in test')
parser.add_option('-S', '--no_secondary', dest='no_secondary',
action='store_true', help='Disable secondary servers')
parser.add_option('-O', '--only', dest='only',
action='store_true',
help='Only test nameservers passed as arguments')
(cli_options, args) = parser.parse_args()
(opt, primary_ns, secondary_ns) = config.ProcessConfiguration(cli_options)
for arg in args:
if '.' in arg:
primary_ns.append((arg, arg))
include_internal = True
if opt.only:
include_internal = False
if not primary_ns:
print 'If you use --only, you must provide nameservers to use.'
sys.exit(1)
print('namebench %s - %s (%s) on %s' %
(VERSION, opt.import_file or opt.data_file, opt.select_mode,
datetime.datetime.now()))
print ('threads=%s tests=%s runs=%s timeout=%s health_timeout=%s servers=%s' %
(opt.thread_count, opt.test_count, opt.run_count, opt.timeout,
opt.health_timeout, opt.num_servers))
print '-' * 78
if opt.import_file:
importer = history_parser.HistoryParser()
history = importer.Parse(opt.import_file)
if history:
print '- Imported %s records from %s' % (len(history), opt.import_file)
else:
print '- Could not import anything from %s' % opt.import_file
sys.exit(2)
else:
history = None
nameservers = nameserver_list.NameServers(primary_ns, secondary_ns,
num_servers=opt.num_servers,
include_internal=include_internal,
timeout=opt.timeout,
health_timeout=opt.health_timeout)
(intercepted, duration) = util.AreDNSPacketsIntercepted()
print '- DNS Intercept test completed in %sms' % duration
congestion = duration / EXPECTED_DURATION
if intercepted:
print 'XXX[ OHNO! ]XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
print 'XX Someone upstream of this machine is doing evil things and XX'
print 'XX intercepting all outgoing nameserver requests. The results XX'
print 'XX of this program will be useless. Get your ISP to fix it. XX'
print 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
print ''
sys.exit(1)
elif congestion > SEVERE_CONGESTION_MULTIPLIER:
print '* Health checks are running %.1fX slower than expected!' % congestion
print '* NOTE: results may be inconsistent if your connection is saturated!'
print ''
nameservers.ApplyCongestionFactor(congestion / 2.5)
if len(nameservers) > 1:
nameservers.thread_count = int(opt.thread_count)
nameservers.cache_dir = tempfile.gettempdir()
nameservers.FindAndRemoveUndesirables()
print ''
print 'Final list of nameservers to benchmark:'
print '-' * 60
for ns in nameservers.SortByFastest():
print ' %-19.19s %-20.20s (%sms)' % (ns.ip, ns.name, ns.check_duration)
print ''
bmark = benchmark.NameBench(nameservers,
run_count=opt.run_count,
test_count=opt.test_count)
if history:
bmark.CreateTests(history, select_mode=opt.select_mode)
else:
bmark.CreateTestsFromFile(opt.data_file, select_mode=opt.select_mode)
bmark.Run()
bmark.DisplayResults()
if opt.output_file:
print ''
print '* Saving detailed results to %s' % opt.output_file
bmark.SaveResultsToCsv(opt.output_file)
best = bmark.BestOverallNameServer()
nearest = [x for x in bmark.NearestNameServers(3) if x.ip != best.ip][0:2]
print ''
print 'Recommended Configuration (fastest + nearest):'
print '----------------------------------------------'
for ns in [best] + nearest:
if ns.warnings:
warning = '(%s)' % ', '.join(ns.warnings)
else:
warning = ''
print 'nameserver %-15.15s # %s %s' % (ns.ip, ns.name, warning)
|
Python
| 0
|
@@ -1121,17 +1121,17 @@
= '0.8.
-2
+3
'%0A%0A# Det
|
fbd57d2c7ad428cfa240ef11f53eb7f5b81908d7
|
Bump to 0.6.2
|
namebench.py
|
namebench.py
|
#!/usr/bin/env python
# Copyright 2009 Google Inc. All Rights Reserved.
"""Simple DNS server comparison benchmarking tool.
Designed to assist system administrators in selection and prioritization.
"""
__author__ = 'tstromberg@google.com (Thomas Stromberg)'
import ConfigParser
import optparse
import sys
import tempfile
# Make it easy to import 3rd party utilities without editing their imports.
sys.path.append('lib/third_party')
from lib import benchmark
from lib import nameserver_list
from lib import util
from lib import web
VERSION = '0.6.1'
if __name__ == '__main__':
parser = optparse.OptionParser()
parser.add_option('-g', '--gui', dest='gui', default=False,
action='store_true',
help='Use graphical user interface (EXPERIMENTAL)')
parser.add_option('-r', '--runs', dest='run_count', default=2, type='int',
help='Number of test runs to perform on each nameserver.')
parser.add_option('-c', '--config', dest='config', default='namebench.cfg',
help='Config file to use.')
parser.add_option('-o', '--output', dest='output_file', default=False,
help='Filename to write query results to (CSV format).')
parser.add_option('-T', '--threads', dest='thread_count', default=False,
help='# of threads to use')
parser.add_option('-i', '--input', dest='input_file',
default='data/top-10000.txt',
help='File containing a list of domain names to query.')
parser.add_option('-t', '--tests', dest='test_count', default=40, type='int',
help='Number of queries per run.')
parser.add_option('-x', '--num_servers', dest='num_servers', default=13,
type='int', help='Number of nameservers to test')
(opt, args) = parser.parse_args()
config = ConfigParser.ConfigParser()
config.read(opt.config)
general = dict(config.items('general'))
primary_ns = config.items('primary')
secondary_ns = config.items('secondary')
# Include internal & global first
if opt.thread_count:
thread_count = int(opt.thread_count)
else:
thread_count = int(general['max_thread_count'])
print ('namebench %s - %s threads, %s tests, %s runs' %
(VERSION, thread_count, opt.test_count, opt.run_count))
print '-' * 78
for arg in args:
if '.' in arg:
primary_ns.append((arg, arg))
if util.AreDNSPacketsIntercepted():
print 'XXX[ OHNO! ]XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
print 'XX Someone upstream of this machine is doing evil things and XX'
print 'XX intercepting all outgoing nameserver requests. The results XX'
print 'XX of this program may be useless. Continuing anyway... XX'
print 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
print ''
nameservers = nameserver_list.NameServers(primary_ns, secondary_ns,
include_internal=True,
threads=thread_count,
cache_dir=tempfile.gettempdir())
nameservers.FilterUnwantedServers(count=int(opt.num_servers))
print ''
print 'Final list of nameservers to benchmark:'
print '---------------------------------------'
for ns in nameservers:
print ' %s [%s], health tests took %sms' % (ns.ip, ns.name,
ns.check_duration)
if opt.gui:
web.WebServerThread().start()
web.OpenBrowserWindow()
else:
bmark = benchmark.NameBench(nameservers, opt.input_file,
run_count=opt.run_count,
test_count=opt.test_count)
bmark.Run()
bmark.DisplayResults()
if opt.output_file:
print '* Saving detailed results to %s' % opt.output_file
bmark.SaveResultsToCsv(opt.output_file)
best = bmark.BestOverallNameServer()
nearest = [x for x in bmark.NearestNameServers(3) if x.ip != best.ip][0:2]
print ''
print 'Recommended Configuration (fastest + nearest):'
print '----------------------------------------------'
for ns in [best] + nearest:
if ns.warnings:
warning = '(%s)' % ', '.join(ns.warnings)
else:
warning = ''
print 'nameserver %-15.15s # %s %s' % (ns.ip, ns.name, warning)
|
Python
| 0.000187
|
@@ -545,17 +545,17 @@
= '0.6.
-1
+2
'%0A%0Aif __
|
7f5f10132334c1f6685497d3fff48c2c65617845
|
Remove broken URL (#3623)
|
InvenTree/part/urls.py
|
InvenTree/part/urls.py
|
"""URL lookup for Part app. Provides URL endpoints for:
- Display / Create / Edit / Delete PartCategory
- Display / Create / Edit / Delete Part
- Create / Edit / Delete PartAttachment
- Display / Create / Edit / Delete SupplierPart
"""
from django.urls import include, re_path
from . import views
part_detail_urls = [
re_path(r'^bom-download/?', views.BomDownload.as_view(), name='bom-download'),
re_path(r'^pricing/', views.PartPricing.as_view(), name='part-pricing'),
re_path(r'^bom-upload/?', views.BomUpload.as_view(), name='upload-bom'),
re_path(r'^qr_code/?', views.PartQRCode.as_view(), name='part-qr'),
# Normal thumbnail with form
re_path(r'^thumb-select/?', views.PartImageSelect.as_view(), name='part-image-select'),
# Any other URLs go to the part detail page
re_path(r'^.*$', views.PartDetail.as_view(), name='part-detail'),
]
category_urls = [
# Top level subcategory display
re_path(r'^subcategory/', views.PartIndex.as_view(template_name='part/subcategory.html'), name='category-index-subcategory'),
# Category detail views
re_path(r'(?P<pk>\d+)/', views.CategoryDetail.as_view(), name='category-detail'),
]
# URL list for part web interface
part_urls = [
# Upload a part
re_path(r'^import/', views.PartImport.as_view(), name='part-import'),
re_path(r'^import-api/', views.PartImportAjax.as_view(), name='api-part-import'),
# Download a BOM upload template
re_path(r'^bom_template/?', views.BomUploadTemplate.as_view(), name='bom-upload-template'),
# Individual part using pk
re_path(r'^(?P<pk>\d+)/', include(part_detail_urls)),
# Part category
re_path(r'^category/', include(category_urls)),
# Individual part using IPN as slug
re_path(r'^(?P<slug>[-\w]+)/', views.PartDetailFromIPN.as_view(), name='part-detail-from-ipn'),
# Top level part list (display top level parts and categories)
re_path(r'^.*$', views.PartIndex.as_view(), name='part-index'),
]
|
Python
| 0
|
@@ -898,175 +898,8 @@
%5B%0A%0A
- # Top level subcategory display%0A re_path(r'%5Esubcategory/', views.PartIndex.as_view(template_name='part/subcategory.html'), name='category-index-subcategory'),%0A%0A
|
912cb00aa1b0663265c14918422b0ec1220a21d3
|
Bump to 0.6.5
|
namebench.py
|
namebench.py
|
#!/usr/bin/env python
# Copyright 2009 Google Inc. All Rights Reserved.
"""Simple DNS server comparison benchmarking tool.
Designed to assist system administrators in selection and prioritization.
"""
__author__ = 'tstromberg@google.com (Thomas Stromberg)'
import ConfigParser
import optparse
import sys
import tempfile
import datetime
# Make it easy to import 3rd party utilities without editing their imports.
sys.path.append('lib/third_party')
from lib import benchmark
from lib import nameserver_list
from lib import util
from lib import web
VERSION = '0.6.4'
def processConfiguration(opt):
# Read the config file, set variables
config = ConfigParser.ConfigParser()
config.read(opt.config)
general = dict(config.items('general'))
primary_ns = config.items('primary')
secondary_ns = config.items('secondary')
# Set some important defaults.
for option in ('thread_count', 'timeout', 'health_timeout', 'num_servers'):
if not getattr(opt, option):
setattr(opt, option, float(general[option]))
# Include internal & global first
if opt.thread_count:
thread_count = int(opt.thread_count)
else:
thread_count = int(general['max_thread_count'])
return (opt, primary_ns, secondary_ns)
if __name__ == '__main__':
print 'namebench %s - %s' % (VERSION, datetime.datetime.now())
parser = optparse.OptionParser()
# parser.add_option('-g', '--gui', dest='gui', default=False,
# action='store_true',
# help='Use graphical user interface (EXPERIMENTAL)')
parser.add_option('-r', '--runs', dest='run_count', default=2, type='int',
help='Number of test runs to perform on each nameserver.')
parser.add_option('-c', '--config', dest='config', default='namebench.cfg',
help='Config file to use.')
parser.add_option('-o', '--output', dest='output_file',
help='Filename to write query results to (CSV format).')
parser.add_option('-j', '--threads', dest='thread_count',
help='# of threads to use')
parser.add_option('-y', '--timeout', dest='timeout', type='float',
help='# of seconds general requests timeout in.')
parser.add_option('-Y', '--health_timeout', dest='health_timeout',
type='float', help='# of seconds health checks timeout in.')
parser.add_option('-i', '--input', dest='input_file',
default='data/top-10000.txt',
help='File containing a list of domain names to query.')
parser.add_option('-t', '--tests', dest='test_count', default=40, type='int',
help='Number of queries per run.')
parser.add_option('-s', '--num_servers', dest='num_servers',
type='int', help='Number of nameservers to include in test')
(cli_options, args) = parser.parse_args()
(opt, primary_ns, secondary_ns) = processConfiguration(cli_options)
print ('threads=%s tests=%s runs=%s timeout=%s health_timeout=%s servers=%s' %
(opt.thread_count, opt.test_count, opt.run_count, opt.timeout,
opt.health_timeout, opt.num_servers))
print '-' * 78
for arg in args:
if '.' in arg:
primary_ns.append((arg, arg))
if util.AreDNSPacketsIntercepted():
print 'XXX[ OHNO! ]XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
print 'XX Someone upstream of this machine is doing evil things and XX'
print 'XX intercepting all outgoing nameserver requests. The results XX'
print 'XX of this program may be useless. Continuing anyway... XX'
print 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
print ''
nameservers = nameserver_list.NameServers(primary_ns, secondary_ns,
include_internal=True,
timeout=opt.timeout,
health_timeout=opt.health_timeout,
version=VERSION)
nameservers.thread_count = int(opt.thread_count)
nameservers.cache_dir = tempfile.gettempdir()
nameservers.FilterUnwantedServers(count=int(opt.num_servers))
print ''
print 'Final list of nameservers to benchmark:'
print '---------------------------------------'
for ns in nameservers:
print ' %s [%s], health tests took %sms' % (ns.ip, ns.name,
ns.check_duration)
bmark = benchmark.NameBench(nameservers, opt.input_file,
run_count=opt.run_count,
test_count=opt.test_count)
bmark.Run()
bmark.DisplayResults()
if opt.output_file:
print '* Saving detailed results to %s' % opt.output_file
bmark.SaveResultsToCsv(opt.output_file)
best = bmark.BestOverallNameServer()
nearest = [x for x in bmark.NearestNameServers(3) if x.ip != best.ip][0:2]
print ''
print 'Recommended Configuration (fastest + nearest):'
print '----------------------------------------------'
for ns in [best] + nearest:
if ns.warnings:
warning = '(%s)' % ', '.join(ns.warnings)
else:
warning = ''
print 'nameserver %-15.15s # %s %s' % (ns.ip, ns.name, warning)
|
Python
| 0.000234
|
@@ -561,17 +561,17 @@
= '0.6.
-4
+5
'%0A%0Adef p
|
d0e31fdb5ec99e91f7b5f7da5b81fc7a391689df
|
Update django_facebook/admin.py
|
django_facebook/admin.py
|
django_facebook/admin.py
|
from django.contrib import admin
from django.conf import settings
from django.core.urlresolvers import reverse
from django_facebook import admin_actions
from django_facebook import models
class FacebookUserAdmin(admin.ModelAdmin):
list_display = ('user_id', 'name', 'facebook_id',)
search_fields = ('name',)
class FacebookLikeAdmin(admin.ModelAdmin):
list_display = ('user_id', 'name', 'category', 'facebook_id',)
search_fields = ('name',)
filter_fields = ('category',)
class FacebookProfileAdmin(admin.ModelAdmin):
list_display = ('image_', 'user_', 'facebook_name', 'facebook_id',)
raw_id_fields = ('user',)
search_fields = ('facebook_name', 'facebook_id',)
def image_(self, instance):
return """<span style="
background-image: url({0});
background-size: cover;
width: 21px;
height: 21px;
display: inline-block;
outline: 1px solid #DDD;
position: absolute;
margin-top: -3px;
"></span>""".format(
instance.image.url
)
image_.allow_tags = True
def user_(self, instance):
admin_url = reverse('admin:auth_user_change', args=[instance.user.pk])
return '<a href="{0}">{1}</a>'.format(
admin_url,
instance.user
)
user_.allow_tags = True
def facebook_profile(open_graph_share):
'''
Nicely displayed version of the facebook user
with user id and image and link to facebook :)
'''
user = open_graph_share.user
profile = user.get_profile()
facebook_id = profile.facebook_id
facebook_url = 'http://www.facebook.com/%s/' % facebook_id
link = '<p><a href="%s"><img src="http://graph.facebook.com/%s/picture/?type=large" width="100px" style="float:left"/>%s</a><br/></p>' % (facebook_url, facebook_id, facebook_id)
return link
facebook_profile.allow_tags = True
facebook_profile.short_description = 'Profile'
class OpenGraphShareAdmin(admin.ModelAdmin):
raw_id_fields = ['user']
list_display = ['user', 'action_domain', facebook_profile,
'completed_at', 'error_message']
actions = [admin_actions.retry_open_graph_share,
admin_actions.retry_open_graph_share_for_user]
if settings.AUTH_PROFILE_MODULE == 'django_facebook.FacebookProfile':
admin.site.register(models.FacebookProfile, FacebookProfileAdmin)
admin.site.register(models.FacebookUser, FacebookUserAdmin)
admin.site.register(models.FacebookLike, FacebookLikeAdmin)
admin.site.register(models.OpenGraphShare, OpenGraphShareAdmin)
|
Python
| 0
|
@@ -1042,16 +1042,57 @@
mage.url
+ if (instance and instance.image) else ''
%0A
|
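The guarded expression after the patch, reconstructed from the diff: the admin thumbnail column no longer assumes every FacebookProfile row has an uploaded image.

# Fragment of image_() after the diff; `instance` is the profile being rendered.
url = instance.image.url if (instance and instance.image) else ''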
4d1e3e548ee80d4a3ef42ad22506fcb8dd64ef05
|
Make TestBackend compatible with Python 2 (Closes: #72)
|
django_slack/backends.py
|
django_slack/backends.py
|
import pprint
import logging
from six.moves import urllib
from django.http.request import QueryDict
from django.utils.module_loading import import_string
from .utils import Backend
from .app_settings import app_settings
logger = logging.getLogger(__name__)
class UrllibBackend(Backend):
def send(self, url, message_data, **kwargs):
qs = QueryDict(mutable=True)
qs.update(message_data)
r = urllib.request.urlopen(urllib.request.Request(
url,
qs.urlencode().encode('utf-8'),
))
result = r.read().decode('utf-8')
self.validate(r.headers['content-type'], result, message_data)
class RequestsBackend(Backend):
def __init__(self):
# Lazily import to avoid dependency
import requests
self.session = requests.Session()
def send(self, url, message_data, **kwargs):
r = self.session.post(url, data=message_data, verify=False)
self.validate(r.headers['Content-Type'], r.text, message_data)
class ConsoleBackend(Backend):
def send(self, url, message_data, **kwargs):
print("I: Slack message:")
pprint.pprint(message_data, indent=4)
print("-" * 79)
class LoggerBackend(Backend):
def send(self, url, message_data, **kwargs):
logger.info(pprint.pformat(message_data, indent=4))
class DisabledBackend(Backend):
def send(self, url, message_data, **kwargs):
pass
class CeleryBackend(Backend):
def __init__(self):
# Lazily import to avoid dependency
from .tasks import send
self._send = send
# Check we can import our specified backend up-front
import_string(app_settings.BACKEND_FOR_QUEUE)()
def send(self, *args, **kwargs):
# Send asynchronously via Celery
self._send.delay(*args, **kwargs)
class TestBackend(Backend):
"""
This backend is for testing.
Before a test, call `reset_messages`, and after a test, call
`retrieve_messages` for a list of all messages that have been sent during
the test.
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.reset_messages()
def send(self, url, message_data, **kwargs):
self.messages.append(message_data)
def reset_messages(self):
self.messages = []
def retrieve_messages(self):
messages = self.messages
self.reset_messages()
return messages
# For backwards-compatibility
Urllib2Backend = UrllibBackend
|
Python
| 0
|
@@ -2124,16 +2124,33 @@
super(
+TestBackend, self
).__init
|
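The fix in isolation: zero-argument super() raises TypeError on Python 2, so the class and instance are named explicitly. A standalone sketch, with Base standing in for django_slack's Backend:

class Base(object):
    def __init__(self, *args, **kwargs):
        pass

class TestBackend(Base):
    def __init__(self, *args, **kwargs):
        # super().__init__(...) only works on Python 3; this form runs on both.
        super(TestBackend, self).__init__(*args, **kwargs)
        self.messages = []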
d06e5e51695d40b8248d5854454b7d291b76bafd
|
Fix a few first run issues.
|
observy/notifications/__init__.py
|
observy/notifications/__init__.py
|
#!/usr/bin/python
#
# Copyright 2016 Eldon Ahrold
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import json
import glob
import importlib
import socket
from datetime import datetime as date
from notifications import *
__version__ = '0.1'
class NotificationManager(object):
''' Notification Manager class responsible for running
any defined notification class in the subdirectory.
'''
def __init__(self, errors):
super(NotificationManager, self).__init__()
self.errors = errors
def send(self):
for c in self.notificationClasses():
print "sending to %s" % c
notifier = c(self.errors)
notifier.send()
def notificationClasses(self):
paths = glob.glob('./notifications/*Notification.py')
classes =[]
for p in paths:
class_name = os.path.splitext(os.path.basename(p))[0]
NotificationClass = getattr(importlib.import_module(
'%s.%s' % (__name__,class_name)), class_name)
classes.append(NotificationClass)
return classes
@staticmethod
def webhooks_file():
return os.path.join(os.path.dirname(os.path.realpath(__file__)),'webhooks.conf.json')
@staticmethod
def register_webhook(name, webhook):
NotificationManager.modify_webhooks(name, webhook, True)
@staticmethod
def remove_webhook(name, webhook):
NotificationManager.modify_webhooks(name, webhook, False)
@staticmethod
def modify_webhooks(name, webhook, add):
webhook_file = NotificationManager.webhooks_file()
data = open(webhook_file, 'r').read()
all_webhooks = json.loads(data)
registered_webhooks = set(all_webhooks.get(name, []))
if add:
registered_webhooks.add(webhook)
elif webhook in registered_webhooks:
registered_webhooks.remove(webhook)
all_webhooks[name] = list(registered_webhooks)
file = open(webhook_file, 'w')
data = json.dumps(all_webhooks)
file.write(data)
file.close()
class Notifications(object):
"""Base class for service notifications"""
errors = None
def __init__(self, errors):
super(Notifications, self).__init__()
self.errors = errors
def webhooks(self, name):
# url webhooks: read the config file and return the hooks registered for name
data = open('webhooks.conf.json', 'r').read()
return json.loads(data).get(name, [])
def send(self):
"""Send Notification"""
raise('Subclass must implement')
def host_info(self):
hostname = socket.gethostname()
return {
"host": hostname,
"ip" : socket.gethostbyname(hostname),
}
def timestamp(self):
return date.now()
class HookableNotifications(Notifications):
"""Notification class that uses webhooks"""
_webhook_service_name = ''
def __init__(self, errors):
super(HookableNotifications, self).__init__(errors)
def _all_hooks(self):
data = open(NotificationManager.webhooks_file(), 'r').read()
return json.loads(data)
def webhooks(self):
return self._all_hooks().get(self._webhook_service_name, []);
|
Python
| 0
|
@@ -2134,32 +2134,86 @@
_file()%0A
+%0A if os.path.isfile(webhook_file):%0A
data = open(webh
@@ -2238,29 +2238,24 @@
d()%0A
-%0A
all_webh
@@ -2274,24 +2274,69 @@
loads(data)%0A
+ else:%0A all_webhooks = %7B%7D%0A%0A
regi
@@ -2641,16 +2641,17 @@
file, 'w
++
')%0A
|
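The first-run behavior after the diff, as a standalone sketch: a missing webhooks file yields an empty mapping instead of an IOError, and writing with 'w+' creates the file the first time around.

import json
import os

def load_webhooks(path):
    # Mirrors the patched modify_webhooks(): tolerate a missing config file.
    if os.path.isfile(path):
        with open(path, 'r') as fh:
            return json.loads(fh.read())
    return {}

def save_webhooks(path, hooks):
    with open(path, 'w+') as fh:   # 'w+' creates the file on first run
        fh.write(json.dumps(hooks))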
f6239d6df84718044d26f5e746b08a15afb944bf
|
Handle exceptions in the activity constructor so that log messages ends up in the activity log rather than in shell/journal.
|
sugar/activity/activityfactoryservice.py
|
sugar/activity/activityfactoryservice.py
|
# Copyright (C) 2006-2007 Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
import os
import sys
from optparse import OptionParser
import gettext
import gobject
import gtk
import dbus
import dbus.service
import dbus.glib
from sugar.activity.bundle import Bundle
from sugar.activity import activityhandle
from sugar import logger
from sugar import _sugarext
from sugar import env
# Work around for dbus mutex locking issue
gobject.threads_init()
dbus.glib.threads_init()
_ACTIVITY_FACTORY_INTERFACE = "org.laptop.ActivityFactory"
class ActivityFactoryService(dbus.service.Object):
"""D-Bus service that creates instances of Python activities
The ActivityFactoryService is a dbus service created for
each Python based activity type (that is, each activity
bundle which declares a "class" in its activity.info file,
rather than an "exec").
The ActivityFactoryService is the actual process which
instantiates the Python classes for Sugar interfaces. That
is, your Python code runs in the same process as the
ActivityFactoryService itself.
The "service" process is created at the moment Sugar first
attempts to create an instance of the activity type. It
then remains in memory until the last instance of the
activity type is terminated.
"""
def __init__(self, service_name, activity_class):
"""Initialize the service to create activities of this type
service_name -- bundle's service name, this is used
to construct the dbus service name used to access
the created service.
activity_class -- dotted Python class name for the
Activity class which is to be instantiated by
the service. Assumed to be composed of a module
followed by a class.
if the module specified has a "start" attribute this object
will be called on service initialisation (before first
instance is created).
if the module specified has a "stop" attribute this object
will be called after every instance exits (note: may be
called multiple times for each time start is called!)
"""
self._activities = []
splitted_module = activity_class.rsplit('.', 1)
module_name = splitted_module[0]
class_name = splitted_module[1]
module = __import__(module_name)
for comp in module_name.split('.')[1:]:
module = getattr(module, comp)
if hasattr(module, 'start'):
module.start()
self._module = module
self._constructor = getattr(module, class_name)
bus = dbus.SessionBus()
bus_name = dbus.service.BusName(service_name, bus = bus)
object_path = '/' + service_name.replace('.', '/')
dbus.service.Object.__init__(self, bus_name, object_path)
@dbus.service.method("org.laptop.ActivityFactory", in_signature="a{ss}")
def create(self, handle):
"""Create a new instance of this activity
handle -- sugar.activity.activityhandle.ActivityHandle
compatible dictionary providing the instance-specific
values for the new instance
returns xid for the created instance' root window
"""
activity_handle = activityhandle.create_from_dict(handle)
activity = self._constructor(activity_handle)
activity.present()
self._activities.append(activity)
activity.connect('destroy', self._activity_destroy_cb)
return activity.window.xid
def _activity_destroy_cb(self, activity):
"""On close of an instance's root window
Removes the activity from the tracked activities.
If our implementation module has a stop, calls
that.
If there are no more tracked activities, closes
the activity.
"""
self._activities.remove(activity)
if hasattr(self._module, 'stop'):
self._module.stop()
if len(self._activities) == 0:
gtk.main_quit()
def run_with_args(args):
"""Start the activity factory."""
parser = OptionParser()
parser.add_option("-p", "--bundle-path", dest="bundle_path",
help="path to the activity bundle")
(options, args) = parser.parse_args()
run(options.bundle_path)
def run(bundle_path):
sys.path.append(bundle_path)
bundle = Bundle(bundle_path)
logger.start(bundle.get_name())
gettext.bindtextdomain(bundle.get_service_name(),
bundle.get_locale_path())
gettext.textdomain(bundle.get_service_name())
gtk.icon_theme_get_default().append_search_path(bundle.get_icons_path())
os.environ['SUGAR_BUNDLE_PATH'] = bundle_path
os.environ['SUGAR_ACTIVITY_ROOT'] = env.get_profile_path(bundle.get_service_name())
_sugarext.set_prgname(bundle.get_service_name())
_sugarext.set_application_name(bundle.get_name())
factory = ActivityFactoryService(bundle.get_service_name(),
bundle.activity_class)
|
Python
| 0
|
@@ -829,16 +829,48 @@
gettext
+%0Aimport traceback%0Aimport logging
%0A%0Aimport
@@ -2959,16 +2959,58 @@
ies = %5B%5D
+%0A self._service_name = service_name
%0A%0A
@@ -4134,32 +4134,50 @@
om_dict(handle)%0A
+%0A try:%0A
activity
@@ -4214,16 +4214,120 @@
handle)%0A
+ except Exception, e:%0A logging.error(traceback.format_exc())%0A sys.exit(1)%0A%0A
|
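The introduced pattern in isolation (an illustrative wrapper, not the actual method): exceptions raised by the activity constructor are logged before exiting, so the traceback lands in the per-activity log configured by logger.start() instead of the shell's journal.

import logging
import sys
import traceback

def create_instance(constructor, handle):
    # Mirrors the patched create(): log the full traceback, then exit
    # nonzero so the failed factory process does not linger.
    try:
        return constructor(handle)
    except Exception:
        logging.error(traceback.format_exc())
        sys.exit(1)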
dbf736ba66fe6b530bfe3d9d503caa2e24ee8f01
|
Make /config more CORS-y
|
synapse/rest/media/v1/config_resource.py
|
synapse/rest/media/v1/config_resource.py
|
# -*- coding: utf-8 -*-
# Copyright 2018 Will Hunt <will@half-shot.uk>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from twisted.internet import defer
from twisted.web.resource import Resource
from twisted.web.server import NOT_DONE_YET
from synapse.http.server import respond_with_json, wrap_json_request_handler
class MediaConfigResource(Resource):
isLeaf = True
def __init__(self, hs):
Resource.__init__(self)
config = hs.get_config()
self.clock = hs.get_clock()
self.auth = hs.get_auth()
self.limits_dict = {
"m.upload.size": config.max_upload_size,
}
def render_GET(self, request):
self._async_render_GET(request)
return NOT_DONE_YET
@wrap_json_request_handler
@defer.inlineCallbacks
def _async_render_GET(self, request):
yield self.auth.get_user_by_req(request)
respond_with_json(request, 200, self.limits_dict)
def render_OPTIONS(self, request):
respond_with_json(request, 200, {}, send_cors=True)
return NOT_DONE_YET
|
Python
| 0
|
@@ -811,16 +811,34 @@
_handler
+, set_cors_headers
%0A%0A%0Aclass
@@ -1340,32 +1340,66 @@
self, request):%0A
+ set_cors_headers(request)%0A
yield se
@@ -1521,32 +1521,66 @@
self, request):%0A
+ set_cors_headers(request)%0A
respond_
|
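The GET handler after applying the diff, shown as a reconstructed fragment (not standalone): set_cors_headers stamps the Access-Control-* headers on the actual response, matching what render_OPTIONS already did for preflights via send_cors.

# Fragment of MediaConfigResource after the diff.
@wrap_json_request_handler
@defer.inlineCallbacks
def _async_render_GET(self, request):
    set_cors_headers(request)                 # added by the patch
    yield self.auth.get_user_by_req(request)
    respond_with_json(request, 200, self.limits_dict)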
1a00800940b64fe33bbba22eb33da14df84de1a1
|
Fix broken TPShim
|
nupic/research/TP_shim.py
|
nupic/research/TP_shim.py
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2014, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
A shim for the TP class that transparently implements TemporalMemory,
for use with OPF.
"""
import numpy
from nupic.research.temporal_memory import TemporalMemory
class TPShim(TemporalMemory):
"""
TP => Temporal Memory shim class.
"""
def __init__(self,
numberOfCols=500,
cellsPerColumn=10,
initialPerm=0.11,
connectedPerm=0.50,
minThreshold=8,
newSynapseCount=15,
permanenceInc=0.10,
permanenceDec=0.10,
permanenceMax=1.0,
globalDecay=0.10,
activationThreshold=12,
seed=42):
"""
Translate parameters and initialize member variables specific to `TP.py`.
"""
super(TPShim, self).__init__(
columnDimensions=(numberOfCols,),
cellsPerColumn=cellsPerColumn,
activationThreshold=activationThreshold,
initialPermanence=initialPerm,
connectedPermanence=connectedPerm,
minThreshold=minThreshold,
maxNewSynapseCount=newSynapseCount,
permanenceIncrement=permanenceInc,
permanenceDecrement=permanenceDec,
seed=seed)
self.infActiveState = {"t": None}
def compute(self, bottomUpInput, enableLearn, computeInfOutput=None):
"""
(From `TP.py`)
Handle one compute, possibly learning.
@param bottomUpInput The bottom-up input, typically from a spatial pooler
@param enableLearn If true, perform learning
@param computeInfOutput If None, default behavior is to disable the inference
output when enableLearn is on.
If true, compute the inference output
If false, do not compute the inference output
"""
super(TPShim, self).compute(set(bottomUpInput.nonzero()[0]),
learn=enableLearn)
numberOfCells = self.connections.numberOfCells()
activeState = numpy.zeros(numberOfCells)
activeState[list(self.activeCells)] = 1
self.infActiveState["t"] = activeState
output = numpy.zeros(numberOfCells)
output[list(self.predictiveCells | self.activeCells)] = 1
return output
|
Python
| 0.000116
|
@@ -2921,20 +2921,8 @@
elf.
-connections.
numb
|
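The one-token fix, reconstructed from the diff: numberOfCells() is called on the TemporalMemory shim itself rather than through its connections attribute, which evidently no longer exposes it.

# Inside TPShim.compute(), after the diff:
numberOfCells = self.numberOfCells()   # was: self.connections.numberOfCells()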
f54690eb9962489a387674985055e305b9b57aa9
|
remove description from message body
|
addons/project_mailgate/project_mailgate.py
|
addons/project_mailgate/project_mailgate.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import fields, osv
from tools.translate import _
import tools
import binascii
class project_tasks(osv.osv):
_inherit = 'project.task'
def message_new(self, cr, uid, msg, custom_values=None, context=None):
res_id = super(project_tasks,self).message_new(cr, uid, msg, custom_values=custom_values, context=context)
subject = msg.get('subject')
body = msg.get('body_text')
msg_from = msg.get('from')
data = {
'name': subject,
'description': body,
'planned_hours': 0.0,
}
data.update(self.message_partner_by_email(cr, uid, msg_from))
self.write(cr, uid, [res_id], data, context)
return res_id
def message_update(self, cr, uid, ids, msg, data={}, default_act='pending'):
data.update({
'description': msg['body_text'],
})
act = 'do_'+default_act
maps = {
'cost':'planned_hours',
}
for line in msg['body_text'].split('\n'):
line = line.strip()
res = tools.misc.command_re.match(line)
if res:
match = res.group(1).lower()
field = maps.get(match)
if field:
try:
data[field] = float(res.group(2).lower())
except (ValueError, TypeError):
pass
elif match.lower() == 'state' \
and res.group(2).lower() in ['cancel','close','draft','open','pending']:
act = 'do_%s' % res.group(2).lower()
self.write(cr, uid, ids, data, context=context)
getattr(self,act)(cr, uid, ids, context=context)
self.message_append_dict(cr, uid, [res_id], msg, context=context)
return True
def message_thread_followers(self, cr, uid, ids, context=None):
followers = super(project_tasks,self).message_thread_followers(cr, uid, ids, context=context)
for task in self.browse(cr, uid, followers.keys(), context=context):
task_followers = set(followers[task.id])
task_followers.add(task.user_id.user_email)
followers[task.id] = filter(None, task_followers)
return followers
project_tasks()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Python
| 0.000001
|
@@ -1353,44 +1353,8 @@
t')%0A
- body = msg.get('body_text')%0A
@@ -1434,41 +1434,8 @@
ct,%0A
- 'description': body,%0A
@@ -1705,86 +1705,8 @@
'):%0A
- data.update(%7B%0A 'description': msg%5B'body_text'%5D,%0A %7D)%0A
@@ -1750,17 +1750,16 @@
maps = %7B
-
%0A
@@ -2582,12 +2582,12 @@
end_
-dict
+note
(cr,
|
54e1cb0048ffd0024feae4e5dc0c1e047ca55328
|
remove debug print
|
openaps/devices/device.py
|
openaps/devices/device.py
|
import json
from openaps.configurable import Configurable
class ExtraConfig (Configurable):
prefix = 'device'
pass
class Device (Configurable):
vendor = None
required = ['name', 'vendor']
optional = [ ]
prefix = 'device'
_uses = [ ]
def __init__ (self, name, vendor):
self.name = name
self.vendor = vendor
self.fields = dict(vendor=vendor.__name__)
self.extra = ExtraConfig(name)
def read (self, args=None, config=None):
if args:
self.name = args.name
print "args", args
if getattr(args, 'extra', None):
self.fields['extra'] = args.extra.format(name=self.name, **self.fields)
self.vendor.set_config(args, self.extra)
else:
self.vendor.set_config(args, self)
if config:
# self.vendor.read_config(config)
self.fields.update(dict(config.items(self.section_name( ))))
extra_ini = self.fields.get('extra', None)
if extra_ini:
extra = config.Read(name=extra_ini)
self.extra.fields.update(dict(extra.items(self.section_name( ))))
def get (self, k, *args):
return self.fields.get(k, self.extra.get(k, *args))
def format_url (self):
parts = ['{0:s}://{1:s}'.format(self.vendor.__name__.split('.').pop( ), self.name), ]
parts.append(self.vendor.display_device(self))
return ''.join(parts)
def register_uses (self, uses):
for u in uses.usages:
if u not in self._uses:
self._uses.append(u)
def store (self, config):
extra_ini = self.fields.get('extra', None)
if extra_ini:
extra = config.Read(name=extra_ini)
self.extra.store(extra)
extra.save( )
super(Device, self).store(config)
@classmethod
def FromConfig (klass, vendors, config):
devices = [ ]
for candidate in config.sections( ):
if candidate.startswith(klass.prefix):
name = json.loads(candidate.split(' ').pop( ))
vendor = vendors.lookup(config.get(candidate, 'vendor').split('.').pop( ), config)
device = klass(name, vendor)
device.read(config=config)
devices.append(device)
return devices
|
Python
| 0.000008
|
@@ -495,33 +495,8 @@
ame%0A
- print %22args%22, args%0A
|
2d3cd9d0767869bc4747ed5671cb37c557eb78ec
|
use the constructed exename
|
pip/wheel.py
|
pip/wheel.py
|
"""
Support functions for installing the "wheel" binary package format.
"""
from __future__ import with_statement
import csv
import os
import sys
import shutil
import functools
import hashlib
from base64 import urlsafe_b64encode
from pip.util import make_path_relative
def rehash(path, algo='sha256', blocksize=1<<20):
"""Return (hash, length) for path using hashlib.new(algo)"""
h = hashlib.new(algo)
length = 0
with open(path) as f:
block = f.read(blocksize)
while block:
length += len(block)
h.update(block)
block = f.read(blocksize)
digest = 'sha256='+urlsafe_b64encode(h.digest()).decode('latin1').rstrip('=')
return (digest, length)
try:
unicode
def binary(s):
if isinstance(s, unicode):
return s.encode('ascii')
return s
except NameError:
def binary(s):
if isinstance(s, str):
return s.encode('ascii')
def open_for_csv(name, mode):
if sys.version_info[0] < 3:
nl = {}
bin = 'b'
else:
nl = { 'newline': '' }
bin = ''
return open(name, mode + bin, **nl)
def fix_script(path):
"""Replace #!python with #!/path/to/python
Return True if file was changed."""
# XXX RECORD hashes will need to be updated
if os.path.isfile(path):
script = open(path, 'rb')
try:
firstline = script.readline()
if not firstline.startswith(binary('#!python')):
return False
exename = sys.executable.encode(sys.getfilesystemencoding())
firstline = binary('#!') + sys.executable + binary(os.linesep)
rest = script.read()
finally:
script.close()
script = open(path, 'wb')
try:
script.write(firstline)
script.write(rest)
finally:
script.close()
return True
def move_wheel_files(req, wheeldir):
from pip.backwardcompat import get_path
if get_path('purelib') != get_path('platlib'):
# XXX check *.dist-info/WHEEL to deal with this obscurity
raise NotImplemented("purelib != platlib")
info_dir = []
data_dirs = []
source = wheeldir.rstrip(os.path.sep) + os.path.sep
location = dest = get_path('platlib')
installed = {}
changed = set()
def normpath(src, p):
return make_path_relative(src, p).replace(os.path.sep, '/')
def record_installed(srcfile, destfile, modified=False):
"""Map archive RECORD paths to installation RECORD paths."""
oldpath = normpath(srcfile, wheeldir)
newpath = normpath(destfile, location)
installed[oldpath] = newpath
if modified:
changed.add(destfile)
def clobber(source, dest, is_base, fixer=None):
for dir, subdirs, files in os.walk(source):
basedir = dir[len(source):].lstrip(os.path.sep)
if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'):
continue
for s in subdirs:
destsubdir = os.path.join(dest, basedir, s)
if is_base and basedir == '' and destsubdir.endswith('.data'):
data_dirs.append(s)
continue
elif (is_base
and s.endswith('.dist-info')
# is self.req.project_name case preserving?
and s.lower().startswith(req.project_name.replace('-', '_').lower())):
assert not info_dir, 'Multiple .dist-info directories'
info_dir.append(destsubdir)
if not os.path.exists(destsubdir):
os.makedirs(destsubdir)
for f in files:
srcfile = os.path.join(dir, f)
destfile = os.path.join(dest, basedir, f)
shutil.move(srcfile, destfile)
changed = False
if fixer:
changed = fixer(destfile)
record_installed(srcfile, destfile, changed)
clobber(source, dest, True)
assert info_dir, "%s .dist-info directory not found" % req
for datadir in data_dirs:
fixer = None
for subdir in os.listdir(os.path.join(wheeldir, datadir)):
fixer = None
if subdir == 'scripts':
fixer = fix_script
source = os.path.join(wheeldir, datadir, subdir)
dest = get_path(subdir)
clobber(source, dest, False, fixer=fixer)
record = os.path.join(info_dir[0], 'RECORD')
temp_record = os.path.join(info_dir[0], 'RECORD.pip')
with open_for_csv(record, 'r') as record_in:
with open_for_csv(temp_record, 'w+') as record_out:
reader = csv.reader(record_in)
writer = csv.writer(record_out)
for row in reader:
row[0] = installed.pop(row[0], row[0])
if row[0] in changed:
row[1], row[2] = rehash(row[0])
writer.writerow(row)
for f in installed:
writer.writerow((installed[f], '', ''))
shutil.move(temp_record, record)
def _unique(fn):
@functools.wraps(fn)
def unique(*args, **kw):
seen = set()
for item in fn(*args, **kw):
if item not in seen:
seen.add(item)
yield item
return unique
@_unique
def uninstallation_paths(dist):
"""
Yield all the uninstallation paths for dist based on RECORD-without-.pyc
Yield paths to all the files in RECORD. For each .py file in RECORD, add
the .pyc in the same directory.
UninstallPathSet.add() takes care of the __pycache__ .pyc.
"""
from pip.req import FakeFile # circular import
r = csv.reader(FakeFile(dist.get_metadata_lines('RECORD')))
for row in r:
path = os.path.join(dist.location, row[0])
yield path
if path.endswith('.py'):
dn, fn = os.path.split(path)
base = fn[:-3]
path = os.path.join(dn, base+'.pyc')
yield path
|
Python
| 0.000863
|
@@ -1616,29 +1616,22 @@
'#!') +
-sys.executabl
+exenam
e + bina
|
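The diff above swaps `sys.executable` for the pre-encoded `exename` when the shebang line is rebuilt, presumably because `binary('#!')` yields bytes while `sys.executable` is text on Python 3, so the old concatenation mixes types. A minimal standalone sketch of the same idea (not pip's actual helper, just an illustration of the encoding step):

import os
import sys

def build_shebang_line():
    # Encode the interpreter path with the filesystem encoding so that the
    # whole line stays bytes; mixing bytes and str raises TypeError on Python 3.
    exename = sys.executable.encode(sys.getfilesystemencoding())
    return b'#!' + exename + os.linesep.encode('ascii')

print(build_shebang_line())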
294e8b120d507237f1129338c476939b20604f26
|
Save release test metrics under a single column (#30215)
|
release/ray_release/reporter/db.py
|
release/ray_release/reporter/db.py
|
import time
import json
import boto3
from botocore.config import Config
from ray_release.reporter.reporter import Reporter
from ray_release.result import Result
from ray_release.config import Test
from ray_release.logger import logger
class DBReporter(Reporter):
def __init__(self):
self.firehose = boto3.client("firehose", config=Config(region_name="us-west-2"))
def report_result(self, test: Test, result: Result):
logger.info("Persisting result to the databricks delta lake...")
result_json = {
"_table": "release_test_result",
"report_timestamp_ms": int(time.time() * 1000),
"status": result.status or "",
"results": result.results or {},
"name": test.get("name", ""),
"group": test.get("group", ""),
"team": test.get("team", ""),
"frequency": test.get("frequency", ""),
"cluster_url": result.cluster_url or "",
"wheel_url": result.wheels_url or "",
"buildkite_url": result.buildkite_url or "",
"runtime": result.runtime or -1.0,
"stable": result.stable,
"return_code": result.return_code,
"smoke_test": result.smoke_test,
}
result_json.update(result.prometheus_metrics)
logger.debug(f"Result json: {json.dumps(result_json)}")
try:
self.firehose.put_record(
DeliveryStreamName="ray-ci-results",
Record={"Data": json.dumps(result_json)},
)
except Exception:
logger.exception("Failed to persist result to the databricks delta lake")
else:
logger.info("Result has been persisted to the databricks delta lake")
|
Python
| 0
|
@@ -1248,38 +1248,35 @@
- %7D%0A
- result_json.update(
+%22prometheus_metrics%22:
resu
@@ -1296,17 +1296,33 @@
_metrics
-)
+ or %7B%7D,%0A %7D
%0A%0A
|
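Per the subject line, the fix above stops flattening `result.prometheus_metrics` into top-level keys and instead stores the whole dict under a single `prometheus_metrics` field, which keeps the destination table on a fixed schema. A small dict-only illustration of the difference (the metric names are invented for the example):

# Hypothetical metric names; the real ones come from result.prometheus_metrics.
metrics = {"throughput": 123.4, "p50_latency_s": 0.8}

# Before the fix: every metric becomes its own top-level column in the row.
flat = {"name": "my_test", "status": "finished"}
flat.update(metrics)

# After the fix: all metrics live under one "prometheus_metrics" column.
nested = {"name": "my_test", "status": "finished", "prometheus_metrics": metrics or {}}

print(flat)
print(nested)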
288c9a509d7d2aeb2fb5c21b6d5037feaa012eb2
|
Simplify SlackWebhookHook code and change docstring (#4696)
|
airflow/contrib/hooks/slack_webhook_hook.py
|
airflow/contrib/hooks/slack_webhook_hook.py
|
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import json
from airflow.hooks.http_hook import HttpHook
from airflow.exceptions import AirflowException
class SlackWebhookHook(HttpHook):
"""
This hook allows you to post messages to Slack using incoming webhooks.
Takes both Slack webhook token directly and connection that has Slack webhook token.
If both supplied, Slack webhook token will be used.
Each Slack webhook token can be pre-configured to use a specific channel, username and
icon. You can override these defaults in this hook.
:param http_conn_id: connection that has Slack webhook token in the extra field
:type http_conn_id: str
:param webhook_token: Slack webhook token
:type webhook_token: str
:param message: The message you want to send on Slack
:type message: str
:param attachments: The attachments to send on Slack. Should be a list of
dictionaries representing Slack attachments.
:type attachments: list
:param channel: The channel the message should be posted to
:type channel: str
:param username: The username to post to slack with
:type username: str
:param icon_emoji: The emoji to use as icon for the user posting to Slack
:type icon_emoji: str
:param link_names: Whether or not to find and link channel and usernames in your
message
:type link_names: bool
:param proxy: Proxy to use to make the Slack webhook call
:type proxy: str
"""
def __init__(self,
http_conn_id=None,
webhook_token=None,
message="",
attachments=None,
channel=None,
username=None,
icon_emoji=None,
link_names=False,
proxy=None,
*args,
**kwargs
):
super(SlackWebhookHook, self).__init__(*args, **kwargs)
self.http_conn_id = http_conn_id
self.webhook_token = self._get_token(webhook_token, http_conn_id)
self.message = message
self.attachments = attachments
self.channel = channel
self.username = username
self.icon_emoji = icon_emoji
self.link_names = link_names
self.proxy = proxy
def _get_token(self, token, http_conn_id):
"""
Given either a manually set token or a conn_id, return the webhook_token to use
:param token: The manually provided token
:param conn_id: The conn_id provided
:return: webhook_token (str) to use
"""
if token:
return token
elif http_conn_id:
conn = self.get_connection(http_conn_id)
extra = conn.extra_dejson
return extra.get('webhook_token', '')
else:
raise AirflowException('Cannot get token: No valid Slack '
'webhook token nor conn_id supplied')
def _build_slack_message(self):
"""
Construct the Slack message. All relevant parameters are combined here to a valid
Slack json message
:return: Slack message (str) to send
"""
cmd = {}
if self.channel:
cmd['channel'] = self.channel
if self.username:
cmd['username'] = self.username
if self.icon_emoji:
cmd['icon_emoji'] = self.icon_emoji
if self.link_names:
cmd['link_names'] = 1
if self.attachments:
cmd['attachments'] = self.attachments
cmd['text'] = self.message
return json.dumps(cmd)
def execute(self):
"""
Remote Popen (actually execute the slack webhook call)
:param cmd: command to remotely execute
:param kwargs: extra arguments to Popen (see subprocess.Popen)
"""
proxies = {}
if self.proxy:
# we only need https proxy for Slack, as the endpoint is https
proxies = {'https': self.proxy}
slack_message = self._build_slack_message()
self.run(endpoint=self.webhook_token,
data=slack_message,
headers={'Content-type': 'application/json'},
extra_options={'proxies': proxies})
|
Python
| 0
|
@@ -2733,49 +2733,8 @@
gs)%0A
- self.http_conn_id = http_conn_id%0A
@@ -3232,24 +3232,49 @@
vided token%0A
+ :type token: str%0A
:par
@@ -3276,16 +3276,21 @@
:param
+http_
conn_id:
@@ -3311,16 +3311,48 @@
rovided%0A
+ :type http_conn_id: str%0A
@@ -4532,128 +4532,8 @@
all)
-%0A%0A :param cmd: command to remotely execute%0A :param kwargs: extra arguments to Popen (see subprocess.Popen)
%0A
|
2402af19399419d84a8e791bdee58c4f519dfe35
|
Remove pk check from JSONFieldBase pre_init method.
|
jsonfield/fields.py
|
jsonfield/fields.py
|
import copy
from django.db import models
from django.core.serializers.json import DjangoJSONEncoder
from django.utils.translation import ugettext_lazy as _
try:
from django.utils import six
except ImportError:
import six
try:
import json
except ImportError:
from django.utils import simplejson as json
from django.forms import fields
try:
from django.forms.utils import ValidationError
except ImportError:
from django.forms.util import ValidationError
from .subclassing import SubfieldBase
class JSONFormFieldBase(object):
def to_python(self, value):
if isinstance(value, six.string_types):
try:
return json.loads(value, **self.load_kwargs)
except ValueError:
raise ValidationError(_("Enter valid JSON"))
return value
def clean(self, value):
if not value and not self.required:
return None
# Trap cleaning errors & bubble them up as JSON errors
try:
return super(JSONFormFieldBase, self).clean(value)
except TypeError:
raise ValidationError(_("Enter valid JSON"))
class JSONFormField(JSONFormFieldBase, fields.CharField):
pass
class JSONCharFormField(JSONFormFieldBase, fields.CharField):
pass
class JSONFieldBase(six.with_metaclass(SubfieldBase, models.Field)):
def __init__(self, *args, **kwargs):
self.dump_kwargs = kwargs.pop('dump_kwargs', {
'cls': DjangoJSONEncoder,
'separators': (',', ':')
})
self.load_kwargs = kwargs.pop('load_kwargs', {})
super(JSONFieldBase, self).__init__(*args, **kwargs)
def pre_init(self, value, obj):
"""Convert a string value to JSON only if it needs to be deserialized.
SubfieldBase metaclass has been modified to call this method instead of
to_python so that we can check the obj state and determine if it needs to be
deserialized"""
if obj._state.adding:
# Make sure the primary key actually exists on the object before
# checking if it's empty. This is a special case for South datamigrations
# see: https://github.com/bradjasper/django-jsonfield/issues/52
if getattr(obj, "pk", None) is not None:
if isinstance(value, six.string_types):
try:
return json.loads(value, **self.load_kwargs)
except ValueError:
raise ValidationError(_("Enter valid JSON"))
return value
def to_python(self, value):
"""The SubfieldBase metaclass calls pre_init instead of to_python, however to_python
is still necessary for Django's deserializer"""
return value
def get_db_prep_value(self, value, connection, prepared=False):
"""Convert JSON object to a string"""
if self.null and value is None:
return None
return json.dumps(value, **self.dump_kwargs)
def value_to_string(self, obj):
value = self._get_val_from_obj(obj)
return self.get_db_prep_value(value, None)
def value_from_object(self, obj):
value = super(JSONFieldBase, self).value_from_object(obj)
if self.null and value is None:
return None
return self.dumps_for_display(value)
def dumps_for_display(self, value):
return json.dumps(value, **self.dump_kwargs)
def formfield(self, **kwargs):
if "form_class" not in kwargs:
kwargs["form_class"] = self.form_class
field = super(JSONFieldBase, self).formfield(**kwargs)
if isinstance(field, JSONFormFieldBase):
field.load_kwargs = self.load_kwargs
if not field.help_text:
field.help_text = "Enter valid JSON"
return field
def get_default(self):
"""
Returns the default value for this field.
The default implementation on models.Field calls force_unicode
on the default, which means you can't set arbitrary Python
objects as the default. To fix this, we just return the value
without calling force_unicode on it. Note that if you set a
callable as a default, the field will still call it. It will
*not* try to pickle and encode it.
"""
if self.has_default():
if callable(self.default):
return self.default()
return copy.deepcopy(self.default)
# If the field doesn't have a default, then we punt to models.Field.
return super(JSONFieldBase, self).get_default()
class JSONField(JSONFieldBase, models.TextField):
"""JSONField is a generic textfield that serializes/deserializes JSON objects"""
form_class = JSONFormField
def dumps_for_display(self, value):
kwargs = { "indent": 2 }
kwargs.update(self.dump_kwargs)
return json.dumps(value, **kwargs)
class JSONCharField(JSONFieldBase, models.CharField):
"""JSONCharField is a generic textfield that serializes/deserializes JSON objects,
stored in the database like a CharField, which enables it to be used
e.g. in unique keys"""
form_class = JSONCharFormField
try:
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ["^jsonfield\.fields\.(JSONField|JSONCharField)"])
except ImportError:
pass
|
Python
| 0
|
@@ -2232,65 +2232,8 @@
/52%0A
- if getattr(obj, %22pk%22, None) is not None:%0A
@@ -2288,36 +2288,32 @@
-
try:%0A
@@ -2313,36 +2313,32 @@
-
-
return json.load
@@ -2374,36 +2374,32 @@
-
except ValueErro
@@ -2393,36 +2393,32 @@
ept ValueError:%0A
-
|
0a94b8a4756e9b46211567c430560a314c554a1d
|
add help for org command
|
parse.py
|
parse.py
|
import argparse
class Parser(argparse.ArgumentParser):
def populate(self):
self.add_argument('--output', choices=('xml', 'text', 'html'),
default='text')
subparsers = self.add_subparsers(title='Commands', metavar='',
dest='call')
self._add_org(subparsers)
self._add_orgs(subparsers)
def _add_org(self, subparsers):
self.org = subparsers.add_parser('org')
self.org.add_argument('handle', metavar='HANDLE', help='Org handle')
def _add_orgs(self, subparsers):
self.org = subparsers.add_parser('orgs', help='--handle HANDLE --name NAME --dba DBA')
self.org.add_argument('--handle', help='Org handle')
self.org.add_argument('--name', help='Org name')
self.org.add_argument('--dba', help='Org DBA')
def run(self):
return self.parse_args()
|
Python
| 0.000001
|
@@ -464,16 +464,31 @@
er('org'
+, help='HANDLE'
)%0A
|
70477e0a8da15592f5f2197e8d1bffe57eece871
|
Add back import of operations, which was lost during cleanup.
|
nuage_amp/nuage_amp.py
|
nuage_amp/nuage_amp.py
|
#!/usr/bin/python
"""
Usage:
nuage-amp sync [--once] [options]
nuage-amp audit-vports [options]
nuage-amp network-macro-from-url (create|delete) <url> <enterprise> [options]
nuage-amp vsdmanaged-tenant (create|delete) <name> [--force] [options]
nuage-amp vsdmanaged-tenant list
nuage-amp (-h | --help)
Options:
-h --help Show this screen
-v --version Show version
--log-file=<file> Log file location
--config-file=<file> Configuration file location [default: /etc/nuage-amp/nuage-amp.conf]
Sync Options:
--once Run the sync only once
Tenant Operations:
--force Forces tenant deletion. Will remove existing VMs and VSD objects(domains,subnets)
"""
"""
@author: Philippe Jeurissen
@copyright: Alcatel-Lucent 2014
@version: 0.0.2
"""
from utils.config import cfg, readconfig
from utils.log import logger, setlogpath, setloglevel
from docopt import docopt
import time
import sys
def getargs():
return docopt(__doc__, version="nuage-amp 0.1.2")
def main(args):
try:
readconfig(args['--config-file'])
except Exception, e:
logger.error("Error reading config file from location: {0:s}".format(args['--config-file']))
logger.error(str(e))
sys.exit(1)
if args['--log-file']:
try:
setlogpath(args['--log-file'], logconfig=cfg)
except Exception, e:
logger.error("Error setting log location: {0:s}".format(args['--log-file']))
logger.error(str(e))
sys.exit(1)
if cfg.has_option('logging', 'loglevel'):
try:
setloglevel(cfg.get('logging', 'loglevel'))
except Exception, e:
logger.error("Error setting logging level to {0:s}".format(cfg.get('logging', 'loglevel')))
logger.error(str(e))
if args['sync']:
if args['--once']:
sync.sync_subnets()
else:
while True:
sync.sync_subnets()
time.sleep(10)
elif args['audit-vports']:
audit_vport.audit_vports()
elif args['network-macro-from-url']:
if args['create']:
nw_macro.create(args['<url>'], args['<enterprise>'])
elif args['delete']:
nw_macro.delete(args['<url>'], args['<enterprise>'])
elif args['vsdmanaged-tenant']:
if args['create']:
tenant.create_vsd_managed_tenant(args['<name>'])
elif args['delete']:
tenant.delete_vsd_managed_tenant(args['<name>'], args['--force'])
elif args['list']:
tenant.list_vsd_managed_tenants()
if __name__ == "__main__":
main(getargs())
|
Python
| 0
|
@@ -934,16 +934,41 @@
docopt%0A
+from operations import *%0A
import t
|
6b3363b1486bd92f5355023074db9a52e60b1b34
|
Set AWS MQTT timeouts to 120 / 60.
|
src/scs_core/aws/client/mqtt_client.py
|
src/scs_core/aws/client/mqtt_client.py
|
"""
Created on 6 Oct 2017
@author: Bruno Beloff (bruno.beloff@southcoastscience.com)
https://github.com/aws/aws-iot-device-sdk-python
https://stackoverflow.com/questions/20083858/how-to-extract-value-from-bound-method-in-python
"""
import AWSIoTPythonSDK.exception.AWSIoTExceptions as AWSIoTExceptions
import AWSIoTPythonSDK.MQTTLib as MQTTLib
from AWSIoTPythonSDK.MQTTLib import AWSIoTMQTTClient
from scs_core.data.json import JSONify
# --------------------------------------------------------------------------------------------------------------------
class MQTTClient(object):
"""
classdocs
"""
__KEEP_ALIVE_INTERVAL = 600 # recommended: 600 (sec)
__PORT = 8883
__QUEUE_SIZE = -1 # recommended: infinite
__QUEUE_DROP_BEHAVIOUR = MQTTLib.DROP_OLDEST # not required for infinite queue
__QUEUE_DRAINING_FREQUENCY = 1 # recommended: 2 (Hz)
__RECONN_BASE = 1 # recommended: 1 (sec)
__RECONN_MAX = 32 # recommended: 32 (sec)
__RECONN_STABLE = 20 # recommended: 20 (sec)
__DISCONNECT_TIMEOUT = 10 # recommended: 10 (sec) was 30
__OPERATION_TIMEOUT = 5 # recommended: 5 (sec) was 30
__PUB_QOS = 1
__SUB_QOS = 1
# ----------------------------------------------------------------------------------------------------------------
def __init__(self, *subscribers):
"""
Constructor
"""
self.__client = None
self.__subscribers = subscribers
# ----------------------------------------------------------------------------------------------------------------
def connect(self, auth):
# client...
self.__client = AWSIoTMQTTClient(auth.client_id)
# configuration...
self.__client.configureEndpoint(auth.endpoint, self.__PORT)
self.__client.configureCredentials(auth.root_ca_file_path, auth.private_key_path, auth.certificate_path)
self.__client.configureAutoReconnectBackoffTime(self.__RECONN_BASE, self.__RECONN_MAX, self.__RECONN_STABLE)
self.__client.configureOfflinePublishQueueing(self.__QUEUE_SIZE)
self.__client.configureDrainingFrequency(self.__QUEUE_DRAINING_FREQUENCY)
self.__client.configureConnectDisconnectTimeout(self.__DISCONNECT_TIMEOUT)
self.__client.configureMQTTOperationTimeout(self.__OPERATION_TIMEOUT)
# subscriptions...
for subscriber in self.__subscribers:
self.__client.subscribe(subscriber.topic, self.__SUB_QOS, subscriber.handler)
# connect...
try:
self.__client.connect(self.__KEEP_ALIVE_INTERVAL)
return True
except AWSIoTExceptions.connectTimeoutException:
return False
def disconnect(self):
try:
self.__client.disconnect()
except AWSIoTExceptions.disconnectError:
pass
# ----------------------------------------------------------------------------------------------------------------
def publish(self, publication):
payload = JSONify.dumps(publication.payload)
try:
return self.__client.publish(publication.topic, payload, self.__PUB_QOS)
except AWSIoTExceptions.publishTimeoutException as ex:
raise TimeoutError(ex)
# ----------------------------------------------------------------------------------------------------------------
def __str__(self, *args, **kwargs):
subscribers = '[' + ', '.join(str(subscriber) for subscriber in self.__subscribers) + ']'
return "MQTTClient:{subscribers:%s}" % subscribers
# --------------------------------------------------------------------------------------------------------------------
class MQTTSubscriber(object):
"""
classdocs
"""
# ----------------------------------------------------------------------------------------------------------------
def __init__(self, topic, handler):
"""
Constructor
"""
self.__topic = topic
self.__handler = handler
# ----------------------------------------------------------------------------------------------------------------
@property
def topic(self):
return self.__topic
@property
def handler(self):
return self.__handler
# ----------------------------------------------------------------------------------------------------------------
def __str__(self, *args, **kwargs):
return "MQTTSubscriber:{topic:%s, handler:%s}" % (self.topic, self.handler.__self__)
|
Python
| 0
|
@@ -1292,18 +1292,18 @@
1
+2
0
-
@@ -1386,18 +1386,18 @@
-5
+60
|
400f127fb3264b5a4f403a67a89c25238ff192a4
|
Fix missing import in fs.sshfs.error_tools
|
fs/sshfs/error_tools.py
|
fs/sshfs/error_tools.py
|
from __future__ import absolute_import
from __future__ import unicode_literals
import errno
import six
from .. import errors
class _ConvertSSHFSErrors(object):
"""Context manager to convert OSErrors in to FS Errors."""
FILE_ERRORS = {
64: errors.RemoteConnectionError, # ENONET
errno.ENOENT: errors.ResourceNotFound,
errno.EFAULT: errors.ResourceNotFound,
errno.ESRCH: errors.ResourceNotFound,
errno.ENOTEMPTY: errors.DirectoryNotEmpty,
errno.EEXIST: errors.FileExists,
183: errors.DirectoryExists,
#errno.ENOTDIR: errors.DirectoryExpected,
errno.ENOTDIR: errors.ResourceNotFound,
errno.EISDIR: errors.FileExpected,
errno.EINVAL: errors.FileExpected,
errno.ENOSPC: errors.InsufficientStorage,
errno.EPERM: errors.PermissionDenied,
errno.ENETDOWN: errors.RemoteConnectionError,
errno.ECONNRESET: errors.RemoteConnectionError,
errno.ENAMETOOLONG: errors.PathError,
errno.EOPNOTSUPP: errors.Unsupported,
errno.ENOSYS: errors.Unsupported,
}
#
DIR_ERRORS = FILE_ERRORS.copy()
DIR_ERRORS[errno.ENOTDIR] = errors.DirectoryExpected
DIR_ERRORS[errno.EEXIST] = errors.DirectoryExists
DIR_ERRORS[errno.EINVAL] = errors.DirectoryExpected
# if _WINDOWS_PLATFORM: # pragma: no cover
# DIR_ERRORS[13] = errors.DirectoryExpected
# DIR_ERRORS[267] = errors.DirectoryExpected
# FILE_ERRORS[13] = errors.FileExpected
def __init__(self, opname, path, directory=False):
self._opname = opname
self._path = path
self._directory = directory
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
ssh_errors = (
self.DIR_ERRORS
if self._directory
else self.FILE_ERRORS
)
if exc_type and isinstance(exc_value, EnvironmentError):
_errno = exc_value.errno
fserror = ssh_errors.get(_errno, errors.OperationFailed)
if _errno == errno.EACCES and sys.platform == "win32":
if getattr(exc_value, 'args', None) == 32: # pragma: no cover
fserror = errors.ResourceLocked
six.reraise(
fserror,
fserror(
self._path,
exc=exc_value
),
traceback
)
# Stops linter complaining about invalid class name
convert_sshfs_errors = _ConvertSSHFSErrors
|
Python
| 0.00021
|
@@ -97,16 +97,26 @@
ort six%0A
+import sys
%0A%0Afrom .
@@ -133,19 +133,16 @@
rrors%0A%0A%0A
-%0A%0A%0A
class _C
|
65fb9244df69646721c8273afae22fe6248976f0
|
optimise common.py
|
backend/service/common.py
|
backend/service/common.py
|
from service.base import BaseService
import config
### need to add rs
class CommonService(BaseService):
def __init__(self, db, rs):
super().__init__(db, rs)
CommonService.inst = self
def get_execute_type(self):
res = (yield self.db.execute("SELECT * FROM execute_types order by id")).fetchall()
ret = {}
for x in res:
ret[x['id']] = x
return ret
def get_verdict_type(self):
res = (yield self.db.execute("SELECT * FROM map_verdict_string order by id")).fetchall()
ret = {}
for x in res:
ret[x['id']] = x
return ret
|
Python
| 0.022141
|
@@ -235,32 +235,53 @@
):%0A res =
+%7B x%5B'id'%5D: x for x in
(yield self.db.
@@ -335,87 +335,9 @@
d%22))
-.fetchall()%0A ret = %7B%7D%0A for x in res:%0A ret%5Bx%5B'id'%5D%5D = x
+%7D
%0A
@@ -350,17 +350,17 @@
eturn re
-t
+s
%0A%0A de
@@ -398,16 +398,38 @@
res =
+ %7B x%5B'id'%5D: x for x in
(yield
@@ -496,87 +496,9 @@
d%22))
-.fetchall()%0A ret = %7B%7D%0A for x in res:%0A ret%5Bx%5B'id'%5D%5D = x
+%7D
%0A
@@ -511,10 +511,10 @@
eturn re
-t
+s
%0A
|
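The optimisation above replaces `fetchall()` plus an explicit accumulation loop with a dict comprehension keyed on `id`, built directly from the cursor. The same transformation on a plain list of dicts (sample rows invented for the example):

rows = [{"id": 1, "name": "gcc"}, {"id": 2, "name": "clang"}]  # stand-in for cursor rows

# What the old code did: fetch everything, then build the dict in a loop.
by_id = {}
for row in rows:
    by_id[row["id"]] = row

# What the commit switches to: a dict comprehension keyed on id.
by_id = {row["id"]: row for row in rows}

assert by_id[2]["name"] == "clang"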
16c0f9fee7ee9f67a64645f41584083373ebb2cd
|
fix indexing in the donation db model
|
blueprints/donations/donation_model.py
|
blueprints/donations/donation_model.py
|
__author__ = 'HansiHE'
from mongoengine import *
from datetime import datetime
from blueprints.auth.user_model import User
class TransactionLog(Document):
username = StringField()
date = DateTimeField(required=True, default=datetime.utcnow)
data = DictField()
class DonationTransactionStatus(EmbeddedDocument):
date = DateTimeField(required=True, default=datetime.utcnow)
status = StringField(required=True) # = payment_status
reason = StringField() # = pending_reason if exists or reason_code if exists
valid = BooleanField(required=True)
gross = FloatField(default=0)
fee = FloatField(default=0)
complete_data = DictField()
class Transaction(Document):
amount = FloatField(required=True) # = the actual calculated amount
created = DateTimeField(required=True, default=datetime.utcnow)
# https://developer.paypal.com/webapps/developer/docs/classic/ipn/integration-guide/IPNandPDTVariables/#id091EB04C0HS__id0913D0E0UQU
# Canceled_Reversal: valid=true
# Completed: valid=true
# Created: valid=false
# Denied: valid=false
# Expired: valid=false
# Failed: valid=false
# Pending: valid=true, reason=pending_reason
# Refunded: valid=false
# Reversed: valid=false, reason=ReasonCode
# Processed: valid=true
# Voided: valid=false
meta = {
'collection': 'financial_transactions',
'allow_inheritance': True,
#'indexes': [
# 'username',
# 'amount',
# {
# 'fields': ['transaction_id'],
# 'unique': True,
# 'sparse': True
# }
#]
} # TODO: Make indexes work for inheritance
class DonationTransaction(Transaction):
username = StringField()
email = StringField()
gross = FloatField(required=True) # = the total amount donated
fee = FloatField(required=True) # = the amount paypal has robbed us for
payment_type = StringField() # Should be either echeck or instant
transaction_id = StringField() # = parent_txn_id or txn_id, unique id # TODO: Add unique=true when indexes are working
valid = BooleanField() #Could be used for easy querying, should be set when payment_status is Pending or Completed. Changed to false if shit happens.
payment_status_events = ListField(EmbeddedDocumentField(DonationTransactionStatus)) # list of states received for this transaction
type = "donation"
class PaymentTransaction(Transaction):
note = StringField()
period_begin = DateTimeField(required=True)
period_end = DateTimeField(required=True)
user = ReferenceField(User, dbref=False, required=True)
type = "payment"
|
Python
| 0.000001
|
@@ -1484,17 +1484,16 @@
-#
'indexes
@@ -1510,35 +1510,8 @@
-# 'username',%0D%0A #
@@ -1518,17 +1518,16 @@
'amount'
-,
%0D%0A
@@ -1532,206 +1532,16 @@
-# %7B%0D%0A # 'fields': %5B'transaction_id'%5D,%0D%0A # 'unique': True,%0D%0A # 'sparse': True%0D%0A # %7D%0D%0A #%5D%0D%0A %7D # TODO: Make indexes work for inheritance
+%5D%0D%0A %7D
%0D%0A%0D%0A
@@ -1888,32 +1888,43 @@
d = StringField(
+unique=True
) # = parent_tx
@@ -1952,57 +1952,8 @@
e id
- # TODO: Add unique=true when indexes are working
%0D%0A
@@ -2243,24 +2243,24 @@
nsaction%0D%0A%0D%0A
-
type = %22
@@ -2268,24 +2268,349 @@
onation%22%0D%0A%0D%0A
+ meta = %7B%0D%0A 'allow_inheritance': True,%0D%0A 'indexes': %5B%0D%0A 'username',%0D%0A # 'amount',%0D%0A %7B%0D%0A 'fields': %5B'transaction_id'%5D,%0D%0A 'unique': True,%0D%0A 'sparse': True%0D%0A %7D%0D%0A %5D%0D%0A %7D # TODO: Make indexes work for inheritance%0D%0A%0D%0A
%0D%0Aclass Paym
|
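The fix above moves the index declarations onto `DonationTransaction`, where `transaction_id` actually exists, and makes that index unique and sparse. A stripped-down MongoEngine sketch of the resulting layout (class and field names mirror the record, but the models are illustrative and no database connection is made):

from mongoengine import Document, FloatField, StringField

class Transaction(Document):
    amount = FloatField(required=True)
    meta = {'collection': 'financial_transactions', 'allow_inheritance': True}

class DonationTransaction(Transaction):
    username = StringField()
    transaction_id = StringField()  # uniqueness enforced by the sparse index below
    meta = {
        'indexes': [
            'username',
            # Sparse: only documents that actually carry transaction_id are
            # indexed, so sibling subclasses sharing the collection can omit it.
            {'fields': ['transaction_id'], 'unique': True, 'sparse': True},
        ],
    }

Because inherited subclasses share one collection, the sparse flag is what lets other transaction types store documents without a `transaction_id` and still keep the unique constraint on donations.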
b9c076865f4e0ff9b4ab007472cbab735ccf01ab
|
Bump version to 3.1.2
|
osg_configure/version.py
|
osg_configure/version.py
|
__version__ = "3.1.1"
|
Python
| 0.000001
|
@@ -12,11 +12,11 @@
= %223.1.
-1
+2
%22%0A
|
214f4094b6b5c2f4a43ff96567a7bbe87ba63d28
|
Update bob.py
|
Python_sessions/session-2/practice_codes/bob.py
|
Python_sessions/session-2/practice_codes/bob.py
|
hello = "Hi Human, I am B.O.B. "
question1 = "What is your name? "
response1 = "Thats a lovely name! "
input(hello+question1)
print response1
answer_type = "Please answer in 'yes' of 'no'. "
question2 = "Can I help you? "
response2 = "I am a computer, not a human. "
input(question2+answer_type)
print response2
question3 = "Did you like that information? "
goodbye = "Great. Goodbye! "
input(question3+answer_type)
print goodbye
|
Python
| 0.000002
|
@@ -175,17 +175,17 @@
'yes' o
-f
+r
'no'. %22
|
0845eddc933e439fba77083c0668a3bcf74f975e
|
add index for format for python 2.6
|
encrypted_fields/tests.py
|
encrypted_fields/tests.py
|
import re
from datetime import datetime
from django.db import models, connection
from django.test import TestCase
from .fields import (
EncryptedCharField,
EncryptedTextField,
EncryptedDateTimeField,
EncryptedIntegerField,
)
class TestModel(models.Model):
char = EncryptedCharField(max_length=255, null=True)
text = EncryptedTextField(null=True)
datetime = EncryptedDateTimeField(null=True)
integer = EncryptedIntegerField(null=True)
class FieldTest(TestCase):
def get_db_value(self, field, model_id):
cursor = connection.cursor()
cursor.execute(
'select {} '
'from encrypted_fields_testmodel '
'where id = {};'.format(field, model_id)
)
return cursor.fetchone()[0]
def test_char_field_encrypted(self):
plaintext = 'Oh hi, test reader!'
model = TestModel()
model.char = plaintext
model.save()
ciphertext = self.get_db_value('char', model.id)
self.assertNotEqual(plaintext, ciphertext)
self.assertTrue('test' not in ciphertext)
fresh_model = TestModel.objects.get(id=model.id)
self.assertEqual(fresh_model.char, plaintext)
def test_text_field_encrypted(self):
plaintext = 'Oh hi, test reader!' * 10
model = TestModel()
model.text = plaintext
model.save()
ciphertext = self.get_db_value('text', model.id)
self.assertNotEqual(plaintext, ciphertext)
self.assertTrue('test' not in ciphertext)
fresh_model = TestModel.objects.get(id=model.id)
self.assertEqual(fresh_model.text, plaintext)
def test_datetime_field_encrypted(self):
plaintext = datetime.now()
model = TestModel()
model.datetime = plaintext
model.save()
ciphertext = self.get_db_value('datetime', model.id)
# Django's normal date serialization format
self.assertTrue(re.search('^\d\d\d\d-\d\d-\d\d', ciphertext) is None)
fresh_model = TestModel.objects.get(id=model.id)
self.assertEqual(fresh_model.datetime, plaintext)
def test_integer_field_encrypted(self):
plaintext = 42
model = TestModel()
model.integer = plaintext
model.save()
ciphertext = self.get_db_value('integer', model.id)
self.assertNotEqual(plaintext, ciphertext)
self.assertNotEqual(plaintext, str(ciphertext))
fresh_model = TestModel.objects.get(id=model.id)
self.assertEqual(fresh_model.integer, plaintext)
|
Python
| 0.000002
|
@@ -621,16 +621,17 @@
select %7B
+0
%7D '%0A
@@ -698,16 +698,17 @@
e id = %7B
+1
%7D;'.form
|
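The two-character diff above exists because auto-numbered `{}` fields in `str.format` only arrived in Python 2.7/3.1; on Python 2.6 they raise `ValueError`, so the query template needs explicit positional indices. For illustration:

# Auto-numbered fields fail on Python 2.6:
#   'select {} from t where id = {};'.format('char', 1)   -> ValueError
# Explicit indices work on every version that has str.format (2.6+):
query = 'select {0} from encrypted_fields_testmodel where id = {1};'.format('char', 1)
print(query)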
f44c7670ee06d0ff3976c11b921cc3f288b0259b
|
add TestMPEventLoopRunner.test_ProgressMonitor
|
tests/EventReader/test_MPEventLoopRunner.py
|
tests/EventReader/test_MPEventLoopRunner.py
|
from AlphaTwirl.EventReader import MPEventLoopRunner
import unittest
##____________________________________________________________________________||
class MockReader(object):
def __init__(self):
self._results = None
def setResults(self, results):
self._results = results
def results(self):
return self._results
##____________________________________________________________________________||
class MockEventLoop(object):
def __init__(self, readers):
self.readers = readers
def __call__(self, progressReporter):
for reader in self.readers:
reader._results = 3456
return self.readers
##____________________________________________________________________________||
class TestMPEventLoopRunner(unittest.TestCase):
def test_begin_end(self):
runner = MPEventLoopRunner()
runner.begin()
runner.end()
def test_run(self):
runner = MPEventLoopRunner()
runner.begin()
reader1 = MockReader()
reader2 = MockReader()
eventLoop = MockEventLoop([reader1, reader2])
runner.run(eventLoop)
self.assertIsNone(reader1._results)
self.assertIsNone(reader2._results)
runner.end()
self.assertEqual(3456, reader1._results)
self.assertEqual(3456, reader2._results)
##____________________________________________________________________________||
|
Python
| 0.000001
|
@@ -61,16 +61,26 @@
unittest
+%0Aimport os
%0A%0A##____
@@ -763,602 +763,2184 @@
ass
-TestMPEventLoopRunner(unittest.TestCase):%0A%0A def test_begin_end(self):%0A runner = MPEventLoopRunner()%0A runner.begin()%0A runner.end()%0A%0A def test_run(self):%0A runner = MPEventLoopRunner()%0A runner.begin()%0A%0A reader1 = MockReader()%0A reader2 = MockReader()%0A eventLoop = MockEventLoop(%5Breader1, reader2%5D)%0A runner.run(eventLoop)%0A%0A self.assertIsNone(reader1._results)%0A self.assertIsNone(reader2._results)%0A%0A runner.end()%0A%0A self.assertEqual(3456, reader1._results)%0A self.assertEqual(3456, reader2._results
+MockEventLoopForProgressReporterTest(object):%0A def __init__(self, readers):%0A self.readers = readers%0A%0A def __call__(self, progressReporter):%0A for reader in self.readers:%0A reader._results = %5B3456, progressReporter%5D%0A return self.readers%0A%0A##____________________________________________________________________________%7C%7C%0Aclass MockProgressReporter(object):%0A def report(self, event, component): pass%0A%0A##____________________________________________________________________________%7C%7C%0Aclass MockProgressMonitor(object):%0A def createReporter(self): return MockProgressReporter()%0A def addWorker(self, worker): pass%0A def monitor(self): pass%0A def last(self): pass%0A%0A##____________________________________________________________________________%7C%7C%0Aclass TestMPEventLoopRunner(unittest.TestCase):%0A%0A def test_begin_end(self):%0A runner = MPEventLoopRunner()%0A runner.begin()%0A runner.end()%0A%0A def test_run(self):%0A runner = MPEventLoopRunner()%0A runner.begin()%0A%0A reader1 = MockReader()%0A reader2 = MockReader()%0A eventLoop = MockEventLoop(%5Breader1, reader2%5D)%0A runner.run(eventLoop)%0A%0A self.assertIsNone(reader1._results)%0A self.assertIsNone(reader2._results)%0A%0A runner.end()%0A%0A self.assertEqual(3456, reader1._results)%0A self.assertEqual(3456, reader2._results)%0A%0A def test_ProgressMonitor(self):%0A progressMonitor = MockProgressMonitor()%0A runner = MPEventLoopRunner(nprocesses = 3, progressMonitor = progressMonitor)%0A runner.begin()%0A%0A reader1 = MockReader()%0A reader2 = MockReader()%0A eventLoop = MockEventLoopForProgressReporterTest(%5Breader1, reader2%5D)%0A runner.run(eventLoop)%0A%0A self.assertIsNone(reader1._results)%0A self.assertIsNone(reader2._results)%0A%0A runner.end()%0A%0A self.assertEqual(3456, reader1._results%5B0%5D)%0A self.assertEqual(3456, reader2._results%5B0%5D)%0A%0A # assert that the EventLoop received a ProgressReporter%0A self.assertIsInstance(reader1._results%5B1%5D, MockProgressReporter)%0A self.assertIsInstance(reader2._results%5B1%5D, MockProgressReporter
)%0A%0A#
|
e6e0d96790d71caccb3f00487bfeeddccdc78139
|
Fix variable and return value
|
app/raw/tasks.py
|
app/raw/tasks.py
|
from __future__ import absolute_import
from celery import shared_task
from twisted.internet import reactor
from scrapy.crawler import Crawler
from scrapy import log, signals
from scrapy.utils.project import get_project_settings
import os
from raw.scraper.spiders.legco_library import LibraryAgendaSpider
from raw.scraper.spiders.members import LibraryMemberSpider
@shared_task
def run_scraper():
output_name = 'foo.jl'
spider = LibraryAgendaSpider()
settings = get_project_settings()
url_path = os.path.join(settings.get('DATA_DIR_BASE'), 'scrapes', output_name)
settings.overrides['FEED_URI'] = url_path
crawler = Crawler(settings)
crawler.signals.connect(reactor.stop, signal=signals.spider_closed)
crawler.configure()
crawler.crawl(spider)
crawler.start()
log.start(loglevel=log.INFO, logstdout=True)
reactor.run()
return output_name
|
Python
| 0.000005
|
@@ -495,19 +495,22 @@
s()%0A
-url
+output
_path =
@@ -618,11 +618,14 @@
%5D =
-url
+output
_pat
@@ -886,13 +886,13 @@
output_
-name
+path
%0A
|
8c1e0e5a7aef661152fc76757fb8f1403af56133
|
fix tests
|
tests/dataset/test_highly_variable_genes.py
|
tests/dataset/test_highly_variable_genes.py
|
from unittest import TestCase
import numpy as np
from scvi.dataset import GeneExpressionDataset, BrainLargeDataset
class TestHighlyVariableGenes(TestCase):
def test_sparse_no_batch_correction(self):
for flavor in ["seurat", "cell_ranger", "seurat_v3"]:
dataset = BrainLargeDataset(
save_path="tests/data",
sample_size_gene_var=10,
nb_genes_to_keep=128,
max_cells_to_keep=256,
)
n_genes = dataset.nb_genes
n_top = n_genes // 2
dataset.subsample_genes(mode=flavor, new_n_genes=n_top, n_bins=3)
assert dataset.nb_genes < n_genes
# For some reason the new number of genes can be slightly different than n_top
dataset._highly_variable_genes(flavor=flavor, n_bins=3)
dataset = BrainLargeDataset(
save_path="tests/data",
sample_size_gene_var=10,
nb_genes_to_keep=128,
max_cells_to_keep=256,
)
n_genes = dataset.nb_genes
dataset.subsample_genes()
assert dataset.nb_genes < n_genes, "subsample_genes did not filter out genes"
def test_batch_correction(self):
data = [
np.random.randint(1, 5, size=(50, 25)),
np.random.randint(1, 5, size=(50, 25)),
np.random.randint(1, 5, size=(50, 25)),
]
dataset = GeneExpressionDataset()
dataset.populate_from_per_batch_list(data)
n_genes = dataset.nb_genes
n_top = n_genes // 2
dataset.highly_variable_genes(n_bins=3, flavor="seurat")
dataset.highly_variable_genes(n_bins=3, flavor="seurat")
df = dataset.highly_variable_genes(n_bins=3, n_top_genes=n_top, flavor="seurat")
assert df["highly_variable"].sum() >= n_top
dataset.subsample_genes()
new_genes = dataset.nb_genes
assert n_genes > new_genes, "subsample_genes did not filter out genes"
pass
def test_dense_subsample_genes(self):
data = [
np.random.randint(1, 5, size=(50, 26)),
np.random.randint(1, 5, size=(50, 26)),
np.random.randint(1, 5, size=(50, 26)),
]
# With default
dataset = GeneExpressionDataset()
dataset.populate_from_per_batch_list(data)
n_genes = dataset.nb_genes
n_top = n_genes // 2
dataset.subsample_genes(new_n_genes=n_top, mode="cell_ranger")
assert dataset.nb_genes == n_top
# With Seurat
dataset = GeneExpressionDataset()
dataset.populate_from_per_batch_list(data)
dataset.subsample_genes(new_n_genes=n_top, mode="seurat")
assert dataset.nb_genes == n_top
# With Seurat v3
dataset = GeneExpressionDataset()
dataset.populate_from_per_batch_list(data)
dataset.subsample_genes(new_n_genes=n_top, mode="seurat_v3")
assert dataset.nb_genes == n_top
|
Python
| 0.000001
|
@@ -1563,32 +1563,33 @@
dataset.
+_
highly_variable_
@@ -1620,32 +1620,37 @@
seurat%22)%0A
+ df =
dataset.highly_
@@ -1634,32 +1634,33 @@
df = dataset.
+_
highly_variable_
@@ -1669,78 +1669,21 @@
nes(
-n_bins=3, flavor=%22seurat%22)%0A df = dataset.highly_variable_genes(
+%0A
n_bi
@@ -1714,32 +1714,41 @@
flavor=%22seurat%22
+%0A
)%0A assert
@@ -1778,32 +1778,32 @@
sum() %3E= n_top%0A%0A
-
dataset.
@@ -1810,32 +1810,49 @@
subsample_genes(
+n_top_genes=n_top
)%0A new_ge
|
600e68fc3e4b708090f5c3349d002ea9c3d2fbf8
|
improve examples group
|
tests/examples/user_code/publisher_group.py
|
tests/examples/user_code/publisher_group.py
|
import time
from celery import chord, group
from .tasks import *
chord(
group(function_value.s(0, value=i) for i in range(1000)),
function_any.s(from_chord=True)
)()
time.sleep(5)
|
Python
| 0.000002
|
@@ -49,21 +49,22 @@
om .
-tasks
+worker
import
*%0A%0Ac
@@ -63,9 +63,41 @@
ort
-*
+function_aggregate, function_test
%0A%0Ach
@@ -120,21 +120,20 @@
unction_
-value
+test
.s(0, va
@@ -180,10 +180,16 @@
on_a
-ny
+ggregate
.s(f
|
99a1f0bbc8cd8caf1aec5510af1629c23e9cd92f
|
Add prehook to exclude objects that are flagged as removed
|
objectset/resources.py
|
objectset/resources.py
|
from django.core.exceptions import ImproperlyConfigured
try:
import restlib2 # noqa
import preserialize # noqa
except ImportError:
raise ImproperlyConfigured('restlib2 and django-preserialize must be '
'installed to use the resource classes')
from django.conf.urls import patterns, url
from django.http import HttpResponse
from restlib2.resources import Resource
from restlib2.http import codes
from restlib2.params import Parametizer, BoolParam
from preserialize.serialize import serialize
from .models import ObjectSet
from .forms import objectset_form_factory
SET_OPERATIONS = {
'and': '__and__',
'or': '__or__',
'xor': '__xor__',
'sub': '__sub__',
}
INPLACE_SET_OPERATIONS = {
'and': '__iand__',
'or': '__ior__',
'xor': '__ixor__',
'sub': '__isub__',
}
class SetParametizer(Parametizer):
embed = BoolParam()
class BaseSetResource(Resource):
parametizer = SetParametizer
model = None
template = None
object_template = None
form_class = None
def get_params(self, request):
return self.parametizer().clean(request.GET)
def get_serialize_template(self, request, **kwargs):
"Prepare the serialize template"
# TODO
instance = self.model()
relation = instance._set_object_rel
if self.object_template:
object_template = self.object_template
else:
object_template = {'fields': [':pk']}
if self.template:
template = self.template
else:
# Use the generic 'objects' key for the target relation.
# This makes it simpler to consume by clients
template = {
'fields': [':local', 'objects'],
'exclude': [relation],
'aliases': {
'objects': relation,
},
'related': {
relation: object_template,
}
}
# If it is requested to not be embedded, exclude the target
# relation and the 'objects' alias from the template
if not kwargs.get('embed', False):
template['exclude'].append('objects')
return template
def get_queryset(self, request):
return self.model.objects.all()
def get_object(self, request, **kwargs):
try:
return self.get_queryset(request).get(**kwargs)
except self.model.DoesNotExist:
pass
class SetsResource(BaseSetResource):
def get(self, request):
params = self.get_params(request)
template = self.get_serialize_template(request, **params)
return serialize(self.get_queryset(request), **template)
def post(self, request):
form = self.form_class(request.data)
if form.is_valid():
instance = form.save()
params = self.get_params(request)
template = self.get_serialize_template(request, **params)
return serialize(instance, **template)
return HttpResponse(dict(form.errors),
status=codes.unprocessable_enity)
class SetResource(BaseSetResource):
def is_not_found(self, request, response, pk):
instance = self.get_object(pk=pk)
if instance is None:
return True
request.instance = instance
def get(self, request, pk):
return serialize(request.instance, **self.template)
def put(self, request, pk):
form = self.form_class(request.data, instance=request.instance)
if form.is_valid():
form.save()
return HttpResponse(status=codes.no_content)
return HttpResponse(dict(form.errors),
status=codes.unprocessable_enity)
def delete(self, request, pk):
request.instance.delete()
return HttpResponse(status=codes.no_content)
class SetObjectsResource(BaseSetResource):
pass
class SetOperationsResource(BaseSetResource):
def post(request, pk, *args):
pass
def get_url_patterns(Model, resources=None, prefix=None):
"""Returns urlpatterns for the defined resources.
`resources` is a dict corresponding to each resource:
- `sets` => SetsResource
- `set` => SetResource
- `operations` => SetOperationsResource
- `objects` => SetObjectsResource
"""
# A few checks to keep things sane..
if not issubclass(Model, ObjectSet):
raise TypeError('{0} must subclass ObjectSet'.format(Model.__name__))
if not resources:
resources = {}
default_form_class = objectset_form_factory(Model)
if 'sets' not in resources:
class DefaultSetsResource(SetsResource):
model = Model
form_class = default_form_class
resources['sets'] = DefaultSetsResource
if 'set' not in resources:
class DefaultSetResource(SetResource):
model = Model
form_class = default_form_class
resources['set'] = DefaultSetResource
if 'objects' not in resources:
class DefaultSetObjectsResource(SetObjectsResource):
model = Model
form_class = default_form_class
resources['objects'] = DefaultSetObjectsResource
if 'operations' not in resources:
class DefaultSetOperationsResource(SetOperationsResource):
model = Model
form_class = default_form_class
resources['operations'] = DefaultSetOperationsResource
# Define a prefix for the url names to prevent conflicts
if not prefix:
prefix = '{0}-'.format(Model.__name__.lower())
return patterns(
'',
url(r'^$', resources['sets'](),
name='{0}sets'.format(prefix)),
url(r'^(?P<pk>\d+)/$', resources['set'](),
name='{0}set'.format(prefix)),
url(r'^(?P<pk>\d+)/objects/$', resources['objects'](),
name='{0}objects'.format(prefix)),
url(r'^(?P<pk>\d+)/(?:(and|or|xor|sub)/(\d+)/)+/$',
resources['operations'](),
name='{0}operations'.format(prefix)),
)
|
Python
| 0
|
@@ -559,16 +559,27 @@
bjectSet
+, SetObject
%0Afrom .f
@@ -844,16 +844,235 @@
_',%0A%7D%0A%0A%0A
+def set_objects_prehook(queryset):%0A %22Prehook for set objects to exclude tracked deleted objects.%22%0A if issubclass(queryset.model, SetObject):%0A queryset = queryset.exclude(removed=True)%0A return queryset%0A%0A%0A
class Se
@@ -1693,25 +1693,107 @@
= %7B
-'fields': %5B':pk'%5D
+%0A 'fields': %5B':local'%5D,%0A 'prehook': set_objects_prehook,%0A
%7D%0A%0A
|
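The new `set_objects_prehook` above is simply a callable that receives the queryset before serialization and may return a filtered replacement. A rough plain-Python illustration of that hook shape (lists of dicts stand in for the queryset; this is not django-preserialize's real machinery):

rows = [
    {"pk": 1, "removed": False},
    {"pk": 2, "removed": True},   # flagged as removed, should never be serialized
]

def exclude_removed(items):
    # The "prehook": gets the collection first and returns a filtered view.
    return [item for item in items if not item.get("removed")]

def serialize(items, prehook=None):
    if prehook is not None:
        items = prehook(items)
    return [{"pk": item["pk"]} for item in items]

print(serialize(rows, prehook=exclude_removed))   # -> [{'pk': 1}]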
902cbd511f2f42948991713cdf0a98c4473c66c0
|
add tqdm to hagrid setup.py
|
packages/hagrid/setup.py
|
packages/hagrid/setup.py
|
# stdlib
import platform
# third party
from setuptools import find_packages
from setuptools import setup
__version__ = "0.2.89"
DATA_FILES = {
"img": ["hagrid/img/*.png"],
}
packages = [
"ascii_magic",
"click",
"cryptography>=37.0.2",
"gitpython",
"jinja2",
"names",
"packaging>=21.3",
"paramiko",
"pyOpenSSL>=22.0.0",
"requests",
"rich",
"setuptools",
"virtualenv-api",
"virtualenv",
"PyYAML",
]
if platform.system().lower() != "windows":
packages.extend(["ansible", "ansible-core"])
# Pillow binary wheels for Apple Silicon on Python 3.8 don't seem to work well
# try using Python 3.9+ for HAGrid on Apple Silicon
setup(
name="hagrid",
description="Happy Automation for Grid",
long_description="HAGrid is the swiss army knife of OpenMined's PySyft and PyGrid.",
long_description_content_type="text/plain",
version=__version__,
author="Andrew Trask <andrew@openmined.org>",
packages=find_packages(),
package_data=DATA_FILES,
install_requires=packages,
include_package_data=True,
entry_points={"console_scripts": ["hagrid = hagrid.cli:cli"]},
)
|
Python
| 0
|
@@ -456,16 +456,28 @@
yYAML%22,%0A
+ %22tqdm%22,%0A
%5D%0A%0Aif pl
|
61d9cc9ea9585550908300a11f49fbc44fdf17e1
|
add simple correctness test for knn kl estimator
|
skl_groups/tests/test_divs_knn.py
|
skl_groups/tests/test_divs_knn.py
|
from __future__ import division
from functools import partial
import logging
import os
import sys
import numpy as np
from scipy.special import psi
from sklearn.externals.six.moves import xrange
from nose.tools import assert_raises
from testfixtures import LogCapture
if __name__ == '__main__':
# make this copy of skl_groups importable
_this_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, os.path.dirname(os.path.dirname(_this_dir)))
from skl_groups import Features
from skl_groups.divergences import KNNDivergenceEstimator
################################################################################
def test_knn_basic():
dim = 3
n = 20
np.random.seed(47)
bags = Features([np.random.randn(np.random.randint(30, 100), dim)
for _ in xrange(n)])
# just make sure it runs
est = KNNDivergenceEstimator(
div_funcs=('kl', 'js', 'renyi:.9', 'l2'), Ks=(3, 4))
res = est.fit_transform(bags)
assert res.shape == (4, 2, n, n, 2)
assert np.all(np.isfinite(res))
# test that JS blows up when there's a huge difference in bag sizes
# (so that K is too low)
assert_raises(
ValueError,
partial(est.fit_transform, bags + [np.random.randn(1000, dim)]))
# test fit() and then transform() with JS, with different-sized test bags
est = KNNDivergenceEstimator(div_funcs=('js',), Ks=(5,))
est.fit(bags)
with LogCapture('skl_groups.divergences.knn', level=logging.WARNING) as l:
res = est.transform([np.random.randn(300, dim)])
assert len(l.records) == 1
assert l.records[0].message.startswith('Fit with a lower max_K')
def test_knn_js():
# verified by hand
x = np.reshape([0, 1, 3], (3, 1))
y = np.reshape([.2, 1.2, 3.2, 6.2], (4, 1))
mix_ent = np.log(2) + np.log(3) + psi(2) \
+ (np.log(.2) + np.log(.8) + np.log(1.8) - psi(1) - 2*psi(2)) / 6 \
+ (np.log(.2) + np.log(2) + np.log(3.2) - psi(1) - 3*psi(2)) / 8
x_ent = np.log(2) + (np.log(3) + np.log(2) + np.log(3)) / 3
y_ent = np.log(3) + (np.log(3) + np.log(2) + np.log(3) + np.log(5)) / 4
right_js = mix_ent - (x_ent + y_ent) / 2
# TODO: clamping???
est = KNNDivergenceEstimator(div_funcs=['js'], Ks=[2])
res = est.fit([x]).transform([y]).squeeze()
assert res.shape == (2,)
assert res[0] == res[1]
err_msg = "got {}, expected {}"
assert np.allclose(res[0], right_js), err_msg.format(res, right_js)
################################################################################
if __name__ == '__main__':
import nose
nose.main()
|
Python
| 0.000003
|
@@ -1672,16 +1672,819 @@
x_K')%0A%0A%0A
+def test_knn_kl():%0A # verified by hand%0A # Dhat(P%7C%7CQ) = %5Clog m/(n-1) + d / n %5Csum_%7Bi=1%7D%5En %5Clog %5Cnu_k(i)/rho_k(i)%0A x = np.reshape(%5B0., 1, 3%5D, (3, 1))%0A y = np.reshape(%5B.2, 1.2, 3.2, 7.2%5D, (4, 1))%0A%0A n = x.shape%5B0%5D%0A m = y.shape%5B0%5D%0A%0A x_to_y = np.log(m / (n-1)) + 1/n * (%0A np.log(1.2 / 3) + np.log(.8 / 2) + np.log(1.8 / 3))%0A y_to_x = np.log(n / (m-1)) + 1/m * (%0A np.log(.8 / 3) + np.log(1.2 / 2) + np.log(2.2 / 3) + np.log(6.2 / 6))%0A%0A est = KNNDivergenceEstimator(div_funcs=%5B'kl'%5D, Ks=%5B2%5D, clamp=False)%0A res = est.fit_transform(%5Bx, y%5D).squeeze()%0A # assert res%5B0, 0%5D == 0%0A # assert res%5B1, 1%5D == 0%0A assert np.allclose(res%5B0, 1%5D, x_to_y), %22%7B%7D vs %7B%7D%22.format(res%5B0, 1%5D, x_to_y)%0A assert np.allclose(res%5B1, 0%5D, y_to_x), %22%7B%7D vs %7B%7D%22.format(res%5B1, 0%5D, y_to_x)%0A%0A%0A
def test
|
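The hand-worked numbers in the new test follow the estimator quoted in its comment, Dhat(P||Q) = log(m/(n-1)) + (d/n) * sum_i log(nu_k(i)/rho_k(i)). A small self-contained sketch of that formula using SciPy k-d trees (assuming the usual convention that rho_k excludes the query point itself; this is not skl_groups' implementation):

import numpy as np
from scipy.spatial import cKDTree

def knn_kl_estimate(x, y, k=2):
    # Dhat(P||Q) = log(m / (n - 1)) + d/n * sum_i log(nu_k(i) / rho_k(i))
    # rho_k(i): distance from x_i to its k-th nearest neighbour in x, excluding x_i
    # nu_k(i):  distance from x_i to its k-th nearest neighbour in y
    n, d = x.shape
    m = y.shape[0]
    rho = cKDTree(x).query(x, k=k + 1)[0][:, -1]   # k+1 because the nearest hit is x_i itself
    nu = cKDTree(y).query(x, k=k)[0][:, -1]
    return np.log(m / (n - 1)) + d / n * np.sum(np.log(nu / rho))

x = np.reshape([0., 1, 3], (3, 1))
y = np.reshape([.2, 1.2, 3.2, 7.2], (4, 1))
print(knn_kl_estimate(x, y, k=2))   # agrees with the hand-computed x_to_y in the test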
f520d71d75dea757794b33f2d0e8a7c8c6204717
|
Add legacy_url for accepted orgs
|
app/soc/modules/gsoc/views/accepted_orgs.py
|
app/soc/modules/gsoc/views/accepted_orgs.py
|
#!/usr/bin/env python2.5
#
# Copyright 2011 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module containing the views for GSoC accepted orgs.
"""
__authors__ = [
'"Sverre Rabbelier" <sverre@rabbelier.nl>',
]
from django.conf.urls.defaults import url
from django.core.urlresolvers import reverse
from soc.logic.exceptions import AccessViolation
from soc.views.template import Template
from soc.modules.gsoc.views.base import RequestHandler
from soc.modules.gsoc.views.helper import lists
from soc.modules.gsoc.views.helper import url_patterns
from soc.modules.gsoc.logic.models.organization import logic as org_logic
class AcceptedOrgsList(Template):
"""Template for list of accepted organizations.
"""
def __init__(self, request, data):
self.request = request
self.data = data
r = data.redirect
list_config = lists.ListConfiguration()
list_config.addSimpleColumn('name', 'Name')
list_config.addSimpleColumn('link_id', 'Link ID', hidden=True)
list_config.setRowAction(
lambda e, *args, **kwargs: r.organization(e).urlOf('gsoc_org_home'))
list_config.addColumn('tags', 'Tags',
lambda e, *args, **kwargs: e.tags_string(e.org_tag))
self._list_config = list_config
def context(self):
description = 'List of organizations accepted into %s' % (
self.data.program.name)
list = lists.ListConfigurationResponse(self._list_config, 0, description)
return {
'lists': [list],
}
def getListData(self):
idx = lists.getListIndex(self.request)
if idx == 0:
fields = {'scope': self.data.program,
'status': ['active', 'inactive']}
response_builder = lists.QueryContentResponseBuilder(
self.request, self._list_config, org_logic, fields)
return response_builder.build()
else:
return None
def templatePath(self):
return "v2/modules/gsoc/accepted_orgs/_project_list.html"
class AcceptedOrgsPage(RequestHandler):
"""View for the accepted organizations page.
"""
def templatePath(self):
return 'v2/modules/gsoc/accepted_orgs/base.html'
def djangoURLPatterns(self):
return [
url(r'^gsoc/accepted_orgs/%s$' % url_patterns.PROGRAM, self,
name='gsoc_accepted_orgs')
]
def checkAccess(self):
self.check.acceptedOrgsAnnounced()
def jsonContext(self):
list_content = AcceptedOrgsList(self.request, self.data).getListData()
if not list_content:
raise AccessViolation(
'You do not have access to this data')
return list_content.content()
def context(self):
return {
'page_name': "Accepted organizations for %s" % self.data.program.name,
'accepted_orgs_list': AcceptedOrgsList(self.request, self.data),
}
|
Python
| 0
|
@@ -2796,16 +2796,94 @@
d_orgs')
+,%0A url(r'gsoc/program/accepted_orgs/%25s$' %25 url_patterns.PROGRAM, self),
%0A %5D%0A%0A
|
61d67f1be8ef87d2835786f03d2af34fba50ab5d
|
fix tests
|
smartmin/templatetags/smartmin.py
|
smartmin/templatetags/smartmin.py
|
from django import template
from datetime import datetime
from django.utils import simplejson
from django.template import TemplateSyntaxError
register = template.Library()
@register.simple_tag(takes_context=True)
def get_list_class(context, list):
"""
Returns the class to use for the passed in list. We just build something up
from the object type for the list.
"""
css = "list_%s_%s" % (list.model._meta.app_label, list.model._meta.module_name)
return css
@register.simple_tag(takes_context=True)
def get_value_from_view(context, field):
"""
Responsible for deriving the displayed value for the passed in 'field'.
This first checks for a particular method on the ListView, then looks for a method
on the object, then finally treats it as an attribute.
"""
view = context['view']
obj = None
if 'object' in context:
obj = context['object']
value = view.lookup_field_value(context, obj, field)
if type(value) == datetime:
return value.strftime("%b %d, %Y %H:%M")
return value
@register.simple_tag(takes_context=True)
def get_value(context, obj, field):
"""
Responsible for deriving the displayed value for the passed in 'field'.
This first checks for a particular method on the ListView, then looks for a method
on the object, then finally treats it as an attribute.
"""
view = context['view']
value = view.lookup_field_value(context, obj, field)
if type(value) == datetime:
return value.strftime("%b %d, %Y %H:%M")
return value
@register.simple_tag(takes_context=True)
def get_class(context, field, obj=None):
"""
Looks up the class for this field
"""
view = context['view']
return view.lookup_field_class(field, obj, "field_" + field)
@register.simple_tag(takes_context=True)
def get_label(context, field, obj=None):
"""
Responsible for figuring out the right label for the passed in field.
The order of precedence is:
1) if the view has a field_config and a label specified there, use that label
    2) check for a form in the view; if it contains that field, use its value
"""
view = context['view']
return view.lookup_field_label(context, field, obj)
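# A hedged illustration (not taken from the original docs): with the
# precedence described above, a template rendered through a SmartView
# subclass could write
#
#     {% load smartmin %}
#     {% get_label "name" %}
#
# and get either the label declared in the view's field_config or, failing
# that, the label of the matching form field, via view.lookup_field_label().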
@register.simple_tag(takes_context=True)
def get_field_link(context, field, obj=None):
"""
Determine what the field link should be for the given field, object pair
"""
view = context['view']
return view.lookup_field_link(context, field, obj)
@register.simple_tag(takes_context=True)
def view_as_json(context):
"""
Returns our view serialized as json
"""
view = context['view']
return simplejson.dumps(view.as_json(context))
@register.filter
def field(form, field):
return form[field]
@register.filter
def map(string, args):
return string % args.__dict__
@register.filter
def field_help(view, field):
"""
Returns the field help for the passed in field
"""
return view.lookup_field_help(field)
@register.filter
def get(dictionary, key):
"""
Simple dict lookup using two variables
"""
if key in dictionary:
return dictionary[key]
else:
return ''
@register.filter
def is_smartobject(obj):
"""
Returns whether the passed in object is a smart object
"""
from smartmin.models import SmartObject
return isinstance(obj, SmartObject)
@register.filter
def field_orderable(view, field):
"""
Returns whether the passed in field is orderable
"""
return view.lookup_field_orderable(field)
#
# Woot woot, simple pdb debugging. {% pdb %}
#
class PDBNode(template.Node):
def render(self, context):
import pdb; pdb.set_trace()
@register.tag
def pdb(parser, token):
return PDBNode()
@register.simple_tag(takes_context=True)
def getblock(context, prefix, suffix=None):
key = prefix
if suffix:
key += str(suffix)
if not 'blocks' in context:
raise TemplateSyntaxError("setblock/endblock can only be used with SmartView or it's subclasses")
if key in context['blocks']:
return context['blocks'][key]
else:
return ""
def setblock(parser, token):
args = token.split_contents()
if len(args) < 2:
raise TemplateSyntaxError("setblock tag takes one argument, the name of the block got: [%s]" % ",".join(args))
key = "".join(args[1:])
nodelist = parser.parse(('endsetblock',))
parser.delete_first_token()
return SetBlockNode(key, nodelist)
class SetBlockNode(template.Node):
def __init__(self, key, nodelist):
self.key = key
self.nodelist = nodelist
def render(self, context):
if not 'blocks' in context:
raise TemplateSyntaxError("setblock/endblock can only be used with SmartView or it's subclasses")
output = self.nodelist.render(context)
context['blocks'][self.key] = output
return ""
# register our tag
setblock = register.tag(setblock)
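# A rough usage sketch (illustrative template syntax, not from the original
# docs): a template rendered with a SmartView provides context['blocks'], so
# one fragment can stash markup,
#
#     {% setblock extra_js %}<script src="app.js"></script>{% endsetblock %}
#
# and another fragment can read it back with
#
#     {% getblock "extra_js" %}
#
# Both tags raise TemplateSyntaxError outside a SmartView, since the shared
# 'blocks' dict is then missing from the context.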
@register.inclusion_tag('smartmin/field.html', takes_context=True)
def render_field(context, field):
form = context['form']
view = context['view']
readonly_fields = view.derive_readonly()
# check that this field exists in our form, either as a real field or as a readonly one
if not field in form.fields and not field in readonly_fields:
raise TemplateSyntaxError("Error: No field '%s' found in form to render" % field)
inclusion_context = dict(field = field,
form = context['form'],
view = context['view'],
blocks = context['blocks'])
if 'object' in context:
inclusion_context['object'] = context['object']
return inclusion_context
|
Python
| 0.000001
|
@@ -2748,24 +2748,37 @@
rm, field):%0A
+ try:%0A
return f
@@ -2787,16 +2787,57 @@
m%5Bfield%5D
+%0A except KeyError:%0A return None
%0A%0A@regis
|
d9660073cbc2e59d7eb9625e45478c2f8b2e8fd9
|
Add utf8 support
|
staticjinja/staticjinja.py
|
staticjinja/staticjinja.py
|
"""
Simple static page generator.
Uses jinja2 to compile templates.
Templates should live inside `./templates` and will be compiled in '.'.
"""
import inspect
import os
import re
import easywatch
from jinja2 import Environment, FileSystemLoader
def build_template(env, template, outpath, **kwargs):
"""Compile a template.
    * env should be a Jinja environment object indicating where to find the
      templates.
    * template should be the Template object to compile, as loaded from
      `./templates`.
* outpath should be the name of the directory to build the template to
* kwargs should be a series of key-value pairs. These items will be
passed to the template to be used as needed.
"""
head, tail = os.path.split(template.name)
if head:
head = os.path.join(outpath, head)
if not os.path.exists(head):
os.makedirs(head)
template.stream(**kwargs).dump(os.path.join(outpath, template.name))
def should_render(filename):
"""Check if the file should be rendered.
- Hidden files will not be rendered.
- Files prefixed with an underscore are assumed to be partials and will
not be rendered.
"""
_, tail = os.path.split(filename)
return not (tail.startswith('_') or tail.startswith("."))
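# For example, given the rules above:
#   should_render("index.html")        -> True
#   should_render("blog/post.html")    -> True
#   should_render("_navigation.html")  -> False  (partial, leading underscore)
#   should_render(".hidden.html")      -> False  (hidden file)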
def render_templates(env, outpath, contexts=None, filter_func=None,
rules=None):
"""Render each template inside of `env`.
- env should be a Jinja environment object.
* outpath should be the name of the directory to build the template to
- contexts should be a list of regex-function pairs where the
function should return a context for that template and the regex,
if matched against a filename, will cause the context to be used.
- filter_func should be a function that takes a filename and returns
a boolean indicating whether or not a template should be rendered.
- rules are used to override template compilation. The value of rules
should be a list of `regex`-`function` pairs where `function` takes
a jinja2 Environment, the filename, and the context and builds the
template, and `regex` is a regex that if matched against a filename
will cause `function` to be used instead of the default.
"""
if contexts is None:
contexts = []
if filter_func is None:
filter_func = should_render
if rules is None:
rules = []
for template_name in env.list_templates(filter_func=filter_func):
print "Building %s..." % template_name
template = env.get_template(template_name)
# get the context
for regex, context_generator in contexts:
if re.match(regex, template_name):
try:
context = context_generator(template)
except TypeError:
context = context_generator()
break
else:
context = {}
# build the template
for regex, func in rules:
if re.match(regex, template_name):
func(env, template, **context)
break
else:
build_template(env, template, outpath, **context)
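# A minimal usage sketch (paths, patterns and context values below are made
# up for illustration):
#
#   env = Environment(loader=FileSystemLoader("templates"))
#   contexts = [(r"index\.html", lambda: {"title": "Home"})]
#   rules = [(r".*\.json",
#             lambda env, template, **ctx: template.stream(**ctx).dump("data.json"))]
#   render_templates(env, ".", contexts=contexts, rules=rules)
#
# index.html gets the {"title": "Home"} context, *.json templates are built by
# the custom rule, and everything else falls through to build_template().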
def main(searchpath="templates", outpath=".", filter_func=None, contexts=None,
extensions=None, rules=None, autoreload=True):
"""
Render each of the templates and then recompile on any changes.
- searchpath should be the directory that contains the template.
Defaults to "templates"
- filter_func should be a function that takes a filename and returns
a boolean indicating whether or not a template should be rendered.
Defaults to ignore any files with '.' or '_' prefixes.
- contexts should be a map of template names to functions where each
function should return a context for that template.
- extensions should be any extensions to add to the Environment.
- autoreload should be a boolean indicating whether or not to
automatically recompile templates. Defaults to true.
"""
if extensions is None:
extensions = []
# Get calling module
mod = inspect.getmodule(inspect.stack()[-1][0])
# Absolute path to project
project_path = os.path.realpath(os.path.dirname(mod.__file__))
# Absolute path to templates
template_path = os.path.join(project_path, searchpath)
loader = FileSystemLoader(searchpath=searchpath)
env = Environment(loader=loader,
extensions=extensions)
def build_all():
render_templates(env, outpath, contexts, filter_func=filter_func,
rules=rules)
print "Templates built."
build_all()
if autoreload:
print "Watching '%s' for changes..." % searchpath
print "Press Ctrl+C to stop."
def handler(event_type, src_path):
if event_type == "modified":
if src_path.startswith(template_path):
build_all()
easywatch.watch("./" + searchpath, handler)
print "Process killed"
return 0
|
Python
| 0
|
@@ -283,16 +283,26 @@
outpath,
+ encoding,
**kwarg
@@ -305,16 +305,16 @@
wargs):%0A
-
%22%22%22C
@@ -987,16 +987,26 @@
te.name)
+, encoding
)%0A%0A%0Adef
@@ -1430,16 +1430,33 @@
les=None
+, encoding=%22utf8%22
):%0A %22
@@ -3278,16 +3278,26 @@
outpath,
+ encoding,
**conte
@@ -3434,16 +3434,33 @@
oad=True
+, encoding=%22utf8%22
):%0A %22
@@ -4560,16 +4560,35 @@
archpath
+, encoding=encoding
)%0A en
@@ -4759,16 +4759,16 @@
r_func,%0A
-
@@ -4795,16 +4795,35 @@
es=rules
+, encoding=encoding
)%0A
|
e04b71d4fed675e3d8333e59ecf1df5a67ce42ac
|
remove martor app from django
|
oeplatform/settings.py
|
oeplatform/settings.py
|
"""
Django settings for oeplatform project.
Generated by 'django-admin startproject' using Django 1.8.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
try:
from .securitysettings import *
except:
raise Exception("No securitysettings found")
try:
from .martor_settings import *
except:
raise Exception("No martor_settings found")
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# Application definition
INSTALLED_APPS = (
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.messages",
"django.contrib.staticfiles",
"django.contrib.sessions.backends.signed_cookies",
"bootstrap4",
"rest_framework",
"rest_framework.authtoken",
"modelview",
"modelview.templatetags.modelview_extras",
"login",
"base",
"base.templatetags.base_tags",
"widget_tweaks",
"dataedit",
"colorfield",
"literature",
"api",
"ontology",
"axes",
"captcha",
"django.contrib.postgres",
"fontawesome_5",
"tutorials",
"martor"
)
MIDDLEWARE = (
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
"django.middleware.security.SecurityMiddleware",
"login.middleware.DetachMiddleware",
"axes.middleware.AxesMiddleware",
)
ROOT_URLCONF = "oeplatform.urls"
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
]
},
}
]
WSGI_APPLICATION = "oeplatform.wsgi.application"
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = "en-us"
TIME_ZONE = "Europe/Berlin"
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
AUTH_USER_MODEL = "login.myuser"
LOGIN_URL = "/user/login"
LOGIN_REDIRECT_URL = "/"
REST_FRAMEWORK = {
"DEFAULT_AUTHENTICATION_CLASSES": (
"rest_framework.authentication.BasicAuthentication",
"rest_framework.authentication.SessionAuthentication",
"rest_framework.authentication.TokenAuthentication",
)
}
|
Python
| 0.000001
|
@@ -1362,21 +1362,8 @@
s%22,%0A
- %22martor%22%0A
)%0A%0AM
@@ -2421,16 +2421,42 @@
ation%22%0A%0A
+ONTOLOGY_FOLDER = %22/tmp%22%0A%0A
# Intern
|
c1283375fdb4a8e61f9a85c5fb7efc7e9272a555
|
Fix 3.5 build
|
tests/record_tests/test_anonymous_record.py
|
tests/record_tests/test_anonymous_record.py
|
from __future__ import absolute_import, division, print_function, with_statement
from __future__ import unicode_literals
from deepstreampy import client
from deepstreampy.record import AnonymousRecord
from deepstreampy.constants import connection_state
from tornado import testing
from tornado.concurrent import Future
import sys
if sys.version_info[0] < 3:
import mock
else:
from unittest import mock
URL = "ws://localhost:7777/deepstream"
class AnonymousRecordTest(testing.AsyncTestCase):
def setUp(self):
super(AnonymousRecordTest, self).setUp()
self.client = client.Client(URL)
self.handler = mock.Mock()
self.handler.stream.closed = mock.Mock(return_value=False)
f = Future()
f.set_result(None)
self.handler.write_message = mock.Mock(return_value=f)
self.client._connection._state = connection_state.OPEN
self.client._connection._websocket_handler = self.handler
self.connection = self.client._connection
self.io_loop = self.connection._io_loop
self.record_handler = self.client.record
self.options = {}
self.general_callback = mock.Mock()
self.firstname_callback = mock.Mock()
self.ready_callback = mock.Mock()
self.name_changed_callback = mock.Mock()
self.error_callback = mock.Mock()
@testing.gen_test
def test_anonymous_record(self):
# Creates the anonymous record
anon_record = AnonymousRecord(self.record_handler)
self.assertIsNone(anon_record.get())
self.assertIsNone(anon_record.name)
# Works before name is set
with self.assertRaises(AttributeError):
anon_record.set()
anon_record.subscribe(self.general_callback)
anon_record.subscribe(self.firstname_callback, "firstname")
anon_record.on("ready", self.ready_callback)
self.firstname_callback.assert_not_called()
self.general_callback.assert_not_called()
self.handler.write_message.assert_not_called()
# Request a record when name is set
anon_record.name = "recordA"
self.assertEquals(anon_record.name, "recordA")
self.handler.write_message.assert_called_with(
"R{0}CR{0}recordA{1}".format(chr(31), chr(30)).encode())
# Updates subscription once the record is ready
self.firstname_callback.assert_not_called()
self.general_callback.assert_not_called()
self.ready_callback.assert_not_called()
self.record_handler.handle(
{"topic": "R",
"action": "R",
"data": ["recordA", 1, '{"firstname":"Yavor"}']})
self.ready_callback.assert_called_once()
self.firstname_callback.assert_called_once_with("Yavor")
self.general_callback.assert_called_once_with({"firstname": "Yavor"})
# Does nothing when another record changes
yield self.record_handler.get_record("recordB")
self.record_handler.handle(
{"topic": "R",
"action": "R",
"data": [
"recordB", 1, '{"firstname":"John", "lastname":"Smith"}']})
self.ready_callback.assert_called_once()
self.firstname_callback.assert_called_with("Yavor")
self.general_callback.assert_called_with({"firstname": "Yavor"})
# Updates subscriptions when the record changes to an existing one
anon_record.name = "recordB"
self.assertEquals(self.ready_callback.call_count, 2)
self.firstname_callback.assert_called_with("John")
self.general_callback.assert_called_with({"firstname": "John",
"lastname": "Smith"})
# Proxies calls through the underlying record
recordB = yield self.record_handler.get_record("recordB")
self.assertEquals(recordB.get("lastname"), "Smith")
anon_record.set("Doe", "lastname")
self.assertEquals(recordB.get("lastname"), "Doe")
# No error thrown if record is reset after being destroyed
anon_record._record.on("error", self.error_callback)
self.record_handler.handle(
{"topic": "R",
"action": "A",
"data": ["D", "recordB", 1]})
self.assertEquals(self.ready_callback.call_count, 2)
self.error_callback.assert_not_called()
# Emits nameChanged when name is changed
anon_record.on("nameChanged", self.name_changed_callback)
anon_record.name = "recordC"
self.name_changed_callback.assert_called_once_with("recordC")
# Emits an additional ready event once the new record becomes available
self.assertEquals(self.ready_callback.call_count, 2)
self.record_handler.handle(
{"topic": "R",
"action": "R",
"data": [
"recordC", 1, '{"firstname":"Yavor","lastname":"Paunov"}']})
self.assertEquals(self.ready_callback.call_count, 3)
|
Python
| 0
|
@@ -2686,42 +2686,54 @@
elf.
-ready_callback.assert_called_once(
+assertEquals(self.ready_callback.call_count, 1
)%0A
@@ -3191,42 +3191,54 @@
elf.
-ready_callback.assert_called_once(
+assertEquals(self.ready_callback.call_count, 1
)%0A
|
33122c5a1b3642712546bc290d591077ce4cc847
|
Fix test
|
tests/rest/client/v2_alpha/test_register.py
|
tests/rest/client/v2_alpha/test_register.py
|
from synapse.rest.client.v2_alpha.register import RegisterRestServlet
from synapse.api.errors import SynapseError
from twisted.internet import defer
from mock import Mock
from tests import unittest
from tests.utils import mock_getRawHeaders
import json
class RegisterRestServletTestCase(unittest.TestCase):
def setUp(self):
# do the dance to hook up request data to self.request_data
self.request_data = ""
self.request = Mock(
content=Mock(read=Mock(side_effect=lambda: self.request_data)),
path='/_matrix/api/v2_alpha/register'
)
self.request.args = {}
self.request.requestHeaders.getRawHeaders = mock_getRawHeaders()
self.appservice = None
self.auth = Mock(get_appservice_by_req=Mock(
side_effect=lambda x: self.appservice)
)
self.auth_result = (False, None, None, None)
self.auth_handler = Mock(
check_auth=Mock(side_effect=lambda x, y, z: self.auth_result),
get_session_data=Mock(return_value=None)
)
self.registration_handler = Mock()
self.identity_handler = Mock()
self.login_handler = Mock()
self.device_handler = Mock()
# do the dance to hook it up to the hs global
self.handlers = Mock(
registration_handler=self.registration_handler,
identity_handler=self.identity_handler,
login_handler=self.login_handler
)
self.hs = Mock()
self.hs.hostname = "superbig~testing~thing.com"
self.hs.get_auth = Mock(return_value=self.auth)
self.hs.get_handlers = Mock(return_value=self.handlers)
self.hs.get_auth_handler = Mock(return_value=self.auth_handler)
self.hs.get_device_handler = Mock(return_value=self.device_handler)
self.hs.config.enable_registration = True
# init the thing we're testing
self.servlet = RegisterRestServlet(self.hs)
@defer.inlineCallbacks
def test_POST_appservice_registration_valid(self):
user_id = "@kermit:muppet"
token = "kermits_access_token"
self.request.args = {
"access_token": "i_am_an_app_service"
}
self.request_data = json.dumps({
"username": "kermit"
})
self.appservice = {
"id": "1234"
}
self.registration_handler.appservice_register = Mock(
return_value=user_id
)
self.auth_handler.get_access_token_for_user_id = Mock(
return_value=token
)
(code, result) = yield self.servlet.on_POST(self.request)
self.assertEquals(code, 200)
det_data = {
"user_id": user_id,
"access_token": token,
"home_server": self.hs.hostname
}
self.assertDictContainsSubset(det_data, result)
@defer.inlineCallbacks
def test_POST_appservice_registration_invalid(self):
self.request.args = {
"access_token": "i_am_an_app_service"
}
self.request_data = json.dumps({
"username": "kermit"
})
self.appservice = None # no application service exists
result = yield self.servlet.on_POST(self.request)
self.assertEquals(result, (401, None))
def test_POST_bad_password(self):
self.request_data = json.dumps({
"username": "kermit",
"password": 666
})
d = self.servlet.on_POST(self.request)
return self.assertFailure(d, SynapseError)
def test_POST_bad_username(self):
self.request_data = json.dumps({
"username": 777,
"password": "monkey"
})
d = self.servlet.on_POST(self.request)
return self.assertFailure(d, SynapseError)
@defer.inlineCallbacks
def test_POST_user_valid(self):
user_id = "@kermit:muppet"
token = "kermits_access_token"
device_id = "frogfone"
self.request_data = json.dumps({
"username": "kermit",
"password": "monkey",
"device_id": device_id,
})
self.registration_handler.check_username = Mock(return_value=True)
self.auth_result = (True, None, {
"username": "kermit",
"password": "monkey"
}, None)
self.registration_handler.register = Mock(return_value=(user_id, None))
self.auth_handler.get_access_token_for_user_id = Mock(
return_value=token
)
self.device_handler.check_device_registered = \
Mock(return_value=device_id)
(code, result) = yield self.servlet.on_POST(self.request)
self.assertEquals(code, 200)
det_data = {
"user_id": user_id,
"access_token": token,
"home_server": self.hs.hostname,
"device_id": device_id,
}
self.assertDictContainsSubset(det_data, result)
self.auth_handler.get_login_tuple_for_user_id(
user_id, device_id=device_id, initial_device_display_name=None)
def test_POST_disabled_registration(self):
self.hs.config.enable_registration = False
self.request_data = json.dumps({
"username": "kermit",
"password": "monkey"
})
self.registration_handler.check_username = Mock(return_value=True)
self.auth_result = (True, None, {
"username": "kermit",
"password": "monkey"
}, None)
self.registration_handler.register = Mock(return_value=("@user:id", "t"))
d = self.servlet.on_POST(self.request)
return self.assertFailure(d, SynapseError)
|
Python
| 0.000004
|
@@ -1869,16 +1869,60 @@
n = True
+%0A self.hs.config.auto_join_rooms = %5B%5D
%0A%0A
|
afd0ce40107899a5096b16543919400c912649d7
|
improve detection of imported symbols (support `from x import y as z`)
|
doc/utils/checkapidoc.py
|
doc/utils/checkapidoc.py
|
# -*- coding: utf-8 -*-
"""Trac API doc checker
Verify that all symbols belonging to modules already documented in the doc/api
Sphinx sources are referenced.
See http://trac.edgewall.org/wiki/TracDev/ApiDocs
"""
import fnmatch
import os
import re
import sys
excluded_docs = ['index.rst']
api_doc = 'doc/api'
def usage(cmd):
print "Usage: %s [FILE...]" % (cmd,)
print
print "FILE is a %s file and can be a glob pattern." % (api_doc,)
print "If no files are given, check all."
exit(0)
def main(argv):
api_files = [rst for rst in os.listdir('doc/api')
if fnmatch.fnmatch(rst, '*.rst')
and rst not in excluded_docs]
cmd = argv.pop(0)
def has(*options):
for opt in options:
if opt in argv:
return argv.pop(argv.index(opt))
if has('-h', '--help'):
usage(cmd)
verbose = has('-v', '--verbose')
only_documented = not has('-a', '--all')
if argv:
given_files = []
for arg in argv:
arg = arg.replace('\\', '/').replace(api_doc + '/', '')
arg = arg.replace('.rst', '') + '.rst'
if '*' in arg: # glob pattern
given_files += [rst for rst in api_files
if fnmatch.fnmatch(rst, arg)]
elif arg in api_files:
given_files.append(arg)
api_files = given_files
for rst in api_files:
check_api_doc(rst, verbose, only_documented)
def check_api_doc(rst, verbose, only_documented):
if verbose:
print "== Checking %s ... " % (rst,)
module_name = rst.replace('_', '.').replace('.rst', '')
try:
module = __import__(module_name, globals(), {}, ['__all__'])
except ImportError, e:
print "Skipping %s (%s)" % (rst, e)
return
all = getattr(module, '__all__', None)
if not all:
print "Warning: %s doesn't define __all__, using exported symbols." % (
module_name,)
all = get_default_symbols(module, only_documented)
symbols, keywords = get_sphinx_documented_symbols(rst)
for symbol in sorted(all):
if symbol in symbols:
if verbose:
print " - OK %14s :: %s" % (
keywords[symbols.index(symbol)], symbol)
else:
value = getattr(module, symbol)
cls = getattr(value, '__class__', None)
keyword = 'attribute'
if not cls or cls.__name__ == 'type':
keyword = 'class'
elif cls.__name__ in ('function', 'module'):
keyword = cls.__name__
print " * .. %14s :: %s" % ('auto' + keyword, symbol)
sphinx_doc_re = re.compile(r'''
^.. \s+ ((?:py:|auto)(?:module|class|function|attribute)) # keyword
\s* :: \s* ([\w\.]+) # symbol
''', re.MULTILINE | re.VERBOSE)
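# For illustration, this matches Sphinx directives such as
#   .. automodule:: trac.core
#   .. py:class:: trac.core.Component
# capturing the directive keyword ("automodule", "py:class", ...) and the
# dotted symbol; get_sphinx_documented_symbols() keeps only the last
# component of that dotted name.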
def get_sphinx_documented_symbols(rst):
doc = file(os.path.join(api_doc, rst)).read()
symbols, keywords = [], []
for k, s in sphinx_doc_re.findall(doc):
symbols.append(s.split('.')[-1])
keywords.append(k)
return symbols, keywords
def get_default_symbols(module, only_documented):
public = get_public_symbols(module) - get_imported_symbols(module)
# eliminate modules
all = []
for symbol in public:
try:
__import__(symbol)
except ImportError:
all.append(symbol)
# only keep symbols having a docstring
if only_documented:
documented = []
for symbol in all:
value = getattr(module, symbol)
if value.__doc__ and (not getattr(value, '__class__', None) or
value.__doc__ != value.__class__.__doc__):
documented.append(symbol)
all = documented
return all
def get_public_symbols(m):
return set(symbol for symbol in dir(m) if not symbol.startswith('_'))
import_from_re = re.compile(r'''
^ \s* from \s+ ([\w\.]+) \s+ import \s+ # module
( \* # all symbols
| %s (?: [\s\\]* , [\s\\]* %s)* # list of symbols
| \( \s* %s (?: \s* , \s* %s)* \s* \) # list of symbols in parenthesis
)
''' % ((r'(?:\w+|\w+\s+as\s+\w+)',) * 4), re.MULTILINE | re.VERBOSE)
remove_original_re = re.compile(r'\w+\s+as', re.MULTILINE)
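# As an illustration (hypothetical import lines), import_from_re is meant to
# match statements such as
#   from trac.core import Component, TracError
#   from trac.util import escape as html_escape
#   from trac.web import *
# capturing the module and the raw symbol list, while remove_original_re later
# strips the "<name> as" part of aliased imports so that only the local alias
# (html_escape above) is counted as an imported symbol.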
def get_imported_symbols(module):
src_filename = module.__file__.replace('\\', '/').replace('.pyc', '.py')
if src_filename.endswith('/__init__.py'):
return set()
src = file(src_filename).read()
imported = set()
for mod, symbol_list in import_from_re.findall(src):
symbol_list = symbol_list.strip()
if symbol_list == '*':
try:
imported_module = __import__(mod, globals(), {}, ['__all__'])
symbols = set(getattr(imported_module, '__all__', None) or
get_public_symbols(imported_module))
except ImportError:
print "Warning: 'from %s import *' couldn't be resolved" % (
mod,)
continue
else:
if symbol_list and symbol_list[0] == '(' and symbol_list[-1] == ')':
symbol_list = symbol_list[1:-1]
symbol_list = re.sub(r'\w+\s+as', '', symbol_list)
symbols = set(remove_original_re.sub('', symbol_list)
.replace('\\', '').replace(',', ' ').split())
imported |= symbols
return imported
if __name__ == '__main__':
main(sys.argv)
|
Python
| 0.000002
|
@@ -5252,71 +5252,8 @@
-1%5D%0A
- symbol_list = re.sub(r'%5Cw+%5Cs+as', '', symbol_list)%0A
|
b6c78bc88b53e2cfbda4ef4d337ff1971f805051
|
Change enum ordering
|
paperwork_parser/base.py
|
paperwork_parser/base.py
|
import inspect
from enum import IntEnum
from pdfquery import PDFQuery
class DocFieldType(IntEnum):
NUMBER = 1
TEXT = 2
CUSTOM = 3 # TODO: Forget this and have 'type' take a callable instead?
class DocField(object):
def __init__(self, bbox, type=DocFieldType.TEXT, required=False,
description=None):
self.bbox = bbox
self.type = type
self.required = required
self.description = description
class DocSchema(object):
@classmethod
def as_pdf_selectors(cls, field_name=None):
"""Return pdfminer selector for specified field. If no field is
specified, then selectors for all fields are returned.
"""
if field_name is not None:
field = getattr(cls, field_name, None)
if (field is None) or (not isinstance(field, DocField)):
raise ValueError(
'{field} is not a DocField attribute on {klass}'.format(
field=field_name, klass=cls.__name__
)
)
pdf_fields = [('assessment_year', field)]
else:
pdf_fields = inspect.getmembers(
cls, lambda f: isinstance(f, DocField)
)
selectors = [('with_formatter', 'text')]
selectors.extend(
(key, 'LTTextLineHorizontal:in_bbox("{bbox}")'.format(
bbox=', '.join(str(coord) for coord in field.bbox)
))
for key, field in pdf_fields
)
return selectors
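    # A small sketch of the intended usage (field name and bbox values are
    # invented for illustration):
    #
    #   class InvoiceSchema(DocSchema):
    #       total = DocField(bbox=(100, 200, 180, 220), type=DocFieldType.NUMBER)
    #
    #   InvoiceSchema.as_pdf_selectors()
    #   # -> [('with_formatter', 'text'),
    #   #     ('total', 'LTTextLineHorizontal:in_bbox("100, 200, 180, 220")')]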
class Document(object):
variants = []
def __init__(self, file):
# TODO: Check for str or actual file inst?
self._file = PDFQuery(file)
self._data = {}
self._check_configuration()
@property
def data(self):
"""Read only property that is loaded with document data once
`extract()` is called.
"""
return self._data
def detect_variant(self):
raise NotImplementedError('Subclass Document and override this method')
def extract(self):
self._file.load()
variant = self.detect_variant()
selectors = variant.as_pdf_selectors()
extracted = self._file.extract(selectors)
self._data = extracted
def _check_configuration(self):
if not self.variants:
raise ValueError(
"The class '{name}' hasn't been configured with any variants."
" Set {name}.variants to a list of DocSchema types.".format(
name=self.__class__.__name__
)
)
|
Python
| 0.000001
|
@@ -103,14 +103,12 @@
-NUMBER
+TEXT
= 1
@@ -112,20 +112,22 @@
= 1%0A
-TEXT
+NUMBER
= 2%0A
|
a074f73e97312f39c6e5ab6790ec6768cf9780ad
|
remove unused and broken test utility (#661)
|
geopandas/tests/util.py
|
geopandas/tests/util.py
|
import os.path
from geopandas import GeoDataFrame, GeoSeries
HERE = os.path.abspath(os.path.dirname(__file__))
PACKAGE_DIR = os.path.dirname(os.path.dirname(HERE))
try:
import psycopg2
from psycopg2 import OperationalError
except ImportError:
class OperationalError(Exception):
pass
try:
import unittest.mock as mock
except ImportError:
import mock
def validate_boro_df(df, case_sensitive=False):
""" Tests a GeoDataFrame that has been read in from the nybb dataset."""
assert isinstance(df, GeoDataFrame)
# Make sure all the columns are there and the geometries
# were properly loaded as MultiPolygons
assert len(df) == 5
columns = ('BoroCode', 'BoroName', 'Shape_Leng', 'Shape_Area')
if case_sensitive:
for col in columns:
assert col in df.columns
else:
for col in columns:
assert col.lower() in (dfcol.lower() for dfcol in df.columns)
assert all(df.geometry.type == 'MultiPolygon')
def connect(dbname):
try:
con = psycopg2.connect(dbname=dbname)
except (NameError, OperationalError):
return None
return con
def create_db(df):
"""
Create a nybb table in the test_geopandas PostGIS database.
Returns a boolean indicating whether the database table was successfully
created
"""
# Try to create the database, skip the db tests if something goes
# wrong
# If you'd like these tests to run, create a database called
# 'test_geopandas' and enable postgis in it:
# > createdb test_geopandas
# > psql -c "CREATE EXTENSION postgis" -d test_geopandas
con = connect('test_geopandas')
if con is None:
return False
try:
cursor = con.cursor()
cursor.execute("DROP TABLE IF EXISTS nybb;")
sql = """CREATE TABLE nybb (
geom geometry,
borocode integer,
boroname varchar(40),
shape_leng float,
shape_area float
);"""
cursor.execute(sql)
for i, row in df.iterrows():
sql = """INSERT INTO nybb VALUES (
ST_GeometryFromText(%s), %s, %s, %s, %s
);"""
cursor.execute(sql, (row['geometry'].wkt,
row['BoroCode'],
row['BoroName'],
row['Shape_Leng'],
row['Shape_Area']))
finally:
cursor.close()
con.commit()
con.close()
return True
def assert_seq_equal(left, right):
"""
Poor man's version of assert_almost_equal which isn't working with Shapely
objects right now
"""
assert (len(left) == len(right),
"Mismatched lengths: %d != %d" % (len(left), len(right)))
for elem_left, elem_right in zip(left, right):
assert elem_left == elem_right, "%r != %r" % (left, right)
def geom_equals(this, that):
"""Test for geometric equality. Empty geometries are considered equal.
Parameters
----------
this, that : arrays of Geo objects (or anything that has an `is_empty`
attribute)
"""
return (this.geom_equals(that) | (this.is_empty & that.is_empty)).all()
def geom_almost_equals(this, that):
"""Test for 'almost' geometric equality. Empty geometries considered equal.
Parameters
----------
this, that : arrays of Geo objects (or anything that has an `is_empty`
property)
"""
return (this.geom_almost_equals(that) |
(this.is_empty & that.is_empty)).all()
def assert_geoseries_equal(left, right, check_dtype=False,
check_index_type=False,
check_series_type=True,
check_less_precise=False,
check_geom_type=False,
check_crs=True):
"""Test util for checking that two GeoSeries are equal.
Parameters
----------
left, right : two GeoSeries
check_dtype : bool, default False
if True, check geo dtype [only included so it's a drop-in replacement
for assert_series_equal]
check_index_type : bool, default False
check that index types are equal
check_series_type : bool, default True
check that both are same type (*and* are GeoSeries). If False,
will attempt to convert both into GeoSeries.
check_less_precise : bool, default False
if True, use geom_almost_equals. if False, use geom_equals.
check_geom_type : bool, default False
if True, check that all the geom types are equal.
check_crs: bool, default True
if check_series_type is True, then also check that the
crs matches
"""
assert len(left) == len(right), "%d != %d" % (len(left), len(right))
if check_index_type:
assert isinstance(left.index, type(right.index))
if check_dtype:
assert left.dtype == right.dtype, "dtype: %s != %s" % (left.dtype,
right.dtype)
if check_series_type:
assert isinstance(left, GeoSeries)
assert isinstance(left, type(right))
if check_crs:
assert(left.crs == right.crs)
else:
if not isinstance(left, GeoSeries):
left = GeoSeries(left)
if not isinstance(right, GeoSeries):
right = GeoSeries(right, index=left.index)
assert left.index.equals(right.index), "index: %s != %s" % (left.index,
right.index)
if check_geom_type:
assert (left.type == right.type).all(), "type: %s != %s" % (left.type,
right.type)
if check_less_precise:
assert geom_almost_equals(left, right)
else:
assert geom_equals(left, right)
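# A minimal usage sketch (geometries invented for illustration; the tolerance
# comes from GeoSeries.geom_almost_equals' default precision):
#
#   from shapely.geometry import Point
#   left = GeoSeries([Point(0, 0), Point(1, 1)])
#   right = GeoSeries([Point(0, 0), Point(1, 1.0000001)])
#   assert_geoseries_equal(left, right, check_less_precise=True)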
|
Python
| 0
|
@@ -2559,388 +2559,8 @@
e%0A%0A%0A
-def assert_seq_equal(left, right):%0A %22%22%22%0A Poor man's version of assert_almost_equal which isn't working with Shapely%0A objects right now%0A %22%22%22%0A assert (len(left) == len(right),%0A %22Mismatched lengths: %25d != %25d%22 %25 (len(left), len(right)))%0A%0A for elem_left, elem_right in zip(left, right):%0A assert elem_left == elem_right, %22%25r != %25r%22 %25 (left, right)%0A%0A%0A
def
|
ac8fdbacc01d82a2ca5c19250464a45cce052ff3
|
remove stray print statement
|
tests/test_units/test_decorator_validate.py
|
tests/test_units/test_decorator_validate.py
|
# -*- coding: utf-8 -*-
from paste.fixture import TestApp
from paste.registry import RegistryManager
from pylons.decorators import validate, encode_formencode_errors
from pylons.controllers import WSGIController
from __init__ import ControllerWrap, SetupCacheGlobal, TestWSGIController
import formencode
from formencode.htmlfill import html_quote
def custom_error_formatter(error):
return '<p><span class="pylons-error">%s</span></p>\n' % html_quote(error)
class NetworkForm(formencode.Schema):
allow_extra_fields = True
filter_extra_fields = True
new_network = formencode.validators.URL(not_empty=True)
class HelloForm(formencode.Schema):
hello = formencode.ForEach(formencode.validators.Int())
class ValidatingController(WSGIController):
def new_network(self):
return """
<html>
<form action="/dhcp/new_form" method="POST">
<table>
<tr>
<th>Network</th>
<td>
<input id="new_network" name="new_network" type="text" value="" />
</td>
</tr>
</table>
<input name="commit" type="submit" value="Save changes" />
</form>
</html>
"""
def network(self):
return 'Your network is: %s' % self.form_result.get('new_network')
network = validate(schema=NetworkForm, form='new_network')(network)
def view_hello(self):
return """
<html>
<form action="/hello" method="POST">
<table>
<tr>
<th>Hello</th>
<td>
<form:iferror name="hello">Bad Hello! </form:iferror>
<input id="hello" name="hello" type="text" value="" />
<input id="hello" name="hello" type="text" value="" />
<input id="hello" name="hello" type="text" value="" />
</td>
</tr>
</table>
<input name="commit" type="submit" value="Submit" />
</form>
</html>
"""
def hello(self):
return str(self.form_result)
hello = validate(schema=HelloForm(), post_only=False, form='view_hello')(hello)
def hello_custom(self):
return str(self.form_result)
hello_custom = \
validate(schema=HelloForm(), post_only=False, form='view_hello',
auto_error_formatter=custom_error_formatter)(hello_custom)
def hello_recurse(self, environ):
if environ['REQUEST_METHOD'] == 'GET':
return self.new_network()
else:
return 'Your network is: %s' % self.form_result.get('new_network')
hello_recurse = validate(schema=NetworkForm, form='hello_recurse')(hello_recurse)
class TestValidateDecorator(TestWSGIController):
def setUp(self):
TestWSGIController.setUp(self)
app = SetupCacheGlobal(ControllerWrap(ValidatingController),
self.environ)
app = RegistryManager(app)
self.app = TestApp(app)
def test_network_validated(self):
response = self.post_response(action='network',
new_network='http://pylonshq.com/')
assert 'Your network is: http://pylonshq.com/' in response
def test_network_failed_validation_non_ascii(self):
response = self.post_response(action='network', new_network='Росси́я')
assert 'That is not a valid URL' in response
assert 'Росси́я' in response
def test_recurse_validated(self):
response = self.post_response(action='hello_recurse',
new_network='http://pylonshq.com/')
assert 'Your network is: http://pylonshq.com/' in response
def test_hello(self):
self.environ['pylons.routes_dict']['action'] = 'hello'
response = self.app.post('/hello?hello=1&hello=2&hello=3',
extra_environ=self.environ)
print response.body
assert "{u'hello': [1, 2, 3]}" in response
def test_hello_failed(self):
self.environ['pylons.routes_dict']['action'] = 'hello'
response = self.app.post('/hello?hello=1&hello=2&hello=hi',
extra_environ=self.environ)
assert 'Bad Hello! ' in response
assert "[None, None, 'Please enter an integer value']" in response
def test_hello_custom_failed(self):
self.environ['pylons.routes_dict']['action'] = 'hello_custom'
response = \
self.app.post('/hello_custom?hello=1&hello=2&hello=hi',
extra_environ=self.environ)
assert 'Bad Hello! ' in response
assert "[None, None, 'Please enter an integer value']" in response
assert ("""<p><span class="pylons-error">[None, None, 'Please enter """
"""an integer value']</span></p>""") in response
def test_encode_formencode_errors():
assert None == encode_formencode_errors(None, 'utf-8')
assert 'Invalid' == encode_formencode_errors('Invalid', 'utf-8')
errors = encode_formencode_errors(u'Invalid', 'utf-8')
assert 'Invalid' == errors
assert isinstance(errors, str)
assert 'Росси́я' == encode_formencode_errors(u'Росси́я', 'utf-8')
errors = encode_formencode_errors(dict(hello=u'Росси́я'), 'iso-8859-1', 'replace')
assert errors == dict(hello=u'Росси́я'.encode('iso-8859-1', 'replace'))
assert isinstance(errors['hello'], str)
errors = encode_formencode_errors({'hello': [None, None, u'Invalid']}, 'utf-8')
assert {'hello': [None, None, 'Invalid']} == errors
assert isinstance(errors['hello'][2], str)
orig_errors = {'a': [u'Invalid'],
'b': {'b2': [u'Invalid', None]},
'c': [None, u'Invalid',
{'c2': [u'Invalid', u'Invalid']},
[u'Invalid', None]],
'd': 'Hello'}
e = encode_formencode_errors(orig_errors, 'utf-8')
assert e == orig_errors
for i in (e['a'][0], e['b']['b2'][0], e['c'][1], e['c'][2]['c2'][0],
e['c'][2]['c2'][1], e['c'][3][0], e['d']):
assert isinstance(i, str)
|
Python
| 0.999995
|
@@ -3731,36 +3731,8 @@
on)%0A
- print response.body%0A
|
c6afe2ee8ba40d11d7e62e1fbaca6436e6a56cf5
|
Change deepcopy by copy
|
geotrek/common/forms.py
|
geotrek/common/forms.py
|
from zipfile import is_zipfile
from copy import deepcopy
from django import forms
from django.db.models import Q
from django.db.models.query import QuerySet
from django.db.models.fields.related import ForeignKey, ManyToManyField
from django.core.exceptions import FieldDoesNotExist
from django.utils.text import format_lazy
from django.utils.translation import ugettext_lazy as _
from mapentity.forms import MapEntityForm
from geotrek.authent.models import default_structure, StructureRelated, StructureOrNoneRelated
from .mixins import NoDeleteMixin
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Div, Submit
from crispy_forms.bootstrap import FormActions
class CommonForm(MapEntityForm):
class Meta:
fields = []
def deep_remove(self, fieldslayout, name):
if isinstance(fieldslayout, list):
for field in fieldslayout:
self.deep_remove(field, name)
elif hasattr(fieldslayout, 'fields'):
if name in fieldslayout.fields:
fieldslayout.fields.remove(name)
self.fields.pop(name)
for field in fieldslayout.fields:
self.deep_remove(field, name)
def replace_orig_fields(self):
model = self._meta.model
codeperm = '%s.publish_%s' % (
model._meta.app_label, model._meta.model_name)
if 'published' in self.fields and self.user and not self.user.has_perm(codeperm):
self.deep_remove(self.fieldslayout, 'published')
if 'review' in self.fields and self.instance and self.instance.any_published:
self.deep_remove(self.fieldslayout, 'review')
super(CommonForm, self).replace_orig_fields()
def filter_related_field(self, name, field):
if not isinstance(field, forms.models.ModelChoiceField):
return
try:
modelfield = self.instance._meta.get_field(name)
except FieldDoesNotExist:
            # be careful: custom form fields are not in the model
modelfield = None
if not isinstance(modelfield, (ForeignKey, ManyToManyField)):
return
model = modelfield.remote_field.to
# Filter structured choice fields according to user's structure
if issubclass(model, StructureRelated) and model.check_structure_in_forms:
field.queryset = field.queryset.filter(structure=self.user.profile.structure)
if issubclass(model, StructureOrNoneRelated) and model.check_structure_in_forms:
field.queryset = field.queryset.filter(Q(structure=self.user.profile.structure) | Q(structure=None))
if issubclass(model, NoDeleteMixin):
field.queryset = field.queryset.filter(deleted=False)
def __init__(self, *args, **kwargs):
self.fieldslayout = deepcopy(self.fieldslayout)
super(CommonForm, self).__init__(*args, **kwargs)
self.fields = deepcopy(self.fields)
self.update = kwargs.get("instance") is not None
if 'structure' in self.fields:
if self.user.has_perm('authent.can_bypass_structure'):
if not self.instance.pk:
self.fields['structure'].initial = self.user.profile.structure
else:
for name, field in self.fields.items():
self.filter_related_field(name, field)
del self.fields['structure']
def clean(self):
structure = self.cleaned_data.get('structure')
if not structure:
return self.cleaned_data
# Copy cleaned_data because self.add_error may remove an item
for name, field in self.cleaned_data.copy().items():
try:
modelfield = self.instance._meta.get_field(name)
except FieldDoesNotExist:
continue
if not isinstance(modelfield, (ForeignKey, ManyToManyField)):
continue
model = modelfield.remote_field.to
if not issubclass(model, (StructureRelated, StructureOrNoneRelated)):
continue
if not model.check_structure_in_forms:
continue
if isinstance(field, QuerySet):
for value in field:
self.check_structure(value, structure, name)
else:
self.check_structure(field, structure, name)
return self.cleaned_data
def check_structure(self, obj, structure, name):
if hasattr(obj, 'structure'):
if obj.structure and structure != obj.structure:
self.add_error(name, format_lazy(_("Please select a choice related to all structures (without brackets) "
"or related to the structure {struc} (in brackets)"), struc=structure))
def save(self, commit=True):
"""Set structure field before saving if need be"""
if self.update: # Structure is already set on object.
pass
elif not hasattr(self.instance, 'structure'):
pass
elif 'structure' in self.fields:
pass # The form contains the structure field. Let django use its value.
elif self.user:
self.instance.structure = self.user.profile.structure
else:
self.instance.structure = default_structure()
return super(CommonForm, self).save(commit)
class ImportDatasetForm(forms.Form):
parser = forms.TypedChoiceField(
label=_('Data to import from network'),
widget=forms.RadioSelect,
required=True,
)
def __init__(self, choices=None, *args, **kwargs):
super(ImportDatasetForm, self).__init__(*args, **kwargs)
self.fields['parser'].choices = choices
self.helper = FormHelper()
self.helper.layout = Layout(
Div(
Div(
'parser',
),
FormActions(
Submit('import-web', _("Import"), css_class='button white')
),
css_class='file-attachment-form',
)
)
class ImportDatasetFormWithFile(ImportDatasetForm):
zipfile = forms.FileField(
label=_('File'),
required=True,
widget=forms.FileInput
)
encoding = forms.ChoiceField(
label=_('Encoding'),
choices=(('Windows-1252', 'Windows-1252'), ('UTF-8', 'UTF-8'))
)
def __init__(self, *args, **kwargs):
super(ImportDatasetFormWithFile, self).__init__(*args, **kwargs)
self.fields['parser'].label = _('Data to import from local file')
self.helper.layout = Layout(
Div(
Div(
'parser',
'zipfile',
'encoding',
),
FormActions(
Submit('upload-file', _("Import"), css_class='button white')
),
css_class='file-attachment-form',
)
)
def clean_zipfile(self):
z = self.cleaned_data['zipfile']
if not is_zipfile(z):
raise forms.ValidationError(
_("File must be of ZIP type."), code='invalid')
# Reset position for further use.
z.seek(0)
|
Python
| 0
|
@@ -2926,25 +2926,16 @@
ields =
-deepcopy(
self.fie
@@ -2937,16 +2937,22 @@
f.fields
+.copy(
)%0A
|
26b08b2f5eef4f71e1bc147a5ed6b92b83799742
|
Update unit tests to new config file structure
|
pi_ldapproxy/test/util.py
|
pi_ldapproxy/test/util.py
|
# Contains code from ldaptor (createServer from ldaptor/testutil.py),
# which is licensed under the MIT license as follows.
# Copyright (c) 2002-2014, Ldaptor Contributors (see AUTHORS)
#
# Ldaptor is licensed under the MIT license for the majority of the
# files, with exceptions listed below.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import configobj
import twisted
import validate
from ldaptor import testutil
from ldaptor.protocols.ldap.ldapclient import LDAPClient
from ldaptor.test.util import returnConnected, IOPump
from twisted.internet import defer
from twisted.internet import reactor
from twisted.internet.task import LoopingCall
from pi_ldapproxy.config import CONFIG_SPEC
from pi_ldapproxy.proxy import TwoFactorAuthenticationProxy, ProxyServerFactory
from pi_ldapproxy.test.mock import MockPrivacyIDEA, MockLDAPClient
BASE_CONFIG = """
[privacyidea]
instance = http://example.com
realm = default
[ldap-backend]
endpoint = tcp:host=example.com:port=1337:timeout=1
use-tls = false
test-connection = false
[service-account]
dn = "uid=service,cn=users,dc=test,dc=local"
password = service-secret
[ldap-proxy]
endpoint = tcp:1389
passthrough-binds = "uid=passthrough,cn=users,dc=test,dc=local"
bind-service-account = false
allow-search = false
[user-mapping]
#strategy = lookup
#attribute = uid
strategy = match
pattern = "uid=([^,]+),cn=users,dc=test,dc=local"
[bind-cache]
enabled = false
"""
def load_test_config():
config = configobj.ConfigObj(BASE_CONFIG.splitlines(), configspec=CONFIG_SPEC.splitlines())
validator = validate.Validator()
result = config.validate(validator, preserve_errors=True)
assert result
return config
class ProxyTestCase(twisted.trial.unittest.TestCase):
additional_config = {}
privacyidea_credentials = {}
def get_config(self):
config = load_test_config()
for section, contents in self.additional_config.iteritems():
for key, value in contents.iteritems():
config[section][key] = value
return config
def inject_service_account_server(self, *responses):
client = MockLDAPClient(*responses)
@defer.inlineCallbacks
def _factory_connect_service_account():
client.connectionMade() # TODO: Necessary here?
yield client.bind(self.factory.service_account_dn, self.factory.service_account_password)
defer.returnValue(client)
self.factory.connect_service_account = _factory_connect_service_account
return client
def setUp(self):
self.factory = ProxyServerFactory(self.get_config())
self.pump_call = LoopingCall(self.pump_all)
self.pump_call.start(0.1)
self.privacyidea = MockPrivacyIDEA(self.privacyidea_credentials)
self.pumps = set()
def tearDown(self):
self.pump_call.stop()
# remove all pumps that have been created
for pump in self.pumps:
IOPump.active.remove(pump)
self.pumps = set()
def pump_all(self):
for pump in IOPump.active:
pump.pump()
def create_server(self, *responses, **kwds):
"""
Create a server for each test.
"""
protocol = kwds.get("protocol", TwoFactorAuthenticationProxy)
server = protocol()
clientTestDriver = MockLDAPClient(*responses)
def simulateConnectToServer():
d = defer.Deferred()
def onConnect():
clientTestDriver.connectionMade()
d.callback(clientTestDriver)
reactor.callLater(0, onConnect)
return d
clientConnector = kwds.get('clientConnector', simulateConnectToServer)
server.clientConnector = clientConnector
server.factory = self.factory
server.clientTestDriver = clientTestDriver
self.privacyidea.inject(server)
return server
def create_server_and_client(self, *responses, **kwds):
client = LDAPClient()
server = self.create_server(*responses, **kwds)
self.pumps.add(returnConnected(server, client))
return server, client
|
Python
| 0
|
@@ -1912,24 +1912,8 @@
.com
-%0Arealm = default
%0A%0A%5Bl
@@ -2375,16 +2375,101 @@
local%22%0A%0A
+%5Brealm-mapping%5D%0Astrategy = static%0Arealm = default%0A%0A%5Bpreamble-cache%5D%0Aenabled = false%0A%0A
%5Bbind-ca
@@ -2730,16 +2730,47 @@
t result
+ == True, %22Invalid test config%22
%0A ret
|
41d64648623c261baf18dc3e9006f877276caf35
|
Fix bug bordercase, unit that was killed during attack, wants to move later
|
onagame2015/actions.py
|
onagame2015/actions.py
|
import random
from onagame2015.validations import (
coord_in_arena,
arg_is_valid_tuple,
)
from onagame2015.lib import (
Coordinate,
UNIT_TYPE_ATTACK,
)
def toss_dice(number_of_dice):
for _ in range(number_of_dice):
yield random.randint(1, 6)
class BaseBotAction(object):
ACTION_NAME = ''
def __init__(self, bot):
self.calling_bot = bot
self.result = ''
def action_result(self):
return {
'result': self.result,
}
def execute(self, arena, action, opponent):
"""Return a dict, indicating what was done
Implemented by the subclass
{
'action_type': -> 'ATTACK' | 'MOVE',
....
}
"""
raise NotImplementedError
class AttackAction(BaseBotAction):
ACTION_NAME = 'ATTACK'
def execute(self, arena, action, opponent):
"""Attack from one tile to another
:action: <dict> with {
'action_type': 'ATTACK',
'from': <coord> attack from,
'to': <coord> attack to,
}
Validate <attack_{from,to}> are adjacent cells in the arena.
If possible, run the attack, and update the units in each tile
according to the result.
@return
{
'action_type': 'ATTACK',
'attacker_coord': <coord_attack_from>,
'defender_coord': <coord_attack_to>,
'defender_units': <n>,
'attacker_units': <m>,
}
"""
if not arg_is_valid_tuple(action['from']) or not arg_is_valid_tuple(action['to']):
raise RuntimeError("Invalid tuple")
# Because it's a list of lists, the user will parse the matrix,
# first through longitude and then through latitude
# The coordinates will come reversed
attacker_coord = Coordinate(*action['from'])
defender_coord = Coordinate(*action['to'])
self._run_attack_validations(
arena=arena,
tile_from=attacker_coord,
tile_to=defender_coord,
)
attacker_tile = arena.get_tile_content(attacker_coord)
defender_tile = arena.get_tile_content(defender_coord)
attack_result = self._launch_attack(
attacker_tile=attacker_tile,
defender_tile=defender_tile,
)
attack_result.update({
'action_type': 'ATTACK',
'attacker_coord': attacker_coord,
'defender_coord': defender_coord,
'attacker_player': arena.whos_in_tile(attacker_coord),
'defender_player': arena.whos_in_tile(defender_coord),
'attacker_bot': self.calling_bot,
'defender_bot': opponent
})
units_removed = arena.synchronize_attack_results(attack_result)
self._update_bots_units(attack_result, units_removed)
attack_result.update({
'defender_units': arena.number_of_units_in_tile(defender_coord),
'attacker_units': arena.number_of_units_in_tile(attacker_coord),
})
return attack_result
def _update_bots_units(self, attack_result, units_removed):
for who in ('attacker', 'defender'):
bot = attack_result['{}_bot'.format(who)]
units_lost = units_removed['{}_removed_units'.format(who)]
bot.remove_units(units_lost)
def _launch_attack(self, attacker_tile, defender_tile):
"""Run the attack on the tiles, by using the units in each one
        @return: dict indicating how many units each team loses
{
'attacker_loses': <n> :int:,
'defender_loses': <m> :int:,
'attacker_dice': [x0, x1,....],
'defender_dice': [y0, y1,....],
}
"""
attacker_n_dice = len([unit for unit in attacker_tile.items if unit.type == UNIT_TYPE_ATTACK])
defender_n_dice = len([unit for unit in defender_tile.items if unit.type == UNIT_TYPE_ATTACK])
play = lambda n_dice: sorted(toss_dice(n_dice), reverse=True)
attacker_dice = play(attacker_n_dice)
defender_dice = play(defender_n_dice)
partial_result = {
'attacker_loses': 0,
'defender_loses': 0,
'attacker_dice': attacker_dice,
'defender_dice': defender_dice,
}
for attacker, defender in zip(attacker_dice, defender_dice):
if attacker <= defender:
partial_result['attacker_loses'] += 1
else:
partial_result['defender_loses'] += 1
return partial_result
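    # Worked example (hypothetical rolls): with attacker_dice = [6, 4, 2] and
    # defender_dice = [5, 5], zip() pairs (6, 5) and (4, 5) and ignores the
    # spare attacker die. 6 > 5 removes a defender unit, 4 <= 5 removes an
    # attacker unit, so the result is attacker_loses=1, defender_loses=1.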
def _run_attack_validations(self, arena, tile_from, tile_to):
"""Run a series of validations to assess if is possible to perform an
attack with the given pair of coordinates."""
self._tiles_in_arena(
tiles=(Coordinate(*point) for point in (tile_to, tile_from)),
arena=arena)
self._contiguous_tiles(source_coord=tile_from, target_coord=tile_to)
self._oposite_bands(arena=arena, attacker_coord=tile_from, defender_coord=tile_to)
def _tiles_in_arena(self, tiles, arena):
if not all(coord_in_arena(t, arena) for t in tiles):
raise RuntimeError("Invalid coordinates")
def _contiguous_tiles(self, source_coord, target_coord):
delta_latitude = abs(source_coord.latitude - target_coord.latitude)
delta_longitude = abs(source_coord.longitude - target_coord.longitude)
try:
assert 1 <= delta_latitude + delta_longitude <= 2
except AssertionError:
raise RuntimeError("Invalid attack range")
def _oposite_bands(self, arena, attacker_coord, defender_coord):
"""Validate that both tiles have units of different teams."""
attacker_tile = arena.get_tile_content(attacker_coord)
defender_tile = arena.get_tile_content(defender_coord)
try:
team_1 = next(unit.player_id for unit in attacker_tile.items)
team_2 = next(unit.player_id for unit in defender_tile.items)
assert team_1 != team_2, "Friendly fire!"
except AssertionError as e:
raise RuntimeError(str(e))
except StopIteration:
raise RuntimeError("One of the tiles is empty")
class MoveAction(BaseBotAction):
ACTION_NAME = 'MOVE'
def execute(self, arena, action, opponent):
"""@return :dict: with
{
'action_type': 'MOVE',
'from': <coord> for the origin,
            'to': <coord> for the destination,
'remain_in_source': <n>,
            'error': <empty> if OK, or an error message describing the problem,
'player': <player_that_moved>,
}
"""
action_result = {'action_type': 'MOVE'}
unit = arena.get_unit(action['unit_id'])
action_result.update(unit.move(action['direction']))
action_result['remain_in_source'] = arena.number_of_units_in_tile(action_result['from'])
action_result['player'] = arena.whos_in_tile(action_result['to'])
return action_result
|
Python
| 0
|
@@ -6763,24 +6763,45 @@
'unit_id'%5D)%0A
+ if unit:%0A
acti
@@ -6849,16 +6849,20 @@
ion'%5D))%0A
+
@@ -6950,32 +6950,36 @@
from'%5D)%0A
+
action_result%5B'p
@@ -7024,24 +7024,28 @@
sult%5B'to'%5D)%0A
+
retu
|
35bc179c6e6c7c8d9230de8da0672a106a372954
|
Install plugins using package.
|
testcases/cloud_admin/run_sos_report.py
|
testcases/cloud_admin/run_sos_report.py
|
#!/usr/bin/python
import os
import time
from eucaops import Eucaops
from eutester.eutestcase import EutesterTestCase
from eutester.machine import Machine
class SampleTest(EutesterTestCase):
def __init__(self):
self.setuptestcase()
self.setup_parser()
self.start_time = self.ticket_number = int(time.time())
self.parser.add_argument("--remote-dir", default="/root/euca-sosreport-" + str(self.start_time) + "/")
self.parser.add_argument("--local-dir", default=os.getcwd())
self.parser.add_argument("--git-repo", default="https://github.com/risaacson/eucalyptus-sosreport-plugins.git")
self.get_args()
# Setup basic eutester object
self.tester = Eucaops( config_file=self.args.config,password=self.args.password)
def clean_method(self):
pass
def Install(self):
"""
This is where the test description goes
"""
for machine in self.tester.get_component_machines():
assert isinstance(machine, Machine)
machine.install("sos")
machine.install("git")
machine.sys("git clone " + self.args.git_repo)
machine.sys("cp /root/eucalyptus-sosreport-plugins/sos/plugins/euca*.py /usr/lib/python2.6/site-packages/sos/plugins/")
def Run(self):
for machine in self.tester.get_component_machines():
assert isinstance(machine, Machine)
machine.sys("mkdir -p " + self.args.remote_dir)
machine.sys("sosreport --batch --tmp-dir " + self.args.remote_dir + " --ticket-number " + str(self.ticket_number),code=0)
def Download(self):
for machine in self.tester.get_component_machines():
assert isinstance(machine, Machine)
remote_tarball_path = machine.sys("ls -1 " + self.args.remote_dir + "*" + str(self.ticket_number) + "*.xz", code=0)[0]
tarball = remote_tarball_path.split("/")[-1]
local_tarball_path = self.args.local_dir + '/' + tarball
self.tester.debug("Downloading file to: " + local_tarball_path)
machine.sftp.get(remote_tarball_path, local_tarball_path)
def RunAll(self):
self.Install()
self.Run()
self.Download()
if __name__ == "__main__":
testcase = SampleTest()
### Use the list of tests passed from config/command line to determine what subset of tests to run
### or use a predefined list
list = testcase.args.tests or ["RunAll"]
### Convert test suite methods to EutesterUnitTest objects
unit_list = [ ]
for test in list:
unit_list.append( testcase.create_testunit_by_name(test) )
### Run the EutesterUnitTest objects
result = testcase.run_test_case_list(unit_list,clean_on_exit=True)
exit(result)
|
Python
| 0
|
@@ -551,16 +551,19 @@
(%22--
-git-repo
+package-url
%22, d
@@ -578,32 +578,35 @@
http
-s
://
-github.com/risaacson
+mongo.beldurnik.com/RPMS
/euc
@@ -616,30 +616,24 @@
ptus-sos
-report
-plugins
.git%22)%0A
@@ -624,20 +624,37 @@
-plugins
-.git
+-0.1-0.el6.noarch.rpm
%22)%0A
@@ -1105,212 +1105,53 @@
ine.
-install(%22git%22)%0A machine.sys(%22git clone %22 + self.args.git_repo)%0A machine.sys(%22cp /root/eucalyptus-sosreport-plugins/sos/plugins/euca*.py /usr/lib/python2.6/site-packages/sos/plugins/%22
+sys(%22yum install -y %22 + self.args.package_url
)%0A%0A
|
faa11c39ca24676b2b7b498203eda4034e9805b5
|
convert rest.Forbidden to 401 Unauthorized when there is no client identity
|
ermrest/exception/rest.py
|
ermrest/exception/rest.py
|
#
# Copyright 2012-2013 University of Southern California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""ERMREST exception types to signal REST HTTP errors
"""
import web
import urllib
urlquote = urllib.quote
class WebException (web.HTTPError):
def __init__(self, status, data=u'', headers={}, desc=u'%s'):
if isinstance(data, str):
data = data.decode('utf8')
data = ('%s\n%s\n' % (status, desc)) % data
headers['Content-Type'] = 'text/plain'
try:
web.ctx.ermrest_request_trace(data)
except:
pass
web.HTTPError.__init__(self, status, headers=headers, data=data)
class NotModified(WebException):
def __init__(self, data=u'', headers={}):
status = '304 Not Modified'
desc = u'Resource not modified. %s'
WebException.__init__(self, status, headers=headers, data=data, desc=desc)
class BadRequest (WebException):
def __init__(self, data=u'', headers={}):
status = '400 Bad Request'
desc = u'The request is malformed. %s'
WebException.__init__(self, status, headers=headers, data=data, desc=desc)
class Unauthorized (WebException):
def __init__(self, data=u'', headers={}):
status = '401 Unauthorized'
desc = u'The requested %s requires authorization.'
WebException.__init__(self, status, headers=headers, data=data, desc=desc)
class Forbidden (WebException):
def __init__(self, data=u'', headers={}):
status = '403 Forbidden'
desc = u'The requested %s is forbidden.'
WebException.__init__(self, status, headers=headers, data=data, desc=desc)
class NotFound (WebException):
def __init__(self, data=u'', headers={}):
status = '404 Not Found'
desc = u'The requested %s could not be found.'
WebException.__init__(self, status, headers=headers, data=data, desc=desc)
class NoMethod (WebException):
def __init__(self, data=u'', headers={}):
status = '405 Method Not Allowed'
desc = (u'The requested method %s is not allowed: %%s.' % web.ctx.method)
WebException.__init__(self, status, headers=headers, data=data, desc=desc)
class Conflict (WebException):
def __init__(self, data=u'', headers={}):
status = '409 Conflict'
desc = u'The request conflicts with the state of the server. %s'
WebException.__init__(self, status, headers=headers, data=data, desc=desc)
class PreconditionFailed (WebException):
def __init__(self, data=u'', headers={}):
status = '412 Precondition Failed'
desc = 'Resource state does not match requested preconditions. %s'
WebException.__init__(self, status, headers=headers, data=data, desc=desc)
class UnsupportedMediaType (WebException):
def __init__(self, data=u'', headers={}):
status = '415 Unsupported Media Type'
desc = u'The request input type is not supported. %s'
WebException.__init__(self, status, headers=headers, data=data, desc=desc)
class RuntimeError (WebException):
def __init__(self, data=u'', headers={}):
status = '500 Internal Server Error'
desc = u'The request execution encountered a runtime error: %s.'
WebException.__init__(self, status, headers=headers, data=data, desc=desc)
class ServiceUnavailable (WebException):
def __init__(self, data=u'', headers={}):
status = '503 Service Unavailable'
desc = u'The service is temporarily unavailable: %s.'
WebException.__init__(self, status, headers=headers, data=data, desc=desc)
|
Python
| 0.00001
|
@@ -2061,24 +2061,117 @@
forbidden.'%0A
+ if web.ctx.webauthn2_context.client is None:%0A status = '401 Unauthorized'%0A
WebE
|
10ba0ea095e4765a2d60751371f7dca8e36e2d18
|
Fix infinite loop in grit headers clobbering script.
|
build/win/clobber_generated_headers.py
|
build/win/clobber_generated_headers.py
|
#!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This script helps workaround IncrediBuild problem on Windows.
# See http://crbug.com/17706.
import os
import sys
_SRC_PATH = os.path.join(os.path.dirname(__file__), '..', '..')
sys.path.append(os.path.join(_SRC_PATH, 'tools', 'grit'))
import grit.grd_reader
# We need to apply the workaround only on Windows.
if os.name != 'nt':
sys.exit(0)
def total_split(path):
components = []
while path:
head, tail = os.path.split(path)
components.append(tail)
path = head
return list(reversed(components))
for path in sys.argv[1:]:
path = os.path.join('src', path)
path_components = total_split(path)
root = grit.grd_reader.Parse(path)
output_files = [node.GetOutputFilename() for node in root.GetOutputFiles()]
output_headers = [file for file in output_files if file.endswith('.h')]
for build_type in ('Debug', 'Release'):
build_path = os.path.join(_SRC_PATH, 'chrome', build_type)
# We guess target file output based on path of the grd file (the first
# path component after 'src').
intermediate_path = os.path.join(build_path, 'obj',
'global_intermediate', path_components[1])
for header in output_headers:
full_path = os.path.join(intermediate_path, header)
try:
os.remove(full_path)
print 'Clobbered ' + full_path
except OSError:
print 'Could not remove ' + full_path + '. Continuing.'
|
Python
| 0.000029
|
@@ -616,24 +616,53 @@
split(path)%0A
+ if not tail:%0A break%0A
componen
@@ -1627,8 +1627,9 @@
inuing.'
+%0A
|
e71870736959efcde2188bdcbd89838b67ca8582
|
Add AbstractSanitizer/AbstractValidator class to import path
|
pathvalidate/__init__.py
|
pathvalidate/__init__.py
|
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from .__version__ import __author__, __copyright__, __email__, __license__, __version__
from ._common import (
Platform,
ascii_symbols,
normalize_platform,
replace_ansi_escape,
replace_unprintable_char,
unprintable_ascii_chars,
validate_null_string,
validate_pathtype,
)
from ._filename import FileNameSanitizer, is_valid_filename, sanitize_filename, validate_filename
from ._filepath import (
FilePathSanitizer,
is_valid_filepath,
sanitize_file_path,
sanitize_filepath,
validate_file_path,
validate_filepath,
)
from ._ltsv import sanitize_ltsv_label, validate_ltsv_label
from ._symbol import replace_symbol, validate_symbol
from .error import (
ErrorReason,
InvalidCharError,
InvalidLengthError,
InvalidReservedNameError,
NullNameError,
ReservedNameError,
ValidationError,
ValidReservedNameError,
)
|
Python
| 0
|
@@ -154,16 +154,72 @@
rsion__%0A
+from ._base import AbstractSanitizer, AbstractValidator%0A
from ._c
|
12e2d6d01ff15cadb7cd87484f14475f30aea652
|
fix version number because this was backported
|
lib/ansible/modules/cloud/scaleway/scaleway_image_facts.py
|
lib/ansible/modules/cloud/scaleway/scaleway_image_facts.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2018, Yanis Guenane <yanis+ansible@guenane.org>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: scaleway_image_facts
short_description: Gather facts about the Scaleway images available.
description:
- Gather facts about the Scaleway images available.
version_added: "2.7"
author:
- "Yanis Guenane (@Spredzy)"
- "Remy Leone (@sieben)"
extends_documentation_fragment: scaleway
options:
region:
version_added: "2.8"
description:
- Scaleway compute zone
required: true
choices:
- ams1
- EMEA-NL-EVS
- par1
- EMEA-FR-PAR1
'''
EXAMPLES = r'''
- name: Gather Scaleway images facts
scaleway_image_facts:
region: par1
'''
RETURN = r'''
---
scaleway_image_facts:
description: Response from Scaleway API
returned: success
type: complex
contains:
"scaleway_image_facts": [
{
"arch": "x86_64",
"creation_date": "2018-07-17T16:18:49.276456+00:00",
"default_bootscript": {
"architecture": "x86_64",
"bootcmdargs": "LINUX_COMMON scaleway boot=local nbd.max_part=16",
"default": false,
"dtb": "",
"id": "15fbd2f7-a0f9-412b-8502-6a44da8d98b8",
"initrd": "http://169.254.42.24/initrd/initrd-Linux-x86_64-v3.14.5.gz",
"kernel": "http://169.254.42.24/kernel/x86_64-mainline-lts-4.9-4.9.93-rev1/vmlinuz-4.9.93",
"organization": "11111111-1111-4111-8111-111111111111",
"public": true,
"title": "x86_64 mainline 4.9.93 rev1"
},
"extra_volumes": [],
"from_server": null,
"id": "00ae4a88-3252-4eda-9feb-5f6b56bf5ef0",
"modification_date": "2018-07-17T16:42:06.319315+00:00",
"name": "Debian Stretch",
"organization": "51b656e3-4865-41e8-adbc-0c45bdd780db",
"public": true,
"root_volume": {
"id": "da32dfbb-c5ff-476d-ae2d-c297dd09b7dd",
"name": "snapshot-2a7229dc-d431-4dc5-b66e-95db08b773af-2018-07-17_16:18",
"size": 25000000000,
"volume_type": "l_ssd"
},
"state": "available"
}
]
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.scaleway import (
Scaleway, ScalewayException, scaleway_argument_spec, SCALEWAY_LOCATION)
class ScalewayImageFacts(Scaleway):
def __init__(self, module):
super(ScalewayImageFacts, self).__init__(module)
self.name = 'images'
region = module.params["region"]
self.module.params['api_url'] = SCALEWAY_LOCATION[region]["api_endpoint"]
def main():
argument_spec = scaleway_argument_spec()
argument_spec.update(dict(
region=dict(required=True, choices=SCALEWAY_LOCATION.keys()),
))
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
)
try:
module.exit_json(
ansible_facts={'scaleway_image_facts': ScalewayImageFacts(module).get_resources()}
)
except ScalewayException as exc:
module.fail_json(msg=exc.message)
if __name__ == '__main__':
main()
|
Python
| 0
|
@@ -775,17 +775,17 @@
ded: %222.
-8
+7
%22%0A de
|
42609dfaf39c09fa591ff1b40e23ab1795a6d7a5
|
test fix
|
vitrage/tests/unit/datasources/test_alarm_transformer_base.py
|
vitrage/tests/unit/datasources/test_alarm_transformer_base.py
|
# Copyright 2017 - Nokia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
from oslo_log import log as logging
from vitrage.common.constants import EdgeLabel
from vitrage.common.constants import EntityCategory
from vitrage.common.constants import GraphAction
from vitrage.common.constants import VertexProperties as VProps
from vitrage.datasources.alarm_properties import AlarmProperties as AlarmProps
from vitrage.datasources.nova.host import NOVA_HOST_DATASOURCE
from vitrage.tests.unit.datasources.test_transformer_base import \
BaseTransformerTest
LOG = logging.getLogger(__name__)
# noinspection PyProtectedMember
class BaseAlarmTransformerTest(BaseTransformerTest):
def _validate_alarm_vertex_props(self,
vertex,
expected_name,
expected_datasource_name,
expected_sample_time):
self._validate_base_vertex_props(vertex,
expected_name,
expected_datasource_name)
self.assertEqual(EntityCategory.ALARM, vertex[VProps.CATEGORY])
self.assertEqual(expected_sample_time, vertex[VProps.SAMPLE_TIMESTAMP])
if self._is_erroneous(vertex):
self.assertEqual(AlarmProps.ACTIVE_STATE, vertex[VProps.STATE])
else:
self.assertEqual(AlarmProps.INACTIVE_STATE, vertex[VProps.STATE])
def _validate_host_neighbor(self,
wrapper,
alarm_id,
host_name):
self.assertEqual(1, len(wrapper.neighbors))
host_neighbor = wrapper.neighbors[0]
host_transformer = self.transformers[NOVA_HOST_DATASOURCE]
properties = {
VProps.ID: host_name,
VProps.TYPE: NOVA_HOST_DATASOURCE,
VProps.SAMPLE_TIMESTAMP: wrapper.vertex[VProps.SAMPLE_TIMESTAMP],
}
expected_neighbor = host_transformer.\
create_placeholder_vertex(**properties)
self.assertEqual(expected_neighbor, host_neighbor.vertex)
# Validate neighbor edge
edge = host_neighbor.edge
self.assertEqual(edge.source_id, alarm_id)
self.assertEqual(edge.target_id, host_neighbor.vertex.vertex_id)
self.assertEqual(edge.label, EdgeLabel.ON)
def _validate_graph_action(self, wrapper):
if self._is_erroneous(wrapper.vertex):
self.assertEqual(GraphAction.UPDATE_ENTITY, wrapper.action)
else:
self.assertEqual(GraphAction.DELETE_ENTITY, wrapper.action)
@abc.abstractmethod
def _is_erroneous(self, vertex):
pass
|
Python
| 0.000006
|
@@ -2415,16 +2415,70 @@
SOURCE,%0A
+ VProps.CATEGORY: EntityCategory.RESOURCE,%0A
@@ -2602,16 +2602,17 @@
sformer.
+
%5C%0A
@@ -2624,16 +2624,25 @@
create_
+neighbor_
placehol
|
de22467c0b5e5996a40cc8b8b84d2ac484690939
|
Clear some unneeded optimizations so progress reporting is smoother
|
gmail-restore-labels.py
|
gmail-restore-labels.py
|
#!/usr/bin/python3
import config_oldbw
import config_newbw
import email.header
import imaplib
import os
import pprint
import re
import shelve
import ssl
import pickle
class Gmail(imaplib.IMAP4_SSL):
def __init__(self, cfg):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
ctx.verify_mode = ssl.CERT_REQUIRED
ctx.set_default_verify_paths()
# XXX: alternative, should work too:
# ctx.load_verify_locations('/etc/ssl/certs/ca-certificates.crt')
imaplib.IMAP4_SSL.__init__(self, 'imap.gmail.com', 993, ssl_context=ctx)
# XXX: I have no idea how to check / if I need to check that thing. State from 2012-12-14.
# MGL: set_default_verify_paths should do the work, as long as openssl is configured properly
# assert self.sock.getpeercert() == {'subject': ((('countryName', 'US'),), (('stateOrProvinceName', 'California'),), (('localityName', 'Mountain View'),), (('organizationName', 'Google Inc'),), (('commonName', 'imap.gmail.com'),)), 'serialNumber': '3B73268B0000000068A5', 'subjectAltName': (('DNS', 'imap.gmail.com'),), 'version': 3, 'notBefore': 'Sep 12 11:55:49 2012 GMT', 'notAfter': 'Jun 7 19:43:27 2013 GMT', 'issuer': ((('countryName', 'US'),), (('organizationName', 'Google Inc'),), (('commonName', 'Google Internet Authority'),))}
self.login(cfg.LOGIN, cfg.PASSWORD)
assert 'X-GM-EXT-1' in self.capabilities
def selectfolder(self, folder, readonly=True):
resp = self.select(folder, readonly)
assert resp[0] == 'OK'
total = int(resp[1][0])
assert total > 0
return total
def download_labels_batch(gmail, start, count):
# for 1,100 ask for 1:100, next time 101:200, etc.
resp = gmail.fetch('%d:%d' % (start, start + count - 1), '(X-GM-LABELS UID BODY[HEADER.FIELDS (MESSAGE-ID)])')
assert resp[0] == 'OK'
"""
response here is ugly:
[(b'1 (X-GM-LABELS () UID 1 BODY[HEADER.FIELDS (MESSAGE-ID)] {61}',
b'Message-ID: <a38097d40612071225s1e399c3eu@mail.gmail.com>\r\n\r\n'),
b')',
(b'2 (X-GM-LABELS () UID 2 BODY[HEADER.FIELDS (MESSAGE-ID)] {40}',
b'Message-ID: <45787AFA.7020202@wp.pl>\r\n\r\n'),
b')',
...
]
And UID comes after X-GM-LABELs regardless of request order, wtf?
"""
regexp = re.compile('(\d+) \(X-GM-LABELS \((.*)\) UID (\d+) BODY\[HEADER.FIELDS \(MESSAGE-ID\)\] {(\d+)}')
# every even (2, 4, 6, 8, ...) item from response should be b')'
for even_item in resp[1][1::2]:
assert even_item == b')'
# every odd (1, 3, 5, 7, ...) item should match regexp
for odd_item in resp[1][::2]:
try:
imapid, labels, uid, payloadlen = regexp.match(odd_item[0].decode('utf-8')).groups()
except AttributeError:
print("%s\n%s" % (odd_item[0], odd_item[1]))
raise
assert int(payloadlen) == len(odd_item[1])
try:
msgid = odd_item[1].decode('utf-8').split()[1]
except IndexError:
if config.DEBUG or config.MESSAGE_DETAILS:
print('got message without Message-ID header: '
'gmail id %s, link: https://mail.google.com/mail/#all/%s'
% (gmailid, hex(int(gmailthreadid))[2:])
)
#continue
# allow update by gmail id
msgid = None
yield uid, msgid, labels
def download_labels(gmail, total):
batch_size = 1000
for start in range(1, total, batch_size):
for uid, msgid, labels in download_labels_batch(gmail, start, batch_size):
yield uid, msgid, labels
def map_labels(labels):
for label in labels.split():
#TODO: could keep most of these and map to things under [Gmail]/
if label[0:3] == '"\\\\':
assert label[-1:] == '"'
continue
yield label
def create_label_index(gmail, cfg):
total = gmail.selectfolder(cfg.IMAP_FOLDER)
index = dict()
count = 0
for uid, msgid, labels in download_labels(gmail, total):
msglabels = index.setdefault(msgid, set())
msglabels.update(map_labels(labels))
count += 1
if count % 100 == 0:
print("Fetch: %7d / %7d" % (count, total), end='\r', flush=True)
print("Fetch: %7d / %7d -- Done" % (count, total))
return index
def apply_labels(gmail, cfg, index):
total = gmail.selectfolder(cfg.IMAP_FOLDER)
count = 0
added = 0
for uid, msgid, labels in download_labels(gmail, total):
count += 1
msgwantlabels = index.get(msgid)
if msgwantlabels is None:
print("No labels for %s" % msgid)
continue
msghaslabels = set(map_labels(labels))
msgneedlabels = msgwantlabels - msghaslabels
if len(msgneedlabels) == 0:
continue
#print("Message %s has %s, should have %s, add %s" % (msgid, msghaslabels, msgwantlabels, msgneedlabels))
for l in msgneedlabels:
type, data = gmail.uid('COPY', uid, l)
assert type == 'OK'
added += 1
#print("%s" % (data,))
# apply is slow, print all the time
if True or count % 100 == 0:
print("Apply: %7d (%8d) / %7d" % (count, added, total), end='\r', flush=True)
print("Apply: %7d (%8d) / %7d -- Done" % (count, added, total))
def main():
labelsfile = 'gmail-restore-labels.labels.pickle'
index = None
try:
with open(labelsfile, 'rb') as f:
index = pickle.load(f)
except FileNotFoundError:
print('No index file, will generate one')
index = None
if index is None:
with Gmail(config_oldbw) as oldgmail:
index = create_label_index(oldgmail, config_oldbw)
with open(labelsfile, 'wb') as f:
pickle.dump(index, f)
with Gmail(config_newbw) as newgmail:
apply_labels(newgmail, config_newbw, index)
return
if __name__ == "__main__":
main()
|
Python
| 0.000001
|
@@ -4587,16 +4587,20 @@
if
+len(
msgwantl
@@ -4604,24 +4604,22 @@
ntlabels
- is None
+) == 0
:%0A
@@ -4658,37 +4658,16 @@
msgid)%0A
- continue%0A
@@ -4697,32 +4697,32 @@
labels(labels))%0A
+
msgneedl
@@ -4762,65 +4762,8 @@
els%0A
- if len(msgneedlabels) == 0:%0A continue%0A
|
8e1f573edb01aac1df45030182ab73d423914f8f
|
check if REDIS_URL exists before connect to redis brain
|
robot.py
|
robot.py
|
# coding: utf-8
from __future__ import unicode_literals
from gevent.monkey import patch_all
patch_all()
import gevent
import logging
from gevent.pool import Pool
from redis import StrictRedis
from importlib import import_module
from slackclient import SlackClient
from settings import APPS, SLACK_TOKEN, REDIS_URL
pool = Pool(20)
CMD_PREFIX = '!'
logger = logging.getLogger()
class RedisBrain(object):
def __init__(self):
try:
self.redis = StrictRedis(host=REDIS_URL)
except Exception as e:
logger.error(e)
self.redis = None
def set(self, key, value):
if self.redis:
self.redis.set(key, value)
return True
else:
return False
def get(self, key):
if self.redis:
return self.redis.get(key)
return None
def lpush(self, key, value):
if self.redis:
self.redis.lpush(key, value)
return True
else:
return False
def lpop(self, key):
if self.redis:
return self.redis.lpop(key)
return None
def lindex(self, key):
if self.redis:
return self.redis.lindex(key)
return None
class Robot(object):
def __init__(self):
self.client = SlackClient(SLACK_TOKEN)
self.brain = RedisBrain()
self.apps, self.docs = self.load_apps()
def load_apps(self):
docs = ['='*14, 'Usage', '='*14]
apps = {}
for name in APPS:
app = import_module('apps.%s' % name)
docs.append(
'!%s: %s' % (', '.join(app.run.commands), app.run.__doc__)
)
for command in app.run.commands:
apps[command] = app
return apps, docs
def handle_messages(self, messages):
for channel, user, text in messages:
command, payloads = self.extract_command(text)
if not command:
continue
app = self.apps.get(command, None)
if not app:
continue
pool.apply_async(
func=app.run, args=(self, channel, user, payloads)
)
def extract_messages(self, events):
messages = []
for event in events:
channel = event.get('channel', '')
user = event.get('user', '')
text = event.get('text', '')
if channel and user and text:
messages.append((channel, user, text))
return messages
def extract_command(self, text):
if CMD_PREFIX != text[0]:
return (None, None)
tokens = text.split(' ', 1)
if 1 < len(tokens):
return tokens[0][1:], tokens[1]
else:
return (text[1:], '')
def rtm_connect(self):
conn = None
try:
conn = self.client.rtm_connect()
except Exception as e:
logger.error(e)
return conn
def read_message(self):
events = None
try:
events = self.client.rtm_read()
except Exception as e:
logger.error(e)
return events
def run(self):
if not self.rtm_connect():
raise RuntimeError(
'Can not connect to slack client. Check your settings.'
)
while True:
events = self.read_message()
if events:
messages = self.extract_messages(events)
self.handle_messages(messages)
gevent.sleep(0.3)
if '__main__' == __name__:
robot = Robot()
robot.run()
|
Python
| 0
|
@@ -418,32 +418,84 @@
__init__(self):%0A
+ self.redis = None%0A if REDIS_URL:%0A
try:%0A
@@ -483,32 +483,36 @@
try:%0A
+
self
@@ -548,32 +548,36 @@
IS_URL)%0A
+
+
except Exception
@@ -587,32 +587,36 @@
e:%0A
+
logger.error(e)%0A
@@ -618,38 +618,8 @@
r(e)
-%0A self.redis = None
%0A%0A
|
8783d769b58a926fd873391d51138a590a7b877b
|
fix dependency resolution and resource embedding (regression caused by rename from medea to medealib)
|
compiler/build.py
|
compiler/build.py
|
import sys
import re
import os
import shutil
import preprocessor
primary_compiled_file = 'medea.core-compiled.js'
def get_full_file_name(file):
# the rules for module names are simple - if the full .js file name
# is given, we load it directly. Otherwise, we assume it is a medea
# module of the given name and derive the file name from it.
return ('medea.' + file + '.js') if not ".js" in file.lower()\
else os.path.join('3rdparty',file)
def get_google_closure_params():
# ADVANCED_OPTIMIZATIONS breaks the medea module dependency system.
# TODO: might be possible to fix this, though.
return '// ==ClosureCompiler==\n' +\
'// @output_file_name {0}\n'.format(primary_compiled_file[:-2] + 'min.js') +\
'// @compilation_level SIMPLE_OPTIMIZATIONS\n' +\
'// ==/ClosureCompiler==\n\n'
def get_license():
with open( 'LICENSE', 'rt') as inp:
# the @license tag instructs minifiers not to strip the comment
return "/** @license\n" + inp.read() + '\n*/'
def javascript_string_escape(s):
# TODO: this does not catch everything.
escaped = s.replace('\\','\\\\')
escaped = escaped.replace('"','\\"')
escaped = escaped.replace('\'','\\\'')
return '+ \n'.join("'" + line + "\\n'" for line in escaped.split('\n')) + '\n'
def include_resource(resource, source_file):
try:
with open(source_file, 'rt') as inp:
return """
medea._bakedResources["{resource}"] = {data};
""".format(resource=resource, data=javascript_string_escape(inp.read()))
except IOError:
print('failed to open input file: ' + source_file)
def derive_topological_order(initial, mods_by_deps):
mods_by_deps_copy = dict(mods_by_deps)
topo_order = list(initial)
deps_handled = set()
while len(mods_by_deps_copy) > 0:
for k,v in mods_by_deps_copy.items():
if not v.issubset(deps_handled):
continue
if not k in topo_order:
topo_order.append(k)
mods_by_deps_copy.pop(k)
deps_handled.add(k)
break
else:
print('error: cyclic dependency in modules, current order is ' + str(topo_order))
sys.exit(-2)
return topo_order
def run(input_folder, output_folder, files_to_compact, resources_to_include = {}):
input_folder_3rdparty = os.path.join(input_folder, '3rdparty')
output_folder_3rdparty = os.path.join(output_folder, '3rdparty')
# cleanup previous compiler output
shutil.rmtree(output_folder, True)
try:
os.makedirs(output_folder_3rdparty)
except:
pass
mods_by_deps = {}
all_deps = set()
# add implicit dependencies dependent on core (core itself is handled separately)
files_to_compact.append('node')
files_to_compact.append('viewport')
# scan input files for dependencies
cursor = 0
while cursor < len(files_to_compact):
file = files_to_compact[cursor]
cursor = cursor + 1
full_file_name = get_full_file_name(file)
path = os.path.join(input_folder, full_file_name)
print('processing: ' + path)
with open(path, 'rt') as inp:
contents = inp.read()
l = None
for match in re.finditer(r"medea\.define\(.*?,\[(.*?)\]", contents):
if not l is None:
print('unexpected input: two define calls in one file')
break
l = match.group(1)
l = frozenset(l.strip()[1:-1] for l in l.split(',') if len(l.strip()) > 0)
for dep in l:
all_deps.add(dep)
if not dep in mods_by_deps and not dep in files_to_compact:
files_to_compact.append(dep)
print full_file_name + ' depends on ' + dep
mods_by_deps[file] = l or frozenset()
print('deriving topological order of collated modules')
# pre-define sprintf, matrix and the core module as they do not follow the
# usual module dependency system.
topo_order = derive_topological_order(['core', 'glMatrix.js'],mods_by_deps)
print topo_order
print('writing medea.core-compiled.js')
# generate medea.core-compiled.js output file
with open(os.path.join(output_folder, primary_compiled_file), 'wt') as outp:
outp.write(get_google_closure_params())
outp.write(get_license())
outp.write('medea_is_compiled = true;');
for n, dep in enumerate(topo_order):
path = os.path.join(input_folder, get_full_file_name(dep));
print('collating: ' + path)
with open(path, 'rt') as inp:
outp.write(preprocessor.run(inp.read(), input_folder))
#if '.js' in dep:
# outp.write('medea._markScriptAsLoaded("'+ dep +'");')
outp.write('\n')
# embed resource files
if resources_to_include:
outp.write('medea._bakedResources = {}; \n')
for k,v in resources_to_include.items():
print('embedding: ' + v + ' as ' + k)
outp.write(include_resource(k,v))
outp.write('delete window.medea_is_compiled;');
topo_order = [get_full_file_name(e) for e in topo_order]
# copy all other files
for file in os.listdir(input_folder):
if not file in topo_order and ".js" in file:
print('copying ' + file + ' to output folder')
shutil.copy2(os.path.join(input_folder, file), os.path.join(output_folder, file))
for file in os.listdir(input_folder_3rdparty):
if not os.path.join('3rdparty',file) in topo_order and ".js" in file:
print('copying ' + file + ' to output folder')
shutil.copy2(os.path.join(input_folder_3rdparty, file), os.path.join(output_folder_3rdparty, file))
|
Python
| 0
|
@@ -1356,24 +1356,27 @@
%22 %0A%0A%09%09%09medea
+lib
._bakedResou
@@ -2982,16 +2982,19 @@
(r%22medea
+lib
%5C.define
@@ -4401,16 +4401,19 @@
e('medea
+lib
._bakedR
|
395617afca4d242de12e2a75a3ae7d2a258f75a7
|
use template string
|
paystackapi/constants.py
|
paystackapi/constants.py
|
"""Script used to define constants used across codebase."""
PAYSTACK_SECRET_KEY = 'sk_test_0a246ef179dc841f42d20959bebdd790f69605d8'
HEADERS = {'Authorization': 'Bearer ' + PAYSTACK_SECRET_KEY}
API_URL = 'https://api.paystack.co/'
|
Python
| 0.000001
|
@@ -168,31 +168,11 @@
rer
-' + PAYSTACK_SECRET_KEY
+%7B%7D'
%7D%0AAP
|
39beb9cbb3d0158dab58787cbe95651c8ec66db9
|
Bump up minor version.
|
patroni/version.py
|
patroni/version.py
|
__version__ = '0.75'
|
Python
| 0
|
@@ -15,7 +15,7 @@
'0.7
-5
+6
'%0A
|
d0568b2c132ebe2cdf1f656ee96442a0888257cd
|
add NSecurity class
|
CorpFin/Security.py
|
CorpFin/Security.py
|
from HelpyFuncs.SymPy import sympy_theanify
class Security:
def __init__(self, label='', bs_val=0., val=0.):
self.label = label
self.bs_val_expr = bs_val
self.bs_val = sympy_theanify(bs_val)
self.val_expr = val
self.val = sympy_theanify(val)
def __call__(self, **kwargs):
if self.label:
s = ' "%s"' % self.label
else:
s = ''
return 'Security' + s + ': BS Val = %.3g, Val = %.3g' % (self.bs_val(**kwargs), self.val(**kwargs))
DOLLAR = Security(label='$', bs_val=1., val=1.)
|
Python
| 0
|
@@ -569,8 +569,125 @@
val=1.)%0A
+%0A%0Aclass NSecurity:%0A def __init__(self, n=1, security=DOLLAR):%0A self.n = n%0A self.security = security%0A
|
99818f02ebc46debe349a6c1b6bba70be6e04968
|
Update error message for no plugins
|
skimage/io/_plugins/null_plugin.py
|
skimage/io/_plugins/null_plugin.py
|
__all__ = ['imshow', 'imread', 'imsave', '_app_show']
import warnings
message = '''\
No plugin has been loaded. Please refer to
skimage.io.plugins()
for a list of available plugins.'''
def imshow(*args, **kwargs):
warnings.warn(RuntimeWarning(message))
def imread(*args, **kwargs):
warnings.warn(RuntimeWarning(message))
def imsave(*args, **kwargs):
warnings.warn(RuntimeWarning(message))
_app_show = imshow
|
Python
| 0
|
@@ -127,64 +127,187 @@
r to
-%0A%0Askimage.io.plugins()%0A%0Afor a list of available plugins.
+ the docstring for %60%60skimage.io%60%60%0Afor a list of available plugins. You may specify a plugin explicitly as%0Aan argument to %60%60imread%60%60, e.g. %60%60imread(%22image.jpg%22, plugin='pil')%60%60.%0A%0A
'''%0A
|
265052b981e04afe4815e9dceafbb7f2b06d2b0c
|
disable script host key checking
|
king/name-server.py
|
king/name-server.py
|
from twisted.internet import reactor
from twisted.names import dns, client, server
from rpyc.utils.factory import ssh_connect
from plumbum import SshMachine
from threading import Thread
import argparse
parser = argparse.ArgumentParser(description='Central Name Server')
parser.add_argument('--full', default=False, action='store_true', help='This instance will act as the endpoint for integration testing')
arguments = parser.parse_args()
class DNSServerFactory(server.DNSServerFactory):
def handleQuery(self, message, protocol, address):
try:
query = message.queries[0]
target = query.name.name
print 'Target:', target
query_type = target.split('.')[0]
if query_type == 'ns1':
A = dns.RRHeader(name=target, type=dns.A, cls=dns.IN, ttl=0,
payload=dns.Record_A(address='54.244.114.147', ttl=None))
args = (self, ([A], [], []), protocol, message, address)
return server.DNSServerFactory.gotResolverResponse(*args)
elif query_type == 'ns2':
A = dns.RRHeader(name=target, type=dns.A, cls=dns.IN, ttl=0,
payload=dns.Record_A(address='54.244.114.167', ttl=None))
args = (self, ([A], [], []), protocol, message, address)
return server.DNSServerFactory.gotResolverResponse(*args)
else:
query_id = int(target.split('.')[1])
origin = target.split('.')[2].split('---')
origin_ns_name = '.'.join(origin[4:])
origin_ip = '.'.join(origin[:4])
target = '.'.join(target.split('.')[2:])
print query_type, origin_ip, origin_ns_name
if query_type == 'full' and arguments.full:
Thread(target=full_rpc, args=(origin_ip, query_id)).start()
NS = dns.RRHeader(name=target, type=dns.NS, cls=dns.IN, ttl=0, auth=True,
payload=dns.Record_NS(name=origin_ns_name, ttl=0))
A = dns.RRHeader(name=origin_ns_name, type=dns.A, cls=dns.IN, ttl=0,
payload=dns.Record_A(address=origin_ip, ttl=None))
ans = []
auth = [NS]
add = [A]
args = (self, (ans, auth, add), protocol, message, address)
return server.DNSServerFactory.gotResolverResponse(*args)
except Exception, e:
print "Bad Request", e
def full_rpc(origin_ip, query_id):
try:
rem = SshMachine(origin_ip, user='ucb_268_measure', keyfile='~/.ssh/id_rsa')
conn = ssh_connect(rem, 18861)
conn.root.exposed_full_response(query_id, 'End Point Reached')
except Exception, e:
print "Could not perform RPC"
factory = DNSServerFactory()
protocol = dns.DNSDatagramProtocol(factory)
reactor.listenUDP(53, protocol)
reactor.listenTCP(53, factory)
reactor.run()
|
Python
| 0
|
@@ -2670,16 +2670,90 @@
/id_rsa'
+, ssh_opts=%5B%22StrictHostKeyChecking no%22, %22-o UserKnownHostsFile=/dev/null%22%5D
)%0A
|
c9170cb4c0d63a6dc75f0fa7ca76faa688a1678a
|
Make tags optional
|
ppb/forms.py
|
ppb/forms.py
|
from pinax.blog.forms import FIELDS, AdminPostForm
from pinax.blog.models import Post
from taggit.forms import TagField
FIELDS.append("tags")
class AdminPostTagsForm(AdminPostForm):
tags = TagField()
class Meta:
model = Post
fields = FIELDS
|
Python
| 0.000001
|
@@ -201,16 +201,30 @@
agField(
+required=False
)%0A%0A c
|
9ee9ba34e447e99c868fcb43d40ce905cebf5fb9
|
Add list and define functions.
|
noah/noah.py
|
noah/noah.py
|
import json
class Noah(object):
pass
|
Python
| 0
|
@@ -34,8 +34,588 @@
-pass
+def __init__(self, dictionary_file):%0A self.dictionary = json.load(dictionary_file)%0A%0A def list(self):%0A return '%5Cn'.join(%5Bentry%5B'word'%5D for entry in self.dictionary%5D)%0A%0A def define(self, word):%0A entry = next((x for x in self.dictionary if x%5B'word'%5D == word), None)%0A%0A if not entry is None:%0A return '%25s (%25s)' %25 (entry%5B'word'%5D, entry%5B'part_of_speech'%5D)%0A%0Adef main():%0A with open('../dictionaries/english.json') as dictionary:%0A n = Noah(dictionary)%0A%0A print n.list()%0A print n.define('aardvark')%0A%0Aif __name__ == '__main__':%0A main()
|
48e15ea8494d72ee2a4cb7d05b5ee5d626d581c5
|
Add groups to serf inventory plugin
|
plugins/inventory/serf.py
|
plugins/inventory/serf.py
|
#!/usr/bin/env python
# (c) 2015, Marc Abramowitz <marca@surveymonkey.com>
#
# This file is part of Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Dynamic inventory script which lets you use nodes discovered by Serf
# (https://serfdom.io/).
#
# Requires the `serfclient` Python module from
# https://pypi.python.org/pypi/serfclient
#
# Environment variables
# ---------------------
# - `SERF_RPC_ADDR`
# - `SERF_RPC_AUTH`
#
# These variables are described at https://www.serfdom.io/docs/commands/members.html#_rpc_addr
import argparse
import os
import sys
# https://pypi.python.org/pypi/serfclient
from serfclient import SerfClient, EnvironmentConfig
try:
import json
except ImportError:
import simplejson as json
_key = 'serf'
def _serf_client():
env = EnvironmentConfig()
return SerfClient(host=env.host, port=env.port, rpc_auth=env.auth_key)
def get_serf_members_data():
return _serf_client().members().body['Members']
def get_nodes(data):
return [node['Name'] for node in data]
def get_meta(data):
meta = {'hostvars': {}}
for node in data:
meta['hostvars'][node['Name']] = node['Tags']
return meta
def print_list():
data = get_serf_members_data()
nodes = get_nodes(data)
meta = get_meta(data)
print(json.dumps({_key: nodes, '_meta': meta}))
def print_host(host):
data = get_serf_members_data()
meta = get_meta(data)
print(json.dumps(meta['hostvars'][host]))
def get_args(args_list):
parser = argparse.ArgumentParser(
description='ansible inventory script reading from serf cluster')
mutex_group = parser.add_mutually_exclusive_group(required=True)
help_list = 'list all hosts from serf cluster'
mutex_group.add_argument('--list', action='store_true', help=help_list)
help_host = 'display variables for a host'
mutex_group.add_argument('--host', help=help_host)
return parser.parse_args(args_list)
def main(args_list):
args = get_args(args_list)
if args.list:
print_list()
if args.host:
print_host(args.host)
if __name__ == '__main__':
main(sys.argv[1:])
|
Python
| 0
|
@@ -1118,16 +1118,35 @@
rgparse%0A
+import collections%0A
import o
@@ -1615,24 +1615,228 @@
in data%5D%0A%0A%0A
+def get_groups(data):%0A groups = collections.defaultdict(list)%0A%0A for node in data:%0A for key, value in node%5B'Tags'%5D.items():%0A groups%5Bvalue%5D.append(node%5B'Name'%5D)%0A%0A return groups%0A%0A%0A
def get_meta
@@ -2042,24 +2042,54 @@
nodes(data)%0A
+ groups = get_groups(data)%0A
meta = g
@@ -2106,33 +2106,33 @@
ta)%0A
-print(json.dumps(
+inventory_data =
%7B_key: n
@@ -2151,16 +2151,86 @@
': meta%7D
+%0A inventory_data.update(groups)%0A print(json.dumps(inventory_data
))%0A%0A%0Adef
|
0dd2bd0a8d2b041672afdf66666df63e2dd1a044
|
Add author friends url.
|
rest/urls.py
|
rest/urls.py
|
# Author: Braedy Kuzma
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^posts/(?P<pid>[0-9a-fA-F\-]+)/$', views.PostView.as_view(),
name='post'),
url(r'^posts/$', views.PostsView.as_view(), name='posts'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/$', views.AuthorView.as_view(),
name='author'),
url(r'^posts/(?P<pid>[0-9a-fA-F\-]+)/comments/$',
views.CommentView.as_view(), name='comments')
]
|
Python
| 0
|
@@ -86,16 +86,79 @@
rns = %5B%0A
+ url(r'%5Eposts/$', views.PostsView.as_view(), name='posts'),%0A
url(
@@ -255,39 +255,82 @@
rl(r'%5Eposts/
-$', views.Posts
+(?P%3Cpid%3E%5B0-9a-fA-F%5C-%5D+)/comments/$',%0A views.Comment
View.as_view
@@ -331,35 +331,38 @@
s_view(), name='
-pos
+commen
ts'),%0A url(r'
@@ -456,35 +456,36 @@
%0A url(r'%5E
-posts
+author
/(?P%3C
-p
+a
id%3E%5B0-9a-fA-
@@ -483,39 +483,38 @@
%5B0-9a-fA-F%5C-%5D+)/
-comment
+friend
s/$',%0A vi
@@ -509,39 +509,45 @@
,%0A views.
-Comment
+AuthorFriends
View.as_view(),
@@ -552,21 +552,20 @@
, name='
-comment
+friend
s')%0A%5D%0A
|
0cf7fda731a71524651de95821b444b5c554260e
|
Move initial_fetch() up
|
inferno-cli.py
|
inferno-cli.py
|
#!/usr/bin/env python3
import argparse
import collections
import datetime
import logging
import os
import re
import sys
import time
import bs4
import requests
import setproctitle
import util
class Shoutbox:
base_url = ""
inferno_url = ""
s = None
lines = []
read = collections.deque(maxlen=21)
def __init__(self, base_url, cookie={}, inferno_path="/infernoshout.php", base_path="/index.php"):
self.base_url = base_url
self.inferno_url = self.base_url + inferno_path
self.s = requests.Session()
self.s.headers.update({
"User-Agent": "Mozilla/5.0 (Windows NT 6.1; rv:45.0) Gecko/20100101 Firefox/45.0",
"X-Requested-With": "XMLHttpRequest",
"Referer": self.base_url + base_path,
})
if cookie:
self.s.cookies.update(cookie)
def _parse(self, html):
MAGIC = "<<~!PARSE_SHOUT!~>>"
try:
active_users = util.atoi(html)
logging.info("%d active users" % active_users)
html = html[len(str(active_users)):]
except ValueError:
pass
if not html.startswith(MAGIC):
logging.warning("ignoring bogus html: %s" % html)
return
html = html.lstrip(MAGIC)
h = bs4.BeautifulSoup(html)
# put the full URL after the text inside the "a" tag
for a in h.find_all("a"):
if "href" not in a.attrs:
continue
if a['href'] == "#":
continue
a.string.replace_with("%s (%s)" % (a.string, a['href']))
for br in h.find_all("br"):
br.string = "\n"
chat = h.get_text()
# remove timestamps - they're relative, and thus they make read lines appear as unread when the day changes
chat = re.sub("^\[[^\]]*\] ", "", chat, flags=re.MULTILINE)
return chat
def _get(self):
params = {
"action": "getshouts",
"timestamp": datetime.datetime.now().strftime("%s200"),
}
try:
r = self.s.get(self.inferno_url, params=params)
return r.text
except requests.exceptions.ConnectionError as e:
logging.warn("connection error: %s" % e)
return ""
def update(self):
l = self._parse(self._get()).rstrip("\n").split('\n')
self.lines.extend(l)
def print_new(self):
for i in self.lines:
if i not in self.read:
print(i, flush=True)
self.read.append(i)
else:
logging.debug("skipping line " + i)
self.lines = []
def initial_fetch(self):
self.update()
for i in self.lines:
self.read.append(i)
self.lines = []
def main():
logging.basicConfig(level=logging.INFO)
os.environ["SPT_NOENV"] = "true"
setproctitle.setproctitle(sys.argv[0])
parser = argparse.ArgumentParser(description="Command line feed for Inferno Shoutbox")
parser.add_argument("-b", "--backlog", action="store_true", help="Display the backlog after connecting")
parser.add_argument("url", help="Base URL of the forum")
parser.add_argument("cookies", help="Cookies in the standard Cookie header format (RFC 6265, section 4.1.1)")
args = parser.parse_args()
s = Shoutbox(args.url, util.dict_from_cookie_str(args.cookies))
if args.backlog:
s.update()
s.print_new()
else:
s.initial_fetch()
while True:
time.sleep(5)
s.update()
s.print_new()
if __name__ == "__main__":
main()
|
Python
| 0.000005
|
@@ -2281,16 +2281,153 @@
urn %22%22%0A%0A
+ def initial_fetch(self):%0A self.update()%0A for i in self.lines:%0A self.read.append(i)%0A self.lines = %5B%5D%0A%0A
def
@@ -2794,145 +2794,8 @@
%5B%5D%0A%0A
- def initial_fetch(self):%0A self.update()%0A for i in self.lines:%0A self.read.append(i)%0A self.lines = %5B%5D%0A%0A
%0Adef
|
ad912a737a51070cc621715458740578d466c5b7
|
Update router.py
|
peewee_migrate/router.py
|
peewee_migrate/router.py
|
import os
import re
from importlib import import_module
from types import ModuleType
import mock
import peewee as pw
from cached_property import cached_property
from peewee_migrate import LOGGER, MigrateHistory
from peewee_migrate.auto import diff_many, NEWLINE
from peewee_migrate.compat import string_types, exec_in
from peewee_migrate.migrator import Migrator
CLEAN_RE = re.compile(r'\s+$', re.M)
MIGRATE_DIR = os.path.join(os.getcwd(), 'migrations')
VOID = lambda m, d: None # noqa
with open(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'template.txt')) as t:
MIGRATE_TEMPLATE = t.read()
class BaseRouter(object):
"""Abstract base class for router."""
def __init__(self, database, logger=LOGGER):
self.database = database
self.logger = logger
if not isinstance(self.database, (pw.Database, pw.Proxy)):
raise RuntimeError('Invalid database: %s' % database)
@cached_property
def model(self):
"""Ensure that migrations has prepared to run."""
# Initialize MigrationHistory model
MigrateHistory._meta.database = self.database
MigrateHistory.create_table(True)
return MigrateHistory
@property
def todo(self):
raise NotImplementedError
def create(self, name='auto', auto=False):
"""Create a migration."""
migrate = rollback = ''
if auto:
if isinstance(auto, str):
try:
auto = import_module(auto)
except ImportError:
return self.logger.error('Invalid models module: %s', auto)
if isinstance(auto, ModuleType):
auto = list(filter(
lambda m: isinstance(m, type) and issubclass(m, pw.Model)),
(getattr(auto, model) for model in dir(auto))) # noqa
for migration in self.diff:
self.run_one(migration, self.migrator)
models1 = auto
models2 = list(self.migrator.orm.values())
migrate = diff_many(models1, models2)
if not migrate:
return self.logger.warn('No changes has found.')
migrate = NEWLINE + NEWLINE.join('\n\n'.join(migrate).split('\n'))
migrate = CLEAN_RE.sub('\n', migrate)
rollback = diff_many(models2, models1)
rollback = NEWLINE + NEWLINE.join('\n\n'.join(rollback).split('\n'))
rollback = CLEAN_RE.sub('\n', rollback)
self.logger.info('Create a migration "%s"', name)
path = self._create(name, migrate, rollback)
self.logger.info('Migration has created %s', path)
return path
def _create(self, name, migrate='', rollback=''):
raise NotImplementedError
def read(self, name):
raise NotImplementedError
@property
def done(self):
"""Scan migrations in database."""
return [mm.name for mm in self.model.select()]
@property
def diff(self):
"""Calculate difference between fs and db."""
done = set(self.done)
return [name for name in self.todo if name not in done]
@cached_property
def migrator(self):
"""Create migrator and setup it with fake migrations."""
migrator = Migrator(self.database)
for name in self.done:
self.run_one(name, migrator)
return migrator
def run_one(self, name, migrator, fake=True, downgrade=False, force=False):
"""Run a migration."""
try:
migrate, rollback = self.read(name)
if fake:
with mock.patch('peewee.Model.select'):
with mock.patch('peewee.InsertQuery.execute'):
migrate(migrator, self.database)
if force:
self.model.create(name=name)
self.logger.info('Done %s', name)
migrator.clean()
return migrator
self.logger.info('Run "%s"', name)
with self.database.transaction():
if not downgrade:
migrate(migrator, self.database)
migrator.run()
self.model.create(name=name)
self.logger.info('Done %s', name)
else:
self.logger.info('Rollback %s', name)
rollback(migrator, self.database)
migrator.run()
self.model.delete().where(self.model.name == name).execute()
self.logger.info('Rolled back %s', name)
except Exception as exc:
self.database.rollback()
self.logger.exception(exc)
self.logger.error('Migration failed: %s', name)
raise
def run(self, name=None, fake=False):
"""Run migrations."""
self.logger.info('Start migrations')
done = []
diff = self.diff
if not diff:
self.logger.info('There is nothing to migrate')
return done
migrator = self.migrator
for mname in diff:
self.run_one(mname, migrator, fake=fake, force=fake)
done.append(mname)
if name and name == mname:
break
return done
def rollback(self, name):
name = name.strip()
done = self.done
if not done:
raise RuntimeError('No migrations are found.')
if name != done[-1]:
raise RuntimeError('Only last migration can be canceled.')
migrator = self.migrator
self.run_one(name, migrator, False, True)
self.logger.warn('Downgraded migration: %s', name)
class Router(BaseRouter):
filemask = re.compile(r"[\d]{3}_[^\.]+\.py$")
def __init__(self, database, migrate_dir=MIGRATE_DIR, **kwargs):
super(Router, self).__init__(database, **kwargs)
self.migrate_dir = migrate_dir
@property
def todo(self):
"""Scan migrations in file system."""
if not os.path.exists(self.migrate_dir):
self.logger.warn('Migration directory: %s does not exists.', self.migrate_dir)
os.makedirs(self.migrate_dir)
return sorted(
''.join(f[:-3]) for f in os.listdir(self.migrate_dir) if self.filemask.match(f))
def _create(self, name, migrate='', rollback=''):
"""Create a migration."""
num = len(self.todo)
prefix = '{:03}_'.format(num + 1)
name = prefix + name + '.py'
path = os.path.join(self.migrate_dir, name)
with open(path, 'w') as f:
f.write(MIGRATE_TEMPLATE.format(migrate=migrate, rollback=rollback))
return path
def read(self, name):
"""Read migration from file."""
with open(os.path.join(self.migrate_dir, name + '.py')) as f:
code = f.read()
scope = {}
exec_in(code, scope)
return scope.get('migrate', VOID), scope.get('rollback', VOID)
class ModuleRouter(BaseRouter):
def __init__(self, database, migrate_module='migrations', **kwargs):
super(ModuleRouter, self).__init__(database, **kwargs)
if isinstance(migrate_module, string_types):
migrate_module = import_module(migrate_module)
self.migrate_module = migrate_module
def read(self, name):
mod = getattr(self.migrate_module, name)
return getattr(mod, 'migrate', VOID), getattr(mod, 'rollback', VOID)
|
Python
| 0
|
@@ -3710,24 +3710,99 @@
.execute'):%0A
+ with mock.patch('peewee.UpdateQuery.execute'):%0A
|
6fce2e52715f1a77edb19eca8b1133875fff3d34
|
Set HearingViewSet read Only
|
kk/views/hearing.py
|
kk/views/hearing.py
|
import django_filters
from rest_framework import viewsets
from rest_framework import serializers
from rest_framework import filters
from rest_framework.decorators import detail_route
from rest_framework.response import Response
from kk.models import Hearing
from .image import ImageFieldSerializer, ImageSerializer
class HearingFilter(django_filters.FilterSet):
next_closing = django_filters.DateTimeFilter(name='close_at', lookup_type='gt')
class Meta:
model = Hearing
fields = ['next_closing', ]
# Serializer for labels. Get label names instead of IDs.
class LabelSerializer(serializers.RelatedField):
def to_representation(self, value):
return value.label
class HearingSerializer(serializers.ModelSerializer):
labels = LabelSerializer(many=True, read_only=True)
images = ImageFieldSerializer(many=True, read_only=True)
class Meta:
model = Hearing
fields = ['abstract', 'heading', 'borough', 'n_comments', 'labels', 'close_at', 'created_at',
'latitude', 'longitude', 'servicemap_url', 'images']
class HearingViewSet(viewsets.ModelViewSet):
"""
API endpoint for hearings.
"""
queryset = Hearing.objects.all()
serializer_class = HearingSerializer
filter_backends = (filters.DjangoFilterBackend, filters.OrderingFilter)
#ordering_fields = ('created_at',)
#ordering = ('-created_at',)
#filter_class = HearingFilter
def get_queryset(self):
next_closing = self.request.query_params.get('next_closing', None)
if next_closing is not None:
return self.queryset.filter(close_at__gt=next_closing).order_by('close_at')[:1]
return self.queryset.order_by('-created_at')
@detail_route(methods=['get'])
def images(self, request, pk=None):
hearing = self.get_object()
images = hearing.images.all()
page = self.paginate_queryset(images)
if page is not None:
serializer = ImageSerializer(page, many=True)
return self.get_paginated_response(serializer.data)
serializer = ImageSerializer(images, many=True)
return Response(serializer.data)
# temporary for query debug purpose
def _list(self, request, *args, **kwargs):
queryset = self.filter_queryset(self.get_queryset())
print(queryset.query)
page = self.paginate_queryset(queryset)
if page is not None:
serializer = self.get_serializer(page, many=True)
return self.get_paginated_response(serializer.data)
serializer = self.get_serializer(queryset, many=True)
return Response(serializer.data)
|
Python
| 0
|
@@ -1117,16 +1117,24 @@
iewsets.
+ReadOnly
ModelVie
|
b0fa9031b4eabd33a6c6f8f27e22351b14e1eeee
|
Set a new primary avatar when deleting the primary avatar.
|
avatar/views.py
|
avatar/views.py
|
import os.path
from avatar.models import Avatar, avatar_file_path
from avatar.forms import PrimaryAvatarForm, DeleteAvatarForm
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.contrib.auth.decorators import login_required
from django.utils.translation import ugettext as _
def _get_next(request):
"""
The part that's the least straightforward about views in this module is how they
determine their redirects after they have finished computation.
In short, they will try and determine the next place to go in the following order:
1. If there is a variable named ``next`` in the *POST* parameters, the view will
redirect to that variable's value.
2. If there is a variable named ``next`` in the *GET* parameters, the view will
redirect to that variable's value.
3. If Django can determine the previous page from the HTTP headers, the view will
redirect to that previous page.
"""
next = request.POST.get('next', request.GET.get('next', request.META.get('HTTP_REFERER', None)))
if not next:
next = request.path
return next
def change(request, extra_context={}, next_override=None):
avatars = Avatar.objects.filter(user=request.user).order_by('-primary')
if avatars.count() > 0:
avatar = avatars[0]
kwargs = {'initial': {'choice': avatar.id}}
else:
avatar = None
kwargs = {}
primary_avatar_form = PrimaryAvatarForm(request.POST or None, user=request.user, **kwargs)
if request.method == "POST":
if 'avatar' in request.FILES:
path = avatar_file_path(user=request.user,
filename=request.FILES['avatar'].name)
avatar = Avatar(
user = request.user,
primary = True,
avatar = path,
)
new_file = avatar.avatar.storage.save(path, request.FILES['avatar'])
avatar.save()
request.user.message_set.create(
message=_("Successfully uploaded a new avatar."))
if 'choice' in request.POST and primary_avatar_form.is_valid():
avatar = Avatar.objects.get(id=
primary_avatar_form.cleaned_data['choice'])
avatar.primary = True
avatar.save()
request.user.message_set.create(
message=_("Successfully updated your avatar."))
return HttpResponseRedirect(next_override or _get_next(request))
return render_to_response(
'avatar/change.html',
extra_context,
context_instance = RequestContext(
request,
{ 'avatar': avatar,
'avatars': avatars,
'primary_avatar_form': primary_avatar_form,
'next': next_override or _get_next(request), }
)
)
change = login_required(change)
def delete(request, extra_context={}, next_override=None):
avatars = Avatar.objects.filter(user=request.user).order_by('-primary')
if avatars.count() > 0:
avatar = avatars[0]
else:
avatar = None
delete_avatar_form = DeleteAvatarForm(request.POST or None, user=request.user)
if request.method == 'POST':
if delete_avatar_form.is_valid():
ids = delete_avatar_form.cleaned_data['choices']
Avatar.objects.filter(id__in=ids).delete()
request.user.message_set.create(
message=_("Successfully deleted the requested avatars."))
return HttpResponseRedirect(next_override or _get_next(request))
return render_to_response(
'avatar/confirm_delete.html',
extra_context,
context_instance = RequestContext(
request,
{ 'avatar': avatar,
'avatars': avatars,
'delete_avatar_form': delete_avatar_form,
'next': next_override or _get_next(request), }
)
)
change = login_required(change)
|
Python
| 0
|
@@ -3358,16 +3358,276 @@
oices'%5D%0A
+ if unicode(avatar.id) in ids and avatars.count() %3E len(ids):%0A for a in avatars:%0A if unicode(a.id) not in ids:%0A a.primary = True%0A a.save()%0A break%0A
|
829ddcdf0ceff4f43cf871b7438170d4e4971a70
|
Fix cyclomatic complexity problem in exception handling
|
surveymonkey/exceptions.py
|
surveymonkey/exceptions.py
|
# -*- coding: utf-8 -*-
class SurveyMonkeyException(Exception):
def __init__(self, response):
data = response.json()
super(SurveyMonkeyException, self).__init__(data["error"]["message"])
self.status_code = response.status_code
self.error_code = data["error"]["id"]
class SurveyMonkeyBadRequest(SurveyMonkeyException):
pass
class SurveyMonkeyAuthorizationError(SurveyMonkeyException):
pass
class SurveyMonkeyPermissionError(SurveyMonkeyException):
pass
class SurveyMonkeyResourceNotFound(SurveyMonkeyException):
pass
class SurveyMonkeyResourceConflict(SurveyMonkeyException):
pass
class SurveyMonkeyRequestEntityTooLarge(SurveyMonkeyException):
pass
class SurveyMonkeyInternalServerError(SurveyMonkeyException):
pass
class SurveyMonkeyUserSoftDeleted(SurveyMonkeyException):
pass
class SurveyMonkeyUserDeleted(SurveyMonkeyException):
pass
def response_raises(response):
if response.status_code == 200:
return
elif response.status_code == 400:
raise SurveyMonkeyBadRequest(response)
elif response.status_code == 401:
raise SurveyMonkeyAuthorizationError(response)
elif response.status_code == 403:
raise SurveyMonkeyPermissionError(response)
elif response.status_code == 404:
if response.json()["error"]["id"] == "1052":
raise SurveyMonkeyUserSoftDeleted(response)
else:
raise SurveyMonkeyResourceNotFound(response)
elif response.status_code == 409:
raise SurveyMonkeyResourceConflict(response)
elif response.status_code == 413:
raise SurveyMonkeyRequestEntityTooLarge(response)
elif response.status_code in [500, 503]:
raise SurveyMonkeyInternalServerError(response)
elif response.status_code == 410:
raise SurveyMonkeyUserDeleted(response)
|
Python
| 0.000041
|
@@ -960,97 +960,157 @@
e):%0A
- if response.status_code == 200:%0A return%0A elif response.status_code == 400:%0A
+%0A def _not_found(response):%0A if response.json()%5B%22error%22%5D%5B%22id%22%5D == %221052%22:%0A return SurveyMonkeyUserSoftDeleted%0A else:%0A
@@ -1106,36 +1106,37 @@
e:%0A r
-aise
+eturn
SurveyMonkeyBad
@@ -1136,302 +1136,306 @@
nkey
-BadRequest(response)%0A elif response.status_code == 401:%0A raise SurveyMonkeyAuthorizationError(response)%0A elif response.status_code == 403:%0A raise SurveyMonkeyPermissionError(response)%0A elif response.status_code == 404:%0A if response.json()%5B%22error%22%5D%5B%22id%22%5D == %221052%22:
+ResourceNotFound%0A%0A def _client_error(code):%0A return %7B%0A 400: SurveyMonkeyBadRequest,%0A 401: SurveyMonkeyAuthorizationError,%0A 403: SurveyMonkeyPermissionError,%0A 409: SurveyMonkeyResourceConflict,%0A 413: SurveyMonkeyRequestEntityTooLarge,
%0A
@@ -1435,37 +1435,36 @@
ge,%0A
-raise
+410:
SurveyMonkeyUse
@@ -1468,29 +1468,65 @@
User
-Soft
Deleted
-(respons
+%0A %7D.get(code)%0A%0A def _server_error(cod
e)
+:
%0A
@@ -1522,37 +1522,40 @@
(code):%0A
-else:
+return %7B
%0A rai
@@ -1543,37 +1543,36 @@
n %7B%0A
-raise
+500:
SurveyMonkeyRes
@@ -1572,43 +1572,109 @@
nkey
-ResourceNotFound(respons
+InternalServerError,%0A 503: SurveyMonkeyInternalServerError%0A %7D.get(cod
e)%0A
+%0A
-elif
+code =
res
@@ -1694,15 +1694,28 @@
code
+%0A%0A if code
==
-409
+200
:%0A
@@ -1725,77 +1725,23 @@
r
-aise SurveyMonkeyResourceConflict(response)%0A elif response.status_
+eturn%0A elif
code
@@ -1745,18 +1745,18 @@
ode == 4
-13
+04
:%0A
@@ -1761,195 +1761,137 @@
-raise SurveyMonkeyRequestEntityTooLarge(response)%0A elif response.status_code in %5B500, 503%5D:%0A raise SurveyMonkeyI
+exception = _not_found(response)%0A elif 400 %3C= code %3C= 499:%0A exception = _clie
nt
+_
er
-nalServerError(response)%0A elif response.status_
+ror(code)%0A elif 500 %3C=
code
-== 410
+%3C= 599
:%0A
@@ -1900,37 +1900,60 @@
-raise SurveyMonkeyUserDeleted
+exception = _server_error(code)%0A%0A raise exception
(res
|
954c06d2715090e15dbe9a76dffb0eeabda06a48
|
make flake8 happy
|
bids/grabbids/__init__.py
|
bids/grabbids/__init__.py
|
__all__ = ["bids_layout"]
|
Python
| 0
|
@@ -1,8 +1,44 @@
+from .bids_layout import BIDSLayout%0A
__all__
@@ -41,22 +41,21 @@
l__ = %5B%22
-bids_l
+BIDSL
ayout%22%5D%0A
|
31c5071203fa234521cb8d3270f0c0f75488934d
|
Add test for IX prefixes.
|
peeringdb/tests.py
|
peeringdb/tests.py
|
from __future__ import unicode_literals
from django.test import TestCase
from django.utils import timezone
from .api import PeeringDB
from .models import Network, NetworkIXLAN
class PeeringDBTestCase(TestCase):
def test_time_last_sync(self):
api = PeeringDB()
# Test when no sync has been done
self.assertEqual(api.get_last_sync_time(), 0)
# Test of sync record with no objects
time_of_sync = timezone.now()
api.record_last_sync(
time_of_sync, {'added': 0, 'updated': 0, 'deleted': 0})
self.assertEqual(api.get_last_sync_time(), 0)
# Test of sync record with one object
time_of_sync = timezone.now()
api.record_last_sync(
time_of_sync, {'added': 1, 'updated': 0, 'deleted': 0})
self.assertEqual(api.get_last_sync_time(),
int(time_of_sync.timestamp()))
def test_get_autonomous_system(self):
api = PeeringDB()
asn = 15169
# Using an API call (no cached data)
autonomous_system = api.get_autonomous_system(asn)
self.assertEqual(autonomous_system.asn, asn)
# Save the data inside the cache
details = {
'id': autonomous_system.id,
'asn': autonomous_system.asn,
'name': autonomous_system.name,
}
network = Network(**details)
network.save()
# Using no API calls (cached data)
autonomous_system = api.get_autonomous_system(asn)
self.assertEqual(autonomous_system.asn, asn)
def test_get_ix_network(self):
api = PeeringDB()
ix_network_id = 29146
# Using an API call (no cached data)
ix_network = api.get_ix_network(ix_network_id)
self.assertEqual(ix_network.id, ix_network_id)
# Save the data inside the cache
details = {
'id': ix_network.id,
'asn': ix_network.asn,
'name': ix_network.name,
'ix_id': ix_network.ix_id,
'ixlan_id': ix_network.ixlan_id,
}
network_ixlan = NetworkIXLAN(**details)
network_ixlan.save()
# Using no API calls (cached data)
ix_network = api.get_ix_network(ix_network_id)
self.assertEqual(ix_network.id, ix_network_id)
def test_get_ix_networks_for_asn(self):
api = PeeringDB()
asn = 29467
known_ix_networks = [29146, 15321, 24292, 14658,
15210, 16774, 14657, 23162, 14659, 17707, 27863]
found_ix_networks = []
ix_networks = api.get_ix_networks_for_asn(asn)
for ix_network in ix_networks:
found_ix_networks.append(ix_network.id)
self.assertEqual(sorted(found_ix_networks), sorted(known_ix_networks))
|
Python
| 0
|
@@ -2779,8 +2779,443 @@
works))%0A
+%0A def test_get_prefixes_for_ix_network(self):%0A api = PeeringDB()%0A ix_network_id = 29146%0A%0A known_prefixes = %5B'2001:7f8:1::/64', '80.249.208.0/21'%5D%0A found_prefixes = %5B%5D%0A%0A ix_prefixes = api.get_prefixes_for_ix_network(ix_network_id)%0A for ix_prefix in ix_prefixes:%0A found_prefixes.append(ix_prefix%5B'prefix'%5D)%0A%0A self.assertEqual(sorted(found_prefixes), sorted(known_prefixes))%0A
|
93eb1fb058629f25f919a9c5f3647702c2767b22
|
test parsing nested rules and toplevel imports
|
peru/test/test_parser.py
|
peru/test/test_parser.py
|
from textwrap import dedent
import unittest
from peru.parser import parse_string
from peru.remote_module import RemoteModule
from peru.rule import Rule
class ParserTest(unittest.TestCase):
def test_parse_empty_file(self):
scope, local_module = parse_string("")
self.assertDictEqual(scope, {})
self.assertDictEqual(local_module.imports, {})
def test_parse_rule(self):
input = dedent("""\
rule foo:
build: echo hi
export: out/
""")
scope, local_module = parse_string(input)
self.assertIn("foo", scope)
rule = scope["foo"]
self.assertIsInstance(rule, Rule)
self.assertEqual(rule.name, "foo")
self.assertEqual(rule.build_command, "echo hi")
self.assertEqual(rule.export, "out/")
def test_parse_module(self):
input = dedent("""\
git module foo:
url: http://www.example.com/
rev: abcdefg
imports:
wham: bam/
thank: you/maam
""")
scope, local_module = parse_string(input)
self.assertIn("foo", scope)
module = scope["foo"]
self.assertIsInstance(module, RemoteModule)
self.assertEqual(module.name, "foo")
self.assertDictEqual(module.imports,
{"wham": "bam/",
"thank": "you/maam"})
self.assertDictEqual(module.plugin_fields,
{"url": "http://www.example.com/",
"rev": "abcdefg"})
|
Python
| 0
|
@@ -1614,12 +1614,726 @@
%22abcdefg%22%7D)%0A
+%0A def test_parse_nested_rule(self):%0A input = dedent(%22%22%22%5C%0A git module bar:%0A rule baz:%0A %22%22%22)%0A scope, local_module = parse_string(input)%0A self.assertIn(%22bar%22, scope)%0A module = scope%5B%22bar%22%5D%0A self.assertIsInstance(module, RemoteModule)%0A self.assertIn(%22bar.baz%22, scope)%0A rule = scope%5B%22bar.baz%22%5D%0A self.assertIsInstance(rule, Rule)%0A%0A def test_parse_toplevel_imports(self):%0A input = dedent(%22%22%22%5C%0A imports:%0A foo: bar/%0A %22%22%22)%0A scope, local_module = parse_string(input)%0A self.assertDictEqual(scope, %7B%7D)%0A self.assertDictEqual(local_module.imports, %7B%22foo%22: %22bar/%22%7D)%0A
|
dd91c9ee1964899b50801b0ca0fd5dd721d20620
|
Convert default URL to HTTPS
|
abusehelper/bots/phishtank/phishtankbot.py
|
abusehelper/bots/phishtank/phishtankbot.py
|
"""
PhishTank feed handler. Requires a PhishTank application key.
Maintainer: Codenomicon <clarified@codenomicon.com>
"""
import re
import bz2
import socket
import urllib2
import urlparse
import collections
from datetime import datetime
import xml.etree.cElementTree as etree
import idiokit
from abusehelper.core import bot, events, utils
def _replace_non_xml_chars(unicode_obj, replacement=u"\uFFFD"):
return _NON_XML.sub(replacement, unicode_obj)
_NON_XML = re.compile(u"[\x00-\x08\x0B\x0C\x0E-\x1F\uD800-\uDFFF\uFFFE\uFFFF]", re.U)
def parse_text(element, key):
entry = element.find(key)
if entry is None:
return
if not isinstance(entry, basestring):
entry = entry.text
if entry:
return entry
def is_domain(string):
for addr_type in (socket.AF_INET, socket.AF_INET6):
try:
socket.inet_ntop(addr_type, socket.inet_pton(addr_type, string))
except (ValueError, socket.error):
pass
else:
return False
return True
class BZ2Reader(object):
def __init__(self, fileobj):
self._fileobj = fileobj
self._bz2 = bz2.BZ2Decompressor()
self._line_buffer = collections.deque([""])
self._current_line = ""
self._current_offset = 0
def _read_raw(self, chunk_size=65536):
while True:
compressed = self._fileobj.read(chunk_size)
if not compressed:
return ""
decompressed = self._bz2.decompress(compressed)
if decompressed:
return decompressed
def _read_line(self):
if not self._line_buffer:
return ""
while len(self._line_buffer) == 1:
raw = self._read_raw()
if not raw:
return self._line_buffer.pop()
last = self._line_buffer.pop()
self._line_buffer.extend((last + raw).splitlines(True))
return self._line_buffer.popleft()
def _mangle_line(self, line, target="utf-8"):
        # Forcibly decode the bytes into a unicode object.
try:
decoded = line.decode("utf-8")
except UnicodeDecodeError:
decoded = line.decode("latin-1")
# Remove characters that are not proper XML 1.0.
sanitized = _replace_non_xml_chars(decoded)
return sanitized.encode("utf-8")
def _read(self, amount):
while self._current_offset >= len(self._current_line):
line = self._read_line()
if not line:
return ""
self._current_line = self._mangle_line(line)
self._current_offset = 0
data = self._current_line[self._current_offset:self._current_offset + amount]
self._current_offset += len(data)
return data
def read(self, amount):
result = list()
while amount > 0:
data = self._read(amount)
if not data:
break
amount -= len(data)
result.append(data)
return "".join(result)
class HeadRequest(urllib2.Request):
def get_method(self):
return "HEAD"
class PhishTankBot(bot.PollingBot):
application_key = bot.Param("registered application key for PhishTank")
feed_url = bot.Param(default="http://data.phishtank.com/data/%s/online-valid.xml.bz2")
def __init__(self, *args, **keys):
bot.PollingBot.__init__(self, *args, **keys)
self._etag = None
@idiokit.stream
def _handle_entry(self, entry, sites):
details = entry.find("details")
if details is None:
return
verification = entry.find("verification")
if verification is None or parse_text(verification, "verified") != "yes":
return
status = entry.find("status")
if status is None or parse_text(status, "online") != "yes":
return
url = parse_text(entry, "url")
if not url:
return
event = events.Event({"feed": "phishtank", "url": url})
domain = urlparse.urlparse(url).netloc
if is_domain(domain):
event.add("domain name", domain)
detail_url = parse_text(entry, "phish_detail_url")
if detail_url:
event.add("description url", detail_url)
target = parse_text(entry, "target")
if target:
event.add("target", target)
history = {}
for detail in details.findall("detail"):
ip = parse_text(detail, "ip_address")
if not ip:
continue
announcer = parse_text(detail, "announcing_network")
if not announcer:
continue
detail_time = parse_text(detail, "detail_time")
try:
ts = datetime.strptime(detail_time, "%Y-%m-%dT%H:%M:%S+00:00")
except (ValueError, TypeError):
continue
history[ts] = (ip, announcer)
if history:
latest = sorted(history.keys())[-1]
ip, announcer = history[latest]
url_data = sites.setdefault(url, set())
if (ip, announcer) in url_data:
return
url_data.add((ip, announcer))
event.add("ip", ip)
event.add("asn", announcer)
event.add("source time", latest.strftime("%Y-%m-%d %H:%M:%SZ"))
yield idiokit.send(event)
@idiokit.stream
def poll(self):
url = self.feed_url % self.application_key
try:
self.log.info("Checking if {0!r} has new data".format(url))
info, _ = yield utils.fetch_url(HeadRequest(url))
etag = info.get("etag", None)
if etag is not None and self._etag == etag:
raise bot.PollSkipped("no new data detected (ETag stayed the same)")
self.log.info("Downloading data from {0!r}".format(url))
_, fileobj = yield utils.fetch_url(url)
except utils.FetchUrlFailed as error:
raise bot.PollSkipped("failed to download {0!r} ({1})".format(url, error))
self.log.info("Downloaded data from {0!r}".format(url))
reader = BZ2Reader(fileobj)
try:
depth = 0
sites = dict()
for event, element in etree.iterparse(reader, events=("start", "end")):
if event == "start" and element.tag == "entry":
depth += 1
if event == "end" and element.tag == "entry":
yield self._handle_entry(element, sites)
depth -= 1
if event == "end" and depth == 0:
element.clear()
except SyntaxError as error:
raise bot.PollSkipped("syntax error in report {0!r} ({1})".format(url, error))
else:
self._etag = etag
def main(self, state):
if state is None:
state = None, None
self._etag, wrapped_state = state
return bot.PollingBot.main(self, wrapped_state) | self._add_etag_to_result()
@idiokit.stream
def _add_etag_to_result(self):
state = yield idiokit.consume()
idiokit.stop(self._etag, state)
if __name__ == "__main__":
PhishTankBot.from_command_line().execute()
|
Python
| 0.999998
|
@@ -3296,16 +3296,17 @@
lt=%22http
+s
://data.
|
8bb77e1cf4c5ec284641a178a106300db2f5575d
|
Use UTC
|
petitions/views.py
|
petitions/views.py
|
from django.shortcuts import render, get_object_or_404, redirect
from django.views.decorators.http import require_POST
from django.contrib.auth.decorators import login_required
from django.db.models import F
from datetime import datetime
from petitions.models import Petition
from profile.models import Profile
def petition(request, petition_id):
petition = get_object_or_404(Petition, pk=petition_id)
author = Profile.objects.get(petitions_created=petition)
user = request.user
curr_user_signed = user.partner_set.filter(petitions_signed=petition).exists()
users_signed = Profile.objects.filter(petitions_signed=petition)
data_object = {
'petition': petition,
'current_user': user,
'curr_user_signed': curr_user_signed,
'users_signed': users_signed
}
return render(request, '', data_object)
@login_required
@require_POST
def petition_sign(request, petition_id):
petition = get_object_or_404(Petition, pk=petition_id)
user = request.user
user.profile.petitions_signed.add(petition)
user.save()
petition.update(signatures=F('signatures')+1)
petition.update(last_signed=datetime.now())
petition.save()
return redirect('petition/' + str(petition_id))
# HELPER FUNCTIONS #
# SORTING
def most_recent():
return Petition.objects.all() \
.filter(expires__gt=datetime.now()) \
.exclude(has_response=True) \
.filter(published=True) \
.order_by('-created_at')
def most_signatures():
return Petition.objects.all() \
.filter(expires__gt=datetime.now()) \
.exclude(has_response=True) \
.filter(published=True) \
.order_by('-signatures')
def last_signed():
return Petition.objects.all() \
        .filter(expires__gt=datetime.now()) \
.exclude(has_response=True) \
.filter(published=True) \
.order_by('-last_signed')
|
Python
| 0
|
@@ -849,17 +849,43 @@
quest, '
-'
+petition/'+str(petition_id)
, data_o
@@ -1204,24 +1204,27 @@
ed=datetime.
+utc
now())%0A p
@@ -1273,16 +1273,21 @@
etition/
+sign/
' + str(
@@ -1414,32 +1414,35 @@
es__gt=datetime.
+utc
now()) %5C%0A .ex
@@ -1612,32 +1612,35 @@
es__gt=datetime.
+utc
now()) %5C%0A .ex
@@ -1813,16 +1813,19 @@
atetime.
+utc
now()) %5C
|
03b17837ed2c88692f1b99ec5b9b477f86fdddb6
|
Update version to 2.2b4-dev
|
openslides/__init__.py
|
openslides/__init__.py
|
__author__ = 'OpenSlides Team <support@openslides.org>'
__description__ = 'Presentation and assembly system'
__version__ = '2.2b3'
__license__ = 'MIT'
__url__ = 'https://openslides.org'
args = None
|
Python
| 0
|
@@ -125,9 +125,13 @@
2.2b
-3
+4-dev
'%0A__
|
356fdc5d69dadbddeb7cd064593ab31b7993a0bc
|
Use shared helper code for palevoccbot.
|
abusehelper/contrib/abusech/palevoccbot.py
|
abusehelper/contrib/abusech/palevoccbot.py
|
"""
abuse.ch Palevo C&C feed RSS bot.
Maintainer: Lari Huttunen <mit-code@huttu.net>
"""
from abusehelper.core import bot, events
from abusehelper.contrib.rssbot.rssbot import RSSBot
from . import is_ip
class PalevoCcBot(RSSBot):
feeds = bot.ListParam(default=["https://palevotracker.abuse.ch/?rssfeed"])
# If treat_as_dns_source is set, the feed ip is dropped.
treat_as_dns_source = bot.BoolParam()
def create_event(self, **keys):
event = events.Event()
# handle link data
link = keys.get("link", None)
if link:
event.add("description url", link)
# handle title data
title = keys.get("title", None)
if title:
host, date = title.split()
if is_ip(host):
event.add("ip", host)
else:
event.add("host", host)
event.add("source time", date)
# handle description data
description = keys.get("description", None)
if description:
for part in description.split(","):
pair = part.split(":", 1)
if len(pair) < 2:
continue
key = pair[0].strip()
value = pair[1].strip()
if not key or not value:
continue
if key == "Status":
event.add(key.lower(), value)
elif key == "SBL" and value != "Not listed":
key = key.lower() + " id"
event.add(key, value)
elif key == "IP address":
if not self.treat_as_dns_source:
event.add("ip", value)
event.add("feed", "abuse.ch")
event.add("malware", "Palevo")
event.add("type", "c&c")
return event
if __name__ == "__main__":
PalevoCcBot.from_command_line().execute()
|
Python
| 0
|
@@ -120,90 +120,64 @@
bot
-, events%0Afrom abusehelper.contrib.rssbot.rssbot import RSSBot%0A%0Afrom . import is_ip
+%0A%0Afrom . import is_ip, split_description, AbuseCHFeedBot
%0A%0A%0Ac
@@ -193,19 +193,27 @@
voCcBot(
-RSS
+AbuseCHFeed
Bot):%0A
@@ -405,293 +405,81 @@
def
-create_event(self, **keys):%0A event = events.Event()%0A # handle link data%0A link = keys.get(%22link%22, None)%0A if link:%0A event.add(%22description url%22, link)%0A # handle title data%0A title = keys.get(%22title%22, None)%0A if title:%0A
+parse_title(self, title):%0A pieces = title.split(None, 1)%0A%0A
hos
@@ -478,40 +478,25 @@
-
host
-, date = title.split()%0A
+ = pieces%5B0%5D%0A
@@ -523,38 +523,30 @@
- event.add(
+yield
%22ip%22, host)%0A
@@ -539,30 +539,25 @@
d %22ip%22, host
-)%0A
+%0A
else
@@ -570,30 +570,22 @@
- event.add(
+yield
%22host%22,
@@ -588,17 +588,17 @@
t%22, host
-)
+%0A
%0A
@@ -602,455 +602,176 @@
- event.add(%22source time%22, date)%0A # handle description data%0A description = keys.get(%22description%22, None)%0A if description:%0A for part in description.split(%22,%22):%0A pair = part.split(%22:%22, 1)%0A if len(pair) %3C 2:%0A continue%0A key = pair%5B0%5D.strip()%0A value = pair%5B1%5D.strip()%0A if not key or not value:%0A continue%0A
+if len(pieces) %3E 1:%0A yield %22source time%22, pieces%5B1%5D%0A%0A def parse_description(self, description):%0A for key, value in split_description(description):%0A
@@ -778,19 +778,16 @@
-
if key =
@@ -789,17 +789,17 @@
key == %22
-S
+s
tatus%22:%0A
@@ -818,46 +818,25 @@
- event.add(key.lower()
+yield key
, value
-)%0A
+%0A
@@ -860,11 +860,11 @@
== %22
-SBL
+sbl
%22 an
@@ -874,14 +874,22 @@
alue
+.lower()
!= %22
-N
+n
ot l
@@ -916,84 +916,33 @@
- key = key.lower() + %22 id%22%0A event.add(key
+yield key + %22 id%22
, value
-)%0A
+%0A
@@ -966,10 +966,10 @@
== %22
-IP
+ip
add
@@ -977,32 +977,12 @@
ess%22
-:%0A if
+ and
not
@@ -1028,169 +1028,25 @@
- event.add(%22ip%22, value)%0A event.add(%22feed%22, %22abuse.ch%22)%0A event.add(%22malware%22, %22Palevo%22)%0A event.add(%22type%22, %22c&c%22)%0A return event
+yield %22ip%22, value
%0A%0Aif
|
fbf8b0aa6284339cadbf51b681d6174484add625
|
Fix the start command not to reload if debug is False
|
openslides/__main__.py
|
openslides/__main__.py
|
#!/usr/bin/env python
import os
import sys
from django.core.management import execute_from_command_line
from openslides import __version__ as openslides_version
from openslides.utils.main import (
ExceptionArgumentParser,
UnknownCommand,
get_default_settings_path,
get_development_settings_path,
is_development,
setup_django_settings_module,
start_browser,
write_settings,
)
def main():
"""
Main entrance to OpenSlides.
"""
parser = get_parser()
try:
known_args, unknown_args = parser.parse_known_args()
except UnknownCommand:
unknown_command = True
else:
unknown_command = False
if unknown_command:
# Run a command, that is defined by the django management api
development = is_development()
setup_django_settings_module(development=development)
execute_from_command_line(sys.argv)
else:
# Run a command that is defined here
# These are commands that can not rely on an existing settings
known_args.callback(known_args)
def get_parser():
"""
Parses all command line arguments.
"""
if len(sys.argv) == 1 and not is_development():
sys.argv.append('start')
# Init parser
description = 'Start script for OpenSlides.'
if 'manage.py' not in sys.argv[0]:
description += (' If it is called without any argument, this will be '
'treated as if it is called with the "start" subcommand. '
'That means OpenSlides will setup default settings and '
'database, start the tornado webserver, launch the '
'default web browser and open the webinterface.')
parser = ExceptionArgumentParser(description=description)
# Add version argument
parser.add_argument(
'--version',
action='version',
version=openslides_version,
help='Show version number and exit.')
# Init subparsers
subparsers = parser.add_subparsers(
dest='subcommand',
title='Available subcommands',
description="Type '%s <subcommand> --help' for help on a "
"specific subcommand." % parser.prog,
help='You can choose only one subcommand at once.')
# Subcommand start
subcommand_start = subparsers.add_parser(
'start',
help='Setup settings and database, start tornado webserver, launch the '
'default web browser and open the webinterface. The environment '
'variable DJANGO_SETTINGS_MODULE is ignored.')
subcommand_start.add_argument(
'--no-browser',
action='store_true',
help='Do not launch the default web browser.')
subcommand_start.add_argument(
'--settings_path',
action='store',
default=None,
help='The used settings file. The file is created, if it does not exist.')
subcommand_start.set_defaults(callback=start)
subcommand_start.add_argument(
'--development',
action='store_true',
help='Command for development purposes.')
# Subcommand createsettings
subcommand_createsettings = subparsers.add_parser(
'createsettings',
help='Create the settings file.')
subcommand_createsettings.set_defaults(callback=createsettings)
subcommand_createsettings.add_argument(
'--settings_path',
action='store',
default=None,
help='The used settings file. The file is created, even if it exists.')
subcommand_createsettings.add_argument(
'--development',
action='store_true',
help='Command for development purposes.')
return parser
def start(args):
"""
Starts OpenSlides: Runs migrations and runs runserver.
"""
settings_path = args.settings_path
development = is_development()
if settings_path is None:
if development:
settings_path = get_development_settings_path()
else:
settings_path = get_default_settings_path()
# Write settings if it does not exists.
if not os.path.isfile(settings_path):
createsettings(args)
# Set the django setting module and run migrations
# A manual given environment variable will be overwritten
setup_django_settings_module(settings_path, development=development)
execute_from_command_line(['manage.py', 'migrate'])
if not args.no_browser:
start_browser('http://localhost:8000')
# Start the webserver
execute_from_command_line(['manage.py', 'runserver', '0.0.0.0:8000'])
def createsettings(args):
"""
Creates settings for OpenSlides.
"""
settings_path = args.settings_path
development = is_development()
context = {}
if development:
if settings_path is None:
settings_path = get_development_settings_path()
context = {
'openslides_user_data_path': repr(os.path.join(os.getcwd(), 'development', 'var')),
'debug': 'True'}
settings_path = write_settings(settings_path, **context)
print('Settings created at %s' % settings_path)
if __name__ == "__main__":
exit(main())
|
Python
| 0.999998
|
@@ -4525,16 +4525,96 @@
bserver%0A
+ # Tell django not to reload. OpenSlides uses the reload method from tornado%0A
exec
@@ -4676,16 +4676,30 @@
.0:8000'
+, '--noreload'
%5D)%0A%0A%0Adef
|