commit
stringlengths 40
40
| subject
stringlengths 1
3.25k
| old_file
stringlengths 4
311
| new_file
stringlengths 4
311
| old_contents
stringlengths 0
26.3k
| lang
stringclasses 3
values | proba
float64 0
1
| diff
stringlengths 0
7.82k
|
|---|---|---|---|---|---|---|---|
4501d32134bbed4c7a7616f51e74bf6c4444c96f
|
Fix too long line in soc.views.models.sponsor module.
|
app/soc/views/models/sponsor.py
|
app/soc/views/models/sponsor.py
|
#!/usr/bin/python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Views for Sponsor profiles.
"""
__authors__ = [
'"Sverre Rabbelier" <sverre@rabbelier.nl>',
'"Lennard de Rijk" <ljvderijk@gmail.com>',
'"Pawel Solyga" <pawel.solyga@gmail.com>',
]
from soc.logic import dicts
from soc.logic.models.host import logic as host_logic
from soc.logic.models.sponsor import logic as sponsor_logic
from soc.views.helper import access
from soc.views.helper import decorators
from soc.views.helper import redirects
from soc.views.models import group
import soc.models.sponsor
import soc.logic.dicts
import soc.logic.models.sponsor
class View(group.View):
"""View methods for the Sponsor model.
"""
def __init__(self, params=None):
"""Defines the fields and methods required for the base View class
to provide the user with list, public, create, edit and delete views.
Params:
params: a dict with params for this View
"""
rights = access.Checker(params)
rights['create'] = ['checkIsDeveloper']
rights['edit'] = [('checkHasActiveRoleForLinkIdAsScope', host_logic),
('checkGroupIsActiveForLinkId', sponsor_logic)]
rights['delete'] = ['checkIsDeveloper']
rights['home'] = [('checkHasActiveRoleForScope', host_logic)]
rights['list'] = ['checkIsDeveloper']
rights['list_requests'] = [('checkHasActiveRoleForLinkIdAsScope', host_logic)]
rights['list_roles'] = [('checkHasActiveRoleForLinkIdAsScope', host_logic)]
new_params = {}
new_params['logic'] = soc.logic.models.sponsor.logic
new_params['rights'] = rights
new_params['name'] = "Program Owner"
new_params['module_name'] = "sponsor"
new_params['document_prefix'] = "sponsor"
new_params['sidebar_grouping'] = 'Programs'
params = dicts.merge(params, new_params)
super(View, self).__init__(params=params)
def _getExtraMenuItems(self, role_description, params=None):
"""Used to create the specific Sponsor menu entries.
For args see group.View._getExtraMenuItems().
"""
submenus = []
group_entity = role_description['group']
roles = role_description['roles']
if roles.get('host'):
# add a link to create a new program
submenu = (redirects.getCreateRedirect(group_entity,
{'url_name': 'program'}),"Create a New Program", 'any_access')
submenus.append(submenu)
# add a link to the management page
submenu = (redirects.getListRolesRedirect(group_entity, params),
"Manage Program Administrators", 'any_access')
submenus.append(submenu)
# add a link to invite an a host
submenu = (redirects.getInviteRedirectForRole(group_entity, 'host'),
"Invite a Host", 'any_access')
submenus.append(submenu)
# add a link to the request page
submenu = (redirects.getListRequestsRedirect(group_entity, params),
"List Host Invites", 'any_access')
submenus.append(submenu)
# add a link to the edit page
submenu = (redirects.getEditRedirect(group_entity, params),
"Edit Sponsor Profile", 'any_access')
submenus.append(submenu)
# add a link to resign as a host
submenu = (redirects.getManageRedirect(roles['host'],
{'url_name': 'host'}),
"Resign as Host", 'any_access')
submenus.append(submenu)
# add a link to create a new document
submenu = (redirects.getCreateDocumentRedirect(group_entity, 'sponsor'),
"Create a New Document", 'any_access')
submenus.append(submenu)
# add a link to list all documents
submenu = (redirects.getListDocumentsRedirect(group_entity, 'sponsor'),
"List Documents", 'any_access')
submenus.append(submenu)
return submenus
view = View()
admin = decorators.view(view.admin)
create = decorators.view(view.create)
delete = decorators.view(view.delete)
edit = decorators.view(view.edit)
home = decorators.view(view.home)
list = decorators.view(view.list)
list_requests = decorators.view(view.listRequests)
list_roles = decorators.view(view.listRoles)
public = decorators.view(view.public)
export = decorators.view(view.export)
pick = decorators.view(view.pick)
|
Python
| 0.000001
|
@@ -1938,32 +1938,65 @@
LinkIdAsScope',
+%0A
host_logic)%5D%0A
|
50b1e27fd0d3ae2dfff74e271b86d2a7147b2b03
|
fix bug in user permission view
|
shuup/admin/modules/users/views/permissions.py
|
shuup/admin/modules/users/views/permissions.py
|
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2021, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
from django import forms
from django.conf import settings
from django.contrib import messages
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group as PermissionGroup
from django.forms.models import modelform_factory
from django.http.response import HttpResponseRedirect
from django.utils.translation import ugettext_lazy as _
from django.views.generic.edit import UpdateView
from shuup.admin.forms.fields import Select2MultipleField
from shuup.admin.toolbar import get_default_edit_toolbar
from shuup.admin.utils.urls import get_model_url
from shuup.utils.django_compat import force_text
class PermissionChangeFormBase(forms.ModelForm):
old_password = forms.CharField(
label=_("Your Password"),
widget=forms.PasswordInput,
help_text=_(
"In order to allow making significant changes to accounts, we need "
"to confirm that you know the password for the account you are using."
)
)
def __init__(self, changing_user, *args, **kwargs):
super(PermissionChangeFormBase, self).__init__(*args, **kwargs)
self.changing_user = changing_user
if not getattr(self.changing_user, 'is_superuser', False):
self.fields.pop("is_superuser")
if not (
self.changing_user == self.instance
or getattr(self.instance, 'is_superuser', False)
):
# Only require old password when editing
self.fields.pop("old_password")
if "is_superuser" in self.fields:
self.fields["is_superuser"].label = _("Superuser (Full rights) status")
self.fields["is_superuser"].help_text = _(
"Designates whether this user has all permissions without explicitly "
"assigning them. Assigning Granular Permission Groups to a Superuser "
"will not have any effect because Granular Permission Groups are only "
" able to give more rights, but Superuser already has them all."
)
self.fields["is_staff"].label = _("Access to Admin Panel status")
self.fields["is_staff"].help_text = _(
"Designates whether this user can log into this admin site. Even "
"Superusers should have this status enabled, otherwise they won't "
"be able to access the Admin Panel."
)
permission_groups_field = Select2MultipleField(
model=PermissionGroup,
required=False,
label=_("Granular Permission Groups"),
help_text=_(
"Use Permission Groups to granularly give more permissions. User "
"can belong to many groups and their permissions add and stack together. "
"Search for `Permission Groups` to change these and add them to "
"multiple users. Go to user account -> `Actions` -> `Edit Main "
"Permissions` to add them to a specific user. Will not influence "
"Superusers as they already have all the rights and can't be "
"stripped of them without removing Superuser status first."
)
)
initial_groups = self._get_initial_groups()
permission_groups_field.initial = [group.pk for group in initial_groups]
permission_groups_field.widget.choices = [(group.pk, force_text(group)) for group in initial_groups]
self.fields["permission_groups"] = permission_groups_field
def _get_initial_groups(self):
if self.instance.pk and hasattr(self.instance, "groups"):
return self.instance.groups.all()
else:
return []
def clean_old_password(self):
"""
Validates that the `old_password` field is correct.
"""
old_password = self.cleaned_data["old_password"]
if not self.changing_user.check_password(old_password):
raise forms.ValidationError(
_("Your old password was entered incorrectly. Please enter it again."),
code='password_incorrect',
)
return old_password
def clean_members(self):
members = self.cleaned_data.get("members", [])
return get_user_model().objects.filter(pk__in=members).all()
def clean_permission_groups(self):
permission_groups = self.cleaned_data.get("permission_groups", [])
return PermissionGroup.objects.filter(pk__in=permission_groups)
def clean(self):
for field in ("is_staff", "is_superuser"):
flag = self.cleaned_data[field]
if self.changing_user == self.instance and not flag:
self.add_error(field, _(
"You can't unset this status for yourself "
"due to security reasons. Use another account if you want to "
"remove permissions for this particular account.")
)
return self.cleaned_data
def save(self):
obj = super(PermissionChangeFormBase, self).save()
obj.groups.clear()
obj.groups.set(self.cleaned_data["permission_groups"])
class UserChangePermissionsView(UpdateView):
template_name = "shuup/admin/users/change_permissions.jinja"
model = settings.AUTH_USER_MODEL
title = _("Change User Permissions")
def get_form_class(self):
return modelform_factory(
model=get_user_model(),
form=PermissionChangeFormBase,
fields=("is_staff", "is_superuser")
)
def get_queryset(self):
return get_user_model().objects.all()
def get_toolbar(self):
toolbar = get_default_edit_toolbar(
self,
"permissions_form",
discard_url=get_model_url(self.object),
with_split_save=False,
)
return toolbar
def get_form_kwargs(self):
kwargs = super(UserChangePermissionsView, self).get_form_kwargs()
kwargs["changing_user"] = self.request.user
return kwargs
def get_context_data(self, **kwargs):
context = super(UserChangePermissionsView, self).get_context_data(**kwargs)
context["toolbar"] = self.get_toolbar()
context["title"] = _("Change Main Permissions: %s") % self.object
return context
def form_valid(self, form):
form.save()
messages.success(self.request, _("Permissions changed for %s.") % self.object)
return HttpResponseRedirect(self.get_success_url())
def get_success_url(self):
return get_model_url(self.object)
|
Python
| 0.000001
|
@@ -4822,24 +4822,97 @@
uperuser%22):%0A
+ if field not in self.cleaned_data:%0A continue%0A%0A
|
c8976b0bc61f08b6dc50194b62abb4d82ef70301
|
version bump
|
mhcflurry/version.py
|
mhcflurry/version.py
|
__version__ = "1.4.1"
|
Python
| 0.000001
|
@@ -12,11 +12,11 @@
= %221.4.
-1
+2
%22%0A
|
a2753124d89689dcfd3f90e050417d38a17bdd60
|
Fix redis caching when multiple GitDox instances share a Redis instance
|
modules/redis_cache.py
|
modules/redis_cache.py
|
import redis
r = redis.Redis()
GITDOX_PREFIX = "__gitdox"
SEP = "|"
REPORT = "report"
TIMESTAMP = "timestamp"
def make_key_base(doc_id, validation_type):
"""Keys for this cache have the form, e.g., __gitdox|123|ether|report
This function formats the first three pieces of this string."""
if validation_type not in ["xml", "ether", "meta", "export"]:
raise Exception("Unknown validation type: " + validation_type)
return SEP.join([GITDOX_PREFIX, str(doc_id), validation_type])
# common ------------------------------------------------------------------------
def get_report(doc_id, validation_type):
"""Returns the report for the given validation type if present in the cache,
False otherwise"""
key_base = make_key_base(doc_id, validation_type)
if key_base + SEP + REPORT in r:
return r.get(key_base + SEP + REPORT)
return False
def get_timestamp(doc_id, validation_type):
"""For ether and export validation types, returns the associated timestamp
obtained from roomtimes at the time of validation."""
key_base = make_key_base(doc_id, validation_type)
if key_base + SEP + TIMESTAMP in r:
return r.get(key_base + SEP + TIMESTAMP)
return False
def invalidate_by_doc(doc_id, validation_type):
"""Invalidates the report for a given validation type for a given doc."""
key_base = make_key_base(doc_id, validation_type)
r.delete(key_base + SEP + REPORT)
if key_base + SEP + TIMESTAMP in r:
r.delete(key_base + SEP + TIMESTAMP)
def invalidate_by_type(validation_type):
"""Invalidates the reports for a given validation type for all docs."""
pattern = GITDOX_PREFIX + "*" + SEP + validation_type + SEP + "*"
for key in r.keys(pattern=pattern):
r.delete(key)
def reset_cache():
"""Invalidates all reports."""
pattern = GITDOX_PREFIX + "*"
for key in r.keys(pattern=pattern):
r.delete(key)
# Functions for xml and meta ----------------------------------------------------
def cache_validation_result(doc_id, validation_type, report):
"""Caching for non-ethercalc-based validation types, currently xml and meta."""
if validation_type not in ["xml", "meta"]:
raise Exception("Mode must be one of 'xml', 'meta'.")
key_base = make_key_base(doc_id, validation_type)
r.set(key_base + SEP + REPORT, report)
# Functions for ether and export ------------------------------------------------
def cache_timestamped_validation_result(doc_id, validation_type, report, timestamp):
"""Caching for ethercalc-based validation types, currently ether and export.
For xml and meta we are able to maintain the cache because Gitdox knows when
xml or meta has changed, but with ethercalc, Gitdox is not informed of
changes, so we must compare timestamps."""
if validation_type not in ["ether", "export"]:
raise Exception("Mode must be one of 'ether', 'export'.")
key_base = make_key_base(doc_id, validation_type)
r.set(key_base + SEP + REPORT, report)
r.set(key_base + SEP + TIMESTAMP, timestamp)
|
Python
| 0
|
@@ -1,20 +1,86 @@
import
-redis
+os%0Aimport platform%0Aimport redis%0Afrom modules.configobj import ConfigObj
%0A%0Ar = re
@@ -171,16 +171,382 @@
stamp%22%0A%0A
+if platform.system() == %22Windows%22:%0A prefix = %22transc%5C%5C%22%0Aelse:%0A prefix = %22%22%0Arootpath = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) + os.sep%0Auserdir = rootpath + %22users%22 + os.sep%0Aconfig = ConfigObj(userdir + 'config.ini')%0APROJECT_NAME = %22_%22 + (config%5B'project'%5D.lower().replace(%22 %22, %22_%22) if 'project' in config else 'default_project')%0A%0A%0A
def make
@@ -861,16 +861,16 @@
_type)%0A%0A
-
retu
@@ -896,16 +896,29 @@
_PREFIX,
+ PROJECT_NAME
str(doc
|
5ddae0e29591c1fd4afa2cd08c73168868a4721e
|
Handle bad certificate dates and other issues.
|
modules/sfp_sslcert.py
|
modules/sfp_sslcert.py
|
# -*- coding: utf-8 -*-
# -------------------------------------------------------------------------------
# Name: sfp_sslcert
# Purpose: Gather information about SSL certificates behind HTTPS sites.
#
# Author: Steve Micallef <steve@binarypool.com>
#
# Created: 23/08/2013
# Copyright: (c) Steve Micallef
# Licence: GPL
# -------------------------------------------------------------------------------
import socket
import socks
import ssl
import time
import M2Crypto
from sflib import SpiderFoot, SpiderFootPlugin, SpiderFootEvent
class sfp_sslcert(SpiderFootPlugin):
"""SSL Certificates:Footprint,Investigate:Crawling and Scanning::Gather information about SSL certificates used by the target's HTTPS sites."""
# Default options
opts = {
"tryhttp": True,
"ssltimeout": 10,
"certexpiringdays": 30
}
# Option descriptions
optdescs = {
"tryhttp": "Also try to HTTPS-connect to HTTP sites and hostnames.",
"ssltimeout": "Seconds before giving up trying to HTTPS connect.",
"certexpiringdays": "Number of days in the future a certificate expires to consider it as expiring."
}
# Be sure to completely clear any class variables in setup()
# or you run the risk of data persisting between scan runs.
results = dict()
def setup(self, sfc, userOpts=dict()):
self.sf = sfc
self.results = dict()
# Clear / reset any other class member variables here
# or you risk them persisting between threads.
for opt in userOpts.keys():
self.opts[opt] = userOpts[opt]
# What events is this module interested in for input
# * = be notified about all events.
def watchedEvents(self):
return ["INTERNET_NAME", "LINKED_URL_INTERNAL"]
# What events this module produces
# This is to support the end user in selecting modules based on events
# produced.
def producedEvents(self):
return ["SSL_CERTIFICATE_ISSUED", "SSL_CERTIFICATE_ISSUER",
"SSL_CERTIFICATE_MISMATCH", "SSL_CERTIFICATE_EXPIRED",
"SSL_CERTIFICATE_EXPIRING", "SSL_CERTIFICATE_RAW"]
# Handle events sent to this module
def handleEvent(self, event):
eventName = event.eventType
srcModuleName = event.module
eventData = event.data
self.sf.debug("Received event, " + eventName + ", from " + srcModuleName)
if eventName == "LINKED_URL_INTERNAL":
fqdn = self.sf.urlFQDN(eventData.lower())
else:
fqdn = eventData
if fqdn not in self.results:
self.results[fqdn] = True
else:
return None
if not eventData.lower().startswith("https://") and not self.opts['tryhttp']:
return None
self.sf.debug("Testing SSL for: " + eventData)
# Re-fetch the certificate from the site and process
try:
s = socket.socket()
s.settimeout(int(self.opts['ssltimeout']))
s.connect((fqdn, 443))
sock = ssl.wrap_socket(s)
sock.do_handshake()
rawcert = sock.getpeercert(True)
cert = ssl.DER_cert_to_PEM_cert(rawcert)
m2cert = M2Crypto.X509.load_cert_string(str(cert).replace('\r', ''))
except BaseException as x:
self.sf.info("Unable to SSL-connect to " + fqdn)
return None
# Generate the event for the raw cert (in text form)
# Cert raw data text contains a lot of gems..
rawevt = SpiderFootEvent("SSL_CERTIFICATE_RAW",
m2cert.as_text().encode('raw_unicode_escape'),
self.__name__, event)
self.notifyListeners(rawevt)
# Generate events for other cert aspects
self.getIssued(m2cert, event)
self.getIssuer(m2cert, event)
self.checkHostMatch(m2cert, fqdn, event)
try:
self.checkExpiry(m2cert, event)
except M2Crypto.X509.X509Error as e:
self.sf.error("Error processing certificate: " + str(e), False)
# Report back who the certificate was issued to
def getIssued(self, cert, sevt):
issued = cert.get_subject().as_text().encode('raw_unicode_escape')
evt = SpiderFootEvent("SSL_CERTIFICATE_ISSUED", issued, self.__name__, sevt)
self.notifyListeners(evt)
# Report back the certificate issuer
def getIssuer(self, cert, sevt):
issuer = cert.get_issuer().as_text().encode('raw_unicode_escape')
evt = SpiderFootEvent("SSL_CERTIFICATE_ISSUER", issuer, self.__name__, sevt)
self.notifyListeners(evt)
# Check if the hostname matches the name of the server
def checkHostMatch(self, cert, fqdn, sevt):
fqdn = fqdn.lower()
hosts = ""
# Extract the CN from the issued section
issued = cert.get_subject().as_text().encode('raw_unicode_escape')
self.sf.debug("Checking for " + fqdn + " in " + issued.lower())
if "cn=" + fqdn in issued.lower():
hosts = 'dns:' + fqdn
try:
hosts = hosts + " " + cert.get_ext("subjectAltName").get_value().encode('raw_unicode_escape').lower()
except LookupError as e:
self.sf.debug("No alternative name found in certificate.")
fqdn_tld = ".".join(fqdn.split(".")[1:]).lower()
if "dns:" + fqdn not in hosts and "dns:*." + fqdn_tld not in hosts:
evt = SpiderFootEvent("SSL_CERTIFICATE_MISMATCH", hosts, self.__name__, sevt)
self.notifyListeners(evt)
# Check if the expiration date is in the future
def checkExpiry(self, cert, sevt):
exp = int(time.mktime(cert.get_not_after().get_datetime().timetuple()))
expstr = cert.get_not_after().get_datetime().strftime("%Y-%m-%d %H:%M:%S")
now = int(time.time())
warnexp = now + self.opts['certexpiringdays'] * 86400
if exp <= now:
evt = SpiderFootEvent("SSL_CERTIFICATE_EXPIRED", expstr, self.__name__, sevt)
self.notifyListeners(evt)
return None
if exp <= warnexp:
evt = SpiderFootEvent("SSL_CERTIFICATE_EXPIRING", expstr, self.__name__, sevt)
self.notifyListeners(evt)
return None
# End of sfp_sslcert class
|
Python
| 0
|
@@ -5688,32 +5688,49 @@
f, cert, sevt):%0A
+ try:%0A
exp = in
@@ -5801,16 +5801,20 @@
+
+
expstr =
@@ -5880,16 +5880,20 @@
%25M:%25S%22)%0A
+
@@ -5923,16 +5923,20 @@
+
warnexp
@@ -5980,16 +5980,146 @@
* 86400
+%0A except ValueError as e:%0A self.sf.error(%22Couldn't process date in certificate.%22, False)%0A return None
%0A%0A
|
72df22e62806e64e05b3bbb6eca0efd958c7c8bb
|
make btcnet_wrapper fail in a more instructive manner
|
btcnet_wrapper.py
|
btcnet_wrapper.py
|
from git import Repo
try:
repo = Repo("btcnet_info")
except:
repo = Repo.init("btcnet_info")
repo = repo.clone("git://github.com/c00w/btcnet_info.git")
origin = repo.create_remote('origin', 'git://github.com/c00w/btcnet_info.git')
origin = repo.remotes.origin
origin.fetch()
origin.pull('master')
import btcnet_info
|
Python
| 0.000001
|
@@ -313,16 +313,25 @@
ster')%0A%0A
+try:%0A
import b
@@ -341,8 +341,118 @@
et_info%0A
+except:%0A print 'Install pythongit! See the readme for detailed instructions'%0A import os%0A os._exit(2)%0A
|
1ac423e9127631eeb78868c47cf6fee12bf36a12
|
Fix bug in handling get/post, should work now
|
test_utils/middleware/testmaker.py
|
test_utils/middleware/testmaker.py
|
from django.conf import settings
from django.test import Client
from django.test.utils import setup_test_environment
import logging, re
from django.utils.encoding import force_unicode
log = logging.getLogger('testmaker')
print "Loaded Testmaker Middleware"
#Remove at your own peril
debug = getattr(settings, 'DEBUG', False)
if not debug:
print "THIS CODE IS NOT MEANT FOR USE IN PRODUCTION"
#return
class TestMakerMiddleware(object):
def process_request(self, request):
if 'test_client_true' not in request.REQUEST:
log_request(request)
if request.method.lower() == "get":
setup_test_environment()
c = Client()
getdict = request.GET.copy()
getdict['test_client_true'] = 'yes' #avoid recursion
r = c.get(request.path, getdict)
log_status(request.path, r)
if r.context:
con = get_user_context(r.context)
output_user_context(con)
def log_request(request):
log.info('\n\tdef %s(self): ' % 'test_path')
method = request.method.lower()
request_str = "'%s', {" % request.path
for dict in request.REQUEST.dicts:
for arg in dict:
request_str += "'%s': '%s', " % arg, request.REQUEST[arg]
request_str += "}"
log.info("\t\tr = c.%s(%s)" % (method, request_str))
def log_status(path, request):
log.info("\t\tself.assertEqual(r.status_code, %s)" % request.status_code)
def get_user_context(context_list):
#Ugly Hack. Needs to be a better way
if isinstance(context_list, list):
context_list = context_list[-1] #Last context rendered
ret = context_list.dicts[-1]
if ret == {}:
ret = context_list.dicts[0]
return ret
else:
return context_list
def output_user_context(context):
for var in context:
try:
if not re.search("0x\w+", force_unicode(context[var])): #Avoid memory addy's which will change.
log.info(u'\t\tself.assertEqual(unicode(r.context[-1]["%s"]), u"%s")' % (var, unicode(context[var])))
except Exception, e:
#FIXME: This might blow up on odd encoding or 404s.
pass
|
Python
| 0
|
@@ -876,24 +876,49 @@
if r.context
+ and r.status_code != 404
:%0A
@@ -1170,17 +1170,17 @@
for di
-c
+k
t in req
@@ -1218,17 +1218,17 @@
rg in di
-c
+k
t:%0A
@@ -1261,14 +1261,13 @@
'%25s'
-,
%22 %25
+(
arg,
@@ -1287,16 +1287,17 @@
EST%5Barg%5D
+)
%0A requ
@@ -2105,17 +2105,26 @@
ept
-Exception
+UnicodeDecodeError
, e:
@@ -2180,17 +2180,8 @@
ing
-or 404s.
%0A
|
46268cb2cf5e4570ef3e08440291e802d9e16b05
|
Fix variable name conflict
|
modules/networking/page.py
|
modules/networking/page.py
|
import http.client
import socket
import subprocess
import tempfile
import urllib
from nemubot import __version__
from nemubot.exception import IRCException
from nemubot.tools import web
def load(CONF, add_hook):
# check w3m exists
pass
def headers(url):
"""Retrieve HTTP header for the given URL
Argument:
url -- the page URL to get header
"""
o = urllib.parse.urlparse(url, "http")
if o.netloc == "":
raise IRCException("invalid URL")
if o.scheme == "http":
conn = http.client.HTTPConnection(o.hostname, port=o.port, timeout=5)
else:
conn = http.client.HTTPSConnection(o.hostname, port=o.port, timeout=5)
try:
conn.request("HEAD", o.path, None, {"User-agent":
"Nemubot v%s" % __version__})
except socket.timeout:
raise IRCException("request timeout")
except socket.gaierror:
print ("<tools.web> Unable to receive page %s from %s on %d."
% (o.path, o.hostname, o.port))
raise IRCException("an unexpected error occurs")
try:
res = conn.getresponse()
except http.client.BadStatusLine:
raise IRCException("An error occurs")
finally:
conn.close()
return (res.version, res.status, res.reason, res.getheaders())
def _onNoneDefault():
raise IRCException("An error occurs when trying to access the page")
def fetch(url, onNone=_onNoneDefault):
"""Retrieve the content of the given URL
Argument:
url -- the URL to fetch
"""
try:
req = web.getURLContent(url)
if req is not None:
return req
else:
if onNone is not None:
return onNone()
else:
return None
except socket.timeout:
raise IRCException("The request timeout when trying to access the page")
except socket.error as e:
raise IRCException(e.strerror)
def render(url, onNone=_onNoneDefault):
"""Use w3m to render the given url
Argument:
url -- the URL to render
"""
with tempfile.NamedTemporaryFile() as fp:
cnt = fetch(url, onNone)
if cnt is None:
return None
fp.write(cnt.encode())
args = ["w3m", "-T", "text/html", "-dump"]
args.append(fp.name)
with subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as proc:
return proc.stdout.read().decode()
def traceURL(url, stack=None):
"""Follow redirections and return the redirections stack
Argument:
url -- the URL to trace
"""
if stack is None:
stack = list()
stack.append(url)
if len(stack) > 15:
stack.append('stack overflow :(')
return stack
_, status, _, headers = headers(url)
if status == http.client.FOUND or status == http.client.MOVED_PERMANENTLY or status == http.client.SEE_OTHER:
for h, c in headers:
if h == "Location":
url = c
if url in stack:
stack.append("loop on " + url)
return stack
else:
return traceURL(url, stack)
return stack
|
Python
| 0.00001
|
@@ -2788,18 +2788,16 @@
_, head
-er
s = head
@@ -2944,18 +2944,16 @@
in head
-er
s:%0A
|
e4a5dd51829df198a07232afc06afdff6089ae6c
|
fix wmt datatype checking (#1259)
|
parlai/tasks/wmt/agents.py
|
parlai/tasks/wmt/agents.py
|
#!/usr/bin/env python3
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
from parlai.core.teachers import FbDialogTeacher
from .build import build
import copy
import os
def _path(task, opt, dt=''):
# Build the data if it doesn't exist.
build(opt)
if dt == '':
dt = opt['datatype'].split(':')[0]
return os.path.join(opt['datapath'], 'wmt',
'{task}_{type}.txt'.format(task=task, type=dt))
class EnDeTeacher(FbDialogTeacher):
def __init__(self, opt, shared=None):
opt = copy.deepcopy(opt)
task = opt.get('task', 'wmt:en_de')
self.task_name = task.split(':')[1] if ':' in task else 'en_de'
opt['datafile'] = _path(self.task_name, opt, opt['datatype'])
super().__init__(opt, shared)
class DefaultTeacher(EnDeTeacher):
pass
|
Python
| 0
|
@@ -437,14 +437,11 @@
, dt
-=''
):%0A
+
@@ -497,68 +497,8 @@
pt)%0A
- if dt == '':%0A dt = opt%5B'datatype'%5D.split(':')%5B0%5D%0A
@@ -738,43 +738,32 @@
-task = opt.get('task', 'wmt:
+self.task_name = '
en_de'
-)
%0A
@@ -771,71 +771,42 @@
-self.task_name = task.split(':')%5B1%5D if ':' in task else 'en_de'
+dt = opt%5B'datatype'%5D.split(':')%5B0%5D
%0A
@@ -855,31 +855,18 @@
e, opt,
-opt%5B'datatype'%5D
+dt
)%0A
|
542bb81e68975d52c23fa3829233cbff9ead39a7
|
Use json for PUT
|
mnubo/api_manager.py
|
mnubo/api_manager.py
|
import requests
import json
import base64
import datetime
def authenticate(func):
def authenticate_and_call(*args):
if not args[0].is_access_token_valid():
args[0].access_token = args[0].fetch_access_token()
return func(*args)
return authenticate_and_call
class APIManager(object):
def __init__(self, client_id, client_secret, hostname):
""" Initializes the API Manager which is responsible for authenticating every request.
:param client_id: the client id generated by mnubo
:param client_secret: the client secret generated by mnubo
:param hostname: the hostname to send the requests (sandbox or production)
"""
self.__client_id = client_id
self.__client_secret = client_secret
self.__hostname = hostname
self.access_token = self.fetch_access_token()
def fetch_access_token(self):
""" Requests the access token necessary to communicate with the mnubo plateform
"""
requested_at = datetime.datetime.now()
r = requests.post(self.get_auth_url(), headers=self.get_token_authorization_header())
json_response = json.loads(r.content)
token = {'access_token': json_response['access_token'], 'expires_in': datetime.timedelta(0, json_response['expires_in']), 'requested_at': requested_at}
return token
def is_access_token_valid(self):
""" Validates if the token is still valid
:return: True of the token is still valid, False if it is expired
"""
return self.access_token['requested_at'] + self.access_token['expires_in'] > datetime.datetime.now()
def get_token_authorization_header(self):
""" Generates the authorization header used while requesting an access token
"""
return {'content-type': 'application/x-www-form-urlencoded', 'Authorization': "Basic " + base64.b64encode(self.__client_id + ":" + self.__client_secret)}
def get_authorization_header(self):
""" Generates the authorization header used to access resources via mnubo's API
"""
return {'content-type': 'application/json', 'Authorization': 'Bearer ' + self.access_token['access_token']}
def get_api_url(self):
""" Generates the general API url
"""
return self.__hostname + '/api/v3/'
def get_auth_url(self):
""" Generates the url to fetch the access token
"""
return self.__hostname + '/oauth/token?grant_type=client_credentials'
@authenticate
def post(self, route, body={}):
""" Build and send a post request authenticated
:param route: which resource to access via the REST API
:param body: body to be appended to the HTTP request
"""
url = self.get_api_url() + route
headers = self.get_authorization_header()
return requests.post(url, data=body, headers=headers)
@authenticate
def put(self, route, body={}):
""" Build and send a put request authenticated
:param route: which resource to access via the REST API
:param body: body to be appended to the HTTP request
"""
url = self.get_api_url() + route
headers = self.get_authorization_header()
return requests.put(url, data=body, headers=headers)
@authenticate
def delete(self, route):
""" Build and send a delete request authenticated
:param route: which resource to access via the REST API
"""
url = self.get_api_url() + route
headers = self.get_authorization_header()
return requests.delete(url, headers=headers)
|
Python
| 0.000002
|
@@ -2929,17 +2929,16 @@
aders)%0A%0A
-%0A
@aut
@@ -3296,28 +3296,28 @@
ts.put(url,
-data
+json
=body, heade
|
226b27ad6e66c7d512ce6cad300b7f96de5ccfa7
|
Introduce cache feature to GoogleDrive base logic.
|
model/googledrive.py
|
model/googledrive.py
|
# -*- encoding:utf8 -*-
import os
import httplib2
from oauth2client.client import SignedJwtAssertionCredentials
from apiclient.discovery import build
class GoogleDrive(object):
@classmethod
def retrieve_content(cls, **kwargs):
document_id = kwargs.get('document_id')
export_type = kwargs.get('export_type')
if not document_id:
print("There is no documentID")
return None
if not export_type:
print("There is no exportType")
return None
try:
private_key = os.environ['GOOGLE_PRIVATE_KEY']
if not private_key:
return None
credential_args = (
os.environ['GOOGLE_CLIENT_EMAIL'],
private_key,
'https://www.googleapis.com/auth/drive'
)
credential_kwargs = {
'sub': os.environ.get('GOOGLE_OWNER_EMAIL')
}
credentials = SignedJwtAssertionCredentials(*credential_args, **credential_kwargs)
http = httplib2.Http()
credentials.authorize(http)
service = build('drive', 'v2', http=http)
f = service.files().get(fileId=document_id).execute()
if 'exportLinks' in f and export_type in f['exportLinks']:
download = f['exportLinks'][export_type]
resp, content = service._http.request(download)
else:
content = '読み込みに失敗したにゃー'
except Exception as e:
content = '読み込みに失敗したにゃーー : ' + str(e) + ' / ' + str(e.message)
return content
|
Python
| 0
|
@@ -144,16 +144,46 @@
t build%0A
+from model.cache import Cache%0A
%0A%0Aclass
@@ -544,32 +544,161 @@
return None%0A%0A
+ # Check document cache exists%0A content = Cache().get(document_id)%0A if content:%0A return content%0A%0A
try:%0A
@@ -1586,16 +1586,103 @@
wnload)%0A
+ # Set document cache%0A Cache().set(document_id, content)%0A
|
2ab36d3f98a3b909801b557df39742ef3a09d561
|
Remove unused flag on autodiscover and handle_translation_registrations()
|
modeltrans/models.py
|
modeltrans/models.py
|
def autodiscover(create_virtual_fields=True):
'''
Auto-discover INSTALLED_APPS translation.py modules and fail silently when
not present. This forces an import on them to register.
Also import explicit modules.
'''
import os
import sys
import copy
from django.utils.module_loading import module_has_submodule
from modeltrans.translator import translator
if not create_virtual_fields:
translator.disable_create_virtual_fields()
from importlib import import_module
from django.conf import settings
from django.apps import apps
mods = [(app_config.name, app_config.module) for app_config in apps.get_app_configs()]
for (app, mod) in mods:
# Attempt to import the app's translation module.
module = '%s.translation' % app
before_import_registry = copy.copy(translator._registry)
try:
import_module(module)
except:
# Reset the model registry to the state before the last import as
# this import will have to reoccur on the next request and this
# could raise NotRegistered and AlreadyRegistered exceptions
translator._registry = before_import_registry
# Decide whether to bubble up this error. If the app just
# doesn't have an translation module, we can ignore the error
# attempting to import it, otherwise we want it to bubble up.
if module_has_submodule(mod, 'translation'):
raise
# In debug mode, print a list of registered models and pid to stdout.
# Note: Differing model order is fine, we don't rely on a particular
# order, as far as base classes are registered before subclasses.
if settings.DEBUG:
try:
if sys.argv[1] in ('runserver', 'runserver_plus'):
models = translator.get_registered_models()
names = ', '.join(m.__name__ for m in models)
print('modeltrans: Registered %d models for translation'
' (%s) [pid: %d].' % (len(models), names, os.getpid()))
except IndexError:
pass
def handle_translation_registrations(create_virtual_fields=True):
'''
Ensures that any configuration of the TranslationOption(s) are handled when
importing modeltranslation.
This makes it possible for scripts/management commands that affect models
but know nothing of modeltranslation.
'''
from modeltrans.settings import ENABLE_REGISTRATIONS
if not ENABLE_REGISTRATIONS:
# If the user really wants to disable this, they can, possibly at their
# own expense. This is generally only required in cases where other
# apps generate import errors and requires extra work on the user's
# part to make things work.
return
# Trigger autodiscover, causing any TranslationOption initialization
# code to execute.
autodiscover(create_virtual_fields)
|
Python
| 0
|
@@ -16,34 +16,8 @@
ver(
-create_virtual_fields=True
):%0A
@@ -368,94 +368,8 @@
or%0A%0A
- if not create_virtual_fields:%0A translator.disable_create_virtual_fields()%0A%0A
@@ -2080,34 +2080,8 @@
ons(
-create_virtual_fields=True
):%0A
@@ -2819,27 +2819,6 @@
ver(
-create_virtual_fields
)%0A
|
4c4499dcb86ae16a7d3822feab4390adca89d348
|
Bump version to 0.12.1
|
pingparsing/__version__.py
|
pingparsing/__version__.py
|
# encoding: utf-8
from datetime import datetime
__author__ = "Tsuyoshi Hombashi"
__copyright__ = "Copyright {}, {}".format(datetime.now().year, __author__)
__license__ = "MIT License"
__version__ = "0.12.0"
__maintainer__ = __author__
__email__ = "tsuyoshi.hombashi@gmail.com"
|
Python
| 0
|
@@ -200,17 +200,17 @@
= %220.12.
-0
+1
%22%0A__main
|
0b94543d605ad64149faa6df2e3d8bf2e4b5c08c
|
remove print statement
|
plots/gender_by_country.py
|
plots/gender_by_country.py
|
from __future__ import print_function
from collections import OrderedDict
import csv
import numpy as np
import pandas
import world_countries as wc
from bokeh.models import HoverTool, ColumnDataSource
from bokeh.plotting import figure
from bokeh.resources import CDN
from bokeh.embed import autoload_static
import os
def plot(newest_changes):
filelist = os.listdir('/home/maximilianklein/snapshot_data/{}/'.format(newest_changes))
site_linkss_file = [f for f in filelist if f.startswith('worldmap')][0]
if newest_changes == 'newest-changes':
date_range = site_linkss_file.split('worldmap-index-from-')[1].split('.csv')[0].replace('-',' ')
print(date_range)
csv_to_read = '/home/maximilianklein/snapshot_data/{}/{}'.format(newest_changes,site_linkss_file)
df = pandas.DataFrame.from_csv(csv_to_read)
major = df[df['total'] > 100]
# https://github.com/chdoig/pyladiesatx-bokeh-tutorial
world_countries = wc.data.copy()
country_xs = [world_countries[code]['lons'] for code in world_countries]
country_ys = [world_countries[code]['lats'] for code in world_countries]
country_names = [world_countries[code]['name'] for code in world_countries]
def lookup_wigi(code):
try:
return df.ix[code]['Score']
except KeyError:
return -1
index_vals = np.array([lookup_wigi(code) for code in world_countries])
def fmt(c):
return int(np.nan_to_num(c))
colors = [
"#%02x%02x%02x" % (fmt(r), fmt(g), fmt(b)) for r, g, b in
zip(np.floor(250*(1-index_vals)),
np.floor(200*(1-index_vals)),
np.floor(100*index_vals))]
print(colors)
source = ColumnDataSource(
data=dict(
name=country_names,
wigi_index=[str(idx) for idx in index_vals]
)
)
# setup widgets
TOOLS = "pan,wheel_zoom,box_zoom,reset,hover,save"
title_suffix = 'Changes since {}'.format(date_range) if newest_changes == 'newest-changes' else 'All Time'
p = figure(title="Gender by Country {}".format(title_suffix), tools=TOOLS)
p.patches(country_xs, country_ys, fill_color=colors, source=source)
hover = p.select(dict(type=HoverTool))
hover.point_policy = "follow_mouse"
hover.tooltips = OrderedDict([
("wigi", "@wigi_index"),
("Country", "@name"),
])
js_filename = "gender_by_country_{}.js".format(newest_changes)
script_path = "./assets/js/"
output_path = "./files/assets/js/"
# generate javascript plot and corresponding script tag
js, tag = autoload_static(p, CDN, script_path + js_filename)
with open(output_path + js_filename, 'w') as js_file:
js_file.write(js)
return tag
if __name__ == "__main__":
print(plot('newest'))
print(plot('newest-changes'))
|
Python
| 0.999999
|
@@ -1680,26 +1680,8 @@
))%5D%0A
- print(colors)%0A
|
12995be9490bde60c92e6f962b748832c083fe45
|
use API and HTTP HEAD instead
|
modules/subreddit.py
|
modules/subreddit.py
|
import re
import urllib.request as req
class SubredditModule:
subre = re.compile(r"^(?:.* )?/r/([A-Za-z0-9][A-Za-z0-9_]{2,20})")
def __init__(self, circa):
self.circa = circa
self.events = {
"message": [self.findsub]
}
def findsub(self, fr, to, msg, m):
for sub in self.subre.findall(msg):
url = "http://www.reddit.com/r/" + sub
try:
req.urlopen(url)
self.circa.say(to, url)
except:
pass
module = SubredditModule
|
Python
| 0
|
@@ -31,16 +31,45 @@
t as req
+%0Aimport urllib.error as err
%0A%0Aclass
@@ -336,22 +336,41 @@
%0A%09%09%09
-url =
+try:%0A%09%09%09%09r = req.Request(
%22http://
www.
@@ -369,11 +369,11 @@
p://
-www
+api
.red
@@ -389,24 +389,61 @@
/%22 + sub
-%0A%09%09%09try:
+ + %22.json%22)%0A%09%09%09%09r.get_method = lambda: %22HEAD%22
%0A%09%09%09%09req
@@ -451,19 +451,17 @@
urlopen(
-url
+r
)%0A%09%09%09%09se
@@ -481,22 +481,70 @@
to,
-url)%0A%09%09%09except
+%22http://www.reddit.com/r/%22 + sub)%0A%09%09%09except err.HTTPError as e
:%0A%09%09
|
06f7a1d99156ae64ab70aab84dcdcf902d85b676
|
update keys()
|
mongoext/document.py
|
mongoext/document.py
|
from __future__ import absolute_import
import weakref
from . import scheme
class FieldDescriptor(object):
def __init__(self, field):
self.field = field
self.data = weakref.WeakKeyDictionary()
def __get__(self, instance, owner):
return self.data[instance]
def __set__(self, instance, value):
# unset value
if (value is None) and (instance in self.data):
del self.data[instance]
return
self.data[instance] = self.field(value)
def __delete__(self, instance):
del self.data[instance]
def __call__(self, value):
return self.field(value)
def __contains__(self, instance):
try:
self.data[instance]
except KeyError:
return False
else:
return True
class MetaDocument(type):
DISCOVER = (FieldDescriptor, scheme.Field)
def __new__(cls, class_name, bases, attrs):
document_scheme = {}
# collect document scheme
for base in bases:
for name, obj in vars(base).iteritems():
if issubclass(type(obj), cls.DISCOVER):
document_scheme[name] = obj
for name, obj in attrs.iteritems():
if issubclass(type(obj), cls.DISCOVER):
document_scheme[name] = obj
# wrap all the fields into data descriptor
for name, obj in document_scheme.items():
if not isinstance(obj, FieldDescriptor):
obj = FieldDescriptor(obj)
document_scheme[name] = obj
attrs['_scheme'] = document_scheme
return super(MetaDocument, cls).__new__(cls, class_name, bases, attrs)
class Document(object):
__metaclass__ = MetaDocument
_scheme = None
_id = scheme.Field()
def __init__(self, **data):
data = scheme.process(self._scheme, data)
for field, value in data.items():
setattr(self, field, value)
def __getattribute__(self, name):
scheme = super(Document, self).__getattribute__('_scheme')
if name in scheme:
if self in scheme[name]:
return scheme[name].__get__(self, type(self))
else:
raise AttributeError(name)
return super(Document, self).__getattribute__(name)
def __setattr__(self, name, value):
scheme = super(Document, self).__getattribute__('_scheme')
if name in scheme:
return scheme[name].__set__(self, value)
return super(Document, self).__setattr__(name, value)
def __delattr__(self, name):
scheme = super(Document, self).__getattribute__('_scheme')
if name in scheme:
return scheme[name].__delete__(self)
return super(Document, self).__delattr__(name)
def __iter__(self):
for name in (n for n in self._scheme if self in self._scheme[n]):
yield name
def __contains__(self, name):
try:
self._scheme[name].__get__(self, type(self))
except KeyError:
return False
else:
return True
def __len__(self):
return len(iter(self))
def __hash__(self):
return id(self)
def __repr__(self):
if '_id' in self:
return '<{}: {}>'.format(type(self).__name__, self._id)
else:
return '<{}: None>'.format(type(self).__name__)
def __getitem__(self, name):
try:
return getattr(self, name)
except AttributeError:
raise KeyError(name)
def __setitem__(self, name, value):
return setattr(self, name, value)
def __delitem__(self, name):
return delattr(self, name)
def get(self, key, default=None):
return self[key] if key in self else default
def iterkeys(self):
return (k for k in self)
def keys(self):
return list(self.iterkeys())
def itervalues(self):
return (self[k] for k in self)
def values(self):
return list(self.itervalues())
def iteritems(self):
return ((k, self[k]) for k in self)
def items(self):
return list(self.iteritems())
def __eq__(self, other):
return self is other or dict(self.items()) == dict(other.items())
def __ne__(self, other):
return not (self == other)
|
Python
| 0
|
@@ -3840,20 +3840,13 @@
urn
-(k for k in
+iter(
self
|
c1e84bd196f28c35b032a609a3edb5f596216f71
|
fix for document.iter
|
mongoext/document.py
|
mongoext/document.py
|
from __future__ import absolute_import
import collections
import mongoext.collection
import mongoext.scheme
import mongoext.exc
class MetaDocument(type):
def __new__(cls, name, bases, attrs):
fields = {}
for base in bases:
for name, obj in vars(base).iteritems():
if issubclass(type(obj), mongoext.scheme.Field):
fields[name] = obj
for name, obj in attrs.iteritems():
if issubclass(type(obj), mongoext.scheme.Field):
fields[name] = obj
attrs['__scheme__'] = mongoext.scheme.Scheme(fields)
return super(MetaDocument, cls).__new__(cls, name, bases, attrs)
class Document(object):
__metaclass__ = MetaDocument
__scheme__ = None
_id = mongoext.scheme.Field()
def __init__(self, **data):
for name, value in data.items():
if name not in self.__scheme__:
raise mongoext.exc.SchemeError(name)
setattr(self, name, value)
def __contains__(self, name):
return name in self.__scheme__
def __len__(self):
return len(self.__scheme__)
def __iter__(self):
for name in self.__scheme__:
yield name, getattr(self, name)
def __hash__(self):
return super(object, self).__hash__()
def __repr__(self):
return '<{}: {}>'.format(type(self).__name__, self._id)
|
Python
| 0.000001
|
@@ -37,28 +37,8 @@
rt%0A%0A
-import collections%0A%0A
impo
@@ -1215,16 +1215,22 @@
lf, name
+, None
)%0A%0A d
|
cd60e12bb8858fb242264ffdb8b17f115836c767
|
Use shlex for command splitting.
|
zoocli/cli.py
|
zoocli/cli.py
|
import os
import atexit
import warnings
import readline
import traceback
from zoocli.args import Args
from zoocli.config import config
from zoocli.exceptions import UnknownCommand, CLIException
from zoocli.zookeeper import ZooKeeper
from zoocli.completer import Completer
from zoocli.paths import ROOT_PATH, format_path
warnings.simplefilter("ignore")
PROMPT = "> "
class ZooCLI(object):
def __init__(self):
self._running = True
self._verbose = config.getboolean('zoocli', 'verbose')
self._args = Args()
self._completer = Completer(self)
self._zookeeper = ZooKeeper()
atexit.register(self._zookeeper.stop)
self._current_path = ROOT_PATH
self._history_file = os.path.expanduser(config['zoocli'].get('history', ''))
self._commands_map = {
'ls': self.ls,
'cd': self.cd,
'get': self.get,
'set': self.set,
'help': self.help,
'exit': self.exit,
}
def run(self):
"""Loops and executes commands in interactive mode."""
readline.parse_and_bind("tab: complete")
readline.set_completer(self._completer.complete)
if self._history_file:
# Ensure history file exists
if not os.path.isfile(self._history_file):
open(self._history_file, 'w').close()
readline.read_history_file(self._history_file)
while self._running:
try:
command = input(self._format_prompt())
if command:
self.execute(command.split())
except UnknownCommand as exc:
print(exc)
except (KeyboardInterrupt, EOFError):
self._running = False
if self._history_file:
readline.write_history_file(self._history_file)
return 0
def execute(self, args):
"""Executes single command and prints result, if any."""
command, kwargs = self.parse(args)
if command not in self._commands_map:
raise UnknownCommand("There is no action for command {}".format(command))
method = self._commands_map[command]
try:
result = method(**kwargs)
if result:
print(result)
return 0
except CLIException as exc:
print(exc)
return 1
except Exception:
traceback.print_exc()
return 2
def parse(self, args):
parsed = self._args.parse(args)
kwargs = dict(parsed._get_kwargs())
command = kwargs.pop('command')
return command, kwargs
def log(self, message, *args, **kwargs):
if self._verbose:
print(message.format(*args, **kwargs))
def ls(self, long=False, path=None):
path = format_path(self._current_path, path)
result = self._zookeeper.list(path)
separator = "\n" if long else " "
return separator.join(sorted(result))
def cd(self, path=None):
path = format_path(self._current_path, path, default=ROOT_PATH)
# No exception means correct path
self._zookeeper.list(path)
self._current_path = path
def get(self, path=None):
path = format_path(self._current_path, path, default=ROOT_PATH)
data = self._zookeeper.get(path)
return data
def set(self, path=None, data=None):
if not path:
raise CLIException("Missing node path")
if not data:
raise CLIException("Missing data")
path = format_path(self._current_path, path, default=ROOT_PATH)
self._zookeeper.set(path, data)
self.log("Set {} data: {}".format(path, data))
def help(self, parser, all_commands, subject):
if subject:
subparsers = [command for command in all_commands
if command.name == subject]
if subparsers:
parser = subparsers[0].parser
return parser.print_help()
def exit(self):
self._running = False
def _format_prompt(self):
return "[{path}]{prompt}".format(path=self._current_path,
prompt=PROMPT)
@property
def commands(self):
return self._args.commands
|
Python
| 0
|
@@ -3,16 +3,29 @@
port os%0A
+import shlex%0A
import a
@@ -1611,22 +1611,27 @@
ute(
+shlex.split(
command
-.split(
))%0A
|
15c2595e126689d184a5de52b8f209b4e3e6eb67
|
add a test for json
|
zq_gen/str.py
|
zq_gen/str.py
|
'''
Helper functions for string related operation
'''
import unittest
def cmd_str2dic(cmd_str):
words = cmd_str.split()
rst = {}
if len(words) >= 1:
begin = 0;
if words[0][0:1] != '-': # the first one could be the the name of the command
rst['cmd_name'] = words[0]
begin = 1
curr_word = '...' # default parameter
quoted = False
for word in words[begin:]:
if quoted: # currently expecting the reverse double quote
if word.endswith('"'):
quoted = False
word = word[:-1]
else:
if word[0:1]=='-': # a new parameter
curr_word = word
rst[curr_word] = ''
continue
if word.startswith('"'):
quoted = True
word = word[1:]
# append to current parameter
if len(rst[curr_word]) == 0: # first value
rst[curr_word] += word
else: # following value, add a space
rst[curr_word] += ' '+word
return rst
# Unit test class
class TestString(unittest.TestCase):
def test_primary_cmd(self):
cmd_str = 'schedule -n job name -dsc job description -t job_type -p "-d 20 -n 5"'
cmd_dict = cmd_str2dic(cmd_str)
exp_dict = {
'cmd_name': 'schedule',
'-n': 'job name',
'-dsc': 'job description',
'-t': 'job_type',
'-p': '-d 20 -n 5'
}
self.assertEqual(cmd_dict, exp_dict)
def test_mv_avg_cmd(self):
cmd_str = '-n 5 -d 20'
cmd_dict = cmd_str2dic(cmd_str)
exp_dict = {
'-n': '5',
'-d': '20'
}
self.assertEqual(cmd_dict, exp_dict)
if __name__ == '__main__':
unittest.main()
|
Python
| 0.000023
|
@@ -419,32 +419,61 @@
quoted = False%0A
+ curly_braced = False%0A
for word
@@ -544,18 +544,8 @@
#
- currently
exp
@@ -687,16 +687,180 @@
rd%5B:-1%5D%0A
+ elif curly_braced: # expecting the reverse curly brace%0A if word.endswith('%7D'):%0A curly_braced = False%0A
@@ -1073,33 +1073,175 @@
startswith('%22'):
-%0A
+ # meet double quote%0A if word.endswith('%22'):%0A word = word%5B1:-1%5D%0A else:%0A
@@ -1258,16 +1258,20 @@
= True%0A
+
@@ -1298,16 +1298,174 @@
ord%5B1:%5D%0A
+ elif word.startswith('%7B'): # meet curly brace%0A if not word.endswith('%7D'):%0A curly_braced = True%0A
@@ -1829,16 +1829,424 @@
tCase):%0A
+ def test_json(self):%0A cmd_str = 'command -t job_type -p %7B%22num1%22:1, %22num2%22:2, %22str1%22:%22abcd%22, %22str2%22:%22efgh%22%7D'%0A cmd_dict = cmd_str2dic(cmd_str)%0A exp_dict = %7B%0A 'cmd_name' : 'command',%0A '-t' : 'job_type',%0A '-p' : '%7B%22num1%22:1, %22num2%22:2, %22str1%22:%22abcd%22, %22str2%22:%22efgh%22%7D'%0A %7D%0A self.assertEqual(cmd_dict, exp_dict)%0A%0A
def
@@ -2285,16 +2285,17 @@
cmd_str
+
= 'sched
@@ -2447,16 +2447,17 @@
md_name'
+
: 'sched
@@ -2482,23 +2482,24 @@
'-n'
-:
+ :
'job na
@@ -2525,21 +2525,22 @@
'-dsc'
-:
+ :
'job de
@@ -2571,23 +2571,24 @@
'-t'
-:
+
+:
'job_ty
@@ -2612,23 +2612,24 @@
'-p'
-:
+
+:
'-d 20
@@ -2745,16 +2745,17 @@
cmd_str
+
= '-n 5
|
8bbb160cc742fa04c7aace678afa0d226c0d1407
|
fix sample script
|
resources/examples/ClickToCall.py
|
resources/examples/ClickToCall.py
|
import string, cgi, time, thread
import sys, urllib, urllib2
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
number1 = ""
number2 = ""
listeningPort = 8081
listeningIp = "127.0.0.1"
bluescaleIp = "127.0.0.1"
bluescalePort = 8080
class MyHandler(BaseHTTPRequestHandler):
def do_GET(self):
self.do_POST()
def do_POST(self):
if self.path == "/Status":
self.printParams()
self.postOK()
else:
self.printParams()
self.connectCall()
def printParams(self):
params = self.parseParams()
for field in params.keys():
print( field + "=" + "".join(params[field]))
def postOK(self):
self.send_response(200)
self.send_header("Content-Type", "text/html")
self.end_headers()
def parseParams(self):
length = int(self.headers.getheader('Content-Length'))
params = cgi.parse_qs(self.rfile.read(length), keep_blank_values=1)
return params
def connectCall(self):
self.postOK()
str = """
<Response>
<Dial>
<Number>""" + number2 + """</Number>
<Action>http://""" + listeningIp + ":" + str(listeningPort) + """/Status</Action>
</Dial>
</Response>
"""
self.wfile.write(str)
return
def main():
try:
server = HTTPServer( (listeningIp, listeningPort), MyHandler)
print("going to connect " + number1 + " to " + number2)
thread.start_new_thread(serveWeb, (server,))
postCall()
while True:
time.sleep(5)
#server.serve_forever()
#time.sleep(5000)
except Exception, err:
print("damn error = " + str(err))
def serveWeb(server):
server.serve_forever()
print("serving...")
def postCall():
data = urllib.urlencode( {"To" : number1, "From": number2, "Url" : (listeningIp + ":" + str(listeningPort) + "/")} )
f = urllib.urlopen( "http://" + bluescaleIp + ":" + str(bluescalePort) + "/Calls/" ,data)
r = f.read()
print(r)
if __name__ == '__main__':
number1 = sys.argv[1]
number2 = sys.argv[2]
main()
|
Python
| 0.000001
|
@@ -2021,16 +2021,28 @@
%22Url%22 :
+ %22http://%22 +
(listen
|
7cd3c0449b05e75ffbe5ba346bab3ff389f63b9d
|
clean up map_async_bench
|
tests/benchmark/map_async_bench.py
|
tests/benchmark/map_async_bench.py
|
import threading
import random
import time
import logging
import sys
from os.path import dirname
sys.path.append(dirname(dirname(dirname(__file__))))
import hazelcast
REQ_COUNT = 20000
ENTRY_COUNT = 10 * 1000
VALUE_SIZE = 10000
GET_PERCENTAGE = 40
PUT_PERCENTAGE = 40
logging.basicConfig(format='%(asctime)s%(msecs)03d [%(name)s] %(levelname)s: %(message)s', datefmt="%H:%M%:%S,")
logging.getLogger().setLevel(logging.INFO)
logger = logging.getLogger("main")
config = hazelcast.ClientConfig()
config.group_config.name = "dev"
config.group_config.password = "dev-pass"
config.network_config.addresses.append("127.0.0.1:5701")
client = hazelcast.HazelcastClient(config)
class Test(object):
ops = 0
def get_cb(self, _):
self.ops += 1
def put_cb(self, _):
self.ops += 1
def remove_cb(self, _):
self.ops += 1
def run(self):
my_map = client.get_map("default")
for _ in xrange(0, REQ_COUNT):
key = int(random.random() * ENTRY_COUNT)
operation = int(random.random() * 100)
if operation < GET_PERCENTAGE:
my_map.get_async(key, self.get_cb)
elif operation < GET_PERCENTAGE + PUT_PERCENTAGE:
my_map.put_async(key, "x" * VALUE_SIZE, -1, self.put_cb)
else:
my_map.remove_async(key, self.remove_cb)
t = Test()
start = time.time()
t.run()
while t.ops != REQ_COUNT:
time.sleep(0.01)
print("ops per second: %d" % (t.ops/(time.time()-start)))
|
Python
| 0.000018
|
@@ -177,17 +177,17 @@
COUNT =
-2
+5
0000%0AENT
@@ -692,20 +692,8 @@
ct):
-%0A ops = 0
%0A%0A
@@ -702,22 +702,21 @@
def
-get_cb
+__init__
(self
-, _
):%0A
@@ -723,36 +723,74 @@
self.ops
-+
=
-1
+0%0A self.event = threading.Event()
%0A%0A def put_cb
@@ -783,22 +783,20 @@
def
-put_cb
+incr
(self, _
@@ -824,37 +824,46 @@
= 1%0A
-%0A
-def remove_cb(self, _):%0A
+ if self.ops == REQ_COUNT:%0A
@@ -871,24 +871,27 @@
self.
-ops += 1
+event.set()
%0A%0A de
@@ -1179,14 +1179,12 @@
elf.
-get_cb
+incr
)%0A
@@ -1312,14 +1312,12 @@
elf.
-put_cb
+incr
)%0A
@@ -1382,17 +1382,12 @@
elf.
-remove_cb
+incr
)%0At
@@ -1427,53 +1427,121 @@
n()%0A
-while t.ops != REQ_COUNT:%0A time.sleep(0.01
+t.event.wait()%0Atime_taken = time.time() - start%0Aprint(%22Took %25s seconds for %25d requests%22 %25 (time_taken, REQ_COUNT)
)%0Apr
@@ -1562,17 +1562,17 @@
econd: %25
-d
+s
%22 %25 (t.o
@@ -1578,26 +1578,17 @@
ops/
-(
time
-.time()-start)
+_taken
))%0A
|
8dd0a904d2572c5d2368a78401c8b935d197b532
|
use the full network name in prefixes
|
plugins/relay_clientbot.py
|
plugins/relay_clientbot.py
|
# relay_clientbot.py: Clientbot extensions for Relay
import string
from pylinkirc import utils, conf, world
from pylinkirc.log import log
default_styles = {'MESSAGE': '\x02[$colored_netname]\x02 <$colored_nick> $text',
'KICK': '\x02[$colored_netname]\x02 -$colored_nick$identhost has kicked $target_nick from $channel ($text)',
'PART': '\x02[$colored_netname]\x02 -$colored_nick$identhost has left $channel ($text)',
'JOIN': '\x02[$colored_netname]\x02 -$colored_nick$identhost has joined $channel',
'NICK': '\x02[$colored_netname]\x02 -$colored_nick$identhost is now known as $newnick',
'QUIT': '\x02[$colored_netname]\x02 -$colored_nick$identhost has quit ($text)',
'ACTION': '\x02[$colored_netname]\x02 * $colored_nick $text',
'NOTICE': '\x02[$colored_netname]\x02 * Notice from $colored_nick: $text',
}
def color_text(s):
"""
Returns a colorized version of the given text based on a simple hash algorithm
(sum of all characters).
"""
colors = ('02', '03', '04', '05', '06', '07', '08', '09', '10', '11',
'12', '13')
num = sum([ord(char) for char in s])
num = num % len(colors)
return "\x03%s%s\x03" % (colors[num], s)
def cb_relay_core(irc, source, command, args):
"""
This function takes Clientbot actions and outputs them to a channel as regular text.
"""
real_command = command.split('_')[-1]
relay = world.plugins.get('relay')
if irc.pseudoclient and relay:
try:
sourcename = irc.getFriendlyName(source)
except KeyError: # User has left due to /quit
sourcename = args['userdata'].nick
# Special case for CTCPs.
if real_command == 'MESSAGE':
# CTCP action, format accordingly
if (not args.get('is_notice')) and args['text'].startswith('\x01ACTION ') and args['text'].endswith('\x01'):
args['text'] = args['text'][8:-1]
real_command = 'ACTION'
# Other CTCPs are ignored
elif args['text'].startswith('\x01'):
return
elif args.get('is_notice'): # Different syntax for notices
real_command = 'NOTICE'
# .get() chains are lovely. Try to fetch the format for the given command from the
# relay:clientbot_format:$command key, falling back to one defined in default_styles
# above, and then nothing if not found.
text_template = conf.conf.get('relay', {}).get('clientbot_format', {}).get(real_command,
default_styles.get(real_command, ''))
text_template = string.Template(text_template)
if text_template:
# Get the original client that the relay client source was meant for.
try:
origuser = relay.getOrigUser(irc, source) or args['userdata'].remote
except (AttributeError, KeyError):
return
netname = origuser[0]
# Figure out where the message is destined to.
target = args.get('channel') or args.get('target')
if target is None or not utils.isChannel(target):
# Quit and nick messages are not channel specific. Figure out all channels that the
# sender shares over the relay, and relay them that way.
userdata = args.get('userdata') or irc.users.get(source)
assert userdata, "Got a channel-less message from bad UID %s" % source
channels = [channel for channel in userdata.channels if relay.getRelay((irc.name, channel))]
else:
# Pluralize the channel so that we can iterate over it.
channels = [target]
if source in irc.users:
try:
identhost = irc.getHostmask(source).split('!')[-1]
except KeyError: # User got removed due to quit
identhost = '%s@%s' % (args['olduser'].ident, args['olduser'].host)
# This is specifically spaced so that ident@host is only shown for users that have
# one, and not servers.
identhost = ' (%s)' % identhost
else:
identhost = ''
# Convert the target for kicks, etc. from a UID to a nick
if args.get("target") in irc.users:
target_nick = irc.getFriendlyName(args['target'])
else:
target_nick = ''
args.update({'netname': netname, 'nick': sourcename, 'identhost': identhost,
'colored_nick': color_text(sourcename), 'colored_netname': color_text(netname),
'target_nick': target_nick})
text = text_template.substitute(args)
for channel in channels:
irc.proto.message(irc.pseudoclient.uid, channel, text)
utils.add_hook(cb_relay_core, 'CLIENTBOT_MESSAGE')
utils.add_hook(cb_relay_core, 'CLIENTBOT_KICK')
utils.add_hook(cb_relay_core, 'CLIENTBOT_PART')
utils.add_hook(cb_relay_core, 'CLIENTBOT_JOIN')
utils.add_hook(cb_relay_core, 'CLIENTBOT_QUIT')
utils.add_hook(cb_relay_core, 'CLIENTBOT_NICK')
|
Python
| 0.000006
|
@@ -3062,16 +3062,186 @@
guser%5B0%5D
+%0A try: # Try to get the full network name%0A netname = conf.conf%5B'servers'%5D%5Bnetname%5D%5B'netname'%5D%0A except KeyError:%0A pass
%0A%0A
|
29315213a8503de018a76badc71da3737d2b54c7
|
Fix spiffsgen example test
|
examples/storage/spiffsgen/example_test.py
|
examples/storage/spiffsgen/example_test.py
|
from __future__ import print_function
import os
import hashlib
import ttfw_idf
@ttfw_idf.idf_example_test(env_tag='Example_GENERIC')
def test_examples_spiffsgen(env, extra_data):
# Test with default build configurations
dut = env.get_dut('spiffsgen', 'examples/storage/spiffsgen', dut_class=ttfw_idf.ESP32DUT)
dut.start_app()
base_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'spiffs_image')
# Expect hello.txt is read successfully
with open(os.path.join(base_dir, 'hello.txt'), 'r') as hello_txt:
dut.expect('Read from hello.txt: ' + hello_txt.read())
# Expect alice.txt MD5 hash is computed accurately
with open(os.path.join(base_dir, 'sub', 'alice.txt'), 'rb') as alice_txt:
alice_md5 = hashlib.md5(alice_txt.read()).hexdigest()
dut.expect('Computed MD5 hash of alice.txt: ' + alice_md5)
if __name__ == '__main__':
test_examples_spiffsgen()
|
Python
| 0
|
@@ -599,16 +599,25 @@
t.read()
+.rstrip()
)%0A%0A #
|
7c762733311c6a52f0a7605a9495f8234c1d6ff2
|
put portLo..Hi as arg
|
predictor/server/server.py
|
predictor/server/server.py
|
#!/usr/bin/python
import sys
from datetime import datetime
from server_thread import ServerThread as Server
from config import serverConfig as scfg
def main():
if len(sys.argv)!=2:
print 'USAGE: phyton prediction_server.py [serverId]'
return
serverId = sys.argv[1]
if serverId not in scfg['ports']:
print 'FATAL: serverId unknown'
return
host = scfg['host']
portLo,portHi = scfg['ports'][serverId]
upAt = datetime.now().strftime("%Y:%m:%d %H:%M:%S")
print >> sys.stderr, '******************************************************'
print >> sys.stderr,"Ijah predictor server :)"
print >> sys.stderr,"[id= "+serverId+"]"
print >> sys.stderr,"[ports= "+str(portLo)+" to "+str(portHi)+"]"
print >> sys.stderr,"[upFrom= "+upAt+"]"
threadList = [Server(i,"serverThread_"+str(serverId)+"_"+str(i),host,port)
for i,port in enumerate(range(portLo, portHi+1))]
for t in threadList:
t.daemon=True
t.start()
while True:
pass
if __name__ == '__main__':
main()
|
Python
| 0.000001
|
@@ -105,48 +105,8 @@
rver
-%0Afrom config import serverConfig as scfg
%0A%0Ade
@@ -112,16 +112,20 @@
ef main(
+argv
):%0A i
@@ -145,9 +145,9 @@
v)!=
-2
+4
:%0A
@@ -204,16 +204,34 @@
erverId%5D
+ %5BportLo%5D %5BportHi%5D
'%0A
@@ -248,192 +248,95 @@
-serverId = sys.argv%5B1%5D%0A if serverId not in scfg%5B'ports'%5D:%0A print 'FATAL: serverId unknown'%0A return%0A%0A host = scfg%5B'host'%5D%0A portLo,portHi = scfg%5B'ports'%5D%5BserverId%5D
+host = '127.0.0.1'%0A serverId = argv%5B1%5D%0A portLo,portHi = int(argv%5B2%5D),int(argv%5B3%5D)
%0A
@@ -963,10 +963,18 @@
main(
+sys.argv
)%0A
|
87ca8475f58b057e8043f8b398bd76123a89a733
|
Revert "parsing html"
|
moz/minutes/helpers.py
|
moz/minutes/helpers.py
|
#!/usr/bin/env python
# encoding: utf-8
"""
helpers.py
Some modules to help with this project
Created by Karl Dubost on 2016-02-24.
Copyright (c) 2016 La Grange. All rights reserved.
MIT License
"""
import io
import sys
import lxml.html
import requests
def fetch_content(uri):
'''Fetch the URI and returns the raw content and its encoding'''
content = requests.get(uri)
return content.text, content.encoding
def html_parse(content):
'''returns a parsed HTML content'''
html = ''
try:
html = lxml.html.parse(io.StringIO(content))
except Exception, e:
raise e
return html
def main():
'''core program'''
pass
if __name__ == "__main__":
sys.exit(main())
|
Python
| 0
|
@@ -197,48 +197,8 @@
%0A%22%22%22
-%0A%0Aimport io%0Aimport sys%0A%0Aimport lxml.html
%0Aimp
@@ -210,18 +210,16 @@
equests%0A
-%0A%0A
def fetc
@@ -335,16 +335,16 @@
et(uri)%0A
+
retu
@@ -381,210 +381,8 @@
ing%0A
-%0A%0Adef html_parse(content):%0A '''returns a parsed HTML content'''%0A html = ''%0A try:%0A html = lxml.html.parse(io.StringIO(content))%0A except Exception, e:%0A raise e%0A return html%0A%0A%0A
def
|
2158f0049134fb044c7d8606bb63a0c64cfacbf5
|
fix cancelling jobs on a subqueue
|
mrq/basetasks/utils.py
|
mrq/basetasks/utils.py
|
from __future__ import print_function
from future.utils import itervalues
from future.builtins import str
from mrq.task import Task
from mrq.queue import Queue
from bson import ObjectId
from mrq.context import connections, get_current_config, get_current_job
from mrq.job import set_queues_size
from collections import defaultdict
from mrq.utils import group_iter
import datetime
import ujson as json
def get_task_cfg(taskpath):
return get_current_config().get("tasks", {}).get(taskpath) or {}
class JobAction(Task):
params = None
collection = None
def run(self, params):
self.params = params
self.collection = connections.mongodb_jobs.mrq_jobs
query = self.build_query()
return self.perform_action(
self.params.get("action"), query, self.params.get("destination_queue")
)
def build_query(self):
query = {}
current_job = get_current_job()
if self.params.get("id"):
query["_id"] = ObjectId(self.params.get("id"))
# TODO use redis for queue
for k in [
"queue",
"status",
"worker",
"path",
"dateretry",
"exceptiontype"]:
if self.params.get(k):
if isinstance(self.params[k], (list, tuple)):
query[k] = {"$in": list(self.params[k])}
elif k == "queue" and self.params[k][-1] == "/":
query[k] = {"$regex": "^%s" % self.params[k]}
else:
query[k] = self.params[k]
if query.get("worker"):
query["worker"] = ObjectId(query["worker"])
if self.params.get("params"):
params_dict = json.loads(self.params.get("params")) # pylint: disable=no-member
for key in params_dict:
query["params.%s" % key] = params_dict[key]
if current_job and "_id" not in query:
query["_id"] = {"$lte": current_job.id}
return query
def perform_action(self, action, query, destination_queue):
stats = {
"requeued": 0,
"cancelled": 0
}
if action == "cancel":
default_job_timeout = get_current_config()["default_job_timeout"]
# Finding the ttl here to expire is a bit hard because we may have mixed paths
# and hence mixed ttls.
# If we are cancelling by path, get this ttl
if query.get("path"):
result_ttl = get_task_cfg(query["path"]).get("result_ttl", default_job_timeout)
# If not, get the maxmimum ttl of all tasks.
else:
tasks_defs = get_current_config().get("tasks", {})
tasks_ttls = [cfg.get("result_ttl", 0) for cfg in itervalues(tasks_defs)]
result_ttl = max([default_job_timeout] + tasks_ttls)
now = datetime.datetime.utcnow()
size_by_queues = defaultdict(int)
if "queue" not in query:
for job in self.collection.find(query, projection={"queue": 1}):
size_by_queues[job["queue"]] += 1
ret = self.collection.update(query, {"$set": {
"status": "cancel",
"dateexpires": now + datetime.timedelta(seconds=result_ttl),
"dateupdated": now
}}, multi=True)
stats["cancelled"] = ret["n"]
if "queue" in query:
if isinstance(query["queue"], str):
size_by_queues[query["queue"]] = ret["n"]
set_queues_size(size_by_queues, action="decr")
# Special case when emptying just by queue name: empty it directly!
# In this case we could also loose some jobs that were queued after
# the MongoDB update. They will be "lost" and requeued later like the other case
# after the Redis BLPOP
if list(query.keys()) == ["queue"]:
Queue(query["queue"]).empty()
elif action in ("requeue", "requeue_retry"):
# Requeue task by groups of maximum 1k items (if all in the same
# queue)
cursor = self.collection.find(query, projection=["_id", "queue"])
# We must freeze the list because queries below would change it.
# This could not fit in memory, research adding {"stats": {"$ne":
# "queued"}} in the query
fetched_jobs = list(cursor)
for jobs in group_iter(fetched_jobs, n=1000):
jobs_by_queue = defaultdict(list)
for job in jobs:
jobs_by_queue[job["queue"]].append(job["_id"])
stats["requeued"] += 1
for queue in jobs_by_queue:
updates = {
"status": "queued",
"datequeued": datetime.datetime.utcnow(),
"dateupdated": datetime.datetime.utcnow()
}
if destination_queue is not None:
updates["queue"] = destination_queue
if action == "requeue":
updates["retry_count"] = 0
self.collection.update({
"_id": {"$in": jobs_by_queue[queue]}
}, {"$set": updates}, multi=True)
set_queues_size({queue: len(jobs) for queue, jobs in jobs_by_queue.iteritems()})
return stats
|
Python
| 0.000001
|
@@ -4011,16 +4011,59 @@
%22queue%22%5D
+ and isinstance(query%5B%22queue%22%5D, basestring)
:%0A
|
90b75ba76c5f98abf3d6484cc9c51119042b7812
|
Fix issues with the tethys manage sync command.
|
tethys_apps/cli/manage_commands.py
|
tethys_apps/cli/manage_commands.py
|
"""
********************************************************************************
* Name: manage_commands.py
* Author: Nathan Swain
* Created On: 2015
* Copyright: (c) Brigham Young University 2015
* License: BSD 2-Clause
********************************************************************************
"""
import os
import subprocess
from tethys_apps.base.testing.environment import set_testing_environment
#/usr/lib/tethys/src/tethys_apps/cli
CURRENT_SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
TETHYS_HOME = os.sep.join(CURRENT_SCRIPT_DIR.split(os.sep)[:-3])
TETHYS_SRC_DIRECTORY = os.sep.join(CURRENT_SCRIPT_DIR.split(os.sep)[:-2])
MANAGE_START = 'start'
MANAGE_SYNCDB = 'syncdb'
MANAGE_COLLECTSTATIC = 'collectstatic'
MANAGE_COLLECTWORKSPACES = 'collectworkspaces'
MANAGE_COLLECT = 'collectall'
MANAGE_CREATESUPERUSER = 'createsuperuser'
MANAGE_SYNC = 'sync'
def get_manage_path(args):
"""
Validate user defined manage path, use default, or throw error
"""
# Determine path to manage.py file
manage_path = os.path.join(TETHYS_SRC_DIRECTORY, 'manage.py')
# Check for path option
if hasattr(args, 'manage'):
manage_path = args.manage or manage_path
# Throw error if path is not valid
if not os.path.isfile(manage_path):
print('ERROR: Can\'t open file "{0}", no such file.'.format(manage_path))
exit(1)
return manage_path
def manage_command(args):
"""
Management commands.
"""
# Get the path to manage.py
manage_path = get_manage_path(args)
# Define the process to be run
primary_process = None
if args.command == MANAGE_START:
if args.port:
primary_process = ['python', manage_path, 'runserver', args.port]
else:
primary_process = ['python', manage_path, 'runserver']
elif args.command == MANAGE_SYNCDB:
intermediate_process = ['python', manage_path, 'makemigrations']
run_process(intermediate_process)
primary_process = ['python', manage_path, 'migrate']
elif args.command == MANAGE_COLLECTSTATIC:
# Run pre_collectstatic
intermediate_process = ['python', manage_path, 'pre_collectstatic']
run_process(intermediate_process)
# Setup for main collectstatic
primary_process = ['python', manage_path, 'collectstatic']
if args.noinput:
primary_process.append('--noinput')
elif args.command == MANAGE_COLLECTWORKSPACES:
# Run collectworkspaces command
if args.force:
primary_process = ['python', manage_path, 'collectworkspaces', '--force']
else:
primary_process = ['python', manage_path, 'collectworkspaces']
elif args.command == MANAGE_COLLECT:
# Convenience command to run collectstatic and collectworkspaces
## Run pre_collectstatic
intermediate_process = ['python', manage_path, 'pre_collectstatic']
run_process(intermediate_process)
## Setup for main collectstatic
intermediate_process = ['python', manage_path, 'collectstatic']
if args.noinput:
intermediate_process.append('--noinput')
run_process(intermediate_process)
## Run collectworkspaces command
primary_process = ['python', manage_path, 'collectworkspaces']
elif args.command == MANAGE_CREATESUPERUSER:
primary_process = ['python', manage_path, 'createsuperuser']
elif args.command == MANAGE_SYNC:
from tethys_apps.utilities import sync_tethys_db
sync_tethys_db()
if primary_process:
run_process(primary_process)
def run_process(process):
# Call the process with a little trick to ignore the keyboard interrupt error when it happens
try:
if 'test' in process:
set_testing_environment(True)
subprocess.call(process)
except KeyboardInterrupt:
pass
finally:
set_testing_environment(False)
|
Python
| 0
|
@@ -3513,62 +3513,69 @@
pps.
-utilities import sync_tethys_db%0A sync_tethys_db
+harvester import SingletonHarvester%0A harvester.harvest
()%0A%0A
|
c5a548730064acfaa6fa81c0754a3fa8976b014f
|
update commands for django
|
suarm/server/project.py
|
suarm/server/project.py
|
from __future__ import unicode_literals
import os
import click
import sys
from fabric.api import *
from fabric.contrib.files import upload_template, exists
from ..server.config import (
get_project_src, make_user, make_app, get_user_home)
class Project(object):
@staticmethod
def config_settings():
pass
@staticmethod
def push():
""" Push changes to selected server"""
local("git push %s master" % env.stage)
@staticmethod
def install():
"""
Run intall command.
"""
python = "DJANGO_SETTINGS_MODULE=config.settings.production ./env/bin/python"
pip = "./env/bin/pip"
with cd(get_project_src(env.stage)):
"""
SETTINGS=config.settings.local
PYTHON_ENV := =$(SETTINGS) ./env/bin/python
PIP_ENV := DJANGO_SETTINGS_MODULE=$(SETTINGS) ./env/bin/pip
virtualenv -p python3 env --always-copy --no-site-packages
$(PIP_ENV) install -r requirements/production.txt
mkdir -p var/cache
mkdir -p var/log
mkdir -p var/db
mkdir -p var/run
mkdir -p var/bin
$(PYTHON_ENV) manage.py migrate
$(PYTHON_ENV) manage.py collectstatic \
-v 0 \
--noinput \
--traceback \
-i django_extensions \
-i '*.coffee' \
-i '*.rb' \
-i '*.scss' \
-i '*.less' \
-i '*.sass'
rm -rf var/cache/*
rm -rf public/media/cache/*
"""
if not exists("env"):
run("virtualenv -p python3 env --always-copy --no-site-packages")
run("%(pip)s install -r requirements/production.txt" % {"pip": pip})
run("mkdir -p var/cache var/log var/db var/run var/bin")
run("%(python)s manage.py migrate" % {"python": python})
run("%(python)s manage.py collectstatic \
-v 0 --noinput --traceback -i django_extensions \
-i '*.coffee' -i '*.rb' -i '*.scss' -i '*.less' -i '*.sass'" % {"python": python})
run("rm -rf var/cache/*")
run("rm -rf public/media/cache/*")
@staticmethod
def clean():
"""
Clean project logs and cache.
"""
with cd("%s/var/log" % get_project_src(env.stage)):
run("rm -rf *")
with cd("%s/var/cache" % get_project_src(env.stage)):
run("rm -rf *")
@staticmethod
def environment():
""" Push the environment configuration """
with cd(get_project_src(env.stage)):
if os.path.isfile(".environment"):
upload_template(
filename=".environment",
destination='.environment',
template_dir="./",
use_sudo=False,
)
else:
sys.exit("\nYou need [.environment] file to continue with deployment")
@staticmethod
def start():
"""
Start supervisor service.
"""
sudo("supervisorctl start %s" % env.project)
@staticmethod
def restart():
"""
Restart supervisor service.
"""
sudo("supervisorctl restart %s" % env.project)
@staticmethod
def stop():
"""
Stop supervisor service.
"""
sudo("supervisorctl stop %s" % env.project)
@staticmethod
def create_superuser():
"""
Create a superuser to production at selected server.
"""
with settings(user=make_user(env.project), password=env.passwd):
with cd(get_project_src(env.stage)):
run("make superuser SETTINGS=config.settings.production")
@staticmethod
def reset_env():
"""
Create a superuser to production at selected server.
"""
with settings(user=make_user(env.project), password=env.passwd):
with cd(get_project_src(env.stage)):
run("rm -rf env/")
@staticmethod
def run_django_command(command):
pass
@staticmethod
def upload_key():
"""
Upload id_rsa.pub file to server.
This file is obtained from ssh-keygen command.
"""
try:
local("ssh-copy-id %(user)s@%(ipv4)s" % {
"user": make_user(env.project),
"ipv4": env.ipv4
})
except Exception as e:
raise Exception('Unfulfilled local requirements')
#-------------------------------------------------------------------------------
def backup():
"""
Create a database backup
"""
# Backup DB
sudo('pg_dump %(app)s > /tmp/%(app)s.sql' % {
"app": make_app(env.project),
}, user='postgres')
with settings(user=make_user(env.project), password=env.passwd):
with cd(get_user_home(env.stage)):
# Copy backup from temporal
run("cp /tmp/%(app)s.sql ." %
{"app": make_app(env.project)})
# Compress DB
run("tar -cvf %(app)s.db.tar %(app)s.sql" %
{"app": make_app(env.project)})
run("rm %(app)s.sql" %
{"app": make_app(env.project)})
# Compress media
run("tar -cvf %(app)s.media.tar %(app)s/src/public/media/" %
{"app": make_app(env.project)})
# Clean DB from temporal
sudo('rm /tmp/%(app)s.sql' % {"app": make_app(env.project)})
@staticmethod
def download_backup():
click.echo("\n----------------------------------------------------------")
click.echo("Downloading backup patient please ...!!!")
click.echo("----------------------------------------------------------")
get(remote_path="%(home)s/%(app)s.db.tar" % {
"home": get_user_home(env.stage),
"app": make_app(env.project)
}, local_path=".", use_sudo=True)
click.echo("---> DB Backup OK")
get(remote_path="%(home)s/%(app)s.media.tar" % {
"home": get_user_home(env.stage),
"app": make_app(env.project)
}, local_path=".", use_sudo=True)
click.echo("---> MEDIA Backup OK")
click.echo("----------------------------------------------------------\n")
|
Python
| 0.000001
|
@@ -263,16 +263,124 @@
object):
+%0A python = %22DJANGO_SETTINGS_MODULE=config.settings.production ./env/bin/python%22%0A pip = %22./env/bin/pip%22
%0A%0A @s
@@ -654,124 +654,8 @@
%22%22%22
-%0A python = %22DJANGO_SETTINGS_MODULE=config.settings.production ./env/bin/python%22%0A pip = %22./env/bin/pip%22
%0A%0A
@@ -1771,16 +1771,24 @@
%7B%22pip%22:
+Project.
pip%7D)%0A
@@ -1906,32 +1906,40 @@
e%22 %25 %7B%22python%22:
+Project.
python%7D)%0A
@@ -2149,16 +2149,24 @@
ython%22:
+Project.
python%7D)
@@ -3762,59 +3762,74 @@
un(%22
-make superuser SETTINGS=config.settings.production%22
+%25(python)s manage.py createsuperuser%22 %25 %7B%22python%22: Project.python%7D
)%0A%0A
|
a123663004f96e845659be019c2e5970cfa2be98
|
use tempfile instead of /tmp
|
thinc/linear/tests/test_avgtron.py
|
thinc/linear/tests/test_avgtron.py
|
from __future__ import division
import pytest
import pickle
import io
from thinc.linear.avgtron import AveragedPerceptron
from thinc.extra.eg import Example
def assert_near_eq(float1, float2):
assert abs(float1 - float2) < 0.001
def test_basic():
nr_class = 3
model = AveragedPerceptron(((1,), (2,), (3,), (4,), (5,)))
instances = [
(1, {1: 1, 3: -5}),
(2, {2: 4, 3: 5})
]
for clas, feats in instances:
eg = Example(nr_class)
eg.features = feats
model(eg)
eg.costs = [i != clas for i in range(nr_class)]
model.update(eg)
eg = Example(nr_class)
eg.features = {1: 2, 2: 1}
model(eg)
assert eg.guess == 2
eg = Example(nr_class)
eg.features = {0: 2, 2: 1}
model(eg)
assert eg.scores[1] == 0
eg = Example(nr_class)
eg.features = {1: 2, 2: 1}
model(eg)
assert eg.scores[2] > 0
eg = Example(nr_class)
eg.features = {1: 2, 1: 1}
model(eg)
assert eg.scores[1] > 0
eg = Example(nr_class)
eg.features = {0: 3, 3: 1}
model(eg)
assert eg.scores[1] < 0
eg = Example(nr_class)
eg.features = {0: 3, 3: 1}
model(eg)
assert eg.scores[2] > 0
@pytest.fixture
def instances():
instances = [
[
(1, {1: -1, 2: 1}),
(2, {1: 5, 2: -5}),
(3, {1: 3, 2: -3}),
],
[
(1, {1: -1, 2: 1}),
(2, {1: -1, 2: 2}),
(3, {1: 3, 2: -3})
],
[
(1, {1: -1, 2: 2}),
(2, {1: 5, 2: -5}),
(3, {4: 1, 5: -7, 2: 1})
]
]
return instances
@pytest.fixture
def model(instances):
templates = []
for batch in instances:
for _, feats in batch:
for key in feats:
templates.append((key,))
templates = tuple(set(templates))
model = AveragedPerceptron(templates)
for batch in instances:
model.time += 1
for clas, feats in batch:
for key, value in feats.items():
model.update_weight(key, clas, value)
return model
def get_score(nr_class, model, feats, clas):
eg = Example(nr_class)
eg.features = feats
eg.costs = [i != clas for i in range(nr_class)]
model(eg)
return eg.scores[clas]
def get_scores(nr_class, model, feats):
eg = Example(nr_class)
eg.features = feats
model(eg)
return list(eg.scores)
def test_averaging(model):
model.end_training()
nr_class = 4
# Feature 1
assert_near_eq(get_score(nr_class, model, {1: 1}, 1), sum([-1, -2, -3]) / 3.0)
assert_near_eq(get_score(nr_class, model, {1: 1}, 2), sum([5, 4, 9]) / 3.0)
assert_near_eq(get_score(nr_class, model, {1: 1}, 3), sum([3, 6, 6]) / 3.0)
# Feature 2
assert_near_eq(get_score(nr_class, model, {2: 1}, 1), sum([1, 2, 4]) / 3.0)
assert_near_eq(get_score(nr_class, model, {2: 1}, 2), sum([-5, -3, -8]) / 3.0)
assert_near_eq(get_score(nr_class, model, {2: 1}, 3), sum([-3, -6, -5]) / 3.0)
# Feature 3 (absent)
assert_near_eq(get_score(nr_class, model, {3: 1}, 1), 0)
assert_near_eq(get_score(nr_class, model, {3: 1}, 2), 0)
assert_near_eq(get_score(nr_class, model, {3: 1}, 3), 0)
# Feature 4
assert_near_eq(get_score(nr_class, model, {4: 1}, 1), sum([0, 0, 0]) / 3.0)
assert_near_eq(get_score(nr_class, model, {4: 1}, 2), sum([0, 0, 0]) / 3.0)
assert_near_eq(get_score(nr_class, model, {4: 1}, 3), sum([0, 0, 1]) / 3.0)
# Feature 5
assert_near_eq(get_score(nr_class, model, {5: 1}, 1), sum([0, 0, 0]) / 3.0)
assert_near_eq(get_score(nr_class, model, {5: 1}, 2), sum([0, 0, 0]) / 3.0)
assert_near_eq(get_score(nr_class, model, {5: 1}, 3), sum([0, 0, -7]) / 3.0)
def test_dump_load(model):
loc = '/tmp/test_model'
model.end_training()
model.dump(loc)
string = open(loc, 'rb').read()
assert string
new_model = AveragedPerceptron([(1,), (2,), (3,), (4,)])
nr_class = 5
assert get_scores(nr_class, model, {1: 1, 3: 1, 4: 1}) != \
get_scores(nr_class, new_model, {1:1, 3:1, 4:1})
assert get_scores(nr_class, model, {2:1, 5:1}) != \
get_scores(nr_class, new_model, {2:1, 5:1})
assert get_scores(nr_class, model, {2:1, 3:1, 4:1}) != \
get_scores(nr_class, new_model, {2:1, 3:1, 4:1})
new_model.load(loc)
assert get_scores(nr_class, model, {1:1, 3:1, 4:1}) == \
get_scores(nr_class, new_model, {1:1, 3:1, 4:1})
assert get_scores(nr_class, model, {2:1, 5:1}) == \
get_scores(nr_class, new_model, {2:1, 5:1})
assert get_scores(nr_class, model, {2:1, 3:1, 4:1}) == \
get_scores(nr_class, new_model, {2:1, 3:1, 4:1})
## TODO: Need a test that exercises multiple lines. Example bug:
## in gather_weights, don't increment f_i per row, only per feature
## (so overwrite some lines we're gathering)
|
Python
| 0.000001
|
@@ -63,16 +63,32 @@
mport io
+%0Aimport tempfile
%0A%0Afrom t
@@ -3801,25 +3801,26 @@
c =
-'/tmp/test_model'
+tempfile.mkdtemp()
%0A
|
8ecc26cffabb5a4c80b9a5574b102cc5c63312d3
|
Update accounts.py
|
myuw/views/accounts.py
|
myuw/views/accounts.py
|
from myuw.views.page import page
from myuw.util.page_view import page_view
@page_view
def accounts(request):
return page(request, {}, template='accounts.html')
|
Python
| 0.000001
|
@@ -158,8 +158,9 @@
s.html')
+%0A
|
affb8417c7592158fbfd62c4cd49608a368ccabf
|
Switch update flag for full flag
|
nap/dataviews/views.py
|
nap/dataviews/views.py
|
from collections import defaultdict
from inspect import classify_class_attrs
from django.db.models.fields import NOT_PROVIDED
from django.forms import ValidationError
from django.utils.functional import cached_property
from .fields import field
from .utils import DictObject
class DataView(object):
def __init__(self, obj=None, **kwargs):
if obj is None:
obj = DictObject()
self._obj = obj
self._kwargs = kwargs
@cached_property
def _fields(self):
return {
name: prop
for name, kind, cls, prop in classify_class_attrs(self.__class__)
if isinstance(prop, field)
}
@cached_property
def _field_names(self):
return tuple(self._fields.keys())
def __lshift__(self, other):
'''
Allow implicit reduction using:
>>> data = view << obj
'''
self._obj = other
return self._reduce()
def __rlshift__(self, other):
'''
Allow implicit apply(update) using:
>>> obj = data >> view
Note: sets update=True
'''
return self._apply(other, update=True)
def _reduce(self):
'''
Reduce our instance to its serialisable state.
Returns a dict.
'''
return {
name: getattr(self, name)
for name in self._field_names
}
def _apply(self, data, update=False):
'''
Update an instance from supplied data.
If update is False, all fields not tagged as .required=False MUST be
supplied in the data dict.
'''
errors = defaultdict(list)
for name in self._field_names:
required = getattr(self._fields[name], 'required', True)
default = getattr(self._fields[name], 'default', NOT_PROVIDED)
value = data.get(name, default)
if value is NOT_PROVIDED:
if required and not update:
errors[name].append(
ValidationError('This field is required')
)
continue
try:
setattr(self, name, value)
except ValidationError as e:
errors[name].append(e.message)
self._errors = dict(errors)
if errors:
raise ValidationError(self._errors)
return self._obj
|
Python
| 0
|
@@ -1020,16 +1020,8 @@
pply
-(update)
usi
@@ -1059,40 +1059,8 @@
view
-%0A%0A Note: sets update=True
%0A
@@ -1104,21 +1104,8 @@
ther
-, update=True
)%0A%0A
@@ -1365,22 +1365,20 @@
, data,
-update
+full
=False):
@@ -1453,22 +1453,19 @@
If
-update is Fals
+full is Tru
e, a
@@ -1883,31 +1883,25 @@
if
-required and not update
+full and required
:%0A
|
9eebf1d43b93a6e1001186693d3a15ce2b5d568e
|
Add Bank and BankAccount models, add some fields to Supplier model
|
nbs/models/supplier.py
|
nbs/models/supplier.py
|
# -*- coding: utf-8 -*-
from nbs.models import db
from nbs.models.entity import Entity
from nbs.models.misc import FiscalDataMixin
class Supplier(Entity, FiscalDataMixin):
__tablename__ = 'supplier'
__mapper_args__ = {'polymorphic_identity': u'supplier'}
supplier_id = db.Column(db.Integer, db.ForeignKey('entity.id'),
primary_key=True)
name = Entity._name_1
fancy_name = Entity._name_2
payment_term = db.Column(db.Integer)
@property
def full_name(self):
fn = u" ({0})".format(self.fancy_name) if self.fancy_name else u""
return u"{0}{1}".format(self.name, fn)
|
Python
| 0
|
@@ -268,376 +268,2080 @@
-supplier_id = db.Column(db.Integer, db.ForeignKey('entity.id'),%0A primary_key=True)%0A name = Entity._name_1%0A fancy_name = Entity._name_2%0A%0A payment_term = db.Column(db.Integer)%0A%0A @property%0A def full_name(self):%0A fn = u%22 (%7B0%7D)%22.format(self.fancy_name) if self.fancy_name else u%22%22%0A return u%22%7B0%7D%7B1%7D%22.format(self.name, fn
+FREIGHT_SUPPLIER = 'FREIGHT_SUPPLIER'%0A FREIGHT_CUSTOMER = 'FREIGHT_CUSTOMER'%0A%0A _freight_types = %7B%0A FREIGHT_SUPPLIER: 'Flete de proveedor',%0A FREIGHT_CUSTOMER: 'Flete de cliente',%0A %7D%0A%0A supplier_id = db.Column(db.Integer, db.ForeignKey('entity.id'),%0A primary_key=True)%0A name = Entity._name_1%0A fancy_name = Entity._name_2%0A%0A payment_term = db.Column(db.Integer) # in days%0A freight_type = db.Column(db.Enum(*_freight_types.keys(),%0A name='freight_type'), default=FREIGHT_CUSTOMER)%0A leap_time = db.Column(db.Integer) # in days%0A%0A @property%0A def full_name(self):%0A fn = u%22 (%7B0%7D)%22.format(self.fancy_name) if self.fancy_name else u%22%22%0A return u%22%7B0%7D%7B1%7D%22.format(self.name, fn)%0A%0A%0Aclass Bank(db.Model):%0A __tablename__ = 'bank'%0A%0A id = db.Column(db.Integer, primary_key=True)%0A name = db.Column(db.Unicode)%0A # TODO: Add bank logo, to quickly identify%0A%0A%0Aclass BankAccount(db.Model):%0A __tablename__ = 'bank_account'%0A%0A TYPE_CC_PESOS = 'TYPE_CC_PESOS'%0A TYPE_CC_USD = 'TYPE_CC_USD'%0A TYPE_CA_PESOS = 'TYPE_CA_PESOS'%0A TYPE_CA_USD = 'TYPE_CA_USD'%0A TYPE_UNIQUE = 'TYPE_UNIQUE'%0A%0A _account_type = %7B%0A TYPE_CC_PESOS: 'Cuenta Corriente en Pesos',%0A TYPE_CC_USD: 'Cuenta Corriente en D%C3%B3lares',%0A TYPE_CA_PESOS: 'Caja de Ahorro en Pesos',%0A TYPE_CA_USD: 'Caja de Ahorro en D%C3%B3lares',%0A TYPE_UNIQUE: 'Cuenta %C3%9Anica',%0A %7D%0A%0A%0A id = db.Column(db.Integer, primary_key=True)%0A bank_branch = db.Column(db.Unicode)%0A account_type = db.Column(db.Enum(*_account_type.keys(),%0A name='account_type'), default=TYPE_CC_PESOS)%0A account_number = db.Column(db.Unicode)%0A account_cbu = db.Column(db.Unicode)%0A account_owner = db.Column(db.Unicode)%0A%0A%0A bank_id = db.Column(db.Integer, db.ForeignKey('bank.id'))%0A bank = db.relationship(Bank, backref=%22accounts%22)%0A%0A supplier_id = db.Column(db.Integer, db.ForeignKey('supplier.supplier_id'))%0A supplier = db.relationship(Supplier, 
backref='bank_accounts'
)%0A
|
e1a7262bc4fc841b95ee6fb45c1bb0da5cc3f2c1
|
add an option for fallback style in vimrc
|
tools/clang-format/clang-format.py
|
tools/clang-format/clang-format.py
|
# This file is a minimal clang-format vim-integration. To install:
# - Change 'binary' if clang-format is not on the path (see below).
# - Add to your .vimrc:
#
# map <C-I> :pyf <path-to-this-file>/clang-format.py<cr>
# imap <C-I> <c-o>:pyf <path-to-this-file>/clang-format.py<cr>
#
# The first line enables clang-format for NORMAL and VISUAL mode, the second
# line adds support for INSERT mode. Change "C-I" to another binding if you
# need clang-format on a different key (C-I stands for Ctrl+i).
#
# With this integration you can press the bound key and clang-format will
# format the current line in NORMAL and INSERT mode or the selected region in
# VISUAL mode. The line or region is extended to the next bigger syntactic
# entity.
#
# It operates on the current, potentially unsaved buffer and does not create
# or save any files. To revert a formatting, just undo.
import difflib
import json
import subprocess
import sys
import vim
# set g:clang_format_path to the path to clang-format if it is not on the path
# Change this to the full path if clang-format is not on the path.
binary = 'clang-format'
if vim.eval('exists("g:clang_format_path")') == "1":
binary = vim.eval('g:clang_format_path')
# Change this to format according to other formatting styles. See the output of
# 'clang-format --help' for a list of supported styles. The default looks for
# a '.clang-format' or '_clang-format' file to indicate the style that should be
# used.
style = 'file'
def main():
# Get the current text.
buf = vim.current.buffer
text = '\n'.join(buf)
# Determine range to format.
lines = '%s:%s' % (vim.current.range.start + 1, vim.current.range.end + 1)
# Determine the cursor position.
cursor = int(vim.eval('line2byte(line("."))+col(".")')) - 2
if cursor < 0:
print 'Couldn\'t determine cursor position. Is your file empty?'
return
# Avoid flashing an ugly, ugly cmd prompt on Windows when invoking clang-format.
startupinfo = None
if sys.platform.startswith('win32'):
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
startupinfo.wShowWindow = subprocess.SW_HIDE
# Call formatter.
command = [binary, '-lines', lines, '-style', style, '-cursor', str(cursor)]
if vim.current.buffer.name:
command.extend(['-assume-filename', vim.current.buffer.name])
p = subprocess.Popen(command,
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
stdin=subprocess.PIPE, startupinfo=startupinfo)
stdout, stderr = p.communicate(input=text)
# If successful, replace buffer contents.
if stderr:
print stderr
if not stdout:
print ('No output from clang-format (crashed?).\n' +
'Please report to bugs.llvm.org.')
else:
lines = stdout.split('\n')
output = json.loads(lines[0])
lines = lines[1:]
sequence = difflib.SequenceMatcher(None, vim.current.buffer, lines)
for op in reversed(sequence.get_opcodes()):
if op[0] is not 'equal':
vim.current.buffer[op[1]:op[2]] = lines[op[3]:op[4]]
vim.command('goto %d' % (output['Cursor'] + 1))
main()
|
Python
| 0.000099
|
@@ -1468,16 +1468,140 @@
= 'file'
+%0Aif vim.eval('exists(%22g:clang_format_fallback_style%22)') == %221%22:%0A fallback_style = vim.eval('g:clang_format_fallback_style')
%0A%0Adef ma
@@ -2382,16 +2382,93 @@
ursor)%5D%0A
+ if fallback_style:%0A command.extend(%5B'-fallback-style', fallback_style%5D)%0A
if vim
|
f57294c59e197c989536638776738b0ed0bcee1d
|
disable scheduler.tough_pepper_cases
|
tools/perf/benchmarks/scheduler.py
|
tools/perf/benchmarks/scheduler.py
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import benchmark
from measurements import smoothness
import page_sets
class SchedulerToughSchedulingCases(benchmark.Benchmark):
"""Measures rendering statistics while interacting with pages that have
challenging scheduling properties.
https://docs.google.com/a/chromium.org/document/d/
17yhE5Po9By0sCdM1yZT3LiUECaUr_94rQt9j-4tOQIM/view"""
test = smoothness.Smoothness
page_set = page_sets.ToughSchedulingCasesPageSet
# Pepper plugin is not supported on android.
@benchmark.Disabled('android', 'win') # crbug.com/384733
class SchedulerToughPepperCases(benchmark.Benchmark):
"""Measures rendering statistics while interacting with pages that have
pepper plugins"""
test = smoothness.Smoothness
page_set = page_sets.ToughPepperCasesPageSet
def CustomizeBrowserOptions(self, options):
# This is needed for testing pepper plugin.
options.AppendExtraBrowserArgs('--enable-pepper-testing')
|
Python
| 0.000031
|
@@ -690,16 +690,23 @@
', 'win'
+, 'mac'
) # crb
|
4fb1ad11add4436395f775a12f0d4e90b99d6594
|
add ignore filtering
|
psutil_mon/psutil_alarm.py
|
psutil_mon/psutil_alarm.py
|
#
# Hubblemon - Yet another general purpose system monitor
#
# Copyright 2015 NAVER Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import socket, fnmatch, pickle, sys, os
import psutil_mon.settings
hubblemon_path = os.path.join(os.path.dirname(__file__), '..')
sys.path.append(hubblemon_path)
import common.settings
class psutil_alarm:
def __init__(self):
self.name = 'psutil'
self.sec_interval = 5 # 5 sec interval
def system_list_init(self):
pass
def select_conf(self, client, item, map):
key = '%s:%s' % (client, item)
# exact
if key in map:
return map[key]
# wild card match
for k, v in map.items():
# overwrite if match like net-*
if fnmatch.fnmatch(key, k):
return map[k]
return {}
def get_conf(self, client, item): # client: machine name, item: items in psutil (ex, cpu, net, disk...)
# select exact conf
abs_conf = self.select_conf(client, item, psutil_mon.settings.alarm_conf_absolute)
lambda_conf = self.select_conf(client, item, psutil_mon.settings.alarm_conf_lambda)
message_head = '%s:%s' % (client, item)
return (message_head, abs_conf, lambda_conf)
|
Python
| 0.000001
|
@@ -1201,16 +1201,115 @@
ey, k):%0A
+%09%09%09%09if 'IGNORE' in v:%0A%09%09%09%09%09if fnmatch.fnmatch(key, v%5B'IGNORE'%5D): # skip IGNORE case%0A%09%09%09%09%09%09continue%0A
%09%09%09%09retu
@@ -1315,14 +1315,9 @@
urn
-map%5Bk%5D
+v
%0A%0A%09%09
|
58a1a3f014387c5358f08b8706a91a18c07bae68
|
remove print, change subject
|
pubsublogger/subscriber.py
|
pubsublogger/subscriber.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
:mod:`subscriber` -- Subscribe to a redis channel and gather logging messages.
To use this module, you have to define at least a channel name.
"""
import redis
from logbook import Logger
import ConfigParser
from logbook import NestedSetup, NullHandler, FileHandler, MailHandler
import os
# use a TCP Socket by default
use_tcp_socket = True
#default config for a UNIX socket
unix_socket = '/tmp/redis.sock'
# default config for a TCP socket
hostname = 'localhost'
port = 6379
pubsub = None
channel = None
# Required only if you want to send emails
dest_mails = []
smtp_server = None
smtp_port = 0
src_server = None
def setup(name, path = 'log', enable_debug = False):
"""
Prepare a NestedSetup.
:param name: the channel name
:param path: the path where the logs will be written
:param enable_debug: do we want to save the message at the DEBUG level
:return a nested Setup
"""
path_tmpl = os.path.join(path, '{name}_{level}.log')
info = path_tmpl.format(name = name, level = 'info')
warn = path_tmpl.format(name = name, level = 'warn')
err = path_tmpl.format(name = name, level = 'err')
crit = path_tmpl.format(name = name, level = 'crit')
# a nested handler setup can be used to configure more complex setups
setup = [
# make sure we never bubble up to the stderr handler
# if we run out of setup handling
NullHandler(),
# then write messages that are at least info to to a logfile
FileHandler(info, level='INFO', encoding='utf-8'),
# then write messages that are at least warnings to to a logfile
FileHandler(warn, level='WARNING', encoding='utf-8'),
# then write messages that are at least errors to to a logfile
FileHandler(err, level='ERROR', encoding='utf-8'),
# then write messages that are at least critical errors to to a logfile
FileHandler(crit, level='CRITICAL', encoding='utf-8'),
]
if enable_debug:
debug = path_tmpl.format(name = name, level = 'debug')
setup.insert(1, FileHandler(debug, level='DEBUG', encoding='utf-8'))
if src_server is not None and smtp_server is not None \
and smtp_port != 0 and len(dest_mails) != 0:
mail_tmpl = '{name}_error@{src}'
from_mail = mail_tmpl.format(name = name, src = src_server)
print from_mail
# errors should then be delivered by mail and also be kept
# in the application log, so we let them bubble up.
setup.append(MailHandler(from_mail, dest_mails, level='ERROR',
bubble=True, server_addr=(smtp_server, smtp_port)))
return NestedSetup(setup)
def mail_setup(path):
"""
Set the variables to be able to send emails.
:param path: path to the config file
"""
global dest_mails
global smtp_server
global smtp_port
global src_server
config = ConfigParser.RawConfigParser()
config.readfp(path)
dest_mails = config.get('mail', 'dest_mail').split(',')
smtp_server = config.get('mail', 'smtp_server')
smtp_port = config.get('mail', 'smtp_port')
src_server = config.get('mail', 'src_server')
def run(log_name, path, debug = False, mail = None):
"""
Run a subscriber and pass the messages to the logbook setup.
Stays alive as long as the pubsub instance listen to something.
:param log_name: the channel to listen to
:param path: the path where the log files will be written
:param debug: True if you want to save the debug messages too
:param mail: Path to the config file for the mails
"""
global pubsub
global channel
channel = log_name
if use_tcp_socket:
r = redis.StrictRedis(host=hostname, port=port)
else:
r = redis.StrictRedis(unix_socket_path = unix_socket)
pubsub = r.pubsub()
pubsub.psubscribe(channel + '.*')
logger = Logger(channel)
if mail is not None:
mail_setup(mail)
if os.path.exists(path) and not os.path.isdir(path):
raise Exception("The path you want to use to save the file is invalid (not a directory).")
if not os.path.exists(path):
os.mkdir(path)
with setup(channel, path, debug):
for msg in pubsub.listen():
if msg['type'] == 'pmessage':
level = msg['channel'].split('.')[1]
message = msg['data']
try:
message = message.decode('utf-8')
except:
pass
logger.log(level, message)
def stop():
"""
Unsubscribe to the channel, stop the script.
"""
pubsub.punsubscribe(channel + '.*')
|
Python
| 0.000001
|
@@ -2391,23 +2391,44 @@
-print from_mail
+subject = 'Error in %7B%7D'.format(name)
%0A
@@ -2606,16 +2606,37 @@
t_mails,
+ subject,%0A
level='
@@ -2642,28 +2642,16 @@
'ERROR',
-%0A
bubble=
|
7a66989b62d1776e72229ac36c0ed77235549b1d
|
Add a data timeout to ensure that we aren't blocked waiting on data for a connection that the pusher server does not think exists. This occurs when the network cable has been unplugged for an extended period of time and then reconnected.
|
pusherclient/connection.py
|
pusherclient/connection.py
|
import websocket
try:
import simplejson as json
except:
import json
from threading import Thread
import time
import logging
CONNECTION_EVENTS_NEW = [
'initialized',
'connecting',
'connected',
'unavailable',
'failed',
'disconnected',
]
CONNECTION_EVENTS_OLD = [
'pusher:connection_established',
'pusher:connection_failed',
]
class Connection(Thread):
def __init__(self, eventHandler, url, logLevel=logging.INFO):
self.socket = None
self.socket_id = ""
self.eventCallbacks = {}
self.eventHandler = eventHandler
self.url = url
self.needsReconnect = False
self.reconnectInterval = 10
self.bind("pusher:connection_established", self._connect_handler)
self.bind("pusher:connection_failed", self._failed_handler)
self.state = "initialized"
self.logger = logging.getLogger()
self.logger.addHandler(logging.StreamHandler())
if logLevel == logging.DEBUG:
websocket.enableTrace(True)
self.logger.setLevel(logLevel)
Thread.__init__(self)
def bind(self, stateEvent, callback):
if stateEvent not in self.eventCallbacks.keys():
self.eventCallbacks[stateEvent] = []
self.eventCallbacks[stateEvent].append(callback)
def run(self):
self._connect()
def _connect(self):
self.state = "connecting"
self.socket = websocket.WebSocketApp(self.url,
self._on_open,
self._on_message,
self._on_error,
self._on_close)
self.socket.run_forever()
while (self.needsReconnect):
self.logger.info("Attempting to connect again in %s seconds." % self.reconnectInterval)
self.state = "unavailable"
time.sleep(self.reconnectInterval)
self.socket.run_forever()
def _on_open(self, ws):
self.logger.info("Connection: Connection opened")
def _on_error(self, ws, error):
self.logger.info("Connection: Error - %s" % error)
self.state = "failed"
self.needsReconnect = True
def _on_message(self, ws, message):
self.logger.info("Connection: Message - %s" % message)
params = self._parse(message)
if 'event' in params.keys():
if (params['event'] in CONNECTION_EVENTS_NEW) or (params['event'] in CONNECTION_EVENTS_OLD):
if params['event'] in self.eventCallbacks.keys():
for callback in self.eventCallbacks[params['event']]:
callback(params['data'])
else:
if 'channel' in params.keys():
self.eventHandler(params['event'],
params['data'],
params['channel'])
else:
self.logger.info("Connection: Unknown event type")
def _on_close(self, ws):
self.logger.info("Connection: Connection closed")
self.state = "disconnected"
def _parse(self, message):
return json.loads(message)
def _send_event(self, eventName, data):
self.socket.send(json.dumps({'event':eventName, 'data':data}))
def _connect_handler(self, data):
parsed = json.loads(data)
self.socket_id = parsed['socket_id']
self.state = "connected"
def _failed_handler(self, data):
parsed = json.loads(data)
self.state = "failed"
|
Python
| 0
|
@@ -98,17 +98,24 @@
t Thread
+, Timer
%0A
-
import t
@@ -1301,16 +1301,664 @@
Level)%0A%0A
+ # From Martyn's comment at: https://pusher.tenderapp.com/discussions/problems/36-no-messages-received-after-1-idle-minute-heartbeat%0A # %22We send a ping every 5 minutes in an attempt to keep connections %0A # alive...%22%0A # This is why we set the connection timeout to 5 minutes, since we can%0A # expect a pusher heartbeat message every 5 minutes. Adding 5 sec to%0A # account for small timing delays which may cause messages to not be%0A # received in exact 5 minute intervals.%0A self.connectionTimeout = 305%0A self.connectionTimer = Timer(self.connectionTimeout, self._connectionTimedOut)%0A%0A
@@ -2975,16 +2975,53 @@
opened%22)
+%0A self.connectionTimer.start()
%0A%0A de
@@ -3271,32 +3271,128 @@
%25s%22 %25 message)%0A%0A
+ # Stop our timeout timer, since we got some data%0A self.connectionTimer.cancel()%0A%0A
params =
@@ -4073,16 +4073,217 @@
type%22)%0A%0A
+ # We've handled our data, so restart our connection timeout handler%0A self.connectionTimer = Timer(self.connectionTimeout, self._connectionTimedOut)%0A self.connectionTimer.start()%0A%0A
def
@@ -4798,32 +4798,32 @@
on.loads(data)%0A%0A
-
self.sta
@@ -4816,28 +4816,234 @@
self.state = %22failed%22%0A
+%0A def _connectionTimedOut(self):%0A self.logger.info(%22Did not receive any data in time. Reconnecting.%22)%0A self.state = %22failed%22%0A self.needsReconnect = True%0A self.socket.close()%0A
|
5cd9499fcc0c1f9b48216aeca11a7adcd8995a47
|
Fix for MRV failing to enter enable mode
|
netmiko/mrv/mrv_ssh.py
|
netmiko/mrv/mrv_ssh.py
|
"""MRV Communications Driver (OptiSwitch)."""
from __future__ import unicode_literals
import time
import re
from netmiko.cisco_base_connection import CiscoSSHConnection
class MrvOptiswitchSSH(CiscoSSHConnection):
"""MRV Communications Driver (OptiSwitch)."""
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self._test_channel_read(pattern=r'[>#]')
self.enable()
self.set_base_prompt()
self.disable_paging(command="no cli-paging")
# Clear the read buffer
time.sleep(.3 * self.global_delay_factor)
self.clear_buffer()
def enable(self, cmd='enable', pattern=r'#', re_flags=re.IGNORECASE):
"""Enable mode on MRV uses no password."""
output = ""
if not self.check_enable_mode():
self.write_channel(self.normalize_cmd(cmd))
output += self.read_until_prompt_or_pattern(pattern=pattern, re_flags=re_flags)
if not self.check_enable_mode():
msg = "Failed to enter enable mode. Please ensure you pass " \
"the 'secret' argument to ConnectHandler."
raise ValueError(msg)
return output
def save_config(self, cmd='save config flash', confirm=False):
"""Saves configuration."""
return super(MrvOptiswitchSSH, self).save_config(cmd=cmd, confirm=confirm)
|
Python
| 0
|
@@ -433,22 +433,31 @@
self.
-enable
+set_base_prompt
()%0A
@@ -460,39 +460,30 @@
self.
-set_base_prompt
+enable
()%0A s
@@ -608,16 +608,47 @@
factor)%0A
+ self.set_base_prompt()%0A
|
a813f7dc391834d695b513ec6acd0dede49291de
|
Add text and path to FileNotFound exception.
|
pyedgar/utilities/forms.py
|
pyedgar/utilities/forms.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Utilities for interacting with edgar forms.
"""
import os
import re
import logging
from . import plaintext
from .htmlparse import RE_HTML_TAGS, convert_html_to_text, html_ent_re_sub
from .. import exceptions as EX
__logger = logging.getLogger(__name__)
RE_DOC_TAG_OPEN = re.compile('<DOCUMENT>')
RE_DOC_TAG_CLOSE = re.compile('</DOCUMENT>')
RE_TEXT_TAG_OPEN = re.compile('<TEXT>')
RE_TEXT_TAG_CLOSE = re.compile('</TEXT>')
RE_HEADER_TAG = re.compile(r'^<(?P<key>[^/][^>]*)>(?P<value>.+)$', re.M)
def get_all_headers(text, pos=0, endpos=None):
"""
Return dictionary of all <KEY>VALUE formatted headers in EDGAR documents.
Note this requires the daily feed version of the EDGAR files.
Dictionary keys are lowercase (`.lower()` is called), and stripped.
`pos` and `endpos` can be used to get headers for specific exhibits.
"""
if endpos is None:
endpos = len(text)
return {x.group(1).lower():x.group(2).strip()
for x in RE_HEADER_TAG.finditer(text, pos, endpos) if x}
def get_header(text, header, return_match=False, pos=0, endpos=None):
"""
Searches `text` for header formatted <`header`>VALUE\\n and returns VALUE.strip()
Note this requires the daily feed version of the EDGAR files.
`pos` and `endpos` can be used to get headers for specific exhibits.
"""
re_tag = re.compile(r'^<{}>(.+)$'.format(header), re.M | re.I)
if endpos is None:
endpos = len(text)
match = re_tag.search(text, pos, endpos)
value = match.group(1).strip() if match else ''
if return_match:
return value, match
return value
def get_form_with_header(file_path, form_type=None, buff_size=(2<<16) + 8):
"""
Reads file or string, returns:
>>> {'cik', 'form_type', 'filing_date', 'text':[]}
or None on failure.
"""
if not os.path.exists(file_path):
raise EX.FileNotFound
with open(file_path, encoding='utf-8', errors='ignore',
buffering=buff_size) as fh:
text = fh.read(buff_size)
found_form = get_header(text, "TYPE")
if form_type is not None:
if not found_form or form_type.upper() != found_form.upper():
raise EX.WrongFormType
# Now find where the header stops (where first document starts)
doc_start = RE_DOC_TAG_OPEN.search(text)
# If no DOCUMENT tag found, this isn't an EDGAR form. ABORT!
if not doc_start:
raise EX.EDGARFilingFormatError
# This is what I care about now. Could be changed to `get_all_headers`
ret_dict = {'form_type': found_form.upper(),
'name': get_header(text, "CONFORMED-NAME",
endpos=doc_start.start()),
'sic': get_header(text, "ASSIGNED-SIC",
endpos=doc_start.start()),
'fye': get_header(text, "FISCAL-YEAR-END",
endpos=doc_start.start()),
'filing_date': get_header(text, "FILING-DATE",
endpos=doc_start.start()),
'filing_date_period': get_header(text, "PERIOD",
endpos=doc_start.start()),
'filing_date_change': get_header(text, "DATE-OF-FILING-DATE-CHANGE",
endpos=doc_start.start()),}
# Iteratively loop through open file buffer, reading buff_size chunks
# until </DOCUMENT> tag is found. There is a chance that the tag could
# be split across chunks, but it's a cost I'm willing to accept.
chunks = [text]
while not RE_DOC_TAG_CLOSE.search(chunks[-1]):
text = fh.read(buff_size)
if not text: # prevent infinite loop, text is null when EOF reached
break
chunks.append(text)
# Now put all those chunks together.
text = "".join(chunks)
st = RE_DOC_TAG_OPEN.search(text)
if not st:
return text
en = RE_DOC_TAG_CLOSE.search(text, st.end()) # start searching after start
if not en:
return text[st.end()]
return text[st.end():en.start()]
def get_form(file_path):
"""
Reads file at file_path and returns form between <TEXT> and </TEXT> tags.
"""
text = get_form_with_header(file_path)
if not text:
return ''
st = RE_TEXT_TAG_OPEN.search(text)
if not st:
return text
en = RE_TEXT_TAG_CLOSE.search(text, st.end())
if not en:
return text[st.end()]
return text[st.end():en.start()]
def get_plaintext(path, unwrap=True, document_width=150):
"""
Get the plaintext version of an edgar filing.
Assumes the first exhibit in the full filing text document.
If HTML, uses w3m linux program to parse into plain text.
If `unwrap`, also unwraps paragraphs so each paragraph is on one line.
:param string path: Full path to form.
:param bool unwrap: Whether to call `plaintext.unwrap_plaintext` on document.
:param int document_width: How wide the plaintext will be. Used in unwrapping.
:return: Plain text representation of file.
:rtype: string
"""
text = get_form(path)
return convert_html_to_text(text, unwrap=unwrap, document_width=document_width)
|
Python
| 0
|
@@ -1941,16 +1941,61 @@
NotFound
+(%22File %7B%7D does not exist.%22.format(file_path))
%0A%0A wi
|
d24a8db471cc9a415e3e2081e702199990bd6ac4
|
Add option to configure plot's linewidth
|
pyexperiment/utils/plot.py
|
pyexperiment/utils/plot.py
|
"""Provides setup for matplotlib figures
Written by Peter Duerr.
"""
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
import matplotlib
from matplotlib import pyplot as plt
def setup_matplotlib(font_size=14,
label_size=14,
use_tex=True):
"""Setup basic style for matplotlib figures
"""
font_size = int(font_size)
font = {'family': 'normal',
'weight': 'normal',
'size': font_size}
# ** is elegant here
matplotlib.rc('font', **font) # pylint:disable=W0142
matplotlib.rc('text', usetex=use_tex)
label_size = int(label_size)
matplotlib.rc('xtick', labelsize=label_size)
matplotlib.rc('ytick', labelsize=label_size)
def quit_figure_on_key(key, figure=None):
"""Add handler to figure (defaults to current figure) that closes it
on a key press event.
"""
def quit_on_keypress(event):
"""Quit the figure on key press
"""
if event.key == key:
plt.close(event.canvas.figure)
if figure is None:
figure = plt.gcf()
figure.canvas.mpl_connect('key_press_event', quit_on_keypress)
def setup_figure(name):
"""Setup a figure that can be closed by pressing 'q' and saved by
pressing 's'.
"""
fig = plt.figure()
fig.canvas.set_window_title(name)
quit_figure_on_key('q', fig)
|
Python
| 0.000001
|
@@ -374,16 +374,50 @@
tex=True
+,%0A linewidth=2
):%0A %22
@@ -718,16 +718,64 @@
use_tex)
+%0A matplotlib.rc('lines', linewidth=linewidth)
%0A%0A la
|
9c40fa22c395b3d1dba800f0826606ecf314ddb2
|
test update
|
apps/pypi/tests/test_slurper.py
|
apps/pypi/tests/test_slurper.py
|
from django.template.defaultfilters import slugify
from django.test import TestCase
from package.models import Package, Version
from pypi.slurper import Slurper
TEST_PACKAGE_NAME = 'Django'
TEST_PACKAGE_VERSION = '1.2.5'
TEST_PACKAGE_REPO_NAME = 'django-uni-form'
class SlurpAllTests(TestCase):
def test_get_latest_version_number(self):
slurper = Slurper(TEST_PACKAGE_NAME)
version = slurper.get_latest_version_number(TEST_PACKAGE_NAME)
self.assertEquals(version, TEST_PACKAGE_VERSION)
def test_get_or_create_package(self):
slurper = Slurper(TEST_PACKAGE_NAME)
version = slurper.get_latest_version_number(TEST_PACKAGE_NAME)
package, created = slurper.get_or_create_package(TEST_PACKAGE_NAME, version)
self.assertTrue(created)
self.assertTrue(isinstance(package, Package))
self.assertEquals(package.title, TEST_PACKAGE_NAME)
self.assertEquals(package.slug, slugify(TEST_PACKAGE_NAME))
def test_get_or_create_with_repo(self):
slurper = Slurper(TEST_PACKAGE_REPO_NAME)
version = slurper.get_latest_version_number(TEST_PACKAGE_REPO_NAME)
package, created = slurper.get_or_create_package(TEST_PACKAGE_REPO_NAME, version)
self.assertTrue(created)
self.assertTrue(isinstance(package, Package))
self.assertEquals(package.title, TEST_PACKAGE_REPO_NAME)
self.assertEquals(package.slug, slugify(TEST_PACKAGE_REPO_NAME))
def test_check_versions(self):
slurper = Slurper(TEST_PACKAGE_REPO_NAME)
version = slurper.get_latest_version_number(TEST_PACKAGE_REPO_NAME)
# make me a package (Actually, make me a billionare)
slurper.get_or_create_package(TEST_PACKAGE_REPO_NAME, version)
# fetch the package for testing
package = Package.objects.get(title=TEST_PACKAGE_REPO_NAME)
self.assertTrue(package.pypi_downloads > 1000)
|
Python
| 0.000001
|
@@ -215,11 +215,9 @@
'1.
-2.5
+3
'%0ATE
|
fe797b35d4e9b3f623d60b42f38efe3bf9ac3705
|
test para vistas de charlas
|
apps/votos/tests/tests_views.py
|
apps/votos/tests/tests_views.py
|
# -*- coding: utf-8 -*-
import unittest
from test_plus.test import TestCase
from ..factories.user import UserFactory
class ViewsTestCase(TestCase):
def setUp(self):
self.user = UserFactory()
def test_posibles_eventos(self):
self.get('index')
self.response_200()
def test_agendados_eventos(self):
self.get('agendado')
self.response_200()
def test_finalizados_eventos(self):
self.get('finalizado')
self.response_200()
def test_get_registrar_charla(self):
self.assertLoginRequired('registrar_charla')
def test_post_registrar_charla(self):
with self.login(username=self.user.username, password='1234'):
response = self.post('registrar_charla',
data={"titulo": "charla 1",
"descripcion": "descripcion 1"})
self.response_200(response)
@unittest.expectedFailure
def test_fail_post_registrar_charla(self):
with self.login(username=self.user.username, password='1234'):
response = self.post('registrar_charla',
data={"titulo": "charla 1"})
self.response_200(response)
def test_user_name_in_index(self):
with self.login(username=self.user.username, password='1234'):
response = self.get("index")
self.assertContains(response,
'<span class="truncate">¡Hola! @{}</span>'.format(self.user.username),
status_code=200)
class ViewTemplateTestCase(TestCase):
def setUp(self):
self.user = UserFactory()
def test_index_template(self):
response = self.get("index")
self.assertTemplateUsed(response,"charla/index.html")
def test_get_registrar_charla(self):
with self.login(username=self.user.username, password='1234'):
response = self.get('registrar_charla')
self.assertTemplateUsed(response, "charla/registrar.html")
|
Python
| 0
|
@@ -113,16 +113,115 @@
Factory%0A
+from ..factories.charla import CharlaFactory%0A%0Afrom .. import constants%0Afrom ..models import Charla%0A
%0A%0Aclass
@@ -319,16 +319,13 @@
est_
-posibles
+index
_eve
@@ -1026,312 +1026,8 @@
e)%0A%0A
- @unittest.expectedFailure%0A def test_fail_post_registrar_charla(self):%0A with self.login(username=self.user.username, password='1234'):%0A response = self.post('registrar_charla',%0A data=%7B%22titulo%22: %22charla 1%22%7D)%0A self.response_200(response)%0A%0A
@@ -1369,16 +1369,1710 @@
=200)%0A%0A%0A
+class TestCharlaView(TestCase):%0A%0A def test_charlas_posibles(self):%0A CharlaFactory(estado=constants.ESTAOO_FINALIZADO)%0A qs = %5BCharlaFactory(estado=constants.ESTADO_POSIBLE)%5D%0A self.get('index')%0A charlas = self.get_context('charlas')%0A self.assertEqual(len(qs), charlas.count())%0A for i,model in enumerate(charlas):%0A self.assertEqual(qs%5Bi%5D, model)%0A%0A def test_charlas_seleccionadas(self):%0A CharlaFactory(estado=constants.ESTAOO_FINALIZADO)%0A qs = %5BCharlaFactory(estado=constants.ESTADO_AGENDADO),%0A CharlaFactory(estado=constants.ESTADO_POSIBLE)%5D%0A self.get('index')%0A charlas = self.get_context('charlas')%0A self.assertEqual(len(qs), charlas.count())%0A for i,model in enumerate(charlas):%0A self.assertEqual(qs%5Bi%5D, model)%0A%0A def test_charlas_agendadas(self):%0A CharlaFactory(estado=constants.ESTAOO_FINALIZADO)%0A CharlaFactory(estado=constants.ESTADO_POSIBLE)%0A qs = %5BCharlaFactory(estado=constants.ESTADO_AGENDADO)%5D%0A self.get('agendado')%0A charlas = self.get_context('charlas')%0A self.assertEqual(len(qs), charlas.count())%0A for i,model in enumerate(charlas):%0A self.assertEqual(qs%5Bi%5D, model)%0A%0A def test_charlas_finalizadas(self):%0A CharlaFactory(estado=constants.ESTADO_AGENDADO)%0A CharlaFactory(estado=constants.ESTADO_POSIBLE)%0A qs = %5BCharlaFactory(estado=constants.ESTAOO_FINALIZADO)%5D%0A self.get('finalizado')%0A charlas = self.get_context('charlas')%0A self.assertEqual(len(qs), charlas.count())%0A for i,model in enumerate(charlas):%0A self.assertEqual(qs%5Bi%5D, model)%0A%0A%0A
class Vi
|
fe3bb9440a46ae626c9bfd34882f3ad5823d7396
|
drop unnecessary conn assignment
|
python/libvirt-override.py
|
python/libvirt-override.py
|
#
# Manually written part of python bindings for libvirt
#
# On cygwin, the DLL is called cygvirtmod.dll
try:
import libvirtmod
except ImportError, lib_e:
try:
import cygvirtmod as libvirtmod
except ImportError, cyg_e:
if str(cyg_e).count("No module named"):
raise lib_e
import types
# The root of all libvirt errors.
class libvirtError(Exception):
def __init__(self, defmsg, conn=None, dom=None, net=None, pool=None, vol=None):
if dom is not None:
conn = dom._conn
elif net is not None:
conn = net._conn
elif pool is not None:
conn = pool._conn
elif vol is not None:
conn = vol._conn
# Never call virConnGetLastError().
# virGetLastError() is now thread local
err = virGetLastError()
if err is None:
msg = defmsg
else:
msg = err[2]
Exception.__init__(self, msg)
self.err = err
def get_error_code(self):
if self.err is None:
return None
return self.err[0]
def get_error_domain(self):
if self.err is None:
return None
return self.err[1]
def get_error_message(self):
if self.err is None:
return None
return self.err[2]
def get_error_level(self):
if self.err is None:
return None
return self.err[3]
def get_str1(self):
if self.err is None:
return None
return self.err[4]
def get_str2(self):
if self.err is None:
return None
return self.err[5]
def get_str3(self):
if self.err is None:
return None
return self.err[6]
def get_int1(self):
if self.err is None:
return None
return self.err[7]
def get_int2(self):
if self.err is None:
return None
return self.err[8]
#
# register the libvirt global error handler
#
def registerErrorHandler(f, ctx):
"""Register a Python written function to for error reporting.
The function is called back as f(ctx, error), with error
being a list of information about the error being raised.
Returns 1 in case of success."""
return libvirtmod.virRegisterErrorHandler(f,ctx)
def openAuth(uri, auth, flags):
ret = libvirtmod.virConnectOpenAuth(uri, auth, flags)
if ret is None:raise libvirtError('virConnectOpenAuth() failed')
return virConnect(_obj=ret)
#
# Return library version.
#
def getVersion (name = None):
"""If no name parameter is passed (or name is None) then the
version of the libvirt library is returned as an integer.
If a name is passed and it refers to a driver linked to the
libvirt library, then this returns a tuple of (library version,
driver version).
If the name passed refers to a non-existent driver, then you
will get the exception 'no support for hypervisor'.
Versions numbers are integers: 1000000*major + 1000*minor + release."""
if name is None:
ret = libvirtmod.virGetVersion ();
else:
ret = libvirtmod.virGetVersion (name);
if ret is None: raise libvirtError ("virGetVersion() failed")
return ret
#
# Invoke an EventHandle callback
#
def eventInvokeHandleCallback (watch, fd, event, callback, opaque):
"""
Invoke the Event Impl Handle Callback in C
"""
libvirtmod.virEventInvokeHandleCallback(watch, fd, event, callback, opaque);
#
# Invoke an EventTimeout callback
#
def eventInvokeTimeoutCallback (timer, callback, opaque):
"""
Invoke the Event Impl Timeout Callback in C
"""
libvirtmod.virEventInvokeTimeoutCallback(timer, callback, opaque);
|
Python
| 0
|
@@ -474,245 +474,8 @@
):%0A%0A
- if dom is not None:%0A conn = dom._conn%0A elif net is not None:%0A conn = net._conn%0A elif pool is not None:%0A conn = pool._conn%0A elif vol is not None:%0A conn = vol._conn%0A%0A
|
a961e11c5b3666f2504cf2a0d46028b5957cb9bf
|
Fix doctest
|
qnet/misc/testing_tools.py
|
qnet/misc/testing_tools.py
|
"""Collection of routines needed for testing. This includes proto-fixtures,
i.e. routines that should be imported and then turned into a fixture with the
pytest.fixture decorator.
See <https://pytest.org/latest/fixture.html>
"""
import os
from glob import glob
from collections import OrderedDict
from distutils import dir_util
from qnet.misc.trajectory_data import TrajectoryData
def datadir(tmpdir, request):
'''Proto-fixture responsible for searching a folder with the same name of
test module and, if available, moving all contents to a temporary directory
so tests can use them freely.
In any test, import the datadir routine and turn it into a fixture:
>>> import pytest
>>> import qnet.misc.testing
>>> datadir = pytest.fixture(qnet.misc.testing.datadir)
'''
# http://stackoverflow.com/questions/29627341/pytest-where-to-store-expected-data
filename = request.module.__file__
test_dir, _ = os.path.splitext(filename)
if os.path.isdir(test_dir):
dir_util.copy_tree(test_dir, str(tmpdir))
return str(tmpdir)
def qsd_traj(datadir, folder, seed):
"""Return a proto-fixture that returns a TrajectoryData instance based on
all the *.out file in the given folder (relative to the test datadir), and
with the given seed.
The returned function should be turned into a fixture:
>>> import pytest
>>> import qnet.misc.testing
>>> datadir = pytest.fixture(qnet.misc.testing.datadir)
>>> traj1 = pytest.fixture(qsd_traj(datadir, 'traj1', 102121))
"""
def proto_fixture(datadir):
operators = OrderedDict()
datafiles = sorted(glob(os.path.join(datadir, folder, '*.out')))
assert len(datafiles) >0, "No files *.out in %s"%folder
for file in datafiles:
op_name = os.path.splitext(os.path.split(file)[1])[0]
operators[op_name] = file
return TrajectoryData.from_qsd_data(operators, seed=seed)
import pytest # local import, so that qnet can be installed w/o pytest
return proto_fixture
|
Python
| 0.000002
|
@@ -718,32 +718,38 @@
net.misc.testing
+_tools
%0A %3E%3E%3E datadir
@@ -775,32 +775,38 @@
net.misc.testing
+_tools
.datadir)%0A ''
@@ -1419,16 +1419,75 @@
.testing
+_tools%0A %3E%3E%3E from qnet.misc.testing_tools import qsd_traj
%0A %3E%3E%3E
@@ -1529,16 +1529,22 @@
.testing
+_tools
.datadir
|
f18e26b37f06ff1d591b51351da07fca6c3c3d00
|
Enable gzip
|
rnacentral/rnacentral/settings.py
|
rnacentral/rnacentral/settings.py
|
# Django settings for rnacentral project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': '', # Or path to database file if using sqlite3.
# The following settings are not used with sqlite3:
'USER': '',
'PASSWORD': '',
'HOST': '', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
'PORT': '', # Set to empty string for default.
}
}
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'Europe/London'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-gb'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = False
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
'compressor.finders.CompressorFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = ''
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
'debug_toolbar.middleware.DebugToolbarMiddleware', # django-debug-toolbar
'maintenancemode.middleware.MaintenanceModeMiddleware', # django-maintenance
)
ROOT_URLCONF = 'rnacentral.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'rnacentral.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.comments',
'django.contrib.humanize',
# 'south',
'haystack',
'portal',
'rest_framework',
'debug_toolbar',
'compressor',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
)
HAYSTACK_CONNECTIONS = {
'default': {
'ENGINE': 'haystack.backends.simple_backend.SimpleEngine',
},
}
SESSION_SERIALIZER = 'django.contrib.sessions.serializers.JSONSerializer'
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
# django rest framework
REST_FRAMEWORK = {
# Use hyperlinked styles by default.
# Only used if the `serializer_class` attribute is not set on a view.
'DEFAULT_MODEL_SERIALIZER_CLASS':
'rest_framework.serializers.HyperlinkedModelSerializer',
# Use Django's standard `django.contrib.auth` permissions,
# or allow read-only access for unauthenticated users.
'DEFAULT_PERMISSION_CLASSES': [
'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly'
]
}
# django-debug-toolbar
DEBUG_TOOLBAR_PANELS = (
'debug_toolbar.panels.version.VersionDebugPanel',
'debug_toolbar.panels.timer.TimerDebugPanel',
'debug_toolbar.panels.settings_vars.SettingsVarsDebugPanel',
'debug_toolbar.panels.headers.HeaderDebugPanel',
'debug_toolbar.panels.request_vars.RequestVarsDebugPanel',
'debug_toolbar.panels.template.TemplateDebugPanel',
'debug_toolbar.panels.sql.SQLDebugPanel',
'debug_toolbar.panels.signals.SignalDebugPanel',
'debug_toolbar.panels.logger.LoggingPanel',
)
# django-maintenance
MAINTENANCE_MODE = False
try:
from local_settings import *
except ImportError, e:
pass
|
Python
| 0.000002
|
@@ -3310,24 +3310,69 @@
CLASSES = (%0A
+ 'django.middleware.gzip.GZipMiddleware',%0A
'django.
|
a6b49b92bd942655c0fe9a1c745e53ea19e070b5
|
create a new django custom tag to replace a substring in a global string
|
src/alfanous-django/wui/templatetags/custom_filters.py
|
src/alfanous-django/wui/templatetags/custom_filters.py
|
'''
Created on Dec 29, 2012
@author: assem
'''
from django.template import Library
register = Library()
@register.filter
def get_range( value ):
""" make a range from a number starting of 1 """
return range( 1, value + 1 )
@register.filter
def space_split( str ):
""" split a string counting on spaces """
return str.split()
@register.simple_tag
def build_search_link( params, query, page, filter ):
""" build a search link based on a new query
usage: {% build_search_link params query filter %}link</a>
"""
# create a mutuable params object
new_params = {}
for k,v in params.items():
new_params[k]=v
# update params
new_params["page"] = page
new_params["sortedby"] = "mushaf"
if filter == "True" and params["query"] != query:
new_params["query"] = "(" + params["query"] + ") + " + query;
else:
new_params["query"] = query;
return build_params( new_params )
def build_params(params):
""" Concatenate the params to build a url GET request """
get_request = ""
for k, v in params.items():
get_request = get_request + unicode( k ) + "=" + unicode( v ) + "&"
return get_request[:-1]
|
Python
| 0.000777
|
@@ -332,16 +332,205 @@
plit()%0A%0A
+%0A@register.simple_tag%0Adef string_replace( string, oldword, newword ):%0A %22%22%22 replace all occurrences of oldword in string by newword %22%22%22%0A return string.replace( oldword, newword )%0A%0A%0A
@registe
@@ -1135,16 +1135,17 @@
rams )%0A%0A
+%0A
def buil
|
65e412bae8fed010048815f1ab0007aae2a214af
|
refactor report format to return array of the issue format
|
mythril/analysis/report.py
|
mythril/analysis/report.py
|
"""This module provides classes that make up an issue report."""
import logging
import json
import operator
from jinja2 import PackageLoader, Environment
import _pysha3 as sha3
import hashlib
from mythril.solidity.soliditycontract import SolidityContract
from mythril.analysis.swc_data import SWC_TO_TITLE
from mythril.support.source_support import Source
log = logging.getLogger(__name__)
class Issue:
"""Representation of an issue and its location."""
def __init__(
self,
contract,
function_name,
address,
swc_id,
title,
bytecode,
gas_used=(None, None),
severity=None,
description_head="",
description_tail="",
debug="",
):
"""
:param contract:
:param function_name:
:param address:
:param swc_id:
:param title:
:param bytecode:
:param gas_used:
:param _type:
:param description:
:param debug:
"""
self.title = title
self.contract = contract
self.function = function_name
self.address = address
self.description_head = description_head
self.description_tail = description_tail
self.description = "%s\n%s" % (description_head, description_tail)
self.severity = severity
self.debug = debug
self.swc_id = swc_id
self.min_gas_used, self.max_gas_used = gas_used
self.filename = None
self.code = None
self.lineno = None
self.source_mapping = None
try:
keccak = sha3.keccak_256()
keccak.update(bytes.fromhex(bytecode))
self.bytecode_hash = "0x" + keccak.hexdigest()
except ValueError:
log.debug(
"Unable to change the bytecode to bytes. Bytecode: {}".format(bytecode)
)
self.bytecode_hash = ""
@property
def as_dict(self):
"""
:return:
"""
issue = {
"title": self.title,
"swc-id": self.swc_id,
"contract": self.contract,
"description": self.description,
"function": self.function,
"severity": self.severity,
"address": self.address,
"debug": self.debug,
"min_gas_used": self.min_gas_used,
"max_gas_used": self.max_gas_used,
"sourceMap": self.source_mapping,
}
if self.filename and self.lineno:
issue["filename"] = self.filename
issue["lineno"] = self.lineno
if self.code:
issue["code"] = self.code
return issue
def add_code_info(self, contract):
"""
:param contract:
"""
if self.address and isinstance(contract, SolidityContract):
codeinfo = contract.get_source_info(
self.address, constructor=(self.function == "constructor")
)
self.filename = codeinfo.filename
self.code = codeinfo.code
self.lineno = codeinfo.lineno
self.source_mapping = codeinfo.solc_mapping
else:
self.source_mapping = self.address
class Report:
"""A report containing the content of multiple issues."""
environment = Environment(
loader=PackageLoader("mythril.analysis"), trim_blocks=True
)
def __init__(self, verbose=False, source=None):
"""
:param verbose:
"""
self.issues = {}
self.verbose = verbose
self.solc_version = ""
self.meta = {}
self.source = source or Source()
def sorted_issues(self):
"""
:return:
"""
issue_list = [issue.as_dict for key, issue in self.issues.items()]
return sorted(issue_list, key=operator.itemgetter("address", "title"))
def append_issue(self, issue):
"""
:param issue:
"""
m = hashlib.md5()
m.update((issue.contract + str(issue.address) + issue.title).encode("utf-8"))
self.issues[m.digest()] = issue
def as_text(self):
"""
:return:
"""
name = self._file_name()
template = Report.environment.get_template("report_as_text.jinja2")
return template.render(
filename=name, issues=self.sorted_issues(), verbose=self.verbose
)
def as_json(self):
"""
:return:
"""
result = {"success": True, "error": None, "issues": self.sorted_issues()}
return json.dumps(result, sort_keys=True)
def as_swc_standard_format(self):
"""Format defined for integration and correlation.
:return:
"""
_issues = []
source_list = []
for key, issue in self.issues.items():
if issue.bytecode_hash not in source_list:
idx = len(source_list)
source_list.append(issue.bytecode_hash)
else:
idx = source_list.index(issue.bytecode_hash)
try:
title = SWC_TO_TITLE[issue.swc_id]
except KeyError:
title = "Unspecified Security Issue"
_issues.append(
{
"swcID": "SWC-" + issue.swc_id,
"swcTitle": title,
"description": {
"head": issue.description_head,
"tail": issue.description_tail,
},
"severity": issue.severity,
"locations": [{"sourceMap": "%d:1:%d" % (issue.address, idx)}],
"extra": {},
}
)
result = {
"issues": _issues,
"sourceType": "raw-bytecode",
"sourceFormat": "evm-byzantium-bytecode",
"sourceList": source_list,
"meta": {},
}
return json.dumps(result, sort_keys=True)
def as_markdown(self):
"""
:return:
"""
filename = self._file_name()
template = Report.environment.get_template("report_as_markdown.jinja2")
return template.render(
filename=filename, issues=self.sorted_issues(), verbose=self.verbose
)
def _file_name(self):
"""
:return:
"""
if len(self.issues.values()) > 0:
return list(self.issues.values())[0].filename
|
Python
| 0.000006
|
@@ -5736,16 +5736,17 @@
esult =
+%5B
%7B%0A
@@ -5930,32 +5930,33 @@
%22: %7B%7D,%0A %7D
+%5D
%0A%0A return
|
728cfe8e3c40ecd4e0128030d1d66864816626c8
|
use single pipe to avoid problems with Jenkins reading them concurrently (#552)
|
ros_buildfarm/catkin_workspace.py
|
ros_buildfarm/catkin_workspace.py
|
# Copyright 2014-2016 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import shutil
import subprocess
def ensure_workspace_exists(workspace_root):
# ensure that workspace exists
assert os.path.exists(workspace_root), \
"Workspace root '%s' does not exist" % workspace_root
source_space = os.path.join(workspace_root, 'src')
assert os.path.exists(source_space), \
"Source space '%s' does not exist" % source_space
def clean_workspace(workspace_root):
# clean up build, devel and install spaces
build_space = os.path.join(workspace_root, 'build_isolated')
if os.path.exists(build_space):
shutil.rmtree(build_space)
devel_space = os.path.join(workspace_root, 'devel_isolated')
if os.path.exists(devel_space):
shutil.rmtree(devel_space)
install_space = os.path.join(workspace_root, 'install_isolated')
if os.path.exists(install_space):
shutil.rmtree(install_space)
test_results_dir = os.path.join(workspace_root, 'test_results')
if os.path.exists(test_results_dir):
shutil.rmtree(test_results_dir)
def call_catkin_make_isolated(
rosdistro_name, workspace_root, args, parent_result_spaces=None):
# command to run
script_name = 'catkin_make_isolated'
# use script from source space if available
source_space = os.path.join(workspace_root, 'src')
script_from_source = os.path.join(
source_space, 'catkin', 'bin', script_name)
if os.path.exists(script_from_source):
script_name = script_from_source
cmd = ' '.join(
['PYTHONIOENCODING=utf_8', 'PYTHONUNBUFFERED=1', script_name] + args)
# prepend setup files if available
if parent_result_spaces is None:
parent_result_spaces = ['/opt/ros/%s' % rosdistro_name]
for parent_result_space in reversed(parent_result_spaces):
setup_file = os.path.join(parent_result_space, 'setup.sh')
if os.path.exists(setup_file):
cmd = '. %s && %s' % (setup_file, cmd)
print("Invoking '%s' in '%s'" % (cmd, workspace_root))
return subprocess.call(cmd, cwd=workspace_root, shell=True)
|
Python
| 0
|
@@ -2631,16 +2631,25 @@
ss.call(
+%0A
cmd, cwd
@@ -2675,10 +2675,36 @@
ell=True
+, stderr=subprocess.STDOUT
)%0A
|
61ae89684c8eb916823d7a54ae4278df00e64459
|
Update plugin to v1.2.0 compatibility
|
flexget/plugins/metainfo/assume_quality.py
|
flexget/plugins/metainfo/assume_quality.py
|
from __future__ import unicode_literals, division, absolute_import
from flexget.plugin import register_plugin, priority, PluginError
from collections import namedtuple
import logging
import flexget.utils.qualities as qualities
log = logging.getLogger('assume_quality')
class AssumeQuality(object):
"""
Applies quality components to entries that match specified quality requirements.
When a quality is applied, any components which are unknown in the entry are filled from the applied quality.
Quality requirements are tested in order of increasing precision (ie "720p h264" is more precise than "1080p"
so gets tested first), and applied as matches are found. Using the simple configuration is the same as specifying
an "any" rule.
Examples::
assume_quality: 1080p webdl 10bit truehd
assume_quality:
hdtv: 720p
720p hdtv: 10bit
'!ac3 !mp3': flac
any: 720p h264
"""
schema = {
'oneOf': [
{'title':'simple config', 'type': 'string', 'format': 'quality'},
{'title':'advanced config', 'type': 'object', 'properties': {
'target': {'type': 'string', 'format': 'quality'},
'quality': {'type': 'string', 'format': 'quality'}
}
}
]
}
def precision(self, qualityreq):
p = 0
for component in qualityreq.components:
if component.acceptable: p += 8
if component.min: p += 4
if component.max: p += 4
if component.none_of: p += len(component.none_of)
#Still a long way from perfect, but probably good enough.
return p
def assume(self, entry, quality):
newquality = qualities.Quality()
log.debug('Current qualities: %s', entry.get('quality'))
for component in entry.get('quality').components:
qualitycomponent = getattr(quality, component.type)
log.debug('\t%s: %s vs %s', component.type, component.name, qualitycomponent.name)
if component.name != 'unknown':
log.debug('\t%s: keeping %s', component.type, component.name)
setattr(newquality, component.type, component)
elif qualitycomponent.name != 'unknown':
log.debug('\t%s: assuming %s', component.type, qualitycomponent.name)
setattr(newquality, component.type, qualitycomponent)
entry['assumed_quality'] = True
elif component.name == 'unknown' and qualitycomponent.name == 'unknown':
log.debug('\t%s: got nothing', component.type)
entry['quality'] = newquality
log.debug('Quality updated: %s', entry.get('quality'))
def on_task_start(self, task, config):
if isinstance(config, basestring): config = {'any': config}
assume = namedtuple('assume', ['target', 'quality'])
self.assumptions = []
for target, quality in config.items():
log.verbose('New assumption: %s is %s' % (target, quality))
try: target = qualities.Requirements(target)
except: raise PluginError('%s is not a valid quality. Forgetting assumption.' % target)
try: quality = qualities.get(quality)
except: raise PluginError('%s is not a valid quality. Forgetting assumption.' % quality)
self.assumptions.append(assume(target, quality))
self.assumptions.sort(key=lambda assumption: self.precision(assumption.target), reverse=True)
for assumption in self.assumptions:
log.debug('Target %s - Priority %s' % (assumption.target, self.precision(assumption.target)))
@priority(127) #run after metainfo_quality@128
def on_task_metainfo(self, task, config):
for entry in task.entries:
log.verbose('%s' % entry.get('title'))
for assumption in self.assumptions:
log.debug('Trying %s - %s' % (assumption.target, assumption.quality))
if assumption.target.allows(entry.get('quality')):
log.debug('Match: %s' % assumption.target)
self.assume(entry, assumption.quality)
log.verbose('New quality: %s', entry.get('quality'))
register_plugin(AssumeQuality, 'assume_quality', api_ver=2)
|
Python
| 0
|
@@ -64,74 +64,8 @@
ort%0A
-from flexget.plugin import register_plugin, priority, PluginError%0A
from
@@ -153,16 +153,75 @@
ualities
+%0Afrom flexget import plugin%0Afrom flexget.event import event
%0A%0Alog =
@@ -3139,32 +3139,39 @@
except: raise
+plugin.
PluginError('%25s
@@ -3304,16 +3304,23 @@
: raise
+plugin.
PluginEr
@@ -3705,16 +3705,23 @@
)%0A%0A @
+plugin.
priority
@@ -4284,16 +4284,76 @@
))%0A%0A
+@event('plugin.register')%0Adef register_plugin():%0A plugin.
register
_plu
@@ -4348,23 +4348,16 @@
register
-_plugin
(AssumeQ
|
8369d189e822fa7496864cac4ddc906bf7c05fe3
|
Convert gaphor/UML/classes/tests/test_interface.py to pytest
|
gaphor/UML/classes/tests/test_interface.py
|
gaphor/UML/classes/tests/test_interface.py
|
"""Test classes."""
from gaphor import UML
from gaphor.tests import TestCase
from gaphor.UML.classes.interface import Folded, InterfaceItem
class InterfaceTestCase(TestCase):
def test_interface_creation(self):
"""Test interface creation."""
iface = self.create(InterfaceItem, UML.Interface)
assert isinstance(iface.subject, UML.Interface)
def test_folded_interface_persistence(self):
"""Test folded interface saving/loading."""
iface = self.create(InterfaceItem, UML.Interface)
# note: assembly folded mode..
iface.folded = Folded.REQUIRED
data = self.save()
self.load(data)
interfaces = list(self.diagram.select(InterfaceItem))
assert len(interfaces) == 1
# ... gives provided folded mode on load;
# correct folded mode is determined by connections, which will be
# recreated later, i.e. required folded mode will be set when
# implementation connects to the interface and Folded.PROVIDED
# is equal to interfaces[0].folded
|
Python
| 0.999999
|
@@ -41,42 +41,8 @@
UML%0A
-from gaphor.tests import TestCase%0A
from
@@ -108,16 +108,20 @@
%0A%0Aclass
+Test
Interfac
@@ -125,26 +125,8 @@
face
-TestCase(TestCase)
:%0A
@@ -155,24 +155,30 @@
reation(self
+, case
):%0A %22
@@ -215,36 +215,36 @@
iface =
+ca
se
-lf
.create(Interfac
@@ -368,16 +368,22 @@
nce(self
+, case
):%0A
@@ -445,20 +445,20 @@
iface =
+ca
se
-lf
.create(
@@ -582,20 +582,20 @@
data =
+ca
se
-lf
.save()%0A
@@ -602,20 +602,20 @@
+ca
se
-lf
.load(da
@@ -645,20 +645,20 @@
= list(
+ca
se
-lf
.diagram
|
43cd47077b60ecb70b73883b635b9b06a7f873ac
|
add check against double redirects
|
glottolog3/scripts/check_db_consistency.py
|
glottolog3/scripts/check_db_consistency.py
|
#!/usr/bin/env python
# check_db_consistency.py - test for some application-specific db invariants
import itertools
import sqlalchemy as sa
import six
from clld.scripts.util import parsed_args
from clld.db.meta import DBSession
from glottolog3.models import Languoid, LanguoidLevel, LanguoidStatus,\
TreeClosureTable, Language
class CheckMeta(type):
__instances = []
def __init__(self, name, bases, dct):
super(CheckMeta, self).__init__(name, bases, dct)
if 'invalid_query' in dct:
self.__instances.append(self)
def __iter__(self):
return iter(self.__instances)
class Check(six.with_metaclass(CheckMeta, object)):
def __init__(self):
self.query = self.invalid_query(DBSession)
self.invalid_count = self.query.count()
self.invalid = self.query.all()
print(self)
def display(self, number=25):
ids = (i.id for i in itertools.islice(self.invalid, number))
cont = ', ...' if number < self.invalid_count else ''
print(' %s%s' % (', '.join(ids), cont))
def __str__(self):
if self.invalid_count:
msg = '%d invalid\n (violating %s)' % (self.invalid_count, self.__doc__)
else:
msg = 'OK'
return '%s: %s' % (self.__class__.__name__, msg)
class DialectFather(Check):
"""Father of a dialect is a language or dialect."""
def invalid_query(self, session):
return session.query(Languoid)\
.filter_by(active=True, level=LanguoidLevel.dialect)\
.join(Languoid.father, aliased=True)\
.filter(Languoid.level.notin_(
[LanguoidLevel.language, LanguoidLevel.dialect]))\
.order_by(Languoid.id)
class FamilyChildren(Check):
"""Family has at least one subfamily or language."""
def invalid_query(self, session):
return session.query(Languoid)\
.filter_by(active=True, level=LanguoidLevel.family)\
.filter(~Languoid.children.any(sa.and_(Languoid.active == True,
Languoid.level.in_([LanguoidLevel.family, LanguoidLevel.language]))))\
.order_by(Languoid.id)
class FamilyLanguages(Check):
"""Family has at least two languages."""
def invalid_query(self, session):
child = sa.orm.aliased(Languoid, flat=True)
return session.query(Languoid)\
.filter_by(active=True, level=LanguoidLevel.family)\
.join(TreeClosureTable, TreeClosureTable.parent_pk == Languoid.pk)\
.outerjoin(child, sa.and_(
TreeClosureTable.child_pk == child.pk,
TreeClosureTable.depth > 0,
child.level == LanguoidLevel.language))\
.group_by(Language.pk, Languoid.pk)\
.having(sa.func.count(child.pk) < 2)\
.order_by(Languoid.id)
class IsolateInactive(Check):
"""Inactive languoids lack parent and children."""
def invalid_query(self, session):
return session.query(Languoid)\
.filter_by(active=False).filter(sa.or_(
Languoid.father_pk != None,
Languoid.family_pk != None,
Languoid.child_family_count != 0,
Languoid.child_language_count != 0,
Languoid.child_dialect_count != 0,
Languoid.children.any()))\
.order_by(Languoid.id)
class GlottologName(Check):
"""Languoid has its name as Glottolog identifier."""
def invalid_query(self, session):
return session.query(Languoid)\
.filter(~Languoid.identifiers.any(name=Languoid.name,
type=u'name', description=u'Glottolog'))\
.order_by(Languoid.id)
def main(args):
for cls in Check:
check = cls()
if check.invalid:
check.display()
if __name__ == '__main__':
main(parsed_args())
|
Python
| 0
|
@@ -223,16 +223,50 @@
BSession
+%0Afrom clld.db.models import Config
%0A%0Afrom g
@@ -3752,16 +3752,578 @@
d.id)%0A%0A%0A
+class RefRedirects(Check):%0A %22%22%22Redirects of reference ids target an unredirected id.%22%22%22%0A%0A def invalid_query(self, session):%0A return session.query(%0A sa.func.regexp_replace(Config.key, u'%5CD', '', u'g').label('id'),%0A sa.func.nullif(Config.value, u'__gone__').label('target'))%5C%0A .filter(Config.key.like(u'__Source_%25%25__'))%5C%0A .filter(session.query(sa.orm.aliased(Config))%5C%0A .filter_by(key=sa.func.format(u'__Source_%25s__', Config.value)).exists())%5C%0A .order_by('id', 'target')%0A%0A%0A
def main
@@ -4329,20 +4329,16 @@
n(args):
-
%0A for
|
03734b5f42a448e20f5926dd6ffc24cc40dc004e
|
Remove unused methods
|
src/foremast/pipeline/construct_pipeline_block_datapipeline.py
|
src/foremast/pipeline/construct_pipeline_block_datapipeline.py
|
# Foremast - Pipeline Tooling
#
# Copyright 2016 Gogo, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Construct a block section of Stages in a Spinnaker Pipeline."""
import copy
import logging
from pprint import pformat
from ..utils import generate_encoded_user_data, get_template
LOG = logging.getLogger(__name__)
def construct_pipeline_block_datapipeline(env='',
generated=None,
previous_env=None,
region='us-east-1',
settings=None,
pipeline_data=None):
"""Create the Pipeline JSON from template.
This handles the common repeatable patterns in a pipeline, such as
judgement, infrastructure, tagger and qe.
Args:
env (str): Deploy environment name, e.g. dev, stage, prod.
generated (gogoutils.Generator): Gogo Application name generator.
previous_env (str): The previous deploy environment to use as
Trigger.
region (str): AWS Region to deploy to.
settings (dict): Environment settings from configurations.
Returns:
dict: Pipeline JSON template rendered with configurations.
"""
LOG.info('%s block for [%s].', env, region)
if env.startswith('prod'):
template_name = 'pipeline/pipeline_{}_datapipeline.json.j2'.format(env)
else:
template_name = 'pipeline/pipeline_stages_datapipeline.json.j2'
LOG.debug('%s info:\n%s', env, pformat(settings))
gen_app_name = generated.app_name()
data = copy.deepcopy(settings)
data['app'].update({
'appname': gen_app_name,
'repo_name': generated.repo,
'group_name': generated.project,
'environment': env,
'region': region,
'previous_env': previous_env,
'promote_restrict': pipeline_data['promote_restrict'],
'owner_email': pipeline_data['owner_email']
})
LOG.debug('Block data:\n%s', pformat(data))
pipeline_json = get_template(template_file=template_name, data=data)
return pipeline_json
|
Python
| 0.000034
|
@@ -765,36 +765,8 @@
port
- generate_encoded_user_data,
get
|
a842439edb47524b64345d3a893199f3b92f2b14
|
Fix top-level domain extraction from site name.
|
google_analytics/templatetags/analytics.py
|
google_analytics/templatetags/analytics.py
|
from django import template
from django.db import models
from django.contrib.sites.models import Site
from django.template import Context, loader
register = template.Library()
Analytics = models.get_model('google_analytics', 'analytic')
def do_get_analytics(parser, token):
try:
# split_contents() knows not to split quoted strings.
tag_name, code = token.split_contents()
except ValueError:
code = None
if not code:
current_site = Site.objects.get_current()
else:
if not (code[0] == code[-1] and code[0] in ('"', "'")):
raise template.TemplateSyntaxError, "%r tag's argument should be in quotes" % tag_name
code = code[1:-1]
current_site = None
return AnalyticsNode(current_site, code)
class AnalyticsNode(template.Node):
def __init__(self, site=None, code=None):
self.site = site
self.code = code
def render(self, context):
content = ''
if self.site:
code_set = Analytics.objects.filter(site=self.site)
if code_set:
code = code_set[0].analytics_code
else:
return ''
elif self.code:
code = self.code
else:
return ''
if code.strip() != '':
t = loader.get_template('google_analytics/analytics_template.html')
c = Context({
'analytics_code': code,
'domain': '.'.join(self.site.domain.split('.')[2:])
})
return t.render(c)
else:
return ''
register.tag('analytics', do_get_analytics)
|
Python
| 0
|
@@ -1494,16 +1494,17 @@
it('.')%5B
+-
2:%5D)%0A
|
0d2f35ddc27cf4c7155a4d1648c0bbfe0ff3a528
|
Fix the bool name in the array API namespace
|
numpy/_array_api/dtypes.py
|
numpy/_array_api/dtypes.py
|
from .. import int8, int16, int32, int64, uint8, uint16, uint32, uint64, float32, float64, bool
__all__ = ['int8', 'int16', 'int32', 'int64', 'uint8', 'uint16', 'uint32', 'uint64', 'float32', 'float64', 'bool']
|
Python
| 0.99996
|
@@ -82,17 +82,69 @@
float64
-,
+%0A# Note: This name is changed%0Afrom .. import bool_ as
bool%0A%0A_
|
ff611c4a41cdbaf9b3306650a3d1bc4177b23bad
|
Update sst.py
|
banpei/sst.py
|
banpei/sst.py
|
import numpy as np
from banpei.base.model import Model
class SST(Model):
def __init__(self):
pass
def _extract_matrix(self, data, start, end, w):
row = w
column = end - start + 1
matrix = np.empty((row, column))
i = 0
for t in range(start, end+1):
matrix[:, i] = data[t-1:t-1+row]
i += 1
return matrix
def detect(self, data, w, m=2, k=None, L=None):
"""
Parameters
----------
data : array_like
Input array or object that can be converted to an array.
w : int
Window size
m : int
Number of basis vectors
k : int
Number of columns for the trajectory and test matrices
L : int
Lag time
Returns
-------
Numpy array contains the degree of change.
"""
# Set variables
data = self.convert_to_nparray(data)
if k is None:
k = w // 2
if L is None:
L = k // 2
T = len(data)
# Calculation range
start_cal = k + w
end_cal = T - L + 1
# Calculate the degree of change
change_scores = np.zeros(len(data))
for t in range(start_cal, end_cal + 1):
# Trajectory matrix
start_tra = t - w - k + 1
end_tra = t - w
tra_matrix = self._extract_matrix(data, start_tra, end_tra, w)
# Test matrix
start_test = start_tra + L
end_test = end_tra + L
test_matrix = self._extract_matrix(data, start_test, end_test, w)
# Singular value decomposition(SVD)
U_tra, _, _ = np.linalg.svd(tra_matrix, full_matrices=False)
U_test, _, _ = np.linalg.svd(test_matrix, full_matrices=False)
U_tra_m = U_tra[:, :m]
U_test_m = U_test[:, :m]
_, s, _ = np.linalg.svd(np.dot(U_tra_m.T, U_test_m), full_matrices=False)
change_scores[t] = 1 - s[0] ** 2
return change_scores
|
Python
| 0.000001
|
@@ -110,290 +110,8 @@
ss%0A%0A
- def _extract_matrix(self, data, start, end, w):%0A row = w%0A column = end - start + 1%0A matrix = np.empty((row, column))%0A i = 0%0A for t in range(start, end+1):%0A matrix%5B:, i%5D = data%5Bt-1:t-1+row%5D%0A i += 1%0A%0A return matrix%0A%0A
@@ -1675,15 +1675,9 @@
-_, s, _
+s
= n
@@ -1729,32 +1729,50 @@
l_matrices=False
+, compute_uv=False
)%0A ch
@@ -1800,13 +1800,8 @@
s%5B0%5D
- ** 2
%0A%0A
@@ -1827,8 +1827,290 @@
_scores%0A
+%0A def _extract_matrix(self, data, start, end, w):%0A row = w%0A column = end - start + 1%0A matrix = np.empty((row, column))%0A i = 0%0A for t in range(start, end+1):%0A matrix%5B:, i%5D = data%5Bt-1:t-1+row%5D%0A i += 1%0A%0A return matrix%0A
|
ff268941bfc588e21a2f460c034e3c0a99837d23
|
Fix migration order (post-rebase)
|
migrations/versions/201502111317_233928da84b2_create_video_conference_rooms.py
|
migrations/versions/201502111317_233928da84b2_create_video_conference_rooms.py
|
"""Create video conference rooms
Revision ID: 233928da84b2
Revises: 50c2b5ee2726
Create Date: 2015-02-11 13:17:44.365589
"""
import sqlalchemy as sa
from alembic import op
from indico.core.db.sqlalchemy import PyIntEnum
from indico.core.db.sqlalchemy import UTCDateTime
from indico.modules.vc.models.vc_rooms import VCRoomLinkType, VCRoomStatus
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '233928da84b2'
down_revision = '50c2b5ee2726'
def upgrade():
op.create_table('vc_rooms',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('type', sa.String(), nullable=False),
sa.Column('name', sa.String(), nullable=False),
sa.Column('status', PyIntEnum(VCRoomStatus), nullable=False),
sa.Column('created_by_id', sa.Integer(), nullable=False, index=True),
sa.Column('created_dt', UTCDateTime, nullable=False),
sa.Column('modified_dt', UTCDateTime, nullable=True),
sa.Column('data', postgresql.JSON(), nullable=False),
sa.PrimaryKeyConstraint('id'),
schema='events')
op.create_table('vc_room_events',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('event_id', sa.Integer(), autoincrement=False, nullable=False, index=True),
sa.Column('vc_room_id', sa.Integer(), nullable=False, index=True),
sa.Column('link_type', PyIntEnum(VCRoomLinkType), nullable=False),
sa.Column('link_id', sa.String(), nullable=True),
sa.Column('show', sa.Boolean(), nullable=False),
sa.Column('data', postgresql.JSON(), nullable=False),
sa.ForeignKeyConstraint(['vc_room_id'], ['events.vc_rooms.id']),
sa.PrimaryKeyConstraint('id'),
schema='events')
def downgrade():
op.drop_table('vc_room_events', schema='events')
op.drop_table('vc_rooms', schema='events')
|
Python
| 0
|
@@ -470,27 +470,27 @@
ion = '5
-0c2b5ee2726
+583f647dff5
'%0A%0A%0Adef
|
d7cf4b8becbb0176872f0fc3feceaa097147ab9c
|
Update poly_dynamo.py
|
grassroot-nlu/databases/poly_dynamo.py
|
grassroot-nlu/databases/poly_dynamo.py
|
from __future__ import print_function
import boto3
import json
import decimal
from boto3.dynamodb.conditions import Key, Attr
from botocore.exceptions import ClientError
class DynamoDB(object):
def db_find(self, table):
x = []
for i in table.find():
x.append(i)
return x
def db_find_one(self, table, key_val):
val = key_val['uid']
return ddb_find(table, val)
def db_insert_one(self, table, doc):
create_new(table, doc)
def load_old_Text(self, key_val):
x = ddb_find('entries',key_val['uid'])[0]['payload']
y = x.replace("'",'"')
x = json.loads(y)
old_text = x['past_lives'][0]
return old_text
def find_previous_Entry(self, key_val):
return ddb_find('entries',key_val['uid'])[0]
def find_clean_save(self, key_val):
clean_and_save(key_val['uid'])
def update_DB(self, doc):
update('entries', doc['uid'], doc['parsed']['text'], str(doc))
e = ddb_find('entries', doc['uid'])
return e
def check_db(self, text):
table = dynamodb.Table('entries')
entries = table.scan()['Items']
for entry in entries:
if entry['text'] == text:
payload = entry['payload']
x = payload.replace("'",'"')
entry = json.loads(x)
if entry['parsed']['intent']['confidence'] > 0.6:
return entry
return False
#Helper class to convert a DynamoDB item to json
class DecimalEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, decimal.Decimal):
if o % 1 > 0:
return float(o)
else:
return int(o)
return super(DecimalEncoder, self).default(o)
dynamodb = boto3.resource('dynamodb', region_name='us-west-2', endpoint_url="http://localhost:8000")
def create_new(t_name, doc):
table = dynamodb.Table(t_name)
#if t_name == 'entries':
response = table.put_item(
Item={
'uid': doc['_id'],
'text': doc['text'],
'date': doc['date'],
'past_lives': doc['past_lives']
}
)
x = list(doc)
if 'payload' in x:
update(t_name, doc['_id'],doc['text'],doc['payload'])
print("PutItem succeeded")
return json.dumps(response, indent=4, cls=DecimalEncoder)
def get(t_name, uid):
table = dynamodb.Table(t_name)
try:
response = table.get_item(
Key={
'year': year,
'title': title
}
)
except ClientError as e:
print(e.response['Error']['Message'])
else:
item = response['Item']
print("GetItem succeeded:")
x = json.dumps(item, indent=4,cls=DecimalEncoder)
the_dict = json.loads(x)
return the_dict
def update(t_name, uid, text, p):
table = dynamodb.Table(t_name)
response = table.update_item(
Key={
'uid': uid,
'text': text
},
UpdateExpression="set payload = :p",
ExpressionAttributeValues={
':p': p
},
ReturnValues="UPDATED_NEW"
)
print("UpdateItem succeeded:")
print(json.dumps(response, indent=4, cls=DecimalEncoder))
def create_table(name):
table = dynamodb.create_table(
TableName=name,
KeySchema=[
{
'AttributeName': 'uid',
'KeyType': 'HASH' #Partition key
},
{
'AttributeName': 'text',
'KeyType': 'RANGE' #Sort key
}
],
AttributeDefinitions=[
{
'AttributeName': 'uid',
'AttributeType': 'S'
},
{
'AttributeName': 'text',
'AttributeType': 'S'
},
],
ProvisionedThroughput={
'ReadCapacityUnits': 10,
'WriteCapacityUnits': 10
}
)
print("Table status:", table.table_status)
def ddb_find(t_name, value):
table = dynamodb.Table(t_name)
response = table.query(
KeyConditionExpression=Key('uid').eq(value)
)
return response['Items']
def delete_table(table_name):
table = dynamodb.Table(table_name)
table.delete()
def clean_and_save(uid):
    """Strip training-irrelevant fields from an entry's NLU payload and save
    the result to the 'runtime_training_data' table.

    Bails out without saving when the parse produced no entities.
    """
    dirty = ddb_find('entries', uid)[0]['payload']
    # The payload was stored as a Python repr; swap quotes so it parses as
    # JSON. NOTE(review): this breaks on payloads containing apostrophes --
    # consider storing real JSON upstream.
    parsed = json.loads(dirty.replace("'", '"'))
    cleansed = parsed['parsed']
    # Flatten the intent object down to just its name when present.
    try:
        cleansed['intent'] = cleansed['intent']['name']
    except (KeyError, TypeError):
        # Missing or already-flat intent: keep as-is.
        pass
    if not cleansed['entities']:
        # Nothing to train on.
        return
    for entity in cleansed['entities']:
        if entity['entity'] != 'date_time':
            # Drop bookkeeping keys the trainer does not need;
            # pop(..., None) tolerates their absence.
            entity.pop('extractor', None)
            entity.pop('processors', None)
    record = {'_id': uid,
              'text': 'runtime_training_data',
              'date': str(datetime.datetime.now()),
              'past_lives': [],
              'payload': cleansed}
    create_new('runtime_training_data', record)
|
Python
| 0.000001
|
@@ -442,23 +442,16 @@
ne(self,
- table,
doc):%0A
@@ -468,21 +468,25 @@
ate_new(
-table
+'entries'
, doc)%0A
@@ -5295,8 +5295,9 @@
leansed)
+%0A
|
63c3a302d32ca85148703642d618a4f8c5f50f83
|
update debug settings
|
PhotoManagementSystem/PhotoManagementSystem/settings.py
|
PhotoManagementSystem/PhotoManagementSystem/settings.py
|
"""
Django settings for PhotoManagementSystem project.
Generated by 'django-admin startproject' using Django 1.8.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '$6#w&i!ng7e34urp0*g2l^1@@y=!bph3!4*#i$o-#fqi^d=$up'
# SECURITY WARNING: don't run with debug turned on in production!
if 'SERVER_SOFTWARE' in os.environ:
DEBUG = False
else:
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'PhotoManager',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'PhotoManagementSystem.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
'./PhotoManager/templates',
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'PhotoManagementSystem.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
# SQLite
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# MySQL
'''
if 'SERVER_SOFTWARE' in os.environ:
from sae.const import (
MYSQL_HOST, MYSQL_PORT, MYSQL_USER, MYSQL_PASS, MYSQL_DB
)
else:
# Make `python manage.py syncdb` works happy!
MYSQL_HOST = 'localhost'
MYSQL_PORT = '3306'
MYSQL_USER = 'root'
MYSQL_PASS = 'root'
MYSQL_DB = 'pmsys'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': MYSQL_DB,
'USER': MYSQL_USER,
'PASSWORD': MYSQL_PASS,
'HOST': MYSQL_HOST,
'PORT': MYSQL_PORT,
}
}
'''
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'zh-hans'
TIME_ZONE = 'UTC'
USE_I18N = False
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static').replace('\\', '/')
STATICFILES_DIRS = (
os.path.join(BASE_DIR, "static_dev"),
)
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
|
Python
| 0.000001
|
@@ -902,16 +902,59 @@
OSTS = %5B
+%0A 'localhost',%0A 'pmsys.sinaapp.com',%0A
%5D%0A%0A%0A# Ap
|
071a2840fa6d7cbdb679c9564260ccc1dbe54559
|
Handle the case where there can be a blank line between a decorator and the function definition.
|
djangae/contrib/security/commands_utils.py
|
djangae/contrib/security/commands_utils.py
|
import re, inspect
from django.core.exceptions import ViewDoesNotExist
from django.core.urlresolvers import RegexURLPattern, RegexURLResolver
def extract_views_from_urlpatterns(urlpatterns, base='', namespace=None, ignored_modules=None):
    """
    Return a list of views from a list of urlpatterns.
    Each object in the returned list is a tuple: (view_func, regex, name)

    Recurses into include()d resolvers, accumulating the URL prefix in
    *base* and the namespace; views whose top-level module is listed in
    *ignored_modules* are skipped.
    """
    ignored_modules = ignored_modules if ignored_modules else []
    views = []
    for p in urlpatterns:
        if isinstance(p, RegexURLPattern):
            # Handle correct single URL patterns
            try:
                # Qualify the name with the namespace when one is active.
                if namespace:
                    name = '{0}:{1}'.format(namespace, p.name)
                else:
                    name = p.name
                # Only filter by module when the callback exposes one.
                if hasattr(p.callback, '__module__'):
                    if p.callback.__module__.split('.')[0] not in ignored_modules:
                        views.append((p.callback, base + p.regex.pattern, name))
                else:
                    views.append((p.callback, base + p.regex.pattern, name))
            except ViewDoesNotExist:
                continue
        elif isinstance(p, RegexURLResolver):
            # Handle include() definitions
            try:
                patterns = p.url_patterns
            except ImportError:
                continue
            # Recurse with the extended prefix; the resolver's own namespace
            # wins only when no outer namespace is already set.
            views.extend(extract_views_from_urlpatterns(patterns, base + p.regex.pattern,
                namespace=(namespace or p.namespace), ignored_modules=ignored_modules))
        elif hasattr(p, '_get_callback'):
            # Handle string like 'foo.views.view_name' or just function view
            try:
                views.append((p._get_callback(), base + p.regex.pattern, p.name))
            except ViewDoesNotExist:
                continue
        elif hasattr(p, 'url_patterns') or hasattr(p, '_get_url_patterns'):
            # Handle url_patterns objects
            try:
                patterns = p.url_patterns
            except ImportError:
                continue
            views.extend(extract_views_from_urlpatterns(patterns, base + p.regex.pattern,
                namespace=namespace, ignored_modules=ignored_modules))
        else:
            raise TypeError("%s does not appear to be a urlpattern object" % p)
    return views
def display_as_table(views):
    """
    Get list of views from dumpurls security management command
    and returns them in the form of table to print in command line

    Each input row is a '||'-separated string of up to three columns
    (URL, handler path, decorators & mixins); column widths are capped
    at 100 characters and over-long mixin lists are wrapped onto
    continuation rows with blank URL/handler cells.
    """
    views = [row.split('||', 3) for row in sorted(views)]
    widths = [len(max(columns, key=len)) for columns in zip(*views)]
    widths = [width if width < 100 else 100 for width in widths]
    table_views = []
    header = ('URL', 'Handler path', 'Decorators & Mixins')
    table_views.append(
        ' | '.join('{0:<{1}}'.format(title, width) for width, title in zip(widths, header))
    )
    table_views.append('-+-'.join('-' * width for width in widths))
    for row in views:
        # Wrap long decorator/mixin cells: split on commas, re-group two
        # entries per line so each chunk fits the column.
        if len(row[2]) > 100:
            row[2] = row[2].split(',')
            row[2] = [",".join(row[2][i:i+2]) for i in range(0, len(row[2]), 2)]
        mixins = row[2]
        if type(mixins) == list:
            i = 0
            for line in mixins:
                row[2] = line.strip()
                # Continuation lines only repeat the mixin column.
                if i > 0:
                    row[0] = ''
                    row[1] = ''
                table_views.append(
                    ' | '.join('{0:<{1}}'.format(cdata, width) for width, cdata in zip(widths, row))
                )
                i += 1
        else:
            table_views.append(
                ' | '.join('{0:<{1}}'.format(cdata, width) for width, cdata in zip(widths, row))
            )
    return "\n".join([v for v in table_views]) + "\n"
def get_func_name(func):
    """Best-effort human-readable name for *func*.

    Py2 functions carry func_name, Py3 ones __name__; instances are
    reported as their class name in call style; anything else falls back
    to repr() with the memory address scrubbed.
    """
    if hasattr(func, 'func_name'):
        return func.func_name
    elif hasattr(func, '__name__'):
        return func.__name__
    elif hasattr(func, '__class__'):
        return '%s()' % func.__class__.__name__
    else:
        return re.sub(r' at 0x[0-9a-f]+', '', repr(func))


def get_decorators(func):
    """
    Get function or class and return names of applied decorators

    Scans the defining module's source upwards from the def/class line.
    Decorators may be separated from the definition by blank lines, so
    blank lines are skipped while scanning (the previous version stopped
    at the first blank line and could also index past the start of the
    file).
    """
    decorators = []
    if hasattr(func, '__module__'):
        mod = inspect.getmodule(func)
        source_code = inspect.getsourcelines(mod)[0]
        func_name = get_func_name(func)
        i = 0
        func_def = 'def {}'.format(func_name)
        class_def = 'class {}'.format(func_name)
        for line in source_code:
            if line.startswith(func_def) or line.startswith(class_def):
                j = 1
                k = source_code[i-j]
                # Decorators sit on the preceding line(s), possibly with
                # blank lines in between; blank lines are just '\n', so
                # strip() tells us whether a line is empty.
                while k.startswith('@') or not k.strip():
                    if k.startswith('@'):
                        decorators.append(k.strip().split('(')[0])
                    j += 1
                    if j >= i:  # don't wrap past the start of the file
                        break
                    k = source_code[i-j]
            i += 1
    return decorators
def get_mixins(func, ignored_modules=None):
    """
    Get class and return names and paths to applied mixins
    Has an optional argument for names of modules that should be ignored
    """
    ignored = ignored_modules if ignored_modules else []
    # Only class-based views expose the underlying class via `cls`.
    if not hasattr(func, 'cls'):
        return []
    # Walk the MRO, skipping the view class itself and any class whose
    # top-level module was asked to be ignored.
    return [
        "{}.{}".format(klass.__module__, get_func_name(klass))
        for klass in func.cls.mro()
        if klass != func.cls and klass.__module__.split('.')[0] not in ignored
    ]
|
Python
| 0
|
@@ -4649,122 +4649,495 @@
-while k.startswith('@'):%0A decorators.append(k.strip().split('(')%5B0%5D)%0A j += 1
+# decorators can be defined on the previous line(s), but can have blank lines before them%0A # blank lines are '%5Cn', hence we strip() them to see if they're empty%0A while k.startswith('@') or not k.strip():%0A if k.startswith('@'):%0A decorators.append(k.strip().split('(')%5B0%5D)%0A j += 1%0A if j %3E= i: # don't wrap around when we get to the start of the file%0A break
%0A
|
bf13f5403dc505ec93b44e2861dc6ce53ffde9b7
|
Update rv_transformation_tests.py
|
GPy/testing/rv_transformation_tests.py
|
GPy/testing/rv_transformation_tests.py
|
# Written by Ilias Bilionis
"""
Test if hyperparameters in models are properly transformed.
"""
import unittest
import numpy as np
import scipy.stats as st
import GPy
class TestModel(GPy.core.Model):
    """
    A simple GPy model with one parameter.
    """
    # The log-likelihood is constant, so the (transformed) posterior over
    # theta equals the (transformed) prior -- exactly what the tests below
    # compare against.
    def __init__(self, theta=1.):
        GPy.core.Model.__init__(self, 'test_model')
        # Wrap the raw value in a Param so it participates in GPy's
        # parameter/prior/constraint machinery.
        theta = GPy.core.Param('theta', theta)
        self.link_parameter(theta)
    def log_likelihood(self):
        return 0.
class RVTransformationTestCase(unittest.TestCase):
    """Check that constraint transformations transform the prior PDF and its
    gradients consistently."""

    def _test_trans(self, trans):
        """Compare the model's transformed PDF against a KDE of transformed
        prior samples; they should agree to ~10% in L2 norm."""
        m = TestModel()
        prior = GPy.priors.LogGaussian(.5, 0.1)
        m.theta.set_prior(prior)
        m.theta.unconstrain()
        m.theta.constrain(trans)
        # The PDF of the transformed variables
        p_phi = lambda phi : np.exp(-m._objective_grads(phi)[0])
        # To the empirical PDF of: (sample count must be an int -- newer
        # numpy versions reject float sizes such as 1e5)
        theta_s = prior.rvs(100000)
        phi_s = trans.finv(theta_s)
        # which is essentially a kernel density estimation
        kde = st.gaussian_kde(phi_s)
        # We will compare the PDF here:
        phi = np.linspace(phi_s.min(), phi_s.max(), 100)
        # The transformed PDF of phi should be this:
        pdf_phi = np.array([p_phi(p) for p in phi])
        # UNCOMMENT TO SEE GRAPHICAL COMPARISON
        #import matplotlib.pyplot as plt
        #fig, ax = plt.subplots()
        #ax.hist(phi_s, normed=True, bins=100, alpha=0.25, label='Histogram')
        #ax.plot(phi, kde(phi), '--', linewidth=2, label='Kernel Density Estimation')
        #ax.plot(phi, pdf_phi, ':', linewidth=2, label='Transformed PDF')
        #ax.set_xlabel(r'transformed $\theta$', fontsize=16)
        #ax.set_ylabel('PDF', fontsize=16)
        #plt.legend(loc='best')
        #plt.show(block=True)
        # END OF PLOT
        # The following test cannot be very accurate
        self.assertTrue(np.linalg.norm(pdf_phi - kde(phi)) / np.linalg.norm(kde(phi)) <= 1e-1)

    def _test_grad(self, trans):
        """Numeric-vs-analytic gradient check on a 20-parameter model."""
        m = TestModel(np.random.uniform(.5, 1.5, 20))
        prior = GPy.priors.LogGaussian(.5, 0.1)
        m.theta.set_prior(prior)
        m.theta.constrain(trans)
        m.randomize()
        self.assertTrue(m.checkgrad(1))

    def test_Logexp(self):
        self._test_trans(GPy.constraints.Logexp())
        self._test_grad(GPy.constraints.Logexp())

    def test_Exponent(self):
        self._test_trans(GPy.constraints.Exponent())
        self._test_grad(GPy.constraints.Exponent())
if __name__ == '__main__':
    unittest.main()
    # NOTE: everything below is unreachable scratch code -- unittest.main()
    # raises SystemExit, and quit() would end the interpreter regardless.
    quit()
    # NOTE(review): 'plt' is never imported in this module; this block would
    # raise NameError if it ever ran. Keep or delete deliberately.
    m = TestModel()
    prior = GPy.priors.LogGaussian(0., .9)
    m.theta.set_prior(prior)
    # The following should return the PDF in terms of the transformed quantities
    p_phi = lambda phi : np.exp(-m._objective_grads(phi)[0])
    # Let's look at the transformation phi = log(exp(theta - 1))
    trans = GPy.constraints.Exponent()
    m.theta.constrain(trans)
    # Plot the transformed probability density
    phi = np.linspace(-8, 8, 100)
    fig, ax = plt.subplots()
    # Let's draw some samples of theta and transform them so that we see
    # which one is right
    theta_s = prior.rvs(10000)
    # Transform it to the new variables
    phi_s = trans.finv(theta_s)
    # And draw their histogram
    ax.hist(phi_s, normed=True, bins=100, alpha=0.25, label='Empirical')
    # This is to be compared to the PDF of the model expressed in terms of these new
    # variables
    ax.plot(phi, [p_phi(p) for p in phi], label='Transformed PDF', linewidth=2)
    ax.set_xlim(-3, 10)
    ax.set_xlabel(r'transformed $\theta$', fontsize=16)
    ax.set_ylabel('PDF', fontsize=16)
    plt.legend(loc='best')
    # Now let's test the gradients
    m.checkgrad(verbose=True)
    # And show the plot
    plt.show(block=True)
|
Python
| 0.000002
|
@@ -2164,16 +2164,33 @@
omize()%0A
+ print(m)%0A
|
8d40cd3dab606d558806fa00b0ed5df73c457045
|
Fix for issue #2.
|
bgui/frame.py
|
bgui/frame.py
|
from .gl_utils import *
from .widget import Widget, BGUI_DEFAULT
class Frame(Widget):
	"""Frame for storing other widgets"""
	theme_section = 'Frame'
	theme_options = {
			'Color1': (0, 0, 0, 0),
			'Color2': (0, 0, 0, 0),
			'Color3': (0, 0, 0, 0),
			'Color4': (0, 0, 0, 0),
			'BorderSize': 0,
			'BorderColor': (0, 0, 0, 1),
			}

	def __init__(self, parent, name=None, border=None, aspect=None, size=[1, 1], pos=[0, 0],
			sub_theme='', options=BGUI_DEFAULT):
		"""
		:param parent: the widget's parent
		:param name: the name of the widget
		:param border: the size of the border around the frame (0 for no border)
		:param aspect: constrain the widget size to a specified aspect ratio
		:param size: a tuple containing the width and height
		:param pos: a tuple containing the x and y position
		:param sub_theme: name of a sub_theme defined in the theme file (similar to CSS classes)
		:param options: various other options
		"""
		Widget.__init__(self, parent, name, aspect, size, pos, sub_theme, options)

		#: The colors for the four corners of the frame.
		self.colors = [
				self.theme['Color1'],
				self.theme['Color2'],
				self.theme['Color3'],
				self.theme['Color4']
				]

		#: The color of the border around the frame.
		self.border_color = self.theme['BorderColor']

		#: The size of the border around the frame.
		# An explicit border of 0 means "no border" and must not fall back to
		# the theme default, so test against None rather than truthiness.
		if border is not None:
			self.border = border
		else:
			self.border = self.theme['BorderSize']

	def _draw(self):
		"""Draw the frame"""
		# Enable alpha blending
		glEnable(GL_BLEND)
		glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)

		# Enable polygon offset so the fill does not z-fight with the outline
		glEnable(GL_POLYGON_OFFSET_FILL)
		glPolygonOffset(1.0, 1.0)

		glBegin(GL_QUADS)
		for i in range(4):
			glColor4f(self.colors[i][0], self.colors[i][1], self.colors[i][2], self.colors[i][3])
			glVertex2f(self.gl_position[i][0], self.gl_position[i][1])
		glEnd()

		glDisable(GL_POLYGON_OFFSET_FILL)

		# Draw an outline
		if self.border > 0:
			r, g, b, a = self.border_color
			glColor4f(r, g, b, a)
			glPolygonMode(GL_FRONT, GL_LINE)
			glLineWidth(self.border)

			glBegin(GL_QUADS)
			for i in range(4):
				glVertex2f(self.gl_position[i][0], self.gl_position[i][1])
			glEnd()

			glLineWidth(1.0)
			glPolygonMode(GL_FRONT, GL_FILL)

		Widget._draw(self)
|
Python
| 0
|
@@ -1336,32 +1336,58 @@
ound the frame.%0A
+%09%09if border is not None:%0A%09
%09%09self.border =
@@ -1396,23 +1396,33 @@
rder
- if
+%0A%09%09else:%0A%09%09%09self.
border
-else
+=
sel
|
7a0560d8bd9dcb421b54522df92618d439941e69
|
Change bill detail page to use session and identifier
|
bills/urls.py
|
bills/urls.py
|
from . import views
from django.conf.urls import url
# Routes for the bills app: topic/location listings, activity feeds, and a
# detail page addressed by a bill id captured from the URL.
urlpatterns = [
    url(r'^by_topic/', views.bill_list_by_topic),
    url(r'^by_location', views.bill_list_by_location),
    url(r'^latest_activity/', views.latest_bill_activity),
    url(r'^latest/', views.latest_bill_actions),
    url(r'^detail/(?P<bill_id>(.*))/$', views.bill_detail, name='bill_detail'),
]
|
Python
| 0
|
@@ -308,10 +308,41 @@
ill_
-id
+session%3E(.*))/(?P%3Cbill_identifier
%3E(.*
|
14e2d2282b7c95a1bb6d475faa6d827d90609e16
|
Define PostAdmin list_display.
|
blog/admin.py
|
blog/admin.py
|
from django.contrib import admin

from .models import Post


@admin.register(Post)
class PostAdmin(admin.ModelAdmin):
    """Admin options for Post: show the title and publish date columns in
    the changelist instead of the default str() column."""
    # NOTE(review): assumes the Post model defines 'title' and 'pub_date'
    # fields -- confirm against blog/models.py.
    list_display = ('title', 'pub_date')
|
Python
| 0
|
@@ -57,19 +57,16 @@
st%0A%0A
+%0A@
admin.
-site.
regi
@@ -76,8 +76,84 @@
r(Post)%0A
+class PostAdmin(admin.ModelAdmin):%0A list_display = ('title', 'pub_date')%0A
|
0d389018353f03d79332a1b40d6dc1881df91cd0
|
Fix sorting of items in RSS feed
|
blog/views.py
|
blog/views.py
|
from django.contrib.syndication.views import Feed
from django.utils.feedgenerator import Atom1Feed
from .models import BlogIndexPage, BlogPage, BlogCategory
from django.shortcuts import get_object_or_404
from django.conf import settings
def tag_view(request, tag):
    """Serve the first BlogIndexPage, filtered by *tag*."""
    index = BlogIndexPage.objects.first()
    return index.serve(request, tag=tag)
def category_view(request, category):
    """Serve the first BlogIndexPage, filtered by *category*."""
    index = BlogIndexPage.objects.first()
    return index.serve(request, category=category)
def author_view(request, author):
    """Serve the first BlogIndexPage, filtered by *author*."""
    index = BlogIndexPage.objects.first()
    return index.serve(request, author=author)
class LatestEntriesFeed(Feed):
    '''
    If a URL ends with "rss" try to find a matching BlogIndexPage
    and return its items.
    '''
    def get_object(self, request, blog_slug):
        return get_object_or_404(BlogIndexPage, slug=blog_slug)

    def title(self, blog):
        # Prefer the SEO title when one is set.
        if blog.seo_title:
            return blog.seo_title
        return blog.title

    def link(self, blog):
        return blog.full_url

    def description(self, blog):
        return blog.search_description

    def items(self, blog):
        """Return the newest descendant pages, newest first."""
        num = getattr(settings, 'BLOG_PAGINATION_PER_PAGE', 10)
        # get_descendants() yields tree order, not publication order, so
        # sort explicitly before slicing off the first *num* items.
        return blog.get_descendants().order_by('-first_published_at')[:num]

    def item_title(self, item):
        return item.title

    def item_description(self, item):
        return item.specific.body

    def item_link(self, item):
        return item.full_url
class LatestEntriesFeedAtom(LatestEntriesFeed):
    # Same feed content as LatestEntriesFeed, emitted as Atom 1.0 instead of RSS.
    feed_type = Atom1Feed
class LatestCategoryFeed(Feed):
    # Feed of the five newest posts within a single category.
    description = "A Blog"
    def title(self, category):
        return "Blog: " + category.name
    def link(self, category):
        return "/blog/category/" + category.slug
    def get_object(self, request, category):
        # *category* here is the slug captured from the URL.
        return get_object_or_404(BlogCategory, slug=category)
    def items(self, obj):
        # Newest five posts in this category.
        return BlogPage.objects.filter(
            categories__category=obj).order_by('-date')[:5]
    def item_title(self, item):
        return item.title
    def item_description(self, item):
        return item.body
|
Python
| 0
|
@@ -1226,16 +1226,48 @@
ndants()
+.order_by('-first_published_at')
%5B:num%5D%0A%0A
|
3e2cbd52a916b767473335427702ecf3bae5a51d
|
create dir if image_filter not exist
|
CapturePictures.py
|
CapturePictures.py
|
#!/usr/bin/env python
import argparse
import os
import sys

import cv2
def capturePicturesByCamera(num = 300, saveDir = "./image_filter/"):
    """
    Capture pictures with faces detected.

    Args:
        num (int): The number of pictures to capture. Default: 300.
        saveDir (str): The directory to save the captured pictures.
            Default: "./image_filter/". Created automatically if it does
            not exist yet.

    Returns:
        void

    Todo:
        * Disable logging of cv2.
    """
    # Make sure the output directory exists; cv2.imwrite silently drops
    # frames when the target directory is missing.
    if not os.path.isdir(saveDir):
        os.makedirs(saveDir)

    face_cascade = cv2.CascadeClassifier('opencv_config/haarcascade_frontalface_default.xml')
    cap = cv2.VideoCapture(0)
    count = 1
    while True:
        # Capture frame-by-frame
        ret, frame = cap.read()
        # Detect faces in the gray frame
        gray_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
        faces = face_cascade.detectMultiScale(gray_frame, 1.3, 5)
        # The frame will be saved when the faces are detected
        if len(faces) > 0:
            # Save frame as JPEG file; os.path.join also handles a saveDir
            # given without a trailing slash.
            frame_file_path = os.path.join(saveDir, "frame%d.jpg" % count)
            cv2.imwrite(frame_file_path, frame)
            print("%d picture(s) captured & saved!" % count)
            count += 1
            # Draw rectangles which point out the faces
            for (x, y, w, h) in faces:
                cv2.rectangle(frame, (x, y), (x+w, y+h), (0, 255, 0), 2)
        # Display the captured frame
        cv2.imshow('Camera', frame)
        # Wait for 'q' on the Camera window to quit before entire capturing job finished
        if cv2.waitKey(1) & 0xFF == ord('q'):
            cv2.destroyAllWindows()
            break
        if count > num:
            cv2.destroyAllWindows()
            break
    cap.release()
if __name__ == '__main__':
    # CLI wrapper: -n picture count, -d destination directory.
    parser = argparse.ArgumentParser(description='Capture pictures with faces detected.')
    parser.add_argument('-n', type=int, help='the number of pictures to capture. Default: 300')
    parser.add_argument('-d', type=str, help='the directory to save the captured pictures. Default: "./image_filter/". Note: Please make sure the directory has been created')
    parser.set_defaults(n = 300, d = "./image_filter/")
    args = parser.parse_args()
    # Start the capturing
    capturePicturesByCamera(args.n, args.d)
|
Python
| 0.000008
|
@@ -38,10 +38,9 @@
ort
-sy
+o
s%0Aim
@@ -2239,24 +2239,112 @@
rse_args()%0A%0A
+ if not os.path.exists(%22./image_filter%22):%0A os.makedirs('image_filter/after')%0A%0A
# Start
|
d9f623baaa8e1d1075f9132108ed7bb11eea39b0
|
Replace dask.get from core.get to async.get_sync
|
dask/__init__.py
|
dask/__init__.py
|
from __future__ import absolute_import, division, print_function
from .core import istask, get
from .context import set_options
try:
    # The imperative interface has extra dependencies; it is optional.
    from .imperative import do, value
except ImportError:
    pass
__version__ = '0.7.3'
|
Python
| 0
|
@@ -87,13 +87,8 @@
task
-, get
%0Afro
@@ -117,16 +117,50 @@
options%0A
+from .async import get_sync as get
%0Atry:%0A
|
de4f02fff4b23a442abe3062c2da4c52d8823627
|
Fix spurious deprecation warning for fatal_warnings (#6237)
|
src/python/pants/backend/jvm/subsystems/zinc_language_mixin.py
|
src/python/pants/backend/jvm/subsystems/zinc_language_mixin.py
|
# coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
from builtins import object
from pants.base.deprecated import deprecated
class ZincLanguageMixin(object):
  """A mixin for subsystems for languages compiled with Zinc."""

  @classmethod
  def register_options(cls, register):
    super(ZincLanguageMixin, cls).register_options(register)
    # NB: This option is fingerprinted because the default value is not included in a target's
    # fingerprint. This also has the effect of invalidating only the relevant tasks: ZincCompile
    # in this case.
    register('--strict-deps', advanced=True, default=False, fingerprint=True, type=bool,
             help='The default for the "strict_deps" argument for targets of this language.')

    register('--fatal-warnings', advanced=True, type=bool,
             fingerprint=True,
             removal_version='1.11.0.dev0',
             removal_hint='Use --compiler-option-sets=fatal_warnings instead of fatal_warnings',
             help='The default for the "fatal_warnings" argument for targets of this language.')

    register('--compiler-option-sets', advanced=True, default=[], type=list,
             fingerprint=True,
             help='The default for the "compiler_option_sets" argument '
                  'for targets of this language.')

    register('--zinc-file-manager', advanced=True, default=True, type=bool,
             fingerprint=True,
             help='Use zinc provided file manager to ensure transactional rollback.')

  @property
  def strict_deps(self):
    """When True, limits compile time deps to those that are directly declared by a target.
    :rtype: bool
    """
    return self.get_options().strict_deps

  @property
  @deprecated('1.11.0.dev0', 'Consume fatal_warnings from compiler_option_sets instead.')
  def fatal_warnings(self):
    """If true, make warnings fatal for targets that do not specify fatal_warnings.
    :rtype: bool
    """
    return self.get_options().fatal_warnings

  @property
  def compiler_option_sets(self):
    """For every element in this list, enable the corresponding flags on compilation
    of targets.
    :rtype: list
    """
    option_sets = self.get_options().compiler_option_sets
    # Read the raw option value instead of the deprecated `fatal_warnings`
    # property above, so merely computing the option sets does not emit a
    # spurious deprecation warning on every access.
    if 'fatal_warnings' not in option_sets and self.get_options().fatal_warnings:
      option_sets.append('fatal_warnings')
    return option_sets

  @property
  def zinc_file_manager(self):
    """If false, the default file manager will be used instead of the zinc provided one.
    :rtype: bool
    """
    return self.get_options().zinc_file_manager
|
Python
| 0
|
@@ -2430,16 +2430,30 @@
nd self.
+get_options().
fatal_wa
|
93b752a251b43c268a6becb53ab298e958a46aeb
|
add Category Field in template
|
awesomepose/posts/forms/post.py
|
awesomepose/posts/forms/post.py
|
from django import forms
from django_summernote.widgets import SummernoteWidget, SummernoteInplaceWidget
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit, Layout, Field, Fieldset, Button
from crispy_forms.bootstrap import (
PrependedText, PrependedAppendedText, FormActions)
from posts.models import Post
class PostForm(forms.ModelForm):
    # Review-post form: Korean field labels, rendered with a crispy-forms
    # bootstrap layout (title, summernote content editor, purchase URL).
    def __init__(self, *args, **kwargs):
        super(PostForm, self).__init__(*args, **kwargs)
        # Localised (Korean) labels for the visible fields.
        self.fields['title'].label = "제목"
        self.fields['content'].label = "상세 리뷰"
        self.fields['product_url'].label = "구매 주소"
        self.helper = FormHelper()
        self.helper.form_method = 'POST'
        self.helper.label_class = 'control-label'
        self.helper.layout = Layout(
            Field('title', css_class='form-control', placeholder="제목을 입력해 주세요"),
            Field('content', css_class='form-control', ),
            Field('product_url', css_class='form-control', placeholder="구매처의 주소를 붙여넣어 주세요"),
            FormActions(Submit('save', '저장하기', css_class='btn btn-primary'),
                        Button('cancel', 'Cancel', css_class='btn btn-default')
                        ),
        )
    class Meta:
        model = Post
        widgets = {
            'title': forms.TextInput(),
            # Inline summernote widget gives a rich-text editor for content.
            'content': SummernoteInplaceWidget(
            ),
            'product_url': forms.TextInput(),
        }
        fields = ['title', 'content', 'product_url']
|
Python
| 0
|
@@ -18,16 +18,66 @@
t forms%0A
+from django.forms import ModelMultipleChoiceField%0A
from dja
@@ -262,16 +262,21 @@
, Button
+, Div
%0Afrom cr
@@ -364,73 +364,249 @@
ns)%0A
-%0Afrom posts.models import Post%0A%0A%0Aclass PostForm(forms.ModelForm):
+from mptt.forms import TreeNodeChoiceField%0A%0Afrom posts.models import Post%0Afrom categories.models import Category%0A%0A%0Aclass PostForm(forms.ModelForm):%0A category = TreeNodeChoiceField(queryset=Category.objects.all(), level_indicator='----',)%0A
%0A
@@ -699,16 +699,63 @@
kwargs)%0A
+ self.fields%5B'category'%5D.label = %22%EC%B9%B4%ED%85%8C%EA%B3%A0%EB%A6%AC%22%0A
@@ -1049,16 +1049,109 @@
Layout(%0A
+ Field('category', css_class='form-control col-lg-8', placeholder=%22%EC%A0%9C%EB%AA%A9%EC%9D%84 %EC%9E%85%EB%A0%A5%ED%95%B4 %EC%A3%BC%EC%84%B8%EC%9A%94%22),%0A
@@ -1831,16 +1831,28 @@
elds = %5B
+'category',
'title',
@@ -1878,8 +1878,101 @@
t_url'%5D%0A
+ field_classes = %7B%0A 'category': TreeNodeChoiceField,%0A %7D%0A
|
33b3f10de563f2871ac00b113fe1ec9507e62c17
|
Update ChatterBot.py
|
Cogs/ChatterBot.py
|
Cogs/ChatterBot.py
|
import asyncio
import discord
import time
import requests
import urllib
import os
from aiml import Kernel
from os import listdir
from discord.ext import commands
from Cogs import Nullify
from pyquery import PyQuery as pq
from Cogs import FuzzySearch
class ChatterBot:
	"""AIML-backed chat cog: relays user messages to an AIML Kernel and
	posts the replies, rate-limited per server."""

	# Init with the bot reference, and a reference to the settings var
	def __init__(self, bot, settings, prefix : str = '$'):
		self.bot = bot
		self.settings = settings
		self.prefix = prefix
		self.waitTime = 4 # Wait time in seconds
		self.botDir = 'standard'
		self.botBrain = 'standard.brn'
		self.botList = []
		self.ownerName = "CorpNewt"
		self.ownerGender = "man"
		self.timeout = 3
		self.chatBot = Kernel()

	async def onready(self):
		"""Load (or learn and save) the AIML brain, then prime the bot with
		the owner's name and gender."""
		if not os.path.exists(self.botBrain):
			# No brain, let's learn and create one
			files = listdir(self.botDir)
			for file in files:
				self.chatBot.learn(self.botDir + '/' + file)
			# Save brain
			self.chatBot.saveBrain("standard.brn")
		else:
			# Already have a brain - load it
			self.chatBot.bootstrap(brainFile="standard.brn")
		# Learned by this point - let's set our owner's name/gender
		# Start the convo
		self.chatBot.respond('Hello')
		# Bot asks for our Name
		self.chatBot.respond('My name is {}'.format(self.ownerName))
		# Bot asks for our gender
		self.chatBot.respond('I am a {}'.format(self.ownerGender))

	def canChat(self, server):
		"""Rate limiter: True when at least waitTime seconds have passed
		since the last chat on *server*, recording the new timestamp."""
		lastTime = int(self.settings.getServerStat(server, "LastChat"))
		threshold = int(self.waitTime)
		currentTime = int(time.time())
		if currentTime < (int(lastTime) + int(threshold)):
			return False
		# If we made it here - record the new LastChat time
		self.settings.setServerStat(server, "LastChat", int(time.time()))
		return True

	async def message(self, message):
		"""Message hook: if a chat channel is configured and this message is
		in it (and not a command), feed it to the chat bot. This module
		never ignores or deletes messages."""
		ignore = False
		delete = False
		msg = message.content
		chatChannel = self.settings.getServerStat(message.server, "ChatChannel")
		if chatChannel and not message.author == self.bot.user and not msg.startswith(self.prefix):
			# We have a channel
			if message.channel.id == chatChannel:
				# We're in that channel! Strip the chat prefix if present.
				pre = '{}chat '.format(self.prefix)
				msg = message.content
				if msg.lower().startswith(pre):
					msg = msg[len(pre):]
				await self._chat(message.channel, message.server, msg)
		return { 'Ignore' : ignore, 'Delete' : delete}

	@commands.command(pass_context=True)
	async def setchatchannel(self, ctx, *, channel : discord.Channel = None):
		"""Sets the channel for bot chatter."""
		isAdmin = ctx.message.author.permissions_in(ctx.message.channel).administrator
		# Only allow admins to change server stats
		if not isAdmin:
			await self.bot.send_message(ctx.message.channel, 'You do not have sufficient privileges to access this command.')
			return

		if channel == None:
			# No channel given: clear the setting.
			self.settings.setServerStat(ctx.message.server, "ChatChannel", "")
			msg = 'Chat channel removed - must use the `{}chat [message]` command to chat.'.format(ctx.prefix)
			await self.bot.send_message(ctx.message.channel, msg)
			return

		# If we made it this far - then we can add it
		self.settings.setServerStat(ctx.message.server, "ChatChannel", channel.id)
		msg = 'Chat channel set to **{}**.'.format(channel.name)
		await self.bot.send_message(ctx.message.channel, msg)

	@setchatchannel.error
	async def setchatchannel_error(self, ctx, error):
		# Surface command errors in-channel for debugging.
		msg = 'setchatchannel Error: {}'.format(ctx)
		await self.bot.say(msg)

	@commands.command(pass_context=True)
	async def chat(self, ctx, *, message = None):
		"""Chats with the bot."""
		await self._chat(ctx.message.channel, ctx.message.server, message)

	async def _chat(self, channel, server, message):
		"""Normalise *message*, obtain the AIML reply, and send it to
		*channel*, respecting mention suppression and the rate limit."""
		# Check if we're suppressing @here and @everyone mentions
		if self.settings.getServerStat(server, "SuppressMentions").lower() == "yes":
			suppress = True
		else:
			suppress = False

		if message == None:
			return
		if not self.canChat(server):
			return
		await self.bot.send_typing(channel)

		try:
			# Collapse the message onto one line. splitlines() returns a
			# list, so join must be called on a string separator -- the old
			# `message.splitlines().join()` raised AttributeError and made
			# every chat attempt reply "I couldn't understand that."
			message = " ".join(message.splitlines()).strip()
		except Exception:
			await self.bot.send_message(channel, "I couldn't understand that.")
			return

		msg = self.chatBot.respond(message)
		if not msg:
			return
		# Check for suppress
		if suppress:
			msg = Nullify.clean(msg)
		await self.bot.send_message(channel, msg)
|
Python
| 0
|
@@ -4157,14 +4157,15 @@
%09%0A%09%09
+#
try:%0A
-%09
%09%09me
@@ -4210,16 +4210,19 @@
rip()%0A%09%09
+'''
except E
@@ -4307,24 +4307,27 @@
%22)%0A%09%09%09return
+'''
%0A%0A%09%09msg = se
|
8a26eddf5c8d0913f15722a59542cd1dccfbbad4
|
fix reference to instance var
|
Conficat/Config.py
|
Conficat/Config.py
|
#!/usr/bin/env python
"""
Conficat internal configuration class
"""
import os
import re
import sys
import logging
from Cheetah.ImportHooks import install as cheetah_import_install
from TemplateRegistry import TemplateRegistry
from CSVDataSource import CSVDataSource
from ConfigError import ConfigError
class Config(object):
  """Conficat configuration class. Refer to CLI.py for an example"""

  def __init__(self):
    super(Config, self).__init__()
    self.logger=logging.getLogger("ccat.config")
    self.data = CSVDataSource()
    self.globtmpls=TemplateRegistry()
    self.rowtmpls=TemplateRegistry()
    # Template columns default to empty so validate() cannot raise
    # AttributeError when setTemplateColumns() was never called.
    self.tmplcols = []
    # make sure cheetah imports dependant classes automatically
    cheetah_import_install()

  def addCSVPath(self, path, key=None):
    """
    Add a file or directory containing tabular data in CSV format
    """
    # Load into the instance's data source (the bare `data` reference here
    # was a NameError).
    self.data.loadFromPath(path)

  def addGlobalTemplatePath(self,path):
    """
    Add a file or directory path containing global templates
    """
    self.logger.info("adding global template(s) in \"%s\"" % path)
    self.globtmpls.addPath(path)

  def addRowTemplatePath(self,path):
    """
    Add a file or directory path containing row templates
    """
    self.logger.info("adding row template(s) in \"%s\"" % path)
    self.rowtmpls.addPath(path)

  def setTemplateColumns(self,tcols=[]):
    """
    Set the names of the columns which may contain a reference to a row
    template
    """
    self.logger.info("columns for row templates: %s" % ", ".join(tcols))
    self.tmplcols = tcols

  def setOutputFile(self,outf):
    """
    Setup either stdout or open a file as a destination for template results
    """
    if outf=="-":
      self.logger.info("write output to stdout")
      self.outfile=sys.stdout
    else:
      self.logger.info("write output to file %s" % outf)
      self.outfile=open(outf,"w")

  def setOutputDir(self,outd):
    """
    Specify the directory where additional files created via the outfile def in
    templates should be placed.
    """
    if not os.path.isdir(outd):
      raise ConfigError("%s: Not a directory" % outd)
    self.outdir=os.path.normpath(outd)
    self.logger.info("write separate output files to directory %s" % outd)

  def validate(self):
    """
    Validate the configuration
    """
    if len(self.data) == 0:
      self.logger.warn("No data was loaded from any csv file")

    # check templates and data
    while True:
      # Operation with only one global template is possible
      if len(self.globtmpls) > 0:
        break
      # Operation with some datasource and some row templates is possible
      if len(self.data) > 0 or len(self.rowtmpls) > 0:
        break
      # Without anything we can do nothing
      raise ConfigError("Either at least one global template and/or some data and at least one row template is required.")

    # check template columns if row templates specified
    if len(self.rowtmpls) > 0 and len(self.tmplcols) == 0:
      raise ConfigError("Row templates specified but no template columns.")
|
Python
| 0.000001
|
@@ -823,16 +823,21 @@
%22%22%22%0A
+self.
data.loa
|
ad2944a49b357494ff09a729b468f2fb19934909
|
remove vertically-aligned assignments, per PEP8
|
guestbook/__init__.py
|
guestbook/__init__.py
|
# coding: utf-8
import shelve
from datetime import datetime
from flask import Flask, request, render_template, redirect, escape, Markup
application = Flask(__name__)
DATA_FILE = 'guestbook.dat'
def save_data(name, comment, create_at):
    """Prepend one guestbook entry to the persistent greeting list."""
    store = shelve.open(DATA_FILE)
    entries = store['greeting_list'] if 'greeting_list' in store else []
    entries.insert(0, {
        'name': name,
        'comment': comment,
        'create_at': create_at
    })
    store['greeting_list'] = entries
    store.close()
database.close()
def load_data():
    """Return the stored greeting list, newest first; [] when empty."""
    store = shelve.open(DATA_FILE)
    entries = store['greeting_list'] if 'greeting_list' in store else []
    store.close()
    return entries
@application.route('/')
def index():
    """Render the guestbook page with all stored greetings."""
    greeting_list = load_data()
    return render_template('index.html', greeting_list=greeting_list)
@application.route('/post', methods=['POST'])
def post():
    """Store a new guestbook entry from the submitted form, then redirect home."""
    name = request.form.get('name')
    comment = request.form.get('comment')
    create_at = datetime.now()
    save_data(name, comment, create_at)
    return redirect('/')
@application.template_filter('nl2br')
def nl2br_filter(s):
    """Jinja filter: HTML-escape *s* and turn newlines into <br /> tags."""
    return escape(s).replace('\n', Markup('<br />'))
@application.template_filter('datetime_fmt')
def datetime_fmt_filter(dt):
    """Jinja filter: format a datetime as 'YYYYMMDD HH:MM:SS'."""
    return dt.strftime('%Y%m%d %H:%M:%S')
def main():
    """Run the app on the local development server (entry point)."""
    application.run('127.0.0.1', 8000)
if __name__ == "__main__":
application.run('127.0.0.1', 8000, debug=True)
|
Python
| 0.000006
|
@@ -958,21 +958,16 @@
name
-
= reques
@@ -997,18 +997,16 @@
comment
-
= reques
|
90689c38f7b76b47df1b8259aa14e3c0b604f233
|
Fix netdata system_load and add disk_free. (#9091)
|
homeassistant/components/sensor/netdata.py
|
homeassistant/components/sensor/netdata.py
|
"""
Support gathering system information of hosts which are running netdata.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.netdata/
"""
import logging
from datetime import timedelta
from urllib.parse import urlsplit
import requests
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_HOST, CONF_PORT, CONF_NAME, CONF_RESOURCES)
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
_RESOURCE = 'api/v1'
_REALTIME = 'before=0&after=-1&options=seconds'
DEFAULT_HOST = 'localhost'
DEFAULT_NAME = 'Netdata'
DEFAULT_PORT = '19999'
SCAN_INTERVAL = timedelta(minutes=1)
# Map of sensor key -> [friendly name, unit, netdata chart id, data label,
# decimal precision]. Chart id and label must match the netdata REST API.
SENSOR_TYPES = {
    'memory_free': ['RAM Free', 'MiB', 'system.ram', 'free', 1],
    'memory_used': ['RAM Used', 'MiB', 'system.ram', 'used', 1],
    'memory_cached': ['RAM Cached', 'MiB', 'system.ram', 'cached', 1],
    'memory_buffers': ['RAM Buffers', 'MiB', 'system.ram', 'buffers', 1],
    'swap_free': ['Swap Free', 'MiB', 'system.swap', 'free', 1],
    'swap_used': ['Swap Used', 'MiB', 'system.swap', 'used', 1],
    'processes_running': ['Processes Running', 'Count', 'system.processes',
                          'running', 0],
    'processes_blocked': ['Processes Blocked', 'Count', 'system.processes',
                          'blocked', 0],
    # BUG FIX: system_load previously read the 'running' label from the
    # 'system.processes' chart; the 15-minute load average lives on the
    # 'system.load' chart under the 'load15' label.
    'system_load': ['System Load', '15 min', 'system.load', 'load15', 2],
    'system_io_in': ['System IO In', 'Count', 'system.io', 'in', 0],
    'system_io_out': ['System IO Out', 'Count', 'system.io', 'out', 0],
    'ipv4_in': ['IPv4 In', 'kb/s', 'system.ipv4', 'received', 0],
    'ipv4_out': ['IPv4 Out', 'kb/s', 'system.ipv4', 'sent', 0],
    'disk_free': ['Disk Free', 'GiB', 'disk_space._', 'avail', 2],
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_RESOURCES, default=['memory_free']):
vol.All(cv.ensure_list, [vol.In(SENSOR_TYPES)]),
})
# pylint: disable=unused-variable
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up the Netdata sensor."""
    name = config.get(CONF_NAME)
    host = config.get(CONF_HOST)
    port = config.get(CONF_PORT)

    url = 'http://{}:{}'.format(host, port)
    data_url = '{}/{}/data?chart='.format(url, _RESOURCE)
    resources = config.get(CONF_RESOURCES)

    # Group the requested sensor keys by the netdata chart they read from,
    # so each chart is polled once and its data is shared by its sensors.
    values = {}
    for key, value in sorted(SENSOR_TYPES.items()):
        if key in resources:
            values.setdefault(value[2], []).append(key)

    dev = []
    for chart in values:
        rest_url = '{}{}&{}'.format(data_url, chart, _REALTIME)
        rest = NetdataData(rest_url)
        rest.update()
        for sensor_type in values[chart]:
            dev.append(NetdataSensor(rest, name, sensor_type))

    add_devices(dev, True)
class NetdataSensor(Entity):
    """Implementation of a Netdata sensor."""

    def __init__(self, rest, name, sensor_type):
        """Initialize the Netdata sensor."""
        self.rest = rest  # NetdataData poller shared by sensors of one chart
        self.type = sensor_type
        self._name = '{} {}'.format(name, SENSOR_TYPES[self.type][0])
        self._precision = SENSOR_TYPES[self.type][4]
        self._unit_of_measurement = SENSOR_TYPES[self.type][1]

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def unit_of_measurement(self):
        """Return the unit the value is expressed in."""
        return self._unit_of_measurement

    @property
    def state(self):
        """Return the state of the resources."""
        value = self.rest.data
        if value is not None:
            netdata_id = SENSOR_TYPES[self.type][3]
            # The payload maps labels to values; fall through to None when
            # this sensor's label is absent from the latest sample.
            if netdata_id in value:
                return "{0:.{1}f}".format(value[netdata_id], self._precision)
        return None

    @property
    def available(self):
        """Could the resource be accessed during the last update call."""
        return self.rest.available

    def update(self):
        """Get the latest data from Netdata REST API."""
        self.rest.update()
class NetdataData(object):
    """The class for handling the data retrieval."""

    def __init__(self, resource):
        """Initialize the data object."""
        self._resource = resource  # full REST URL for one netdata chart
        self.data = None  # latest {label: value} sample, or None on failure
        self.available = True

    def update(self):
        """Get the latest data from the Netdata REST API."""
        try:
            response = requests.get(self._resource, timeout=5)
            det = response.json()
            # 'labels' and the first 'data' row line up positionally.
            self.data = {k: v for k, v in zip(det['labels'], det['data'][0])}
            self.available = True
        except requests.exceptions.ConnectionError:
            _LOGGER.error("Connection error: %s", urlsplit(self._resource)[1])
            self.data = None
            self.available = False
|
Python
| 0
|
@@ -1510,28 +1510,22 @@
tem.
-processes', 'running
+load', 'load15
', 2
@@ -1798,16 +1798,83 @@
t', 0%5D,%0A
+ 'disk_free': %5B'Disk Free', 'GiB', 'disk_space._', 'avail', 2%5D,%0A
%7D%0A%0APLATF
|
1c6a2f87ebd75d69857590ec3918d65ee6468b81
|
Add link to docs
|
homeassistant/components/feedreader.py
|
homeassistant/components/feedreader.py
|
"""RSS/Atom feed reader for Home Assistant."""
from datetime import datetime
from logging import getLogger
import voluptuous as vol
from homeassistant.helpers.event import track_utc_time_change
REQUIREMENTS = ['feedparser==5.2.1']
_LOGGER = getLogger(__name__)
DOMAIN = "feedreader"
EVENT_FEEDREADER = "feedreader"
# pylint: disable=no-value-for-parameter
CONFIG_SCHEMA = vol.Schema({
DOMAIN: {
'urls': [vol.Url()],
}
}, extra=vol.ALLOW_EXTRA)
# pylint: disable=too-few-public-methods
class FeedManager(object):
    """Abstraction over feedparser module."""

    def __init__(self, url, hass):
        """Initialize the FeedManager object, poll every hour."""
        self._url = url
        self._feed = None
        self._hass = hass
        # Initialize last entry timestamp as epoch time
        self._last_entry_timestamp = datetime.utcfromtimestamp(0).timetuple()
        _LOGGER.debug('Loading feed %s', self._url)
        self._update()
        # Re-poll at the top of every hour.
        track_utc_time_change(hass, lambda now: self._update(),
                              minute=0, second=0)

    def _log_no_entries(self):
        """Send no entries log at debug level."""
        _LOGGER.debug('No new entries in feed %s', self._url)

    def _update(self):
        """Update the feed and publish new entries in the event bus."""
        import feedparser
        _LOGGER.info('Fetching new data from feed %s', self._url)
        # Pass the previous etag/modified headers so an unchanged feed
        # comes back with an empty entry list.
        self._feed = feedparser.parse(self._url,
                                      etag=None if not self._feed
                                      else self._feed.get('etag'),
                                      modified=None if not self._feed
                                      else self._feed.get('modified'))
        if not self._feed:
            _LOGGER.error('Error fetching feed data from %s', self._url)
        else:
            if self._feed.bozo != 0:
                _LOGGER.error('Error parsing feed %s', self._url)
            # Using etag and modified, if there's no new data available,
            # the entries list will be empty
            elif len(self._feed.entries) > 0:
                _LOGGER.debug('Entries available in feed %s', self._url)
                self._publish_new_entries()
                # entries[0] is treated as the newest entry of the batch
                self._last_entry_timestamp = \
                    self._feed.entries[0].published_parsed
            else:
                self._log_no_entries()

    def _publish_new_entries(self):
        """Publish new entries to the event bus."""
        new_entries = False
        for entry in self._feed.entries:
            # Consider only entries newer then the latest parsed one
            if entry.published_parsed > self._last_entry_timestamp:
                new_entries = True
                entry.update({'feed_url': self._url})
                self._hass.bus.fire(EVENT_FEEDREADER, entry)
        if not new_entries:
            self._log_no_entries()
def setup(hass, config):
    """Setup the feedreader component: one FeedManager per configured URL."""
    feed_urls = config.get(DOMAIN)['urls']
    managers = []
    for feed_url in feed_urls:
        managers.append(FeedManager(feed_url, hass))
    return len(managers) > 0
|
Python
| 0
|
@@ -1,11 +1,24 @@
%22%22%22
+%0ASupport for
RSS/Atom
@@ -26,35 +26,136 @@
feed
- reader for Home Assistant.
+.%0A%0AFor more details about this component, please refer to the documentation at%0Ahttps://home-assistant.io/components/feedreader/%0A
%22%22%22%0A
|
f6efb0ff31ae8d0db5682cd7ad5b0921e3a4e924
|
Bump version for new release.
|
openstack_auth/__init__.py
|
openstack_auth/__init__.py
|
# following PEP 386
__version__ = "1.0.6"
|
Python
| 0
|
@@ -32,11 +32,11 @@
= %221.0.
-6
+7
%22%0A
|
8da7edb1311b73013dcf497c293212df5e0041c7
|
use new nfw potential
|
ophiuchus/potential/oph.py
|
ophiuchus/potential/oph.py
|
# coding: utf-8
# cython: boundscheck=False
# cython: nonecheck=False
# cython: cdivision=True
# cython: wraparound=False
# cython: profile=False
from __future__ import division, print_function
__author__ = "adrn <adrn@astro.columbia.edu>"
# Third-party
from gala.units import galactic
from gala.potential import (CCompositePotential, MiyamotoNagaiPotential,
HernquistPotential, FlattenedNFWPotential)
# Project
from . import WangZhaoBarPotential
class OphiuchusPotential(CCompositePotential):
    r"""
    Four-component Milky Way potential used for modeling the Ophiuchus stream.

    Parameters
    ----------
    units : iterable
        Unique list of non-reducable units that specify (at minimum) the
        length, mass, time, and angle units.
    spheroid : dict
        Dictionary of parameter values for a :class:`gala.potential.HernquistPotential`.
    disk : dict
        Dictionary of parameter values for a :class:`gala.potential.MiyamotoNagaiPotential`.
    halo : dict
        Dictionary of parameter values for a :class:`gala.potential.FlattenedNFWPotential`.
    bar : dict
        Dictionary of parameter values for a :class:`ophiuchus.potential.WangZhaoBarPotential`.
    """

    def __init__(self, units=galactic,
                 spheroid=None, disk=None, halo=None, bar=None):

        defaults = {
            'spheroid': dict(m=0., c=0.1),
            'disk': dict(m=5.E10, a=3, b=0.28),  # similar to Bovy
            'halo': dict(v_c=0.19, r_s=30., q_z=0.9),
            # bar parameters from Wang, Zhao, et al.
            'bar': dict(m=1.8E10 / 1.15, r_s=1.,
                        alpha=0.349065850398, Omega=0.06136272990322247),
        }

        def fill(supplied, name):
            # Use the full default dict when nothing was supplied,
            # otherwise top up any missing keys in place.
            if supplied is None:
                return defaults[name]
            for key, val in defaults[name].items():
                supplied.setdefault(key, val)
            return supplied

        spheroid = fill(spheroid, 'spheroid')
        disk = fill(disk, 'disk')
        halo = fill(halo, 'halo')
        bar = fill(bar, 'bar')

        super(OphiuchusPotential, self).__init__()

        self["spheroid"] = HernquistPotential(units=units, **spheroid)
        self["disk"] = MiyamotoNagaiPotential(units=units, **disk)
        self["halo"] = FlattenedNFWPotential(units=units, **halo)
        self["bar"] = WangZhaoBarPotential(units=units, **bar)
|
Python
| 0.000001
|
@@ -403,25 +403,16 @@
ential,
-Flattened
NFWPoten
@@ -417,16 +417,16 @@
ential)%0A
+
%0A# Proje
@@ -1071,25 +1071,16 @@
tential.
-Flattened
NFWPoten
@@ -1474,11 +1474,9 @@
0.,
-q_z
+c
=0.9
@@ -1713,32 +1713,33 @@
for k,
+
v in default_dis
@@ -1909,32 +1909,33 @@
for k,
+
v in default_sph
@@ -2105,32 +2105,33 @@
for k,
+
v in default_hal
@@ -2294,16 +2294,17 @@
for k,
+
v in def
@@ -2419,16 +2419,17 @@
tential,
+
self).__
@@ -2576,16 +2576,16 @@
**disk)%0A
+
@@ -2603,17 +2603,8 @@
%5D =
-Flattened
NFWP
|
a66ff915dcaee3e1db370196a3b40b612eb43d19
|
Add support to template_name_suffix in get_template_names methods
|
opps/views/generic/list.py
|
opps/views/generic/list.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.views.generic.list import ListView as DjangoListView
from django.contrib.sites.models import get_current_site
from django.utils import timezone
from django.conf import settings
from opps.views.generic.base import View
from opps.containers.models import ContainerBox
class ListView(View, DjangoListView):

    def get_template_names(self):
        """Build the candidate template list, most specific first.

        Falls back from channel-parent and channel-specific templates
        (plus '_paginated' variants when a page parameter is present)
        down to the generic '<domain>/list.html'.
        """
        templates = []

        domain_folder = self.get_template_folder()
        if not self.long_slug:
            templates.append('{}/none.html'.format(domain_folder))
            return templates

        list_name = 'list'

        if self.channel:
            # Check layout, change via admin
            if self.channel.layout != u'default':
                list_name = self.channel.layout

            if self.channel.group and self.channel.parent:
                templates.append('{}/{}/{}.html'.format(
                    domain_folder, self.channel.parent.long_slug, list_name))

                # Paginated variants are suppressed for view classes listed
                # in OPPS_PAGINATE_NOT_APP.
                if self.request.GET.get('page') and\
                   self.__class__.__name__ not in\
                   settings.OPPS_PAGINATE_NOT_APP:
                    templates.append('{}/{}/{}_paginated.html'.format(
                        domain_folder, self.channel.parent.long_slug,
                        list_name))

            if self.request.GET.get('page') and\
               self.__class__.__name__ not in settings.OPPS_PAGINATE_NOT_APP:
                templates.append('{}/{}/{}_paginated.html'.format(
                    domain_folder, self.channel.long_slug, list_name))

            templates.append('{}/{}/{}.html'.format(
                domain_folder, self.channel.long_slug, list_name))

        if self.request.GET.get('page') and\
           self.__class__.__name__ not in settings.OPPS_PAGINATE_NOT_APP:
            templates.append('{}/{}_paginated.html'.format(domain_folder,
                                                           list_name))

        templates.append('{}/{}.html'.format(domain_folder, list_name))

        return templates

    def get_queryset(self):
        """Return published containers for the current site and channel.

        On the first (unpaginated) page, containers already shown inside the
        channel's ContainerBoxes are excluded to avoid duplicates.
        """
        self.site = get_current_site(self.request)
        self.long_slug = self.get_long_slug()

        if not self.long_slug:
            return None

        self.set_channel_rules()

        self.articleboxes = ContainerBox.objects.filter(
            channel__long_slug=self.long_slug)

        is_paginated = self.page_kwarg in self.request.GET
        if not is_paginated:
            for box in self.articleboxes:
                self.excluded_ids.update(
                    [a.pk for a in box.ordered_containers()])

        queryset = super(ListView, self).get_queryset()
        filters = {}
        filters['site_domain'] = self.site.domain
        filters['channel_long_slug__in'] = self.channel_long_slug
        filters['date_available__lte'] = timezone.now()
        filters['published'] = True
        if self.channel and self.channel.is_root_node() and not is_paginated:
            filters['show_on_root_channel'] = True
        queryset = queryset.filter(**filters).exclude(pk__in=self.excluded_ids)

        return queryset._clone()
|
Python
| 0
|
@@ -624,16 +624,131 @@
'list'%0A%0A
+ if self.template_name_suffix:%0A list_name = %22%7B%7D%7B%7D%22.format(list_name, self.template_name_suffix)%0A%0A
|
7a99ade694c5844727ca33461dd3ad5271b61f14
|
Improve q_n tests.
|
hic/test/test_flow.py
|
hic/test/test_flow.py
|
# -*- coding: utf-8 -*-
from __future__ import division
import numpy as np
from .. import flow
def test_qn():
    # q_n of a single particle at phi=0 is exactly 1.
    assert flow.qn(2, 0) == 1+0j, \
        'Single-particle q_n.'

    # NOTE(review): np.arange(-pi, pi, 10) has step > range, so it yields
    # only [-pi] -- this is another single-angle case, not a truly
    # isotropic distribution; confirm intent.
    assert np.allclose(flow.qn(3, np.arange(-np.pi, np.pi, 10)), -1+0j), \
        'Isotropic q_n.'
def test_flow_cumulant():
    # placeholder: cumulant tests not implemented yet
    pass
|
Python
| 0.000012
|
@@ -109,21 +109,44 @@
_qn(
-):%0A assert
+seed=1248):%0A # q_n(0) = 1%0A q =
flo
@@ -155,16 +155,29 @@
qn(2, 0)
+%0A assert q
== 1+0j
@@ -193,9 +193,19 @@
'
-S
+Incorrect s
ingl
@@ -222,111 +222,628 @@
q_n
-.'%0A%0A assert np.allclose(flow.qn(3, np.arange(-np.pi, np.pi, 10)), -1+0j), %5C%0A 'Isotropic q_n.'
+ (%7B%7D != 1).'.format(q)%0A%0A # q_3(uniform phi) = -1%0A q = flow.qn(3, np.arange(-np.pi, np.pi, 10))%0A assert abs(q+1) %3C 1e-12, %5C%0A 'Incorrect isotropic q_n (%7B%7D != -1).'.format(q)%0A%0A # specific example%0A np.random.seed(seed)%0A phi = 2*np.pi*(np.random.rand(10) - .5)%0A q = np.array(%5Bflow.qn(n, phi) for n in range(2, 5)%5D)%0A correct_q = np.array((%0A -0.23701789876111995+1.9307467860155012j,%0A 0.7294873796006498+0.4925428484240118j,%0A 2.0248053489550459-0.23452484252744438j%0A ))%0A assert np.allclose(q, correct_q), %5C%0A 'Incorrect random q_n.%5Cn%7B%7D != %7B%7D'.format(q, correct_q)
%0A%0A%0Ad
|
7c10feeed640f4d1a66bb3207ade980733409ad9
|
improve unit test
|
witica/test_source.py
|
witica/test_source.py
|
# coding=utf-8
import os
import unittest
import pkg_resources
from witica.source import Source, SourceItemList
from witica.log import *
from witica.metadata import extractor
class TestSourceItemList(unittest.TestCase):
    """Tests for SourceItemList pattern matching and source item loading."""

    def setUp(self):
        Logger.start(verbose=False)
        # fixture files bundled with the package
        self.resource_path = pkg_resources.resource_filename("witica","test/files")
        source_config = {}
        source_config["version"] = 1
        source_config["path"] = self.resource_path
        self.source = FolderSource("test", source_config)
        extractor.register_default_extractors()

    def tearDown(self):
        extractor.registered_extractors = []
        Logger.stop()

    def test_match(self):
        # '*' matches within one path segment, '**' spans segments,
        # '?' matches a single character.
        self.assertTrue(SourceItemList.match("test/*", "test/abc"))
        self.assertFalse(SourceItemList.match("test/*", "test/abc/def"))
        self.assertTrue(SourceItemList.match("test/**", "test/abc/def"))
        self.assertTrue(SourceItemList.match("test/*/def", "test/abc/def"))
        self.assertTrue(SourceItemList.match("test/**/de?", "test/abc/def"))
        self.assertFalse(SourceItemList.match("test/**/def", "test/abc/ghi"))

    def test_count_items(self):
        # the fixture folder is expected to contain exactly 9 source items
        self.assertEqual(9, len(self.source.items))
class FolderSource(Source):
    """Minimal Source implementation backed by a plain local folder."""

    def __init__(self, source_id, config, prefix = ""):
        super(FolderSource, self).__init__(source_id, config, prefix)
        self.source_dir = config["path"]
        self.state = {"cursor" : ""}

        if not(os.path.exists(self.source_dir)):
            raise IOError("Source folder '" + self.source_dir + "' does not exist.")

    def update_cache(self):
        # local folders have no remote cache to refresh
        pass

    def update_change_status(self):
        pass

    def fetch_changes(self):
        pass

    def get_abs_meta_filename(self, local_filename):
        """Absolute path of *local_filename* inside the source's meta folder."""
        return self.get_absolute_path(os.path.join('meta' + os.sep + local_filename))

    def get_absolute_path(self, localpath):
        """Resolve *localpath* against the source directory."""
        return os.path.abspath(os.path.join(self.source_dir, localpath))
|
Python
| 0.000002
|
@@ -595,16 +595,52 @@
rs = %5B%5D%0A
+%09%09pkg_resources.cleanup_resources()%0A
%09%09Logger
@@ -644,24 +644,25 @@
ger.stop()%0A%0A
+%0A
%09def test_ma
@@ -1158,16 +1158,100 @@
tems))%0A%0A
+%09def test_item_exists(self):%0A%09%09self.assertTrue(self.source.items%5B%22simple%22%5D.exists)%0A%0A
%0Aclass F
|
1b7e6d41a6832ef7a8f9dafe0cd8580356f8e9da
|
check regex match before access in flickr module
|
mygpo/data/flickr.py
|
mygpo/data/flickr.py
|
#
# This file is part of gpodder.net.
#
# my.gpodder.org is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# my.gpodder.org is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public
# License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with my.gpodder.org. If not, see <http://www.gnu.org/licenses/>.
#
import re
import urllib
from django.conf import settings
from mygpo.core.json import json
def get_photo_sizes(photo_id):
    """Query the Flickr API for the available sizes of *photo_id*.

    Returns the list of size dicts, or [] when the response cannot be
    parsed or contains no size information.
    """
    api_key = settings.FLICKR_API_KEY

    request = 'http://api.flickr.com/services/rest/?method=flickr.photos.getSizes&api_key=%s&photo_id=%s&format=json' % (api_key, photo_id)
    resp = urllib.urlopen(request).read()

    # The response is wrapped in a JSONP-style callback; unwrap it first.
    extract_re = '^jsonFlickrApi\((.*)\)$'
    m = re.match(extract_re, resp)
    # BUG FIX: re.match returns None for an unexpected payload; the old
    # code called .group(1) unconditionally and raised AttributeError.
    if not m:
        return []

    resp_obj = json.loads(m.group(1))

    try:
        return resp_obj['sizes']['size']
    except KeyError:
        return []
def get_photo_id(url):
    """Extract the photo id from a Flickr image URL.

    Returns None when the URL does not look like a Flickr photo URL
    (the old code crashed with AttributeError on a failed match).
    """
    photo_id_re = 'http://.*flickr.com/[^/]+/([^_]+)_.*'
    m = re.match(photo_id_re, url)
    return m.group(1) if m else None
def is_flickr_image(url):
    """Return a match object when *url* points at a Flickr-hosted image, else None."""
    image_re = 'flickr\.com.*\.(jpg|jpeg|png|gif)'
    return re.search(image_re, url)
def get_display_photo(url, label='Medium'):
    """Resolve *url* to the source URL of the size named *label*.

    Falls back to the original URL when no matching size is found.
    """
    photo_id = get_photo_id(url)
    sizes = get_photo_sizes(photo_id)
    matching = (size['source'] for size in sizes if size['label'] == label)
    return next(matching, url)
|
Python
| 0
|
@@ -1090,28 +1090,17 @@
)$'%0A
-resp_content
+m
= re.ma
@@ -1120,25 +1120,48 @@
e, resp)
-.group(1)
+%0A if not m:%0A return %5B%5D
%0A%0A re
@@ -1184,20 +1184,18 @@
ads(
-resp_content
+m.group(1)
)%0A%0A
|
de257f7b1703d0df3da24d475c52bfb4292a9d8b
|
correctly sort _futureevents
|
myhdl/_Simulation.py
|
myhdl/_Simulation.py
|
# This file is part of the myhdl library, a Python package for using
# Python as a Hardware Description Language.
#
# Copyright (C) 2003-2008 Jan Decaluwe
#
# The myhdl library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 2.1 of the
# License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
""" Module that provides the Simulation class """
from __future__ import absolute_import
from __future__ import print_function

import os
import sys
from operator import itemgetter
from types import GeneratorType
from warnings import warn

from myhdl import Cosimulation, StopSimulation, _SuspendSimulation
from myhdl import _simulator, SimulationError
from myhdl._simulator import _signals, _siglist, _futureEvents
from myhdl._Waiter import _Waiter, _inferWaiter, _SignalWaiter, _SignalTupleWaiter
from myhdl._util import _flatten, _printExcInfo
from myhdl._instance import _Instantiator
from myhdl._ShadowSignal import _ShadowSignal
# shorthand used by Simulation.run() to queue a (time, event) tuple
schedule = _futureEvents.append


class _error:
    pass
# NOTE(review): "Inappriopriate" is a typo for "Inappropriate" in this
# user-facing message; left as-is since callers/tests may match on it.
_error.ArgType = "Inappriopriate argument type"
_error.MultipleCosim = "Only a single cosimulator argument allowed"
_error.DuplicatedArg = "Duplicated argument"
class Simulation(object):

    """ Simulation class.

    Methods:
    run -- run a simulation for some duration

    """

    def __init__(self, *args):
        """ Construct a simulation object.

        *args -- list of arguments. Each argument is a generator or
                 a nested sequence of generators.

        """
        _simulator._time = 0
        arglist = _flatten(*args)
        self._waiters, self._cosim = _makeWaiters(arglist)
        if not self._cosim and _simulator._cosim:
            warn("Cosimulation not registered as Simulation argument")
        self._finished = False
        del _futureEvents[:]
        del _siglist[:]

    def _finalize(self):
        """Release cosimulation/tracing resources and reset signal state."""
        cosim = self._cosim
        if cosim:
            _simulator._cosim = 0
            os.close(cosim._rt)
            os.close(cosim._wf)
            os.waitpid(cosim._child_pid, 0)
        if _simulator._tracing:
            _simulator._tracing = 0
            _simulator._tf.close()
        # clean up for potential new run with same signals
        for s in _signals:
            s._clear()
        self._finished = True

    def runc(self, duration=0, quiet=0):
        simrunc.run(sim=self, duration=duration, quiet=quiet)

    def run(self, duration=None, quiet=0):
        """ Run the simulation for some duration.

        duration -- specified simulation duration (default: forever)
        quiet -- don't print StopSimulation messages (default: off)

        """

        # If the simulation is already finished, raise StopSimulation immediately
        # From this point it will propagate to the caller, that can catch it.
        if self._finished:
            raise StopSimulation("Simulation has already finished")
        waiters = self._waiters
        maxTime = None
        if duration:
            stop = _Waiter(None)
            stop.hasRun = 1
            maxTime = _simulator._time + duration
            schedule((maxTime, stop))
        cosim = self._cosim
        t = _simulator._time
        actives = {}
        tracing = _simulator._tracing
        tracefile = _simulator._tf
        exc = []
        _pop = waiters.pop
        _append = waiters.append
        _extend = waiters.extend

        while 1:
            try:

                for s in _siglist:
                    _extend(s._update())
                del _siglist[:]

                while waiters:
                    waiter = _pop()
                    try:
                        waiter.next(waiters, actives, exc)
                    except StopIteration:
                        continue

                if cosim:
                    cosim._get()
                    if _siglist or cosim._hasChange:
                        cosim._put(t)
                        continue
                elif _siglist:
                    continue

                if actives:
                    for wl in actives.values():
                        wl.purge()
                    actives = {}

                # at this point it is safe to potentially suspend a simulation
                if exc:
                    raise exc[0]

                # future events
                if _futureEvents:
                    if t == maxTime:
                        raise _SuspendSimulation(
                            "Simulated %s timesteps" % duration)
                    # BUG FIX: sort by scheduled time only. Sorting the raw
                    # (time, event) tuples falls through to comparing the
                    # event objects when two events share a time, which is
                    # not a supported comparison.
                    _futureEvents.sort(key=itemgetter(0))
                    t = _simulator._time = _futureEvents[0][0]
                    if tracing:
                        print("#%s" % t, file=tracefile)
                    if cosim:
                        cosim._put(t)
                    while _futureEvents:
                        newt, event = _futureEvents[0]
                        if newt == t:
                            if isinstance(event, _Waiter):
                                _append(event)
                            else:
                                _extend(event.apply())
                            del _futureEvents[0]
                        else:
                            break
                else:
                    raise StopSimulation("No more events")

            except _SuspendSimulation:
                if not quiet:
                    _printExcInfo()
                if tracing:
                    tracefile.flush()
                return 1

            except StopSimulation:
                if not quiet:
                    _printExcInfo()
                self._finalize()
                self._finished = True
                return 0

            except Exception as e:
                if tracing:
                    tracefile.flush()
                # if the exception came from a yield, make sure we can resume
                if exc and e is exc[0]:
                    pass  # don't finalize
                else:
                    self._finalize()
                # now reraise the exepction
                raise
def _makeWaiters(arglist):
    """Wrap each simulation argument in the appropriate _Waiter.

    Returns (waiters, cosim), where cosim is the single Cosimulation
    instance found or None. Raises SimulationError on unsupported
    argument types, duplicated arguments, or multiple cosimulators.
    """
    waiters = []
    ids = set()
    cosim = None
    for arg in arglist:
        if isinstance(arg, GeneratorType):
            waiters.append(_inferWaiter(arg))
        elif isinstance(arg, _Instantiator):
            waiters.append(arg.waiter)
        elif isinstance(arg, Cosimulation):
            if cosim is not None:
                raise SimulationError(_error.MultipleCosim)
            cosim = arg
            waiters.append(_SignalTupleWaiter(cosim._waiter()))
        elif isinstance(arg, _Waiter):
            waiters.append(arg)
        elif arg == True:
            # NOTE(review): a bare True is silently skipped -- presumably a
            # placeholder convention in instance lists; confirm.
            pass
        else:
            raise SimulationError(_error.ArgType, str(type(arg)))
        if id(arg) in ids:
            raise SimulationError(_error.DuplicatedArg)
        ids.add(id(arg))
    # add waiters for shadow signals
    for sig in _signals:
        if hasattr(sig, '_waiter'):
            waiters.append(sig._waiter)
    return waiters, cosim
|
Python
| 0.999998
|
@@ -1040,16 +1040,48 @@
port os%0A
+from operator import itemgetter%0A
from war
@@ -5156,16 +5156,33 @@
ts.sort(
+key=itemgetter(0)
)%0A
|
963481958af78655e02a5d7d01e156f9b6ee506e
|
Correct HStoreField code
|
hs_core/hydro_realtime_signal_processor.py
|
hs_core/hydro_realtime_signal_processor.py
|
from django.db import models
from haystack.signals import RealtimeSignalProcessor
from haystack.exceptions import NotHandled
import logging
import types
from haystack.query import SearchQuerySet
from haystack.utils import get_identifier
logger = logging.getLogger(__name__)
class HydroRealtimeSignalProcessor(RealtimeSignalProcessor):
    """
    Customized for the fact that all indexed resources are subclasses of BaseResource.

    Notes:
    1. RealtimeSignalProcessor already plumbs in all class updates. We might want to be more specific.
    2. The class sent to this is a subclass of BaseResource, or another class.
    3. Thus, we want to capture cases in which it is an appropriate instance, and respond.
    """

    def handle_save(self, sender, instance, **kwargs):
        """
        Given an individual model instance, determine which backends the
        update should be sent to & update the object on those backends.
        """
        from hs_core.models import BaseResource, CoreMetaData, AbstractMetaDataElement
        from hs_access_control.models import ResourceAccess

        if isinstance(instance, BaseResource):
            if hasattr(instance, 'raccess') and hasattr(instance, 'metadata'):
                # work around for failure of super(BaseResource, instance) to work properly.
                # this always succeeds because this is a post-save object action.
                newbase = BaseResource.objects.get(pk=instance.pk)
                newsender = BaseResource
                using_backends = self.connection_router.for_write(instance=newbase)
                for using in using_backends:
                    # if object is public/discoverable or becoming public/discoverable, index it
                    # test whether the object should be exposed.
                    if instance.show_in_discover:
                        try:
                            index = self.connections[using].get_unified_index().get_index(newsender)
                            index.update_object(newbase, using=using)
                        except NotHandled:
                            logger.exception("Failure: changes to %s with short_id %s not added to Solr Index.",
                                             str(type(instance)), newbase.short_id)
                    # if object is private or becoming private, delete from index
                    else:  # not to be shown in discover
                        try:
                            index = self.connections[using].get_unified_index().get_index(newsender)
                            index.remove_object(newbase, using=using)
                        except NotHandled:
                            logger.exception("Failure: delete of %s with short_id %s failed.",
                                             str(type(instance)), newbase.short_id)

        elif isinstance(instance, ResourceAccess):
            # automatically a BaseResource; just call the routine on it.
            try:
                newbase = instance.resource
                self.handle_save(BaseResource, newbase)
            except Exception as e:
                logger.exception("{} exception: {}".format(type(instance), e))

        elif isinstance(instance, CoreMetaData):
            try:
                newbase = instance.resource
                self.handle_save(BaseResource, newbase)
            # BUG FIX: this handler logged `e` but the except clause did not
            # bind it (`except Exception:`), raising NameError in the error
            # path itself.
            except Exception as e:
                logger.exception("{} exception: {}".format(type(instance), e))

        elif isinstance(instance, AbstractMetaDataElement):
            try:
                # resolve the BaseResource corresponding to the metadata element.
                newbase = instance.metadata.resource
                self.handle_save(BaseResource, newbase)
            except Exception as e:
                logger.exception("{} exception: {}".format(type(instance), e))

        else:  # could be extended metadata element
            try:
                newbase = BaseResource.objects.get(extra_metadata=instance)
                self.handle_save(BaseResource, newbase)
            except Exception as e:
                logger.exception("{} exception: {}".format(type(instance), e))

    def handle_delete(self, sender, instance, **kwargs):
        """
        Ignore delete events as this is accomplished separately.
        """
        pass
|
Python
| 0.000041
|
@@ -1097,16 +1097,80 @@
eAccess%0A
+ from django.contrib.postgres.fields import HStoreField%0A%0A
%0A
@@ -3945,49 +3945,45 @@
el
-se: # could be extended metadata element
+if isinstance(instance, HStoreField):
%0A
|
81d01175a7403b3e627738056ef9436e8172e51e
|
Enforce python 3.6
|
shared_infra/lambdas/common/setup.py
|
shared_infra/lambdas/common/setup.py
|
import os
from setuptools import find_packages, setup
def local_file(name):
    """Return the path of *name* next to this setup.py, relative to the cwd."""
    here = os.path.dirname(__file__)
    return os.path.relpath(os.path.join(here, name))
# Location of the package sources, relative to this setup.py.
SOURCE = local_file('src')

setup(
    name='wellcome_lambda_utils',
    packages=find_packages(SOURCE),
    package_dir={'': SOURCE},
    version='1.0.0',
    install_requires=['boto'],
    # Require Python 3.6+; the previous '>=3' also admitted 3.0-3.5.
    python_requires='>=3.6',
    description='Common lib for lambdas',
    author='Wellcome digital platform',
    author_email='wellcomedigitalplatform@wellcome.ac.uk',
    url='https://github.com/wellcometrust/platform',
    keywords=['lambda', 'utils'],
    classifiers=[],
)
|
Python
| 0.000187
|
@@ -359,16 +359,18 @@
res='%3E=3
+.6
',%0A d
|
1eca1b11492b009990900a0e35dcf223ca22cffc
|
create .hokusai-tmp if not exists
|
hokusai/lib/config.py
|
hokusai/lib/config.py
|
import os
import sys
from collections import OrderedDict
import yaml
from packaging.specifiers import SpecifierSet, InvalidSpecifier
from packaging.version import Version, InvalidVersion
from hokusai import CWD
from hokusai.lib.constants import YAML_HEADER
from hokusai.lib.exceptions import HokusaiError
from hokusai.version import VERSION
HOKUSAI_ENV_VAR_PREFIX = 'HOKUSAI_'
HOKUSAI_CONFIG_DIR = 'hokusai'
HOKUSAI_TMP_DIR = os.path.join(CWD, '.hokusai-tmp')
HOKUSAI_CONFIG_FILE = os.path.join(CWD, HOKUSAI_CONFIG_DIR, 'config.yml')
BUILD_YAML_FILE = 'build'
TEST_YML_FILE = 'test'
DEVELOPMENT_YML_FILE = 'development'
class HokusaiConfig(object):
def create(self, project_name):
config = OrderedDict([
('project-name', project_name)
])
with open(HOKUSAI_CONFIG_FILE, 'w') as f:
payload = YAML_HEADER + yaml.safe_dump(config, default_flow_style=False)
f.write(payload)
def check(self):
if not self._check_config_present(HOKUSAI_CONFIG_FILE):
raise HokusaiError("Hokusai is not set up for this project - run 'hokusai setup'")
if not self._check_required_version(self.hokusai_required_version, VERSION):
raise HokusaiError("Hokusai's current version %s does not satisfy this project's version requirements '%s'. Aborting."
% (VERSION, self.hokusai_required_version))
def _check_config_present(self, config_file):
return os.path.isfile(config_file)
def _check_required_version(self, required_version, target_version):
if required_version is None:
return True
try:
match_versions = SpecifierSet(required_version)
except InvalidSpecifier:
raise HokusaiError("Could not parse '%s' as a valid version specifier. See https://www.python.org/dev/peps/pep-0440/#version-specifiers" % required_version)
try:
compare_version = Version(target_version)
except InvalidVersion:
raise HokusaiError("Could not parse '%s' as a valid version identifier. See https://www.python.org/dev/peps/pep-0440/#version-scheme" % target_version)
return compare_version in match_versions
def get(self, key, default=None, use_env=False, _type=str):
value = self._config_value_for(key, _type)
if value is not None:
return value
if use_env:
value = self._env_value_for(key, _type)
if value is not None:
return value
return default
def _env_value_for(self, key, _type):
env_var = HOKUSAI_ENV_VAR_PREFIX + key.upper().replace('-', '_')
val = os.environ.get(env_var)
if val is None:
return val
if _type == list:
try:
return _type(val.split(','))
except ValueError:
raise HokusaiError("Environment variable %s could not be split to %s" % (env_var, _type))
try:
return _type(val)
except ValueError:
raise HokusaiError("Environment variable %s could not be cast to %s" % (env_var, _type))
def _config_value_for(self, key, _type):
try:
with open(HOKUSAI_CONFIG_FILE, 'r') as config_file:
config_struct = yaml.safe_load(config_file.read())
try:
val = config_struct[key]
except KeyError:
return None
if not isinstance(val, _type):
raise HokusaiError("Config key %s is not of %s" % (key, _type))
return val
except IOError:
return None
@property
def project_name(self):
project = self.get('project-name')
if project is None:
raise HokusaiError("Unconfigured 'project-name'! Plz check ./hokusai/config.yml")
return project
@property
def hokusai_required_version(self):
return self.get('hokusai-required-version')
@property
def pre_deploy(self):
return self.get('pre-deploy')
@property
def post_deploy(self):
return self.get('post-deploy')
@property
def git_remote(self):
return self.get('git-remote')
@property
def pre_build(self):
return self.get('pre-build')
@property
def post_build(self):
return self.get('post-build')
@property
def template_config_files(self):
return self.get('template-config-files', _type=list)
@property
def run_tty(self):
return self.get('run-tty', default=False, use_env=True, _type=bool)
@property
def run_constraints(self):
return self.get('run-constraints', default=[], use_env=True, _type=list)
@property
def follow_logs(self):
return self.get('follow-logs', default=False, use_env=True, _type=bool)
@property
def tail_logs(self):
return self.get('tail-logs', use_env=True, _type=int)
@property
def always_verbose(self):
return self.get('always-verbose', default=False, use_env=True, _type=bool)
config = HokusaiConfig()
|
Python
| 0.000001
|
@@ -648,16 +648,114 @@
bject):%0A
+ def __init__(self):%0A if not os.path.isdir(HOKUSAI_TMP_DIR):%0A os.mkdir(HOKUSAI_TMP_DIR)%0A%0A
def cr
|
3abe25d2272e2a0111511b68407da0ef3c53f59e
|
Use wizard settings during samba provision
|
nazs/samba/module.py
|
nazs/samba/module.py
|
from nazs import module
from nazs.commands import run
from nazs.sudo import root
import os
import logging
logger = logging.getLogger(__name__)
class Samba(module.Module):
"""
Samba 4 module, it deploys samba AD and file server
"""
ETC_FILE = '/etc/samba/smb.conf'
install_wizard = 'samba:install'
def install(self):
"""
Installation procedure, it writes basic smb.conf and uses samba-tool to
provision the domain
"""
with root():
if os.path.exists(self.ETC_FILE):
os.remove(self.ETC_FILE)
run("samba-tool domain provision "
" --domain='zentyal' "
" --workgroup='zentyal' "
"--realm='zentyal.lan' "
"--use-xattrs=yes "
"--use-rfc2307 "
"--server-role='domain controller' "
"--use-ntvfs "
"--adminpass='foobar1!'")
|
Python
| 0
|
@@ -101,16 +101,52 @@
ogging%0A%0A
+from .models import DomainSettings%0A%0A
%0Alogger
@@ -501,32 +501,80 @@
ain%0A %22%22%22%0A
+ domain_settings = DomainSettings.get()%0A%0A
with roo
@@ -666,16 +666,65 @@
_FILE)%0A%0A
+ if domain_settings.mode == 'ad':%0A
@@ -774,24 +774,28 @@
+
%22 --domain='
@@ -817,24 +817,28 @@
+
+
%22 --workgrou
@@ -843,32 +843,36 @@
oup='zentyal' %22%0A
+
@@ -908,24 +908,28 @@
+
%22--use-xattr
@@ -948,24 +948,28 @@
+
+
%22--use-rfc23
@@ -973,16 +973,20 @@
c2307 %22%0A
+
@@ -1042,24 +1042,28 @@
+
%22--use-ntvfs
@@ -1081,16 +1081,20 @@
+
+
%22--admin
@@ -1111,8 +1111,104 @@
ar1!'%22)%0A
+%0A elif domain_settings.mode == 'member':%0A # TODO%0A pass%0A
|
2383497f25e400aa27c600d3a30526d118e2a6dc
|
fix scan, follow new scan chain
|
host/test_register.py
|
host/test_register.py
|
from scan.scan import ScanBase
class TestRegisters(ScanBase):
def __init__(self, config_file, definition_file = None, bit_file = None, device = None, scan_identifier = "test_register", scan_data_path = None):
super(TestRegisters, self).__init__(config_file = config_file, definition_file = definition_file, bit_file = bit_file, device = device, scan_identifier = scan_identifier, scan_data_path = scan_data_path)
def start(self, configure = True):
super(TestRegisters, self).start(configure)
number_of_errors = scan.register_utils.test_global_register()
print 'Global Register Test: Found', number_of_errors, "error(s)"
number_of_errors = scan.register_utils.test_pixel_register()
print 'Pixel Register Test: Found', number_of_errors, "error(s)"
sn = scan.register_utils.read_chip_sn()
print "Chip S/N:", sn
print 'Reset SRAM FIFO...'
scan.readout_utils.reset_sram_fifo()
print 'Done!'
if __name__ == "__main__":
import configuration
scan = TestRegisters(config_file = configuration.config_file, bit_file = configuration.bit_file, scan_data_path = configuration.scan_data_path)
scan.start()
|
Python
| 0
|
@@ -439,100 +439,28 @@
ef s
-tart
+can
(self,
-configure = True):%0D%0A super(TestRegisters, self).start(configure)%0D%0A
+**kwargs):
%0D%0A
@@ -477,35 +477,35 @@
er_of_errors = s
-can
+elf
.register_utils.
@@ -637,27 +637,27 @@
f_errors = s
-can
+elf
.register_ut
@@ -781,19 +781,19 @@
sn = s
-can
+elf
.registe
@@ -850,123 +850,8 @@
sn%0D
-%0A %0D%0A print 'Reset SRAM FIFO...'%0D%0A scan.readout_utils.reset_sram_fifo()%0D%0A print 'Done!'%0D
%0A%0D%0Ai
@@ -1066,12 +1066,27 @@
an.start()%0D%0A
+ scan.stop()
|
576a4b88637cf3bb9231ee3d2cbce75c547d934a
|
update error message to be within 80 chars
|
numba/core/funcdesc.py
|
numba/core/funcdesc.py
|
"""
Function descriptors.
"""
from collections import defaultdict
import sys
from numba.core import types, itanium_mangler
from numba.core.utils import _dynamic_modname, _dynamic_module
def default_mangler(name, argtypes):
return itanium_mangler.mangle(name, argtypes)
def qualifying_prefix(modname, qualname):
"""
Returns a new string that is used for the first half of the mangled name.
"""
# XXX choose a different convention for object mode
return '{}.{}'.format(modname, qualname) if modname else qualname
class FunctionDescriptor(object):
"""
Base class for function descriptors: an object used to carry
useful metadata about a natively callable function.
Note that while `FunctionIdentity` denotes a Python function
which is being concretely compiled by Numba, `FunctionDescriptor`
may be more "abstract": e.g. a function decorated with `@generated_jit`.
"""
__slots__ = ('native', 'modname', 'qualname', 'doc', 'typemap',
'calltypes', 'args', 'kws', 'restype', 'argtypes',
'mangled_name', 'unique_name', 'env_name', 'global_dict',
'inline', 'noalias')
def __init__(self, native, modname, qualname, unique_name, doc,
typemap, restype, calltypes, args, kws, mangler=None,
argtypes=None, inline=False, noalias=False, env_name=None,
global_dict=None):
self.native = native
self.modname = modname
self.global_dict = global_dict
self.qualname = qualname
self.unique_name = unique_name
self.doc = doc
# XXX typemap and calltypes should be on the compile result,
# not the FunctionDescriptor
self.typemap = typemap
self.calltypes = calltypes
self.args = args
self.kws = kws
self.restype = restype
# Argument types
if argtypes is not None:
assert isinstance(argtypes, tuple), argtypes
self.argtypes = argtypes
else:
# Get argument types from the type inference result
# (note the "arg.FOO" convention as used in typeinfer
self.argtypes = tuple(self.typemap['arg.' + a] for a in args)
mangler = default_mangler if mangler is None else mangler
# The mangled name *must* be unique, else the wrong function can
# be chosen at link time.
qualprefix = qualifying_prefix(self.modname, self.unique_name)
self.mangled_name = mangler(qualprefix, self.argtypes)
if env_name is None:
env_name = mangler(".NumbaEnv.{}".format(qualprefix),
self.argtypes)
self.env_name = env_name
self.inline = inline
self.noalias = noalias
def lookup_globals(self):
"""
Return the global dictionary of the function.
It may not match the Module's globals if the function is created
dynamically (i.e. exec)
"""
return self.global_dict or self.lookup_module().__dict__
def lookup_module(self):
"""
Return the module in which this function is supposed to exist.
This may be a dummy module if the function was dynamically
generated. Raise exception if the module can't be found.
"""
if self.modname == _dynamic_modname:
return _dynamic_module
else:
try:
return sys.modules[self.modname]
except:
raise ModuleNotFoundError(f"can't compile {self.qualname}: import of module {self.modname} failed")
def lookup_function(self):
"""
Return the original function object described by this object.
"""
return getattr(self.lookup_module(), self.qualname)
@property
def llvm_func_name(self):
"""
The LLVM-registered name for the raw function.
"""
return self.mangled_name
# XXX refactor this
@property
def llvm_cpython_wrapper_name(self):
"""
The LLVM-registered name for a CPython-compatible wrapper of the
raw function (i.e. a PyCFunctionWithKeywords).
"""
return itanium_mangler.prepend_namespace(self.mangled_name,
ns='cpython')
@property
def llvm_cfunc_wrapper_name(self):
"""
The LLVM-registered name for a C-compatible wrapper of the
raw function.
"""
return 'cfunc.' + self.mangled_name
def __repr__(self):
return "<function descriptor %r>" % (self.unique_name)
@classmethod
def _get_function_info(cls, func_ir):
"""
Returns
-------
qualname, unique_name, modname, doc, args, kws, globals
``unique_name`` must be a unique name.
"""
func = func_ir.func_id.func
qualname = func_ir.func_id.func_qualname
# XXX to func_id
modname = func.__module__
doc = func.__doc__ or ''
args = tuple(func_ir.arg_names)
kws = () # TODO
global_dict = None
if modname is None:
# Dynamically generated function.
modname = _dynamic_modname
# Retain a reference to the dictionary of the function.
# This disables caching, serialization and pickling.
global_dict = func_ir.func_id.func.__globals__
unique_name = func_ir.func_id.unique_name
return qualname, unique_name, modname, doc, args, kws, global_dict
@classmethod
def _from_python_function(cls, func_ir, typemap, restype, calltypes,
native, mangler=None, inline=False, noalias=False):
(qualname, unique_name, modname, doc, args, kws, global_dict,
)= cls._get_function_info(func_ir)
self = cls(native, modname, qualname, unique_name, doc,
typemap, restype, calltypes,
args, kws, mangler=mangler, inline=inline, noalias=noalias,
global_dict=global_dict)
return self
class PythonFunctionDescriptor(FunctionDescriptor):
"""
A FunctionDescriptor subclass for Numba-compiled functions.
"""
__slots__ = ()
@classmethod
def from_specialized_function(cls, func_ir, typemap, restype, calltypes,
mangler, inline, noalias):
"""
Build a FunctionDescriptor for a given specialization of a Python
function (in nopython mode).
"""
return cls._from_python_function(func_ir, typemap, restype, calltypes,
native=True, mangler=mangler,
inline=inline, noalias=noalias)
@classmethod
def from_object_mode_function(cls, func_ir):
"""
Build a FunctionDescriptor for an object mode variant of a Python
function.
"""
typemap = defaultdict(lambda: types.pyobject)
calltypes = typemap.copy()
restype = types.pyobject
return cls._from_python_function(func_ir, typemap, restype, calltypes,
native=False)
class ExternalFunctionDescriptor(FunctionDescriptor):
"""
A FunctionDescriptor subclass for opaque external functions
(e.g. raw C functions).
"""
__slots__ = ()
def __init__(self, name, restype, argtypes):
args = ["arg%d" % i for i in range(len(argtypes))]
super(ExternalFunctionDescriptor, self).__init__(native=True,
modname=None, qualname=name, unique_name=name, doc='',
typemap=None, restype=restype, calltypes=None,
args=args, kws=None, mangler=lambda a, x: a,
argtypes=argtypes)
|
Python
| 0
|
@@ -3535,16 +3535,37 @@
ndError(
+%0A
f%22can't
@@ -3589,16 +3589,40 @@
lname%7D:
+%22%0A f%22
import o
|
63ed4199e5cb3f8eb9a6b294ac8c6df12f9b5f56
|
Add last_request function to httprequest module
|
httpretty/__init__.py
|
httpretty/__init__.py
|
# #!/usr/bin/env python
# -*- coding: utf-8 -*-
# <HTTPretty - HTTP client mock for Python>
# Copyright (C) <2011-2013> Gabriel Falcão <gabriel@nacaolivre.org>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
from __future__ import unicode_literals
__version__ = version = '0.6.0'
import sys
from .core import httpretty, httprettified
from .errors import HTTPrettyError
from .core import URIInfo
HTTPretty = httpretty
activate = httprettified
SELF = sys.modules[__name__]
for attr in [name.decode() for name in httpretty.METHODS] + ['register_uri', 'enable', 'disable', 'is_enabled', 'Response']:
setattr(SELF, attr, getattr(httpretty, attr))
|
Python
| 0.000002
|
@@ -1661,8 +1661,63 @@
attr))%0A
+%0Adef last_request():%0A return httpretty.last_request%0A
|
2e4a934203b4d736a4180a970cacca508400ea7e
|
Update runcrons Command() to timezone.now()
|
django_cron/management/commands/runcrons.py
|
django_cron/management/commands/runcrons.py
|
from django.core.management.base import BaseCommand
from django.conf import settings
from django.core.cache import cache
from django_cron import CronJobManager
from datetime import datetime
from optparse import make_option
DEFAULT_LOCK_TIME = 15*60
def get_class( kls ):
"""TODO: move to django-common app.
Converts a string to a class. Courtesy: http://stackoverflow.com/questions/452969/does-python-have-an-equivalent-to-java-class-forname/452981#452981"""
parts = kls.split('.')
module = ".".join(parts[:-1])
m = __import__( module )
for comp in parts[1:]:
m = getattr(m, comp)
return m
CRONS_TO_RUN = map(lambda x: get_class(x), settings.CRON_CLASSES)
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--force', action='store_true', help='Force cron runs'),
)
def handle(self, *args, **options):
for cron_class in CRONS_TO_RUN:
if not cache.get(cron_class.__name__):
instance = cron_class()
timeout = DEFAULT_LOCK_TIME
try:
timeout = settings.DJANGO_CRON_LOCK_TIME
except:
pass
cache.set(cron_class.__name__, datetime.now(), timeout)
CronJobManager.run(instance, options['force'])
cache.delete(cron_class.__name__)
else:
print "%s failed: lock has been found. Other cron started at %s" % (cron_class.__name__, cache.get(cron_class.__name__))
|
Python
| 0.000001
|
@@ -114,16 +114,49 @@
t cache%0A
+from django.utils import timezone
%0Afrom dj
@@ -1286,23 +1286,23 @@
name__,
-datetim
+timezon
e.now(),
|
abbe9b391ed32a07c5e912e3683ff7668e12eeb5
|
bump to new version
|
octbrowser/__init__.py
|
octbrowser/__init__.py
|
__version__ = '0.4'
|
Python
| 0
|
@@ -11,10 +11,12 @@
_ = '0.4
+.1
'%0A
|
f34a82735bb2d58effa2b8570ba69a67c7527ec9
|
update to utilities
|
netmiko/utilities.py
|
netmiko/utilities.py
|
"""Miscellaneous utility functions."""
from __future__ import print_function
from __future__ import unicode_literals
import sys
import io
import os
# Dictionary mapping 'show run' for vendors with different command
SHOW_RUN_MAPPER = {
'juniper': 'show configuration',
'juniper_junos': 'show configuration',
'extreme': 'show configuration',
'hp_comware': 'display current-configuration',
'huawei': 'display current-configuration',
'fortinet': 'show full-configuration',
'cisco_wlc': 'show run-config',
'enterasys': 'show running-config',
'dell_force10': 'show running-config',
'avaya_vsp': 'show running-config',
'avaya_ers': 'show running-config',
'brocade_vdx': 'show running-config',
'brocade_nos': 'show running-config',
'brocade_fastiron': 'show running-config',
'brocade_netiron': 'show running-config',
}
# Expand SHOW_RUN_MAPPER to include '_ssh' key
new_dict = {}
for k, v in SHOW_RUN_MAPPER.items():
new_key = k + '_ssh'
new_dict[k] = v
new_dict[new_key] = v
SHOW_RUN_MAPPER = new_dict
# Default location of netmiko temp directory for netmiko tools
NETMIKO_BASE_DIR = '~/.netmiko'
def load_yaml_file(yaml_file):
"""Read YAML file."""
try:
import yaml
except ImportError:
sys.exit("Unable to import yaml module.")
try:
with io.open(yaml_file, encoding='utf-8') as fname:
return yaml.load(fname)
except IOError:
sys.exit("Unable to open YAML file: {0}".format(yaml_file))
def load_devices():
"""Find and load .netmiko.yml file."""
yaml_devices_file = find_cfg_file()
return load_yaml_file(yaml_devices_file)
def find_cfg_file(file_name=None):
"""Look for .netmiko.yml in current dir, then ~/.netmiko.yml."""
check_files = [
'.netmiko.yml',
os.path.expanduser('~') + '/.netmiko.yml',
]
if file_name:
check_files.insert(0, file_name)
for test_file in check_files:
if os.path.isfile(test_file):
return test_file
raise IOError("{} file not found in current dir or home dir.".format(file_name))
def display_inventory(my_devices):
"""Print out inventory devices and groups."""
inventory_groups = ['all']
inventory_devices = []
for k, v in my_devices.items():
if isinstance(v, list):
inventory_groups.append(k)
elif isinstance(v, dict):
inventory_devices.append((k, v['device_type']))
inventory_groups.sort()
inventory_devices.sort(key=lambda x: x[0])
print("\nDevices:")
print('-' * 40)
for a_device, device_type in inventory_devices:
device_type = " ({})".format(device_type)
print("{:<25}{:>15}".format(a_device, device_type))
print("\n\nGroups:")
print('-' * 40)
for a_group in inventory_groups:
print(a_group)
print()
def obtain_all_devices(my_devices):
"""Dynamically create 'all' group."""
new_devices = {}
for device_name, device_or_group in my_devices.items():
# Skip any groups
if not isinstance(device_or_group, list):
new_devices[device_name] = device_or_group
return new_devices
def obtain_netmiko_filename(device_name):
"""Create file name based on device_name."""
_, netmiko_full_dir = find_netmiko_dir()
return "{}/{}.txt".format(netmiko_full_dir, device_name)
def write_tmp_file(device_name, output):
file_name = obtain_netmiko_filename(device_name)
with open(file_name, "w") as f:
f.write(output)
return file_name
def ensure_dir_exists(verify_dir):
"""Ensure directory exists. Create if necessary."""
if not os.path.exists(verify_dir):
# Doesn't exist create dir
os.makedirs(verify_dir)
else:
# Exists
if not os.path.isdir(verify_dir):
# Not a dir, raise an exception
raise ValueError("{} is not a directory".format(verify_dir))
def find_netmiko_dir():
"""Check environment first, then default dir"""
try:
netmiko_base_dir = os.environ['NETMIKO_DIR']
except KeyError:
netmiko_base_dir = NETMIKO_BASE_DIR
netmiko_base_dir = os.path.expanduser(netmiko_base_dir)
if netmiko_base_dir == '/':
raise ValueError("/ cannot be netmiko_base_dir")
netmiko_full_dir = "{}/tmp".format(netmiko_base_dir)
return (netmiko_base_dir, netmiko_full_dir)
|
Python
| 0
|
@@ -1766,24 +1766,55 @@
iko.yml.%22%22%22%0A
+ base_file = '.netmiko.yml'%0A
check_fi
@@ -1829,30 +1829,25 @@
-'.netmiko.yml'
+base_file
,%0A
@@ -1876,29 +1876,29 @@
~') + '/
-.netmiko.yml'
+' + base_file
,%0A %5D%0A
@@ -2081,11 +2081,9 @@
r(%22%7B
-%7D f
+F
ile
@@ -2128,19 +2128,19 @@
.format(
-fil
+bas
e_name))
|
5fb03068113ccdaebb2496f127146617f8931c02
|
Add depth users to user summary
|
scripts/analytics/user_summary.py
|
scripts/analytics/user_summary.py
|
import pytz
import logging
from dateutil.parser import parse
from datetime import datetime, timedelta
from modularodm import Q
from website.app import init_app
from website.models import User
from scripts.analytics.base import SummaryAnalytics
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
class UserSummary(SummaryAnalytics):
@property
def collection_name(self):
return 'user_summary'
def get_events(self, date):
super(UserSummary, self).get_events(date)
# Convert to a datetime at midnight for queries and the timestamp
timestamp_datetime = datetime(date.year, date.month, date.day).replace(tzinfo=pytz.UTC)
query_datetime = timestamp_datetime + timedelta(1)
counts = {
'keen': {
'timestamp': timestamp_datetime.isoformat()
},
'status': {
'active': User.find(
Q('is_registered', 'eq', True) &
Q('password', 'ne', None) &
Q('merged_by', 'eq', None) &
Q('date_disabled', 'eq', None) &
Q('date_confirmed', 'ne', None) &
Q('date_confirmed', 'lt', query_datetime)
).count(),
'unconfirmed': User.find(
Q('date_registered', 'lt', query_datetime) &
Q('date_confirmed', 'eq', None)
).count(),
'deactivated': User.find(
Q('date_disabled', 'ne', None) &
Q('date_disabled', 'lt', query_datetime)
).count()
}
}
logger.info(
'Users counted. Active: {}, Unconfirmed: {}, Deactivated: {}'.format(
counts['status']['active'],
counts['status']['unconfirmed'],
counts['status']['deactivated']
)
)
return [counts]
def get_class():
return UserSummary
if __name__ == '__main__':
init_app()
user_summary = UserSummary()
args = user_summary.parse_args()
date = parse(args.date).date() if args.date else None
events = user_summary.get_events(date)
user_summary.send_events(events)
|
Python
| 0.00001
|
@@ -186,17 +186,26 @@
ort User
+, NodeLog
%0A
-
from scr
@@ -327,16 +327,637 @@
.INFO)%0A%0A
+LOG_THRESHOLD = 11%0A%0A%0A# Modified from scripts/analytics/depth_users.py%0Adef count_user_logs(user):%0A logs = NodeLog.find(Q('user', 'eq', user._id))%0A length = logs.count()%0A if length %3E 0:%0A item = logs%5B0%5D%0A if item.action == 'project_created' and item.node.is_bookmark_collection:%0A length -= 1%0A return length%0A%0A%0A# Modified from scripts/analytics/depth_users.py%0Adef get_number_of_depth_users(active_users):%0A depth_users = 0%0A for user in active_users:%0A log_count = count_user_logs(user)%0A if log_count %3E= LOG_THRESHOLD:%0A depth_users += 1%0A%0A return depth_users%0A%0A
%0Aclass U
@@ -1388,165 +1388,22 @@
-counts = %7B%0A 'keen': %7B%0A 'timestamp': timestamp_datetime.isoformat()%0A %7D,%0A 'status': %7B%0A 'active':
+active_users =
Use
@@ -1742,32 +1742,313 @@
)
-.count()
+%0A%0A depth_users = get_number_of_depth_users(active_users)%0A%0A counts = %7B%0A 'keen': %7B%0A 'timestamp': timestamp_datetime.isoformat()%0A %7D,%0A 'status': %7B%0A 'active': active_users.count(),%0A 'depth': depth_users
,%0A
@@ -2489,16 +2489,27 @@
ive: %7B%7D,
+ Depth: %7B%7D,
Unconfi
@@ -2583,24 +2583,67 @@
%5B'active'%5D,%0A
+ counts%5B'status'%5D%5B'depth'%5D,%0A
|
ca77c9a91ea52334aca244abfd01c4595074eb04
|
Fix PEP8
|
tests/chainer_tests/functions_tests/loss_tests/test_triplet.py
|
tests/chainer_tests/functions_tests/loss_tests/test_triplet.py
|
import unittest
import numpy
import six
import chainer
from chainer import cuda
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
from chainer.testing import condition
@testing.parameterize(
*testing.product({
'batchsize': [5, 10], 'input_dim': [2, 3],
'margin': [0.1, 0.5], 'reduce': ['mean', 'no']
})
)
class TestTriplet(unittest.TestCase):
def setUp(self):
eps = 1e-3
x_shape = (self.batchsize, self.input_dim)
# Sample differentiable inputs
while True:
self.a = numpy.random.uniform(-1, 1, x_shape).astype(numpy.float32)
self.p = numpy.random.uniform(-1, 1, x_shape).astype(numpy.float32)
self.n = numpy.random.uniform(-1, 1, x_shape).astype(numpy.float32)
if (abs(self.a - self.p) < 2 * eps).any():
continue
if (abs(self.a - self.n) < 2 * eps).any():
continue
dist = numpy.sum(
(self.a - self.p) ** 2 - (self.a - self.n) ** 2,
axis=1) + self.margin
if (abs(dist) < 2 * eps).any():
continue
break
if self.reduce == 'mean':
gy_shape = ()
else:
gy_shape = self.batchsize,
self.gy = numpy.random.uniform(-1, 1, gy_shape).astype(numpy.float32)
self.gga = numpy.random.uniform(-1, 1, x_shape).astype(numpy.float32)
self.ggp = numpy.random.uniform(-1, 1, x_shape).astype(numpy.float32)
self.ggn = numpy.random.uniform(-1, 1, x_shape).astype(numpy.float32)
def check_forward(self, a_data, p_data, n_data):
a_val = chainer.Variable(a_data)
p_val = chainer.Variable(p_data)
n_val = chainer.Variable(n_data)
loss = functions.triplet(a_val, p_val, n_val, self.margin, self.reduce)
if self.reduce == 'mean':
self.assertEqual(loss.data.shape, ())
else:
self.assertEqual(loss.data.shape, (self.batchsize,))
self.assertEqual(loss.data.dtype, numpy.float32)
loss_value = cuda.to_cpu(loss.data)
#
# Compute expected value
#
loss_expect = numpy.empty((self.a.shape[0],), dtype=numpy.float32)
for i in six.moves.range(self.a.shape[0]):
ad, pd, nd = self.a[i], self.p[i], self.n[i]
dp = numpy.sum((ad - pd) ** 2)
dn = numpy.sum((ad - nd) ** 2)
loss_expect[i] = max((dp - dn + self.margin), 0)
if self.reduce == 'mean':
loss_expect = loss_expect.mean()
numpy.testing.assert_allclose(
loss_expect, loss_value, rtol=1e-4, atol=1e-4)
def test_negative_margin(self):
self.margin = -1
self.assertRaises(ValueError, self.check_forward,
self.a, self.p, self.n)
self.assertRaises(ValueError, self.check_backward,
self.a, self.p, self.n, self.gy)
def test_forward_cpu(self):
self.check_forward(self.a, self.p, self.n)
@attr.gpu
def test_forward_gpu_no_cudnn(self):
self.check_forward(cuda.to_gpu(self.a), cuda.to_gpu(self.p),
cuda.to_gpu(self.n))
def check_backward(self, a_data, p_data, n_data, gy_data):
def f(a, p, n):
return functions.triplet(
a, p, n, margin=self.margin, reduce=self.reduce)
gradient_check.check_backward(
f, (a_data, p_data, n_data), gy_data, dtype=numpy.float64,
rtol=5e-4, atol=5e-4)
def test_backward_cpu(self):
self.check_backward(self.a, self.p, self.n, self.gy)
@attr.gpu
def test_backward_gpu_no_cudnn(self):
self.check_backward(cuda.to_gpu(self.a), cuda.to_gpu(self.p),
cuda.to_gpu(self.n), cuda.to_gpu(self.gy))
def check_double_backward(self, a_data, p_data, n_data, gy_data, gga_data,
ggp_data, ggn_data):
def f(a, p, n):
return functions.triplet(
a, p, n, margin=self.margin, reduce=self.reduce)
gradient_check.check_double_backward(
f, (a_data, p_data, n_data), gy_data,
(gga_data, ggp_data, ggn_data), rtol=5e-4, atol=5e-4)
def test_double_backward_cpu(self):
self.check_double_backward(
self.a, self.p, self.n, self.gy, self.gga, self.ggp, self.ggn)
@attr.gpu
def test_double_backward_gpu_no_cudnn(self):
self.check_double_backward(
cuda.to_gpu(self.a), cuda.to_gpu(self.p), cuda.to_gpu(self.n),
cuda.to_gpu(self.gy), cuda.to_gpu(self.gga), cuda.to_gpu(self.ggp),
cuda.to_gpu(self.ggn))
class TestContrastiveInvalidReductionOption(unittest.TestCase):
def setUp(self):
self.a = numpy.random.uniform(-1, 1, (5, 10)).astype(numpy.float32)
self.p = numpy.random.uniform(-1, 1, (5, 10)).astype(numpy.float32)
self.n = numpy.random.randint(-1, 1, (5, 10)).astype(numpy.float32)
def check_invalid_option(self, xp):
a = xp.asarray(self.a)
p = xp.asarray(self.p)
n = xp.asarray(self.n)
with self.assertRaises(ValueError):
functions.triplet(a, p, n, reduce='invalid_option')
def test_invalid_option_cpu(self):
self.check_invalid_option(numpy)
@attr.gpu
def test_invalid_option_gpu(self):
self.check_invalid_option(cuda.cupy)
testing.run_module(__name__, __file__)
|
Python
| 0.000294
|
@@ -205,46 +205,8 @@
ttr%0A
-from chainer.testing import condition%0A
%0A%0A@t
|
3582191d79646041ec589e2f1928c4cc560f5eaa
|
Add model, iOS, id to underlevel script.
|
scripts/my_export_accounts_csv.py
|
scripts/my_export_accounts_csv.py
|
#!/usr/bin/env python3
import csv
import sys
from datetime import datetime
from pathlib import Path
monocle_dir = Path(__file__).resolve().parents[1]
sys.path.append(str(monocle_dir))
from monocle.shared import ACCOUNTS
accounts_file = monocle_dir / 'accounts.csv'
try:
now = datetime.now().strftime("%Y-%m-%d-%H%M")
accounts_file.rename('accounts-{}.csv'.format(now))
except FileNotFoundError:
pass
banned = []
invalid = []
underlevel = []
with accounts_file.open('w') as csvfile:
writer = csv.writer(csvfile, delimiter=',')
writer.writerow(('username', 'password', 'provider', 'model', 'iOS', 'id'))
for account in ACCOUNTS.values():
if account.get('banned', False):
banned.append(account)
continue
if not account.get('level', False):
invalid.append(account)
continue
if account.get('level') < 2:
underlevel.append(account)
writer.writerow((account['username'],
account['password'],
account['provider'],
account['model'],
account['iOS'],
account['id']))
if banned:
banned_file = monocle_dir / 'banned.csv'
write_header = not banned_file.exists()
with banned_file.open('a') as csvfile:
writer = csv.writer(csvfile, delimiter=',')
if write_header:
writer.writerow(('username', 'password', 'provider', 'level', 'created', 'last used'))
for account in banned:
row = [account['username'], account['password'], account['provider']]
row.append(account.get('level'))
try:
row.append(datetime.fromtimestamp(account['created']).strftime('%x %X'))
except KeyError:
row.append(None)
try:
row.append(datetime.fromtimestamp(account['time']).strftime('%x %X'))
except KeyError:
row.append(None)
writer.writerow(row)
if invalid:
invalid_file = monocle_dir / 'invalid.csv'
write_header = not invalid_file.exists()
with invalid_file.open('a') as csvfile:
writer = csv.writer(csvfile, delimiter=',')
if write_header:
writer.writerow(('username', 'password', 'provider'))
for account in invalid:
row = [account['username'], account['password'], account['provider']]
writer.writerow(row)
if underlevel:
underlevel_file = monocle_dir / 'underlevel.csv'
write_header = not underlevel_file.exists()
with underlevel_file.open('a') as csvfile:
writer = csv.writer(csvfile, delimiter=',')
if write_header:
writer.writerow(('username', 'password', 'provider'))
for account in underlevel:
row = [account['username'], account['password'], account['provider'], account['model'], account['iOS'], account['id']]
writer.writerow(row)
print('Done!')
|
Python
| 0
|
@@ -2776,32 +2776,54 @@
ord', 'provider'
+, 'model', 'iOS', 'id'
))%0A for a
|
0c1952c70358494cafcd5b6d2bbee31bdd1a5cb1
|
update post code detector documentation
|
scrubadub/detectors/postalcode.py
|
scrubadub/detectors/postalcode.py
|
import re
from .base import RegionLocalisedRegexDetector
from ..filth.postalcode import PostalCodeFilth
class PostalCodeDetector(RegionLocalisedRegexDetector):
"""Detects british postcodes."""
filth_cls = PostalCodeFilth
name = 'postalcode'
region_regex = {
'GB': re.compile(r"""
(
(?:[gG][iI][rR] {0,}0[aA]{2})|
(?:
(?:
[aA][sS][cC][nN]|
[sS][tT][hH][lL]|
[tT][dD][cC][uU]|
[bB][bB][nN][dD]|
[bB][iI][qQ][qQ]|
[fF][iI][qQ][qQ]|
[pP][cC][rR][nN]|
[sS][iI][qQ][qQ]|
[iT][kK][cC][aA]
)
\ {0,}1[zZ]{2}
)|
(?:
(?:
(?:[a-pr-uwyzA-PR-UWYZ][a-hk-yxA-HK-XY]?[0-9][0-9]?)|
(?:
(?:[a-pr-uwyzA-PR-UWYZ][0-9][a-hjkstuwA-HJKSTUW])|
(?:[a-pr-uwyzA-PR-UWYZ][a-hk-yA-HK-Y][0-9][abehmnprv-yABEHMNPRV-Y])
)
)
\ {0,}[0-9][abd-hjlnp-uw-zABD-HJLNP-UW-Z]{2}
)
)
""", re.VERBOSE),
}
|
Python
| 0
|
@@ -171,17 +171,46 @@
Detects
-b
+postal codes, currently only B
ritish p
@@ -212,21 +212,36 @@
ish post
+
codes
+ are supported
.%22%22%22%0A
|
0f7505876a70be3942b938a4e2fdeb97918c5ad1
|
version 0.9.0
|
bulbs/__init__.py
|
bulbs/__init__.py
|
__version__ = "0.8.12"
|
Python
| 0.000002
|
@@ -14,10 +14,9 @@
%220.
-8.12
+9.0
%22%0A
|
3a7798b46878e0938d1291642ec093cca34a7557
|
Stop accessing self_fd direct and go through helper to enforce santify checks for eventfd
|
butter/eventfd.py
|
butter/eventfd.py
|
#!/usr/bin/env python
"""eventfd: maintain an atomic counter inside a file descriptor"""
from __future__ import print_function
from os import write as _write, read as _read, close as _close
from cffi import FFI as _FFI
import errno as _errno
_ffi = _FFI()
_ffi.cdef("""
#define EFD_CLOEXEC ...
#define EFD_NONBLOCK ...
#define EFD_SEMAPHORE ...
int eventfd(unsigned int initval, int flags);
""")
_C = _ffi.verify("""
#include <sys/eventfd.h>
#include <stdint.h> /* Definition of uint64_t */
""", libraries=[])
def eventfd(inital_value=0, flags=0):
"""Create a new eventfd
Arguments
----------
:param int inital_value: The inital value to set the eventfd to
:param int flags: Flags to specify extra options
Flags
------
EFD_CLOEXEC: Close the eventfd when executing a new program
EFD_NONBLOCK: Open the socket in non-blocking mode
EFD_SEMAPHORE: Provide semaphore like semantics for read operations
Returns
--------
:return: The file descriptor representing the eventfd
:rtype: int
Exceptions
-----------
:raises ValueError: Invalid value in flags
:raises OSError: Max per process FD limit reached
:raises OSError: Max system FD limit reached
:raises OSError: Could not mount (internal) anonymous inode device
:raises MemoryError: Insufficient kernel memory
"""
fd = _C.eventfd(inital_value, flags)
if fd < 0:
err = _ffi.errno
if err == _errno.EINVAL:
raise ValueError("Invalid value in flags")
elif err == _errno.EMFILE:
raise OSError("Max per process FD limit reached")
elif err == _errno.ENFILE:
raise OSError("Max system FD limit reached")
elif err == _errno.ENODEV:
raise OsError("Could not mount (internal) anonymous inode device")
elif err == _errno.ENOMEM:
raise MemoryError("Insufficent kernel memory available")
else:
# If you are here, its a bug. send us the traceback
raise ValueError("Unknown Error: {}".format(err))
return fd
EFD_CLOEXEC = _C.EFD_CLOEXEC
EFD_NONBLOCK = _C.EFD_NONBLOCK
EFD_SEMAPHORE = _C.EFD_SEMAPHORE
class Eventfd(object):
def __init__(self, inital_value=0, flags=0):
"""Create a new Eventfd object
Arguments
----------
:param int inital_value: The inital value to set the eventfd to
:param int flags: Flags to specify extra options
Flags
------
EFD_CLOEXEC: Close the eventfd when executing a new program
EFD_NONBLOCK: Open the socket in non-blocking mode
EFD_SEMAPHORE: Provide semaphore like semantics for read operations
"""
self._fd = eventfd(inital_value, flags)
def increment(self, value=1):
"""Increment the counter by the specified value
:param int value: The value to increment the counter by (default=1)
"""
packed_value = _ffi.new('uint64_t[1]', (value,))
packed_value = _ffi.buffer(packed_value)[:]
_write(self._fd, packed_value)
def read(self):
"""Read the current value of the counter and zero the counter
Returns
--------
:return: The current count of the timer
:rtype: int
"""
data = _read(self._fd, 8)
value = _ffi.new('uint64_t[1]')
_ffi.buffer(value, 8)[0:8] = data
return value[0]
def __int__(self):
return self.read()
def close(self):
if self._fd:
_close(self._fd)
self._fd = None
else:
raise ValueError("I/O operation on closed file")
def fileno(self):
if self._fd:
return self._fd
else:
raise ValueError("I/O operation on closed file")
def _main():
ev = Eventfd(30)
print('First Read:', int(ev))
# read blocks if 0
#print('Second Read:', int(ev))
print("Adding 30 to zero'd counter")
ev.increment(30)
print("Read value back:", int(ev))
print("Incrementing value 5 times")
ev.increment(30)
ev.increment(30)
ev.increment(30)
ev.increment(30)
ev.increment(30)
print("Read value back:", int(ev))
if __name__ == "__main__":
_main()
|
Python
| 0
|
@@ -3103,19 +3103,24 @@
te(self.
-_fd
+fileno()
, packed
@@ -3358,19 +3358,24 @@
ad(self.
-_fd
+fileno()
, 8)%0A
@@ -3571,151 +3571,28 @@
-if self._fd:%0A _close(self._fd)%0A self._fd = None%0A else:%0A raise ValueError(%22I/O operation on closed file%22
+_close(self.fileno()
)%0A%0A
@@ -4123,32 +4123,32 @@
rement(30)%0A %0A
-
print(%22Read
@@ -4161,32 +4161,228 @@
back:%22, int(ev))
+%0A%0A print(%22Closing FD%22) %0A ev.close()%0A try:%0A ev.close()%0A except OSError:%0A print(%22Could not close closed FD, OK%22)%0A else:%0A print(%22Closed closed FD, this is bad%22)
%0A %0A %0Aif __
|
6eff7f5d614a89a298fd31f83e0a514193b1d73a
|
add plot for cot_globale
|
c2cstats/plots.py
|
c2cstats/plots.py
|
#!/usr/bin/env python2
# -*- coding:utf-8 -*-
import datetime
import string
import os.path
import numpy as np
import matplotlib.pyplot as plt
from collections import Counter
OUTPUT_DIR = "_output"
FILE_EXT = ".svg"
MONTHS = {u'janvier': 1, u'février': 2, u'mars': 3, u'avril': 4, u'mai': 5,
u'juin': 6, u'juillet': 7, u'août': 8, u'septembre': 9,
u'octobre': 10, u'novembre': 11, u'décembre': 12}
def plot_all(data):
plot_date(data.date)
plot_activity(data.activity)
plot_area(data.area)
def get_filepath(name):
return os.path.join(OUTPUT_DIR, name+FILE_EXT)
def plot_date(data):
"Plot histogram of years"
year = [int(string.split(i)[2]) for i in data]
# n, bins, patches = plt.hist(year, max(year)-min(year)+1,
# range = (min(year)-0.5, max(year)+0.5))
n, bins = np.histogram(year, max(year) - min(year) + 1,
range=(min(year) - 0.5, max(year) + 0.5))
plt.figure()
plt.bar(bins[:-1], n)
plt.xlabel(u'Année')
plt.ylabel('Nb de sorties')
plt.title('Nb de sorties par an')
# plt.axis([min(year), max(year), 0, max(n)+1])
labels = [str(i) for i in range(min(year), max(year) + 1)]
plt.xticks(bins[:-1] + 0.4, labels)
# plt.yticks(np.arange(0,81,10))
# plt.legend( (p1[0], p2[0]), ('Men', 'Women')
plt.savefig(get_filepath('years'))
# try with plot_date
d = []
for i in data:
t = i.split(' ')
d.append(datetime.date(int(t[2]), MONTHS[t[1]], int(t[0])))
plt.figure()
plt.plot_date(d, np.ones(100))
plt.savefig(get_filepath('timeline'))
def plot_activity(data):
"Pie plot for activities"
c = Counter(data)
explode = np.zeros(len(c)) + 0.05
plt.figure()
plt.pie(c.values(), labels=c.keys(), explode=explode, shadow=True, autopct='%d')
plt.title(u'Répartition par activité')
plt.savefig(get_filepath('activities'))
def plot_area(data):
"Pie plot for areas"
c = Counter(data)
use = c.most_common(10)
labels = [k for k,v in use]
counts = [v for k,v in use]
labels.append(u'Autres')
counts.append(sum(c.values()) - sum(counts))
explode = np.zeros(len(counts)) + 0.05
plt.figure()
plt.pie(counts, labels=labels, explode=explode, shadow=True, autopct='%d')
plt.title(u'Répartition par région')
plt.savefig(get_filepath('regions'))
|
Python
| 0.000002
|
@@ -414,16 +414,189 @@
': 12%7D%0A%0A
+%0ACOTATION_GLOBALE = ('F', 'PD-', 'PD', 'PD+', 'AD-', 'AD', 'AD+', 'D-', 'D', 'D+',%0A 'TD-', 'TD', 'TD+', 'ED-', 'ED', 'ED+', 'ED4', 'ED5', 'ED6', 'ED7')%0A%0A%0A
def plot
@@ -689,16 +689,55 @@
ta.area)
+%0A plot_cot_globale(data.cot_globale)
%0A%0Adef ge
@@ -2609,12 +2609,335 @@
regions'))%0A%0A
+def plot_cot_globale(data):%0A %22Hist plot for cot_globale%22%0A%0A c = Counter(data)%0A counts = %5Bc%5Bk%5D for k in COTATION_GLOBALE%5D%0A x = np.arange(len(counts))%0A%0A plt.figure()%0A plt.bar(x, counts)%0A plt.xlabel(u'Cotation globale')%0A plt.xticks(x + 0.4, COTATION_GLOBALE)%0A plt.savefig(get_filepath('cot_global'))%0A
|
59c9b7925d89e409e36ae15e3add38d2ded58692
|
make modernize notice print_function import
|
custom/enikshay/management/commands/data_dumps_person_case.py
|
custom/enikshay/management/commands/data_dumps_person_case.py
|
from __future__ import (
absolute_import,
unicode_literals,
)
from __future__ import print_function
from corehq.apps.users.models import CommCareUser
from corehq.form_processor.interfaces.dbaccessors import CaseAccessors
from corehq.apps.es import queries
from custom.enikshay.case_utils import (
CASE_TYPE_EPISODE,
CASE_TYPE_PERSON,
get_all_occurrence_cases_from_person,
)
from custom.enikshay.const import ENROLLED_IN_PRIVATE
from custom.enikshay.management.commands.base_data_dump import BaseDataDump
from custom.enikshay.management.commands.duplicate_occurrences_and_episodes_reconciliation import (
get_case_recently_modified_on_phone,
)
DOMAIN = "enikshay"
class Command(BaseDataDump):
""" data dumps for person cases
https://docs.google.com/spreadsheets/d/1OPp0oFlizDnIyrn7Eiv11vUp8IBmc73hES7qqT-mKKA/edit#gid=1039030624
"""
TASK_NAME = "01_person_case"
INPUT_FILE_NAME = "data_dumps_person_case.csv"
def __init__(self, *args, **kwargs):
super(Command, self).__init__(*args, **kwargs)
self.case_type = CASE_TYPE_PERSON
def get_last_episode(self, case):
self.context['last_episode'] = (
self.context.get('last_episode') or
get_last_episode(case)
)
if not self.context['last_episode']:
return Exception("could not find last episode for person %s" % case.case_id)
return self.context['last_episode']
def get_custom_value(self, column_name, case):
if column_name == "Commcare UUID":
return case.case_id
elif column_name == "Created by Username":
user_id = None
try:
user_id = case.opened_by
user = CommCareUser.get_by_user_id(user_id, DOMAIN)
return user.username
except Exception as e:
return Exception("Could not get username. case opened by %s, %s" % (user_id, e))
elif column_name == "Created by User ID":
return case.opened_by
elif column_name == "Date of creation":
return case.opened_on
elif column_name == "Current Owner - PHI":
return case.owner_id
elif column_name == 'Person Status':
if case.closed:
return "closed"
elif case.owner_id == "_invalid_":
return "removed"
elif case.owner_id == '_archive_':
return "archived"
else:
return "active"
elif column_name == "Latest Episode creation Date":
return get_last_episode(case).opened_on
elif column_name == "Latest Episode Closed?":
return get_last_episode(case).closed
elif column_name == "Latest Episode - Date Closed (If any)":
return get_last_episode(case).closed_on
raise Exception("unknown custom column %s" % column_name)
def get_case_reference_value(self, case_reference, case, calculation):
if case_reference == 'last_episode':
try:
return self.get_last_episode(case).get_case_property(calculation)
except Exception as e:
return str(e)
return Exception("unknown case reference %s" % case_reference)
def get_case_ids_query(self, case_type):
"""
All open and closed person cases with person.dataset = 'real' and person.enrolled_in_private != 'true'
"""
return (self.case_search_instance
.case_type(case_type)
.case_property_query(ENROLLED_IN_PRIVATE, 'true', clause=queries.MUST_NOT)
.case_property_query("dataset", 'real')
)
def get_all_episode_cases_from_person(domain, person_case_id):
occurrence_cases = get_all_occurrence_cases_from_person(domain, person_case_id)
return [
case for case in CaseAccessors(domain).get_reverse_indexed_cases(
[c.case_id for c in occurrence_cases], case_types=[CASE_TYPE_EPISODE])
]
def get_last_episode(person_case):
"""
For all episode cases under the person (the host of the host of the episode is the primary person case):
If count(open episode cases with episode.is_active = 'yes') > 1, report error
If count(open episode cases with episode.is_active = 'yes') = 1, pick this case
If count(open episode cases with episode.is_active = 'yes') = 0:
pick the episode with the latest episode.closed_date if there is one
Else report 'No episodes'
"""
episode_cases = get_all_episode_cases_from_person(person_case.domain, person_case.case_id)
open_episode_cases = [
episode_case for episode_case in episode_cases
if not episode_case.closed
]
active_open_episode_cases = [
episode_case for episode_case in open_episode_cases
if episode_case.get_case_property('is_active') == 'yes'
]
if len(active_open_episode_cases) > 1:
raise Exception("Multiple active open episode cases found for %s" % person_case.case_id)
elif len(active_open_episode_cases) == 1:
return active_open_episode_cases[0]
else:
# look for case recently modified by a user
recently_modified_case_on_phone = get_case_recently_modified_on_phone(episode_cases, False)
if recently_modified_case_on_phone:
return recently_modified_case_on_phone
# else look for the case recently modified ever
# for ex cases that were created and then closed by the system itself
recently_modified_case = get_case_recently_modified(episode_cases)
if recently_modified_case:
return recently_modified_case
raise Exception("No episodes for %s" % person_case.case_id)
def get_case_recently_modified(all_cases):
recently_modified_case = None
recently_modified_time = None
for case in all_cases:
last_edit = case.modified_on
if last_edit:
if recently_modified_time is None:
recently_modified_time = last_edit
recently_modified_case = case
elif recently_modified_time and recently_modified_time < last_edit:
recently_modified_time = last_edit
recently_modified_case = case
elif recently_modified_time and recently_modified_time == last_edit:
print("This looks like a super edge case that can be looked at. "
"Not blocking as of now. Case id: {case_id}".format(case_id=case.case_id))
return recently_modified_case
|
Python
| 0.000001
|
@@ -16,22 +16,16 @@
import
-(%0A
absolute
@@ -31,21 +31,39 @@
e_import
-,%0A
+%0Afrom __future__ import
unicode
@@ -71,19 +71,16 @@
literals
-,%0A)
%0Afrom __
|
1d2ea0c72d8700687761125e4eaf90ec52f419be
|
Fix ORM call and add progress check
|
custom/icds_reports/management/commands/update_aadhar_date.py
|
custom/icds_reports/management/commands/update_aadhar_date.py
|
from __future__ import absolute_import, print_function
from __future__ import unicode_literals
from django.core.management.base import BaseCommand
from django.db import connections
from corehq.apps.locations.models import SQLLocation
from corehq.sql_db.routers import db_for_read_write
from custom.icds_reports.models import ChildHealthMonthly
CHILD_TABLENAME = "config_report_icds-cas_static-child_health_cases_a46c129f"
PERSON_TABLENAME = "config_report_icds-cas_static-person_cases_v2_b4b5d57a"
UPDATE_QUERY = """
UPDATE "{child_tablename}" child SET
aadhar_date = person.aadhar_date
FROM "{person_tablename}" person
WHERE child.mother_id = person.doc_id AND child.supervisor_id = %(sup_id)s AND person.supervisor_id = %(sup_id)s
""".format(child_tablename=CHILD_TABLENAME, person_tablename=PERSON_TABLENAME)
def get_cursor(model):
db = db_for_read_write(model)
return connections[db].cursor()
class Command(BaseCommand):
def handle(self, *args, **options):
supervisor_ids = (
SQLLocation.objects
.filter(domain='icds-cas', location_type__name='supervisor')
.values('location_id')
)
for sup_id in supervisor_ids:
with get_cursor(ChildHealthMonthly) as cursor:
cursor.execute(UPDATE_QUERY, {"sup_id": sup_id})
|
Python
| 0
|
@@ -1130,16 +1130,21 @@
.values
+_list
('locati
@@ -1149,26 +1149,93 @@
tion_id'
-)%0A
+, flat=True)%0A )%0A count = 0%0A num_ids = len(supervisor_ids
)%0A%0A
@@ -1391,8 +1391,121 @@
up_id%7D)%0A
+ count += 1%0A%0A if count %25 100 == 0:%0A print(%22%7B%7D / %7B%7D%22.format(count, num_ids))%0A
|
e2d8a32590c0865b2a8339d86af4eb9b34ea5d20
|
Update __init__.py
|
tendrl/node_agent/objects/cluster_message/__init__.py
|
tendrl/node_agent/objects/cluster_message/__init__.py
|
from tendrl.commons import etcdobj
from tendrl.commons.message import Message as message
from tendrl.commons import objects
class ClusterMessage(objects.BaseObject, message):
internal = True
def __init__(self, **cluster_message):
self._defs = {}
message.__init__(self, **cluster_message)
objects.BaseObject.__init__(self)
self.value = 'clusters/%s/messages/%s'
self._etcd_cls = _ClusterMessageEtcd
class _ClusterMessageEtcd(etcdobj.EtcdObj):
"""Cluster message object, lazily updated
"""
__name__ = 'clusters/%s/messages/%s'
_tendrl_cls = ClusterMessage
def render(self):
self.__name__ = self.__name__ % (
self.cluster_id, self.message_id
)
return super(_ClusterMessageEtcd, self).render()
|
Python
| 0.000072
|
@@ -451,16 +451,92 @@
geEtcd%0A%0A
+ def save(self):%0A super(ClusterMessage, self).save(update=False)%0A%0A
class _C
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.