commit
stringlengths 40
40
| subject
stringlengths 1
3.25k
| old_file
stringlengths 4
311
| new_file
stringlengths 4
311
| old_contents
stringlengths 0
26.3k
| lang
stringclasses 3
values | proba
float64 0
1
| diff
stringlengths 0
7.82k
|
|---|---|---|---|---|---|---|---|
9c5531c5adccf1d6cc8194e3ed83a8f30210b91b
|
add variable Admin API method name
|
qubesadmin/events/__init__.py
|
qubesadmin/events/__init__.py
|
# -*- encoding: utf8 -*-
#
# The Qubes OS Project, http://www.qubes-os.org
#
# Copyright (C) 2017 Marek Marczykowski-Górecki
# <marmarek@invisiblethingslab.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with this program; if not, see <http://www.gnu.org/licenses/>.
'''Event handling implementation, require Python >=3.5.2 for asyncio.'''
import asyncio
import subprocess
import qubesadmin.config
import qubesadmin.exc
class EventsDispatcher(object):
''' Events dispatcher, responsible for receiving events and calling
appropriate handlers'''
def __init__(self, app):
'''Initialize EventsDispatcher'''
#: Qubes() object
self.app = app
#: event handlers - dict of event -> handlers
self.handlers = {}
def add_handler(self, event, handler):
'''Register handler for event
Use '*' as event to register a handler for all events.
Handler function is called with:
* subject (VM object or None)
* event name (str)
* keyword arguments related to the event, if any - all values as str
:param event Event name, or '*' for all events
:param handler Handler function'''
self.handlers.setdefault(event, set()).add(handler)
def remove_handler(self, event, handler):
'''Remove previously registered event handler
:param event Event name
:param handler Handler function
'''
self.handlers[event].remove(handler)
@asyncio.coroutine
def _get_events_reader(self, vm=None) -> (asyncio.StreamReader, callable):
'''Make connection to qubesd and return stream to read events from
:param vm: Specific VM for which events should be handled, use None
to handle events from all VMs (and non-VM objects)
:return stream to read events from and a cleanup function
(call it to terminate qubesd connection)'''
if vm is not None:
dest = vm.name
else:
dest = 'dom0'
if self.app.qubesd_connection_type == 'socket':
reader, writer = yield from asyncio.open_unix_connection(
qubesadmin.config.QUBESD_SOCKET)
writer.write(b'dom0\0') # source
writer.write(b'admin.Events\0') # method
writer.write(dest.encode('ascii') + b'\0') # dest
writer.write(b'\0') # arg
writer.write_eof()
def cleanup_func():
'''Close connection to qubesd'''
writer.close()
elif self.app.qubesd_connection_type == 'qrexec':
proc = yield from asyncio.create_subprocess_exec(
'qrexec-client-vm', dest, 'admin.Events',
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
proc.stdin.write_eof()
reader = proc.stdout
def cleanup_func():
'''Close connection to qubesd'''
try:
proc.kill()
except ProcessLookupError:
pass
else:
raise NotImplementedError('Unsupported qubesd connection type: '
+ self.app.qubesd_connection_type)
return reader, cleanup_func
@asyncio.coroutine
def listen_for_events(self, vm=None, reconnect=True):
'''
Listen for events and call appropriate handlers.
This function do not exit until manually terminated.
This is coroutine.
:param vm: Listen for events only for this VM, use None to listen for
events about all VMs and not related to any particular VM.
:param reconnect: should reconnect to qubesd if connection is
interrupted?
:rtype: None
'''
while True:
try:
yield from self._listen_for_events(vm)
except (ConnectionRefusedError, ConnectionResetError,
FileNotFoundError):
pass
if not reconnect:
break
self.app.log.warning(
'Connection to qubesd terminated, reconnecting in {} '
'seconds'.format(qubesadmin.config.QUBESD_RECONNECT_DELAY))
# avoid busy-loop if qubesd is dead
yield from asyncio.sleep(qubesadmin.config.QUBESD_RECONNECT_DELAY)
@asyncio.coroutine
def _listen_for_events(self, vm=None):
'''
Listen for events and call appropriate handlers.
This function do not exit until manually terminated.
This is coroutine.
:param vm: Listen for events only for this VM, use None to listen for
events about all VMs and not related to any particular VM.
:return: True if any event was received, otherwise False
:rtype: bool
'''
reader, cleanup_func = yield from self._get_events_reader(vm)
try:
some_event_received = False
while not reader.at_eof():
try:
event_header = yield from reader.readuntil(b'\0')
if event_header != b'1\0':
raise qubesadmin.exc.QubesDaemonCommunicationError(
'Non-event received on events connection: '
+ repr(event_header))
subject = (yield from reader.readuntil(b'\0'))[:-1].decode(
'utf-8')
event = (yield from reader.readuntil(b'\0'))[:-1].decode(
'utf-8')
kwargs = {}
while True:
key = (yield from reader.readuntil(b'\0'))[:-1].decode(
'utf-8')
if not key:
break
value = (yield from reader.readuntil(b'\0'))[:-1].\
decode('utf-8')
kwargs[key] = value
except asyncio.IncompleteReadError as err:
if err.partial == b'':
break
else:
raise
if not subject:
subject = None
self.handle(subject, event, **kwargs)
some_event_received = True
finally:
cleanup_func()
return some_event_received
def handle(self, subject, event, **kwargs):
'''Call handlers for given event'''
if subject:
if event in ['property-set:name']:
self.app.domains.clear_cache()
subject = self.app.domains[subject]
else:
# handle cache refreshing on best-effort basis
if event in ['domain-add', 'domain-delete']:
self.app.domains.clear_cache()
subject = None
for handler in self.handlers.get(event, []):
handler(subject, event, **kwargs)
for handler in self.handlers.get('*', []):
handler(subject, event, **kwargs)
|
Python
| 0
|
@@ -1156,16 +1156,43 @@
elf, app
+, api_method='admin.Events'
):%0A
@@ -1278,16 +1278,55 @@
= app%0A%0A
+ self._api_method = api_method%0A%0A
@@ -2901,22 +2901,38 @@
ite(
-b'admin.Events
+self._api_method.encode() + b'
%5C0')
@@ -3350,30 +3350,32 @@
, dest,
-'admin.Events'
+self._api_method
,%0A
|
61890f3f6def74a6867a51366e9ee54bbe338407
|
Use a list comprehension instead of a new SQL query
|
src/dal_select2/views.py
|
src/dal_select2/views.py
|
"""Select2 view implementation."""
import collections
import json
from dal.views import BaseQuerySetView, ViewMixin
from django import http
from django.core.exceptions import ImproperlyConfigured
from django.utils import six
from django.utils.translation import ugettext as _
from django.views.generic.list import View
class Select2ViewMixin(object):
"""View mixin to render a JSON response for Select2."""
def get_results(self, context):
"""Return data for the 'results' key of the response."""
return [
{
'id': self.get_result_value(result),
'text': self.get_result_label(result),
'selected_text': self.get_selected_result_label(result),
} for result in context['object_list']
]
def get_create_option(self, context, q):
"""Form the correct create_option to append to results."""
create_option = []
display_create_option = False
if self.create_field and q:
page_obj = context.get('page_obj', None)
if page_obj is None or page_obj.number == 1:
display_create_option = True
# Don't offer to create a new option if a (case-insensitive) identical one already exists
if self.get_queryset().filter(**{self.create_field +'__iexact': q}).exists():
display_create_option = False
if display_create_option and self.has_add_permission(self.request):
create_option = [{
'id': q,
'text': _('Create "%(new_value)s"') % {'new_value': q},
'create_id': True,
}]
return create_option
def render_to_response(self, context):
"""Return a JSON response in Select2 format."""
q = self.request.GET.get('q', None)
create_option = self.get_create_option(context, q)
return http.HttpResponse(
json.dumps({
'results': self.get_results(context) + create_option,
'pagination': {
'more': self.has_more(context)
}
}),
content_type='application/json',
)
class Select2QuerySetView(Select2ViewMixin, BaseQuerySetView):
"""List options for a Select2 widget."""
class Select2ListView(ViewMixin, View):
"""Autocomplete from a list of items rather than a QuerySet."""
def get_list(self):
"""Return the list strings from which to autocomplete."""
return []
def get(self, request, *args, **kwargs):
"""Return option list json response."""
results = self.get_list()
create_option = []
if self.q:
results = self.autocomplete_results(results)
if hasattr(self, 'create'):
create_option = [{
'id': self.q,
'text': 'Create "%s"' % self.q,
'create_id': True
}]
return http.HttpResponse(json.dumps({
'results': self.results(results) + create_option
}), content_type='application/json')
def autocomplete_results(self, results):
"""Return list of strings that match the autocomplete query."""
return [x for x in results if self.q.lower() in x.lower()]
def results(self, results):
"""Return the result dictionary."""
return [dict(id=x, text=x) for x in results]
def post(self, request):
"""Add an option to the autocomplete list.
If 'text' is not defined in POST or self.create(text) fails, raises
bad request. Raises ImproperlyConfigured if self.create if not defined.
"""
if not hasattr(self, 'create'):
raise ImproperlyConfigured('Missing "create()"')
text = request.POST.get('text', None)
if text is None:
return http.HttpResponseBadRequest()
text = self.create(text)
if text is None:
return http.HttpResponseBadRequest()
return http.HttpResponse(json.dumps({
'id': text,
'text': text,
}))
class Select2GroupListView(Select2ListView):
"""View mixin for grouped options."""
def get_item_as_group(self, entry):
"""Return the item with its group."""
group = None
value = entry
if isinstance(entry, collections.Sequence) and \
not isinstance(entry, six.string_types):
entry_length = len(entry)
if(entry_length > 1):
group, value = entry[0:2]
elif(entry_length > 0):
value = entry[0]
if not isinstance(value, collections.Sequence) or \
isinstance(value, six.string_types):
value = (value,)
return (group, value),
def get(self, request, *args, **kwargs):
"""Return option list with children(s) json response."""
results_dict = {}
results = self.get_list()
if results:
flat_results = [(group, item) for entry in results
for group, items in self.get_item_as_group(entry)
for item in items]
if self.q:
q = self.q.lower()
flat_results = [(g, x) for g, x in flat_results
if q in x.lower()]
for group, value in flat_results:
results_dict.setdefault(group, [])
results_dict[group].append(value)
return http.HttpResponse(json.dumps({
"results":
[{"id": x, "text": x} for x in results_dict.pop(None, [])] +
[{"id": g, "text": g, "children": [{"id": x, "text": x}
for x in l]}
for g, l in six.iteritems(results_dict)]
}))
|
Python
| 0
|
@@ -1207,18 +1207,31 @@
ion if a
+%0A #
-(
case-ins
@@ -1285,84 +1285,180 @@
-if self.get_queryset().filter(**%7Bself.create_field +'__iexact': q%7D).exists()
+existing_options = (self.get_result_label(result).lower()%0A for result in context%5B'object_list'%5D)%0A if q.lower() in existing_options
:%0A
|
e20faf0486d981b3aaed72487150e00ec99730b0
|
Use a UK/Europe standard format of dates
|
lowfat/settings.py
|
lowfat/settings.py
|
"""
Django settings for lowfat project.
Generated by 'django-admin startproject' using Django 1.9.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
from collections import OrderedDict
URL_SRC = "https://github.com/softwaresaved/lowfat"
VERSION = "0.8.1"
SETTINGS_EXPORT = [
'URL_SRC',
'VERSION',
]
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '_iy7)5@ids_q5m(b4!q$-)ie)&-943zx37$+9-9b#988^*f-+4'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites',
'django.contrib.flatpages',
'django_countries',
'crispy_forms',
'social_django',
'dbbackup',
'constance',
'constance.backends.database',
'django_extensions',
'simple_history',
'lowfat',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'simple_history.middleware.HistoryRequestMiddleware',
]
ROOT_URLCONF = 'lowfat.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'django_settings_export.settings_export',
'social_django.context_processors.backends',
'constance.context_processors.config',
],
},
},
]
WSGI_APPLICATION = 'lowfat.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
AUTHENTICATION_BACKENDS = (
'social_core.backends.github.GithubOAuth2',
'django.contrib.auth.backends.ModelBackend',
)
SOCIAL_AUTH_GITHUB_KEY = ''
SOCIAL_AUTH_GITHUB_SECRET = ''
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = False
USE_TZ = False
DATE_FORMAT = "Y-m-d"
CRISPY_TEMPLATE_PACK = 'bootstrap3'
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
# Stored files
# https://docs.djangoproject.com/en/1.9/ref/settings/#media-url
MEDIA_URL = '/upload/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'upload')
# Authentication system
# https://docs.djangoproject.com/en/1.9/topics/auth/default/
LOGIN_URL = '/login/' # The URL where requests are redirected for login, especially when using the login_required() decorator.
LOGIN_REDIRECT_URL = '/dashboard/'
# Email
# Email backend for development (print on console)
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# Email backend for development (save on file)
# EMAIL_BACKEND = 'django.core.mail.backends.filebased.EmailBackend'
# EMAIL_FILE_PATH = '/tmp/lowfat-emails'
# Default email address to use for various automated correspondence from the site manager(s).
DEFAULT_FROM_EMAIL = 'no-reply@software.ac.uk'
# The email address that error messages come from.
SERVER_EMAIL = 'no-reply@software.ac.uk'
# A list of all the people who get code error notifications.
ADMINS = [
('admin', 'admin@software.ac.uk'),
]
# Subject-line prefix for email messages sent
EMAIL_SUBJECT_PREFIX = ""
# Backup
DBBACKUP_STORAGE = 'django.core.files.storage.FileSystemStorage'
DBBACKUP_STORAGE_OPTIONS = {
'location': os.path.join(BASE_DIR, 'backups'),
}
DBBACKUP_GPG_ALWAYS_TRUST = True
DBBACKUP_GPG_RECIPIENT = "" # XXX This variable need to be filled for --encrypt or --decrypt work properly.
# Run time variables
# Powered by Constance
CONSTANCE_BACKEND = 'constance.backends.database.DatabaseBackend'
CONSTANCE_IGNORE_ADMIN_VERSION_CHECK = True
CONSTANCE_CONFIG = OrderedDict([
("DOMAIN", (
"http://fellows.software.ac.uk",
"Used when write emails",
)),
("FELLOWS_MANAGEMENT_EMAIL", (
"fellows-management@software.ac.uk",
"Contact address to fellows management staffs",
)),
("STAFFS_EMAIL", (
"['Software Sustainability Institute <fellows-management@software.ac.uk>']",
"Contact address of staffs, e.g. ['John <john@example.com>', 'Mary <mary@example.com>']",
)),
("STAFF_EMAIL_NOTIFICATION", (
False,
"Notification to staffs by email",
)),
("CLAIMANT_EMAIL_NOTIFICATION", (
False,
"Notification to claimant by email",
)),
])
# Flatpages
SITE_ID = 1
|
Python
| 0.999994
|
@@ -3886,14 +3886,96 @@
= %22
-Y-m-d%22
+l, d F Y%22 # British English style%0ADATETIME_FORMAT = %22l, d F Y%22 # British English style
%0A%0ACR
|
11f13dd6fc3321e4b3e1fde7e85065392cecc988
|
Revise comments
|
lc0236_lowest_common_ancestor_of_a_binary_tree.py
|
lc0236_lowest_common_ancestor_of_a_binary_tree.py
|
"""Leetcode 236. Lowest Common Ancestor of a Binary Tree
Medium
URL: https://leetcode.com/problems/lowest-common-ancestor-of-a-binary-tree/
Given a binary tree, find the lowest common ancestor (LCA) of two given nodes
in the tree.
According to the definition of LCA on Wikipedia: "The lowest common ancestor
is defined between two nodes p and q as the lowest node in T that has both
p and q as descendants (where we allow a node to be a descendant of itself)."
Given the following binary tree: root = [3,5,1,6,2,0,8,null,null,7,4]
3
/ \
5 1
/ \ / \
6 2 0 8
/ \
7 4
Example 1:
Input: root = [3,5,1,6,2,0,8,null,null,7,4], p = 5, q = 1
Output: 3
Explanation: The LCA of nodes 5 and 1 is 3.
Example 2:
Input: root = [3,5,1,6,2,0,8,null,null,7,4], p = 5, q = 4
Output: 5
Explanation: The LCA of nodes 5 and 4 is 5, since a node can be a descendant of itself according to the LCA definition.
Note:
- All of the nodes' values will be unique.
- p and q are different and both values will exist in the binary tree.
"""
# Definition for a binary tree node.
class TreeNode(object):
def __init__(self, val):
self.val = val
self.left = None
self.right = None
class SolutionPreorderRecur(object):
def lowestCommonAncestor(self, root, p, q):
"""
:type root: TreeNode
:type p: TreeNode
:type q: TreeNode
:rtype: TreeNode
Time complexity: O(n).
Space complexity: O(logn) for balanced tree; O(n) for singly-linked list.
"""
# Apply recursive Preorder Traversal: root->left->right.
if not root:
return None
# Vist root: if root is p or q, return itself as p or q.
if root is p or root is q:
return root
# Visit left & right recursively.
left = self.lowestCommonAncestor(root.left, p, q)
right = self.lowestCommonAncestor(root.right, p, q)
if not left and not right:
# p & q are not in subtree.
return None
if left and right:
# p and q are in subtree.
return root
# Otherwise, p or q is in subtree.
return left or right
class SolutionPreorderIter(object):
def lowestCommonAncestor(self, root, p, q):
"""
:type root: TreeNode
:type p: TreeNode
:type q: TreeNode
:rtype: TreeNode
Time complexity: O(n).
Space complexity: O(logn) for balanced tree; O(n) for singly-linked list.
"""
# Use dict to memorize node and its parent.
child_parent = {}
child_parent[root] = None
# Apply iterative Preorder Traversal: root->left->right.
stack = [root]
while p not in child_parent or q not in child_parent:
current = stack.pop()
# Visit right and then left since we use stack with FILO.
if current.right:
child_parent[current.right] = current
stack.append(current.right)
if current.left:
child_parent[current.left] = current
stack.append(current.left)
# Use set to collect ancestors: reversely traverse p's parents.
ancestors = set()
while p:
ancestors.add(p)
p = child_parent[p]
while q not in ancestors:
# Then reversely traverse q's parents until meet one of p's parents.
q = child_parent[q]
return q
def main():
import time
root = TreeNode(3)
root.left = TreeNode(5)
root.right = TreeNode(1)
root.left.left = TreeNode(6)
root.left.right = TreeNode(2)
root.right.left = TreeNode(0)
root.right.right = TreeNode(8)
root.left.right.left = TreeNode(7)
root.left.right.right = TreeNode(4)
# Input: root = [3,5,1,6,2,0,8,null,null,7,4], p = 5, q = 1
# Output: 3
p = root.left
q = root.right
start_time = time.time()
print SolutionPreorderRecur().lowestCommonAncestor(root, p, q).val
print 'Time for recur: {}'.format(time.time() - start_time)
start_time = time.time()
print SolutionPreorderIter().lowestCommonAncestor(root, p, q).val
print 'Time for iter: {}'.format(time.time() - start_time)
# Input: root = [3,5,1,6,2,0,8,null,null,7,4], p = 5, q = 4
# Output: 5
p = root.left
q = root.left.right.right
start_time = time.time()
print SolutionPreorderRecur().lowestCommonAncestor(root, p, q).val
print 'Time for recur: {}'.format(time.time() - start_time)
start_time = time.time()
print SolutionPreorderIter().lowestCommonAncestor(root, p, q).val
print 'Time for iter: {}'.format(time.time() - start_time)
if __name__ == '__main__':
main()
|
Python
| 0
|
@@ -1946,47 +1946,8 @@
q)%0A%0A
- if not left and not right:%0A
@@ -1990,35 +1990,14 @@
- return None%0A%0A if
+if not
lef
@@ -2002,16 +2002,20 @@
eft and
+not
right:%0A
@@ -2019,22 +2019,43 @@
:%0A
+
+ return None%0A%0A
+
# p and
@@ -2064,32 +2064,59 @@
are in subtree.%0A
+ if left and right:%0A
retu
@@ -3330,46 +3330,8 @@
p%5D%0A%0A
- while q not in ancestors:%0A
@@ -3395,32 +3395,66 @@
of p's parents.%0A
+ while q not in ancestors:%0A
q =
|
fd426f479748826b4987f9037483686da5e344ec
|
Bump version to `1.0.2`
|
pomodoro_calculator/__init__.py
|
pomodoro_calculator/__init__.py
|
"""
A pretty command line tool to calculate the number
of Pomodori available between two points in time.
"""
__author__ = 'Matt Deacalion Stevens'
__version__ = '1.0.1'
import datetime
from itertools import cycle
class PomodoroCalculator:
"""
Calculates the number of Pomodori available in an amount of time.
"""
def __init__(self, end, start='now', short_break=5, long_break=15,
pomodoro_length=25, group_length=4, interval=False, amount=False):
self.pomodoro_length_seconds = pomodoro_length * 60
self.amount_mode = False
if start == 'now':
self.start = datetime.datetime.now()
else:
self.start = self._create_datetime(start)
if interval:
self.end = self.start + self._create_timedelta(end)
elif amount:
# set dummy end. So we don't crash.
self.end = self.start + self._create_timedelta("48:00:00")
self.amount_mode = True
self.amount = int(end)
else:
self.end = self._create_datetime(end)
# if the end time is earlier than the start,
# overlap to the next day
if self.end.time() < self.start.time():
self.end += datetime.timedelta(days=1)
self.group_length = group_length
self.short_break = short_break
self.long_break = long_break
@property
def short_break_seconds(self):
"""
Returns `short_break` in seconds.
"""
return self.short_break * 60
@property
def long_break_seconds(self):
"""
Returns `long_break` in seconds.
"""
return self.long_break * 60
@property
def total_seconds(self):
"""
Return the total time span in seconds.
"""
delta = self.end - self.start
return int(delta.total_seconds())
def _create_timedelta(self, time_string):
"""
Takes a string in the format of 'HH:MM:SS' and returns a timedelta.
"""
args = dict(zip(
['hours', 'minutes', 'seconds'],
[int(unit) for unit in time_string.split(':')],
))
return datetime.timedelta(**args)
def _create_datetime(self, time_string):
"""
Takes a string in the format of 'HH:MM:SS' and returns a datetime.
"""
args = dict(zip(
['hour', 'minute', 'second'],
[int(unit) for unit in time_string.split(':')],
))
return datetime.datetime.now().replace(**args)
def _get_item(self, offset, item_type, index):
"""
Returns one of three types of Pomodori entities. A short break, a long
break or the Pomodoro itself. The returned dict also contains the
start and end datetimes.
"""
types = {
'short-break': self.short_break_seconds,
'long-break': self.long_break_seconds,
'pomodoro': self.pomodoro_length_seconds,
}
start = self.end - datetime.timedelta(seconds=offset)
end = start + datetime.timedelta(seconds=types[item_type])
return {
'index': index,
'pomodori-index': index // 2 + 1,
'type': item_type,
'start': start,
'end': end,
'length': int((end - start).total_seconds()),
}
def pomodori_segments(self, group_length=4):
"""
Generate Pomodori along with the short and long breaks in between.
Credit: http://codereview.stackexchange.com/questions/53970
"""
# every fourth Pomodori precedes a long break,
# all others have short breaks following them
return cycle(
['pomodoro', 'short-break'] * (group_length - 1) + ['pomodoro', 'long-break'],
)
def pomodori_schedule(self):
"""
Returns a Pomodori schedule, which is a dict that contains a
list of Pomodori segments (Pomodoro, short break or long
break) in chronological order.
Credit: http://codereview.stackexchange.com/questions/53970
"""
available_time = self.total_seconds
segments = []
# make sure we have enough time for at least one Pomodoro
if available_time < self.pomodoro_length_seconds:
return
for i, segment_name in enumerate(self.pomodori_segments(self.group_length)):
segment = self._get_item(available_time, segment_name, i + 1)
if self.amount_mode and segment['pomodori-index'] > self.amount:
break
elif segment['length'] > available_time:
break
available_time -= segment['length']
segments.append(segment)
if segments and segments[-1]['type'].endswith('break'):
segments.pop()
work_segments = [seg for seg in segments if seg['type'] == 'pomodoro']
rest_segments = [seg for seg in segments if seg['type'].endswith('break')]
return {
'segments': segments,
'start': self.start,
'end': segments[-1]['end'],
'seconds-per-pomodoro': self.pomodoro_length_seconds,
'total-pomodori': len(work_segments),
'total-breaks': len(rest_segments),
'total-rest-time': sum(seg['length'] for seg in rest_segments),
'total-work-time': sum(seg['length'] for seg in work_segments),
}
def humanise_seconds(seconds):
"""
Takes `seconds` as an integer and returns a human readable
string, e.g. "2 hours, 5 minutes".
"""
units = []
unit_table = [('hour', 3600), ('minute', 60)]
for unit in unit_table:
quotient, seconds = divmod(seconds, unit[1])
if quotient:
units.append(
'{} {}'.format(
quotient,
unit[0] + ('s' if quotient > 1 else ''),
)
)
return ', '.join(units)
|
Python
| 0
|
@@ -159,17 +159,17 @@
= '1.0.
-1
+2
'%0A%0Aimpor
|
59ec603a7bcaabd7bac5901bc265920682d4cfcf
|
Add another missing self
|
tictactoe.py
|
tictactoe.py
|
from logishort import *
from getch import *
from logipy import logi_led
from logimap import logimap
import time
class TicTacToe:
def __init__(self):
init()
time.sleep(1)
self.accepted_keys = {
't':[0x14, 0, 0],
'y':[0x15, 0, 1],
'u':[0x16, 0, 2],
'g':[0x22, 1, 0],
'h':[0x23, 1, 1],
'j':[0x24, 1, 2],
'b':[0x30, 2, 0],
'n':[0x31, 2, 1],
',':[0x32, 2, 2]
}
self.board = [[0, 0, 0], [0, 0, 0], [0, 0, 0]]
all(20, 20, 100)
self.turn_count = 0
self.someone_won = 0
self.draw()
def getKey(self, c):
if self.accepted_keys.get(c) != None and c != b'\x1b':
self.someone_won = play(c)
self.turn_count += 1
if self.someone_won == 0 and (c == b'\x1b' or self.turn_count >= 9):
self.someone_won = -1
if self.someone_won != 0:
print("Player " + str(someone_won) + " won !!")
shutdown()
def draw(self):
for key in self.accepted_keys:
k = self.accepted_keys[key]
if(self.board[k[1]][k[2]] == 0):
one(k[0], 0, 0, 0)
if(self.board[k[1]][k[2]] == 1):
one(k[0], 100, 0, 0)
if(self.board[k[1]][k[2]] == 2):
one(k[0], 0, 100, 0)
def checkWin(self):
b = self.board
won = 0
#print(str(b[0][0]) + " " + str(b[0][1]) + " " + str(b[0][2]))
#print(str(b[0][0] == b[0][1] and b[0][0] == b[0][2]))
if (b[0][0] == b[0][1] and b[0][0] == b[0][2]) or (b[0][0] == b[1][0] and b[0][0] == b[2][0]) or (b[0][0] == b[1][1] and b[0][0] == b[2][2]):
won = int(b[0][0])
elif (b[1][1] == b[2][1] and b[1][1] == b[1][2]) or (b[1][1] == b[1][0] and b[1][1] == b[1][2]) or (b[1][1] == b[0][1] and b[1][1] == b[2][1]):
won = int(b[1][1])
elif (b[2][2] == b[2][0] and b[2][2] == b[2][1]) or (b[2][2] == b[0][2] and b[2][2] == b[1][2]):
won = int(b[2][2])
return won
def play(c):
k = self.accepted_keys.get(c)
player = self.turn_count % 2 + 1
self.board[k[1]][k[2]] = player
self.draw()
return self.checkWin()
|
Python
| 0.001914
|
@@ -772,16 +772,21 @@
e_won =
+self.
play(c)%0A
|
7523ff90cadcefe3d51682d3301f7ceb51c70ced
|
Revert "Corrige a resolução"
|
timelapse.py
|
timelapse.py
|
import os
import datetime
import time
import picamera
from PIL import Image, ImageStat, ImageFont, ImageDraw
with picamera.PiCamera() as camera:
camera.resolution = (1024, 728)
camera.rotation = 180
time.sleep(2) # camera warm-up time
for filename in camera.capture_continuous('images/img_{timestamp:%Y%m%d%H%M%S}.png'):
image = Image.open(filename)
stat = ImageStat.Stat(image)
r, g, b, _ = stat.mean
if r < 50 and g < 50 and b < 50:
print('[!] Lights must be powered off, sleeping...')
try:
os.unlink(filename)
except:
pass
time.sleep(60 * 5)
else:
annotate_text = datetime.datetime.now().strftime('%H:%M:%S @ %d/%m/%Y')
draw = ImageDraw.Draw(image)
font = ImageFont.truetype('/usr/share/fonts/truetype/roboto/Roboto-Regular.ttf', 24)
draw.text((10, 700), annotate_text, (255, 255, 0), font=font)
image.save(filename)
print('[!] Taken: {}'.format(filename))
time.sleep(60 / 2)
image.close()
|
Python
| 0
|
@@ -174,17 +174,17 @@
(1024, 7
-2
+6
8)%0A c
@@ -549,17 +549,36 @@
ing...')
-%0A
+ %0A
%0A
@@ -664,16 +664,28 @@
pass%0A
+
%0A
|
60f89131b8f18046e4504b20c64f95cb3b30085a
|
Make sure we allow https flv files
|
apps/videos/types/flv.py
|
apps/videos/types/flv.py
|
# Universal Subtitles, universalsubtitles.org
#
# Copyright (C) 2010 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.
from videos.types.base import VideoType
import re
URL_REGEX = re.compile('^http://.+/.+\.flv$', re.I)
class FLVVideoType(VideoType):
abbreviation = 'L'
name = 'FLV'
@classmethod
def matches_video_url(cls, url):
url = cls.format_url(url)
return bool(URL_REGEX.match(url))
|
Python
| 0
|
@@ -851,16 +851,20 @@
e('%5Ehttp
+(s)?
://.+/.+
@@ -1089,16 +1089,17 @@
h(url))
+%0A
|
2d7306be270738de5e3c2e66d18706c9e530a000
|
fix some bugs
|
handlers/displaygroup.py
|
handlers/displaygroup.py
|
import tornado.web
import tornado.gen
import tornado.httpclient
import json
import io
import logging
import datetime
import motor
from bson.objectid import ObjectId
import mickey.userfetcher
from mickey.basehandler import BaseHandler
import mickey.commonconf
import mickey.tp
class DisplayGroupHandler(BaseHandler):
@tornado.web.asynchronous
@tornado.gen.coroutine
def post(self):
coll = self.application.db.groups
token = self.request.headers.get("Authorization", "")
chatcoll = self.application.db.tbchats
usercoll = self.application.userdb.users
data = json.loads(self.request.body.decode("utf-8"))
groupid = data.get("groupid", None)
chatid = data.get("chatid", None)
if not groupid and not chatid:
logging.error("invalid parameter")
self.set_status(403)
self.finish()
return
logging.info("begin to display group %s" % groupid)
if not groupid and chatid:
chat = yield chatcoll.find_one({"id":chatid})
if not chat:
self.set_status(404)
self.finish()
return
groupid = chat.get('gid', '')
result = yield coll.find_one({"_id":ObjectId(groupid)})
if result:
#check tp chat id
tp_chatid = result.get('tp_chatid', None)
if not tp_chatid:
owner = result.get('owner', '')
mickey.tp.addgroup(groupid, owner, "", True)
#set new expire
expire_set = result.get('expireAt', None)
if expire_set:
new_expiredate = datetime.datetime.utcnow() + datetime.timedelta(days = mickey.commonconf.conf_expire_time)
modresult = yield coll.find_and_modify({"_id":ObjectId(groupid)},
{
"$set":{"expireAt": new_expiredate},
"$unset": {"garbage": 1}
})
groupinfo = {}
groupinfo["groupid"] = groupid
rs_members = []
for item in result.get("members", []):
u_member = {}
u_id = item.get("id", "")
u_member["id"] = u_id
u_member["remark"] = item.get("remark", "")
# get user information
c_info = yield mickey.userfetcher.getcontact(u_id, token)
if not c_info:
logging.error("get user info failed %s" % u_id)
continue
u_member["nickname"] = c_info.get("commName", "")
u_member["name"] = c_info.get("name", "")
u_member["type"] = c_info.get("type", "PERSON")
u_member["contactInfos"] = c_info.get("contactInfos", [])
rs_members.append(u_member)
rs_appendings = []
for item in result.get("appendings", []):
u_appending = {}
u_id = item.get("id", "")
u_appending["id"] = u_id
u_appending["remark"] = item.get("remark", "")
# get user information
c_info = yield mickey.userfetcher.getcontact(u_id, token)
if not c_info:
logging.error("get user info failed %s" % u_id)
continue
u_appending["nickname"] = c_info.get("commName", "")
u_appending["name"] = c_info.get("name", "")
u_appending["type"] = c_info.get("type", "PERSON")
u_appending["contactInfos"] = c_info.get("contactInfos", [])
rs_appendings.append(u_appending)
groupinfo["members"] = rs_members
groupinfo["invitees"] = result.get("invitees", [])
groupinfo["appendings"] = rs_appendings
groupinfo["name"] = result.get("name", "")
groupinfo["owner"] = result.get("owner", "")
groupinfo["vip"] = result.get("vip", "false")
groupinfo["vipname"] = result.get("vipname", "")
groupinfo["invite"] = result.get("invite", "free")
groupinfo["tp_chatid"] = result.get("tp_chatid", "")
self.write(groupinfo)
else:
logging.error("group %s does not exist" % groupid)
self.set_status(404)
self.write({"error":"not found"});
self.finish()
|
Python
| 0.000008
|
@@ -257,25 +257,8 @@
conf
-%0Aimport mickey.tp
%0A%0Acl
@@ -1286,232 +1286,8 @@
lt:%0A
- #check tp chat id%0A tp_chatid = result.get('tp_chatid', None)%0A if not tp_chatid:%0A owner = result.get('owner', '')%0A mickey.tp.addgroup(groupid, owner, %22%22, True)%0A%0A
|
b856016182a9a0c97ccb5e6593aa16f3a269bf79
|
fix ToDoList class method add_todo to pass non_boolean test
|
todo_list.py
|
todo_list.py
|
import todo_item
class ToDoList(object):
def __init__(self, name, description, todo_items):
self.name = name
self.description = description
self.todo_items = todo_items
def add_todo(self, content, complete = False, *args):
item = todo_item.ToDoItem(content, complete, *args)
self.todo_items.append(item)
def finish_item(self, index):
if index >= len(self.todo_items) or index < 0:
return 'That to do item does not exist'
self.todo_items[index] = True
def edit_item(self, index, content):
self.todo_items[index] = content
def delete_item(self, index):
del self.todo_items[index]
def percentage_completed(self):
completed_items = 0
for item in self.todo_items:
if item.complete:
completed_items += 1
percentage = 100 * (completed_items/len(self.todo_items))
return percentage
|
Python
| 0.000002
|
@@ -225,16 +225,86 @@
*args):%0A
+%09%09if type(complete) != type(True):%0A%09%09%09self.complete = False%0A%09%09%09return%0A
%09%09item =
|
c000dd1d0940b47c13761bb09e0cb50a2adc6a2e
|
Handle token_endpoint auth type in osc plugin
|
heatclient/osc/plugin.py
|
heatclient/osc/plugin.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""OpenStackClient plugin for Orchestration service."""
import logging
from osc_lib import utils
LOG = logging.getLogger(__name__)
DEFAULT_ORCHESTRATION_API_VERSION = '1'
API_VERSION_OPTION = 'os_orchestration_api_version'
API_NAME = 'orchestration'
API_VERSIONS = {
'1': 'heatclient.v1.client.Client',
}
def make_client(instance):
"""Returns an orchestration service client"""
heat_client = utils.get_client_class(
API_NAME,
instance._api_version[API_NAME],
API_VERSIONS)
LOG.debug('Instantiating orchestration client: %s', heat_client)
kwargs = {'region_name': instance.region_name,
'interface': instance.interface}
if instance.session:
kwargs.update({'session': instance.session,
'service_type': API_NAME})
else:
endpoint = instance.get_endpoint_for_service_type(
API_NAME,
region_name=instance.region_name,
interface=instance.interface,
)
kwargs.update({'endpoint': endpoint,
'auth_url': instance.auth.auth_url,
'username': instance.auth_ref.username,
'token': instance.auth_ref.auth_token})
client = heat_client(**kwargs)
return client
def build_option_parser(parser):
"""Hook to add global options"""
parser.add_argument(
'--os-orchestration-api-version',
metavar='<orchestration-api-version>',
default=utils.env(
'OS_ORCHESTRATION_API_VERSION',
default=DEFAULT_ORCHESTRATION_API_VERSION),
help='Orchestration API version, default=' +
DEFAULT_ORCHESTRATION_API_VERSION +
' (Env: OS_ORCHESTRATION_API_VERSION)')
return parser
|
Python
| 0.000044
|
@@ -1370,16 +1370,180 @@
_NAME%7D)%0A
+ elif instance.auth_plugin_name == 'token_endpoint':%0A kwargs.update(%7B'endpoint': instance.auth.url,%0A 'token': instance.auth.token%7D)%0A
else
@@ -1723,17 +1723,16 @@
)%0A
-%0A
|
ab365a6fdf39feed6f529a4a5170c2d9f674b706
|
fix unicode issue
|
weboob/backends/orange/pages/compose.py
|
weboob/backends/orange/pages/compose.py
|
# -*- coding: utf-8 -*-
# Copyright(C) 2010-2011 Nicolas Duhamel
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import re
from weboob.capabilities.messages import CantSendMessage
from weboob.tools.browser import BasePage
__all__ = ['ComposePage', 'ConfirmPage']
class ConfirmPage(BasePage):
def on_loaded(self):
pass
class ComposePage(BasePage):
phone_regex = re.compile('^(\+33|0033|0)(6|7)(\d{8})$')
def on_loaded(self):
#Deal with bad encoding... for ie6 ...
response = self.browser.response()
response.set_data(response.get_data().decode('utf-8', 'ignore') )
self.browser.set_response(response)
def get_nb_remaining_free_sms(self):
return "0"
def post_message(self, message, sender):
receiver = message.thread.id
if self.phone_regex.match(receiver) is None:
raise CantSendMessage(u'Invalid receiver: %s' % receiver)
listetel = ",,"+ receiver
#Fill the form
self.browser.select_form(name="formulaire")
self.browser.new_control("hidden", "autorize",{'value':''})
self.browser.new_control("textarea", "msg", {'value':''})
self.browser.set_all_readonly(False)
self.browser["corpsms"] = message.content
self.browser["pays"] = "33"
self.browser["listetel"] = listetel
self.browser["reply"] = "2"
self.browser["typesms"] = "2"
self.browser["produit"] = "1000"
self.browser["destToKeep"] = listetel
self.browser["NUMTEL"] = sender
self.browser["autorize"] = "1"
self.browser["msg"] = message.content.encode('utf-8')
self.browser.submit()
|
Python
| 0.000099
|
@@ -1883,16 +1883,32 @@
.content
+.encode('utf-8')
%0A
|
41b532c882446d855b48d730fdd7f804668eab7e
|
Fix problem with Django Admin -> It always uses username as the username field
|
axes/utils.py
|
axes/utils.py
|
from __future__ import unicode_literals
try:
import win_inet_pton # pylint: disable=unused-import
except ImportError:
pass
from socket import error, inet_pton, AF_INET6
from django.core.cache import caches
from django.utils import six
import ipware.ip2
from axes.conf import settings
from axes.models import AccessAttempt
def get_axes_cache():
return caches[getattr(settings, 'AXES_CACHE', 'default')]
def query2str(items, max_length=1024):
"""Turns a dictionary into an easy-to-read list of key-value pairs.
If there's a field called "password" it will be excluded from the output.
The length of the output is limited to max_length to avoid a DoS attack
via excessively large payloads.
"""
return '\n'.join([
'%s=%s' % (k, v) for k, v in six.iteritems(items)
if k != settings.AXES_PASSWORD_FORM_FIELD
][:int(max_length / 2)])[:max_length]
def get_client_str(username, ip_address, user_agent=None, path_info=None):
if settings.AXES_VERBOSE:
if isinstance(path_info, tuple):
path_info = path_info[0]
details = "{{user: '{0}', ip: '{1}', user-agent: '{2}', path: '{3}'}}"
return details.format(username, ip_address, user_agent, path_info)
if settings.AXES_ONLY_USER_FAILURES:
client = username
elif settings.AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP:
client = '{0} from {1}'.format(username, ip_address)
else:
client = ip_address
if settings.AXES_USE_USER_AGENT:
client += '(user-agent={0})'.format(user_agent)
return client
def get_client_ip(request):
client_ip_attribute = 'axes_client_ip'
if not hasattr(request, client_ip_attribute):
client_ip, _ = ipware.ip2.get_client_ip(
request,
proxy_order=settings.AXES_PROXY_ORDER,
proxy_count=settings.AXES_PROXY_COUNT,
proxy_trusted_ips=settings.AXES_PROXY_TRUSTED_IPS,
request_header_order=settings.AXES_META_PRECEDENCE_ORDER,
)
setattr(request, client_ip_attribute, client_ip)
return getattr(request, client_ip_attribute)
def get_client_username(request, credentials=None):
if settings.AXES_USERNAME_CALLABLE:
return settings.AXES_USERNAME_CALLABLE(request, credentials)
if credentials is None:
return request.POST.get(settings.AXES_USERNAME_FORM_FIELD, None)
return credentials.get(settings.AXES_USERNAME_FORM_FIELD, None)
def is_ipv6(ip):
try:
inet_pton(AF_INET6, ip)
except (OSError, error):
return False
return True
def reset(ip=None, username=None):
"""Reset records that match ip or username, and
return the count of removed attempts.
"""
attempts = AccessAttempt.objects.all()
if ip:
attempts = attempts.filter(ip_address=ip)
if username:
attempts = attempts.filter(username=username)
count, _ = attempts.delete()
return count
def iso8601(timestamp):
"""Returns datetime.timedelta translated to ISO 8601 formatted duration.
"""
seconds = timestamp.total_seconds()
minutes, seconds = divmod(seconds, 60)
hours, minutes = divmod(minutes, 60)
days, hours = divmod(hours, 24)
date = '{:.0f}D'.format(days) if days else ''
time_values = hours, minutes, seconds
time_designators = 'H', 'M', 'S'
time = ''.join([
('{:.0f}'.format(value) + designator)
for value, designator in zip(time_values, time_designators)
if value]
)
return 'P' + date + ('T' + time if time else '')
def get_lockout_message():
if settings.AXES_COOLOFF_TIME:
return settings.AXES_COOLOFF_MESSAGE
return settings.AXES_PERMALOCK_MESSAGE
|
Python
| 0.000002
|
@@ -2352,41 +2352,18 @@
get(
-settings.AXES_USERNAME_FORM_FIELD
+'username'
, No
|
54b319a09c71e707eb87195ce79e59bfb2b0daaa
|
Fix scene management
|
ezpygame.py
|
ezpygame.py
|
"""Easier and more pythonic usage of :mod:`pygame`."""
import pygame
class Application:
"""The class for creating a :mod:`pygame` application.
A simple wrapper around :mod:`pygame`, which initializes and quits
:mod:`pygame` as the application starts/ends. Also makes the scene
management seamless and fun together with :class:`Scene`.
Example usage:
.. code-block:: python
class Menu(ezpygame.Scene):
...
class Game(ezpygame.Scene):
...
app = ezpygame.Application(
title='My First EzPyGame Application',
resolution=(1280, 720),
update_rate=60,
)
main_menu = Menu()
app.run(main_menu)
"""
def __init__(self,
title='EzPyGame App',
resolution=(640, 480),
update_rate=30,
initial_scene=None):
"""Initialize the application with window settings.
:param str title: title to display in the window's title bar
:param tuple[int,int] resolution: resolution of the game window
:param int update_rate: how many times per second to update
:param Scene|None initial_scene: scene where to start from
"""
pygame.init()
self.update_rate = update_rate
self._scene = initial_scene
# Trigger property setters
self.title = title
self.resolution = resolution
@property
def title(self):
"""The title to display in the application's game window."""
return pygame.display.get_caption()
@title.setter
def title(self, value):
pygame.display.set_caption(value)
@property
def resolution(self):
"""The application's game window's resolution."""
return self._screen.get_size()
@resolution.setter
def resolution(self, value):
self._screen = pygame.display.set_mode(value)
@property
def active_scene(self):
"""The currently active scene."""
return self._scene
def change_scene(self, scene):
"""Change the currently active scene in the application.
This will change the current scene and invoke
:meth:`Scene.on_exit` and :meth:`Scene.on_enter`
on the switching scenes (unless ``None``).
The scene will change after the next update.
:param Scene scene: the scene to change into
"""
if self.active_scene is not None:
self.active_scene.on_exit(self, next_scene=scene)
self._scene, old_scene = scene, self.active_scene
if self.active_scene is not None:
self.active_scene.on_enter(self, previous_scene=old_scene)
def run(self, scene=None):
"""Run the application.
:param Scene scene: initial scene to start the execution from
"""
if scene is None:
if self.active_scene is None:
raise ValueError('No scene provided')
else:
self._scene = scene
clock = pygame.time.Clock()
done = False
while not done and self.active_scene is not None:
self.active_scene.draw(self, self._screen)
pygame.display.update()
for event in pygame.event.get():
self.active_scene.handle_event(self, event)
if event.type == pygame.QUIT:
done = True
dt = clock.tick(self._update_rate)
self.active_scene.update(self, dt)
if self.active_scene: # Exit happened through done = True
self.change_scene(None) # Trigger on_exit()
pygame.quit()
class Scene:
"""An individual scene in the application.
Create a scene by subclassing and overriding any of the methods.
Example usage with two scenes interacting:
.. code-block:: python
class Menu(Scene):
def __init__(self):
self.font = pygame.font.Font(...)
def on_enter(self, app, previous_scene):
app.update_settings(title='Main Menu', update_rate=30)
def draw(self, app, screen):
pygame.draw.rect(...)
text = self.font.render(...)
screen.blit(text, ...)
def handle_event(self, app, event):
if event.type == pygame.MOUSEBUTTONUP:
if event.button == 1:
game_size = self._get_game_size(event.pos)
self.change_scene(Game(game_size))
def _get_game_size(self, mouse_pos_upon_click):
...
class Game(ezpygame.Scene):
def __init__(self, size):
super().__init__()
self.size = size
self.player = ...
...
def on_enter(self, app, previous_scene):
self.previous_scene = previous_scene
app.update_settings(title='The Game!', update_rate=60)
def draw(self, app, screen):
self.player.draw(screen)
for enemy in self.enemies:
...
def update(self, app, dt):
self.player.move(dt)
...
if self.player.is_dead():
app.change_scene(self.previous_scene)
elif self.player_won():
app.change_scene(...)
def handle_event(self, app, event):
... # Player movement etc.
"""
def draw(self, app, screen):
"""Draw the scene.
:param Application app: application running the scene
:param pygame.Surface screen: screen to draw the scene on
"""
def update(self, app, dt):
"""Update the scene.
:param Application app: application running the scene
:param int dt: time in milliseconds since the last update
"""
def handle_event(self, app, event):
"""Process an event.
All of :mod:`pygame`'s events are sent here, so filtering
should be applied manually in the subclass.
:param Application app: application running the scene
:param pygame.event.Event event: event to handle
"""
def on_enter(self, app, previous_scene=None):
"""The scene is entered.
:param Application app: application running the scene
:param Scene previous_scene: previous scene to run, or ``None``
"""
def on_exit(self, app, next_scene=None):
"""The scene is exited.
:param Application app: application running the scene
:param Scene next_scene: next scene to run, or ``None``
"""
|
Python
| 0.000061
|
@@ -864,45 +864,8 @@
e=30
-,%0A initial_scene=None
):%0A
@@ -1137,75 +1137,8 @@
ate%0A
- :param Scene%7CNone initial_scene: scene where to start from%0A
@@ -1232,19 +1232,10 @@
e =
-initial_sce
+No
ne%0A
@@ -2890,30 +2890,35 @@
self.
+change
_scene
- =
+(
scene
+)
%0A%0A
@@ -2960,47 +2960,13 @@
-done = False%0A while not done and
+while
sel
@@ -3257,27 +3257,93 @@
-done = True
+self.change_scene(None) # Trigger Scene.on_exit()%0A return
%0A%0A
@@ -3435,156 +3435,8 @@
t)%0A%0A
- if self.active_scene: # Exit happened through done = True%0A self.change_scene(None) # Trigger on_exit()%0A%0A pygame.quit()%0A%0A
%0Acla
|
ca327b35c2e45329962da0dc04cfe2354ffd8b35
|
add lcm gl support to testDrakeVisualizer.py
|
src/python/tests/testDrakeVisualizer.py
|
src/python/tests/testDrakeVisualizer.py
|
from ddapp.consoleapp import ConsoleApp
from ddapp.screengrabberpanel import ScreenGrabberPanel
from ddapp.drakevisualizer import DrakeVisualizer
from ddapp import objectmodel as om
from ddapp import applogic
from PythonQt import QtCore, QtGui
class DrakeVisualizerApp(ConsoleApp):
def __init__(self):
ConsoleApp.__init__(self)
self.view = self.createView()
self.mainWindow = QtGui.QMainWindow()
self.mainWindow.setCentralWidget(self.view)
self.mainWindow.resize(768 * (16/9.0), 768)
self.mainWindow.setWindowTitle('Drake Visualizer')
self.mainWindow.setWindowIcon(QtGui.QIcon(':/images/drake_logo.png'))
self.drakeVisualizer = DrakeVisualizer(self.view)
self.screenGrabberPanel = ScreenGrabberPanel(self.view)
self.screenGrabberDock = self.addWidgetToDock(self.screenGrabberPanel.widget, QtCore.Qt.RightDockWidgetArea)
self.screenGrabberDock.setVisible(False)
model = om.getDefaultObjectModel()
model.getTreeWidget().setWindowTitle('Scene Browser')
model.getPropertiesPanel().setWindowTitle('Properties Panel')
self.sceneBrowserDock = self.addWidgetToDock(model.getTreeWidget(), QtCore.Qt.LeftDockWidgetArea)
self.propertiesDock = self.addWidgetToDock(model.getPropertiesPanel(), QtCore.Qt.LeftDockWidgetArea)
self.sceneBrowserDock.setVisible(False)
self.propertiesDock.setVisible(False)
applogic.addShortcut(self.mainWindow, 'Ctrl+Q', self.quit)
applogic.addShortcut(self.mainWindow, 'F1', self.toggleObjectModel)
applogic.addShortcut(self.mainWindow, 'F2', self.toggleScreenGrabber)
def toggleObjectModel(self):
self.sceneBrowserDock.setVisible(not self.sceneBrowserDock.visible)
self.propertiesDock.setVisible(not self.propertiesDock.visible)
def toggleScreenGrabber(self):
self.screenGrabberDock.setVisible(not self.screenGrabberDock.visible)
def addWidgetToDock(self, widget, dockArea):
dock = QtGui.QDockWidget()
dock.setWidget(widget)
dock.setWindowTitle(widget.windowTitle)
self.mainWindow.addDockWidget(dockArea, dock)
return dock
def main():
# use global so the variable is available in the python console
global app
app = DrakeVisualizerApp()
app.setupGlobals(globals())
app.mainWindow.show()
app.start()
if __name__ == '__main__':
main()
|
Python
| 0
|
@@ -139,16 +139,53 @@
ualizer%0A
+from ddapp.lcmgl import LCMGLManager%0A
from dda
@@ -239,17 +239,16 @@
pplogic%0A
-%0A
from Pyt
@@ -759,16 +759,68 @@
lf.view)
+%0A self.lcmglManager = LCMGLManager(self.view)
%0A%0A
|
d73d1ab8844ac0048c3ad0ebd2d2b6c12b8c606c
|
Make cli for getting files from google folder
|
featured.py
|
featured.py
|
import httplib2
import os
import datetime
import argparse
import io
import sys
import datetime
from apiclient import discovery
import oauth2client
from oauth2client import client
from oauth2client import tools
from googleapiclient.http import MediaIoBaseDownload
from flask import Flask
from flask_restful import Resource, Api, reqparse
SCOPES = 'https://www.googleapis.com/auth/drive'
CLIENT_SECRET_FILE = 'client_secret.json'
APPLICATION_NAME = 'Signup Form'
FEATURED_DOGS_FOLDER = '0B-mj_1f0ZZvRSk9XNXpaWDN1eU0'
def get_credentials():
"""Gets valid user credentials from storage.
If nothing has been stored, or if the stored credentials are invalid,
the OAuth2 flow is completed to obtain the new credentials.
Returns:
Credentials, the obtained credential.
"""
home_dir = os.path.expanduser('~')
credential_dir = os.path.join(home_dir, '.credentials')
if not os.path.exists(credential_dir):
os.makedirs(credential_dir)
credential_path = os.path.join(credential_dir,
'drive.googleapis.com-signup.json')
store = oauth2client.file.Storage(credential_path)
credentials = store.get()
if not credentials or credentials.invalid:
flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)
flow.user_agent = APPLICATION_NAME
flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()
credentials = tools.run_flow(flow, store, flags)
return credentials
def get_service(credentials):
http = credentials.authorize(httplib2.Http())
service = discovery.build('drive', 'v3', http=http)
return service
def download(service, file_id, target):
request = service.files().get_media(fileId=file_id)
fh = io.BytesIO()
downloader = MediaIoBaseDownload(fh, request)
done = False
while done is False:
status, done = downloader.next_chunk()
with open(target, 'w+b') as out:
out.write(fh.getvalue())
def get_files(service, since=None):
"""Get files that were modified after 'since' date.
'since' is a datetime in UTC.
"""
if since is None:
since = datetime.datetime.fromtimestamp(0)
q = "'{FOLDER}' in parents and modifiedTime > '{SINCE}'".format(
FOLDER=FEATURED_DOGS_FOLDER,
SINCE=since.isoformat())
files = service.files().list(
q=q, fields='files(description,id,name)').execute()
if not files['files']:
return
q = "'{FOLDER}' in parents and not trashed".format(
FOLDER=FEATURED_DOGS_FOLDER)
files = service.files().list(
q=q, fields='files(description,id,name)').execute()
pics = []
for file in files['files']:
target = os.path.join('featured', file['name'])
download(service, file['id'], target)
pics.append("""
<div class="carousel-item" >
<a class=""
href="#one!"><img src="{TARGET}"></a>
<p class="center">
{DESCR}
</p>
</div>""".format(
TARGET=target,
DESCR=file['description']))
template = open('index.html.template').read()
with open('index.html', 'w') as out:
out.write(template.format(CAROUSEL=''.join(pics)))
|
Python
| 0
|
@@ -3368,37 +3368,321 @@
%0A
- %0A %0A
+%0Aif __name__ == '__main__':%0A srv = get_service(get_credentials())%0A try:%0A last = datetime.datetime.fromtimestamp(float(open('last').readline()))%0A except IOError:%0A last = None%0A get_files(srv, last)%0A with open('last', 'w') as out:%0A out.write(str(datetime.datetime.now().timestamp()))
%0A
|
38939635530223ef7d736c19c9c2d666c67baca4
|
fix file format generated by ffhlwiki.py
|
ffhlwiki.py
|
ffhlwiki.py
|
#!/usr/bin/env python3
import json
import argparse
from itertools import zip_longest
from urllib.request import urlopen
from bs4 import BeautifulSoup
def import_wikigps(url):
def fetch_wikitable(url):
f = urlopen(url)
soup = BeautifulSoup(f)
table = soup.find_all("table")[0]
rows = table.find_all("tr")
headers = []
data = []
def maybe_strip(x):
if isinstance(x.string, str):
return x.string.strip()
else:
return ""
for row in rows:
tds = list([maybe_strip(x) for x in row.find_all("td")])
ths = list([maybe_strip(x) for x in row.find_all("th")])
if any(tds):
data.append(tds)
if any(ths):
headers = ths
nodes = []
for d in data:
nodes.append(dict(zip(headers, d)))
return nodes
nodes = fetch_wikitable(url)
aliases = []
for node in nodes:
try:
node['MAC'] = node['MAC'].split(',')
except KeyError:
pass
try:
node['GPS'] = node['GPS'].split(',')
except KeyError:
pass
try:
node['Knotenname'] = node['Knotenname'].split(',')
except KeyError:
pass
nodes = zip_longest(node['MAC'], node['GPS'], node['Knotenname'])
for data in nodes:
alias = {}
alias['mac'] = data[0].strip()
if data[1]:
alias['gps'] = data[1].strip()
if data[2]:
alias['name'] = data[2].strip()
aliases.append(alias)
return aliases
parser = argparse.ArgumentParser()
parser.add_argument('url', help='wiki URL')
args = parser.parse_args()
options = vars(args)
aliases = import_wikigps(options['url'])
print(json.dumps(aliases))
|
Python
| 0
|
@@ -848,26 +848,26 @@
aliases =
-%5B%5D
+%7B%7D
%0A%0A for node
@@ -1268,20 +1268,11 @@
-alias%5B'
mac
-'%5D
= d
@@ -1421,22 +1421,21 @@
ases
-.append(
+%5Bmac%5D =
alias
-)
%0A%0A
|
ce2c34fc9dc010429047613b6bdfc513c799987d
|
update projecs with voting status and check deadline
|
bluebottle/projects/management/commands/cron_status_realised.py
|
bluebottle/projects/management/commands/cron_status_realised.py
|
from django.core.management.base import BaseCommand, CommandError
from django.utils.timezone import now
from django.db import connection
from bluebottle.clients.models import Client
from bluebottle.projects.models import Project
from bluebottle.bb_projects.models import ProjectPhase
from bluebottle.tasks.models import Task
from bluebottle.clients import properties
from bluebottle.clients.utils import LocalTenant
class Command(BaseCommand):
args = 'No arguments required'
help = 'Sets projects to "Done Incomplete" and task status to "Realised" \
when the deadline is passed'
def handle(self, *args, **options):
for client in Client.objects.all():
self.update_statuses_for_client(client)
def update_statuses_for_client(self, client):
"""
Projects which have expired but have been funded will already have
their status set to done-complete so these can be ignored. We only
need to update projects which haven't been funded but have expired,
or they have been overfunded and have expired.
"""
connection.set_tenant(client)
with LocalTenant(client, clear_tenant=True):
self.stdout.write("Checking deadlines for client {0}".
format(client.client_name))
# we no longer need the actual phases (moved to project)
# but verify they exist, just to be sure
try:
ProjectPhase.objects.get(slug='done-complete')
ProjectPhase.objects.get(slug='done-incomplete')
except ProjectPhase.DoesNotExist:
raise CommandError(
"A ProjectPhase with name 'Done-Complete' or 'Done-Incomplete' \
does not exist")
try:
campaign_phase = ProjectPhase.objects.get(slug='campaign')
except ProjectPhase.DoesNotExist:
raise CommandError(
"A ProjectPhase with name 'Campaign' does not exist")
try:
ProjectPhase.objects.get(slug='closed')
except ProjectPhase.DoesNotExist:
raise CommandError(
"A ProjectPhase with slug 'closed' does not exist")
"""
Projects which have at least the funds asked, are still in campaign
phase and have not expired need the campaign funded date set to now.
FIXME: this action should be moved into the code where 'amount_needed'
is calculated => when the value is lte 0 then set campaign_funded.
"""
self.stdout.write("Checking Project funded and still running...")
Project.objects.filter(amount_needed__lte=0,
status=campaign_phase,
deadline__gt=now()).update(campaign_funded=now())
"""
Projects which are still in campaign phase but have expired need to be
set to 'done'.
"""
self.stdout.write("Checking Project deadlines...")
for project in Project.objects.filter(status=campaign_phase,
deadline__lte=now()):
project.deadline_reached()
"""
Iterate over tasks and save them one by one so the receivers get a
signal
"""
self.stdout.write("Checking Task deadlines...\n\n")
for task in Task.objects.filter(status='in progress',
deadline__lt=now()).all():
task.deadline_reached()
self.stdout.write(
"Successfully updated the status of expired Project and Task \
models.\n\n")
|
Python
| 0
|
@@ -410,16 +410,57 @@
lTenant%0A
+from bluebottle.votes.models import Vote%0A
%0A%0Aclass
@@ -3832,16 +3832,530 @@
models.%5Cn%5Cn%22)%0A
+%0A self.stdout.write(%22Checking projects with voting deadlines%5Cn%5Cn%22)%0A%0A vote_phase = ProjectPhase.objects.get(slug='voting')%0A vote_done = ProjectPhase.objects.get(slug='voting-done')%0A%0A for project in Project.objects.filter(status=vote_phase,%0A voting_deadline__lt=now()):%0A project.status = vote_done%0A project.save()%0A%0A self.stdout.write(%22Done checking projects with voting deadlines%22)%0A
|
6192a8ed74e80e0a6b2c18292bd87ba2b44a6f44
|
Split format dictionary
|
audiorename/rename.py
|
audiorename/rename.py
|
# -*- coding: utf-8 -*-
"""Rename a single audio file."""
import os
import six
from ansicolor import green
from ansicolor import red
import shutil
from phrydy import as_string
from tmep import Functions
from tmep import Template
from .meta import Meta
formats = {
'default': '$artist_initial/' +
'%shorten{$artistsafe_sort}/' +
'%shorten{$album_clean}%ifdef{year_safe,_${year_safe}}/' +
'${disctrack}_%shorten{$title}',
'compilation': '_compilations/' +
'$album_initial/' +
'%shorten{$album_clean}%ifdef{year_safe,_${year_safe}}/' +
'${disctrack}_%shorten{$title}',
}
class Rename(object):
def __init__(self, file, args):
if args:
self.args = args
self.old_file = file
if args.target_dir:
self.target_dir = args.target_dir
else:
self.target_dir = os.getcwd()
if args.source_as_target_dir:
if args.is_dir:
self.target_dir = args.path
else:
self.target_dir = os.path.dirname(args.path)
self.old_path = os.path.realpath(self.old_file)
self.extension = self.old_file.split('.')[-1]
meta = Meta(self.old_path, args.shell_friendly)
self.meta = meta.getMeta()
def generateFilename(self):
if self.meta['comp']:
t = Template(as_string(self.args.compilation))
else:
t = Template(as_string(self.args.format))
f = Functions(self.meta)
new = t.substitute(self.meta, f.functions())
new = self.postTemplate(new)
new = f.tmpl_deldupchars(new + '.' + self.extension.lower())
self.new_path = os.path.join(self.target_dir, new)
if six.PY2:
old_path = self.old_path.decode('utf-8')
else:
old_path = self.old_path
self.message = red(old_path) + '\n -> ' + green(
self.new_path) + '\n'
def postTemplate(self, text):
if isinstance(text, str) or isinstance(text, unicode):
if self.args.shell_friendly:
text = Functions.tmpl_asciify(text)
text = Functions.tmpl_delchars(text, '[]().,!"\'’')
text = Functions.tmpl_replchars(text, '-', ' ')
return text
def createDir(self, path):
path = os.path.dirname(path)
import errno
try:
os.makedirs(path)
except OSError as exception:
if exception.errno != errno.EEXIST:
raise
def skipMessage(self, message='no field'):
print(
red('!!! SKIPPED [' + message + '] !!!',
reverse=True) + ': ' + self.old_file)
def dryRun(self):
self.generateFilename()
print('Dry run: ' + self.message)
def rename(self):
"""Rename audio files"""
self.generateFilename()
print('Rename: ' + self.message)
self.createDir(self.new_path)
shutil.move(self.old_path, self.new_path)
def copy(self):
"""Copy audio files to new path."""
self.generateFilename()
print('Copy: ' + self.message)
self.createDir(self.new_path)
shutil.copy2(self.old_path, self.new_path)
def execute(self):
skip = self.args.skip_if_empty
if not self.meta:
self.skipMessage('broken file')
elif skip and (skip not in self.meta or not self.meta[skip]):
self.skipMessage()
else:
if self.args.dry_run:
self.dryRun()
elif self.args.copy:
self.copy()
else:
self.rename()
def do_rename(path, args=None):
if args.unittest:
print(os.path.abspath(path))
else:
audio = Rename(path, args)
audio.execute()
|
Python
| 0.000003
|
@@ -262,21 +262,27 @@
mats = %7B
-%0A
+%7D%0A%0Aformats%5B
'default
@@ -282,17 +282,19 @@
default'
-:
+%5D =
'$artis
@@ -298,32 +298,34 @@
tist_initial/' +
+ %5C
%0A '%25shorten%7B$
@@ -340,24 +340,27 @@
fe_sort%7D/' +
+ %5C
%0A '%25short
@@ -402,32 +402,35 @@
%7Byear_safe%7D%7D/' +
+ %5C
%0A '$%7Bdisctrac
@@ -449,22 +449,26 @@
$title%7D'
-,%0A
+%0A%0Aformats%5B
'compila
@@ -472,17 +472,19 @@
ilation'
-:
+%5D =
'_compi
@@ -490,24 +490,27 @@
ilations/' +
+ %5C
%0A '$album
@@ -517,24 +517,27 @@
_initial/' +
+ %5C
%0A '%25short
@@ -587,16 +587,19 @@
fe%7D%7D/' +
+ %5C
%0A '$%7B
@@ -630,12 +630,8 @@
le%7D'
-,%0A%7D%0A
%0A%0Acl
|
47c76074e010107fb3bfe3fc0f74482058efac50
|
Add support for constructor keyword arguments (i.e. pass them through to FilesystemCollection).
|
src/sheared/web/collections/entwined.py
|
src/sheared/web/collections/entwined.py
|
#
# Sheared -- non-blocking network programming library for Python
# Copyright (C) 2003 Sune Kirkeby <sune@mel.interspace.dk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
import warnings
from entwine import entwine
from sheared.python import io
from sheared.python import log
from sheared.web.collections.filesystem import *
def entwined_handler(request, reply, collection, walker):
if walker.root.endswith(collection.template_ext):
templates = [walker.root]
templates.extend(collection.page_templates)
ctx = {}
for i in range(len(templates)):
last = i == len(templates) - 1
r = entwine(io.readfile(templates[i]), ctx)
if (not last) and r.strip():
warnings.warn('ignored generated content from %s' % template,
UserWarning, stacklevel=2)
reply.headers.setHeader('Content-Type', 'text/html')
reply.headers.setHeader('Content-Length', len(r))
reply.send(r)
else:
return normal_handler(request, reply, collection, walker)
class EntwinedCollection(FilesystemCollection):
def __init__(self, page_templates, *a):
FilesystemCollection.__init__(self, *a)
self.page_templates = page_templates
self.normal_handler = entwined_handler
self.template_ext = '.html'
|
Python
| 0.000047
|
@@ -1843,16 +1843,22 @@
ates, *a
+, **kw
):%0A
@@ -1898,16 +1898,22 @@
self, *a
+, **kw
)%0A
|
89acbe756359f7ebc3bc25bf2e53264d9b951333
|
fix test failure on windows: process names can also ends with '.bin', not only '.exe'
|
test/_windows.py
|
test/_windows.py
|
#!/usr/bin/env python
#
# $Id$
#
import os
import unittest
import platform
import subprocess
import signal
import time
import warnings
import psutil
from test_psutil import reap_children, get_test_subprocess
try:
from psutil import wmi
except ImportError:
warnings.warn("Can't import WMI module; Windows specific tests disabled",
RuntimeWarning)
wmi = None
class WindowsSpecificTestCase(unittest.TestCase):
def setUp(self):
self.pid = get_test_subprocess().pid
def tearDown(self):
reap_children()
def test_issue_24(self):
p = psutil.Process(0)
self.assertRaises(psutil.AccessDenied, p.kill)
def test_pid_4(self):
p = psutil.Process(4)
self.assertEqual(p.name, 'System')
# use __str__ to access all common Process properties to check
# that nothing strange happens
str(p)
p.username
self.assertTrue(p.create_time >= 0.0)
try:
rss, vms = p.get_memory_info()
except psutil.AccessDenied:
# expected on Windows Vista and Windows 7
if not platform.uname()[1] in ('vista', 'win-7', 'win7'):
raise
else:
self.assertTrue(rss > 0)
self.assertEqual(vms, 0)
def test_signal(self):
p = psutil.Process(self.pid)
self.assertRaises(ValueError, p.send_signal, signal.SIGINT)
def test_process_names(self):
for p in psutil.process_iter():
if p.pid in (0, 4):
continue
self.assertTrue(p.name.endswith(".exe"))
if wmi is not None:
# --- Process class tests
def test_process_name(self):
w = wmi.WMI().Win32_Process(ProcessId=self.pid)[0]
p = psutil.Process(self.pid)
self.assertEqual(p.name, w.Caption)
def test_process_path(self):
w = wmi.WMI().Win32_Process(ProcessId=self.pid)[0]
p = psutil.Process(self.pid)
self.assertEqual(p.exe, w.ExecutablePath)
def test_process_cmdline(self):
w = wmi.WMI().Win32_Process(ProcessId=self.pid)[0]
p = psutil.Process(self.pid)
self.assertEqual(' '.join(p.cmdline), w.CommandLine)
def test_process_username(self):
w = wmi.WMI().Win32_Process(ProcessId=self.pid)[0]
p = psutil.Process(self.pid)
domain, _, username = w.GetOwner()
username = "%s\\%s" %(domain, username)
self.assertEqual(p.username, username)
def test_process_rss_memory(self):
time.sleep(0.1)
w = wmi.WMI().Win32_Process(ProcessId=self.pid)[0]
p = psutil.Process(self.pid)
rss = p.get_memory_info().rss
self.assertEqual(rss, int(w.WorkingSetSize))
def test_process_vms_memory(self):
time.sleep(0.1)
w = wmi.WMI().Win32_Process(ProcessId=self.pid)[0]
p = psutil.Process(self.pid)
vms = p.get_memory_info().vms
# http://msdn.microsoft.com/en-us/library/aa394372(VS.85).aspx
# claims that PageFileUsage is represented in Kilo bytes.
# This appear to be true on Windows 7 but not on Windows XP.
if platform.uname()[1] in ('vista', 'win-7', 'win7'):
self.assertEqual(vms, int(w.PageFileUsage * 1024))
else:
self.assertEqual(vms, w.PageFileUsage)
def test_process_create_time(self):
w = wmi.WMI().Win32_Process(ProcessId=self.pid)[0]
p = psutil.Process(self.pid)
wmic_create = str(w.CreationDate.split('.')[0])
psutil_create = time.strftime("%Y%m%d%H%M%S",
time.localtime(p.create_time))
self.assertEqual(wmic_create, psutil_create)
# --- psutil namespace functions and constants tests
def test_NUM_CPUS(self):
num_cpus = int(os.environ['NUMBER_OF_PROCESSORS'])
self.assertEqual(num_cpus, psutil.NUM_CPUS)
def test_TOTAL_PHYMEM(self):
w = wmi.WMI().Win32_ComputerSystem()[0]
self.assertEqual(int(w.TotalPhysicalMemory), psutil.TOTAL_PHYMEM)
def test__UPTIME(self):
# _UPTIME constant is not public but it is used internally
# as value to return for pid 0 creation time.
# WMI behaves the same.
w = wmi.WMI().Win32_Process(ProcessId=self.pid)[0]
p = psutil.Process(0)
wmic_create = str(w.CreationDate.split('.')[0])
psutil_create = time.strftime("%Y%m%d%H%M%S",
time.localtime(p.create_time))
def test_get_pids(self):
# Note: this test might fail if the OS is starting/killing
# other processes in the meantime
w = wmi.WMI().Win32_Process()
wmi_pids = [x.ProcessId for x in w]
wmi_pids.sort()
psutil_pids = psutil.get_pid_list()
psutil_pids.sort()
if wmi_pids != psutil_pids:
difference = filter(lambda x:x not in wmi_pids, psutil_pids) + \
filter(lambda x:x not in psutil_pids, wmi_pids)
self.fail("difference: " + str(difference))
if __name__ == '__main__':
test_suite = unittest.TestSuite()
test_suite.addTest(unittest.makeSuite(WindowsSpecificTestCase))
unittest.TextTestRunner(verbosity=2).run(test_suite)
|
Python
| 0.000003
|
@@ -1598,16 +1598,43 @@
(%22.exe%22)
+ or p.name.endswith(%22.bin%22)
)%0A%0A i
|
15ab20faa2ae7e9b5f30d57458b12e11722bb63f
|
Remove debugging
|
flickrpc.py
|
flickrpc.py
|
from twisted.python.failure import Failure
from twisted.internet import defer
from twisted.web.xmlrpc import Proxy
from elementtree import ElementTree
import os, md5
class FlickrError(Exception):
def __init__(self, code, message):
Exception.__init__(self)
self.code = int(code)
self.message = message
def __str__(self):
return "%d: %s" % (self.code, self.message)
class FlickRPC:
def __init__(self, api_key, secret, perms="read"):
self.proxy = Proxy("http://api.flickr.com/services/xmlrpc/")
self.api_key = api_key
self.secret = secret
self.perms = perms
self.token = None
self.__methods = {}
@staticmethod
def __failure(exception):
"""Take a xmlrpclib.Fault object and return a new Twisted Failure object."""
return Failure(FlickrError(exception.faultCode,
exception.faultString))
def __sign(self, kwargs):
kwargs['api_key'] = self.api_key
# If authenticating we don't yet have a token
if self.token:
kwargs['auth_token'] = self.token
s = []
for key in kwargs.keys():
s.append("%s%s" % (key, kwargs[key]))
s.sort()
sig = md5.new(self.secret + ''.join(s)).hexdigest()
kwargs['api_sig'] = sig
def __getTokenFile(self):
"""Get the filename that contains the authentication token for the API key"""
return os.path.expanduser(os.path.join("~", ".flickr", self.api_key, "auth.xml"))
def __getattr__(self, method, **kwargs):
"""Magic automatic method generation. Take the Flickr method name
(flickr.favorites.getList), remove the flickr. prefix
(favorites.getList) and replace all . with _ (favorite_getList). Then
pass keyword arguments as required. The return value is a Twisted
Deferred object"""
if not self.__methods.has_key(method):
print "generating %s" % method
real_method = "flickr." + method.replace("_", ".")
def proxy(method=real_method, **kwargs):
d = defer.Deferred()
self.__sign(kwargs)
# TODO: do I have to convert a Unicode string to UTF-8 to parse it?
self.proxy.callRemote(method, kwargs).addCallbacks(
lambda data: d.callback(ElementTree.XML(data.encode("utf-8"))),
lambda fault: d.errback(FlickRPC.__failure(fault.value)))
return d
self.__methods[method] = proxy
return self.__methods[method]
def authenticate(self):
"""Attemps to log in to Flickr. This will open a web browser if
required. The return value is a Twisted Deferred object that callbacks
when authentication is complete."""
filename = self.__getTokenFile()
if os.path.exists(filename):
e = ElementTree.parse(filename).getroot()
self.token = e.find("token").text
return defer.succeed(True)
d = defer.Deferred()
def gotFrob(xml):
frob = xml.text
keys = { 'perms': self.perms,
'frob': frob }
self.__sign(keys)
url = "http://flickr.com/services/auth/?api_key=%(api_key)s&perms=%(perms)s&frob=%(frob)s&api_sig=%(api_sig)s" % keys
# TODO: signal or something
os.spawnlp(os.P_WAIT, "epiphany", "epiphany", "-p", url)
def gotToken(e):
# Set the token
self.token = e.find("token").text
# Cache the authentication
filename = self.__getTokenFile()
path = os.path.dirname(filename)
if not os.path.exists(path):
os.makedirs(path, 0700)
f = file(filename, "w")
f.write(ElementTree.tostring(e))
f.close()
# Callback to the user
d.callback(True)
# TODO: chain up the error callbacks too
self.auth_getToken(frob=frob).addCallback(gotToken)
# TODO: chain up the error callbacks too
flickr.auth_getFrob().addCallback(gotFrob)
return d
|
Python
| 0.000005
|
@@ -1975,51 +1975,8 @@
d):%0A
- print %22generating %25s%22 %25 method%0A
|
9824144e9a7f8558e3ca0836ad30613ed2baeb08
|
remove left-over debug bash
|
pprof/projects/gentoo/crafty.py
|
pprof/projects/gentoo/crafty.py
|
"""
crafty experiment within gentoo chroot.
"""
from os import path
from pprof.projects.gentoo.gentoo import GentooGroup
from pprof.utils.downloader import Wget
from pprof.utils.run import run, uchroot
from plumbum import local
from plumbum.cmd import cat # pylint: disable=E0401
class Crafty(GentooGroup):
"""
games-board/crafty
"""
NAME = "gentoo-crafty"
DOMAIN = "games-board"
def download(self):
super(Crafty, self).download()
book_file = "book.bin"
book_bin = "http://www.craftychess.com/" + book_file
with local.cwd(self.builddir):
Wget(book_bin, book_file)
def build(self):
with local.cwd(self.builddir):
emerge_in_chroot = uchroot()["/usr/bin/emerge"]
run(emerge_in_chroot["games-board/crafty"])
def run_tests(self, experiment):
from pprof.project import wrap
crafty_path = "/usr/games/bin/crafty"
wrap(path.join(self.builddir, crafty_path.lstrip("/")), experiment,
self.builddir)
crafty = uchroot()[crafty_path]
bash = uchroot()["/bin/bash"]
with open(path.join(self.builddir, "test1.sh"), 'w') as test1:
lines = '''
st=10
ponder=off
display nomoves
setboard rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq
move
book off
setboard rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq
move
setboard 1k1r4/pp1b1R2/3q2pp/4p3/2B5/4Q3/PPP2B2/2K5 b
move
setboard 3r1k2/4npp1/1ppr3p/p6P/P2PPPP1/1NR5/5K2/2R5 w
move
setboard 2q1rr1k/3bbnnp/p2p1pp1/2pPp3/PpP1P1P1/1P2BNNP/2BQ1PRK/7R b
move
setboard rnbqkb1r/p3pppp/1p6/2ppP3/3N4/2P5/PPP1QPPP/R1B1KB1R w KQkq
move
setboard r1b2rk1/2q1b1pp/p2ppn2/1p6/3QP3/1BN1B3/PPP3PP/R4RK1 w
move
setboard 2r3k1/pppR1pp1/4p3/4P1P1/5P2/1P4K1/P1P5/8 w
move
setboard 1nk1r1r1/pp2n1pp/4p3/q2pPp1N/b1pP1P2/B1P2R2/2P1B1PP/R2Q2K1 w
move
setboard 4b3/p3kp2/6p1/3pP2p/2pP1P2/4K1P1/P3N2P/8 w
move
setboard 2kr1bnr/pbpq4/2n1pp2/3p3p/3P1P1B/2N2N1Q/PPP3PP/2KR1B1R w
move
setboard 3rr1k1/pp3pp1/1qn2np1/8/3p4/PP1R1P2/2P1NQPP/R1B3K1 b
move
setboard 2r1nrk1/p2q1ppp/bp1p4/n1pPp3/P1P1P3/2PBB1N1/4QPPP/R4RK1 w
move
setboard r3r1k1/ppqb1ppp/8/4p1NQ/8/2P5/PP3PPP/R3R1K1 b
move
setboard r2q1rk1/4bppp/p2p4/2pP4/3pP3/3Q4/PP1B1PPP/R3R1K1 w
move
setboard rnb2r1k/pp2p2p/2pp2p1/q2P1p2/8/1Pb2NP1/PB2PPBP/R2Q1RK1 w
move
setboard 2r3k1/1p2q1pp/2b1pr2/p1pp4/6Q1/1P1PP1R1/P1PN2PP/5RK1 w
move
setboard r1bqkb1r/4npp1/p1p4p/1p1pP1B1/8/1B6/PPPN1PPP/R2Q1RK1 w kq
move
setboard r2q1rk1/1ppnbppp/p2p1nb1/3Pp3/2P1P1P1/2N2N1P/PPB1QP2/R1B2RK1 b
move
setboard r1bq1rk1/pp2ppbp/2np2p1/2n5/P3PP2/N1P2N2/1PB3PP/R1B1QRK1 b
move
setboard 3rr3/2pq2pk/p2p1pnp/8/2QBPP2/1P6/P5PP/4RRK1 b
move
setboard r4k2/pb2bp1r/1p1qp2p/3pNp2/3P1P2/2N3P1/PPP1Q2P/2KRR3 w
move
setboard 3rn2k/ppb2rpp/2ppqp2/5N2/2P1P3/1P5Q/PB3PPP/3RR1K1 w
move
setboard 2r2rk1/1bqnbpp1/1p1ppn1p/pP6/N1P1P3/P2B1N1P/1B2QPP1/R2R2K1 b
move
setboard r1bqk2r/pp2bppp/2p5/3pP3/P2Q1P2/2N1B3/1PP3PP/R4RK1 b kq
move
setboard r2qnrnk/p2b2b1/1p1p2pp/2pPpp2/1PP1P3/PRNBB3/3QNPPP/5RK1 w
move
setboard /k/3p/p2P1p/P2P1P///K/ w
move
setboard /k/rnn////5RBB/K/ w
move
mt=0
quit
EOF
'''
test1.write(lines)
with open(path.join(self.builddir, "test2.sh"), 'w') as test2:
lines = '''
st=10
ponder=off
mt=2
setboard 2r2rk1/1bqnbpp1/1p1ppn1p/pP6/N1P1P3/P2B1N1P/1B2QPP1/R2R2K1 b
move
mt=0
quit
'''
test2.write(lines)
with local.cwd(self.builddir):
run((cat["test1.sh"] | crafty))
run((cat["test2.sh"] | crafty))
|
Python
| 0
|
@@ -1082,46 +1082,8 @@
ath%5D
-%0A bash = uchroot()%5B%22/bin/bash%22%5D
%0A%0A
|
b0d23f375a224b60ad7e57e35864cc9e140b72c2
|
Remove old comment
|
workflows/common/python/model_runner.py
|
workflows/common/python/model_runner.py
|
# MODEL RUNNER PY
# Currently only supports NT3_TC1 # Not true? -Justin 2018/02/28
# See __main__ section for usage
import sys
import json
import os
import numpy as np
import importlib
import runner_utils
import log_tools
import math
logger = None
print("MODEL RUNNER...")
# Adding the following line in order to append an arbitrary model's dependencies to the path *before* the benchmarks in order to accidentally use a benchmark dependency
sys.path.append(os.getenv("MODEL_PYTHON_DIR"))
sys.path.append(os.getenv("BENCHMARKS_ROOT")+"/common")
print("sys.path:")
print(sys.path)
print("")
def import_pkg(framework, model_name):
print ("model_name", model_name)
if framework == 'keras':
module_name = os.getenv("MODEL_PYTHON_SCRIPT") if "MODEL_PYTHON_SCRIPT" in os.environ and os.getenv("MODEL_PYTHON_SCRIPT") != "" else "{}_baseline_keras2".format(model_name)
print ("module_name:", module_name)
pkg = importlib.import_module(module_name)
from keras import backend as K
if K.backend() == 'tensorflow' and 'NUM_INTER_THREADS' in os.environ:
import tensorflow as tf
print("Configuring tensorflow with {} inter threads and {} intra threads".
format(os.environ['NUM_INTER_THREADS'], os.environ['NUM_INTRA_THREADS']))
session_conf = tf.ConfigProto(inter_op_parallelism_threads=int(os.environ['NUM_INTER_THREADS']),
intra_op_parallelism_threads=int(os.environ['NUM_INTRA_THREADS']))
sess = tf.Session(graph=tf.get_default_graph(), config=session_conf)
K.set_session(sess)
# elif framework is 'mxnet':
# import nt3_baseline_mxnet
# pkg = nt3_baseline_keras_baseline_mxnet
# elif framework is 'neon':
# import nt3_baseline_neon
# pkg = nt3_baseline_neon
else:
raise ValueError("Invalid framework: {}".format(framework))
return pkg
def run(hyper_parameter_map, obj_return):
global logger
logger = log_tools.get_logger(logger, __name__)
framework = hyper_parameter_map['framework']
model_name = hyper_parameter_map['model_name']
pkg = import_pkg(framework, model_name)
runner_utils.format_params(hyper_parameter_map)
# params is python dictionary
params = pkg.initialize_parameters()
for k,v in hyper_parameter_map.items():
#if not k in params:
# raise Exception("Parameter '{}' not found in set of valid arguments".format(k))
if(k=="dense"):
if(type(v) != list):
v=v.split(" ")
v = [int(i) for i in v]
if(k=="dense_feature_layers"):
if(type(v) != list):
v=v.split(" ")
v = [int(i) for i in v]
if(k=="cell_features"):
cp_str = v
v = list()
v.append(cp_str)
params[k] = v
logger.debug("WRITE_PARAMS START")
runner_utils.write_params(params, hyper_parameter_map)
logger.debug("WRITE_PARAMS STOP")
history = pkg.run(params)
runner_utils.keras_clear_session(framework)
# Default result if there is no val_loss (as in infer.py)
result = 0
if history != None:
# Return the history entry that the user requested.
val_loss = history.history[obj_return]
# Return a large number for nan and flip sign for val_corr
if(obj_return == "val_loss"):
if(math.isnan(val_loss[-1])):
result = 999999999
else:
result = val_loss[-1]
elif(obj_return == "val_corr" or obj_return == "val_dice_coef"): # allow for the return variable to be the val_dice_coef, which is sometimes used by arbitrary models instead of val_corr
if(math.isnan(val_loss[-1])):
result = 999999999
else:
result = -val_loss[-1] #Note negative sign
else:
raise ValueError("Unsupported objective function (use obj_param to specify val_corr or val_loss): {}".format(framework))
print("result: " + str(result))
return result
def get_obj_return():
obj_return = os.getenv("OBJ_RETURN")
valid_obj_returns = [ "val_loss", "val_corr" ]
if obj_return == None:
raise Exception("No OBJ_RETURN was in the environment!")
if obj_return not in valid_obj_returns:
raise Exception("Invalid value for OBJ_RETURN: use: " +
valid_obj_returns)
return obj_return
# Usage: see how sys.argv is unpacked below:
if __name__ == '__main__':
logger = log_tools.get_logger(logger, __name__)
logger.debug("RUN START")
( _, # The Python program name (unused)
param_string,
instance_directory,
framework,
runid,
benchmark_timeout ) = sys.argv
obj_return = get_obj_return()
hyper_parameter_map = runner_utils.init(param_string,
instance_directory,
framework,
out_dir_key='save')
hyper_parameter_map['model_name'] = os.getenv("MODEL_NAME")
if hyper_parameter_map['model_name'] == None:
raise Exception("No MODEL_NAME was in the environment!")
hyper_parameter_map['experiment_id'] = os.getenv("EXPID")
hyper_parameter_map['run_id'] = runid
hyper_parameter_map['timeout'] = float(benchmark_timeout)
# tensorflow.__init__ calls _os.path.basename(_sys.argv[0])
# so we need to create a synthetic argv.
# if (not hasattr(sys, 'argv')) or (len(sys.argv) == 0):
# sys.argv = ['nt3_tc1']
sys.argv = ['null']
# Call to Benchmark!
logger.debug("CALL BENCHMARK " + hyper_parameter_map['model_name'])
print("sys.argv=" + str(sys.argv))
result = run(hyper_parameter_map, obj_return)
runner_utils.write_output(result, instance_directory)
logger.debug("RUN STOP")
|
Python
| 0
|
@@ -17,73 +17,8 @@
PY%0A%0A
-# Currently only supports NT3_TC1 # Not true? -Justin 2018/02/28%0A
# Se
|
c6dc5e6a25e6a7154728e38dd174cc65bce00a61
|
Add build support for bzip2
|
pprof/projects/gentoo/gentoo.py
|
pprof/projects/gentoo/gentoo.py
|
"""
The Gentoo module for running tests on builds from the portage tree.
This will install a stage3 image of gentoo together with a recent snapshot
of the portage tree. For building / executing arbitrary projects successfully it
is necessary to keep the installed image as close to the host system as
possible.
In order to speed up your experience, you can replace the stage3 image that
we pull from the distfiles mirror with a new image that contains all necessary
dependencies for your experiments. Make sure you update the hash alongside
the gentoo image in pprof's source directory.
The following packages are required to run GentooGroup:
* fakeroot
"""
from pprof.project import Project
from pprof.utils.run import run, uchroot
from plumbum import local
from lazy import lazy
def latest_src_uri():
"""
Get the latest src_uri for a stage 3 tarball.
Returns (str):
Latest src_uri from gentoo's distfiles mirror.
"""
from plumbum.cmd import curl, cut, tail
from plumbum import ProcessExecutionError
from logging import error
latest_txt = "http://distfiles.gentoo.org/releases/amd64/autobuilds/"\
"latest-stage3-amd64.txt"
try:
src_uri = (curl[latest_txt] | tail["-n", "+3"]
| cut["-f1", "-d "])().strip()
except ProcessExecutionError as proc_ex:
src_uri = "NOT-FOUND"
error("Could not determine latest stage3 src uri: {0}", str(proc_ex))
return src_uri
class GentooGroup(Project):
"""
Gentoo ProjectGroup is the base class for every portage build.
"""
GROUP = 'gentoo'
def __init__(self, exp):
super(GentooGroup, self).__init__(exp, "gentoo")
src_dir = "gentoo"
src_file = src_dir + ".tar.bz2"
@lazy
def src_uri(self):
return "http://distfiles.gentoo.org/releases/amd64/autobuilds/{0}" \
.format(latest_src_uri())
# download location for portage files
src_uri_portage = "ftp://sunsite.informatik.rwth-aachen.de/pub/Linux/"\
"gentoo/snapshots/portage-latest.tar.bz2"
src_file_portage = "portage_snap.tar.bz2"
def download(self):
from pprof.utils.downloader import Wget
from pprof.utils.run import run
from pprof.settings import config
from plumbum.cmd import cp, tar, fakeroot, rm
with local.cwd(self.builddir):
Wget(self.src_uri, self.src_file)
cp(config["sourcedir"] + "/bin/uchroot", "uchroot")
run(fakeroot["tar", "xfj", self.src_file])
rm(self.src_file)
with local.cwd(self.builddir + "/usr"):
Wget(self.src_uri_portage, self.src_file_portage)
run(tar["xfj", self.src_file_portage])
rm(self.src_file_portage)
def configure(self):
from plumbum.cmd import mkdir, cp
with local.cwd(self.builddir):
with open("etc/portage/make.conf", 'w') as makeconf:
lines = '''
CFLAGS="-O2 -pipe"
CXXFLAGS="${CFLAGS}"
FEATURES="-sandbox -usersandbox -usersync -xattr"
CC="/llvm/bin/clang"
CXX="/llvm/bin/clang++"
PORTAGE_USERNAME = "root"
PORTAGE_GRPNAME = "root"
PORTAGE_INST_GID = 0
PORTAGE_INST_UID = 0
LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/llvm/lib"
CHOST="x86_64-pc-linux-gnu"
USE="bindist mmx sse sse2"
PORTDIR="/usr/portage"
DISTDIR="${PORTDIR}/distfiles"
PKGDIR="${PORTDIR}/packages"
'''
makeconf.write(lines)
mkdir("-p", "etc/portage/metadata")
with open("etc/portage/metadata/layout.conf", 'w') as layoutconf:
lines = '''masters = gentoo'''
layoutconf.write(lines)
cp("/etc/resolv.conf", "etc/resolv.conf")
class PrepareStage3(GentooGroup):
"""
A project that can be used for interactive stage3 generation.
"""
NAME = "stage3"
DOMAIN = "debug"
def build(self):
from plumbum import FG
from plumbum.cmd import tar, mv, rm
from pprof.utils.downloader import update_hash
from logging import info
from pprof.settings import config
from os import path
root = config["tmpdir"]
src_file = self.src_file + ".new"
with local.cwd(self.builddir):
bash_in_uchroot = uchroot()["/bin/bash"]
print("Entering User-Chroot. Prepare your image and "
"type 'exit' when you are done.")
bash_in_uchroot & FG
tgt_path = path.join(root, self.src_file)
tgt_path_new = path.join(root, src_file)
print("Packing new stage3 image. "
"This will replace the original one at: {}", tgt_path)
tar("cjf", tgt_path_new, ".")
update_hash(src_file, root)
mv(path.join(root, src_file), tgt_path)
def run_tests(self, experiment):
pass
class Eix(GentooGroup):
"""
Represents the package eix from the portage tree.
Building this class will create bare gentoo and compile eix.
"""
NAME = 'eix'
DOMAIN = 'debug'
def build(self):
with local.cwd(self.builddir):
emerge_in_chroot = uchroot()["/usr/bin/emerge"]
run(emerge_in_chroot["eix"])
def run_tests(self, experiment):
pass
|
Python
| 0
|
@@ -5247,28 +5247,279 @@
, experiment):%0A pass%0A
+%0A%0Aclass BZip2(GentooGroup):%0A NAME = %22gentoo-bzip2%22%0A DOMAIN = %22app-arch%22%0A%0A def build(self):%0A with local.cwd(self.builddir):%0A emerge_in_chroot = uchroot()%5B%22/usr/bin/emerge%22%5D%0A run(emerge_in_chroot%5B%22app-arch/bzip2%22%5D)%0A
|
829d32fa8f1724bf2e8a738567f48e7047ce11b6
|
handle error again
|
cmsplugin_remote_form/cms_plugins.py
|
cmsplugin_remote_form/cms_plugins.py
|
import requests
from django.core.mail import EmailMultiAlternatives
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
try:
from django.urls import reverse
except ImportError:
# handle Django < 1.10
from django.core.urlresolvers import reverse
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from .admin import ExtraFieldInline
from .models import RemoteForm as RemoteFormModel
from .forms import RemoteForm as RemoteFormForm
import time
def handle_uploaded_file(f, ts):
destination = open('%s/%s' % (settings.MEDIA_ROOT, ts + '-' + f.name), 'wb+')
for chunk in f.chunks():
destination.write(chunk)
destination.close()
class CMSRemoteFormPlugin(CMSPluginBase):
"""
"""
model = RemoteFormModel
inlines = [ExtraFieldInline, ]
name = _('Remote Form')
render_template = "cmsplugin_remote_form/default.html"
change_form_template = 'cmsplugin_remote_form/admin/change_form.html'
cache = False
def render(self, context, instance, placeholder):
request = context['request']
form = RemoteFormForm(contactFormInstance=instance, request=request)
show_thanks = False
if instance and instance.template:
self.render_template = instance.template
if request.method == "POST" and "remote_form_" + str(instance.id) in request.POST.keys():
ts = str(int(time.time()))
self.submitted_form = RemoteFormForm(contactFormInstance=instance,
request=request,
data=request.POST,
files=request.FILES)
if self.submitted_form.is_valid():
for fl in request.FILES:
for f in request.FILES.getlist(fl):
handle_uploaded_file(f, ts)
show_thanks = True
self.instance = instance
self.request = request
self.saved_record = self.submitted_form.save_record(instance, ts)
self.remote_response = self.post_to_remote(instance, request, self.submitted_form.cleaned_data)
self.handle_response()
else:
form = self.submitted_form
context.update({
'object': instance,
'form': form,
'show_thanks': show_thanks
})
return context
def handle_response(self):
if self.remote_response and self.determine_success():
self.success_callback()
else:
if self.instance.error_notification_emails:
error_email_addresses = [x.strip() for x in self.instance.error_notification_emails.split(',')]
message = EmailMultiAlternatives(
"Form Submission Error",
'There was a problem with a form-submission on:\n%s\nView the record:\n%s\nContent:\n%s' % (
self.request.build_absolute_uri(),
self.request.build_absolute_uri(reverse('admin:cmsplugin_remote_form_contactrecord_change', args=(self.saved_record.id,))),
self.remote_response.content
),
'no-reply@worthwhile.com',
error_email_addresses,
)
message.send()
self.failure_callback()
# Override these if you need to do different stuff.
def post_to_remote(self, instance, request, cleaned_data):
try:
response = requests.post(instance.post_url, data=cleaned_data)
return response
except requests.ConnectionError, e:
print e
def determine_success(self):
return "Please correct the following errors:" not in self.remote_response.content
def success_callback(self):
pass
def failure_callback(self):
pass
plugin_pool.register_plugin(CMSRemoteFormPlugin)
|
Python
| 0
|
@@ -2666,16 +2666,117 @@
emails:%0A
+ error = self.remote_response.content if self.remote_response else %22Connection Error%22%0A
@@ -3322,36 +3322,13 @@
-self.remote_response.content
+error
%0A
|
dcda5039755841ee5eb7faa6d45e763fbede3ee0
|
update serializer api
|
hwt/synthesizer/utils.py
|
hwt/synthesizer/utils.py
|
# -*- coding: utf-8 -*-
from io import StringIO
import os
from hwt.serializer.store_manager import SaveToStream, StoreManager
from hwt.serializer.vhdl.serializer import Vhdl2008Serializer
from hwt.synthesizer.dummyPlatform import DummyPlatform
from hwt.synthesizer.unit import Unit
from hwt.serializer.generic.to_hdl_ast import ToHdlAst
def toRtl(unit_or_cls: Unit, store_manager: StoreManager=None,
name: str=None,
target_platform=DummyPlatform()):
"""
Convert unit to RTL using specified serializer
:param unitOrCls: unit instance or class, which should be converted
:param name: name override of top unit (if is None name is derived
form class name)
:param target_platform: metainformatins about target platform, distributed
on every unit under _target_platform attribute
before Unit._impl() is called
"""
if isinstance(unit_or_cls, Unit):
u = unit_or_cls
else:
u = unit_or_cls()
u._loadDeclarations()
if name is not None:
assert isinstance(name, str)
u._hdl_module_name = u._name = name
if store_manager is None:
#buff = StringIO()
import sys
buff = sys.stdout
store_manager = SaveToStream(Vhdl2008Serializer, buff)
else:
buff = None
# serialize all unit instances to HDL code
constraints = []
for serialized, obj in u._toRtl(target_platform, store_manager):
if not serialized and obj._constraints:
raise NotImplementedError()
# [todo] if the instance is shared with something else copy a constrains
constraints.extend(obj._constraints)
# collect and serialize all constraints in design
if constraints:
for cs_cls in target_platform.constraint_serializer:
f_name = os.path.join(saveTo, cs_cls.DEFAULT_FILE_NAME)
with open(f_name, "w") as f:
cs = cs_cls(f)
for c in constraints:
cs.any(c)
files.append(f_name)
if buff is not None:
return buff.getvalue()
def to_rtl_str(unit_or_cls: Unit,
serializer_cls=Vhdl2008Serializer, name: str=None,
target_platform=DummyPlatform()):
buff = StringIO()
store_manager = SaveToStream(serializer_cls, buff)
toRtl(unit_or_cls, store_manager, name, target_platform)
return buff.getvalue()
def serializeAsIpcore(unit, folderName=".", name=None,
serializer=Vhdl2008Serializer,
target_platform=DummyPlatform()):
"""
Create an IPCore package
"""
from hwt.serializer.ip_packager import IpPackager
p = IpPackager(unit, name=name,
serializer=serializer,
target_platform=target_platform)
p.createPackage(folderName)
return p
class DummySerializerCls():
"""
The serializer which does not do any additional code transformations
and does not produce any output. It is used to generate just internal representation
of RTL code.
"""
TO_HDL_AST = ToHdlAst
def synthesised(u: Unit, target_platform=DummyPlatform()):
"""
Elaborate design without producing any hdl
"""
sm = StoreManager(DummySerializerCls)
if not hasattr(u, "_interfaces"):
u._loadDeclarations()
for _ in u._toRtl(target_platform, sm):
pass
return u
|
Python
| 0
|
@@ -2493,16 +2493,20 @@
rializer
+_cls
=Vhdl200
@@ -2738,16 +2738,20 @@
rializer
+_cls
=seriali
@@ -2745,32 +2745,36 @@
r_cls=serializer
+_cls
,%0A
|
6fcce1bcecb15000c671c706588d6fd0d92145e5
|
Add windbg info to header
|
voltron/entry.py
|
voltron/entry.py
|
"""
This is the main entry point for Voltron from the debugger host's perspective.
This file is loaded into the debugger through whatever means the given host
supports.
In LLDB:
(lldb) command script import /path/to/voltron/entry.py
In GDB:
(gdb) source /path/to/voltron/entry.py
In VDB:
(vdb) script /path/to/voltron/entry.py
"""
log = None
try:
import logging
import os
import sys
blessed = None
import blessed
import voltron
from voltron.plugin import pm
from voltron.core import Server
log = voltron.setup_logging('debugger')
# figure out in which debugger host we are running
try:
import lldb
host = "lldb"
except ImportError:
pass
try:
import gdb
host = "gdb"
except ImportError:
pass
if "vtrace" in locals():
host = "vdb"
try:
import pykd
host = "windbg"
except:
pass
if not host:
raise Exception("No debugger host is present")
# register any plugins that were loaded
pm.register_plugins()
# get the debugger plugin for the host we're in
plugin = pm.debugger_plugin_for_host(host)
# set up command and adaptor instances
voltron.debugger = plugin.adaptor_class()
voltron.command = plugin.command_class()
# create and start the voltron server
voltron.server = Server()
if host != "gdb":
voltron.server.start()
print(blessed.Terminal().bold_red("Voltron loaded."))
if host == 'lldb':
print("Run `voltron init` after you load a target.")
except Exception as e:
import traceback
msg = "An error occurred while loading Voltron:\n\n{}".format(traceback.format_exc())
if blessed:
msg = blessed.Terminal().bold_red(msg)
if log:
log.exception("Exception raised while loading Voltron")
print(msg)
|
Python
| 0
|
@@ -163,19 +163,16 @@
ports.%0A%0A
-In
LLDB:%0A%0A
@@ -230,19 +230,16 @@
try.py%0A%0A
-In
GDB:%0A%0A
@@ -284,11 +284,8 @@
py%0A%0A
-In
VDB:
@@ -328,16 +328,109 @@
entry.py
+%0A%0AWinDbg/CDB (via PyKD):%0A%0A %3E .load pykd.pyd%0A %3E !py --global C:%5Cpath%5Cto%5Cvoltron%5Centry.py
%0A%22%22%22%0A%0Alo
|
2f1df9024ae4a0a070bb058ce075acdb8bcf0474
|
Include date in email send
|
weekly-update.py
|
weekly-update.py
|
#!/usr/bin/python
import sys
import xmlrpclib
import subprocess
import yaml
import smtplib
import json
from datetime import datetime
print 'Weekly update started at ' + str(datetime.now())
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
import render
from config import *
def send_email(address, html):
with open('secret.txt', 'r') as f:
secret = json.load(f)
username = secret['gmail']['username']
password = secret['gmail']['password']
print 'Sending weekly email to {0}...'.format(address)
smtp = smtplib.SMTP('smtp.gmail.com', 587)
smtp.ehlo()
smtp.starttls()
smtp.ehlo()
smtp.login(username, password)
msg = MIMEMultipart('mixed')
msg['Subject'] = 'Weekly Recap'
msg['To'] = address
msg['From'] = username
msg['Content-Type'] = 'text/html'
msg.attach(MIMEText(html, 'html'))
smtp.sendmail(username, address, msg.as_string())
print 'Weekly email send successfully'
smtp.close()
def generate_email():
dry_run = False
args = sys.argv[1:]
if args[0] == '-n':
dry_run = True
args = args[1:]
date = args[0]
with open('ledger', 'a') as f:
f.write("\n")
f.write(render.render_template('templates/ledger', date))
if not dry_run:
subprocess.check_call(["git", "commit", "ledger",
"-m", "Update for %s" % (date,)])
debts = render.get_debts()
punt = []
with open('ledger', 'a') as f:
f.write("\n")
for (user, debt) in debts:
if debt <= (FINE_SIZE * 6):
continue
punt.append(user)
f.write(PUNT_TEXT % {'user': user, 'debt': debt, 'date': date})
if not dry_run:
text = render.render_template('templates/week.tmpl', date, punt=punt)
lines = text.split("\n")
title = lines[0]
body = "\n".join(lines[1:])
page = dict(title = title, description = body)
with open('secret.txt', 'r') as f:
secret = json.load(f)
passwd = secret['wordpress']['password']
x = xmlrpclib.ServerProxy(XMLRPC_ENDPOINT)
x.metaWeblog.newPost(BLOG_ID, USER, passwd, page, True)
email = render.render_template('templates/email.html', date, punt=punt)
if dry_run:
print email
else:
send_email('iron-blogger-sf@googlegroups.com', email)
with open('out/email.txt', 'w') as f:
f.write(email)
if punt:
with open('bloggers.yml') as b:
bloggers = yaml.safe_load(b)
for p in punt:
if bloggers.get(p) and 'end' not in bloggers[p]:
bloggers[p]['end'] = date
with open('bloggers.yml','w') as b:
yaml.safe_dump(bloggers, b)
if not dry_run:
subprocess.check_call(["git", "commit", "ledger", "bloggers.yml",
"-m", "Punts for %s" % (date,)])
# if it's a dry run, lets set the ledger back to the beginning state
if dry_run:
subprocess.check_call(["git", "checkout", "ledger"])
if punt:
subprocess.check_call(["git", "checkout", "bloggers.yml"])
if __name__ == '__main__':
generate_email()
|
Python
| 0.000003
|
@@ -333,16 +333,22 @@
ss, html
+, date
):%0A w
@@ -766,17 +766,29 @@
ly Recap
-'
+ for ' + date
%0A msg
@@ -2440,24 +2440,30 @@
.com', email
+, date
)%0A%0A with
|
3275827ef5578142e07747f9feacc4f47fc22006
|
Update factorial.py
|
problems/factorial/factorial.py
|
problems/factorial/factorial.py
|
def fac(n):
return 1 if n == 1 else n * fac(n-1)
print fac(3)
print fac(33)
|
Python
| 0.000002
|
@@ -1,8 +1,30 @@
+# Recursive factorial%0A
def fac(
@@ -78,26 +78,264 @@
rint
- fac(3)%0Aprint fac(33)
+(fac(3)) # 6%0Aprint(fac(33)) # 8683317618811886495518194401280000000%0A%0A# Iterative factorial%0Adef fac(n):%0A res = i = 1%0A while i %3C= n:%0A res *= i%0A i += 1%0A return res%0A%0Aprint(fac(3)) # 6%0Aprint(fac(33)) # 8683317618811886495518194401280000000
%0A
|
20fc5e0cb1bce4e4e1a69e3f7158f1ceec29c1d0
|
Version bump
|
be/version.py
|
be/version.py
|
VERSION_MAJOR = 0
VERSION_MINOR = 2
VERSION_PATCH = 1
version_info = (VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH)
version = '%i.%i.%i' % version_info
__version__ = version
__all__ = ['version', 'version_info', '__version__']
|
Python
| 0.000001
|
@@ -50,9 +50,9 @@
H =
-1
+2
%0A%0Ave
|
b250cfacdb45d85bf6ef7f0a1f28b89935c24b9b
|
Update settings.py
|
project-name/my_app/settings.py
|
project-name/my_app/settings.py
|
# Snippets from Actual Settings.py
TEMPLATES = [
{
'BACKEND': 'django_jinja.backend.Jinja2',
"DIRS": "PROJECT_ROOT_DIRECTORY",
'APP_DIRS': True,
'OPTIONS': {
'match_extension': '.html',
'context_processors': [
'django.template.context_processors.request',
'django.template.context_processors.debug',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.template.context_processors.tz'
],
'globals': {
},
'extensions': DEFAULT_EXTENSIONS + [
'pipeline.templatetags.ext.PipelineExtension',
],
},
},
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True
},
]
# Auto Register Template Globals
_template_globals = {}
for object_name in dir(app_template_globals):
_obj = getattr(app_template_globals, object_name)
if callable(_obj) and not object_name.startswith('__'):
_template_globals[object_name] = _obj.__module__ + '.' + _obj.__qualname__
TEMPLATES[0]['OPTIONS']['globals'].update(_template_globals)
|
Python
| 0.000001
|
@@ -115,16 +115,17 @@
%22DIRS%22:
+%5B
%22PROJECT
@@ -141,16 +141,24 @@
ECTORY%22,
+ %22...%22%5D,
%0A
|
94e85fb24a9b2c327094b880e05251ffb00c1335
|
Add urls for list by topic and by location
|
bills/urls.py
|
bills/urls.py
|
from . import views
from django.conf.urls import url
urlpatterns = [
url(r'^list/', views.bill_list),
url(r'^latest_activity/', views.latest_bill_activity),
url(r'^latest/', views.latest_bill_actions),
url(r'^detail/(?P<bill_id>(.*))/$', views.bill_detail, name='bill_detail'),
]
|
Python
| 0
|
@@ -79,13 +79,69 @@
(r'%5E
-list/
+by_topic/', views.bill_list_by_topic),%0A url(r'%5Eby_location
', v
@@ -154,16 +154,28 @@
ill_list
+_by_location
),%0A u
|
f3a80a66dbfde1e762dd241e182a7ce66fd77e5f
|
Add is_active boolean to serializer
|
project/apps/api/serializers.py
|
project/apps/api/serializers.py
|
from rest_framework import serializers
from .models import (
Convention,
Contest,
Contestant,
Group,
Performance,
Note,
)
from django.contrib.auth import get_user_model
User = get_user_model()
class PerformanceSerializer(serializers.ModelSerializer):
round = serializers.CharField(
source='get_round_display',
)
kind = serializers.CharField(
source='contestant.contest.get_kind_display',
)
prelim = serializers.FloatField(
source='contestant.prelim',
)
group = serializers.SlugRelatedField(
read_only=True,
slug_field='slug',
source='contestant.group',
)
class Meta:
model = Performance
lookup_field = 'slug'
fields = (
'id',
'slug',
'round',
'kind',
'prelim',
'group',
'queue',
'session',
'stagetime',
'place',
'song1',
'mus1',
'prs1',
'sng1',
'song2',
'mus2',
'prs2',
'sng2',
'men',
'mus1_rata',
'prs1_rata',
'sng1_rata',
'song1_raw',
'song1_rata',
'mus2_rata',
'prs2_rata',
'sng2_rata',
'song2_raw',
'song2_rata',
'total_raw',
'score',
)
class GroupSerializer(serializers.ModelSerializer):
district = serializers.StringRelatedField()
kind = serializers.CharField(
source='get_kind_display',
)
chapterName = serializers.CharField(
source='chapter_name',
)
lead = serializers.StringRelatedField()
tenor = serializers.StringRelatedField()
baritone = serializers.StringRelatedField()
bass = serializers.StringRelatedField()
class Meta:
model = Group
lookup_field = 'slug'
fields = (
'id',
'url',
'slug',
'name',
'kind',
'district',
'location',
'website',
'facebook',
'twitter',
'email',
'phone',
'picture',
'description',
'director',
'chapterName',
'lead',
'tenor',
'baritone',
'bass',
'bsmdb',
# 'contestants',
)
class ContestantSerializer(serializers.ModelSerializer):
contest = serializers.SlugRelatedField(
read_only=True,
slug_field='slug',
)
performances = serializers.SlugRelatedField(
many=True,
read_only=True,
slug_field='slug',
)
group = serializers.SlugRelatedField(
read_only=True,
slug_field='slug',
)
# group = GroupSerializer(
# read_only=True,
# )
# performances = PerformanceSerializer(
# read_only=True,
# many=True,
# )
class Meta:
model = Contestant
lookup_field = 'slug'
fields = (
'id',
'slug',
'contest',
'group',
'seed',
'prelim',
'place',
'score',
'queue',
'stagetime',
'quarters_place',
'quarters_score',
'semis_place',
'semis_score',
'finals_place',
'finals_score',
'performances',
)
class ContestSerializer(serializers.ModelSerializer):
contestants = serializers.SlugRelatedField(
many=True,
read_only=True,
slug_field='slug',
)
level = serializers.CharField(
source='get_level_display',
)
kind = serializers.CharField(
source='get_kind_display',
)
year = serializers.CharField(
source='get_year_display',
)
district = serializers.StringRelatedField()
class Meta:
model = Contest
lookup_field = 'slug'
fields = (
'id',
'slug',
'level',
'kind',
'year',
'district',
'panel',
'scoresheet_pdf',
'contestants',
)
class ConventionSerializer(serializers.ModelSerializer):
contests = serializers.SlugRelatedField(
many=True,
read_only=True,
slug_field='slug',
)
# contests = ContestSerializer(
# read_only=True,
# many=True,
# )
class Meta:
model = Convention
lookup_field = 'slug'
fields = (
'id',
'url',
'slug',
'name',
'dates',
'timezone',
'contests',
)
class NoteSerializer(serializers.ModelSerializer):
performance = serializers.SlugRelatedField(
# read_only=True,
queryset=Performance.objects.all(),
slug_field='slug',
)
user = serializers.PrimaryKeyRelatedField(
queryset=User.objects.all(),
# read_only=True,
)
class Meta:
model = Note
fields = (
'id',
'text',
'performance',
'user',
)
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = (
'id',
'email',
'username',
)
|
Python
| 0.002324
|
@@ -3539,24 +3539,49 @@
formances',%0A
+ 'is_active',%0A
)%0A%0A%0A
@@ -4320,32 +4320,57 @@
'contestants',%0A
+ 'is_active',%0A
)%0A%0A%0Aclas
|
67fe36b7a733af9fb68b8ac96d5c1c0a471bc2b8
|
version bump
|
malcolm/version.py
|
malcolm/version.py
|
__version__ = '0.5'
|
Python
| 0.000001
|
@@ -14,7 +14,7 @@
'0.
-5
+6
'%0A
|
9bfd47801b476158b94d10c74ecd6a4120162704
|
Fix AttributeError: 'super' object has no attribute 'capture_traffic'
|
malware/sandbox.py
|
malware/sandbox.py
|
import virtualbox
import os
import time
import threading
import logging
logging.basicConfig(level=logging.DEBUG,
format='(%(threadName)-10s) %(message)s',)
class Sandbox(object):
def __init__(self, name):
vbox = virtualbox.VirtualBox()
self.vm = vbox.find_machine(name)
self.session = None
self.gs = None
self.malware_name = None
def start(self):
logging.info("Starting the VM")
session = virtualbox.Session()
power_up_process = self.vm.launch_vm_process(session, 'gui', '')
power_up_process.wait_for_completion()
session.unlock_machine()
def power_off(self):
logging.info("Shutdown the VM")
self.gs.close()
power_down_process = self.session.console.power_down()
power_down_process.wait_for_completion()
time.sleep(5)
def restore_snapshot(self, point):
logging.info("Restored the VM")
session = self.vm.create_session()
origin_point = session.machine.find_snapshot(point)
restore_process = session.console.restore_snapshot(origin_point)
restore_process.wait_for_completion()
session.unlock_machine()
def take_snapshot(self, point):
logging.info("Take the snapshoot on %s" % (point))
session = self.vm.create_session()
localtime = time.asctime(time.localtime(time.time()))
snapshot_process = self.session.console.take_snapshot(point, localtime)
snapshot_process.wait_for_completion()
session.unlock_machine()
def delete_snapshot(self, point):
logging.info("Delete the snapshoot %s" % (point))
session = self.vm.create_session()
origin_point = session.machine.find_snapshot(point)
delete_process = session.console.delete_snapshot(origin_point.id_p)
delete_process.wait_for_completion()
session.unlock_machine()
def capture_traffic(self, wait_sec, interface=0):
if not os.path.isdir('PCAPLog'):
os.mkdir('PCAPLog')
logging.debug('Capture the network traffic')
session = self.vm.create_session()
adapter = session.machine.get_network_adapter(interface)
adapter.trace_file = os.path.abspath('./PCAPLog/%s.pcap'
% self.malware_name)
adapter.trace_enabled = True
time.sleep(wait_sec)
adapter.trace_enabled = False
session.unlock_machine()
logging.debug('Exiting')
class Win32Sandbox(Sandbox):
def __init__(self, name):
super(Win32Sandbox, self).__init__(name)
def login(self, account, password):
logging.info("Login to Windows XP")
self.session = self.vm.create_session()
self.gs = self.session.console.guest.create_session('john', 'john')
time.sleep(10)
try:
self.gs.execute('C:\\Windows\\System32\\cmd.exe',
['/C', 'tasklist'])
except:
logging.info("Waiting for logins to complete")
time.sleep(15)
def copy_malware_in(self, name):
self.malware_name = name
logging.info("Copy malware %s into sandbox" % (name))
copy_to_process = self.gs.copy_to('./malware_bin/%s' % (name),
'C:\\%s.exe' % (name))
copy_to_process.wait_for_completion()
def active_malware(self, name, timeout=5000):
logging.debug('Activate the malware')
try:
process, stdout, stderr = self.gs.execute('C:\\%s.exe' % (name),
timeout_ms=timeout)
except:
f = open("./cannot_exec.out", "a")
f.write(name + '\n')
f.close()
return
logging.debug('Exiting')
def capture_traffic(self, wait_sec, interface=0):
super(Sandbox, self).capture_traffic(wait_sec, interface)
def active_capture_parallel(self, time):
capture_worker = threading.Thread(name='Capture Traffic',
target=self.capture_traffic,
args=(time, ))
active_worker = threading.Thread(name='Active Malware',
target=self.active_malware,
args=(self.malware_name, ))
capture_worker.start()
active_worker.start()
capture_worker.join()
active_worker.join(5)
class LinuxSandbox(Sandbox):
def __init__(self, name):
super(LinuxSandbox, self).__init__(name)
if __name__ == "__main__":
pass
|
Python
| 0.022124
|
@@ -3890,16 +3890,21 @@
super(
+Win32
Sandbox,
|
318bf52453055ce00fc1d66006d25ef81f013dfa
|
change output format a little
|
bin/evtest.py
|
bin/evtest.py
|
#!/usr/bin/env python
# encoding: utf-8
'''
evdev example - input device event monitor
'''
from sys import argv, exit
from select import select
from evdev import ecodes, InputDevice, list_devices, AbsInfo
usage = 'usage: evtest <device> [<type> <value>]'
evfmt = 'time {:<16} type {} ({}), code {:<4} ({}), value {}'
device_dir = '/dev/input/'
query_type = None
query_value = None
def select_device():
'''Select a device from the list of accessible input devices.'''
devices = [InputDevice(i) for i in reversed(list_devices(device_dir))]
if not devices:
print('error: no input devices found (do you have rw permission on /dev/input/*?)')
exit(1)
dev_fmt = '{0:<3} {1.fn:<20} {1.name:<35} {1.phys}'
dev_lns = [dev_fmt.format(n, d) for n, d in enumerate(devices)]
print('ID {:<20} {:<35} {}'.format('Device', 'Name', 'Phys'))
print('-' * len(max(dev_lns, key=len)))
print('\n'.join(dev_lns))
print('')
choice = input('Select device [0-{}]:'.format(len(dev_lns)-1))
return devices[int(choice)]
def print_event(e):
if e.type == ecodes.EV_SYN:
if e.code == ecodes.SYN_MT_REPORT:
print('time {:<16} +++++++++ {} ++++++++'.format(e.timestamp(), ecodes.SYN[e.code]))
else:
print('time {:<16} --------- {} --------'.format(e.timestamp(), ecodes.SYN[e.code]))
else:
if e.type in ecodes.bytype:
codename = ecodes.bytype[e.type][e.code]
else:
codename = '?'
print(evfmt.format(e.timestamp(), e.type, ecodes.EV[e.type], e.code, codename, e.value))
if len(argv) == 1:
device = select_device()
elif len(argv) == 2:
device = InputDevice(argv[1])
elif len(argv) == 4:
device = InputDevice(argv[1])
query_type = argv[2]
query_value = argv[3]
else:
print(usage)
exit(1)
capabs = device.capabilities(verbose=True)
print('Device name: {.name}'.format(device))
print('Device info: {.info}'.format(device))
print('Repeat settings: {}'.format(device.repeat))
if ('EV_LED', ecodes.EV_LED) in capabs:
print('Active LEDs: {}\n'.format(','.join(i[0] for i in device.leds(True))))
print('Currently active keys: {}\n'.format(','.join(k[0] for k in device.active_keys(True))))
print('Device capabilities:')
for type, codes in capabs.items():
print(' Type {} {}:'.format(*type))
for i in codes:
# i <- ('BTN_RIGHT', 273) or (['BTN_LEFT', 'BTN_MOUSE'], 272)
if isinstance(i[1], AbsInfo):
print(' Code {:<4} {}:'.format(*i[0]))
print(' {}'.format(i[1]))
else:
# multiple names may resolve to one value
s = ', '.join(i[0]) if isinstance(i[0], list) else i[0]
print(' Code {:<4} {}'.format(s, i[1]))
print('')
print('Listening for events ...\n')
while True:
r, w, e = select([device], [], [])
for ev in device.read():
print_event(ev)
|
Python
| 0.000002
|
@@ -2008,16 +2008,18 @@
ings: %7B%7D
+%5Cn
'.format
@@ -2084,41 +2084,15 @@
-print('Active LEDs: %7B%7D%5Cn'.format(
+leds =
','.
@@ -2132,55 +2132,60 @@
ue))
-))%0A%0A
+%0A
print('
-Currently
+Active LEDs: %25s' %25 leds)%0A%0A
active
-
+_
keys
-: %7B%7D%5Cn'.format(
+ =
','.
@@ -2228,17 +2228,56 @@
s(True))
-)
+%0Aprint('Active keys: %25s%5Cn' %25 active_keys
)%0A%0Aprint
|
b183a200d2e546de955e5190fefe7b7a61a1fc55
|
check if no devices were found
|
bin/evtest.py
|
bin/evtest.py
|
#!/usr/bin/env python
# encoding: utf-8
'''
evdev example - input device event monitor
'''
from sys import argv, exit
from select import select
from evdev import ecodes, InputDevice, list_devices, AbsInfo
usage = 'usage: evtest <device> [<type> <value>]'
evfmt = 'time {:<16} type {} ({}), code {:<4} ({}), value {}'
device_dir = '/dev/input/'
query_type = None
query_value = None
def select_device():
''' Select a device from the list of accessible input devices '''
devices = [InputDevice(i) for i in reversed(list_devices(device_dir))]
dev_fmt = '{0:<3} {1.fn:<20} {1.name:<35} {1.phys}'
dev_lns = [dev_fmt.format(n, d) for n, d in enumerate(devices)]
print('ID {:<20} {:<35} {}'.format('Device', 'Name', 'Phys'))
print('-' * len(max(dev_lns, key=len)))
print('\n'.join(dev_lns))
print('')
choice = input('Select device [0-{}]:'.format(len(dev_lns)-1))
return devices[int(choice)]
def print_event(e):
if e.type == ecodes.EV_SYN:
if e.code == ecodes.SYN_MT_REPORT:
print('time {:<16} +++++++++ {} ++++++++'.format(e.timestamp(), ecodes.SYN[e.code]))
else:
print('time {:<16} --------- {} --------'.format(e.timestamp(), ecodes.SYN[e.code]))
else:
if e.type in ecodes.bytype:
codename = ecodes.bytype[e.type][e.code]
else:
codename = '?'
print(evfmt.format(e.timestamp(), e.type, ecodes.EV[e.type], e.code, codename, e.value))
if len(argv) == 1:
device = select_device()
elif len(argv) == 2:
device = InputDevice(argv[1])
elif len(argv) == 4:
device = InputDevice(argv[1])
query_type = argv[2]
query_value = argv[3]
else:
print(usage)
exit(1)
capabs = device.capabilities(verbose=True)
print('Device name: {.name}'.format(device))
print('Device info: {.info}'.format(device))
print('Repeat settings: {}'.format(device.repeat))
if ('EV_LED', ecodes.EV_LED) in capabs:
print('Active LEDs: {}\n'.format(','.join(i[0] for i in device.leds(True))))
print('Device capabilities:')
for type, codes in capabs.items():
print(' Type {} {}:'.format(*type))
for i in codes:
# i <- ('BTN_RIGHT', 273) or (['BTN_LEFT', 'BTN_MOUSE'], 272)
if isinstance(i[1], AbsInfo):
print(' Code {:<4} {}:'.format(*i[0]))
print(' {}'.format(i[1]))
else:
# multiple names may resolve to one value
s = ', '.join(i[0]) if isinstance(i[0], list) else i[0]
print(' Code {:<4} {}'.format(s, i[1]))
print('')
print('Listening for events ...\n')
while True:
r, w, e = select([device], [], [])
for ev in device.read():
print_event(ev)
|
Python
| 0.000002
|
@@ -409,17 +409,16 @@
%0A '''
-
Select a
@@ -467,17 +467,17 @@
devices
-
+.
'''%0A%0A
@@ -547,16 +547,144 @@
e_dir))%5D
+%0A if not devices:%0A print('error: no input devices found (do you have rw permission on /dev/input/*?)')%0A exit(1)
%0A%0A de
|
1bc7937bf0c4c65996e586aef997250869bf5ed1
|
Use python from env.
|
bin/pylama.py
|
bin/pylama.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import re
import sys
import os
from pylama.main import shell
if __name__ == '__main__':
try:
virtual_env = os.environ.get('VIRTUAL_ENV', '')
activate_this = os.path.join(virtual_env, 'bin', 'activate_this.py')
with open(activate_this) as f:
exec(f.read(), dict(__file__=activate_this))
except IOError:
pass
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(shell())
|
Python
| 0
|
@@ -4,16 +4,20 @@
usr/bin/
+env
python%0A%0A
|
4ca9d012495de07ab0127582145fd16db0e03a28
|
Add health URL when in maintenance mode
|
gem/urls.py
|
gem/urls.py
|
import os
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.conf import settings
from django.contrib import admin
from django.views.generic.base import TemplateView
from django.contrib.auth.decorators import login_required
from django_cas_ng import views as cas_views
from wagtail.wagtailadmin import urls as wagtailadmin_urls
from wagtail.wagtaildocs import urls as wagtaildocs_urls
from wagtail.wagtailcore import urls as wagtail_urls
from molo.core import views as core_views
from molo.profiles.views import ForgotPasswordView, ResetPasswordView
from wagtail.contrib.wagtailsitemaps import views as sitemap_views
from gem.views import (
report_response, GemRegistrationView,
GemRssFeed, GemAtomFeed,
ReportCommentView, GemEditProfileView,
AlreadyReportedCommentView, GemRegistrationDoneView,
BbmRedirect, MaintenanceView,
)
urlpatterns = []
# implement CAS URLs in a production setting
if settings.ENABLE_SSO:
urlpatterns += [
url(r'^admin/login/', cas_views.login),
url(r'^admin/logout/', cas_views.logout),
url(r'^admin/callback/', cas_views.callback),
]
urlpatterns += [
url(r'^django-admin/', include(admin.site.urls)),
url(r'^admin/', include(wagtailadmin_urls)),
url(r'^robots\.txt$', TemplateView.as_view(
template_name='robots.txt', content_type='text/plain')),
url(r'^sitemap\.xml$', sitemap_views.sitemap),
url(r'^documents/', include(wagtaildocs_urls)),
url(r'^bbm/(?P<redirect_path>.*)$',
BbmRedirect.as_view(), name='bbm_redirect'),
url(r'', include('molo.pwa.urls')),
url(r'^profiles/register/$',
GemRegistrationView.as_view(), name='user_register'),
url(r'^profiles/register/done/',
GemRegistrationDoneView.as_view(), name='registration_done'),
url(r'^profiles/forgot_password/$',
ForgotPasswordView.as_view(), name='forgot_password'),
url(r'^profiles/reset_password/$',
ResetPasswordView.as_view(), name='reset_password'),
url(r'^profiles/reset-success/$',
TemplateView.as_view(
template_name='profiles/reset_password_success.html'
),
name='reset_password_success'),
url(r'^profiles/edit/myprofile/$',
login_required(GemEditProfileView.as_view()),
name='edit_my_profile'),
url(r'^profiles/',
include('molo.profiles.urls',
namespace='molo.profiles',
app_name='molo.profiles')),
url(r'^commenting/',
include('molo.commenting.urls',
namespace='molo.commenting',
app_name='molo.commenting')),
url(r'^comments/reported/(?P<comment_pk>\d+)/$',
report_response, name='report_response'),
url(r'^comments/report_comment/(?P<comment_pk>\d+)/$',
login_required(ReportCommentView.as_view()), name='report_comment'),
url(r'^comments/already_reported/(?P<comment_pk>\d+)/$',
login_required(AlreadyReportedCommentView.as_view()),
name='already_reported'),
url(r'', include('django_comments.urls')),
url(r'^surveys/',
include('molo.surveys.urls',
namespace='molo.surveys',
app_name='molo.surveys')),
url(r'^yourwords/',
include('molo.yourwords.urls',
namespace='molo.yourwords',
app_name='molo.yourwords')),
url(r'^feed/rss/$', GemRssFeed(), name='feed_rss'),
url(r'^feed/atom/$', GemAtomFeed(), name='feed_atom'),
url(r'^servicedirectory/', include('molo.servicedirectory.urls',
namespace='molo.servicedirectory')),
url(r'^polls/', include('molo.polls.urls',
namespace='molo.polls',
app_name='molo.polls')),
url(r"^mote/", include("mote.urls", namespace="mote")),
url(r'', include('molo.core.urls')),
url(
r'^home-index/$',
core_views.home_index,
name='home_index'
),
url(
r'^home-more/$',
core_views.home_more,
name='home_more'
),
url(
r'^section-index/$',
core_views.section_index,
name='section_index'
),
url(r'^reaction/(?P<article_slug>[0-9A-Za-z_\-]+)/'
'(?P<question_id>\d+)/vote/$',
core_views.ReactionQuestionChoiceView.as_view(),
name='reaction-vote'),
url(r'', include(wagtail_urls)),
]
if settings.DEBUG:
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(
settings.MEDIA_URL + 'images/',
document_root=os.path.join(settings.MEDIA_ROOT, 'images'))
urlpatterns += static(
settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.MAINTENANCE_MODE:
urlpatterns = [
url(r'', MaintenanceView.as_view()),
]
|
Python
| 0
|
@@ -4816,24 +4816,105 @@
atterns = %5B%0A
+ url(%0A r'%5Ehealth/$',%0A core_views.health,%0A ),%0A
url(
|
d6294ce0629c8ab59333db4b87c471448ad4d7e9
|
Allow rebuild to reinstall transition.
|
lib/python2.6/aquilon/aqdb/model/hostlifecycle.py
|
lib/python2.6/aquilon/aqdb/model/hostlifecycle.py
|
# ex: set expandtab softtabstop=4 shiftwidth=4: -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
#
# Copyright (C) 2008,2009,2010,2011 Contributor
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the EU DataGrid Software License. You should
# have received a copy of the license with this program, and the
# license is published at
# http://eu-datagrid.web.cern.ch/eu-datagrid/license.html.
#
# THE FOLLOWING DISCLAIMER APPLIES TO ALL SOFTWARE CODE AND OTHER
# MATERIALS CONTRIBUTED IN CONNECTION WITH THIS PROGRAM.
#
# THIS SOFTWARE IS LICENSED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE AND ANY WARRANTY OF NON-INFRINGEMENT, ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
# OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
# BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. THIS
# SOFTWARE MAY BE REDISTRIBUTED TO OTHERS ONLY BY EFFECTIVELY USING
# THIS OR ANOTHER EQUIVALENT DISCLAIMER AS WELL AS ANY OTHER LICENSE
# TERMS THAT MAY APPLY.
from datetime import datetime
from sqlalchemy.orm import object_session
from sqlalchemy import (Column, Enum, Integer, DateTime, Sequence, String,
UniqueConstraint)
from aquilon.aqdb.model import StateEngine, Base
from aquilon.utils import monkeypatch
from aquilon.aqdb.column_types import Enum
_TN = 'hostlifecycle'
class HostLifecycle(StateEngine, Base):
""" Describes the state a host is within the provisioning lifecycle """
transitions = {
'blind' : ['build', 'failed', 'decommissioned'],
'build' : ['almostready', 'ready', 'failed',
'rebuild', 'reinstall', 'decommissioned'],
'install' : ['build', 'reinstall', 'failed',
'decommissioned'],
'almostready' : ['ready', 'rebuild', 'reinstall', 'failed',
'decommissioned'],
'ready' : ['almostready', 'rebuild', 'reinstall',
'failed', 'decommissioned'],
'reinstall' : ['rebuild', 'failed', 'decommissioned'],
'rebuild' : ['almostready', 'ready', 'failed',
'decommissioned'],
'failed' : ['rebuild', 'reinstall', 'decommissioned'],
'decommissioned' : [],
}
__tablename__ = _TN
id = Column(Integer, Sequence('%s_id_seq' % _TN), primary_key=True)
name = Column(Enum(32, transitions.keys()), nullable=False)
creation_date = Column(DateTime, default=datetime.now, nullable=False)
comments = Column(String(255), nullable=True)
__mapper_args__ = {'polymorphic_on': name}
def __repr__(self):
return str(self.name)
hostlifecycle = HostLifecycle.__table__ # pylint: disable-msg=C0103, E1101
hostlifecycle.primary_key.name = '%s_pk' % _TN
hostlifecycle.append_constraint(UniqueConstraint('name', name='%s_uk' % _TN))
hostlifecycle.info['unique_fields'] = ['name']
@monkeypatch(hostlifecycle)
def populate(sess, *args, **kw): # pragma: no cover
from sqlalchemy.exceptions import IntegrityError
statuslist = HostLifecycle.transitions.keys()
i = hostlifecycle.insert()
for name in statuslist:
try:
i.execute(name=name)
except IntegrityError:
pass
assert len(sess.query(HostLifecycle).all()) == len(statuslist)
"""
The following classes are the actual lifecycle states for a host.
WARNING: The classes Decommissioned, Ready, Rebuild and Build have the same name
as 4 classes in clusterlifecycle and have odd behaviors when imported into the
same namespace. It would be ill advised to do use these clashing clasess in the
same module.
Perhaps it's best to illustrate by example:
from aquilon.aqdb.model.clusterlifecycle import Ready
session.query(Ready).first()
Out[31]: ready
type(r)
Out[32]: <class 'aquilon.aqdb.model.clusterlifecycle.Ready'>
from aquilon.aqdb.model.hostlifecycle import Ready
r=s.query(Ready).first()
type(r)
Out[35]: <class 'aquilon.aqdb.model.hostlifecycle.Ready'>
from aquilon.aqdb.model.clusterlifecycle import Ready
r=s.query(Ready).first()
type(r)
Out[55]: <class 'aquilon.aqdb.model.clusterlifecycle.Ready'>
"""
class Blind(HostLifecycle):
__mapper_args__ = {'polymorphic_identity': 'blind'}
class Decommissioned(HostLifecycle):
__mapper_args__ = {'polymorphic_identity': 'decommissioned'}
class Ready(HostLifecycle):
__mapper_args__ = {'polymorphic_identity': 'ready'}
def onEnter(self, obj):
if obj.cluster and obj.cluster.status.name != 'ready':
dbstate = HostLifecycle.get_unique(object_session(obj),
'almostready',
compel=True)
obj.status.transition(obj, dbstate)
class Almostready(HostLifecycle):
__mapper_args__ = {'polymorphic_identity': 'almostready'}
class Install(HostLifecycle):
__mapper_args__ = {'polymorphic_identity': 'install'}
class Build(HostLifecycle):
__mapper_args__ = {'polymorphic_identity': 'build'}
class Rebuild(HostLifecycle):
__mapper_args__ = {'polymorphic_identity': 'rebuild'}
class Reinstall(HostLifecycle):
__mapper_args__ = {'polymorphic_identity': 'reinstall'}
class Failed(HostLifecycle):
__mapper_args__ = {'polymorphic_identity': 'failed'}
|
Python
| 0
|
@@ -2745,32 +2745,45 @@
ready', 'ready',
+ 'reinstall',
'failed',%0A
|
8de815513df691d24fb0b32504d025a022e4c61b
|
fix execute query with unicode
|
bin/sql_db.py
|
bin/sql_db.py
|
##############################################################################
#
# Copyright (c) 2004 TINY SPRL. (http://tiny.be) All Rights Reserved.
#
# $Id: pooler.py 1310 2005-09-08 20:40:15Z pinky $
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsability of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# garantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import psycopg
import tools
import sys,os
#try:
# import decimal
#except ImportError:
# from tools import decimal
import re
class fake_cursor:
nbr = 0
_tables = {}
def __init__(self, db, con, dbname):
self.db = db
self.obj = db.cursor()
self.con = con
self.dbname = dbname
def execute_not_run(self,*args):
#if not fake_cursor.nbr % 1:
# print 'sql: ',fake_cursor.nbr, args
res = re.match('^select.* from ([a-zA-Z_]+) .*$', args[0], re.I)
if res:
fake_cursor._tables.setdefault(res.group(1), 0)
fake_cursor._tables[res.group(1)] += 1
#else:
# if len(args)>1:
# print 'sql: ',fake_cursor.nbr, args[0], args[1]
# else:
# print 'sql: ',fake_cursor.nbr, args[0]
#if not fake_cursor.nbr % 5000:
# ct = []
# for t,c in fake_cursor._tables.items():
# ct.append([c,t])
# ct.sort()
# ct.reverse()
# print 'After %d queries' % (fake_cursor.nbr,)
# for line in ct[:50]:
# print ' %s: %d' % (line[1], line[0])
#if len(args)>1:
# print 'sql: ',fake_cursor.nbr, args[0], args[1]
#else:
# print 'sql: ',fake_cursor.nbr, args[0]
fake_cursor.nbr += 1
return self.obj.execute(*args)
def close(self):
# print "close cursors fno:", [i.fileno() for i in self.db.cursors]
self.obj.close()
# This force the cursor to be freed, and thus, available again. It is
# important because otherwise we can overload the server very easily
# because of a cursor shortage (because cursors are not garbage
# collected as fast as they should). The problem is probably due in
# part because browse records keep a reference to the cursor.
del self.obj
# print "after close cursors fno:", [i.fileno() for i in self.db.cursors]
def __getattr__(self, name):
# print 'LOOK',name
return getattr(self.obj, name)
class fakedb:
def __init__(self, truedb, dbname):
self.truedb = truedb
self.dbname = dbname
def cursor(self):
return fake_cursor(self.truedb, {}, self.dbname)
def decimalize(symb):
if symb is None: return None
if isinstance(symb, float):
return decimal.Decimal('%f' % symb)
return decimal.Decimal(symb)
def db_connect(db_name):
host = tools.config['db_host'] and "host=%s" % tools.config['db_host'] or ''
port = tools.config['db_port'] and "port=%s" % tools.config['db_port'] or ''
name = "dbname=%s" % db_name
user = tools.config['db_user'] and "user=%s" % tools.config['db_user'] or ''
password = tools.config['db_password'] and "password=%s" % tools.config['db_password'] or ''
maxconn = int(tools.config['db_maxconn']) or 64
tdb = psycopg.connect('%s %s %s %s %s' % (host, port, name, user, password), serialize=0, maxconn=maxconn)
fdb = fakedb(tdb, db_name)
return fdb
def init():
#define DATEOID 1082, define TIMESTAMPOID 1114 see pgtypes.h
psycopg.register_type(psycopg.new_type((1082,), "date", lambda x:x))
psycopg.register_type(psycopg.new_type((1083,), "time", lambda x:x))
psycopg.register_type(psycopg.new_type((1114,), "datetime", lambda x:x))
#psycopg.register_type(psycopg.new_type((700, 701, 1700), 'decimal', decimalize))
psycopg.register_type(psycopg.new_type((1082,), "date", lambda x:x))
psycopg.register_type(psycopg.new_type((1083,), "time", lambda x:x))
psycopg.register_type(psycopg.new_type((1114,), "datetime", lambda x:x))
|
Python
| 0.000885
|
@@ -2506,16 +2506,211 @@
*args)%0A%0A
+%09def execute(self, sql, params=()):%0A%09%09def base_string(s):%0A%09%09%09if isinstance(s, unicode):%0A%09%09%09%09return s.encode('utf-8')%0A%09%09%09return s%0A%09%09return self.obj.execute(sql, %5Bbase_string(s) for s in params%5D)%0A%0A
%09def clo
|
c3f14716bc646db003b7852c8f718203ae7a3c3c
|
Use floor division to ensure result is always an integer
|
truecolor.py
|
truecolor.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
if os.getenv('COLORTERM') is None:
raise RuntimeError('Not a true color terminal')
COLORS = {
'white': (127, 127, 127),
'grey': (64, 64, 64),
'black': (0, 0, 0),
'red': (127, 0, 0),
'green': (0, 127, 0),
'blue': (0, 0, 127),
'yellow': (127, 127, 0),
'brown': (127, 64, 0),
'purple': (32, 0, 127)
}
def _f(red_component, green_component, blue_component):
"""Return escaped foreground color sequence"""
return '\x01\x1b[38;2;{};{};{}m\x02'.format(
red_component, green_component, blue_component)
def _b(red_component, green_component, blue_component):
"""Return escaped background color sequence"""
return '\x01\x1b[48;2;{};{};{}m\x02'.format(
red_component, green_component, blue_component)
def _r():
"""Return reset sequence"""
return '\x01\x1b[0m\x02'
def _gamut(component):
return min(int(component), 254)
def bold(color):
"""Return a bolder version of a color tuple."""
return tuple(_gamut(i * 2) for i in color)
def dim(color):
"""Return a dimmer version of a color tuple."""
return tuple(int(i / 2) for i in color)
def hex_to_rgb(hex_string):
"""Return a tuple of red, green and blue components for the color
given as #rrggbb.
"""
return tuple(int(hex_string[i:i + 2], 16)
for i in range(1, len(hex_string), 2))
def rgb_to_hex(red_component=None, green_component=None, blue_component=None):
"""Return color as #rrggbb for the given color tuple or component
values.
"""
if isinstance(red_component, tuple):
red_component, green_component, blue_component = red_component
return '#{:02X}{:02X}{:02X}'.format(
red_component, green_component, blue_component)
def fore_text(txt, foreground=COLORS['white']):
"""Return text string with foreground only set."""
if foreground.startswith('#'):
foreground = hex_to_rgb(foreground)
return '{}{}{}'.format(_f(*foreground), txt, _r())
def color_text(txt, foreground=COLORS['white'], background=COLORS['black']):
"""Return text string with foreground and background set."""
return '{}{}{}{}'.format(_f(*foreground), _b(*background), txt, _r())
def fore_print(txt, foreground=COLORS['white']):
"""Print text string with foreground only set."""
print(fore_text(txt, foreground))
def color_print(txt, foreground=COLORS['white'], background=COLORS['black']):
"""Print text string with foreground and background set."""
print(color_text(txt, foreground, background))
if __name__ == "__main__":
for color_name in COLORS:
color_print(
'{} :: {} :: bright {} on dim {}'.format(
rgb_to_hex(bold(COLORS[color_name])),
rgb_to_hex(dim(COLORS[color_name])),
color_name,
color_name
).ljust(64, ' '),
bold(COLORS[color_name]),
dim(COLORS[color_name])
)
|
Python
| 0.000007
|
@@ -1165,17 +1165,13 @@
le(i
-nt(i
+/
/ 2
-)
for
|
8a6f8cccaca16bf5f6027e75e84772542b2c2231
|
add doc strings to util.py
|
bolt/utils.py
|
bolt/utils.py
|
from numpy import ndarray, asarray, prod
from numpy import any as npany
from collections import Iterable
def tupleize(arg):
"""
Coerce singletons and lists and ndarrays to tuples.
Parameters
----------
args : tuple, list, ndarray, or singleton
Item to coerce
"""
if not isinstance(arg, (tuple, list, ndarray, Iterable)):
return tuple((arg,))
elif isinstance(arg, (list, ndarray)):
return tuple(arg)
elif isinstance(arg, Iterable) and not isinstance(arg, str):
return tuple(arg)
else:
return arg
def argpack(args):
"""
Coerce a list of arguments to a tuple.
Parameters
----------
args : tuple or nested tuple
Pack arguments into a tuple, converting ((,...),) or (,) -> (,)
"""
if isinstance(args[0], (tuple, list, ndarray)):
return tupleize(args[0])
elif isinstance(args[0], Iterable) and not isinstance(args[0], str):
# coerce any iterable into a list before calling tupleize (Python 3 compatibility)
return tupleize(list(args[0]))
else:
return tuple(args)
def inshape(shape, axes):
"""
Checks to see if a list of axes are contained within an array shape.
Parameters
----------
shape : tuple[int]
the shape of a BoltArray
axes : tuple[int]
the axes to check against shape
"""
valid = all([(axis < len(shape)) and (axis >= 0) for axis in axes])
if not valid:
raise ValueError("axes not valid for an ndarray of shape: %s" % str(shape))
def allclose(a, b):
"""
Test that a and b are close and match in shape.
Parameters
----------
a : ndarray
First array to check
b : ndarray
First array to check
"""
from numpy import allclose
return (a.shape == b.shape) and allclose(a, b)
def tuplesort(seq):
"""
Sort a list by a sequence.
Parameters
----------
seq : tuple
Sequence to sort by
"""
return sorted(range(len(seq)), key=seq.__getitem__)
def listify(lst, dim):
"""
Flatten lists of indices and ensure bounded by a known dim.
Parameters
----------
lst : list
List of integer indices
dim : tuple
Bounds for indices
"""
if not all([l.dtype == int for l in lst]):
raise ValueError("indices must be integers")
if npany(asarray(lst) >= dim):
raise ValueError("indices out of bounds for axis with size %s" % dim)
return lst.flatten()
def slicify(slc, dim):
"""
Force a slice to have defined start, stop, and step from a known dim
Parameters
----------
slc : slice or int
The slice to modify, or int to convert to a slice
dim : tuple
Bounds for slices
"""
if isinstance(slc, slice):
if slc.start is None and slc.stop is None and slc.step is None:
return slice(0, dim, 1)
elif slc.start is None and slc.step is None:
return slice(0, slc.stop, 1)
elif slc.stop is None and slc.step is None:
return slice(slc.start, dim, 1)
elif slc.step is None:
return slice(slc.start, slc.stop, 1)
else:
return slc
elif isinstance(slc, int):
return slice(slc, slc+1, 1)
else:
raise ValueError("Type for slice %s not recongized" % type(slc))
def istransposeable(new, old):
new, old = tupleize(new), tupleize(old)
if not len(new) == len(old):
raise ValueError("Axes do not match axes of keys")
if not len(set(new)) == len(set(old)):
raise ValueError("Repeated axes")
if any(n < 0 for n in new) or max(new) > len(old) - 1:
raise ValueError("Invalid axes")
def isreshapeable(new, old):
new, old = tupleize(new), tupleize(old)
if not prod(new) == prod(old):
raise ValueError("Total size of new keys must remain unchanged")
|
Python
| 0.000001
|
@@ -212,33 +212,32 @@
--------%0A arg
-s
: tuple, list,
@@ -3409,16 +3409,291 @@
w, old):
+%0A %22%22%22%0A Check to see if a proposed tuple of axes is a valid permutation%0A of an old set of axes. Checks length, axis repetion, and bounds.%0A%0A Parameters%0A ----------%0A new : tuple%0A tuple of proposed axes%0A%0A old : tuple%0A tuple of old axes%0A %22%22%22
%0A%0A ne
@@ -4048,24 +4048,286 @@
(new, old):%0A
+ %22%22%22%0A Check to see if a proposed tuple of axes is a valid reshaping of%0A the old axes by ensuring that they can be factored.%0A%0A Parameters%0A ----------%0A new : tuple%0A tuple of proposed axes%0A%0A old : tuple%0A tuple of old axes%0A %22%22%22%0A
%0A new
|
3ceed6569b0028c20425a3e179bb7a4889915d9d
|
fix some bugs
|
uctrl/lib.py
|
uctrl/lib.py
|
#!/usr/bin/env python
import json
import logging
import os
import sys
import copy
from collections import namedtuple
from netaddr import IPNetwork
isdx_folder = "iSDX"
home = os.path.expanduser("~/")
isdx_path = home + isdx_folder
if isdx_path not in sys.path:
sys.path.append(isdx_path)
#import util.log
class Config(object):
def __init__(self, config_file):
# Connections between core and edge
# TODO: do I really need all this information?
self.dpids = {}
self.dpid_2_name = {}
self.core_edge = {}
self.edges = {}
self.edge_peers = {}
self.edge_to_edge = {}
self.edge_core = {}
self.participants = {}
self.refmon = None
self.flanc_auth = None
self.route_server = None
self.arp_proxy = None
self.vnhs = None
config = json.load(open(config_file, 'r'))
if "RefMon Server" in config:
self.refmon = config["RefMon Server"]
if "Flanc Auth Info" in config:
self.flanc_auth = config["Flanc Auth Info"]
if "Route Server" in config:
route_server = config["Route Server"]
self.route_server = RS(route_server['Port'], route_server["MAC"], route_server["IP"], route_server["switch"], route_server["ASN"])
if "ARP Proxy" in config:
arp_proxy = config["ARP Proxy"]
self.arp_proxy = Port(arp_proxy['Port'], arp_proxy["MAC"], arp_proxy["IP"], arp_proxy["switch"])
if "Participants" in config:
self.participants = config["Participants"]
if "RefMon Settings" in config:
if "fabric options" in config["RefMon Settings"]:
self.datapaths = config["RefMon Settings"]["fabric options"]["dpids"]
self.edges = {x:datapaths[x] for x in datapaths if x.find('edge') == 0}
self.cores = {x:datapaths[x] for x in datapaths if x.find('core') == 0}
if "dpids" in config["RefMon Settings"]["fabric options"]:
self.dpids = config["RefMon Settings"]["fabric options"]["dpids"]
for k,v in self.dpids.iteritems():
self.dpid_2_name[v] = k
if "fabric connections" in config["RefMon Settings"]:
datapaths_conns = config["RefMon Settings"]["fabric connections"]
for dp in edges:
self.edge_peers.setdefault(self.dpids[dp], {})
if dp in datapaths_conns:
edge = edges[dp]
self.parse_edge_core(edge, cores, datapaths_conns[dp])
self.parse_edge_to_edge(edge, edges, datapaths_conns[dp])
self.parse_edge_peers(datapaths_conns)
for dp in cores:
if dp in datapaths_conns:
core = cores[dp]
self.parse_core_edge(core, edges, datapaths_conns[dp])
# ARP proxy and Route Server are not peers but forwarding in
# umbrella is the same for every node connected to the
# edges of the fabric
self.edge_peers[self.dpids[self.arp_proxy.switch]][self.arp_proxy] = self.arp_proxy.id
self.edge_peers[self.dpids[self.route_server.switch]][self.route_server] = self.route_server.id
if "VNHs" in config:
self.vnhs = IPNetwork(config["VNHs"])
def parse_edge_peers(self, dp_conns):
for dp in dp_conns:
links = dp_conns[dp]
for p in links:
ports = links[p]
if isinstance(ports, int):
ports = [ports]
i = 0
if p in self.participants:
for port in self.participants[p]["Ports"]:
if port["switch"] == dp:
port = Port(port['Id'], port["MAC"], port["IP"], port["switch"])
dpid = self.dpids[dp]
self.edge_peers[dpid][port] = ports[i]
i += 1
#print self.edge_peers
# Builds a list with:
# edge dp id - core dp id -> port
def parse_edge_core(self, edge, cores, dp_conns):
self.edge_core.setdefault(edge, {})
for dp in dp_conns:
if dp in cores:
dpid = cores[dp]
self.edge_core[edge][dpid] = dp_conns[dp]
def parse_core_edge(self, core, edges, dp_conns):
self.core_edge.setdefault(core, {})
for dp in dp_conns:
if dp in edges:
dpid = edges[dp]
self.core_edge[core][dpid] = dp_conns[dp]
def parse_edge_to_edge(self, edge, edges, dp_conns):
self.edge_to_edge.setdefault(edge, {})
for dp in dp_conns:
if dp in edges:
self.edge_to_edge[edge][dp] = dp_conns[dp]
Port = namedtuple('Port', "id mac ip switch")
RS = namedtuple('RS', "id mac ip switch asn")
|
Python
| 0.000008
|
@@ -1703,21 +1703,16 @@
-self.
datapath
@@ -1868,29 +1868,24 @@
-self.
cores = %7Bx:d
|
a79fb72dc852fb7e47a78d5d444290f0033e84bd
|
Make error available to context processors through flask.g
|
udata/app.py
|
udata/app.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import bson
import datetime
import logging
import os
import types
from os.path import abspath, join, dirname, isfile, exists
from flask import Flask, abort, send_from_directory, json, Blueprint as BaseBlueprint
from flask.ext.cache import Cache
from speaklater import is_lazy_string
from werkzeug.contrib.fixers import ProxyFix
APP_NAME = __name__.split('.')[0]
ROOT_DIR = abspath(join(dirname(__file__)))
log = logging.getLogger(__name__)
cache = Cache()
class UDataApp(Flask):
debug_log_format = '[%(levelname)s][%(name)s:%(lineno)d] %(message)s'
def send_static_file(self, filename):
'''
Override default static handling:
- raises 404 if not debug
- handle static aliases
'''
if not self.debug:
self.logger.error('Static files are only served in debug')
abort(404)
cache_timeout = self.get_send_file_max_age(filename)
# Default behavior
if isfile(join(self.static_folder, filename)):
return send_from_directory(self.static_folder, filename, cache_timeout=cache_timeout)
# Handle aliases
for prefix, directory in self.config.get('STATIC_DIRS', tuple()):
if filename.startswith(prefix):
real_filename = filename[len(prefix):]
if real_filename.startswith('/'):
real_filename = real_filename[1:]
if isfile(join(directory, real_filename)):
return send_from_directory(directory, real_filename, cache_timeout=cache_timeout)
abort(404)
class Blueprint(BaseBlueprint):
'''A blueprint allowing to decorate class too'''
def route(self, rule, **options):
def wrapper(func_or_cls):
endpoint = str(options.pop('endpoint', func_or_cls.__name__))
if isinstance(func_or_cls, types.FunctionType):
self.add_url_rule(rule, endpoint, func_or_cls, **options)
else:
self.add_url_rule(rule, view_func=func_or_cls.as_view(endpoint), **options)
return func_or_cls
return wrapper
class UDataJsonEncoder(json.JSONEncoder):
'''
A JSONEncoder subclass to encode unsupported types:
- ObjectId
- datetime
- lazy strings
Handle special serialize() method and _data attribute.
Ensure an app context is always present.
'''
def default(self, obj):
if is_lazy_string(obj):
return unicode(obj)
elif isinstance(obj, bson.objectid.ObjectId):
return str(obj)
elif isinstance(obj, datetime.datetime):
return obj.isoformat()
elif hasattr(obj, 'serialize'):
return obj.serialize()
elif hasattr(obj, '_data'): # Serialize Raw data for Document and EmbeddedDocument
return obj._data
return super(UDataJsonEncoder, self).default(obj)
def create_app(config='udata.settings.Defaults'):
'''Factory for a minimal application'''
app = UDataApp(APP_NAME)
app.config.from_object(config)
app.config.from_envvar('UDATA_SETTINGS', silent=True)
custom_settings = join(os.getcwd(), 'udata.cfg')
if exists(custom_settings):
app.config.from_pyfile(custom_settings)
app.json_encoder = UDataJsonEncoder
app.debug = app.config['DEBUG'] and not app.config['TESTING']
app.wsgi_app = ProxyFix(app.wsgi_app)
init_logging(app)
register_extensions(app)
return app
def standalone(app):
'''Factory for an all in one application'''
from udata import admin, api, core, frontend
core.init_app(app)
frontend.init_app(app)
api.init_app(app)
admin.init_app(app)
from udata import ext
ext.init_app(app)
return app
def init_logging(app):
log_level = logging.DEBUG if app.debug else logging.WARNING
app.logger.setLevel(log_level)
loggers = [
logging.getLogger('elasticsearch'),
logging.getLogger('requests')
]
for name in app.config['PLUGINS']:
logging.getLogger('udata_{0}'.format(name)).setLevel(log_level)
for logger in loggers:
logger.setLevel(logging.WARNING)
return app
def register_extensions(app):
from udata import models, routing, tasks, mail, i18n, auth
i18n.init_app(app)
models.init_app(app)
routing.init_app(app)
auth.init_app(app)
cache.init_app(app)
tasks.init_app(app)
mail.init_app(app)
from udata import search
search.init_app(app)
return app
|
Python
| 0.000001
|
@@ -216,16 +216,19 @@
, abort,
+ g,
send_fr
@@ -1645,16 +1645,241 @@
t(404)%0A%0A
+ def handle_http_exception(self, e):%0A # Make exception/HTTPError available for context processors%0A if 'error' not in g:%0A g.error = e%0A return super(UDataApp, self).handle_http_exception(e)%0A%0A%0A
%0Aclass B
|
b2bc2f50c9866e758c242a6c8b57a86153cc418a
|
bump version
|
infi/conf/__version__.py
|
infi/conf/__version__.py
|
__version__ = "0.0.10"
|
Python
| 0
|
@@ -13,11 +13,11 @@
= %220.0.1
-0
+1
%22%0A
|
24033ffcabc9e88d160a4c16503dcba9440c03fa
|
Copy full source directory (including sub directories) when compiling
|
publisher/build_paper.py
|
publisher/build_paper.py
|
#!/usr/bin/env python
from __future__ import print_function
import docutils.core as dc
import os.path
import sys
import re
import tempfile
import glob
import shutil
from writer import writer
from conf import papers_dir, output_dir
import options
header = r'''
.. role:: ref
.. role:: label
.. role:: cite
.. raw:: latex
\InputIfFileExists{page_numbers.tex}{}{}
\newcommand*{\docutilsroleref}{\ref}
\newcommand*{\docutilsrolelabel}{\label}
\providecommand*\DUrolecite[1]{\cite{#1}}
.. |---| unicode:: U+2014 .. em dash, trimming surrounding whitespace
:trim:
.. |--| unicode:: U+2013 .. en dash
:trim:
'''
def rst2tex(in_path, out_path):
options.mkdir_p(out_path)
for file in glob.glob(os.path.join(in_path, '*')):
shutil.copy(file, out_path)
base_dir = os.path.dirname(__file__)
scipy_status = os.path.join(base_dir, '_static/status.sty')
shutil.copy(scipy_status, out_path)
scipy_style = os.path.join(base_dir, '_static/scipy.sty')
shutil.copy(scipy_style, out_path)
preamble = r'''\usepackage{scipy}'''
# Add the LaTeX commands required by Pygments to do syntax highlighting
pygments = None
try:
import pygments
except ImportError:
import warnings
warnings.warn(RuntimeWarning('Could not import Pygments. '
'Syntax highlighting will fail.'))
if pygments:
from pygments.formatters import LatexFormatter
from writer.sphinx_highlight import SphinxStyle
preamble += LatexFormatter(style=SphinxStyle).get_style_defs()
settings = {'documentclass': 'IEEEtran',
'use_verbatim_when_possible': True,
'use_latex_citations': True,
'latex_preamble': preamble,
'documentoptions': 'letterpaper,compsoc,twoside',
'halt_level': 3, # 2: warn; 3: error; 4: severe
}
try:
rst, = glob.glob(os.path.join(in_path, '*.rst'))
except ValueError:
raise RuntimeError("Found more than one input .rst--not sure which "
"one to use.")
content = header + open(rst, 'r').read()
tex = dc.publish_string(source=content, writer=writer,
settings_overrides=settings)
stats_file = os.path.join(out_path, 'paper_stats.json')
d = options.cfg2dict(stats_file)
try:
d.update(writer.document.stats)
options.dict2cfg(d, stats_file)
except AttributeError:
print("Error: no paper configuration found")
tex_file = os.path.join(out_path, 'paper.tex')
with open(tex_file, 'w') as f:
f.write(tex)
def tex2pdf(out_path):
import subprocess
command_line = 'pdflatex -halt-on-error paper.tex'
# -- dummy tempfile is a hacky way to prevent pdflatex
# from asking for any missing files via stdin prompts,
# which mess up our build process.
dummy = tempfile.TemporaryFile()
run = subprocess.Popen(command_line, shell=True,
stdin=dummy,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=out_path,
)
out, err = run.communicate()
if "Fatal" in out or run.returncode:
print("PDFLaTeX error output:")
print("=" * 80)
print(out)
print("=" * 80)
if err:
print(err)
print("=" * 80)
# Errors, exit early
return out
# Compile BiBTeX if available
stats_file = os.path.join(out_path, 'paper_stats.json')
d = options.cfg2dict(stats_file)
bib_file = os.path.join(out_path, d["bibliography"] + '.bib')
if os.path.exists(bib_file):
bibtex_cmd = 'bibtex paper && ' + command_line
run = subprocess.Popen(bibtex_cmd, shell=True,
stdin=dummy,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=out_path,
)
out, err = run.communicate()
if err:
print("Error compiling BiBTeX")
return out
# -- returncode always 0, have to check output for error
if not run.returncode:
# -- pdflatex has to run twice to actually work
run = subprocess.Popen(command_line, shell=True,
stdin=dummy,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=out_path,
)
out, err = run.communicate()
return out
def page_count(pdflatex_stdout, paper_dir):
"""
Parse pdflatex output for paper count, and store in a .ini file.
"""
if pdflatex_stdout is None:
print("*** WARNING: PDFLaTeX failed to generate output.")
return
regexp = re.compile('Output written on paper.pdf \((\d+) pages')
cfgname = os.path.join(paper_dir, 'paper_stats.json')
d = options.cfg2dict(cfgname)
for line in pdflatex_stdout.splitlines():
m = regexp.match(line)
if m:
pages = m.groups()[0]
d.update({'pages': int(pages)})
break
options.dict2cfg(d, cfgname)
def build_paper(paper_id):
out_path = os.path.join(output_dir, paper_id)
in_path = os.path.join(papers_dir, paper_id)
print("Building:", paper_id)
rst2tex(in_path, out_path)
pdflatex_stdout = tex2pdf(out_path)
page_count(pdflatex_stdout, out_path)
if __name__ == "__main__":
if len(sys.argv) != 2:
print("Usage: build_paper.py paper_directory")
sys.exit(-1)
in_path = os.path.normpath(sys.argv[1])
if not os.path.isdir(in_path):
print("Cannot open directory: %s" % in_path)
sys.exit(-1)
paper_id = os.path.basename(in_path)
build_paper(paper_id)
|
Python
| 0
|
@@ -158,16 +158,47 @@
t shutil
+%0Afrom distutils import dir_util
%0A%0Afrom w
@@ -711,113 +711,34 @@
-options.mkdir_p(out_path)%0A for file in glob.glob(os.path.join(in_path, '*')):%0A shutil.copy(file
+dir_util.copy_tree(in_path
, ou
|
4b555b9d42fe95dbd93e33755b0f690b4773d8f8
|
Update redmonster.py
|
py/desispec/zfind/redmonster.py
|
py/desispec/zfind/redmonster.py
|
"""
desispec.zfind.redmonster
=========================
Classes for use with the redmonster package.
"""
from __future__ import division, absolute_import
import os
import numpy as np
from desispec.zfind import ZfindBase
from desispec.interpolation import resample_flux
from desispec.log import get_logger
class RedMonsterZfind(ZfindBase):
"""Class documentation goes here.
"""
def __init__(self, wave, flux, ivar, R=None, dloglam=1e-4):
"""Uses Redmonster to classify and find redshifts.
See :class:`desispec.zfind.zfind.ZfindBase` class for inputs/outputs.
TODO: document redmonster specific output variables
"""
try:
from redmonster.physics.zfinder import Zfinder
from redmonster.physics.zfitter import Zfitter
from redmonster.physics.zpicker import Zpicker
except ImportError:
get_logger().error("You are attempting to use RedMonster, but it is not available for import!")
raise
#- RedMonster templates don't quite go far enough into the blue,
#- so chop off some data
ii, = np.where(wave>3965)
wave = wave[ii]
flux = flux[:, ii]
ivar = ivar[:, ii]
#- Resample inputs to a loglam grid
start = round(np.log10(wave[0]), 4)+dloglam
stop = round(np.log10(wave[-1]), 4)
nwave = int((stop-start)/dloglam)
loglam = start + np.arange(nwave)*dloglam
nspec = flux.shape[0]
self.flux = np.empty((nspec, nwave))
self.ivar = np.empty((nspec, nwave))
for i in range(nspec):
self.flux[i], self.ivar[i] = resample_flux(10**loglam, wave, flux[i], ivar[i])
self.dloglam = dloglam
self.loglam = loglam
self.wave = 10**loglam
self.nwave = nwave
self.nspec = nspec
#- list of (templatename, zmin, zmax) to fix
self.template_dir = os.getenv('REDMONSTER')+'/templates/'
self.templates = [
('ndArch-spEigenStar-55734.fits', -0.005, 0.005),
('ndArch-ssp_em_galaxy-v000.fits', 0.6, 1.6),
# ('ndArch-ssp_em_galaxy_quickdesi-v000.fits', 0.6, 1.6),
('ndArch-QSO-V003.fits', 0.0, 3.5),
]
#- Find and refine best redshift per template
self.zfinders = list()
self.zfitters = list()
for template, zmin, zmax in self.templates:
zfind = Zfinder(self.template_dir+template, npoly=2, zmin=zmin, zmax=zmax)
zfind.zchi2(self.flux, self.loglam, self.ivar, npixstep=2)
zfit = Zfitter(zfind.zchi2arr, zfind.zbase)
zfit.z_refine()
self.zfinders.append(zfind)
self.zfitters.append(zfit)
#- Create wrapper object needed for zpicker
specobj = _RedMonsterSpecObj(self.wave, self.flux, self.ivar)
flags = list()
for i in range(len(self.zfitters)):
flags.append(self.zfinders[i].zwarning.astype(int) | \
self.zfitters[i].zwarning.astype(int))
#- Zpicker
self.zpicker = Zpicker(specobj,
self.zfinders[0], self.zfitters[0], flags[0],
self.zfinders[1], self.zfitters[1], flags[1],
self.zfinders[2], self.zfitters[2], flags[2])
#- Fill in outputs
self.type = np.asarray(self.zpicker.type, dtype='S20')
self.subtype = np.asarray(self.zpicker.subtype, dtype='S20')
self.z = np.array([self.zpicker.z[i,0] for i in range(nspec)])
self.zerr = np.array([self.zpicker.z_err[i,0] for i in range(nspec)])
self.zwarn = np.array([self.zpicker.zwarning[i].astype(int) for i in range(nspec)])
self.model = self.zpicker.models
#- This is a container class needed by Redmonster zpicker
class _RedMonsterSpecObj(object):
def __init__(self, wave, flux, ivar, dof=None):
"""
Create an object with .wave, .flux, .ivar, and .dof attributes;
these are needed by RedMonster as input
"""
nspec, nwave = flux.shape
self.wave = wave
self.flux = flux
self.ivar = ivar
if dof is None:
self.dof = np.ones(nspec) * nwave
else:
self.dof = dof
#- Leftover BOSS-isms
self.plate = self.mjd = self.fiberid = self.npix = 0
self.hdr = None
self.plugmap = None
|
Python
| 0
|
@@ -832,16 +832,17 @@
.zpicker
+2
import
@@ -2648,16 +2648,17 @@
z_refine
+2
()%0A%0A
@@ -3112,28 +3112,16 @@
specobj,
-%0A
self.zf
@@ -3126,19 +3126,16 @@
zfinders
-%5B0%5D
, self.z
@@ -3145,137 +3145,15 @@
ters
-%5B0%5D
, flags
-%5B0%5D,%0A self.zfinders%5B1%5D, self.zfitters%5B1%5D, flags%5B1%5D,%0A self.zfinders%5B2%5D, self.zfitters%5B2%5D, flags%5B2%5D
)%0A%0A
@@ -3222,37 +3222,24 @@
zpicker.type
-, dtype='S20'
)%0A se
@@ -3286,21 +3286,8 @@
type
-, dtype='S20'
)%0A
@@ -3327,17 +3327,18 @@
cker.z%5Bi
-,
+%5D%5B
0%5D for i
@@ -3406,17 +3406,18 @@
.z_err%5Bi
-,
+%5D%5B
0%5D for i
@@ -3567,16 +3567,21 @@
r.models
+%5B:,0%5D
%0A%0A%0A#- Th
|
2f8a4a8f36f8e2360f8fa629c750e381d1d7b79e
|
Set tooltips for the mods in the QListView's.
|
python/moodloader/moodloader.py
|
python/moodloader/moodloader.py
|
#! /usr/bin/python3
import re
import os
import sys
import shutil
from PyQt4 import QtGui, QtCore
from moodloader_ui import MoodLoader
class MainWindow(MoodLoader):
"""
Subclass the GUI for the main window. We implement '__init__()' here, and
also set up connections for the widgets.
"""
def __init__(self):
### Create some system variables ###
self.config_dir = self.get_config_path()
self.open_dialog_dir = os.path.expanduser("~")
super(MoodLoader, self).__init__()
self.initUI()
### Set up connections ###
self.install_map_mod_button.clicked.connect(lambda: self.install_mod("/maps/"))
self.install_cam_mod_button.clicked.connect(lambda: self.install_mod("/campaign/"))
self.install_global_mod_button.clicked.connect(lambda: self.install_mod("/global/"))
self.populate_listviews()
def get_config_path(self):
"""
Get the path of the config folder of the latest version of WZ on the
users computer.
"""
matching_dir_versions = [float(re.findall(r'\d+\.\d+', directory)[0])
for directory in os.listdir(os.path.expanduser("~"))
if re.match(".warzone2100-\d+\.\d+", directory)]
if len(matching_dir_versions) >= 1:
return(os.path.expanduser("~") + "/.warzone2100-" + str(max(matching_dir_versions)))
else:
self.statusbar.showMessage("No config folder found!")
return("")
def install_mod(self, mod_type):
"""
Install a map to the /.warzone2100-xx/maps folder.
Note that even the name of the argument is 'mod_type', it's actually
the folder name the map is to be installed into (i.e. '/maps/' for a map mod).
"""
mod_path = QtGui.QFileDialog.getOpenFileName(self, "Select Mod", self.open_dialog_dir, "WZ Mods (*.wz);; All files (*.*)")
mod_install_path = self.config_dir + mod_type
mod_name = os.path.basename(mod_path)
# Check that all cases are covered before installing
if not mod_path:
return
elif not os.path.isdir(mod_install_path):
os.mkdir(mod_install_path)
elif os.path.isfile(mod_install_path + mod_name):
self.statusbar.showMessage("Mod already installed!")
return
shutil.copy(mod_path, mod_install_path)
self.statusbar.showMessage("Map installed!")
self.open_dialog_dir = os.path.dirname(mod_path) # Note that we reset 'self.open_dialog_dir' to the last used folder
def populate_listviews(self):
"""
Gets a list of map, campaign, and global mods, and populates their
respective QListView's with them.
"""
# We need this to elide the text
mod_size = QtCore.QSize(50, 15)
if os.path.isdir(self.config_dir + "/maps/"):
map_mods = [mod for mod in os.listdir(self.config_dir + "/maps/")
if os.path.isfile(self.config_dir + "/maps/" + mod)]
for mod in map_mods:
mod_item = QtGui.QStandardItem(mod)
mod_item.setSizeHint(mod_size)
mod_item.setEditable(False)
self.map_data_model.appendRow(mod_item)
if os.path.isdir(self.config_dir + "/campaign"):
cam_mods = [mod for mod in os.listdir(self.config_dir + "/campaign/")
if os.path.isfile(self.config_dir + "/campaign/" + mod)]
for mod in cam_mods:
mod_item = QtGui.QStandardItem(mod)
mod_item.setSizeHint(mod_size)
mod_item.setEditable(False)
self.cam_data_model.appendRow(mod_item)
if os.path.isdir(self.config_dir + "/global/"):
global_mods = [mod for mod in os.listdir(self.config_dir + "/global/")
if os.path.isfile(self.config_dir + "/global/" + mod)]
for mod in global_mods:
mod_item = QtGui.QStandardItem(mod)
mod_item.setSizeHint(mod_size)
mod_item.setEditable(False)
self.global_data_model.appendRow(mod_item)
def main():
app = QtGui.QApplication(sys.argv)
window = MainWindow()
window.show()
sys.exit(app.exec_())
if __name__ == "__main__":
main()
|
Python
| 0
|
@@ -3205,32 +3205,73 @@
eHint(mod_size)%0A
+ mod_item.setToolTip(mod)%0A
@@ -3290,32 +3290,32 @@
Editable(False)%0A
-
@@ -3699,32 +3699,73 @@
eHint(mod_size)%0A
+ mod_item.setToolTip(mod)%0A
@@ -4150,32 +4150,32 @@
andardItem(mod)%0A
-
@@ -4197,32 +4197,73 @@
eHint(mod_size)%0A
+ mod_item.setToolTip(mod)%0A
|
583a6319230b89a5f19c26e5bab83e28a5a4792e
|
Fix the but There is an error (cannot concatenate str and int objects) when the user does not specify the inputs.
|
pywps/processes/dummyprocess.py
|
pywps/processes/dummyprocess.py
|
"""
DummyProcess to check the WPS structure
Author: Jorge de Jesus (jorge.de-jesus@jrc.it) as suggested by Kor de Jong
"""
from pywps.Process import WPSProcess
class Process(WPSProcess):
def __init__(self):
# init process
WPSProcess.__init__(self,
identifier = "dummyprocess", # must be same, as filename
title="Dummy Process",
version = "0.1",
storeSupported = "true",
statusSupported = "true",
abstract="The Dummy process is used for testing the WPS structure. The process will accept 2 input numbers and will return the XML result with an add one and subtract one operation",
grassLocation =False)
self.Input1 = self.addLiteralInput(identifier = "input1",
title = "Input1 number",
default="100")
self.Input2= self.addLiteralInput(identifier="input2",
title="Input2 number",
default="200")
self.Output1=self.addLiteralOutput(identifier="output1",
title="Output1 add 1 result")
self.Output2=self.addLiteralOutput(identifier="output2",title="Output2 subtract 1 result" )
def execute(self):
self.Output1.setValue(self.Input1.getValue()+1)
self.Output2.setValue(self.Input1.getValue()-1)
return
|
Python
| 0.000002
|
@@ -72,23 +72,23 @@
rge.
-de-
jesus@
-jrc.it
+gmail.com
) as
@@ -153,16 +153,31 @@
SProcess
+%0Aimport types
@@ -906,32 +906,96 @@
%22Input1 number%22,
+%0A type=types.IntType,
%0A
@@ -1169,16 +1169,79 @@
number%22,
+%0A type=types.IntType,
%0A
@@ -1624,32 +1624,36 @@
utput1.setValue(
+int(
self.Input1.getV
@@ -1658,16 +1658,17 @@
tValue()
+)
+1)%0A
@@ -1693,16 +1693,20 @@
etValue(
+int(
self.Inp
@@ -1719,16 +1719,17 @@
tValue()
+)
-1)%0A
|
cc7ffbe88b7b71b32e036be6080f03a353fdbafe
|
Revert to using get_task_logger
|
rapidsms/router/celery/tasks.py
|
rapidsms/router/celery/tasks.py
|
import celery
import logging
from rapidsms.errors import MessageSendingError
logger = logging.getLogger(__name__)
@celery.task
def receive_async(text, connection_id, message_id, fields):
"""Task used to send inbound message through router phases."""
from rapidsms.models import Connection
from rapidsms.router import get_router
logger.debug('receive_async: %s' % text)
router = get_router()
# reconstruct incoming message
connection = Connection.objects.select_related().get(pk=connection_id)
message = router.new_incoming_message(text=text, connections=[connection],
id_=message_id, fields=fields)
try:
# call process_incoming directly to skip receive_incoming
router.process_incoming(message)
except Exception:
logger.exception("Exception processing incoming message")
raise
@celery.task
def send_async(backend_name, id_, text, identities, context):
"""Task used to send outgoing messages to backends."""
logger.debug('send_async: %s' % text)
from rapidsms.router import get_router
router = get_router()
try:
router.send_to_backend(backend_name=backend_name, id_=id_, text=text,
identities=identities, context=context)
except MessageSendingError:
# This exception has already been logged in send_to_backend.
# We'll simply pass here and not re-raise or log the exception again.
pass
|
Python
| 0
|
@@ -11,22 +11,52 @@
ery%0A
-import
+from celery.utils.log import get_task_
logg
-ing
+er
%0Afro
@@ -115,20 +115,18 @@
r =
-logging.getL
+get_task_l
ogge
|
c231d24d9f51757adba183eb02d968d09ce2df47
|
Fix missing slash in when displaying /info_x
|
VocaBot/contentparser.py
|
VocaBot/contentparser.py
|
import math
from collections import defaultdict
from constants import Context, VOCADB_BASE_URL
from i18n import _
from util import non_phone
from vocadb import voca_db
# I'm not exactly proud of this module's code.. but it does the job.
def names_text(song):
if len(song['names']) > 1:
names = _('<b>Additional names:</b>\n')
for name in song['names']:
if name['value'] != song['name']:
names += name['value'] + '\n'
return names
return _('No additional names found\n')
def artists_text(entry, inline):
if len(entry['artists']) > 0:
artists = _('<b>Artists:</b>\n')
for artist in entry['artists']:
roles = []
for role in artist['effectiveRoles'].split(', '):
if role == 'Default':
roles.append(artist['categories'][:2])
else:
roles.append(role[:2])
artists += _('[<code>{roles}</code>] '
'{artist_name}').format(roles=','.join(roles), artist_name=artist['name'])
if not inline:
try:
artists += ' /ar_{}'.format(artist['artist']['id'])
except KeyError:
pass
artists += '\n'
return artists
return _('No artists found\n')
def vocadb_url(entry, song=False, artist=False, album=False):
return '{base_url}{type}/{id}'.format(base_url=VOCADB_BASE_URL,
type='S' if song else 'Ar' if artist else 'Al',
id=entry['id'])
def content_parser(entries, info=False, inline=False, context=None, bot_name='', counts=None):
text = ''
if entries and len(entries) > 0:
if info:
entries = [entries]
for i, entry in enumerate(entries):
# Check if part of a disc listing
track_number = None
if 'trackNumber' in entry:
track_number = entry['trackNumber']
if 'song' in entry:
entry = entry['song']
song, album, artist = False, False, False
if 'songType' in entry:
song = True
if 'artistType' in entry:
artist = True
if 'discType' in entry:
album = True
if track_number is None or i != 0:
text += '\n\n'
try:
if context == Context.related:
if i == 0:
text += _('<i>Matching artist</i>')
elif i == 1:
text += _('<i>Matching likes</i>')
elif i == 2:
text += _('<i>Matching tags</i>')
text += '\n'
if song:
if track_number is None:
text += _('🎵 <b>{name}</b>\n'
'{artist}\n{type}').format(name=entry['name'],
artist=entry['artistString'],
type=voca_db.trans(entry['songType'], song=True))
if 'favoritedTimes' in entry:
text += ' ' + _('with {num} favourites').format(num=entry['favoritedTimes'])
else:
text += _('<code>{track_number})</code> <b>{name}</b>\n{artist}').format(
track_number=track_number,
name=entry['name'],
artist=entry['artistString'])
if artist:
text += _('🎤 <b>{name}</b>\n'
'{type}').format(name=entry['name'],
type=voca_db.trans(entry['artistType'], artist=True))
if album:
text += _('💿 <b>{name}</b>\n'
'{artist}\n{type}').format(name=entry['name'],
artist=entry['artistString'],
type=voca_db.trans(entry['discType'], album=True))
if (not (song or artist or album)) and track_number:
text += _('<code>{track_number})</code> <b>{name}</b>').format(
track_number=track_number,
name=entry['name'])
else:
link = ''
if song:
link = 'info_{}'.format(entry['id'])
elif artist:
link = 'ar_{}'.format(entry['id'])
elif album:
link = 'al_{}'.format(entry['id'])
if info:
text += '\n\n'
text += names_text(entry)
text += '\n'
if song:
if not inline:
text += _('<b>Derived songs:</b>') + ' /dev_{}\n'.format(entry['id'])
text += _('<b>Related songs:</b>') + ' /rel_{}\n'.format(entry['id'])
text += _('<b>Featured on albums:</b>') + ' /albys_{}\n'.format(entry['id'])
if 'originalVersionId' in entry:
text += '\n'
text += _('<b>Original song:</b>') + ' /info_{}\n'.format(
entry['originalVersionId'])
text += '\n'
text += artists_text(entry, inline)
if 'pvServices' in entry:
if entry['pvServices'] == 'Nothing':
text += _('\nNo promotional videos found')
if artist:
if not inline:
if 'baseVoicebank' in entry:
text += _('<b>Base voicebank:</b>') + ' /a_{}\n\n'.format(
entry['baseVoicebank']['id'])
if album:
if 'releaseDate' in entry:
if not entry['releaseDate']['isEmpty']:
# i18n? .-.
text += _('Release date: {date}\n\n').format(date=entry['releaseDate']['formatted'])
else:
if not inline:
text += _('\nInfo:') + ' ' + link
if inline and bot_name:
text += _('<a href="https://telegram.me/{bot_name}?start=cmd%3D{link}">'
'Click for more features.</a>').format(bot_name=bot_name, link=link)
except OSError:
pass
if counts:
text += _("\n\nFound {found_num} total. "
"Viewing page {cur_page}/{max_page}").format(found_num=non_phone(counts[1]),
cur_page=non_phone(math.ceil((counts[0] + 3) / 3)),
max_page=non_phone(math.ceil(counts[1] / 3)))
else:
if context == Context.search:
text += _("I couldn't find what you were looking for. Did you perhaps misspell it? "
"(tip: you can edit your message.)")
elif context == Context.derived:
text += _('No derived songs found.')
elif context == Context.related:
text += _('No related songs found.')
elif context == Context.albums_by_song:
text += _('Not featured on any albums.')
else:
text += _('Not found.')
return text
def album_tracks(album, inline):
text = _('<b>Tracks')
if not inline:
text += ' ' + _('on {album_name} by {album_artist}</b>\n').format(album_name=album['name'],
album_artist=album['artistString'])
else:
text += ':</b>\n'
discs = defaultdict(list)
for track in album['tracks']:
discs[track['discNumber']].append(track)
for i, (disc_number, tracks) in enumerate(discs.items()):
if len(discs) > 1:
name = ''
if not i == 0:
text += '\n\n'
if 'discs' in album and album['discs']:
try:
disc = [disc for disc in album['discs'] if disc['discNumber'] == disc_number][0]
# Can't find an album to test this on:
if 'name' in disc:
name = disc['name']
if 'mediaType' in disc:
text += ('💿' if disc['mediaType'] == 'Audio' else '🎞') + ' '
except IndexError:
pass
text += _('<i>Disc {disc_number}').format(disc_number=disc_number)
if name:
text += ' ({})'.format(name)
text += ':</i>\n'
text += content_parser(tracks, inline=inline)
return text
|
Python
| 0.000006
|
@@ -6732,16 +6732,17 @@
:') + '
+/
' + link
|
74b8aeba66a77b34eacfb1bcaac3f66caa0d8dd7
|
Fix Python script
|
CI/runTests.py
|
CI/runTests.py
|
import sys
import os
from OMPython import OMCSession
class CITests():
'''
Python class used to run CI tests
'''
def __init__(self, rootPath):
'''
Constructor starts omc and loads MSL
'''
self.rootPath = rootPath
self.omc = OMCSession()
os.chdir(self.rootPath)
self.omc.sendExpression("loadModel(Modelica)")
def loadLib(self, libPath):
# Attempt to load the library
if self.omc.sendExpression('loadFile("%s")' % (self.rootPath + self.libPath)):
print "%s is successfully loaded." % libPath
else:
raise Exception('%s was not loaded! Check the library path.') % libPath
def runCheck(self, libName, libPath):
# Load library
loadLib(libPath)
'''
Checks all of the models in the library and returns number of faild checks
'''
# Get the list of all classes in OpenIPSL
test_list = self.omc.sendExpression('getClassNames(%s,recursive=true)' % libName)
nFailed = 0
nPassed = 0
# Run the check for all classes that are model and print result msgs
print "============================ Checking Models ============================="
for test in test_list:
if self.omc.sendExpression("isModel(%s)" % (test)): # Check if a class is a model
passMsg = self.omc.sendExpression("checkModel(%s)" % (test))
failMsg = self.omc.sendExpression("getErrorString()")
if "completed successfully." in passMsg:
# print passMsg
nPassed += 1
else:
print failMsg
nFailed += 1
# Print a check summary
print "============================= Check Summary =============================="
print "Number of models that passed the check is: %s" % nPassed
print "Number of models that failed the check is: %s" % nFailed
return (nFailed == 0)
# Instance of CITests
ci = CITests("/OpenIPSL")
# Run Check on OpenIPSL
passLib = ci.runCheck("OpenIPSL","/OpenIPSL/package.mo")
# Libs in Application Examples
passAppEx = 0
appExamples = {
"KundurSMIB":"/Application\ Examples/KundurSMIB/package.mo",
"TwoAreas":"/Application\ Examples/TwoAreas/package.mo",
"SevenBus":"/Application\ Examples/SevenBus/package.mo",
"IEEE9":"/Application\ Examples/IEEE9/package.mo",
"IEEE14":"/Application\ Examples/IEEE14/package.mo",
"AKD":"/Application\ Examples/AKD/package.mo",
"PSAT_Systems":"/Application\ Examples/PSAT_Systems/package.mo",
"N44":"/Application\ Examples/N44/package.mo",
}
for package in appExamples.keys():
passAppEx = passAppEx + ci.runCheck(package,appExamples[package])
# The tests are failing if the number of failed check > 0
if (not passAppEx) or (not passingCheck):
sys.exit(1)
|
Python
| 0.999949
|
@@ -763,16 +763,21 @@
+self.
loadLib(
|
83f2fe37c6eda993d6b9e2cf2d187646a366f6d8
|
Make timer daemon
|
playserver/trackchecker.py
|
playserver/trackchecker.py
|
from threading import Timer
from . import track
_listeners = []
class TrackChecker():
def __init__(self, interval = 5):
self.listeners = []
self.CHECK_INTERVAL = interval
self.currentSong = ""
self.currentArtist = ""
self.currentAlbum = ""
self.timer = None
def checkSong(self):
song = track.getCurrentSong()
artist = track.getCurrentArtist()
album = track.getCurrentAlbum()
if (song != self.currentSong or artist != self.currentArtist
or album != self.currentAlbum):
self.currentSong = song
self.currentArtist = artist
self.currentAlbum = album
self._callListeners()
if self.timer != None:
self.startTimer()
def registerListener(self, function):
_listeners.append(function)
def _callListeners(self):
for listener in _listeners:
listener()
def startTimer(self):
self.timer = Timer(self.CHECK_INTERVAL, self.checkSong)
self.timer.start()
def cancelTimer(self):
self.timer.cancel()
self.timer = None
|
Python
| 0.000004
|
@@ -876,16 +876,38 @@
ckSong)%0A
+%09%09timer.daemon = True%0A
%09%09self.t
|
0d3082f46f0ffccaca10d3f53f22e6403783d874
|
change the range of the mean transmittance plot.
|
plot_mean_transmittance.py
|
plot_mean_transmittance.py
|
import matplotlib.pyplot as plt
import common_settings
import mean_flux
lya_center = 1215.67
settings = common_settings.Settings()
m = mean_flux.MeanFlux.from_file(settings.get_mean_transmittance_npy())
fig = plt.figure()
ax1 = fig.add_subplot(2, 1, 1)
ax2 = ax1.twiny()
ax1.plot(m.ar_z, m.get_weighted_mean())
# plt.plot(ar_z_range, np.ones(m.size))
ax1.set_ylabel(r"$\left< f_q(z)/C_q(z) \right> $")
plt.ylim(0.5, 1.5)
# add wavelength tick marks on top
x_lim2 = tuple([lya_center * (1 + z) for z in ax1.get_xlim()])
ax2.set_xlim(x_lim2)
plt.axis()
ax3 = fig.add_subplot(2, 1, 2)
ax3.plot(m.ar_z, m.ar_weights)
ax3.plot(m.ar_z, m.ar_total_flux)
ax3.set_xlim(ax1.get_xlim())
ax3.set_ylabel(r"$\sum_q f_q(z)/C_q(z)$")
ax3.set_xlabel(r"$z$")
plt.show()
|
Python
| 0
|
@@ -415,14 +415,14 @@
m(0.
-5
+0
, 1.
-5
+2
)%0A#
|
7e8f8b7ba96ade849eaed239751ef3d00c57d0bd
|
Update plots_digits_classification.py
|
examples/classification/plot_digits_classification.py
|
examples/classification/plot_digits_classification.py
|
"""
================================
Recognizing hand-written digits
================================
An example showing how the scikit-learn can be used to recognize images of
hand-written digits.
This example is commented in the
:ref:`tutorial section of the user manual <introduction>`.
"""
print(__doc__)
# Author: Gael Varoquaux <gael dot varoquaux at normalesup dot org>
# License: BSD 3 clause
# Standard scientific Python imports
import matplotlib.pyplot as plt
# Import datasets, classifiers and performance metrics
from sklearn import datasets, svm, metrics
# The digits dataset
digits = datasets.load_digits()
# The data that we are interested in is made of 8x8 images of digits, let's
# have a look at the first 3 images, stored in the `images` attribute of the
# dataset. If we were working from image files, we could load them using
# pylab.imread. Note that each image must have the same size. For these
# images, we know which digit they represent: it is given in the 'target' of
# the dataset.
images_and_labels = list(zip(digits.images, digits.target))
for index, (image, label) in enumerate(images_and_labels[:4]):
plt.subplot(2, 4, index + 1)
plt.axis('off')
plt.imshow(image, cmap=plt.cm.gray_r, interpolation='nearest')
plt.title('Training: %i' % label)
# To apply a classifier on this data, we need to flatten the image, to
# turn the data in a (samples, feature) matrix:
n_samples = len(digits.images)
data = digits.images.reshape((n_samples, -1))
# Create a classifier: a support vector classifier
classifier = svm.SVC(gamma=0.001)
# We learn the digits on the first half of the digits
classifier.fit(data[:n_samples / 2], digits.target[:n_samples / 2])
# Now predict the value of the digit on the second half:
expected = digits.target[n_samples / 2:]
predicted = classifier.predict(data[n_samples / 2:])
print("Classification report for classifier %s:\n%s\n"
% (classifier, metrics.classification_report(expected, predicted)))
print("Confusion matrix:\n%s" % metrics.confusion_matrix(expected, predicted))
images_and_predictions = list(zip(digits.images[n_samples / 2:], predicted))
for index, (image, prediction) in enumerate(images_and_predictions[:4]):
plt.subplot(2, 4, index + 5)
plt.axis('off')
plt.imshow(image, cmap=plt.cm.gray_r, interpolation='nearest')
plt.title('Prediction: %i' % prediction)
plt.show()
|
Python
| 0.000001
|
@@ -725,17 +725,17 @@
e first
-3
+4
images,
|
a906b07afd331872752ffe2325674d6f3f8f938c
|
Allow points with missing r_chromium
|
experimental/soundwave/soundwave/tables/timeseries.py
|
experimental/soundwave/soundwave/tables/timeseries.py
|
# Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import pandas # pylint: disable=import-error
TABLE_NAME = 'timeseries'
COLUMN_TYPES = (
# Index columns.
('test_suite', str), # benchmark name ('loading.mobile')
('measurement', str), # metric name ('timeToFirstContentfulPaint')
('bot', str), # master/builder name ('ChromiumPerf.android-nexus5')
('test_case', str), # story name ('Wikipedia')
('point_id', int), # monotonically increasing value for time series axis
# Other columns.
('value', float), # value recorded for test_path at given point_id
('timestamp', 'datetime64[ns]'), # when the value got stored on dashboard
('commit_pos', int), # chromium commit position
('chromium_rev', str), # git hash of chromium revision
('clank_rev', str), # git hash of clank revision
('improvement_direction', str), # good direction ('up', 'down', 'unknown')
)
COLUMNS = tuple(c for c, _ in COLUMN_TYPES)
INDEX = COLUMNS[:5]
# Copied from https://goo.gl/DzGYpW.
_CODE_TO_IMPROVEMENT_DIRECTION = {
0: 'up',
1: 'down',
}
TEST_PATH_PARTS = (
'master', 'builder', 'test_suite', 'measurement', 'test_case')
# This query finds the most recent point_id for a given test_path (i.e. fixed
# test_suite, measurement, bot, and test_case values).
_GET_MOST_RECENT_QUERY = (
'SELECT * FROM %s WHERE %s ORDER BY timestamp DESC LIMIT 1'
% (TABLE_NAME, ' AND '.join('%s=?' % c for c in INDEX[:-1])))
def _ParseIntValue(value, on_error=-1):
# Try to parse as int and, in case of error, return a pre-defined value.
try:
return int(value)
except StandardError:
return on_error
def _ParseConfigFromTestPath(test_path):
values = test_path.split('/', len(TEST_PATH_PARTS) - 1)
config = dict(zip(TEST_PATH_PARTS, values))
config['bot'] = '%s/%s' % (config.pop('master'), config.pop('builder'))
return config
def DataFrameFromJson(data):
config = _ParseConfigFromTestPath(data['test_path'])
config['improvement_direction'] = _CODE_TO_IMPROVEMENT_DIRECTION.get(
data['improvement_direction'], 'unknown')
timeseries = data['timeseries']
# The first element in timeseries list contains header with column names.
header = timeseries[0]
rows = []
# Remaining elements contain the values for each row.
for values in timeseries[1:]:
row = config.copy()
row.update(zip(header, values))
row['point_id'] = row['revision']
row['commit_pos'] = _ParseIntValue(row['r_commit_pos'])
row['chromium_rev'] = row['r_chromium']
row['clank_rev'] = row.get('r_clank', None)
rows.append(tuple(row.get(k) for k in COLUMNS))
df = pandas.DataFrame.from_records(rows, index=INDEX, columns=COLUMNS)
df['timestamp'] = pandas.to_datetime(df['timestamp'])
return df
def GetMostRecentPoint(con, test_path):
"""Find the record for the most recent data point on the given test_path.
Returns:
A pandas.Series with the record if found, or None otherwise.
"""
config = _ParseConfigFromTestPath(test_path)
params = tuple(config[c] for c in INDEX[:-1])
df = pandas.read_sql(
_GET_MOST_RECENT_QUERY, con, params=params, parse_dates=['timestamp'])
return df.iloc[0] if not df.empty else None
|
Python
| 0.000027
|
@@ -2630,17 +2630,21 @@
'%5D = row
-%5B
+.get(
'r_chrom
@@ -2647,17 +2647,17 @@
hromium'
-%5D
+)
%0A row
|
746eace7e4677b034743b25e0f8d53aabd07dd5c
|
Fix bugs?
|
autopoke.py
|
autopoke.py
|
#!/bin/env python
from selenium import webdriver
from selenium.common.exceptions import StaleElementReferenceException
from time import sleep
from getpass import getpass
if __name__ == '__main__':
driver = webdriver.phantomjs.webdriver.WebDriver()
driver.get('https://facebook.com')
driver.find_element_by_id('email').send_keys(input('Email: '))
driver.find_element_by_id('pass').send_keys(getpass())
driver.find_element_by_id('loginbutton').click()
driver.get('https://facebook.com/pokes/')
assert "Forgot password?" not in driver.page_source
c = 0
c2 = 0
while True:
try:
for i in driver.find_elements_by_link_text("Poke Back"):
i.click()
c += 1
c2 = 0
print("Clicked so far: " + str(c))
except StaleElementReferenceException:
print("Found exception, reloading page")
driver.get('https://facebook.com/pokes/')
c2 += 1
if c2 % 121 == 0:
print("No pokes in last minute. Reloading")
driver.get('https://facebook.com/pokes/')
sleep(0.5)
|
Python
| 0
|
@@ -578,26 +578,25 @@
c = 0%0A
-c2
+e
= 0%0A whi
@@ -739,31 +739,8 @@
= 1%0A
- c2 = 0%0A
@@ -833,16 +833,63 @@
eption:%0A
+ e += 1%0A if e == 10:%0A
@@ -933,16 +933,20 @@
page%22)%0A
+
@@ -991,25 +991,24 @@
okes/')%0A
-%0A
c2 += 1%0A
@@ -1003,41 +1003,21 @@
-c2 += 1%0A if c2 %25 121 =
+ e
= 0
-:
%0A
@@ -1029,106 +1029,70 @@
-print(%22No pokes in last minute. Reloading%22)%0A driver.get('https://facebook.com/pokes/')
+else:%0A print(%22Found exception, doing nothing%22)%0A
%0A
@@ -1106,9 +1106,7 @@
eep(
-0.5
+1
)%0A
|
434379f80ace9d3f99277adb567c447f870e6cf6
|
clean print and manual blacken
|
zou/app/blueprints/source/csv/assets.py
|
zou/app/blueprints/source/csv/assets.py
|
from zou.app.blueprints.source.csv.base import BaseCsvProjectImportResource
from zou.app.models.project import ProjectTaskTypeLink
from zou.app.models.task import Task
from zou.app.models.task_type import TaskType
from zou.app.services import assets_service, projects_service, shots_service
from zou.app.models.entity import Entity
from zou.app.services.tasks_service import create_task, get_tasks_for_asset, get_task_statuses
from zou.app.services.comments_service import create_comment
from zou.app.services.persons_service import get_current_user
from zou.app.utils import events
class AssetsCsvImportResource(BaseCsvProjectImportResource):
def prepare_import(self, project_id):
self.episodes = {}
self.entity_types = {}
self.descriptor_fields = self.get_descriptor_field_map(
project_id, "Asset"
)
project = projects_service.get_project(project_id)
self.is_tv_show = projects_service.is_tv_show(project)
if self.is_tv_show:
episodes = shots_service.get_episodes_for_project(project_id)
self.episodes = {
episode["name"]: episode["id"] for episode in episodes
}
self.task_types_in_project_for_assets = (
TaskType.query.join(ProjectTaskTypeLink)
.filter(ProjectTaskTypeLink.project_id == project_id)
.filter(TaskType.for_entity == "Asset")
)
self.task_statuses = get_task_statuses()
self.current_user_id = get_current_user()["id"]
def import_row(self, row, project_id):
asset_name = row["Name"]
entity_type_name = row["Type"]
description = row.get("Description", "")
episode_name = row.get("Episode", None)
episode_id = None
if episode_name is not None:
if episode_name != "MP" and episode_name not in self.episodes:
self.episodes[
episode_name
] = shots_service.get_or_create_episode(
project_id, episode_name
)[
"id"
]
episode_id = self.episodes.get(episode_name, None)
self.add_to_cache_if_absent(
self.entity_types,
assets_service.get_or_create_asset_type,
entity_type_name,
)
entity_type_id = self.get_id_from_cache(
self.entity_types, entity_type_name
)
entity = Entity.get_by(
name=asset_name,
project_id=project_id,
entity_type_id=entity_type_id,
source_id=episode_id,
)
data = {}
for name, field_name in self.descriptor_fields.items():
if name in row:
data[field_name] = row[name]
elif (
entity is not None
and entity.data is not None
and field_name in entity.data
):
data[field_name] = entity.data[field_name]
tasks = []
if entity is None:
entity = Entity.create(
name=asset_name,
description=description,
project_id=project_id,
entity_type_id=entity_type_id,
source_id=episode_id,
data=data,
)
events.emit(
"asset:new",
{"asset_id": str(entity.id), "episode_id": episode_id},
project_id=project_id,
)
for task_type in self.task_types_in_project_for_assets:
tasks.append(create_task(task_type.serialize(), entity.serialize()))
elif self.is_update:
entity.update({"description": description, "data": data})
events.emit(
"asset:update",
{"asset_id": str(entity.id), "episode_id": episode_id},
project_id=project_id,
)
tasks = get_tasks_for_asset(entity.serialize())
for task in tasks:
task_name = task["task_type_name"].title()
task_status_name = row.get(task_name)
task_status_id = task["task_status_id"]
task_comment = row.get(f"{task_name} Comment", "")
if task_status_name:
for status in self.task_statuses:
print(status)
if task_status_name.lower() in (status["name"].lower(), status["short_name"].lower()):
task_status_id = status["id"]
if task_status_id != task["task_status_id"] or task_comment:
create_comment(
self.current_user_id,
task["id"],
task_status_id,
task_comment,
[],
{},
"",
)
return entity.serialize()
|
Python
| 0
|
@@ -3588,16 +3588,37 @@
.append(
+%0A
create_t
@@ -3663,16 +3663,33 @@
alize())
+%0A
)%0A%0A
@@ -4379,97 +4379,112 @@
-print(status)%0A if task_status_name.lower() in (status%5B%22name%22%5D.lower(),
+if task_status_name.lower() in (%0A status%5B%22name%22%5D.lower(),%0A
sta
@@ -4508,16 +4508,38 @@
.lower()
+,%0A
):%0A
|
d9fb8d20948e76d4df176d083e4284d3c99258ca
|
return int index for userid's in the Netflix dataset
|
polara/datasets/netflix.py
|
polara/datasets/netflix.py
|
import pandas as pd
import tarfile
def get_netflix_data(gz_file):
movie_data = []
movie_name = []
with tarfile.open(gz_file) as tar:
training_data = tar.getmember('download/training_set.tar')
with tarfile.open(fileobj=tar.extractfile(training_data)) as inner:
for item in inner.getmembers():
if item.isfile():
f = inner.extractfile(item.name)
df = pd.read_csv(f)
movieid = df.columns[0]
movie_name.append(movieid)
movie_data.append(df[movieid])
data = pd.concat(movie_data, keys=movie_name)
data = data.reset_index().iloc[:, :3].rename(columns={'level_0': 'movieid',
'level_1': 'userid',
'level_2': 'rating'})
return data
|
Python
| 0.000011
|
@@ -91,20 +91,20 @@
movie_
-name
+inds
= %5B%5D%0A
@@ -528,20 +528,20 @@
vie_
-name
+inds
.append(
movi
@@ -536,23 +536,33 @@
.append(
+int(
movieid
+%5B:-1%5D)
)%0A
@@ -651,20 +651,20 @@
s=movie_
-name
+inds
)%0A da
|
17b0fcf279c6dc8552a45e9cebde97317212fc14
|
Fix UnboundLocalError
|
atomicwrites/__init__.py
|
atomicwrites/__init__.py
|
import contextlib
import errno
import os
import sys
import tempfile
__version__ = '0.1.8'
PY2 = sys.version_info[0] == 2
text_type = unicode if PY2 else str
def _path_to_unicode(x):
if not isinstance(x, text_type):
return x.decode(sys.getfilesystemencoding())
return x
if sys.platform != 'win32':
def _replace_atomic(src, dst):
os.rename(src, dst)
def _move_atomic(src, dst):
os.link(src, dst)
os.unlink(src)
else:
from ctypes import windll, WinError
_MOVEFILE_REPLACE_EXISTING = 0x1
_MOVEFILE_WRITE_THROUGH = 0x8
_windows_default_flags = _MOVEFILE_WRITE_THROUGH
def _handle_errors(rv):
if not rv:
raise WinError()
def _replace_atomic(src, dst):
_handle_errors(windll.kernel32.MoveFileExW(
_path_to_unicode(src), _path_to_unicode(dst),
_windows_default_flags | _MOVEFILE_REPLACE_EXISTING
))
def _move_atomic(src, dst):
_handle_errors(windll.kernel32.MoveFileExW(
_path_to_unicode(src), _path_to_unicode(dst),
_windows_default_flags
))
def replace_atomic(src, dst):
'''
Move ``src`` to ``dst``. If ``dst`` exists, it will be silently
overwritten.
Both paths must reside on the same filesystem for the operation to be
atomic.
'''
return _replace_atomic(src, dst)
def move_atomic(src, dst):
'''
Move ``src`` to ``dst``. There might a timewindow where both filesystem
entries exist. If ``dst`` already exists, :py:exc:`FileExistsError` will be
raised.
Both paths must reside on the same filesystem for the operation to be
atomic.
'''
return _move_atomic(src, dst)
class AtomicWriter(object):
'''
A helper class for performing atomic writes. Usage::
with AtomicWriter(path).open() as f:
f.write(...)
:param path: The destination filepath. May or may not exist.
:param mode: The filemode for the temporary file.
:param overwrite: If set to false, an error is raised if ``path`` exists.
Errors are only raised after the file has been written to. Either way,
the operation is atomic.
If you need further control over the exact behavior, you are encouraged to
subclass.
'''
def __init__(self, path, mode='w', overwrite=False):
if 'a' in mode:
raise ValueError(
'Appending to an existing file is not supported, because that '
'would involve an expensive `copy`-operation to a temporary '
'file. Open the file in normal `w`-mode and copy explicitly '
'if that\'s what you\'re after.'
)
if 'x' in mode:
raise ValueError('Use the `overwrite`-parameter instead.')
if 'w' not in mode:
raise ValueError('AtomicWriters can only be written to.')
self._path = path
self._mode = mode
self._overwrite = overwrite
def open(self):
'''
Open the temporary file.
'''
return self._open(self.get_fileobject)
@contextlib.contextmanager
def _open(self, get_fileobject):
try:
with get_fileobject() as f:
yield f
self.sync(f)
self.commit(f)
except:
try:
self.rollback(f)
except Exception:
pass
raise
def get_fileobject(self, dir=None, **kwargs):
'''Return the temporary file to use.'''
if dir is None:
dir = os.path.dirname(self._path)
return tempfile.NamedTemporaryFile(mode=self._mode, dir=dir,
delete=False, **kwargs)
def sync(self, f):
'''responsible for clearing as many file caches as possible before
commit'''
f.flush()
os.fsync(f.fileno())
def commit(self, f):
'''Move the temporary file to the target location.'''
if self._overwrite:
replace_atomic(f.name, self._path)
else:
move_atomic(f.name, self._path)
def rollback(self, f):
'''Clean up all temporary resources.'''
os.unlink(f.name)
def atomic_write(path, writer_cls=AtomicWriter, **cls_kwargs):
    '''
    Convenience wrapper around :py:class:`AtomicWriter`::
        with atomic_write(path) as f:
            f.write(...)
    :param path: The target path to write to.
    :param writer_cls: The writer class to use; pass a subclass of
        :py:class:`AtomicWriter` to customize behavior.
    Any additional keyword arguments are forwarded to ``writer_cls``.
    '''
    writer = writer_cls(path, **cls_kwargs)
    return writer.open()
|
Python
| 0
|
@@ -3170,24 +3170,94 @@
ileobject):%0A
+ f = None # make sure f exists even if get_fileobject() fails%0A
try:
|
5e4b661c446ad3fc9d27e55c7b0cfc9b17e4d8f9
|
add comment
|
pyalaocl/useocl/state.py
|
pyalaocl/useocl/state.py
|
# coding=utf-8
from collections import OrderedDict
class State(object):
    """Holds every element of an object state, keyed by name."""

    def __init__(self):
        # Insertion-ordered registries for the three element kinds.
        self.objects = OrderedDict()
        self.links = OrderedDict()
        self.linkObject = OrderedDict()
class StateElement(object):
    """Base class for anything that belongs to a State."""

    def __init__(self, state):
        # Back-reference to the owning State.
        self.state = state
class Object(StateElement):
    """A named, classed object; registers itself in ``state.objects``."""

    def __init__(self, state, className, name):
        super(Object, self).__init__(state)
        self.name = name
        self.className = className
        self.attributes = OrderedDict()
        # Register last so the object is fully initialized when visible.
        state.objects[name] = self

    def set(self, name, value):
        """Set attribute ``name`` of this object to ``value``."""
        self.attributes[name] = value
class Link(StateElement):
    """An association instance between objects; keyed by the joined
    names of its participants in ``state.links``."""

    def __init__(self, state, associationName, objects):
        super(Link, self).__init__(state)
        self.associationName = associationName
        self.roles = objects
        key = '_'.join(o.name for o in objects)
        state.links[key] = self
class LinkObject(StateElement):
    """An association-class instance: behaves as both link and object."""

    def __init__(self, state, associationClassName, name, objects):
        super(LinkObject, self).__init__(state)
        self.name = name
        self.className = associationClassName
        self.attributes = OrderedDict()
        self.roles = objects
        state.linkObject[name] = self

    def set(self, name, value):
        """Set attribute ``name`` of this link object to ``value``."""
        self.attributes[name] = value
|
Python
| 0
|
@@ -9,16 +9,129 @@
=utf-8%0A%0A
+%22%22%22%0ASimple metamodel for object states. Contains definitions for:%0A%0A- State,%0A- Object,%0A- Link,%0A- LinkObject.%0A%22%22%22%0A%0A
from col
|
64357fbd3c32c112bdae471e538f1a5b65a74fff
|
Remove unused module.
|
pybossa/cache/helpers.py
|
pybossa/cache/helpers.py
|
# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from sqlalchemy.sql import text
from pybossa.core import db, timeouts
from pybossa.cache import memoize, ONE_HOUR
from pybossa.cache.apps import overall_progress
session = db.slave_session
@memoize(timeout=ONE_HOUR * 3)
def n_available_tasks(app_id, user_id=None, user_ip=None):
    """Returns the number of tasks for a given app a user can contribute to,
    based on the completion of the app tasks, and previous task_runs submitted
    by the user"""
    # Counts non-completed tasks the user has not already answered.
    # Authenticated users are matched by user_id; anonymous users by IP.
    if user_id and not user_ip:
        query = text('''SELECT COUNT(id) AS n_tasks FROM task WHERE NOT EXISTS
                       (SELECT task_id FROM task_run WHERE
                       app_id=:app_id AND user_id=:user_id AND task_id=task.id)
                       AND app_id=:app_id AND state !='completed';''')
        result = session.execute(query, dict(app_id=app_id, user_id=user_id))
    else:
        # Anonymous contribution with no IP recorded: fall back to localhost.
        if not user_ip:
            user_ip = '127.0.0.1'
        query = text('''SELECT COUNT(id) AS n_tasks FROM task WHERE NOT EXISTS
                       (SELECT task_id FROM task_run WHERE
                       app_id=:app_id AND user_ip=:user_ip AND task_id=task.id)
                       AND app_id=:app_id AND state !='completed';''')
        result = session.execute(query, dict(app_id=app_id, user_ip=user_ip))
    # COUNT() yields exactly one row; the loop just extracts it.
    n_tasks = 0
    for row in result:
        n_tasks = row.n_tasks
    return n_tasks
def check_contributing_state(app, user_id=None, user_ip=None):
    """Returns the state of a given app for a given user, depending on whether
    the app is completed or not and the user can contribute more to it or not"""
    # `app` may be a cached dict or a domain object.
    app_id = app['id'] if type(app) == dict else app.id
    states = ('completed', 'draft', 'can_contribute', 'cannot_contribute')
    if overall_progress(app_id) >= 100:
        return states[0]
    # No presenter or no tasks at all means the app is still a draft.
    if _has_no_presenter(app) or _has_no_tasks(app_id):
        return states[1]
    if n_available_tasks(app_id, user_id=user_id, user_ip=user_ip) > 0:
        return states[2]
    return states[3]
return states[3]
def add_custom_contrib_button_to(app, user_id_or_ip):
    """Return the app as a dict with a 'contrib_button' state added.

    Accepts either a cached dict or a domain object; objects are dictized
    so the result is always a plain dict.
    """
    # The previous code also computed an `app_id` local that was never
    # used; it has been removed.
    if type(app) != dict:
        app = app.dictize()
    app['contrib_button'] = check_contributing_state(app, **user_id_or_ip)
    return app
def _has_no_presenter(app):
    """Return True if the app defines no task presenter.

    Works with domain objects (``app.info``) and dicts (``app.get('info')``).
    An app whose info cannot be read at all -- or whose dict has no 'info'
    entry -- is treated as having no presenter instead of raising
    TypeError (``'x' not in None``), which the previous code did.
    """
    try:
        info = app.info
    except AttributeError:
        try:
            info = app.get('info')
        except AttributeError:
            return True
    return 'task_presenter' not in (info or {})
def _has_no_tasks(app_id):
    """Return True if the app has no tasks at all."""
    query = text('''SELECT COUNT(id) AS n_tasks FROM task
                 WHERE app_id=:app_id;''')
    result = session.execute(query, dict(app_id=app_id))
    # COUNT() yields exactly one row; the loop just extracts it.
    for row in result:
        n_tasks = row.n_tasks
    return n_tasks == 0
|
Python
| 0
|
@@ -800,18 +800,8 @@
t db
-, timeouts
%0Afro
|
8469053ba83dd344c41eebfdee8dfee6591d1560
|
Rename app to project. PEP8 and 257.
|
pybossa/cache/helpers.py
|
pybossa/cache/helpers.py
|
# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from sqlalchemy.sql import text
from pybossa.core import db
from pybossa.cache import memoize, ONE_HOUR
from pybossa.cache.projects import overall_progress
session = db.slave_session
@memoize(timeout=ONE_HOUR * 3)
def n_available_tasks(project_id, user_id=None, user_ip=None):
    """Return the number of tasks for a given project a user can contribute to.
    based on the completion of the project tasks, and previous task_runs
    submitted by the user.
    """
    # NOTE: every bound parameter must be :project_id -- the execute()
    # dicts below only supply `project_id` (plus the user key), so the
    # leftover :app_id placeholders from the app->project rename could
    # never be bound and the query would fail.
    if user_id and not user_ip:
        query = text('''SELECT COUNT(id) AS n_tasks FROM task WHERE NOT EXISTS
                       (SELECT task_id FROM task_run WHERE
                       app_id=:project_id AND user_id=:user_id AND task_id=task.id)
                       AND app_id=:project_id AND state !='completed';''')
        result = session.execute(query, dict(project_id=project_id,
                                             user_id=user_id))
    else:
        # Anonymous users are identified by IP; fall back to localhost.
        if not user_ip:
            user_ip = '127.0.0.1'
        query = text('''SELECT COUNT(id) AS n_tasks FROM task WHERE NOT EXISTS
                       (SELECT task_id FROM task_run WHERE
                       app_id=:project_id AND user_ip=:user_ip AND task_id=task.id)
                       AND app_id=:project_id AND state !='completed';''')
        result = session.execute(query, dict(project_id=project_id,
                                             user_ip=user_ip))
    # COUNT() yields exactly one row; the loop just extracts it.
    n_tasks = 0
    for row in result:
        n_tasks = row.n_tasks
    return n_tasks
def check_contributing_state(project, user_id=None, user_ip=None):
    """Return the contribution state of a project for a given user.

    One of 'completed', 'draft', 'can_contribute' or 'cannot_contribute',
    derived from the project's overall progress, whether it has a presenter
    and tasks, and whether this user still has tasks left to answer.
    """
    if type(project) == dict:
        project_id = project['id']
    else:
        project_id = project.id
    if overall_progress(project_id) >= 100:
        return 'completed'
    if _has_no_presenter(project) or _has_no_tasks(project_id):
        return 'draft'
    n_left = n_available_tasks(project_id, user_id=user_id, user_ip=user_ip)
    return 'can_contribute' if n_left > 0 else 'cannot_contribute'
def add_custom_contrib_button_to(project, user_id_or_ip):
    """Add a customized contrib button for a project."""
    if type(project) != dict:
        project = project.dictize()
    state = check_contributing_state(project, **user_id_or_ip)
    project['contrib_button'] = state
    return project
def _has_no_presenter(project):
    """Return True when the project defines no task presenter.

    Accepts a domain object (``project.info``) or a dict
    (``project.get('info')``); anything else counts as "no presenter".
    """
    for read_info in (lambda: project.info, lambda: project.get('info')):
        try:
            return 'task_presenter' not in read_info()
        except AttributeError:
            continue
    return True
def _has_no_tasks(project_id):
    """Return if a project has no tasks."""
    # The bound parameter must match the key passed to execute() below
    # (project_id); the leftover :app_id placeholder from the app->project
    # rename had nothing to bind to and broke the query.
    query = text('''SELECT COUNT(id) AS n_tasks FROM task
                 WHERE app_id=:project_id;''')
    result = session.execute(query, dict(project_id=project_id))
    # COUNT() yields exactly one row; the loop just extracts it.
    for row in result:
        n_tasks = row.n_tasks
    return n_tasks == 0
|
Python
| 0
|
@@ -735,16 +735,58 @@
enses/%3E.
+%0A%22%22%22Cache module with helper functions.%22%22%22
%0A%0Afrom s
@@ -3208,16 +3208,73 @@
project,
+%0A
**user_
@@ -3604,16 +3604,17 @@
n True%0A%0A
+%0A
def _has
@@ -3767,19 +3767,23 @@
app_id=:
-app
+project
_id;''')
|
c3f1d0f14b85dae26f36734406ff19a6a40b1239
|
remove unused imports
|
pyfarm/models/jobtype.py
|
pyfarm/models/jobtype.py
|
# No shebang line, this module is meant to be imported
#
# Copyright 2013 Oliver Palmer
# Copyright 2014 Ambient Entertainment GmbH & Co. KG
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Job Type Models
===============
Models and objects dedicated to handling information which is specific
to an individual job. See :mod:`pyfarm.models.job` for more the more
general implementation.
"""
import ast
from hashlib import sha1
from textwrap import dedent
from sqlalchemy import event
from sqlalchemy.orm import validates
from sqlalchemy.schema import UniqueConstraint
from pyfarm.core.config import read_env_int, read_env_bool, read_env
from pyfarm.core.logger import getLogger
from pyfarm.master.application import db
from pyfarm.models.core.cfg import (
TABLE_JOB_TYPE, TABLE_JOB_TYPE_VERSION, MAX_JOBTYPE_LENGTH)
from pyfarm.models.core.mixins import UtilityMixins, ReprMixin
from pyfarm.models.core.types import id_column, IDTypeWork
__all__ = ("JobType", )
JOBTYPE_BASECLASS = read_env("PYFARM_JOBTYPE_SUBCLASSES_BASE_CLASS", "JobType")
logger = getLogger("models.jobtype")
class JobType(db.Model, UtilityMixins, ReprMixin):
    """
    Stores the unique information necessary to execute a task
    """
    __tablename__ = TABLE_JOB_TYPE
    # Job type names must be unique across the table.
    __table_args__ = (UniqueConstraint("name"),)
    REPR_COLUMNS = ("id", "name")
    id = id_column(IDTypeWork)
    name = db.Column(db.String(MAX_JOBTYPE_LENGTH), nullable=False,
                     doc=dedent("""
                     The name of the job type.  This can be either a human
                     readable name or the name of the job type class
                     itself."""))
    description = db.Column(db.Text, nullable=True,
                            doc=dedent("""
                            Human readable description of the job type.  This
                            field is not required and is not directly relied
                            upon anywhere."""))
    success_subject = db.Column(db.Text, nullable=True,
                                doc="The subject line to use for notifications "
                                    "in case of success. Some substitions, "
                                    "for example for the job title, are "
                                    "available.")
    success_body = db.Column(db.Text, nullable=True,
                             doc="The email body to use for notifications in "
                                 "in case of success. Some substitions, for "
                                 "example for the job title, are available.")
    fail_subject = db.Column(db.Text, nullable=True,
                             doc="The subject line to use for notifications "
                                 "in case of failure. Some substitions, for "
                                 "example for the job title, are available.")
    fail_body = db.Column(db.Text, nullable=True,
                          doc="The email body to use for notifications in "
                              "in case of success. Some substitions, for "
                              "example for the job title, are available.")

    @validates("name")
    def validate_name(self, key, value):
        # Reject empty names before they reach the database.
        if value == "":
            raise ValueError("Name cannot be empty")
        return value
class JobTypeVersion(db.Model, UtilityMixins, ReprMixin):
    """A single numbered version of a :class:`JobType`, holding the actual
    source code and queue-batching configuration for that version."""
    __tablename__ = TABLE_JOB_TYPE_VERSION
    # A job type cannot have two rows with the same version number.
    __table_args__ = (UniqueConstraint("jobtype_id", "version"),)
    REPR_COLUMNS = (
        "id", "jobtype_id", "version")
    id = id_column(IDTypeWork)
    jobtype_id = db.Column(IDTypeWork,
                           db.ForeignKey("%s.id" % TABLE_JOB_TYPE),
                           nullable=False,
                           doc="The jobtype this version belongs to")
    version = db.Column(db.Integer, nullable=False, doc="The version number")
    max_batch = db.Column(db.Integer,
                          default=read_env_int(
                              "JOBTYPE_DEFAULT_MAX_BATCH",
                              read_env_int("PYFARM_QUEUE_MAX_BATCH", 1)),
                          doc=dedent("""
                          When the queue runs, this is the maximum number of
                          tasks that the queue can select to assign to a single
                          agent.  If left empty, no maximum applies"""))
    batch_contiguous = db.Column(db.Boolean,
                                 default=read_env_bool(
                                     "JOBTYPE_DEFAULT_BATCH_CONTIGUOUS", True),
                                 doc=dedent("""
                                 If True then the queue will be forced to batch
                                 numerically contiguous tasks only for this
                                 job type.  For example if True it would batch
                                 frames 1, 2, 3, 4 together but not 2, 4, 6,
                                 8.  If this column is False however the queue
                                 will batch non-contiguous tasks too."""))
    classname = db.Column(db.String(MAX_JOBTYPE_LENGTH), nullable=True,
                          doc=dedent("""
                          The name of the job class contained within the file
                          being loaded.  This field may be null but when it's
                          not provided job type name will be used instead."""))
    code = db.Column(db.UnicodeText, nullable=False,
                     doc="The source code of the job type")
    jobtype = db.relationship("JobType",
                              backref=db.backref("versions", lazy="dynamic",
                                                 cascade="all, delete-orphan"),
                              doc=dedent("""
                              Relationship between this version and the
                              :class:`JobType` it belongs to"""))
    jobs = db.relationship("Job", backref="jobtype_version", lazy="dynamic",
                           doc=dedent("""
                           Relationship between this jobtype version and
                           :class:`.Job` objects."""))

    @validates("max_batch")
    def validate_max_batch(self, key, value):
        # max_batch of 0 or less makes no scheduling sense; None is allowed.
        if isinstance(value, int) and value < 1:
            raise ValueError("max_batch must be greater than or equal to 1")
        return value

    @validates("version")
    def validate_version(self, key, value):
        # Versions are 1-based.
        if isinstance(value, int) and value < 1:
            raise ValueError("version must be greater than or equal to 1")
        return value
|
Python
| 0.000001
|
@@ -891,101 +891,36 @@
%22%22%0A%0A
-import ast%0Afrom hashlib import sha1%0Afrom textwrap import dedent%0A%0Afrom sqlalchemy import event
+from textwrap import dedent%0A
%0Afro
|
2fa942eb985295a2e7a573cb294680d8139929a2
|
Add documentation to overwrite modes
|
pygimplib/pgoverwrite.py
|
pygimplib/pgoverwrite.py
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2014-2017 khalim19
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This module defines:
* overwrite chooser - an indication on how to handle existing files (skip,
replace, rename, etc.),
* `handle_overwrite` convenience function to handle conflicting files.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from future.builtins import *
import future.utils
import abc
import os
from . import pgpath
#===============================================================================
class OverwriteChooser(future.utils.with_metaclass(abc.ABCMeta, object)):
    """
    This class is an interface to indicate how to handle existing files.
    
    Attributes:
    
    * `overwrite_mode` (read-only) - Overwrite mode chosen by the user.
    """
    
    # NOTE(review): declared here as an abstract method, but the concrete
    # subclasses below expose it as a read-only property -- subclasses are
    # expected to do the same.
    @abc.abstractmethod
    def overwrite_mode(self):
        pass
    
    @abc.abstractmethod
    def choose(self, filepath=None):
        """
        Return a value indicating how to handle the conflicting file
        by letting the user choose the value.
        
        The actual overwrite modes (possible values one of which the user chooses)
        and the implementation of handling conflicting files are left to the
        programmer using the return value provided by this method.
        
        Parameters:
        
        * `filepath` - File path that conflicts with an existing file.
          This class uses the file path to simply display it to the user. Defaults
          to None.
        """
        pass
class NoninteractiveOverwriteChooser(OverwriteChooser):
    """
    Overwrite chooser that always answers with a fixed, pre-selected
    overwrite mode. Suitable for non-interactive environments, i.e. with
    no user interaction.
    """
    
    def __init__(self, overwrite_mode):
        super().__init__()
        self._overwrite_mode = overwrite_mode
    
    @property
    def overwrite_mode(self):
        return self._overwrite_mode
    
    def choose(self, filepath=None):
        # The file path is irrelevant here; the answer never changes.
        return self.overwrite_mode
class InteractiveOverwriteChooser(
    future.utils.with_metaclass(abc.ABCMeta, OverwriteChooser)):
  """
  This class is an interface for interactive overwrite choosers, requiring
  the user choose the overwrite mode.
  
  Additional attributes:
  
  * `values_and_display_names` - List of (value, display name) tuples which
    define overwrite modes and their human-readable names.
  
  * `default_value` - Default value. Must be one of the values in the
    `values_and_display_names` list.
  
  * `default_response` - Default value to return if the user made a choice that
    returns a value not in `values_and_display_names`. `default_response`
    does not have to be any of the values in `values_and_display_names`.
  
  * `is_apply_to_all` (read-only) - Whether the user-made choice applies to the
    current file (False) or to the current and all subsequent files (True).
  """
  
  def __init__(self, values_and_display_names, default_value, default_response):
    super().__init__()
    
    self.values_and_display_names = values_and_display_names
    # Validate the default against the declared mode values up front.
    self._values = [value for value, unused_ in self.values_and_display_names]
    
    if default_value not in self._values:
      raise ValueError(
        "invalid default value '{0}'; must be one of the following: {1}".format(
          default_value, self._values))
    
    self.default_value = default_value
    self.default_response = default_response
    
    self._overwrite_mode = self.default_value
    self._is_apply_to_all = False
  
  @property
  def overwrite_mode(self):
    return self._overwrite_mode
  
  @property
  def is_apply_to_all(self):
    return self._is_apply_to_all
  
  def choose(self, filepath=None):
    # Only re-ask the user if no sticky ("apply to all") choice was made.
    if self._overwrite_mode is None or not self._is_apply_to_all:
      return self._choose(filepath)
    else:
      return self._overwrite_mode
  
  @abc.abstractmethod
  def _choose(self, filepath):
    """
    Let the user choose the overwrite mode and return it.
    
    If the choice results in a value that is not in `values_and_display_names`,
    return `default_response`.
    """
    pass
#===============================================================================
class OverwriteModes(object):
    """
    Common overwrite modes for convenience.

    `SKIP` should be used if a file path already exists and no action
    should be taken. `DO_NOTHING` should be used if a file path does not
    exist and no action should be taken. `CANCEL` should be used if the
    user terminated the overwrite chooser.
    """

    REPLACE = 0
    SKIP = 1
    RENAME_NEW = 2
    RENAME_EXISTING = 3
    CANCEL = 4
    DO_NOTHING = 5

    OVERWRITE_MODES = (
        REPLACE, SKIP, RENAME_NEW, RENAME_EXISTING, CANCEL, DO_NOTHING)
def handle_overwrite(filepath, overwrite_chooser, uniquifier_position=None):
  """
  Resolve a potential file path conflict for `filepath`.
  
  If a file already exists at `filepath` (the path of a new file to be
  saved), ask `overwrite_chooser` (an `OverwriteChooser` instance, which
  must support all modes in `OverwriteModes`) how to proceed:
  `RENAME_NEW` renames `filepath`; `RENAME_EXISTING` renames the file
  already present in the file system. `uniquifier_position` optionally
  specifies where in the path the unique " (number)" substring is
  inserted (by default at the end of the path being renamed).
  
  Returns a tuple of:
  
  * the overwrite mode as returned by `overwrite_chooser` (the caller can
    act on e.g. `SKIP` or `CANCEL`),
  * the file path, modified if `RENAME_NEW` was chosen.
  """
  if os.path.exists(filepath):
    overwrite_chooser.choose(filepath=os.path.abspath(filepath))
    
    mode = overwrite_chooser.overwrite_mode
    if mode in (OverwriteModes.RENAME_NEW, OverwriteModes.RENAME_EXISTING):
      uniq_filepath = pgpath.uniquify_filepath(filepath, uniquifier_position)
      if mode == OverwriteModes.RENAME_NEW:
        filepath = uniq_filepath
      else:
        os.rename(filepath, uniq_filepath)
  
  return overwrite_chooser.overwrite_mode, filepath
|
Python
| 0
|
@@ -4777,16 +4777,344 @@
nience.%0A
+ %0A %60SKIP%60 should be used if a file path already exists and no action should be%0A taken.%0A %60DO_NOTHING%60 should be used if a file path does not exist and no action should%0A be taken.%0A %60CANCEL%60 should be used if the user terminated the overwrite chooser (e.g.%0A closed the overwrite dialog when an interactive chooser is used).%0A
%22%22%22%0A
|
078621494eb9981733412446aa4eabd9bc54fa52
|
Update URL for pymssql
|
lib/sqlalchemy/dialects/mssql/pymssql.py
|
lib/sqlalchemy/dialects/mssql/pymssql.py
|
# mssql/pymssql.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
.. dialect:: mssql+pymssql
:name: pymssql
:dbapi: pymssql
:connectstring: mssql+pymssql://<username>:<password>@<freetds_name>?charset=utf8
:url: http://pymssql.sourceforge.net/
Limitations
-----------
pymssql inherits a lot of limitations from FreeTDS, including:
* no support for multibyte schema identifiers
* poor support for large decimals
* poor support for binary fields
* poor support for VARCHAR/CHAR fields over 255 characters
Please consult the pymssql documentation for further information.
"""
from .base import MSDialect
from ... import types as sqltypes, util, processors
import re
class _MSNumeric_pymssql(sqltypes.Numeric):
    def result_processor(self, dialect, type_):
        """Return floats directly unless Decimal results were requested."""
        if self.asdecimal:
            # Defer to the stock Numeric processor for Decimal conversion.
            return sqltypes.Numeric.result_processor(self, dialect, type_)
        return processors.to_float
class MSDialect_pymssql(MSDialect):
    """SQL Server dialect backed by the pymssql DBAPI (FreeTDS)."""
    supports_sane_rowcount = False
    driver = 'pymssql'

    colspecs = util.update_copy(
        MSDialect.colspecs,
        {
            sqltypes.Numeric: _MSNumeric_pymssql,
            sqltypes.Float: sqltypes.Float,
        }
    )

    @classmethod
    def dbapi(cls):
        module = __import__('pymssql')
        # pymmsql doesn't have a Binary method.  we use string
        # TODO: monkeypatching here is less than ideal
        module.Binary = lambda x: x if hasattr(x, 'decode') else str(x)

        client_ver = tuple(int(x) for x in module.__version__.split("."))
        if client_ver < (1, ):
            util.warn("The pymssql dialect expects at least "
                      "the 1.0 series of the pymssql DBAPI.")
        return module

    def __init__(self, **params):
        super(MSDialect_pymssql, self).__init__(**params)
        self.use_scope_identity = True

    def _get_server_version_info(self, connection):
        # Parse "Microsoft SQL Server ... - a.b.c.d" into (a, b, c, d).
        vers = connection.scalar("select @@version")
        m = re.match(
            r"Microsoft SQL Server.*? - (\d+).(\d+).(\d+).(\d+)", vers)
        if m:
            return tuple(int(x) for x in m.group(1, 2, 3, 4))
        else:
            return None

    def create_connect_args(self, url):
        opts = url.translate_connect_args(username='user')
        opts.update(url.query)
        # pymssql takes the port folded into the host string, not separately.
        port = opts.pop('port', None)
        if port and 'host' in opts:
            opts['host'] = "%s:%s" % (opts['host'], port)
        return [[], opts]

    def is_disconnect(self, e, connection, cursor):
        # Match known disconnect messages; the for/else returns False only
        # when no message matched.
        for msg in (
            "Adaptive Server connection timed out",
            "Net-Lib error during Connection reset by peer",
            "message 20003",  # connection timeout
            "Error 10054",
            "Not connected to any MS SQL server",
            "Connection is closed"
        ):
            if msg in str(e):
                return True
        else:
            return False
dialect = MSDialect_pymssql
|
Python
| 0
|
@@ -413,23 +413,11 @@
sql.
-sourceforge.net
+org
/%0A%0AL
|
da1fe9244c697b69ecc3ebf9e203edf4322f00be
|
make cholesky_grad a primitive
|
autograd/numpy/linalg.py
|
autograd/numpy/linalg.py
|
from __future__ import absolute_import
from functools import partial
import numpy.linalg as npla
from .numpy_wrapper import wrap_namespace, dot
from . import numpy_wrapper as anp
wrap_namespace(npla.__dict__, globals())
def atleast_2d_col(x):
    # Promote a 1-D array to a single column (unlike np.atleast_2d, which
    # would make a row); higher-rank arrays pass through unchanged.
    return x[:, None] if x.ndim <= 1 else x
# Some formulas are from
# "An extended collection of matrix derivative results
# for forward and reverse mode algorithmic differentiation"
# by Mike Giles
# https://people.maths.ox.ac.uk/gilesm/files/NA-08-01.pdf
# Register vector-Jacobian products (VJPs) for the wrapped numpy.linalg
# functions, following Giles' matrix-derivative identities (cited above).
inv.defgrad( lambda ans, x : lambda g : -dot(dot(ans.T, g), ans.T))
det.defgrad( lambda ans, x : lambda g : g * ans * inv(x).T)
slogdet.defgrad(lambda ans, x : lambda g : g[1] * inv(x).T)
solve.defgrad( lambda ans, a, b : lambda g : -dot(atleast_2d_col(solve(a.T, g)),
                                                  atleast_2d_col(ans).T))
solve.defgrad(lambda ans, a, b : lambda g : solve(a.T, g), argnum=1)
def make_grad_norm(ans, x, ord=None, axis=None):
    """Return the VJP for numpy.linalg.norm.

    `ans` is the forward-pass result of norm(x, ord=ord, axis=axis). Only
    Frobenius matrix norms and vector p-norms with p > 1 are supported.
    """
    def check_implemented():
        matrix_norm = (x.ndim==2 and axis is None) or isinstance(axis, tuple)
        frobenius_norm = ord is None or ord == 'fro'
        diffable_pnorm = ord is None or ord > 1

        if matrix_norm and not frobenius_norm:
            raise NotImplementedError(
                'Gradient of matrix norm not implemented for ord={}'.format(ord))
        if not diffable_pnorm:
            raise NotImplementedError(
                'Gradient of norm not implemented for ord={}'.format(ord))

    # Re-insert the reduced axis so the gradient broadcasts against x.
    expand = lambda a: a if axis is None else anp.expand_dims(a, axis=axis)

    def norm_grad(g):
        check_implemented()
        # Use `==`, not `is`, for the 'fro' comparison: string identity is
        # an interning accident and fails for runtime-built strings (the
        # check above already uses `==`).
        if ord is None or ord == 2 or ord == 'fro':
            return expand(g / ans) * x
        else:
            # see https://en.wikipedia.org/wiki/Norm_(mathematics)#p-norm
            return expand(g / ans**(ord-1)) * x * anp.abs(x)**(ord-2)
    return norm_grad
norm.defgrad(make_grad_norm)  # register the VJP factory for linalg.norm
def make_grad_eigh(ans, x, UPLO='L'):
    """Gradient for eigenvalues and vectors of a symmetric matrix."""
    N = x.shape[0]
    w, v = ans              # Eigenvalues, eigenvectors.
    def eigh_grad(g):
        wg, vg = g          # Gradient w.r.t. eigenvalues, eigenvectors.
        w_repeated = anp.repeat(w[:, anp.newaxis], N, 1)
        # F[i, j] = 1 / (w[j] - w[i]) off the diagonal, 0 on it; the
        # added eye() avoids division by zero on the diagonal.
        off_diag = anp.ones((N, N)) - anp.eye(N)
        F = off_diag / (w_repeated.T - w_repeated + anp.eye(N))
        dx = dot(v * wg + dot(v, F * dot(v.T, vg)), v.T)
        if UPLO == 'U':     # Reflect to account for symmetry.
            return anp.triu(dx) + anp.tril(dx, -1).T
        else:
            return anp.tril(dx) + anp.triu(dx, 1).T
    return eigh_grad
eigh.defgrad(make_grad_eigh)
def make_grad_cholesky(L, A):
    """Return the VJP of cholesky(A), where L is the forward result.

    Uses the blocked back-substitution of (Smith 1995); falls back to the
    pure-Python loop if the compiled Cython version is unavailable.
    """
    # based on choleskies_cython.pyx in SheffieldML/GPy and (Smith 1995)
    # TODO for higher-order differentiation, replace dsymv, get rid of inplace
    # ops, make cholesky grad primitive and defgrad? also ArrayNode assignment
    from scipy.linalg.blas import dsymv
    N = L.shape[0]
    def cholesky_grad_python(g):
        # Works on a tril copy of g in place, row block by row block,
        # bottom-right to top-left.
        # NOTE(review): `xrange` makes this Python-2-only -- presumably
        # intentional for this codebase's era; confirm before porting.
        dL = anp.tril(g)
        dL[-1,-1] /= 2 * L[-1,-1]
        for k in xrange(N-2, -1, -1):
            dL[k+1:,k] -= dsymv(1., dL[k+1:,k+1:], L[k+1:,k], lower=True)
            dL[k+1:,k] -= anp.diag(dL[k+1:,k+1:]) * L[k+1:,k]
            dL[k+1:,k] /= L[k,k]
            dL[k,k] -= anp.dot(dL[k+1:,k], L[k+1:,k])
            dL[k,k] /= 2 * L[k,k]
        return dL
    try:
        # Prefer the compiled implementation when it has been built.
        from .linalg_extra import cholesky_grad as cython_cholesky_grad
        cholesky_grad = partial(cython_cholesky_grad, L.value)
    except ImportError:
        cholesky_grad = cholesky_grad_python
    return cholesky_grad
cholesky.defgrad(make_grad_cholesky)
|
Python
| 0.999391
|
@@ -61,16 +61,23 @@
partial
+, wraps
%0Aimport
@@ -178,16 +178,45 @@
r as anp
+%0Afrom ..core import primitive
%0A%0Awrap_n
@@ -3589,16 +3589,44 @@
_grad =
+wraps(cython_cholesky_grad)(
partial(
@@ -3655,16 +3655,17 @@
L.value)
+)
%0A exc
@@ -3738,16 +3738,26 @@
return
+primitive(
cholesky
@@ -3761,16 +3761,17 @@
sky_grad
+)
%0Acholesk
|
c824120ea5a33d3ee4cebc61b5bdf6b8258cf11f
|
remove set_printoptions call from debugging
|
autograd/scipy/linalg.py
|
autograd/scipy/linalg.py
|
from __future__ import division
import scipy.linalg
import autograd.numpy as anp
from autograd.numpy.numpy_wrapper import wrap_namespace
from autograd.numpy.linalg import atleast_2d_col as al2d
anp.set_printoptions(precision=3)
wrap_namespace(scipy.linalg.__dict__, globals()) # populates module namespace
def _flip(a, trans):
    """Return the transpose flag opposite to `trans` for array `a`.

    'N' flips to 'T' for real inputs and to 'H' (conjugate transpose) for
    complex inputs; any other flag flips back to 'N'. `trans` may be given
    as 'N' or 0 for "no transpose".
    """
    flipped = 'H' if anp.iscomplexobj(a) else 'T'
    return flipped if trans in ('N', 0) else 'N'
def make_grad_solve_triangular(ans, a, b, trans=0, lower=False, **kwargs):
    """Return the VJP of solve_triangular w.r.t. the matrix argument `a`."""
    # The gradient w.r.t. `a` is triangular on the same side as the
    # (possibly transposed) system being solved.
    tri = anp.tril if (lower ^ (_flip(a, trans) == 'N')) else anp.triu
    transpose = lambda x: x if _flip(a, trans) != 'N' else x.T
    def solve_triangular_grad(g):
        # Back-solve with the flipped transpose flag, then project onto
        # the triangular structure of `a`.
        v = al2d(solve_triangular(a, g, trans=_flip(a, trans), lower=lower))
        return -transpose(tri(anp.dot(v, al2d(ans).T)))
    return solve_triangular_grad
solve_triangular.defgrad(make_grad_solve_triangular)
# Gradient w.r.t. the right-hand side `b` is a plain back-solve.
solve_triangular.defgrad(lambda ans, a, b, trans=0, lower=False, **kwargs: lambda g:
                         solve_triangular(a, g, trans=_flip(a, trans), lower=lower), argnum=1)
def make_grad_sqrtm(ans, A, **kwargs):
    """Return the VJP of the matrix square root sqrtm(A)."""
    def sqrtm_grad(g):
        # d(sqrtm) satisfies the Lyapunov equation ans·X + X·ans = g.
        return solve_lyapunov(ans, g)
    return sqrtm_grad
sqrtm.defgrad(make_grad_sqrtm)
|
Python
| 0.000002
|
@@ -193,43 +193,8 @@
2d%0A%0A
-anp.set_printoptions(precision=3)%0A%0A
wrap
|
3fb35585e498ba9b8a262e76101e3842bab3acf2
|
Version added to 'precise_bbcode' package
|
precise_bbcode/__init__.py
|
precise_bbcode/__init__.py
|
# -*- coding: utf-8 -*-
# Standard library imports
# Third party imports
# Local application / specific library imports
from precise_bbcode.bbcode import get_parser
default_app_config = 'precise_bbcode.apps.PreciseBbCodeAppConfig'
def render_bbcodes(text):
    """
    Render the BBCodes contained in the given text and return the
    corresponding HTML output produced by the BBCode parser.
    """
    return get_parser().render(text)
|
Python
| 0
|
@@ -415,8 +415,160 @@
r(text)%0A
+%0A%0Apkg_resources = __import__('pkg_resources')%0Adistribution = pkg_resources.get_distribution('django-precise-bbcode')%0A__version__ = distribution.version%0A
|
5dd8c7d2f14e6323655ca9eb879597ab8b2b0ec4
|
Fix battery voltage calculation
|
gate_app.py
|
gate_app.py
|
from utime import sleep_ms, sleep
import webrepl
from mqtt import MQTTClient
from machine import Pin, ADC, PWM
import secrets
# Pin constants
LED1 = 16 # GPIO16, D0, Nodemcu led
LED2 = 2 # GPIO2, D4, ESP8266 led
SWITCH = 5 # GPIO5, D1
BATTERY = 0 # ADC0, A0
BUZZER = 14 # GPIO14, D5
# Resistors in voltage divider (ohms)
R1 = 9970
R2 = 994
RESISTOR_RATIO = (R1 + R2) / R2
# ADC Reference voltage in Millivolts
ADC_REF = 3292 # Measured between 3.3V and GND pins
ADC_READS = 30
GATE_STATUS_TOPIC = b"back-gate/status"
GATE_UPDATE_TOPIC = b"back-gate/update"
PAYLOAD_FORMAT = "field1=1&field2={0:.2f}\n"
on_for_update = False
def device_control(topic, msg):
    """MQTT callback: any message on the update topic keeps the device on."""
    global on_for_update
    on_for_update = True
    print((topic, msg))
def run_gate():
    """Publish gate status over MQTT, check for an update request, then
    power down unless an update (webrepl session) was requested."""
    global on_for_update
    c = MQTTClient("gate_client", secrets.MQTT_BROKER)
    c.set_callback(device_control)
    c.connect(clean_session=False)
    c.publish(GATE_STATUS_TOPIC, msg_payload())
    c.subscribe(GATE_UPDATE_TOPIC, qos=1)
    c.check_msg()  # may invoke device_control, setting on_for_update
    c.disconnect()
    flash_led(LED1)
    if not on_for_update:
        switch_off()
    webrepl.start()
def gate_alarm(topic, msg):
    """MQTT callback on the base station: sound the buzzer on any status msg."""
    print((topic, msg))
    sound_alarm()
def run_base():
    """Base-station loop: subscribe to gate status and alarm on each message.

    Blocks forever in wait_msg().
    """
    c = MQTTClient("gate_base_client", secrets.MQTT_BROKER)
    c.set_callback(gate_alarm)
    c.connect(clean_session=False)
    c.subscribe(GATE_STATUS_TOPIC)
    while True:
        c.wait_msg()
def msg_payload():
    """Build the status payload string carrying the battery voltage."""
    voltage = battery_voltage()
    return PAYLOAD_FORMAT.format(voltage)
def battery_voltage():
    """Return the battery voltage in volts, averaged over ADC_READS samples
    and scaled back up through the resistor divider ratio."""
    # ADC read at pin A0
    adc = ADC(BATTERY)
    sum = 0
    for x in range(0, ADC_READS):
        sum += adc.read()
    # 10-bit ADC (0..1023) against ADC_REF millivolts; /1000 converts mV -> V.
    return ADC_REF * RESISTOR_RATIO * (sum / ADC_READS) / 1024 / 1000
def switch_off():
    # Raise pin high to signal FET switch to turn off
    flash_led(LED2)
    pin = Pin(SWITCH, Pin.OUT)
    pin.on()
def sound_alarm():
    """Drive the buzzer at 500 Hz (50% duty) for five seconds."""
    pwm = PWM(Pin(BUZZER), freq=500, duty=512)
    sleep(5)
    pwm.deinit()
def flash_led(pin, count=1):
    """Blink the LED on GPIO `pin` `count` times (100 ms per half-cycle).

    NOTE(review): the LED is first driven high, then toggled 2*count
    times -- on these boards the on-board LEDs are typically active-low,
    so "on()" may actually be the off state; confirm against the wiring.
    """
    pin = Pin(pin, Pin.OUT)  # rebinds the pin number to a Pin object
    pin.on()
    for x in range(0, count * 2):
        pin.value(not pin.value())
        sleep_ms(100)
|
Python
| 0.000754
|
@@ -327,16 +327,140 @@
(ohms)%0A
+# NodeMcu internal resister divider (from schematic)%0ANODEMCU_RESISTOR_RATIO = (220 + 100) / 100%0A# External resister divider%0A
R1 = 997
@@ -468,17 +468,18 @@
%0AR2 = 99
-4
+90
%0ARESISTO
@@ -556,50 +556,119 @@
F =
-3292 # Measured between 3.3V and GND pins
+1000%0A# Average value from 100 reads when A0 is grounded%0AADC_OFFSET = 3%0A# Number of ADC reads to take average of
%0AADC
@@ -1849,16 +1849,24 @@
C_REF *
+NODEMCU_
RESISTOR
@@ -1873,16 +1873,43 @@
_RATIO *
+ RESISTOR_RATIO * %5C%0A
(sum /
@@ -1917,16 +1917,29 @@
DC_READS
+ - ADC_OFFSET
) / 1024
|
0a89c9e32e625e53cbe5ea151aff42031fb833a5
|
Add canonical link
|
frappe/website/page_controllers/base_template_page.py
|
frappe/website/page_controllers/base_template_page.py
|
import frappe
from frappe.website.doctype.website_settings.website_settings import get_website_settings
from frappe.website.page_controllers.web_page import WebPage
from frappe.website.website_components.metatags import MetaTags
class BaseTemplatePage(WebPage):
    """Base page controller for template-rendered pages.

    Builds the rendering context from website settings, site config and
    hooks, and injects the CSRF token into rendered HTML.
    """

    def init_context(self):
        # Seed the context with global website settings plus any
        # site-config overrides.
        self.context = frappe._dict()
        self.context.update(get_website_settings())
        self.context.update(frappe.local.conf.get("website_context") or {})

    def add_csrf_token(self, html):
        # No session means there is no token to inject.
        if not frappe.local.session:
            return html
        csrf_token = frappe.local.session.data.csrf_token
        return html.replace(
            "<!-- csrf_token -->",
            f'<script>frappe.csrf_token = "{csrf_token}";</script>')

    def post_process_context(self):
        self.tags = MetaTags(self.path, self.context).tags
        self.context.metatags = self.tags

        self.set_base_template_if_missing()
        self.set_title_with_prefix()
        self.update_website_context()

        # set using frappe.respond_as_web_page
        if hasattr(frappe.local, 'response') and frappe.local.response.get('context'):
            self.context.update(frappe.local.response.context)

        # to be able to inspect the context dict
        # Use the macro "inspect" from macros.html
        self.context._context_dict = self.context

        # context sends us a new template path
        if self.context.template:
            self.template_path = self.context.template

    def set_base_template_if_missing(self):
        if self.context.base_template_path:
            return
        app_base = frappe.get_hooks("base_template")
        self.context.base_template_path = (
            app_base[-1] if app_base else "templates/base.html")

    def set_title_with_prefix(self):
        prefix = self.context.title_prefix
        title = self.context.title
        if prefix and title and not title.startswith(prefix):
            self.context.title = '{0} - {1}'.format(prefix, title)

    def update_website_context(self):
        # apply context from hooks
        for method in frappe.get_hooks('update_website_context'):
            values = frappe.get_attr(method)(self.context)
            if values:
                self.context.update(values)
|
Python
| 0
|
@@ -1199,16 +1199,101 @@
.context
+%0A%09%09self.context.canonical = frappe.utils.get_url(frappe.utils.escape_html(self.path))
%0A%0A%09%09# co
|
8780619b94bf58a21eeefbd0c4e867bf0b4eb4a8
|
Remove obselete params
|
avogadro/model_params.py
|
avogadro/model_params.py
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import copy
# Base CLA model parameters; getModelParams() deep-copies this and plugs in
# the encoder for the predicted field.
MODEL_PARAMS = {
    "aggregationInfo": {
        "seconds": 0,
        "fields": [],
        "months": 0,
        "days": 0,
        "years": 0,
        "hours": 0,
        "microseconds": 0,
        "weeks": 0,
        "minutes": 0,
        "milliseconds": 0
    },
    "model": "CLA",
    "version": 1,
    "predictAheadTime": None,
    "modelParams": {
        "sensorParams": {
            "verbosity": 0,
            "encoders": {
                "timestamp_dayOfWeek": None,
                "timestamp_timeOfDay": {
                    "type": "DateEncoder",
                    "timeOfDay": [21, 5.4864773611134598],
                    "fieldname": "timestamp",
                    "name": "timestamp"
                },
                "timestamp_weekend": None
            },
            "sensorAutoReset": None
        },
        "spParams": {
            "spatialImp": "cpp",
            "columnCount": 2048,
            "synPermInactiveDec": 0.00065,
            "randomSP": 0,
            "inputWidth": 0,
            "spVerbosity": 0,
            "synPermActiveInc": 0.001,
            "synPermConnected": 0.1,
            "numActiveColumnsPerInhArea": 40,
            "seed": 1956,
            "potentialPct": 0.8,
            "globalInhibition": 1,
            "useHighTier": 0,
            "maxBoost": 1.0
        },
        "trainSPNetOnlyIfRequested": False,
        "clParams": {
            "alpha": 0.0068717199878650798,
            "regionName": "CLAClassifierRegion",
            "steps": "1",
            "clVerbosity": 0
        },
        "tpParams": {
            "columnCount": 2048,
            "activationThreshold": 13,
            "pamLength": 1,
            "cellsPerColumn": 32,
            "permanenceInc": 0.1,
            "minThreshold": 11,
            "verbosity": 0,
            "maxSynapsesPerSegment": 32,
            "outputType": "normal",
            "globalDecay": 0.0,
            "initialPerm": 0.21,
            "permanenceDec": 0.1,
            "seed": 1960,
            "maxAge": 0,
            "newSynapseCount": 20,
            "maxSegmentsPerCell": 128,
            "temporalImp": "cpp",
            "inputWidth": 2048
        },
        "anomalyParams": {
            "anomalyCacheRecords": None,
            "autoDetectThreshold": None,
            "autoDetectWaitRecords": 5030
        },
        "spEnable": True,
        "inferenceType": "TemporalAnomaly",
        "tpEnable": True,
        "clEnable": False
    }
}


def getModelParams(encoderParams, predictedField):
    """Return a model-params dict with the predicted field's encoder plugged in.

    :param encoderParams: dict mapping field name -> encoder parameter dict;
        the ``fieldname``/``name`` entries must match the outer key
        (e.g. ``CPUPercent``).
    :param predictedField: name of the predicted field; must exactly match a
        key (and the ``fieldname``) in ``encoderParams``.
    :returns: a new dict of model parameters (MODEL_PARAMS is left untouched).
    :rtype: dict
    """
    # Deep copy so the shared template is never mutated between models.
    modelParams = copy.deepcopy(MODEL_PARAMS)
    encoders = modelParams["modelParams"]["sensorParams"]["encoders"]
    encoders[predictedField] = encoderParams[predictedField]
    return modelParams
|
Python
| 0.000002
|
@@ -1778,29 +1778,8 @@
65,%0A
- %22randomSP%22: 0,%0A
@@ -2005,32 +2005,8 @@
1,%0A
- %22useHighTier%22: 0,%0A
|
6a7bc9e7dacd30b27b48d37763c47b2419aca2a9
|
Change the imports to be Python3 compatible
|
pyipinfodb/pyipinfodb.py
|
pyipinfodb/pyipinfodb.py
|
#!/usr/bin/env python
"""
Simple python wrapper around the IPInfoDB API.
"""
import json
from urllib import urlencode
import urllib2
import socket
class IPInfo() :
    """Thin client for the IPInfoDB geolocation API (Python 2 urllib2 style)."""

    def __init__(self, apikey):
        # API key passed along with every request.
        self.apikey = apikey

    def get_ip_info(self, baseurl, ip=None):
        """
        Same as get_city and get_country, but a baseurl is required.
        This is for if you want to use a different server that uses
        the php scripts on ipinfodb.com.
        """
        passdict = {'format': 'json', 'key': self.apikey}
        if ip:
            try:
                # allows user to enter in domain instead of ip
                passdict['ip'] = socket.gethostbyaddr(ip)[2][0]
            except socket.herror:
                # if domain is not found, just use input
                # NOTE(review): socket.gaierror can also be raised here and
                # is not caught — confirm whether it should be handled too.
                passdict['ip'] = ip
        url = baseurl + "?" + urlencode(passdict)
        # NOTE(review): urlobj is not closed if read() raises; consider
        # contextlib.closing() when this module is modernized.
        urlobj = urllib2.urlopen(url)
        data = urlobj.read()
        urlobj.close()
        datadict = json.loads(data)
        return datadict

    def get_country(self, ip=None):
        """
        Gets the location with the context of the country of the given IP.
        If no IP is given, then the location of the client is given.
        The timezone option defaults to False, to spare the server some queries.
        """
        baseurl = 'http://api.ipinfodb.com/v3/ip-country/'
        return self.get_ip_info(baseurl, ip)

    def get_city(self, ip=None):
        """
        Gets the location with the context of the city of the given IP.
        If no IP is given, then the location of the client is given.
        The timezone option defaults to False, to spare the server some queries.
        """
        baseurl = 'http://api.ipinfodb.com/v3/ip-city/'
        return self.get_ip_info(baseurl, ip)
|
Python
| 0.99999
|
@@ -88,16 +88,25 @@
rt json%0A
+try:%0A
from url
@@ -126,22 +126,147 @@
lencode%0A
-import
+except ImportError:%0A from urllib.parse import urlencode%0Atry:%0A import urllib2%0Aexcept ImportError:%0A import urllib.request as
urllib2
|
305e54c328cf212e01a3af7cec7b940894044e55
|
Use float, not int for random WPM
|
gen_test.py
|
gen_test.py
|
import math
import numpy
import random
from demodulate.cfg import *
def gen_test_data():
    """Generate a sine-keyed float32 sample buffer for morse-code 'A'.

    A random words-per-minute rate determines element length; samples are
    phase values where the pattern is keyed on, then passed through sin().

    :returns: numpy.float32 array of length samples_per_element * len(pattern)
    """
    pattern = [1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0]  # morse code 'A'
    cycles_per_sample = MORSE_FREQ / SAMPLE_FREQ
    radians_per_sample = cycles_per_sample * 2 * math.pi
    # FIX: draw WPM as a float so test data covers non-integer rates too.
    WPM = random.uniform(2, 20)
    elements_per_second = WPM * 50.0 / 60.0
    samples_per_element = int(SAMPLE_FREQ / elements_per_second)
    length = samples_per_element * len(pattern)
    # numpy.empty returns uninitialized memory, so every slot below is
    # overwritten before use.
    data = numpy.empty(length, dtype=numpy.float32)
    # FIX: range instead of the Python-2-only xrange.
    for i in range(length):
        keyed = pattern[int(i / samples_per_element)]
        data[i] = 0 if not keyed else (radians_per_sample * i)
    data = numpy.sin(data)
    return data
|
Python
| 0.000002
|
@@ -253,15 +253,15 @@
dom.
-randint
+uniform
(2,2
|
dfb1784009549829a9a9bb1b72be51dacd44ec99
|
Update auth.py
|
azurecloudify/auth.py
|
azurecloudify/auth.py
|
import requests
import json
import urllib2
from cloudify import ctx
import constants
def get_token_from_client_credentials():
    """Obtain an Azure AD OAuth2 access token via the client-credentials grant.

    Reads client_id/password/tenant_id from the Cloudify node properties and
    POSTs to the tenant's token endpoint.

    :returns: the bearer access-token string from the token response.
    """
    client_id = ctx.node.properties['client_id']
    client_secret = ctx.node.properties['password']
    tenant_id = ctx.node.properties['tenant_id']
    endpoints = 'https://login.microsoftonline.com/'+tenant_id+'/oauth2/token'
    payload = {
        'grant_type': 'client_credentials',
        'client_id': client_id,
        'client_secret': client_secret,
        'resource': constants.resource,
    }
    response = requests.post(endpoints, data=payload).json()
    # BUG FIX: the original `response['u'access_token'']` was a syntax error;
    # the OAuth2 token response keys this value as "access_token".
    return response['access_token']
"""
def _generate_credentials(**_):
client_id=ctx.node.properties['client_id']
tenant_id=ctx.node.properties['tenant_id']
username=ctx.node.properties['username']
password=ctx.node.properties['password']
url='https://login.microsoftonline.com/'+tenant_id+'/oauth2/token'
headers ={"Content-Type":"application/x-www-form-urlencoded"}
body = "grant_type=password&username="+username+"&password="+password+"&client_id="+client_id+"&resource=https://management.core.windows.net/"
req = Request(method="POST",url=url,data=body)
req_prepped = req.prepare()
s = Session()
res = Response()
res = s.send(req_prepped)
s=res.content
end_of_leader = s.index('access_token":"') + len('access_token":"')
start_of_trailer = s.index('"', end_of_leader)
token=s[end_of_leader:start_of_trailer]
credentials = "Bearer " + token
head = {"Content-Type": "application/json", "Authorization": credentials}
return head
"""
|
Python
| 0.000001
|
@@ -615,11 +615,9 @@
nse%5B
-'u'
+%22
acce
@@ -628,10 +628,11 @@
oken
-''
+ :%22
%5D%0A%0A%0A
|
2882da81b17c61e6421b96d612f31b3cfd6a88bb
|
Update vnet.py
|
azurecloudify/vnet.py
|
azurecloudify/vnet.py
|
########
# Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# Built-in Imports
import requests
from requests import Request,Session,Response
import json
import constants
import sys
import os
from cloudify.exceptions import NonRecoverableError
from cloudify import ctx
from cloudify.decorators import operation
@operation
def creation_validation(**_):
    """Validate that every required vnet node property is present."""
    for property_key in constants.VNET_REQUIRED_PROPERTIES:
        # BUG FIX: the helper in this module is `_validate_node_properties`
        # (plural); the singular `_validate_node_property` does not exist and
        # raised NameError at runtime.
        _validate_node_properties(property_key, ctx.node.properties)
@operation
#vnet:
def create_vnet(**_):
    """Create the virtual network '<vm_name>_vnet' via the Azure REST API.

    Validates required properties, PUTs the vnet definition, and exits the
    process with status 1 if the request raises.
    """
    for property_key in constants.VNET_REQUIRED_PROPERTIES:
        _validate_node_properties(property_key, ctx.node.properties)

    vm_name = ctx.node.properties['vm_name']
    resource_group_name = vm_name+'_resource_group'
    vnet_name = vm_name+'_vnet'
    location = ctx.node.properties['location']
    subscription_id = ctx.node.properties['subscription_id']
    vnet_url = constants.azure_url+'/subscriptions/'+subscription_id+'/resourceGroups/'+resource_group_name+'/providers/microsoft.network/virtualNetworks/'+vnet_name+'?api-version='+constants.api_version

    ctx.logger.info("Checking availability of virtual network: " + vnet_name)
    # FIX: removed the constant `if 1:` wrapper whose else-branch was dead code.
    try:
        ctx.logger.info("Creating new virtual network: " + vnet_name)
        vnet_params = json.dumps({"name":vnet_name, "location": location,"properties": {"addressSpace": {"addressPrefixes": constants.vnet_address_prefixes},"subnets": [{"name": constants.subnet_name, "properties": {"addressPrefix": constants.address_prefix}}]}})
        response_vnet = requests.put(url=vnet_url, data=vnet_params, headers=constants.headers)
        print(response_vnet.text)
    except Exception:
        # FIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt.
        ctx.logger.info("Virtual Network " + vnet_name + "could not be created.")
        sys.exit(1)
@operation
def delete_vnet(**_):
    """Delete the virtual network '<vm_name>_vnet' via the Azure REST API.

    Exits the process with status 1 if the request raises.
    """
    vm_name = ctx.node.properties['vm_name']
    vnet_name = vm_name+'_vnet'
    resource_group_name = vm_name+'_resource_group'
    subscription_id = ctx.node.properties['subscription_id']

    ctx.logger.info("Checking availability of virtual network: " + vnet_name)
    # FIX: removed the constant `if 1:` wrapper whose else-branch was dead code.
    try:
        ctx.logger.info("Deleting the virtual network: " + vnet_name)
        vnet_url = 'https://management.azure.com/subscriptions/'+subscription_id+'/resourceGroups/'+resource_group_name+'/providers/microsoft.network/virtualNetworks/'+vnet_name+'?api-version='+constants.api_version
        response_vnet = requests.delete(url=vnet_url, headers=constants.headers)
        print(response_vnet.text)
    except Exception:
        # FIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt.
        ctx.logger.info("Virtual Network " + vnet_name + " could not be deleted.")
        sys.exit(1)
"""
def _generate_credentials(**_):
client_id=ctx.node.properties['client_id']
tenant_id=ctx.node.properties['tenant_id']
username=ctx.node.properties['username']
password=ctx.node.properties['password']
url='https://login.microsoftonline.com/'+tenant_id+'/oauth2/token'
headers ={"Content-Type":"application/x-www-form-urlencoded"}
body = "grant_type=password&username="+username+"&password="+password+"&client_id="+client_id+"&resource=https://management.core.windows.net/"
req = Request(method="POST",url=url,data=body)
req_prepped = req.prepare()
s = Session()
res = Response()
res = s.send(req_prepped)
s=res.content
end_of_leader = s.index('access_token":"') + len('access_token":"')
start_of_trailer = s.index('"', end_of_leader)
token=s[end_of_leader:start_of_trailer]
credentials = "Bearer " + token
head = {"Content-Type": "application/json", "Authorization": credentials}
return head
"""
def _validate_node_properties(key, ctx_node_properties):
if key not in ctx_node_properties:
raise NonRecoverableError('{0} is a required input. Unable to create.'.format(key))
|
Python
| 0.000001
|
@@ -1019,17 +1019,19 @@
_propert
-y
+ies
(propert
|
2fdbf644d0c3daf67d2bc21598df289f3e901374
|
Revert ill-conceived "Allow comma-separated services for convenience"
|
aws_list_all/__main__.py
|
aws_list_all/__main__.py
|
#!/usr/bin/env python
from __future__ import print_function
import os
from resource import getrlimit, setrlimit, RLIMIT_NOFILE
from argparse import ArgumentParser
from sys import exit, stderr
from .introspection import (
get_listing_operations, get_services, get_verbs, introspect_regions_for_service, recreate_caches
)
from .query import do_list_files, do_query
def increase_limit_nofiles():
    """Raise the soft RLIMIT_NOFILE toward a target that covers one
    connection per (service, region) pair, warning when the hard limit is
    too low to reach it."""
    # This should be comfortably larger than the product of services and regions
    desired_limit = 6000
    soft_limit, hard_limit = getrlimit(RLIMIT_NOFILE)

    if hard_limit < desired_limit:
        print("-" * 80, file=stderr)
        print(
            "WARNING!\n"
            "Your system limits the number of open files and network connections to {}.\n"
            "This may lead to failures during querying.\n"
            "Please increase the hard limit of open files to at least {}.\n"
            "The configuration for hard limits is often found in /etc/security/limits.conf".format(
                hard_limit, desired_limit
            ),
            file=stderr
        )
        print("-" * 80, file=stderr)
        print(file=stderr)

    target_soft_limit = min(desired_limit, hard_limit)
    if target_soft_limit > soft_limit:
        print("Increasing the open connection limit \"nofile\" from {} to {}.".format(soft_limit, target_soft_limit))
        setrlimit(RLIMIT_NOFILE, (target_soft_limit, hard_limit))
    print("")
def main():
    """Parse CLI arguments to either list services, operations, queries or existing json files"""
    parser = ArgumentParser(
        prog='aws_list_all',
        description=(
            'List AWS resources on one account across regions and services. '
            'Saves result into json files, which can then be passed to this tool again '
            'to list the contents.'
        )
    )
    subparsers = parser.add_subparsers(
        description='List of subcommands. Use <subcommand> --help for more parameters',
        dest='command',
        metavar='COMMAND'
    )

    # Query is the main subcommand, so we put it first
    query = subparsers.add_parser('query', description='Query AWS for resources', help='Query AWS for resources')
    query.add_argument(
        '-s',
        '--service',
        action='append',
        help='Restrict querying to the given service (can be specified multiple times)'
    )
    query.add_argument(
        '-r',
        '--region',
        action='append',
        help='Restrict querying to the given region (can be specified multiple times)'
    )
    query.add_argument(
        '-o',
        '--operation',
        action='append',
        help='Restrict querying to the given operation (can be specified multiple times)'
    )
    query.add_argument('-p', '--parallel', default=32, type=int, help='Number of request to do in parallel')
    query.add_argument('-d', '--directory', default='.', help='Directory to save result listings to')
    query.add_argument('-v', '--verbose', action='count', help='Print detailed info during run')
    query.add_argument('-c', '--profile', help='Use a specific .aws/credentials profile.')

    # Once you have queried, show is the next most important command. So it comes second
    show = subparsers.add_parser(
        'show', description='Show a summary or details of a saved listing', help='Display saved listings'
    )
    show.add_argument('listingfile', nargs='*', help='listing file(s) to load and print')
    show.add_argument('-v', '--verbose', action='count', help='print given listing files with detailed info')

    # Introspection debugging is not the main function. So we put it all into a subcommand.
    introspect = subparsers.add_parser(
        'introspect',
        description='Print introspection debugging information',
        help='Print introspection debugging information'
    )
    introspecters = introspect.add_subparsers(
        description='Pieces of debug information to collect. Use <DETAIL> --help for more parameters',
        dest='introspect',
        metavar='DETAIL'
    )
    introspecters.add_parser(
        'list-services',
        description='Lists short names of AWS services that the current boto3 version has clients for.',
        help='List available AWS services'
    )
    introspecters.add_parser(
        'list-service-regions',
        description='Lists regions where AWS services are said to be available.',
        help='List AWS service regions'
    )
    ops = introspecters.add_parser(
        'list-operations',
        description='List all discovered listing operations on all (or specified) services',
        help='List discovered listing operations'
    )
    ops.add_argument(
        '-s',
        '--service',
        action='append',
        help='Only list discovered operations of the given service (can be specified multiple times)'
    )
    introspecters.add_parser('debug', description='Debug information', help='Debug information')

    # Finally, refreshing the service/region caches comes last.
    caches = subparsers.add_parser(
        'recreate-caches',
        description=(
            'The list of AWS services and endpoints can change over time. '
            'This command (re-)creates the caches for this data to allow you to'
            'list services in regions where they have not been available previously.'
            'The cache lives in your OS-dependent cache directory, e.g. ~/.cache/aws_list_all/'
        ),
        help='Recreate service and region caches'
    )
    caches.add_argument(
        '--update-packaged-values',
        action='store_true',
        help=(
            'Instead of writing to the cache, update files packaged with aws-list-all. '
            'Use this only if you run a copy from git.'
        )
    )
    args = parser.parse_args()

    # Services may be given comma-separated ("-s ec2,s3"); expand them once
    # so every subcommand sees the same normalized list.
    services = []
    if args.service:
        for service in args.service:
            services.extend(service.split(","))

    if args.command == 'query':
        if args.directory:
            try:
                os.makedirs(args.directory)
            except OSError:
                pass
            os.chdir(args.directory)
        increase_limit_nofiles()
        do_query(
            services or get_services(),
            args.region,
            args.operation,
            verbose=args.verbose or 0,
            parallel=args.parallel,
            selected_profile=args.profile
        )
    elif args.command == 'show':
        if args.listingfile:
            increase_limit_nofiles()
            do_list_files(args.listingfile, verbose=args.verbose or 0)
        else:
            show.print_help()
            return 1
    elif args.command == 'introspect':
        if args.introspect == 'list-services':
            for service in get_services():
                print(service)
        elif args.introspect == 'list-service-regions':
            introspect_regions_for_service()
            return 0
        elif args.introspect == 'list-operations':
            # CONSISTENCY FIX: use the comma-expanded `services` list here as
            # well, so "-s ec2,s3" behaves the same as in the query subcommand.
            for service in services or get_services():
                for operation in get_listing_operations(service):
                    print(service, operation)
        elif args.introspect == 'debug':
            for service in get_services():
                for verb in get_verbs(service):
                    print(service, verb)
        else:
            introspect.print_help()
            return 1
    elif args.command == 'recreate-caches':
        increase_limit_nofiles()
        recreate_caches(args.update_packaged_values)
    else:
        parser.print_help()
        return 1
if __name__ == '__main__':
exit(main())
|
Python
| 0.000014
|
@@ -5820,132 +5820,8 @@
gs()
-%0A services = %5B%5D%0A if args.service:%0A for service in args.service:%0A services.extend(service.split(%22,%22))
%0A%0A
@@ -6049,32 +6049,82 @@
limit_nofiles()%0A
+ services = args.service or get_services()%0A
do_query
@@ -6145,34 +6145,16 @@
services
- or get_services()
,%0A
|
206c513a49b67dfaf21531573626ba139be51b18
|
Update vnet.py
|
azurecloudify/vnet.py
|
azurecloudify/vnet.py
|
########
# Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# Built-in Imports
import requests
from requests import Request,Session,Response
import json
import constants
import sys
import os
from cloudify.exceptions import NonRecoverableError
from azure import WindowsAzureConflictError
from azure import WindowsAzureMissingResourceError
from cloudify import ctx
from cloudify.decorators import operation
@operation
#vnet:
def create_vnet(**_):
    """Create the virtual network '<vm_name>_vnet' via the Azure REST API,
    authenticating with a freshly generated bearer token."""
    for property_key in constants.VNET_REQUIRED_PROPERTIES:
        _validate_node_properties(property_key, ctx.node.properties)

    vm_name=ctx.node.properties['vm_name']
    resource_group_name = vm_name+'_resource_group'
    vnet_name = vm_name+'_vnet'
    location = ctx.node.properties['location']
    subscription_id = ctx.node.properties['subscription_id']
    vnet_url = constants.azure_url+'/subscriptions/'+subscription_id+'/resourceGroups/'+resource_group_name+'/providers/microsoft.network/virtualNetworks/'+vnet_name+'?api-version='+constants.api_version

    ctx.logger.info("Checking availability of virtual network: " + vnet_name)
    # NOTE(review): `if 1:` is always true, so the else-branch below is dead
    # code — confirm whether a real availability check was intended here.
    if 1:
        try:
            ctx.logger.info("Creating new virtual network: " + vnet_name)
            vnet_params=json.dumps({"name":vnet_name, "location": location,"properties": {"addressSpace": {"addressPrefixes": constants.vnet_address_prefixes},"subnets": [{"name": constants.subnet_name, "properties": {"addressPrefix": constants.address_prefix}}]}})
            response_vnet = requests.put(url=vnet_url, data=vnet_params, headers=_generate_credentials())
            print response_vnet.text
        # NOTE(review): requests.put does not raise WindowsAzureConflictError
        # (that is an azure-SDK exception); an HTTP 409 comes back as a normal
        # Response, so this handler likely never fires — verify intent.
        except WindowsAzureConflictError:
            ctx.logger.info("Virtual Network " + vnet_name + "could not be created.")
            sys.exit(1)
    else:
        ctx.logger.info("Virtual Network" + vnet_name + "has already been provisioned by another user.")
@operation
def delete_vnet(**_):
    """Delete the virtual network '<vm_name>_vnet' via the Azure REST API."""
    vm_name=ctx.node.properties['vm_name']
    vnet_name = vm_name+'_vnet'
    resource_group_name = vm_name+'_resource_group'
    subscription_id = ctx.node.properties['subscription_id']

    ctx.logger.info("Checking availability of virtual network: " + vnet_name)
    # NOTE(review): `if 1:` is always true, so the else-branch below is dead code.
    if 1:
        try:
            ctx.logger.info("Deleting the virtual network: " + vnet_name)
            vnet_url = 'https://management.azure.com/subscriptions/'+subscription_id+'/resourceGroups/'+resource_group_name+'/providers/microsoft.network/virtualNetworks/'+vnet_name+'?api-version='+constants.api_version
            response_vnet = requests.delete(url=vnet_url,headers=_generate_credentials())
            print response_vnet.text
        # NOTE(review): requests.delete does not raise
        # WindowsAzureMissingResourceError; an HTTP 404 comes back as a normal
        # Response, so this handler likely never fires — verify intent.
        except WindowsAzureMissingResourceError:
            ctx.logger.info("Virtual Network " + vnet_name + " could not be deleted.")
            sys.exit(1)
    else:
        ctx.logger.info("Virtual Network " + vnet_name + " does not exist.")
def _generate_credentials(**_):
    """Obtain an Azure AD bearer token via the resource-owner-password grant
    and return headers for authenticated management-API requests."""
    client_id=ctx.node.properties['client_id']
    tenant_id=ctx.node.properties['tenant_id']
    username=ctx.node.properties['username']
    password=ctx.node.properties['password']
    url='https://login.microsoftonline.com/'+tenant_id+'/oauth2/token'
    # NOTE(review): this `headers` local is never used — the prepared Request
    # below is sent without it; confirm whether it should be passed along.
    headers ={"Content-Type":"application/x-www-form-urlencoded"}
    body = "grant_type=password&username="+username+"&password="+password+"&client_id="+client_id+"&resource=https://management.core.windows.net/"
    req = Request(method="POST",url=url,data=body)
    req_prepped = req.prepare()
    s = Session()
    res = Response()
    res = s.send(req_prepped)
    # `s` is rebound from the Session to the raw response body here;
    # the token is extracted by scanning for the access_token JSON key.
    # NOTE(review): parsing JSON by string indexing is fragile — res.json()
    # would be safer; also assumes `content` is str (Python 2).
    s=res.content
    end_of_leader = s.index('access_token":"') + len('access_token":"')
    start_of_trailer = s.index('"', end_of_leader)
    token=s[end_of_leader:start_of_trailer]
    credentials = "Bearer " + token
    head = {"Content-Type": "application/json", "Authorization": credentials}
    return head
def _validate_node_properties(key, ctx_node_properties):
if key not in ctx_node_properties:
raise NonRecoverableError('{0} is a required input. Unable to create.'.format(key))
|
Python
| 0.000001
|
@@ -3440,16 +3440,19 @@
ist.%22)%0A%0A
+%22%22%22
%0Adef _ge
@@ -4411,16 +4411,19 @@
rn head%0A
+%22%22%22
%0A%0A%0Adef _
|
89a18ea91fb2d095541510155dcdf94ad76b8374
|
Fix broken lookdev loader
|
mindbender/maya/loaders/mindbender_look.py
|
mindbender/maya/loaders/mindbender_look.py
|
import json
from mindbender import api
from mindbender.maya import lib, pipeline
from maya import cmds
class LookLoader(api.Loader):
    """Specific loader for lookdev"""

    families = ["mindbender.look"]

    def process(self, asset, subset, version, representation):
        """Reference the look file into the scene, containerise the new
        nodes, and apply the shader relationships from the sidecar .json."""
        fname = representation["path"].format(
            dirname=version["path"].format(root=api.registered_root()),
            format=representation["format"]
        )

        namespace = asset["name"] + "_"
        name = lib.unique_name(subset["name"])

        with lib.maintained_selection():
            nodes = cmds.file(fname,
                              namespace=namespace,
                              reference=True,
                              returnNewNodes=True)

        # Containerising
        pipeline.containerise(name=name,
                              namespace=namespace,
                              nodes=nodes,
                              version=version)

        # Assign shaders
        representation = next(
            (rep for rep in version["representations"]
             if rep["format"] == ".json"), None)

        if representation is None:
            cmds.warning("Look development asset has no relationship data.")
        else:
            # BUG FIX: version["path"] is a template containing "{root}"; it
            # must be expanded with the registered root (as done for `fname`
            # above), otherwise open() receives a path with a literal
            # "{root}" placeholder.
            path = representation["path"].format(
                dirname=version["path"].format(root=api.registered_root()),
                format=representation["format"]
            )

            with open(path) as f:
                relationships = json.load(f)

            lib.apply_shaders(relationships)

        return cmds.referenceQuery(nodes[0], referenceNode=True)
|
Python
| 0.000001
|
@@ -201,16 +201,19 @@
der.look
+dev
%22%5D%0A%0A
@@ -1348,16 +1348,51 @@
%5B%22path%22%5D
+.format(root=api.registered_root())
,%0A
|
33fef0560e14f94bab7d74d0c6a62d2016487822
|
Tidy urls.py
|
app/urls.py
|
app/urls.py
|
from django.conf.urls.defaults import *
from django.contrib import admin
from django.contrib.auth.views import login
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.conf import settings
from utils import installed
from registration.views import register
from sso.forms import RegistrationFormUniqueEmailBlocked
admin.autodiscover()

# Core URL patterns: django-registration (with a custom blocked-email form),
# SSO, EVE API endpoints and the JSON API.
urlpatterns = patterns('',
    ('', include('registration.backends.default.urls')),
    (r'^register/$', register, {'backend': 'registration.backends.default.DefaultBackend', 'form_class': RegistrationFormUniqueEmailBlocked}),
    ('', include('sso.urls')),
    (r'^eve/', include('eve_api.urls')),
    (r'^eveapi/', include('eve_proxy.urls')),
    (r'^api/', include('api.urls')),
)

# Optional apps: each URL group is mounted only when its app is installed.
if installed('reddit'):
    urlpatterns += patterns('',
        ('', include('reddit.urls')),
    )

if installed('hr'):
    urlpatterns += patterns('',
        (r'^hr/', include('hr.urls')),
    )

if installed('groups'):
    urlpatterns += patterns('',
        (r'^groups/', include('groups.urls')),
    )

if installed('sentry'):
    urlpatterns += patterns('',
        (r'^sentry/', include('sentry.web.urls')),
    )

if installed('nexus'):
    import nexus
    nexus.autodiscover()
    urlpatterns += patterns('',
        (r'^nexus/', include(nexus.site.urls)),
    )

# Serve collected static files directly only in development.
if settings.DEBUG:
    urlpatterns += staticfiles_urlpatterns()
|
Python
| 0.000002
|
@@ -1348,16 +1348,17 @@
BUG:%0A
+
urlpatte
|
d47b3722480675c971cdc0a8af4e923703fd261d
|
Add the not-authorized route
|
backdrop/admin/app.py
|
backdrop/admin/app.py
|
from os import getenv
from functools import wraps
from flask import Flask, jsonify, url_for, request, \
session, render_template, flash, redirect
from .. import statsd
from ..core import cache_control, log_handler, database
from ..core.bucket import Bucket
from ..core.errors import ParseError, ValidationError
from ..core.log_handler \
import create_request_logger, create_response_logger
from ..core.repository \
import BucketConfigRepository, UserConfigRepository
from ..core.flaskutils import BucketConverter
from ..core.upload import create_parser
from ..write.signonotron2 import Signonotron2
from ..write.uploaded_file import UploadedFile, FileUploadError
# Environment name selects which config object to load below.
GOVUK_ENV = getenv("GOVUK_ENV", "development")

app = Flask("backdrop.admin.app", static_url_path="/static")

# Configuration
app.config.from_object(
    "backdrop.admin.config.{}".format(GOVUK_ENV))

log_handler.set_up_logging(app, GOVUK_ENV)

# Allow <bucket:...> converters in route definitions.
app.url_map.converters["bucket"] = BucketConverter

db = database.Database(
    app.config['MONGO_HOST'],
    app.config['MONGO_PORT'],
    app.config['DATABASE_NAME']
)

# Shared repositories backed by the Mongo database above.
bucket_repository = BucketConfigRepository(db)
user_repository = UserConfigRepository(db)
# TODO: move this out into a helper
def protected(f):
    """Decorator: redirect to the OAuth sign-in page unless a user is in session."""
    @wraps(f)
    def verify_user_logged_in(*args, **kwargs):
        if "user" in session:
            return f(*args, **kwargs)
        return redirect(url_for('oauth_sign_in'))
    return verify_user_logged_in
@app.errorhandler(500)
@app.errorhandler(405)
@app.errorhandler(404)
def exception_handler(e):
    """Log the exception, record an error metric, and render the error page."""
    app.logger.exception(e)

    bucket_name = getattr(e, 'bucket_name', request.path)
    statsd.incr("write.error", bucket=bucket_name)

    status_code = getattr(e, 'code', 500)
    error_name = getattr(e, 'name', 'Internal Error')
    body = render_template("error.html", name=error_name, bucket_name=bucket_name)
    return body, status_code
@app.before_first_request
def setup_oauth_service():
    """Build the single sign-on OAuth client from app config.

    Runs before the first request (not at import time) so url_for() can
    resolve the authorized-callback route.
    """
    app.oauth_service = Signonotron2(
        client_id=app.config['OAUTH_CLIENT_ID'],
        client_secret=app.config['OAUTH_CLIENT_SECRET'],
        base_url=app.config['OAUTH_BASE_URL'],
        redirect_url=app.config['BACKDROP_ADMIN_UI_HOST']
        + url_for("oauth_authorized")
    )
@app.after_request
def prevent_clickjacking(response):
    """Set X-Frame-Options on every response so the UI cannot be framed
    cross-origin (clickjacking protection)."""
    response.headers["X-Frame-Options"] = "SAMEORIGIN"
    return response
@app.route('/', methods=['GET'])
@cache_control.set("private, must-revalidate")
def index():
    """
    Render the landing page. The representation is private to the
    logged-in user (with their own buckets).
    """
    user_email = session.get('user', {}).get('email')
    user_config = user_repository.retrieve(user_email) if user_email else None
    return render_template("index.html", user_config=user_config)
@app.route('/_status', methods=['GET'])
@cache_control.nocache
def health_check():
return jsonify(status='ok', message='all ok')
def _create_session_user(name, email):
session.update(
{"user": {
"name": name,
"email": email
}})
if app.config.get('ALLOW_TEST_SIGNIN', True):
@app.route('/sign-in/test', methods=['GET'])
def oauth_test_signin():
_create_session_user(request.args.get('user'),
request.args.get('email'))
return "logged in as %s" % session.get('user'), 200
@app.route('/authorized', methods=['GET'])
@cache_control.nocache
def oauth_authorized():
"""
The result of this is a redirect, which shouldn't be cached in
case their permissions get changed, etc.
"""
auth_code = request.args.get('code')
if not auth_code:
abort(400)
access_token = app.oauth_service.exchange(auth_code)
user_details, can_see_backdrop = \
app.oauth_service.user_details(access_token)
if can_see_backdrop is None:
flash("Could not authenticate with single sign on.",
category="error")
return redirect(url_for("not_authorized"))
if can_see_backdrop is False:
flash("You are signed in to your GOV.UK account, "
"but you don't have permissions to use this application.")
return redirect(url_for("not_authorized"))
_create_session_user(user_details["user"]["name"],
user_details["user"]["email"])
flash("You were successfully signed in", category="success")
return redirect(url_for("user_route"))
@app.route("/sign-in")
@cache_control.nocache
def oauth_sign_in():
"""
This returns a redirect to the OAuth provider, so we shouldn't
allow this response to be cached.
"""
return redirect(app.oauth_service.authorize())
@app.route("/sign-out")
@cache_control.set("private, must-revalidate")
def oauth_sign_out():
session.clear()
flash("You have been signed out of Backdrop", category="success")
return render_template("signon/signout.html",
oauth_base_url=app.config['OAUTH_BASE_URL'])
@app.route('/<bucket:bucket_name>/upload', methods=['GET', 'POST'])
@protected
@cache_control.set("private, must-revalidate")
def upload(bucket_name):
bucket_config = bucket_repository.retrieve(bucket_name)
user_config = user_repository.retrieve(
session.get("user").get("email"))
if bucket_name not in user_config.buckets:
return abort(404)
if request.method == 'GET':
return render_template(
"upload_{}.html".format(bucket_config.upload_format),
bucket_name=bucket_name)
return _store_data(bucket_config)
def _store_data(bucket_config):
parse_file = create_parser(bucket_config)
bucket = Bucket(db, bucket_config)
expected_errors = (FileUploadError, ParseError, ValidationError)
try:
with UploadedFile(request.files['file']) as uploaded_file:
raw_data = parse_file(uploaded_file.file_stream())
bucket.parse_and_store(raw_data)
except expected_errors as e:
app.logger.error('Upload error: {}'.format(e.message))
return render_template('upload_error.html',
message=e.message,
bucket_name=bucket.name), 400
return render_template('upload_ok.html')
def start(port):
app.debug = True
app.run('0.0.0.0', port=port)
|
Python
| 0
|
@@ -4433,24 +4433,158 @@
_route%22))%0A%0A%0A
+@app.route(%22/not-authorized%22)%0A@cache_control.nocache%0Adef not_authorized():%0A return render_template(%22signon/not_authorized.html%22)%0A%0A%0A
@app.route(%22
|
8a9422f7c323394af04f90a43a078098197076b9
|
fix small bug in dynamic urls.py
|
app/urls.py
|
app/urls.py
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^about', views.about, name='about'),
url(r'^test', views.test, name='test'),
url(r'^champions/$', views.champions),
url(r'^champions/.*', views.champion),
url(r'^champions/*', views.champions),
url(r'^items/$', views.items),
url(r'^items/.*', views.item),
url(r'^items/*', views.items),
url(r'^players/$', views.players),
url(r'^players/.*', views.balls),
url(r'^players/*', views.players),
url(r'^.*/$', views.index)
]
|
Python
| 0.000001
|
@@ -516,13 +516,14 @@
ews.
-balls
+player
),%0A
|
2a1b5dbbd3e0c78df76d904602f1c4fcc6157a6b
|
Clean up imports
|
mbus/MBusHandle.py
|
mbus/MBusHandle.py
|
from ctypes import Structure, c_uint32, c_uint8, c_void_p, c_int, c_byte
class MBusHandle(Structure):
_fields_ = [("fd", c_int),
("max_data_retry", c_int),
("max_search_retry", c_int),
("purge_first_frame", c_byte),
("is_serial", c_uint8),
("internal", c_void_p * 10)] # pointers
def __str__(self):
return "MBusHandle: XXX"
|
Python
| 0
|
@@ -26,18 +26,8 @@
ure,
- c_uint32,
c_u
|
f1e84cbc99ff77da88a78ead82d5ac0ad1cf603d
|
Implement the "create collection" endpoint
|
backdrop/write/api.py
|
backdrop/write/api.py
|
from os import getenv
from flask import Flask, request, jsonify, g
from flask_featureflags import FeatureFlag
from backdrop import statsd
from backdrop.core.bucket import Bucket
from backdrop.core.flaskutils import BucketConverter
from backdrop.core.repository import (BucketConfigRepository,
UserConfigRepository)
from ..core.errors import ParseError, ValidationError
from ..core import database, log_handler, cache_control
from .validation import bearer_token_is_valid
GOVUK_ENV = getenv("GOVUK_ENV", "development")
app = Flask("backdrop.write.api")
feature_flags = FeatureFlag(app)
# Configuration
app.config.from_object(
"backdrop.write.config.{}".format(GOVUK_ENV))
db = database.Database(
app.config['MONGO_HOSTS'],
app.config['MONGO_PORT'],
app.config['DATABASE_NAME']
)
bucket_repository = BucketConfigRepository(db)
user_repository = UserConfigRepository(db)
log_handler.set_up_logging(app, GOVUK_ENV)
app.url_map.converters["bucket"] = BucketConverter
@app.errorhandler(500)
@app.errorhandler(405)
@app.errorhandler(404)
def exception_handler(e):
app.logger.exception(e)
bucket_name = getattr(g, 'bucket_name', request.path)
statsd.incr("write.error", bucket=bucket_name)
code = getattr(e, 'code', 500)
name = getattr(e, 'name', "Internal Error")
return jsonify(status='error', message=name), code
@app.route('/_status', methods=['GET'])
@cache_control.nocache
def health_check():
if db.alive():
return jsonify(status='ok', message='database seems fine')
else:
return jsonify(status='error',
message='cannot connect to database'), 500
@app.route('/data/<data_group>/<data_type>', methods=['POST'])
@cache_control.nocache
def write_by_group(data_group, data_type):
"""
Write by group/type
e.g. POST https://BACKDROP/data/my-transaction-name/volumetrics
"""
bucket_config = bucket_repository.get_bucket_for_query(
data_group,
data_type)
return _write_to_bucket(bucket_config)
@app.route('/<bucket:bucket_name>', methods=['POST'])
@cache_control.nocache
def post_to_bucket(bucket_name):
bucket_config = bucket_repository.retrieve(name=bucket_name)
return _write_to_bucket(bucket_config)
def _write_to_bucket(bucket_config):
if bucket_config is None:
return jsonify(status="error",
message='Could not find bucket_config'), 404
g.bucket_name = bucket_config.name
auth_header = request.headers.get('Authorization', None)
if not bearer_token_is_valid(bucket_config, auth_header):
statsd.incr("write_api.bad_token", bucket=g.bucket_name)
return jsonify(status='error', message='Forbidden'), 403
try:
data = listify_json(request.json)
bucket = Bucket(db, bucket_config)
bucket.parse_and_store(data)
return jsonify(status='ok')
except (ParseError, ValidationError) as e:
return jsonify(status="error", message=str(e)), 400
def listify_json(data):
if data is None:
raise ValidationError("Request must be JSON")
if isinstance(data, list):
return data
else:
return [data]
def start(port):
# this method only gets run on dev
# app.debug = True
app.run(host='0.0.0.0', port=port)
app.logger.info("Backdrop Write API started")
|
Python
| 0.999014
|
@@ -14,16 +14,28 @@
t getenv
+%0Aimport json
%0A%0Afrom f
@@ -516,16 +516,38 @@
is_valid
+, extract_bearer_token
%0A%0A%0AGOVUK
@@ -2270,24 +2270,24 @@
ucket_name)%0A
-
return _
@@ -2319,16 +2319,1387 @@
nfig)%0A%0A%0A
+@app.route('/data-sets/%3Cdataset_name%3E', methods=%5B'POST'%5D)%0A@cache_control.nocache%0Adef create_collection_for_dataset(dataset_name):%0A if not _allow_create_collection(request.headers.get('Authorization')):%0A return jsonify(status='error',%0A message=%22Forbidden: invalid or no token given.%22), 403%0A%0A if db.collection_exists(dataset_name):%0A return jsonify(status='error',%0A message='Collection exists with that name.'), 400%0A%0A try:%0A data = json.loads(request.data)%0A except ValueError as e:%0A return jsonify(status='error', message=repr(e)), 400%0A else:%0A capped_size = data.get('capped_size', None)%0A%0A if capped_size is None or not isinstance(capped_size, int):%0A return jsonify(%0A status='error',%0A message=%22You must specify an int capped_size of 0 or more%22), 400%0A%0A if capped_size == 0:%0A db.create_uncapped_collection(dataset_name)%0A else:%0A db.create_capped_collection(dataset_name, capped_size)%0A%0A return jsonify(status='ok', message='Created %22%7B%7D%22'.format(dataset_name))%0A%0A%0Adef _allow_create_collection(auth_header):%0A token = extract_bearer_token(auth_header)%0A if token == app.config%5B'CREATE_COLLECTION_ENDPOINT_TOKEN'%5D:%0A return True%0A%0A app.logger.info(%22Bad token for create collection: '%7B%7D'%22.format(token))%0A return False%0A%0A%0A
def _wri
|
5a6a50a7b300354bc62118107ea1337f4f804b91
|
Fix gramatical error in the welcome message
|
addons/email_confirmation/controllers.py
|
addons/email_confirmation/controllers.py
|
# -*- coding: utf-8 -*-
import logging
from openerp.addons.auth_signup.res_users import SignupError
from openerp.addons.auth_signup.controllers.main import AuthSignupHome
from openerp import http, SUPERUSER_ID
from openerp.http import request
from openerp.tools.translate import _
_logger = logging.getLogger(__name__)
class AuthSignupHome(AuthSignupHome):
@http.route('/web/authenticate', type='http', auth='public', website=True)
def web_auth_authenticate(self, *args, **kw):
"""After signing up user confirms his email"""
qcontext = self.get_auth_signup_qcontext()
if not qcontext.get('token') and not qcontext.get('signup_enabled'):
return http.request.not_found()
else:
try:
values = dict((key, qcontext.get(key)) for key in ('login', 'email'))
request.env['res.users'].sudo()._authenticate_after_confirmation(values, qcontext.get('token'))
request.cr.commit()
response = super(AuthSignupHome, self).web_login(*args, **kw)
response.qcontext['message'] = """
Witamy w naszej społeczności! Udało Ci się pomyślnie zarejestrować do naszego systemu.
"""
return response
except (SignupError, AssertionError), e:
qcontext['error'] = _(e.message)
return self.web_login(*args, **kw)
@http.route('/web/signup', type='http', auth='public', website=True)
def web_auth_signup(self, *args, **kw):
"""Need to override, as parent function logs the user in"""
qcontext = self.get_auth_signup_qcontext()
if not qcontext.get('token') and not qcontext.get('signup_enabled'):
return http.request.not_found()
if 'error' not in qcontext and request.httprequest.method == 'POST':
try:
self.do_signup(qcontext)
qcontext['message'] = """
Dziękujemy za rejestrację!<br/><br/>
To mały krok dla Ciebie, ale wielki skok dla nas!<br/>
Nasza społeczność się powiększa!<br/><br/>
<strong>Potwierdź rejestrację klikając w link, który otrzymasz w mailu.</strong>
"""
# do not login user here
except (SignupError, AssertionError), e:
message = e.message
if message.startswith('duplicate key value violates unique constraint "res_users_login_key"'):
message = "Podany adres e-mail jest już używany."
qcontext['error'] = _(message)
return request.render('auth_signup.signup', qcontext)
@http.route('/web/reset_password', type='http', auth='public', website=True)
def web_auth_reset_password(self, *args, **kw):
"""Need to override, as all messages were originally in English."""
qcontext = self.get_auth_signup_qcontext()
if not qcontext.get('token') and not qcontext.get('reset_password_enabled'):
raise werkzeug.exceptions.NotFound()
if 'error' not in qcontext and request.httprequest.method == 'POST':
try:
if qcontext.get('token'):
self.do_signup(qcontext)
return super(AuthSignupHome, self).web_login(*args, **kw)
else:
login = qcontext.get('login')
assert login, "No login provided."
res_users = request.registry.get('res.users')
res_users.reset_password(request.cr, SUPERUSER_ID, login)
qcontext['message'] = _("Wysłaliśmy na Twojego maila wiadomość umożliwiającą zmianę hasła.")
except SignupError:
qcontext['error'] = _("Zmiana hasła nieudana")
_logger.exception('error when resetting password')
except Exception, e:
qcontext['error'] = _(e.message)
return request.render('auth_signup.reset_password', qcontext)
def do_signup(self, qcontext):
""" overriden to include redirect """
values = {key: qcontext.get(key) for key in ('login', 'name', 'password')}
assert all(values.values()), "The form was not properly filled in."
assert values.get('password') == qcontext.get('confirm_password'), "Podane hasła się różnią."
request.env['res.users'].sudo().with_context(
redirect=qcontext.get('redirect'),
no_reset_password=True,
confirm_signup=True
).signup(values, qcontext.get('token'))
request.cr.commit()
|
Python
| 0.000213
|
@@ -1149,17 +1149,26 @@
czno%C5%9Bci!
-
+%3Cbr/%3E%3Cbr/%3E
Uda%C5%82o Ci
@@ -1200,26 +1200,25 @@
wa%C4%87
-do
+w
nasz
-ego
+ym
system
-u
+ie
.%0A
|
6c2e22cc7c0f1b43f4273464635f108ed3b03eb2
|
Fix failing tests on py3k.
|
repobuddy/tests/arg_parser.py
|
repobuddy/tests/arg_parser.py
|
#
# Copyright (C) 2012 Ash (Tuxdude) <tuxdude.github@gmail.com>
#
# This file is part of repobuddy.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/>.
#
import os as _os
import re as _re
import shlex as _shlex
import sys as _sys
import threading as _threading
import time as _time
if _sys.version_info < (2, 7):
import unittest2 as _unittest # pylint: disable=F0401
else:
import unittest as _unittest # pylint: disable=F0401
from repobuddy.arg_parser import ArgParser, ArgParserError
from repobuddy.tests.common import ShellHelper, TestCaseBase, TestCommon, \
TestSuiteManager
from repobuddy.globals import HelpStrings
from repobuddy.utils import Logger
from repobuddy.version import __version__
class ArgParserTestCase(TestCaseBase):
@classmethod
def setUpClass(cls):
cls._test_base_dir = TestSuiteManager.get_base_dir()
return
@classmethod
def tearDownClass(cls):
return
def _reset_logger(self):
Logger.msg_stream = self._original_logger_state['msg_stream']
Logger.error_stream = self._original_logger_state['error_stream']
return
def _test_help(self, args_str):
self._str_stream.truncate(0)
arg_parser = ArgParser(self._handlers)
with self.assertRaisesRegexp(ArgParserError, None) as err:
arg_parser.parse(_shlex.split(args_str))
self.assertTrue(err.exception.exit_prog_without_error)
help_regex = _re.compile(
r'^usage: ([a-z]+) ((\[-(h|v)\] ){2})\{(([a-z]+,)*[a-z]+)\} ' +
r'\.\.\.\s+' + HelpStrings.PROGRAM_DESCRIPTION + '\s+')
match_obj = help_regex.search(self._str_stream.getvalue())
self.assertIsNotNone(match_obj)
groups = match_obj.groups()
self.assertEqual(groups[0], 'repobuddy')
self._assert_count_equal(groups[1].rstrip().split(' '),
['[-h]', '[-v]'])
self._assert_count_equal(groups[4].rstrip().split(','),
['status', 'init', 'help'])
return
def _test_version(self, args_str):
self._str_stream.truncate(0)
arg_parser = ArgParser(self._handlers)
with self.assertRaisesRegexp(ArgParserError, None) as err:
arg_parser.parse(_shlex.split(args_str))
self.assertTrue(err.exception.exit_prog_without_error)
self.assertEqual(self._str_stream.getvalue().rstrip(), __version__)
return
def __init__(self, methodName='runTest'):
super(ArgParserTestCase, self).__init__(methodName)
self._original_logger_state = {'msg_stream': Logger.msg_stream,
'error_stream': Logger.error_stream}
self._handlers = {}
self._str_stream = TestCommon.get_string_stream()
return
def setUp(self):
self._handlers.clear()
self._handlers['init'] = None
self._handlers['status'] = None
self._str_stream.truncate(0)
Logger.msg_stream = self._str_stream
Logger.error_stream = self._str_stream
self._set_tear_down_cb(self._reset_logger)
return
def test_help(self):
self._test_help('-h')
self._test_help('--help')
return
def test_version(self):
self._test_version('-v')
self._test_version('--version')
return
class ArgParserTestSuite: # pylint: disable=W0232
@classmethod
def get_test_suite(cls):
tests = [
'test_help',
'test_version']
return _unittest.TestSuite(map(ArgParserTestCase, tests))
|
Python
| 0
|
@@ -1550,32 +1550,231 @@
return%0A%0A
+ def _hook_into_logger(self):%0A self._str_stream = TestCommon.get_string_stream()%0A Logger.msg_stream = self._str_stream%0A Logger.error_stream = self._str_stream%0A return%0A%0A
def _reset_l
@@ -1778,32 +1778,32 @@
t_logger(self):%0A
-
Logger.m
@@ -1996,37 +1996,33 @@
self._
-str_stream.truncate(0
+hook_into_logger(
)%0A
@@ -2926,37 +2926,33 @@
self._
-str_stream.truncate(0
+hook_into_logger(
)%0A
@@ -3713,16 +3713,16 @@
= None%0A
+
@@ -3758,138 +3758,8 @@
ne%0A%0A
- self._str_stream.truncate(0)%0A Logger.msg_stream = self._str_stream%0A Logger.error_stream = self._str_stream%0A%0A
|
cdb3e3872ad0dfa722f9955a7beff38b2cfa3547
|
remove schema form requester
|
backend/auth/utils.py
|
backend/auth/utils.py
|
import json
from django.http import HttpResponse
from auth.models import Token
def json_response(response_dict, status=200):
response = HttpResponse(json.dumps(response_dict), content_type="application/json", status=status)
response['Access-Control-Allow-Origin'] = 'http://memorycms.moome.net/'
response['Access-Control-Allow-Headers'] = 'Content-Type, Authorization'
return response
def get_token(request):
auth_header = request.META.get('HTTP_AUTHORIZATION', None)
if auth_header is not None:
tokens = auth_header.split(' ')
if len(tokens) == 2 and tokens[0] == 'Token':
token = tokens[1]
return Token.objects.filter(token=token).first()
def token_required(func):
def inner(request, *args, **kwargs):
if request.method == 'OPTIONS':
return func(request, *args, **kwargs)
auth_header = request.META.get('HTTP_AUTHORIZATION', None)
if auth_header is not None:
tokens = auth_header.split(' ')
if len(tokens) == 2 and tokens[0] == 'Token':
token = tokens[1]
token_obj = get_token(request)
if token_obj:
request.token = token_obj
return func(request, *args, **kwargs)
else:
return json_response({
'error': 'Token not found'
}, status=401)
return json_response({
'error': 'Invalid Header'
}, status=401)
return inner
|
Python
| 0
|
@@ -274,15 +274,8 @@
= '
-http://
memo
@@ -289,17 +289,16 @@
oome.net
-/
'%0A re
|
4de37b187527eaecbb8eb5f1cdc0ba67edbb4048
|
Add "frameworks" to the list of audit object types
|
app/main/views/audits.py
|
app/main/views/audits.py
|
from flask import jsonify, abort, request, current_app
from datetime import datetime
from ...models import AuditEvent
from sqlalchemy import asc, Date, cast
from sqlalchemy.exc import IntegrityError
from sqlalchemy.sql.expression import true, false
from ...utils import pagination_links, get_valid_page_or_1
from .. import main
from ... import db, models
from dmutils.audit import AuditTypes
from dmutils.config import convert_to_boolean
from ...validation import is_valid_date, is_valid_acknowledged_state
from ...service_utils import validate_and_return_updater_request
from ...utils import get_json_from_request, json_has_required_keys
AUDIT_OBJECT_TYPES = {
"suppliers": models.Supplier,
"services": models.Service,
}
AUDIT_OBJECT_ID_FIELDS = {
"suppliers": models.Supplier.supplier_id,
"services": models.Service.service_id,
}
@main.route('/audit-events', methods=['GET'])
def list_audits():
page = get_valid_page_or_1()
audits = AuditEvent.query.order_by(
asc(AuditEvent.created_at)
)
audit_date = request.args.get('audit-date', None)
if audit_date:
if is_valid_date(audit_date):
audits = audits.filter(
cast(AuditEvent.created_at, Date) == audit_date
)
else:
abort(400, 'invalid audit date supplied')
audit_type = request.args.get('audit-type')
if audit_type:
if AuditTypes.is_valid_audit_type(audit_type):
audits = audits.filter(
AuditEvent.type == audit_type
)
else:
abort(400, "Invalid audit type")
acknowledged = request.args.get('acknowledged', None)
if acknowledged and acknowledged != 'all':
if is_valid_acknowledged_state(acknowledged):
if convert_to_boolean(acknowledged):
audits = audits.filter(
AuditEvent.acknowledged == true()
)
elif not convert_to_boolean(acknowledged):
audits = audits.filter(
AuditEvent.acknowledged == false()
)
else:
abort(400, 'invalid acknowledged state supplied')
object_type = request.args.get('object-type')
object_id = request.args.get('object-id')
if object_type:
if object_type not in AUDIT_OBJECT_TYPES:
abort(400, 'invalid object-type supplied')
if not object_id:
abort(400, 'object-type cannot be provided without object-id')
model = AUDIT_OBJECT_TYPES[object_type]
id_field = AUDIT_OBJECT_ID_FIELDS[object_type]
audits = audits.join(model, model.id == AuditEvent.object_id) \
.filter(id_field == object_id)
elif object_id:
abort(400, 'object-id cannot be provided without object-type')
audits = audits.paginate(
page=page,
per_page=current_app.config['DM_API_SERVICES_PAGE_SIZE'],
)
return jsonify(
auditEvents=[audit.serialize() for audit in audits.items],
links=pagination_links(
audits,
'.list_audits',
request.args
)
)
@main.route('/audit-events', methods=['POST'])
def create_audit_event():
json_payload = get_json_from_request() # TODO test
json_has_required_keys(json_payload, ['auditEvents']) # TODO test
audit_event_data = json_payload['auditEvents']
json_has_required_keys(audit_event_data, ["type", "data"])
if 'objectType' not in audit_event_data:
if 'objectId' in audit_event_data:
abort(400, "object ID cannot be provided without an object type")
db_object = None
else:
if audit_event_data['objectType'] not in AUDIT_OBJECT_TYPES:
abort(400, "invalid object type supplied")
if 'objectId' not in audit_event_data:
abort(400, "object type cannot be provided without an object ID")
model = AUDIT_OBJECT_TYPES[audit_event_data['objectType']]
id_field = AUDIT_OBJECT_ID_FIELDS[audit_event_data['objectType']]
db_objects = model.query.filter(
id_field == audit_event_data['objectId']
).all()
if len(db_objects) != 1:
abort(400, "referenced object does not exist")
else:
db_object = db_objects[0]
if not AuditTypes.is_valid_audit_type(audit_event_data['type']):
abort(400, "invalid audit type supplied")
audit_event = AuditEvent(
audit_type=AuditTypes[audit_event_data['type']],
user=audit_event_data.get('user'),
data=audit_event_data['data'],
db_object=db_object)
db.session.add(audit_event)
db.session.commit()
return jsonify(auditEvents=audit_event.serialize()), 201
@main.route('/audit-events/<int:audit_id>/acknowledge', methods=['POST'])
def acknowledge_audit(audit_id):
updater_json = validate_and_return_updater_request()
audit_event = AuditEvent.query.get(audit_id)
if audit_event is None:
abort(404, "No audit event with this id")
audit_event.acknowledged = True
audit_event.acknowledged_at = datetime.utcnow()
audit_event.acknowledged_by = updater_json['updated_by']
try:
db.session.add(audit_event)
db.session.commit()
except IntegrityError as e:
db.session.rollback()
abort(400, e.orig)
return jsonify(auditEvents=audit_event.serialize()), 200
|
Python
| 0
|
@@ -723,16 +723,52 @@
ervice,%0A
+ %22frameworks%22: models.Framework,%0A
%7D%0A%0AAUDIT
@@ -878,16 +878,57 @@
ice_id,%0A
+ %22frameworks%22: models.Framework.slug,%0A
%7D%0A%0A%0A@mai
|
be909a2dd461ad72d4bb7ba297c3e98af1846222
|
correct api method signature
|
addons/sale_layout/models/sale_layout.py
|
addons/sale_layout/models/sale_layout.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2014-Today OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv, fields
from itertools import groupby
def grouplines(self, ordered_lines, sortkey):
"""Return lines from a specified invoice or sale order grouped by category"""
grouped_lines = []
for key, valuesiter in groupby(ordered_lines, sortkey):
group = {}
group['category'] = key
group['lines'] = list(v for v in valuesiter)
if 'subtotal' in key and key.subtotal is True:
group['subtotal'] = sum(line.price_subtotal for line in group['lines'])
grouped_lines.append(group)
return grouped_lines
class SaleLayoutCategory(osv.Model):
_name = 'sale_layout.category'
_order = 'sequence'
_columns = {
'name': fields.char('Name', required=True),
'sequence': fields.integer('Sequence', required=True),
'subtotal': fields.boolean('Add subtotal'),
'separator': fields.boolean('Add separator'),
'pagebreak': fields.boolean('Add pagebreak')
}
_defaults = {
'subtotal': True,
'separator': True,
'pagebreak': False,
'sequence': 10
}
# We want to forbid edit of a category if it is already linked to a report.
def _check(self, cr, uid, ids):
for cat in self.browse(cr, uid, ids):
invoice_obj = self.pool.get('account.invoice.line')
sale_obj = self.pool.get('sale.order.line')
ids = invoice_obj.search(cr, uid, [('sale_layout_cat_id', '=', cat.id)])
ids += sale_obj.search(cr, uid, [('sale_layout_cat_id', '=', cat.id)])
if len(ids) > 0:
return False
return True
_constraints = [(
_check,
'This category could not be modified nor deleted because it is still used in an invoice or'
' a sale report.', ['name']
)]
class AccountInvoice(osv.Model):
_inherit = 'account.invoice'
def sale_layout_lines(self, cr, uid, ids, context, invoice_id, *args, **kwargs):
"""
Returns invoice lines from a specified invoice ordered by
sale_layout_category sequence. Used in sale_layout module.
:Parameters:
-'invoice_id' (int): specify the concerned invoice.
"""
ordered_lines = self.browse(cr, uid, invoice_id, context=context).invoice_line
# We chose to group first by category model and, if not present, by invoice name
sortkey = lambda x: x.sale_layout_cat_id if x.sale_layout_cat_id else ''
return grouplines(self, ordered_lines, sortkey)
class AccountInvoiceLine(osv.Model):
_inherit = 'account.invoice.line'
_columns = {
'sale_layout_cat_id': fields.many2one('sale_layout.category',
'Layout Category'),
'categ_sequence': fields.related('sale_layout_cat_id',
'sequence', type='integer',
string='Layout Sequence', store=True)
# Store is intentionally set in order to keep the "historic" order.
}
_order = 'invoice_id, categ_sequence, sequence, id'
class SaleOrder(osv.Model):
_inherit = 'sale.order'
def sale_layout_lines(self, cr, uid, ids, context, order_id, *args, **kwargs):
"""
Returns order lines from a specified sale ordered by
sale_layout_category sequence. Used in sale_layout module.
:Parameters:
-'order_id' (int): specify the concerned sale order.
"""
ordered_lines = self.browse(cr, uid, order_id, context=context).order_line
sortkey = lambda x: x.sale_layout_cat_id if x.sale_layout_cat_id else ''
return grouplines(self, ordered_lines, sortkey)
class SaleOrderLine(osv.Model):
_inherit = 'sale.order.line'
_columns = {
'sale_layout_cat_id': fields.many2one('sale_layout.category',
'Layout Category'),
'categ_sequence': fields.related('sale_layout_cat_id',
'sequence', type='integer',
string='Layout Sequence', store=True)
# Store is intentionally set in order to keep the "historic" order.
}
_order = 'order_id, categ_sequence, sequence, id'
|
Python
| 0
|
@@ -2918,25 +2918,16 @@
id, ids,
- context,
invoice
@@ -2929,33 +2929,35 @@
voice_id
-, *args, **kwargs
+=None, context=None
):%0A
@@ -4197,42 +4197,35 @@
ds,
-context, order_id, *args, **kwargs
+order_id=None, context=None
):%0A
|
6e30c44d41c0e393859d19c32628552e3b611b3c
|
Add missing space.
|
st2reactor/st2reactor/rules/enforcer.py
|
st2reactor/st2reactor/rules/enforcer.py
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from st2common import log as logging
from st2common.util import reference
from st2common.util import action_db as action_db_util
from st2reactor.rules.datatransform import get_transformer
from st2common.services import action as action_service
from st2common.models.db.liveaction import LiveActionDB
from st2common.models.utils import action_param_utils
from st2common.constants import action as action_constants
from st2common.models.api.auth import get_system_username
LOG = logging.getLogger('st2reactor.ruleenforcement.enforce')
class RuleEnforcer(object):
def __init__(self, trigger_instance, rule):
self.trigger_instance = trigger_instance
self.rule = rule
try:
self.data_transformer = get_transformer(trigger_instance.payload)
except Exception as e:
message = ('Failed to template-ize trigger payload: %s. If the payload contains'
'special characters such as "{{" which dont\'t reference value in '
'a datastore, those characters need to be escaped' % (str(e)))
raise ValueError(message)
def enforce(self):
# TODO: Refactor this to avoid additiona lookup in cast_params
# TODO: rename self.rule.action -> self.rule.action_exec_spec
action_ref = self.rule.action['ref']
action_db = action_db_util.get_action_by_ref(action_ref)
if not action_db:
raise ValueError('Action "%s" doesn\'t exist' % (action_ref))
data = self.data_transformer(self.rule.action.parameters)
LOG.info('Invoking action %s for trigger_instance %s with data %s.',
self.rule.action.ref, self.trigger_instance.id,
json.dumps(data))
context = {
'trigger_instance': reference.get_ref_from_model(self.trigger_instance),
'rule': reference.get_ref_from_model(self.rule),
'user': get_system_username()
}
liveaction_db = RuleEnforcer._invoke_action(self.rule.action, data, context)
if not liveaction_db:
extra = {'trigger_instance_db': self.trigger_instance, 'rule_db': self.rule}
LOG.audit('Rule enforcement failed. Liveaction for Action %s failed. '
'TriggerInstance: %s and Rule: %s',
self.rule.action.name, self.trigger_instance, self.rule,
extra=extra)
return None
extra = {'trigger_instance_db': self.trigger_instance, 'rule_db': self.rule,
'liveaction_db': liveaction_db}
LOG.audit('Rule enforced. Liveaction %s, TriggerInstance %s and Rule %s.',
liveaction_db, self.trigger_instance, self.rule, extra=extra)
return liveaction_db
@staticmethod
def _invoke_action(action_exec_spec, params, context=None):
"""
Schedule an action execution.
:type action_exec_spec: :class:`ActionExecutionSpecDB`
:param params: Parameters to execute the action with.
:type params: ``dict``
:rtype: :class:`LiveActionDB` on successful schedueling, None otherwise.
"""
action_ref = action_exec_spec['ref']
# prior to shipping off the params cast them to the right type.
params = action_param_utils.cast_params(action_ref, params)
liveaction = LiveActionDB(action=action_ref, context=context, parameters=params)
liveaction, _ = action_service.request(liveaction)
if liveaction.status == action_constants.LIVEACTION_STATUS_REQUESTED:
return liveaction
else:
return None
|
Python
| 0.001076
|
@@ -1687,16 +1687,17 @@
contains
+
'%0A
|
2a4bbb19bf32a08e7c398558d39c201f8b089342
|
change to len
|
backend/camservice.py
|
backend/camservice.py
|
import cherrypy
from cammodule import CamModule, get_camera_list, setup_pygame_camera
class CamService(object):
def __init__(self):
self.camera_list = []
setup_pygame_camera()
camera_list = get_camera_list()
for camera_index, camera_name in enumerate(camera_list):
self.camera_list.append(CamModule(camera_name, camera_index))
@cherrypy.expose
@cherrypy.tools.json_out()
def get_cameras(self):
return {"cameraCount": 1}
@cherrypy.expose
def get_image(self, cam_index="0", fake="1"):
cherrypy.response.headers['Content-Type'] = "image/jpg"
return self.camera_list[int(cam_index)].get_bytes()
|
Python
| 0.99996
|
@@ -481,17 +481,38 @@
Count%22:
-1
+len(get_camera_list())
%7D%0A%0A%0A
|
74d85b48f3451f306a31942297be93f03992586e
|
add a function to calculate the Inbreeding coefficient
|
asmvar/utils/vcfutils.py
|
asmvar/utils/vcfutils.py
|
"""
A class for output VCF file. PyVCF does not able to add or update information
fields for sample's FORMAT field. That make us have to create another classes
(like these) to handle that problem
"""
import re
class Header(object):
def __init__(self, hInfo = None):
"""
VCF header information
"""
self.header = {}
if hInfo and (type(hInfo) is not dict):
raise ValueError ('The data type should be "dict" in class '
'of "VCFHeader", but found %s' % str(type(hInfo)))
if hInfo: self.header = hInfo
def add(self, mark, id, num, type, description):
key = '##%s=<ID=%s' % (mark, id)
val = ('##%s=<ID=%s,Number=%s,Type=%s,Description="%s">' %
(mark, id, num, type, description))
self.header[key] = val
return self
def record(self, headline):
if re.search (r'^##fileformat', headline): tag = '###'
elif re.search (r'^#CHROM' , headline): tag = '#CHROM'
else: tag = headline.split(',')[0]
self.header[tag] = headline
class Info(object):
def __init__(self, info = None):
"""
INOF fields information
"""
self.info = {}
if info and (type(info) is not dict):
raise ValueError ('The data type should be "dict" in class '
'of "VCFInfo", but found %s' % str(type(info)))
if info: self.info = info
def add(self, key, context):
self.info[key] = context
return self
class Context(object):
def __init__(self):
"""
VCF comtext
"""
self.chrom = None
self.pos = None
self.Id = None
self.ref = None
self.alt = []
self.qual = None
self.filter = []
self.info = {}
self.format = None
self.sample = []
def print_context(self):
"""
"""
if self.chrom:
print '\t'.join([self.chrom,
str(self.pos),
'.' if not self.Id else self.Id,
self.ref,
','.join(self.alt),
str(self.qual),
'.' if not self.filter else ','.join(self.filter),
'.' if not self.info else ';'.join(v
for v in sorted(self.info.values())),
':'.join(self.format),
'\t'.join(self.sample)])
|
Python
| 0.000122
|
@@ -2614,19 +2614,934 @@
le)%5D)%0A%0A%0A
-%0A
+def calcuInbreedCoeff(gt):%0A %22%22%22%0A Calculating the inbreeding coefficient by GT fields of VCF.%0A%0A Args:%0A %60gt%60: A list. Genotype fields of all the samples.%0A %22%22%22%0A ref_count, het_count, hom_count, n = 0, 0, 0, 0%0A for g in gt:%0A gs = g.split('/') if '/' in g else g.split('%7C')%0A if '.' not in g: n += 1%0A if '.' in g:%0A # Do nothing%0A pass%0A elif g == '0/0' or g == '0%7C0':%0A # Reference%0A ref_count += 1%0A elif gs%5B0%5D == gs%5B1%5D:%0A # homo%0A hom_count += 1%0A else:%0A # hete%0A het_count += 1%0A%0A if n == 0: n = 1%0A p = (2.0 * ref_count + het_count) / (2.0 * n) # expected REF allele freq%0A q = 1.0 - p # expected alternative allele frequency%0A # Inbreeding coefficient: the het_count VS expected of het_count%0A inbf = 1.0 - (het_count / (2.0 * p * q * n))%0A %0A return round(inbf, 2)
%0A%0A
|
ae7cc245938b1e02974f9b54830146019ca9c0c1
|
make imports init __init__ prettier
|
pypeerassets/__init__.py
|
pypeerassets/__init__.py
|
from pypeerassets.kutil import Kutil
from pypeerassets.provider import *
from pypeerassets.__main__ import *
|
Python
| 0.999726
|
@@ -92,18 +92,392 @@
__main__ import
-*
+(deck_parser,%0A find_all_valid_cards,%0A find_all_valid_decks,%0A find_deck,%0A deck_spawn,%0A deck_transfer,%0A get_card_transfers,%0A card_transfer)
%0A
|
28ffce4de70a74b22cf51c7bdd5b6d970cf70dfc
|
Update binary windows resource test to check for correct option type
|
csunplugged/tests/resources/views/test_binary_windows.py
|
csunplugged/tests/resources/views/test_binary_windows.py
|
from django.test import tag
from django.urls import reverse
from tests.BaseTestWithDB import BaseTestWithDB
from tests.resources.ResourcesTestDataGenerator import ResourcesTestDataGenerator
from utils.import_resource_generator import import_resource_generator
from utils.create_query_string import query_string
from utils.resource_valid_test_configurations import resource_valid_test_configurations
@tag('resource_generation')
class BinaryWindowsResourceViewTest(BaseTestWithDB):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.test_data = ResourcesTestDataGenerator()
self.language = "en"
def test_binary_windows_resource_form_view(self):
resource = self.test_data.create_resource(
"binary-windows",
"Binary Windows",
"resources/binary-windows.html",
"BinaryWindowsResourceGenerator",
)
kwargs = {
"resource_slug": resource.slug,
}
url = reverse("resources:resource", kwargs=kwargs)
response = self.client.get(url)
self.assertEqual(200, response.status_code)
def test_binary_windows_resource_generation_valid_configurations(self):
resource = self.test_data.create_resource(
"binary-windows",
"Binary Windows",
"resources/binary-windows.html",
"BinaryWindowsResourceGenerator",
)
kwargs = {
"resource_slug": resource.slug,
}
base_url = reverse("resources:generate", kwargs=kwargs)
empty_generator = import_resource_generator(resource.generator_module)
combinations = resource_valid_test_configurations(
empty_generator.valid_options
)
print()
for combination in combinations:
print(" - Testing combination: {} ... ".format(combination), end="")
url = base_url + query_string(combination)
response = self.client.get(url)
self.assertEqual(200, response.status_code)
if combination["dot_counts"] == "yes":
count_text = "with dot counts"
else:
count_text = "without dot counts"
TEMPLATE = "{} bits - {} - {} - {}"
subtitle = TEMPLATE.format(
combination["number_bits"],
combination["value_type"],
count_text,
combination["paper_size"],
)
self.assertEqual(
response.get("Content-Disposition"),
'attachment; filename="Resource Binary Windows ({subtitle}).pdf"'.format(subtitle=subtitle)
)
print("ok")
def test_binary_windows_resource_generation_missing_dot_count_parameter(self):
resource = self.test_data.create_resource(
"binary-windows",
"Binary Windows",
"resources/binary-windows.html",
"BinaryWindowsResourceGenerator",
)
kwargs = {
"resource_slug": resource.slug,
}
url = reverse("resources:generate", kwargs=kwargs)
get_parameters = {
"number_bits": "8",
"value_type": "binary",
"paper_size": "a4",
"header_text": "Example header text",
}
url += query_string(get_parameters)
response = self.client.get(url)
self.assertEqual(404, response.status_code)
def test_binary_windows_resource_generation_missing_number_bits_parameter(self):
resource = self.test_data.create_resource(
"binary-windows",
"Binary Windows",
"resources/binary-windows.html",
"BinaryWindowsResourceGenerator",
)
kwargs = {
"resource_slug": resource.slug,
}
url = reverse("resources:generate", kwargs=kwargs)
get_parameters = {
"dot_counts": "yes",
"value_type": "binary",
"paper_size": "a4",
"header_text": "Example header text",
}
url += query_string(get_parameters)
response = self.client.get(url)
self.assertEqual(404, response.status_code)
def test_binary_windows_resource_generation_missing_value_type_parameter(self):
resource = self.test_data.create_resource(
"binary-windows",
"Binary Windows",
"resources/binary-windows.html",
"BinaryWindowsResourceGenerator",
)
kwargs = {
"resource_slug": resource.slug,
}
url = reverse("resources:generate", kwargs=kwargs)
get_parameters = {
"dot_counts": "yes",
"number_bits": "8",
"paper_size": "a4",
"header_text": "Example header text",
}
url += query_string(get_parameters)
response = self.client.get(url)
self.assertEqual(404, response.status_code)
def test_binary_windows_resource_generation_missing_paper_size_parameter(self):
resource = self.test_data.create_resource(
"binary-windows",
"Binary Windows",
"resources/binary-windows.html",
"BinaryWindowsResourceGenerator",
)
kwargs = {
"resource_slug": resource.slug,
}
url = reverse("resources:generate", kwargs=kwargs)
get_parameters = {
"dot_counts": "yes",
"number_bits": "8",
"value_type": "binary",
"header_text": "Example header text",
}
url += query_string(get_parameters)
response = self.client.get(url)
self.assertEqual(404, response.status_code)
def test_binary_windows_resource_generation_missing_header_text_parameter(self):
resource = self.test_data.create_resource(
"binary-windows",
"Binary Windows",
"resources/binary-windows.html",
"BinaryWindowsResourceGenerator",
)
kwargs = {
"resource_slug": resource.slug,
}
url = reverse("resources:generate", kwargs=kwargs)
get_parameters = {
"dot_counts": "yes",
"number_bits": "8",
"value_type": "lightbulb",
"paper_size": "a4",
"header_text": "",
}
url += query_string(get_parameters)
response = self.client.get(url)
self.assertEqual(200, response.status_code)
self.assertEqual(
response.get("Content-Disposition"),
'attachment; filename="Resource Binary Windows (8 bits - lightbulb - with dot counts - a4).pdf"'
)
|
Python
| 0
|
@@ -2088,17 +2088,8 @@
ts%22%5D
- == %22yes%22
:%0A
|
3aa36ff6ef79f061158ad57b1f4a251b3eeafd7a
|
Add virtual shift dealer_btn method
|
pypoker2/engine/table.py
|
pypoker2/engine/table.py
|
from pypoker2.engine.card import Card
from pypoker2.engine.seats import Seats
from pypoker2.engine.deck import Deck
class Table:
def __init__(self, cheat_deck=None):
self.dealer_btn = 0
self.seats = Seats()
self.deck = cheat_deck if cheat_deck else Deck()
self.__community_card = []
def get_community_card(self):
return self.__community_card[::]
def add_community_card(self, card):
if len(self.__community_card) == 5:
raise ValueError(self.__exceed_card_size_msg)
self.__community_card.append(card)
def reset(self):
self.deck.restore()
self.__community_card = []
for player in self.seats.players:
player.clear_holecard()
player.clear_action_histories()
player.clear_pay_info()
def shift_dealer_btn(self):
while True:
self.dealer_btn = (self.dealer_btn + 1) % self.seats.size()
if self.seats.players[self.dealer_btn].is_active(): break
def serialize(self):
community_card = [card.to_id() for card in self.__community_card]
return [
self.dealer_btn, Seats.serialize(self.seats),
Deck.serialize(self.deck), community_card
]
@classmethod
def deserialize(self, serial):
deck = Deck.deserialize(serial[2])
community_card = [Card.from_id(cid) for cid in serial[3]]
table = self(cheat_deck=deck)
table.dealer_btn = serial[0]
table.seats = Seats.deserialize(serial[1])
table.__community_card = community_card
return table
__exceed_card_size_msg = "Community card is already full"
|
Python
| 0
|
@@ -771,26 +771,75 @@
ler_btn(self
-):
+, exec_shit=True):%0A dealer_pos = self.dealer_btn
%0A while T
@@ -853,42 +853,32 @@
-self.
dealer_
-btn
+pos
= (
-self.
dealer_
-btn
+pos
+ 1
@@ -927,31 +927,26 @@
players%5B
-self.
dealer_
-btn
+pos
%5D.is_act
@@ -957,16 +957,85 @@
): break
+%0A if exec_shit: self.dealer_btn = dealer_pos%0A return dealer_pos
%0A%0A def
|
8431b103bd0c5459f1299707c4c305e057c197ef
|
Add temp/fan
|
barpyrus.py
|
barpyrus.py
|
import sys
import contextlib
from barpyrus import hlwm
from barpyrus import widgets as W
from barpyrus.core import Theme, Painter
from barpyrus import lemonbar
from barpyrus import conky
@contextlib.contextmanager
def maybe_orange(match, predicate='> 90'):
with cg.if_('match ${%s} %s' % (match, predicate)):
cg.fg('#ffc726')
yield
cg.fg(None)
def underlined_tags(taginfo, painter):
if taginfo.empty:
return
painter.set_flag(painter.underline, True if taginfo.visible else False)
painter.fg('#a0a0a0' if taginfo.occupied else '#909090')
if taginfo.urgent:
painter.ol('#FF7F27')
painter.fg('#FF7F27')
painter.set_flag(Painter.underline, True)
painter.bg('#57000F')
elif taginfo.here:
painter.fg('#ffffff')
painter.ol(taginfo.activecolor if taginfo.focused else '#ffffff')
painter.bg(taginfo.emphbg)
else:
painter.ol('#454545')
painter.space(3)
painter += taginfo.name
painter.space(3)
painter.bg()
painter.ol()
painter.set_flag(painter.underline, False)
painter.space(2)
hc = hlwm.connect()
monitor = sys.argv[1] if len(sys.argv) >= 2 else 0
x, y, monitor_w, monitor_h = hc.monitor_rect(monitor)
height = 16
width = monitor_w
hc(['pad', str(monitor), str(height)])
cg = conky.ConkyGenerator(lemonbar.textpainter())
## CPU / RAM / df
with cg.temp_fg('#9fbc00'):
cg.symbol(0xe026)
cg.space(5)
for cpu in '1234':
with maybe_orange('cpu cpu%s' % cpu):
cg.var('cpu cpu' + cpu)
cg.text('% ')
with cg.temp_fg('#9fbc00'):
cg.symbol(0xe021)
cg.space(5)
with maybe_orange('memperc'):
cg.var('memperc')
cg.text('% ')
with cg.temp_fg('#9fbc00'):
cg.symbol(0x00e1bb)
cg.space(5)
with maybe_orange('fs_used_perc /'):
cg.var('fs_used_perc /')
cg.text('% ')
## Network
wifi_icons = [0xe217, 0xe218, 0xe219, 0xe21a]
wifi_delta = 100 / len(wifi_icons)
with cg.if_('up tun0'):
with cg.temp_fg('#ff0000'):
cg.symbol(0xe0a6)
for iface in ['eth', 'dock', 'wlan', 'ppp0']:
with cg.if_('up %s' % iface), cg.if_('match "${addr %s}" != "No Address"' % iface):
with cg.temp_fg('#9fbc00'):
if iface == 'wlan':
with cg.cases():
for i, icon in enumerate(wifi_icons[:-1]):
cg.case('match ${wireless_link_qual_perc wlan} < %d' % ((i+1)*wifi_delta))
cg.symbol(icon)
cg.else_()
cg.symbol(wifi_icons[-1]) # icon for 100 percent
cg.space(5)
elif iface in ['eth', 'dock']:
cg.symbol(0xe0af)
elif iface == 'ppp0':
cg.symbol(0xe0f3)
else:
assert False
if iface == 'wlan':
cg.var('wireless_essid')
if iface != 'ppp0':
cg.space(5)
cg.var('addr %s' % iface)
cg.space(5)
with cg.temp_fg('#9fbc00'):
cg.symbol(0xe13c)
cg.var('downspeedf %s' % iface)
cg.text('K ')
cg.var('totaldown %s' % iface)
cg.space(5)
with cg.temp_fg('#9fbc00'):
cg.symbol(0xe13b)
cg.var('upspeedf %s' % iface)
cg.text('K ')
cg.var('totalup %s' % iface)
cg.space(5)
## Battery
# first icon: 0 percent
# last icon: 100 percent
bat_icons = [
0xe242, 0xe243, 0xe244, 0xe245, 0xe246,
0xe247, 0xe248, 0xe249, 0xe24a, 0xe24b,
]
bat_delta = 100 / len(bat_icons)
with cg.if_('existing /sys/class/power_supply/BAT0'):
cg.fg('#9fbC00')
with cg.if_('match "$battery" != "discharging $battery_percent%"'):
cg.symbol(0xe0db)
with cg.cases():
for i, icon in enumerate(bat_icons[:-1]):
cg.case('match $battery_percent < %d' % ((i+1)*bat_delta))
cg.symbol(icon)
cg.else_()
cg.symbol(bat_icons[-1]) # icon for 100 percent
cg.fg(None)
cg.space(5)
with maybe_orange('battery_percent', '< 10'):
cg.var('battery_percent')
cg.text('% ')
cg.var('battery_time')
cg.space(5)
with cg.temp_fg('#9fbc00'):
cg.symbol(0xe015)
cg.space(5)
cg.var('time %d. %B, %H:%M')
conky_config = {
'update_interval': '5',
}
# Widget configuration:
bar = lemonbar.Lemonbar(geometry = (x,y,width,height))
bar.widget = W.ListLayout([
W.RawLabel('%{l}'),
hlwm.HLWMTags(hc, monitor, tag_renderer=underlined_tags),
W.RawLabel('%{c}'),
hlwm.HLWMWindowTitle(hc),
W.RawLabel('%{r}'),
conky.ConkyWidget(text=str(cg), config=conky_config),
])
|
Python
| 0
|
@@ -1840,16 +1840,221 @@
('%25 ')%0A%0A
+## temp / fan%0Awith cg.temp_fg('#9fbc00'):%0A cg.symbol(0xe1c0)%0Acg.space(5)%0Acg.var('ibm_fan')%0Acg.text('rpm ')%0Awith cg.temp_fg('#9fbc00'):%0A cg.symbol(0xe01b)%0Acg.space(5)%0Acg.var('acpitemp')%0Acg.text('%C2%B0 ')%0A
%0A## Netw
|
4a62c819f65aba0f68fb07fed2777f9bc88ee2d3
|
Fix return `Group.get_members()`
|
vk/groups.py
|
vk/groups.py
|
# coding=utf-8
from .fetch import fetch
from .users import get_users
__all__ = ("groups",)
class Group(object):
"""
Docs: https://vk.com/dev/objects/groups
"""
__slots__ = ("id", "name", "screen_name", "is_closed", "is_deactivated", "type", "has_photo",
"photo_50", "photo_100", "photo_200", "status", "is_verified", "site")
@classmethod
def from_json(cls, group_json):
group = cls()
group.id = group_json.get("id")
group.name = group_json.get("name")
group.screen_name = group_json.get("screen_name")
group.is_closed = True if group_json.get("is_closed") else False
group.is_deactivated = True if group_json.get("deactivated") else False
group.type = group_json.get("type")
group.has_photo = bool(group_json.get("has_photo"))
group.photo_50 = group_json.get("photo_50")
group.photo_100 = group_json.get("photo_100")
group.photo_200 = group_json.get("photo_200")
group.status = group_json.get("status")
group.is_verified = bool(group_json.get("verified"))
group.site = group_json.get("site")
return group
def get_description(self):
response = fetch("groups.getById", group_ids=self.id, fields="description")
return response[0]['description']
def get_members(self):
"""
Docs: https://vk.com/dev/groups.getMembers
"""
MAX_COUNT = 1000
offset = 0
while True:
res = fetch("groups.getMembers", group_id=self.id, count=MAX_COUNT, offset=offset)
user_ids = res['items']
if not user_ids:
raise StopIteration
yield get_users(user_ids)
offset += MAX_COUNT
def get_members_count(self):
response = fetch("groups.getById", group_ids=self.id, fields="members_count")
return response[0]['members_count']
def __repr__(self):
return u"<Group: {0}>".format(self.screen_name)
def groups(*group_ids):
group_ids = ",".join((str(i) for i in group_ids))
fields = ("id", "name", "screen_name", "is_closed", "deactivated", "type", "has_photo",
"photo_50", "photo_100", "photo_200", "status", "verified", "site")
response = fetch("groups.getById", group_ids=group_ids, fields=",".join(fields))
return [Group.from_json(group_json) for group_json in response]
|
Python
| 0.000024
|
@@ -1701,13 +1701,19 @@
-yield
+for user in
get
@@ -1728,16 +1728,44 @@
ser_ids)
+:%0A yield user
%0A
|
f3ff0e909fa658d43564655213185f75860a1b56
|
Remove asking for confirmation to set default menu entry
|
src/grub-boot-manager.py
|
src/grub-boot-manager.py
|
#!/usr/bin/env python
from gi.repository import Gtk, Gdk
import subprocess
import re
import os
import sys
class GrubBootManager:
def __init__(self):
output = subprocess.check_output(["grub-install", "--version"])
self.grub_version = output.split()[-1]
output = open("/boot/grub/grub.cfg", "r").read()
self.grub_menu_entries = re.findall("menuentry '(.*?)'", output)
output = open("/boot/grub/grubenv", "r").read()
self.grub_default = re.findall("saved_entry=(.*)", output)[0]
output = open("/etc/default/grub", "r").read()
self.grub_timeout = re.findall("GRUB_TIMEOUT=(.*)", output)[0]
builder = Gtk.Builder()
builder.add_from_file("grub-boot-manager.ui")
self.window = builder.get_object("dialog_main")
self.window.set_title("GRUB Boot Manager")
self.window.connect("destroy", self.quit)
#self.window.connect("key-press-event", self._keyhandler)
self.window.show()
self.label_version = builder.get_object("label_version")
self.label_version.set_text(self.label_version.get_text() + self.grub_version)
self.treeview = builder.get_object("treeview1")
self.treeview.set_headers_visible(False)
self.treeview.connect("row-activated", self.show_dialog_reboot)
self.liststore = Gtk.ListStore(str)
self.treeview.set_model(self.liststore)
self.treeview_column = Gtk.TreeViewColumn("Menu Entry")
self.treeview.append_column(self.treeview_column)
self.cellrenderer = Gtk.CellRendererText()
self.treeview_column.pack_start(self.cellrenderer, True)
self.treeview_column.add_attribute(self.cellrenderer, "text", 0)
for entry in self.grub_menu_entries:
self.liststore.append([entry])
self.treeselection = self.treeview.get_selection()
self.treeselection.connect("changed", self.selection_changed)
self.treeselection.select_path(self.grub_menu_entries.index(self.grub_default))
self.entry_timeout = builder.get_object("entry_timeout")
self.entry_timeout.set_text(str(self.grub_timeout))
self.entry_timeout.connect("activate", self.grub_set_timeout, self.grub_timeout)
self.button_default = builder.get_object("button_default")
self.button_default.connect("clicked", self.show_dialog_default)
self.button_reboot = builder.get_object("button_reboot")
self.button_reboot.connect("clicked", self.show_dialog_reboot)
self.button_cancel = builder.get_object("button_cancel")
self.button_cancel.connect("clicked", self.quit)
# dialog_default
self.dialog_default = builder.get_object("dialog_default")
self.dialog_default.set_title("Default - Grub Boot Manager")
self.dialog_default.connect("delete-event", self.hide_dialog_default)
self.label_default = builder.get_object("label_default")
self.button_cancel_default = builder.get_object("button_cancel_default")
self.button_cancel_default.connect("clicked", self.hide_dialog_default)
self.button_apply_default = builder.get_object("button_apply_default")
self.button_apply_default.connect("clicked", self.grub_set_default)
# dialog_reboot
self.dialog_reboot = builder.get_object("dialog_reboot")
self.dialog_reboot.set_title("Reboot - Grub Boot Manager")
self.dialog_reboot.connect("delete-event", self.hide_dialog_reboot)
self.label_reboot = builder.get_object("label_reboot")
self.button_no_reboot = builder.get_object("button_no_reboot")
self.button_no_reboot.connect("clicked", self.hide_dialog_reboot)
self.button_yes_reboot = builder.get_object("button_yes_reboot")
self.button_yes_reboot.connect("clicked", self.grub_reboot)
'''
def _keyhandler(self, widget, event):
keyname = Gdk.keyval_name(event.keyval)
print "Key %s (%d) was pressed" % (keyname, event.keyval)
if Gdk.keyval_name(event.keyval) == 'e':
self.show_dialog_edit()
'''
def selection_changed(self, tree_selection):
(model, pathlist) = tree_selection.get_selected_rows()
for path in pathlist:
tree_iter = model.get_iter(path)
self.grub_menu_entry = model.get_value(tree_iter, 0)
def show_dialog_default(self, *args):
self.label_default.set_text(self.grub_menu_entry)
self.dialog_default.show()
def hide_dialog_default(self, *args):
self.dialog_default.hide()
return True
def grub_set_default(self, *args):
subprocess.Popen(["grub-set-default", self.grub_menu_entry])
self.hide_dialog_default()
def grub_set_timeout(self, *args):
self.grub_timeout = self.entry_timeout.get_text()
if self.grub_timeout.isdigit():
subprocess.Popen(["sed", "-i", "-r", "s/GRUB_TIMEOUT=[0-9]+/GRUB_TIMEOUT=%s/" % self.grub_timeout, "/etc/default/grub"])
subprocess.Popen(["update-grub"])
def show_dialog_reboot(self, *args):
self.label_reboot.set_text(self.grub_menu_entry)
self.dialog_reboot.show()
def hide_dialog_reboot(self, *args):
self.dialog_reboot.hide()
return True
def grub_reboot(self, *args):
subprocess.Popen(["grub-reboot", self.grub_menu_entry])
subprocess.Popen(["shutdown", "-r", "now"])
self.hide_dialog_reboot()
def show_dialog_edit(self, *args):
self.dialog_edit.show()
def hide_dialog_edit(self, *args):
self.dialog_edit.hide()
return True
def quit(self, *args):
Gtk.main_quit()
if __name__ == "__main__":
if os.getuid() != 0:
os.execlp("gksu", "python", sys.argv[0])
else:
app = GrubBootManager()
Gtk.main()
|
Python
| 0
|
@@ -2368,27 +2368,24 @@
%22, self.
-show_dialog
+grub_set
_default
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.