commit
stringlengths 40
40
| subject
stringlengths 1
3.25k
| old_file
stringlengths 4
311
| new_file
stringlengths 4
311
| old_contents
stringlengths 0
26.3k
| lang
stringclasses 3
values | proba
float64 0
1
| diff
stringlengths 0
7.82k
|
|---|---|---|---|---|---|---|---|
b45e9be5338baa652055f52c494a4febefe75c2d
|
Fix dealing with numpy arrays.
|
pydevd_plugins/extensions/types/pydevd_plugin_numpy_types.py
|
pydevd_plugins/extensions/types/pydevd_plugin_numpy_types.py
|
from _pydevd_bundle.pydevd_extension_api import TypeResolveProvider
from _pydevd_bundle.pydevd_resolver import defaultResolver, MAX_ITEMS_TO_HANDLE, TOO_LARGE_ATTR, TOO_LARGE_MSG
from .pydevd_helpers import find_mod_attr
# =======================================================================================================================
# NdArrayResolver
# =======================================================================================================================
class NdArrayResolver: pass
class NdArrayItemsContainer: pass
class NDArrayTypeResolveProvider(object):
def can_provide(self, type_object, type_name):
nd_array = find_mod_attr('numpy', 'ndarray')
return nd_array is not None and issubclass(type_object, nd_array)
'''
This resolves a numpy ndarray returning some metadata about the NDArray
'''
def is_numeric(self, obj):
if not hasattr(obj, 'dtype'):
return False
return obj.dtype.kind in 'biufc'
def resolve(self, obj, attribute):
if attribute == '__internals__':
return defaultResolver.get_dictionary(obj)
if attribute == 'min':
if self.is_numeric(obj):
return obj.min()
else:
return None
if attribute == 'max':
if self.is_numeric(obj):
return obj.max()
else:
return None
if attribute == 'shape':
return obj.shape
if attribute == 'dtype':
return obj.dtype
if attribute == 'size':
return obj.size
if attribute.startswith('['):
container = NdArrayItemsContainer()
i = 0
format_str = '%0' + str(int(len(str(len(obj))))) + 'd'
for item in obj:
setattr(container, format_str % i, item)
i += 1
if i > MAX_ITEMS_TO_HANDLE:
setattr(container, TOO_LARGE_ATTR, TOO_LARGE_MSG)
break
return container
return None
def get_dictionary(self, obj):
ret = dict()
ret['__internals__'] = defaultResolver.get_dictionary(obj)
if obj.size > 1024 * 1024:
ret['min'] = 'ndarray too big, calculating min would slow down debugging'
ret['max'] = 'ndarray too big, calculating max would slow down debugging'
else:
if self.is_numeric(obj):
ret['min'] = obj.min()
ret['max'] = obj.max()
else:
ret['min'] = 'not a numeric object'
ret['max'] = 'not a numeric object'
ret['shape'] = obj.shape
ret['dtype'] = obj.dtype
ret['size'] = obj.size
ret['[0:%s] ' % (len(obj))] = list(obj[0:MAX_ITEMS_TO_HANDLE])
return ret
import sys
if not sys.platform.startswith("java"):
TypeResolveProvider.register(NDArrayTypeResolveProvider)
|
Python
| 0
|
@@ -1189,32 +1189,49 @@
.is_numeric(obj)
+ and obj.size %3E 0
:%0A
@@ -1353,32 +1353,49 @@
.is_numeric(obj)
+ and obj.size %3E 0
:%0A
@@ -2435,32 +2435,144 @@
down debugging'%0A
+ elif obj.size == 0:%0A ret%5B'min'%5D = 'array is empty'%0A ret%5B'max'%5D = 'array is empty'%0A
else:%0A
|
d90d7c35df1f815f31de2cad9fe2dde43f9f561a
|
Print generation date.
|
git_changelog.py
|
git_changelog.py
|
from __future__ import print_function
from collections import defaultdict
import glob
import json
import os
import re
import subprocess
from urllib2 import urlopen
DEBUG = False
GIT_EXEC = "/usr/bin/git"
REPOSITORIES = glob.glob("/ssd/swinbank/src/*") # Everything in w_2017_8
JIRA_API_URL = "https://jira.lsstcorp.org/rest/api/2"
class Repository(object):
def __init__(self, path):
self.path = path
def __call_git(self, *args):
to_exec = [GIT_EXEC] + list(args)
if DEBUG:
print(to_exec)
return subprocess.check_output(to_exec, cwd=self.path)
def commits(self, reachable_from=None, merges_only=False):
args = ["log", "--pretty=format:%H"]
if reachable_from:
args.append(reachable_from)
if merges_only:
args.append("--merges")
return self.__call_git(*args).split()
def message(self, commit_hash):
return self.__call_git("show", commit_hash, "--pretty=format:%s")
def tags(self, pattern=r".*"):
return [tag for tag in self.__call_git("tag").split()
if re.search(pattern, tag)]
def update(self):
return self.__call_git("pull")
@staticmethod
def ticket(message):
try:
return re.search(r"(DM-\d+)", message, re.IGNORECASE).group(1)
except AttributeError:
if DEBUG:
print(message)
def get_ticket_summary(ticket):
url = JIRA_API_URL + "/issue/" + ticket + "?fields=summary"
if DEBUG:
print(url)
j = json.load(urlopen(url))
return j['fields']['summary']
def print_tag(tagname, tickets):
print("<h2>New in {}</h2>".format(tagname))
print("<ul>")
for ticket in sorted(tickets):
summary = get_ticket_summary(ticket)
pkgs = ", ".join(sorted(tickets[ticket]))
link_text = (u"<li><a href=https://jira.lsstcorp.org/browse/"
u"{ticket}>{ticket}</a>: {summary} [{pkgs}]</li>")
print(link_text.format(ticket=ticket, summary=summary, pkgs=pkgs)
.encode("utf-8"))
print("</ul>")
def format_output(changelog):
# Ew, needs a proper templating engine
print("<html>")
print("<body>")
print("<h1>LSST DM Weekly Changelog</h1>")
# Always do master first
print_tag("master", changelog.pop("master"))
# Then the other tags in order
for tag in sorted(changelog, reverse=True):
print_tag(tag, changelog[tag])
print("</body>")
print("</html>")
def generate_changelog(repositories):
# Dict of tag -> ticket -> affected packages
changelog = defaultdict(lambda: defaultdict(set))
for repository in repositories:
if DEBUG:
print(repository)
r = Repository(repository)
r.update()
# Extract all tags which look like weeklies
tags = sorted(r.tags("w\.\d{4}"), reverse=True)
# Also include tickets which aren't yet in a weekly
tags.insert(0, "master")
for newtag, oldtag in zip(tags, tags[1:]):
merges = (set(r.commits(newtag, merges_only=True)) -
set(r.commits(oldtag, merges_only=True)))
for sha in merges:
ticket = r.ticket(r.message(sha))
if ticket:
changelog[newtag][ticket].add(os.path.basename(repository))
return changelog
if __name__ == "__main__":
changelog = generate_changelog(REPOSITORIES)
format_output(changelog)
|
Python
| 0.000001
|
@@ -67,16 +67,32 @@
ultdict%0A
+import datetime%0A
import g
@@ -2488,16 +2488,143 @@
%5Btag%5D)%0A%0A
+ gen_date = datetime.datetime.utcnow().strftime(%22%25Y-%25m-%25d %25H:%25M +00:00%22)%0A print(%22%3Cp%3EGenerated %7B%7D.%3C/p%3E%22.format(gen_date))%0A
prin
|
3d0392a835f1b02ae01035d3cdb18d0d7e32f4a0
|
Version 0.4.0 release
|
capture/setup.py
|
capture/setup.py
|
#!/usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup
setup(
name = "noworkflow",
version = "0.4.0-dev",
packages = ['noworkflow'],
package_data = {'noworkflow': ['resources/*']},
entry_points = {'console_scripts': ['now = noworkflow.now:main']},
author = "Leonardo Murta, Vanessa Braganholo, Fernando Chirigati, David Koop, and Juliana Freire",
author_email = "leomurta@ic.uff.br",
description = "Supporting infrastructure to run scientific experiments without a scientific workflow management system.",
license = "MIT",
keywords = "scientific experiments provenance python",
url = "https://github.com/gems-uff/noworkflow"
)
|
Python
| 0
|
@@ -154,12 +154,8 @@
.4.0
--dev
%22,%0A
|
d7e9264418cbe5574d7475094e2c06a878897c34
|
fix ALDC scraper
|
every_election/apps/election_snooper/snoopers/aldc.py
|
every_election/apps/election_snooper/snoopers/aldc.py
|
from datetime import datetime
from .base import BaseSnooper
from election_snooper.models import SnoopedElection
class ALDCScraper(BaseSnooper):
snooper_name = "ALDC"
base_url = "https://www.aldc.org/"
def get_all(self):
url = "{}category/forthcoming-by-elections/".format(self.base_url)
print(url)
soup = self.get_soup(url)
wrapper = soup.find('section', {'class': 'mod-tile-wrap'})
for tile in wrapper.find_all('div', {'class': 'tile'}):
title = tile.find(
'div', {'class': 'election-heading'}).text.strip()
detail_url = tile.find(
'div', {'class': 'election-heading'}).a['href'].strip()
content = tile.find(
'div', {'class': 'election-content'}).find_all('p')
if 'cause' in content[1].text.lower():
seat_control, cause = content[1].text.lower().split('cause')
cause = cause.split('\n')[0].strip(": .")
else:
cause = "unknown"
data = {
'title': title,
'source': url,
'cause': cause,
'detail': "\n".join([x.text for x in content]),
'snooper_name': self.snooper_name,
}
try:
data['date'] = datetime.strptime(content[0].strong.text, "%B %d, %Y")
except ValueError:
pass
item, created = SnoopedElection.objects.update_or_create(
snooper_name=self.snooper_name,
detail_url=detail_url,
defaults=data
)
if created:
self.post_to_slack(item)
|
Python
| 0
|
@@ -361,75 +361,8 @@
rl)%0A
- wrapper = soup.find('section', %7B'class': 'mod-tile-wrap'%7D)%0A
@@ -381,15 +381,12 @@
in
-wrapper
+soup
.fin
@@ -396,31 +396,16 @@
ll('
-div', %7B'class': '
+ar
ti
+c
le'
-%7D
):%0A%0A
@@ -438,175 +438,129 @@
ind(
-%0A 'div', %7B'class': 'election-heading'%7D).text.strip()%0A detail_url = tile.find(%0A 'div', %7B'class': 'election-heading'%7D).a%5B'href'%5D
+'h2').a.text.strip()%0A detail_url = tile.find('h2').a%5B'href'%5D.strip()%0A date = tile.find('date').text
.str
@@ -596,33 +596,16 @@
le.find(
-%0A
'div', %7B
@@ -618,24 +618,16 @@
': '
-election-content
+c-editor
'%7D).
@@ -667,33 +667,33 @@
use' in content%5B
-1
+0
%5D.text.lower():%0A
@@ -742,9 +742,9 @@
ent%5B
-1
+0
%5D.te
@@ -1194,30 +1194,12 @@
ime(
-content%5B0%5D.strong.text
+date
, %22%25
|
021ca057be4333d209454b043c79f9d6d327c3e0
|
Return the response for the main page without jinja rendering as AngularJS is doing the rendering
|
webapp/keepupwithscience/frontend/main.py
|
webapp/keepupwithscience/frontend/main.py
|
from flask import Blueprint, render_template
bp = Blueprint('main', __name__)
@bp.route('/')
def index():
"""Returns the main interface."""
return render_template('main.html')
|
Python
| 0.000021
|
@@ -37,16 +37,31 @@
template
+, make_response
%0A%0Abp = B
@@ -154,16 +154,105 @@
ace.%22%22%22%0A
+ return make_response(open('keepupwithscience/frontend/templates/main.html').read())%0A#
retu
|
392e34a70bd2bccba268ec9de1752afc50cd1b35
|
Add the httlib dir to the build
|
packaging/datadog-agent-lib/setup.py
|
packaging/datadog-agent-lib/setup.py
|
#!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
import os, sys
from distutils.command.install import INSTALL_SCHEMES
def getVersion():
try:
from config import get_version
except ImportError:
import sys
sys.path.append("../..")
from config import get_version
return get_version()
def printVersion():
print getVersion()
def getDataFiles():
''' Load the data files from checks.d '''
import glob
curpath = os.path.dirname(os.path.join(os.path.realpath(__file__)))
checksd_path = os.path.join(curpath, 'checks.d')
checksd_glob = os.path.join(checksd_path, '*.py')
# Find all py files in the checks.d directory
checks = []
for check in glob.glob(checksd_glob):
check = os.path.basename(check)
checks.append(check)
return [('share/datadog/agent/checks.d', ['checks.d/%s' % c for c in checks])]
if __name__ == "__main__":
setup(name='datadog-agent-lib',
version=getVersion(),
description='Datatadog monitoring agent check library',
author='Datadog',
author_email='info@datadoghq.com',
url='http://datadoghq.com/',
packages=['checks', 'checks/db', 'checks/system', 'dogstream','pup', 'yaml'],
package_data={'checks': ['libs/*', 'libs/httplib2/*'], 'pup' : ['static/*', 'pup.html']},
data_files=getDataFiles()
)
|
Python
| 0
|
@@ -1403,16 +1403,40 @@
, 'yaml'
+, 'checks/libs/httplib2'
%5D,%0A
@@ -1477,27 +1477,8 @@
s/*'
-, 'libs/httplib2/*'
%5D, '
|
a9ff99f94938c5e50038b9d98200c5247e651c35
|
Fix AttributeError: module 'config' has no attribute 'expires'
|
utils/ignores.py
|
utils/ignores.py
|
import random
import time
import config
import log as logging
def check_ignored(host, channel):
ignores = config.expires['global']
if channel in config.expires['channel'].keys():
ignores.extend(config.expires['channel'][channel])
for i in ignores:
for (uhost, expires) in i:
# if duration is not None, check if it's in the past, else say True
is_past = time.time() > expires if expires is not None else True
if host == uhost and is_past:
return True
elif host == uhost and not is_past:
del config.ignores['channel'][channel][host]
break
return False
def add_ignore(irc, event, args):
host = args[0]
base_message = "Ignoring %s for %s seconds"
indefinite = "Ignoring %s indefinately"
if len(args) > 1:
if args[1] == 'random':
duration = random.randrange(100, 10000)
expires = duration + int(time.time())
else:
duration = int(args[1])
expires = duration + int(time.time())
else:
expires = None
channel = args[2] if len(args) > 2 else None
if channel is not None:
try:
i = config.ignores['channels'][channel]
except KeyError:
i = config.ignores['channels'][channel] = []
i.append([host, expires])
else:
i = config.ignores['global']
i.append([host, expires])
if expires is not None:
if channel is not None:
logging.info(base_message + " in %s", host, duration, channel)
else:
logging.info(base_message, host, duration)
else:
if channel is not None:
logging.info(indefinite + " in %s", host, channel)
else:
logging.info(indefinite, host)
|
Python
| 0.014043
|
@@ -152,28 +152,28 @@
l in config.
-expi
+igno
res%5B'channel
|
7de5d99866164c0f17aa85f8cdd910132ac35667
|
use re.split instead of string.split
|
topiary/rna/common.py
|
topiary/rna/common.py
|
# Copyright (c) 2015. Mount Sinai School of Medicine
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def infer_delimiter(filename, comment_char="#", n_lines=3):
"""
Given a file which contains data separated by one of the following:
- commas
- tabs
- spaces
Return the most likely separator by sniffing the first few lines
of the file's contents.
"""
lines = []
with open(filename, "r") as f:
for line in f:
if line.startswith(comment_char):
continue
if len(lines) < n_lines:
lines.append(line)
else:
break
if len(lines) < n_lines:
raise ValueError(
"Not enough lines in %s to infer delimiter" % filename)
# the split function defaults to splitting on multiple spaces,
# which here corresponds to a candidate value of None
candidate_delimiters = ["\t", ",", None]
for candidate_delimiter in candidate_delimiters:
counts = [len(line.split(candidate_delimiter)) for line in lines]
first_line_count = counts[0]
if all(c == first_line_count for c in counts) and first_line_count > 1:
if candidate_delimiter is None:
return "\s+"
else:
return candidate_delimiter
raise ValueError("Could not determine delimiter for %s" % filename)
def check_required_columns(df, filename, required_columns):
"""
Ensure that all required columns are present in the given dataframe,
otherwise raise an exception.
"""
available_columns = set(df.columns)
for column_name in required_columns:
if column_name not in available_columns:
raise ValueError("FPKM tracking file %s missing column '%s'" % (
filename,
column_name))
|
Python
| 0.000032
|
@@ -593,16 +593,27 @@
cense.%0A%0A
+import re%0A%0A
%0Adef inf
@@ -1282,133 +1282,8 @@
me)%0A
- # the split function defaults to splitting on multiple spaces,%0A # which here corresponds to a candidate value of None%0A
@@ -1317,20 +1317,21 @@
%22, %22,%22,
-None
+%22%5Cs+%22
%5D%0A fo
@@ -1399,19 +1399,17 @@
= %5Blen(
-lin
+r
e.split(
@@ -1427,16 +1427,22 @@
elimiter
+, line
)) for l
@@ -1576,103 +1576,8 @@
1:%0A
- if candidate_delimiter is None:%0A return %22%5Cs+%22%0A else:%0A
|
9c90c539f83551de2645522c22ccbd0c75d34be3
|
Fix Mapbox routing fixture shape
|
server/lib/python/cartodb_services/test/test_mapboxrouting.py
|
server/lib/python/cartodb_services/test/test_mapboxrouting.py
|
import unittest
from mock import Mock
from cartodb_services.mapbox import MapboxRouting
from cartodb_services.mapbox.routing import DEFAULT_PROFILE
from cartodb_services.tools.exceptions import ServiceException
from cartodb_services.tools import Coordinate
from credentials import mapbox_api_key
INVALID_TOKEN = 'invalid_token'
VALID_WAYPOINTS = [Coordinate(-73.989, 40.733), Coordinate(-74, 40.733)]
NUM_WAYPOINTS_MAX = 25
INVALID_WAYPOINTS_EMPTY = []
INVALID_WAYPOINTS_MIN = [Coordinate(-73.989, 40.733)]
INVALID_WAYPOINTS_MAX = [Coordinate(-73.989, 40.733)
for x in range(0, NUM_WAYPOINTS_MAX + 2)]
VALID_PROFILE = DEFAULT_PROFILE
INVALID_PROFILE = 'invalid_profile'
WELL_KNOWN_SHAPE = [(40.73312, -73.98891), (40.73353, -73.98987),
(40.73398, -73.99095), (40.73453, -73.99227),
(40.73531, -73.99412), (40.73467, -73.99459),
(40.73442, -73.99477), (40.73435, -73.99482),
(40.73403, -73.99505), (40.73344, -73.99549),
(40.73286, -73.9959), (40.73226, -73.99635),
(40.73186, -73.99664), (40.73147, -73.99693),
(40.73141, -73.99698), (40.73147, -73.99707),
(40.73219, -73.99856), (40.73222, -73.99861),
(40.73293, -74.00007), (40.733, -74.00001)]
WELL_KNOWN_LENGTH = 1317.9
class MapboxRoutingTestCase(unittest.TestCase):
def setUp(self):
self.routing = MapboxRouting(token=mapbox_api_key(), logger=Mock())
def test_invalid_profile(self):
with self.assertRaises(ValueError):
self.routing.directions(VALID_WAYPOINTS, INVALID_PROFILE)
def test_invalid_waypoints_empty(self):
with self.assertRaises(ValueError):
self.routing.directions(INVALID_WAYPOINTS_EMPTY, VALID_PROFILE)
def test_invalid_waypoints_min(self):
with self.assertRaises(ValueError):
self.routing.directions(INVALID_WAYPOINTS_MIN, VALID_PROFILE)
def test_invalid_waypoints_max(self):
with self.assertRaises(ValueError):
self.routing.directions(INVALID_WAYPOINTS_MAX, VALID_PROFILE)
def test_invalid_token(self):
invalid_routing = MapboxRouting(token=INVALID_TOKEN, logger=Mock())
with self.assertRaises(ServiceException):
invalid_routing.directions(VALID_WAYPOINTS,
VALID_PROFILE)
def test_valid_request(self):
route = self.routing.directions(VALID_WAYPOINTS, VALID_PROFILE)
self.assertEqual(route.shape, WELL_KNOWN_SHAPE)
self.assertEqual(route.length, WELL_KNOWN_LENGTH)
assert route.duration # The duration may change between executions
|
Python
| 0
|
@@ -1314,23 +1314,66 @@
.732
-93, -74.00007),
+25, -73.99868), (40.73293, -74.00007),%0A
(40
|
30edb922d26357f509cd31f55648a1c5f072ccf5
|
Convert message to str for debugging
|
tornadose/handlers.py
|
tornadose/handlers.py
|
"""Custom request handlers for pushing data to connected clients."""
import logging
from tornado import gen
from tornado.web import RequestHandler
from tornado.websocket import WebSocketHandler, WebSocketClosedError
from tornado.queues import Queue
from tornado.iostream import StreamClosedError
from tornado.log import access_log
from . import stores
logger = logging.getLogger('tornadose.handlers')
class BaseHandler(RequestHandler):
"""Base handler for subscribers. To be compatible with data stores
defined in :mod:`tornadose.stores`, custom handlers should inherit
this class and implement the :meth:`submit` and :meth:`publish`
methods.
"""
def initialize(self, store):
"""Common initialization of handlers happens here. If additional
initialization is required, this method must either be called with
``super`` or the child class must assign the ``store`` attribute and
register itself with the store.
"""
assert isinstance(store, stores.BaseStore)
self.store = store
self.store.register(self)
def submit(self, message):
"""Submit a new message to be published. This method must be
implemented by child classes.
"""
raise NotImplementedError('submit must be implemented!')
def publish(self):
"""Push a message to the subscriber. This method must be
implemented by child classes.
"""
raise NotImplementedError('publish must be implemented!')
class EventSource(BaseHandler):
"""Handler for server-sent events a.k.a. EventSource.
The EventSource__ interface has a few advantages over websockets:
* It is a normal HTTP connection and so can be more easily monitored
than websockets using tools like curl__ or HTTPie__.
* Browsers generally automatically try to reestablish a lost
connection.
* The publish/subscribe pattern is better suited to some applications
than the full duplex model of websockets.
__ https://developer.mozilla.org/en-US/docs/Web/API/EventSource
__ http://curl.haxx.se/
__ https://github.com/jkbrzt/httpie
"""
def initialize(self, store, period=None):
"""If ``period`` is given, publishers will sleep for
approximately the given time in order to throttle data
speeds.
"""
super(EventSource, self).initialize(store)
assert isinstance(period, (int, float)) or period is None
self.period = period
self.finished = False
self.set_header('content-type', 'text/event-stream')
self.set_header('cache-control', 'no-cache')
def prepare(self):
"""Log access."""
request_time = 1000.0 * self.request.request_time()
access_log.info(
"%d %s %.2fms", self.get_status(),
self._request_summary(), request_time)
@gen.coroutine
def submit(self, message):
"""Receive incoming data."""
logger.debug('Incoming message: ' + message)
yield self.publish(message)
@gen.coroutine
def publish(self, message):
"""Pushes data to a listener."""
try:
self.write('data: {}\n\n'.format(message))
yield self.flush()
except StreamClosedError:
self.finished = True
@gen.coroutine
def get(self, *args, **kwargs):
try:
while not self.finished:
if self.period is not None:
yield gen.sleep(self.period)
else:
yield gen.moment
except Exception:
pass
finally:
self.store.deregister(self)
self.finish()
class WebSocketSubscriber(BaseHandler, WebSocketHandler):
"""A Websocket-based subscription handler to be used with
:class:`tornadose.stores.QueueStore`.
"""
def initialize(self, store):
super(WebSocketSubscriber, self).initialize(store)
self.messages = Queue()
self.finished = False
@gen.coroutine
def open(self):
"""Register with the publisher."""
self.store.register(self)
while not self.finished:
message = yield self.messages.get()
yield self.publish(message)
def on_close(self):
self._close()
def _close(self):
self.store.deregister(self)
self.finished = True
@gen.coroutine
def submit(self, message):
yield self.messages.put(message)
@gen.coroutine
def publish(self, message):
"""Push a new message to the client. The data will be
available as a JSON object with the key ``data``.
"""
try:
self.write_message(dict(data=message))
except WebSocketClosedError:
self._close()
|
Python
| 0.999993
|
@@ -3099,24 +3099,28 @@
ge: ' +
+str(
message)
%0D%0A
@@ -3111,16 +3111,17 @@
message)
+)
%0D%0A
|
134406e139d5ca55fbd631eee79d73bf0f2a3e37
|
Fix boot view.
|
app/dashboard/views/boot.py
|
app/dashboard/views/boot.py
|
# Copyright (C) 2014 Linaro Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from flask import (
abort,
current_app as app,
render_template,
request,
)
from flask.views import View
from dashboard.utils.backend import (
extract_response_metadata,
get_job,
today_date,
)
PAGE_TITLE = 'Kernel CI Dashboard — Boot Reports'
class BootsView(View):
def dispatch_request(self):
results_title = 'Available Boot Reports'
search_filter = ""
if request.args:
search_filter = " ".join([arg for arg in request.args])
return render_template(
'boots-all.html',
page_title=PAGE_TITLE,
server_date=today_date(),
results_title=results_title,
search_filter=search_filter
)
class BootLabView(View):
def dispatch_request(self, **kwargs):
page_title = PAGE_TITLE + ' ‐Board %(board)s' % kwargs
body_title = 'Boot details for board %(board)s' % kwargs
url_translation = app.config.get('KNOWN_GIT_URLS')
return render_template(
'boots-id.html',
page_title=page_title,
body_title=body_title,
board=kwargs['board'],
job=kwargs['job'],
kernel=kwargs['kernel'],
defconfig=kwargs['defconfig'],
url_translation=url_translation,
)
class BootIdView(View):
def dispatch_request(self, *args, **kwargs):
page_title = (
PAGE_TITLE +
' ‐Board %(board)s (%(lab_name)s)'
% kwargs
)
body_title = (
'Boot details for board %(board)s ' +
'<small>%(lab_name)s)</small>' % kwargs
)
boot_id = request.args.get("_id", None)
url_translation = app.config.get("KNOWN_GIT_URLS")
return render_template(
"boots-id.html",
page_title=page_title,
body_title=body_title,
board=kwargs['board'],
job=kwargs['job'],
kernel=kwargs['kernel'],
defconfig=kwargs['defconfig'],
url_translation=url_translation,
lab_name=kwargs["lab_name"],
boot_id=boot_id,
)
class BootJobKernelView(View):
def dispatch_request(self, **kwargs):
job = kwargs['job']
kernel = kwargs['kernel']
job_id = '%s-%s' % (job, kernel)
storage_id = 'boot-' + job_id
body_title = body_title = (
'Boot details for %s ‐ %s' % (job, kernel)
)
params = {'id': job_id}
response = get_job(**params)
metadata = {}
base_url = ''
commit_url = ''
if response.status_code == 200:
metadata, base_url, commit_url, result = extract_response_metadata(
response
)
return render_template(
'boots-job-kernel.html',
page_title=PAGE_TITLE,
body_title=body_title,
base_url=base_url,
commit_url=commit_url,
job_id=job_id,
job=job,
kernel=kernel,
metadata=metadata,
storage_id=storage_id,
)
else:
abort(response.status_code)
class BootJobView(View):
def dispatch_request(self, **kwargs):
job = kwargs['job']
body_title = 'Boot details for %s' % job
return render_template(
'boots-job.html',
page_title=PAGE_TITLE,
body_title=body_title,
job=job,
)
|
Python
| 0.000188
|
@@ -2221,16 +2221,18 @@
name)s)'
+ %25
%0A
@@ -2235,18 +2235,16 @@
- %25
kwargs%0A
@@ -2333,18 +2333,16 @@
s '
- +
%0A
@@ -2354,16 +2354,17 @@
'%3Csmall%3E
+(
%25(lab_na
|
ab505466859a5d2e5b397d1fb1fc3271977a2024
|
modify register validation
|
app/user/forms.py
|
app/user/forms.py
|
from flask_wtf import Form
from wtforms import StringField, PasswordField, TextAreaField, SelectField, validators
from wtforms.ext.sqlalchemy.fields import QuerySelectField
from .models import User
from app.post.models import Post_type
from flask.ext.bcrypt import check_password_hash
class LoginForm(Form):
username = StringField('Username', validators=[validators.Required('Username tidak boleh kosong')])
password = PasswordField('Password', validators=[validators.Required('Username tidak boleh kosong')])
#Add a validation when Logged In
def validate(self):
rv = Form.validate(self)
if not rv:
return False
user = User.query.filter_by(username=self.username.data).first()
if user is None or not user:
self.username.errors.append('Unknown username')
return False
if not check_password_hash(user.password, self.password.data):
self.password.errors.append('Invalid password')
return False
self.user = user
return True
class RegisterForm(Form):
full_name = StringField('Full Name', validators=[validators.Required('Nama tidak boleh kosong')])
username = StringField('Username', validators=[validators.Required('Username tidak boleh kosong')])
email = StringField('Email', validators=[validators.Required('Email tidak boleh kosong')])
password = PasswordField('Password', validators=[validators.Required('Password Tidak boleh kosong'),
validators.EqualTo('confirm', message='Password harus sama')])
confirm = PasswordField('Ulangi Password')
def validate(self):
rv = Form.validate(self)
if not rv:
return False
if User.query.filter_by(username=self.username.data).first():
self.username.errors.append("Username Telah digunakan")
return False
if User.query.filte_by(email=self.email.data).first():
self.email.errors.append("Email yang anda masukkan telah terdaftar")
return False
return True
class CreatePost(Form):
title = StringField('title', validators=[validators.Required('Judul tidak boleh kosong')])
content = TextAreaField('Content', validators=[validators.Required('Konten tidak boleh kosong'),
validators.Length(max=100, message="Konten maksimal 100 karakter")])
post_type = SelectField('Type', coerce=int)
|
Python
| 0.000001
|
@@ -1939,16 +1939,17 @@
ry.filte
+r
_by(emai
|
e2fac2a79b38c5429a9990221ddc47aacadaab01
|
Add a mention of default values to agg docstring (#65)
|
featuretools/primitives/aggregation_primitive_base.py
|
featuretools/primitives/aggregation_primitive_base.py
|
import copy
import functools
from .primitive_base import PrimitiveBase
from .utils import inspect_function_args
class AggregationPrimitive(PrimitiveBase):
"""Feature for a parent entity that summarizes
related instances in a child entity"""
stack_on = None # whitelist of primitives that can be in input_types
stack_on_exclude = None # blacklist of primitives that can be insigniture
base_of = None # whitelist of primitives this prim can be input for
base_of_exclude = None # primitives this primitive can't be input for
stack_on_self = True # whether or not it can be in input_types of self
allow_where = True # whether DFS can apply where clause to this primitive
def __init__(self, base_features, parent_entity, use_previous=None,
where=None):
# Any edits made to this method should also be made to the
# new_class_init method in make_agg_primitive
if not hasattr(base_features, '__iter__'):
base_features = [self._check_feature(base_features)]
else:
base_features = [self._check_feature(bf) for bf in base_features]
msg = "all base features must share the same entity"
assert len(set([bf.entity for bf in base_features])) == 1, msg
self.base_features = base_features[:]
self.child_entity = base_features[0].entity
if where is not None:
self.where = self._check_feature(where)
msg = "Where feature must be defined on child entity {}".format(
self.child_entity.id)
assert self.where.entity.id == self.child_entity.id, msg
if use_previous:
assert self.child_entity.has_time_index(), (
"Applying function that requires time index to entity that "
"doesn't have one")
self.use_previous = use_previous
super(AggregationPrimitive, self).__init__(parent_entity,
self.base_features)
def _where_str(self):
if self.where is not None:
where_str = u" WHERE " + self.where.get_name()
else:
where_str = ''
return where_str
def _use_prev_str(self):
if self.use_previous is not None:
use_prev_str = u", Last {}".format(self.use_previous.get_name())
else:
use_prev_str = u''
return use_prev_str
def _base_feature_str(self):
return u', ' \
.join([bf.get_name() for bf in self.base_features])
def generate_name(self):
where_str = self._where_str()
use_prev_str = self._use_prev_str()
base_features_str = self._base_feature_str()
return u"%s(%s.%s%s%s)" % (self.name.upper(),
self.child_entity.name,
base_features_str,
where_str, use_prev_str)
def make_agg_primitive(function, input_types, return_type, name=None,
                       stack_on_self=True, stack_on=None,
                       stack_on_exclude=None, base_of=None,
                       base_of_exclude=None, description='A custom primitive',
                       cls_attributes=None, uses_calc_time=False,
                       commutative=False):
    '''Returns a new aggregation primitive class

    Args:
        function (function): function that takes in an array and applies some
            transformation to it.
        input_types (list[:class:`.Variable`]): variable types of the inputs
        return_type (:class:`.Variable`): variable type of return
        name (string): name of the function. If no name is provided, the name
            of `function` will be used
        stack_on_self (bool): whether it can be in input_types of self
        stack_on (list[:class:`.PrimitiveBase`]): whitelist of primitives that
            can be input_types
        stack_on_exclude (list[:class:`.PrimitiveBase`]): blacklist of
            primitives that cannot be input_types
        base_of (list[:class:`.PrimitiveBase`]): whitelist of primitives that
            can have this primitive in input_types
        base_of_exclude (list[:class:`.PrimitiveBase`]): blacklist of
            primitives that cannot have this primitive in input_types
        description (string): description of primitive
        cls_attributes (dict): custom attributes to be added to class
        uses_calc_time (bool): if True, the cutoff time the feature is being
            calculated at will be passed to the function as the keyword
            argument 'time'.
        commutative (bool): If True, will only make one feature per unique set
            of base features

    Example:
        .. ipython :: python

            from featuretools.primitives import make_agg_primitive
            from featuretools.variable_types import DatetimeTimeIndex, Numeric

            def time_since_last(values, time=None):
                time_since = time - values.iloc[0]
                return time_since.total_seconds()

            TimeSinceLast = make_agg_primitive(
                time_since_last,
                [DatetimeTimeIndex],
                Numeric,
                description="Time since last related instance",
                uses_calc_time=True)
    '''
    # Build the new class dynamically from AggregationPrimitive.
    cls = {"__doc__": description}
    if cls_attributes is not None:
        cls.update(cls_attributes)
    name = name or function.__name__
    new_class = type(name, (AggregationPrimitive,), cls)
    new_class.name = name
    new_class.input_types = input_types
    new_class.return_type = return_type
    new_class.stack_on = stack_on
    new_class.stack_on_exclude = stack_on_exclude
    new_class.stack_on_self = stack_on_self
    new_class.base_of = base_of
    new_class.base_of_exclude = base_of_exclude
    new_class.commutative = commutative
    # Detect extra keyword arguments on `function` (and the special 'time'
    # argument when uses_calc_time is set).
    new_class, default_kwargs = inspect_function_args(new_class,
                                                     function,
                                                     uses_calc_time)

    if len(default_kwargs) > 0:
        new_class.default_kwargs = default_kwargs

        # The function takes keyword arguments, so the generated class needs
        # a custom __init__ that captures them in a functools.partial.
        def new_class_init(self, base_features, parent_entity,
                           use_previous=None, where=None, **kwargs):
            # Mirrors AggregationPrimitive.__init__; keep the two in sync.
            if not hasattr(base_features, '__iter__'):
                base_features = [self._check_feature(base_features)]
            else:
                base_features = [self._check_feature(bf)
                                 for bf in base_features]
            msg = "all base features must share the same entity"
            assert len(set([bf.entity for bf in base_features])) == 1, msg
            self.base_features = base_features[:]
            self.child_entity = base_features[0].entity
            if where is not None:
                self.where = self._check_feature(where)
                msg = "Where feature must be defined on child entity {}"
                msg = msg.format(self.child_entity.id)
                assert self.where.entity.id == self.child_entity.id, msg
            if use_previous:
                assert self.child_entity.has_time_index(), (
                    "Applying function that requires time index to entity that"
                    " doesn't have one")
            self.use_previous = use_previous
            self.kwargs = copy.deepcopy(self.default_kwargs)
            self.kwargs.update(kwargs)
            self.partial = functools.partial(function, **self.kwargs)
            super(AggregationPrimitive, self).__init__(parent_entity,
                                                       self.base_features)
        new_class.__init__ = new_class_init
        new_class.get_function = lambda self: self.partial
    else:
        # creates a lambda function that returns function every time
        new_class.get_function = lambda self, f=function: f

    # infers default_value by passing empty data
    try:
        new_class.default_value = function(*[[]] * len(input_types))
    except Exception:
        # Function could not handle empty input; leave default_value unset.
        pass

    return new_class
|
Python
| 0
|
@@ -3365,24 +3365,91 @@
mitive class
+. The primitive infers default%0A values by passing in empty data.
%0A%0A Args:%0A
|
7f600ca71b6461fa558ca48b7e7e5f059a9e5ff1
|
remove unused import
|
frappe/website/doctype/website_theme/website_theme.py
|
frappe/website/doctype/website_theme/website_theme.py
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.model.document import Document
from frappe.utils import get_path
from os.path import join as join_path, exists as path_exists, abspath, splitext
from frappe.utils import update_progress_bar
class WebsiteTheme(Document):
    """Controller for the Website Theme doctype.

    Renders the theme SCSS from a template and compiles it into a CSS file
    under the site's public files folder using a node script.
    """

    def validate(self):
        self.validate_if_customizable()
        self.render_theme()
        self.generate_bootstrap_theme()

    def on_update(self):
        # Standard (non-custom) themes edited in developer mode are exported
        # back to the module folder so changes can be committed.
        if (not self.custom
            and frappe.local.conf.get('developer_mode')
            and not (frappe.flags.in_import or frappe.flags.in_test)):
            self.export_doc()
        self.clear_cache_if_current_theme()

    def is_standard_and_not_valid_user(self):
        # True when a standard theme is touched outside developer mode and
        # outside import/test/migrate contexts — i.e. a change to disallow.
        return (not self.custom
            and not frappe.local.conf.get('developer_mode')
            and not (frappe.flags.in_import or frappe.flags.in_test or frappe.flags.in_migrate))

    def on_trash(self):
        if self.is_standard_and_not_valid_user():
            frappe.throw(_("You are not allowed to delete a standard Website Theme"),
                frappe.PermissionError)

    def validate_if_customizable(self):
        if self.is_standard_and_not_valid_user():
            frappe.throw(_("Please Duplicate this Website Theme to customize."))

    def render_theme(self):
        # Renders the SCSS template with this document's values.
        # NOTE(review): theme_scss is overwritten again by
        # generate_bootstrap_theme() via get_scss() — confirm both are needed.
        self.theme_scss = frappe.render_template('frappe/website/doctype/website_theme/website_theme_template.scss', self.as_dict())

    def export_doc(self):
        """Export to standard folder `[module]/website_theme/[name]/[name].json`."""
        from frappe.modules.export_file import export_to_files
        export_to_files(record_list=[['Website Theme', self.name]], create_init=True)

    def clear_cache_if_current_theme(self):
        if frappe.flags.in_install == 'frappe': return
        website_settings = frappe.get_doc("Website Settings", "Website Settings")
        # Only clear the website cache when this theme is the active one.
        if getattr(website_settings, "website_theme", None) == self.name:
            website_settings.clear_cache()

    def generate_bootstrap_theme(self):
        """Compile this theme's SCSS to a CSS file via generate_bootstrap_theme.js."""
        from subprocess import Popen, PIPE

        # create theme file in site public files folder
        folder_path = abspath(frappe.utils.get_files_path('website_theme', is_private=False))
        # create folder if not exist
        frappe.create_folder(folder_path)

        if self.custom:
            self.delete_old_theme_files(folder_path)

        # add a random suffix
        suffix = frappe.generate_hash('Website Theme', 8) if self.custom else 'style'
        file_name = frappe.scrub(self.name) + '_' + suffix + '.css'
        output_path = join_path(folder_path, file_name)

        self.theme_scss = content = get_scss(self)
        # Escape newlines so the SCSS can be passed as a single argv element.
        content = content.replace('\n', '\\n')
        command = ['node', 'generate_bootstrap_theme.js', output_path, content]

        process = Popen(command, cwd=frappe.get_app_path('frappe', '..'), stdout=PIPE, stderr=PIPE)
        stderr = process.communicate()[1]

        if stderr:
            # Surface the compiler error to the user, preserving line breaks.
            stderr = frappe.safe_decode(stderr)
            stderr = stderr.replace('\n', '<br>')
            frappe.throw('<div style="font-family: monospace;">{stderr}</div>'.format(stderr=stderr))
        else:
            self.theme_url = '/files/website_theme/' + file_name
            frappe.msgprint(_('Compiled Successfully'), alert=True)

    def delete_old_theme_files(self, folder_path):
        """Remove previously generated CSS files for this theme."""
        import os
        for fname in os.listdir(folder_path):
            if fname.startswith(frappe.scrub(self.name) + '_') and fname.endswith('.css'):
                os.remove(os.path.join(folder_path, fname))

    def generate_theme_if_not_exist(self):
        """Regenerate the compiled CSS if the recorded file is missing."""
        bench_path = frappe.utils.get_bench_path()
        if self.theme_url:
            # theme_url starts with '/', strip it to build a relative path.
            theme_path = join_path(bench_path, 'sites', self.theme_url[1:])
            if not path_exists(theme_path):
                self.generate_bootstrap_theme()
        else:
            self.generate_bootstrap_theme()

    def set_as_default(self):
        """Make this theme the active one in Website Settings."""
        website_settings = frappe.get_doc('Website Settings')
        website_settings.website_theme = self.name
        website_settings.ignore_validate = True
        website_settings.save()
def add_website_theme(context):
    """Attach the active website theme to a page render context."""
    context.theme = frappe._dict()
    if context.disable_website_theme:
        return
    context.theme = get_active_theme() or frappe._dict()
def get_active_theme():
    """Return the Website Theme doc selected in Website Settings, or None."""
    theme_name = frappe.db.get_single_value("Website Settings", "website_theme")
    if not theme_name:
        return None
    try:
        return frappe.get_doc("Website Theme", theme_name)
    except frappe.DoesNotExistError:
        return None
def get_scss(website_theme):
    """
    Render `website_theme_template.scss` with the values defined in Website Theme.

    params:
    website_theme - instance of a Website Theme
    """
    ignored = {
        part.strip()
        for part in (website_theme.get('imports_to_ignore') or '').split(',')
    }
    context = website_theme.as_dict()
    # Import everything the installed apps provide, minus the ignore list.
    context['website_theme_scss'] = get_scss_paths().difference(ignored)
    return frappe.render_template(
        'frappe/website/doctype/website_theme/website_theme_template.scss',
        context)
def get_scss_paths():
    """
    Return a set of SCSS import paths from all apps that provide `website.scss`.

    If `$BENCH_PATH/apps/frappe/frappe/public/scss/website.scss` exists, the
    returned set will contain 'frappe/public/scss/website'.
    """
    bench_path = frappe.utils.get_bench_path()
    paths = set()
    for app in frappe.get_installed_apps():
        relative = join_path(app, 'public/scss/website.scss')
        if path_exists(get_path('apps', app, relative, base=bench_path)):
            # Drop the .scss extension to get the SCSS import path.
            paths.add(splitext(relative)[0])
    return paths
def after_migrate():
    """
    Regenerate CSS files after migration.

    Necessary to reflect possible changes in the imported SCSS files. Called at
    the end of every `bench migrate`.
    """
    for entry in frappe.get_list('Website Theme'):
        theme_doc = frappe.get_doc('Website Theme', entry.name)
        theme_doc.validate()
|
Python
| 0.000013
|
@@ -348,53 +348,8 @@
text
-%0Afrom frappe.utils import update_progress_bar
%0A%0Acl
|
8bc108c5a8b4ce3fa5192363576eef7f67f4d82e
|
Update tracking params
|
app/utils/meta.py
|
app/utils/meta.py
|
from urllib.parse import unquote, urlparse
import aiohttp
from sanic.log import logger
from .. import settings
def get_watermark(request, watermark: str) -> tuple[str, bool]:
    """Decide which watermark to render.

    Returns ``(watermark, updated)`` where ``updated`` is True when the
    requested watermark was rejected or rewritten.
    """
    api_key = _get_api_key(request)
    if api_key:
        # Log only a masked version, never the full key.
        api_mask = api_key[:2] + "***" + api_key[-2:]
        logger.info(f"Authenticated with {api_mask}")
        if api_key in settings.API_KEYS:
            # Valid API key: no watermark at all.
            return "", False

    if watermark == settings.DISABLED_WATERMARK:
        # Explicit removal request; only honored for allow-listed referers.
        referer = _get_referer(request)
        logger.info(f"Watermark removal referer: {referer}")
        if referer:
            domain = urlparse(referer).netloc
            if domain in settings.ALLOWED_WATERMARKS:
                return "", False
        return settings.DEFAULT_WATERMARK, True

    if watermark:
        if watermark == settings.DEFAULT_WATERMARK:
            # Explicitly passing the default is redundant; flagged as updated.
            logger.warning(f"Redundant watermark: {watermark}")
            return watermark, True

        if watermark not in settings.ALLOWED_WATERMARKS:
            logger.warning(f"Unknown watermark: {watermark}")
            return settings.DEFAULT_WATERMARK, True

        return watermark, False

    return settings.DEFAULT_WATERMARK, False
async def track(request, lines: list[str]):
    """Report the rendered text to the remote tracking endpoint, if configured."""
    text = " ".join(lines).strip()
    # Requests that customize size or watermark are not tracked.
    trackable = not any(
        name in request.args for name in ["height", "width", "watermark"]
    )
    referer = _get_referer(request)
    if referer:
        source = urlparse(referer).netloc
    else:
        source = "memegen.link"
    if text and trackable and settings.REMOTE_TRACKING_URL:
        async with aiohttp.ClientSession() as session:
            params = dict(text=text, source=source, context=unquote(request.url))
            logger.info(f"Tracking request: {params}")
            headers = {"X-API-KEY": _get_api_key(request) or ""}
            response = await session.get(
                settings.REMOTE_TRACKING_URL, params=params, headers=headers
            )
            if response.status != 200:
                try:
                    message = await response.json()
                except aiohttp.client_exceptions.ContentTypeError:
                    # Non-JSON error body; fall back to the raw text.
                    # NOTE(review): aiohttp's response.text is a coroutine
                    # method — this logs the unawaited method object;
                    # presumably it should be `await response.text()`. TODO confirm.
                    message = response.text
                logger.error(f"Tracker response: {message}")
def _get_referer(request):
return request.headers.get("referer") or request.args.get("referer")
def _get_api_key(request):
return request.headers.get("x-api-key")
|
Python
| 0.000001
|
@@ -1366,144 +1366,8 @@
)%0A
- referer = _get_referer(request)%0A if referer:%0A source = urlparse(referer).netloc%0A else:%0A source = %22memegen.link%22%0A
@@ -1507,40 +1507,129 @@
ict(
-text=text, source=source, contex
+%0A text=text,%0A client=_get_referer(request) or %22https://memegen.link%22,%0A resul
t=un
@@ -1646,16 +1646,30 @@
est.url)
+,%0A
)%0A
|
29e56ec30c13c5fbb562e77cdb2c660d5fc52842
|
remove debugging print
|
freppledb/common/management/commands/generatetoken.py
|
freppledb/common/management/commands/generatetoken.py
|
#
# Copyright (C) 2021 by frePPLe bv
#
# This library is free software; you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero
# General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from ...auth import getWebserviceAuthorization
from django.core.management.base import BaseCommand, CommandError
from django.db import DEFAULT_DB_ALIAS
from freppledb import __version__
class Command(BaseCommand):
    help = """
    This command generates an API authentication token for a user.
    """

    requires_system_checks = False

    def get_version(self):
        return __version__

    def add_arguments(self, parser):
        """Register the positional user argument and the optional flags."""
        parser.add_argument("user", help="User running the command")
        parser.add_argument(
            "--expiry", help="Validity in days of the token", type=int, default=5
        )
        parser.add_argument(
            "--database",
            action="store",
            dest="database",
            default=DEFAULT_DB_ALIAS,
            help="Specifies the database to use",
        )

    def handle(self, **options):
        """Generate and return the token, printing a header when verbose."""
        token = getWebserviceAuthorization(
            database=options["database"],
            # secret=None lets the helper use the configured secret; the
            # previous hard-coded value was a debugging leftover.
            secret=None,
            user=options["user"],
            exp=options["expiry"] * 86400,
        )
        if options["verbosity"]:
            print(
                "Access token for %s, valid for %s days:"
                % (options["user"], options["expiry"])
            )
        return token
|
Python
| 0.000081
|
@@ -1671,19 +1671,8 @@
ret=
-%22perepe%22, #
None
|
01fad6e4c6faa2cd8ad443d39b0b1da8f503d532
|
Fix registration bug
|
imagekit/registry.py
|
imagekit/registry.py
|
from .exceptions import AlreadyRegistered, NotRegistered
from .signals import before_access, source_created, source_changed, source_deleted
from .utils import call_strategy_method
class GeneratorRegistry(object):
    """
    An object for registering generators. This registry provides
    a convenient way for a distributable app to define default generators
    without locking the users of the app into it.
    """
    def __init__(self):
        # Maps id -> registered generator (usually a class; see get()).
        self._generators = {}
        before_access.connect(self.before_access_receiver)

    def register(self, id, generator):
        """Register a generator under ``id``; ids must be unique."""
        if id in self._generators:
            raise AlreadyRegistered('The generator with id %s is'
                                    ' already registered' % id)
        self._generators[id] = generator

    def unregister(self, id, generator):
        # TODO: Either don't require the generator, or--if we do--assert that it's registered with the provided id
        try:
            del self._generators[id]
        except KeyError:
            raise NotRegistered('The generator with id %s is not'
                                ' registered' % id)

    def get(self, id, **kwargs):
        """Return the generator for ``id``; callables are instantiated with kwargs."""
        try:
            generator = self._generators[id]
        except KeyError:
            raise NotRegistered('The generator with id %s is not'
                                ' registered' % id)
        if callable(generator):
            return generator(**kwargs)
        else:
            return generator

    def get_ids(self):
        return self._generators.keys()

    def before_access_receiver(self, sender, file, **kwargs):
        generator = file.generator
        # get() instantiates callable (class) generators, so file.generator
        # is an instance while the registry stores the class itself; the old
        # `generator in ...values()` test therefore never matched. Compare by
        # class instead.
        # FIXME: I guess this means you can't register functions?
        if generator.__class__ in self._generators.values():
            # Only invoke the strategy method for registered generators.
            call_strategy_method(generator, 'before_access', file=file)
class SourceGroupRegistry(object):
    """
    The source group registry is responsible for listening to source_* signals
    on source groups, and relaying them to the image generator strategies of the
    appropriate generators.

    In addition, registering a new source group also registers its generated
    files with that registry.
    """
    # Maps each source_* signal to the strategy callback name it triggers.
    _signals = {
        source_created: 'on_source_created',
        source_changed: 'on_source_changed',
        source_deleted: 'on_source_deleted',
    }

    def __init__(self):
        # Maps source group -> set of generator ids registered for it.
        self._source_groups = {}
        for signal in self._signals.keys():
            signal.connect(self.source_group_receiver)

    def register(self, generator_id, source_group):
        # Imported locally, presumably to avoid a circular import — TODO confirm.
        from .specs.sourcegroups import SourceGroupFilesGenerator
        generator_ids = self._source_groups.setdefault(source_group, set())
        generator_ids.add(generator_id)
        # Registering a source group also registers its generated files.
        generatedfile_registry.register(generator_id,
                SourceGroupFilesGenerator(source_group, generator_id))

    def unregister(self, generator_id, source_group):
        from .specs.sourcegroups import SourceGroupFilesGenerator
        generator_ids = self._source_groups.setdefault(source_group, set())
        if generator_id in generator_ids:
            generator_ids.remove(generator_id)
            generatedfile_registry.unregister(generator_id,
                    SourceGroupFilesGenerator(source_group, generator_id))

    def source_group_receiver(self, sender, source, signal, **kwargs):
        """
        Relay source group signals to the appropriate spec strategy.
        """
        from .files import GeneratedImageFile
        source_group = sender

        # Ignore signals from unregistered groups.
        if source_group not in self._source_groups:
            return

        # One spec instance per generator registered for this group.
        specs = [generator_registry.get(id, source=source) for id in
                 self._source_groups[source_group]]
        callback_name = self._signals[signal]

        for spec in specs:
            file = GeneratedImageFile(spec)
            call_strategy_method(spec, callback_name, file=file)
class GeneratedFileRegistry(object):
    """
    An object for registering generated files with image generators. The two are
    associated with each other via a string id. We do this (as opposed to
    associating them directly by, for example, putting a ``generatedfiles``
    attribute on image generators) so that image generators can be overridden
    without losing the associated files. That way, a distributable app can
    define its own generators without locking the users of the app into it.
    """
    def __init__(self):
        # Maps a generated-files source -> set of generator ids.
        self._generatedfiles = {}

    def register(self, generator_id, generatedfiles):
        """
        Associates generated files with a generator id
        """
        ids = self._generatedfiles.setdefault(generatedfiles, set())
        ids.add(generator_id)

    def unregister(self, generator_id, generatedfiles):
        """
        Disassociates generated files with a generator id
        """
        try:
            self._generatedfiles[generatedfiles].remove(generator_id)
        except KeyError:
            # Unknown source or id not associated; nothing to do.
            pass

    def get(self, generator_id):
        """Yield files from every source registered under generator_id."""
        for source, ids in self._generatedfiles.items():
            if generator_id not in ids:
                continue
            for generated_file in source():
                yield generated_file
class Register(object):
    """
    Register generators and generated files.
    """
    def generator(self, id, generator=None):
        """Register a generator; usable as a class decorator when called
        with only an id."""
        if generator is not None:
            generator_registry.register(id, generator)
            return None

        def decorator(cls):
            self.generator(id, cls)
            return cls
        return decorator

    # iterable that returns kwargs or callable that returns iterable of kwargs
    def generatedfiles(self, generator_id, generatedfiles):
        generatedfile_registry.register(generator_id, generatedfiles)

    def source_group(self, generator_id, source_group):
        source_group_registry.register(generator_id, source_group)
class Unregister(object):
    """
    Unregister generators and generated files.
    """
    def generator(self, id, generator):
        """Remove the generator registered under ``id``."""
        generator_registry.unregister(id, generator)

    def generatedfiles(self, generator_id, generatedfiles):
        """Detach generated files from ``generator_id``."""
        generatedfile_registry.unregister(generator_id, generatedfiles)

    def source_group(self, generator_id, source_group):
        """Detach a source group from ``generator_id``."""
        source_group_registry.unregister(generator_id, source_group)
generator_registry = GeneratorRegistry()
generatedfile_registry = GeneratedFileRegistry()
source_group_registry = SourceGroupRegistry()
register = Register()
unregister = Unregister()
|
Python
| 0
|
@@ -1624,32 +1624,99 @@
= file.generator
+%0A%0A # FIXME: I guess this means you can't register functions?
%0A if gene
@@ -1720,16 +1720,26 @@
enerator
+.__class__
in self
|
9235d1aa35e6a597be3c497577de528425d6e046
|
comment cleanup
|
training/parse_osm.py
|
training/parse_osm.py
|
from lxml import etree
import ast
import re
# parse xml data, return a list of dicts representing addresses
def xmlToAddrList(xml_file):
    """Parse an OSM XML file and return a list of address dicts.

    One dict per <node>/<way> element, mapping each child <tag>'s `k`
    attribute to its `v` attribute. Other element types are skipped.
    """
    tree = etree.parse(xml_file)
    root = tree.getroot()
    addr_list = []
    for element in root:
        if element.tag == 'node' or element.tag == 'way':
            address = {}
            for x in element.iter('tag'):
                # Read the k/v attributes directly instead of round-tripping
                # the attrib mapping through str()/ast.literal_eval, which
                # breaks on values containing quotes.
                address[x.get('k')] = x.get('v')
            addr_list.append(address)
    return addr_list
# transform osm data into tagged training data
def osmToTraining(address_list):
train_data=[]
addr_index = 0
token_index = 0
osm_tags_to_addr_tags = {
"addr:house:number":"AddressNumber",
"addr:street:prefix":"StreetNamePreDirectional",
"addr:street:name":"StreetName",
"addr:street:type":"StreetNamePostType",
"addr:city":"PlaceName",
"addr:state":"StateName",
"addr:postcode":"ZipCode"}
for address in address_list:
addr_train = []
for key, value in address.items(): #iterate through dict ****
if key in osm_tags_to_addr_tags.keys(): #if the key is one of the defined osm tags
addr_train.append([value ,osm_tags_to_addr_tags[key]]) #add (token, tokentag)
train_data.append(addr_train)
return train_data
|
Python
| 0
|
@@ -571,16 +571,96 @@
dex = 0%0A
+%09# only the osm tags below will end up in training data; others will be ignored%0A
%09osm_tag
@@ -1018,35 +1018,8 @@
s():
- #iterate through dict ****
%0A%09%09%09
@@ -1061,51 +1061,8 @@
s():
- #if the key is one of the defined osm tags
%0A%09%09%09
@@ -1120,31 +1120,8 @@
y%5D%5D)
- #add (token, tokentag)
%0A%09%09t
|
d7bea2995fc54c15404b4b47cefae5fc7b0201de
|
FIX partner internal code compatibility with sign up
|
partner_internal_code/res_partner.py
|
partner_internal_code/res_partner.py
|
# -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import fields, models, api
class partner(models.Model):
    """res.partner extension that assigns a unique sequential internal code."""
    _inherit = 'res.partner'

    internal_code = fields.Char(
        'Internal Code',
        # copy=False: duplicating a partner (or portal sign-up, which copies
        # a template record) must not copy the code — it is unique and a
        # fresh value is assigned by create() instead.
        copy=False,
    )

    # we let this to base name search improoved
    # def name_search(self, cr, uid, name, args=None,
    #                 operator='ilike', context=None, limit=100):
    #     args = args or []
    #     res = []
    #     if name:
    #         recs = self.search(
    #             cr, uid, [('internal_code', operator, name)] + args,
    #             limit=limit, context=context)
    #         res = self.name_get(cr, uid, recs)
    #     res += super(partner, self).name_search(
    #         cr, uid,
    #         name=name, args=args, operator=operator, limit=limit)
    #     return res

    @api.model
    def create(self, vals):
        """Assign the next code from the partner.internal.code sequence
        when the caller did not provide one."""
        if not vals.get('internal_code', False):
            vals['internal_code'] = self.env[
                'ir.sequence'].next_by_code('partner.internal.code') or '/'
        return super(partner, self).create(vals)

    # Use a list (the conventional container) rather than a set literal.
    _sql_constraints = [
        ('internal_code_uniq', 'unique(internal_code)',
         'Internal Code must be unique!'),
    ]
|
Python
| 0
|
@@ -433,16 +433,42 @@
al Code'
+,%0A copy=False,%0A
)%0A%0A #
|
f4dd6ce804c3bac3b0f343004cde6b26aa49c674
|
Add G4 and G5 validation errors
|
app/validation.py
|
app/validation.py
|
import json
import re
import os
from flask import abort
from jsonschema import ValidationError, FormatChecker
from jsonschema.validators import validator_for
MINIMUM_SERVICE_ID_LENGTH = 10
MAXIMUM_SERVICE_ID_LENGTH = 20
JSON_SCHEMAS_PATH = './json_schemas'
SCHEMA_NAMES = [
'services-g4',
'services-g5',
'services-g6-scs',
'services-g6-saas',
'services-g6-paas',
'services-g6-iaas',
'services-update',
'users',
'users-auth',
'suppliers'
]
FORMAT_CHECKER = FormatChecker()
def load_schemas(schemas_path, schema_names):
    """Load each named JSON schema from schemas_path and sanity-check it.

    Returns a dict mapping schema name -> parsed schema.
    """
    loaded_schemas = {}
    for schema_name in schema_names:
        file_path = os.path.join(schemas_path, '{}.json'.format(schema_name))
        with open(file_path) as schema_file:
            schema = json.load(schema_file)
        # Fail fast if the schema itself is malformed.
        validator_for(schema).check_schema(schema)
        loaded_schemas[schema_name] = schema
    return loaded_schemas
_SCHEMAS = load_schemas(JSON_SCHEMAS_PATH, SCHEMA_NAMES)
def get_validator(schema_name):
    """Build a validator (with format checking) for the named schema."""
    schema = _SCHEMAS[schema_name]
    validator_cls = validator_for(schema)
    return validator_cls(schema, format_checker=FORMAT_CHECKER)
def validate_updater_json_or_400(submitted_json):
    """Abort with a 400 unless the JSON is a valid service update."""
    if validates_against_schema('services-update', submitted_json):
        return
    abort(400, "JSON was not a valid format")
def validate_user_json_or_400(submitted_json):
    """Abort with a 400 unless the JSON describes a valid user.

    Supplier users must additionally carry a supplierId.
    """
    if not validates_against_schema('users', submitted_json):
        abort(400, "JSON was not a valid format")
    is_supplier = submitted_json['role'] == 'supplier'
    if is_supplier and 'supplierId' not in submitted_json:
        abort(400, "No supplier id provided for supplier user")
def validate_user_auth_json_or_400(submitted_json):
    """Abort with a 400 (including the reason) unless the JSON is a valid
    user-auth payload.

    Validates directly with the schema validator, mirroring
    validate_supplier_json_or_400: validates_against_schema swallows
    ValidationError and returns a bool, so wrapping it in try/except (as the
    old code did) could never fire and invalid payloads passed silently.
    """
    try:
        get_validator('users-auth').validate(submitted_json)
    except ValidationError as e:
        abort(400, "JSON was not a valid format. {}".format(e.message))
def detect_framework_or_400(submitted_json):
    """Return the detected framework name, aborting with 400 if none match."""
    framework = detect_framework(submitted_json)
    if framework:
        return framework
    abort(400, "JSON was not a valid format. {}".format(
        reason_for_failure(submitted_json))
    )
def detect_framework(submitted_json):
    """Return the framework whose schema the JSON validates against.

    Tries G4, then G5, then the four G6 lots; returns False if none match.
    """
    if validates_against_schema('services-g4', submitted_json):
        return 'G-Cloud 4'
    if validates_against_schema('services-g5', submitted_json):
        return 'G-Cloud 5'
    g6_lots = ('services-g6-scs', 'services-g6-saas',
               'services-g6-paas', 'services-g6-iaas')
    if any(validates_against_schema(lot, submitted_json) for lot in g6_lots):
        return 'G-Cloud 6'
    return False
def validate_supplier_json_or_400(submitted_json):
    """Abort with a 400 (including the reason) unless JSON is a valid supplier."""
    validator = get_validator('suppliers')
    try:
        validator.validate(submitted_json)
    except ValidationError as err:
        abort(400, "JSON was not a valid format. {}".format(err.message))
def validates_against_schema(validator_name, submitted_json):
    """True iff submitted_json passes validation against the named schema."""
    try:
        get_validator(validator_name).validate(submitted_json)
        return True
    except ValidationError:
        return False
def reason_for_failure(submitted_json):
    """Collect validation error messages for every framework schema.

    detect_framework() tries G4, G5 and the four G6 lots; the failure
    report covers the same set of schemas (the old code only reported the
    G6 lots, so G4/G5 services failed with no explanation).
    """
    checks = [
        ('services-g4', 'Not G4'),
        ('services-g5', 'Not G5'),
        ('services-g6-scs', 'Not SCS'),
        ('services-g6-saas', 'Not SaaS'),
        ('services-g6-paas', 'Not PaaS'),
        ('services-g6-iaas', 'Not IaaS'),
    ]
    response = []
    for schema_name, label in checks:
        try:
            get_validator(schema_name).validate(submitted_json)
        except ValidationError as e:
            response.append('%s: %s' % (label, e.message))
    return '. '.join(response)
def is_valid_service_id(service_id):
    """
    Validate that service ids contain only letters,
    numbers and dashes ([A-Za-z0-9-]) and that their length is
    between MINIMUM_SERVICE_ID_LENGTH and MAXIMUM_SERVICE_ID_LENGTH
    inclusive.

    :param service_id:
    :return True|False:
    """
    # The previous pattern used [A-z], which also matches '[', '\', ']',
    # '^', '_' and '`' — the ASCII characters between 'Z' and 'a'.
    regex_match_valid_service_id = r"^[A-Za-z0-9-]{%s,%s}$" % (
        MINIMUM_SERVICE_ID_LENGTH,
        MAXIMUM_SERVICE_ID_LENGTH
    )
    return bool(re.search(regex_match_valid_service_id, service_id))
def is_valid_service_id_or_400(service_id):
    """Return True for a valid service id; abort with a 400 otherwise."""
    if not is_valid_service_id(service_id):
        abort(400, "Invalid service ID supplied: %s" % service_id)
    return True
|
Python
| 0.000001
|
@@ -3207,16 +3207,330 @@
se = %5B%5D%0A
+ try:%0A get_validator('services-g4').validate(submitted_json)%0A except ValidationError as e1:%0A response.append('Not G4: %25s' %25 e1.message)%0A%0A try:%0A get_validator('services-g5').validate(submitted_json)%0A except ValidationError as e1:%0A response.append('Not G5: %25s' %25 e1.message)%0A%0A
try:
|
26808790680c96b7a6aff48982a15b28730d7e41
|
Print stdout when running a hook (#4167, #4487) (#4702)
|
certbot/hooks.py
|
certbot/hooks.py
|
"""Facilities for implementing hooks that call shell commands."""
from __future__ import print_function
import logging
import os
from subprocess import Popen, PIPE
from certbot import errors
from certbot import util
from certbot.plugins import util as plug_util
logger = logging.getLogger(__name__)
def validate_hooks(config):
    """Check hook commands are executable."""
    for cmd, hook_name in ((config.pre_hook, "pre"),
                           (config.post_hook, "post"),
                           (config.renew_hook, "renew")):
        validate_hook(cmd, hook_name)
def _prog(shell_cmd):
    """Extract the program run by a shell command.

    :param str shell_cmd: command to be executed

    :returns: basename of command or None if the command isn't found
    :rtype: str or None
    """
    if util.exe_exists(shell_cmd):
        return os.path.basename(shell_cmd)
    # Not found: try to repair PATH, then check again.
    plug_util.path_surgery(shell_cmd)
    if util.exe_exists(shell_cmd):
        return os.path.basename(shell_cmd)
    return None
def validate_hook(shell_cmd, hook_name):
    """Check that a command provided as a hook is plausibly executable.

    :raises .errors.HookCommandNotFound: if the command is not found
    """
    if not shell_cmd:
        return
    # Only the first word of the command is the program to locate.
    cmd = shell_cmd.split(None, 1)[0]
    if _prog(cmd):
        return
    if os.path.exists(cmd):
        msg = "{1}-hook command {0} exists, but is not executable.".format(cmd, hook_name)
    else:
        path = os.environ["PATH"]
        msg = "Unable to find {2}-hook command {0} in the PATH.\n(PATH is {1})".format(
            cmd, path, hook_name)
    raise errors.HookCommandNotFound(msg)
def pre_hook(config):
    "Run pre-hook if it's defined and hasn't been run."
    cmd = config.pre_hook
    if not cmd:
        return
    if cmd in pre_hook.already:
        logger.info("Pre-hook command already run, skipping: %s", cmd)
        return
    logger.info("Running pre-hook command: %s", cmd)
    _run_hook(cmd)
    pre_hook.already.add(cmd)

# Tracks commands already executed so each pre-hook runs at most once.
pre_hook.already = set()  # type: ignore
def post_hook(config):
    """Run post hook if defined.

    If the verb is renew, we might have more certs to renew, so we wait until
    run_saved_post_hooks() is called.
    """
    cmd = config.post_hook
    if config.verb == "renew":
        # Defer: collect unique hooks to run once after all renewals.
        if cmd and cmd not in post_hook.eventually:
            post_hook.eventually.append(cmd)
        return
    # certonly / run: execute immediately.
    if cmd:
        logger.info("Running post-hook command: %s", cmd)
        _run_hook(cmd)

# Hooks deferred during 'renew', executed by run_saved_post_hooks().
post_hook.eventually = []  # type: ignore
def run_saved_post_hooks():
    """Run any post hooks that were saved up in the course of the 'renew' verb"""
    for saved_cmd in post_hook.eventually:
        logger.info("Running post-hook command: %s", saved_cmd)
        _run_hook(saved_cmd)
def renew_hook(config, domains, lineage_path):
    """Run post-renewal hook if defined."""
    cmd = config.renew_hook
    if not cmd:
        return
    if config.dry_run:
        logger.warning("Dry run: skipping renewal hook command: %s", cmd)
        return
    # Expose renewal details to the hook via the environment.
    os.environ["RENEWED_DOMAINS"] = " ".join(domains)
    os.environ["RENEWED_LINEAGE"] = lineage_path
    logger.info("Running renew-hook command: %s", cmd)
    _run_hook(cmd)
def _run_hook(shell_cmd):
    """Run a hook command.

    :returns: stderr if there was any"""
    stderr, _stdout = execute(shell_cmd)
    return stderr
def execute(shell_cmd):
    """Run a command.

    :returns: `tuple` (`str` stderr, `str` stdout)"""
    # universal_newlines causes Popen.communicate()
    # to return str objects instead of bytes in Python 3
    cmd = Popen(shell_cmd, shell=True, stdout=PIPE,
                stderr=PIPE, universal_newlines=True)
    out, err = cmd.communicate()
    base_cmd = os.path.basename(shell_cmd.split(None, 1)[0])
    if out:
        # Surface hook stdout; previously it was captured and discarded,
        # so anything a hook printed was invisible to the user.
        logger.info('Output from %s:\n%s', base_cmd, out)
    if cmd.returncode != 0:
        logger.error('Hook command "%s" returned error code %d',
                     shell_cmd, cmd.returncode)
    if err:
        logger.error('Error output from %s:\n%s', base_cmd, err)
    return (err, out)
|
Python
| 0.001709
|
@@ -3747,24 +3747,155 @@
mmunicate()%0A
+ base_cmd = os.path.basename(shell_cmd.split(None, 1)%5B0%5D)%0A if out:%0A logger.info('Output from %25s:%5Cn%25s', base_cmd, out)%0A
if cmd.r
@@ -4039,73 +4039,8 @@
rr:%0A
- base_cmd = os.path.basename(shell_cmd.split(None, 1)%5B0%5D)%0A
|
657f30275246390c2c8b93732dc8f65b83d37a06
|
add helper method to fixup return_to protocol. used in a few places in a couple of providers.
|
velruse/utils.py
|
velruse/utils.py
|
"""Utilities for the auth functionality"""
import sys
import uuid
try:
import simplejson as json
except ImportError:
import json
import webob.exc as exc
from routes import URLGenerator
from openid.oidutil import autoSubmitHTML
from webob import Response
from velruse.baseconvert import base_encode
from velruse.errors import error_dict
def redirect_form(end_point, token):
    """Generate a redirect form for POSTing"""
    markup = """
<form action="%s" method="post" accept-charset="UTF-8" enctype="application/x-www-form-urlencoded">
<input type="hidden" name="token" value="%s" />
<input type="submit" value="Continue"/></form>
"""
    return markup % (end_point, token)
def generate_token():
"""Generate a random token"""
return base_encode(uuid.uuid4().int)
def load_package_obj(package_obj_string):
"""Extract a package name and object name, import the package and return
the object from that package by name.
The format is velruse.store.memstore:MemoryStore.
"""
package_name, obj_name = package_obj_string.split(':')
__import__(package_name)
return getattr(sys.modules[package_name], obj_name)
# Copied from Paste
def path_info_pop(environ):
"""
'Pops' off the next segment of PATH_INFO, pushing it onto
SCRIPT_NAME, and returning that segment.
For instance::
>>> def call_it(script_name, path_info):
... env = {'SCRIPT_NAME': script_name, 'PATH_INFO': path_info}
... result = path_info_pop(env)
... print 'SCRIPT_NAME=%r; PATH_INFO=%r; returns=%r' % (
... env['SCRIPT_NAME'], env['PATH_INFO'], result)
>>> call_it('/foo', '/bar')
SCRIPT_NAME='/foo/bar'; PATH_INFO=''; returns='bar'
>>> call_it('/foo/bar', '')
SCRIPT_NAME='/foo/bar'; PATH_INFO=''; returns=None
>>> call_it('/foo/bar', '/')
SCRIPT_NAME='/foo/bar/'; PATH_INFO=''; returns=''
>>> call_it('', '/1/2/3')
SCRIPT_NAME='/1'; PATH_INFO='/2/3'; returns='1'
>>> call_it('', '//1/2')
SCRIPT_NAME='//1'; PATH_INFO='/2'; returns='1'
"""
path = environ.get('PATH_INFO', '')
if not path:
return None
while path.startswith('/'):
environ['SCRIPT_NAME'] += '/'
path = path[1:]
if '/' not in path:
environ['SCRIPT_NAME'] += path
environ['PATH_INFO'] = ''
return path
else:
segment, path = path.split('/', 1)
environ['PATH_INFO'] = '/' + path
environ['SCRIPT_NAME'] += segment
return segment
class RouteResponder(object):
"""RouteResponder for Routes-based dispatching Responder"""
def __call__(self, req):
"""Handle being called with a request object"""
results = self.map.routematch(environ=req.environ)
if not results:
return exc.HTTPNotFound()
match = results[0]
kwargs = match.copy()
link = URLGenerator(self.map, req.environ)
req.environ['wsgiorg.routing_args'] = ((), match)
req.link = link
self.map.environ = req.environ
action = kwargs.pop('action')
return getattr(self, action)(req, **kwargs)
def _error_redirect(self, error_code, end_point):
"""Redirect the user to the endpoint, save the error
status to the storage under the token"""
token = generate_token()
self.storage.store(token, error_dict(error_code))
form_html = redirect_form(end_point, token)
return Response(body=autoSubmitHTML(form_html))
def _success_redirect(self, user_data, end_point):
"""Redirect the user to the endpoint, save the user_data to a new
random token in storage"""
# Generate the token, store the extracted user-data for 5 mins, and send back
token = generate_token()
self.storage.store(token, user_data, expires=300)
form_html = redirect_form(end_point, token)
return Response(body=autoSubmitHTML(form_html))
class _Missing(object):
def __repr__(self):
return 'no value'
def __reduce__(self):
return '_missing'
_missing = _Missing()
class cached_property(object):
"""A decorator that converts a function into a lazy property. The
function wrapped is called the first time to retrieve the result
and then that calculated result is used the next time you access
the value::
class Foo(object):
@cached_property
def foo(self):
# calculate something important here
return 42
The class has to have a `__dict__` in order for this property to
work.
"""
# implementation detail: this property is implemented as non-data
# descriptor. non-data descriptors are only invoked if there is
# no entry with the same name in the instance's __dict__.
# this allows us to completely get rid of the access function call
# overhead. If one choses to invoke __get__ by hand the property
# will still work as expected because the lookup logic is replicated
# in __get__ for manual invocation.
def __init__(self, func, name=None, doc=None):
self.__name__ = name or func.__name__
self.__module__ = func.__module__
self.__doc__ = doc or func.__doc__
self.func = func
def __get__(self, obj, type=None):
if obj is None:
return self
value = obj.__dict__.get(self.__name__, _missing)
if value is _missing:
value = self.func(obj)
obj.__dict__[self.__name__] = value
return value
|
Python
| 0
|
@@ -3987,24 +3987,576 @@
_html))%0A
-
+%0A def _get_return_to(self, req):%0A return_to = req.link('process', qualified=True)%0A # post-process the return_to protocol.%0A if self.protocol:%0A if return_to.startswith('https://') and self.protocol == 'http':%0A return_to = return_to.replace('https://', %22%25s://%22%0A %25(self.protocol))%0A elif return_to.startswith('http://') and self.protocol == 'https':%0A return_to = return_to.replace('http://', %22%25s://%22%0A %25(self.protocol))%0A return return_to
%0A%0A%0Aclass
|
7428f7d87d33ab1531f94753516ad4a56780a612
|
Add helper to predefine remove recursive flag. Add copy_to and copy_from helpers which aid the copying of single files
|
virtualbox/library_ext/guest_session.py
|
virtualbox/library_ext/guest_session.py
|
import time
from virtualbox import library
"""
Add helper code to the default IGuestSession class.
"""
# Add context management to IGuestSession
class IGuestSession(library.IGuestSession):
__doc__ = library.IGuestSession.__doc__
def __enter__(self):
return self
def __exit__(self, exception_type, exception_val, trace):
self.close()
def execute(self, command, arguments=[], stdin="", environment=[],
flags=[library.ProcessCreateFlag.wait_for_std_err,
library.ProcessCreateFlag.wait_for_std_out,
library.ProcessCreateFlag.ignore_orphaned_processes],
priority=library.ProcessPriority.default,
affinity=[], timeout_ms=0):
"""Execute a command in the Guest
Arguments:
command - Command to execute.
arguments - List of arguments for the command
stdin - A buffer to write to the stdin of the command.
environment - See IGuestSession.create_process?
flags - List of ProcessCreateFlag objects.
Default value set to [wait_for_std_err,
wait_for_stdout,
ignore_orphaned_processes]
timeout_ms - ms to wait for the process to complete.
If 0, wait for ever...
priority - Set the ProcessPriority priority to be used for
execution.
affinity - Process affinity to use for execution.
Return IProcess, stdout, stderr
"""
def read_out(process, flags, stdout, stderr):
if library.ProcessCreateFlag.wait_for_std_err in flags:
e = str(process.read(2, 65000, 0))
stderr.append(e)
if library.ProcessCreateFlag.wait_for_std_out in flags:
o = str(process.read(1, 65000, 0))
stdout.append(o)
process = self.process_create_ex(command, arguments, environment,
flags, timeout_ms, priority, affinity)
process.wait_for(int(library.ProcessWaitResult.start), 0)
# write stdin to the process
if stdin:
index = 0
while index < len(stdin):
index += process.write(0, [library.ProcessInputFlag.none],
stdin[index:], 0)
process.write(0, [library.ProcessInputFlag.end_of_file], 0)
# read the process output and wait for
stdout = []
stderr = []
while process.status == library.ProcessStatus.started:
read_out(process, flags, stdout, stderr)
time.sleep(0.2)
# make sure we have read the remainder of the out
read_out(process, flags, stdout, stderr)
return process, "".join(stdout), "".join(stderr)
def makedirs(self, path, mode=0x777):
"""Super-mkdir: create a leaf directory and all intermediate ones."""
self.directory_create(path, mode, [library.DirectoryCreateFlag.parents])
|
Python
| 0
|
@@ -3143,8 +3143,1299 @@
ents%5D)%0A%0A
+ # Simplify calling directory_remove_recursive. Set default flags to%0A # content_and_dir if they have not yet been set.%0A def directory_remove_recursive(self, path, flags=None):%0A if flags is None:%0A flags = %5Blibrary.DirectoryRemoveRecFlag.content_and_dir%5D%0A super(IGuestSession, self).directory_remove_recursive(path, flags)%0A directory_remove_recursive.__doc__ = %5C%0A library.IGuestSession.directory_remove_recursive.__doc__%0A%0A def copy_to(self, host_path, guest_path):%0A %22Copy a single file to the vm. Wraps copy_to_vm.%22%0A if not os.path.exists(host_path):%0A raise OSError(%22Failed to find %25s on host%22 %25 host_path)%0A p = self.copy_to_vm(host_path, guest_path, %5B%5D)%0A p.wait_for_completion()%0A return p%0A%0A def copy_from(self, guest_path, host_path):%0A %22Copy a single file from the vm. Wraps copy_from_vm.%22%0A # Dodgy exists check...%0A for x in range(10):%0A try:%0A self.file_exists(guest_path)%0A break%0A except:%0A time.sleep(0.1)%0A else:%0A raise OSError(%22Failed to find %25s on guest%22 %25 guest_path) %0A p = self.copy_from(guest_path, host_path, %5B%5D)%0A p.wait_for_completion()%0A return p%0A%0A%0A%0A %0A
|
23b12f5eb7f89aa2efe05b30c40d359d2a8034e9
|
Fix bug in lan endpoint which let a user unattend after paying
|
apps/lan/views.py
|
apps/lan/views.py
|
# -*- coding: utf-8 -*-
from datetime import datetime
from django.contrib import messages
from django.contrib.auth.decorators import login_required, permission_required
from django.http import HttpResponse
from django.shortcuts import get_object_or_404, redirect, render
from django.utils.translation import ugettext as _
from apps.lan.models import Attendee, Directions, LAN, Ticket
def home(request):
lans = LAN.objects.filter(end_date__gte=datetime.now())
if lans.count() == 1:
next_lan = lans[0]
return redirect('lan_details', lan_id=next_lan.id)
else:
return redirect('lan_listing')
def listing(request):
upcoming_lans = LAN.objects.filter(end_date__gte=datetime.now())
previous_lans = LAN.objects.filter(end_date__lt=datetime.now())
return render(request, 'lan/list.html', {'upcoming': upcoming_lans, 'previous': previous_lans})
def details(request, lan_id):
lan = get_object_or_404(LAN, pk=lan_id)
if lan.end_date > datetime.now():
active = True
else:
active = False
ticket_types = lan.tickettype_set.all().order_by('-priority', '-price')
user_tickets = Ticket.objects.filter(user=request.user.id, ticket_type__in=ticket_types)
directions = Directions.objects.filter(lan=lan)
if request.user in lan.attendees:
if request.user in lan.paid_attendees or user_tickets:
status = 'paid'
else:
status = 'attending'
else:
status = 'open'
return render(request, 'lan/details.html', {'lan': lan, 'status': status, 'active': active, 'ticket_types': ticket_types, 'ticket': user_tickets, 'directions': directions})
@login_required
def attend(request, lan_id):
lan = get_object_or_404(LAN, pk=lan_id)
if lan.end_date < datetime.now():
messages.error(request, _(u'This LAN has finished and can no longer be attended'))
return redirect(lan)
if not request.user.profile.has_address():
messages.error(request, _(u'You need to fill in your address and zip code in order to sign up for a LAN.'))
else:
if request.user in lan.attendees:
messages.error(request, _(u'You are already in the attendee list for ') + unicode(lan))
else:
attendee = Attendee(lan=lan, user=request.user)
attendee.save()
messages.success(request, _(u'Successfully added you to attendee list for ') + unicode(lan))
return redirect(lan)
@login_required
def unattend(request, lan_id):
lan = get_object_or_404(LAN, pk=lan_id)
if lan.start_date < datetime.now():
messages.error(request, _(u'This LAN has already started, you can not retract your signup'))
return redirect(lan)
if request.user not in lan.attendees:
messages.error(request, _(u'You are not in the attendee list for ') + unicode(lan))
else:
attendee = Attendee.objects.get(lan=lan, user=request.user)
attendee.delete()
messages.success(request, _(u'Successfully removed you from attendee list for ') + unicode(lan))
return redirect(lan)
@permission_required('lan.export_paying_participants')
def list_paid(request, lan_id):
import xlwt
lan = get_object_or_404(LAN, pk=lan_id)
response = HttpResponse(content_type='application/ms-excel')
response['Content-Disposition'] = u'attachment; filename=paid_attendees_lan-{0}.xls'.format(lan_id)
doc = xlwt.Workbook(encoding='UTF-8')
# We paid the participants? In Norwegian?
sheet = doc.add_sheet('Betalte deltakere')
def write(sheet, person, row, payment_type):
profile = person.profile
sheet.write(row, 0, u'{0} {1}'.format(person.first_name, person.last_name))
sheet.write(row, 1, u'{0}.{1}.{2}'.format(profile.date_of_birth.day, profile.date_of_birth.month, profile.date_of_birth.year))
sheet.write(row, 2, profile.address)
sheet.write(row, 3, profile.zip_code)
sheet.write(row, 4, person.email)
sheet.write(row, 5, payment_type)
row = 0
for user in lan.paid_attendees:
write(sheet, user, row, 'cash')
row += 1
tickets = lan.tickets()
for ticket in tickets:
write(sheet, ticket.user, row, 'ticket')
row += 1
doc.save(response)
return response
|
Python
| 0.000001
|
@@ -2862,24 +2862,420 @@
icode(lan))%0A
+ return redirect(lan)%0A%0A ticket_types = lan.tickettype_set.all().order_by('-priority', '-price')%0A user_tickets = Ticket.objects.filter(user=request.user.id, ticket_type__in=ticket_types)%0A%0A if request.user in lan.paid_attendees or user_tickets:%0A messages.error(request, _(u'You cannot remove attendance since you have paid for ') + unicode(lan))%0A return redirect(lan)%0A
else:%0A
|
4bc55c14ab2303c4f89c0b33a6c48dd2e2efe4f2
|
fix cmd
|
genomel/docker/variant_calling/gatk3_genotypegvcfs.py
|
genomel/docker/variant_calling/gatk3_genotypegvcfs.py
|
#!/usr/bin/env python
'''Internal multithreading for GATK3 GenotypeGVCFs'''
import sys
import argparse
import subprocess
import string
from functools import partial
from multiprocessing.dummy import Pool, Lock
def is_nat(pos):
'''Checks that a value is a natural number.'''
if int(pos) > 0:
return int(pos)
raise argparse.ArgumentTypeError('{} must be positive, non-zero'.format(pos))
def do_pool_commands(cmd, lock=Lock(), shell_var=True):
'''run pool commands'''
try:
output = subprocess.Popen(cmd, shell=shell_var, \
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output_stdout, output_stderr = output.communicate()
with lock:
print 'running: {}'.format(cmd)
print output_stdout
print output_stderr
except BaseException:
print "command failed {}".format(cmd)
return output.wait()
def multi_commands(cmds, thread_count, shell_var=True):
'''run commands on number of threads'''
pool = Pool(int(thread_count))
output = pool.map(partial(do_pool_commands, shell_var=shell_var), cmds)
return output
def get_region(intervals):
'''get region from intervals'''
interval_list = []
with open(intervals, 'r') as fhandle:
line = fhandle.readlines()
for bed in line:
blocks = bed.rstrip().rsplit('\t')
intv = '{}:{}-{}'.format(blocks[0], int(blocks[1])+1, blocks[2])
interval_list.append(intv)
return interval_list
def genotypegvcfs_template(cmd_dict):
'''cmd template'''
cmd_list = [
'java', '-Xmx3G',
'-jar', '/opt/GenomeAnalysisTK.jar',
'-T', 'GenotypeGVCFs',
'-R', '${REF}',
'-L', '${INTERVAL}',
'-D', '${SNP}',
'-o', '${OUT}',
'-A', 'AlleleBalance',
'-A', 'Coverage',
'-A', 'HomopolymerRun',
'-A', 'QualByDepth'
]
for gvcf in cmd_dict['gvcf']:
cmd_list.extend(['-I', gvcf])
cmd_str = ' '.join(cmd_list)
template = string.Template(cmd_str)
for region in get_region(cmd_dict['interval']):
interval_str = str(region).replace(':', '_').replace('-', '_')
output = cmd_dict['job_uuid'] + '.' + interval_str + '.vcf.gz'
cmd = template.substitute(
dict(
REF=cmd_dict['ref'],
INTERVAL=region,
SNP=cmd_dict['snp'],
OUT=output
)
)
yield cmd
def main():
'''main'''
parser = argparse.ArgumentParser('GATK3 GenoMEL GenotypeGVCFs.')
# Required flags.
parser.add_argument('-v', \
'--gvcf', \
required=True, \
nargs='+', \
help='GVCF file.')
parser.add_argument('-j', \
'--job_uuid', \
required=True, \
help='Job uuid.')
parser.add_argument('-r', \
'--reference', \
required=True, \
help='Reference path')
parser.add_argument('-i', \
'--interval', \
required=True, \
help='Interval files')
parser.add_argument('-s', \
'--snp', \
required=True, \
help='Reference SNP file path')
parser.add_argument('-c', \
'--thread_count', \
required=True, \
type=is_nat, \
default=25)
args = parser.parse_args()
input_dict = {
'job_uuid': args.job_uuid,
'gvcf': args.gvcf,
'ref': args.reference,
'interval': args.interval,
'snp': args.snp
}
threads = args.thread_count
cmds = list(genotypegvcfs_template(input_dict))
outputs = multi_commands(cmds, threads)
if any(x != 0 for x in outputs):
print 'Failed'
sys.exit(1)
else:
print 'Completed'
if __name__ == '__main__':
main()
|
Python
| 0.000021
|
@@ -1974,17 +1974,17 @@
tend(%5B'-
-I
+V
', gvcf%5D
|
0a12594920720bd29321665f3ae7d86843de5d42
|
Update sht25.py
|
apps/sdb/sht25.py
|
apps/sdb/sht25.py
|
#!/usr/bin/python
# code from http://www.emsystech.de/raspi-sht21
# http://github.com/jeonghoonkang
import fcntl
import time
import unittest
class SHT25:
# control constants on I2C bus
_SOFTRESET = 0xFE
_I2C_ADDRESS = 0x40
_TRIGGER_TEMPERATURE_NO_HOLD = 0xF3
_TRIGGER_HUMIDITY_NO_HOLD = 0xF5
_STATUS_BITS_MASK = 0xFFFC
# From: /linux/i2c-dev.h
I2C_SLAVE = 0x0703
I2C_SLAVE_FORCE = 0x0706
# datasheet (v4), page 9, table 7
# for suggesting the use of these better values
# code copied from https://github.com/mmilata/growd
_TEMPERATURE_WAIT_TIME = 0.086 # (datasheet: typ=66, max=85)
_HUMIDITY_WAIT_TIME = 0.030 # (datasheet: typ=22, max=29)
def __init__(self, device_number=1):
self.i2c = open('/dev/i2c-%s' % device_number, 'r+', 0)
fcntl.ioctl(self.i2c, self.I2C_SLAVE, 0x40)
self.i2c.write(chr(self._SOFTRESET))
time.sleep(0.050)
def read_temperature(self):
self.i2c.write(chr(self._TRIGGER_TEMPERATURE_NO_HOLD))
time.sleep(self._TEMPERATURE_WAIT_TIME)
data = self.i2c.read(3)
if self._calculate_checksum(data, 2) == ord(data[2]):
return self._get_temperature_from_buffer(data)
def read_humidity(self):
self.i2c.write(chr(self._TRIGGER_HUMIDITY_NO_HOLD))
time.sleep(self._HUMIDITY_WAIT_TIME)
data = self.i2c.read(3)
if self._calculate_checksum(data, 2) == ord(data[2]):
return self._get_humidity_from_buffer(data)
def close(self):
self.i2c.close()
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.close()
@staticmethod
def _calculate_checksum(data, number_of_bytes):
# CRC
POLYNOMIAL = 0x131 # //P(x)=x^8+x^5+x^4+1 = 100110001
crc = 0
# calculates 8-Bit checksum with given polynomial
for byteCtr in range(number_of_bytes):
crc ^= (ord(data[byteCtr]))
for bit in range(8, 0, -1):
if crc & 0x80:
crc = (crc << 1) ^ POLYNOMIAL
else:
crc = (crc << 1)
return crc
@staticmethod
def _get_temperature_from_buffer(data):
unadjusted = (ord(data[0]) << 8) + ord(data[1])
unadjusted &= SHT25._STATUS_BITS_MASK # zero the status bits
unadjusted *= 175.72
unadjusted /= 1 << 16 # divide by 2^16
unadjusted -= 46.85
return unadjusted
@staticmethod
def _get_humidity_from_buffer(data):
unadjusted = (ord(data[0]) << 8) + ord(data[1])
unadjusted &= SHT25._STATUS_BITS_MASK # zero the status bits
unadjusted *= 125.0
unadjusted /= 1 << 16 # divide by 2^16
unadjusted -= 6
return unadjusted
if __name__ == "__main__":
try:
while True:
with SHT25(1) as sht25:
print "Temperature: %s" % sht25.read_temperature()
print "Humidity : %s" % sht25.read_humidity()
time.sleep(1.5)
except IOError, e:
print e
print "Error creating connection to i2c. This must be run as root"
|
Python
| 0
|
@@ -184,16 +184,26 @@
I2C bus
+ for SHT25
%0A _SO
|
da05fe2d41a077276946c5d6c86995c60315e093
|
Make sure we load pyvisa-py when enumerating instruments.
|
src/auspex/instruments/__init__.py
|
src/auspex/instruments/__init__.py
|
import pkgutil
import importlib
import pyvisa
instrument_map = {}
for loader, name, is_pkg in pkgutil.iter_modules(__path__):
module = importlib.import_module('auspex.instruments.' + name)
if hasattr(module, "__all__"):
globals().update((name, getattr(module, name)) for name in module.__all__)
for name in module.__all__:
instrument_map.update({name:getattr(module,name)})
def enumerate_visa_instruments():
rm = pyvisa.ResourceManager()
print(rm.list_resources())
def probe_instrument_ids():
rm = pyvisa.ResourceManager()
for instr_label in rm.list_resources():
instr = rm.open_resource(instr_label)
try:
print(instr_label, instr.query('*IDN?'))
except:
print(instr_label, "Did not respond")
instr.close()
|
Python
| 0
|
@@ -441,16 +441,21 @@
Manager(
+%22@py%22
)%0A%09print
@@ -534,16 +534,21 @@
Manager(
+%22@py%22
)%0A%09for i
|
145c8e456b90ff4e243656c923ddc82a690f4447
|
Debug logging.
|
google-daemon/usr/share/google/google_daemon/utils.py
|
google-daemon/usr/share/google/google_daemon/utils.py
|
#!/usr/bin/python
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Library functions and interfaces for manipulating accounts."""
import errno
import fcntl
import logging
import logging.handlers
import os
import shutil
import subprocess
import sys
import tempfile
class RunCommandException(Exception):
"""Could not run a command."""
pass
class System(object):
"""Interface for interacting with the system."""
def __init__(self, subprocess_module=subprocess, os_module=os):
self.subprocess = subprocess_module
self.os = os_module
def MakeLoggingHandler(self, prefix, facility):
"""Make a logging handler to send logs to syslog."""
handler = logging.handlers.SysLogHandler(
address='/dev/log', facility=facility)
formatter = logging.Formatter(prefix + ': %(levelname)s %(message)s')
handler.setFormatter(formatter)
return handler
def SetLoggingHandler(self, logger, handler):
"""Setup logging w/ a specifc handler."""
handler.setLevel(logging.INFO)
logger.setLevel(logging.INFO)
logger.addHandler(handler)
def EnableDebugLogging(self, logger):
debug_handler = logging.StreamHandler(sys.stdout)
debug_handler.setLevel(logging.DEBUG)
logger.addHandler(debug_handler)
logger.setLevel(logging.DEBUG)
def OpenFile(self, *args, **kwargs):
return open(*args, **kwargs)
def MoveFile(self, src, dst):
return shutil.move(src, dst)
def CreateTempFile(self, delete=True):
return tempfile.NamedTemporaryFile(delete=delete)
def UserAdd(self, user, groups):
logging.info('Creating account %s', user)
result = self.RunCommand([
'/usr/sbin/useradd', user, '-m', '-s', '/bin/bash', '-G',
','.join(groups)])
if self.RunCommandFailed(result, 'Could not create user %s', user):
return False
return True
def IsValidSudoersFile(self, filename):
result = self.RunCommand(['/usr/sbin/visudo', '-c', '-f', filename])
if result[0] != 0:
with self.system.OpenFile(filename, 'r') as f:
contents = f.read()
self.RunCommandFailed(
result, 'Could not produce valid sudoers file\n%s' % contents)
return False
return True
def IsExecutable(self, path):
"""Return whether path exists and is an executable binary."""
return self.os.path.isfile(path) and self.os.access(path, os.X_OK)
def RunCommand(self, args):
"""Run a command, return a retcode, stdout, stderr tuple."""
try:
p = self.subprocess.Popen(
args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(stdout, stderr) = p.communicate()
return (p.returncode, stdout, stderr)
except OSError, e:
raise RunCommandException('Could not run %s due to %s' % (args, e))
def RunCommandFailed(self, result, *msg_args):
retcode, stdout, stderr = result
if retcode != 0:
logging.warning('%s\nSTDOUT:\n%s\nSTDERR:\n%s\n',
msg_args[0] % msg_args[1:], stdout, stderr)
return True
return False
class CouldNotLockException(Exception):
"""Someone else seems to be holding the lock."""
pass
class UnexpectedLockException(Exception):
"""We genuinely failed to lock the file."""
pass
class CouldNotUnlockException(Exception):
"""Someone else seems to be holding the lock."""
pass
class UnexpectedUnlockException(Exception):
"""We genuinely failed to unlock the file."""
pass
class LockFile(object):
"""Lock a file to prevent multiple concurrent executions."""
def __init__(self, fcntl_module=fcntl):
self.fcntl_module = fcntl_module
def RunExclusively(self, lock_fname, method):
try:
self.Lock(lock_fname)
method()
self.Unlock()
except CouldNotLockException:
logging.warning(
'Could not lock %s. Is it locked by another program?',
lock_fname)
except UnexpectedLockException as e:
logging.warning(
'Could not lock %s due to %s', lock_fname, e)
except CouldNotUnlockException:
logging.warning(
'Could not unlock %s. Is it locked by another program?',
lock_fname)
except UnexpectedUnlockException as e:
logging.warning(
'Could not unlock %s due to %s', lock_fname, e)
def Lock(self, lock_fname):
"""Lock the lock file."""
try:
self.fh = open(lock_fname, 'w+b')
self.fcntl_module.flock(self.fh.fileno(), fcntl.LOCK_EX|fcntl.LOCK_NB)
except IOError as e:
if e.errno == errno.EWOULDBLOCK:
raise CouldNotLockException()
raise UnexpectedLockException('Failed to lock: %s' % e)
def Unlock(self):
"""Unlock the lock file."""
try:
self.fcntl_module.flock(self.fh.fileno(), fcntl.LOCK_UN|fcntl.LOCK_NB)
except IOError as e:
if e.errno == errno.EWOULDBLOCK:
raise CouldNotUnlockException()
raise UnexpectedUnlockException('Failed to unlock: %s' % e)
|
Python
| 0
|
@@ -1511,24 +1511,55 @@
handler.%22%22%22%0A
+ # TODO Change back to INFO%0A
handler.
@@ -1567,36 +1567,37 @@
etLevel(logging.
-INFO
+DEBUG
)%0A logger.set
@@ -1610,20 +1610,21 @@
logging.
-INFO
+DEBUG
)%0A lo
|
23d4e48155e8906510d09a5eaf9fafafa7280d63
|
Fix a few typos in the test.
|
test/functionalities/data-formatter/data-formatter-stl/libcxx/unordered/TestDataFormatterUnordered.py
|
test/functionalities/data-formatter/data-formatter-stl/libcxx/unordered/TestDataFormatterUnordered.py
|
"""
Test lldb data formatter subsystem.
"""
import os, time
import unittest2
import lldb
from lldbtest import *
import lldbutil
class LibcxxUnorderedDataFormatterTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
@skipUnlessDarwin
@dsym_test
def test_with_dsym_and_run_command(self):
"""Test data formatter commands."""
self.buildDsym()
self.data_formatter_commands()
@dwarf_test
@skipIfGcc
def test_with_dwarf_and_run_command(self):
"""Test data formatter commands."""
self.buildDwarf()
self.data_formatter_commands()
def setUp(self):
# Call super's setUp().
TestBase.setUp(self)
def look_for_content_and_continue(self,var_name,substrs):
self.expect( ("frame variable %s" % var_name), substrs )
self.runCmd("continue")
def data_formatter_commands(self):
"""Test that that file and class static variables display correctly."""
self.runCmd("file a.out", CURRENT_EXECUTABLE_SET)
lldbutil.run_break_set_by_source_regexp (self, "Set break point at this line.")
self.runCmd("run", RUN_SUCCEEDED)
# The stop reason of the thread should be breakpoint.
self.expect("thread list", STOPPED_DUE_TO_BREAKPOINT,
substrs = ['stopped',
'stop reason = breakpoint'])
# This is the function to remove the custom formats in order to have a
# clean slate for the next test case.
def cleanup():
self.runCmd('type format clear', check=False)
self.runCmd('type summary clear', check=False)
self.runCmd('type filter clear', check=False)
self.runCmd('type synth clear', check=False)
self.runCmd("settings set target.max-children-count 256", check=False)
# Execute the cleanup function during test case tear down.
self.addTearDownHook(cleanup)
self.expect('image list', substrs = self.getLibcPlusPlusLibs())
self.look_for_content_and_continue("map",['size=5 {,''hello','world','this','is','me'])
self.look_for_content_and_continue("mmap",['size=6 {','first = 3','second = "this"','first = 2','second = "hello"'])
self.look_for_content_and_continue("iset",['size=5 {','[0] = 5','[2] = 3','[3] = 2'])
self.look_for_content_and_continue("sset",['size=5 {','[0] = "is"','[1] = "world"','[4] = "hello"'])
self.look_for_content_and_continue("imset",['size=6 {','[0] = 3','[1] = 3','[2] = 3','[4] = 2','[5] = 1'])
self.look_for_content_and_continue("smset",['size=5 {','[0] = "is"','[1] = "is"','[2] = "world"','[3] = "world"'])
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
|
Python
| 0.999998
|
@@ -811,17 +811,24 @@
substrs
-
+=substrs
)%0A
@@ -2077,18 +2077,19 @@
size=5 %7B
-,
'
+,
'hello',
|
a56e72a4a5d5f3ecd68c7520885cc7808af4f17f
|
fix tests
|
plenum/test/node_request/test_pre_prepare/test_pp_obsolescence.py
|
plenum/test/node_request/test_pre_prepare/test_pp_obsolescence.py
|
import pytest
from plenum.common.util import SortedDict
from plenum.common.messages.node_messages import PrePrepare
from plenum.test.helper import create_prepare_params, create_pre_prepare_no_bls, generate_state_root
from plenum.test.replica.conftest import *
from plenum.test.replica.conftest import primary_replica as _primary_replica
from plenum.test.testing_utils import FakeSomething
OBSOLETE_PP_TS = 1499906902
class FakeSomethingHashable(FakeSomething):
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __hash__(self):
return hash(tuple(SortedDict(self.__dict__).items()))
class FakeMessageBase(FakeSomethingHashable):
_fields = {}
class FakePrePrepare(FakeMessageBase, PrePrepare):
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __hash__(self):
return hash(tuple(SortedDict(self.__dict__).items()))
@pytest.fixture(scope='module')
def sender():
return 'some_replica'
@pytest.fixture(scope='module')
def ts_now(tconf):
return OBSOLETE_PP_TS + tconf.ACCEPTABLE_DEVIATION_PREPREPARE_SECS + 1
@pytest.fixture
def viewNo():
return 0
@pytest.fixture
def inst_id():
return 0
@pytest.fixture
def mock_timestamp():
return MockTimestamp(OBSOLETE_PP_TS)
@pytest.fixture
def primary_replica(_primary_replica, ts_now):
_primary_replica._ordering_service.last_accepted_pre_prepare_time = None
_primary_replica.get_time_for_3pc_batch.value = ts_now
_primary_replica._ordering_service._validate = lambda x: (None, None)
return _primary_replica
@pytest.fixture
def sender_replica(primary_replica, sender):
return primary_replica.generateName(sender, primary_replica.instId)
@pytest.fixture
def pp(primary_replica, ts_now):
params = create_pre_prepare_params(generate_state_root(),
inst_id=primary_replica.instId,
view_no=primary_replica.viewNo,
pp_seq_no=(primary_replica.last_ordered_3pc[1] + 1),
timestamp=ts_now)
return PrePrepare(*params)
def test_pp_obsolete_if_older_than_last_accepted(primary_replica, ts_now, sender, pp, sender_replica):
primary_replica._ordering_service.last_accepted_pre_prepare_time = ts_now
pp = FakeSomethingHashable(viewNo=0, ppSeqNo=1, ppTime=OBSOLETE_PP_TS)
primary_replica._ordering_service.pre_prepare_tss[pp.viewNo, pp.ppSeqNo][pp, sender_replica] = \
primary_replica._ordering_service.last_accepted_pre_prepare_time
assert not primary_replica._ordering_service.l_is_pre_prepare_time_correct(pp, sender)
def test_pp_obsolete_if_unknown(primary_replica, pp):
pp = FakeSomethingHashable(viewNo=0, ppSeqNo=1, ppTime=OBSOLETE_PP_TS)
assert not primary_replica._ordering_service.l_is_pre_prepare_time_correct(pp, '')
def test_pp_obsolete_if_older_than_threshold(primary_replica, ts_now, pp, sender_replica):
pp = FakeSomethingHashable(viewNo=0, ppSeqNo=1, ppTime=OBSOLETE_PP_TS)
primary_replica._ordering_service.pre_prepare_tss[pp.viewNo, pp.ppSeqNo][pp, sender_replica] = ts_now
assert not primary_replica._ordering_service.l_is_pre_prepare_time_correct(pp, sender_replica)
def test_ts_is_set_for_obsolete_pp(primary_replica, ts_now, pp, sender_replica):
pp.ppTime = OBSOLETE_PP_TS
primary_replica._external_bus.process_incoming(pp, sender_replica)
assert primary_replica._ordering_service.pre_prepare_tss[pp.viewNo, pp.ppSeqNo][pp, sender_replica] == ts_now
def test_ts_is_set_for_passed_pp(primary_replica, ts_now, pp, sender_replica):
primary_replica._external_bus.process_incoming(pp, sender_replica)
assert primary_replica._ordering_service.pre_prepare_tss[pp.viewNo, pp.ppSeqNo][pp, sender_replica] == ts_now
def test_ts_is_set_for_discarded_pp(primary_replica, ts_now, pp, sender_replica):
pp.instId +=1
primary_replica._external_bus.process_incoming(pp, sender_replica)
assert primary_replica._ordering_service.pre_prepare_tss[pp.viewNo, pp.ppSeqNo][pp, sender_replica] == ts_now
def test_ts_is_set_for_stahed_pp(primary_replica, ts_now, pp, sender_replica):
pp.viewNo +=1
primary_replica._external_bus.process_incoming(pp, sender_replica)
assert primary_replica._ordering_service.pre_prepare_tss[pp.viewNo, pp.ppSeqNo][pp, sender_replica] == ts_now
def test_ts_is_not_set_for_non_pp(primary_replica, ts_now, pp, sender_replica):
pp = FakeSomethingHashable(**pp.__dict__)
primary_replica.stasher.subscribe(FakeSomethingHashable, lambda *x, **y: None)
primary_replica._external_bus.process_incoming(pp, sender_replica)
assert len(primary_replica._ordering_service.pre_prepare_tss) == 0
def test_pre_prepare_tss_is_cleaned_in_gc(primary_replica, pp, sender):
primary_replica._external_bus.process_incoming(pp, sender)
# threshold is lower
primary_replica._ordering_service.l_gc((pp.viewNo, pp.ppSeqNo - 1))
assert (pp.viewNo, pp.ppSeqNo) in primary_replica._ordering_service.pre_prepare_tss
# threshold is not lower
primary_replica._ordering_service.l_gc((pp.viewNo, pp.ppSeqNo))
assert (pp.viewNo, pp.ppSeqNo) not in primary_replica._ordering_service.pre_prepare_tss
|
Python
| 0
|
@@ -8,16 +8,66 @@
pytest%0A%0A
+from plenum.common.stashing_router import PROCESS%0A
from ple
@@ -4658,22 +4658,33 @@
*x, **y:
+ (PROCESS,
None)
+)
%0A pri
|
3f039537700448dfd2bc9b0ace9bac9925e062c2
|
fix standalone values
|
splash/kernel/lua_parser.py
|
splash/kernel/lua_parser.py
|
# -*- coding: utf-8 -*-
"""
Parser for a subset of Lua, useful for autocompletion.
It takes ``Tok(name, value)`` namedtuples as an input.
"""
from __future__ import absolute_import
import string
from operator import attrgetter
from collections import namedtuple
from funcparserlib import parser as p
Token = namedtuple("Token", "type value")
class _Match(object):
def __init__(self, value):
self.value = value
def __repr__(self):
return "%s(%r)" % (self.__class__.__name__, self.value)
class _AttrLookupMatch(_Match):
@property
def prefix(self):
return self.value[0]
@property
def names_chain(self):
return self.value[1:][::-1]
def __repr__(self):
return "%s(prefix=%r names_chain=%r)" % (
self.__class__.__name__, self.prefix, self.names_chain
)
class Standalone(_Match):
pass
class SplashAttribute(_AttrLookupMatch):
pass
class SplashMethod(_AttrLookupMatch):
pass
class ObjectAttribute(_AttrLookupMatch):
pass
class ObjectAttributeIndexed(object):
def __init__(self, value):
self.quote = value[1]
self.prefix = value[0]
self.names_chain = value[2:][::-1]
def __repr__(self):
return "%s(prefix=%r names_chain=%r, quote=%r)" % (
self.__class__.__name__, self.prefix, self.names_chain, self.quote
)
class ObjectMethod(_AttrLookupMatch):
pass
class ConstantMethod(object):
def __init__(self, value):
self.prefix, self.const = value
def __repr__(self):
return "%s(prefix=%r const=%r)" % (
self.__class__.__name__, self.prefix, self.const)
# ======================== processing functions =============================
token_value = attrgetter("value")
token_type = attrgetter("type")
def token(tp, check=lambda t: True):
return p.some(lambda t: t.type == tp and check(t)) >> token_value
def flat(seq):
res = []
for el in seq:
if isinstance(el, (list, tuple)):
res.extend([sub_el for sub_el in flat(el)])
else:
res.append(el)
return res
def match(cls):
return lambda res: cls(res)
# =============================== parser ====================================
# A partial parser for Lua.
#
# It works on a *reversed* sequence of tokens
# (right to left), starting from a token at cursor.
tok_number = token("number")
tok_string = token("string")
dot = token(".")
colon = token(":")
single_quote = token('"')
double_quote = token("'")
quote = (single_quote | double_quote)
open_sq_brace = token("[")
close_sq_brace = token("]")
open_rnd_brace = token("(")
close_rnd_brace = token(")")
iden_start = p.skip(p.some(lambda t: t.type not in ".:"))
tok_splash = (p.a(Token("iden", "splash")) + iden_start) >> token_value
iden_nosplash = token("iden", lambda t: t.value != 'splash')
iden = token("iden")
# standalone names are parsed separately - we need e.g. to suggest them
# as keywords
first_iden = iden + iden_start
single_obj = first_iden >> match(Standalone)
_braced_constant = p.skip(close_rnd_brace) + (tok_string | tok_number) + p.skip(open_rnd_brace)
_constant_method = iden + p.skip(colon) + _braced_constant
_constant_method_noprefix = p.pure("") + p.skip(colon) + _braced_constant
constant_method = (_constant_method | _constant_method_noprefix) >> flat >> match(ConstantMethod)
_index = p.skip(close_sq_brace) + (tok_string | tok_number) + p.skip(open_sq_brace)
dot_iden_or_index = _index | (iden + p.skip(dot)) # either .name or ["name"]
# foo[0]["bar"].baz
_attr_chain = p.oneplus(dot_iden_or_index) + first_iden
_obj = _attr_chain | first_iden
_attr_chain_noprefix = p.pure("") + p.skip(dot) + _obj
obj_attr_chain = (_attr_chain | _attr_chain_noprefix) >> flat >> match(ObjectAttribute)
# foo["bar
_indexed = quote + p.skip(open_sq_brace) + _obj
_obj_attr_indexed_noprefix = p.pure("") + _indexed
_obj_attr_indexed = iden + _indexed # FIXME: spaces in keys
obj_attr_indexed = (_obj_attr_indexed | _obj_attr_indexed_noprefix) >> flat >> match(ObjectAttributeIndexed)
# foo.bar:baz
_obj_method = iden + p.skip(colon) + _obj
_obj_method_noprefix = p.pure("") + p.skip(colon) + _obj
obj_method = (_obj_method | _obj_method_noprefix) >> flat >> match(ObjectMethod)
# splash:meth
_splash_method = iden_nosplash + p.skip(colon) + tok_splash
_splash_method_noprefix = p.pure("") + p.skip(colon) + tok_splash
splash_method = (_splash_method | _splash_method_noprefix) >> match(SplashMethod)
# splash.attr
_splash_attr = iden_nosplash + p.skip(dot) + tok_splash
_splash_attr_noprefix = p.pure("") + p.skip(dot) + tok_splash
splash_attr = (_splash_attr | _splash_attr_noprefix) >> match(SplashAttribute)
splash_parser = splash_method | splash_attr
lua_parser = (splash_parser | obj_method | obj_attr_indexed | obj_attr_chain | constant_method | single_obj)
# ========================= wrapper objects =================================
class LuaLexer(object):
def __init__(self, lua):
self._completer = lua.eval("require('completer')")
def tokenize(self, lua_source, pad=1):
# Our lexer doesn't support unicode. To avoid exceptions,
# replace all non-ascii characters before the tokenization.
# This is not optimal, but Lua doesn't allow unicode identifiers,
# so non-ascii text usually is not interesting for the completion
# engine.
lua_source = lua_source.encode('ascii', 'replace')
res = self._completer.tokenize(lua_source)
return [Token("NA", "")]*pad + [Token(t["tp"], t["value"]) for t in res.values()]
class LuaParser(object):
out_chars = string.whitespace + ".,:;\"')([]/*+^-=&%{}<>~"
def __init__(self, lua):
self.lexer = LuaLexer(lua)
def parse(self, code, cursor_pos=None):
if cursor_pos is None:
cursor_pos = len(code)
if self._token_split(code, cursor_pos):
return
context = code[:cursor_pos]
tokens = self.lexer.tokenize(context, pad=1)
try:
return lua_parser.parse(tokens[::-1])
except p.NoParseError as e:
return
def _token_split(self, code, cursor_pos):
""" Return True if a token is split into two parts by cursor_pos """
next_char = code[cursor_pos:cursor_pos+1]
return next_char and next_char not in self.out_chars
|
Python
| 0.000106
|
@@ -862,36 +862,89 @@
ne(_Match):%0A
-pass
+@property%0A def prefix(self):%0A return self.value
%0A%0Aclass SplashAt
|
abc74f521f1b52fe2b17046cc81705a691314832
|
Give an error only if the report object is not None
|
ReadConfig.py
|
ReadConfig.py
|
#
# ReadConfig
#
# Ron Lockwood
# University of Washington, SIL International
# 12/4/14
#
# Functions for reading a configuration file
import re
CONFIG_FILE = 'FlexTrans.config'
def readConfig(report):
try:
f_handle = open(CONFIG_FILE)
except:
report.Error('Error reading the file: "' + CONFIG_FILE + '". Check that it is in the FlexTools folder.')
return None
my_map = {}
for line in f_handle:
if len(line) < 2:
report.Error('Error reading the file: "' + CONFIG_FILE + '". No blank lines allowed.')
return
# Skip commented lines
if line[0] == '#':
continue
# We expect lines in the form -- property=value
if not re.search('=',line):
report.Error('Error reading the file: "' + CONFIG_FILE + '". A line without "=" was found.')
return
(prop, value) = line.split('=')
value = value.rstrip()
# if the value has commas, save it as a list
if re.search(',',value):
my_list = value.split(',')
my_map[prop] = my_list
else:
my_map[prop] = value
return my_map
def getConfigVal(my_map, key, report):
if key not in my_map:
report.Error('Error in the file: "' + CONFIG_FILE + '". A value for "'+key+'" was not found.')
return None
else:
return my_map[key]
def configValIsList(my_map, key, report):
if type(my_map[key]) is not list:
report.Error('Error in the file: "' + CONFIG_FILE + '". The value for "'+key+'" is supposed to be a comma separated list. For a single value, end it with a comma.')
return False
else:
return True
|
Python
| 0.999999
|
@@ -91,16 +91,116 @@
/4/14%0A#%0A
+# Version 1.1 - 3/7/18 - Ron Lockwood%0A# Give an error only if the report object is not None %0A#%0A
# Func
@@ -365,16 +365,51 @@
except:%0A
+ if report is not None:%0A
@@ -606,16 +606,55 @@
e) %3C 2:%0A
+ if report is not None:%0A
@@ -930,24 +930,63 @@
('=',line):%0A
+ if report is not None:%0A
@@ -1464,16 +1464,51 @@
my_map:%0A
+ if report is not None:%0A
@@ -1744,16 +1744,51 @@
t list:%0A
+ if report is not None:%0A
|
4eb4a2eaa42cd71bf4427bdaaa1e853975432691
|
Allow keyword arguments in GeneralStoreManager.create_item method
|
graphene/storage/intermediate/general_store_manager.py
|
graphene/storage/intermediate/general_store_manager.py
|
from graphene.storage.id_store import *
class GeneralStoreManager:
"""
Handles the creation/deletion of nodes to the NodeStore with ID recycling
"""
def __init__(self, store):
"""
Creates an instance of the GeneralStoreManager
:param store: Store to manage
:return: General store manager to handle index recycling
:rtype: GeneralStoreManager
"""
self.store = store
self.idStore = IdStore(store.FILE_NAME + ".id")
def create_item(self):
"""
Creates an item with the type of the store being managed
:return: New item with type STORE_TYPE
"""
# Check for an available ID from the IdStore
available_id = self.idStore.get_id()
# If no ID is available, get the last index of the file
if available_id == IdStore.NO_ID:
available_id = self.store.get_last_file_index()
# Create a type based on the type our store stores
return self.store.STORAGE_TYPE(available_id)
def delete_item(self, item):
"""
Deletes the given item from the store and adds the index to its IdStore
to be recycled
:return: Nothing
:rtype: None
"""
# Get index of item to be deleted
deleted_index = item.index
# Delete the item from the store
self.store.delete_item(item)
# Add the index to the IdStore, so it can be recycled
self.idStore.store_id(deleted_index)
|
Python
| 0.000001
|
@@ -515,16 +515,26 @@
tem(self
+, **kwargs
):%0A
@@ -1039,16 +1039,26 @@
lable_id
+, **kwargs
)%0A%0A d
|
ad47fb85e5c2deb47cbe3fc3478e1ae2da93adfe
|
Update h-index.py
|
Python/h-index.py
|
Python/h-index.py
|
# Time: O(nlogn)
# Space: O(1)
# Given an array of citations (each citation is a non-negative integer)
# of a researcher, write a function to compute the researcher's h-index.
#
# According to the definition of h-index on Wikipedia:
# "A scientist has index h if h of his/her N papers have
# at least h citations each, and the other N − h papers have
# no more than h citations each."
#
# For example, given citations = [3, 0, 6, 1, 5],
# which means the researcher has 5 papers in total
# and each of them had received 3, 0, 6, 1, 5 citations respectively.
# Since the researcher has 3 papers with at least 3 citations each and
# the remaining two with no more than 3 citations each, his h-index is 3.
#
# Note: If there are several possible values for h, the maximum one is taken as the h-index.
#
class Solution(object):
def hIndex(self, citations):
"""
:type citations: List[int]
:rtype: int
"""
citations.sort(reverse=True)
h = 0
for x in citations:
if x >= h + 1:
h += 1
else:
break
return h
# Time: O(nlogn)
# Space: O(n)
class Solution2(object):
def hIndex(self, citations):
"""
:type citations: List[int]
:rtype: int
"""
return sum(x >= i + 1 for i, x in enumerate(sorted(citations, reverse=True)))
|
Python
| 0.000002
|
@@ -1,32 +1,28 @@
# Time: O(n
-logn
)%0A# Space: O(1)%0A
@@ -18,17 +18,17 @@
pace: O(
-1
+n
)%0A%0A# Giv
@@ -924,32 +924,540 @@
int%0A %22%22%22%0A
+ n = len(citations);%0A count = %5B0%5D * (n + 1)%0A for x in citations:%0A if x %3E= n:%0A count%5Bn%5D += 1%0A else:%0A count%5Bx%5D += 1%0A%0A h = 0%0A for i in reversed(xrange(0, n + 1)):%0A h += count%5Bi%5D%0A if h %3E= i:%0A return i%0A return h%0A%0A# Time: O(nlogn)%0A# Space: O(1)%0Aclass Solution2(object):%0A def hIndex(self, citations):%0A %22%22%22%0A :type citations: List%5Bint%5D%0A :rtype: int%0A %22%22%22%0A
citation
|
4903401a6365c24675b80003ed89e792eba2e2b5
|
Add support for redact EML from letter
|
feder/letters/forms.py
|
feder/letters/forms.py
|
from textwrap import wrap
from atom.ext.crispy_forms.forms import HelperMixin, SingleButtonMixin
from braces.forms import UserKwargModelFormMixin
from crispy_forms.layout import Submit
from dal import autocomplete
from django import forms
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
from feder.cases.models import Case
from feder.letters.utils import get_body_with_footer
from feder.records.models import Record
from .models import Letter
QUOTE_TPL = "W nawiązaniu do pisma z dnia {created} z adresu {email}:\n{quoted}"
class LetterForm(SingleButtonMixin, UserKwargModelFormMixin, forms.ModelForm):
case = forms.ModelChoiceField(
queryset=Case.objects.all(),
label=_("Case"),
widget=autocomplete.ModelSelect2(url="cases:autocomplete-find"),
)
def __init__(self, *args, **kwargs):
case = kwargs.pop("case", None)
letter = kwargs.get("instance")
super().__init__(*args, **kwargs)
if letter and letter.is_mass_draft():
del self.fields["case"]
else:
self.initial["case"] = case or letter.case
self.helper.form_tag = False
class Meta:
model = Letter
fields = ["title", "body", "case", "note"]
def save(self, *args, **kwargs):
if not self.instance.is_mass_draft():
self.instance.record.case = self.cleaned_data["case"]
self.instance.record.save()
return super().save(*args, **kwargs)
class ReplyForm(HelperMixin, UserKwargModelFormMixin, forms.ModelForm):
def __init__(self, *args, **kwargs):
self.letter = kwargs.pop("letter")
super().__init__(*args, **kwargs)
self.helper.form_tag = False
self.user_can_reply = self.user.has_perm("reply", self.letter.case.monitoring)
self.user_can_save = self.user.has_perm(
"add_draft", self.letter.case.monitoring
)
self.set_dynamic_field_initial()
self.add_form_buttons()
def set_dynamic_field_initial(self):
self.fields["title"].initial = "Re: {title}".format(title=self.letter.title)
self.fields["body"].initial = get_body_with_footer(
"", self.letter.case.monitoring.email_footer
)
self.fields["quote"].initial = self.get_quote()
def add_form_buttons(self):
if self.user_can_reply and self.user_can_save:
self.helper.add_input(
Submit("save", _("Save draft"), css_class="btn-default")
)
self.helper.add_input(
Submit("send", _("Send reply"), css_class="btn-primary")
)
elif self.user_can_save:
self.helper.add_input(
Submit("save", _("Save draft"), css_class="btn-primary")
)
elif self.user_can_reply:
self.helper.add_input(
Submit("send", _("Send reply"), css_class="btn-primary")
)
def clean(self):
if not (self.user_can_reply or self.user_can_save):
raise forms.ValidationError(
_(
"Nothing to do. You do not have permission "
+ "to save draft or send replies."
)
)
if not self.user_can_reply and "send" in self.data:
raise forms.ValidationError(
_("You do not have permission to send replies.")
)
if not self.user_can_save and "save" in self.data:
raise forms.ValidationError(_("You do not have permission to save draft."))
return super().clean()
def get_quote(self):
quoted = "> " + "\n> ".join(wrap(self.letter.body, width=80))
return QUOTE_TPL.format(
created=self.letter.created.strftime(settings.STRFTIME_FORMAT),
email=self.letter.email,
quoted=quoted,
)
def save(self, *args, **kwargs):
self.instance.author_user = self.user
if not hasattr(self.instance, "record"):
self.instance.record = Record.objects.create(case=self.letter.case)
obj = super().save(*args, **kwargs)
return obj
class Meta:
model = Letter
fields = ["title", "body", "quote"]
class AssignLetterForm(SingleButtonMixin, forms.Form):
action_text = _("Assign")
case = forms.ModelChoiceField(
queryset=Case.objects.all(),
label=_("Case number"),
widget=autocomplete.ModelSelect2(url="cases:autocomplete-find"),
)
def __init__(self, *args, **kwargs):
self.letter = kwargs.pop("letter")
super().__init__(*args, **kwargs)
def save(self):
self.letter.case = self.cleaned_data["case"]
self.letter.record.save()
self.letter.case.save()
class ReassignLetterForm(SingleButtonMixin, forms.ModelForm):
action_text = _("Reassign")
case = forms.ModelChoiceField(
queryset=Case.objects.all(),
label=_("Case number"),
widget=autocomplete.ModelSelect2(url="cases:autocomplete-find"),
)
def save(self, commit=True):
self.instance.case = self.cleaned_data["case"]
self.instance.record.save()
return super().save(commit)
|
Python
| 0
|
@@ -1260,16 +1260,23 @@
, %22note%22
+, %22eml%22
%5D%0A%0A d
|
086de25019cc6745ceb7e8505945a102a0e8c63a
|
fix image rotation when generating dataset
|
Python/dataset.py
|
Python/dataset.py
|
import argparse
import cv2
import gzip
import numpy as np
import pathlib
import requests
import shutil
import struct
import tempfile
import zipfile
from os import path as osp
from progress.bar import IncrementalBar
from typing import Dict
class DatasetGenerator:
"""
Class to download and create dataset for training and testing.
"""
# link to download EMNIST dataset
emnist_dataset_url = 'https://www.itl.nist.gov/iaui/vip/cs_links/EMNIST/gzip.zip'
def __init__(self, balanced: bool = True, download_dir_path: str = '', data_dir_path: str = ''):
self.balanced = balanced # whether to use the balanced dataset or not
self.download_dir_path = download_dir_path
self.data_dir_path = data_dir_path
if not(self.download_dir_path and self.download_dir_path.strip()):
self.download_dir_path = osp.abspath('download/')
if not(self.data_dir_path and self.data_dir_path.strip()):
self.data_dir_path = osp.abspath('data/')
pathlib.Path(self.download_dir_path).mkdir(parents=True, exist_ok=True)
pathlib.Path(self.data_dir_path).mkdir(parents=True, exist_ok=True)
def create(self):
"""
Method to setup EMNIST dataset.
Note: When saving to data dir, existing data is not removed.
"""
print('Setting up EMNIST dataset')
file_path = osp.abspath(osp.join(self.download_dir_path, osp.basename(DatasetGenerator.emnist_dataset_url)))
# step 1: download
DatasetGenerator.download_file(url=DatasetGenerator.emnist_dataset_url, dest=file_path)
# step 2.1: extract main zip file
DatasetGenerator.extract_zip_file(zip_fp=file_path)
# create list to store idx file paths and label mappings
dtype = 'balanced' if self.balanced else 'byclass'
# idx paths saved as (image, label) pair
idx_paths = [
(
osp.join(osp.dirname(file_path), 'gzip', f'emnist-{dtype}-train-images-idx3-ubyte.gz'),
osp.join(osp.dirname(file_path), 'gzip', f'emnist-{dtype}-train-labels-idx1-ubyte.gz')
),
(
osp.join(osp.dirname(file_path), 'gzip', f'emnist-{dtype}-test-images-idx3-ubyte.gz'),
osp.join(osp.dirname(file_path), 'gzip', f'emnist-{dtype}-test-labels-idx1-ubyte.gz')
)
]
label_mapping = {}
with open(osp.join(osp.dirname(file_path), 'gzip', f'emnist-{dtype}-mapping.txt'), mode='r') as lm:
for line in lm:
key, value = line.split()
label_mapping[key] = chr(int(value))
# step 2.2: extract smaller gzip files
for idx_pair in idx_paths:
for idx_path in idx_pair:
DatasetGenerator.extract_gzip_file(gzip_fp=idx_path)
# step 3: save image files
for idx_pair in idx_paths:
self.idx_to_image(
image_file=osp.splitext(idx_pair[0])[0],
label_file=osp.splitext(idx_pair[1])[0],
label_mapping=label_mapping
)
@staticmethod
def download_file(url: str, dest: str):
"""
Method to download a file from url and save at dest
"""
print(f'Downloading file from {url} and saving to {dest}')
response = requests.get(url, stream=True)
total_size = int(response.headers.get('content-length'))
chunk_size = 4096
total_steps = int(total_size / chunk_size)
progress_bar = IncrementalBar(max=total_steps, suffix='%(percent).1f%%')
with open(dest, mode='wb') as fd:
for chunk in response.iter_content(chunk_size=chunk_size):
fd.write(chunk)
progress_bar.next()
progress_bar.finish()
@staticmethod
def extract_zip_file(zip_fp: str):
"""
Method to extract a zip file and save it in the same directory as the zip file
"""
print(f'Extracting {zip_fp}')
with zipfile.ZipFile(zip_fp, 'r') as unzipped:
unzipped.extractall(osp.dirname(zip_fp))
@staticmethod
def extract_gzip_file(gzip_fp: str):
"""
Method to extract a gzip file and save it in the same directory as the gzip file
"""
print(f'Extracting {gzip_fp}')
with gzip.open(gzip_fp, 'rb') as zipped:
with open(osp.splitext(gzip_fp)[0], mode='wb') as unzipped:
shutil.copyfileobj(zipped, unzipped)
def idx_to_image(self, image_file: str, label_file: str, label_mapping: Dict[str, str] = None):
print(f'Converting {image_file} to image files')
with open(image_file, mode='rb') as image_stream, open(label_file, mode='rb') as label_stream:
# save images dataset
magic, num_images = struct.unpack('>II', image_stream.read(8))
if magic != 2051:
raise ValueError('Magic number invalid')
num_rows, num_cols = struct.unpack('>II', image_stream.read(8))
images = np.fromfile(image_stream, dtype=np.dtype(np.uint8).newbyteorder('>'))
images = images.reshape((num_images, num_rows, num_cols))
# save labels dataset
magic, num_labels = struct.unpack('>II', label_stream.read(8))
if magic != 2049:
raise ValueError('Magic number invalid')
labels = np.fromfile(label_stream, dtype=np.dtype(np.uint8).newbyteorder('>'))
labels = labels.astype('str')
labels = np.vectorize(lambda x: label_mapping[x])(labels) if label_mapping is not None else labels
progress_bar = IncrementalBar(max=len(labels), suffix='%(percent).1f%%')
# create missing directories
for unique_label in np.unique(labels):
label_folder = osp.abspath(osp.join(self.data_dir_path, unique_label))
pathlib.Path(label_folder).mkdir(parents=True, exist_ok=True)
# save images to data directory
for label, image in zip(labels, images):
label_folder = osp.abspath(osp.join(self.data_dir_path, label))
image_dest = tempfile.mktemp(dir=label_folder, suffix='.png')
cv2.imwrite(f'{image_dest}', image)
progress_bar.next()
progress_bar.finish()
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description="""
Script to download and create training data from EMNIST. Full dataset would be downloaded irrespective of
unbalanced truth value.
""",
usage='%(prog)s [options]',
)
parser.add_argument(
'-dop',
'--download-path',
dest='download_path',
type=str,
help='Path where dataset should be downloaded.',
)
parser.add_argument(
'-dap',
'--data-path',
dest='data_path',
type=str,
help='Path where dataset images should be saved.',
)
parser.add_argument(
'-ub',
'--unbalanced',
dest='unbalanced',
action='store_true',
default=False,
help='Whether to use the unbalanced dataset or not',
)
args = parser.parse_args()
creator = DatasetGenerator(balanced=not args.unbalanced,
download_dir_path=args.download_path,
data_dir_path=args.data_path)
creator.create()
|
Python
| 0.000013
|
@@ -6313,16 +6313,18 @@
', image
+.T
)%0A%0A
|
56adaeecb5ed868ca057a4985a9305770d551b61
|
Add version
|
sqlalchemy_seed/__init__.py
|
sqlalchemy_seed/__init__.py
|
# -*- coding: utf-8 -*-
"""
sqlalchemy_seed
~~~~~~~~~~~~~~~
Seed
:copyright: (c) 2017 Shinya Ohyanagi, All rights reserved.
:license: BSD, see LICENSE for more details.
"""
import os
import importlib
import json
import yaml
def create_table(base, session=None):
"""Create table.
:param base: `sqlalchemy.ext.declarative`
:param session: `sqlalchemy.orm`
"""
if session:
base.metadata.create_all(bind=session.bind)
else:
base.metadata.create_all()
def drop_table(base, session):
"""Drop table.
:param base: `sqlalchemy.ext.declarative`
:param session: `sqlalchemy.orm`
"""
session.expunge_all()
session.remove()
base.metadata.drop_all()
def load_fixture_files(paths, files):
"""Load fixture files.
:param path: Path to fixtures
:param files: Fixture file names
"""
fixtures = []
if not isinstance(paths, list):
paths = [paths]
for path in paths:
for file in files:
fixture_path = os.path.join(path, file)
if not os.path.exists(fixture_path):
continue
with open(fixture_path, 'r') as f:
if file.endswith('.yaml') or file.endswith('.yml'):
data = yaml.load(f)
elif file.endswith('.json'):
data = json.loads(f)
else:
continue
fixtures.append(data)
return fixtures
def _create_model_instance(fixture):
"""Create model instance.
:param fixture: Fixtures
"""
instances = []
for data in fixture:
if 'model' in data:
module_name, class_name = data['model'].rsplit('.', 1)
module = importlib.import_module(module_name)
model = getattr(module, class_name)
instance = model(**data['fields'])
instances.append(instance)
return instances
def load_fixtures(session, fixtures):
"""Load fixture.
:param base: `sqlalchemy.ext.declarative`
:param fixtures: Fixture files
"""
instances = []
for fixture in fixtures:
_instances = _create_model_instance(fixture)
for instance in _instances:
instances.append(instance)
try:
session.add_all(instances)
session.flush()
session.commit()
except:
session.rollback()
raise
|
Python
| 0
|
@@ -70,13 +70,105 @@
-Seed%0A
+%60sqlalchemy_seed%60 is a seed library which provides initial data to%0A database using SQLAlchemy.
%0A%0A
@@ -333,16 +333,39 @@
t yaml%0A%0A
+__version__ = '0.1.0'%0A%0A
%0Adef cre
|
4fd6d20be257cca38f98d20df78b35d7c7bc3911
|
Fix factory_jst
|
feder/teryt/factory.py
|
feder/teryt/factory.py
|
from autofixture import AutoFixture
from .models import JednostkaAdministracyjna
def factory_jst():
jst = AutoFixture(JednostkaAdministracyjna,
field_values={'updated_on': '2015-02-12'},
generate_fk=True).create_one(commit=False)
jst.rght = 0
jst.save()
return jst
|
Python
| 0.000002
|
@@ -2,39 +2,29 @@
rom
-autofixture import AutoFixture%0A
+random import randint
%0Afro
@@ -68,11 +68,47 @@
yjna
-%0A
+ as JST%0Afrom .models import Category
%0A%0A
+%0A
def
@@ -130,50 +130,170 @@
-jst = AutoFixture(JednostkaAdministracyjna
+category = Category.objects.create(name=%22X%22, level=1)%0A return JST.objects.create(name=%22X%22, id=randint(0, 1000),%0A category=category
,%0A
@@ -316,23 +316,16 @@
-field_values=%7B'
+
upda
@@ -334,24 +334,21 @@
d_on
-':
+=
'2015-0
-2
+5
-12'
-%7D
,%0A
@@ -371,94 +371,25 @@
-generate_fk=True).create_one(commit=False)%0A jst.rght = 0%0A jst.save()%0A return jst
+ active=True)
%0A
|
2eefaca1d7d27ebe2e9a489ab2c1dc2927e49b55
|
Bump version
|
sqliteschema/__version__.py
|
sqliteschema/__version__.py
|
__author__ = "Tsuyoshi Hombashi"
__copyright__ = "Copyright 2016, {}".format(__author__)
__license__ = "MIT License"
__version__ = "1.0.1"
__maintainer__ = __author__
__email__ = "tsuyoshi.hombashi@gmail.com"
|
Python
| 0
|
@@ -129,17 +129,17 @@
= %221.0.
-1
+2
%22%0A__main
|
bc904f3ab7cc9d697dc56058ac9cb578055c401f
|
raise exception rather than logging and returning
|
checks.d/hdfs.py
|
checks.d/hdfs.py
|
from checks import AgentCheck
class HDFSCheck(AgentCheck):
"""Report on free space and space used in HDFS.
"""
def check(self, instance):
try:
import snakebite.client
except ImportError:
raise ImportError('HDFSCheck requires the snakebite module')
if 'namenode' not in instance:
self.log.info('Missing key \'namenode\' in HDFSCheck config')
return
hostport = instance['namenode']
if ':' in hostport:
host, _, port = hostport.partition(':')
port = int(port)
else:
host = hostport
port = 8020
hdfs = snakebite.client.Client(host, port)
stats = hdfs.df()
# {'used': 2190859321781L,
# 'capacity': 76890897326080L,
# 'under_replicated': 0L,
# 'missing_blocks': 0L,
# 'filesystem': 'hdfs://hostname:port',
# 'remaining': 71186818453504L,
# 'corrupt_blocks': 0L}
self.gauge('hdfs.used', stats['used'])
self.gauge('hdfs.free', stats['remaining'])
self.gauge('hdfs.capacity', stats['capacity'])
self.gauge('hdfs.in_use', float(stats['used']) / float(stats['capacity']))
self.gauge('hdfs.under_replicated', stats['under_replicated'])
self.gauge('hdfs.missing_blocks', stats['missing_blocks'])
self.gauge('hdfs.corrupt_blocks', stats['corrupt_blocks'])
if __name__ == '__main__':
check, instances = HDFSCheck.from_yaml('./hdfs.yaml')
for instance in instances:
check.check(instance)
print "Events: %r" % check.get_events()
print "Metrics: %r" % check.get_metrics()
|
Python
| 0
|
@@ -352,21 +352,24 @@
-self.log.info
+raise ValueError
('Mi
@@ -416,27 +416,8 @@
ig')
-%0A return
%0A%0A
|
082562d4fc3567f956e95d71807c65281a69b3ff
|
change get_many to expect ids
|
feedly/storage/base.py
|
feedly/storage/base.py
|
from feedly.serializers.base import BaseSerializer
class BaseActivityStorage(object):
'''
The storage class for activities data
'''
serializer = BaseSerializer
def __init__(self, **options):
self.options = options
self.serializer = self.serializer()
def add_to_storage(self, key, activities, *args, **kwargs):
'''
activities should be a dict with activity_id as keys and
the serialized data as value
'''
raise NotImplementedError()
def get_from_storage(self, key, activity_ids, *args, **kwargs):
raise NotImplementedError()
def remove_from_storage(self, key, activity_ids, *args, **kwargs):
raise NotImplementedError()
def get_many(self, key, activities, *args, **kwargs):
activity_ids = self.serialize_activities(activities).keys()
activities_data = self.get_from_storage(key, activity_ids, *args, **kwargs)
return self.deserialize_activities(activities_data)
def get(self, key, activity, *args, **kwargs):
return self.get_many(key, [activity], *args, **kwargs)[0]
def add(self, key, activity, *args, **kwargs):
return self.add_many(key, [activity], *args, **kwargs)
def add_many(self, key, activities, *args, **kwargs):
serialized_activities = self.serialize_activities(activities)
return self.add_to_storage(key, serialized_activities, *args, **kwargs)
def remove(self, key, activity, *args, **kwargs):
return self.remove_many(key, [activity], *args, **kwargs)
def remove_many(self, key, activities, *args, **kwargs):
activity_ids = self.serialize_activities(activities).keys()
return self.remove_from_storage(key, activity_ids, *args, **kwargs)
def flush(self):
pass
def serialize_activity(self, activity):
activity_id, activity_data = self.serializer.dumps(activity)
serialized_activity = dict(((activity_id, activity_data),))
return serialized_activity
def serialize_activities(self, activities):
serialized_activities = {}
for activity in activities:
serialized_activities.update(self.serialize_activity(activity))
return serialized_activities
def deserialize_activities(self, data):
return self.serializer.loads(data)
class BaseTimelineStorage(object):
'''
The storage class for the feeds
'''
def __init__(self, **options):
self.options = options
def get_many(self, key, start, stop):
raise NotImplementedError()
def add_many(self, key, activities, *args, **kwargs):
raise NotImplementedError()
def remove_many(self, key, activities, *args, **kwargs):
raise NotImplementedError()
def trim(self, key, length):
raise NotImplementedError()
def count(self, key, *args, **kwargs):
raise NotImplementedError()
def delete(self, key, *args, **kwargs):
raise NotImplementedError()
|
Python
| 0
|
@@ -752,34 +752,36 @@
lf, key, activit
-ie
+y_id
s, *args, **kwar
@@ -789,76 +789,8 @@
s):%0A
- activity_ids = self.serialize_activities(activities).keys()%0A
|
6a8c8bc0e407327e5c0e4cae3d4d6ace179a6940
|
Add team eligibility to API
|
webserver/codemanagement/serializers.py
|
webserver/codemanagement/serializers.py
|
from rest_framework import serializers
from greta.models import Repository
from competition.models import Team
from .models import TeamClient, TeamSubmission
class TeamSerializer(serializers.ModelSerializer):
class Meta:
model = Team
fields = ('id', 'name', 'slug')
class RepoSerializer(serializers.ModelSerializer):
class Meta:
model = Repository
fields = ('name', 'description', 'forked_from',
'path', 'is_ready')
forked_from = serializers.RelatedField()
path = serializers.SerializerMethodField('get_path')
is_ready = serializers.SerializerMethodField('get_is_ready')
def get_path(self, repo):
return repo.path
def get_is_ready(self, repo):
return repo.is_ready()
class TeamSubmissionSerializer(serializers.ModelSerializer):
class Meta:
model = TeamSubmission
fields = ('name', 'commit')
class TeamClientSerializer(serializers.ModelSerializer):
class Meta:
model = TeamClient
fields = ('team', 'repository', 'tag', 'language')
team = TeamSerializer()
repository = RepoSerializer()
tag = serializers.SerializerMethodField('get_tag')
language = serializers.SerializerMethodField('get_language')
def get_tag(self, teamclient):
try:
latest_sub= teamclient.submissions.latest()
return TeamSubmissionSerializer(latest_sub).data
except TeamSubmission.DoesNotExist:
return None
def get_language(self, teamclient):
return teamclient.base.language
|
Python
| 0
|
@@ -281,16 +281,35 @@
, 'slug'
+, 'eligible_to_win'
)%0A%0A%0Aclas
|
cc5ac430cdc94211472cdb2f4541c834faa83105
|
Fix some bugs
|
Server/app.py
|
Server/app.py
|
# -*- coding: utf-8 -*-
"""
BUFSM
~~~~~~~~
A simple web api for stream map cordinates with Flask and sqlite3.
"""
from sqlite3 import dbapi2 as sqlite3
import click, json
from flask import Flask, request, redirect, json, jsonify, _app_ctx_stack, render_template
# configuration
DATABASE = '/home/bufsm.db'
SECRET_KEY = 'TOKEN'
app = Flask(__name__)
app.config.from_object(__name__)
app.config.from_envvar('MINITWIT_SETTINGS', silent=True)
def get_db():
"""Opens a new database connection if there is none yet for the
current application context.
"""
top = _app_ctx_stack.top
if not hasattr(top, 'sqlite_db'):
top.sqlite_db = sqlite3.connect(app.config['DATABASE'])
top.sqlite_db.row_factory = sqlite3.Row
return top.sqlite_db
@app.teardown_appcontext
def close_database(exception):
"""Closes the database again at the end of the request."""
top = _app_ctx_stack.top
if hasattr(top, 'sqlite_db'):
top.sqlite_db.close()
def init_db():
"""Initializes the database."""
with app.app_context():
db = get_db()
with app.open_resource('schema.sql', mode='r') as f:
db.cursor().executescript(f.read())
db.commit()
#@app.cli.command('initdb')
def initdb_command():
"""Creates the database tables."""
init_db()
print('Initialized the database.')
def query_db(query, args=(), one=False):
"""Queries the database and returns a list of dictionaries."""
cur = get_db().execute(query, args)
rv = cur.fetchall()
return (rv[0] if rv else None) if one else rv
def create_thing(thing, lat, lon):
db = get_db()
db.execute('''insert into things (thing_name, lat, lon) values (?,?,?)''',
[thing, float(lat), float(lon)])
db.commit()
def update_thing(lat, lon, thing_id):
db = get_db()
db.execute('''UPDATE things SET lat=?, lon=? WHERE thing_id=?''',
[float(lat), float(lon), thing_id])
db.commit()
@app.route('/api/things', methods=['POST'])
def cordinates():
"""Rest api for send and receive coordinates from bufsm's
Add a and update things:
curl -i -X POST -H "Content-Type: application/json" -d '{"thing":"thing_name",
"key":"SECRET_KEY", "lat":"", "lon":""}' http://127.0.0.1:5000/api/things
"""
thing = request.json.get('thing')
key = request.json.get('key')
if key == SECRET_KEY:
thing_id = query_db('select thing_id from things where thing_name = ?',
[thing], one=True)
if thing_id is None:
create_thing(thing, request.json.get('lat'), request.json.get('lon'))
return jsonify({ 'success': True })
update_thing(request.json.get('lat'), request.json.get('lon'), thing_id[0])
return jsonify({ 'success': True })
@app.route('/api/list')
def list_things():
"""
Serving the coordinates in GEOjsom format.
"""
rcv = query_db('''SELECT lat, lon, thing_name FROM things''')
try:
dic = []
for t in rcv:
dic.append({'geometry': {'type': 'Point', 'coordinates':\
[t[0], t[1]]}, 'type': 'Feature', 'properties': {'thing': t[2]}})
return json.dumps(dic)
#return jsonify({ 'success': True, 'error':str(rcv[0][2])})
#return jsonify(dic)
except Exception as e:
return jsonify({ 'success': False, 'error':str(e)})
@app.route('/')
def home():
return render_template('index.html')
if __name__ == '__main__':
init_db()
app.run(host="0.0.0.0", threaded=True)
|
Python
| 0.000056
|
@@ -3391,16 +3391,19 @@
route('/
+map
')%0Adef h
|
d50daddde2186d54659a4f8dbf63622311ed6d22
|
remove service class
|
glim/services.py
|
glim/services.py
|
# metaclass for Service class
class DeflectToInstance(type):
def __getattr__(selfcls, a): # selfcls in order to make clear it is a class object (as we are a metaclass)
try:
# first, inquiry the class itself
return super(DeflectToInstance, selfcls).__getattr__(a)
except AttributeError:
# Not found, so try to inquiry the instance attribute:
return getattr(selfcls.instance, a)
# facade that is used for saving complex
class Service:
__metaclass__ = DeflectToInstance
instance = None
@classmethod
def boot(cls, object, configuration = {}):
if cls.instance is None:
cls.instance = object(configuration)
class Config(Service):
pass
class Session(Service):
pass
class Router(Service):
pass
|
Python
| 0.000003
|
@@ -1,685 +1,33 @@
-# metaclass for Service class%0Aclass DeflectToInstance(type):%0A def __getattr__(selfcls, a): # selfcls in order to make clear it is a class object (as we are a metaclass)%0A try:%0A # first, inquiry the class itself%0A return super(DeflectToInstance, selfcls).__getattr__(a)%0A except AttributeError:%0A # Not found, so try to inquiry the instance attribute:%0A return getattr(selfcls.instance, a)%0A%0A# facade that is used for saving complex%0Aclass Service:%0A%09__metaclass__ = DeflectToInstance%0A%0A%09instance = None%0A%0A%09@classmethod%0A%09def boot(cls, object, configuration = %7B%7D):%0A%09%09if cls.instance is None:%0A%09%09%09cls.instance = object(configuration)%09
+from glim.core import Service
%0A%0Acl
|
ba605e0ac7df38e5a7ef62cc80e5b7b7b35f8849
|
Move astropy import to add_table
|
ipyaladin/aladin_widget.py
|
ipyaladin/aladin_widget.py
|
from ipywidgets import (widgets)
from traitlets import (Float, Unicode, Bool, List, Dict, default)
# theses library must be installed, and are used in votable operations
# http://www.astropy.org/
import astropy
""" Definition of the AladinLite widget in the python kernel """
class Aladin(widgets.DOMWidget):
_view_name = Unicode('ViewAladin').tag(sync=True)
_model_name = Unicode('ModelAladin').tag(sync=True)
_view_module = Unicode('jupyter-widget-ipyaladin').tag(sync=True)
_model_module = Unicode('jupyter-widget-ipyaladin').tag(sync=True)
# Aladin options must be declared here (as python class's attributes),
# so that they can be synchronized from the python side to the javascript side
# Default values are overwritten by values passed to the class's constructor
# only theses 4 values are actually updated on one side when they change on the other
fov = Float(60).tag(sync=True, o=True)
target = Unicode("0 +0").tag(sync=True, o=True)
coo_frame = Unicode("J2000").tag(sync=True, o=True)
survey = Unicode("P/DSS2/color").tag(sync=True, o=True)
# the remaining values exists for the widget constructor's sole purpose
reticle_size = Float(22).tag(sync=True, o=True)
reticle_color = Unicode("rgb(178, 50, 178)").tag(sync=True, o=True)
show_reticle = Bool(True).tag(sync=True, o=True)
show_zoom_control = Bool(True).tag(sync=True, o=True)
show_fullscreen_control = Bool(True).tag(sync=True, o=True)
show_layers_control = Bool(True).tag(sync=True, o=True)
show_goto_control = Bool(True).tag(sync=True, o=True)
show_share_control = Bool(False).tag(sync=True, o=True)
show_catalog = Bool(True).tag(sync=True, o=True)
show_frame = Bool(True).tag(sync=True, o=True)
show_coo_grid = Bool(False).tag(sync=True, o=True)
full_screen = Bool(False).tag(sync=True, o=True)
log = Bool(True).tag(sync=True, o=True)
allow_full_zoomout = Bool(False).tag(sync=True, o=True)
options = List(trait=Unicode).tag(sync=True)
# the following values are used in the classe's functions
# values used in the add_catalogFromUrl function
votable_URL = Unicode('').tag(sync=True)
votable_options = Dict().tag(sync=True)
votable_from_URL_flag = Bool(True).tag(sync=True)
# values used in the add_table function
table_keys = List().tag(sync=True)
table_columns = List().tag(sync=True)
table_flag = Bool(True).tag(sync=True)
# values used in the add_listener function
listener_type = Unicode('').tag(sync=True)
listener_flag = Bool(True).tag(sync=True)
listener_callback_click = None
listener_callback_hover = None
last_prompt_length = 0
@default('options')
def _default_options(self):
""" fill the options List with all the options declared """
return [name for name in self.traits(o=True)]
def __init__(self, **kwargs):
""" class constructor
Args:
kwargs: widget options
"""
super(Aladin, self).__init__(**kwargs)
# trigger the handle_aladin_event function when the send function is called on the js-side
# see: http://jupyter-notebook.readthedocs.io/en/latest/comms.html
self.on_msg(self.handle_aladin_event)
# Note: (about the classe's functions)
# As it is only possible to communicate with the js side of the application by using traitlets,
# we can not directly call a js function from the python side
# As such, we use a little trick that consists in delegating to one of the class's variable
# the role of a flag, whose change in value trigger a listener in the js side,
# who can then execute the function whose parameters are passed as trailets in its python equivalent
def add_catalog_from_URL(self, votable_URL, votable_options):
""" load a VOTable table from an url and load its data into the widget
Args:
votable_URL: string url
votable_options: dictionary object"""
self.votable_URL= votable_URL
self.votable_options= votable_options
self.votable_from_URL_flag= not self.votable_from_URL_flag
# Notes:
# 1 - The loaded table can possess fields tagged as 'masked', who can not be parsed by JSON
# As such, the table's columns cant be obtained through the use of table.columns,
# and the use of table.__array__() is requiered.
# 2 - It seems that the list.append() method does not work with traitlets,
# the affectation of the columns must be done at once by using a buffer.
def add_table(self, table):
""" load a VOTable -already accessible on the python side- into the widget
Args:
table: votable object"""
table_array = table.__array__()
self.table_keys= table.keys()
table_columns= []
for i in range(0,len(table.columns[0])):
row_data = []
# this step is needed in order to properly retrieve strings data
# (otherwise, Aladin Lite shows these values as DataView object)
for item in table_array[i]:
if isinstance(item, bytes):
row_data.append(item.decode('utf-8'))
else:
row_data.append(item)
table_columns.append(row_data)
self.table_columns = table_columns
self.table_flag= not self.table_flag
def add_listener(self, listener_type, callback):
""" add a listener to the widget
Args:
listener_type: string that can either be 'objectHovered' or 'objClicked'
callback: python function"""
self.listener_type= listener_type
if listener_type == 'objectHovered':
self.listener_callback_hover= callback
if listener_type == 'objectClicked':
self.listener_callback_click= callback
self.listener_flag= not self.listener_flag
# Note: the print() options end='\r'allow us to override the previous prints,
# thus only the last message will be displayed at the screen
def handle_aladin_event(self, _, content, buffers):
""" used to collect json objects that are sent by the js-side of the application by using the send() method """
if content.get('event', '').startswith('callback'):
if content.get('type') == 'objectHovered':
result= self.listener_callback_hover(content.get('data'))
if content.get('type') == 'objectClicked':
result= self.listener_callback_click(content.get('data'))
result= str(result)
for i in range(len(result),self.last_prompt_length):
result= result+' '
print(result, end='\r')
self.last_prompt_length= len(result)
|
Python
| 0
|
@@ -97,119 +97,8 @@
t)%0A%0A
-# theses library must be installed, and are used in votable operations%0A# http://www.astropy.org/%0Aimport astropy
%0A%0A%22%22
@@ -4645,32 +4645,178 @@
otable object%22%22%22
+%0A%0A # theses library must be installed, and are used in votable operations%0A # http://www.astropy.org/%0A import astropy%0A
%0A table_a
|
72902ebcada7bdc7a889f8766b63afff82110182
|
Comment about recursion limit in categories.
|
webshop/extensions/category/__init__.py
|
webshop/extensions/category/__init__.py
|
# Copyright (C) 2010-2011 Mathijs de Bruin <mathijs@mathijsfietst.nl>
#
# This file is part of django-webshop.
#
# django-webshop is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
Django-webshop, by default, contains base classes for two kinds of categories:
* Simple categories, which define a base class for products that belong to
exactly one category.
* Advanced categories, that belong to zero or more categories.
Furthermore, generic abstract base models are defined for 'normal' categories
and for nested categories, allowing for the hierarchical categorization of
products.
"""
|
Python
| 0
|
@@ -1232,11 +1232,161 @@
ducts.%0A%0A
+TODO: We want a setting allowing us to limit the nestedness of categories.%0AFor 'navigational' reasons, a number of 3 should be a reasonable default.%0A%0A
%22%22%22
|
f5fad49e0b20e54e01fe4d9ae69be0694d7878f9
|
add docstring to test setup, and move to the top
|
sale_exception_nostock/tests/test_dropshipping_skip_check.py
|
sale_exception_nostock/tests/test_dropshipping_skip_check.py
|
# Author: Leonardo Pistone
# Copyright 2014 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from openerp.tests.common import TransactionCase
class TestDropshippingSkipCheck(TransactionCase):
def test_dropshipping_sale_can_always_be_delivered(self):
self.assertIs(True, self.order_line.can_command_at_delivery_date())
def test_dropshipping_sale_does_not_affect_future_orders(self):
self.assertIs(False, self.order_line.future_orders_are_affected())
def setUp(self):
super(TestDropshippingSkipCheck, self).setUp()
source_loc = self.env['stock.location'].new({'usage': 'supplier'})
self.order_line = self.env['sale.order.line'].new()
self.order_line._get_line_location = lambda: source_loc
|
Python
| 0
|
@@ -857,152 +857,42 @@
def
-test_dropshipping_sale_can_always_be_delivered(self):%0A self.assertIs(True, self.order_line.can_command_at_delivery_date())%0A%0A def test_
+setUp(self):%0A %22%22%22Set up an
drop
@@ -903,147 +903,111 @@
ping
-_
+
sale
-_does_not_affect_future_orders(self):%0A self.assertIs(False, self.order_line.future_orders_are_affected())%0A%0A def setUp(self):
+ order line.%0A%0A To do that, mock the computed source location to be a supplier.%0A%0A %22%22%22
%0A
@@ -1258,8 +1258,291 @@
rce_loc%0A
+%0A def test_dropshipping_sale_can_always_be_delivered(self):%0A self.assertIs(True, self.order_line.can_command_at_delivery_date())%0A%0A def test_dropshipping_sale_does_not_affect_future_orders(self):%0A self.assertIs(False, self.order_line.future_orders_are_affected())%0A
|
8fb97bc0b3a22b912958974636051447170a0b02
|
Add user_account to the user profile admin as a read-only field.
|
go/base/admin.py
|
go/base/admin.py
|
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.utils.translation import ugettext_lazy as _
from go.base.models import GoUser, UserProfile, UserOrganisation
from go.base.forms import GoUserCreationForm, GoUserChangeForm
class UserProfileInline(admin.StackedInline):
model = UserProfile
fields = ('organisation', 'is_admin')
can_delete = False
class GoUserAdmin(UserAdmin):
# The forms to add and change user instances
inlines = (UserProfileInline,)
# The fields to be used in displaying the User model.
# These override the definitions on the base UserAdmin
# that reference the removed 'username' field
fieldsets = (
(None, {'fields': ('email', 'password')}),
(_('Personal info'), {'fields': ('first_name', 'last_name')}),
(_('Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',
'groups', 'user_permissions')}),
(_('Important dates'), {'fields': ('last_login', 'date_joined')}),
)
add_fieldsets = (
(None, {
'classes': ('wide',),
'fields': ('email', 'password1', 'password2')}
),
)
form = GoUserChangeForm
add_form = GoUserCreationForm
list_display = ('email', 'first_name', 'last_name', 'is_superuser',
'is_staff', 'is_active')
search_fields = ('email', 'first_name', 'last_name')
ordering = ('email',)
class UserProfileAdmin(admin.ModelAdmin):
list_display = ('user', 'organisation', 'is_admin')
admin.site.register(GoUser, GoUserAdmin)
admin.site.register(UserProfile, UserProfileAdmin)
admin.site.register(UserOrganisation)
|
Python
| 0
|
@@ -371,16 +371,72 @@
s_admin'
+, 'user_account')%0A readonly_fields = ('user_account',
)%0A ca
|
56d3db6aae71c88ff8b55bb1d173abc025be7e8c
|
Add test of a write command
|
jacquard/tests/test_cli.py
|
jacquard/tests/test_cli.py
|
import io
import unittest.mock
import contextlib
import textwrap
from jacquard.cli import main
from jacquard.storage.dummy import DummyStore
def test_smoke_cli_help():
try:
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['--help'])
except SystemExit:
pass
assert output.getvalue().startswith("usage: ")
def test_help_message_when_given_no_subcommand():
try:
output = io.StringIO()
with contextlib.redirect_stdout(output):
main([])
except SystemExit:
pass
assert output.getvalue().startswith("usage: ")
def test_run_basic_command():
config = unittest.mock.Mock()
config.storage = DummyStore('', data={
'foo': 'bar',
})
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['storage-dump'], config=config)
assert output.getvalue().strip() == textwrap.dedent("""
foo
===
'bar'
"""
).strip()
|
Python
| 0.000268
|
@@ -1002,8 +1002,355 @@
strip()%0A
+%0A%0Adef test_run_write_command():%0A config = unittest.mock.Mock()%0A config.storage = DummyStore('', data=%7B%7D)%0A%0A output = io.StringIO()%0A with contextlib.redirect_stdout(output):%0A main(%5B'set-default', 'foo', '%22bar%22'%5D, config=config)%0A%0A assert output.getvalue() == ''%0A%0A assert config.storage.data == %7B'defaults': '%7B%22foo%22: %22bar%22%7D'%7D%0A
|
ae7f8c0deaaec2cbc830113ea19f06ca6aa169c7
|
Use `persist.errors` for the goto commands again
|
goto_commands.py
|
goto_commands.py
|
import sublime
import sublime_plugin
from itertools import dropwhile, takewhile
"""
Implement typical Goto Next Previous Error Commands.
"""
class SublimeLinterGotoError(sublime_plugin.WindowCommand):
def run(self, direction='next', count=1, wrap=False):
goto(self.window.active_view(), direction, count, wrap)
STORAGE_KEY = 'SL.{vid}.region_keys'
def get_region_keys(view):
setting_key = STORAGE_KEY.format(vid=view.id())
return set(view.settings().get(setting_key) or [])
def get_highlighted_regions(view):
return [
region
for key in get_region_keys(view)
if '.Highlights.' in key
for region in view.get_regions(key)
]
def goto(view, direction, count, wrap):
cursor = view.sel()[0].begin()
regions = get_highlighted_regions(view)
if not regions:
flash(view, 'No problems')
return
# Filter regions under the cursor, bc we don't want to jump to them.
# Also filter duplicate start positions.
all_jump_positions = sorted({
region.a
for region in regions
if not region.contains(cursor)})
# Edge case: Since we filtered, it is possible we get here with nothing
# left. That is the case if we sit on the last remaining error, where we
# don't have anything to jump to and even `wrap` becomes a no-op.
if len(all_jump_positions) == 0:
flash(view, 'No more problems')
return
def before_current_pos(pos):
return pos < cursor
next_positions = dropwhile(before_current_pos, all_jump_positions)
previous_positions = takewhile(before_current_pos, all_jump_positions)
reverse = direction == 'previous'
jump_positions = list(previous_positions if reverse else next_positions)
if reverse:
jump_positions = list(reversed(jump_positions))
if not jump_positions:
if wrap:
point = all_jump_positions[-1] if reverse else all_jump_positions[0]
flash(
view,
'Jumped to {} problem'.format('last' if reverse else 'first'))
else:
flash(
view,
'No more problems {}'.format('above' if reverse else 'below'))
return
elif len(jump_positions) <= count:
# If we cannot jump wide enough, do not wrap, but jump as wide as
# possible to reduce disorientation.
point = jump_positions[-1]
else:
point = jump_positions[count - 1]
move_to(view, point)
class _sublime_linter_goto_line(sublime_plugin.TextCommand):
def run(self, edit, point):
self.view.sel().clear()
self.view.sel().add(point)
self.view.show(point)
def move_to(view, point):
window = view.window()
if view == window.active_view():
# If the region we're moving to is already visible, then we don't want
# the view to suddenly scroll. If the region is not visible, then we
# want the surrounding area of the region to be visible.
# We need to a use a custom goto line command for several reasons:
# * ST's goto line command doesn't accept a col argument.
# * SL requires that on_selection_modified events MUST be triggered for
# each move.
# See https://github.com/SublimeLinter/SublimeLinter/pull/867.
view.run_command('_sublime_linter_goto_line', {'point': point})
else:
filename = view.file_name() or "<untitled {}>".format(view.buffer_id())
line, col = view.rowcol(point)
target = "{}:{}:{}".format(filename, line + 1, col + 1)
window.open_file(target, sublime.ENCODED_POSITION)
def flash(view, msg):
window = view.window() or sublime.active_window()
window.status_message(msg)
|
Python
| 0
|
@@ -75,16 +75,43 @@
ewhile%0A%0A
+from .lint import persist%0A%0A
%0A%22%22%22%0AImp
@@ -354,372 +354,8 @@
)%0A%0A%0A
-STORAGE_KEY = 'SL.%7Bvid%7D.region_keys'%0A%0A%0Adef get_region_keys(view):%0A setting_key = STORAGE_KEY.format(vid=view.id())%0A return set(view.settings().get(setting_key) or %5B%5D)%0A%0A%0Adef get_highlighted_regions(view):%0A return %5B%0A region%0A for key in get_region_keys(view)%0A if '.Highlights.' in key%0A for region in view.get_regions(key)%0A %5D%0A%0A%0A
def
@@ -398,22 +398,19 @@
-cursor
+bid
= view.
sel(
@@ -409,68 +409,55 @@
iew.
-sel()%5B0%5D.begin
+buffer_id
()%0A
-%0A
-regions = get_highlighted_regions(view
+errors = persist.errors.get(bid
)%0A
@@ -465,22 +465,21 @@
if not
-region
+error
s:%0A
@@ -520,24 +520,60 @@
return%0A%0A
+ cursor = view.sel()%5B0%5D.begin()%0A%0A
# Filter
@@ -724,45 +724,58 @@
-region.a%0A for region in region
+error%5B'region'%5D.begin()%0A for error in error
s%0A
@@ -787,22 +787,31 @@
if not
+error%5B'
region
+'%5D
.contain
|
abc5ad8f3741335e0ee1072867dc45514efed512
|
fix W293 (blank line with whitespace)
|
src/main/python/pybuilder/plugins/python/coverage_plugin.py
|
src/main/python/pybuilder/plugins/python/coverage_plugin.py
|
# -*- coding: utf-8 -*-
#
# This file is part of PyBuilder
#
# Copyright 2011-2014 PyBuilder Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import imp
import multiprocessing
import sys
try:
from StringIO import StringIO
except ImportError as e:
from io import StringIO
from pybuilder.core import init, after, use_plugin
from pybuilder.utils import discover_modules, render_report
from pybuilder.errors import BuildFailedException
use_plugin("python.core")
use_plugin("analysis")
@init
def init_coverage_properties(project):
project.build_depends_on("coverage")
project.set_property_if_unset("coverage_threshold_warn", 70)
project.set_property_if_unset("coverage_break_build", True)
project.set_property_if_unset("coverage_reload_modules", True)
project.set_property_if_unset("coverage_exceptions", [])
project.set_property_if_unset("coverage_fork", False)
def start_coverage(coverage_module):
coverage_module.erase()
coverage_module.start()
def stop_coverage(coverage_module, project, logger):
reimport_source_modules(project, logger)
coverage_module.stop()
@after(("analyze", "verify"), only_once=True)
def verify_coverage(project, logger, reactor):
logger.info("Collecting coverage information")
if project.get_property("coverage_fork"):
logger.debug("Forking process to do coverage analysis")
process = multiprocessing.Process(target=do_coverage,
args=(project, logger, reactor))
process.start()
process.join()
else:
do_coverage(project, logger, reactor)
def do_coverage(project, logger, reactor):
import coverage
start_coverage(coverage)
project.set_property('__running_coverage', True) # tell other plugins that we are not really unit testing right now
reactor.execute_task("run_unit_tests")
project.set_property('__running_coverage', False)
stop_coverage(coverage, project, logger)
coverage_too_low = False
threshold = project.get_property("coverage_threshold_warn")
exceptions = project.get_property("coverage_exceptions")
report = {
"module_names": []
}
sum_lines = 0
sum_lines_not_covered = 0
module_names = discover_modules_to_cover(project)
modules = []
for module_name in module_names:
try:
module = sys.modules[module_name]
except KeyError:
logger.warn("Module not imported: {0}. No coverage information available.".format(module_name))
continue
modules.append(module)
module_report_data = build_module_report(coverage, module)
should_ignore_module = module_name in exceptions
if not should_ignore_module:
sum_lines += module_report_data[0]
sum_lines_not_covered += module_report_data[2]
module_report = {
"module": module_name,
"coverage": module_report_data[4],
"sum_lines": module_report_data[0],
"lines": module_report_data[1],
"sum_lines_not_covered": module_report_data[2],
"lines_not_covered": module_report_data[3],
}
report["module_names"].append(module_report)
if module_report_data[4] < threshold:
msg = "Test coverage below %2d%% for %s: %2d%%" % (threshold, module_name, module_report_data[4])
if not should_ignore_module:
logger.warn(msg)
coverage_too_low = True
else:
logger.info(msg)
if sum_lines == 0:
overall_coverage = 0
else:
overall_coverage = (sum_lines - sum_lines_not_covered) * 100 / sum_lines
report["overall_coverage"] = overall_coverage
if overall_coverage < threshold:
logger.warn("Overall coverage is below %2d%%: %2d%%", threshold, overall_coverage)
coverage_too_low = True
else:
logger.info("Overall coverage is %2d%%", overall_coverage)
project.write_report("coverage.json", render_report(report))
write_summary_report(coverage, project, modules)
if coverage_too_low and project.get_property("coverage_break_build"):
raise BuildFailedException("Test coverage for at least one module is below %d%%", threshold)
def reimport_source_modules(project, logger):
if project.get_property("coverage_reload_modules"):
modules = discover_modules_to_cover(project)
for module in modules:
logger.debug("Reloading module %s", module)
if module in sys.modules:
imp.reload(sys.modules[module])
def build_module_report(coverage_module, module):
analysis_result = coverage_module.analysis(module)
lines_total = len(analysis_result[1])
lines_not_covered = len(analysis_result[2])
lines_covered = lines_total - lines_not_covered
if lines_total == 0:
code_coverage = 100
elif lines_covered == 0:
code_coverage = 0
else:
code_coverage = lines_covered * 100 / lines_total
return (lines_total, analysis_result[1],
lines_not_covered, analysis_result[2],
code_coverage)
def write_summary_report(coverage_module, project, modules):
summary = StringIO()
coverage_module.report(modules, file=summary)
project.write_report("coverage", summary.getvalue())
summary.close()
def discover_modules_to_cover(project):
return discover_modules(project.expand_path("$dir_source_main_python"))
|
Python
| 0.000003
|
@@ -3226,24 +3226,16 @@
eptions%0A
-
%0A
|
84047b6dff25b78d51830159f67360b93c391680
|
use integral for slice
|
src/main/python/pybuilder/plugins/python/unittest_plugin.py
|
src/main/python/pybuilder/plugins/python/unittest_plugin.py
|
# -*- coding: utf-8 -*-
#
# This file is part of PyBuilder
#
# Copyright 2011-2014 PyBuilder Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
from StringIO import StringIO
except ImportError as e:
from io import StringIO
import sys
import unittest
from pybuilder.core import init, task, description, use_plugin
from pybuilder.errors import BuildFailedException
from pybuilder.utils import discover_modules_matching, render_report
from pybuilder.ci_server_interaction import test_proxy_for
from pybuilder.terminal import print_text_line
use_plugin("python.core")
if sys.version_info < (2, 7):
TextTestResult = unittest._TextTestResult # brought to you by 2.6
else:
TextTestResult = unittest.TextTestResult
class TestNameAwareTextTestRunner(unittest.TextTestRunner):
def __init__(self, logger, stream):
self.logger = logger
super(TestNameAwareTextTestRunner, self).__init__(stream=stream)
def _makeResult(self):
return TestNameAwareTestResult(self.logger, self.stream, self.descriptions, self.verbosity)
class TestNameAwareTestResult(TextTestResult):
def __init__(self, logger, stream, descriptions, verbosity):
self.test_names = []
self.failed_test_names_and_reasons = {}
self.logger = logger
super(TestNameAwareTestResult, self).__init__(stream, descriptions, verbosity)
def startTest(self, test):
self.test_names.append(test)
self.logger.debug("starting %s", test)
super(TestNameAwareTestResult, self).startTest(test)
def addError(self, test, err):
exception_type, exception, traceback = err
self.failed_test_names_and_reasons[test] = '{0}: {1}'.format(exception_type, exception).replace('\'', '')
super(TestNameAwareTestResult, self).addError(test, err)
def addFailure(self, test, err):
exception_type, exception, traceback = err
self.failed_test_names_and_reasons[test] = '{0}: {1}'.format(exception_type, exception).replace('\'', '')
super(TestNameAwareTestResult, self).addFailure(test, err)
@init
def init_test_source_directory(project):
project.set_property_if_unset("dir_source_unittest_python", "src/unittest/python")
project.set_property_if_unset("unittest_module_glob", "*_tests")
project.set_property_if_unset("unittest_file_suffix", None) # deprecated, use unittest_module_glob.
project.set_property_if_unset("unittest_test_method_prefix", None)
@task
@description("Runs unit tests based on Python's unittest module")
def run_unit_tests(project, logger):
test_dir = _register_test_and_source_path_and_return_test_dir(project, sys.path)
unittest_file_suffix = project.get_property("unittest_file_suffix")
if unittest_file_suffix is not None:
logger.warn("unittest_file_suffix is deprecated, please use unittest_module_glob")
module_glob = "*{0}".format(unittest_file_suffix)
if module_glob.endswith(".py"):
WITHOUT_DOT_PY = slice(None, -3)
module_glob = module_glob[WITHOUT_DOT_PY]
project.set_property("unittest_module_glob", module_glob)
else:
module_glob = project.get_property("unittest_module_glob")
logger.info("Executing unittest Python modules in %s", test_dir)
logger.debug("Including files matching '%s'", module_glob)
try:
test_method_prefix = project.get_property("unittest_test_method_prefix")
result, console_out = execute_tests_matching(logger, test_dir, module_glob, test_method_prefix)
if result.testsRun == 0:
logger.warn("No unittests executed.")
else:
logger.info("Executed %d unittests", result.testsRun)
write_report("unittest", project, logger, result, console_out)
if not result.wasSuccessful():
raise BuildFailedException("There were %d test error(s) and %d failure(s)"
% (len(result.errors), len(result.failures)))
logger.info("All unittests passed.")
except ImportError as e:
import traceback
_, _, import_error_traceback = sys.exc_info()
file_with_error, error_line, _, statement_causing_error = traceback.extract_tb(import_error_traceback)[-1]
logger.error("Import error in unittest file {0}, due to statement '{1}' on line {2}".format(
file_with_error, statement_causing_error, error_line))
logger.error("Error importing unittests: %s", e)
raise BuildFailedException("Unable to execute unit tests.")
def execute_tests(logger, test_source, suffix, test_method_prefix=None):
return execute_tests_matching(logger, test_source, "*{0}".format(suffix), test_method_prefix)
def execute_tests_matching(logger, test_source, file_glob, test_method_prefix=None):
output_log_file = StringIO()
try:
test_modules = discover_modules_matching(test_source, file_glob)
loader = unittest.defaultTestLoader
if test_method_prefix:
loader.testMethodPrefix = test_method_prefix
tests = loader.loadTestsFromNames(test_modules)
result = TestNameAwareTextTestRunner(logger, output_log_file).run(tests)
return result, output_log_file.getvalue()
finally:
output_log_file.close()
def _register_test_and_source_path_and_return_test_dir(project, system_path):
test_dir = project.expand_path("$dir_source_unittest_python")
system_path.insert(0, test_dir)
system_path.insert(0, project.expand_path("$dir_source_main_python"))
return test_dir
def write_report(name, project, logger, result, console_out):
project.write_report("%s" % name, console_out)
report = {"tests-run": result.testsRun,
"errors": [],
"failures": []}
for error in result.errors:
report["errors"].append({"test": error[0].id(),
"traceback": error[1]})
logger.error("Test has error: %s", error[0].id())
if project.get_property("verbose"):
print_text_line(error[1])
for failure in result.failures:
report["failures"].append({"test": failure[0].id(),
"traceback": failure[1]})
logger.error("Test failed: %s", failure[0].id())
if project.get_property("verbose"):
print_text_line(failure[1])
project.write_report("%s.json" % name, render_report(report))
report_to_ci_server(project, result)
def report_to_ci_server(project, result):
for test_name in result.test_names:
with test_proxy_for(project).and_test_name(test_name) as test:
if test_name in result.failed_test_names_and_reasons:
test.fails(result.failed_test_names_and_reasons.get(test_name))
|
Python
| 0.000001
|
@@ -3519,20 +3519,17 @@
= slice(
-None
+0
, -3)%0A
|
0cec69e607e65e75695202d755a40386c2357132
|
Version 0.1
|
tumblelog/__init__.py
|
tumblelog/__init__.py
|
__version__ = '0.9'
|
Python
| 0.000001
|
@@ -14,7 +14,7 @@
'0.
-9
+1
'%0A
|
5e507e05958e32cbf4c6b09093bbf38f00cfee24
|
ask which class the user is in
|
arithmeticQuiz.py
|
arithmeticQuiz.py
|
import random #imports the default Python random module, which allows for random number generation
import time #imports the default Python time module, which allows for pauses in the program
counter = 1 #defines the counter, which counts up to 10 each time a question is asked
score = 0 #defines the user's score so it can be counted as they get questions right
numberOne = 0 #opens the variable that stores the first number to be used in each question
numberTwo = 0 #opens the variable that stores the second number to be used in each question
operator = 0 #opens the variable that stores the number to be assigned to the operator in each question
classOne = open("classOne.txt","a")
classTwo = open("classTwo.txt","a")
classThree = open("classThree.txt","a")
name = input("What is your name user? ") #asks the user for their name so it can be used in the program.
while name == "": #while the name variable is empty, it runs the code that is entered.
time.sleep(1) #pauses the program for a second
name = input("Hello, \nWhat is your name? ") #reasks for the user's name.
print("Weclome to the quiz,",name) #welcomes the user to the quiz
time.sleep(1) #pauses the program for a second
def add (x, y): #defines the add variable
return x + y; #tells the computer what to do when the add variable is called
def subtract (x, y): #defines the subtract variable
return x - y; #tells the computer what to do when the subtract variable is called
def multiply (x, y): #defines the multiply variable
return x * y; #tells the computer what to do when the multiply variable is callled
for counter in range(0,10): #when the counter variable is in the range of 1 to 10, it runs all the indented code
counter = counter + 1 #adds one to the counter variable
numberOne = random.randint(0,11) #randomly generates a number to be used in the question
numberTwo = random.randint(0,11) #randomly generates a number to be used in the question
operator = random.randint(1,3) #randomly generates a number to be used in the question
if operator == 1: #when the operator variable is 1, it runs the indented code
print("Question",counter,"What is ",numberOne,"+",numberTwo,) #prints the question using the add variable
time.sleep(1) #pauses the program for a second
ans = add(numberOne, numberTwo) #defines the question for the computer so it can be compared to the user's input
elif operator == 2: #when the operator varialbe is 2, it runs the indented code
print("Question",counter,"What is ",numberOne,"-",numberTwo,) #prints the question using the subtract variable
time.sleep(1) #pauses the program for a second
ans = subtract(numberOne, numberTwo) #defines the question for the computer so it can be compared to the user's input
else: #when the operator varialbe is 3, it runs the indented code
print("Question",counter,"What is ",numberOne,"*",numberTwo) #prints the question using the multiply variable
time.sleep(1) #pauses the program for a second
ans = multiply(numberOne, numberTwo) #defines the question for the computer so it can be compared to the user's input
res = int(input("Write your answer here: ")) #asks for the answer for the user so it can be checked to see if it is the right answer.
if res == ans: #when the result is the same as the answer, the indented code runs
print("Congratulations,",name,"! 1 point scored.") #prints a congratulations message to the user.
time.sleep(1) #pauses the program for a second
score = score + 1 #adds one to the score variable
else: #when the result isn't the same as the answer, the indented code runs
print("Unlucky",name,"! No points scored.") #prints an unluck message to the user
time.sleep(1) #pauses the program for a second
print("That's it! You've completed the quiz.\nYour total socore was...",score,"Well done,",name) #concludes the program, printing the final score
if score <= 5: #when the score variable is less than or equal to 5, the indented code runs
print("Do some practice and have another go.") #prints an encouraging message to the user
elif score < 7: #when the score variable is less than 7, the indented code runs
print("You're almost there! have another try.") #prints an 'almost there' message
else: #when the score is more than 7, the score is more than 7, the indented code runs
print("You've done very well!") #prints a congratulations message
|
Python
| 0.999622
|
@@ -901,16 +901,104 @@
program.
+%0AwhichClass = input(%22And which class are you in, Class One, Class Two or Class Three? %22)
%0A%0Awhile
|
1d31282a9781e8eef4aafc0549c01056d4fc03d0
|
Bump version.
|
armet/_version.py
|
armet/_version.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import, division
__version_info__ = (0, 4, 21)
__version__ = '.'.join(map(str, __version_info__))
|
Python
| 0
|
@@ -116,9 +116,9 @@
4, 2
-1
+2
)%0A__
|
cec594e525fb889029b85b1f92f89170ca330332
|
Remove unnecessary "is not supported" verbiage.
|
zerver/webhooks/trello/view/__init__.py
|
zerver/webhooks/trello/view/__init__.py
|
# Webhooks for external integrations.
from typing import Any, Mapping, Optional, Tuple
import orjson
from django.http import HttpRequest, HttpResponse
from zerver.decorator import api_key_only_webhook_view, return_success_on_head_request
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_success
from zerver.lib.webhooks.common import UnexpectedWebhookEventType, check_send_webhook_message
from zerver.models import UserProfile
from .board_actions import SUPPORTED_BOARD_ACTIONS, process_board_action
from .card_actions import IGNORED_CARD_ACTIONS, SUPPORTED_CARD_ACTIONS, process_card_action
@api_key_only_webhook_view('Trello')
@return_success_on_head_request
@has_request_variables
def api_trello_webhook(request: HttpRequest,
user_profile: UserProfile,
payload: Mapping[str, Any]=REQ(argument_type='body')) -> HttpResponse:
payload = orjson.loads(request.body)
action_type = payload['action'].get('type')
message = get_subject_and_body(payload, action_type)
if message is None:
return json_success()
else:
subject, body = message
check_send_webhook_message(request, user_profile, subject, body)
return json_success()
def get_subject_and_body(payload: Mapping[str, Any], action_type: str) -> Optional[Tuple[str, str]]:
if action_type in SUPPORTED_CARD_ACTIONS:
return process_card_action(payload, action_type)
if action_type in IGNORED_CARD_ACTIONS:
return None
if action_type in SUPPORTED_BOARD_ACTIONS:
return process_board_action(payload, action_type)
raise UnexpectedWebhookEventType("Trello", f'{action_type} is not supported')
|
Python
| 0
|
@@ -1681,11 +1681,8 @@
o%22,
-f'%7B
acti
@@ -1692,25 +1692,6 @@
type
-%7D is not supported'
)%0A
|
04605ee82108695989e8f10b2287d43f6df448f8
|
Update noisy_linear.py
|
chainerrl/links/noisy_linear.py
|
chainerrl/links/noisy_linear.py
|
import chainer
import chainer.functions as F
from chainer.initializers import Constant
import chainer.links as L
import numpy
from chainerrl.initializers import VarianceScalingConstant
class FactorizedNoisyLinear(chainer.Chain):
"""Linear layer in Factorized Noisy Network
Args:
mu_link (L.Linear): Linear link that computes mean of output.
sigma_scale (float): The hyperparameter sigma_0 in the original paper.
Scaling factor of the initial weights of noise-scaling parameters.
"""
def __init__(self, mu_link, sigma_scale=0.4):
super(FactorizedNoisyLinear, self).__init__()
self.out_size = mu_link.out_size
self.nobias = not ('/b' in [name for name, _ in mu_link.namedparams()])
W_data = mu_link.W.data
in_size = None if W_data is None else W_data.shape[1]
with self.init_scope():
self.mu = mu_link
self.sigma = L.Linear(
in_size=in_size, out_size=self.out_size, nobias=self.nobias,
initialW=VarianceScalingConstant(sigma_scale),
initial_bias=Constant(sigma_scale))
device_id = self.mu._device_id
if device_id is not None:
self.to_gpu(device_id)
def _eps(self, shape, dtype):
xp = self.xp
r = xp.random.standard_normal(shape).astype(dtype)
# apply the function f
return xp.copysign(xp.sqrt(xp.abs(r)), r)
def __call__(self, x):
if self.mu.W.data is None:
self.mu.W.initialize((self.out_size, numpy.prod(x.shape[1:])))
if self.sigma.W.data is None:
self.sigma.W.initialize((self.out_size, numpy.prod(x.shape[1:])))
# use info of sigma.W to avoid strange error messages
dtype = self.sigma.W.dtype
out_size, in_size = self.sigma.W.shape
eps_x = self._eps(in_size, dtype)
eps_y = self._eps(out_size, dtype)
W = self.mu.W + self.sigma.W * self.xp.outer(eps_y, eps_x)
if self.nobias:
return F.linear(x, W)
else:
b = self.mu.b + self.sigma.b * eps_y
return F.linear(x, W, b)
|
Python
| 0
|
@@ -904,16 +904,302 @@
mu_link%0A
+ self.mu.W.initializer = Uniform(1 / numpy.sqrt(in_size)) %0A if not self.nobias:%0A self.mu.b.initializer = Uniform(1 / numpy.sqrt(in_size))%0A%0A self.mu.W.initialize((self.out_size, in_size))%0A self.mu.b.initialize((self.out_size))%0A
@@ -1414,16 +1414,44 @@
ma_scale
+ / numpy.sqrt(self.out_size)
))%0A%0A
|
a6ee84d105943628a66685fb5425cb00a45ca2e1
|
Update feed2zinnia.py
|
zinnia/management/commands/feed2zinnia.py
|
zinnia/management/commands/feed2zinnia.py
|
"""Feed to Zinnia command module"""
import os
import sys
from urllib2 import urlopen
from datetime import datetime
from optparse import make_option
from django.conf import settings
from django.utils import timezone
from django.core.files import File
from django.utils.text import Truncator
from django.utils.html import strip_tags
from django.db.utils import IntegrityError
from django.utils.encoding import smart_str
from django.contrib.sites.models import Site
from django.template.defaultfilters import slugify
from django.core.management.base import CommandError
from django.core.management.base import LabelCommand
from django.core.files.temp import NamedTemporaryFile
from zinnia import __version__
from zinnia.models.entry import Entry
from zinnia.models.author import Author
from zinnia.models.category import Category
from zinnia.managers import PUBLISHED
from zinnia.signals import disconnect_entry_signals
from zinnia.signals import disconnect_discussion_signals
class Command(LabelCommand):
"""Command object for importing a RSS or Atom
feed into Zinnia."""
help = 'Import a RSS or Atom feed into Zinnia.'
label = 'feed url'
args = 'url'
option_list = LabelCommand.option_list + (
make_option('--no-auto-excerpt', action='store_false',
dest='auto-excerpt', default=True,
help='Do NOT generate an excerpt if not present.'),
make_option('--no-enclosure', action='store_false',
dest='image-enclosure', default=True,
help='Do NOT save image enclosure if present.'),
make_option('--no-tags', action='store_false',
dest='tags', default=True,
help='Do NOT store categories as tags'),
make_option('--author', dest='author', default='',
help='All imported entries belong to specified author'))
SITE = Site.objects.get_current()
def __init__(self):
"""Init the Command and add custom styles"""
super(Command, self).__init__()
self.style.TITLE = self.style.SQL_FIELD
self.style.STEP = self.style.SQL_COLTYPE
self.style.ITEM = self.style.HTTP_INFO
disconnect_entry_signals()
disconnect_discussion_signals()
def write_out(self, message, verbosity_level=1):
"""Convenient method for outputing"""
if self.verbosity and self.verbosity >= verbosity_level:
sys.stdout.write(smart_str(message))
sys.stdout.flush()
def handle_label(self, url, **options):
try:
import feedparser
except ImportError:
raise CommandError('You need to install the feedparser '
'module to run this command.')
self.tags = options.get('tags', True)
self.default_author = options.get('author')
self.verbosity = int(options.get('verbosity', 1))
self.auto_excerpt = options.get('auto-excerpt', True)
self.image_enclosure = options.get('image-enclosure', True)
if self.default_author:
try:
self.default_author = Author.objects.get(
username=self.default_author)
except Author.DoesNotExist:
raise CommandError('Invalid username for default author')
self.write_out(self.style.TITLE(
'Starting importation of %s to Zinnia %s:\n' % (url, __version__)))
feed = feedparser.parse(url)
self.import_entries(feed.entries)
def import_entries(self, feed_entries):
"""Import entries"""
for feed_entry in feed_entries:
self.write_out('> %s... ' % feed_entry.title)
if feed_entry.get('publised_parsed'):
creation_date = datetime(*feed_entry.published_parsed[:6])
if settings.USE_TZ:
creation_date = timezone.make_aware(
creation_date, timezone.utc)
else:
creation_date = timezone.now()
slug = slugify(feed_entry.title)[:255]
if Entry.objects.filter(creation_date__year=creation_date.year,
creation_date__month=creation_date.month,
creation_date__day=creation_date.day,
slug=slug):
self.write_out(self.style.NOTICE(
'SKIPPED (already imported)\n'))
continue
categories = self.import_categories(feed_entry)
entry_dict = {'title': feed_entry.title[:255],
'content': feed_entry.description,
'excerpt': feed_entry.get('summary'),
'status': PUBLISHED,
'creation_date': creation_date,
'start_publication': creation_date,
'last_update': timezone.now(),
'slug': slug}
if not entry_dict['excerpt'] and self.auto_excerpt:
entry_dict['excerpt'] = Truncator(
strip_tags(feed_entry.description)).words(50)
if self.tags:
entry_dict['tags'] = self.import_tags(categories)
entry = Entry(**entry_dict)
entry.save()
entry.categories.add(*categories)
entry.sites.add(self.SITE)
if self.image_enclosure:
for enclosure in feed_entry.enclosures:
if ('image' in enclosure.get('type') and
enclosure.get('href')):
img_tmp = NamedTemporaryFile(delete=True)
img_tmp.write(urlopen(enclosure['href']).read())
img_tmp.flush()
entry.image.save(os.path.basename(enclosure['href']),
File(img_tmp))
break
if self.default_author:
entry.authors.add(self.default_author)
elif feed_entry.get('author_detail'):
try:
author = Author.objects.create_user(
slugify(feed_entry.author_detail.get('name')),
feed_entry.author_detail.get('email', ''))
except IntegrityError:
author = Author.objects.get(
username=slugify(feed_entry.author_detail.get('name')))
entry.authors.add(author)
self.write_out(self.style.ITEM('OK\n'))
def import_categories(self, feed_entry):
categories = []
for cat in feed_entry.get('tags', ''):
category, created = Category.objects.get_or_create(
slug=slugify(cat.term), defaults={'title': cat.term})
categories.append(category)
return categories
def import_tags(self, categories):
tags = []
for cat in categories:
if len(cat.title.split()) > 1:
tags.append('"%s"' % slugify(cat.title).replace('-', ' '))
else:
tags.append(slugify(cat.title).replace('-', ' '))
return ', '.join(tags)
|
Python
| 0.000006
|
@@ -3727,16 +3727,17 @@
('publis
+h
ed_parse
|
65099acbb8799e3d3beeb8aee11f46053945219b
|
handle freestyle-type projects
|
jenkins_job_wrecker/cli.py
|
jenkins_job_wrecker/cli.py
|
import argparse
from argparse import ArgumentDefaultsHelpFormatter
import errno
import logging
import jenkins
import os
import sys
import textwrap
import jenkins_job_wrecker.job_handlers as job_handlers
import xml.etree.ElementTree as ET
from yaml import dump
logging.basicConfig(level=logging.INFO)
log = logging.getLogger('jjwrecker')
# set the format of the standard StreamHandler to have a space in it
# (on the root logger)
for handler in logging.getLogger().handlers:
if isinstance(handler, logging.StreamHandler):
handler.setFormatter(logging.Formatter(fmt='%(name)s %(levelname)s: %(message)s'))
# Given a file with XML, or a string of XML, parse it with
# xml.etree.ElementTree and return the XML tree root.
def get_xml_root(filename=False, string=False):
if filename == False and string == False:
raise TypeError('specify a filename or string argument')
if filename:
tree = ET.parse(filename)
return tree.getroot()
if string:
return ET.fromstring(string)
# Walk an XML ElementTree ("root"), and return a YAML string
def root_to_yaml(root, name):
# Top-level "job" data
job = {}
build = [{'job': job}]
job['name'] = name
# "project-type:" YAML
project_types = {
'matrix-project': 'matrix'}
if root.tag not in project_types:
raise NotImplementedError('Cannot handle "%s"-type projects' % root.tag)
job['project-type'] = project_types[root.tag]
# Handle each top-level XML element with custom "handle_*" functions in
# job_handlers.py.
for child in root:
handler_name = 'handle_%s' % child.tag.lower()
try:
handler = getattr(job_handlers, handler_name)
except AttributeError:
# Show our YAML translation so far:
print dump(build, default_flow_style=False)
# ... and report what still needs to be done:
raise NotImplementedError("write a function for %s" % handler_name)
try:
settings = handler(child)
if settings is not None:
for setting in settings:
key, value = setting
job[key] = value
except Exception, e:
print 'last called %s' % handler_name
raise
return dump(build, default_flow_style=False)
# argparse foo
def parse_args(args):
parser = argparse.ArgumentParser(
description='Input XML, output YAML.',
epilog=textwrap.dedent('''
Examples:
jjwrecker -f ice-tools.xml
'''),
formatter_class=ArgumentDefaultsHelpFormatter)
parser.add_argument(
'-f', '--filename',
help='XML file to translate'
)
parser.add_argument(
'-s', '--jenkins-server',
help='Jenkins server to query'
)
parser.add_argument(
'-n', '--name',
help='Name of a job'
)
parser.add_argument(
'-v', '--verbose',
action='store_true', default=None,
help='show more output on the console'
)
return parser.parse_args(args)
def main():
args = parse_args(sys.argv[1:])
if args.verbose:
log.setLevel(logging.DEBUG)
# Options:
# -f and -n
# -s and -n
# TODO: -s (without -n means "all jobs on the server")
# Choose either -f or -j ...
if not args.jenkins_server and not args.filename:
log.critical('Choose an XML file (-f) or Jenkins URL (-j).')
exit(1)
# ... but not both -f and -j.
if args.jenkins_server and args.filename:
log.critical('Choose either an XML file (-f) or Jenkins URL (-j).')
exit(1)
# -f requires -n
if args.filename and not args.name:
log.critical('Choose a job name (-n) for the job in this file.')
exit(1)
# Args are ok. Proceed with writing output
try:
os.mkdir('output')
# We don't care if "output" dir already exists.
except OSError as exception:
if exception.errno != errno.EEXIST:
raise
if args.filename:
# Convert to YAML
root = get_xml_root(filename=args.filename)
yaml = root_to_yaml(root, args.name)
# write yaml string to file (job-name.yml)
yaml_filename = os.path.join('output', args.name + '.yml')
output_file = open(yaml_filename, 'w')
output_file.write(yaml)
output_file.close()
# -s requires -n
if args.jenkins_server:
# 'http://jenkins-calamari.front.sepia.ceph.com:8080'
server = jenkins.Jenkins(args.jenkins_server)
if args.name:
job_names = [args.name]
else:
job_names = []
for job in server.get_jobs():
job_names.append(job['name'])
# write YAML
for name in job_names:
log.info('looking up job "%s"' % name)
# Get a job's XML
xml = server.get_job_config(name)
log.debug(xml)
# Convert XML to YAML
root = get_xml_root(string=xml)
log.info('converting job "%s" to YAML' % name)
yaml = root_to_yaml(root, name)
# write yaml string to file (job-name.yml)
yaml_filename = os.path.join('output', name + '.yml')
output_file = open(yaml_filename, 'w')
output_file.write(yaml)
output_file.close()
|
Python
| 0
|
@@ -1251,16 +1251,48 @@
pes = %7B%0A
+ 'project': 'freestyle',%0A
|
67c40fff7813b91b874c5fada042bfc0c6990d52
|
Bump version
|
typepy/__version__.py
|
typepy/__version__.py
|
# encoding: utf-8
from datetime import datetime
__author__ = "Tsuyoshi Hombashi"
__copyright__ = "Copyright 2017-{}, {}".format(datetime.now().year, __author__)
__license__ = "MIT License"
__version__ = "0.3.1"
__maintainer__ = __author__
__email__ = "tsuyoshi.hombashi@gmail.com"
|
Python
| 0
|
@@ -204,17 +204,17 @@
= %220.3.
-1
+2
%22%0A__main
|
a9baa24fda1ee3689acfa757d7e3b9ef1cc17968
|
Improve naming and attrs of hostnameless Huawei LTE device tracker entities (#29281)
|
homeassistant/components/huawei_lte/device_tracker.py
|
homeassistant/components/huawei_lte/device_tracker.py
|
"""Support for device tracking of Huawei LTE routers."""
import logging
import re
from typing import Any, Dict, Set
import attr
from stringcase import snakecase
from homeassistant.components.device_tracker import (
DOMAIN as DEVICE_TRACKER_DOMAIN,
SOURCE_TYPE_ROUTER,
)
from homeassistant.components.device_tracker.config_entry import ScannerEntity
from homeassistant.const import CONF_URL
from homeassistant.helpers import entity_registry
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from . import HuaweiLteBaseEntity
from .const import DOMAIN, KEY_WLAN_HOST_LIST, UPDATE_SIGNAL
_LOGGER = logging.getLogger(__name__)
_DEVICE_SCAN = f"{DEVICE_TRACKER_DOMAIN}/device_scan"
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up from config entry."""
# Grab hosts list once to examine whether the initial fetch has got some data for
# us, i.e. if wlan host list is supported. Only set up a subscription and proceed
# with adding and tracking entities if it is.
router = hass.data[DOMAIN].routers[config_entry.data[CONF_URL]]
try:
_ = router.data[KEY_WLAN_HOST_LIST]["Hosts"]["Host"]
except KeyError:
_LOGGER.debug("%s[%s][%s] not in data", KEY_WLAN_HOST_LIST, "Hosts", "Host")
return
# Initialize already tracked entities
tracked: Set[str] = set()
registry = await entity_registry.async_get_registry(hass)
for entity in registry.entities.values():
if (
entity.domain == DEVICE_TRACKER_DOMAIN
and entity.config_entry_id == config_entry.entry_id
):
tracked.add(entity.unique_id)
async_add_new_entities(hass, router.url, async_add_entities, tracked, True)
# Tell parent router to poll hosts list to gather new devices
router.subscriptions[KEY_WLAN_HOST_LIST].add(_DEVICE_SCAN)
async def _async_maybe_add_new_entities(url: str) -> None:
"""Add new entities if the update signal comes from our router."""
if url == router.url:
async_add_new_entities(hass, url, async_add_entities, tracked)
# Register to handle router data updates
disconnect_dispatcher = async_dispatcher_connect(
hass, UPDATE_SIGNAL, _async_maybe_add_new_entities
)
router.unload_handlers.append(disconnect_dispatcher)
# Add new entities from initial scan
async_add_new_entities(hass, router.url, async_add_entities, tracked)
def async_add_new_entities(
hass, router_url, async_add_entities, tracked, included: bool = False
):
"""Add new entities.
:param included: if True, setup only items in tracked, and vice versa
"""
router = hass.data[DOMAIN].routers[router_url]
try:
hosts = router.data[KEY_WLAN_HOST_LIST]["Hosts"]["Host"]
except KeyError:
_LOGGER.debug("%s[%s][%s] not in data", KEY_WLAN_HOST_LIST, "Hosts", "Host")
return
new_entities = []
for host in (x for x in hosts if x.get("MacAddress")):
entity = HuaweiLteScannerEntity(router, host["MacAddress"])
tracking = entity.unique_id in tracked
if tracking != included:
continue
tracked.add(entity.unique_id)
new_entities.append(entity)
async_add_entities(new_entities, True)
def _better_snakecase(text: str) -> str:
if text == text.upper():
# All uppercase to all lowercase to get http for HTTP, not h_t_t_p
text = text.lower()
else:
# Three or more consecutive uppercase with middle part lowercased
# to get http_response for HTTPResponse, not h_t_t_p_response
text = re.sub(
r"([A-Z])([A-Z]+)([A-Z](?:[^A-Z]|$))",
lambda match: f"{match.group(1)}{match.group(2).lower()}{match.group(3)}",
text,
)
return snakecase(text)
@attr.s
class HuaweiLteScannerEntity(HuaweiLteBaseEntity, ScannerEntity):
"""Huawei LTE router scanner entity."""
mac: str = attr.ib()
_is_connected: bool = attr.ib(init=False, default=False)
_name: str = attr.ib(init=False, default="device")
_device_state_attributes: Dict[str, Any] = attr.ib(init=False, factory=dict)
@property
def _entity_name(self) -> str:
return self._name
@property
def _device_unique_id(self) -> str:
return self.mac
@property
def source_type(self) -> str:
"""Return SOURCE_TYPE_ROUTER."""
return SOURCE_TYPE_ROUTER
@property
def is_connected(self) -> bool:
"""Get whether the entity is connected."""
return self._is_connected
@property
def device_state_attributes(self) -> Dict[str, Any]:
"""Get additional attributes related to entity state."""
return self._device_state_attributes
async def async_update(self) -> None:
"""Update state."""
hosts = self.router.data[KEY_WLAN_HOST_LIST]["Hosts"]["Host"]
host = next((x for x in hosts if x.get("MacAddress") == self.mac), None)
self._is_connected = host is not None
if self._is_connected:
self._name = host.get("HostName", self.mac)
self._device_state_attributes = {
_better_snakecase(k): v
for k, v in host.items()
if k not in ("MacAddress", "HostName")
}
def get_scanner(*args, **kwargs): # pylint: disable=useless-return
"""Old no longer used way to set up Huawei LTE device tracker."""
_LOGGER.warning(
"Loading and configuring as a platform is no longer supported or "
"required, convert to enabling/disabling available entities"
)
return None
|
Python
| 0
|
@@ -5057,24 +5057,87 @@
_connected:%0A
+ # HostName may be present with explicit None value%0A
@@ -5168,17 +5168,20 @@
ostName%22
-,
+) or
self.ma
@@ -5181,17 +5181,16 @@
self.mac
-)
%0A
@@ -5267,32 +5267,16 @@
se(k): v
-%0A
for k,
@@ -5296,51 +5296,16 @@
ms()
-%0A if k not in (%22MacAddress%22,
+ if k !=
%22Ho
@@ -5311,17 +5311,16 @@
ostName%22
-)
%0A
|
fa9e5956adadd20d546129b07bf4b71772cd5b05
|
make small optimization to vector multiply/divide
|
pyschool/static/external/brython/Lib/site-packages/glow/vector.py
|
pyschool/static/external/brython/Lib/site-packages/glow/vector.py
|
from javascript import JSConstructor, console
class vec:
def __init__(self, x=0, y=0, z=0):
self._vec=JSConstructor(glowscript.vec)(x,y,z)
self.add=self.__add__
self.sub=self.__sub__
self.multiply=self.__mul__
self.divide=self.__truediv__=self.__div__
#vec should be a glowscript vec, not an instance of this class
def _set_vec(self, vec):
self._vec=vec
@property
def x(self):
return self._vec.x
@x.setter
def x(self, value):
self._vec.x=value
@property
def y(self):
return self._vec.y
@y.setter
def y(self, value):
self._vec.y=value
@property
def z(self):
return self._vec.z
@z.setter
def z(self, value):
self._vec.z=value
def __add__(self, other):
if isinstance(other, vec):
_v=vec()
_v._set_vec(self._vec.add(other._vec))
return _v
raise ImplementationError("addition of vec and %s not implemented yet" % type(other))
def __sub__(self, other):
if isinstance(other, vec):
_v=vec()
_v._set_vec(self._vec.sub(other._vec))
return _v
raise ImplementationError("subtraction of vec and %s not is implemented yet" % type(other))
def __mul__(self, other):
if isinstance(other, int) or isinstance(other, float):
_v=vec()
_v._set_vec(self._vec.multiply(other))
return _v
raise ImplementationError("multiplication of vec and %s is not implemented yet" % type(other))
def __div__(self, other):
if isinstance(other, int) or isinstance(other, float):
_v=vec()
_v._set_vec(self._vec.divide(other))
return _v
raise ImplementationError("division of vec and %s is not implemented yet" % type(other))
def __eq__(self, other):
return self._vec.equals(other._vec)
def __repr__(self):
return self._vec.toString()
def __str__(self):
return self._vec.toString()
def comp(self, other):
return self._vec.comp(other._vec)
def cross(self, other):
return self._vec.cross(other._vec)
def diff_angle(self, other):
return self._vec.diff_angle(other._vec)
def dot(self):
return self._vec.dot()
def mag(self):
return self._vec.mag()
def mag2(self):
return self._vec.mag2()
def norm(self):
_v=vec()
_v._set_vec(self._vec.norm())
return _v
def proj(self, other):
_v=vec()
_v._set_vec(self._vec.proj(other._vec))
return _v
def random(self):
_v = vec()
_v._set_vec(self._vec.random())
return _v
def rotate(self, **kwargs):
_v = vec()
_v._set_vec(self._vec.rotate(kwargs))
return _v
    def to_glowscript(self):
        """Expose the underlying glowscript vector (escape hatch for interop)."""
        return self._vec
|
Python
| 0.000001
|
@@ -1277,40 +1277,20 @@
er,
+(
int
-) or isinstance(other
, float)
:%0A
@@ -1277,32 +1277,33 @@
er, (int, float)
+)
:%0A _v=ve
@@ -1536,40 +1536,20 @@
er,
+(
int
-) or isinstance(other
, float)
:%0A
@@ -1544,16 +1544,17 @@
, float)
+)
:%0A
|
e6f2493e2a4c00307854fb52809123b0ba1a8bce
|
fix error message formatting
|
invenio_stats/cli.py
|
invenio_stats/cli.py
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2018 CERN.
#
# Invenio is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Aggregation classes."""
from __future__ import absolute_import, print_function
from functools import wraps
import click
from dateutil.parser import parse as dateutil_parse
from flask.cli import with_appcontext
from werkzeug.local import LocalProxy
from .proxies import current_stats
from .tasks import aggregate_events, process_events
def lazy_result(f):
    """Decorate function to return LazyProxy."""
    @wraps(f)
    def decorated(ctx, param, value):
        # Defer the real callback until the proxy is first dereferenced.
        evaluate = lambda: f(ctx, param, value)
        return LocalProxy(evaluate)
    return decorated
@lazy_result
def _validate_event_type(ctx, param, value):
    """Click callback validating that every requested event type is enabled.

    Raises:
        click.BadParameter: if *value* contains unknown event types.
    """
    invalid_values = set(value) - set(current_stats.enabled_events)
    if invalid_values:
        # Join into readable comma-separated lists instead of printing the
        # sets' repr (e.g. "set([...])") in the error message.
        raise click.BadParameter(
            'Invalid event type(s): {}. Valid values: {}'.format(
                ', '.join(invalid_values),
                ', '.join(current_stats.enabled_events)))
    return value
def _parse_date(ctx, param, value):
if value:
return dateutil_parse(value)
@lazy_result
def _validate_aggregation_type(ctx, param, value):
    """Click callback validating that every requested aggregation is enabled.

    Raises:
        click.BadParameter: if *value* contains unknown aggregation types.
    """
    invalid_values = set(value) - set(current_stats.enabled_aggregations)
    if invalid_values:
        # Join into readable comma-separated lists instead of printing the
        # sets' repr in the error message.
        raise click.BadParameter(
            'Invalid aggregation type(s): {}. Valid values: {}'.format(
                ', '.join(invalid_values),
                ', '.join(current_stats.enabled_aggregations)))
    return value
# Reusable click argument: one or more aggregation type names, lazily validated.
aggr_arg = click.argument(
    'aggregation-types', nargs=-1, callback=_validate_aggregation_type)
# Top-level "stats" command group; subcommands are attached below.
@click.group()
def stats():
    """Statistics commands."""
@stats.group()
def events():
    """Event management commands."""
@events.command('process')
@click.argument('event-types', nargs=-1, callback=_validate_event_type)
@click.option('--eager', '-e', is_flag=True)
@with_appcontext
def _events_process(event_types=None, eager=False):
    """Process stats events."""
    # Default to every enabled event type when none were requested.
    selected = event_types or list(current_stats.enabled_events)
    if not eager:
        process_events.delay(selected)
        click.secho('Events processing task sent...', fg='yellow')
        return
    process_events.apply((selected,), throw=True)
    click.secho('Events processed successfully.', fg='green')
# Subgroup holding all aggregation-related commands.
@stats.group()
def aggregations():
    """Aggregation management commands."""
@aggregations.command('process')
@aggr_arg
@click.option('--start-date', callback=_parse_date)
@click.option('--end-date', callback=_parse_date)
@click.option('--update-bookmark', '-b', is_flag=True)
@click.option('--eager', '-e', is_flag=True)
@with_appcontext
def _aggregations_process(aggregation_types=None,
                          start_date=None, end_date=None,
                          update_bookmark=False, eager=False):
    """Process stats aggregations."""
    aggregation_types = (aggregation_types or
                         list(current_stats.enabled_aggregations))
    if eager:
        aggregate_events.apply(
            (aggregation_types,),
            dict(start_date=start_date, end_date=end_date,
                 update_bookmark=update_bookmark),
            throw=True)
        click.secho('Aggregations processed successfully.', fg='green')
    else:
        # Bug fix: the async branch silently dropped --update-bookmark;
        # forward it so eager and delayed runs behave identically.
        aggregate_events.delay(
            aggregation_types, start_date=start_date, end_date=end_date,
            update_bookmark=update_bookmark)
        click.secho('Aggregations processing task sent...', fg='yellow')
@aggregations.command('delete')
@aggr_arg
@click.option('--start-date', callback=_parse_date)
@click.option('--end-date', callback=_parse_date)
@click.confirmation_option(
    prompt='Are you sure you want to delete aggregations?')
@with_appcontext
def _aggregations_delete(aggregation_types=None,
                         start_date=None, end_date=None):
    """Delete computed aggregations."""
    selected = (aggregation_types or
                list(current_stats.enabled_aggregations))
    for name in selected:
        entry = current_stats.aggregations[name]
        # Build the configured aggregator, then delete its date range.
        aggregator = entry.aggregator_class(
            name=entry.name, **entry.aggregator_config)
        aggregator.delete(start_date, end_date)
@aggregations.command('list-bookmarks')
@aggr_arg
@click.option('--start-date', callback=_parse_date)
@click.option('--end-date', callback=_parse_date)
@click.option('--limit', '-n', default=5)
@with_appcontext
def _aggregations_list_bookmarks(aggregation_types=None,
                                 start_date=None, end_date=None, limit=None):
    """List aggregation bookmarks."""
    selected = (aggregation_types or
                list(current_stats.enabled_aggregations))
    for name in selected:
        entry = current_stats.aggregations[name]
        aggregator = entry.aggregator_class(
            name=entry.name, **entry.aggregator_config)
        # Print one header per aggregation, then its bookmark dates.
        click.echo('{}:'.format(name))
        for bookmark in aggregator.list_bookmarks(start_date, end_date, limit):
            click.echo(' - {}'.format(bookmark.date))
|
Python
| 0.000001
|
@@ -1766,32 +1766,42 @@
+', '.join(
invalid_values,
@@ -1790,37 +1790,60 @@
n(invalid_values
-, set
+),%0A ', '.join
(current_stats.e
@@ -2246,24 +2246,34 @@
+', '.join(
invalid_valu
@@ -2278,13 +2278,36 @@
lues
-, set
+),%0A ', '.join
(cur
|
835da41ffd1433c36bdc585a3154434c60bdbb8f
|
Fix lint
|
biggraphite/drivers/_utils.py
|
biggraphite/drivers/_utils.py
|
#!/usr/bin/env python
# Copyright 2016 Criteo
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Functions currently used by the Cassandra driver but not specific to it."""
from __future__ import absolute_import
from __future__ import print_function
import threading
try:
from opencensus.trace import execution_context
except ImportError:
execution_context = None
from biggraphite import accessor as bg_accessor
class Error(bg_accessor.Error):
    """Base class for all exceptions from this module."""
class CountDown(object):
    """Decrements a count, calls a callback when it reaches 0.

    This is used to wait for queries to complete without storing & sorting
    their results.
    """

    __slots__ = ("_canceled", "count", "_lock", "_on_zero")

    def __init__(self, count, on_zero):
        """Record parameters.

        Args:
          count: The integer that will be decremented, must be > 0
          on_zero: called once count reaches zero, see decrement
        """
        assert count > 0
        self.count = count
        self._canceled = False
        self._lock = threading.Lock()
        self._on_zero = on_zero

    def cancel(self, reason):
        """Call the callback now with reason as argument."""
        with self._lock:
            if not self._canceled:
                self._canceled = True
                self._on_zero(reason)

    def decrement(self):
        """Call the callback if count reached zero, with None as argument."""
        with self._lock:
            self.count -= 1
            # A canceled countdown never fires again; otherwise fire exactly
            # when the count hits zero.
            if not self._canceled and self.count == 0:
                self._on_zero(None)

    def on_result(self, unused_result):
        """Call decrement(), suitable for Cassandra's execute_async."""
        self.decrement()

    def on_failure(self, exc):
        """Call cancel(), suitable for Cassandra's execute_async."""
        self.cancel(Error(exc))
def list_from_str(value):
    """Convert a comma separated string into a list.

    Args:
      value: str or list or set (or any falsy value).

    Returns:
      list: a list of values.

    Raises:
      Error: if *value* is truthy but of an unsupported type.
    """
    # isinstance instead of `type(...) is` so subclasses behave the same.
    if isinstance(value, str) and value:
        return [s.strip() for s in value.split(",")]
    if isinstance(value, (list, set)):
        # Covers both the non-empty and the empty list/set cases.
        return list(value)
    if not value:
        # None, "", 0, etc. all normalize to an empty list (as before).
        return []
    raise Error("Unknown type for '%s'" % (value,))
def bool_from_str(value):
    """Convert a user-specified string to a bool.

    Bug fix: the previous version returned the *strings* "True"/"False"
    unchanged, so "False" was truthy downstream. Return real booleans.
    Any other value falls through to its string form, as before.
    """
    if value == "True":
        return True
    elif value == "False":
        return False
    elif type(value) is bool:
        return value
    return str(value)
def trace_accessor_func(func):
    """Decorator for tracing of functions.

    Wraps accessor methods in an opencensus span named
    "<module>.<function>". If opencensus is not importable
    (execution_context is None), the function is returned untouched.
    """
    if not execution_context:
        return func

    def tracer(self, *args, **kwargs):
        if not hasattr(self, 'module_name'):
            # Cache the short module name on the instance on first use.
            self.module_name = func.__module__.split('.')[-1]
        # Renamed from "tracer" to avoid shadowing the wrapper function name.
        oc_tracer = execution_context.get_opencensus_tracer()
        # The span object itself is unused, so no "as span" binding.
        with oc_tracer.span(name=self.module_name + '.' + func.__name__):
            return func(self, *args, **kwargs)
    return tracer
|
Python
| 0.000032
|
@@ -3212,24 +3212,70 @@
func(func):%0A
+ %22%22%22Decorator for tracing of functions.%22%22%22%0A
if not e
@@ -3312,16 +3312,17 @@
rn func%0A
+%0A
def
@@ -3591,16 +3591,8 @@
e__)
- as span
:%0A
|
4b14f12fa8bb6dca7ad91f187e7765bef27c0d65
|
Add some options
|
classes/admin.py
|
classes/admin.py
|
from django.contrib import admin
from classes.models import Attendee
from classes.models import Attendance
from classes.models import Session
from classes.models import WalkinClass
class AttendanceInline(admin.TabularInline):
    """Tabular inline editor for Attendance rows, labeled "Attendees"."""
    model = Attendance
    extra = 1
    verbose_name = 'Attendee'
    verbose_name_plural = 'Attendees'
    fields = ('attendee', 'start_date_time', "stop_date_time", 'notes')
class SessionInline(admin.TabularInline):
    """Tabular inline editor for Session rows on the WalkinClass page."""
    model = Session
    extra = 1
    fields = ('start_date_time', 'stop_date_time', 'teacher')
class AttendeeAdmin(admin.ModelAdmin):
    """Default admin for Attendee; no customizations yet."""
    pass
class SessionAdmin(admin.ModelAdmin):
    """Admin for Session with inline attendance editing."""
    inlines = [
        AttendanceInline,
    ]
    fields = ('walk_in_class','teacher', 'start_date_time', "stop_date_time", )
    list_display= ('walk_in_class', 'start_date_time',)
class WalkinClassAdmin(admin.ModelAdmin):
    """Admin for WalkinClass with inline session editing."""
    inlines = [
        SessionInline,
    ]
# Wire the model admins into the default admin site.
admin.site.register(Attendee, AttendeeAdmin)
admin.site.register(Session, SessionAdmin)
admin.site.register(WalkinClass, WalkinClassAdmin)
|
Python
| 0.000535
|
@@ -399,16 +399,52 @@
notes')%0A
+ search_fields = 'name', 'phone'%0A
%0A%0Aclass
@@ -848,16 +848,157 @@
_time',)
+%0A date_hierarchy = 'start_date_time'%0A list_filter = %5B'walk_in_class', 'start_date_time', 'teacher'%5D%0A ordering = %5B'-start_date_time'%5D
%0A%0Aclass
|
54d1366167cd21604222efaeab193d0ffc71680e
|
remove TODO
|
source/jormungandr/jormungandr/street_network/ridesharing.py
|
source/jormungandr/jormungandr/street_network/ridesharing.py
|
# Copyright (c) 2001-2016, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# Hope you'll enjoy and contribute to this project,
# powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
# a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# IRC #navitia on freenode
# https://groups.google.com/d/forum/navitia
# www.navitia.io
import logging
from jormungandr.street_network.street_network import AbstractStreetNetworkService, StreetNetworkPathType
from jormungandr import utils, fallback_modes as fm
from navitiacommon import response_pb2
class Ridesharing(AbstractStreetNetworkService):
    """Street-network service dedicated to the ridesharing fallback mode.

    Delegates routing to an inner street-network service (built from the
    'street_network' config entry) and re-labels the returned sections so
    that durations/distances are accounted as ridesharing instead of car.
    """
    def __init__(self, instance, service_url, modes=None, id=None, timeout=10, api_key=None, **kwargs):
        self.instance = instance
        self.modes = modes or [fm.FallbackModes.ridesharing.name]
        # This wrapper only makes sense for the ridesharing mode.
        assert list(self.modes) == [fm.FallbackModes.ridesharing.name], (
            'Class: ' + str(self.__class__) + ' can only be used for ridesharing'
        )
        self.sn_system_id = id or 'ridesharing'
        config = kwargs.get('street_network', {})
        # Fill in defaults the inner service's constructor expects.
        if 'service_url' not in config['args']:
            config['args'].update({'service_url': None})
        if 'instance' not in config['args']:
            config['args'].update({'instance': instance})
        config['args'].update({'modes': self.modes})
        self.street_network = utils.create_object(config)
    def status(self):
        """Status dict exposed by the service (id, class name, modes)."""
        return {'id': unicode(self.sn_system_id), 'class': self.__class__.__name__, 'modes': self.modes}
    def _direct_path(
        self, mode, pt_object_origin, pt_object_destination, fallback_extremity, request, direct_path_type
    ):
        # TODO: the ridesharing_speed is stored in car_no_park_speed
        # a proper way to handle this is to override car_no_park_speed use the ridesharing_speed here
        # copy_request = copy.deepcopy(request)
        # copy_request["car_no_park_speed"] = copy_request["ridesharing_speed"]
        response = self.street_network._direct_path(
            mode, pt_object_origin, pt_object_destination, fallback_extremity, request, direct_path_type
        )
        # Re-attribute every section from car to ridesharing so journey
        # totals reflect the requested mode.
        for journey in response.journeys:
            for section in journey.sections:
                section.street_network.mode = fm.FallbackModes[mode].value
                journey.durations.ridesharing += section.duration
                journey.distances.ridesharing += section.length
                journey.durations.car -= section.duration
                journey.distances.car -= section.length
        return response
    def get_street_network_routing_matrix(
        self, origins, destinations, street_network_mode, max_duration, request, **kwargs
    ):
        # TODO: the ridesharing_speed is stored in car_no_park_speed
        # a proper way to handle this is to override car_no_park_speed use the ridesharing_speed here
        # copy_request = copy.deepcopy(request)
        # copy_request["car_no_park_speed"] = copy_request["ridesharing_speed"]
        return self.street_network.get_street_network_routing_matrix(
            origins, destinations, street_network_mode, max_duration, request, **kwargs
        )
    def make_path_key(self, mode, orig_uri, dest_uri, streetnetwork_path_type, period_extremity):
        """
        :param orig_uri, dest_uri, mode: matters obviously
        :param streetnetwork_path_type: whether it's a fallback at
        the beginning, the end of journey or a direct path without PT also matters especially for car (to know if we
        park before or after)
        :param period_extremity: is a PeriodExtremity (a datetime and its meaning on the
        fallback period)
        Nota: period_extremity is not taken into consideration so far because we assume that a
        direct path from A to B remains the same even the departure time are different (no realtime)
        """
        return self.street_network.make_path_key(mode, orig_uri, dest_uri, streetnetwork_path_type, None)
|
Python
| 0.000001
|
@@ -1457,35 +1457,8 @@
e):%0A
- %22%22%22%0A TODO:%0A %22%22%22%0A%0A
|
7bdf0a3121d539e0a98ffa68a964bfd022fe43a5
|
Make the stitcher executable
|
atram_stitcher.py
|
atram_stitcher.py
|
#!/usr/bin/env python3
"""
Start the atram exon stitcher.
This wrapper module parses the input arguments and passes them to the module
that does the actual stitching (core_stitcher.py).
"""
from os.path import join
from datetime import date
import argparse
import textwrap
import lib.db as db
import lib.log as log
import lib.util as util
from lib.core_stitcher import Sticher
def parse_command_line():
    """Process command-line arguments.

    Builds the argparse parser, parses sys.argv, derives defaults for the
    output prefix and log file, and validates the iteration count.
    """
    description = """
        This program will find and stitch together exons from targeted
        assemblies using amino acid targets and DNA assemblies.
        """
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description=textwrap.dedent(description))
    parser.add_argument('--version', action='version',
                        version='%(prog)s {}'.format(db.ATRAM_VERSION))
    parser.add_argument(
        '-T', '--taxa', metavar='TAXA', required=True,
        help="""A text file of all of your taxon names.""")
    parser.add_argument(
        '-r', '--reference-genes', '--refs', metavar='FASTA', required=True,
        help="""Reference amino acid sequences in a FASTA file.""")
    parser.add_argument(
        '-a', '--assemblies-dir', metavar='PATH', required=True,
        help="""The path to the target assemblies directory.""")
    parser.add_argument(
        '-O', '--overlap', type=int, default=10,
        help="""Contigs must overlap by this many codons before it is
            considered a real overlap.""")
    parser.add_argument(
        '-t', '--temp-dir', metavar='DIR',
        help="""Place temporary files in this directory. All files will be
            deleted after aTRAM completes. The directory must exist.""")
    parser.add_argument(
        '--keep-temp-dir', action='store_true',
        help="""This flag will keep the temporary files in the --temp-dir
            around for debugging.""")
    parser.add_argument(
        '-l', '--log-file',
        help="""Log file (full path). The default is
            "atram_stitcher_<date>.log".""")
    parser.add_argument(
        '-i', '--iterations', type=int, default=2, metavar='N',
        help="""The number of times to run the main stitcher loop. The
            minimum is "1" and the default is "2".""")
    parser.add_argument(
        '-o', '--output-prefix',
        help="""This is the prefix of all of the output files. So you can
            identify different stitcher output file sets. You may include a
            directory as part of the prefix. The stitcher will add suffixes to
            differentiate output files.""")
    args = parser.parse_args()
    # Validate (and require existence of) the user-provided temp dir.
    util.temp_dir_exists(args.temp_dir)
    if not args.output_prefix:
        # Default prefix embeds today's date, e.g. ./atram_stitcher_2020-01-01
        args.output_prefix = join(
            '.', 'atram_stitcher_' + date.today().isoformat())
    if not args.log_file:
        args.log_file = args.output_prefix + '.log'
    if args.iterations < 1:
        log.fatal('The iterations must be >= 1.')
    return args
if __name__ == '__main__':
    ARGS = parse_command_line()
    # NOTE(review): "Sticher" looks like a typo of "Stitcher"; it must match
    # the name exported by lib.core_stitcher — verify before renaming.
    Sticher(ARGS).stitch()
|
Python
| 0.998561
| |
aa681b4a36ce36c53933f3834eec9c721d6029cf
|
Update docker images utils
|
polyaxon/docker_images/image_info.py
|
polyaxon/docker_images/image_info.py
|
import logging
from typing import Any, Tuple
import conf
from constants.images_tags import LATEST_IMAGE_TAG
_logger = logging.getLogger('polyaxon.dockerizer.images')
def get_experiment_image_info(experiment: 'Experiment') -> Tuple[str, str]:
    """Return the image name and image tag for an experiment"""
    # Image name is "<registry>/<project name>"; tag is the code commit.
    image_name = '{}/{}'.format(
        conf.get('REGISTRY_HOST'), experiment.project.name)
    return image_name, experiment.code_reference.commit
def get_job_image_info(project: 'Project', job: Any) -> Tuple[str, str]:
    """Return the image name and image tag for a job"""
    image_name = '{}/{}'.format(conf.get('REGISTRY_HOST'), project.name)
    try:
        last_commit = project.repo.last_commit
    except ValueError:
        raise ValueError('Repo was not found for project `{}`.'.format(project))
    # Tag with the sha of the repo's last commit.
    return image_name, last_commit[0]
def get_notebook_image_info(project: 'Project', job: Any) -> Tuple[str, str]:
    """Return the image name and image tag for a job"""
    # Notebooks reuse the job image name but always point at "latest".
    name, _unused_tag = get_job_image_info(project, job)
    return name, LATEST_IMAGE_TAG
def get_image_name(build_job: 'BuildJob') -> str:
    """Return the registry image name "<registry>/<project>_<id>"."""
    project = build_job.project
    return '{}/{}_{}'.format(
        conf.get('REGISTRY_HOST'), project.name.lower(), project.id)
def get_image_info(build_job: 'BuildJob') -> Tuple[str, str]:
    """Return (image_name, image_tag) for a build job (tag = job uuid hex)."""
    name = get_image_name(build_job=build_job)
    return name, build_job.uuid.hex
def get_tagged_image(build_job: 'BuildJob') -> str:
    """Return the fully tagged image reference "name:tag"."""
    return '{}:{}'.format(*get_image_info(build_job))
|
Python
| 0.000001
|
@@ -584,16 +584,17 @@
ob: Any)
+
-%3E Tuple
@@ -1226,24 +1226,32 @@
G%0A%0A%0Adef get_
+project_
image_name(b
@@ -1249,37 +1249,50 @@
ge_name(
-build_job: 'BuildJob'
+project_name: str, project_id: int
) -%3E str
@@ -1382,38 +1382,752 @@
-build_job.
+project_name.lower(),%0A project_id)%0A%0A%0Adef get_project_image_info(project_name: str, project_id: int, image_tag: str) -%3E Tuple%5Bstr, str%5D:%0A return get_project_image_name(project_name=project_name, project_id=project_id), image_tag%0A%0A%0Adef get_project_tagged_image(project_name: str, project_id: int, image_tag: str) -%3E str:%0A image_name, image_tag = get_project_image_info(project_name=project_name,%0A project_id=project_id,%0A image_tag=image_tag)%0A return '%7B%7D:%7B%7D'.format(image_name, image_tag)%0A%0A%0Adef get_image_name(build_job: 'BuildJob') -%3E str:%0A return get_project_image_name(
project
-.
+_
name
-.lower()
+=build_job.project.name
,%0A
@@ -2153,16 +2153,32 @@
+ project_id=
build_jo
@@ -2266,35 +2266,56 @@
return get_
-image
+project_image_info(project
_name
-(
+=
build_job=bu
@@ -2315,21 +2315,134 @@
_job
-=build_job),
+.project.name,%0A project_id=build_job.project.id,%0A image_tag=
buil
@@ -2455,16 +2455,17 @@
uuid.hex
+)
%0A%0A%0Adef g
@@ -2519,103 +2519,208 @@
-image_name, image
+return get_project
_tag
- =
ge
-t
+d
_image
-_info(build_job)%0A return '%7B%7D:%7B%7D'.format(image_name, image_tag
+(project_name=build_job.project.name,%0A project_id=build_job.project.id,%0A image_tag=build_job.uuid.hex
)%0A
|
d69bd1c72c1e01ba392eda54820ca5db4774b744
|
Use 'open' with 'with'
|
polycircles/test/test_earthquakes.py
|
polycircles/test/test_earthquakes.py
|
import os, csv
from nose.tools import assert_equal
from polycircles import polycircles
import simplekml
import unittest
class TestLastPointInPolygonEqualsTheFirstOne(unittest.TestCase):
    """
    Courtesy Carlos H. Grohmann (https://github.com/CarlosGrohmann) who
    reported Issue #1 (https://github.com/adamatan/polycircles/issues/1).
    In KML, the first point of the polygon should be equal to the last point
    of the polygon in order to properly create a closed polygon, without
    a missing vertex.
    Therefore, a Polycircle called with number_of_vertices=N
    will have N+1 vertices, where vertices[0] == vertices[N].
    BTW, This is cool - this project is used to map earthquakes!
    """
    def setUp(self):
        csvfile = 'polycircles/test/sismos_continente2.csv'
        output_dir = 'kmls'
        if not os.path.exists(output_dir):
            os.makedirs(output_dir)
        # Bug fix: the file handle from open() was never closed; use a
        # context manager (also drops the deprecated 'rU' mode).
        with open(csvfile) as f:
            datafile = csv.reader(f, delimiter=',')
            quakelist = list(datafile)
        kml = simplekml.Kml()
        alpha = 100
        self.polycircles = []
        self.number_of_vertices = 36
        for quake in quakelist[1:]:
            lng = float(quake[0])   # X
            lat = float(quake[1])   # Y
            yyyy = quake[2]         # Ano
            magw = quake[25]        # mag_03
            polycircle = polycircles.Polycircle(
                latitude=lat,
                longitude=lng,
                radius=40000,
                number_of_vertices=self.number_of_vertices)
            self.polycircles.append(polycircle)
            pol = kml.newpolygon(name=yyyy, outerboundaryis=polycircle.to_kml())
            # Color-code polygons by magnitude band.
            if float(magw) < 3.0:
                pol.style.polystyle.color = simplekml.Color.changealphaint(alpha, simplekml.Color.lightyellow)
                pol.style.linestyle.color = simplekml.Color.lightyellow
            elif 3.0 < float(magw) < 3.5:
                pol.style.polystyle.color = simplekml.Color.changealphaint(alpha, simplekml.Color.yellow)
                pol.style.linestyle.color = simplekml.Color.yellow
            elif 3.5 < float(magw) < 4.0:
                pol.style.polystyle.color = simplekml.Color.changealphaint(alpha, simplekml.Color.orangered)
                pol.style.linestyle.color = simplekml.Color.orangered
            else:
                pol.style.polystyle.color = simplekml.Color.changealphaint(alpha, simplekml.Color.red)
                pol.style.linestyle.color = simplekml.Color.red
        kml.save(os.path.join(output_dir, 'sismos.kml'))
    def test_number_of_vertices(self):
        """The number of vertices in the Polycircle should equal (number_of_vertices+1).
        This test verifies that all the points representations (wkt, kml, lat-lon, lon-lat)
        have the same number of points, equal to number_of_vertices+1."""
        for polycircle in self.polycircles:
            lat_lons = polycircle.to_lat_lon()
            lon_lats = polycircle.to_lon_lat()
            points = [p for p in polycircle]
            wkt = polycircle.to_wkt().split(',')
            kml = polycircle.to_kml()
            assert_equal(len(lat_lons), self.number_of_vertices + 1)
            assert_equal(len(lon_lats), self.number_of_vertices + 1)
            assert_equal(len(points), self.number_of_vertices + 1)
            assert_equal(len(wkt), self.number_of_vertices + 1)
            assert_equal(len(kml), self.number_of_vertices + 1)
|
Python
| 0.998616
|
@@ -895,16 +895,53 @@
ut_dir)%0A
+ with open(csvfile) as f:%0A
@@ -966,27 +966,9 @@
der(
-open(csvfile, 'rU')
+f
, de
@@ -976,24 +976,28 @@
imiter=',')%0A
+
quak
|
7f0d7cc205f6ff740cf376932c592db39769b783
|
Update analyzefiles.py
|
bin/interpret/analyzefiles.py
|
bin/interpret/analyzefiles.py
|
#!/usr/bin/python
import os,sys,inspect
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(currentdir)
sys.path.insert(0,parentdir)
import config as cfg
import common
def plotProfile(sample, outDir, lowessData, cnvData, refArray):
xVals = [x['abspos'] + (x['size']/2) for x in refArray]
chromStarts = [min([y['start'] for y in refArray[refArray['chrom'] == x]]) for x in np.unique(refArray['chrom'])]
chromEnds = [max([y['start'] + y['size'] for y in refArray[refArray['chrom'] == x]]) for x in np.unique(refArray['chrom'])]
chromEdges = chromEnds[:-1]
xTicks = [np.mean([chromStarts[x], chromEnds[x]]) for x,y in enumerate(np.unique(refArray['chrom'])[:-1])]
fig, ax = plt.subplots()
#ideally make these prettier colors like were used in the paper#
ax.scatter(xVals, binData, color='b', marker='d', s=3, linewidths=0)
ax.plot(xVals, cnvData, color='r', lw=1, ls='steps')
for j in chromEdges:
ax.plot([j, j], [-1, 5], lw=1, ls='-', color='gray', zorder=0)
ax.set_xticks(xTicks)
ax.set_xticklabels(xUse, fontsize=pFonts.fontSizeDict['tick'], fontname=fontType, rotation=45)
ax.set_xlabel('Genome Location (Chrom)', fontsize=pFonts.fontSizeDict['axis'], fontname=fontType, labelpad=1)
ax.set_xlim(0, xVals[-1])
yTicks = [0, 1, 2, 3, 4]
ax.set_yticks(yTicks)
ax.set_yticklabels(yTicks, fontsize=pFonts.fontSizeDict['tick'], fontname=fontType)
ax.set_ylabel('Copy Number', fontsize=pFonts.fontSizeDict['axis'], fontname=fontType, labelpad=1)
ax.set_ylim(-0.1, 4.6)
ax.tick_params(direction='out', which='both', pad=0., length=3, top='off', right='off')
fig.set_size_inches(1.4, 0.9, forward=True)
plt.subplots_adjust(left=0.13, right=0.98, bottom=0.3, top=0.91)
plt.savefig(outDir + sample + 'copyNumberProfile.png', dpi=1200, transparent=True)
plt.savefig(plotNameAlt, dpi=333)
plt.close()
#function to create cell summary file (one row per sample)
def analyzeOne(sample, species, cnvDir, lowessDir, outDir, ploidy, gender):
interpretVars = cfg.Interpret()
#load reference data#
binArray = common.importInfoFile(interpretVars.binDict[species], [0, 1, 2, 4, 5], 'normref', skiprows=1)
xBins = [x for x,y in enumerate(binArray) if y['chrom'] == 'chrX']
yBins = [x for x,y in enumerate(binArray) if y['chrom'] == 'chrY']
#load lowess counts and convert to CN state#
binData = np.loadtxt(lowessDir + common.findInfile(sample, lowessDir))
binData = (2 ** binData) * ploidy
#load CNV data and convert to array form#
cnvData = np.array( len(binArray) * [2], dtype='int' )
if gender == 'M':
cnvData[xBins] = len(xBins) * [1]
cnvData[yBins] = len(yBins) * [1]
else:
cnvData[yBins] = len(yBins) * [0]
listFile = cnvDir + common.findInfile(sample, cnvDir)
listDtype = {'names': ('chrom', 'start', 'end', 'CN', 'type'), 'formats': ('S10', 'int', 'int', 'int', 'S10')}
if os.stat(listFile).st_size > 32:
cnvs = np.loadtxt(listFile, skiprows=1, dtype=listDtype)
cnvs = np.atleast_1d(cnvs)
for j in cnvs:
cnvData[j['start']:j['end']] = j['CN']
plotProfile(sample, outDir, binData, cnvDat, binArray)
|
Python
| 0.000001
|
@@ -2936,16 +2936,8 @@
'CN'
-, 'type'
), '
@@ -2977,15 +2977,8 @@
int'
-, 'S10'
)%7D%0A%09
|
bd4a6e4444b73eacc75657985ffbfd90538a08f0
|
fix code style
|
flexget/ui/__init__.py
|
flexget/ui/__init__.py
|
from __future__ import unicode_literals, division, absolute_import
from builtins import * # pylint: disable=unused-import, redefined-builtin
import logging
import os
import fnmatch
from flask import send_from_directory, Flask
from flexget.webserver import register_app, register_home
from flask_compress import Compress
log = logging.getLogger('webui')
manager = None
debug = False
app_base = None
ui_base = os.path.dirname(os.path.realpath(__file__))
ui_src = os.path.join(ui_base, 'src')
ui_dist = os.path.join(ui_base, 'app')
bower_components = os.path.join(ui_base, 'bower_components')
webui_app = Flask(__name__)
Compress(webui_app)
webui_app.url_path = '/ui'
@webui_app.route('/<path:path>')
def serve_app(path):
    """Serve a UI asset, preferring unbuilt source files in debug mode."""
    if debug:
        prefix = 'bower_components'
        if path.startswith(prefix):
            # Bug fix: str.lstrip(prefix) strips *characters*, not a prefix,
            # so e.g. "bower_components/bootstrap/x" lost leading letters of
            # "bootstrap" too. Slice the prefix off instead.
            return send_from_directory(
                bower_components, path[len(prefix):].lstrip('/'))
        if os.path.exists(os.path.join(ui_src, path)):
            return send_from_directory(ui_src, path)
    return send_from_directory(app_base, path)
@webui_app.route('/')
def root():
    """Serve the app entry point, or a failure page when no build exists."""
    if app_base:
        return send_from_directory(app_base, 'app.html')
    return send_from_directory(ui_base, 'load.failure.html')
def _find(path, f):
matches = []
for root, dir_names, file_names in os.walk(path):
for filename in fnmatch.filter(file_names, f):
matches.append(os.path.join(root, filename))
return matches
def _strip_trailing_sep(path):
return path.rstrip('\\/')
def register_web_ui(mgr):
    """Register the web UI Flask app with the FlexGet webserver.

    Chooses the debug (unbuilt) asset directory when --debug is set and it
    exists, otherwise falls back to the compiled app directory; mutates the
    module-level globals used by the route handlers.
    """
    global manager, app_base, debug
    manager = mgr
    if 'debug' in manager.args:
        debug = True
    if debug:
        app_base = os.path.join(ui_base, '.tmp', 'serve')
        if not os.path.exists(app_base):
            # Debug assets missing: warn and fall back to the compiled UI.
            log.warning('Unable to start web ui in debug mode. To enable debug mode please run the debug build, '
                        'see http://flexget.com/wiki/Web-UI for instructions')
            log.warning('Attempting to serve web ui from complied directory')
            app_base = None
    if not app_base:
        app_base = ui_dist
        if not os.path.exists(app_base):
            # No compiled UI either; root() will serve the failure page.
            log.fatal('Failed to start web ui,'
                      ' this can happen if you are running from GitHub version and forgot to run the web ui build, '
                      'see http://flexget.com/wiki/Web-UI for instructions')
            app_base = None
    register_app(webui_app.url_path, webui_app)
    register_home('%s/' % webui_app.url_path)
|
Python
| 0.000022
|
@@ -1217,25 +1217,24 @@
app.html')%0A%0A
-%0A
def _find(pa
@@ -1270,16 +1270,20 @@
for root
+_dir
, dir_na
@@ -1415,16 +1415,20 @@
oin(root
+_dir
, filena
|
7307a4b19b09f4408f569c580955f5c7d2af5f73
|
Update version number
|
auth0/__init__.py
|
auth0/__init__.py
|
__version__ = '2.0.0b3'
|
Python
| 0.000002
|
@@ -18,7 +18,7 @@
0.0b
-3
+4
'%0A
|
6a1c3e8c7adecc98af10161d41d95034919eeacd
|
Allow user specified tsconfig.json
|
sphinx_js/generators.py
|
sphinx_js/generators.py
|
from codecs import getwriter
from errno import ENOENT
import subprocess
import os
from os.path import abspath
from tempfile import TemporaryFile, NamedTemporaryFile
from json import load
from sphinx.errors import SphinxError
from sphinx.util.logging import getLogger
from six import string_types
from .typedoc import parse_typedoc
logger = getLogger(__name__)
class Command(object):
    """Accumulates a program name plus arguments into an argv-style list."""

    def __init__(self, program):
        # Windows npm shims require the ".cmd" extension on the executable.
        self.program = program + '.cmd' if os.name == 'nt' else program
        self.args = []

    def add(self, *args):
        """Append positional arguments to the command line."""
        self.args.extend(args)

    def make(self):
        """Return the full argv list, logging it for debugging."""
        command = [self.program] + self.args
        logger.info('running: ' + ' '.join(command))
        return command
class Generator(object):
    """Base generator: resolves configured JS source paths to absolute paths."""
    def __init__(self, app):
        self.app = app
        # js_source_path may be a single string or a list of strings.
        source_paths = [app.config.js_source_path] if isinstance(app.config.js_source_path, string_types) else app.config.js_source_path
        # Uses cwd, which Sphinx seems to set to the dir containing conf.py:
        self.abs_source_paths = [abspath(path) for path in source_paths]
class JSDocGenerator(Generator):
    """Runs jsdoc over the source paths and returns its parsed JSON output."""

    def run(self):
        jsdoc_command = Command('jsdoc')
        jsdoc_command.add(*self.abs_source_paths)
        jsdoc_command.add('-X')
        if self.app.config.jsdoc_config_path:
            jsdoc_command.add('-c', self.app.config.jsdoc_config_path)

        # Use a temporary file to handle large output volume. JSDoc defaults to
        # utf8-encoded output.
        with getwriter('utf-8')(TemporaryFile(mode='w+')) as temp:
            try:
                p = subprocess.Popen(jsdoc_command.make(), stdout=temp)
            except OSError as exc:
                if exc.errno == ENOENT:
                    # Bug fix: "jsdoc_command_name" was never defined and this
                    # branch raised NameError; report the program name instead.
                    raise SphinxError('%s was not found. Install it using "npm install -g jsdoc".' % jsdoc_command.program)
                else:
                    raise
            p.wait()
            # Once output is finished, move back to beginning of file and load it:
            temp.seek(0)
            try:
                return load(temp)
            except ValueError:
                raise SphinxError('jsdoc found no JS files in the directories %s. Make sure js_source_path is set correctly in conf.py. It is also possible (though unlikely) that jsdoc emitted invalid JSON.' % self.abs_source_paths)
class TypedocGenerator(Generator):
def run(self):
with getwriter('utf-8')(NamedTemporaryFile(mode='w+')) as temp:
jsdoc_command = Command('typedoc')
jsdoc_command.add('--json', temp.name)
jsdoc_command.add(*self.abs_source_paths)
<<<<<<< eeacb9237af86b8b6e6dfd960f4696ab43e91aec
if self.app.config.jsdoc_config_path:
jsdoc_command.add('--tsconfig', self.app.config.jsdoc_config_path)
=======
>>>>>>> Refactor jsdoc.py
subprocess.call(jsdoc_command.make())
try:
return parse_typedoc(temp)
except ValueError:
raise SphinxError('typedoc found no TS files in the directories %s. Make sure js_source_path is set correctly in conf.py. It is also possible (though unlikely) that typedoc emitted invalid JSON.' % self.abs_source_paths)
def generate_doclets(app):
if app.config.js_language == 'javascript':
return JSDocGenerator(app).run()
elif app.config.js_language == 'typescript':
return TypedocGenerator(app).run()
else:
raise SphinxError('unknown JS language: ' + app.config.js_language)
|
Python
| 0
|
@@ -2627,57 +2627,8 @@
hs)%0A
-%3C%3C%3C%3C%3C%3C%3C eeacb9237af86b8b6e6dfd960f4696ab43e91aec%0A
@@ -2760,42 +2760,8 @@
th)%0A
-=======%0A%3E%3E%3E%3E%3E%3E%3E Refactor jsdoc.py%0A
|
f1a54346ac0a0241ee5d8011ba443fc7ef5a74f1
|
discard happens after interrupt
|
pyardrone/utils/object_executor.py
|
pyardrone/utils/object_executor.py
|
import threading
import queue
import time
class Interrupt:
def __init__(self, obj_exe, wait, discard):
self.obj_exe = obj_exe
self.wait = wait
self.discard = discard
def __enter__(self):
self.obj_exe.pause(wait=self.wait)
if self.discard:
q = self.obj_exe._queue
with self.obj_exe._queue_lock:
with q.mutex:
q.queue.clear()
q.all_tasks_done.notify_all()
q.unfinished_tasks = 0
def __exit__(self, exc_type, exc_value, exc_tb):
if exc_type is None:
self.obj_exe.resume()
class ObjectExecutor:
def __init__(self, target, interval, default):
self._target = target
self.default = default
self.interval = interval
self._queue = queue.Queue()
self._queue_lock = threading.Lock()
self._thread = threading.Thread(target=self._job)
self._stop_event = threading.Event()
self._working_event = threading.Event()
def start(self):
self._stop_event.clear()
self._working_event.set()
self._thread.start()
def stop(self, wait=False):
if wait:
self.join()
self._stop_event.set()
def pause(self, wait=False):
if wait:
self.join()
self._working_event.clear()
def resume(self):
self._working_event.set()
def interrupt(self, *, wait=False, discard=True):
return Interrupt(self, wait=wait, discard=discard)
def join(self):
self._queue.join()
def put(self, obj, with_event=True):
if with_event:
event = threading.Event()
else:
event = None
self._queue.put((obj, event))
def _process_object(self):
with self._queue_lock:
try:
obj, event = self._queue.get_nowait()
call_task_done = True
except queue.Empty:
obj, event = self.default, None
call_task_done = False
self._target(obj)
if event is not None:
event.set()
if call_task_done:
self._queue.task_done()
def _job(self):
while not self._stop_event.is_set():
self._working_event.wait()
self._process_object()
time.sleep(self.interval)
|
Python
| 0.000112
|
@@ -259,16 +259,103 @@
f.wait)%0A
+%0A def __exit__(self, exc_type, exc_value, exc_tb):%0A if exc_type is None:%0A
@@ -371,16 +371,20 @@
iscard:%0A
+
@@ -415,32 +415,36 @@
eue%0A
+
with self.obj_ex
@@ -470,24 +470,28 @@
+
+
with q.mutex
@@ -492,16 +492,20 @@
.mutex:%0A
+
@@ -552,16 +552,20 @@
+
+
q.all_ta
@@ -606,16 +606,20 @@
+
+
q.unfini
@@ -636,91 +636,8 @@
= 0
-%0A%0A def __exit__(self, exc_type, exc_value, exc_tb):%0A if exc_type is None:
%0A
|
443d56fdd2e588c11c2a1e3a685912b712e37d44
|
Make sure all fields are grabbed.
|
split/casanfar_split.py
|
split/casanfar_split.py
|
import os
import numpy as np
import sys
SDM_name = str(sys.argv[4])
print "Inputted MS: "+SDM_name
# SDM_name = '14B-088.sb30023144.eb30070731.57002.919034293984'
# Set up some useful variables (these will be altered later on)
msfile = SDM_name + '.ms'
hisplitms = SDM_name + '.hi.ms'
splitms = SDM_name + '.hi.src.split.ms'
pathname = os.environ.get('CASAPATH').split()[0]
pipepath = '/home/ekoch/pipe_scripts/'
source = 'M33'
# VOS stuff
vos_dir = '../vos/'
vos_proc = './'
# %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%%&%&%&%&%&%&%%&%
# Find the 21cm spw and check if the obs
# is single pointing or mosaic
# %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%%&%&%&%&%&%&%%&%
print "Find HI spw..."
# But first find the spw corresponding to it
tb.open(vos_dir+msfile+'/SPECTRAL_WINDOW')
freqs = tb.getcol('REF_FREQUENCY')
nchans = tb.getcol('NUM_CHAN')
tb.close()
spws = range(0, len(freqs))
# Select the 21cm
sel = np.where((freqs > 1.40*10**9) & (freqs < 1.43*10**9))
hispw = str(spws[sel[0][0]])
freq = freqs[sel[0][0]]
nchan = nchans[sel[0][0]]
print "Selected spw "+str(hispw)
print "with frequency "+str(freq)
print "and "+str(nchan)+" channels"
print "Starting split the HI line"
# Mosaic or single pointing?
tb.open(vos_dir+msfile+'/FIELD')
names = tb.getcol('NAME')
tb.close()
moscount = 0
for name in names:
chsrc = name.find(source)
if chsrc != -1:
moscount = moscount+1
if moscount > 1:
imagermode = "mosaic"
else:
imagermode = "csclean"
# %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%
# Split the corrected source data from the rest
# %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%
print "Starting source split..."
#os.system('md5sum $(find '+vos_dir+hisplitms+') > '+vos_proc+hisplitms+'.md5')
# os.system('rm -rf '+vos_proc+splitms)
default('split')
vis = vos_dir+msfile
outputvis = vos_proc+hisplitms
field = source
spw = hispw
datacolumn = 'corrected'
keepflags = False
print vis
print outputvis
print field
print spw
split()
print "Created splitted-source .ms "+hisplitms
|
Python
| 0
|
@@ -424,16 +424,17 @@
e = 'M33
+*
'%0A%0A# VOS
|
eeee8fb498eb3a52baff7b9b2684c8a713e20216
|
remove non-essential calls from inner-loop methods
|
pydoop/mapreduce/binary_streams.py
|
pydoop/mapreduce/binary_streams.py
|
# BEGIN_COPYRIGHT
#
# Copyright 2009-2017 CRS4.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# END_COPYRIGHT
from .streams import (
StreamWriter, StreamReader, DownStreamAdapter, UpStreamAdapter,
)
from pydoop.utils.serialize import CommandReader, CommandWriter, RULES
from pydoop.utils.py3compat import unicode
import logging
logging.basicConfig()
LOGGER = logging.getLogger('binary_streams')
LOGGER.setLevel(logging.CRITICAL)
class BinaryWriter(StreamWriter):
def __init__(self, stream):
super(BinaryWriter, self).__init__(CommandWriter(stream))
self.logger = LOGGER.getChild('BinaryWriter')
self.logger.debug('initialize on stream: %s', stream)
# we need to be sure that stream will not be gc
self.original_stream = stream
def send(self, cmd, *args):
self.logger.debug('request to write %r, %r', cmd, args)
typecodes = RULES[cmd] if cmd != self.SET_JOB_CONF else 's' * len(args)
args = self.__to_bytes(args, typecodes)
if cmd == self.SET_JOB_CONF:
args = (args,)
self.logger.debug('writing (%r, %r)', cmd, args)
self.stream.write((cmd, args))
def __to_bytes(self, args, typecodes):
assert len(args) == len(typecodes)
out_args = []
for a, t in zip(args, typecodes):
if t == "s" and not isinstance(a, (bytes, bytearray)):
if not isinstance(a, unicode):
a = unicode(a)
a = a.encode('utf-8')
out_args.append(a)
return tuple(out_args)
class BinaryReader(StreamReader):
def __init__(self, stream):
super(BinaryReader, self).__init__(CommandReader(stream))
self.logger = LOGGER.getChild('BinaryReader')
self.logger.debug('initialize on stream: %s', stream)
# we need to be sure that stream will not be gc
self.original_stream = stream
def __iter__(self):
self.logger.debug('requested iterator: %s', self)
return self.stream.__iter__()
def next(self):
return next(self.stream)
def __next__(self):
return self.next()
class BinaryDownStreamAdapter(BinaryReader, DownStreamAdapter):
def __init__(self, stream):
super(BinaryDownStreamAdapter, self).__init__(stream)
self.logger = LOGGER.getChild('BinaryDownStreamAdapter')
self.logger.debug('initialize on stream: %s', stream)
class BinaryUpStreamAdapter(BinaryWriter, UpStreamAdapter):
def __init__(self, stream):
super(BinaryUpStreamAdapter, self).__init__(stream)
self.logger = LOGGER.getChild('BinaryUpStreamAdapter')
self.logger.debug('initialize on stream: %s', stream)
class BinaryUpStreamDecoder(BinaryDownStreamAdapter):
def __init__(self, stream):
super(BinaryUpStreamDecoder, self).__init__(stream)
self.logger = LOGGER.getChild('BinaryUpStreamDecoder')
|
Python
| 0.000009
|
@@ -1310,72 +1310,8 @@
s):%0A
- self.logger.debug('request to write %25r, %25r', cmd, args)%0A
@@ -1502,65 +1502,8 @@
s,)%0A
- self.logger.debug('writing (%25r, %25r)', cmd, args)%0A
@@ -1585,51 +1585,8 @@
s):%0A
- assert len(args) == len(typecodes)%0A
|
ed320c5fac9bdd53b568946847981d38b0e0037b
|
Handle requests exceptions in actual healthcheck
|
src/django_healthchecks/checker.py
|
src/django_healthchecks/checker.py
|
import base64
import functools
import inspect
from importlib import import_module
from django.conf import settings
from django.utils.encoding import force_text
import requests
try:
from django.utils.module_loading import import_string
except ImportError:
def import_string(value):
module_name, func_name = value.rsplit('.', 1)
module = import_module(module_name)
return getattr(module, func_name)
class PermissionDenied(Exception):
pass
def create_report(request=None):
"""Run all checks and return a tuple containing results and boolean to
indicate to indicate if all things are healthy.
"""
report = {}
has_error = False
for service, check_func in _get_check_functions(request=request):
try:
report[service] = check_func()
except:
report[service] = False
if not report[service]:
has_error = True
return report, not has_error
def create_service_result(service, request=None):
functions = list(_get_check_functions(name=service, request=request))
if not functions:
return
check_func = functions[0][1]
try:
result = check_func()
except:
result = False
return result
def _get_check_functions(name=None, request=None):
checks = _get_registered_health_checks()
if not checks or (name and name not in checks):
raise StopIteration()
checks = _filter_checks_on_permission(request, checks)
if not checks or (name and name not in checks):
raise PermissionDenied()
for service, func_string in checks.items():
if name and name != service:
continue
if func_string.startswith(('https://', 'http://')):
check_func = _http_healthcheck_func(func_string)
elif callable(func_string):
check_func = func_string
else:
check_func = import_string(func_string)
spec = inspect.getargspec(check_func)
if spec.args == ['request']:
check_func = functools.partial(check_func, request)
yield service, check_func
def _get_registered_health_checks():
return getattr(settings, 'HEALTH_CHECKS', {})
def _http_healthcheck_func(url):
return lambda: requests.get(url,
timeout=_get_http_healthcheck_timeout()).json()
def _get_http_healthcheck_timeout():
return getattr(settings, 'HEALTH_CHECKS_HTTP_TIMEOUT', 0.5)
def _filter_checks_on_permission(request, checks):
permissions = getattr(settings, 'HEALTH_CHECKS_BASIC_AUTH', {})
if not permissions:
return checks
allowed = {}
for name in checks.keys():
required_credentials = permissions.get(name, permissions.get('*'))
if required_credentials:
credentials = _get_basic_auth(request)
if not credentials or credentials not in required_credentials:
continue
allowed[name] = checks[name]
return allowed
def _get_basic_auth(request):
auth = request.META.get('HTTP_AUTHORIZATION')
if not auth:
return
auth = auth.split()
if len(auth) == 2 and force_text(auth[0]).lower() == u'basic':
credentials = base64.b64decode(auth[1]).decode('latin-1')
return tuple(credentials.split(':'))
|
Python
| 0
|
@@ -76,16 +76,32 @@
module%0A%0A
+import requests%0A
from dja
@@ -171,32 +171,16 @@
e_text%0A%0A
-import requests%0A
%0Atry:%0A
@@ -755,25 +755,8 @@
t):%0A
- try:%0A
@@ -793,54 +793,11 @@
nc()
-%0A except:%0A report%5Bservice%5D =
+ or
Fal
@@ -1095,21 +1095,8 @@
%5B1%5D%0A
- try:%0A
@@ -1120,37 +1120,11 @@
nc()
-%0A except:%0A result =
+ or
Fal
@@ -2152,22 +2152,72 @@
-return lambda:
+def handle_remote_request():%0A try:%0A response =
req
@@ -2230,24 +2230,16 @@
get(url,
-%0A
timeout
@@ -2275,15 +2275,201 @@
t())
-.json()
+%0A except requests.exceptions.RequestException:%0A return False%0A%0A if response.ok:%0A return response.json()%0A return False%0A%0A return handle_remote_request
%0A%0A%0Ad
|
b445042c56e5d3e23d05e47d9617efba4e55c284
|
Update player.py
|
axelrod/player.py
|
axelrod/player.py
|
import inspect
import random
import copy
from axelrod import Actions,
from .game import DefaultGame
C, D = Actions.C, Actions.D
# Strategy classifiers
def is_basic(s):
"""
Defines criteria for a strategy to be considered 'basic'
"""
stochastic = s.classifier['stochastic']
depth = s.classifier['memory_depth']
inspects_source = s.classifier['inspects_source']
manipulates_source = s.classifier['manipulates_source']
manipulates_state = s.classifier['manipulates_state']
return (not stochastic) and (not inspects_source) and (not manipulates_source) and (not manipulates_state) and (depth in (0, 1))
def obey_axelrod(s):
"""
A function to check if a strategy obeys Axelrod's original tournament rules.
"""
classifier = s.classifier
return not (classifier['inspects_source'] or\
classifier['manipulates_source'] or\
classifier['manipulates_state'])
def update_histories(player1, player2, move1, move2):
"""Updates histories and cooperation / defections counts following play."""
# Update histories
player1.history.append(move1)
player2.history.append(move2)
# Update player counts of cooperation and defection
if move1 == C:
player1.cooperations += 1
elif move1 == D:
player1.defections += 1
if move2 == C:
player2.cooperations += 1
elif move2 == D:
player2.defections += 1
class Player(object):
"""A class for a player in the tournament.
This is an abstract base class, not intended to be used directly.
"""
name = "Player"
classifier = {}
default_classifier = {
'stochastic': False,
'memory_depth': float('inf'),
'inspects_source': None,
'manipulates_source': None,
'manipulates_state': None
}
def __init__(self):
"""Initiates an empty history and 0 score for a player."""
self.history = []
self.classifier = copy.copy(self.classifier)
if self.name == "Player":
self.classifier['stochastic'] = False
for dimension in self.default_classifier:
if dimension not in self.classifier:
self.classifier[dimension] = self.default_classifier[dimension]
self.cooperations = 0
self.defections = 0
self.init_args = ()
self.set_tournament_attributes()
def receive_tournament_attributes(self):
# Overwrite this function if your strategy needs
# to make use of tournament_attributes such as
# the game matrix or the number of rounds
pass
def set_tournament_attributes(self, length=-1, game=None):
if not game:
game = DefaultGame
self.tournament_attributes = {
"length": length,
"game": game
}
self.receive_tournament_attributes()
def __repr__(self):
"""The string method for the strategy."""
return self.name
def _add_noise(self, noise, s1, s2):
r = random.random()
if r < noise:
s1 = flip_action(s1)
r = random.random()
if r < noise:
s2 = flip_action(s2)
return s1, s2
def strategy(self, opponent):
"""This is a placeholder strategy."""
raise NotImplementedError()
def play(self, opponent, noise=0):
"""This pits two players against each other."""
s1, s2 = self.strategy(opponent), opponent.strategy(self)
if noise:
s1, s2 = self._add_noise(noise, s1, s2)
update_histories(self, opponent, s1, s2)
def clone(self):
"""Clones the player without history, reapplying configuration
parameters as necessary."""
# You may be tempted to reimplement using the `copy` module
# Note that this would require a deepcopy in some cases and there may
# be significant changes required throughout the library.
# Consider overriding in special cases only if necessary
cls = self.__class__
new_player = cls(*self.init_args)
new_player.tournament_attributes = copy.copy(self.tournament_attributes)
return new_player
def reset(self):
"""Resets history.
When creating strategies that create new attributes then this method should be
re-written (in the inherited class) and should not only reset history but also
rest all other attributes.
"""
self.history = []
self.cooperations = 0
self.defections = 0
|
Python
| 0.000001
|
@@ -64,16 +64,27 @@
ctions,
+flip_action
%0Afrom .g
|
4e0d90fc157760606ae8503762f10bdef30bff8c
|
Remove trailing slashes
|
bluebottle/impact/urls/api.py
|
bluebottle/impact/urls/api.py
|
from django.conf.urls import url
from bluebottle.impact.views import (
ImpactTypeList,
ImpactGoalList,
ImpactGoalDetail
)
urlpatterns = [
url(r'^types/$', ImpactTypeList.as_view(), name='impact-type-list'),
url(r'^goals/$', ImpactGoalList.as_view(), name='impact-goal-list'),
url(
r'^goals/(?P<pk>\d+)/$',
ImpactGoalDetail.as_view(),
name='impact-goal-details'
)
]
|
Python
| 0.000346
|
@@ -161,17 +161,16 @@
r'%5Etypes
-/
$', Impa
@@ -233,17 +233,16 @@
r'%5Egoals
-/
$', Impa
@@ -326,17 +326,16 @@
%3Cpk%3E%5Cd+)
-/
$',%0A
|
dc5ec290095e570ee1f96ffe110aee98009119bf
|
fix call
|
gunicorn/sock.py
|
gunicorn/sock.py
|
# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
import errno
import os
import socket
import sys
import time
from gunicorn import util
from gunicorn.six import string_types
class BaseSocket(object):
def __init__(self, address, conf, log, fd=None):
self.log = log
self.conf = conf
self.cfg_addr = address
if fd is None:
sock = socket.socket(self.FAMILY, socket.SOCK_STREAM)
else:
sock = socket.fromfd(fd, self.FAMILY, socket.SOCK_STREAM)
self.sock = self.set_options(sock, bound=(fd is not None))
def __str__(self, name):
return "<socket %d>" % self.sock.fileno()
def __getattr__(self, name):
return getattr(self.sock, name)
def set_options(self, sock, bound=False):
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if not bound:
self.bind(sock)
sock.setblocking(0)
sock.listen(self.conf.backlog)
return sock
def bind(self, sock):
sock.bind(self.cfg_addr)
def close(self):
try:
self.sock.close()
except socket.error as e:
self.log.info("Error while closing socket %s", str(e))
time.sleep(0.3)
del self.sock
class TCPSocket(BaseSocket):
FAMILY = socket.AF_INET
def __str__(self):
if self.conf.is_ssl:
scheme = "https"
else:
scheme = "http"
addr = self.sock.getsockname()
return "%s://%s:%d" % (scheme, addr[0], addr[1])
def set_options(self, sock, bound=False):
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
return super(TCPSocket, self).set_options(sock, bound=bound)
class TCP6Socket(TCPSocket):
FAMILY = socket.AF_INET6
def __str__(self):
(host, port, fl, sc) = self.sock.getsockname()
return "http://[%s]:%d" % (host, port)
class UnixSocket(BaseSocket):
FAMILY = socket.AF_UNIX
def __init__(self, address, log, fd=None):
if fd is None:
try:
os.remove(address)
except OSError:
pass
super(UnixSocket, self).__init__(addr, log, fd=fd)
def __str__(self):
return "unix:%s" % self.cfg_addr
def bind(self, sock):
old_umask = os.umask(self.conf.umask)
sock.bind(self.cfg_addr)
util.chown(self.cfg_addr, self.conf.uid, self.conf.gid)
os.umask(old_umask)
def close(self):
super(UnixSocket, self).close()
os.unlink(self.cfg_addr)
def _sock_type(addr):
if isinstance(addr, tuple):
if util.is_ipv6(addr[0]):
sock_type = TCP6Socket
else:
sock_type = TCPSocket
elif isinstance(addr, string_types):
sock_type = UnixSocket
else:
raise TypeError("Unable to create socket from: %r" % addr)
return sock_type
def create_sockets(conf, log):
"""
Create a new socket for the given address. If the
address is a tuple, a TCP socket is created. If it
is a string, a Unix socket is created. Otherwise
a TypeError is raised.
"""
# get it only once
laddr = conf.address
listeners = []
# check ssl config early to raise the error on startup
# only the certfile is needed since it can contains the keyfile
if conf.certfile and not os.path.exists(conf.certfile):
raise ValueError('certfile "%s" does not exist' % conf.certfile)
if conf.keyfile and not os.path.exists(conf.keyfile):
raise ValueError('certfile "%s" does not exist' % conf.keyfile)
# sockets are already bound
if 'GUNICORN_FD' in os.environ:
fds = os.environ.pop('GUNICORN_FD').split(',')
for i, fd in enumerate(fds):
fd = int(fd)
addr = laddr[i]
sock_type = _sock_type(addr)
try:
listeners.append(sock_type(addr, conf, log, fd=fd))
except socket.error as e:
if e.args[0] == errno.ENOTCONN:
log.error("GUNICORN_FD should refer to an open socket.")
else:
raise
return listeners
# no sockets is bound, first initialization of gunicorn in this env.
for addr in laddr:
sock_type = _sock_type(addr)
# If we fail to create a socket from GUNICORN_FD
# we fall through and try and open the socket
# normally.
sock = None
for i in range(5):
try:
sock = sock_type(addr, conf, log)
except socket.error as e:
if e.args[0] == errno.EADDRINUSE:
log.error("Connection in use: %s", str(addr))
if e.args[0] == errno.EADDRNOTAVAIL:
log.error("Invalid address: %s", str(addr))
sys.exit(1)
if i < 5:
log.error("Retrying in 1 second.")
time.sleep(1)
else:
break
if sock is None:
log.error("Can't connect to %s", str(addr))
sys.exit(1)
listeners.append(sock)
return listeners
|
Python
| 0.000001
|
@@ -2067,16 +2067,22 @@
address,
+ conf,
log, fd
@@ -2259,16 +2259,22 @@
__(addr,
+ conf,
log, fd
|
c89a42d4acf1e343616f537b827d471007cfc627
|
fix doc typo
|
client/python/thegame/entity.py
|
client/python/thegame/entity.py
|
import collections
from thegame.abilities import Ability
Vector = collections.namedtuple('Vector', ('x', 'y'))
Vector.__doc__ = '''
A 2D vector.
Used to represent a point and velocity in thegame
'''
class _EntityAttribute:
def __init__(self, doc=None):
self.__doc__ = doc
def __set_name__(self, klass, name):
self.name = name
def __get__(self, instance, klass=None):
if instance is None:
return self
return getattr(instance.data.entity, self.name)
def __set__(self, obj, value):
raise AttributeError(f'read-only attribute {self.name!r}')
class _DataAttribute:
def __init__(self, doc=None):
self.__doc__ = doc
def __set_name__(self, klass, name):
self.name = name
def __get__(self, instance, klass=None):
if instance is None:
return self
return getattr(instance.data, self.name)
def __set__(self, obj, value):
raise AttributeError(f'read-only attribute {self.name!r}')
class Entity:
def __init__(self, data):
self.data = data
def __repr__(self):
return (
f'<{self.__class__.__name__}#{self.id} '
f'BD={self.body_damage} '
f'HP={self.health}/{self.max_health} '
f'@({self.position.x:.0f},{self.position.y:.0f})>'
)
id = _EntityAttribute()
@property
def position(self):
'''
The velocity of the entity in a 2-tuple (x, y).
'''
p = self.data.entity.position
return Vector(p.x, p.y)
@property
def velocity(self):
'''
The velocity of the entity in a 2-tuple (x, y).
'''
v = self.data.entity.velocity
return Vector(v.x, v.y)
radius = _EntityAttribute('The radius of the entity')
health = _EntityAttribute(
'''
The health of the entity in a non-negative integer.
When a entity's health is less than or equal to zero it dies.
And the one dealing the killing blow is rewarded with
``rewarding_experience``.
'''
)
body_damage = _EntityAttribute(
'''
The body damage of the entity.
When two entities collide, they reduce each other's health
with their body damage.
'''
)
rewarding_experience = _EntityAttribute(
'''
How much experience you will get if you kill this entity.
'''
)
max_health = _EntityAttribute(
'''
The maximum health of this entity.
'''
)
class Polygon(Entity):
'''
The netural polygons.
'''
@property
def edges(self):
return self.data.edges
class Bullet(Entity):
'''
The bullet. Shot from a Hero.
'''
@property
def owner_id(self):
'''
The id of the hero owning the bullet
'''
return self.data.owner
HeroAbility = collections.namedtuple(
'HeroAbility',
['level', 'value']
)
HeroAbilityList = collections.namedtuple(
'HeroAbilityList',
[ab.as_camel for ab in Ability]
)
class _HeroAbilityShortcut:
def __init__(self, ability):
self.ability = ability
self.__doc__ = \
f'shortcut to ``hero.abilities.{ability.as_camel}.value``'
def __get__(self, instance, klass=None):
if instance is None:
return self
return instance.abilities[self.ability].value
def __set__(self, obj, value):
raise AttributeError(f'read-only attribute {self.name!r}')
class _HeroAbilityLevelShortcut:
def __init__(self, ability):
self.ability = ability
self.__doc__ = \
f'shortcut to ``hero.abilities.{ability.as_camel}.level``'
def __get__(self, instance, klass=None):
if instance is None:
return self
return instance.abilities[self.ability].level
def __set__(self, obj, value):
raise AttributeError(f'read-only attribute {self.name!r}')
class _HeroMeta(type):
@classmethod
def __prepare__(mcs, name, bases, **kwds):
return {
**{
ab.as_camel: _HeroAbilityShortcut(ab)
for ab in Ability
},
**{
ab.as_camel + '_level': _HeroAbilityLevelShortcut(ab)
for ab in Ability
}
}
class Hero(Entity, metaclass=_HeroMeta):
'''
A Hero is a player in thegame.
'''
def __init__(self, data):
super().__init__(data)
# we're doing this so it will not be modified accidently
# maybe not a good way, though.
self.__dict__['abilities'] = HeroAbilityList(
*[HeroAbility(*x) for x in zip(
self.data.ability_levels, self.data.ability_values)]
)
@property
def abilities(self):
'''
returns a tuple of abilities.
Example::
hero.abilities[MaxHealth].value # get the hero's max health
hero.abilities.max_health.value # the same thing
hero.abilities[MaxHealth].level # get the ability level
hero.abilities.max_health.value # the same thing again
'''
return self.__dict__['abilities']
orientation = _DataAttribute(
'''
The orientation of the hero; the direction the barrel is faction at,
in radians.
'''
)
level = _DataAttribute('The level of the hero')
score = _DataAttribute('The score of the hero')
experience = _DataAttribute('The experience the hero has')
experience_to_level_up = _DataAttribute(
'The experience required for the hero to level up')
skill_points = _DataAttribute(
'Number of skill points available to level up abilities'
)
cooldown = _DataAttribute(
'''
How many ticks until the a is ready.
Increase the *reload* ability to reduce the cooldown.
``shoot`` and ``shoot_at`` can still be called when on cooldown, but
nothing will happen instead.
'''
)
health_regen_cooldown = _DataAttribute(
'''
How many ticks until the hero can start to regenerate health
'''
)
name = _DataAttribute(
'''
The name of the hero. Not guranteed to be unique
'''
)
|
Python
| 0.000004
|
@@ -5840,16 +5840,23 @@
l the a
+bullet
is ready
|
0dd21b0f13aa7bf4cc3061dca216c65cf73975e5
|
Make registration reports filterable and harmonize URL
|
kcdc3/apps/classes/urls.py
|
kcdc3/apps/classes/urls.py
|
from django.conf.urls import patterns, include, url
from models import Event, Registration
from views import EventListView, EventDetailView, ResponseTemplateView, EventArchiveView, SessionView, RegistrationListView, TeacherAdminListView, FilteredTeacherAdminListView
urlpatterns = patterns('kcdc3.apps.classes.views',
url(r'^$', EventListView.as_view()),
url(r'^staff/teachers/$', TeacherAdminListView.as_view()),
url(r'^staff/teachers/session/(?P<slug>[A-Za-z0-9_-]+)$', FilteredTeacherAdminListView.as_view()),
url(r'^dashboard/registrations/(?P<slug>[A-Za-z0-9_-]+)$', RegistrationListView.as_view()),
url(r'^(?P<slug>[0-9_-]+)/$', EventArchiveView.as_view()),
url(r'^(?P<slug>[0-9_-]+)/background/$', SessionView.as_view()),
url(r'^response/(?P<slug>[A-Za-z0-9_-]+)$', ResponseTemplateView.as_view()),
url(r'^(?P<slug>[A-Za-z0-9_-]+)/$', EventDetailView.as_view(model=Event,)),
url(r'^(?P<slug>[A-Za-z0-9_-]+)/register$', 'register'),
url(r'^(?P<slug>[A-Za-z0-9_-]+)/cancel$', 'cancel'),
url(r'^(?P<slug>[A-Za-z0-9_-]+)/facilitator$', 'facilitator'),
)
|
Python
| 0
|
@@ -462,24 +462,25 @@
Za-z0-9_-%5D+)
+/
$', Filtered
@@ -524,17 +524,13 @@
(r'%5E
-dashboard
+staff
/reg
@@ -540,16 +540,24 @@
rations/
+session/
(?P%3Cslug
@@ -568,24 +568,25 @@
Za-z0-9_-%5D+)
+/
$', Registra
|
80afbf3b5be1716553b93ee6ba57404d40e43a94
|
Remove multiple workers
|
gunicorn_conf.py
|
gunicorn_conf.py
|
accesslog = '-'
access_log_format = '%({Host}i)s %(h)s %(l)s "%({X-Remote-User-Id}o)s: %({X-Remote-User-Name}o)s" %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s'
workers = 3
|
Python
| 0.003335
|
@@ -153,16 +153,4 @@
)s'%0A
-workers = 3%0A
|
67346a13eb40d605da498b0bdba25ca661f08dd1
|
Remove unused imports
|
geotrek/feedback/templatetags/feedback_tags.py
|
geotrek/feedback/templatetags/feedback_tags.py
|
import json
from geotrek.feedback.models import PredefinedEmail, ReportStatus
from mapentity.models import LogEntry
from django import template
from django.conf import settings
register = template.Library()
@register.simple_tag
def suricate_management_enabled():
return settings.SURICATE_MANAGEMENT_ENABLED
@register.simple_tag
def suricate_workflow_enabled():
return settings.SURICATE_WORKFLOW_ENABLED
@register.simple_tag
def enable_report_colors_per_status():
return settings.ENABLE_REPORT_COLORS_PER_STATUS
@register.simple_tag
def status_ids_and_colors():
status_ids_and_colors = {
status.pk: {
"id": str(status.identifier),
"color": str(status.color)
}
for status in ReportStatus.objects.all()
}
return json.dumps(status_ids_and_colors)
@register.simple_tag
def predefined_emails():
predefined_emails = {
email.pk: {
"label": str(email.label),
"text": str(email.text)
}
for email in PredefinedEmail.objects.all()
}
return json.dumps(predefined_emails)
@register.simple_tag
def resolved_intervention_info(report):
if report:
username = "'?'"
intervention = report.report_interventions().first()
authors = intervention.authors
if authors:
user = authors.last() # oldest author is the one that created the intervention
if user.profile and user.profile.extended_username:
username = user.profile.extended_username
else:
username = user.username
resolved_intervention_info = {
"date": report.interventions.first().date.strftime("%d/%m/%Y") if report.interventions else None,
"username": username
}
return json.dumps(resolved_intervention_info)
return json.dumps({})
|
Python
| 0.000001
|
@@ -75,46 +75,8 @@
tus%0A
-from mapentity.models import LogEntry%0A
from
|
f4b7426103d2b484501a3bfdff3ebe976216b882
|
Make a nice description of the module. (../port-add-product_multi_company_7.0-bis-jge/ rev 213.5.7)
|
product_price_history/__openerp__.py
|
product_price_history/__openerp__.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright 2013 Camptocamp SA
# Author: Joel Grand-Guillaume
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
{
"name" : "Product Price History",
"version" : "1.2",
"author" : "Camptocamp",
"category" : "Generic Modules/Inventory Control",
"depends" : [ "product","purchase"],
"description": """
Product Price History
This module allow you to :
* record various prices of a same product for different companies. This way, every company
can have his own cost (average or standard) and sale price.
* historize the prices in a way that you'll then be able to retrieve the cost (or sale) price
at a given date.
Note that to benefit those values in stock report (or any other view that is based on SQL),
you'll have to adapt it to include this new historized table. Especially true for stock
valuation.
This module also contain demo data and various tests to ensure it work well. It show
how to configure OpenERP properly when you have various company, each of them having
their product setup in average price and using different currency. The goal is to share
the products between all company, keeping the right price for each of them.
Technically, this module updates the definition of field standard_price, list_price
of the product and will make them stored in an external table. We override the read,
write and create methods to achieve that and don't used ir.property for performance
and historization purpose.
You may want to also use the module analytic_multicurrency from bzr branch lp:account-analytic
in order to have a proper computation in analytic line as well (standard_price will be converted
in company currency with this module when computing cost of analytic line).
""",
'demo': [
'demo/product_price_history_purchase_demo.yml',
],
'data': [
'product_price_history_view.xml',
'wizard/historic_prices_view.xml',
'security/ir.model.access.csv',
'security/product_price_history_security.xml',
],
'test': [
'test/price_controlling_multicompany.yml',
'test/avg_price_computation_mutlicompanies_multicurrencies.yml',
'test/price_historization.yml',
],
'installable': True,
'active': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Python
| 0.000001
|
@@ -1190,25 +1190,16 @@
n%22: %22%22%22%0A
-%0A
Product
@@ -1212,25 +1212,39 @@
History%0A
-%0A
+=====================%0A%0A
This mod
@@ -1267,20 +1267,11 @@
:%0A%0A
-
*
-r
+R
ecor
@@ -1354,26 +1354,16 @@
company
-%0A
can have
@@ -1419,21 +1419,11 @@
e. %0A
-%0A
*
-h
+H
isto
@@ -1513,18 +1513,8 @@
ice
-%0A
at a
@@ -1527,24 +1527,16 @@
date.%0A%0A
-
Note tha
@@ -1619,24 +1619,16 @@
n SQL),%0A
-
you'll h
@@ -1707,24 +1707,16 @@
r stock%0A
-
valuatio
@@ -1719,24 +1719,16 @@
ation.%0A%0A
-
This mod
@@ -1805,24 +1805,16 @@
t show %0A
-
how to c
@@ -1891,24 +1891,16 @@
having %0A
-
their pr
@@ -1979,24 +1979,16 @@
o share%0A
-
the prod
@@ -2056,24 +2056,16 @@
them.%0A%0A
-
Technica
@@ -2141,24 +2141,16 @@
_price %0A
-
of the p
@@ -2227,24 +2227,16 @@
read, %0A
-
write an
@@ -2311,24 +2311,16 @@
ormance%0A
-
and hist
@@ -2340,24 +2340,16 @@
pose. %0A%0A
-
You may
@@ -2436,24 +2436,16 @@
nalytic%0A
-
in order
@@ -2533,24 +2533,16 @@
nverted%0A
-
in compa
@@ -2613,14 +2613,8 @@
e).%0A
-%0A%0A
%22%22%22,
|
2a0e114569a9828347593e8943ff061c1172f0d5
|
make encode and decode configurable
|
pyramid_redis_sessions/__init__.py
|
pyramid_redis_sessions/__init__.py
|
import os
import cPickle
import binascii
from redis import Redis
from functools import partial
from pyramid.compat import text_
from zope.interface import implementer
from .session import RedisSession
from .connection import get_default_connection
from .util import (
get_unique_session_id,
refresh,
)
from pyramid.interfaces import ISession
from pyramid.session import (
signed_serialize,
signed_deserialize,
)
def includeme(config):  # pragma no cover
    """Support ``config.include('pyramid_redis_sessions')``.

    When a ``custom_connect`` setting is present, it is resolved from its
    dotted python path into the actual callable before the session factory
    is created and registered.
    """
    settings = config.registry.settings
    if 'custom_connect' in settings:
        dotted_path = settings['custom_connect']
        settings['custom_connect'] = config.maybe_dotted(dotted_path)
    factory = session_factory_from_settings(settings)
    config.set_session_factory(factory)
def session_factory_from_settings(settings):  # pragma no cover
    """Build a Pyramid session factory from Paste-style Redis settings."""
    from .util import _parse_settings
    parsed = _parse_settings(settings)
    return RedisSessionFactory(**parsed)
def RedisSessionFactory(
    secret,
    timeout=1200,
    period=300,
    cookie_name='session',
    cookie_max_age=None,
    cookie_path='/',
    cookie_domain=None,
    cookie_secure=False,
    cookie_httponly=False,
    cookie_on_exception=True,
    host='localhost',
    port=6379,
    db=0,
    password=None,
    socket_timeout=None,
    connection_pool=None,
    charset='utf-8',
    errors='strict',
    unix_socket_path=None,
    custom_connect=None,
    url=None,
    ):
    """
    Configure a :term:`session factory` which will provide session data from
    a Redis server.

    The return value of this function is a :term:`session factory`, which may
    be provided as the ``session_factory`` argument of a
    :class:`pyramid.config.Configurator` constructor, or used as the
    ``session_factory`` argument of the
    :meth:`pyramid.config.Configurator.set_session_factory` method.

    Parameters:

    ``secret``
    A string which is used to sign the cookie.

    ``timeout``
    A number of seconds of inactivity before a session times out.

    ``period``
    Granularity of inactivity checking in seconds (should be lower
    than timeout).

    ``cookie_name``
    The name of the cookie used for sessioning. Default: ``session``.

    ``cookie_max_age``
    The maximum age of the cookie used for sessioning (in seconds).
    Default: ``None`` (browser scope).

    ``cookie_path``
    The path used for the session cookie. Default: ``/``.

    ``cookie_domain``
    The domain used for the session cookie. Default: ``None`` (no domain).

    ``cookie_secure``
    The 'secure' flag of the session cookie. Default: ``False``.

    ``cookie_httponly``
    The 'httpOnly' flag of the session cookie. Default: ``False``.

    ``cookie_on_exception``
    If ``True``, set a session cookie even if an exception occurs
    while rendering a view. Default: ``True``.

    ``host``
    A string representing the IP of your Redis server. Default: ``localhost``.

    ``port``
    An integer representing the port of your Redis server. Default: ``6379``.

    ``db``
    An integer to select a specific database on your Redis server.
    Default: ``0``

    ``password``
    A string password to connect to your Redis server/database if
    required. Default: ``None``

    ``custom_connect``
    Optional callable ``(request, **redis_options)`` used in place of the
    default connection helper. Default: ``None``

    ``url``
    Optional Redis connection URL forwarded to the default connection
    helper. Default: ``None``

    The following arguments are passed straight to the redis-py Redis instance
    and allow you to further configure the Redis client:
    ``socket_timeout``
    ``connection_pool``
    ``charset``
    ``errors``
    ``unix_socket_path``
    """
    def factory(request, new_session_id=get_unique_session_id):
        redis_options = dict(
            host=host,
            port=port,
            db=db,
            password=password,
            socket_timeout=socket_timeout,
            connection_pool=connection_pool,
            charset=charset,
            errors=errors,
            unix_socket_path=unix_socket_path,
            )
        # an explicit custom connection handler gets priority over the default
        if custom_connect is not None:
            redis = custom_connect(request, **redis_options)
        else:
            redis = get_default_connection(request, url=url, **redis_options)
        session_id = None
        cookieval = request.cookies.get(cookie_name)
        if cookieval is not None:
            try:
                session_id = signed_deserialize(cookieval, secret)
            except ValueError:
                # bad or tampered signature: treat as no session cookie at all
                pass

        def add_cookie(session_key):
            def set_cookie_callback(request, response):
                """
                The set cookie callback will first check to see if we're in an
                exception. If we're in an exception and ``cookie_on_exception``
                is False, we return immediately before setting the cookie.

                For all other cases the cookie will be set normally.
                """
                exc = getattr(request, 'exception', None)
                if exc is not None and cookie_on_exception == False:
                    return
                cookieval = signed_serialize(session_key, secret)
                response.set_cookie(
                    cookie_name,
                    value=cookieval,
                    max_age=cookie_max_age,
                    domain=cookie_domain,
                    secure=cookie_secure,
                    httponly=cookie_httponly,
                    )
            request.add_response_callback(set_cookie_callback)
            return

        def delete_cookie():
            def set_cookie_callback(request, response):
                response.delete_cookie(cookie_name)
            request.add_response_callback(set_cookie_callback)
            return

        if session_id is None:
            # no (valid) cookie: mint a fresh session id and schedule the cookie
            session_id = new_session_id(redis, timeout)
            add_cookie(session_id)

        # attempt to find the session by session_id
        session_check = redis.get(session_id)

        # case: found session associated with session_id
        if session_check is not None:
            session = RedisSession(redis, session_id, timeout, delete_cookie)

        # case: session id obtained from cookie is not in Redis; begin anew
        else:
            new_id = new_session_id(redis, timeout)
            add_cookie(new_id)
            session = RedisSession(redis, new_id, timeout, delete_cookie)
            session._rs_new = True
        return session

    return factory
|
Python
| 0.000001
|
@@ -774,24 +774,25 @@
ry.settings%0A
+%0A
# specia
@@ -806,80 +806,129 @@
for
-a custom_connect function (a dotted python path)%0A if 'custom_connect'
+dotted python paths to configurable callables%0A for option in ('custom_connect', 'encode', 'decode'):%0A if option
in
@@ -945,30 +945,36 @@
-custom_connect
+ settings%5Boption%5D
= confi
@@ -1001,78 +1001,17 @@
ngs%5B
-'custom_connect'%5D)%0A settings%5B'custom_connect'%5D = custom_connect
+option%5D)%0A
%0A
@@ -1664,16 +1664,68 @@
n=True,%0A
+ encode=cPickle.dumps,%0A decode=cPickle.loads,%0A
host
|
ee2b2cdfe973ccc56a908047bde16c9fa82a5a5f
|
Improve efficiency of show repo sync date
|
kitchen/dashboard/views.py
|
kitchen/dashboard/views.py
|
"""Dashboard app views"""
import os
import time
from datetime import datetime
from django.contrib.messages import add_message, ERROR, INFO, WARNING
from django.shortcuts import render_to_response
from django.template import RequestContext
from logbook import Logger
from kitchen.dashboard.chef import (get_nodes_extended, get_roles,
get_role_groups, get_environments,
filter_nodes, group_nodes_by_host,
RepoError)
from kitchen.dashboard import graphs
from kitchen.settings import REPO, SHOW_VIRT_VIEW, SHOW_HOST_NAMES, STATIC_ROOT
log = Logger(__name__)
def _get_data(env, roles, virt):
    """Return processed repository data, filtering nodes by env/roles/virt."""
    data = {
        'filter_env': env,
        'filter_roles': roles,
        'filter_virt': virt,
    }
    nodes = get_nodes_extended()
    data['roles'] = get_roles()
    data['roles_groups'] = get_role_groups(data['roles'])
    data['virt_roles'] = ['host', 'guest']
    # Environments are collected before filtering so all remain selectable
    data['environments'] = get_environments(nodes)
    if env or roles or virt:
        nodes = filter_nodes(nodes, env, roles, virt)
    data['nodes'] = nodes
    return data
def _show_repo_sync_date(request):
    """Add a flash message with the repository's last sync time.

    The sync time is the mtime of a 'syncdate' marker file in STATIC_ROOT:
    INFO when recent, WARNING when older than 2.5 sync periods, ERROR when
    the marker file is missing.
    """
    date_file = os.path.join(STATIC_ROOT, 'syncdate')
    try:
        # Single stat() instead of exists()+getmtime(): one filesystem
        # lookup and no race if the file disappears in between.
        sync_date = os.stat(date_file).st_mtime
    except OSError:
        add_message(
            request, ERROR,
            "Could not find a record of the last synchronization time!"
        )
        return
    sync_str = "Last synchronization time on {0}".format(
        datetime.fromtimestamp(sync_date).strftime("%d. %B %Y - %H:%M"))
    # NOTE(review): the message says "thrice" but the threshold is 2.5x the
    # sync period — confirm which value is intended.
    if (time.time() - sync_date) > REPO['SYNC_SCHEDULE'] * 2.5:
        add_message(request, WARNING,
                    sync_str + " (more than thrice the sync period)")
    else:
        add_message(request, INFO, sync_str)
def _set_options(options):
    """Return *options* unchanged, or build the default option string."""
    if options is not None:
        return options
    defaults = ''
    if SHOW_HOST_NAMES:
        defaults += 'show_hostnames,'
    return defaults
def main(request):
    """Default main view showing a list of nodes."""
    env = request.GET.get('env', REPO['DEFAULT_ENV'])
    roles = request.GET.get('roles', '')
    virt = request.GET.get('virt', REPO['DEFAULT_VIRT'])
    context = {}
    try:
        context = _get_data(env, roles, virt)
        _show_repo_sync_date(request)
    except RepoError as e:
        add_message(request, ERROR, str(e))
    if not context.get('nodes'):
        add_message(request, WARNING,
                    "There are no nodes that fit the supplied criteria.")
    context['show_virt'] = SHOW_VIRT_VIEW
    context['query_string'] = request.META['QUERY_STRING']
    return render_to_response('main.html', context,
                              context_instance=RequestContext(request))
def virt(request):
    """View where the nodes are grouped by their physical host."""
    env = request.GET.get('env', REPO['DEFAULT_ENV'])
    roles = request.GET.get('roles', '')
    virt_filter = request.GET.get('virt', None)
    context = {}
    try:
        context = _get_data(env, roles, virt_filter)
    except RepoError as e:
        add_message(request, ERROR, str(e))
    else:
        # only regroup when the repo data was fetched successfully
        context['nodes'] = group_nodes_by_host(context['nodes'])
    if not context.get('nodes'):
        add_message(request, WARNING,
                    "There are no nodes that fit the supplied criteria.")
    context['query_string'] = request.META['QUERY_STRING']
    return render_to_response('virt.html', context,
                              context_instance=RequestContext(request))
def graph(request):
    """Graph view where users can visualize graphs of their nodes
    generated using Graphviz open source graph visualization library
    """
    options = _set_options(request.GET.get('options'))
    data = {}
    env_filter = request.GET.get('env', REPO['DEFAULT_ENV'])
    if env_filter:
        try:
            # only guest nodes are drawn; hosts appear as groupings
            data = _get_data(env_filter, request.GET.get('roles', ''), 'guest')
        except RepoError as e:
            add_message(request, ERROR, str(e))
        else:
            # generate_node_map writes the image to disk; only the error
            # message is surfaced to the user here
            success, msg = graphs.generate_node_map(data['nodes'],
                                                    data.get('roles', []),
                                                    'show_hostnames' in options)
            if not success:
                add_message(request, ERROR, msg)
    else:
        add_message(request, WARNING, "Please select an environment")
    data['show_hostnames'] = 'show_hostnames' in options
    data['query_string'] = request.META['QUERY_STRING']
    return render_to_response('graph.html',
                              data, context_instance=RequestContext(request))
|
Python
| 0.000001
|
@@ -1511,20 +1511,41 @@
-date_file =
+try:%0A sync_date = os.stat(
os.p
@@ -1581,90 +1581,18 @@
te')
-%0A if os.path.exists(date_file):%0A sync_date = os.path.getmtime(date_file)
+).st_mtime
%0A
@@ -1975,27 +1975,37 @@
c_str)%0A e
-lse
+xcept OSError
:%0A ad
|
eb712d30a6231b416e33d02a125daddf5322d51e
|
Add API docs for the Exscript.util.syslog module.
|
src/Exscript/util/syslog.py
|
src/Exscript/util/syslog.py
|
import imp, socket
# This way of loading a module prevents Python from looking in the
# current directory. (We need to avoid it due to the syslog module
# name collision.)
syslog = imp.load_module('syslog', *imp.find_module('syslog'))
def netlog(message,
           source   = None,
           host     = 'localhost',
           port     = 514,
           priority = syslog.LOG_DEBUG,
           facility = syslog.LOG_USER):
    """
    Python's built in syslog module does not support networking, so
    this is the alternative.
    The source argument specifies the message source that is
    documented on the receiving server. It defaults to "scriptname[pid]",
    where "scriptname" is sys.argv[0], and pid is the current process id.
    The priority and facility arguments are equivalent to those of
    Python's built in syslog module.

    @type  source: str
    @param source: The source address.
    @type  host: str
    @param host: The IP address or hostname of the receiving server.
    @type  port: str
    @param port: The TCP port number of the receiving server.
    @type  priority: int
    @param priority: The message priority.
    @type  facility: int
    @param facility: The message facility.
    """
    # Local imports: sys and os were never imported at module level, which
    # made the default-source branch raise NameError.
    import os
    import sys
    if not source:
        # Bug fix: this used '+' between a format string and a tuple, which
        # raises TypeError; the '%' operator was intended.
        source = '%s[%s]' % (sys.argv[0], os.getpid())
    data = '<%d>%s: %s' % (priority + facility, source, message)
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        sock.sendto(data, (host, port))
    finally:
        # close the socket even if the send fails
        sock.close()
|
Python
| 0
|
@@ -1,15 +1,742 @@
+# Copyright (C) 2007-2010 Samuel Abels.%0A#%0A# This program is free software; you can redistribute it and/or modify%0A# it under the terms of the GNU General Public License version 2, as%0A# published by the Free Software Foundation.%0A#%0A# This program is distributed in the hope that it will be useful,%0A# but WITHOUT ANY WARRANTY; without even the implied warranty of%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the%0A# GNU General Public License for more details.%0A#%0A# You should have received a copy of the GNU General Public License%0A# along with this program; if not, write to the Free Software%0A# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA%0A%22%22%22%0ASend messages to a syslog server.%0A%22%22%22%0Aimport imp%0A
import
- imp,
soc
@@ -2197,9 +2197,8 @@
close()%0A
-%0A
|
e05243983cb9167303a19e85a3c88f74da8e2612
|
Convert ipLocation function name to all lowercase
|
bot/slack/commands/ip_info.py
|
bot/slack/commands/ip_info.py
|
import netaddr
import os
from mozdef_util.geo_ip import GeoIP
def is_ip(ip):
    """Return True when *ip* parses as an IP address or network."""
    try:
        netaddr.IPNetwork(ip)
    except Exception:
        return False
    return True
def ipLocation(ip):
    """Return a human readable location for *ip* from the local GeoLite2
    database: the country name, plus '/<metro_code>' for US addresses.
    Returns the lookup's error string on a failed lookup, and an empty
    string when the database is unavailable or raises.
    """
    location = ""
    try:
        geoip_data_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "../../../data/GeoLite2-City.mmdb")
        geoip = GeoIP(geoip_data_dir)
        geoDict = geoip.lookup_ip(ip)
        if geoDict is not None:
            if 'error' in geoDict:
                return geoDict['error']
            location = geoDict['country_name']
            # Bug fix: ('US') is just the string 'US', so the old membership
            # test was a substring check ('U' in 'US' is True).  A one-element
            # tuple makes it an exact country-code comparison.
            if geoDict['country_code'] in ('US',):
                if geoDict['metro_code']:
                    location = location + '/{0}'.format(geoDict['metro_code'])
    except Exception:
        location = ""
    return location
class command():
    """Slack bot command: geoip lookup for one or more IP addresses."""

    def __init__(self):
        # command keyword and help string surfaced by the bot framework
        self.command_name = '!ipinfo'
        self.help_text = 'Perform a geoip lookup on an ip address'

    def handle_command(self, parameters):
        """Build a response line per parameter: geo location for public IPs,
        a notice for loopback/private/reserved ones, an error otherwise."""
        response = ""
        for ip_token in parameters:
            if is_ip(ip_token):
                ip = netaddr.IPNetwork(ip_token)[0]
                if (not ip.is_loopback() and not ip.is_private() and not ip.is_reserved()):
                    response += "{0} location: {1}\n".format(ip_token, ipLocation(ip_token))
                else:
                    response += "{0}: hrm...loopback? private ip?\n".format(ip_token)
            else:
                # NOTE(review): '=' discards any lines accumulated so far,
                # unlike the '+=' used above — confirm this is intended.
                response = "{0} is not an IP address".format(ip_token)
        return response
|
Python
| 1
|
@@ -183,17 +183,18 @@
%0A%0Adef ip
-L
+_l
ocation(
@@ -1306,17 +1306,18 @@
oken, ip
-L
+_l
ocation(
|
7f72d1d8f8472daf03afb9a942d22eaae98d98a8
|
add unit test
|
bottleneck/tests/move_test.py
|
bottleneck/tests/move_test.py
|
"Test moving window functions."
import warnings
from nose.tools import assert_true
import numpy as np
from numpy.testing import (assert_equal, assert_array_equal,
assert_array_almost_equal)
nan = np.nan
import bottleneck as bn
DTYPES = [np.float64, np.float32, np.int64, np.int32, np.float16]
def arrays(dtypes=DTYPES, nans=True):
"Iterator that yield arrays to use for unit testing."
ss = {}
ss[1] = {'size': 4, 'shapes': [(4,)]}
ss[2] = {'size': 6, 'shapes': [(1, 6), (2, 3)]}
ss[3] = {'size': 6, 'shapes': [(1, 2, 3)]}
ss[4] = {'size': 24, 'shapes': [(1, 2, 3, 4)]}
for ndim in ss:
size = ss[ndim]['size']
shapes = ss[ndim]['shapes']
for dtype in dtypes:
a = np.arange(size, dtype=dtype)
for shape in shapes:
a = a.reshape(shape)
yield a
yield -a
if issubclass(a.dtype.type, np.inexact):
if nans:
for i in range(a.size):
a.flat[i] = np.nan
yield a
yield -a
def unit_maker(func, func0, decimal=np.inf, nans=True):
    "Test that bn.xxx gives the same output as a reference function."
    # Template for the failure message; filled in per combination below.
    msg = ('\nfunc %s | window %d | min_count %s | input %s (%s) | shape %s | '
           'axis %s\n')
    msg += '\nInput array:\n%s\n'
    for i, arr in enumerate(arrays(nans=nans)):
        for axis in range(-arr.ndim, arr.ndim):
            # every window size valid for this axis; at least window=1
            windows = range(1, arr.shape[axis])
            if len(windows) == 0:
                windows = [1]
            for window in windows:
                # all min_counts no larger than the window, plus None
                min_counts = [w for w in windows if w <= window]
                min_counts.append(None)
                for min_count in min_counts:
                    # silence NaN-comparison warnings from both implementations
                    with np.errstate(invalid='ignore'):
                        with warnings.catch_warnings():
                            warnings.simplefilter("ignore")
                            # move_median takes no min_count argument
                            if func.__name__ == 'move_median':
                                actual = func(arr, window, axis=axis)
                            else:
                                actual = func(arr, window, min_count,
                                              axis=axis)
                        with warnings.catch_warnings():
                            warnings.simplefilter("ignore")
                            if func.__name__ == 'move_median':
                                desired = func0(arr, window, axis=axis)
                            else:
                                desired = func0(arr, window, min_count,
                                                axis=axis)
                        tup = (func.__name__, window, str(min_count), 'a'+str(i),
                               str(arr.dtype), str(arr.shape), str(axis), arr)
                        err_msg = msg % tup
                        # exact comparison unless a decimal tolerance was given
                        if (decimal < np.inf) and (np.isfinite(arr).sum() > 0):
                            assert_array_almost_equal(actual, desired, decimal,
                                                      err_msg)
                        else:
                            assert_array_equal(actual, desired, err_msg)
                        # output dtypes must agree as well
                        err_msg += '\n dtype mismatch %s %s'
                        if hasattr(actual, 'dtype') or hasattr(desired, 'dtype'):
                            da = actual.dtype
                            dd = desired.dtype
                            assert_equal(da, dd, err_msg % (da, dd))
def test_move_sum():
    """Check move_sum against its slow reference implementation."""
    yield (unit_maker, bn.move_sum, bn.slow.move_sum, 5)
def test_move_mean():
    """Check move_mean against its slow reference implementation."""
    yield (unit_maker, bn.move_mean, bn.slow.move_mean, 5)
def test_move_std():
    """Check move_std against its slow reference implementation."""
    yield (unit_maker, bn.move_std, bn.slow.move_std, 5)
def test_move_min():
    """Check move_min against its slow reference implementation."""
    yield (unit_maker, bn.move_min, bn.slow.move_min, 5)
def test_move_max():
    """Check move_max against its slow reference implementation."""
    yield (unit_maker, bn.move_max, bn.slow.move_max, 5)
def test_move_median():
    """Check move_median against its slow reference (no NaN inputs)."""
    yield (unit_maker, bn.move_median, bn.slow.move_median, 5, False)
# ----------------------------------------------------------------------------
# Regression test for square roots of negative numbers
def test_move_std_sqrt():
    """Regression test: move_std must not take the sqrt of a negative
    number for near-constant inputs, in 1, 2 and 3 dimensions."""
    a = [0.0011448196318903589,
         0.00028718669878572767,
         0.00028718669878572767,
         0.00028718669878572767,
         0.00028718669878572767]
    err_msg = "Square root of negative number. ndim = %d"

    out = bn.move_std(a, window=3)
    assert_true(np.isfinite(out[2:]).all(), err_msg % 1)

    a2 = np.array([a, a])
    out = bn.move_std(a2, window=3, axis=1)
    assert_true(np.isfinite(out[:, 2:]).all(), err_msg % 2)

    a3 = np.array([[a, a], [a, a]])
    out = bn.move_std(a3, window=3, axis=2)
    assert_true(np.isfinite(out[:, :, 2:]).all(), err_msg % 3)
|
Python
| 0.000001
|
@@ -1132,16 +1132,91 @@
ield -a%0A
+ # check that move_std is robust%0A yield np.array(%5B1., 2., 3.%5D) + 1e9%0A
%0A%0Adef un
@@ -4032,16 +4032,17 @@
max, 5%0A%0A
+%0A
def test
|
859d5cd5ac60785f64a87353ae8f9170f5e29100
|
Make uri absolute, add get_release_data api
|
folivora/utils/pypi.py
|
folivora/utils/pypi.py
|
#-*- coding: utf-8 -*-
"""
folivora.utils.pypi
~~~~~~~~~~~~~~~~~~~
Utilities to access pypi compatible servers.
"""
import time
import xmlrpclib
def get_seconds(hours):
    """Return the epoch timestamp (int) of now minus *hours* hours."""
    seconds_ago = 3600 * hours
    return int(time.time() - seconds_ago)
XML_RPC_SERVER = 'http://pypi.python.org/pypi'


class CheeseShop(object):
    """Thin wrapper around a PyPI-compatible XML-RPC API."""

    def __init__(self, server=XML_RPC_SERVER):
        self.xmlrpc = xmlrpclib.Server(server)

    def get_package_versions(self, package_name):
        """Return the list of available versions for *package_name*."""
        return self.xmlrpc.package_releases(package_name)

    def get_package_list(self):
        """Return the master list of package names."""
        return self.xmlrpc.list_packages()

    def search(self, spec, operator):
        """Query the index with a search spec; *operator* is lowercased."""
        op = operator.lower()
        return self.xmlrpc.search(spec, op)

    def get_changelog(self, hours):
        """Return changelog entries from the last *hours* hours."""
        since = get_seconds(hours)
        return self.xmlrpc.changelog(since)

    def get_updated_releases(self, hours):
        """Return releases updated within the last *hours* hours."""
        since = get_seconds(hours)
        return self.xmlrpc.updated_releases(since)

    def get_release_urls(self, package_name, version):
        """Return all release urls of *package_name* at *version*."""
        return self.xmlrpc.release_urls(package_name, version)
|
Python
| 0
|
@@ -294,23 +294,23 @@
ours)%0A%0A%0A
-XML_RPC
+DEFAULT
_SERVER
@@ -339,16 +339,17 @@
org/pypi
+/
'%0A%0A%0Aclas
@@ -405,15 +405,15 @@
ver=
-XML_RPC
+DEFAULT
_SER
@@ -1726,8 +1726,453 @@
ersion)%0A
+%0A def get_release_data(self, package_name, version=None):%0A %22%22%22Query for specific release data.%0A%0A :param package_name: Name of the package.%0A :param version: Version to query the data. If %60None%60, it's latest%0A version will be used.%0A %22%22%22%0A if version is None:%0A version = self.get_package_versions(package_name)%5B-1%5D%0A return self.xmlrpc.release_data(package_name, version)%0A
|
38a974f39fdc976a9567da8582d77fa9aa00df82
|
Use correct bounds
|
foolbox/attacks/ddn.py
|
foolbox/attacks/ddn.py
|
from typing import Union, Tuple, Optional, Any
import math
import eagerpy as ep
from ..models import Model
from ..criteria import Misclassification, TargetedMisclassification
from ..distances import l2
from ..devutils import atleast_kd, flatten
from .base import MinimizationAttack
from .base import get_criterion
from .base import T
from .base import raise_if_kwargs
def normalize_gradient_l2_norms(grad: ep.Tensor) -> ep.Tensor:
    """Scale each sample's gradient to unit L2 norm.

    Samples whose gradient vanishes are first replaced with random normal
    noise so that every sample has a usable direction.
    """
    sample_norms = ep.norms.l2(flatten(grad), -1)
    vanished = atleast_kd(sample_norms == 0, grad.ndim)
    grad = ep.where(vanished, ep.normal(grad, shape=grad.shape), grad)
    # recompute: the replaced rows now have nonzero norms
    sample_norms = ep.norms.l2(flatten(grad), -1)
    sample_norms = ep.maximum(sample_norms, 1e-12)  # avoid division by zero
    scale = atleast_kd(1 / sample_norms, grad.ndim)
    return grad * scale
class DDNAttack(MinimizationAttack):
    """The Decoupled Direction and Norm L2 adversarial attack from [1]_.

    Args:
        init_epsilon: Initial value for the norm/epsilon ball.
        steps: Number of steps for the optimization.
        gamma: Factor by which the norm will be modified: new_norm = norm * (1 + or - gamma).

    References
    ----------
    .. [1] Jérôme Rony, Luiz G. Hafemann, Luiz S. Oliveira, Ismail Ben Ayed,
        Robert Sabourin, Eric Granger, "Decoupling Direction and Norm for
        Efficient Gradient-Based L2 Adversarial Attacks and Defenses",
        https://arxiv.org/abs/1811.09600
    """

    distance = l2

    def __init__(
        self, *, init_epsilon: float = 1.0, steps: int = 10, gamma: float = 0.05,
    ):
        self.init_epsilon = init_epsilon
        self.steps = steps
        self.gamma = gamma

    def run(
        self,
        model: Model,
        inputs: T,
        criterion: Union[Misclassification, TargetedMisclassification, T],
        *,
        early_stop: Optional[float] = None,
        **kwargs: Any,
    ) -> T:
        """Run the attack and return the best (smallest-L2) adversarials found.

        Raises ValueError for unsupported criteria or mis-shaped labels.
        """
        raise_if_kwargs(kwargs)
        x, restore_type = ep.astensor_(inputs)
        criterion_ = get_criterion(criterion)
        del inputs, criterion, kwargs

        N = len(x)

        if isinstance(criterion_, Misclassification):
            targeted = False
            classes = criterion_.labels
        elif isinstance(criterion_, TargetedMisclassification):
            targeted = True
            classes = criterion_.target_classes
        else:
            raise ValueError("unsupported criterion")

        if classes.shape != (N,):
            name = "target_classes" if targeted else "labels"
            raise ValueError(
                f"expected {name} to have shape ({N},), got {classes.shape}"
            )

        stepsize = 1.0
        # Bug fix: use the model's actual input bounds rather than assuming
        # the [0, 1] domain when computing the worst-case perturbation norm.
        min_, max_ = model.bounds

        def loss_fn(
            inputs: ep.Tensor, labels: ep.Tensor
        ) -> Tuple[ep.Tensor, ep.Tensor]:
            logits = model(inputs)
            # maximize loss for untargeted, minimize for targeted attacks
            sign = -1.0 if targeted else 1.0
            loss = sign * ep.crossentropy(logits, labels).sum()
            return loss, logits

        grad_and_logits = ep.value_and_grad_fn(x, loss_fn, has_aux=True)

        delta = ep.zeros_like(x)
        epsilon = self.init_epsilon * ep.ones(x, len(x))
        # largest possible per-sample L2 perturbation within the valid domain
        worst_norm = ep.norms.l2(flatten(ep.maximum(x - min_, max_ - x)), -1)

        best_l2 = worst_norm
        best_delta = delta
        adv_found = ep.zeros(x, len(x)).bool()

        for i in range(self.steps):
            # perform cosine annealing of LR starting from 1.0 to 0.01
            stepsize = (
                0.01 + (stepsize - 0.01) * (1 + math.cos(math.pi * i / self.steps)) / 2
            )

            x_adv = x + delta

            _, logits, gradients = grad_and_logits(x_adv, classes)
            gradients = normalize_gradient_l2_norms(gradients)
            is_adversarial = criterion_(x_adv, logits)

            l2 = ep.norms.l2(flatten(delta), axis=-1)
            is_smaller = l2 <= best_l2

            # keep the smallest adversarial perturbation seen so far
            is_both = ep.logical_and(is_adversarial, is_smaller)
            adv_found = ep.logical_or(adv_found, is_adversarial)
            best_l2 = ep.where(is_both, l2, best_l2)

            best_delta = ep.where(atleast_kd(is_both, x.ndim), delta, best_delta)

            # do step
            delta = delta + stepsize * gradients

            # shrink epsilon for adversarial samples, grow it otherwise
            epsilon = epsilon * ep.where(
                is_adversarial, 1.0 - self.gamma, 1.0 + self.gamma
            )
            epsilon = ep.minimum(epsilon, worst_norm)

            # project to epsilon ball
            delta *= atleast_kd(epsilon / ep.norms.l2(flatten(delta), -1), x.ndim)

            # clip to valid bounds
            delta = ep.clip(x + delta, *model.bounds) - x

        x_adv = x + best_delta

        return restore_type(x_adv)
|
Python
| 0.000093
|
@@ -2729,16 +2729,50 @@
ze = 1.0
+%0A min_, max_ = model.bounds
%0A%0A
@@ -3274,19 +3274,29 @@
aximum(x
-, 1
+ - min_, max_
- x)),
|
30b991e78158f8dee25a34565493b1ca582d51c5
|
Simplify attribute check (menu items)
|
cmsplugin_zinnia/cms_toolbar.py
|
cmsplugin_zinnia/cms_toolbar.py
|
"""Toolbar extensions for CMS"""
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from cms.toolbar_base import CMSToolbar
from cms.toolbar_pool import toolbar_pool
class ZinniaToolbar(CMSToolbar):
    """Adds a 'Zinnia' menu to the CMS toolbar with add/list entries for
    entries, categories and tags; items the user lacks permission for are
    disabled, and the whole menu is removed if everything is disabled."""

    def populate(self):
        user = self.request.user
        zinnia_menu = self.toolbar.get_or_create_menu(
            'zinnia-menu', _('Zinnia'))

        # (label, admin url name, required permission); None marks a break
        item_specs = [
            (_('New entry'), 'admin:zinnia_entry_add',
             'zinnia.add_entry'),
            (_('New category'), 'admin:zinnia_category_add',
             'zinnia.add_category'),
            None,
            (_('Entries list'), 'admin:zinnia_entry_changelist',
             'zinnia.change_entry'),
            (_('Categories list'), 'admin:zinnia_category_changelist',
             'zinnia.change_category'),
            (_('Tags list'), 'admin:tagging_tag_changelist',
             'tagging.change_tag'),
        ]
        for spec in item_specs:
            if spec is None:
                zinnia_menu.add_break()
                continue
            label, url_name, permission = spec
            zinnia_menu.add_sideframe_item(
                label, url=reverse(url_name),
                disabled=not user.has_perm(permission))

        # remove complete menu if all items are disabled
        for item in zinnia_menu.get_items():
            if hasattr(item, 'disabled') and not item.disabled:
                return
        self.toolbar.remove_item(zinnia_menu)

toolbar_pool.register(ZinniaToolbar)
|
Python
| 0
|
@@ -1531,19 +1531,23 @@
if
-has
+not get
attr(ite
@@ -1563,31 +1563,15 @@
led'
-) and not item.disabled
+, True)
:%0A
|
b3f926e013e81bb88e6634d453b31c5c30aac997
|
Add constant to distance scoring functions
|
cocoscore/ml/distance_scores.py
|
cocoscore/ml/distance_scores.py
|
from math import exp
def _distance_scorer(data_df, score_function):
distance_column = 'distance'
if distance_column not in data_df.columns:
raise ValueError(f'The given data_df does not have a {distance_column} column.')
distances = data_df.loc[:, distance_column]
return distances.apply(score_function)
def reciprocal_distance(data_df, *_):
    """
    Compute reciprocal distance scores for a DataFrame of co-mentions.

    The reciprocal distance score is 1/x where x is the distance of the
    closest matches of an entity pair of interest; equivalent to a
    polynomial decay with exponent 1.

    :param data_df: pandas DataFrame, the data set loaded using
    tools.data_tools.load_data_frame(..., match_distance=True)
    :returns a pandas Series of distance scores
    """
    return polynomial_decay_distance(data_df, 1)
def constant_distance(data_df, *_):
    """
    Return a constant distance score of 1 for every co-mention row.

    :param data_df: pandas DataFrame, the data set loaded using
    tools.data_tools.load_data_frame(..., match_distance=True)
    :returns a pandas Series of distance scores
    """
    def one(_distance):
        return 1.0
    return _distance_scorer(data_df, score_function=one)
def exponential_decay_distance(data_df, k):
"""
Computes exponentially decaying distance scores for a given DataFrame of co-mentions.
The exponentially decaying distance score is defined as exp(-k*x) where
x is the the distance of the closest matches of an
entity pair of interest and k is a positive constant.
:param data_df: pandas DataFrame, the data set loaded using
tools.data_tools.load_data_frame(..., match_distance=True)
:param k: float, a positive constant
:returns a pandas Series of distance scores
"""
return _distance_scorer(data_df, lambda x: exp(-k * x))
def polynomial_decay_distance(data_df, k):
"""
Computes polynomially decaying distance scores for a given DataFrame of co-mentions.
The polynomially decaying distance score is defined as x^(-k) where
x is the the distance of the closest matches of an
entity pair of interest and k is a positive constant.
:param data_df: pandas DataFrame, the data set loaded using
tools.data_tools.load_data_frame(..., match_distance=True)
:param k: float, a positive constant
:returns a pandas Series of distance scores
"""
return _distance_scorer(data_df, lambda x: x ** (-k))
|
Python
| 0.000348
|
@@ -819,16 +819,19 @@
ta_df, 1
+, 0
)%0A%0A%0Adef
@@ -1237,32 +1237,35 @@
tance(data_df, k
+, c
):%0A %22%22%22%0A C
@@ -1419,16 +1419,20 @@
xp(-k*x)
+ + c
where%0A
@@ -1704,32 +1704,73 @@
sitive constant%0A
+ :param c: float, a positive constant%0A
:returns a p
@@ -1867,16 +1867,20 @@
(-k * x)
+ + c
)%0A%0A%0Adef
@@ -1915,16 +1915,19 @@
ta_df, k
+, c
):%0A %22
@@ -2084,16 +2084,20 @@
s x%5E(-k)
+ + c
where%0A
@@ -2369,32 +2369,73 @@
sitive constant%0A
+ :param c: float, a positive constant%0A
:returns a p
@@ -2530,10 +2530,14 @@
** (-k)
+ + c
)%0A
|
a23a9c4e5cd06ff6239a24e55ca7c4c598d02b27
|
Fix broken Glance cleanup context
|
rally/benchmark/context/cleaner.py
|
rally/benchmark/context/cleaner.py
|
# Copyright 2014: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import sys
from rally.benchmark.context import base
from rally.benchmark import utils
from rally.openstack.common.gettextutils import _
from rally.openstack.common import log as logging
from rally import osclients
from rally import utils as rutils
LOG = logging.getLogger(__name__)
class ResourceCleaner(base.Context):
"""Context class for resource cleanup (both admin and non-admin)."""
__ctx_name__ = "cleanup"
__ctx_order__ = 200
__ctx_hidden__ = True
CONFIG_SCHEMA = {
"type": "array",
"$schema": rutils.JSON_SCHEMA,
"items": {
"type": "string",
"enum": ["nova", "glance", "cinder"]
},
"uniqueItems": True
}
def __init__(self, context):
super(ResourceCleaner, self).__init__(context)
self.admin = []
self.users = []
@rutils.log_task_wrapper(LOG.info, _("Cleanup users resources."))
def _cleanup_users_resources(self):
for user in self.users:
clients = osclients.Clients(user)
cleanup_methods = {
"nova": functools.partial(utils.delete_nova_resources,
clients.nova()),
"glance": functools.partial(utils.delete_glance_resources,
clients.glance(),
clients.keystone()),
"cinder": functools.partial(utils.delete_cinder_resources,
clients.cinder())
}
for service in self.config:
try:
cleanup_methods[service]()
except Exception as e:
LOG.debug(_("Not all resources were cleaned."),
exc_info=sys.exc_info())
LOG.warning(_('Unable to fully cleanup the cloud: %s') %
(e.message))
@rutils.log_task_wrapper(LOG.info, _("Cleanup admin resources."))
def _cleanup_admin_resources(self):
try:
admin = osclients.Clients(self.admin)
utils.delete_keystone_resources(admin.keystone())
except Exception as e:
LOG.debug(_("Not all resources were cleaned."),
exc_info=sys.exc_info())
LOG.warning(_('Unable to fully cleanup keystone service: %s') %
(e.message))
@rutils.log_task_wrapper(LOG.info, _("Enter context: `cleanup`"))
def setup(self):
if "admin" in self.context and self.context["admin"]:
self.admin = self.context["admin"]["endpoint"]
if "users" in self.context and self.context["users"]:
self.users = [u["endpoint"] for u in self.context["users"]]
@rutils.log_task_wrapper(LOG.info, _("Exit context: `cleanup`"))
def cleanup(self):
if self.users and self.config:
self._cleanup_users_resources()
if self.admin:
self._cleanup_admin_resources()
def cleanup(services):
"""Decorates scenario methods requiring a cleanup of resources.
If a scenario method is not decorated by @cleanup all the resources
(nova, glance and cinder) will be cleaned.
:param services: list of services which will be cleaned.
"""
def wrap(func):
func.cleanup_services = services
return func
return wrap
|
Python
| 0.000064
|
@@ -2034,16 +2034,26 @@
ystone()
+.tenant_id
),%0A
|
e9b0bc34d27bfcf1175ad686450fc8a1af43e8f2
|
Remove now unneeded imports. Reformat.
|
abusehelper/core/runtime.py
|
abusehelper/core/runtime.py
|
import os
import idiokit
from idiokit import timer
from idiokit.xmpp import jid
from abusehelper.core import serialize, config
def iter_runtimes(obj):
for obj in config.flatten(obj):
runtime = getattr(obj, "__runtime__", None)
if callable(runtime):
yield runtime()
continue
# Backwards compatibility
runtime_method = getattr(obj, "runtime", None)
if callable(runtime_method):
for obj in config.flatten(runtime_method()):
yield obj
continue
class Pipeable(object):
def _collect(self):
return self
def __or__(self, other):
if not isinstance(other, Pipeable):
raise TypeError("%r is not pipeable" % other)
return Pipe(self, other)
class PipeError(Exception):
pass
class Pipe(Pipeable):
def __init__(self, *pieces):
self.pieces = pieces
def _collect(self, pieces=None):
for piece in self.pieces:
yield piece._collect()
def __iter__(self):
prev = None
for piece in config.flatten(self._collect()):
if prev is not None:
if isinstance(prev, Room) and isinstance(piece, Session):
piece = piece.updated(src_room=prev.name)
elif isinstance(prev, Session) and isinstance(piece, Room):
yield prev.updated(dst_room=piece.name)
elif isinstance(prev, Room) and isinstance(piece, Room):
yield Session("roomgraph",
src_room=prev.name,
dst_room=piece.name)
elif isinstance(piece, Session):
raise PipeError("a Session instance has to be piped "+
"directly after a Room instance")
prev = piece
if isinstance(prev, Session):
yield prev
class SessionError(Exception):
pass
class Session(Pipeable):
@property
def conf(self):
return dict(self._conf)
def __init__(self, service, *path, **conf):
self.__dict__["service"] = service
self.__dict__["path"] = tuple(path)
for key, value in conf.items():
try:
value = serialize.load(serialize.dump(value))
except serialize.UnregisteredType:
raise SessionError("can not serialize key %r value %r" % (key, value))
conf[key] = value
self.__dict__["_conf"] = config.HashableFrozenDict(conf)
def updated(self, **conf):
new_conf = dict(self._conf)
new_conf.update(conf)
return Session(self.service, *self.path, **new_conf)
def __setitem__(self, key, value):
raise AttributeError("%r instances are immutable" % self.__class__)
def __delitem__(self, key):
raise AttributeError("%r instances are immutable" % self.__class__)
def __hash__(self):
return hash(self.service) ^ hash(self.path) ^ hash(self._conf)
def __eq__(self, other):
if not isinstance(other, Session):
return NotImplemented
if self.service != other.service:
return False
if self.path != self.path:
return False
return self._conf == other._conf
def __ne__(self, other):
result = self.__eq__(other)
return result if result is NotImplemented else not result
def __runtime__(self):
return self
class Room(Pipeable):
def __init__(self, name):
name = unicode(name)
try:
jid.nodeprep(name)
except jid.JIDError:
jid.JID(name)
self.name = name
from abusehelper.core import bot, services, log
class Cancel(Exception):
pass
class RuntimeBot(bot.XMPPBot):
service_room = bot.Param()
@idiokit.stream
def configs(self):
yield idiokit.consume()
@idiokit.stream
def _catch(self, errors):
try:
yield idiokit.consume()
except:
errors.throw()
raise
@idiokit.stream
def _handle_updates(self, lobby, errors):
sessions = dict()
try:
while True:
configs = yield idiokit.next()
added = set(iter_runtimes(config.flatten(configs)))
for key in set(sessions) - added:
stream = sessions.pop(key)
stream.throw(Cancel())
for session in added - set(sessions):
sessions[session] = self.session(lobby, session) | self._catch(errors)
finally:
for stream in sessions.values():
stream.throw(Cancel())
@idiokit.stream
def main(self):
xmpp = yield self.xmpp_connect()
self.log.info("Joining lobby %r", self.service_room)
lobby = yield services.join_lobby(xmpp, self.service_room, self.bot_name)
self.log.addHandler(log.RoomHandler(lobby.room))
errors = idiokit.consume()
yield errors | self.configs() | self._handle_updates(lobby, errors) | lobby
@idiokit.stream
def session(self, lobby, session):
name = session.service
if session.path:
name += "(" + ".".join(session.path) + ")"
while True:
self.log.info("Waiting for %r", name)
try:
stream = yield lobby.session(session.service,
*session.path,
**session.conf)
except Cancel:
self.log.info("Stopped waiting for %r", name)
break
conf_str = ", ".join("%s=%r" % (key.encode("unicode-escape"), value)
for (key, value) in session.conf.items())
self.log.info("Sent %r conf: %s", name, conf_str)
try:
yield stream
except services.Stop:
self.log.info("Lost connection to %r", name)
except Cancel:
self.log.info("Ended connection to %r", name)
break
def run(self):
try:
return bot.XMPPBot.run(self)
except idiokit.Signal:
pass
class DefaultRuntimeBot(RuntimeBot):
config = bot.Param("configuration module")
@idiokit.stream
def configs(self):
follow = config.follow_config(self.config)
while True:
ok, obj = yield follow.next()
if not ok:
self.log.error(obj)
continue
yield idiokit.send(set(obj))
if __name__ == "__main__":
DefaultRuntimeBot.from_command_line().execute()
|
Python
| 0
|
@@ -1,14 +1,4 @@
-import os%0A
impo
@@ -12,34 +12,8 @@
kit%0A
-from idiokit import timer%0A
from
@@ -83,16 +83,37 @@
, config
+, bot, services, log%0A
%0A%0Adef it
@@ -529,16 +529,17 @@
ntinue%0A%0A
+%0A
class Pi
@@ -764,16 +764,17 @@
other)%0A%0A
+%0A
class Pi
@@ -799,24 +799,25 @@
:%0A pass%0A%0A
+%0A
class Pipe(P
@@ -1751,16 +1751,17 @@
piped %22
+
+%0A
@@ -1912,16 +1912,17 @@
d prev%0A%0A
+%0A
class Se
@@ -1950,24 +1950,25 @@
:%0A pass%0A%0A
+%0A
class Sessio
@@ -3467,16 +3467,17 @@
n self%0A%0A
+%0A
class Ro
@@ -3678,56 +3678,8 @@
me%0A%0A
-from abusehelper.core import bot, services, log%0A
%0Acla
@@ -3706,24 +3706,25 @@
:%0A pass%0A%0A
+%0A
class Runtim
@@ -6197,16 +6197,17 @@
pass%0A%0A
+%0A
class De
|
4b8e76747ee164ebce0cd92b0d3752e848ce0734
|
Fix bug with filter_target (#35141)
|
lib/ansible/modules/network/aci/aci_config_snapshot.py
|
lib/ansible/modules/network/aci/aci_config_snapshot.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: aci_config_snapshot
short_description: Manage Config Snapshots on Cisco ACI fabrics (config:Snapshot, config:ExportP)
description:
- Manage Config Snapshots on Cisco ACI fabrics.
- Creating new Snapshots is done using the configExportP class.
- Removing Snapshots is done using the configSnapshot class.
- More information from the internal APIC classes I(config:Snapshot) and I(config:ExportP) at
U(https://developer.cisco.com/docs/apic-mim-ref/).
author:
- Jacob McGill (@jmcgill298)
version_added: '2.4'
notes:
- The APIC does not provide a mechanism for naming the snapshots.
- 'Snapshot files use the following naming structure: ce_<config export policy name>-<yyyy>-<mm>-<dd>T<hh>:<mm>:<ss>.<mss>+<hh>:<mm>.'
- 'Snapshot objects use the following naming structure: run-<yyyy>-<mm>-<dd>T<hh>-<mm>-<ss>.'
options:
description:
description:
- The description for the Config Export Policy.
aliases: [ descr ]
export_policy:
description:
- The name of the Export Policy to use for Config Snapshots.
aliases: [ name ]
format:
description:
- Sets the config backup to be formatted in JSON or XML.
- The APIC defaults new Export Policies to C(json)
choices: [ json, xml ]
default: json
include_secure:
description:
- Determines if secure information should be included in the backup.
- The APIC defaults new Export Policies to C(yes).
choices: [ 'no', 'yes' ]
default: 'yes'
max_count:
description:
- Determines how many snapshots can exist for the Export Policy before the APIC starts to rollover.
- The APIC defaults new Export Policies to C(3).
choices: [ range between 1 and 10 ]
default: 3
snapshot:
description:
- The name of the snapshot to delete.
state:
description:
- Use C(present) or C(absent) for adding or removing.
- Use C(query) for listing an object or multiple objects.
choices: [ absent, present, query ]
default: present
extends_documentation_fragment: aci
'''
EXAMPLES = r'''
- name: Create a Snapshot
aci_config_snapshot:
hostname: apic
username: admin
password: SomeSecretPassword
state: present
export_policy: config_backup
max_count: 10
description: Backups taken before new configs are applied.
- name: Query all Snapshots
aci_config_snapshot:
hostname: apic
username: admin
password: SomeSecretPassword
state: query
- name: Query Snapshots associated with a particular Export Policy
aci_config_snapshot:
hostname: apic
username: admin
password: SomeSecretPassword
state: query
export_policy: config_backup
- name: Delete a Snapshot
aci_config_snapshot:
hostname: apic
username: admin
password: SomeSecretPassword
state: absent
export_policy: config_backup
snapshot: run-2017-08-24T17-20-05
'''
RETURN = r''' # '''
from ansible.module_utils.network.aci.aci import ACIModule, aci_argument_spec
from ansible.module_utils.basic import AnsibleModule
def main():
argument_spec = aci_argument_spec()
argument_spec.update(
description=dict(type='str', aliases=['descr']),
export_policy=dict(type='str', aliases=['name']),
format=dict(type='str', choices=['json', 'xml']),
include_secure=dict(type='str', choices=['no', 'yes']),
max_count=dict(type='int'),
snapshot=dict(type='str'),
state=dict(type='str', choices=['absent', 'present', 'query'], default='present'),
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=False,
required_if=[
['state', 'absent', ['export_policy', 'snapshot']],
['state', 'present', ['export_policy']],
],
)
description = module.params['description']
export_policy = module.params['export_policy']
file_format = module.params['format']
include_secure = module.params['include_secure']
max_count = module.params['max_count']
if max_count is not None:
if max_count in range(1, 11):
max_count = str(max_count)
else:
module.fail_json(msg='The "max_count" must be a number between 1 and 10')
snapshot = module.params['snapshot']
if snapshot is not None and not snapshot.startswith('run-'):
snapshot = 'run-' + snapshot
state = module.params['state']
aci = ACIModule(module)
if state == 'present':
aci.construct_url(
root_class=dict(
aci_class='configExportP',
aci_rn='fabric/configexp-{0}'.format(export_policy),
filter_target='eq(configExportP.name, "{0}")'.format(export_policy),
module_object=export_policy,
),
)
aci.get_existing()
# Filter out module params with null values
aci.payload(
aci_class='configExportP',
class_config=dict(
adminSt='triggered',
descr=description,
format=file_format,
includeSecureFields=include_secure,
maxSnapshotCount=max_count,
name=export_policy,
snapshot='yes',
),
)
aci.get_diff('configExportP')
# Create a new Snapshot
aci.post_config()
else:
# Prefix the proper url to export_policy
if export_policy is not None:
export_policy = 'uni/fabric/configexp-{0}'.format(export_policy)
aci.construct_url(
root_class=dict(
aci_class='configSnapshotCont',
aci_rn='backupst/snapshots-[{0}]'.format(export_policy),
filter_target='(configSnapshotCont.name, "{0}")'.format(export_policy),
module_object=export_policy,
),
subclass_1=dict(
aci_class='configSnapshot',
aci_rn='snapshot-{0}'.format(snapshot),
filter_target='(configSnapshot.name, "{0}")'.format(snapshot),
module_object=snapshot,
),
)
aci.get_existing()
if state == 'absent':
# Build POST request to used to remove Snapshot
aci.payload(
aci_class='configSnapshot',
class_config=dict(
name=snapshot,
retire="yes",
),
)
if aci.result['existing']:
aci.get_diff('configSnapshot')
# Mark Snapshot for Deletion
aci.post_config()
module.exit_json(**aci.result)
if __name__ == "__main__":
main()
|
Python
| 0
|
@@ -6354,32 +6354,34 @@
filter_target='
+eq
(configSnapshot.
|
a668300c2e038b40b2ea6bbc51cb47598f4a5688
|
Use AwesomeVersion for account link service check (#55449)
|
homeassistant/components/cloud/account_link.py
|
homeassistant/components/cloud/account_link.py
|
"""Account linking via the cloud."""
import asyncio
import logging
from typing import Any
import aiohttp
from hass_nabucasa import account_link
from homeassistant.const import MAJOR_VERSION, MINOR_VERSION, PATCH_VERSION
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import config_entry_oauth2_flow, event
from .const import DOMAIN
DATA_SERVICES = "cloud_account_link_services"
CACHE_TIMEOUT = 3600
_LOGGER = logging.getLogger(__name__)
@callback
def async_setup(hass: HomeAssistant):
"""Set up cloud account link."""
config_entry_oauth2_flow.async_add_implementation_provider(
hass, DOMAIN, async_provide_implementation
)
async def async_provide_implementation(hass: HomeAssistant, domain: str):
"""Provide an implementation for a domain."""
services = await _get_services(hass)
for service in services:
if service["service"] == domain and _is_older(service["min_version"]):
return CloudOAuth2Implementation(hass, domain)
return
@callback
def _is_older(version: str) -> bool:
"""Test if a version is older than the current HA version."""
version_parts = version.split(".")
if len(version_parts) != 3:
return False
try:
version_parts = [int(val) for val in version_parts]
except ValueError:
return False
patch_number_str = ""
for char in PATCH_VERSION:
if char.isnumeric():
patch_number_str += char
else:
break
try:
patch_number = int(patch_number_str)
except ValueError:
patch_number = 0
cur_version_parts = [MAJOR_VERSION, MINOR_VERSION, patch_number]
return version_parts <= cur_version_parts
async def _get_services(hass):
"""Get the available services."""
services = hass.data.get(DATA_SERVICES)
if services is not None:
return services
try:
services = await account_link.async_fetch_available_services(hass.data[DOMAIN])
except (aiohttp.ClientError, asyncio.TimeoutError):
return []
hass.data[DATA_SERVICES] = services
@callback
def clear_services(_now):
"""Clear services cache."""
hass.data.pop(DATA_SERVICES, None)
event.async_call_later(hass, CACHE_TIMEOUT, clear_services)
return services
class CloudOAuth2Implementation(config_entry_oauth2_flow.AbstractOAuth2Implementation):
"""Cloud implementation of the OAuth2 flow."""
def __init__(self, hass: HomeAssistant, service: str) -> None:
"""Initialize cloud OAuth2 implementation."""
self.hass = hass
self.service = service
@property
def name(self) -> str:
"""Name of the implementation."""
return "Home Assistant Cloud"
@property
def domain(self) -> str:
"""Domain that is providing the implementation."""
return DOMAIN
async def async_generate_authorize_url(self, flow_id: str) -> str:
"""Generate a url for the user to authorize."""
helper = account_link.AuthorizeAccountHelper(
self.hass.data[DOMAIN], self.service
)
authorize_url = await helper.async_get_authorize_url()
async def await_tokens():
"""Wait for tokens and pass them on when received."""
try:
tokens = await helper.async_get_tokens()
except asyncio.TimeoutError:
_LOGGER.info("Timeout fetching tokens for flow %s", flow_id)
except account_link.AccountLinkException as err:
_LOGGER.info(
"Failed to fetch tokens for flow %s: %s", flow_id, err.code
)
else:
await self.hass.config_entries.flow.async_configure(
flow_id=flow_id, user_input=tokens
)
self.hass.async_create_task(await_tokens())
return authorize_url
async def async_resolve_external_data(self, external_data: Any) -> dict:
"""Resolve external data to tokens."""
# We already passed in tokens
return external_data
async def _async_refresh_token(self, token: dict) -> dict:
"""Refresh a token."""
new_token = await account_link.async_fetch_access_token(
self.hass.data[DOMAIN], self.service, token["refresh_token"]
)
return {**token, **new_token}
|
Python
| 0
|
@@ -99,16 +99,58 @@
aiohttp%0A
+from awesomeversion import AwesomeVersion%0A
from has
@@ -217,43 +217,25 @@
ort
-MAJOR_VERSION, MINOR_VERSION, PATCH
+__version__ as HA
_VER
@@ -494,16 +494,62 @@
ame__)%0A%0A
+CURRENT_VERSION = AwesomeVersion(HA_VERSION)%0A%0A
%0A@callba
@@ -988,26 +988,35 @@
ain and
-_is_older(
+CURRENT_VERSION %3E=
service%5B
@@ -1029,17 +1029,16 @@
ersion%22%5D
-)
:%0A
@@ -1108,707 +1108,8 @@
n%0A%0A%0A
-@callback%0Adef _is_older(version: str) -%3E bool:%0A %22%22%22Test if a version is older than the current HA version.%22%22%22%0A version_parts = version.split(%22.%22)%0A%0A if len(version_parts) != 3:%0A return False%0A%0A try:%0A version_parts = %5Bint(val) for val in version_parts%5D%0A except ValueError:%0A return False%0A%0A patch_number_str = %22%22%0A%0A for char in PATCH_VERSION:%0A if char.isnumeric():%0A patch_number_str += char%0A else:%0A break%0A%0A try:%0A patch_number = int(patch_number_str)%0A except ValueError:%0A patch_number = 0%0A%0A cur_version_parts = %5BMAJOR_VERSION, MINOR_VERSION, patch_number%5D%0A%0A return version_parts %3C= cur_version_parts%0A%0A%0A
asyn
|
33dd1a78a5bfdf0eca593816b15b34b86860c36f
|
install pip to bypass rally installation problem
|
lab/runners/RunnerRally.py
|
lab/runners/RunnerRally.py
|
from lab.runners import Runner
class RunnerRally(Runner):
def sample_config(self):
return {'cloud': 'cloud name', 'task-yaml': 'path to the valid task yaml file'}
def __init__(self, config):
from lab.WithConfig import read_config_from_file
super(RunnerRally, self).__init__(config=config)
self.cloud_name = config['cloud']
self.task_yaml_path = config['task-yaml']
self.task_body = read_config_from_file(yaml_path=self.task_yaml_path, is_as_string=True)
def execute(self, clouds, servers):
cloud = clouds[0]
server = servers[0]
open_rc_path = '{0}.openrc'.format(self.cloud_name)
results_path = 'rally-results.html'
task_path = 'rally-task.yaml'
venv_path = '~/venv_rally'
open_rc_body = cloud.create_open_rc()
server.create_user(new_username='rally')
server.put(string_to_put=open_rc_body, file_name=open_rc_path)
server.put(string_to_put=self.task_body, file_name=task_path)
repo_dir = server.clone_repo(repo_url='https://git.openstack.org/openstack/rally.git')
server.check_or_install_packages(package_names='libffi-devel gmp-devel postgresql-devel wget python-virtualenv')
server.run(command='./install_rally.sh -y -d {0}'.format(venv_path), in_directory=repo_dir)
server.run(command='source {0} && {1}/bin/rally deployment create --fromenv --name {2}'.format(open_rc_path, venv_path, self.cloud_name))
server.run(command='{0}/bin/rally task start {1}'.format(venv_path, task_path))
server.run(command='{0}/bin/rally task report --out {1}'.format(venv_path, results_path))
server.get(remote_path=results_path, local_path=results_path)
self.get_artefacts(server=server)
|
Python
| 0
|
@@ -1236,16 +1236,68 @@
alenv')%0A
+ server.run(command='sudo easy_install pip')%0A
|
2c5d3387f23eaff6a689aad46b7b117f3a54bed1
|
Fix wake_on_lan ping for Linux. (#6480)
|
homeassistant/components/switch/wake_on_lan.py
|
homeassistant/components/switch/wake_on_lan.py
|
"""
Support for wake on lan.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/switch.wake_on_lan/
"""
import logging
import platform
import subprocess as sp
import voluptuous as vol
from homeassistant.components.switch import (SwitchDevice, PLATFORM_SCHEMA)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.script import Script
from homeassistant.const import (CONF_HOST, CONF_NAME)
REQUIREMENTS = ['wakeonlan==0.2.2']
_LOGGER = logging.getLogger(__name__)
CONF_MAC_ADDRESS = 'mac_address'
CONF_OFF_ACTION = 'turn_off'
DEFAULT_NAME = 'Wake on LAN'
DEFAULT_PING_TIMEOUT = 1
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_MAC_ADDRESS): cv.string,
vol.Optional(CONF_HOST): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_OFF_ACTION): cv.SCRIPT_SCHEMA,
})
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Set up a wake on lan switch."""
name = config.get(CONF_NAME)
host = config.get(CONF_HOST)
mac_address = config.get(CONF_MAC_ADDRESS)
off_action = config.get(CONF_OFF_ACTION)
add_devices([WOLSwitch(hass, name, host, mac_address, off_action)])
class WOLSwitch(SwitchDevice):
"""Representation of a wake on lan switch."""
def __init__(self, hass, name, host, mac_address, off_action):
"""Initialize the WOL switch."""
from wakeonlan import wol
self._hass = hass
self._name = name
self._host = host
self._mac_address = mac_address
self._off_script = Script(hass, off_action) if off_action else None
self._state = False
self._wol = wol
self.update()
@property
def should_poll(self):
"""Poll for status regularly."""
return True
@property
def is_on(self):
"""Return true if switch is on."""
return self._state
@property
def name(self):
"""The name of the switch."""
return self._name
def turn_on(self):
"""Turn the device on."""
self._wol.send_magic_packet(self._mac_address)
def turn_off(self):
"""Turn the device off if an off action is present."""
if self._off_script is not None:
self._off_script.run()
def update(self):
"""Check if device is on and update the state."""
if platform.system().lower() == 'windows':
ping_cmd = 'ping -n 1 -w {} {}'.format(
DEFAULT_PING_TIMEOUT * 1000, self._host)
else:
ping_cmd = 'ping -c 1 -W {} {}'.format(
DEFAULT_PING_TIMEOUT, self._host)
status = sp.call(ping_cmd, stdout=sp.DEVNULL)
self._state = not bool(status)
|
Python
| 0
|
@@ -2494,37 +2494,39 @@
d =
+%5B
'ping
- -n 1 -w %7B%7D %7B%7D'.format(%0A
+', '-n', '1', '-w',%0A
@@ -2529,32 +2529,39 @@
+ str(
DEFAULT_PING_TIM
@@ -2571,16 +2571,17 @@
T * 1000
+)
, self._
@@ -2584,17 +2584,17 @@
lf._host
-)
+%5D
%0A
@@ -2627,37 +2627,39 @@
d =
+%5B
'ping
- -c 1 -W %7B%7D %7B%7D'.format(%0A
+', '-c', '1', '-W',%0A
@@ -2670,16 +2670,23 @@
+ str(
DEFAULT_
@@ -2697,16 +2697,17 @@
_TIMEOUT
+)
, self._
@@ -2710,17 +2710,17 @@
lf._host
-)
+%5D
%0A%0A
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.