| commit (string, 40–40) | subject (string, 1–3.25k) | old_file (string, 4–311) | new_file (string, 4–311) | old_contents (string, 0–26.3k) | lang (string, 3 classes) | proba (float64, 0–1) | diff (string, 0–7.82k) |
|---|---|---|---|---|---|---|---|
ef46bd346772156b6efd6e11106164cfbedc8cba
|
Add license text.
|
testfixtures/tests/test_roundcomparison.py
|
testfixtures/tests/test_roundcomparison.py
|
from decimal import Decimal
from testfixtures import RoundComparison as R, compare, ShouldRaise
from unittest import TestCase
class Tests(TestCase):

    def test_equal_yes_rhs(self):
        self.assertTrue(0.123457 == R(0.123456, 5))

    def test_equal_yes_lhs(self):
        self.assertTrue(R(0.123456, 5) == 0.123457)

    def test_equal_no_rhs(self):
        self.assertFalse(0.123453 == R(0.123456, 5))

    def test_equal_no_lhs(self):
        self.assertFalse(R(0.123456, 5) == 0.123453)

    def test_not_equal_yes_rhs(self):
        self.assertFalse(0.123457 != R(0.123456, 5))

    def test_not_equal_yes_lhs(self):
        self.assertFalse(R(0.123456, 5) != 0.123457)

    def test_not_equal_no_rhs(self):
        self.assertTrue(0.123453 != R(0.123456, 5))

    def test_not_equal_no_lhs(self):
        self.assertTrue(R(0.123456, 5) != 0.123453)

    def test_comp_in_sequence_rhs(self):
        self.assertTrue((1, 2, 0.123457) == (1, 2, R(0.123456, 5)))

    def test_comp_in_sequence_lhs(self):
        self.assertTrue((1, 2, R(0.123456, 5)) == (1, 2, 0.123457))

    def test_not_numeric_rhs(self):
        with ShouldRaise(TypeError):
            'abc' == R(0.123456, 5)

    def test_not_numeric_lhs(self):
        with ShouldRaise(TypeError):
            R(0.123456, 5) == 'abc'

    def test_repr(self):
        compare('<R:0.12346>',
                repr(R(0.123456, 5)))

    def test_str(self):
        compare('<R:0.12346>',
                repr(R(0.123456, 5)))

    def test_equal_yes_decimal_to_float_rhs(self):
        self.assertTrue(Decimal("0.123457") == R(0.123456, 5))

    def test_equal_yes_decimal_to_float_lhs(self):
        self.assertTrue(R(0.123456, 5) == Decimal("0.123457"))

    def test_equal_no_decimal_to_float_rhs(self):
        self.assertFalse(Decimal("0.123453") == R(0.123456, 5))

    def test_equal_no_decimal_to_float_lhs(self):
        self.assertFalse(R(0.123456, 5) == Decimal("0.123453"))

    def test_equal_yes_float_to_decimal_rhs(self):
        self.assertTrue(0.123457 == R(Decimal("0.123456"), 5))

    def test_equal_yes_float_to_decimal_lhs(self):
        self.assertTrue(R(Decimal("0.123456"), 5) == 0.123457)

    def test_equal_no_float_to_decimal_rhs(self):
        self.assertFalse(0.123453 == R(Decimal("0.123456"), 5))

    def test_equal_no_float_to_decimal_lhs(self):
        self.assertFalse(R(Decimal("0.123456"), 5) == 0.123453)

    def test_equal_yes_integer_other_rhs(self):
        self.assertTrue(1 == R(1.000001, 5))

    def test_equal_yes_integer_lhs(self):
        self.assertTrue(R(1.000001, 5) == 1)

    def test_equal_no_integer_rhs(self):
        self.assertFalse(1 == R(1.000009, 5))

    def test_equal_no_integer_lhs(self):
        self.assertFalse(R(1.000009, 5) == 1)

    def test_equal_integer_zero_precision(self):
        self.assertTrue(1 == R(1.000001, 0))

    def test_equal_yes_negative_precision(self):
        self.assertTrue(149.123 == R(101.123, -2))

    def test_equal_no_negative_precision(self):
        self.assertFalse(149.123 == R(150.001, -2))
|
Python
| 0
|
@@ -1,12 +1,88 @@
+# Copyright (c) 2014 Simplistix Ltd%0A# See license.txt for license details.%0A%0A
from decimal
@@ -196,16 +196,47 @@
stCase%0A%0A
+from ..compat import PY2, PY3%0A%0A
class Te
|
6b21430fb60848626d1659c2a19086acb4482de9
|
support for reading retrofitted models
|
openquake/parser/vulnerability.py
|
openquake/parser/vulnerability.py
|
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2010-2011, GEM Foundation.
#
# OpenQuake is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License version 3
# only, as published by the Free Software Foundation.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License version 3 for more details
# (a copy is included in the LICENSE file that accompanied this code).
#
# You should have received a copy of the GNU Lesser General Public License
# version 3 along with OpenQuake. If not, see
# <http://www.gnu.org/licenses/lgpl-3.0.txt> for a copy of the LGPLv3 License.
"""
Codec for processing vulnerability curves
from XML files.
A DOM version of the vulnerability model parser,
that takes into account the really small size of this input file.
"""
from lxml import etree
from openquake import kvs
from openquake import shapes
from openquake import producer
from openquake import xml
from openquake.nrml import nrml_schema_file
from openquake.xml import NRML
def _parse_set_attributes(vulnerability_set):
"""Parse and return the attributes for all the
vulnerability functions defined in this set of the NRML file."""
imls = vulnerability_set.find(".//%sIML" % NRML)
vuln_function = {"IMT": imls.attrib["IMT"]}
vuln_function["IML"] = \
[float(x) for x in imls.text.strip().split()]
vuln_function["vulnerabilitySetID"] = \
vulnerability_set.attrib["vulnerabilitySetID"]
vuln_function["assetCategory"] = \
vulnerability_set.attrib["assetCategory"]
vuln_function["lossCategory"] = \
vulnerability_set.attrib["lossCategory"]
return vuln_function
class VulnerabilityModelFile(producer.FileProducer):
"""This class parsers a vulnerability model NRML file.
The class is implemented as a generator. For each vulnerability
function in the parsed instance document it yields a dictionary
with all the data defined for that function.
"""
def __init__(self, path):
producer.FileProducer.__init__(self, path)
nrml_schema = etree.XMLSchema(etree.parse(nrml_schema_file()))
self.vuln_model = etree.parse(self.path).getroot()
if not nrml_schema.validate(self.vuln_model):
raise xml.XMLValidationError(
nrml_schema.error_log.last_error, path)
model_el = self.vuln_model.getchildren()[0]
if model_el.tag != "%svulnerabilityModel" % NRML:
raise xml.XMLMismatchError(
path, 'vulnerabilityModel', str(model_el.tag)[len(NRML):])
def filter(self, region_constraint=None, attribute_constraint=None):
"""Filtering is not needed/supported for the vulnerability model."""
def _parse(self):
"""Parse the vulnerability model."""
for vuln_set in self.vuln_model.findall(
".//%sdiscreteVulnerabilitySet" % NRML):
vuln_function = _parse_set_attributes(vuln_set)
for raw_vuln_function in vuln_set.findall(
".//%sdiscreteVulnerability" % NRML):
loss_ratios = [float(x) for x in
raw_vuln_function.find(
"%slossRatio" % NRML).text.strip().split()]
coefficients_variation = [float(x) for x in
raw_vuln_function.find(
"%scoefficientsVariation" % NRML)
.text.strip().split()]
vuln_function["ID"] = \
raw_vuln_function.attrib["vulnerabilityFunctionID"]
vuln_function["probabilisticDistribution"] = \
raw_vuln_function.attrib["probabilisticDistribution"]
vuln_function["lossRatio"] = loss_ratios
vuln_function["coefficientsVariation"] = coefficients_variation
yield dict(vuln_function)
# TODO (ac): These two functions should be probably moved elsewhere
def load_vulnerability_model(job_id, path):
"""Load and store the vulnerability model defined in the
given NRML file in the underlying kvs system."""
vulnerability_model = {}
parser = VulnerabilityModelFile(path)
for vuln_curve in parser:
vuln_func = shapes.VulnerabilityFunction(vuln_curve['IML'],
vuln_curve['lossRatio'], vuln_curve['coefficientsVariation'])
vulnerability_model[vuln_curve["ID"]] = vuln_func.to_json()
kvs.set_value_json_encoded(kvs.tokens.vuln_key(job_id),
vulnerability_model)
def load_vuln_model_from_kvs(job_id):
"""Load the vulnerability model from kvs for the given job."""
vulnerability_model = kvs.get_value_json_decoded(
kvs.tokens.vuln_key(job_id))
vulnerability_curves = {}
if vulnerability_model is not None:
for k, v in vulnerability_model.items():
vulnerability_curves[k] = shapes.VulnerabilityFunction.from_json(v)
return vulnerability_curves
|
Python
| 0.000004
|
@@ -4228,24 +4228,43 @@
job_id, path
+, retrofitted=False
):%0A %22%22%22Lo
@@ -4741,16 +4741,29 @@
y(job_id
+, retrofitted
),%0A
@@ -4827,16 +4827,35 @@
s(job_id
+, retrofitted=False
):%0A %22
@@ -5009,16 +5009,29 @@
y(job_id
+, retrofitted
))%0A%0A
|
700e0889d3e38e74d2c96fc653657ca16fbb5009
|
lower its max value to 40
|
aot/cards/trumps/gauge.py
|
aot/cards/trumps/gauge.py
|
################################################################################
# Copyright (C) 2016 by Arena of Titans Contributors.
#
# This file is part of Arena of Titans.
#
# Arena of Titans is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Arena of Titans is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Arena of Titans. If not, see <http://www.gnu.org/licenses/>.
################################################################################
from aot.utils.pathfinding import a_star
class Gauge:
    MAX_VALUE = 100

    def __init__(self, board, value=0):
        self._board = board
        self._value = value

    def move(self, from_, to, card=None):
        if card is not None and card.is_knight:
            is_knight = True
        else:
            is_knight = False
        if from_ is not None and to is not None:
            if is_knight:
                self._value += 1
            else:
                # The list returned by a_star always contain the 1st and last square. Which means
                # it over-evaluate the distance by 1.
                distance = len(a_star(from_, to, self._board)) - 1
                if distance > 0:
                    self._value += distance
            if self.value > self.MAX_VALUE:
                self._value = self.MAX_VALUE

    def can_play_trump(self, trump):
        # We are dealing with a SimpleTrump. play_trump must be called with a trump.
        if hasattr(trump, 'cost'):
            cost = trump.cost
        else:
            cost = trump.args['cost']
        if self.value >= cost:
            return True
        else:
            return False

    def play_trump(self, trump):
        self._value -= trump.cost

    @property
    def value(self):
        return self._value
|
Python
| 0.999835
|
@@ -993,10 +993,9 @@
E =
-10
+4
0%0A%0A
|
826d9c3efefb2aeeaf656b592a5d4046a735520f
|
Fix console output
|
sslyze/cli/console_output.py
|
sslyze/cli/console_output.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
from sslyze.cli import CompletedServerScan
from sslyze.cli import FailedServerScan
from sslyze.cli.output_generator import OutputGenerator
from sslyze.server_connectivity import ClientAuthenticationServerConfigurationEnum
from sslyze.server_connectivity import ServerConnectivityInfo
from typing import Text
class ConsoleOutputGenerator(OutputGenerator):
TITLE_FORMAT = ' {title}\n {underline}\n'
SERVER_OK_FORMAT = ' {host}:{port:<25} => {network_route} {client_auth_msg}'
SERVER_INVALID_FORMAT = ' {server_string:<35} => WARNING: {error_msg}; discarding corresponding tasks.'
SCAN_FORMAT = 'Scan Results For {0}:{1} - {2}'
@classmethod
def _format_title(cls, title):
# type: (Text) -> Text
return cls.TITLE_FORMAT.format(title=title.upper(), underline='-' * len(title))
def command_line_parsed(self, available_plugins, args_command_list):
self._file_to.write('\n\n\n' + self._format_title('Available plugins'))
self._file_to.write('\n')
for plugin in available_plugins:
self._file_to.write(' {}\n'.format(plugin.__name__))
self._file_to.write('\n\n\n')
self._file_to.write(self._format_title('Checking host(s) availability'))
self._file_to.write('\n')
def server_connectivity_test_failed(self, failed_scan):
# type: (FailedServerScan) -> None
self._file_to.write(self.SERVER_INVALID_FORMAT.format(server_string=failed_scan.server_string,
error_msg=failed_scan.error_message))
def server_connectivity_test_succeeded(self, server_connectivity_info):
# type: (ServerConnectivityInfo) -> None
client_auth_msg = ''
client_auth_requirement = server_connectivity_info.client_auth_requirement
if client_auth_requirement == ClientAuthenticationServerConfigurationEnum.REQUIRED:
client_auth_msg = ' WARNING: Server REQUIRED client authentication, specific plugins will fail.'
elif client_auth_requirement == ClientAuthenticationServerConfigurationEnum.OPTIONAL:
client_auth_msg = ' WARNING: Server requested optional client authentication'
network_route = server_connectivity_info.ip_address
if server_connectivity_info.http_tunneling_settings:
# We do not know the server's IP address if going through a proxy
network_route = 'Proxy at {}:{}'.format(server_connectivity_info.http_tunneling_settings.hostname,
server_connectivity_info.http_tunneling_settings.port)
self._file_to.write(self.SERVER_OK_FORMAT.format(host=server_connectivity_info.hostname,
port=server_connectivity_info.port,
network_route=network_route,
client_auth_msg=client_auth_msg))
def scans_started(self):
self._file_to.write('\n\n\n\n')
def server_scan_completed(self, server_scan):
# type: (CompletedServerScan) -> None
target_result_str = ''
for plugin_result in server_scan.plugin_result_list:
# Print the result of each separate command
target_result_str += '\n'
for line in plugin_result.as_text():
target_result_str += line + '\n'
network_route = server_scan.server_info.ip_address
if server_scan.server_info.http_tunneling_settings:
# We do not know the server's IP address if going through a proxy
network_route = 'Proxy at {}:{}'.format(server_scan.server_info.http_tunneling_settings.hostname,
server_scan.server_info.http_tunneling_settings.port)
scan_txt = self.SCAN_FORMAT.format(server_scan.server_info.hostname, str(server_scan.server_info.port),
network_route)
self._file_to.write(self._format_title(scan_txt) + target_result_str + '\n\n')
def scans_completed(self, total_scan_time):
# type: (float) -> None
self._file_to.write(self._format_title('Scan Completed in {0:.2f} s'.format(total_scan_time)))
|
Python
| 0.000142
|
@@ -583,16 +583,18 @@
uth_msg%7D
+%5Cn
'%0A SE
@@ -695,16 +695,18 @@
g tasks.
+%5Cn
'%0A%0A S
|
6a8f7b3ddf6c43565efeda5d21de714808e98785
|
Add sample yaml data
|
hubblestack_nova/modules/netstat.py
|
hubblestack_nova/modules/netstat.py
|
# -*- encoding: utf-8 -*-
'''
Hubble Nova plugin for FreeBSD pkgng audit
:maintainer: HubbleStack
:maturity: 20160623
:platform: Unix
:requires: SaltStack
'''
from __future__ import absolute_import
import copy
import logging
import salt.utils
log = logging.getLogger(__name__)
def __virtual__():
    if 'network.netstat' in __salt__:
        return True
    return False, 'No network.netstat function found'


def audit(data_list, tags, verbose=False):
    '''
    Run the network.netstat command
    '''
    ret = {'Success': [], 'Failure': []}
    __tags__ = {}
    for data in data_list:
        if 'netstat' in data:
            for check, check_args in data['netstat'].iteritems():
                if 'address' in check_args:
                    tag_args = copy.deepcopy(check_args)
                    tag_args['id'] = check
                    __tags__[check_args['address']] = tag_args
    if not __tags__:
        # No yaml data found, don't do any work
        return ret
    for address_data in __salt__['network.netstat']():
        address = address_data['local-address']
        if address in __tags__:
            success_data = {address: __tags__[address]}
            if verbose:
                success_data.update(address_data)
            ret['Success'].append(success_data)
        else:
            failure_data = {address: {'program': address_data['program']}}
            if verbose:
                failure.data.update(address_data)
            ret['Failure'].append(failure_data)
    return ret
|
Python
| 0
|
@@ -148,16 +148,199 @@
altStack
+%0A%0ASample data for the netstat whitelist:%0A%0A.. code-block:: yaml%0A%0A netstat:%0A ssh:%0A address: 0.0.0.0:22%0A another_identifier:%0A address: 127.0.0.1:80
%0A'''%0Afro
|
1f72d0fc0fb8222ca8ffb69c164e4d118e1a9d1d
|
update version
|
meta.py
|
meta.py
|
#!/usr/bin/env python3
# @Time : 17-9-10 01:08
# @Author : Wavky Huang
# @Contact : master@wavky.com
# @File : meta.py
"""
"""
PROJECT_NAME = 'ManHourCalendar'
VERSION = '0.9.1a2'
AUTHOR = 'Wavky Huang'
AUTHOR_EMAIL = 'master@wavky.com'
|
Python
| 0
|
@@ -178,17 +178,17 @@
= '0.9.1
-a
+b
2'%0AAUTHO
|
f98a2f11768db262dcf5113375edc8fdcf7d5304
|
Fix Build Time
|
meta.py
|
meta.py
|
# TODO: Use Celery to properly manage updates, and provide dyanmic updates as everything progresses.
# TODO: Integrate with GitLab Webhooks
# TODO: Integrate with GitLab <-> Heroku
import datetime
import hashlib
import hmac
import json
import logging
import os
import threading
from flask import abort, Blueprint, jsonify, request
from upgrader import upgrade
meta = Blueprint('meta', __name__)
auto_deploy_method = os.getenv('WS_AUTO_DEPLOY')
@meta.route("/")
def statistics():
# TODO: Refactor, and if possible, have dynamic status for inprogress updates.
# TODO: Fix on Heroku
try:
last_update_timestamp = os.path.getctime("tmp/restart.txt")
except OSError:
last_update_timestamp = 0
try:
last_build_timestamp = os.path.getctime("venv")
except OSError:
last_build_timestamp = 0
return jsonify(
last_update_time=datetime.datetime.fromtimestamp(last_update_timestamp).isoformat(),
last_build_time=datetime.datetime.fromtimestamp(last_build_timestamp).isoformat()
)
def queueUpgrade(requirements_required):
threading.Thread(target=upgrade, args=(requirements_required,)).start()
if requirements_required:
return "Upgrade queued, with requirements. Please note this may take several minutes to complete. You can see when the last complete upgrade was at GET /meta."
else:
return "Upgrade queued. You can see when the last complete upgrade was at GET /meta."
def verifyGitHubHook(request):
header_signature = request.headers.get("X-Hub-Signature")
secret = os.getenv('WS_AUTO_DEPLOY_GITHUB_HOOK_SECRET')
if header_signature is None or secret is None:
logging.error("GitHub Hook Secret is not set.")
abort(403)
else:
header_signature = str(header_signature)
secret = str.encode(secret)
sha_name, signature = header_signature.split("=")
mac = hmac.new(secret, msg=request.data, digestmod=hashlib.sha1)
if not hmac.compare_digest(mac.hexdigest(), signature):
logging.error("Bad GitHub Hook Secret Signature.")
abort(403)
else:
if request.is_json:
return request.get_json()
else:
logging.error("Bad GitHub Hook Post Data.")
abort(400)
@meta.route("/github_hook", methods=["POST"])
def incomingGitHubHook():
if auto_deploy_method != "GITHUB_HOOK":
logging.error("GitHub Hook is not set as the automatic deployment method.")
abort(403)
if request.headers.get("X-GitHub-Event") == "ping":
logging.debug("GitHub Hook Ping Event received. Ponging...")
return "pong"
elif request.headers.get("X-GitHub-Event") != "push":
logging.error("Bad GitHub Hook Event received.")
abort(501)
payload = verifyGitHubHook(request)
try:
commits = payload['commits']
requirements_required = False
for commit in commits:
if "requirements.txt" in commit['modified']:
requirements_required = True
break
except KeyError:
logging.error("Bad GitHub Hook Post Data.")
abort(400)
logging.debug("Queueing upgrade...")
return queueUpgrade(requirements_required)
|
Python
| 0
|
@@ -603,38 +603,37 @@
y:%0A last_
-update
+build
_timestamp = os.
@@ -651,23 +651,12 @@
me(%22
-tmp/restart.txt
+venv
%22)%0A
@@ -683,30 +683,29 @@
last_
-update
+build
_timestamp =
@@ -722,37 +722,38 @@
y:%0A last_
-build
+update
_timestamp = os.
@@ -759,36 +759,47 @@
.path.getctime(%22
-venv
+tmp/restart.txt
%22)%0A except OS
@@ -804,32 +804,56 @@
OSError:%0A
+ last_update_timestamp =
last_build_time
@@ -853,28 +853,24 @@
ld_timestamp
- = 0
%0A%0A return
|
52e004e9a14f4cbcd56503ea0f1652cf5e4ed853
|
test untested ipcore interfaces
|
hwtLib/tests/ipCorePackager_test.py
|
hwtLib/tests/ipCorePackager_test.py
|
import shutil
import tempfile
import unittest
from hwt.hdlObjects.types.array import Array
from hwt.hdlObjects.types.struct import HStruct
from hwt.serializer.ip_packager.packager import Packager
from hwtLib.amba.axi4_streamToMem import Axi4streamToMem
from hwtLib.amba.axiLite_comp.endpoint import AxiLiteEndpoint
from hwtLib.amba.axis import AxiStream_withUserAndStrb, AxiStream_withId
from hwtLib.amba.axis_comp.en import AxiS_en
from hwtLib.i2c.masterBitCntrl import I2cMasterBitCtrl
from hwtLib.mem.fifo import Fifo
from hwtLib.types.ctypes import uint64_t
class IpCorePackagerTC(unittest.TestCase):
def setUp(self):
# Create a temporary directory
self.test_dir = tempfile.mkdtemp()
def tearDown(self):
# Remove the directory after the test
shutil.rmtree(self.test_dir)
def test_itispossibleToSerializeIpcores(self):
f = Fifo()
f.DEPTH.set(16)
testUnits = [AxiS_en(AxiStream_withUserAndStrb),
AxiS_en(AxiStream_withId),
AxiLiteEndpoint(HStruct(
(uint64_t, "f0"),
(Array(uint64_t, 10), "arr0")
)),
I2cMasterBitCtrl(),
f,
Axi4streamToMem()
]
for u in testUnits:
p = Packager(u)
p.createPackage(self.test_dir)
if __name__ == "__main__":
suite = unittest.TestSuite()
# suite.addTest(IpCorePackagerTC('test_sWithStartPadding'))
suite.addTest(unittest.makeSuite(IpCorePackagerTC))
runner = unittest.TextTestRunner(verbosity=3)
runner.run(suite)
|
Python
| 0
|
@@ -146,55 +146,227 @@
hwt.
-serializer.ip_packager.packager import Packager
+interfaces.std import BramPort, Handshaked%0Afrom hwt.serializer.ip_packager.interfaces.std import IP_Handshake%0Afrom hwt.serializer.ip_packager.packager import Packager%0Afrom hwt.synthesizer.interfaceLevel.unit import Unit
%0Afro
@@ -728,16 +728,943 @@
int64_t%0A
+from hwtLib.uart.intf import Uart%0Afrom hwt.interfaces.differential import DifferentialSig%0Afrom hwt.code import If, connect%0Afrom hwt.interfaces.utils import addClkRst%0A%0A%0Aclass Handshaked_withIP(Handshaked):%0A def _getSimAgent(self):%0A return IP_Handshake%0A%0Aclass IpCoreIntfTest(Unit):%0A def _declr(self):%0A addClkRst(self)%0A%0A self.ram0 = BramPort()%0A self.ram1 = BramPort()%0A self.uart = Uart()%0A self.hsIn = Handshaked_withIP()%0A self.hsOut = Handshaked_withIP()%0A self.difIn = DifferentialSig()%0A%0A def _impl(self):%0A r0 = self._reg(%22r0%22, defVal=0)%0A self.uart.tx ** self.uart.rx%0A self.ram0 ** self.ram1%0A%0A If(self.hsIn.vld,%0A r0 ** (self.difIn.p & ~self.difIn.n)%0A )%0A If(r0,%0A self.hsOut ** self.hsIn%0A ).Else(%0A connect(r0, self.hsOut.data, fit=True),%0A self.hsOut.vld ** 1%0A )%0A %0A
%0A%0Aclass
@@ -2361,16 +2361,16 @@
f,%0A
-
@@ -2399,16 +2399,55 @@
mToMem()
+,%0A IpCoreIntfTest()
%0A
|
979c1eb0198fbcc4b27894a74abd9c5423c1ff9a
|
Define device type name
|
stoqdrivers/devices/base.py
|
stoqdrivers/devices/base.py
|
# -*- Mode: Python; coding: iso-8859-1 -*-
# vi:si:et:sw=4:sts=4:ts=4
##
## Stoqdrivers
## Copyright (C) 2006 Async Open Source <http://www.async.com.br>
## All rights reserved
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307,
## USA.
##
## Author(s): Henrique Romano <henrique@async.com.br>
##
"""
stoqdrivers/devices/base.py:
Generic base class implementation for all devices.
"""
import gobject
from stoqdrivers.log import Logger
from stoqdrivers.configparser import StoqdriversConfig
from stoqdrivers.exceptions import CriticalError, ConfigError
from stoqdrivers.constants import PRINTER_DEVICE, SCALE_DEVICE
from stoqdrivers.translation import stoqdrivers_gettext
from stoqdrivers.devices.serialbase import SerialPort
_ = lambda msg: stoqdrivers_gettext(msg)
class BaseDevice(Logger):
""" Base class for all device interfaces, responsible for instantiate
the device driver itself based on the brand and model specified or in
the configuration file.
"""
log_domain = "stoqdrivers"
typename_translate_dict = {PRINTER_DEVICE: "Printer",
SCALE_DEVICE: "Scale"}
# Subclasses must define these attributes
device_dirname = None
required_interfaces = None
device_type = None
def __init__(self, brand=None, model=None, device=None, config_file=None,
port=None, consts=None):
Logger.__init__(self)
if not self.device_dirname:
raise ValueError("Subclasses must define the "
"`device_dirname' attribute")
elif self.device_type is None:
raise ValueError("device_type must be defined")
self.brand = brand
self.device = device
self.model = model
self._port = port
self._driver_constants = consts
self._load_configuration(config_file)
def _load_configuration(self, config_file):
section_name = BaseDevice.typename_translate_dict[self.device_type]
if not self.model or not self.brand or (not self.device and not self._port):
self.config = StoqdriversConfig(config_file)
if not self.config.has_section(section_name):
raise ConfigError(_("There is no section named `%s'!")
% section_name)
self.brand = self.config.get_option("brand", section_name)
self.device = self.config.get_option("device", section_name)
self.model = self.config.get_option("model", section_name)
name = "stoqdrivers.devices.%s.%s.%s" % (self.device_dirname,
self.brand, self.model)
try:
module = __import__(name, None, None, 'stoqdevices')
except ImportError, reason:
raise CriticalError("Could not load driver %s %s: %s"
% (self.brand.capitalize(),
self.model.upper(), reason))
class_name = self.model
driver_class = getattr(module, class_name, None)
if not driver_class:
raise CriticalError("Device driver at %s needs a class called %s"
% (name, class_name))
if not self._port:
self._port = SerialPort(self.device)
self._driver = driver_class(self._port, consts=self._driver_constants)
self.debug(("Config data: brand=%s,device=%s,model=%s\n"
% (self.brand, self.device, self.model)))
self.check_interfaces()
def get_model_name(self):
return self._driver.model_name
def check_interfaces(self):
""" This method must be implemented in subclass and must ensure that the
driver implements a valid interface for the current operation state.
"""
raise NotImplementedError
def notify_read(self, func):
""" This function can be called when the callsite must know when data
is coming from the serial port. It is necessary that a gobject main
loop is already running before calling this method.
"""
gobject.io_add_watch(self._driver.fd, gobject.IO_IN,
lambda fd, cond: func(self, cond))
def set_port(self, port):
self._driver.set_port(port)
def get_port(self):
return self._driver.get_port()
def get_driver(self):
""" Get the internal driver, this is normally not needed to be able
to print or use the driver.
@returns: the driver
"""
return self._driver
|
Python
| 0.998514
|
@@ -1259,16 +1259,17 @@
import
+(
PRINTER_
@@ -1288,16 +1288,75 @@
E_DEVICE
+,%0A BARCODE_READER_DEVICE)
%0Afrom st
@@ -1773,16 +1773,25 @@
dict = %7B
+%0A
PRINTER_
@@ -1821,52 +1821,88 @@
- SCALE_DEVICE: %22Scale%22
+SCALE_DEVICE: %22Scale%22,%0A BARCODE_READER_DEVICE: %22Barcode Reader%22,%0A
%7D%0A
|
cfce39c33d416a67f118c0259f601d5306fe6185
|
this is 0.7.1
|
cligj/__init__.py
|
cligj/__init__.py
|
"""cligj
A package of arguments, options, and parsers for the Python GeoJSON
ecosystem.
"""
import sys
from warnings import warn
import click
from .features import normalize_feature_inputs
__version__ = "0.7.1dev"
if sys.version_info < (3, 6):
warn("cligj 1.0.0 will require Python >= 3.6", FutureWarning)
# Multiple input files.
files_in_arg = click.argument(
'files',
nargs=-1,
type=click.Path(resolve_path=True),
required=True,
metavar="INPUTS...")
# Multiple files, last of which is an output file.
files_inout_arg = click.argument(
'files',
nargs=-1,
type=click.Path(resolve_path=True),
required=True,
metavar="INPUTS... OUTPUT")
# Features from files, command line args, or stdin.
# Returns the input data as an iterable of GeoJSON Feature-like
# dictionaries.
features_in_arg = click.argument(
'features',
nargs=-1,
callback=normalize_feature_inputs,
metavar="FEATURES...")
# Options.
verbose_opt = click.option(
'--verbose', '-v',
count=True,
help="Increase verbosity.")
quiet_opt = click.option(
'--quiet', '-q',
count=True,
help="Decrease verbosity.")
# Format driver option.
format_opt = click.option(
'-f', '--format', '--driver', 'driver',
default='GTiff',
help="Output format driver")
# JSON formatting options.
indent_opt = click.option(
'--indent',
type=int,
default=None,
help="Indentation level for JSON output")
compact_opt = click.option(
'--compact/--not-compact',
default=False,
help="Use compact separators (',', ':').")
# Coordinate precision option.
precision_opt = click.option(
'--precision',
type=int,
default=-1,
help="Decimal precision of coordinates.")
# Geographic (default), projected, or Mercator switch.
projection_geographic_opt = click.option(
'--geographic',
'projection',
flag_value='geographic',
default=True,
help="Output in geographic coordinates (the default).")
projection_projected_opt = click.option(
'--projected',
'projection',
flag_value='projected',
help="Output in dataset's own, projected coordinates.")
projection_mercator_opt = click.option(
'--mercator',
'projection',
flag_value='mercator',
help="Output in Web Mercator coordinates.")
# Feature collection or feature sequence switch.
sequence_opt = click.option(
'--sequence/--no-sequence',
default=False,
help="Write a LF-delimited sequence of texts containing individual "
"objects or write a single JSON text containing a feature "
"collection object (the default).",
callback=lambda ctx, param, value: warn(
"Sequences of Features, not FeatureCollections, will be the default in version 1.0.0",
FutureWarning,
)
or value,
)
use_rs_opt = click.option(
'--rs/--no-rs',
'use_rs',
default=False,
help="Use RS (0x1E) as a prefix for individual texts in a sequence "
"as per http://tools.ietf.org/html/draft-ietf-json-text-sequence-13 "
"(default is False).")
def geojson_type_collection_opt(default=False):
"""GeoJSON FeatureCollection output mode"""
return click.option(
'--collection',
'geojson_type',
flag_value='collection',
default=default,
help="Output as GeoJSON feature collection(s).")
def geojson_type_feature_opt(default=False):
"""GeoJSON Feature or Feature sequence output mode"""
return click.option(
'--feature',
'geojson_type',
flag_value='feature',
default=default,
help="Output as GeoJSON feature(s).")
def geojson_type_bbox_opt(default=False):
"""GeoJSON bbox output mode"""
return click.option(
'--bbox',
'geojson_type',
flag_value='bbox',
default=default,
help="Output as GeoJSON bounding box array(s).")
|
Python
| 0.999999
|
@@ -211,11 +211,8 @@
.7.1
-dev
%22%0A%0Ai
|
a1d9247e0d72a468e0fa70793501cd2e7dfec854
|
Update wsgi.py.
|
clintools/wsgi.py
|
clintools/wsgi.py
|
"""
WSGI config for clintools project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "clintools.settings")
application = get_wsgi_application()
|
Python
| 0
|
@@ -1,314 +1,334 @@
-%22%22%22%0AWSGI config for clintools project.%0A%0AIt exposes the WSGI callable as a module-level variable named %60%60application%60%60.%0A%0AFor more information on this file, see%0Ahttps://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/%0A%22%22%22%0A%0Aimport os%0A%0Afrom django.core.wsgi import get_wsgi_application%0A%0Aos.environ.setdefault(%22
+import os%0Aimport sys%0Aimport site%0A%0A# Add the site-packages of the chosen virtualenv to work with%0Asite.addsitedir('/home/washu/.virtualenvs/osler/local/lib/python2.7/site-packages')%0A%0A# Add the app's directory to the PYTHONPATH%0Asys.path.append('/home/washu/clintools')%0Asys.path.append('/home/washu/clintools/clintools')%0A%0Aos.environ%5B'
DJAN
@@ -349,12 +349,14 @@
DULE
-%22, %22
+'%5D = '
clin
@@ -373,26 +373,234 @@
ings
-%22)%0A%0Aapplication =
+'%0A%0A# Activate your virtual env%0Aactivate_env=os.path.expanduser(%22/home/washu/.virtualenvs/osler/bin/activate_this.py%22)%0Aexecfile(activate_env, dict(__file__=activate_env))%0A%0Aimport django.core.wsgi%0Aapplication = django.core.wsgi.
get_
|
fca2e7e1b2e9e3931d077c2ef0dd3e92ca56a8af
|
add missing import
|
misc.py
|
misc.py
|
#pylint: disable=F0401
from helpers import *
from time import time as now
import org.bukkit.inventory.ItemStack as ItemStack
#
# Welcome new players
#
@hook.event("player.PlayerJoinEvent", "monitor")
def onJoin(event):
player = event.getPlayer()
# send welcome broadcast
if not server.getOfflinePlayer(player.getName()).hasPlayedBefore():
broadcast("utils.greet_new", "")
broadcast("utils.greet_new", "&a&lPlease welcome &f" + player.getDisplayName() + " &a<o Redstoner!")
broadcast("utils.greet_new", "")
# clear out some eventual crap before
msg(player, " \n \n \n \n \n \n \n \n \n \n \n \n ")
msg(player, " &4Welcome to the Redstoner Server!")
msg(player, " &6Before you ask us things, take a quick")
msg(player, " &6look at the &a&l/FAQ &6command.")
msg(player, " \n&6thank you and happy playing ;)")
msg(player, " \n \n")
# teleport to spawn when spawning inside portal
loginloc = player.getLocation().getBlock().getType()
headloc = player.getEyeLocation().getBlock().getType()
if str(loginloc) == "PORTAL" or str(headloc) == "PORTAL":
msg(player, "&4Looks like you spawned in a portal... Let me help you out")
msg(player, "&6You can use /back if you &nreally&6 want to go back")
player.teleport(player.getWorld().getSpawnLocation())
#
# /sudo - execute command/chat *as* a player/console
#
@hook.command("sudo")
def onSudoCommand(sender, args):
if sender.hasPermission("utils.sudo"):
plugHeader(sender, "Sudo")
if not checkargs(sender, args, 2, -1):
return True
target = args[0]
cmd = " ".join(args[1:])
msg(sender, "Running '&e%s&r' as &3%s" % (cmd, target))
if cmd[0] == "/":
cmd = cmd[1:]
if target.lower() == "server" or target.lower() == "console":
runas(server.getConsoleSender(), cmd)
elif server.getPlayer(target):
runas(server.getPlayer(target), cmd)
else:
msg(sender, "&cPlayer %s not found!" % target)
else:
if target.lower() == "server" or target.lower() == "console":
runas(server.getConsoleSender(), "say %s" % cmd)
elif server.getPlayer(target):
server.getPlayer(target).chat(cmd)
else:
msg(sender, "&cPlayer %s not found!" % target)
else:
noperm(sender)
return True
#
# Clicking redstone_sheep with shears will drop redstone + wool and makes a moo sound
#
last_shear = 0.0
@hook.event("player.PlayerInteractEntityEvent")
def onPlayerInteractEntity(event):
global last_shear
if not event.isCancelled():
shear_time = now()
if last_shear + 0.4 < shear_time:
last_shear = shear_time
sender = event.getPlayer()
entity = event.getRightClicked()
if isPlayer(entity) and str(entity.getUniqueId()) == "ae795aa8-6327-408e-92ab-25c8a59f3ba1" and str(sender.getItemInHand().getType()) == "SHEARS" and str(sender.getGameMode()) == "CREATIVE":
for i in range(5):
entity.getWorld().dropItemNaturally(entity.getLocation(), ItemStack(bukkit.Material.getMaterial("REDSTONE")))
entity.getWorld().dropItemNaturally(entity.getLocation(), ItemStack(bukkit.Material.getMaterial("WOOL")))
sender.playSound(entity.getLocation(), "mob.cow.say", 1, 1)
#
# /pluginversions - print all plugins + versions; useful when updating plugins
#
@hook.command("pluginversions")
def onPluginversionsCommand(sender, args):
plugHeader(sender, "Plugin versions")
plugins = list(server.getPluginManager().getPlugins())
plugins.sort(key=lambda pl: pl.getDescription().getName())
msg(sender, "&3Listing all " + str(len(plugins)) + " plugins and their version:")
for plugin in plugins:
msg(sender, "&6" + plugin.getDescription().getName() + "&r: &e" + plugin.getDescription().getVersion())
return True
#
# /echo - essentials echo sucks and prints mail alerts sometimes
#
@hook.command("echo")
def onEchoCommand(sender, args):
msg(sender, " ".join(args).replace("\\n", "\n"))
#
# /pyeval - run python ingame
#
# has to be in main.py so we can access the modules
def evalThread(sender, code):
try:
msg(sender, "%s" % unicode(eval(code)), False, "a")
except Exception, e:
msg(sender, "%s: %s" % (e.__class__.__name__, e), False, "c")
thread.exit()
@hook.command("pyeval")
def onPyevalCommand(sender, args):
if sender.hasPermission("utils.pyeval"):
if not checkargs(sender, args, 1, -1):
return True
msg(sender, "%s" % " ".join(args), False, "e")
try:
thread.start_new_thread(evalThread, (sender, " ".join(args)))
except Exception, e:
msg(sender, "&cInternal error: %s" % e)
else:
noperm(sender)
return True
|
Python
| 0
|
@@ -67,16 +67,30 @@
as now%0A
+import thread%0A
import o
|
937aa61393f46167806c1f4913c42e873ea1c435
|
fix misc.lastfile()
|
misc.py
|
misc.py
|
"""miscellaneous definitions"""
from math import ceil
import os.path
def file_name(args, par_type):
    """returns file name format for any time step"""
    return args.name + '_' + par_type + '{:05d}'


def path_fmt(args, par_type):
    """returns full path format for any time step"""
    return os.path.join(args.path, file_name(args, par_type))


def takefield(idx):
    """returns a function returning a field from
    a StagData object"""
    return lambda stagdata: stagdata.fields[idx]


def lastfile(args, begstep):
    """look for the last binary file (research based on
    temperature files)"""
    fmt = path_fmt(args, 't')
    endstep = 99999
    while begstep + 1 < endstep:
        guess = int(ceil((endstep + begstep) / 2))
        if os.path.isfile(fmt.format(guess)):
            begstep = guess
        else:
            endstep = guess
    return begstep
|
Python
| 0.000001
|
@@ -200,16 +200,17 @@
:05d%7D'%0A%0A
+%0A
def path
@@ -656,13 +656,14 @@
p =
-99999
+100000
%0A
|
6fdba909f03090649bee2255770a570114ed117f
|
Fix lint errors
|
manage.py
|
manage.py
|
#!/usr/bin/env python
import os.path as p
from subprocess import call, check_call
from flask.ext.script import Manager
from app import create_app
manager = Manager(create_app)
manager.add_option('-m', '--cfgmode', dest='config_mode', default='Development')
manager.add_option('-f', '--cfgfile', dest='config_file', type=p.abspath)
@manager.command
def checkstage():
    """Checks staged with git pre-commit hook"""
    path = p.join(p.dirname(__file__), 'tests', 'test.sh')
    cmd = "sh %s" % path
    return call(cmd, shell=True)


@manager.option('-F', '--file', help='Lint file', default='')
def lint(file):
    """Check style with flake8"""
    return call("flake8 %s" % file, shell=True)


@manager.option('-w', '--where', help='Requirement file', default='')
def test(where):
    """Run nose tests"""
    return call("nosetests -xvw %s" % where, shell=True)


@manager.option('-r', '--requirement', help='Requirement file', default='test')
def pipme(requirement):
    """Install requirements.txt"""
    call('pippy -r requirements/%s.txt' % requirement, shell=True)


@manager.command
def require():
    """Create requirements.txt"""
    cmd = 'pip freeze -l | grep -vxFf requirements/dev.txt '
    cmd += '| grep -vxFf requirements/prod.txt '
    cmd += '| grep -vxFf requirements/test.txt '
    cmd += '> requirements/common.txt'
    call(cmd, shell=True)


if __name__ == '__main__':
    manager.run()
|
Python
| 0.000396
|
@@ -67,20 +67,8 @@
call
-, check_call
%0Afro
|
4c72fd4af23d78c3b62ebd24cfbe6a18fc098a5e
|
remove $Id$ svn line
|
manage.py
|
manage.py
|
#!/usr/bin/env python3
# $Id: manage.py 11966 2014-10-23 22:59:19Z jrms $
import os
import sys
if __name__ == "__main__":
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tsadm.settings")
    os.environ.setdefault("TSADM_DEV", "true")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
|
Python
| 0.00006
|
@@ -19,59 +19,8 @@
hon3
-%0A# $Id: manage.py 11966 2014-10-23 22:59:19Z jrms $
%0A%0Aim
|
3bf50c7298b7634886d510ef07dfe13dda067247
|
Fix manage.py pep8
|
manage.py
|
manage.py
|
#!/usr/bin/env python
import os
COV = None
if os.environ.get('FLASK_COVERAGE'):
import coverage
COV = coverage.coverage(branch=True, include='app/*')
COV.start()
if os.path.exists('.env'):
print('Importing environment from .env...')
for line in open('.env'):
var = line.strip().split('=')
if len(var) == 2:
os.environ[var[0]] = var[1]
from app import create_app, db
from app.models import User, Role, Note, Tag, Notebook
from flask.ext.script import Manager, Shell
from flask.ext.migrate import Migrate, MigrateCommand
app = create_app(os.getenv('FLASK_CONFIG') or 'default')
manager = Manager(app)
migrate = Migrate(app, db)
def make_shell_context():
return dict(app=app, db=db, User=User, Note=Note, Role=Role, Tag=Tag, Notebook=Notebook)
manager.add_command("shell", Shell(make_context=make_shell_context))
manager.add_command('db', MigrateCommand)
@manager.command
def test(coverage=False):
"""Run the unit tests."""
if coverage and not os.environ.get('FLASK_COVERAGE'):
import sys
os.environ['FLASK_COVERAGE'] = '1'
os.execvp(sys.executable, [sys.executable] + sys.argv)
import unittest
import xmlrunner
tests = unittest.TestLoader().discover('tests')
#unittest.TextTestRunner(verbosity=2).run(tests)
xmlrunner.XMLTestRunner(output='test-reports').run(tests)
if COV:
COV.stop()
COV.save()
print('Coverage Summary:')
COV.report()
basedir = os.path.abspath(os.path.dirname(__file__))
covdir = os.path.join(basedir, 'test-reports/coverage')
COV.html_report(directory=covdir)
print('HTML version: file://%s/index.html' % covdir)
COV.erase()
@manager.command
def profile(length=25, profile_dir=None):
"""Start the application under the code profiler."""
from werkzeug.contrib.profiler import ProfilerMiddleware
app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[length],
profile_dir=profile_dir)
app.run()
@manager.command
def deploy():
"""Run deployment tasks."""
from flask.ext.migrate import upgrade
# migrate database to latest revision
upgrade()
if __name__ == '__main__':
manager.run()
|
Python
| 0
|
@@ -25,16 +25,17 @@
port os%0A
+%0A
COV = No
@@ -715,16 +715,25 @@
rn dict(
+%0A
app=app,
@@ -750,16 +750,24 @@
er=User,
+%0A
Note=No
@@ -785,24 +785,32 @@
le, Tag=Tag,
+%0A
Notebook=No
@@ -817,16 +817,17 @@
tebook)%0A
+%0A
manager.
@@ -842,16 +842,21 @@
and(
+%0A
%22shell%22,
She
@@ -851,16 +851,20 @@
%22shell%22,
+%0A
Shell(m
@@ -1294,16 +1294,17 @@
')%0A #
+
unittest
|
a35289cbcffc3e1597f77dd8dbda7539f01c2856
|
Fix flake8 complaint
|
manage.py
|
manage.py
|
#!/usr/bin/env python
import os
import time
from app import create_app, db
from app.models import (
User,
Role,
Agency,
Permission,
IncidentReport,
EditableHTML
)
from redis import Redis
from rq import Worker, Queue, Connection
from rq_scheduler.scheduler import Scheduler
from rq_scheduler.utils import setup_loghandlers
from flask.ext.script import Manager, Shell
from flask.ext.migrate import Migrate, MigrateCommand
from app.parse_csv import parse_to_db
# Import settings from .env file. Must define FLASK_CONFIG
if os.path.exists('.env'):
print('Importing environment from .env file')
for line in open('.env'):
var = line.strip().split('=')
if len(var) == 2:
os.environ[var[0]] = var[1]
app = create_app(os.getenv('FLASK_CONFIG') or 'default')
manager = Manager(app)
migrate = Migrate(app, db)
def make_shell_context():
return dict(app=app, db=db, User=User, Role=Role)
manager.add_command('shell', Shell(make_context=make_shell_context))
manager.add_command('db', MigrateCommand)
@manager.command
def test():
"""Run the unit tests."""
import unittest
tests = unittest.TestLoader().discover('tests')
unittest.TextTestRunner(verbosity=2).run(tests)
@manager.command
def recreate_db():
"""
Recreates a local database. You probably should not use this on
production.
"""
db.drop_all()
db.create_all()
db.session.commit()
@manager.option('-nu',
'--number-users',
default=10,
type=int,
help='Number of users to create',
dest='number_users')
@manager.option('-nr',
'--number-reports',
default=100,
type=int,
help='Number of reports to create',
dest='number_reports')
def add_fake_data(number_users, number_reports):
"""
Adds fake data to the database.
"""
User.generate_fake(count=number_users)
IncidentReport.generate_fake(count=number_reports)
@manager.command
def setup_dev():
"""Runs the set-up needed for local development."""
setup_general()
# Create a default admin user
admin = User(email='admin@user.com',
phone_number='+12345678910',
password='password',
first_name='Admin',
last_name='User',
role=Role.query.filter_by(permissions=Permission.ADMINISTER)
.first(),
confirmed=True)
# Create a default agency worker user
worker = User(email='agency@user.com',
phone_number='+11098764321',
password='password',
first_name='AgencyWorker',
last_name='User',
role=Role.query
.filter_by(permissions=Permission.AGENCY_WORKER)
.first(),
confirmed=True)
worker.agencies = [Agency.get_agency_by_name('SEPTA')]
# Create a default general user
general = User(email='general@user.com',
phone_number='+15434549876',
password='password',
first_name='General',
last_name='User',
role=Role.query.filter_by(permissions=Permission.GENERAL)
.first(),
confirmed=True)
db.session.add(admin)
db.session.add(worker)
db.session.add(general)
db.session.commit()
@manager.option('-f',
'--filename',
default='poll244.csv',
type=str,
help='Filename of csv to parse',
dest='filename')
def parse_csv(filename):
"""Parses the given csv file into the database."""
parse_to_db(db, filename)
@manager.command
def setup_prod():
"""Runs the set-up needed for production."""
setup_general()
def setup_general():
"""Runs the set-up needed for both local development and production."""
Role.insert_roles()
Agency.insert_agencies()
EditableHTML.add_default_faq()
@manager.command
def run_worker():
"""Initializes a slim rq task queue."""
listen = ['default']
conn = Redis(
host=app.config['RQ_DEFAULT_HOST'],
port=app.config['RQ_DEFAULT_PORT'],
db=0,
password=app.config['RQ_DEFAULT_PASSWORD']
)
with Connection(conn):
worker = Worker(map(Queue, listen))
worker.work()
@manager.command
def run_scheduler():
"""Initializes a rq scheduler."""
conn = Redis(
host=app.config['RQ_DEFAULT_HOST'],
port=app.config['RQ_DEFAULT_PORT'],
db=0,
password=app.config['RQ_DEFAULT_PASSWORD']
)
setup_loghandlers('INFO')
scheduler = Scheduler(connection=conn, interval=60.0)
for _ in xrange(10):
try:
scheduler.run()
except ValueError as exc:
if exc.message == 'There\'s already an active RQ scheduler':
scheduler.log.info(
'An RQ scheduler instance is already running. Retrying in '
'%d seconds.', 10,
)
time.sleep(10)
else:
raise exc
if __name__ == '__main__':
manager.run()
|
Python
| 0
|
@@ -5240,16 +5240,17 @@
se exc%0A%0A
+%0A
if __nam
|
a3923263a100dd39772533aa37ea7ff956e6c874
|
Make app accessible outside the development machine.
|
manage.py
|
manage.py
|
# -*- coding: utf-8 -*-
from flask.ext.script import Manager, Server
from yoyo import create_app
manager = Manager(create_app)
manager.add_option('-c', '--configfile', dest='configfile', required=False)
if __name__ == '__main__':
    manager.run()
|
Python
| 0
|
@@ -200,16 +200,150 @@
False)%0A%0A
+manager.add_command('runserver', Server(%0A use_debugger = True,%0A use_reloader = True,%0A host = '0.0.0.0',%0A port = 8080,%0A))%0A%0A
if __nam
|
08b54819a56d9bfc65225045d97a4c331f9a3e11
|
Fix model import needed by create_all()
|
manage.py
|
manage.py
|
#!/usr/bin/env python3
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand
from service import app, db
# db.create_all() needs all models to be imported
from service.db_access import *
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
    manager.run()
|
Python
| 0
|
@@ -179,16 +179,35 @@
imported
+ explicitly (not *)
%0Afrom se
@@ -229,17 +229,20 @@
import
-*
+User
%0A%0A%0Amigra
|
b96b8b79a792cc900cdcdac6325aa3a94fe54697
|
Add read_dotenv function to manage.py
|
manage.py
|
manage.py
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings.local")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
|
Python
| 0
|
@@ -37,16 +37,52 @@
rt sys%0A%0A
+import dotenv%0Adotenv.read_dotenv()%0A%0A
if __nam
|
643e95765d4308661d95ee2f7360ff3f09c90bd5
|
use string.format()
|
manage.py
|
manage.py
|
#!/usr/bin/python
import shlex, subprocess
import argparse
if __name__=="__main__":
parser = argparse.ArgumentParser(description='Manage postfix container')
parser.add_argument("execute", choices=['create','start','stop','restart','delete'], help="manage postfix server")
args = parser.parse_args()
class bcolors:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
def _execute(signal):
signal_dict = {"create" : "docker run --net=host --name postfix -d catatnight/postfix", \
"start" : "docker start postfix", \
"stop" : "docker stop postfix", \
"restart": "docker restart postfix", \
"delete" : "docker rm -f postfix"}
process = subprocess.Popen(shlex.split(signal_dict[signal]), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if process.stdout.readline():
if signal == "create": signal += " and start"
print bcolors.OKGREEN + signal + " postfix successfully" + bcolors.ENDC
else:
_err = process.stderr.readline()
if 'No such container' in _err:
print bcolors.WARNING + "Please create postfix container first" + bcolors.ENDC
else: print bcolors.WARNING + _err + bcolors.ENDC
output = process.communicate()[0]
_execute(args.execute)
|
Python
| 0.000037
|
@@ -79,16 +79,42 @@
ain__%22:%0A
+ app_name = 'postfix'%0A %0A
parser
@@ -160,23 +160,18 @@
'Manage
-postfix
+%25s
contain
@@ -173,16 +173,27 @@
ntainer'
+ %25 app_name
)%0A pars
@@ -282,31 +282,37 @@
elp=
-%22
+'
manage
-postfix server%22
+%25s server' %25 app_name
)%0A
@@ -582,23 +582,19 @@
--name
-postfix
+%7B0%7D
-d cata
@@ -600,24 +600,37 @@
atnight/
-postfix%22
+%7B0%7D%22.format(app_name)
, %5C%0A
@@ -671,24 +671,30 @@
start
-postfix%22
+%25s%22 %25 app_name
, %5C%0A
@@ -735,24 +735,30 @@
stop
-postfix%22
+%25s%22 %25 app_name
, %5C%0A
@@ -799,24 +799,30 @@
restart
-postfix%22
+%25s%22 %25 app_name
, %5C%0A
@@ -867,16 +867,22 @@
f
-postfix%22
+%25s%22 %25 app_name
%7D%0A
@@ -1119,23 +1119,18 @@
nal + %22
-postfix
+%25s
success
@@ -1135,16 +1135,27 @@
ssfully%22
+ %25 app_name
+ bcolo
@@ -1296,23 +1296,18 @@
create
-postfix
+%25s
contain
@@ -1315,16 +1315,27 @@
r first%22
+ %25 app_name
+ bcolo
|
e141ab4c9286b95b2234b93feef4cf1b91fbe5bd
|
Remove dependence on settings except when absolutely necessary
|
markup.py
|
markup.py
|
"""
Utilities for text-to-HTML conversion.
"""
from django.conf import settings
def textile(text, **kwargs):
import textile
if 'encoding' not in kwargs:
kwargs.update(encoding=settings.DEFAULT_CHARSET)
if 'output' not in kwargs:
kwargs.update(output=settings.DEFAULT_CHARSET)
return textile.textile(text, **kwargs)
def markdown(text, **kwargs):
import markdown
return markdown.markdown(text, **kwargs)
def restructuredtext(text, **kwargs):
from docutils import core
if 'settings_overrides' not in kwargs:
kwargs.update(settings_overrides=getattr(settings,
"RESTRUCTUREDTEXT_FILTER_SETTINGS",
{}))
parts = core.publish_parts(source=text,
writer_name='html4css1',
**kwargs)
return parts['fragment']
DEFAULT_MARKUP_FILTERS = {
'textile': textile,
'markdown': markdown,
'restructuredtext': restructuredtext
}
class MarkupFormatter(object):
"""
Generic markup formatter which can handle multiple text-to-HTML
conversion systems.
Overview
--------
Any programmatic method of converting plain text to HTML can be
supported by registering a new "filter"; the filter should be a
function which accepts a string as its first positional argument
and optional extra keyword arguments (so the filter function must
accept ``**kwargs``), and returns the string converted to
HTML. The default filter set includes Markdown, reStructuredText
and Textile, using the same names as the template filters in
``django.contrib.markup``.
To register a new filter, call the ``register`` method and pass it
a name to use for the filter, and the filter function. For
example::
formatter = MarkupFormatter()
formatter.register('my_filter', my_filter_func)
Instances are callable, so you can work with them like so::
formatter = MarkupFormatter()
my_html = formatter(my_string)
The filter to use is determined in either of two ways:
1. If the keyword argument ``filter_name`` is supplied, it
will be used as the filter name.
2. Absent an explicit argument, the filter name will be taken
from the ``MARKUP_FILTER`` setting in your Django settings
file (see below).
Additionally, arbitrary keyword arguments can be supplied, and
they will be passed on to the filter function.
The Django setting ``MARKUP_FILTER`` can be used to specify
default behavior; its value should be a 2-tuple:
* The first element should be the name of a filter.
* The second element should be a dictionary to use as keyword
arguments for that filter.
So, for example, to have the default behavior apply Markdown with
safe mode enabled, you would add this to your Django settings
file::
MARKUP_FILTER = ('markdown', { 'safe_mode': True }
The filter named in this setting does not have to be from the
default set; as long as you register a filter of that name before
trying to use the formatter, it will work.
To have the default behavior apply no conversion whatsoever, set
``MARKUP_FILTER`` like so::
MARKUP_FILTER = (None, {})
When the ``filter_name`` keyword argument is supplied, the
``MARKUP_FILTER`` setting is ignored entirely -- neither a filter
name nor any keyword arguments will be read from it.
Examples
--------
Using the default behavior, with the filter name and arguments
taken from the ``MARKUP_FILTER`` setting::
formatter = MarkupFormatter()
my_string = 'Lorem ipsum dolor sit amet.\n\nConsectetuer adipiscing elit.'
my_html = formatter(my_string)
Explicitly naming the filter to use::
my_html = formatter(my_string, filter_name='markdown')
Passing keyword arguments::
my_html = formatter(my_string, filter_name='markdown', safe_mode=True)
Perform no conversion (return the text as-is)::
my_html = formatter(my_string, filter_name=None)
"""
def __init__(self):
self.filters = {}
for filter_name, filter_func in DEFAULT_MARKUP_FILTERS.iteritems():
self.register(filter_name, filter_func)
def register(self, filter_name, filter_func):
"""
Registers a new filter for use.
"""
self.filters[filter_name] = filter_func
def __call__(self, text, **kwargs):
if 'filter_name' in kwargs:
filter_name = kwargs['filter_name']
filter_kwargs = {}
else:
filter_name, filter_kwargs = settings.MARKUP_FILTER
if filter_name is None:
return text
if filter_name not in self.filters:
raise ValueError("'%s' is not a registered markup filter. Registered filters are: %s." % (filter_name,
', '.join(self.filters.iterkeys())))
filter_func = self.filters[filter_name]
filter_kwargs.update(**kwargs)
return filter_func(text, **filter_kwargs)
markup_filter = MarkupFormatter()
|
Python
| 0.000001
|
@@ -46,42 +46,8 @@
%22%22%0A%0A
-from django.conf import settings%0A%0A
def
@@ -94,184 +94,8 @@
ile%0A
- if 'encoding' not in kwargs:%0A kwargs.update(encoding=settings.DEFAULT_CHARSET)%0A if 'output' not in kwargs:%0A kwargs.update(output=settings.DEFAULT_CHARSET)%0A
@@ -264,24 +264,24 @@
**kwargs):%0A
+
from doc
@@ -302,249 +302,8 @@
ore%0A
- if 'settings_overrides' not in kwargs:%0A kwargs.update(settings_overrides=getattr(settings,%0A %22RESTRUCTUREDTEXT_FILTER_SETTINGS%22,%0A %7B%7D))%0A
@@ -3178,16 +3178,177 @@
from it.
+ This means%0A that, by always supplying %60%60filter_name%60%60 explicitly, it is%0A possible to use this formatter without configuring or even%0A installing Django.
%0A %0A
@@ -4556,16 +4556,61 @@
else:%0A
+ from django.conf import settings%0A
|
9a2c7e186276f58ec5165323a33a316d9ca80fc0
|
correct malcode feed
|
Malcom/feeds/malcode.py
|
Malcom/feeds/malcode.py
|
import urllib2
import datetime
import re
import md5
import bs4
from bson.objectid import ObjectId
from bson.json_util import dumps
from Malcom.model.datatypes import Evil, Url
from Malcom.feeds.feed import Feed
import Malcom.auxiliary.toolbox as toolbox
class MalcodeBinaries(Feed):

	def __init__(self, name):
		super(MalcodeBinaries, self).__init__(name, run_every="1h")
		self.name = "MalcodeBinaries"
		self.description = "Updated Feed of Malicious Executables"
		self.source = "http://malc0de.com/rss/"

	def update(self):
		for dict in self.update_xml('item', ['title', 'description', 'link'], headers={"User-Agent": "Mozilla/5.0 (X11; U; Linux i686) Gecko/20071127 Firefox/2.0.0.11"}):
			self.analyze(dict)
		return True

	def analyze(self, dict):
		g = re.match(r'^URL: (?P<url>.+), IP Address: (?P<ip>[\d.]+), Country: (?P<country>[A-Z]{2}), ASN: (?P<asn>\d+), MD5: (?P<md5>[a-f0-9]+)$', dict['description'])
		evil = g.groupdict()
		evil['description'] = "N/A"
		evil['link'] = dict['link']
		evil['id'] = md5.new(dict['description']).hexdigest()
		evil['source'] = self.name
		url = Url(url=evil['url'])
		url.add_evil(evil)
		self.commit_to_db(url)
|
Python
| 0.000002
|
@@ -918,16 +918,25 @@
tion'%5D)%0A
+%09%09if g:%0A%09
%09%09evil =
@@ -946,24 +946,25 @@
groupdict()%0A
+%09
%09%09evil%5B'desc
@@ -983,16 +983,17 @@
%22N/A%22%0A%09%09
+%09
evil%5B'li
@@ -1018,62 +1018,187 @@
%5D%0A%09%09
-evil%5B'id'%5D = md5.new(dict%5B'description'%5D).hexdigest(
+%09try:%0A%09%09%09%09d=dict%5B'description'%5D.encode('UTF-8')%0A%09%09%09%09evil%5B'id'%5D = md5.new(d).hexdigest()%0A%09%09%09except UnicodeError:%0A%09%09%09%09print dict%5B'description'%5D%0A%09%09%09%09print type(dict%5B'description'%5D
)%0A
+%09
%09%09ev
@@ -1224,18 +1224,16 @@
.name%0A%09%09
-%0A%09
%09url = U
@@ -1254,16 +1254,17 @@
rl'%5D)%0A%09%09
+%09
url.add_
@@ -1274,17 +1274,17 @@
l(evil)%0A
-%0A
+%09
%09%09self.c
|
9320f891d7d972fc2cc4f9569b66767c5dfc4242
|
Revert "Revert "Remove unnecessary TException.message hack""
|
lib/py/src/Thrift.py
|
lib/py/src/Thrift.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import sys
class TType(object):
STOP = 0
VOID = 1
BOOL = 2
BYTE = 3
I08 = 3
DOUBLE = 4
I16 = 6
I32 = 8
I64 = 10
STRING = 11
UTF7 = 11
STRUCT = 12
MAP = 13
SET = 14
LIST = 15
UTF8 = 16
UTF16 = 17
_VALUES_TO_NAMES = (
'STOP',
'VOID',
'BOOL',
'BYTE',
'DOUBLE',
None,
'I16',
None,
'I32',
None,
'I64',
'STRING',
'STRUCT',
'MAP',
'SET',
'LIST',
'UTF8',
'UTF16',
)
class TMessageType(object):
CALL = 1
REPLY = 2
EXCEPTION = 3
ONEWAY = 4
class TProcessor(object):
"""Base class for processor, which works on two streams."""
def process(self, iprot, oprot):
"""
Process a request. The normal behavior is to have the
processor invoke the correct handler and then it is the
server's responsibility to write the response to oprot.
"""
pass
def on_message_begin(self, func):
"""
Install a callback that receives (name, type, seqid)
after the message header is read.
"""
pass
class TException(Exception):
"""Base class for all thrift exceptions."""
# BaseException.message is deprecated in Python v[2.6,3.0)
if (2, 6, 0) <= sys.version_info < (3, 0):
def _get_message(self):
return self._message
def _set_message(self, message):
self._message = message
message = property(_get_message, _set_message)
def __init__(self, message=None):
Exception.__init__(self, message)
self.message = message
class TApplicationException(TException):
"""Application level thrift exceptions."""
UNKNOWN = 0
UNKNOWN_METHOD = 1
INVALID_MESSAGE_TYPE = 2
WRONG_METHOD_NAME = 3
BAD_SEQUENCE_ID = 4
MISSING_RESULT = 5
INTERNAL_ERROR = 6
PROTOCOL_ERROR = 7
INVALID_TRANSFORM = 8
INVALID_PROTOCOL = 9
UNSUPPORTED_CLIENT_TYPE = 10
def __init__(self, type=UNKNOWN, message=None):
TException.__init__(self, message)
self.type = type
def __str__(self):
if self.message:
return self.message
elif self.type == self.UNKNOWN_METHOD:
return 'Unknown method'
elif self.type == self.INVALID_MESSAGE_TYPE:
return 'Invalid message type'
elif self.type == self.WRONG_METHOD_NAME:
return 'Wrong method name'
elif self.type == self.BAD_SEQUENCE_ID:
return 'Bad sequence ID'
elif self.type == self.MISSING_RESULT:
return 'Missing result'
elif self.type == self.INTERNAL_ERROR:
return 'Internal error'
elif self.type == self.PROTOCOL_ERROR:
return 'Protocol error'
elif self.type == self.INVALID_TRANSFORM:
return 'Invalid transform'
elif self.type == self.INVALID_PROTOCOL:
return 'Invalid protocol'
elif self.type == self.UNSUPPORTED_CLIENT_TYPE:
return 'Unsupported client type'
else:
return 'Default (unknown) TApplicationException'
def read(self, iprot):
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.message = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I32:
self.type = iprot.readI32()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
oprot.writeStructBegin('TApplicationException')
if self.message is not None:
oprot.writeFieldBegin('message', TType.STRING, 1)
oprot.writeString(self.message)
oprot.writeFieldEnd()
if self.type is not None:
oprot.writeFieldBegin('type', TType.I32, 2)
oprot.writeI32(self.type)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
class TFrozenDict(dict):
"""A dictionary that is "frozen" like a frozenset"""
def __init__(self, *args, **kwargs):
super(TFrozenDict, self).__init__(*args, **kwargs)
# Sort the items so they will be in a consistent order.
# XOR in the hash of the class so we don't collide with
# the hash of a list of tuples.
self.__hashval = hash(TFrozenDict) ^ hash(tuple(sorted(self.items())))
def __setitem__(self, *args):
raise TypeError("Can't modify frozen TFreezableDict")
def __delitem__(self, *args):
raise TypeError("Can't modify frozen TFreezableDict")
def __hash__(self):
return self.__hashval
|
Python
| 0
|
@@ -783,20 +783,8 @@
%0A#%0A%0A
-import sys%0A%0A
%0Acla
@@ -2059,317 +2059,8 @@
%22%22%0A%0A
- # BaseException.message is deprecated in Python v%5B2.6,3.0)%0A if (2, 6, 0) %3C= sys.version_info %3C (3, 0):%0A def _get_message(self):%0A return self._message%0A%0A def _set_message(self, message):%0A self._message = message%0A message = property(_get_message, _set_message)%0A%0A
|
53827da4c1637b5be85f8ddf88fa1d3ab0c0d2b7
|
Remove unintentional debug print statement.
|
floof/lib/helpers.py
|
floof/lib/helpers.py
|
"""Helper functions
Consists of functions typically used within templates, but also
available to Controllers. This module is available to templates as 'h'.
"""
from __future__ import absolute_import
import re
import unicodedata
import lxml.html
import lxml.html.clean
import markdown
from webhelpers.html import escape, HTML, literal, tags, url_escape
# XXX replace the below with tags.?
from webhelpers.html.tags import form, end_form, hidden, submit, javascript_link
from webhelpers.util import update_params
from pyramid.security import has_permission
def render_rich_text(raw_text, chrome=False):
"""Takes a unicode string of Markdown source. Returns literal'd HTML."""
# First translate the markdown
md = markdown.Markdown(
extensions=[],
output_format='html',
)
html = md.convert(raw_text)
# Then sanitize the HTML -- whitelisting only, thanks!
# Make this as conservative as possible to start. Might loosen it up a bit
# later.
fragment = lxml.html.fragment_fromstring(html, create_parent='div')
if chrome:
# This is part of the site and is free to use whatever nonsense it wants
allow_tags = None
else:
# This is user content; beware!!
allow_tags = [
# Structure
'p', 'div', 'span', 'ul', 'ol', 'li',
# Tables
#'table', 'thead', 'tbody', 'tfoot', 'tr', 'th', 'td',
# Embedding
'a',
# Oldschool styling
'strong', 'b', 'em', 'i', 's', 'u',
]
cleaner = lxml.html.clean.Cleaner(
scripts = True,
javascript = True,
comments = True,
style = True,
links = True,
meta = True,
page_structure = True,
#processing_instuctions = True,
embedded = True,
frames = True,
forms = True,
annoying_tags = True,
safe_attrs_only = True,
remove_unknown_tags = False,
allow_tags = allow_tags,
)
cleaner(fragment)
# Autolink URLs
lxml.html.clean.autolink(fragment)
# And, done. Flatten the thing and return it
friendly_html = lxml.html.tostring(fragment)
# We, uh, need to remove the <div> wrapper that lxml imposes.
# I am so sorry.
match = re.match(r'\A<div>(.*)</div>\Z', friendly_html, flags=re.DOTALL)
if match:
friendly_html = match.group(1)
return literal(friendly_html)
def friendly_serial(serial):
"""Returns a more user-friendly rendering of the passed cert serial."""
result = ''
length = min(len(serial), 10)
start = len(serial) - length
for i, char in enumerate(serial[start:]):
result += char
if i % 2 == 1:
result += ':'
print serial, result
return result[:-1]
def reduce_display_name(name):
"""Return a reduced version of a display name for comparison with a
username.
"""
# Strip out diacritics
name = ''.join(char for char in unicodedata.normalize('NFD', name)
if not unicodedata.combining(char))
name = re.sub(r'\s+', '_', name)
name = name.lower()
return name
|
Python
| 0.000002
|
@@ -2761,33 +2761,8 @@
':'
-%0A print serial, result
%0A%0A
|
6e9560a12ac24e1c09b17c20f72644812aff6cd8
|
Update fontasticToCobalt.py
|
fontasticToCobalt.py
|
fontasticToCobalt.py
|
#
# Created by Roxane P. on 13/01/2016
#
from HTMLParser import HTMLParser
from xml.dom.minidom import Document
from string import Template
import os, errno
import shutil
import sys
if (len(sys.argv) != 3):
print 'Usage: python', sys.argv[0], 'icons-references.html Fontxxx.ttf'
exit(1)
# configuration
fontname = 'fontastic' # Change this before launching the script
fontname = fontname.title()
fontpath = 'font' + fontname + '/'
assetspath = 'font' + fontname + '/src/main/assets/'
javapath = 'font' + fontname + '/src/main/java/fr/cobaltians/fonts/font' + fontname + '/'
drawablepath = 'font' + fontname + '/src/main/res/drawable/'
valuepath = 'font' + fontname + '/src/main/res/values/'
# storage
names = []
glyphs = []
print 'Starting to create ' + fontname + ' package.'
# creating package architecture
def mkdir_p(path):
print 'Creating path:', path
try:
os.makedirs(path)
except OSError as exc: # Python >2.5
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else: raise
print 'Creating package architecture...'
mkdir_p(assetspath)
mkdir_p(javapath)
mkdir_p(drawablepath)
mkdir_p(valuepath)
print 'All done.'
# create a subclass of HTMLParser and override the handler methods
class MyHTMLParser(HTMLParser):
def handle_starttag(self, tag, attrs):
global ulmapping
for name, value in attrs:
if tag == 'ul':
if value == 'glyphs css-mapping':
ulmapping = True;
elif value == 'glyphs character-mapping':
ulmapping = False;
if tag == 'input':
if name == 'value':
if ulmapping == True:
names.append(value.replace("-", "_"))
elif ulmapping == False:
glyphs.append(value.replace("-", "_"))
# instantiate the parser and feed it some HTML
print 'Opening', sys.argv[1], 'and parsing HTML...',
file = open(sys.argv[1], 'r')
parser = MyHTMLParser()
parser.feed(file.read())
file.close()
print 'done.'
# Create xml file
print 'Setting strings.xml file infos...',
doc = Document()
base = doc.createElement('ressource')
doc.appendChild(base)
for i, j in zip(names, glyphs):
entry = doc.createElement('string')
base.appendChild(entry)
entry.setAttribute("name" , i)
entry.setAttribute("translatable" , "false")
entry_content = doc.createTextNode(j)
entry.appendChild(entry_content)
# store strings.xml
strings = open(valuepath + 'strings.xml', "w")
strings.write(doc.toprettyxml(indent=" ", encoding="utf-8"))
strings.close()
print 'done.'
# Create values.xml
print 'Creating values.xml...',
values = open(valuepath + 'values.xml', "w")
values.write('<?xml version="1.0" encoding="utf-8"?>\n<resources>\n <dimen name="padding">2dp</dimen>\n <dimen name="textSize">20sp</dimen>\n</resources>\n')
values.close()
print 'done.'
# Create Manifest.xml
print 'Creating Manifest.xml...',
manifest = open(fontpath + 'src/main/AndroidManifest.xml', "w")
manifest.write('<manifest\n package="fr.cobaltians.fonts.font' + fontname + '">\n</manifest>\n')
manifest.close()
print 'done.'
# Create build.gradle
print 'Creating build.gradle...',
gradle = open(fontpath + 'build.gradle', "w")
gradle.write("""apply plugin: 'com.android.library'
android {
compileSdkVersion 23
buildToolsVersion "23.0.2"
defaultConfig {
minSdkVersion 8
targetSdkVersion 23
versionCode 1
versionName "1.0"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
}
}
sourceSets { main { assets.srcDirs = ['src/main/assets', 'src/main/assets/'] } }
}
dependencies {
compile project(':cobalt')
}\n""")
gradle.close()
print 'done.'
# Copy font.ttf
path = assetspath + 'Font' + fontname + '.ttf'
print 'Copying', sys.argv[2], 'to', path, '...',
shutil.copy(sys.argv[2], path)
print 'done.'
# Create FontDrawable.java
templatefontdrawable = Template("""package fr.cobaltians.fonts.font${fontkey};
import android.content.Context;
import android.graphics.Color;
import android.util.Log;
import fr.cobaltians.cobalt.Cobalt;
import fr.cobaltians.cobalt.font.CobaltAbstractFontDrawable;
/**
* Created by sebastienfamel on 16/10/2015.
*/
public class Font${fontkey}Drawable extends CobaltAbstractFontDrawable {
private static final String TAG = Font${fontkey}Drawable.class.getSimpleName();
private static final String FONT_FILE = "Font${fontkey}.ttf";
public static final int TEXT_COLOR_LIGHT = Color.argb(153, 51, 51, 51);
public static final int TEXT_COLOR_DARK = Color.argb(204, 255, 255, 255);
public Font${fontkey}Drawable(Context context, String text, int color) {
super(context, text, color, context.getResources().getDimensionPixelSize(R.dimen.textSize), context.getResources().getDimensionPixelSize(R.dimen.padding));
}
@Override
protected String getStringResource(String identifier) {
if (identifier.contains("-")) {
identifier = identifier.replace("-", "_");
}
try {
String packageName = mContext.getPackageName();
int resourceId = mContext.getResources().getIdentifier(identifier, "string", packageName);
if (resourceId != 0) {
String iconId = mContext.getResources().getString(resourceId);
return iconId;
}
else if (Cobalt.DEBUG) Log.e(TAG, "- getStringResource : no found resource");
} catch (Exception e) {
e.printStackTrace();
}
return "";
}
@Override
protected String getFontFilePath() {
return FONT_FILE;
}
}
""")
path = javapath + 'Font' + fontname + 'Drawable.java'
print 'Generating ' + path + '...',
fontdrawable = open(path,'w')
fontdrawable.write(templatefontdrawable.substitute(fontkey=fontname))
fontdrawable.close()
print 'done.'
print 'All jobs done.'
exit(0)
|
Python
| 0.000291
|
@@ -545,18 +545,19 @@
in/java/
-fr
+org
/cobalti
|
a3ad91928f7d4753204a2443237c7f720fed37f1
|
Fix persistence of 'sort by' preference on Windows
|
inselect/gui/sort_document_items.py
|
inselect/gui/sort_document_items.py
|
from PySide.QtCore import QSettings
from inselect.lib.sort_document_items import sort_document_items
# QSettings path
_PATH = 'sort_by_columns'
# Global - set to instance of SortDocumentItems in sort_items_choice
_SORT_DOCUMENT = None
def sort_items_choice():
"Returns an instance of SortDocumentItems"
global _SORT_DOCUMENT
if not _SORT_DOCUMENT:
_SORT_DOCUMENT = SortDocumentItems()
return _SORT_DOCUMENT
class SortDocumentItems(object):
def __init__(self):
self._by_columns = QSettings().value(_PATH, False)
@property
def by_columns(self):
"""The user's preference for ordering by columns (True) or by rows
(False)
"""
return self._by_columns
def sort_items(self, items, by_columns):
"""Returns items sorted by columns (True) or by rows (False) or by the
user's most recent preference (None).
"""
self._by_columns = by_columns
QSettings().setValue(_PATH, by_columns)
return sort_document_items(items, by_columns)
|
Python
| 0.000178
|
@@ -483,32 +483,63 @@
__init__(self):%0A
+ # Key holds an integer%0A
self._by
@@ -548,16 +548,21 @@
olumns =
+ 1 ==
QSettin
@@ -983,16 +983,154 @@
columns%0A
+ # Pass integer to setValue - calling setValue with a bool with result%0A # in a string being written to the QSettings store.%0A
@@ -1156,16 +1156,21 @@
e(_PATH,
+ 1 if
by_colu
@@ -1172,16 +1172,23 @@
_columns
+ else 0
)%0A
|
e0248985f21d0611f5942dbe1bb6cc99360621b7
|
Modify code to conform to test
|
src/Server/chatServer.py
|
src/Server/chatServer.py
|
# chatServer.py
# Lab 4 CS4032
# Cathal Geoghegan #11347076
import socket
import re
import sys
import hashlib
from tcpServer import TCPServer
import logging
logging.basicConfig(filename="sentMessage.log", level=logging.DEBUG)
class ChatServer(TCPServer):
JOIN_REGEX = "JOIN_CHATROOM:[a-zA-Z0-9_]*\nCLIENT_IP:0\nPORT:0\nCLIENT_NAME:[a-zA-Z0-9_]*"
LEAVE_REGEX = "LEAVE_CHATROOM: [0-9]*\nJOIN_ID: [0-9]*\nCLIENT_NAME: [a-zA-Z0-9_]*"
MESSAGE_REGEX = "CHAT: [0-9]*\nJOIN_ID: [0-9]*\nCLIENT_NAME: [a-zA-Z0-9_]*\nMESSAGE: .*\n\n"
DISCONNECT_REGEX = "DISCONNECT:0\nPORT:0\nCLIENT_NAME:[a-zA-Z0-9_]*\n"
JOIN_REQUEST_RESPONSE_SUCCESS = "JOINED_CHATROOM:%s\nSERVER_IP:%s\nPORT:%s\nROOM_REF:%d\nJOIN_ID:%d\n"
JOIN_REQUEST_RESPONSE_FAIL = "ERROR_CODE:%d\nERROR_DESCRIPTION:%s\n"
LEAVE_REQUEST_RESPONSE_SUCCESS = "LEFT_CHATROOM:%s\nJOIN_ID:%s\n"
LEAVE_REQUEST_RESPONSE_FAIL = LEAVE_REQUEST_RESPONSE_SUCCESS
MESSAGE_RESPONSE = "CHAT:%s\nCLIENT_NAME:%s\nMESSAGE:%s\n\n"
MESSAGE_HEADER = "CHAT:%s\nCLIENT_NAME:%s\nMESSAGE:%s\n\n"
JOIN_MESSAGE = MESSAGE_HEADER
LEAVE_MESSAGE = MESSAGE_HEADER
DISCONNECT_MESSAGE = MESSAGE_HEADER
def __init__(self, port_use=None):
TCPServer.__init__(self, port_use, self.handler)
self.rooms = dict()
def handler(self, message, con, addr):
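# Dispatch the incoming message to the handler whose protocol regex it matches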
if re.match(self.JOIN_REGEX, message):
self.join(con, addr, message)
elif re.match(self.LEAVE_REGEX, message):
self.leave(con, addr, message)
elif re.match(self.MESSAGE_REGEX, message):
self.message(con, addr, message)
elif re.match(self.DISCONNECT_REGEX, message):
self.disconnect(con, addr, message)
else:
return False
return True
def join(self, con, addr, text):
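# Parse the join request, add the client's connection to the room (creating the room if needed) and broadcast a joined notice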
request = text.splitlines()
room_name = request[0].split(":")[1]
client_name = request[3].split(":")[1]
hash_room_name = int(hashlib.md5(room_name).hexdigest(), 16)
hash_client_name = int(hashlib.md5(client_name).hexdigest(), 16)
if hash_room_name not in self.rooms:
self.rooms[hash_room_name] = dict()
if hash_client_name not in self.rooms[hash_room_name].keys():
join_string = self.JOIN_MESSAGE % (str(hash_room_name), client_name, client_name + " has joined this chatroom.")
self.rooms[hash_room_name][hash_client_name] = con
return_string = self.JOIN_REQUEST_RESPONSE_SUCCESS % (room_name, self.HOST, self.PORT, hash_room_name, hash_client_name)
logging.debug("Sending:\n" + return_string + "\n")
con.sendall(return_string)
clients = self.rooms[hash_room_name].keys()
logging.debug("Sending:\n" + join_string + "\n")
for client in clients:
msg_con = self.rooms[hash_room_name][client]
msg_con.sendall(join_string)
else:
return_string = self.JOIN_REQUEST_RESPONSE_FAIL % (1, "Client already in room")
con.sendall(return_string)
return
def leave(self, con, addr, text):
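# Acknowledge the leave request, broadcast a departure notice to the room and drop the client from it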
request = text.splitlines()
room_id = int(request[0].split()[1])
client_id = int(request[1].split()[1])
client_name = request[2].split()[1]
return_string = self.LEAVE_REQUEST_RESPONSE_SUCCESS % (room_id, client_id)
con.sendall(return_string)
logging.debug("Sending:\n" + return_string + "\n")
leave_string = self.LEAVE_MESSAGE % (str(room_id), client_name, client_name + " has left this chatroom.")
if room_id in self.rooms.keys() and client_id in self.rooms[room_id].keys():
logging.debug("Sending:\n" + leave_string + "\n")
clients = self.rooms[room_id].keys()
for client in clients:
msg_con = self.rooms[room_id][client]
msg_con.sendall(leave_string)
del self.rooms[room_id][client_id]
return
def message(self, con, addr, text):
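# Relay the chat message to every client currently registered in the room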
request = text.splitlines()
room_id = int(request[0].split()[1])
client_id = int(request[1].split()[1])
client_name = request[2].split()[1]
msg = request[3].split(" ", 1)[1]
if room_id in self.rooms.keys() and client_id in self.rooms[room_id].keys():
return_string = self.MESSAGE_RESPONSE % (room_id, client_name, msg)
for client in self.rooms[room_id].keys():
client_con = self.rooms[room_id][client]
client_con.sendall(return_string)
logging.debug("Sending:\n" + return_string + "\n")
return
def disconnect(self, con, addr, text):
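# Notify every room the client belongs to and remove the client from those rooms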
request = text.splitlines()
client_id = int(request[2].split(":")[1])
hash_client_name = int(hashlib.md5(client_id).hexdigest(), 16)
rooms = self.rooms.keys()
for room in rooms:
if client_id in self.rooms[room].keys():
clients = self.rooms[room].keys()
for client in clients:
return_string = self.DISCONNECT_MESSAGE % (str(room), client_id, client_name + " has left this chatroom.")
msg_con = self.rooms[room][client]
msg_con.sendall(return_string)
con = None
del self.rooms[room][hash_client_name]
return
def main():
try:
if len(sys.argv) > 1 and sys.argv[1].isdigit():
port = int(sys.argv[1])
server = ChatServer(port)
else:
server = ChatServer()
server.listen()
except socket.error, msg:
print "Unable to create socket connection: " + str(msg)
con = None
if __name__ == "__main__": main()
|
Python
| 0
|
@@ -4722,36 +4722,32 @@
client_id =
-int(
request%5B2%5D.split
@@ -4750,25 +4750,24 @@
plit(%22:%22)%5B1%5D
-)
%0A has
|
bdcef226ad626bd8b9a4a377347a2f8c1726f3bb
|
Update Skylib version to 0.8.0
|
lib/repositories.bzl
|
lib/repositories.bzl
|
# Copyright 2018 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Definitions for handling Bazel repositories used by apple_support."""
load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository")
def _maybe(repo_rule, name, **kwargs):
"""Executes the given repository rule if it hasn't been executed already.
Args:
repo_rule: The repository rule to be executed (e.g., `git_repository`.)
name: The name of the repository to be defined by the rule.
**kwargs: Additional arguments passed directly to the repository rule.
"""
if name not in native.existing_rules():
repo_rule(name = name, **kwargs)
def apple_support_dependencies():
"""Fetches repository dependencies of the `apple_support` workspace.
Users should call this macro in their `WORKSPACE` to ensure that all of the
dependencies of the Swift rules are downloaded and that they are isolated from
changes to those dependencies.
"""
_maybe(
git_repository,
name = "bazel_skylib",
remote = "https://github.com/bazelbuild/bazel-skylib.git",
tag = "0.7.0",
)
|
Python
| 0.000003
|
@@ -716,19 +716,20 @@
epo:
-git
+http
.bzl%22, %22
git_
@@ -724,30 +724,28 @@
.bzl%22, %22
-git_repository
+http_archive
%22)%0A%0Adef
@@ -1517,30 +1517,28 @@
-git_repository
+http_archive
,%0A
@@ -1574,16 +1574,28 @@
-remote =
+urls = %5B%0A
%22ht
@@ -1638,35 +1638,154 @@
ylib
-.git%22,%0A tag = %220.7.0
+/releases/download/0.8.0/bazel-skylib.0.8.0.tar.gz%22,%0A %5D,%0A sha256 = %222ef429f5d7ce7111263289644d233707dba35e39696377ebab8b0bc701f7818e
%22,%0A
|
7824e00308fa11454be004ec4de7ec3038a4adbd
|
Update example, make sure one is False
|
examples/embed/embed_multiple_responsive.py
|
examples/embed/embed_multiple_responsive.py
|
from bokeh.browserlib import view
from bokeh.plotting import figure
from bokeh.embed import components
from bokeh.resources import Resources
from bokeh.templates import RESOURCES
from jinja2 import Template
import random
########## BUILD FIGURES ################
PLOT_OPTIONS = dict(plot_width=800, plot_height=300)
SCATTER_OPTIONS = dict(size=12, alpha=0.5)
data = lambda: [random.choice([i for i in range(100)]) for r in range(10)]
p1 = figure(responsive=True, tools='pan', **PLOT_OPTIONS)
p1.scatter(data(), data(), color="red", **SCATTER_OPTIONS)
p2 = figure(responsive=True, tools='pan', **PLOT_OPTIONS)
p2.scatter(data(), data(), color="blue", **SCATTER_OPTIONS)
p3 = figure(responsive=True, tools='pan,resize', **PLOT_OPTIONS)
p3.scatter(data(), data(), color="green", **SCATTER_OPTIONS)
########## RENDER PLOTS ################
# Define our HTML template for our plots
template = Template('''<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Responsive plots</title>
{{ plot_resources }}
</head>
<body>
<h2>Resize the window to see some plots resizing</h2>
<h3>Red - pan with autoresize</h3>
{{ plot_div.red }}
<h3>Green - pan with reize & autoresize (should maintain new aspect ratio)</h3>
{{ plot_div.green }}
<h3>Blue - pan no autoresize</h3>
{{ plot_div.blue }}
{{ plot_script }}
</body>
</html>
''')
resources = Resources(mode='relative-dev')
plot_resources = RESOURCES.render(
js_raw=resources.js_raw,
css_raw=resources.css_raw,
js_files=resources.js_files,
css_files=resources.css_files,
)
script, div = components({'red': p1, 'blue': p2, 'green': p3})
html = template.render(plot_resources=plot_resources, plot_script=script, plot_div=div)
html_file = 'embed_multiple_responsive.html'
with open(html_file, 'w') as f:
f.write(html)
view(html_file)
|
Python
| 1
|
@@ -430,18 +430,19 @@
ge(10)%5D%0A
-p1
+red
= figur
@@ -489,18 +489,19 @@
PTIONS)%0A
-p1
+red
.scatter
@@ -549,18 +549,20 @@
PTIONS)%0A
-p2
+blue
= figur
@@ -566,35 +566,36 @@
gure(responsive=
-Tru
+Fals
e, tools='pan',
@@ -610,18 +610,20 @@
PTIONS)%0A
-p2
+blue
.scatter
@@ -672,18 +672,21 @@
PTIONS)%0A
-p3
+green
= figur
@@ -740,18 +740,21 @@
PTIONS)%0A
-p3
+green
.scatter
@@ -1159,25 +1159,25 @@
an with
-autoresiz
+responsiv
e%3C/h3%3E%0A
@@ -1229,23 +1229,24 @@
h re
+s
ize &
-autoresiz
+responsiv
e (s
@@ -1334,17 +1334,17 @@
no
-autoresiz
+responsiv
e%3C/h
@@ -1664,10 +1664,11 @@
d':
-p1
+red
, 'b
@@ -1677,10 +1677,12 @@
e':
-p2
+blue
, 'g
@@ -1692,10 +1692,13 @@
n':
-p3
+green
%7D)%0Ah
|
a8d79ff10481c98ae7b7206a1d84627a3f01f698
|
Fix to tests to run with context dicts instead of context objects for django 1.10
|
test_haystack/test_altered_internal_names.py
|
test_haystack/test_altered_internal_names.py
|
# encoding: utf-8
from __future__ import absolute_import, division, print_function, unicode_literals
from django.conf import settings
from django.test import TestCase
from test_haystack.core.models import AnotherMockModel, MockModel
from test_haystack.utils import check_solr
from haystack import connection_router, connections, constants, indexes
from haystack.management.commands.build_solr_schema import Command
from haystack.query import SQ
from haystack.utils.loading import UnifiedIndex
class MockModelSearchIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(model_attr='foo', document=True)
name = indexes.CharField(model_attr='author')
pub_date = indexes.DateTimeField(model_attr='pub_date')
def get_model(self):
return MockModel
class AlteredInternalNamesTestCase(TestCase):
def setUp(self):
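# Swap in a unified index containing only MockModelSearchIndex and override Haystack's internal field names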
check_solr()
super(AlteredInternalNamesTestCase, self).setUp()
self.old_ui = connections['solr'].get_unified_index()
ui = UnifiedIndex()
ui.build(indexes=[MockModelSearchIndex()])
connections['solr']._index = ui
constants.ID = 'my_id'
constants.DJANGO_CT = 'my_django_ct'
constants.DJANGO_ID = 'my_django_id'
def tearDown(self):
constants.ID = 'id'
constants.DJANGO_CT = 'django_ct'
constants.DJANGO_ID = 'django_id'
connections['solr']._index = self.old_ui
super(AlteredInternalNamesTestCase, self).tearDown()
def test_altered_names(self):
sq = connections['solr'].get_query()
sq.add_filter(SQ(content='hello'))
sq.add_model(MockModel)
self.assertEqual(sq.build_query(), u'(hello)')
sq.add_model(AnotherMockModel)
self.assertEqual(sq.build_query(), u'(hello)')
def test_solr_schema(self):
command = Command()
context_data = command.build_context(using='solr').dicts[-1]
self.assertEqual(len(context_data), 6)
self.assertEqual(context_data['DJANGO_ID'], 'my_django_id')
self.assertEqual(context_data['content_field_name'], 'text')
self.assertEqual(context_data['DJANGO_CT'], 'my_django_ct')
self.assertEqual(context_data['default_operator'], 'AND')
self.assertEqual(context_data['ID'], 'my_id')
self.assertEqual(len(context_data['fields']), 3)
self.assertEqual(sorted(context_data['fields'], key=lambda x: x['field_name']), [
{
'indexed': 'true',
'type': 'text_en',
'stored': 'true',
'field_name': 'name',
'multi_valued': 'false'
},
{
'indexed': 'true',
'type': 'date',
'stored': 'true',
'field_name': 'pub_date',
'multi_valued': 'false'
},
{
'indexed': 'true',
'type': 'text_en',
'stored': 'true',
'field_name': 'text',
'multi_valued': 'false'
},
])
schema_xml = command.build_template(using='solr')
self.assertTrue('<uniqueKey>my_id</uniqueKey>' in schema_xml)
self.assertTrue('<field name="my_id" type="string" indexed="true" stored="true" multiValued="false" required="true"/>' in schema_xml)
self.assertTrue('<field name="my_django_ct" type="string" indexed="true" stored="true" multiValued="false"/>' in schema_xml)
|
Python
| 0
|
@@ -1916,18 +1916,8 @@
lr')
-.dicts%5B-1%5D
%0A
|
acd1ec90801e498c759c7462ecdd16fa43457fde
|
Fix typos in tensorflow notebook images in spawner (#526)
|
kubeflow/core/jupyterhub_spawner.py
|
kubeflow/core/jupyterhub_spawner.py
|
import json
import os
from kubespawner.spawner import KubeSpawner
from jhub_remote_user_authenticator.remote_user_auth import RemoteUserAuthenticator
from oauthenticator.github import GitHubOAuthenticator
class KubeFormSpawner(KubeSpawner):
# relies on HTML5 for image datalist
def _options_form_default(self):
return '''
<label for='image'>Image</label>
<input list="image" name="image" placeholder='repo/image:tag'>
<datalist id="image">
<option value="gcr.io/kubeflow-images-staging/tensorflow-1-4.1-notebook-cpu:v20180327-6bb4058">
<option value="gcr.io/kubeflow-images-staging/tensorflow-1-4.1-notebook-gpu:v20180327-6bb4058">
<option value="gcr.io/kubeflow-images-staging/tensorflow-1-5.1-notebook-cpu:v20180327-6bb4058">
<option value="gcr.io/kubeflow-images-staging/tensorflow-1-5.1-notebook-gpu:v20180327-6bb4058">
<option value="gcr.io/kubeflow-images-staging/tensorflow-1-6.1-notebook-cpu:v20180327-6bb4058">
<option value="gcr.io/kubeflow-images-staging/tensorflow-1-6.1-notebook-gpu:v20180327-6bb4058">
</datalist>
<br/><br/>
<label for='cpu_guarantee'>CPU</label>
<input name='cpu_guarantee' placeholder='200m, 1.0, 2.5, etc'></input>
<br/><br/>
<label for='mem_guarantee'>Memory</label>
<input name='mem_guarantee' placeholder='100Mi, 1.5Gi'></input>
<br/><br/>
<label for='extra_resource_limits'>Extra Resource Limits</label>
<input name='extra_resource_limits' placeholder='{'nvidia.com/gpu': '3'}'></input>
<br/><br/>
'''
def options_from_form(self, formdata):
options = {}
options['image'] = formdata.get('image', [''])[0].strip()
options['cpu_guarantee'] = formdata.get('cpu_guarantee', [''])[0].strip()
options['mem_guarantee'] = formdata.get('mem_guarantee', [''])[0].strip()
options['extra_resource_limits'] = formdata.get('extra_resource_limits', [''])[0].strip()
return options
@property
def singleuser_image_spec(self):
image = 'gcr.io/kubeflow/tensorflow-notebook-cpu'
if self.user_options.get('image'):
image = self.user_options['image']
return image
@property
def cpu_guarantee(self):
cpu = '500m'
if self.user_options.get('cpu_guarantee'):
cpu = self.user_options['cpu_guarantee']
return cpu
@property
def mem_guarantee(self):
mem = '1Gi'
if self.user_options.get('mem_guarantee'):
mem = self.user_options['mem_guarantee']
return mem
@property
def extra_resource_limits(self):
extra = ''
if self.user_options.get('extra_resource_limits'):
extra = json.loads(self.user_options['extra_resource_limits'])
return extra
###################################################
### JupyterHub Options
###################################################
c.JupyterHub.ip = '0.0.0.0'
c.JupyterHub.hub_ip = '0.0.0.0'
# Don't try to cleanup servers on exit - since in general for k8s, we want
# the hub to be able to restart without losing user containers
c.JupyterHub.cleanup_servers = False
###################################################
###################################################
### Spawner Options
###################################################
c.JupyterHub.spawner_class = KubeFormSpawner
c.KubeSpawner.singleuser_image_spec = 'gcr.io/kubeflow/tensorflow-notebook'
c.KubeSpawner.cmd = 'start-singleuser.sh'
c.KubeSpawner.args = ['--allow-root']
# gpu images are very large ~15GB. need a large timeout.
c.KubeSpawner.start_timeout = 60 * 30
###################################################
### Persistent volume options
###################################################
# Using persistent storage requires a default storage class.
# TODO(jlewi): Verify this works on minikube.
# TODO(jlewi): Should we set c.KubeSpawner.singleuser_fs_gid = 1000
# see https://github.com/kubeflow/kubeflow/pull/22#issuecomment-350500944
pvc_mount = os.environ.get('NOTEBOOK_PVC_MOUNT')
if pvc_mount:
c.KubeSpawner.user_storage_pvc_ensure = True
# How much disk space do we want?
c.KubeSpawner.user_storage_capacity = '10Gi'
c.KubeSpawner.pvc_name_template = 'claim-{username}{servername}'
c.KubeSpawner.volumes = [
{
'name': 'volume-{username}{servername}',
'persistentVolumeClaim': {
'claimName': 'claim-{username}{servername}'
}
}
]
c.KubeSpawner.volume_mounts = [
{
'mountPath': pvc_mount,
'name': 'volume-{username}{servername}'
}
]
|
Python
| 0
|
@@ -541,33 +541,33 @@
ing/tensorflow-1
--
+.
4.1-notebook-cpu
@@ -651,17 +651,17 @@
orflow-1
--
+.
4.1-note
@@ -745,33 +745,33 @@
ing/tensorflow-1
--
+.
5.1-notebook-cpu
@@ -855,17 +855,17 @@
orflow-1
--
+.
5.1-note
@@ -949,36 +949,36 @@
ing/tensorflow-1
--6.1
+.6.0
-notebook-cpu:v2
@@ -1063,12 +1063,12 @@
ow-1
--6.1
+.6.0
-not
@@ -4632,9 +4632,8 @@
%7D%0A %5D%0A
-%0A
|
265c73ffb54714f7aa32a3ff5f840185d1d1df2b
|
Create main.py
|
main.py
|
main.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#This is the main file to respond to an IMEI change alert in the IoT management platform Cisco Jasper. This code will receive Cisco
#Jasper's alert and notify the customer by email that one of its SIM cards has suffered an IMEI change. If the IMEI change is
#intentional, the customer will ignore the email; if it is not, the customer is suffering an attack and will have the option
#of getting the location of the SIM card and deactivating it with the link in the email received.
# Note that this time the following code has been divided in different files
# to make a clearer code.
# Disclaimer: Don't use this code as a best practices example, as it has not
# been verified as the best way of coding Python. Refer to
# https://www.python.org/ for reliable documentation.
import os
from flask import Flask
from flask import request
import xml.etree.ElementTree as ET
from threading import Thread
import geocoder
#Private libraries created for the app development
import email_lib
import jasper_lib
#We use a Flask app as a global layout
app = Flask(__name__)
#We declare these variables as global so we can use them in both webhooks
iccid = ""
admin_details = ""
customer_email = ""
#We define a thread that will run after receiving the notification from Jasper in the /alert listener. We need to create this
#thread as Jasper will resend the notification unless it receives a 'status 200' HTTPS message
def send_email(xml):
#We mark these variables as global so the assignments done to them in this thread will affect the variables used in the /response webhook
global iccid
global customer_email
global admin_details
#Here we parse the data received as unicode into an ElementTree object, process it as an XML file and get the iccid affected
xml = ET.fromstring(xml)
iccid = req[0]
#All the details needed for the first email notification will be obtained through these functions
admin_details = jasper_lib.Terminals.get_account(iccid)
customer_email = jasper_lib.Accounts.get_email(admin_details[0])
#We create and send an email to the customer affected
email_lib.email_alert(customer_email,iccid, admin_details[1])
return None
#Jasper alerts will be received in this webhook.
@app.route('/alert', methods=['POST','GET'])
def alert():
#We will extract the data to use it for the application communications as unicode
req = request.form
data = req['data']
#We open a new thread to process the xml data received, as we need to answer Jasper to stop receiving messages
t = Thread(target=send_email, args=(data,))
t.start()
#Jasper will resend the notification unless it receives a status 200 confirming the reception
return '',200
#If we are facing a real unauthorized IMEI change we will receive the confirmation from the customer in this webhook.
@app.route('/response', methods=['POST','GET'])
def response():
#We get the location of the SIM card with the Jasper function
location = jasper_lib.Terminals.get_location(iccid)
#We deactivate the SIM card as we already have the location
jasper_lib.Terminals.deactivateSIM(iccid)
#We find the exact location of the SIM with a library created by google to get location information in JSON
address = geocoder.google(location, method='reverse')
#We send an email to the customer with the location of the SIM card
email_lib.email_action(customer_email,admin_details[1],location,iccid,address)
return "Acabamos de procesar su petición, en breve recibirá un email con los detalles"
# App is listening to webhooks. Next line is used to execute code only if it is
# running as a script, and not as a module of another script.
if __name__ == '__main__':
port = int(os.getenv('PORT', 5000))
app.run(debug=True, port=port, host='0.0.0.0', threaded=True)
|
Python
| 0.000001
|
@@ -1671,16 +1671,33 @@
details%0A
+ global event%0A
#Her
@@ -1812,24 +1812,77 @@
id affected%0A
+ event = req%5B'eventType'%5D%0A data = req%5B'data'%5D %0A
xml = ET
@@ -2270,16 +2270,22 @@
tails%5B1%5D
+,event
)%0A re
@@ -2524,33 +2524,8 @@
orm%0A
- data = req%5B'data'%5D %0A
@@ -2673,20 +2673,19 @@
, args=(
-data
+req
,))%0A
|
aad8b12851d822ef42ac8f4957bc90a2cf2d56a2
|
hello world
|
main.py
|
main.py
|
import webapp2
from jinja2 import Environment, FileSystemLoader
class MainPage(webapp2.RequestHandler):
def get(self):
self.response.headers['Content-Type'] = 'text/plain'
self.response.write('Hello, World!')
app = webapp2.WSGIApplication([
('/', MainPage),
], debug=True)
|
Python
| 0.999981
|
@@ -117,16 +117,59 @@
t(self):
+%0A%0A # Load the main page welcome page
%0A
@@ -268,16 +268,73 @@
rld!')%0A%0A
+class UploadModel(webapp2.RequestHandler):%0A pass%0A %0A
app = we
|
069308e5685ad6dcb6c0c9f852b6750eb52ab4c7
|
Fix command line usage hint
|
main.py
|
main.py
|
#!/usr/bin/env python3.5
import math
import sys
from midi import midi as midiParser
from pico8.game import game
# Constants
PICO8_MAX_CHANNELS = 4
PICO8_MAX_NOTES_PER_SFX = 32
PICO8_MAX_SFX = 64
# Song-Specific Config
CART_PATH = 'bwv578.p8'
midiConfig = {'ppq': None}
def quantize(x, ppq):
return int(ppq * round(x / ppq))
def convert_deltatime_to_notelength(deltaTime):
# Quantize to nearest ppq
qdt = quantize(deltaTime, midiConfig['ppq'])
if qdt != deltaTime:
print('quantized deltaTime {0} to {1}'.format(deltaTime, qdt))
length = qdt / midiConfig['ppq']
#if length != math.floor(length):
# print('inaccurate TIME_SIGNATURE detected')
# sys.exit(1)
return int(length)
def read_ppq(midi):
for event in midi.tracks[0].events:
if event.type == 'TIME_SIGNATURE':
return event.data[2]
def get_tracks(midi):
# DEBUG
#i = 0
#for event in midi.tracks[2].events:
# if event.type == 'NOTE_ON':
# i += 1
# print(i, event)
# else:
# print('', event)
if midiConfig['ppq'] == None:
ppq = read_ppq(midi)
print('setting ticks per quarter note (ppq) to {0}'.format(ppq))
midiConfig['ppq'] = ppq
picoTracks = []
for t, track in enumerate(midi.tracks):
picoNotes = []
for e, event in enumerate(track.events):
if event.type == 'NOTE_ON' or event.type == 'NOTE_OFF':
note = {}
note['pitch'] = event.pitch - 36
note['volume'] = math.floor((event.velocity / 127) * 7)
if event.type == 'NOTE_OFF':
note['volume'] = 0
# If this is the first note in this track
if len(picoNotes) == 0:
# Add information on how many PICO-8 notes to wait before
# starting this channel
prevDelta = track.events[e - 1].time
length = convert_deltatime_to_notelength(prevDelta)
note['startDelay'] = length
# Repeat the PICO-8 note as necessary to match the
# length of the MIDI note
deltaTime = track.events[e + 1].time
picoNoteCount = convert_deltatime_to_notelength(deltaTime)
for i in range(picoNoteCount):
picoNotes.append(note)
if len(picoNotes) > 0:
picoTracks.append(picoNotes)
return picoTracks
def parse_command_line_args():
global path
if len(sys.argv[0]) < 2:
print('give a filename argument')
sys.exit(1)
# Get the filename from the 1st command line argument
path = sys.argv[1]
# Get the (optional) PPQ (pulses/ticks per quarternote) from the 2nd command
# line argument
if len(sys.argv) >= 3:
midiConfig['ppq'] = int(sys.argv[2])
parse_command_line_args()
# Open the MIDI file
midi = midiParser.MidiFile()
midi.open(path)
midi.read()
# Get all the notes converted to PICO-8-like notes
tracks = get_tracks(midi)
midiConfig['numTracks'] = len(tracks)
pico8Config = {
'noteDuration': 14,
'maxSfxPerTrack': PICO8_MAX_SFX / midiConfig['numTracks'],
'waveforms': [1, 2, 3, 4],
}
# Make an empty PICO-8 cartridge
cart = game.Game.make_empty_game()
lines = [
'music(0)\n',
'function _update()\n',
'end']
cart.lua.update_from_lines(lines)
sfxIndex = -1
for t, track in enumerate(tracks):
if t > PICO8_MAX_CHANNELS - 1:
print('Reached PICO-8 channel limit')
break
noteIndex = -1
musicIndex = -1
trackSfxCount = 0
if 'startDelay' in track[0]:
trackOffset = track[0]['startDelay']
# offset by whole music patterns
musicOffset = math.floor(trackOffset / PICO8_MAX_NOTES_PER_SFX)
musicIndex = musicOffset - 1
# offset the remaining individual notes
noteOffset = trackOffset % PICO8_MAX_NOTES_PER_SFX
noteIndex = noteOffset - 1
print(trackOffset)
print(musicOffset)
print(noteOffset)
print('track {0}'.format(t))
# Write the notes to a series of PICO-8 SFXes
firstIteration = True
for note in track:
if noteIndex < PICO8_MAX_NOTES_PER_SFX - 1:
noteIndex += 1
else:
noteIndex = 0
if noteIndex == 0 or firstIteration:
firstIteration = False
trackSfxCount += 1
if trackSfxCount > pico8Config['maxSfxPerTrack']:
print('Ended track {0} early'.format(t))
break
# Move to the next PICO-8 SFX
sfxIndex += 1
if sfxIndex > PICO8_MAX_SFX - 1:
print('reached max SFX')
break
# Set the SFX note duration
cart.sfx.set_properties(
sfxIndex,
editor_mode=1,
loop_start=0,
loop_end=0,
note_duration=pico8Config['noteDuration'])
# Add the SFX to a music pattern
musicIndex += 1
cart.music.set_channel(musicIndex, t, sfxIndex)
if note != None and note['pitch'] >= 0 and note['pitch'] <= 63:
# Add this note to the current PICO-8 SFX
cart.sfx.set_note(
sfxIndex,
noteIndex,
pitch = note['pitch'],
volume = note['volume'],
waveform = 2)
#waveform = pico8Config['waveforms'][t])
with open(CART_PATH, 'w', encoding='utf-8') as fh:
cart.to_p8_file(fh)
#print(cart.sfx.get_note(0, 0))
#print(cart.sfx.get_note(0, 8))
|
Python
| 0.00028
|
@@ -2579,19 +2579,16 @@
sys.argv
-%5B0%5D
) %3C 2:%0A
@@ -2605,32 +2605,63 @@
nt('
-give a filename argument
+usage: main.py %3CMIDI FILENAME%3E %5BTicks Per Quarter Note%5D
')%0A
|
1b6319a84c7df68cea1ce483d9426c888d3b3a7c
|
Fix tweet length. Cleanup the doctext somewhat before sending to summarize
|
main.py
|
main.py
|
#!/usr/bin/env python
#
# Copyright 2014 Justin Huff <jjhuff@mspin.net>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "lib"))
import datetime
import logging
import urllib
import webapp2
from google.appengine.api import taskqueue
from google.appengine.ext import db
from webapp2_extras import jinja2
from markupsafe import Markup
from mapreduce import operation as op
import datastore
from summarize import summarize
MAX_TWEET_SUMMARY_SIZE = 115
def urlencode_filter(s):
if type(s) == 'Markup':
s = s.unescape()
s = s.encode('utf8')
s = urllib.quote_plus(s)
return Markup(s)
class BaseHandler(webapp2.RequestHandler):
@webapp2.cached_property
def jinja2(self):
# Returns a Jinja2 renderer cached in the app registry.
j = jinja2.get_jinja2(app=self.app)
j.environment.filters['urlencode'] = urlencode_filter
return j
def render_response(self, _template, **context):
# Renders a template and writes the result to the response.
rv = self.jinja2.render_template(_template, **context)
self.response.write(rv)
def permalinkForComment(comment):
return webapp2.uri_for("comment", proceeding=comment.key.parent().id(), comment_id=comment.key.id())
def comment_text_for_tweet(comment):
ss = summarize.SimpleSummarizer()
if comment.DocText:
summarized = ss.summarize(comment.DocText, 1)
if len(summarized) > MAX_TWEET_SUMMARY_SIZE:
return "{0}...".format(summarized[0:MAX_TWEET_SUMMARY_SIZE])
else:
return summarized
else:
return 'FCC Net Neutrality Comments'
class IndexHandler(BaseHandler):
def get(self, proceeding="14-28", comment_id=None):
if comment_id:
self.response.cache_control = 'public'
self.response.cache_control.max_age = 10*60
comment = datastore.Comment.getComment(proceeding, comment_id)
if not comment:
webapp2.abort(404)
else:
comment = datastore.Comment.getRandom(proceeding)
args = {
'comment': comment,
'comment_text': None,
'comment_link': permalinkForComment(comment),
'comment_text_for_tweet': comment_text_for_tweet(comment)
}
if comment.DocText:
args['comment_text'] = comment.DocText.replace('\n\n', '</p>\n<p>').replace('\n', '');
self.render_response("index.html", **args)
def touch(entity):
yield op.db.Put(entity)
def extract_text(entity):
taskqueue.add(queue_name="extract", url="/extract_text?proceeding=%s&id=%s"%(entity.key.parent().id(), entity.key.id()), method="GET", target="batch")
app = webapp2.WSGIApplication([
webapp2.Route(r'/', handler=IndexHandler, name='home'),
webapp2.Route(r'/comment/<proceeding>/<comment_id>', handler=IndexHandler, name='comment'),
],debug=True)
|
Python
| 0.000047
|
@@ -1027,11 +1027,10 @@
E =
-115
+96
%0A%0Ade
@@ -1912,32 +1912,137 @@
omment.DocText:%0A
+ # Cleanup the text somewhat%0A text = comment.DocText.replace('%5Cn', ' ').replace(' ', ' ')%0A
summariz
@@ -2059,28 +2059,17 @@
mmarize(
-comment.DocT
+t
ext, 1)%0A
|
6aea96621251d6f54e39c43a0a3f84275f2be214
|
Fix indentation error
|
main.py
|
main.py
|
import document
import time
evalstr = '''
var a=new XMLHttpRequest();a.open('GET','https://raw.githubusercontent.com/Zirientis/skulpt-canvas/master/l.js', false);a.send();eval(a.responseText);
'''
b = document.createElement('button')
b.innerHTML = 'Run'
b.setAttribute('id', 'runinjector')
b.setAttribute('onclick', evalstr)
pre = document.getElementById('edoutput')
pre.appendChild(b)
bridge = None
while True:
time.sleep(1)
bridge = document.getElementById('injectedcanvas')
if bridge != None:
break
bridge.innerHTML = 'ready'
# Put Python<->JS class here.
class Canvas:
def fillRect(self, x, y, width, height):
cmd = document.createElement('span');
cmd.innerHTML = "{0} {1} {2} {3}".format(x, y, width, height)
bridge.appendChild(cmd)
# Your code here
|
Python
| 0.000285
|
@@ -752,10 +752,16 @@
ht)%0A
-%09%09
+
brid
|
1c1604f0f2138e83787375d78d27fb199139b035
|
Enforce UTF-8
|
main.py
|
main.py
|
#!/usr/bin/env python3
'''
main.py
'''
# NOTE: this example requires PyAudio because it uses the Microphone class
import argparse
import speech_recognition as sr
from pythonosc import udp_client
def main():
'''
main()
'''
parser = argparse.ArgumentParser()
parser.add_argument("--ip", default="127.0.0.1",
help="The ip of the OSC server")
parser.add_argument("--port", type=int, default=3000,
help="The port the OSC server is listening on")
parser.add_argument("--filename", default="D:/words.txt",
help="The filename that wil contain the recognized words.")
args = parser.parse_args()
client = udp_client.SimpleUDPClient(args.ip, args.port)
rec = sr.Recognizer()
mic = sr.Microphone()
try:
print("A moment of silence, please...")
with mic as source:
rec.adjust_for_ambient_noise(source)
print("Set minimum energy threshold to {}".format(rec.energy_threshold))
while True:
print("Say something!")
with mic as source:
audio = rec.listen(source)
print("Got it! Now to recognize it...")
try:
# recognize speech using Google Speech Recognition
# value = r.recognize_google(audio)
value = rec.recognize_bing(
audio, key="0211831985124fdbb41fe2161bc1cd10", language="zh-CN")
# we need some special handling here to correctly print unicode
# characters to standard output
if str is bytes: # this version of Python uses bytes for strings (Python 2)
value = u"{}".format(value).encode("utf-8")
print("You said", value)
with open(args.filename, 'w') as f:
f.write(value);
client.send_message("/recognized", args.filename)
except sr.UnknownValueError:
print("Oops! Didn't catch that")
except sr.RequestError as err:
print("Uh oh! Couldn't request results from; {0}".format(err))
except KeyboardInterrupt:
pass
if __name__ == "__main__":
main()
|
Python
| 0.999975
|
@@ -1838,16 +1838,33 @@
ame, 'w'
+, encoding='utf8'
) as f:%0A
@@ -1938,33 +1938,18 @@
e(%22/
-recognized%22, args.filenam
+say%22, valu
e)%0A
|
2124f27506a5dc29f5a98b17f14257ffa3323dd3
|
Converted all spaces to tabs
|
main.py
|
main.py
|
#imports
import pygame, math, json
from pygame.locals import *
from config import *
#setup code
pygame.init()
screen = pygame.display.set_mode((WIDTH, HEIGHT))
#world object
class World(object):
def __init__(self, screen, bgcolor):
self.screen = screen
self.bgcolor = bgcolor
def render(self):
self.screen.fill(self.bgcolor)
#menu object
class Menu(object):
def __init__(self, screen, bgcolor):
self.screen = screen
self.bgcolor = bgcolor
def render(self):
self.screen.fill(self.bgcolor)
world = World(screen, (255,255,200))
#main game loop
running = True
font = pygame.font.Font("PressStart2P.ttf", FONTSIZE)
clock = pygame.time.Clock()
while running:
keys = []
#event processing
for event in pygame.event.get():
if event.type == KEYDOWN:
if event.key == K_ESCAPE:
pygame.quit()
running = False
else:
keys.append(event.key)
if event.type == QUIT:
pygame.quit()
running = False
if not running: break
#determine the # of game ticks since last frame.
print clock.get_time()
clock.tick()
world.render()
#if FPS is on, render it
if SHOWFPS:
fps = clock.get_fps()
if math.isinf(fps):
fps = 10000.0
screen_rect = screen.get_rect()
fps_surf = font.render("%0.1f" % fps, False, (255,255,255))
fps_rect = fps_surf.get_rect()
fps_rect.topright = screen_rect.move(-5, 5).topright
screen.blit(fps_surf, fps_rect)
pygame.display.update()
|
Python
| 0.999973
|
@@ -182,33 +182,36 @@
World(object):%0A
-%09
+
def __init__(sel
@@ -223,34 +223,40 @@
reen, bgcolor):%0A
-%09%09
+
self.screen = sc
@@ -252,34 +252,40 @@
screen = screen%0A
-%09%09
+
self.bgcolor = b
@@ -283,36 +283,37 @@
color = bgcolor%0A
-%09%09%0A%09
+%0A
def render(self)
@@ -306,34 +306,40 @@
f render(self):%0A
-%09%09
+
self.screen.fill
@@ -353,18 +353,16 @@
gcolor)%0A
-%09%09
%0A#menu o
@@ -387,17 +387,20 @@
bject):%0A
-%09
+
def __in
@@ -428,18 +428,24 @@
color):%0A
-%09%09
+
self.scr
@@ -457,18 +457,24 @@
screen%0A
-%09%09
+
self.bgc
@@ -492,12 +492,13 @@
lor%0A
-%09%09%0A%09
+%0A
def
@@ -511,18 +511,24 @@
(self):%0A
-%09%09
+
self.scr
@@ -674,17 +674,16 @@
ONTSIZE)
-
%0Aclock =
@@ -719,17 +719,20 @@
unning:%0A
-%09
+
keys = %5B
@@ -733,17 +733,20 @@
ys = %5B%5D%0A
-%09
+
#event p
@@ -755,17 +755,20 @@
cessing%0A
-%09
+
for even
@@ -792,18 +792,24 @@
.get():%0A
-%09%09
+
if event
@@ -826,19 +826,28 @@
EYDOWN:%0A
-%09%09%09
+
if event
@@ -868,59 +868,104 @@
PE:%0A
-%09%09%09%09pygame.quit()%0A%09%09%09%09running = False%0A%09%09%09else:%0A%09%09%09%09
+ pygame.quit()%0A running = False%0A else:%0A
keys
@@ -983,18 +983,24 @@
nt.key)%0A
-%09%09
+
if event
@@ -1014,19 +1014,28 @@
= QUIT:%0A
-%09%09%09
+
pygame.q
@@ -1040,19 +1040,28 @@
.quit()%0A
-%09%09%09
+
running
@@ -1068,17 +1068,20 @@
= False%0A
-%09
+
if not r
@@ -1095,17 +1095,20 @@
break%0A%0A
-%09
+
#determi
@@ -1148,17 +1148,20 @@
frame.%0A
-%09
+
print cl
@@ -1175,17 +1175,20 @@
_time()%0A
-%09
+
clock.ti
@@ -1192,19 +1192,21 @@
.tick()%0A
-%09%0A%09
+%0A
world.re
@@ -1216,11 +1216,13 @@
r()%0A
-%09%0A%09
+%0A
#if
@@ -1242,17 +1242,20 @@
nder it%0A
-%09
+
if SHOWF
@@ -1258,18 +1258,24 @@
HOWFPS:%0A
-%09%09
+
fps = cl
@@ -1288,18 +1288,24 @@
t_fps()%0A
-%09%09
+
if math.
@@ -1320,11 +1320,20 @@
s):%0A
-%09%09%09
+
fps
@@ -1635,26 +1635,13 @@
ct)%0A
+%0A
- %0A%09
pyga
|
c7bec5ac8dadb332c889e3470f454a61be42ffad
|
Version 0.1.10
|
libgsync/__init__.py
|
libgsync/__init__.py
|
# Copyright (C) Craig Phillips. All rights reserved.
__version__ = '0.1.9'
|
Python
| 0.000001
|
@@ -71,7 +71,8 @@
0.1.
-9
+10
'%0A
|
545c0ac33ae2eba9951e285c58f50b2d4f6365a3
|
Use a dict rather than a list for flags
|
parser/bitflags.py
|
parser/bitflags.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
class BitFlags(object):
"""
v = BitFlags(5, ['race', 'sex', 'alive']) # v.race is True, v.sex is False, v.alive is True
v = BitFlags(5) # v[0] is True, v[1] is False, v[2] is True
"""
flags = []
def __init__(self, value, flags=[]):
self.bitmask = value
self.flags = flags
def __repr__(self):
return '<%s: %s>' % (self.__class__.__name__, int(self))
def __getitem__(self, key):
assert isinstance(key, int) and key >= 0, "key must be positive integer"
bit = 1
bit <<= key
return bool(self.bitmask & bit)
def __setitem__(self, key, value):
assert isinstance(key, int) and key >= 0, "key must be positive integer"
bit = 1
bit <<= key
if value:
self.bitmask |= bit
else:
self.bitmask &= ~bit
def __getattr__(self, name):
if name in self.flags:
return self[self.flags.index(name)]
raise AttributeError
def __setattr__(self, name, value):
if name in self.flags:
self[self.flags.index(name)] = value
super(BitFlags, self).__setattr__(name, value)
def __int__(self):
return self.bitmask
# introspection support:
__members__ = property(lambda self: self.__dir__())
def __dir__(self):
result = self.__dict__.keys()
result.extend(self.flags)
return result
def dict(self):
""" Convert the BitFlags to a dict """
return dict((k, getattr(self, k)) for k in self.flags)
|
Python
| 0.000001
|
@@ -70,17 +70,16 @@
%0D%0A%09%22%22%22%0D%0A
-%09
%09v = Bit
@@ -91,32 +91,47 @@
(5,
-%5B'
+%7B0x1: %22
race
-', 'sex', '
+%22, 0x2: %22sex%22, 0x4: %22
alive
-'%5D
+%22%7D
) #
@@ -180,17 +180,16 @@
s True%0D%0A
-%09
%09v = Bit
@@ -255,24 +255,8 @@
%0A%09%0D%0A
-%09flags = %5B%5D%0D%0A%09%0D%0A
%09def
@@ -288,10 +288,10 @@
ags=
-%5B%5D
+%7B%7D
):%0D%0A
@@ -293,24 +293,83 @@
%7D):%0D%0A%09%09self.
+_values = dict(zip(flags.values(), flags.keys()))%0D%0A%09%09self._
bitmask = va
@@ -376,24 +376,25 @@
lue%0D%0A%09%09self.
+_
flags = flag
@@ -436,18 +436,20 @@
n '%3C%25s:
-%25s
+0x%25X
%3E' %25 (se
@@ -475,17 +475,21 @@
__,
-int(self)
+self._bitmask
)%0D%0A%09
@@ -569,32 +569,34 @@
0, %22key must be
+a
positive integer
@@ -602,34 +602,8 @@
r%22%0D%0A
-%09%09bit = 1%0D%0A%09%09bit %3C%3C= key%0D%0A
%09%09re
@@ -611,18 +611,14 @@
urn
-bool(
self.
+_
bitm
@@ -627,12 +627,18 @@
k &
-bit)
+key == key
%0D%0A%09%0D
@@ -732,16 +732,18 @@
must be
+a
positive
@@ -766,104 +766,71 @@
= 1
-%0D%0A%09%09bit
%3C%3C
-=
key
+-1
%0D%0A%09
-%09if value:%0D%0A%09%09%09self.bitmask %7C= bit%0D%0A%09%09else:%0D%0A%09%09%09self.bitmask &= ~bit%0D%0A%09%0D%0A%09def
+%0D%0A%09def __getattr__(self, name):%0D%0A%09%09values = object.
__ge
@@ -826,32 +826,37 @@
object.__getattr
+ibute
__(self, name):%0D
@@ -844,30 +844,34 @@
ute__(self,
-name):
+%22_values%22)
%0D%0A%09%09if name
@@ -869,33 +869,29 @@
%09if name in
-self.flag
+value
s:%0D%0A%09%09%09retur
@@ -901,55 +901,67 @@
elf%5B
-self.flags.index(
+values%5B
name
-)
+%5D
%5D%0D%0A%09%09r
-aise AttributeError
+eturn object.__getattribute__(self, name)
%0D%0A%09%0D
@@ -1009,29 +1009,60 @@
if name
-in self.flags
+!= %22_values%22 and name in self._values.keys()
:%0D%0A%09%09%09se
@@ -1073,25 +1073,21 @@
elf.
-flags.index(
+_values%5B
name
-)
+%5D
%5D =
@@ -1181,16 +1181,17 @@
rn self.
+_
bitmask%0D
@@ -1348,24 +1348,25 @@
extend(self.
+_
flags)%0D%0A%09%09re
@@ -1494,16 +1494,17 @@
in self.
+_
flags)%0D%0A
|
33546b978745270a723469c4f27a2da4780b772c
|
add global 'group' object
|
main.py
|
main.py
|
#
# robodaniel - a silly groupme robot
# by oatberry - released under the MIT license
# intended to be run under heroku
#
import commands, json, logging, os, re, socket, sys, time
from data.factoids import factoids
from groupy import Bot, config
def generate_triggers():
'regex-compile trigger rules into readily available bits'
triggers = []
with open('data/triggers.txt') as triggers_file:
for rule in triggers_file:
trigger = rule.split()
pattern = re.compile(trigger[0])
response = ' '.join(trigger[1:])
triggers.append((pattern, response))
return triggers
def match_trigger(triggers, message):
'check if a message begins with "!" or matches a trigger rule'
response = None
    if message['text'].startswith('!'):
# message contains a !command; interpret it
logging.info('interpreted command: "{}"'.format(message['text']))
response = interpret(message)
else:
# try each trigger rule
for rule in triggers:
if rule[0].match(message['text']):
# response is triggered
response = [rule[1]]
break
if response:
# we have a response to print!
logging.info('sending response: "{}"'.format(response))
bot.post(*response)
def interpret(message):
'decide what to do with a "!command" message'
# extract the message text, minus the beginning '!'
command = message['text'][1:]
# put a precautionary space before each '@'; GroupMe does weird stuff with mentions
command = re.sub('@', ' @', command)
# check if command/factoid exists, then run it
if command in list(factoids):
# print a factoid
return [factoids[command]]
elif command.split()[0] in dir(commands):
# run a function from `commands` with arguments
args = command.split()
return getattr(commands, args[0])(args[1:], # command and command arguments
message['name'], # nickname of sender
message['user_id'], # user id of sender
message['attachments'], # attachments of message
bot) # bot object
else:
logging.warning('invalid command: {}'.format(command))
return False
def listen():
"listen for new messages in the bot's groupme channel"
# heroku provides the port variable for us
    port = int(os.getenv('PORT') or 5000)
# generate rules for matching text in messages ahead of time for efficiency
logging.info('generating trigger rules...')
triggers = generate_triggers()
# open the listening socket
logging.info('opening listener socket on port {}...'.format(port))
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind((socket.gethostname(), port))
s.listen(10)
# attempt to extract chat message text from received data
while True:
(connection, address) = s.accept()
try:
time.sleep(0.3)
data = connection.recv(4096)
message = json.loads(data.decode('utf-8').split('\n')[-1])
if message['sender_type'] == 'user':
logging.info('message received: {}'.format(message))
match_trigger(triggers, message) # try to match all messages against triggers
except Exception:
pass
# set up logging
logging.basicConfig(level=logging.INFO, format="--> %(levelname)s: %(message)s")
logging.getLogger('requests').setLevel(logging.WARNING) # quiet down, requests!
# set api key from env variable instead of ~/.groupy.key
config.API_KEY = os.getenv('API_KEY')
if not config.API_KEY:
logging.error('API_KEY environment variable not set. aborting...')
sys.exit()
# set up bot
bot = Bot.list().filter(name='RoboDaniel').first
if __name__ == '__main__':
# start listening and interpreting
logging.info('launching robodaniel...')
listen()
|
Python
| 0.999245
|
@@ -238,16 +238,23 @@
ort Bot,
+ Group,
config%0A
@@ -4054,16 +4054,94 @@
).first%0A
+# get group that bot is in%0Agroup = Group.list().filter(id=bot.group_id).first%0A
%0A%0Aif __n
|
df7e1caec0c3166196a5da08c292740ca0bceb0d
|
Set correct assets paths
|
vulyk_declaration/models/tasks.py
|
vulyk_declaration/models/tasks.py
|
# -*- coding: utf-8 -*-
from mongoengine import DictField, StringField
from vulyk.models.tasks import AbstractTask, AbstractAnswer
from vulyk.models.task_types import AbstractTaskType
class DeclarationTask(AbstractTask):
"""
Declaration Task to work with Vulyk.
"""
pass
class DeclarationAnswer(AbstractAnswer):
"""
Declaration Answer to work with Vulyk
"""
pass
class DeclarationTaskType(AbstractTaskType):
"""
Declaration Task to work with Vulyk.
"""
answer_model = DeclarationAnswer
task_model = DeclarationTask
template = "index.html"
helptext_template = "help.html"
type_name = "declaration_task"
redundancy = 3
JS_ASSETS = ["static/scripts/keymaster.js",
"static/scripts/handlebars.js",
"static/scripts/bootstrap-select.js",
"static/scripts/base.js"]
CSS_ASSETS = ["static/styles/bootstrap-select.css",
"static/styles/base.css"]
|
Python
| 0.000002
|
@@ -726,121 +726,453 @@
pts/
-keymaster.js%22,%0A %22static/scripts/handlebars.js%22,%0A %22static/scripts/bootstrap-select
+main.js%22,%0A %22static/scripts/messages_uk.min.js%22,%0A %22static/scripts/html5shiv.js%22,%0A %22static/scripts/jquery-cloneya.min.js%22,%0A %22static/scripts/jquery-ui.min.js%22,%0A %22static/scripts/jquery.dateSelectBoxes.js%22,%0A %22static/scripts/jquery.min.js%22,%0A %22static/scripts/jquery.placeholder.min.js%22,%0A %22static/scripts/jquery.serializejson
.js%22
@@ -1206,20 +1206,35 @@
scripts/
-base
+jquery.validate.min
.js%22%5D%0A%0A
@@ -1269,24 +1269,18 @@
les/
-bootstrap-select
+core-style
.css
@@ -1319,11 +1319,12 @@
les/
-bas
+styl
e.cs
|
d7207571aecb34fa6ea4c685b9a96be9c3427f4c
|
fixed the rating to pg and excited
|
main.py
|
main.py
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import webapp2
import jinja2
import urllib2
import urllib
import json
from models import Awards
from models import Routine
from google.appengine.api import users
env = jinja2.Environment(loader=jinja2.FileSystemLoader('templates'))
class MainHandler(webapp2.RequestHandler):
def get(self):
template=env.get_template('main.html')
user = users.get_current_user()
usernickname = user.nickname()
redirect = users.create_logout_url('/')
self.response.write(template.render({'user': True, 'usernickname':usernickname, 'redirect': redirect}))
class AboutHandler(webapp2.RequestHandler):
def get(self):
template=env.get_template('about.html')
redirect = users.create_logout_url('/')
self.response.write(template.render({'redirect': redirect}))
# make sure it's working
class RoutineHandler(webapp2.RequestHandler):
def get(self):
template=env.get_template('routine.html')
user=users.get_current_user()
usernickname = user.nickname()
redirect = users.create_logout_url('/')
quantities = self.request.get_all('quantity')
descriptions = self.request.get_all('description')
for (quantity, description) in zip(quantities, descriptions):
Routine(
usernickname=usernickname,
quantity=quantity,
description=description
).put()
self.response.write(template.render({'redirect': redirect}))
class AwardsHandler(webapp2.RequestHandler):
def get(self):
template=env.get_template('awards.html')
user=users.get_current_user()
usernickname = user.nickname()
redirect = users.create_logout_url('/')
routine_number = (Routine.query(Routine.usernickname == usernickname)).count()
award = Awards(
routine_number=routine_number,
usernickname=usernickname
)
if (int(award.routine_number)%5==0 and int(award.routine_number)>=5):
giphy_data_source = urllib2.urlopen("http://api.giphy.com/v1/gifs/random?api_key=dc6zaTOxFJmzC&limit=10&tag=congratulations&rating=g")
giphy_json_content = giphy_data_source.read()
parsed_giphy_dictionary = json.loads(giphy_json_content)
gif_url = parsed_giphy_dictionary['data']["image_original_url"]
key=award.put()
self.response.write(template.render({'routine_number':award.routine_number,'gif':gif_url,'redirect': redirect}))
else:
key=award.put()
self.response.write(template.render({'routine_number':award.routine_number,'redirect': redirect}))
class WorkoutHandler(webapp2.RequestHandler):
def get(self):
template=env.get_template('workouts.html')
redirect = users.create_logout_url('/')
self.response.write(template.render({'redirect': redirect}))
class WorkoutsHistoryHandler(webapp2.RequestHandler):
def get(self):
template=env.get_template('workout_history.html')
user = users.get_current_user()
usernickname = user.nickname()
redirect = users.create_logout_url('/')
# timestamp=Routine.timestamp
history=Routine.query(Routine.usernickname == usernickname).fetch()
# self.response.write(history)
# for routine in history:
# str(routine.timestamp) + ":" + str(routine.quantity) + "," + str(routine.description)
self.response.write(template.render({'history':history,'redirect': redirect}))
class HowToHandler(webapp2.RequestHandler):
def get(self):
template=env.get_template('workouthowto.html')
redirect = users.create_logout_url('/')
self.response.write(template.render({'redirect': redirect}))
class UpperHandler(webapp2.RequestHandler):
def get(self):
template=env.get_template('upperbody.html')
redirect = users.create_logout_url('/')
self.response.write(template.render({'redirect': redirect}))
class LowerHandler(webapp2.RequestHandler):
def get(self):
template=env.get_template('lowerbody.html')
redirect = users.create_logout_url('/')
self.response.write(template.render({'redirect': redirect}))
class AbsHandler(webapp2.RequestHandler):
def get(self):
template=env.get_template('abs.html')
redirect = users.create_logout_url('/')
self.response.write(template.render({'redirect': redirect}))
class InstructionsHandler(webapp2.RequestHandler):
def get(self):
template=env.get_template('instructions.html')
redirect = users.create_logout_url('/')
self.response.write(template.render({'redirect': redirect}))
class SuppliesHandler(webapp2.RequestHandler):
def get(self):
template=env.get_template('supplies.html')
redirect = users.create_logout_url('/')
self.response.write(template.render({'redirect': redirect}))
app = webapp2.WSGIApplication([
('/', MainHandler),
('/about', AboutHandler),
('/workout_history', WorkoutsHistoryHandler),
('/routine', RoutineHandler),
('/awards', AwardsHandler),
('/workouts', WorkoutHandler),
('/workouthowto', HowToHandler),
('/upperbody', UpperHandler),
('/lowerbody', LowerHandler),
('/abs', AbsHandler),
('/instructions', InstructionsHandler),
('/supplies', SuppliesHandler)
], debug=True)
|
Python
| 0.99933
|
@@ -2568,31 +2568,23 @@
tag=
-congratulations
+excited
&rating=
g%22)%0A
@@ -2579,16 +2579,17 @@
&rating=
+p
g%22)%0A
|
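The award condition in AwardsHandler fires on every fifth completed routine; a quick standalone check of that logic:

for n in (4, 5, 10, 12, 15):
    print('%d -> %s' % (n, n % 5 == 0 and n >= 5))
# 4 -> False, 5 -> True, 10 -> True, 12 -> False, 15 -> True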
ebd3b45138b41663a0e534ecb53a0d3163b433a3
|
Update Shutdown
|
main.py
|
main.py
|
from flask import Flask, render_template
app = Flask(__name__)
app.DEBUG = True
@app.route("/")
def hello():
return render_template("index.html")
if __name__=="__main__":
app.run(host = "166.111.5.226")
|
Python
| 0.000001
|
@@ -33,16 +33,26 @@
template
+, request%0A
%0A%0A%0A%0Aapp
@@ -87,16 +87,198 @@
= True%0A%0A
+def shutdown_server():%0A func = request.environ.get(%22werkzeug.server.shutdown%22)%0A if func is None:%0A raise RuntimeError('Not running with the Werkzeug Server')%0A func()%0A%0A
@app.rou
@@ -309,16 +309,36 @@
return
+%22hello%22%0A #return
render_t
@@ -358,16 +358,136 @@
x.html%22)
+%0A%0A@app.route('/shutdown', methods=%5B'POST'%5D)%0Adef shutdown():%0A shutdown_server()%0A return 'Server shutting down...'%0A%0A
%0Aif __na
@@ -538,11 +538,22 @@
1.5.226%22
+, port=8888
)%0A%0A
|
d8d9dd32bf7722a3811565c8141f54b745deaf0a
|
extend timeout in autotest
|
tests/libfixmath_unittests/tests/01-run.py
|
tests/libfixmath_unittests/tests/01-run.py
|
#!/usr/bin/env python3
# Copyright (C) 2017 Inria
#
# This file is subject to the terms and conditions of the GNU Lesser
# General Public License v2.1. See the file LICENSE in the top level
# directory for more details.
import sys
from testrunner import run
# Float and print operations are slow on boards
# Got 80 iotlab-m3 and 250 on samr21-xpro
TIMEOUT = 300
def testfunc(child):
child.expect('SUCCESS', timeout=TIMEOUT)
if __name__ == "__main__":
sys.exit(run(testfunc))
|
Python
| 0.000001
|
@@ -320,20 +320,17 @@
otlab-m3
- and
+,
250 on
@@ -340,16 +340,36 @@
r21-xpro
+ and 640 on microbit
%0ATIMEOUT
@@ -371,17 +371,18 @@
MEOUT =
-3
+10
00%0A%0A%0Adef
|
50d08f3f5667e9aa2c29cd10a3d470f9b49682b1
|
fix LBWF_APPS to WF_APPS
|
lbworkflow/views/processinstance.py
|
lbworkflow/views/processinstance.py
|
# -*- coding: UTF-8 -*-
from __future__ import unicode_literals
import importlib
from django.shortcuts import get_object_or_404, render
from django.shortcuts import redirect
from django.core.urlresolvers import reverse
from django.contrib import messages
from django.core.exceptions import PermissionDenied
from lbworkflow import settings
from lbworkflow.core.helper import as_func
from lbworkflow.models import Process
from lbworkflow.models import ProcessInstance
from .helper import user_wf_info_as_dict
can_edit_wf = as_func(settings.CAN_EDIT_WF_FUNC)
can_submit_wf = as_func(settings.CAN_SUBMIT_WF_FUNC)
can_view_wf = as_func(settings.CAN_VIEW_WF_FUNC)
def import_wf_views(wf_code):
wf_module = settings.LBWF_APPS.get(wf_code)
return importlib.import_module('%s.views' % wf_module)
def new(request, wf_code):
views = import_wf_views(wf_code)
process = Process.objects.get(code=wf_code)
if not can_submit_wf(process, request.user):
raise PermissionDenied
return views.new(request, wf_code=wf_code)
def show_list(request, wf_code):
views = import_wf_views(wf_code)
return views.show_list(request, wf_code=wf_code)
def edit(request, pk):
instance = get_object_or_404(ProcessInstance, pk=pk)
wf_code = instance.process.code
views = import_wf_views(wf_code)
if not can_edit_wf(instance, request.user):
messages.info(request, 'No permission: %s' % instance)
return redirect(reverse('wf_my_wf'))
return views.edit(request, instance.content_object)
def _default_detail(request, instance, ext_ctx={}, template_name=None):
if not template_name:
template_name = '%s/detail.html' % instance.process.code
ctx = {}
ctx.update(ext_ctx)
return render(request, template_name, ctx)
def detail(request, pk, template_name=None, ext_ctx={}):
instance = ProcessInstance.objects.get(pk=pk)
views = import_wf_views(instance.process.code)
is_print = ext_ctx.get('is_print')
__ext_param_process = getattr(views, '__ext_param_process', None)
ctx = {}
ctx.update(ext_ctx)
if not can_view_wf(instance, request.user, ext_param_process=__ext_param_process):
messages.info(request, 'No permission to view this process')
return redirect(reverse('wf_my_wf'))
user_wf_info = user_wf_info_as_dict(instance.content_object, request.user)
ctx.update(user_wf_info)
if not is_print and instance.cur_activity.can_edit \
and instance.cur_activity.audit_view_type == 'edit' \
and ext_ctx['workitem'] and instance.cur_activity.resolution == 'started':
return redirect(reverse('wf_edit', args=[instance.pk]))
detail_func = getattr(views, 'detail', _default_detail)
ret = detail_func(request, instance, ext_ctx=ctx)
if isinstance(ret, dict):
ret.update(ctx)
return _default_detail(request, instance, ret, template_name)
return ret
|
Python
| 0.999989
|
@@ -716,10 +716,8 @@
ngs.
-LB
WF_A
|
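import_wf_views() above is a thin wrapper over importlib.import_module with a '%s.views' pattern; the same mechanism against a stdlib module, just for illustration:

import importlib

mod = importlib.import_module('%s.decoder' % 'json')  # same pattern as '%s.views'
print(mod.JSONDecoder)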
d46fefd74a5b1318058740868bde0ddc96635645
|
Test that formatting doesn't happen without format strings/types.
|
tests/tabular_output/test_preprocessors.py
|
tests/tabular_output/test_preprocessors.py
|
# -*- coding: utf-8 -*-
"""Test CLI Helpers' tabular output preprocessors."""
from __future__ import unicode_literals
from decimal import Decimal
from cli_helpers.tabular_output.preprocessors import (align_decimals,
bytes_to_string,
convert_to_string,
quote_whitespaces,
override_missing_value,
format_numbers)
def test_convert_to_string():
"""Test the convert_to_string() function."""
data = [[1, 'John'], [2, 'Jill']]
headers = [0, 'name']
expected = ([['1', 'John'], ['2', 'Jill']], ['0', 'name'])
assert expected == convert_to_string(data, headers)
def test_override_missing_values():
"""Test the override_missing_values() function."""
data = [[1, None], [2, 'Jill']]
headers = [0, 'name']
expected = ([[1, '<EMPTY>'], [2, 'Jill']], [0, 'name'])
assert expected == override_missing_value(data, headers,
missing_value='<EMPTY>')
def test_bytes_to_string():
"""Test the bytes_to_string() function."""
data = [[1, 'John'], [2, b'Jill']]
headers = [0, 'name']
expected = ([[1, 'John'], [2, 'Jill']], [0, 'name'])
assert expected == bytes_to_string(data, headers)
def test_align_decimals():
"""Test the align_decimals() function."""
data = [[Decimal('200'), Decimal('1')], [
Decimal('1.00002'), Decimal('1.0')]]
headers = ['num1', 'num2']
expected = ([['200', '1'], [' 1.00002', '1.0']], ['num1', 'num2'])
assert expected == align_decimals(data, headers)
def test_align_decimals_empty_result():
"""Test align_decimals() with no results."""
data = []
headers = ['num1', 'num2']
expected = ([], ['num1', 'num2'])
assert expected == align_decimals(data, headers)
def test_quote_whitespaces():
"""Test the quote_whitespaces() function."""
data = [[" before", "after "], [" both ", "none"]]
headers = ['h1', 'h2']
expected = ([["' before'", "'after '"], ["' both '", "'none'"]],
['h1', 'h2'])
assert expected == quote_whitespaces(data, headers)
def test_quote_whitespaces_empty_result():
"""Test the quote_whitespaces() function with no results."""
data = []
headers = ['h1', 'h2']
expected = ([], ['h1', 'h2'])
assert expected == quote_whitespaces(data, headers)
def test_quote_whitespaces_non_spaces():
"""Test the quote_whitespaces() function with non-spaces."""
data = [["\tbefore", "after \r"], ["\n both ", "none"]]
headers = ['h1', 'h2']
expected = ([["'\tbefore'", "'after \r'"], ["'\n both '", "'none'"]],
['h1', 'h2'])
assert expected == quote_whitespaces(data, headers)
def test_format_integer():
"""Test formatting for an INTEGER datatype."""
data = [[1], [1000], [1000000]]
headers = ['h1']
result = format_numbers(data,
headers,
column_types=(int,),
decimal_format=',d',
float_format=',')
expected = [['1'], ['1,000'], ['1,000,000']]
assert expected == result[0]
def test_format_decimal():
"""Test formatting for a DECIMAL(12, 4) datatype."""
data = [[Decimal('1.0000')], [Decimal('1000.0000')], [Decimal('1000000.0000')]]
headers = ['h1']
result = format_numbers(data,
headers,
column_types=(float,),
decimal_format=',d',
float_format=',')
expected = [['1.0000'], ['1,000.0000'], ['1,000,000.0000']]
assert expected == result[0]
def test_format_float():
"""Test formatting for a REAL datatype."""
data = [[1.0], [1000.0], [1000000.0]]
headers = ['h1']
result = format_numbers(data,
headers,
column_types=(float,),
decimal_format=',d',
float_format=',')
expected = [['1.0'], ['1,000.0'], ['1,000,000.0']]
assert expected == result[0]
|
Python
| 0
|
@@ -4284,28 +4284,616 @@
ssert expected == result%5B0%5D%0A
+%0A%0Adef test_format_numbers_no_format_strings():%0A %22%22%22Test that numbers aren't formatted without format strings.%22%22%22%0A data = ((1), (1000), (1000000))%0A headers = ('h1',)%0A result = format_numbers(data, headers, column_types=(int,))%0A assert data, headers == result%0A%0A%0Adef test_format_numbers_no_column_types():%0A %22%22%22Test that numbers aren't formatted without column types.%22%22%22%0A data = ((1), (1000), (1000000))%0A headers = ('h1',)%0A result = format_numbers(data, headers, decimal_format=',d',%0A float_format=',')%0A assert data, headers == result%0A
|
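The decimal_format/float_format values these tests pass are plain Python format specs; the core behaviour in isolation:

from decimal import Decimal

print(format(1000000, ',d'))              # 1,000,000
print(format(Decimal('1000.0000'), ','))  # 1,000.0000
print(format(1000000.0, ','))             # 1,000,000.0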
cec6a0003d9167426bef5eb2fdfd1582b1e8f8a9
|
add accuracy figure
|
main.py
|
main.py
|
#!/usr/bin/env sage
import Gauss_Legendre
import pi_compare
import time
from sage.all import *
class Analyser(object):
def __init__(self, method_list):
self.end = 1000
self.start = 100
self.step = 100
self.time_set = list()
self.figure = point((0,0))
self.figure2 = None
self.methods = method_list
def run(self):
for m in self.methods:
for d in range(self.start, self.end, self.step):
start_time = time.time()
m.function(d)
end_time = time.time() - start_time
self.time_set.append((d, end_time))
print d, end_time
self.figure += list_plot(self.time_set, color = m.color, legend_label = m.name)
save(self.figure.plot(), filename="time.svg")
class Pi_Func(object):
def __init__(self, name, color, function):
self.name = name
self.color = color
self.function = function
if __name__ == "__main__":
method_list = [Pi_Func("Gauss_Legendre", "red", Gauss_Legendre.pi)]
analyse = Analyser(method_list)
analyse.run()
|
Python
| 0.000001
|
@@ -229,16 +229,46 @@
list()%0A
+%09%09self.accuracy_list = list()%0A
%09%09self.f
@@ -309,12 +309,20 @@
2 =
-None
+point((0,0))
%0A%09%09s
@@ -475,16 +475,22 @@
e()%0A%09%09%09%09
+res =
m.functi
@@ -575,16 +575,98 @@
_time))%0A
+%09%09%09%09accuracy = pi_compare.compare(res)%5B0%5D%0A%09%09%09%09self.accuracy_list.append(accuracy)%0A
%09%09%09%09prin
@@ -674,24 +674,34 @@
d, end_time
+, accuracy
%0A%09%09%09self.fig
@@ -778,47 +778,190 @@
)%0A%09%09
-save(self.figure.plot(), filename=%22time
+%09self.figure2 += list_plot(self.accuracy_list, color = m.color, legend_label = m.name)%0A%09%09save(self.figure.plot(), filename=%22time.svg%22)%0A%09%09save(self.figure2.plot(), filename=%22accurancy
.svg
|
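The measurement in run() is plain wall-clock timing; stripped of Sage, the pattern is just:

import time

start_time = time.time()
sum(i * i for i in range(10 ** 6))  # stand-in for m.function(d)
print(time.time() - start_time)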
e7ad4520f295de6fad559051203689f28831db79
|
fix bugs
|
main.py
|
main.py
|
# !/usr/bin/env python3
# -*- coding: utf-8 -*-
# -----------------------------
# Author: Bruce Zhang
# Email: zy183525594@163.com
# Version: 0.1
# -----------------------------
# License: The MIT License (MIT)
# Copyright (c) 2015 Bruce Zhang
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sys
import irc.bot
import irc.strings
from irc.client import ip_numstr_to_quad, ip_quad_to_numstr
import json
from time import strftime
from timeloop import TimeLoop
import re
from bs4 import BeautifulSoup
import requests
import chardet
from ImageHandler import ImageHandler
class MyBot(irc.bot.SingleServerIRCBot):
admins = ["bruceutut", "bruceutut-m"]
def __init__(self, channel, nickname, server, port=6667):
irc.bot.SingleServerIRCBot.__init__(self, [(server, port)],
nickname, nickname)
self.channel = channel
self.timer = TimeLoop(1, self.time_func)
# TODO:10 More channels support
def time_func(self):
hour = int(strftime("%H"))
minute = int(strftime("%M"))
sec = int(strftime("%S"))
if minute == 0 and sec == 0:
self.connection.action(self.channel, "It's %d o'clock now!" % hour)
def on_nicknameinuse(self, c, e):
c.nick(c.get_nickname() + "_")
def on_welcome(self, c, e):
c.join(self.channel)
self.timer.start()
def on_privmsg(self, c, e):
pass
def on_pubmsg(self, c, e):
self.url_detect(e.arguments[0])
        # The following condition only matches when '$' is at the beginning
a = e.arguments[0].split('$')
if len(a) > 1 and a[0] == "":
self.do_command(e, a[1].strip())
def url_detect(self, msg):
words = msg.split()
for word in words:
if self.is_url(word):
if self.is_image(word):
image = ImageHandler(word)
imtype = image.get_format()
imsize = image.get_size("%W x %H")
self.connection.privmsg(self.channel, "[ Image ] 类型: %s 尺寸: %s" % (imtype, imsize))
else:
title = self.get_title(word)
if title:
self.connection.privmsg(self.channel, "[ %s ] %s" % (title, word))
def is_url(self, url):
return re.match(r'^https?:\/\/', url)
def is_image(self, url):
return re.match(r'\.jpg$|\.png$|\.ico$|\.gif$|\.tiff$|\.jpeg$|\.bmp$|\.svg$|\.tga$', url, re.IGNORECASE)
def get_title(self, url):
head = {
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64; rv:43.0) Gecko/20100101 Firefox/43.0"
}
r = requests.get(url, headers = head)
r.encoding = chardet.detect(r.text.encode())["encoding"]
soup = BeautifulSoup(r.text, "html5lib")
return soup.title.string
def on_dccmsg(self, c, e):
# non-chat DCC messages are raw bytes; decode as text
text = e.arguments[0].decode('utf-8')
c.privmsg("You said: " + text)
def on_dccchat(self, c, e):
if len(e.arguments) != 2:
return
args = e.arguments[1].split()
if len(args) == 4:
try:
address = ip_numstr_to_quad(args[2])
port = int(args[3])
except ValueError:
return
self.dcc_connect(address, port)
def do_command(self, e, cmd):
nick = e.source.nick
c = self.connection
cmd_args = cmd.split()
cmd = cmd_args[0]
args = " ".join(cmd_args[1:])
self.execute_command(cmd, args, nick, self.channel)
def execute_command(self, cmd, args, nick, channel):
# DONE:0 Finish function command_string
# TODO:0 More commands and command interface
simplecommands = {
"say": "%s wanted me to say: %s" % (nick, args)
}
c = self.connection
if cmd in simplecommands.keys():
c.privmsg(self.channel, simplecommands[cmd])
elif cmd == "quit":
if nick in self.admins:
c.quit("admin %s asked me to quit." % nick)
sys.exit(0)
else:
c.privmsg(self.channel,
"%s: You're not one of the admins." % nick)
else:
return False
def main():
# DONE:10 Try using config file
fp = None
try:
fp = open('config.json', "r")
config = json.load(fp)
channel = config["channel"]
nickname = config["nick"]
server = config["network"]
port = config["port"]
except IOError:
print("I/O Error.")
import sys
sys.exit(1)
finally:
if fp:
fp.close()
bot = MyBot(channel, nickname, server, port)
bot.start()
if __name__ == "__main__":
main()
|
Python
| 0.000001
|
@@ -3048,32 +3048,57 @@
nection.privmsg(
+%0A
self.channel, %22%5B
@@ -3472,67 +3472,59 @@
.jpg
-$
%7C%5C.png
-$
%7C%5C.ico
-$
%7C%5C.gif
-$
%7C%5C.tiff
-$
%7C%5C.jpeg
-$
%7C%5C.bmp
-$
%7C%5C.svg
-$
%7C%5C.tga
-$
+%EF%BC%9A
', u
@@ -3739,19 +3739,17 @@
headers
- =
+=
head)%0A
|
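One thing worth flagging in the code above: is_image() uses re.match(), which anchors at position 0, so a pattern of trailing extensions like \.jpg$ can never match a full URL; re.search() is what the logic needs. A corrected standalone check (a fix, not what the record ships):

import re

def is_image(url):
    return bool(re.search(r'\.(jpe?g|png|ico|gif|tiff|bmp|svg|tga)$',
                          url, re.IGNORECASE))

print(is_image('http://example.com/cat.PNG'))    # True
print(is_image('http://example.com/page.html'))  # False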
57c10f38c0103c7ac6682272829f8eef55b88ea7
|
Set thread as daemon to kill it when program exits
|
main.py
|
main.py
|
# 3rd party libs
from slackclient import SlackClient
from flask import request
from flask import Flask
import requests
# Builtin libs
import threading
import json
import time
import os
# internals
from logs import logs as log
import plugins as plugs
import contentextract
import personality
import intent
logs = log()
token = "Your token here"
app = Flask(__name__)
def slack():
    '''Slack rtm reader started in separate thread'''
logs.write("In slack function in new thread", 'working')
sc = SlackClient(token)
if sc.rtm_connect():
logs.write("Connected to rtm socket", 'success')
while True:
time.sleep(0.1)
# Get message from rtm socket
message = sc.rtm_read()
# If the message isn't empty
if message != []:
# If the message is text as opposed to a notification. Eventually
# plan to have other kinds of messages in a backend communications
# channel.
if message[0].keys()[0] == 'text':
command = message[0].values()[0]
logs.write(command, 'working')
# The commands are json or plain text. If it isn't a json
# backend command, interpret it as a "normal" command
try:
command = json.loads(command)
except ValueError:
command = [{'type': 'command'}, {'devices': 'all'}, {
'action': "{0}".format(command)}]
# Json slack commands or management can eventually be formatted like so: [{"type":"management/command",{"devices":"all/mobile/desktop/network/device name"},{"action":"message content"}]
# Not sure if I want to do that in the backend or command
# channel or what really, but I'm definitely working with it.
commandtype = command[0]
devices = command[1]
action = command[2]
# Replace thisdevicename with whatever you want to name yours
# in the W.I.L.L slack network (obviously)
if devices.values()[0] == 'all' or devices.values()[0] == "thisdevicename":
logs.write("Checking local W.I.L.L server", 'trying')
# Hit W.I.L.L with the command. This is also where you
# could add exceptions or easter eggs
answer = requests.get(
'http://127.0.0.1:5000/?context=command&command={0}'.format(action.values()[0])).text
print sc.api_call("chat.postMessage", channel="#w_i_l_l", text="{0}".format(answer), username='W.I.L.L')
else:
logs.write("Connection Failed, invalid token?", 'error')
@app.route("/")
def main():
'''Take command from 127.0.0.1:5000 and run it through various modules'''
try:
# Get command
command = request.args.get("command", '')
logs.write("Command is {0}".format(command), 'working')
logs.write("Analyzing content in command", 'trying')
# Run command through contentextract.py
contentextract.main(command)
logs.write("Analyzed command content", 'success')
logs.write("Trying to load plugin modules", 'trying')
# Load plugins using plugins.py
plugins = plugs.load()
# If the plugins encounter an error
if plugins is False:
logs.write("Could not load plugins", 'error')
return "error"
# If plugins.py says that there are no plugins found. All functions are
# a plugin so no point in continuing
elif plugins == []:
logs.write("No plugins found", 'error')
return 'error'
logs.write("Successfully loaded plugin modules", 'success')
logs.write("Using the intent module to parse the command", 'trying')
# Use intent.py to try to extract intent from command
parsed = intent.parse(command, plugins)
logs.write("Parsed the command", 'success')
# If the intent parser says to execute the following plugin. Leaves
# room if I ever want to expand the capabilities of the intent module
if parsed.keys()[0] == "execute":
logs.write("Executing plugin {0}".format(
parsed.values()[0].keys()[0]), 'trying')
response = plugs.execute(parsed.values()[0], command)
logs.write("Found answer {0}, returning it".format(
response), 'success')
return response
elif parsed.keys()[0]=="error":
logs.write("Parse function returned the error {0}".format(parsed.values()[0]),'working')
if parsed.values()[0]=="notfound":
#This would have unhandled exceptions if the search plugin was gone, but I can't imagine why it would be
logs.write("The error means that the command was not recognized",'working')
logs.write("Using the search plugin on the command phrase", 'working')
logs.write("Trying to find search plugin", 'trying')
for plugin in plugins:
if plugin.keys()[0]=="search":
searchplug=plugin
break
logs.write("Found search plugin", 'success')
response=plugs.execute(searchplug,command)
logs.write("Found answer {0}, returning it".format(response), 'success')
return response
else:
return "Unhandled error {0}. If you get this error message something is broken in the intent module. Please raise an issue on https://github.com/ironman5366/W.I.L.L".format(str(parsed.values()[0]))
except Exception as e:
logs.write(e, 'error')
return str(e)
if __name__ == "__main__":
'''Open logs, check log settings, and start the flask server and slack thread'''
logs.openlogs()
logs.write('''
\ / | | |
\ / | | |
\ / | | |
\ /\ / | | |
\ / \ / | | |
\/ \/ | ------------ ------------
''', 'success')
if logs.debug():
debugval = True
else:
debugval = False
logs.write("Debug value is {0}".format(debugval), 'working')
logs.write("Connecting to rtm socket", 'trying')
t = threading.Thread(target=slack)
t.start()
logs.write("Starting flask server on localhost", 'trying')
print app.run(debug=debugval, use_reloader=False)
|
Python
| 0
|
@@ -6443,16 +6443,68 @@
=slack)%0A
+ t.daemon=True #Kills the thread on program exit%0A
t.st
|
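The daemon flag this diff adds, demonstrated in isolation: a daemon thread dies with the main thread instead of keeping the process alive:

import threading
import time

t = threading.Thread(target=lambda: time.sleep(60))
t.daemon = True  # without this, interpreter exit would wait ~60s
t.start()
print('main exiting; the daemon thread is killed with it')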
1e6958314bb2f51927b196be0a97dccbf7933099
|
add remove term view
|
src/apps/entrez/views.py
|
src/apps/entrez/views.py
|
# -*- coding: utf-8 -*-
from django.core.urlresolvers import reverse
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render_to_response, get_object_or_404
from django.contrib.auth.decorators import login_required
from django.template import RequestContext
from django.views.decorators.csrf import csrf_exempt
from entrez.models import EntrezEntry, EntrezTerm
from entrez.forms import AddTermForm
from entrez.utils import get_current_date
def get_user_all_terms(request):
return EntrezTerm.objects.filter(owner=request.user).select_related()
def get_user_all_entries(request):
return EntrezEntry.objects.filter(owner=request.user).select_related()
@login_required()
def index(request):
tpl = 'entrez/entrez_index.html'
ctx = {}
ctx["objects"] = get_user_all_entries(request)
ctx["terms"] = get_user_all_terms(request)
ctx["form"] = AddTermForm()
return render_to_response(tpl, ctx, context_instance=RequestContext(request))
@login_required()
def term_list(request, slug):
tp = 'entrez/entrez_term_list.html'
# todo: permission to check other user's term
term = EntrezTerm.objects.get(slug=slug)
objects = EntrezEntry.objects.filter(term=term).select_related()
terms = EntrezTerm.objects.filter(owner=request.user).select_related()
form = AddTermForm()
ct = {
"objects": objects,
"terms": terms,
"form": form,
"current_term": term,
}
return render_to_response(tp, ct, context_instance=RequestContext(request))
@csrf_exempt
def add_term(request):
form_class = AddTermForm
if request.method == 'POST':
form = form_class(request.POST)
if form.is_valid():
term = EntrezTerm.objects.create(
name=form.cleaned_data["name"],
slug=form.cleaned_data["slug"],
db=form.cleaned_data["db"],
period=form.cleaned_data["period"],
owner=request.user,
term=form.cleaned_data["term"],
creation_date=get_current_date(),
lastedit_date=get_current_date(),
)
term.save()
return HttpResponseRedirect(reverse('entrez-index', ))
@csrf_exempt
def mark_as_read(request):
if request.method == "POST":
entry = get_object_or_404(EntrezEntry, pk=request.POST.get('feed_item_id'))
entry.read = True
entry.save()
return HttpResponse()
@csrf_exempt
def mark_as_unread(request):
if request.method == "POST":
entry = get_object_or_404(EntrezEntry, pk=request.POST.get('feed_item_id'))
entry.read = False
entry.save()
return HttpResponse()
|
Python
| 0
|
@@ -2236,24 +2236,290 @@
ndex', ))%0A%0A%0A
+@csrf_exempt%0Adef remove_term(request):%0A if request.method == 'POST':%0A if form.is_valid():%0A term = get_object_or_404(EntrezTerm, pk=request.POST.get('term_id'))%0A term.status = False%0A term.save()%0A%0A return HttpResponse()%0A%0A%0A
@csrf_exempt
|
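The remove_term view this diff adds is a soft delete (a status flag is flipped and the row kept), but note it tests form.is_valid() although no form is bound in that view. A corrected sketch of the apparent intent, reusing the imports already at the top of views.py (the status field is assumed from the diff):

@csrf_exempt
def remove_term(request):
    if request.method == 'POST':
        term = get_object_or_404(EntrezTerm, pk=request.POST.get('term_id'))
        term.status = False  # soft delete: keep the row, hide the term
        term.save()
    return HttpResponse()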
3973ae5dbb48d6200c6a12da0018365c67babce0
|
Fix buggy argument parsing.
|
analytics/management/commands/update_analytics_counts.py
|
analytics/management/commands/update_analytics_counts.py
|
from argparse import ArgumentParser
from datetime import timedelta
from django.core.management.base import BaseCommand
from django.utils import timezone
from django.utils.dateparse import parse_datetime
from analytics.models import RealmCount, UserCount
from analytics.lib.counts import COUNT_STATS, CountStat, process_count_stat
from zerver.lib.timestamp import datetime_to_string, is_timezone_aware
from zerver.models import UserProfile, Message
from typing import Any
class Command(BaseCommand):
help = """Fills Analytics tables.
Run as a cron job that runs every hour."""
def add_arguments(self, parser):
# type: (ArgumentParser) -> None
parser.add_argument('--range-start', '-s',
type=str,
help="Time to backfill from.")
parser.add_argument('--range-end', '-e',
type=str,
help='Time to backfill to.',
default=datetime_to_string(timezone.now()))
parser.add_argument('--utc',
type=bool,
help="Interpret --range-start and --range-end as times in UTC.",
default=False)
parser.add_argument('--stat', '-q',
type=str,
help="CountStat to process. If omitted, all stats are processed")
def handle(self, *args, **options):
# type: (*Any, **Any) -> None
range_start = parse_datetime(options['range_start'])
if 'range_end' in options:
range_end = parse_datetime(options['range_end'])
else:
range_end = range_start - timedelta(seconds = 3600)
# throw error if start time is greater than end time
if range_start > range_end:
raise ValueError("--range-start cannot be greater than --range-end.")
if options['utc'] is True:
range_start = range_start.replace(tzinfo=timezone.utc)
range_end = range_end.replace(tzinfo=timezone.utc)
if not (is_timezone_aware(range_start) and is_timezone_aware(range_end)):
raise ValueError("--range-start and --range-end must be timezone aware. Maybe you meant to use the --utc option?")
if 'stat' in options:
process_count_stat(COUNT_STATS[options['stat']], range_start, range_end)
else:
for stat in COUNT_STATS.values():
process_count_stat(stat, range_start, range_end)
|
Python
| 0
|
@@ -949,16 +949,33 @@
kfill to
+, defaulst to now
.',%0A
@@ -1523,21 +1523,19 @@
range_
-start
+end
= parse
@@ -1559,21 +1559,19 @@
%5B'range_
-start
+end
'%5D)%0A
@@ -1581,30 +1581,42 @@
if
-'range_end' in options
+options%5B'range_start'%5D is not None
:%0A
@@ -1623,35 +1623,37 @@
range_
-end
+start
= parse_datetim
@@ -1661,35 +1661,37 @@
(options%5B'range_
-end
+start
'%5D)%0A else
@@ -1702,35 +1702,37 @@
range_
-end
+start
= range_start -
@@ -1724,21 +1724,19 @@
= range_
-start
+end
- timed
@@ -1966,16 +1966,8 @@
tc'%5D
- is True
:%0A
@@ -2320,25 +2320,35 @@
if
+options%5B
'stat'
+%5D
i
-n options
+s not None
:%0A
|
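After the fix, the defaults in handle() reduce to: range_end defaults to now, and a missing range_start becomes one hour before range_end. The arithmetic in isolation:

from datetime import datetime, timedelta

range_end = datetime.utcnow()  # --range-end default
range_start = None             # --range-start omitted
if range_start is None:
    range_start = range_end - timedelta(seconds=3600)
assert range_start <= range_end
print('%s -> %s' % (range_start, range_end))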
a2ae1aaab669c7cb54bd6cae43fc77e7bea57373
|
update build system
|
make.py
|
make.py
|
# -*- coding: utf-8 -*-
import re
import os
class Config:
src = 'src/IR101.md'
dest = 'IR101.md'
pattern = '{{import\((.+)\)}}'
def import_resource(match):
if not match:
return ''
path = match.groups()[0]
if os.path.isfile(path):
return open(path).read()
else:
return ''
def main():
raw = open(Config.src).read()
build = re.sub(Config.pattern, import_resource, raw)
open(Config.dest, 'w').write(build)
if __name__ == '__main__':
main()
|
Python
| 0.000001
|
@@ -240,93 +240,32 @@
-if os.path.isfile(path):%0A return open(path).read()%0A else:%0A return ''
+return open(path).read()
%0A%0Ade
|
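main() leans on re.sub() accepting a callable replacement, exactly like import_resource above; the mechanism in miniature (the pattern here is illustrative, not Config.pattern):

import re

def shout(match):
    return match.group(1).upper()

print(re.sub(r'\{\{(\w+)\}\}', shout, 'hello {{world}}'))  # hello WORLD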
4e32167e1c9205ef5d377bee1b3147e84604e2e2
|
test code in maze module
|
maze.py
|
maze.py
|
# Depth-first maze generation from
# http://www.mazeworks.com/mazegen/mazetut/index.htm
from random import choice as random_choice
from sys import stdout
def make_maze(width, height):
walls = all_walls(width, height)
stack = []
current_cell = (0, 0)
cells_visited = 1
while cells_visited < width*height:
intact_neighbours = filter(lambda(x): is_intact(x, walls),
get_neighbours(current_cell, width, height))
if len(intact_neighbours) > 0:
next_cell = random_choice(intact_neighbours)
walls.remove(wall_between(current_cell, next_cell))
stack.append(current_cell)
current_cell = next_cell
cells_visited += 1
else:
current_cell = stack.pop()
return walls
def is_valid_cell(cell, width, height):
return cell[0]>=0 and cell[1]>=0 and cell[0]<width and cell[1]<height
def get_neighbours(cell, width, height):
return filter(lambda(x): is_valid_cell(x, width, height),
set([above(cell), below(cell), left(cell), right(cell)]))
def is_intact(cell, walls):
return walls_of(cell).issubset(walls)
def above(cell):
return (cell[0], cell[1] - 1)
def left(cell):
return (cell[0] - 1, cell[1])
def below(cell):
return (cell[0], cell[1] + 1)
def right(cell):
return (cell[0] + 1, cell[1])
def top_wall(cell):
return (cell[0], cell[1], cell[0]+1, cell[1])
def bottom_wall(cell):
return (cell[0], cell[1]+1, cell[0]+1, cell[1]+1)
def left_wall(cell):
return (cell[0], cell[1], cell[0], cell[1]+1)
def right_wall(cell):
return (cell[0]+1, cell[1], cell[0]+1, cell[1]+1)
def walls_of(cell):
return set([top_wall(cell), bottom_wall(cell),
left_wall(cell), right_wall(cell)])
def wall_between(cell_1, cell_2):
common_walls = walls_of(cell_1).intersection(walls_of(cell_2))
return common_walls.pop()
def all_walls(width, height):
walls = set()
for x in range(0, width ):
for y in range(0, height):
walls.update(walls_of((x,y)))
return walls
def print_maze(width, height, walls):
for y in range(0, height+1):
for x in range(0, width+1):
if (x,y-1,x,y) in walls:
stdout.write('|')
else:
stdout.write(' ')
if (x,y,x+1,y) in walls:
stdout.write('_')
else:
stdout.write(' ')
stdout.write('\n')
|
Python
| 0
|
@@ -140,16 +140,22 @@
s import
+ argv,
stdout%0A
@@ -2485,8 +2485,157 @@
e('%5Cn')%0A
+%0Aif __name__ == %22__main__%22:%0A width, height = int(argv%5B1%5D), int(argv%5B2%5D)%0A walls = make_maze(width, height)%0A print_maze(width, height, walls)%0A
|
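wall_between() works because adjacent cells share exactly one wall tuple; a quick hand check for cells (0,0) and (1,0) using the wall encoding above:

left_cell  = {(0, 0, 1, 0), (0, 1, 1, 1), (0, 0, 0, 1), (1, 0, 1, 1)}  # walls_of((0, 0))
right_cell = {(1, 0, 2, 0), (1, 1, 2, 1), (1, 0, 1, 1), (2, 0, 2, 1)}  # walls_of((1, 0))
print(left_cell & right_cell)  # the single shared wall: (1, 0, 1, 1)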
a99e97ffce75263e80f7a3334bbe3ba02196291d
|
Remove infinite loop
|
pattsgui/editor.py
|
pattsgui/editor.py
|
##
## patts-qt - Qt GUI client for PATTS
## Copyright (C) 2015 Delwink, LLC
##
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU Affero General Public License as published by
## the Free Software Foundation, version 3 only.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU Affero General Public License for more details.
##
## You should have received a copy of the GNU Affero General Public License
## along with this program. If not, see <http://www.gnu.org/licenses/>.
##
import patts
from PyQt4.QtCore import QAbstractTableModel, Qt
from PyQt4.QtGui import QApplication, QDialog, QGraphicsWidget, QHBoxLayout
from PyQt4.QtGui import QPushButton, QStyle, QStyleOptionButton, QTableView
from PyQt4.QtGui import QTableWidgetItem, QVBoxLayout
from .lang import _
class Field:
def __init__(self, name, boolean=False, quoted=False):
self._name = name
self._bool = boolean
self._quoted = quoted
@property
def name(self):
return self._name
@property
def is_bool(self):
return self._bool
@property
def quoted(self):
return self.quoted
def format(self, val):
if self._bool:
return str(int(bool(val)))
return patts.escape_string(str(val), self._quoted)
class PattsTableModel(QAbstractTableModel):
def __init__(self, table_name, get_table_info, fields, parent=None):
super().__init__(parent)
table_info = get_table_info()
self._table_name = table_name
self._primary_key = patts.get_primary_key(table_name)
self._keys = [k for k in table_info]
self._keys.sort()
self._fields = fields
self.init_rows(table_info)
self._orig = []
for row in self._rows:
self._orig.append([col for col in row])
def init_rows(self, table_info):
self._rows = []
for k in self._keys:
row = table_info[k]
field_data = [row[field.name] for field in self._fields]
self._rows.append(field_data)
def rowCount(self, parent):
return len(self._rows)
def columnCount(self, parent):
return len(self._fields)
def flags(self, index):
flags = Qt.ItemIsEditable | Qt.ItemIsEnabled
if self._fields[index.column()].is_bool:
flags |= Qt.ItemIsUserCheckable
return flags
def _raw_data(self, index, role):
row = index.row()
col = index.column()
if role in (Qt.DisplayRole, Qt.EditRole):
return self._rows[row][col]
def data(self, index, role):
if self._fields[index.column()].is_bool:
if role == Qt.CheckStateRole:
if self._raw_data(index, Qt.DisplayRole):
return Qt.Checked
else:
return Qt.Unchecked
else:
return None
return self._raw_data(index, role)
def _set(self, index, value):
self._rows[index.row()][index.column()] = value
self.dataChanged.emit(index, index)
return True
def setData(self, index, value, role=Qt.EditRole):
row = index.row()
col = index.column()
if self._fields[index.column()].is_bool:
if role == Qt.CheckStateRole:
return self._set(index, value)
return False
if role == Qt.EditRole:
return self._set(index, value)
return False
def headerData(self, section, orientation, role):
if role == Qt.DisplayRole:
if orientation == Qt.Horizontal:
return _('.'.join((self.table, self._fields[section].name)))
return self._keys[section]
def primary_key_value(self, i):
return self._keys[i]
def add_change(self, queries, changes, row, i, j):
field = self._fields[j]
changes.append(field.name + '=' + field.format(row[j]))
def save_row_query(self, i):
row = self._rows[i]
pkval = self.primary_key_value(i)
changes = []
queries = []
try:
orig_row = self._orig[i]
except IndexError:
orig_row = None
for j in range(len(row)):
if not orig_row or row[j] != orig_row[j]:
self.add_change(queries, changes, row, i, j)
if changes:
changes = ','.join(changes)
query = 'UPDATE {} SET {} WHERE {}={}'.format(self.table, changes,
self.primary_key,
pkval)
queries.append((patts.query, (query,)))
return queries
def save(self):
queries = []
for i in range(len(self._rows)):
query = self.save_row_query(i)
if query:
queries += query
for query in queries:
# each item here is a tuple whose first element is a function and
# whose second element is another tuple of the arguments
query[0](*query[1])
@property
def table(self):
return self._table_name
@property
def primary_key(self):
return self._primary_key
class UserTableModel(PattsTableModel):
def __init__(self, parent=None):
fields = (
Field('state', boolean=True),
Field('isAdmin', boolean=True),
Field('firstName', quoted=True),
Field('middleName', quoted=True),
Field('lastName', quoted=True)
)
super().__init__('User', patts.get_users, fields, parent)
def primary_key_value(self, i):
return patts.escape_string(super().primary_key_value(i), quote=True)
def add_change(self, queries, changes, row, i, j):
field = self._fields[j]
if field.name == 'state':
queries.append((patts.delete_user, (self._keys[i],)))
elif field.name == 'isAdmin':
val = field.format(row[j])
if val == '0':
queries.append((patts.revoke_admin, (self._keys[i], '%')))
elif val == '1':
queries.append((patts.grant_admin, (self._keys[i], '%')))
else:
raise ValueError('Illegal boolean value')
else:
super().add_change(queries, changes, row, i, j)
class Editor(QDialog):
def __init__(self, model):
super().__init__()
tableView = QTableView()
tableView.setModel(model)
cancelButton = QPushButton(_('cancel'))
cancelButton.clicked.connect(self.reject)
okButton = QPushButton(_('OK'))
okButton.clicked.connect(self.accept)
buttonBox = QHBoxLayout()
buttonBox.addStretch(1)
buttonBox.addWidget(cancelButton)
buttonBox.addWidget(okButton)
layout = QVBoxLayout()
layout.addWidget(tableView)
layout.addLayout(buttonBox)
self.setLayout(layout)
self.accepted.connect(model.save)
self.setWindowTitle(_('Admin.edit' + model.table))
self.resize(600, 300)
|
Python
| 0.002062
|
@@ -1324,16 +1324,17 @@
rn self.
+_
quoted%0A%0A
|
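The one-character diff above fixes a classic self-recursive property: `return self.quoted` inside the `quoted` property re-enters itself forever. The failure mode in miniature:

class C(object):
    @property
    def quoted(self):
        return self.quoted  # re-enters the property -> infinite recursion

try:
    C().quoted
except RuntimeError:  # RecursionError subclasses RuntimeError on Python 3
    print('maximum recursion depth exceeded')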
6370ccee3057ee5aeaac98a59b2f4a5c84fbd6cb
|
Make additional exceptions subclass the appropriate exception class
|
cumulusci/core/exceptions.py
|
cumulusci/core/exceptions.py
|
from __future__ import unicode_literals
class CumulusCIException(Exception):
pass
class CumulusCIUsageError(CumulusCIException):
pass
class CumulusCIFailure(CumulusCIException):
pass
class NotInProject(CumulusCIUsageError):
""" Raised when no project can be found in the current context """
pass
class ProjectConfigNotFound(CumulusCIUsageError):
""" Raised when a project is found in the current context but no configuration was found for the project """
pass
class KeychainNotFound(CumulusCIException):
""" Raised when no keychain could be found """
pass
class KeychainKeyNotFound(CumulusCIException):
""" Raised when the keychain key couldn't be found """
pass
class OrgNotFound(CumulusCIException):
""" Raised when no org could be found by a given name in the project keychain """
pass
class ServiceNotConfigured(CumulusCIException):
""" Raised when no service configuration could be found by a given name in the project keychain """
pass
class ServiceNotValid(CumulusCIException):
""" Raised when no service configuration could be found by a given name in the project configuration """
pass
class DependencyResolutionError(CumulusCIException):
""" Raised when an issue is encountered while resolving a static dependency map """
pass
class ConfigError(CumulusCIException):
""" Raised when a configuration enounters an error """
pass
class AntTargetException(CumulusCIException):
""" Raised when a generic Ant target error occurs """
pass
class DeploymentException(CumulusCIException):
""" Raised when a metadata api deployment error occurs """
pass
class ApexTestException(CumulusCIFailure):
""" Raised when a build fails because of an Apex test failure """
pass
class SalesforceCredentialsException(CumulusCIException):
""" Raise when Salesforce credentials are invalid """
pass
class TaskRequiresSalesforceOrg(CumulusCIUsageError):
""" Raise when a task that requires a Salesforce org_config is not initialized with an org_config """
pass
class TaskOptionsError(CumulusCIUsageError):
""" Raise when a task's options are invalid """
pass
class GithubNotConfigured(CumulusCIException):
""" Raise when attempting to get the Github configuration from the keychain and no configuration is set """
pass
class MrbelvedereNotConfigured(CumulusCIException):
""" Raise when attempting to get the mrbelvedere configuration from the keychain and no configuration is set """
pass
class ApexTestsDBNotConfigured(CumulusCIException):
""" Raise when attempting to get the ApexTestsDB configuration from the keychain and no configuration is set """
pass
class TaskNotFoundError(CumulusCIException):
""" Raise when task is not found in project config """
pass
class FlowInfiniteLoopError(CumulusCIException):
""" Raised when a flow configuration creates a infinite loop """
pass
class FlowConfigError(CumulusCIException):
""" Raised when a flow configuration encounters an error """
pass
class FlowNotFoundError(CumulusCIException):
""" Raise when flow is not found in project config """
pass
class FlowNotReadyError(CumulusCIException):
""" Raise when flow is called before it has been prepared """
pass
class MrbelvedereError(CumulusCIException):
""" Raise for errors from mrbelvedere installer """
pass
class ScratchOrgException(CumulusCIException):
""" Raise for errors related to scratch orgs """
pass
class GithubException(CumulusCIException):
""" Raise for errors related to GitHub """
pass
class GithubApiError(CumulusCIException):
pass
class GithubApiNotFoundError(CumulusCIException):
pass
class GithubApiNoResultsError(CumulusCIException):
pass
class GithubApiUnauthorized(CumulusCIException):
pass
class SalesforceException(CumulusCIException):
""" Raise for errors related to Salesforce """
pass
class SalesforceDXException(CumulusCIException):
""" Raise for errors related to Salesforce DX """
pass
class SOQLQueryException(CumulusCIException):
""" Raise for errors related to Salesforce DX """
pass
class CommandException(CumulusCIException):
""" Raise for errors coming from spawned CLI subprocesses """
pass
class BrowserTestFailure(CumulusCIFailure):
""" Raise when browser tests fail """
pass
class ApexCompilationException(CumulusCIException):
""" Raise when apex compilation fails """
pass
class ApexException(CumulusCIFailure):
""" Raise when an Apex Exception is raised in an org """
pass
class PushApiObjectNotFound(CumulusCIException):
""" Raise when Salesforce Push API object is not found """
pass
class RobotTestFailure(CumulusCIFailure):
""" Raise when a robot test fails in a test suite """
pass
|
Python
| 0.000147
|
@@ -879,33 +879,34 @@
ed(CumulusCI
-Exception
+UsageError
):%0A %22%22%22 R
@@ -1038,33 +1038,34 @@
id(CumulusCI
-Exception
+UsageError
):%0A %22%22%22 R
@@ -2759,33 +2759,34 @@
or(CumulusCI
-Exception
+UsageError
):%0A %22%22%22 R
@@ -3120,33 +3120,34 @@
or(CumulusCI
-Exception
+UsageError
):%0A %22%22%22 R
@@ -4470,33 +4470,31 @@
on(CumulusCI
-Exception
+Failure
):%0A %22%22%22 R
|
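The point of rebasing these classes onto CumulusCIUsageError and CumulusCIFailure is that callers can catch whole families at once; a compressed sketch of the hierarchy in use:

class CumulusCIException(Exception): pass
class CumulusCIUsageError(CumulusCIException): pass
class TaskOptionsError(CumulusCIUsageError): pass

try:
    raise TaskOptionsError('bad option')
except CumulusCIUsageError as e:
    print('usage error: %s' % e)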
136d00313b4c7ec3b22020a61e000650b5c14b87
|
Add `timeout` parameter to send_command()
|
mihome.py
|
mihome.py
|
import binascii
import code
import importlib
import json
import psycopg2
import readline
import socket
import struct
import sys
import time
from Crypto.Cipher import AES
from datetime import datetime
from threading import Thread
import config
from plugins import sensor_ht, gateway, yeelight
from utils import get_store
from web.w import run_app as web_app
conn = psycopg2.connect("dbname={} user={} password={}".format(config.DBNAME, config.DBUSER, config.DBPASS))
cursor = conn.cursor()
MULTICAST = {
'mihome': ('224.0.0.50', 9898),
'yeelight': ('239.255.255.250', 1982)
}
SOCKET_BUFSIZE = 1024
IV = bytes([0x17, 0x99, 0x6d, 0x09, 0x3d, 0x28, 0xdd, 0xb3, 0xba, 0x69, 0x5a, 0x2e, 0x6f, 0x58, 0x56, 0x2e])
def receiver(service='mihome'):
assert service in MULTICAST, 'No such service'
store = get_store()
address, port = MULTICAST.get(service)
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.bind(("0.0.0.0", port))
mreq = struct.pack("=4sl", socket.inet_aton(address), socket.INADDR_ANY)
sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 32)
sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_LOOP, 1)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, SOCKET_BUFSIZE)
sock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)
current = {}
while True:
data, _ = sock.recvfrom(SOCKET_BUFSIZE) # buffer size is 1024 bytes
print(datetime.now().isoformat(), data)
if service == 'mihome':
message = json.loads(data.decode())
data = json.loads(message['data'])
if message.get('model') == 'sensor_ht' and not sensor_ht.process(conn, cursor, current, message, data):
continue
elif message.get('model') == 'gateway':
gateway.process(store, message, data)
current = {}
elif service == 'yeelight':
yeelight.process(data.decode())
def send_command(command):
_, port = MULTICAST.get('mihome')
if isinstance(command.get('data'), dict):
command['data'] = json.dumps(command['data'])
address = get_store().get('gateway_addr')
if address is None:
print("Doesn't receive any heartbeat from gateway. Delaying request for 10 seconds.")
time.sleep(10)
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.connect((address, port))
sock.send(json.dumps(command).encode('ascii'))
try:
data, addr = sock.recvfrom(SOCKET_BUFSIZE)
except ConnectionRefusedError:
data = None
finally:
sock.close()
return data
def get_key():
"""Get current gateway key"""
cipher = AES.new(config.MIHOME_GATEWAY_PASSWORD, AES.MODE_CBC, IV)
encrypted = cipher.encrypt(get_store().get('gateway_token'))
return binascii.hexlify(encrypted)
if __name__ == '__main__':
if len(sys.argv) > 1 and sys.argv[1] == 'shell':
vars = globals().copy()
vars.update(locals())
shell = code.InteractiveConsole(vars)
shell.interact()
sys.exit()
Thread(target=web_app).start()
for app_name in config.ENABLED_APPS:
try:
app = importlib.import_module('apps.{}'.format(app_name))
except ImportError as e:
print('Could not import app "{}": {}'.format(app_name, e))
continue
kwargs = {'store': get_store(), 'conn': conn, 'cursor': cursor}
Thread(target=app.run, kwargs=kwargs).start()
for service in MULTICAST:
Thread(target=receiver, args=(service,)).start()
# Discover Yeelight bulbs
yeelight.discover()
|
Python
| 0
|
@@ -1980,19 +1980,31 @@
(command
+, timeout=10
):%0A
-
_, p
@@ -2372,32 +2372,61 @@
ket.SOCK_DGRAM)%0A
+ sock.settimeout(timeout)%0A
sock.connect
@@ -2490,24 +2490,40 @@
e('ascii'))%0A
+ data = None%0A
try:%0A
@@ -2609,35 +2609,235 @@
or:%0A
-data = None
+print(%22send_command :: recvfrom() connection refused: %7B%7D:%7B%7D%22.format(address.decode(), port))%0A except socket.timeout:%0A print(%22send_command :: recvfrom() timed out: %7B%7D:%7B%7D%22.format(address.decode(), port))
%0A finally
@@ -3738,32 +3738,81 @@
=kwargs).start()
+%0A print('Loaded app: %7B%7D'.format(app_name))
%0A%0A for servic
|
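The settimeout() call this diff adds changes recvfrom() from blocking forever to raising socket.timeout; in isolation:

import socket

sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.settimeout(0.1)         # what the diff adds to send_command()
sock.bind(('127.0.0.1', 0))  # ephemeral port nobody sends to
try:
    sock.recvfrom(1024)
except socket.timeout:
    print('recvfrom() timed out instead of blocking forever')
finally:
    sock.close()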
9d98366e54f837ffa524c8915fc017e3a3ca1bf6
|
Add forum_id field to torrent
|
models.py
|
models.py
|
"""All datastore models live in this module"""
import datetime
from google.appengine.ext import ndb
class Torrent(ndb.Model):
"""A main model for representing an individual Torrent entry."""
title = ndb.StringProperty(indexed=False, required=True)
btih = ndb.StringProperty(indexed=False, required=True) # Infohash
dt = ndb.DateTimeProperty(required=True) # Create/update time, as reported by tracker
nbytes = ndb.IntegerProperty(indexed=False, required=True) # Torrent data size, bytes
description = ndb.TextProperty(required=True)
_memcache_timeout = 2592000 # 30 days
class Account(ndb.Model):
"""Represents tracker user account along with its session"""
username = ndb.StringProperty(indexed=False, required=True)
password = ndb.StringProperty(indexed=False, required=True)
userid = ndb.IntegerProperty(indexed=False, required=True)
cookies = ndb.JsonProperty()
_memcache_timeout = 86400 # 1 day
def __repr__(self):
return "<Account username='{}' userid='{}' cookies=[{}]>".format(
self.username, self.userid, self.cookies and self.cookies.keys())
class Category(ndb.Model):
"""Represents category entry"""
title = ndb.StringProperty(indexed=False, required=True)
_memcache_timeout = 86400 # 1 day
class PersistentScalarValue(ndb.Expando):
"""Persistent scalar value that is stored in datastore"""
pass
|
Python
| 0
|
@@ -580,32 +580,147 @@
y(required=True)
+%0A forum_id = ndb.IntegerProperty(required=True) # for finding torrents in category but not its subcategories
%0A%0A _memcache_
|
a2251254af6942e892d03d5705a6317ff5824451
|
Revert "delete legs if flight is deleted"
|
models.py
|
models.py
|
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import (Column, Integer, ForeignKey,
String, Float, DateTime, Boolean)
from sqlalchemy.orm import relationship, backref
Base = declarative_base()
class FlightLegLocation(Base):
""" Represents an airport at a specific point in time
corresponding to a scheduled departure or arrival of a flight.
Attributes:
airport: airport 3-letter code
tz: timezone
dt: departure or arrival time
dt_utc: departure or arrival time in UTC
dt_formatted: departure or arrival time formatted to string
Known Issues:
- This isn't a well-defined entity and is likely to cause
confusion, errors, or incorrect use in calling code.
"""
__tablename__ = 'flight_leg_location'
id = Column(Integer, primary_key=True)
# flightleg_id = Column(Integer, ForeignKey('flight_leg.id'))
airport = Column(String(3))
# tz: don't store this in the database
dt = Column(DateTime())
dt_formatted = Column(String())
dt_utc = Column(DateTime())
dt_utc_formatted = Column(String())
class FlightLeg(Base):
""" Represents a segment of a flight from an airport to the
next airport.
Attributes:
flight_number: the flight number, format: '#123'
depart: a FlightLegLocation for the departure city
arrive: a FlightLegLocation for the arrival city
Reference for multiple joins (depart and arrive):
http://docs.sqlalchemy.org/en/rel_0_7/orm/relationships.html#setting-the-primaryjoin-and-secondaryjoin
"""
__tablename__ = 'flight_leg'
id = Column(Integer, primary_key=True)
flight_id = Column(Integer, ForeignKey('flight.id'))
flight_number = Column(String(6))
depart_id = Column(Integer, ForeignKey("flight_leg_location.id"))
arrive_id = Column(Integer, ForeignKey("flight_leg_location.id"))
depart = relationship("FlightLegLocation",
primaryjoin="FlightLegLocation.id==FlightLeg.depart_id")
arrive = relationship("FlightLegLocation",
primaryjoin="FlightLegLocation.id==FlightLeg.arrive_id")
# depart = relationship("FlightLegLocation", uselist=False, backref='flight') #, foreign_keys=[depart_id])
# arrive = relationship("FlightLegLocation", uselist=False) #, foreign_keys=[arrive_id])
def __repr__(self):
return '<Flight Leg: %r>' % self.flight_number
class Flight(Base):
""" A flight goes from an origin airport to a destination airport.
It consists of one or more FlightLegs.
Attributes:
legs: a list of FlightLegs
"""
__tablename__ = 'flight'
id = Column(Integer, primary_key=True)
reservation_id = Column(Integer, ForeignKey('reservation.id'))
legs = relationship("FlightLeg", backref='flight', cascade="all, delete, delete-orphan")
active = Column(Boolean(), default=True)
success = Column(Boolean(), default=False)
position = Column(String())
sched_time = Column(Float())
sched_time_formatted = Column(String())
sched_time_local_formatted = Column(String())
seconds = Column(Float())
task_uuid = Column(String())
def task_status(self):
if self.task_uuid == None: return False
from celery.result import AsyncResult
return AsyncResult(self.task_uuid).state
class Reservation(Base):
""" Represents a reservation.
A reservation is identified by a 6-character confirmation code.
It can have one or more people on it and can have one or more
flights.
KNOWN ISSUES:
- This table links a single code with a single person.
Adding more than one person with the same code will cause
an error or an overwrite of the reservation.
This shouldn't affect check-in as all people on the reservation
will be checked in.
"""
__tablename__ = 'reservation'
id = Column(Integer, primary_key=True)
first_name = Column(String())
last_name = Column(String())
code = Column(String(6), unique=True)
active = Column(Boolean(), default=True)
new = Column(Boolean(), default=True)
email = Column(String())
flights = relationship("Flight", backref='reservation', cascade="all, delete, delete-orphan")
def __init__(self, first_name, last_name, code, email=None):
self.first_name = first_name
self.last_name = last_name
self.code = code
self.email = email
def __repr__(self):
return '<Reservation: %r>' % self.code
def isReservationActive(self):
if len(self.flights) > 0:
active = False
for flight in self.flights:
if flight.active:
active = True
self.active = active
else:
self.active = False
return self.active
|
Python
| 0
|
@@ -101,17 +101,18 @@
eignKey,
+
%0A
-
String
@@ -606,17 +606,23 @@
string%0A
+
%0A
-
Kn
@@ -1112,17 +1112,19 @@
ring())%0A
+
%0A
-
%0Aclass F
@@ -1750,17 +1750,19 @@
ing(6))%0A
+
%0A
-
depart
@@ -2792,46 +2792,8 @@
ght'
-, cascade=%22all, delete, delete-orphan%22
)%0A
@@ -3457,17 +3457,23 @@
lights.%0A
+
%0A
-
KN
@@ -4509,16 +4509,16 @@
lights:%0A
-
@@ -4534,16 +4534,17 @@
.active:
+
%0A
|
3a76fef3e514476a80850939664df912e40ead22
|
make partial veneer client excludes explicit (#4995)
|
owlbot.py
|
owlbot.py
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This script is used to synthesize generated parts of this library."""
import logging
from pathlib import Path
import subprocess
import synthtool as s
from synthtool.languages import php
from synthtool import _tracked_paths
logging.basicConfig(level=logging.DEBUG)
src = Path(f"../{php.STAGING_DIR}/Redis").resolve()
dest = Path().resolve()
# Added so that we can pass copy_excludes in the owlbot_main() call
_tracked_paths.add(src)
php.owlbot_main(
src=src,
dest=dest,
copy_excludes=[
src / "*/src/*/*Client.php"
]
)
# document and utilize apiEndpoint instead of serviceAddress
s.replace(
"**/Gapic/*GapicClient.php",
r"'serviceAddress' =>",
r"'apiEndpoint' =>")
s.replace(
"**/Gapic/*GapicClient.php",
r"@type string \$serviceAddress\n\s+\*\s+The address",
r"""@type string $serviceAddress
* **Deprecated**. This option will be removed in a future major release. Please
* utilize the `$apiEndpoint` option instead.
* @type string $apiEndpoint
* The address""")
s.replace(
"**/Gapic/*GapicClient.php",
r"\$transportConfig, and any \$serviceAddress",
r"$transportConfig, and any `$apiEndpoint`")
# V1 is GA, so remove @experimental tags
s.replace(
'src/V1/**/*Client.php',
r'^(\s+\*\n)?\s+\*\s@experimental\n',
'')
# Change the wording for the deprecation warning.
s.replace(
'src/*/*_*.php',
r'will be removed in the next major release',
'will be removed in a future release')
# Fix class references in gapic samples
for version in ['V1', 'V1beta1']:
pathExpr = 'src/' + version + '/Gapic/CloudRedisGapicClient.php'
types = {
'new CloudRedisClient': r'new Google\\Cloud\\Redis\\'+ version + r'\\CloudRedisClient',
'new Instance': r'new Google\\Cloud\\Redis\\' + version + r'\\Instance',
'= Tier::': r'= Google\\Cloud\\Redis\\' + version + r'\\Instance\\Tier::',
'new FieldMask': r'new Google\\Protobuf\\FieldMask',
'new InputConfig': r'new Google\\Cloud\\Redis\\' + version + r'\\InputConfig',
'new OutputConfig': r'new Google\\Cloud\\Redis\\' + version + r'\\OutputConfig',
'= DataProtectionMode': r'= Google\\Cloud\\Redis\\' + version + r'\\FailoverInstanceRequest\\DataProtectionMode::'
}
for search, replace in types.items():
s.replace(
pathExpr,
search,
replace
)
### [START] protoc backwards compatibility fixes
# roll back to private properties.
s.replace(
"src/**/V*/**/*.php",
r"Generated from protobuf field ([^\n]{0,})\n\s{5}\*/\n\s{4}protected \$",
r"""Generated from protobuf field \1
*/
private $""")
# prevent proto messages from being marked final
s.replace(
"src/**/V*/**/*.php",
r"final class",
r"class")
# Replace "Unwrapped" with "Value" for method names.
s.replace(
"src/**/V*/**/*.php",
r"public function ([s|g]\w{3,})Unwrapped",
r"public function \1Value"
)
### [END] protoc backwards compatibility fixes
# fix relative cloud.google.com links
s.replace(
"src/**/V*/**/*.php",
r"(.{0,})\]\((/.{0,})\)",
r"\1](https://cloud.google.com\2)"
)
|
Python
| 0
|
@@ -1095,19 +1095,81 @@
%22*/src/
-*/*
+V1/CloudRedisClient.php%22,%0A src / %22*/src/V1beta1/CloudRedis
Client.p
|
86273d0aaa619268c3501a16cd4cd02c4d6ef6e5
|
fix bug when two threads trying to create one dir
|
parsed.py
|
parsed.py
|
#coding:utf-8
import os
import argparse
import requests
import json
import threading
import Queue
#number of threads
THREADS_NUM = 5
VK_audio_url = 'http://vk.com/audio'
queue = Queue.Queue()
class ThreadGrabAudio(threading.Thread):
"""Worker class
For every download there is a thread which is
represented by its instance
"""
def __init__(self, queue):
threading.Thread.__init__(self)
self.queue = queue
def run(self):
#queue has tasks?
while not queue.empty():
#grab file url from queue
file = self.queue.get()
self.ensure_dir('music')
self.download(file)
self.queue.task_done()
def ensure_dir(self, dir):
if not os.path.exists(dir):
os.makedirs(dir)
def download(self, file):
"""Download files asynchronously
and save them to local directory 'music'
"""
request = requests.get(file.get('link'), stream=True)
file_name = self.make_filename(file)
with open(file_name, 'wb') as f:
for chunk in request.iter_content(chunk_size=1024):
if chunk:
f.write(chunk)
f.flush()
f.close()
def make_filename(self, file):
"""Create normalized file name
consist of author and track name
"""
author = self.normalize_name(file.get('author'))
name = self.normalize_name(file.get('name'))
return 'music/' + author + ' - ' + name + '.mp3'
def normalize_name(self, name):
"""remove bullshit from the name"""
return name.replace('/', ' ').replace('\\', ' ')
class Parsed():
"""Parser class
Creates threading pool to download playlist files
in parallel
"""
SID = None
def __init__(self, user_id='', login='', passwd=''):
self.user_id = str(user_id)
self.login = login
self.passwd = passwd
self.SID = self.auth()
def run(self):
pass
def auth(self):
s = requests.Session()
s.post(
'https://login.vk.com',
data={
"act": "login",
"email": self.login,
"pass": self.passwd
}
)
return s.cookies.get('remixsid')
def process_playlist(self):
audios = self.getAudioJSON()
try:
all = json.loads(audios)
for track in all.get('all'):
file = self.trackToFile(track)
#populate queue with files for download
queue.put(file)
#spawn a thread pool
for i in range(THREADS_NUM):
t = ThreadGrabAudio(queue)
#t.setDaemon(True)
t.start()
except Exception, e:
print e
def trackToFile(self, track):
"""Converting track (which is array) to file dict
picking only interesting track info
"""
return {
'link': track[2],
'author': track[5],
'name': track[6]
}
def fix_json(self, json):
"""remove slashes cause it can break downloading"""
json = json.replace('\'', '"')
sep_index = json.find('<!>')
json = json[:sep_index]
return json
def getAudioJSON(self):
"""Make request for vk.com audio
session id must be provided for remixsid cookie param
"""
res = requests.post(
url=VK_audio_url,
headers={
'Cookie': '; '.join([
'remixdt=0',
'remixtst=8537d36c',
'remixlang=0',
'remixsid=' + self.SID,
'remixflash=11.9.900',
'remixseenads=1'
])
},
data={
'act': 'load_audios_silent',
'al': '1',
'gid': '0',
'id': self.user_id,
'please_dont_ddos': '2'
}
)
#cut some garbage at the beginning
#and decode cyrilic symbols in response
print res
res = res.content[48:].decode('1251')
res = self.fix_json(res)
return res
def main():
parser = argparse.ArgumentParser(description='process params')
parser.add_argument('-u', '--user', help='vk.com user id', required=True)
parser.add_argument('-e', '--email', help='vk.com user email', required=True)
parser.add_argument('-p', '--password', help='vk.com user pass', required=True)
args = parser.parse_args()
#request audio playlist of the user with given id
if args.user is not None:
p = Parsed(
user_id=args.user,
login=args.email,
passwd=args.password
)
p.process_playlist()
queue.join()
print 'Playlist successfully downloaded!'
if __name__ == '__main__':
main()
|
Python
| 0
|
@@ -595,45 +595,8 @@
t()%0A
- self.ensure_dir('music')%0A
@@ -663,105 +663,8 @@
()%0A%0A
- def ensure_dir(self, dir):%0A if not os.path.exists(dir):%0A os.makedirs(dir)%0A%0A
@@ -2257,16 +2257,49 @@
oJSON()%0A
+ self.ensure_dir('music')%0A
@@ -2747,16 +2747,113 @@
rint e%0A%0A
+ def ensure_dir(self, dir):%0A if not os.path.exists(dir):%0A os.makedirs(dir)%0A%0A
def
|
fbf61270d3356e0841e7a990cdc6f6224dbba143
|
Worked around an exception: FieldError
|
planetstack/dependency_walker.py
|
planetstack/dependency_walker.py
|
#!/usr/bin/python
import os
import imp
from planetstack.config import Config
import inspect
import time
import traceback
import commands
import threading
import json
import pdb
from core.models import *
missing_links={}
try:
dep_data = open(Config().dependency_graph).read()
except:
dep_data = open('/opt/planetstack/model-deps').read()
dependencies = json.loads(dep_data)
inv_dependencies = {}
for k, lst in dependencies.items():
for v in lst:
try:
inv_dependencies[v].append(k)
except KeyError:
inv_dependencies[v]=[k]
def plural(name):
if (name.endswith('s')):
return name+'es'
else:
return name+'s'
def walk_deps(fn, object):
model = object.__class__.__name__
try:
deps = dependencies[model]
except:
deps = []
__walk_deps(fn, object, deps)
def walk_inv_deps(fn, object):
model = object.__class__.__name__
try:
deps = inv_dependencies[model]
except:
deps = []
__walk_deps(fn, object, deps)
def __walk_deps(fn, object, deps):
model = object.__class__.__name__
for dep in deps:
#print "Checking dep %s"%dep
peer=None
link = dep.lower()
try:
peer = getattr(object, link)
except AttributeError:
link = plural(link)
try:
peer = getattr(object, link)
except AttributeError:
if not missing_links.has_key(model+'.'+link):
print "Model %s missing link for dependency %s"%(model, link)
missing_links[model+'.'+link]=True
if (peer):
try:
peer_objects = peer.all()
except:
peer_objects = [peer]
for o in peer_objects:
fn(o, object)
# Uncomment the following line to enable recursion
# walk_inv_deps(fn, o)
def p(x):
print x,x.__class__.__name__
return
def main():
#pdb.set_trace()
import django
django.setup()
s = Site.objects.filter(login_base='onlab')
#pdb.set_trace()
walk_inv_deps(p,s[0])
if __name__=='__main__':
main()
|
Python
| 0.999198
|
@@ -1005,17 +1005,16 @@
_name__%0A
-%0A
%09for dep
@@ -1455,24 +1455,39 @@
()%0A%09%09%09except
+ AttributeError
:%0A%09%09%09%09peer_o
@@ -1501,16 +1501,49 @@
= %5Bpeer%5D
+%0A%09%09%09except:%0A%09%09%09%09peer_objects = %5B%5D
%0A%0A%09%09%09for
|
775a1a2cde6bef565fc8f326b6bba9aecfaafa83
|
Remove gsf tests (too long).
|
plugins/PluginDownloader/test.py
|
plugins/PluginDownloader/test.py
|
###
# Copyright (c) 2011, Valentin Lorentz
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
import os
import shutil
from supybot.test import *
pluginsPath = '%s/test-plugins' % os.getcwd()
class PluginDownloaderTestCase(PluginTestCase):
plugins = ('PluginDownloader',)
config = {'supybot.directories.plugins': [pluginsPath]}
def setUp(self):
PluginTestCase.setUp(self)
try:
shutil.rmtree(pluginsPath)
except:
pass
os.mkdir(pluginsPath)
def tearDown(self):
try:
shutil.rmtree(pluginsPath)
finally:
PluginTestCase.tearDown(self)
def _testPluginInstalled(self, name):
assert os.path.isdir(pluginsPath + '/%s/' % name)
assert os.path.isfile(pluginsPath + '/%s/plugin.py' % name)
assert os.path.isfile(pluginsPath + '/%s/config.py' % name)
def testRepolist(self):
self.assertRegexp('repolist', '(.*, )?ProgVal(, .*)?')
self.assertRegexp('repolist', '(.*, )?quantumlemur(, .*)?')
self.assertRegexp('repolist ProgVal', '(.*, )?AttackProtector(, .*)?')
def testInstallProgVal(self):
self.assertError('plugindownloader install ProgVal Darcs')
self.assertNotError('plugindownloader install ProgVal AttackProtector')
self.assertError('plugindownloader install ProgVal Darcs')
self._testPluginInstalled('AttackProtector')
def testInstallQuantumlemur(self):
self.assertError('plugindownloader install quantumlemur AttackProtector')
self.assertNotError('plugindownloader install quantumlemur Listener')
self.assertError('plugindownloader install quantumlemur AttackProtector')
self._testPluginInstalled('Listener')
def testInstallStepnem(self):
self.assertNotError('plugindownloader install stepnem Freenode')
self._testPluginInstalled('Freenode')
def testInstallGsf(self):
self.assertNotError('plugindownloader install gsf-snapshot Debian')
self._testPluginInstalled('Debian')
self.assertError('plugindownloader install gsf-snapshot Anagram')
self.assertError('plugindownloader install gsf-snapshot Acronym')
self.assertNotError('plugindownloader install gsf-edsu Anagram')
self._testPluginInstalled('Anagram')
self.assertError('plugindownloader install gsf-edsu Debian')
self.assertError('plugindownloader install gsf-edsu Acronym')
self.assertNotError('plugindownloader install gsf Acronym')
self._testPluginInstalled('Acronym')
self.assertError('plugindownloader install gsf Anagram')
self.assertError('plugindownloader install gsf Debian')
def testInstallNanotubeBitcoin(self):
self.assertNotError('plugindownloader install nanotube-bitcoin GPG')
self._testPluginInstalled('GPG')
def testInstallMtughanWeather(self):
self.assertNotError('plugindownloader install mtughan-weather '
'WunderWeather')
self._testPluginInstalled('WunderWeather')
def testInstallSpiderDave(self):
self.assertNotError('plugindownloader install SpiderDave Pastebin')
self._testPluginInstalled('Pastebin')
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
|
Python
| 0
|
@@ -3395,808 +3395,8 @@
')%0A%0A
- def testInstallGsf(self):%0A self.assertNotError('plugindownloader install gsf-snapshot Debian')%0A self._testPluginInstalled('Debian')%0A self.assertError('plugindownloader install gsf-snapshot Anagram')%0A self.assertError('plugindownloader install gsf-snapshot Acronym')%0A%0A self.assertNotError('plugindownloader install gsf-edsu Anagram')%0A self._testPluginInstalled('Anagram')%0A self.assertError('plugindownloader install gsf-edsu Debian')%0A self.assertError('plugindownloader install gsf-edsu Acronym')%0A%0A self.assertNotError('plugindownloader install gsf Acronym')%0A self._testPluginInstalled('Acronym')%0A self.assertError('plugindownloader install gsf Anagram')%0A self.assertError('plugindownloader install gsf Debian')%0A%0A
|
abb05af71f33a568c7b9d8e9a48872acecaf1a75
|
Add pretty print indent.
|
ssexp/__init__.py
|
ssexp/__init__.py
|
import sys
from fn import recur
import preserialize
DATA = preserialize.DATA
STR = preserialize.STR
class SsexpError(Exception):
pass
SCHEME_TYPES = preserialize.BASIC_TYPES + (
(bool,),
(type(None), preserialize.Deconstructor, dict(name=u"none")),
(dict, preserialize.DictDeconstructor, dict(name=u":")))
class LabelLinkManager(preserialize.LinkManager):
"""Source and destination integer label."""
KEY = u":label"
def is_ref(self, obj):
return type(obj) == dict and self.KEY in obj
def make_ref(self, dest):
return {self.KEY: self._links[dest][0]}
def label_destination(self, i, obj):
return {self.KEY: i, DATA: [obj]}
def unlabel_destination(self, obj):
return obj[DATA][0]
class SchemeEncoder(preserialize.Encoder):
"""Map ``is_`` prefix to ``?`` suffix, and ``_`` to ``-``."""
def encode(self, s):
if s.startswith(u"is_"):
s = u"{0}?".format(s[3:])
return s.replace(u"_", u"-")
def decode(self, s):
s = s.replace(u"-", u"_")
if s.endswith(u"?"):
s = u"is_{0}".format(s)
return s
class SsexpPreserializer(preserialize.Preserializer):
"""Preserializer to a Scheme-friendly S-expression precursor."""
def __init__(self, is_translated=True):
super().__init__(types=SCHEME_TYPES,
link_manager_cls=LabelLinkManager,
key_encoder=(
SchemeEncoder() if is_translated else None))
@recur.stackless
def remove_mapping(data, head_key=u":type", dict_name=None,
list_type=list, mapping_type=dict):
"""Turn each *mapping* in ``data`` into a list.
If ``head_key`` is in the mapping, the corresponding value becomes
the head of the list.
Strings are double quote encoded, as we need to distinguish
between lists and mappings (which have an unquoted string as the
head).
"""
escape_char = head_key[0]
dict_name = escape_char if dict_name is None else dict_name
t = type(data)
if t == preserialize.STR:
yield preserialize.DoubleQuoteEncoder.encode(data)
elif t == list_type:
new = t()
for item in data:
new.append((yield remove_mapping.call(
item, head_key, dict_name, list_type, mapping_type)))
yield new
elif t == mapping_type:
new = list_type()
if head_key in data:
new.append(data[head_key])
for key, item in data.items(): # metadata keys first
if key.startswith(escape_char) and key != head_key:
new.extend((u"{0}:".format(key),
(yield remove_mapping.call(
item, head_key, dict_name, list_type, mapping_type))))
for key, item in data.items(): # kwargs first as nice for eg XML
if not (key.startswith(escape_char) or key == preserialize.DATA):
new.extend((u"{0}:".format(key),
(yield remove_mapping.call(
item, head_key, dict_name, list_type, mapping_type))))
if preserialize.DATA in data: # finally splice list items
maybe_splice = data[preserialize.DATA]
if type(maybe_splice) == list_type:
for item in maybe_splice:
new.append((yield remove_mapping.call(
item, head_key, dict_name, list_type, mapping_type)))
else:
raise SsexpError(
u'Found data key ("") but item not a list_type.')
yield new
else:
yield data
@recur.stackless
def to_ssexp(tree):
t = type(tree)
if t == int or t == float:
yield preserialize.STR(tree)
elif t == preserialize.STR:
yield tree
elif t == bool:
yield u"#t" if tree else u"#f"
elif tree and tree[0] == u":label:":
n = len(tree)
if n == 2: # dest
yield u"#{0}#".format(tree[1])
elif n == 3: # source
yield u"#{0}={1}".format(tree[1], (yield to_ssexp.call(tree[2])))
else:
raise Exception(u"Bad :label: form.")
else:
new = []
for item in tree:
new.append((yield to_ssexp.call(item)))
yield u"({0})".format(u" ".join(new))
def dumps(obj, preserializer=None):
preserializer = preserializer if preserializer else SsexpPreserializer()
return to_ssexp(remove_mapping(preserializer.preserialize(obj)))
|
Python
| 0
|
@@ -3703,16 +3703,37 @@
exp(tree
+, indent=2, current=2
):%0A t
@@ -4162,16 +4162,33 @@
(tree%5B2%5D
+, indent, current
)))%0A
@@ -4277,32 +4277,35 @@
= %5B%5D%0A for
+ i,
item in tree:%0A
@@ -4301,14 +4301,233 @@
in
+enumerate(
tree
+)
:%0A
+ if i %3E 0:%0A if isinstance(item, list):%0A space = %22 %22*current%0A new.append(f%22%5Cn%7Bspace%7D%22)%0A else:%0A new.append(%22 %22)%0A
@@ -4570,16 +4570,42 @@
all(item
+, indent, current + indent
)))%0A
@@ -4632,17 +4632,16 @@
ormat(u%22
-
%22.join(n
|
a0eab53b1e810bb3b4f1a3887ad3be5d755de0d9
|
bump v0.8.9
|
steam/__init__.py
|
steam/__init__.py
|
__version__ = "0.8.8"
__author__ = "Rossen Georgiev"
version_info = (0, 8, 8)
from steam.steamid import SteamID
from steam.globalid import GlobalID
from steam.webapi import WebAPI
from steam.webauth import WebAuth, MobileWebAuth
# proxy object
# avoids importing steam.enums.emsg unless it's needed
class SteamClient(object):
def __new__(cls, *args, **kwargs):
from steam.client import SteamClient as SC
bases = cls.__bases__
if bases != (object, ):
if bases[0] != SteamClient:
raise ValueError("SteamClient needs to be the first base for custom classes")
SC = type("SteamClient", (SC,) + bases[1:], {})
return SC(*args, **kwargs)
|
Python
| 0.000002
|
@@ -12,17 +12,17 @@
= %220.8.
-8
+9
%22%0A__auth
@@ -69,17 +69,17 @@
(0, 8,
-8
+9
)%0A%0Afrom
|
318589d6a6d2536f2097a5e60fafe019697da4c3
|
fix tests - fax server cares for TO: in email not user...
|
pimail.py
|
pimail.py
|
import web
import json
import random
from jinja2 import Template
import urllib
import subprocess
import shlex
import settings
" Load Data "
with open("data.json") as f:
meps = json.load(f)
total_score = sum((i['score'] for i in meps))
def weighted_choice(a):
""" Pick a MEP based on the score weight """
r = random.uniform(0,total_score)
n = 0
for c in a:
n = n + c['score']
if n>r:
return c
return False
def unquote(a):
return (a[0],unicode(urllib.unquote_plus(a[1]).decode("utf-8")))
def decode_args(a):
return dict((unquote(i.split("=")) for i in a.split("&")))
def get_mep_by_id(id):
for m in meps:
if m['id']==int(id):
return m
return None
class Fax:
""" Handle the Fax Widget """
def GET(self):
""" display the fax widget """
web.header("Content-Type", "text/html;charset=utf-8")
with open("fax.tmpl") as f:
template = Template(f.read().decode("utf-8"))
m = weighted_choice(meps)
return template.render(m)
def POST(self):
"send out the fax"
args=decode_args(web.data())
m = get_mep_by_id(args['id'])
fax = m[settings.FAX_FIELD].replace(" ","").replace("+","00")
with open("fax-out.tmpl") as f:
template = Template(f.read().decode("utf-8"))
data = {"body": args['body'],
"from": settings.FROM,
"to": "%s@%s" % (fax, settings.FAX_GATEWAY),
}
a = shlex.split(settings.SENDMAIL)
" add the recipient as args "
if settings.TEST:
fax = '100'
a.append("%s@%s" % (fax,settings.FAX_GATEWAY))
p = subprocess.Popen(a,
stdin=subprocess.PIPE)
p.communicate(template.render(data).encode("utf-8"))
with open("fax-sent.tmpl") as f:
template = Template(f.read().decode("utf-8"))
return template.render(m)
class mail:
""" Handle Requests for Mail """
def GET(self):
""" Handle GET Requests """
web.header("Content-Type", "text/html;charset=utf-8")
with open("mail.tmpl") as f:
template = Template(f.read().decode("utf-8"))
m = weighted_choice(meps)
return template.render(m)
urls = ('/widget/', 'mail',
'/widget/fax/', 'Fax')
app = web.application(urls,globals())
if __name__ == "__main__":
app.run()
|
Python
| 0
|
@@ -1190,16 +1190,84 @@
%5B'id'%5D)%0A
+ if settings.TEST:%0A fax = '100'%0A else:%0A
@@ -1667,58 +1667,8 @@
s %22%0A
- if settings.TEST:%0A fax = '100'%0A
|
d510b454554330fab85ed94d7c3f5910b891229d
|
Fix missing _
|
plugin.py
|
plugin.py
|
###
# Copyright (c) 2014, spline
# All rights reserved.
#
#
###
# my libs
import json
import time
import pytz
import datetime
# supybot libs
import supybot.utils as utils
from supybot.commands import *
import supybot.plugins as plugins
import supybot.ircutils as ircutils
import supybot.callbacks as callbacks
try:
from supybot.i18n import PluginInternationalization
_ = PluginInternationalization('WorldTime')
except ImportError:
# Placeholder that allows to run the plugin on a bot
# without the i18n module
_ = lambda x:x
class WorldTime(callbacks.Plugin):
"""Add the help for "@plugin help WorldTime" here
This should describe *how* to use this plugin."""
threaded = True
##################
# TIME FUNCTIONS #
##################
def _qp(self, url):
"""quote_plus."""
import sys
if sys.version_info[0] == 2:
from urllib import quote_plus
else:
from urllib.parse import quote_plus
# now do quote plus.
url = quote_plus(url)
return url
def _utcnow(self):
"""Calculate Unix timestamp from GMT. Code from calendar.timegm()"""
ttuple = datetime.datetime.utcnow().utctimetuple()
_EPOCH_ORD = datetime.date(1970, 1, 1).toordinal()
year, month, day, hour, minute, second = ttuple[:6]
days = datetime.date(year, month, 1).toordinal() - _EPOCH_ORD + day - 1
hours = days*24 + hour
minutes = hours*60 + minute
seconds = minutes*60 + second
return seconds
def _converttz(self, s, outputTZ):
"""Convert epoch seconds to a HH:MM readable string."""
# now do some timezone math.
try:
dtobj = datetime.datetime.fromtimestamp(s, tz=pytz.timezone(outputTZ)) # convert epoch into aware dtobj.
outstrf = '%a, %H:%M' # Day, HH:MM
local_dt = dtobj.astimezone(pytz.timezone(outputTZ))
return local_dt.strftime(outstrf)
except Exception as e:
self.log.info("ERROR: _converttz: {0}".format(e))
return None
##############
# GAPI STUFF #
##############
def _fetch(self, url, headers=None):
"""
General HTTP resource fetcher.
"""
try:
if headers:
result = utils.web.getUrl(url, headers=headers)
else:
result = utils.web.getUrl(url)
# return
return result
except Exception as e:
self.log.info("_fetch :: I could not open {0} error: {1}".format(url, e))
return None
def _getlatlng(self, location):
location = self._qp(location)
url = 'http://maps.googleapis.com/maps/api/geocode/json?address=%s&sensor=false' % location
# try and fetch url
response = self._fetch(url)
if not response:
irc.reply("ERROR: I could not fetch: {0}".format(url))
return None
# wrap in a big try/except
try:
result = json.loads(response.decode('utf-8'))
if result['status'] == 'OK':
lat = str(result['results'][0]['geometry']['location']['lat'])
lng = str(result['results'][0]['geometry']['location']['lng'])
place = (result['results'][0]['formatted_address'])
ll = '%s,%s' % (lat, lng) # lat+long into a single string.
return {'place':place, 'll':ll}
else:
self.log.info("ERROR: _getlatlng: status result NOT ok. Result: {0}".format(result))
return None
except Exception as e:
self.log.info("ERROR: _getlatlng: {0}".format(e))
return None
def _gettime(self, latlng):
latlng = self.qp(latlng)
        url = 'https://maps.googleapis.com/maps/api/timezone/json?location=%s&sensor=false&timestamp=%s' % (latlng, time.time())
# try and fetch url
response = self._fetch(url)
if not response:
irc.reply("ERROR: I could not fetch: {0}".format(url))
return None
# wrap in a big try/except
try:
result = json.loads(response.decode('utf-8'))
if result['status'] == 'OK':
# {u'status': u'OK', u'dstOffset': 0, u'rawOffset': -18000, u'timeZoneName': u'Eastern Standard Time', u'timeZoneId': u'America/New_York'}
return result
else:
self.log.info("ERROR: _gettime: status result NOT ok. Result: {0}".format(result))
return None
except Exception as e:
self.log.info("ERROR: _gettime: {0}".format(e))
return None
###################
# PUBLIC FUNCTION #
###################
def worldtime(self, irc, msg, args, optinput):
"""<location>
Query GAPIs for <location> and attempt to figure out local time.
"""
# first, grab lat and long for user location
gc = self._getlatlng(optinput)
if not gc:
irc.reply("ERROR: I could not find lat/long for: {0}. Bad location? Spelled wrong?".format(optinput))
return
# next, lets grab the localtime for that location w/lat+long.
ll = self._gettime(gc['ll'])
if not ll:
irc.reply("ERROR: I could not find local timezone for: {0}. Bad location? Spelled wrong?".format(optinput))
return
# if we're here, we have localtime zone.
utcnow = self._utcnow() # grab UTC now.
# localtm = utcnow+ll['rawOffset'] # grab raw offset from
# now lets use pytz to convert into the localtime in the place.
lt = self._converttz(utcnow, ll['timeZoneId'])
if lt: # make sure we get it back.
if self.registryValue('disableANSI', msg.args[0]): # disable ANSI.
irc.reply("{0} :: Current local time is: {1} ({2})".format(gc['place'].encode('utf-8'), lt, ll['timeZoneName'].encode('utf-8')))
else:
irc.reply("{0} :: Current local time is: {1} ({2})".format(ircutils.bold(gc['place'].encode('utf-8')), lt, ll['timeZoneName'].encode('utf-8')))
else:
irc.reply("ERROR: Something went wrong during conversion to timezone. Check logs.")
worldtime = wrap(worldtime, [('text')])
Class = WorldTime
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
|
Python
| 0.999997
|
@@ -3842,16 +3842,17 @@
= self.
+_
qp(latln
|
23ae5b17121e65b5be23baf78a7e021ef2b5262e
|
Make signals work
|
pox/tk.py
|
pox/tk.py
|
# Copyright 2012 James McCauley
#
# This file is part of POX.
#
# POX is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# POX is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with POX. If not, see <http://www.gnu.org/licenses/>.
"""
Lets you use Tk with POX.
Highly experimental.
"""
from collections import deque
from pox.core import core
log = core.getLogger()
#TODO: Bind revent events across thread
class MessageBoxer (object):
def __init__ (self, tk):
import tkMessageBox, tkColorChooser, tkSimpleDialog, tkFileDialog
fields = "ERROR INFO QUESTION WARNING ABORTRETRYIGNORE OKCANCEL "
fields += "RETRYCANCEL YESNO YESNOCANCEL ABORT RETRY IGNORE OK "
fields += "CANCEL YES NO"
for f in fields.split():
setattr(self, f, getattr(tkMessageBox, f))
methods = "showinfo showwarning showerror askquestion "
methods += "askokcancel askyesno askretrycancel"
self._addmethods(tkMessageBox, methods, tk)
methods = "askinteger askfloat askstring"
self._addmethods(tkSimpleDialog, methods, tk)
methods = "askcolor"
self._addmethods(tkColorChooser, methods, tk)
methods = "askopenfilename asksaveasfilename"
self._addmethods(tkFileDialog, methods, tk)
def _addmethods (self, module, methods, tk):
for m in methods.split():
def f (m):
def f2 (*args, **kw):
return getattr(module, m)(*args,**kw)
def f4 (*args, **kw):
_ = kw.pop('_', None)
tk.do_ex(getattr(module, m), rv = _, args=args, kw=kw)
def f5 (_, *args, **kw):
tk.do_ex(f2, rv = _, args=args, kw=kw)
return f4,f5
a,b = f(m)
setattr(self, m, a)
setattr(self, m+"_cb", b)
class Tk (object):
_core_name = "tk"
def __init__ (self):
self._q = deque()
self.dialog = MessageBoxer(self)
def do_ex (self, code, rv=None, args=[], kw={}):
self._q.append((code, rv, args, kw))
self.root.event_generate('<<Ping>>', when='tail')
def do (__self, __code, __rv=None, *args, **kw):
__self._q.append((__code, __rv, args, kw))
__self.root.event_generate('<<Ping>>', when='tail')
def _dispatch (self, event):
while len(self._q):
self._dispatch_one(*self._q.popleft())
def _dispatch_one (self, code, rv, args, kw):
if callable(code):
r = code(*args, **kw)
else:
def f ():
l = {'self':self}
l.update(kw)
exec code in globals(), l
r = f()
if rv: core.callLater(rv, r)
def run (self):
import Tkinter
root = Tkinter.Tk()
root.bind('<<Ping>>', self._dispatch)
self.root = root
self.root.withdraw()
try:
root.mainloop()
except KeyboardInterrupt:
pass
log.debug("Quitting")
def launch ():
import boot
core.registerNew(Tk)
boot.set_main_function(core.tk.run)
"""
def pr (msg):
print "From Tk:", msg
core.callDelayed(5,lambda: core.tk.msgbox.showinfo_cb(pr,
"Hello", "Hello, World!"))
"""
|
Python
| 0
|
@@ -3041,16 +3041,139 @@
= root%0A
+%0A # Become live once in a while so that signals get handled%0A def timer ():%0A root.after(500, timer)%0A timer()%0A%0A
self
|
8ee98a3a25d078db4472783cfe5eec095a6cfbcd
|
Fix path of Info.plist for Mac.
|
ppapi.gyp
|
ppapi.gyp
|
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'targets': [
{
'target_name': 'ppapi_c',
'type': 'none',
'direct_dependent_settings': {
'include_dirs': [
'..',
],
},
'sources': [
'c/pp_event.h',
'c/pp_instance.h',
'c/pp_module.h',
'c/pp_point.h',
'c/pp_rect.h',
'c/pp_resource.h',
'c/pp_size.h',
'c/pp_stdint.h',
'c/pp_var.h',
'c/ppb.h',
'c/ppb_core.h',
'c/ppb_device_context_2d.h',
'c/ppb_image_data.h',
'c/ppb_instance.h',
'c/ppb_var.h',
'c/ppp.h',
'c/ppp_class.h',
'c/ppp_instance.h',
],
},
{
'target_name': 'ppapi_example',
'dependencies': [
],
'include_dirs': [
'..',
],
'xcode_settings': {
'INFOPLIST_FILE': 'Info.plist',
},
'sources': [
'example/example.cc',
'cpp/device_context_2d.cc',
'cpp/device_context_2d.h',
'cpp/image_data.cc',
'cpp/image_data.h',
'cpp/instance.cc',
'cpp/instance.h',
'cpp/module.cc',
'cpp/module.h',
'cpp/scriptable_object.cc',
'cpp/scriptable_object.h',
'cpp/rect.h',
'cpp/resource.cc',
'cpp/resource.h',
'cpp/var.cc',
'cpp/var.h',
],
'conditions': [
['OS=="win"', {
'product_name': 'ppapi_example',
'type': 'shared_library',
'msvs_guid': 'EE00E36E-9E8C-4DFB-925E-FBE32CEDB91B',
'sources': [
'example/example.rc',
],
'run_as': {
'action': [
'<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)chrome<(EXECUTABLE_SUFFIX)',
'--no-sandbox',
'--internal-pepper',
'--enable-gpu-plugin',
'--load-plugin=$(TargetPath)',
'file://$(ProjectDir)test_page.html',
],
},
}],
['OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris"', {
'type': 'shared_library',
'cflags': ['-fvisibility=hidden'],
# -gstabs, used in the official builds, causes an ICE. Simply remove
# it.
'cflags!': ['-gstabs'],
}],
['OS=="linux" or OS=="openbsd" or OS=="freebsd" and (target_arch=="x64" or target_arch=="arm") and linux_fpic!=1', {
'product_name': 'ppapi_example',
# Shared libraries need -fPIC on x86-64
'cflags': ['-fPIC'],
}, {
# Dependencies for all other OS/CPU combinations except those above
'dependencies': [
],
}],
['OS=="mac"', {
'type': 'loadable_module',
'mac_bundle': 1,
'product_name': 'PPAPIExample',
'product_extension': 'plucpgin',
'sources+': [
'example/Info.plist'
],
}],
],
# See README for instructions on how to run and debug on the Mac.
#'conditions' : [
# ['OS=="mac"', {
# 'target_name' : 'Chromium',
# 'type' : 'executable',
# 'xcode_settings' : {
# 'ARGUMENTS' : '--renderer-startup-dialog --internal-pepper --no-sandbox file://${SRCROOT}/test_page.html'
# },
# }],
#],
},
],
}
|
Python
| 0
|
@@ -1003,16 +1003,24 @@
FILE': '
+example/
Info.pli
@@ -2990,18 +2990,16 @@
n': 'plu
-cp
gin',%0A
|
be6f36311fdec93bca1f26672c1c3cca02d6d203
|
Now is executable
|
pypipe.py
|
pypipe.py
|
import argparse
from pypipe.formats import *
from pypipe.utils import run_pipeline, generate_pipeline_graph
_parser = argparse.ArgumentParser(
description="Bioinformatics pipelines framework")
_parser.add_argument('pipeline', help='name of pipeline file')
_parser.add_argument('--draw', action='store_true',
help='draw pipeline to PNG')
_parser.add_argument('--run', action='store', nargs=1, metavar='NODE',
help='run pipeline')
_args = _parser.parse_args()
execfile(_args.pipeline)
if _args.draw:
from pypipe.utils import generate_pipeline_graph
generate_pipeline_graph(_args.pipeline)
if _args.run:
from pypipe.utils import run_pipeline
run_pipeline(_args.pipeline, eval(_args.run[0]))
|
Python
| 0.998662
|
@@ -1,12 +1,36 @@
+#!/usr/bin/env python2%0A%0A
import argpa
|
1171d371e4b2bb91bc842a9ed129b512f9b8499f
|
Add version flag
|
pypush.py
|
pypush.py
|
#!/usr/bin/env python
import os
import time
import watchdog.events
import watchdog.observers
import watchdog.utils
import signal
import sys
import subprocess
import string
import tempfile
import argparse
class PypushHandler(watchdog.events.FileSystemEventHandler):
"""Push all changes in the current directory to a remote server."""
def __init__(self, flags):
# Check if current directory is a git repo
if subprocess.call(['git', 'rev-parse']): # If this or any parent directory isn't a git repo, this command returns non-zero and prints an error message
print "Hint: run 'git init'"
sys.exit(1)
self.user = flags.user
self.path = flags.dest
self.quiet = flags.quiet
self.verbose = flags.verbose
self.cwd = os.getcwd()
if self.path[-1] != '/': # Ensure path ends in a slash, i.e. it is a directory
self.path += '/'
args = ['ssh', '-t', '-t', # Force tty allocation - this prevents certain error messages
'-M', '-S', '~/.ssh/socket-%r@%h:%p', # Create a master TCP connection that we can use later every time a file changes
self.user]
subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
if flags.skip_init:
print 'Waiting for file changes\n'
return
print 'Generating list of files'
args = ['git', 'ls-files', '-c', '-o', '--exclude-standard'] # Show all non-excluded files in the current directory
output = subprocess.Popen(args, stdout=subprocess.PIPE).communicate()[0]
tf = tempfile.NamedTemporaryFile(delete=False)
for line in string.split(output, '\n'):
if line != '':
tf.write('/' + line + '\n')
tf.close()
print 'Performing initial one-way sync'
args = ['rsync', '-az', # Usual flags - archive, compress
'-e', 'ssh -S ~/.ssh/socket-%r@%h:%p', # Connect to the master TCP connection from earlier
'--include-from=' + tf.name, # Include the list of files we got from git
'--exclude=*', # Exclude everything else
'--delete-excluded', # Delete excluded files
'./', # Sync current directory
self.user + ':' + self.path]
if self.verbose:
args.append('-v')
if subprocess.call(args):
print 'Error with rsync, aborting'
sys.exit(1)
os.remove(tf.name)
print 'Startup complete, waiting for file changes\n'
def print_quiet(self, message):
"""Only print the given message if not in quiet mode.
If message ends in a '\r', then it is printed without a newline. On most
shells, this means that a subsequent call to print will overwrite that
line.
"""
if not self.quiet:
if message[-1] == '\r':
print message,
sys.stdout.flush()
else:
print message
def should_ignore(self, filename):
"""Return whether changes to filename should be ignored."""
args = ['git', 'ls-files', filename, '-c', '-o', '--exclude-standard']
if subprocess.Popen(args, stdout=subprocess.PIPE).communicate()[0]: # If git outputs something, then that file isn't ignored
return False
return True
def relative_path(self, filename):
"""Convert filename to a path relative to the current directory."""
return filename.replace(self.cwd, '', 1)[1:]
def dispatch(self, event):
"""Dispatch events to the appropriate methods."""
if not event.is_directory: # Git doesn't care about directories, so neither do we
path = self.relative_path(event.src_path)
if watchdog.utils.has_attribute(event, 'dest_path'): # File move
dest = self.relative_path(event.dest_path)
self.on_moved(path, dest, path + ' moved to ' + dest)
elif event.event_type == 'deleted':
# We can't do 'git ls-files' on a deleted file, so just try to
# delete it - if it doesn't exist on the remote, nothing will happen
self.on_deleted(path, path + ' deleted')
else: # Created or deleted
if self.should_ignore(event.src_path):
return
self.on_modified(path, path + ' ' + event.event_type)
def on_modified(self, path, output=''):
"""Call rsync on the given relative path."""
if output:
self.print_quiet(output + '\r')
args = ['rsync', '-az', '-e', 'ssh -S ~/.ssh/socket-%r@%h:%p', path, self.user + ':' + self.path + path]
if self.verbose:
args.append('-v')
subprocess.call(args)
if output:
self.print_quiet(output + '...pushed')
def on_moved(self, src, dest, output):
self.print_quiet(output + '\r')
if not self.should_ignore(dest):
self.on_modified(dest)
self.on_deleted(src)
self.print_quiet(output + '...pushed')
def on_deleted(self, path, output=''):
if output:
self.print_quiet(output + '\r')
args = ['ssh', '-S', '~/.ssh/socket-%r@%h:%p', self.user, 'rm -f ' + self.path + path]
subprocess.call(args)
if output:
self.print_quiet(output + '...pushed')
def main():
parser = argparse.ArgumentParser(description='Continuously push changes in the current directory to a remote server.',
epilog="""WARNING: pypush only performs a one-way sync. If you make
changes directly on the remote machine, they may be overwritten at
any time by changes made locally.""")
parser.add_argument('-q', '--quiet', action='store_const', default=False, const=True,
help='quiet mode - do not show output whenever a file changes')
parser.add_argument('-v', '--verbose', action='store_const', default=False, const=True,
help='verbose mode - run rsync in verbose mode')
parser.add_argument('-s', '--skip-init', action='store_const', default=False, const=True,
help='skip the initial one-way sync performed on startup')
parser.add_argument('user', metavar='[user@]hostname', help='the remote machine (and optional user name) to login to')
parser.add_argument('dest', help='the path to the remote directory to push changes to')
args = parser.parse_args()
event_handler = PypushHandler(args)
observer = watchdog.observers.Observer()
observer.schedule(event_handler, path='.', recursive=True)
observer.start()
try:
while True:
time.sleep(10)
except KeyboardInterrupt:
observer.stop()
observer.join()
if __name__ == '__main__':
main()
|
Python
| 0.000001
|
@@ -5403,16 +5403,92 @@
artup')%0A
+%09parser.add_argument('--version', action='version', version='%25(prog)s 1.0')%0A
%09parser.
|
6588ac0990f635a84127df3c125130d2379746c3
|
Fix nodereseat false success message
|
confluent_server/confluent/plugins/hardwaremanagement/enclosure.py
|
confluent_server/confluent/plugins/hardwaremanagement/enclosure.py
|
# Copyright 2017 Lenovo
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import confluent.core as core
import confluent.messages as msg
import pyghmi.exceptions as pygexc
def update(nodes, element, configmanager, inputdata):
emebs = configmanager.get_node_attributes(
nodes, (u'enclosure.manager', u'enclosure.bay'))
for node in nodes:
try:
em = emebs[node]['enclosure.manager']['value']
eb = emebs[node]['enclosure.bay']['value']
except KeyError:
yield msg.ConfluentNodeError(
node,
'Reseat is only supported on servers in an enclosure, and '
'with enclosure.manager and enclosure.bay defined')
continue
try:
for rsp in core.handle_path(
'/nodes/{0}/_enclosure/reseat_bay'.format(em),
'update', configmanager,
inputdata={'reseat': int(eb)}):
yield rsp
except pygexc.UnsupportedFunctionality as uf:
yield msg.ConfluentNodeError(node, str(uf))
|
Python
| 0
|
@@ -660,16 +660,51 @@
s pygexc
+%0Aimport confluent.exceptions as exc
%0A%0Adef up
@@ -1613,8 +1613,116 @@
tr(uf))%0A
+ except exc.TargetEndpointUnreachable as uf:%0A yield msg.ConfluentNodeError(node, str(uf))%0A
|
14cf74eff6160cb1d4306087204e48c88999258a
|
deal with just one spectrum
|
apogee/modelspec/ferre.py
|
apogee/modelspec/ferre.py
|
###############################################################################
# ferre.py: module for interacting with Carlos Allende Prieto's FERRE code
###############################################################################
import os
import subprocess
import numpy
from apogee.modelspec import paramArrayInputDecorator
def run_ferre(dir,verbose=False):
"""
NAME:
run_ferre
PURPOSE:
run an instance of FERRE
INPUT:
dir - directory to run the instance in (has to have an input.nml file)
verbose= (False) if True, print the FERRE output
OUTPUT:
(none)
HISTORY:
2015-01-22 - Written - Bovy (IAS)
"""
# Set up the subprocess to run FERRE
if verbose:
stdout= None
stderr= None
else:
stdout= open('/dev/null', 'w')
stderr= subprocess.STDOUT
try:
subprocess.check_call(['ferre'],cwd=dir,stdout=stdout,stderr=stderr)
except subprocess.CalledProcessError:
raise Exception("Running FERRE instance in directory %s failed ..." % dir)
return None
def write_input_nml(dir,
pfile,
offile,
ffile=None,
erfile=None,
opfile=None,
ndim=6,
nov=0,
synthfile=None,
inter=3,
errbar=1,
indini=0,
init=0,
f_format=1,
f_access=1):
"""
NAME:
write_input_nml
PURPOSE:
write a FERRE input.nml file
INPUT:
dir - directory where the input.nml file will be written to
pfile - name of the input parameter file
offile - name of the output best-fitting model file
ffile= name of the input parameter file
erfile= name of the flux errors file
opfile= name of the output parameter file
ndim= (6) number of dimensions/parameters
nov= (0) number of parameters to search (0=interpolation)
synthfile= (default ferreModelLibraryPath in apogee.tools.path) file name of the model grid's header
inter= (3) order of the interpolation
errbar= (1) method for calculating the error bars
indini= (0) how to initialize the search
init= (0) if 0, initialize the search at the parameters in the pfile
f_format= (1) file format (0=ascii, 1=unf)
f_access= (1) 0: load whole library, 1: use direct access (for small numbers of interpolations)
OUTPUT:
(none; just writes the file)
HISTORY:
2015-01-22 - Written - Bovy (IAS)
"""
if synthfile is None:
import apogee.tools.path as appath
synthfile= appath.ferreModelLibraryPath(header=True)
with open(os.path.join(dir,'input.nml'),'w') as outfile:
outfile.write('&LISTA\n')
outfile.write('NDIM = %i\n' % ndim)
outfile.write('NOV = %i\n' % nov)
indvstr= 'INDV ='
for ii in range(1,ndim+1):
indvstr+= ' %i' % ii
outfile.write(indvstr+'\n')
outfile.write("SYNTHFILE(1) = '%s'\n" % synthfile)
outfile.write("PFILE = '%s'\n" % pfile)
if not ffile is None:
outfile.write("FFILE = '%s'\n" % ffile)
if not erfile is None:
outfile.write("ERFILE = '%s'\n" % erfile)
if not opfile is None:
outfile.write("OPFILE = '%s'\n" % opfile)
outfile.write("OFFILE = '%s'\n" % offile)
outfile.write('INTER = %i\n' % inter)
outfile.write('ERRBAR = %i\n' % errbar)
outfile.write('INDINI = %i\n' % indini)
outfile.write('INIT = %i\n' % init)
outfile.write('F_FORMAT = %i\n' % f_format)
outfile.write('F_ACCESS = %i\n' % f_access)
outfile.write('/\n')
return None
# Interpolation
@paramArrayInputDecorator(1)
def write_ipf(dir,teff,logg,metals,am,nm,cm,vm=None):
"""
NAME:
write_ipf
PURPOSE:
write a FERRE input.ipf file
INPUT:
dir - directory where the input.ipf file will be written to
Parameters (can be 1D arrays):
teff - Effective temperature (K)
logg - log10 surface gravity / cm s^-2
metals - overall metallicity
am - [alpha/M]
nm - [N/M]
cm - [C/M]
vm= if using the 7D library, also specify the microturbulence
OUTPUT:
(none; just writes the file)
HISTORY:
2015-01-23 - Written - Bovy (IAS)
"""
with open(os.path.join(dir,'input.ipf'),'w') as outfile:
for ii in range(len(teff)):
outStr= 'dummy '
if not vm is None:
outStr+= '%.3f ' % numpy.log10(vm[ii])
outStr+= '%.3f %.3f %.3f %.3f %.3f %.1f\n' \
% (cm[ii],nm[ii],am[ii],
metals[ii],logg[ii],teff[ii])
outfile.write(outStr)
return None
# Fitting
def write_ffile(dir,spec,specerr=None):
"""
NAME:
write_ffile
PURPOSE:
write FERRE input.frd file with input fluxes and input.err with input flux errors
INPUT:
dir - directory where the input.frd file will be written to
spec - spectra (nspec,nwave)
specerr= (None) if set, aos write the input.err file
OUTPUT:
(none; just writes the file)
HISTORY:
2015-01-23 - Written - Bovy (IAS)
"""
numpy.savetxt(os.path.join(dir,'input.frd'),spec)
if not specerr is None:
numpy.savetxt(os.path.join(dir,'input.err'),specerr)
return None
|
Python
| 0
|
@@ -5401,24 +5401,159 @@
AS)%0A %22%22%22%0A
+ if len(spec.shape) == 1: %0A spec= numpy.reshape(spec,(1,len(spec)))%0A specerr= numpy.reshape(specerr,(1,len(specerr)))%0A
numpy.sa
|
98246db3d2d75593550b66f0fcb198eccc24b5f0
|
Increment version number
|
rencfs.py
|
rencfs.py
|
#!/usr/bin/python
# Copyright (c) 2016, Jan Varho
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import errno
import hmac
import os
import sys
from base64 import b16encode
from hashlib import sha256
from Crypto.Cipher import AES
from Crypto.Util import Counter
from fuse import FUSE, FuseOSError, Operations
__version__ = '0.1'
BLOCK_MASK = 15
BLOCK_SIZE = 16
BUFFER_SIZE = 1024*16
MAC_SIZE = 16
VERIFY = True
class RencFS(Operations):
def __init__(self, root, key, decrypt):
self.root = root
self.hmac = hmac.new(key[:16], digestmod=sha256)
self.aes_ecb = AES.new(key[16:], AES.MODE_ECB)
self.keys = {}
self.decrypt = decrypt
# Helpers
def _fullpath(self, partial):
if partial.startswith('/'):
partial = partial[1:]
path = os.path.join(self.root, partial)
return path
def _mac(self, fh, h=''):
pos, hmac = 0, self.hmac.copy()
if self.decrypt:
pos = MAC_SIZE
os.lseek(fh, pos, os.SEEK_SET)
while True:
d = os.read(fh, BUFFER_SIZE)
if not d:
break
if self.decrypt:
pos, d = len(d), self._enc(h, pos, d)
hmac.update(d)
return hmac.digest()[:MAC_SIZE]
def _getkey(self, fh):
if fh in self.keys:
return self.keys[fh]
if self.decrypt:
os.lseek(fh, 0, os.SEEK_SET)
h = self.aes_ecb.decrypt(os.read(fh, MAC_SIZE))
if VERIFY and h != self._mac(fh, h):
raise FuseOSError(errno.EPERM)
else:
h = self._mac(fh)
self.keys[fh] = h
return h
def _enc(self, key, offset, data):
if self.decrypt:
offset -= MAC_SIZE
index = offset // BLOCK_SIZE
ctr = Counter.new(128, initial_value=index)
aes = AES.new(key, AES.MODE_CTR, counter=ctr)
if not offset & BLOCK_MASK:
return aes.encrypt(data)
data = '\0' * (offset & BLOCK_MASK) + data
return aes.encrypt(data)[offset & BLOCK_MASK:]
# Filesystem methods
def access(self, path, mode):
full_path = self._fullpath(path)
if mode in (os.W_OK, os.X_OK):
return False
return os.access(full_path, mode)
def getattr(self, path, fh=None):
full_path = self._fullpath(path)
st = os.lstat(full_path)
st = dict((key, getattr(st, key)) for key in (
'st_atime', 'st_ctime', 'st_gid', 'st_mode',
'st_mtime', 'st_nlink', 'st_size', 'st_uid'
))
if self.decrypt:
st['st_size'] -= MAC_SIZE
else:
st['st_size'] += MAC_SIZE
return st
def readdir(self, path, fh):
full_path = self._fullpath(path)
dirents = ['.', '..']
try:
dirents.extend(os.listdir(full_path))
except OSError as e:
raise FuseOSError(e.errno)
for r in dirents:
yield r
def readlink(self, path):
pathname = os.readlink(self._fullpath(path))
return os.path.relpath(pathname, self.root)
def statfs(self, path):
full_path = self._fullpath(path)
stv = os.statvfs(full_path)
return dict((key, getattr(stv, key)) for key in (
'f_bavail', 'f_bfree', 'f_blocks', 'f_bsize', 'f_favail',
'f_ffree', 'f_files', 'f_flag', 'f_frsize', 'f_namemax'
))
def utimens(self, path, times=None):
raise FuseOSError(errno.EROFS)
# File methods
def open(self, path, flags):
full_path = self._fullpath(path)
return os.open(full_path, flags)
def create(self, path, mode, fi=None):
raise FuseOSError(errno.EROFS)
def read(self, path, length, offset, fh):
data = ''
h = self._getkey(fh)
if self.decrypt:
offset += MAC_SIZE
elif offset < MAC_SIZE:
data = self.aes_ecb.encrypt(h)[offset:offset+length]
length -= MAC_SIZE - offset
offset = 0
else:
offset -= MAC_SIZE
if length > 0:
os.lseek(fh, offset, os.SEEK_SET)
data += self._enc(h, offset, os.read(fh, length))
return data
def release(self, path, fh):
self.keys.pop(fh, None)
return os.close(fh)
def main(mountpoint, root, rawkey, decrypt): #pragma no cover
key = sha256(rawkey).digest()
FUSE(RencFS(root, key, decrypt), mountpoint, nothreads=True, foreground=True)
if __name__ == '__main__': #pragma no cover
main(sys.argv[2], sys.argv[1], sys.argv[3], '-d' in sys.argv)
|
Python
| 0.000021
|
@@ -998,17 +998,17 @@
__ = '0.
-1
+2
'%0A%0ABLOCK
|
03484fa3b9349df6a8310e25a55d9c372f2743dd
|
Fix the signing servlet
|
sydent/http/servlets/blindlysignstuffservlet.py
|
sydent/http/servlets/blindlysignstuffservlet.py
|
# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.web.resource import Resource
import json
import signedjson.key
import signedjson.sign
from sydent.db.invite_tokens import JoinTokenStore
from sydent.http.servlets import get_args, jsonwrap, send_cors, MatrixRestError
from sydent.http.auth import authIfV2
class BlindlySignStuffServlet(Resource):
isLeaf = True
def __init__(self, syd):
self.server_name = syd.server_name
self.tokenStore = JoinTokenStore(syd)
@jsonwrap
def render_POST(self, request):
send_cors(request)
authIfV2(self.sydent, request)
args = get_args(request, ("private_key", "token", "mxid"))
private_key_base64 = args['private_key']
token = args['token']
mxid = args['mxid']
sender = self.tokenStore.getSenderForToken(token)
if sender is None:
raise MatrixRestError(404, "M_UNRECOGNIZED", "Didn't recognize token")
to_sign = {
"mxid": mxid,
"sender": sender,
"token": token,
}
try:
private_key = signedjson.key.decode_signing_key_base64(
"ed25519",
"0",
private_key_base64
)
signed = signedjson.sign.sign_json(
to_sign,
self.server_name,
private_key
)
except:
logger.exception("signing failed")
raise MatrixRestError(500, "M_UNKNOWN", "Internal Server Error")
return signed
@jsonwrap
def render_OPTIONS(self, request):
send_cors(request)
request.setResponseCode(200)
return {}
|
Python
| 0
|
@@ -956,16 +956,42 @@
, syd):%0A
+ self.sydent = syd%0A
|
0728b083cf6da761c525fd763bd9371ef4d41f7a
|
correct some errors in the extension function
|
resize.py
|
resize.py
|
import string
import imghdr
import random
import os, sys
from os import listdir, path
from os.path import isfile, join
import argparse
from PIL import Image
def id_generator(size=6, chars=string.ascii_uppercase + string.digits):
return ''.join(random.choice(chars) for _ in range(size))
def extension(mypath, outputpath, convert):
outfilepath = os.path.join(outputpath, id_generator() + "." + imghdr.what(mypath) )
f, e = os.path.splitext(mypath)
outfile = outfilepath + convert
print convert
print outfile
if mypath != outfile:
try:
Image.open(mypath).save(outfile)
except IOError:
print("cannot convert", mypath)
def convertlist(mypath, outputpath, convert):
onlyfiles = [ f for f in listdir(mypath) if isfile(join(mypath,f)) ]
print onlyfiles
onlyimagefile = []
for x in onlyfiles:
try:
Image.open(os.path.join(mypath, x))
onlyimagefile.append(x)
except:
pass
for y in onlyimagefile:
convertion(os.path.join(mypath,y), outputpath, convert)
print onlyimagefile
def rotate(mypath, outputpath, degree):
outfile = os.path.join(outputpath, id_generator() + "." + imghdr.what(mypath) )
im = Image.open(mypath)
out = im.rotate(degree)
out.save(outfile)
def rotatelist(mypath, outputpath, degree):
onlyfiles = [ f for f in listdir(mypath) if isfile(join(mypath,f)) ]
print onlyfiles
onlyimagefile = []
for x in onlyfiles:
try:
Image.open(os.path.join(mypath, x))
onlyimagefile.append(x)
except:
pass
for y in onlyimagefile:
rotate(os.path.join(mypath,y), outputpath, degree)
print onlyimagefile
def resizelist(mypath, outputpath, width, height):
onlyfiles = [ f for f in listdir(mypath) if isfile(join(mypath,f)) ]
print onlyfiles
onlyimagefile = []
for x in onlyfiles:
try:
Image.open(os.path.join(mypath, x))
onlyimagefile.append(x)
except:
pass
for y in onlyimagefile:
resize(os.path.join(mypath,y), outputpath, width, height)
print onlyimagefile
def resize(a, b, width, height):
size = (width, height)
outfile = os.path.join(b, id_generator() + "." + imghdr.what(a) )
im = Image.open(a)
out = im.resize(size)
out.save(outfile)
parser = argparse.ArgumentParser()
parser.add_argument("-rz","--resize", help="image to be resize",
action="store_true")
parser.add_argument("-ro","--rotate", help="image to rotate",
action="store_true")
parser.add_argument("-p","--picture", help="display the resized image")
parser.add_argument("-o","--output", help="image path")
parser.add_argument("-d","--directory", help="path directory")
parser.add_argument("-ext","--convert", help="convert any images to any extension")
parser.add_argument("-w","--width", help="image width",
type=int)
parser.add_argument("-i","--height", help="image height",
type=int)
parser.add_argument("-deg","--degree", help="rotate image to a degree",
type=int)
args = parser.parse_args()
if args.resize:
if args.picture:
resize(args.picture, args.output, args.width, args.height)
elif args.directory:
resizelist(args.directory, args.output, args.width, args.height)
elif args.rotate:
if args.degree:
if args.picture:
rotate(args.picture, args.output, args.degree)
elif args.directory:
rotatelist(args.directory, args.output, args.degree)
elif args.convert:
if args.picture:
extension(args.picture, args.output, args.convert)
elif args.directory:
convertlist(args.directory, args.output, args.convert)
|
Python
| 0.000064
|
@@ -392,39 +392,11 @@
or()
- + %22.%22 + imghdr.what(mypath)
)%0A
+
@@ -1003,23 +1003,22 @@
-convert
+extens
ion(os.p
@@ -2595,32 +2595,152 @@
n=%22store_true%22)%0A
+parser.add_argument(%22-con%22,%22--conversion%22, help=%22images to convert extension%22,%0A action=%22store_true%22)%0A
parser.add_argum
@@ -3736,19 +3736,47 @@
s.conver
-t:%0A
+sion:%0A if args.convert:%0A
if a
@@ -3788,32 +3788,36 @@
icture:%0A
+
+
extension(args.p
@@ -3847,24 +3847,28 @@
gs.convert)%0A
+
elif arg
@@ -3872,32 +3872,36 @@
args.directory:%0A
+
convertl
|
b371a0e0b1a334de044c811515bce46377c886df
|
fix for Django==2.2
|
dj_anonymizer/anonymizer.py
|
dj_anonymizer/anonymizer.py
|
import django
from dj_anonymizer.conf import settings
from dj_anonymizer.utils import import_if_exist
if django.__version__ < '2.2':
try:
from django_bulk_update.helper import bulk_update
except ModuleNotFoundError:
raise ModuleNotFoundError(
"Django %s does not have native support for bulk_update and "
"django_bulk_update is not installed""" % django.__version__
)
class Anonymizer:
anonym_models = {}
clean_models = {}
skip_models = []
def __init__(self, soft_mode=True):
models_set = set()
# this for django contrib.auth.models or can be used
# as single file for defining all models to anonymize
import_if_exist('base')
for app in django.apps.apps.get_app_configs():
models_set.update(
model.__module__ + '.' + model.__name__
for model in app.get_models()
)
import_if_exist(app.name)
all_models = set(
self.skip_models +
list(self.anonym_models.keys()) +
list(self.clean_models.keys())
)
if not soft_mode and not models_set.issubset(all_models):
raise LookupError(
'You did not set those models to any list: {}'.format(
list(models_set.difference(all_models))))
def anonymize(self):
print('Updating started')
for anonym_cls in list(self.anonym_models.values()):
if not anonym_cls.get_fields_names():
continue
queryset = anonym_cls.Meta.queryset.only(
*anonym_cls.get_fields_names()
)
print('\nGenerating fake values for model "{}"'.format(
queryset.model.__name__
))
i = 0
total = queryset.count()
for j in list(range(0, total,
settings.ANONYMIZER_SELECT_BATCH_SIZE)) + [None]:
subset = queryset.order_by('pk')[i:j]
for obj in subset:
i += 1
for name in anonym_cls.get_fields_names():
setattr(obj, name, next(
getattr(anonym_cls, name))
)
if django.__version__ <= '2.2':
bulk_update(
subset,
batch_size=settings.ANONYMIZER_UPDATE_BATCH_SIZE,
update_fields=anonym_cls.get_fields_names()
)
else:
subset.model.objects.bulk_update(
subset,
anonym_cls.get_fields_names(),
batch_size=settings.ANONYMIZER_UPDATE_BATCH_SIZE,
)
print('\n\nUpdating finished')
def clean(self):
print('\nCleaning started\n')
for queryset in self.clean_models.values():
print('Cleaning "{}" ...'.format(queryset.model.__name__))
queryset.delete()
print('\nCleaning finished')
|
Python
| 0.000031
|
@@ -2320,17 +2320,16 @@
sion__ %3C
-=
'2.2':%0A
|
d76811778334473274701a40b1f846146d0f92d1
|
Use inspect.cleandoc in autogen_config.
|
docs/autogen_config.py
|
docs/autogen_config.py
|
#!/usr/bin/env python
from os.path import join, dirname, abspath
from IPython.terminal.ipapp import TerminalIPythonApp
from ipykernel.kernelapp import IPKernelApp
from traitlets import Undefined
from collections import defaultdict
here = abspath(dirname(__file__))
options = join(here, 'source', 'config', 'options')
generated = join(options, 'config-generated.txt')
import textwrap
indent = lambda text,n: textwrap.indent(text,n*' ')
def dedent(text):
"""Equivalent of textwrap.dedent that ignores unindented first line.
This means it will still dedent strings like:
'''foo
is a bar
'''
For use in wrap_paragraphs.
"""
if text.startswith('\n'):
# text starts with blank line, don't ignore the first line
return textwrap.dedent(text)
# split first line
splits = text.split('\n',1)
if len(splits) == 1:
# only one line
return textwrap.dedent(text)
first, rest = splits
# dedent everything but the first line
rest = textwrap.dedent(rest)
return '\n'.join([first, rest])
def interesting_default_value(dv):
if (dv is None) or (dv is Undefined):
return False
if isinstance(dv, (str, list, tuple, dict, set)):
return bool(dv)
return True
def format_aliases(aliases):
fmted = []
for a in aliases:
dashes = '-' if len(a) == 1 else '--'
fmted.append('``%s%s``' % (dashes, a))
return ', '.join(fmted)
def class_config_rst_doc(cls, trait_aliases):
"""Generate rST documentation for this class' config options.
Excludes traits defined on parent classes.
"""
lines = []
classname = cls.__name__
for k, trait in sorted(cls.class_traits(config=True).items()):
ttype = trait.__class__.__name__
fullname = classname + '.' + trait.name
lines += ['.. configtrait:: ' + fullname,
''
]
help = trait.help.rstrip() or 'No description'
lines.append(indent(dedent(help), 4) + '\n')
# Choices or type
if 'Enum' in ttype:
# include Enum choices
lines.append(indent(
':options: ' + ', '.join('``%r``' % x for x in trait.values), 4))
else:
lines.append(indent(':trait type: ' + ttype, 4))
# Default value
# Ignore boring default values like None, [] or ''
if interesting_default_value(trait.default_value):
try:
dvr = trait.default_value_repr()
except Exception:
dvr = None # ignore defaults we can't construct
if dvr is not None:
if len(dvr) > 64:
dvr = dvr[:61] + '...'
# Double up backslashes, so they get to the rendered docs
dvr = dvr.replace('\\n', '\\\\n')
lines.append(indent(':default: ``%s``' % dvr, 4))
# Command line aliases
if trait_aliases[fullname]:
fmt_aliases = format_aliases(trait_aliases[fullname])
lines.append(indent(':CLI option: ' + fmt_aliases, 4))
# Blank line
lines.append('')
return '\n'.join(lines)
def reverse_aliases(app):
"""Produce a mapping of trait names to lists of command line aliases.
"""
res = defaultdict(list)
for alias, trait in app.aliases.items():
res[trait].append(alias)
# Flags also often act as aliases for a boolean trait.
# Treat flags which set one trait to True as aliases.
for flag, (cfg, _) in app.flags.items():
if len(cfg) == 1:
classname = list(cfg)[0]
cls_cfg = cfg[classname]
if len(cls_cfg) == 1:
traitname = list(cls_cfg)[0]
if cls_cfg[traitname] is True:
res[classname+'.'+traitname].append(flag)
return res
def write_doc(name, title, app, preamble=None):
trait_aliases = reverse_aliases(app)
filename = join(options, name+'.rst')
with open(filename, 'w') as f:
f.write(title + '\n')
f.write(('=' * len(title)) + '\n')
f.write('\n')
if preamble is not None:
f.write(preamble + '\n\n')
#f.write(app.document_config_options())
for c in app._classes_inc_parents():
f.write(class_config_rst_doc(c, trait_aliases))
f.write('\n')
if __name__ == '__main__':
# Touch this file for the make target
with open(generated, 'w'):
pass
write_doc('terminal', 'Terminal IPython options', TerminalIPythonApp())
write_doc('kernel', 'IPython kernel options', IPKernelApp(),
preamble=("These options can be used in :file:`ipython_kernel_config.py`. "
"The kernel also respects any options in `ipython_config.py`"),
)
|
Python
| 0
|
@@ -58,16 +58,31 @@
abspath
+%0Aimport inspect
%0A%0Afrom I
@@ -452,640 +452,8 @@
')%0A%0A
-def dedent(text):%0A %22%22%22Equivalent of textwrap.dedent that ignores unindented first line.%0A%0A This means it will still dedent strings like:%0A '''foo%0A is a bar%0A '''%0A%0A For use in wrap_paragraphs.%0A %22%22%22%0A%0A if text.startswith('%5Cn'):%0A # text starts with blank line, don't ignore the first line%0A return textwrap.dedent(text)%0A%0A # split first line%0A splits = text.split('%5Cn',1)%0A if len(splits) == 1:%0A # only one line%0A return textwrap.dedent(text)%0A%0A first, rest = splits%0A # dedent everything but the first line%0A rest = textwrap.dedent(rest)%0A return '%5Cn'.join(%5Bfirst, rest%5D)%0A%0A%0A
%0Adef
@@ -1377,14 +1377,24 @@
ent(
-dedent
+inspect.cleandoc
(hel
|
7d7059082bf3b4d87585905a9a02711366e0446a
|
fix typo
|
tensorforce/core/memories/prioritized_replay.py
|
tensorforce/core/memories/prioritized_replay.py
|
# Copyright 2017 reinforce.io. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""
Replay memory implementing priotised experience replay.
"""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
from random import random, randrange
from six.moves import xrange
import numpy as np
from tensorforce import util, TensorForceError
from tensorforce.core.memories import Memory
class PrioritizedReplay(Memory):
def __init__(self, capacity, states_config, actions_config, prioritization_weight=1.0):
super(PrioritizedReplay, self).__init__(capacity, states_config, actions_config)
self.prioritization_weight = prioritization_weight
self.internals_config = None
self.observations = list() # stores (priority, observation) pairs in reverse priority order
self.none_priority_index = 0
self.batch_indices = None
self.last_observation = None # stores last observation until next_state value is known
def add_observation(self, state, action, reward, terminal, internal):
if self.internals_config is None and internal is not None:
self.internals_config = [(i.shape, i.dtype) for i in internal]
if self.last_observation is not None:
observation = self.last_observation + (state, internal)
if len(self.observations) < self.capacity:
self.observations.append((None, observation))
elif self.none_priority_index > 0:
priority, _ = self.observations.pop(self.none_priority_index - 1)
self.observations.append((None, observation))
self.none_priority_index -= 1
else:
raise TensorForceError("Memory contains only unseen observations.")
self.last_observation = (state, action, reward, terminal, internal)
def get_batch(self, batch_size, next_states=False):
"""
Samples a batch of the specified size according to priority.
Args:
batch_size: The batch size
next_states: A boolean flag indicating whether 'next_states' values should be included
Returns: A dict containing states, actions, rewards, terminals, internal states (and next states)
"""
if batch_size<len(self.observations):
raise TensorForceError("Batch size is larger than observations in memory: increase config.first_update.")
states = {name: np.zeros((batch_size,) + tuple(state.shape), dtype=util.np_dtype(state.type)) for name, state in self.states_config.items()}
actions = {name: np.zeros((batch_size,) + tuple(action.shape), dtype=util.np_dtype('float' if action.continuous else 'int')) for name, action in self.actions_config.items()}
rewards = np.zeros((batch_size,), dtype=util.np_dtype('float'))
terminals = np.zeros((batch_size,), dtype=util.np_dtype('bool'))
internals = [np.zeros((batch_size,) + shape, dtype) for shape, dtype in self.internals_config]
if next_states:
next_states = {name: np.zeros((batch_size,) + tuple(state.shape), dtype=util.np_dtype(state.type)) for name, state in self.states_config.items()}
next_internals = [np.zeros((batch_size,) + shape, dtype) for shape, dtype in self.internals_config]
self.batch_indices = list()
not_sampled_index = self.none_priority_index
sum_priorities = sum(priority for priority, _ in self.observations if priority is not None)
for n in xrange(batch_size):
if not_sampled_index < len(self.observations):
_, observation = self.observations[not_sampled_index]
index = not_sampled_index
not_sampled_index += 1
elif sum_priorities / self.capacity < util.epsilon:
index = randrange(self.none_priority_index)
while index in self.batch_indices:
index = randrange(self.none_priority_index)
else:
while True:
sample = random()
for index, (priority, observation) in enumerate(self.observations):
sample -= priority / sum_priorities
if sample < 0.0 or index >= self.none_priority_index:
break
if index not in self.batch_indices:
break
for name, state in states.items():
state[n] = observation[0][name]
for name, action in actions.items():
action[n] = observation[1][name]
rewards[n] = observation[2]
terminals[n] = observation[3]
for k, internal in enumerate(internals):
internal[n] = observation[4][k]
if next_states:
for name, next_state in next_states.items():
next_state[n] = observation[5][name]
for k, next_internal in enumerate(next_internals):
next_internal[n] = observation[6][k]
self.batch_indices.append(index)
if next_states:
return dict(states=states, actions=actions, rewards=rewards, terminals=terminals, internals=internals, next_states=next_states, next_internals=next_internals)
else:
return dict(states=states, actions=actions, rewards=rewards, terminals=terminals, internals=internals)
def update_batch(self, loss_per_instance):
"""
Computes priorities according to loss.
Args:
loss_per_instance:
Returns:
"""
if self.batch_indices is None:
raise TensorForceError("Need to call get_batch before each update_batch call.")
if len(loss_per_instance) != len(self.batch_indices):
raise TensorForceError("For all instances a loss value has to be provided.")
updated = list()
for index, loss in zip(self.batch_indices, loss_per_instance):
priority, observation = self.observations[index]
updated.append((loss ** self.prioritization_weight, observation))
for index in sorted(self.batch_indices, reverse=True):
priority, _ = self.observations.pop(index)
self.none_priority_index -= (priority is not None)
self.batch_indices = None
updated = sorted(updated, key=(lambda x: x[0]))
update_priority, update_observation = updated.pop()
index = -1
for priority, _ in iter(self.observations):
index += 1
if index == self.none_priority_index:
break
if update_priority < priority:
continue
self.observations.insert(index, (update_priority, update_observation))
index += 1
self.none_priority_index += 1
if not updated:
break
update_priority, update_observation = updated.pop()
else:
self.observations.insert(index, (update_priority, update_observation))
self.none_priority_index += 1
while updated:
self.observations.insert(index, updated.pop())
self.none_priority_index += 1
|
Python
| 0.999991
|
@@ -2906,17 +2906,17 @@
tch_size
-%3C
+%3E
len(self
|
397d501d8c1356b97030a75da74f1b82252b94b9
|
version bump
|
django_mako_plus/version.py
|
django_mako_plus/version.py
|
# This file should have NO imports and be entirely standalone.
# This allows it to import into the runtime DMP as well as
# setup.py during installation.
__version__ = '5.7.6'
|
Python
| 0.000001
|
@@ -171,7 +171,7 @@
5.7.
-6
+7
'%0A
|
4aade2f9253415a1f75384e1ea64f9cfd8f36cdd
|
version bump
|
django_mako_plus/version.py
|
django_mako_plus/version.py
|
# This file should have NO imports and be entirely standalone.
# This allows it to import into the runtime DMP as well as
# setup.py during installation.
__version__ = '3.7.1'
# Reminder on uploading to pypi and removing the build folders:
'''
python3 setup.py sdist upload
rm -rf dist/ django_mako_plus.egg-info/
'''
|
Python
| 0.000001
|
@@ -171,9 +171,9 @@
3.7.
-1
+2
'%0A%0A%0A
|
1b1fb03626475a0e32998e108a6f974b567cd2c4
|
Fix bugs: 1. fix pool not working. 2. fix autocommit setting not working in SQLAlchemy proxied connection.
|
django_postgrespool/base.py
|
django_postgrespool/base.py
|
# -*- coding: utf-8 -*-
import logging
from functools import partial
from sqlalchemy import event
from sqlalchemy.pool import manage, QueuePool
from psycopg2 import InterfaceError, ProgrammingError, OperationalError
# from django.db import transaction
from django.conf import settings
from django.db.backends.postgresql_psycopg2.base import *
from django.db.backends.postgresql_psycopg2.base import DatabaseWrapper as Psycopg2DatabaseWrapper
from django.db.backends.postgresql_psycopg2.creation import DatabaseCreation as Psycopg2DatabaseCreation
POOL_SETTINGS = 'DATABASE_POOL_ARGS'
# DATABASE_POOL_ARGS should be something like:
# {'max_overflow':10, 'pool_size':5, 'recycle':300}
pool_args = getattr(settings, POOL_SETTINGS, {})
db_pool = manage(Database, **pool_args)
log = logging.getLogger('z.pool')
def _log(message, *args):
log.debug(message)
# Only hook up the listeners if we are in debug mode.
if settings.DEBUG:
event.listen(QueuePool, 'checkout', partial(_log, 'retrieved from pool'))
event.listen(QueuePool, 'checkin', partial(_log, 'returned to pool'))
event.listen(QueuePool, 'connect', partial(_log, 'new connection'))
def is_disconnect(e, connection, cursor):
"""
Connection state check from SQLAlchemy:
https://bitbucket.org/sqlalchemy/sqlalchemy/src/tip/lib/sqlalchemy/dialects/postgresql/psycopg2.py
"""
if isinstance(e, OperationalError):
# these error messages from libpq: interfaces/libpq/fe-misc.c.
# TODO: these are sent through gettext in libpq and we can't
# check within other locales - consider using connection.closed
return 'terminating connection' in str(e) or \
'closed the connection' in str(e) or \
'connection not open' in str(e) or \
'could not receive data from server' in str(e)
elif isinstance(e, InterfaceError):
# psycopg2 client errors, psycopg2/conenction.h, psycopg2/cursor.h
return 'connection already closed' in str(e) or \
'cursor already closed' in str(e)
elif isinstance(e, ProgrammingError):
# not sure where this path is originally from, it may
# be obsolete. It really says "losed", not "closed".
return "closed the connection unexpectedly" in str(e)
else:
return False
class DatabaseCreation(Psycopg2DatabaseCreation):
def destroy_test_db(self, *args, **kw):
"""Ensure connection pool is disposed before trying to drop database."""
self.connection._dispose()
super(DatabaseCreation, self).destroy_test_db(*args, **kw)
class DatabaseWrapper(Psycopg2DatabaseWrapper):
"""SQLAlchemy FTW."""
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)
self.creation = DatabaseCreation(self)
def _commit(self):
if self.connection is not None and self.is_usable():
with self.wrap_database_errors:
return self.connection.commit()
def _rollback(self):
if self.connection is not None and self.is_usable():
with self.wrap_database_errors:
return self.connection.rollback()
def _dispose(self):
"""Dispose of the pool for this instance, closing all connections."""
self.close()
# _DBProxy.dispose doesn't actually call dispose on the pool
conn_params = self.get_connection_params()
key = db_pool._serialize(**conn_params)
try:
pool = db_pool.pools[key]
except KeyError:
pass
else:
pool.dispose()
del db_pool.pools[key]
|
Python
| 0
|
@@ -3628,28 +3628,763 @@
del db_pool.pools%5Bkey%5D%0A
+%0A def get_new_connection(self, conn_params):%0A # get new connection through pool, not creating a new one outside.%0A connection = db_pool.connect(**conn_params)%0A return connection%0A%0A def _set_autocommit(self, autocommit):%0A # fix autocommit setting not working in proxied connection%0A with self.wrap_database_errors:%0A if self.psycopg2_version %3E= (2, 4, 2):%0A self.connection.connection.autocommit = autocommit%0A else:%0A if autocommit:%0A level = psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT%0A else:%0A level = self.isolation_level%0A self.connection.connection.set_isolation_level(level)%0A
|
783f7a5d17b3db83e1f27ad3bebb4c165c4e66ca
|
Fix convert to support Python 2 and Python 3
|
django_settings/keymaker.py
|
django_settings/keymaker.py
|
class KeyMaker(object):
def __init__(self, prefix):
self.prefix = prefix
def convert(self, arg):
return str(arg)
def args_to_key(self, args):
return ":".join(map(self.convert, args))
def kwargs_to_key(self, kwargs):
return ":".join([
"%s:%s" % (self.convert(k), self.convert(v))
for k, v in kwargs.items()
])
def make(self, method_name, args, kwargs):
key = ":".join((
self.prefix,
method_name,
self.args_to_key(args),
self.kwargs_to_key(kwargs),
))
return key
|
Python
| 0.999994
|
@@ -1,8 +1,20 @@
+import sys%0A%0A
class Ke
@@ -119,16 +119,144 @@
, arg):%0A
+ if sys.version_info %3C (3,) and isinstance(arg, unicode):%0A return arg.encode(django.settings.DEFAULT_CHARSET)%0A
|
0225173efe5fcb0de78239f26a5eca9c4d7d7a6e
|
add url to match language session view
|
django_test/article/urls.py
|
django_test/article/urls.py
|
from django.conf.urls import patterns, include, url
urlpatterns = patterns('',
url(r'^all/$', 'article.views.articles'),
url(r'^get/(?P<article_id>\d+)/$', 'article.views.article'),
)
|
Python
| 0
|
@@ -184,9 +184,111 @@
icle'),%0A
+ # for session language%0A url(r'%5Elanguage/(?P%3Clanguage%3E%5Ba-z%5C-%5D+)/$', 'article.views.language'),%0A%0A
)
|
0b6b236f2be92f408cce9a91bf0c8100c3ecbac0
|
Switch to jpg
|
rodent.py
|
rodent.py
|
"""
Rodent
Usage:
rodent.py capture [--until=<time>] [--folder=<folder>] [--interval=<interval>]
rodent.py make_video [--folder=<folder>]
rodent.py automate [--until=<time>] [--folder=<folder>] [--interval=<interval>]
Options:
-h --help Show this screen
--until=<time> Until when to record, needs to be a HH:MM format (ie 12:45)
--folder=<folder> The folder in which the pictures are stored [default: photos]
--interval=<interval> The interval between 2 photos [default: 20]
"""
import datetime
import os
import time
import sys
import cv2
from docopt import docopt
def clear_directory(folder):
"""
Delete all the pics in the photos directory
"""
for filename in os.listdir(folder):
os.remove('%s/%s' % (folder, filename))
def start_camera(folder, interval, until=None):
"""
Start taking pictures every interval.
If until is specified, it will take pictures
until that time is reached (24h format).
Needs to be of the following format: HH:MM
"""
clear_directory(folder)
camera = cv2.VideoCapture(0)
filename = '%s/%s.png'
number = 0
if until:
until_hour, until_minutes = until.split(':')
until_hour = int(until_hour)
until_minutes = int(until_minutes)
while True:
number += 1
_, image = camera.read()
now = datetime.datetime.now()
print 'Taking picture number %d at %s' % (number, now.isoformat())
cv2.imwrite(filename % (folder, now), image)
if until:
if now.hour > until_hour or (now.hour == until_hour and now.minute >= until_minutes):
break
time.sleep(interval)
del(camera)
def make_video(folder):
# Sorting on dates, ISO ftw
filenames = sorted(os.listdir('photos'))
# Find out size of the pictures we're taking
#filename = '%s/%s.png'
first_pic = cv2.imread('%s/%s' % (folder, filenames[0]))
# first_pic.shape gives a tuple (height, width, layer)
height, width, _ = first_pic.shape
# magic below, might need to change the codec for your own webcam
fourcc = cv2.cv.CV_FOURCC(*'XVID')
video = cv2.VideoWriter('output.avi', fourcc, 10, (width, height))
for filename in filenames:
video.write(cv2.imread('%s/%s' % (folder, filename)))
video.release()
if __name__ == "__main__":
arguments = docopt(__doc__)
folder = arguments['--folder']
interval = int(arguments['--interval'])
until = arguments['--until']
if arguments['capture']:
start_camera(folder, interval, until)
elif arguments['make_video']:
make_video(folder)
elif arguments['automate']:
start_camera(folder, interval, until)
make_video(folder)
|
Python
| 0.000002
|
@@ -515,10 +515,9 @@
lt:
-20
+1
%5D%0A%22%22
@@ -606,16 +606,38 @@
docopt%0A
+from PIL import Image%0A
%0A%0Adef cl
@@ -1140,26 +1140,26 @@
me = '%25s/%25s.
+j
p
-n
g'%0A numbe
@@ -1507,51 +1507,315 @@
-cv2.imwrite(filename %25 (folder, now), image
+# Tried %5Bcv2.cv.CV_IMWRITE_PNG_COMPRESSION, 3%5D but still atrocious compression%0A filepath = filename %25 (folder, now)%0A cv2.imwrite(filepath, image)%0A%0A # Resave it with pillow to do a better compression%0A img = Image.open(filepath)%0A img.save(filepath, optimize=True, quality=80
)%0A%0A
|
f8eb93f1845a7776c61a59bafc6fdeb689712aff
|
Add dialog title to example
|
examples/comp/ask_user_dialog.py
|
examples/comp/ask_user_dialog.py
|
"""Example showing the Ask User dialog controls and overall usage."""
import fusionless as fu
dialog = fu.AskUserDialog()
dialog.add_text("text", default="Default text value")
dialog.add_position("position", default=(0.2, 0.8))
dialog.add_slider("slider", default=0.5, min=-10, max=10)
dialog.add_screw("screw")
dialog.add_file_browse("file", default="C:/path/to/foo")
dialog.add_path_browse("path")
dialog.add_clip_browse("clip")
dialog.add_checkbox("checkbox", name="Do not check this!")
dialog.add_dropdown("dropdown", options=["A", "B", "C"])
dialog.add_multibutton("multibutton", options=["Foo", "Bar", "Nugget"])
result = dialog.show()
if result is None:
# Dialog was cancelled
pass
else:
checked = result['checkbox']
if checked:
print("You sure are living on the edge!")
import pprint
pprint.pprint(result)
|
Python
| 0
|
@@ -115,16 +115,41 @@
rDialog(
+%22Example Ask User Dialog%22
)%0Adialog
|
b971cd102e30f721feb50c934012eb9c26105186
|
query input working; handle empty input
|
runsql.py
|
runsql.py
|
#!/usr/bin/python
import urwid
import mainview
"""
NOTES
-----
This module builds the widget to allow the user to enter in their own SQL query
This module will also run the sql query and show a success message if it works
"""
class Qinfo:
def __init__(self):
query_text = ""
query_status = ""
def show_runsql(frame, body, user_info):
#used to easily insert a blank line widget
blank = urwid.Divider()
query_info = Qinfo()
#signal handler for text input, stores input information from user
def edit_change_event(self, text):
query_info.query_text = text
#signal handler for the run button
def run_btn_press(button):
if query_info.query_text != "":
query_info.query_status = user_info.db_obj.runquery(user_info.db_conn, query_info.query_text)
if query_info.query_status == 1:
#show success message
frame.footer = urwid.AttrWrap(urwid.Text(u" Query executed successfully"), 'header')
#reload main view. this updates tables list if table was created
mainview.show_main_view(frame, body, user_info)
else:
text_error.original_widget = urwid.AttrWrap( urwid.Text(query_info.query_status), 'error')
else:
text_error.original_widget = urwid.AttrWrap( urwid.Text(u"You have enter in a query."), 'error')
#variables to hold text to show user for login view
text_1 = urwid.Text(u"Enter a SQL query to run below:")
text_2 = urwid.Text(u"(The edit box supports multiple lines when you press enter)")
text_error = urwid.AttrMap( urwid.Text(u""), 'body')
#setting up the edit input widgets for database name and password
sql_edit = urwid.Edit(caption="", edit_text="", multiline=True)
urwid.connect_signal(sql_edit, 'change', edit_change_event)
sql_edit = urwid.AttrWrap(sql_edit, 'btnf', 'btn')
#run button
runsql_btn = urwid.AttrWrap( urwid.Button(u"Run", run_btn_press), 'btnf', 'btn')
#This is the pile widget that holds all of the main body widgets
runsql = urwid.WidgetPlaceholder(
urwid.Pile([
urwid.Padding(text_error, left=5, width = 50),
blank,
urwid.Padding(text_1, left=2),
urwid.Padding(text_2, left=2),
urwid.Padding( sql_edit, left=2, width=60),
blank,
urwid.Padding(runsql_btn, left=10, width=11)
]))
return runsql
|
Python
| 0.999999
|
@@ -259,24 +259,29 @@
(self):%0A
+self.
query_text =
@@ -285,15 +285,22 @@
t =
-%22%22
+None
%0A
+self.
quer
@@ -310,18 +310,20 @@
tatus =
-%22%22
+None
%0A%0Adef sh
@@ -695,15 +695,13 @@
!=
-%22%22:
+None:
%0A
@@ -1275,16 +1275,17 @@
.Text(u%22
+
You have
|
5e5f5f4c0786bd87ab197a17a74d24a6a31b5dce
|
add proper order to institution list again
|
api/institutions/views.py
|
api/institutions/views.py
|
from rest_framework import generics
from rest_framework import permissions as drf_permissions
from rest_framework import status
from rest_framework.response import Response
from modularodm import Q
from framework.auth.oauth_scopes import CoreScopes
from website.models import Node, User, Institution
from api.base import permissions as base_permissions
from api.base.filters import ODMFilterMixin
from api.base.views import JSONAPIBaseView
from api.base.serializers import JSONAPISerializer
from api.base.utils import get_object_or_error
from api.nodes.serializers import NodeSerializer
from api.users.serializers import UserSerializer
from .authentication import InstitutionAuthentication
from .serializers import InstitutionSerializer
class InstitutionMixin(object):
"""Mixin with convenience method get_institution
"""
institution_lookup_url_kwarg = 'institution_id'
def get_institution(self):
inst = get_object_or_error(
Node,
Q('institution_id', 'eq', self.kwargs[self.institution_lookup_url_kwarg]),
display_name='institution',
allow_institution=True
)
return Institution(inst)
class InstitutionList(JSONAPIBaseView, generics.ListAPIView, ODMFilterMixin):
"""
Paginated list of verified Institutions affiliated with COS
##Institution Attributes
OSF Institutions have the "institutions" `type`.
name type description
=========================================================================
name string title of the institution
id string unique identifier in the OSF
logo_path string a path to the institution's static logo
#This Request/Response
"""
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
base_permissions.TokenHasScope,
)
required_read_scopes = [CoreScopes.INSTITUTION_READ]
required_write_scopes = [CoreScopes.NULL]
model_class = Institution
serializer_class = InstitutionSerializer
view_category = 'institutions'
view_name = 'institution-list'
def get_default_odm_query(self):
return Q('_id', 'ne', None)
# overrides ListAPIView
def get_queryset(self):
return Institution.find(self.get_query_from_request())
class InstitutionDetail(JSONAPIBaseView, generics.RetrieveAPIView, InstitutionMixin):
""" Details about a given institution.
##Attributes
OSF Institutions have the "institutions" `type`.
name type description
=========================================================================
name string title of the institution
id string unique identifier in the OSF
logo_path string a path to the institution's static logo
##Relationships
###Nodes
List of nodes that have this institution as its primary institution.
###Users
List of users that are affiliated with this institution.
##Links
self: the canonical api endpoint of this institution
html: this institution's page on the OSF website
#This Request/Response
"""
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
base_permissions.TokenHasScope,
)
required_read_scopes = [CoreScopes.INSTITUTION_READ]
required_write_scopes = [CoreScopes.NULL]
model_class = Institution
serializer_class = InstitutionSerializer
view_category = 'institutions'
view_name = 'institution-detail'
# overrides RetrieveAPIView
def get_object(self):
return self.get_institution()
class InstitutionNodeList(JSONAPIBaseView, ODMFilterMixin, generics.ListAPIView, InstitutionMixin):
"""Nodes that have selected an institution as their primary institution.
##Permissions
Only public nodes or ones in which current user is a contributor.
"""
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
base_permissions.TokenHasScope,
)
required_read_scopes = [CoreScopes.INSTITUTION_READ, CoreScopes.NODE_BASE_READ]
required_write_scopes = [CoreScopes.NULL]
model_class = Node
serializer_class = NodeSerializer
view_category = 'institutions'
view_name = 'institution-nodes'
ordering = ('-date_modified', )
base_node_query = (
Q('is_deleted', 'ne', True) &
Q('is_folder', 'ne', True) &
Q('is_registration', 'eq', False) &
Q('parent_node', 'eq', None)
)
# overrides ODMFilterMixin
def get_default_odm_query(self):
base_query = self.base_node_query
user = self.request.user
permission_query = Q('is_public', 'eq', True)
if not user.is_anonymous():
permission_query = (permission_query | Q('contributors', 'eq', user._id))
query = base_query & permission_query
return query
# overrides RetrieveAPIView
def get_queryset(self):
inst = self.get_institution()
query = self.get_query_from_request()
return Node.find_by_institution(inst, query)
class InstitutionUserList(JSONAPIBaseView, ODMFilterMixin, generics.ListAPIView, InstitutionMixin):
"""Users that have been authenticated with the institution.
"""
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
base_permissions.TokenHasScope,
)
required_read_scopes = [CoreScopes.INSTITUTION_READ, CoreScopes.USERS_READ]
required_write_scopes = [CoreScopes.NULL]
model_class = User
serializer_class = UserSerializer
view_category = 'institutions'
view_name = 'institution-users'
# overrides ODMFilterMixin
def get_default_odm_query(self):
inst = self.get_institution()
query = Q('_affiliated_institutions', 'eq', inst.node)
return query
# overrides RetrieveAPIView
def get_queryset(self):
query = self.get_query_from_request()
return User.find(query)
class InstitutionAuth(JSONAPIBaseView, generics.CreateAPIView):
permission_classes = (
drf_permissions.IsAuthenticated,
base_permissions.TokenHasScope,
)
serializer_class = JSONAPISerializer
required_read_scopes = [CoreScopes.NULL]
required_write_scopes = [CoreScopes.NULL]
authentication_classes = (InstitutionAuthentication, )
view_category = 'institutions'
view_name = 'institution-auth'
def post(self, request, *args, **kwargs):
return Response(status=status.HTTP_204_NO_CONTENT)
class InstitutionRegistrationList(InstitutionNodeList):
"""Registrations have selected an institution as their primary institution.
"""
view_name = 'institution-registrations'
base_node_query = (
Q('is_deleted', 'ne', True) &
Q('is_folder', 'ne', True) &
Q('is_registration', 'eq', True)
)
|
Python
| 0
|
@@ -2179,16 +2179,43 @@
-list'%0A%0A
+ ordering = ('name', )%0A%0A
def
|
c73d8fe3f83fb245095cf8f45c15aa8ec1982143
|
Update views.py
|
app/grandchallenge/groups/views.py
|
app/grandchallenge/groups/views.py
|
from dal import autocomplete
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.mixins import UserPassesTestMixin
from django.contrib.messages.views import SuccessMessageMixin
from django.db.models import CharField, Q, Value
from django.db.models.functions import Concat
from django.utils.html import format_html
from django.views.generic import FormView
from guardian.mixins import (
LoginRequiredMixin,
PermissionRequiredMixin as ObjectPermissionRequiredMixin,
)
from guardian.shortcuts import get_objects_for_user
from grandchallenge.verifications.models import Verification
class UserGroupUpdateMixin(
LoginRequiredMixin,
ObjectPermissionRequiredMixin,
SuccessMessageMixin,
FormView,
):
raise_exception = True
def get_permission_object(self):
return self.obj
@property
def obj(self):
raise NotImplementedError
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context.update({"object": self.obj, "role": self.get_form().role})
return context
def get_success_url(self):
return self.obj.get_absolute_url()
def form_valid(self, form):
form.add_or_remove_user(obj=self.obj)
return super().form_valid(form)
class UserAutocomplete(
LoginRequiredMixin, UserPassesTestMixin, autocomplete.Select2QuerySetView
):
def test_func(self):
allowed_perms = [
"algorithms.change_algorithm",
"organizations.change_organization",
"archives.change_archive",
"reader_studies.change_readerstudy",
"workstations.change_workstation",
"algorithms.change_job",
]
# TODO reduce number of queries
return any(
get_objects_for_user(user=self.request.user, perms=perm,).exists()
for perm in allowed_perms
)
def get_queryset(self):
qs = (
get_user_model()
.objects.order_by("username")
.exclude(username=settings.ANONYMOUS_USER_NAME)
.annotate(
full_name=Concat(
"first_name",
Value(" "),
"last_name",
output_field=CharField(),
)
)
.select_related("verification", "user_profile")
)
if self.q:
qs = qs.filter(
Q(username__icontains=self.q)
| Q(email__icontains=self.q)
| Q(full_name__icontains=self.q)
| Q(verification__email__icontains=self.q)
)
return qs
def get_result_label(self, result):
try:
is_verified = result.verification.is_verified
except Verification.DoesNotExist:
is_verified = False
if is_verified:
return format_html(
'<img src="{}" width ="20" height ="20" style="vertical-align:top"> '
" <b>{}</b> {} "
'<i class="fas fa-user-check text-success">'
" Verified email address at {}",
result.user_profile.get_mugshot_url(),
result.get_username(),
result.get_full_name().title(),
result.verification.email.split("@")[1],
)
else:
return format_html(
'<img src="{}" width ="20" height ="20" style="vertical-align:top"> '
" <b>{}</b> {}",
result.user_profile.get_mugshot_url(),
result.get_username(),
result.get_full_name().title(),
)
|
Python
| 0
|
@@ -3129,16 +3129,20 @@
uccess%22%3E
+%3C/i%3E
'%0A
|
077ea35c78b750d4e091f62d38fe7f42e0d685bb
|
add token filters
|
api/rest/viewsets/xtas.py
|
api/rest/viewsets/xtas.py
|
from rest_framework.response import Response
from rest_framework.viewsets import ViewSet
from api.rest.viewsets.articleset import ArticleSetViewSetMixin
from api.rest.viewsets.project import ProjectViewSetMixin
from api.rest.viewsets.article import ArticleViewSetMixin
from api.rest.mixins import DatatablesMixin
from amcat.tools.amcatxtas import ANALYSES, get_result
import json
class XTasViewSet(ProjectViewSetMixin, ArticleSetViewSetMixin, ArticleViewSetMixin, ViewSet):
model_key = "xta"# HACK to get xtas in url. Sorry!
def retrieve(self, request, *args, **kargs):
aid = int(kargs['article'])
plugin = kargs['pk']
result = get_result(aid, plugin)
return Response({"results" : result})
def list(self, request, *args, **kargs):
plugins = ANALYSES.__dict__
return Response(plugins)
from rest_framework.serializers import Serializer
from amcat.models import Article, ArticleSet
from rest_framework.viewsets import ModelViewSet
import itertools
class ArticleXTasSerializer(Serializer):
@property
def module(self):
module = self.context['request'].GET.get('module')
if not module:
raise Exception("Please specify the NLP/xTas module to use "
"with a module= GET parameter")
elif not module in dir(ANALYSES):
raise Exception("Unknown module: {module}".format(**locals()))
return module
def field_to_native(self, obj, field_name):
result = super(ArticleXTasSerializer, self).field_to_native(obj, field_name)
if field_name == "results":
# flatting lists of tokens
result = itertools.chain(*result)
return result
def to_native(self, article):
saf = get_result(article.pk, self.module)
return list(self.get_xtas_results(article.pk, saf))
class ArticleLemmataSerializer(ArticleXTasSerializer):
@property
def filter_pos(self):
return self.context['request'].GET.get('pos1')
def output_token(self, token):
for key, vals in self.context['request'].GET.iterlists():
if key in token and token[key] not in vals:
return False
return True
def get_xtas_results(self, aid, saf):
if self.context['request'].GET.get('sources'):
return self.get_sources(aid, saf)
else:
return self.get_tokens(aid, saf)
def get_tokens(self, aid, saf):
for token in saf.get('tokens', []):
token["aid"] = aid
if self.output_token(token):
yield token
def get_sources(self, aid, saf):
if not 'tokens' in saf and 'sources' in saf:
return
tokendict = {t['id'] : t for t in saf['tokens']}
for sid, source in enumerate(saf['sources']):
for place, tokens in source.iteritems():
for tid in tokens:
token = tokendict[tid]
token["aid"] = aid
token["source_id"] = sid
token["source_place"] = place
yield token
class XTasLemmataViewSet(ProjectViewSetMixin, ArticleSetViewSetMixin, DatatablesMixin, ModelViewSet):
model_key = "token"
model = Article
model_serializer_class = ArticleLemmataSerializer
def filter_queryset(self, queryset):
queryset = super(XTasLemmataViewSet, self).filter_queryset(queryset)
# only(.) would be better on serializer, but meh
queryset = queryset.filter(articlesets_set=self.articleset).only("pk")
return queryset
|
Python
| 0.000001
|
@@ -2962,16 +2962,69 @@
ct%5Btid%5D%0A
+ if self.output_token(token):%0A
@@ -3046,32 +3046,36 @@
en%5B%22aid%22%5D = aid%0A
+
@@ -3115,32 +3115,36 @@
+
token%5B%22source_pl
@@ -3157,16 +3157,20 @@
= place%0A
+
|
7b30141365a009597bbaecaa70072bcd6fd6868e
|
support "power" command for SP1/SP2 devices
|
mqtt.py
|
mqtt.py
|
#!/usr/bin/env python
import paho.mqtt.client as paho # pip install paho-mqtt
import broadlink # pip install broadlink
import os
import sys
import time
import logging
import logging.config
import socket
import sched
from threading import Thread
# read initial config files
dirname = os.path.dirname(os.path.abspath(__file__)) + '/'
logging.config.fileConfig(dirname + 'logging.conf')
CONFIG = os.getenv('BROADLINKMQTTCONFIG', dirname + 'mqtt.conf')
class Config(object):
def __init__(self, filename=CONFIG):
self.config = {}
execfile(filename, self.config)
def get(self, key, default='special empty value'):
v = self.config.get(key, default)
if v == 'special empty value':
logging.error("Configuration parameter '%s' should be specified" % key)
sys.exit(2)
return v
try:
cf = Config()
except Exception, e:
print "Cannot load configuration from file %s: %s" % (CONFIG, str(e))
sys.exit(2)
qos = cf.get('mqtt_qos', 0)
retain = cf.get('mqtt_retain', False)
topic_prefix = cf.get('mqtt_topic_prefix', 'broadlink/')
# noinspection PyUnusedLocal
def on_message(mosq, device, msg):
command = msg.topic[len(topic_prefix):]
if command == 'temperature': # internal notification
return
logging.debug("Received MQTT message " + msg.topic + " " + str(msg.payload))
file = dirname + "commands/" + command
action = str(msg.payload)
try:
if action == '' or action == 'auto':
record_or_replay(device, file)
elif action == 'record':
record(device, file)
elif action == 'replay':
replay(device, file)
else:
logging.debug("Unrecognized MQTT message " + action)
except Exception:
logging.exception("I/O error")
# noinspection PyUnusedLocal
def on_connect(mosq, device, result_code):
topic = topic_prefix + '#'
logging.debug("Connected to MQTT broker, subscribing to topic " + topic)
mqttc.subscribe(topic, qos)
# noinspection PyUnusedLocal
def on_disconnect(mosq, device, rc):
logging.debug("OOOOPS! Broadlink disconnects")
time.sleep(10)
def record_or_replay(device, file):
if os.path.isfile(file):
replay(device, file)
else:
record(device, file)
def record(device, file):
logging.debug("Recording command to file " + file)
# receive packet
device.enter_learning()
ir_packet = None
attempt = 0
while ir_packet is None and attempt < 6:
time.sleep(5)
ir_packet = device.check_data()
attempt = attempt + 1
if ir_packet is not None:
# write to file
directory = os.path.dirname(file)
if not os.path.exists(directory):
os.makedirs(directory)
with open(file, 'wb') as f:
f.write(str(ir_packet).encode('hex'))
logging.debug("Done")
else:
logging.warn("No command received")
def replay(device, file):
logging.debug("Replaying command from file " + file)
with open(file, 'rb') as f:
ir_packet = f.read()
device.send_data(ir_packet.decode('hex'))
def get_device(cf):
device_type = cf.get('device_type', 'lookup')
if device_type == 'lookup':
local_address = cf.get('local_address', None)
lookup_timeout = cf.get('lookup_timeout', 20)
devices = broadlink.discover(timeout=lookup_timeout) if local_address is None else \
broadlink.discover(timeout=lookup_timeout, local_ip_address=local_address)
if len(devices) == 0:
logging.error('No Broadlink device found')
sys.exit(2)
if len(devices) > 1:
logging.error('More than one Broadlink device found (' + ', '.join([d.host for d in devices]) + ')')
sys.exit(2)
return devices[0]
elif device_type == 'test':
class TestDevice:
type = 'test'
host = 'test'
def auth(self):
pass
def check_temperature(self):
return 23.5
return TestDevice()
else:
host = (cf.get('device_host'), 80)
mac = bytearray.fromhex(cf.get('device_mac').replace(':', ' '))
if device_type == 'rm':
return broadlink.rm(host=host, mac=mac)
elif device_type == 'sp1':
return broadlink.sp1(host=host, mac=mac)
elif device_type == 'sp2':
return broadlink.sp2(host=host, mac=mac)
elif device_type == 'a1':
return broadlink.a1(host=host, mac=mac)
elif device_type == 'mp1':
return broadlink.mp1(host=host, mac=mac)
else:
logging.error('Incorrect device configured: ' + device_type)
sys.exit(2)
def broadlink_rm_temperature_timer(scheduler, delay, device):
scheduler.enter(delay, 1, broadlink_rm_temperature_timer, [scheduler, delay, device])
temperature = str(device.check_temperature())
topic = topic_prefix + "temperature"
logging.debug("Sending RM temperature " + temperature + " to topic " + topic)
mqttc.publish(topic, temperature, qos=qos, retain=retain)
class TimerThread(Thread):
def __init__(self, s):
Thread.__init__(self)
self.s = s
def run(self):
self.s.run()
if __name__ == '__main__':
device = get_device(cf)
device.auth()
logging.debug('Connected to %s Broadlink device at %s' % (device.type, device.host))
clientid = cf.get('mqtt_clientid', 'broadlink-%s' % os.getpid())
# initialise MQTT broker connection
mqttc = paho.Client(clientid, clean_session=cf.get('mqtt_clean_session', False), userdata=device)
mqttc.on_message = on_message
mqttc.on_connect = on_connect
mqttc.on_disconnect = on_disconnect
mqttc.will_set('clients/broadlink', payload="Adios!", qos=0, retain=False)
# Delays will be: 3, 6, 12, 24, 30, 30, ...
# mqttc.reconnect_delay_set(delay=3, delay_max=30, exponential_backoff=True)
mqttc.username_pw_set(cf.get('mqtt_username'), cf.get('mqtt_password'))
mqttc.connect(cf.get('mqtt_broker', 'localhost'), int(cf.get('mqtt_port', '1883')), 60)
broadlink_rm_temperature_interval = cf.get('broadlink_rm_temperature_interval', 0)
if broadlink_rm_temperature_interval > 0:
scheduler = sched.scheduler(time.time, time.sleep)
scheduler.enter(broadlink_rm_temperature_interval, 1, broadlink_rm_temperature_timer, [scheduler, broadlink_rm_temperature_interval, device])
# scheduler.run()
tt = TimerThread(scheduler)
tt.daemon = True
tt.start()
while True:
try:
mqttc.loop_forever()
except socket.error:
time.sleep(5)
except KeyboardInterrupt:
sys.exit(0)
|
Python
| 0.000005
|
@@ -1281,16 +1281,181 @@
return
+%0A if command == 'power':%0A if device.type == 'SP1' or device.type == 'SP2':%0A device.set_power(1 if msg.payload == 'on' else 0)%0A return
%0A%0A lo
|
2a5e84e1c4d9c8e4c4236e1eccfa580406a29b6b
|
Add failing test
|
tests/functional/test_new_resolver_errors.py
|
tests/functional/test_new_resolver_errors.py
|
from tests.lib import create_basic_wheel_for_package
def test_new_resolver_conflict_requirements_file(tmpdir, script):
create_basic_wheel_for_package(script, "base", "1.0")
create_basic_wheel_for_package(script, "base", "2.0")
create_basic_wheel_for_package(
script, "pkga", "1.0", depends=["base==1.0"],
)
create_basic_wheel_for_package(
script, "pkgb", "1.0", depends=["base==2.0"],
)
req_file = tmpdir.joinpath("requirements.txt")
req_file.write_text("pkga\npkgb")
result = script.pip(
"install",
"--no-cache-dir", "--no-index",
"--find-links", script.scratch_path,
"-r", req_file,
expect_error=True,
)
message = "package versions have conflicting dependencies"
assert message in result.stderr, str(result)
def test_new_resolver_conflict_constraints_file(tmpdir, script):
create_basic_wheel_for_package(script, "pkg", "1.0")
constrats_file = tmpdir.joinpath("constraints.txt")
constrats_file.write_text("pkg!=1.0")
result = script.pip(
"install",
"--no-cache-dir", "--no-index",
"--find-links", script.scratch_path,
"-c", constrats_file,
"pkg==1.0",
expect_error=True,
)
assert "ResolutionImpossible" in result.stderr, str(result)
message = "The user requested (constraint) pkg!=1.0"
assert message in result.stdout, str(result)
|
Python
| 0.000138
|
@@ -1,8 +1,20 @@
+import sys%0A%0A
from tes
@@ -57,16 +57,48 @@
_package
+, create_test_package_with_setup
%0A%0A%0Adef t
@@ -1452,20 +1452,914 @@
stdout, str(result)%0A
+%0A%0Adef test_new_resolver_requires_python_error(script):%0A compatible_python = %22%3E=%7B0.major%7D.%7B0.minor%7D%22.format(sys.version_info)%0A incompatible_python = %22%3C%7B0.major%7D.%7B0.minor%7D%22.format(sys.version_info)%0A%0A pkga = create_test_package_with_setup(%0A script,%0A name=%22pkga%22,%0A version=%221.0%22,%0A python_requires=compatible_python,%0A )%0A pkgb = create_test_package_with_setup(%0A script,%0A name=%22pkgb%22,%0A version=%221.0%22,%0A python_requires=incompatible_python,%0A )%0A%0A # This always fails because pkgb can never be satisfied.%0A result = script.pip(%22install%22, %22--no-index%22, pkga, pkgb, expect_error=True)%0A%0A # The error message should mention the Requires-Python: value causing the%0A # conflict, not the compatible one.%0A assert incompatible_python in result.stderr, str(result)%0A assert compatible_python not in result.stderr, str(result)%0A
|
eaa92ab6a207b5b7c10b15948eb37d16f3005ee8
|
fix pandas compat
|
statsmodels/compat/pandas.py
|
statsmodels/compat/pandas.py
|
from __future__ import absolute_import
from distutils.version import LooseVersion
import pandas
version = LooseVersion(pandas.__version__)
pandas_lte_0_19_2 = version <= LooseVersion('0.19.2')
pandas_gt_0_19_2 = version > LooseVersion('0.19.2')
try:
from pandas.api.types import is_numeric_dtype # noqa:F401
except ImportError:
from pandas.core.common import is_numeric_dtype # noqa:F401
if version >= '0.20':
try:
from pandas.tseries import offsets as frequencies
except ImportError:
from pandas.tseries import frequencies
data_klasses = (pandas.Series, pandas.DataFrame, pandas.Panel)
else:
try:
import pandas.tseries.frequencies as frequencies
except ImportError:
from pandas.core import datetools as frequencies # noqa
data_klasses = (pandas.Series, pandas.DataFrame, pandas.Panel,
pandas.WidePanel)
try:
import pandas.testing as testing
except ImportError:
import pandas.util.testing as testing
assert_frame_equal = testing.assert_frame_equal
assert_index_equal = testing.assert_index_equal
assert_series_equal = testing.assert_series_equal
|
Python
| 0.000001
|
@@ -242,17 +242,118 @@
.19.2')%0A
+pandas_ge_20_0 = version %3E= LooseVersion('0.20.0')%0Apandas_ge_25_0 = version %3E= LooseVersion('0.25.0')
%0A
-
%0Atry:%0A
@@ -506,25 +506,152 @@
%0Aif
-version %3E= '0.20'
+pandas_ge_25_0:%0A from pandas.tseries import frequencies # noqa:F401%0A data_klasses = (pandas.Series, pandas.DataFrame)%0Aelif pandas_ge_20_0
:%0A
|
1e3f3e387230ac500289fe4064b24999d9727abd
|
use MongoClient instead of Connection if pymongo >= 2.4
|
mtop.py
|
mtop.py
|
#!/usr/bin/python
#
# Copyright 2011 Allan Beaufour
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from optparse import OptionParser
import sys
from pymongo.connection import Connection
from pymongo.errors import AutoReconnect
from lib.runner import Runner
def main():
parser = OptionParser(usage='mtop.py [options]\nSee also: https://github.com/beaufour/mtop')
parser.add_option('-s', '--server',
dest='server', default='localhost',
help='connect to mongo on SERVER', metavar='SERVER')
parser.add_option('-d', '--delay',
dest='delay', type=int, default=1000,
help='update every MS', metavar='MS')
(options, _) = parser.parse_args()
try:
connection = Connection(options.server, slave_okay=True)
except AutoReconnect, ex:
print 'Connection to %s failed: %s' % (options.server, str(ex))
return -1
runner = Runner(connection, options.delay)
rc = runner.run()
if rc == -3:
print 'Screen size too small'
return rc
if __name__ == '__main__':
sys.exit(main())
|
Python
| 0.000001
|
@@ -642,49 +642,22 @@
ys%0A%0A
-from pymongo.connection import Connection
+import pymongo
%0Afro
@@ -1218,16 +1218,447 @@
try:%0A
+ if hasattr(pymongo, 'version_tuple') and pymongo.version_tuple%5B0%5D %3E= 2 and pymongo.version_tuple%5B1%5D %3E= 4:%0A from pymongo import MongoClient%0A from pymongo.read_preferences import ReadPreference%0A connection = MongoClient(host=options.server,%0A read_preference=ReadPreference.SECONDARY)%0A else:%0A from pymongo.connection import Connection%0A
|
9591f7179886024c6feae00f5876d5889595ee5f
|
Fix linking wrong user when multiple users have same student_id.
|
app/service/user_service.py
|
app/service/user_service.py
|
import bcrypt
from flask_babel import _
from app.enums import FileCategory
from app.exceptions import ResourceNotFoundException, ValidationException, \
AuthorizationException, BusinessRuleException
from app.repository import user_repository
from app.service import file_service, mail_service
from app.utils import copernica
def set_password(user_id, password):
"""Set the new password for user with id."""
password = bcrypt.hashpw(password.encode('utf-8'), bcrypt.gensalt())
user = get_user_by_id(user_id)
user.password = password
user_repository.save(user)
return user
def find_user_by_email(email):
"""Retrieve the user by email or return None."""
return user_repository.find_user_by_email(email)
def get_user_by_email(email):
"""Retrieve the user by email, throw error if not found."""
user = find_user_by_email(email)
if not user:
raise ResourceNotFoundException("user", email)
return user
def get_user_by_id(user_id):
"""Retrieve the user by id, throw error if not found."""
user = find_by_id(user_id)
if not user:
raise ResourceNotFoundException('user', user_id)
return user
def find_by_id(user_id):
"""Retrieve the user or return None."""
return user_repository.find_by_id(user_id)
def find_user_by_student_id(student_id):
"""Retrieve the user or return None."""
return user_repository.find_user_by_student_id(student_id)
def get_user_by_student_id(student_id):
"""Retrieve the user by student id, throw error if not found."""
user = find_user_by_student_id(student_id)
if not user:
raise ResourceNotFoundException("user", student_id)
return user
def set_confirmed_student_id(user, student_id):
# Check if there is not another account with the same student ID
# that is already confirmed
other_user = find_user_by_student_id(student_id)
if other_user is not None and other_user != user:
raise BusinessRuleException("Student ID already linked to other user.")
# Find all users with the same student ID (unconfirmed)
# and set it to None
other_users = user_repository.find_all_users_with_unconfirmed_student_id(
student_id)
for user in other_users:
user.student_id = None
# Set the confirmed student ID of the user
user.student_id = student_id
user.student_id_confirmed = True
# Save everything at once
user_repository.save_all(other_users + [user])
def set_unconfirmed_student_id(user, student_id):
user.student_id = student_id
user.student_id_confirmed = False
user_repository.save(user)
def remove_student_id(user):
user.student_id = None
user.student_id_confirmed = False
user_repository.save(user)
def find_members():
"""Find all users which are marked as member."""
return user_repository.find_members()
def get_user_by_login(email, password):
user = user_repository.find_user_by_email(email)
if not user:
raise ResourceNotFoundException('user', email)
if user.disabled:
raise AuthorizationException("User is disabled.")
if not validate_password(user, password):
raise ValidationException("Invalid password.")
return user
def validate_password(user, password): # type: (User, str) -> bool
submitted_hash = bcrypt.hashpw(password, user.password)
if submitted_hash == user.password:
return True
else:
return False
def user_has_avatar(user_id):
user = get_user_by_id(user_id)
return user.avatar_file_id is not None
def remove_avatar(user_id):
user = get_user_by_id(user_id)
_file = file_service.get_file_by_id(user.avatar_file_id)
user.avatar_file_id = None
user_repository.save(user)
file_service.delete_file(_file)
def set_avatar(user_id, file_data):
"""
Upload the new avatar.
Checks if the file type is allowed if so removes any
previous uploaded avatars.
"""
user = get_user_by_id(user_id)
# Remove old avatar
if user.avatar_file_id is not None:
old_file = file_service.get_file_by_id(user.avatar_file_id)
user.avatar_file_id = None
else:
old_file = None
_file = file_service.add_file(FileCategory.USER_AVATAR,
file_data, file_data.filename)
user.avatar_file_id = _file.id
if old_file:
file_service.delete_file(old_file)
user_repository.save(user)
def register_new_user(email, password, first_name, last_name, student_id,
education_id, birth_date, study_start,
receive_information, phone_nr, address,
zip_, city, country, locale, link_student_id=False):
if find_user_by_email(email) is not None:
raise BusinessRuleException(
'A user with the same email address already exists.')
user = user_repository.create_user()
user.email = email
user.password = bcrypt.hashpw(password, bcrypt.gensalt())
user.first_name = first_name
user.last_name = last_name
user.student_id = student_id
user.education_id = education_id
user.birth_date = birth_date
user.study_start = study_start
user.receive_information = receive_information
user.phone_nr = phone_nr
user.address = address
user.zip = zip_
user.city = city
user.country = country
user.locale = locale
users = [user]
if link_student_id:
user.student_id_confirmed = True
unconfirmed_users = user_repository. \
find_all_users_with_unconfirmed_student_id(user.student_id)
for u in unconfirmed_users:
u.student_id = None
users.append(u)
user_repository.save_all(users)
copernica.update_user(user)
if locale == 'nl':
mail_template = 'email/sign_up_nl.html'
else:
mail_template = 'email/sign_up_en.html'
mail_service.send_mail(
user.email, _('Welcome to via, %(name)s', name=user.first_name),
mail_template, user=user)
return user
|
Python
| 0
|
@@ -2215,24 +2215,30 @@
d)%0A%0A for
+other_
user in othe
@@ -2246,32 +2246,38 @@
_users:%0A
+other_
user.student_id
|
6c409362c6bf00f03700fadfc14e87dd93033ff9
|
use 'get_variables'
|
atest/testdata/core/resources_and_variables/vars_from_cli2.py
|
atest/testdata/core/resources_and_variables/vars_from_cli2.py
|
scalar_from_cli_varfile = 'This value is not taken into use because this ' \
+ 'variable already exists in vars_from_cli.py'
scalar_from_cli_varfile_2 = 'Variable from second variable file from cli'
|
Python
| 0.000001
|
@@ -1,28 +1,71 @@
+def get_variables():%0A return %7B%0A '
scalar_from_cli_varfile = 'T
@@ -63,19 +63,22 @@
file
- =
+' : (
'This va
lue
@@ -77,10 +77,13 @@
s va
+riab
l
-u
e is
@@ -106,25 +106,24 @@
use
-because this ' %5C%0A
+'%0A
@@ -146,19 +146,19 @@
-+ 'variable
+'because it
alr
@@ -172,16 +172,57 @@
ists in
+'%0A '
vars_fro
@@ -230,17 +230,28 @@
_cli.py'
-%0A
+),%0A '
scalar_f
@@ -271,11 +271,12 @@
le_2
- =
+': (
'Var
@@ -311,14 +311,69 @@
ile
-from cli'
+'%0A 'from cli')%0A %7D%0A%0A%0A
%0A
|
9377a9add86ee6716b83f78674e885aa6ef451f3
|
Add requires_micro decorators to microtvm tests (#6747)
|
tests/python/unittest/test_micro_artifact.py
|
tests/python/unittest/test_micro_artifact.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Unit tests for the artifact module."""
import json
import os
import shutil
from tvm.contrib import util
from tvm.micro import artifact
FILE_LIST = ["label1", "label2", "label12", "unlabelled"]
TEST_METADATA = {"foo": "bar"}
TEST_LABELS = {"label1": ["label1", "label12"], "label2": ["label2", "label12"]}
def build_artifact(artifact_path, immobile=False):
os.mkdir(artifact_path)
for f in FILE_LIST:
with open(os.path.join(artifact_path, f), "w") as lib_f:
lib_f.write(f"{f}\n")
sub_dir = os.path.join(artifact_path, "sub_dir")
os.mkdir(sub_dir)
os.symlink("label1", os.path.join(artifact_path, "rel_symlink"))
os.symlink("label2", os.path.join(artifact_path, "abs_symlink"), "label2")
os.symlink(
os.path.join(artifact_path, "sub_dir"), os.path.join(artifact_path, "abs_dir_symlink")
)
art = artifact.Artifact(artifact_path, TEST_LABELS, TEST_METADATA, immobile=immobile)
return art
def test_basic_functionality():
temp_dir = util.tempdir()
artifact_path = temp_dir.relpath("foo")
art = build_artifact(artifact_path)
assert art.abspath("bar") == os.path.join(artifact_path, "bar")
for label, paths in TEST_LABELS.items():
assert art.label(label) == paths
assert art.label_abspath(label) == [os.path.join(artifact_path, p) for p in paths]
def test_archive():
temp_dir = util.tempdir()
art = build_artifact(temp_dir.relpath("foo"))
# Create archive
archive_path = art.archive(temp_dir.temp_dir)
assert archive_path == temp_dir.relpath("foo.tar")
# Inspect created archive
unpack_dir = temp_dir.relpath("unpack")
os.mkdir(unpack_dir)
shutil.unpack_archive(archive_path, unpack_dir)
for path in FILE_LIST:
with open(os.path.join(unpack_dir, "foo", path)) as f:
assert f.read() == f"{path}\n"
with open(os.path.join(unpack_dir, "foo", "metadata.json")) as metadata_f:
metadata = json.load(metadata_f)
assert metadata["version"] == 2
assert metadata["labelled_files"] == TEST_LABELS
assert metadata["metadata"] == TEST_METADATA
# Unarchive and verify basic functionality
unarchive_base_dir = temp_dir.relpath("unarchive")
unarch = artifact.Artifact.unarchive(archive_path, unarchive_base_dir)
assert unarch.metadata == TEST_METADATA
assert unarch.labelled_files == TEST_LABELS
for f in FILE_LIST:
assert os.path.exists(os.path.join(unarchive_base_dir, f))
def test_metadata_only():
temp_dir = util.tempdir()
base_dir = temp_dir.relpath("foo")
art = build_artifact(base_dir)
artifact_path = art.archive(temp_dir.relpath("foo.artifact"), metadata_only=True)
unarch_base_dir = temp_dir.relpath("bar")
unarch = artifact.Artifact.unarchive(artifact_path, unarch_base_dir)
assert unarch.base_dir == base_dir
for p in unarch.label_abspath("label1") + unarch.label_abspath("label2"):
assert os.path.exists(p)
os.unlink(art.abspath("label1"))
with open(art.abspath("label2"), "w+") as f:
f.write("changed line\n")
try:
artifact.Artifact.unarchive(artifact_path, os.path.join(temp_dir.temp_dir, "bar2"))
assert False, "unarchive should raise error"
except artifact.ArchiveModifiedError as err:
assert str(err) == (
"Files in metadata-only archive have been modified:\n"
" * label1: original file not found\n"
" * label2: sha256 mismatch: expected "
"6aa3c5668c8794c791400e19ecd7123949ded1616eafb0395acdd2d896354e83, got "
"ed87db21670a81819d65eccde87c5ae0243b2b61783bf77e9b27993be9a3eca0"
)
if __name__ == "__main__":
test_basic_functionality()
test_archive()
test_metadata_only()
# TODO: tests for dir symlinks, symlinks out of bounds, loading malformed artifact tars.
|
Python
| 0
|
@@ -857,16 +857,27 @@
t shutil
+%0Aimport tvm
%0A%0Afrom t
@@ -903,39 +903,8 @@
til%0A
-from tvm.micro import artifact%0A
%0A%0AFI
@@ -1622,16 +1622,52 @@
%0A )%0A%0A
+ from tvm.micro import artifact%0A%0A
art
@@ -1766,16 +1766,44 @@
n art%0A%0A%0A
+@tvm.testing.requires_micro%0A
def test
@@ -2193,27 +2193,91 @@
%5D%0A%0A%0A
-def test_archive():
+@tvm.testing.requires_micro%0Adef test_archive():%0A from tvm.micro import artifact%0A
%0A
@@ -3394,33 +3394,97 @@
)%0A%0A%0A
-def test_metadata_only():
+@tvm.testing.requires_micro%0Adef test_metadata_only():%0A from tvm.micro import artifact%0A
%0A
|
7193bf919a49e09b0be6ff492f2fd774900a8c93
|
set option group title to '$CommandName Options'
|
pip/basecommand.py
|
pip/basecommand.py
|
"""Base Command class, and related routines"""
import os
from pkgutil import walk_packages
import socket
import sys
import tempfile
import traceback
import time
import optparse
from pip.log import logger
from pip.download import urlopen
from pip.exceptions import (BadCommand, InstallationError, UninstallationError,
CommandError)
from pip.backwardcompat import StringIO
from pip.baseparser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
from pip.status_codes import SUCCESS, ERROR, UNKNOWN_ERROR, VIRTUALENV_NOT_FOUND
from pip.util import get_prog
__all__ = ['Command']
# for backwards compatibiliy
get_proxy = urlopen.get_proxy
class Command(object):
name = None
usage = None
hidden = False
def __init__(self, main_parser):
parser_kw = {
'usage' : self.usage,
'prog' : '%s %s' % (get_prog(), self.name),
'formatter' : UpdatingDefaultsHelpFormatter(),
'add_help_option' : False,
'name' : self.name,
}
self.main_parser = main_parser
self.parser = ConfigOptionParser(**parser_kw)
# Commands should add options to this option group
self.cmd_opts = optparse.OptionGroup(self.parser, 'Command Options')
# Re-add all options and option groups.
for group in main_parser.option_groups:
self._copy_option_group(self.parser, group)
# Copies all general options from the main parser.
self._copy_options(self.parser, main_parser.option_list)
def _copy_options(self, parser, options):
"""Populate an option parser or group with options."""
for option in options:
if not option.dest or option.dest == 'help':
continue
parser.add_option(option)
def _copy_option_group(self, parser, group):
"""Copy option group (including options) to another parser."""
new_group = optparse.OptionGroup(parser, group.title)
self._copy_options(new_group, group.option_list)
parser.add_option_group(new_group)
def merge_options(self, initial_options, options):
# Make sure we have all global options carried over
for attr in ['log', 'proxy', 'require_venv',
'log_explicit_levels', 'log_file',
'timeout', 'default_vcs',
'skip_requirements_regex',
'no_input', 'exists_action']:
setattr(options, attr, getattr(initial_options, attr) or getattr(options, attr))
options.quiet += initial_options.quiet
options.verbose += initial_options.verbose
def setup_logging(self):
pass
def main(self, args, initial_options):
options, args = self.parser.parse_args(args)
self.merge_options(initial_options, options)
level = 1 # Notify
level += options.verbose
level -= options.quiet
level = logger.level_for_integer(4-level)
complete_log = []
logger.consumers.extend(
[(level, sys.stdout),
(logger.DEBUG, complete_log.append)])
if options.log_explicit_levels:
logger.explicit_levels = True
self.setup_logging()
if options.no_input:
os.environ['PIP_NO_INPUT'] = '1'
if options.exists_action:
os.environ['PIP_EXISTS_ACTION'] = ''.join(options.exists_action)
if options.require_venv:
# If a venv is required check if it can really be found
if not os.environ.get('VIRTUAL_ENV'):
logger.fatal('Could not find an activated virtualenv (required).')
sys.exit(VIRTUALENV_NOT_FOUND)
if options.log:
log_fp = open_logfile(options.log, 'a')
logger.consumers.append((logger.DEBUG, log_fp))
else:
log_fp = None
socket.setdefaulttimeout(options.timeout or None)
urlopen.setup(proxystr=options.proxy, prompting=not options.no_input)
exit = SUCCESS
store_log = False
try:
status = self.run(options, args)
# FIXME: all commands should return an exit status
# and when it is done, isinstance is not needed anymore
if isinstance(status, int):
exit = status
except (InstallationError, UninstallationError):
e = sys.exc_info()[1]
logger.fatal(str(e))
logger.info('Exception information:\n%s' % format_exc())
store_log = True
exit = ERROR
except BadCommand:
e = sys.exc_info()[1]
logger.fatal(str(e))
logger.info('Exception information:\n%s' % format_exc())
store_log = True
exit = ERROR
except CommandError:
e = sys.exc_info()[1]
logger.fatal('ERROR: %s' % e)
logger.info('Exception information:\n%s' % format_exc())
exit = ERROR
except KeyboardInterrupt:
logger.fatal('Operation cancelled by user')
logger.info('Exception information:\n%s' % format_exc())
store_log = True
exit = ERROR
except:
logger.fatal('Exception:\n%s' % format_exc())
store_log = True
exit = UNKNOWN_ERROR
if log_fp is not None:
log_fp.close()
if store_log:
log_fn = options.log_file
text = '\n'.join(complete_log)
try:
log_fp = open_logfile(log_fn, 'w')
except IOError:
temp = tempfile.NamedTemporaryFile(delete=False)
log_fn = temp.name
log_fp = open_logfile(log_fn, 'w')
logger.fatal('Storing complete log in %s' % log_fn)
log_fp.write(text)
log_fp.close()
return exit
def format_exc(exc_info=None):
if exc_info is None:
exc_info = sys.exc_info()
out = StringIO()
traceback.print_exception(*exc_info, **dict(file=out))
return out.getvalue()
def open_logfile(filename, mode='a'):
"""Open the named log file in append mode.
If the file already exists, a separator will also be printed to
the file to separate past activity from current activity.
"""
filename = os.path.expanduser(filename)
filename = os.path.abspath(filename)
dirname = os.path.dirname(filename)
if not os.path.exists(dirname):
os.makedirs(dirname)
exists = os.path.exists(filename)
log_fp = open(filename, mode)
if exists:
log_fp.write('%s\n' % ('-'*60))
log_fp.write('%s run on %s\n' % (sys.argv[0], time.strftime('%c')))
return log_fp
|
Python
| 0.000087
|
@@ -1189,16 +1189,78 @@
n group%0A
+ optgroup_name = '%25s Options' %25 self.name.capitalize()%0A
@@ -1313,25 +1313,21 @@
er,
-'Command Options'
+optgroup_name
)%0A%0A
|
d8f0cb110393aeabf0fdadebdff324d6f656f4c3
|
return set of unique triples
|
pipeline/writer.py
|
pipeline/writer.py
|
from pipeline import *
import json
import pickle
import os
class JsonWriter(BasePipeline):
def __init__(self, outputfolder, basefilename=None, filesize=10000, startfile=0):
"""
when attached to the pipeline this file log all json
:param outputfolder: folder to save output files in
:param basefilename: filename prefix to add before all file names
:param filesize:
"""
self.outputfolder = outputfolder
if not os.path.exists(outputfolder):
os.makedirs(outputfolder)
self.basefilename = basefilename
self.filesize = filesize
self.counter = 0 + startfile
self.buffer = []
def run(self, document):
self.counter += 1
self.buffer.append(document.toJSON())
if self.counter % self.filesize == 0:
self.flush()
return document
def flush(self):
filename = "%s-%s.json" % (self.counter-self.filesize, self.counter)
filename = "%s_%s" % (self.basefilename, filename) if self.basefilename is not None else filename
filename = os.path.join(self.outputfolder, filename)
with open(filename, 'w') as outfile:
json.dump(self.buffer, outfile)
print "Saved file %s" % filename
del self.buffer
self.buffer = []
class CustomeWriterTriples(JsonWriter):
def __init__(self, outputfolder, basefilename=None, filesize=10000, startfile=0):
#super(CostumeWriterTriples, self).__init__(outputfolder, basefilename, filesize, startfile)
JsonWriter.__init__(self, outputfolder, basefilename, filesize, startfile)
def run(self, document):
self.counter += 1
triples = self.createTriples(document)
self.buffer.append(triples)
if self.counter % self.filesize == 0:
self.flush()
return document
def createTriples(self, document):
triples = {}
triples['triples'] = []
triples['additionalTriples'] = []
triples['summary'] = document.text
for t in document.triples:
# check if main enitity of document is subject or object in the triple
if t.subject.uri == document.docid:
str_triple = t.subject.uri + ' ' + t.predicate.uri + ' ' + t.object.uri
triples['triples'].append(str_triple)
elif t.object.uri == document.docid:
str_triple = t.subject.uri + ' ' + t.predicate.uri + ' ' + t.object.uri
triples['additionalTriples'].append(str_triple)
return triples
def flush(self):
filename = "%s-%s-triples.pkl" % (self.counter-self.filesize, self.counter)
filename = "%s_%s" % (self.basefilename, filename) if self.basefilename is not None else filename
filename = os.path.join(self.outputfolder, filename)
with open(filename, 'w') as outfile:
pickle.dump(self.buffer, outfile)
print "Saved file %s" % filename
del self.buffer
self.buffer = []
class CustomeWriterEntities(JsonWriter):
def __init__(self, outputfolder, basefilename=None, filesize=10000, startfile=0):
JsonWriter.__init__(self, outputfolder, basefilename, filesize, startfile)
def run(self, document):
self.counter += 1
entities = self.createEntities(document)
self.buffer.append(entities)
if self.counter % self.filesize == 0:
self.flush()
return document
def createEntities(self, document):
entities = []
for e in document.entities:
entity = {}
entity['URI'] = e.uri
entity['offset'] = e.boundaries[0]
entity['surfaceForm'] = e.surfaceform
entity['propertyplaceholder'] = e.property_placeholder
entity['typeplaceholder'] = e.type_placeholder
entity['annotator'] = e.annotator
entities.append(entity)
entities = sorted(entities, key=lambda x: x['offset'])
return entities
def flush(self):
filename = "%s-%s-entities.pkl" % (self.counter-self.filesize, self.counter)
filename = "%s_%s" % (self.basefilename, filename) if self.basefilename is not None else filename
filename = os.path.join(self.outputfolder, filename)
with open(filename, 'w') as outfile:
pickle.dump(self.buffer, outfile)
print "Saved file %s" % filename
del self.buffer
self.buffer = []
|
Python
| 0.000278
|
@@ -2598,23 +2598,34 @@
return
+list(set(
triples
+))
%0A%0A de
|