| commit (string, 40) | subject (string, 1–3.25k) | old_file (string, 4–311) | new_file (string, 4–311) | old_contents (string, 0–26.3k) | lang (3 classes) | proba (float64, 0–1) | diff (string, 0–7.82k) |
|---|---|---|---|---|---|---|---|
0fe76a38aff965aca9f672b48ed4a4933ee10161
|
add an argument taskid to EventLoopProgressReportWriter.write()
|
AlphaTwirl/EventReader/EventLoopProgressReportWriter.py
|
AlphaTwirl/EventReader/EventLoopProgressReportWriter.py
|
# Tai Sakuma <tai.sakuma@cern.ch>
from AlphaTwirl.ProgressBar import ProgressReport

##____________________________________________________________________________||
class EventLoopProgressReportWriter(object):
    def write(self, component, event):
        return ProgressReport(name = component.name, done = event.iEvent + 1, total = event.nEvents)

##____________________________________________________________________________||
|
Python
| 0.000001
|
@@ -223,16 +223,24 @@
te(self,
+ taskid,
compone
@@ -281,16 +281,29 @@
sReport(
+%0A
name = c
@@ -316,16 +316,28 @@
nt.name,
+%0A
done =
@@ -353,16 +353,28 @@
ent + 1,
+%0A
total =
@@ -387,16 +387,54 @@
.nEvents
+,%0A taskid = taskid%0A
)%0A%0A##___
|
662cc443f7c32182aaef89e5b61e90797b7e3e58
|
Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/81d27bd006f86cc3fd3d78a7193583ab9d18367a.
|
third_party/tf_runtime/workspace.bzl
|
third_party/tf_runtime/workspace.bzl
|
"""Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "bd4c5dc54997aaffe6f37a802b106c3ac88f150f"
TFRT_SHA256 = "a3ee3c259c5d7ea631177a75195b35bbfb695d69ad70adf4b0830ee2d91a9625"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
)
|
Python
| 0.000002
|
@@ -210,133 +210,133 @@
= %22
-bd4c5dc54997aaffe6f37a802b106c3ac88f150f%22%0A TFRT_SHA256 = %22a3ee3c259c5d7ea631177a75195b35bbfb695d69ad70adf4b0830ee2d91a9625
+81d27bd006f86cc3fd3d78a7193583ab9d18367a%22%0A TFRT_SHA256 = %22f7cafc8d2b512ff3be61dc5a3d8a3a5bcc3e749b213c1afa4909116b90710e2e
%22%0A%0A
|
d5813abf5b6b5ac142880781d9bb021b63670928
|
Update AndroidGatewayTester python testdriver for new Android protocol
|
AndroidGatewayPlugin/Testdriver/AndroidGatewayTester.py
|
AndroidGatewayPlugin/Testdriver/AndroidGatewayTester.py
|
#!/usr/bin/env python

#Test driver for Android gateway plugin to test deserialization of messages.

import sys
import socket
import struct
import zlib
import time
import AmmoMessages_pb2

class GatewayTestClient:
  def __init__(self, host, port):
    self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    self.sock.connect((host, int(port)))

  def sendMessageWrapper(self, msg):
    serializedMsg = msg.SerializeToString()
    self.sock.sendall(struct.pack("<I", len(serializedMsg))) #little-endian byte order for now
    self.sock.sendall(struct.pack("<i", zlib.crc32(serializedMsg)))
    print serializedMsg
    self.sock.sendall(serializedMsg);

  def receiveMessage(self):
    (messageSize,) = struct.unpack("<I", self.sock.recv(4));
    (checksum,) = struct.unpack("<i", self.sock.recv(4));
    protobufMsg = ""
    while len(protobufMsg) < messageSize:
      receivedData = self.sock.recv(messageSize - len(protobufMsg))
      protobufMsg += receivedData
    calculatedChecksum = zlib.crc32(protobufMsg)
    if calculatedChecksum != checksum:
      print "Checksum error!"
      return None
    msg = AmmoMessages_pb2.MessageWrapper()
    msg.ParseFromString(protobufMsg)
    return msg

if __name__ == "__main__":
  print "Android Gateway Tester"
  if len(sys.argv) != 4:
    print "Usage:", sys.argv[0], "host port message-type"
    print '''
where message-type is one of:"
authenticate : always run, this a dummy actually anything would work.
subscribe : subscribe to type:edu.vanderbilt.isis.ammo.Test.
push : send a data message of topic type:edu.vanderbilt.isis.ammo.Test.
'''
    exit(-1)

  print "Creating client"
  client = GatewayTestClient(sys.argv[1], sys.argv[2])
  print "Generating message"
  m = AmmoMessages_pb2.MessageWrapper()
  m.type = AmmoMessages_pb2.MessageWrapper.AUTHENTICATION_MESSAGE
  m.authentication_message.device_id = "device:test/device1"
  m.authentication_message.user_id = "user:test/user1"
  m.authentication_message.user_key = "dummy"
  print "Sending message"
  client.sendMessageWrapper(m)

  if(sys.argv[3] == "push"):
    #wait for auth response, then send a data push message
    response = client.receiveMessage()
    if response.authentication_result.result != AmmoMessages_pb2.AuthenticationResult.SUCCESS:
      print "Authentication failed..."
    m = AmmoMessages_pb2.MessageWrapper()
    m.type = AmmoMessages_pb2.MessageWrapper.DATA_MESSAGE
    m.data_message.uri = "type:edu.vanderbilt.isis.ammo.Test"
    m.data_message.mime_type = "text/plain"
    m.data_message.data = "This is some text being pushed out to the gateway."
    print "Sending data message"
    client.sendMessageWrapper(m)
  elif sys.argv[3] == "subscribe":
    #wait for auth response, then send a data push message
    response = client.receiveMessage()
    if response.authentication_result.result != AmmoMessages_pb2.AuthenticationResult.SUCCESS:
      print "Authentication failed..."
    m = AmmoMessages_pb2.MessageWrapper()
    m.type = AmmoMessages_pb2.MessageWrapper.SUBSCRIBE_MESSAGE
    m.subscribe_message.mime_type = "text/plain"
    print "Sending subscription request..."
    client.sendMessageWrapper(m)
    while True:
      msg = client.receiveMessage()
      print msg
      time.sleep(0.5)

  if(sys.argv[3] == "push"):
    m = AmmoMessages_pb2.MessageWrapper()
    m.type = AmmoMessages_pb2.MessageWrapper.DATA_MESSAGE
    m.data_message.uri = "type:edu.vanderbilt.isis.ammo.Test"
    m.data_message.mime_type = "text/plain"
    m.data_message.data = "This is some text being pushed out to the gateway."
    print "Sending data message"
    client.sendMessageWrapper(m)

  print "Closing socket"
|
Python
| 0
|
@@ -208,16 +208,51 @@
Client:%0A
+ HEADER_MAGIC_NUMBER = 0xfeedbeef%0A
def __
@@ -474,34 +474,32 @@
g()%0A
-self.sock.sendall(
+messageHeader =
struct.p
@@ -509,30 +509,121 @@
(%22%3CI
+Ii
%22,
-len(serializedMsg))
+self.HEADER_MAGIC_NUMBER, len(serializedMsg), zlib.crc32(serializedMsg))%0A self.sock.sendall(messageHeader
) #l
@@ -705,29 +705,29 @@
b.crc32(
-serializedMsg
+messageHeader
)))%0A
@@ -782,17 +782,16 @@
izedMsg)
-;
%0A %0A
@@ -820,17 +820,70 @@
f):%0A
-(
+messageHeader = self.sock.recv(3*4)%0A (magicNumber,
messageS
@@ -886,16 +886,25 @@
ageSize,
+ checksum
) = stru
@@ -920,37 +920,40 @@
(%22%3CI
+Ii
%22,
-self.sock.recv(4));%0A (c
+messageHeader)%0A (headerC
heck
@@ -1002,9 +1002,276 @@
(4))
-;
+%0A %0A if magicNumber != self.HEADER_MAGIC_NUMBER:%0A raise IOError(%22Invalid magic number received from gateway: %22 + magicNumber)%0A %0A if headerChecksum != zlib.crc32(messageHeader):%0A raise IOError(%22Invalid header checksum received from gateway%22)%0A
%0A
|
f0d76cae236cded0bfa6cc0f6486efb04daeb133
|
convert latency to int before posting to cbmonitor
|
cbagent/collectors/secondary_latency.py
|
cbagent/collectors/secondary_latency.py
|
import os.path

from cbagent.collectors import Collector


class SecondaryLatencyStats(Collector):

    COLLECTOR = "secondaryscan_latency"

    def _get_secondaryscan_latency(self):
        stats = {}
        if os.path.isfile(self.secondary_statsfile):
            with open(self.secondary_statsfile, 'rb') as fh:
                next(fh).decode()
                fh.seek(-400, 2)
                last = fh.readlines()[-1].decode()
                duration = last.split(',')[-1]
                stats = {}
                latency = duration.split(':')[1]
                latency = latency.rstrip()
                latency_key = duration.split(':')[0]
                latency_key = latency_key.strip()
                stats[latency_key] = latency
        return stats

    def sample(self):
        stats = self._get_secondaryscan_latency()
        if stats:
            self.update_metric_metadata(stats.keys())
            self.store.append(stats, cluster=self.cluster, collector=self.COLLECTOR)

    def update_metadata(self):
        self.mc.add_cluster()
|
Python
| 0
|
@@ -733,23 +733,28 @@
_key%5D =
+int(
latency
+)
%0A
|
69c9322827ed95ce845b49119bc58aa4f36d82bb
|
Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/ecf8607212b519546828e3fcc66f68985597a622.
|
third_party/tf_runtime/workspace.bzl
|
third_party/tf_runtime/workspace.bzl
|
"""Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "078534d79809852ea069d23bbacd2483ade18c11"
TFRT_SHA256 = "55905ff389c5294ac1ce4be5e3f0af2d171e6061aa886fb66d59e3636f03412b"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
|
Python
| 0.000002
|
@@ -210,133 +210,133 @@
= %22
-078534d79809852ea069d23bbacd2483ade18c11%22%0A TFRT_SHA256 = %2255905ff389c5294ac1ce4be5e3f0af2d171e6061aa886fb66d59e3636f03412b
+ecf8607212b519546828e3fcc66f68985597a622%22%0A TFRT_SHA256 = %22545c097a241ff80701e54d1e088762f27a7494980f01c08fee3ce3aeb4fd22cf
%22%0A%0A
|
66284e57accec5977d606fc91a0b28177b352eb4
|
Add end-to-end integration testing for all compression types
|
test/test_producer.py
|
test/test_producer.py
|
import pytest

from kafka import KafkaConsumer, KafkaProducer

from test.conftest import version
from test.testutil import random_string


@pytest.mark.skipif(not version(), reason="No KAFKA_VERSION set")
def test_end_to_end(kafka_broker):
    connect_str = 'localhost:' + str(kafka_broker.port)
    producer = KafkaProducer(bootstrap_servers=connect_str,
                             max_block_ms=10000,
                             value_serializer=str.encode)
    consumer = KafkaConsumer(bootstrap_servers=connect_str,
                             group_id=None,
                             consumer_timeout_ms=10000,
                             auto_offset_reset='earliest',
                             value_deserializer=bytes.decode)

    topic = random_string(5)

    for i in range(1000):
        producer.send(topic, 'msg %d' % i)
    producer.flush()
    producer.close()

    consumer.subscribe([topic])
    msgs = set()
    for i in range(1000):
        try:
            msgs.add(next(consumer).value)
        except StopIteration:
            break

    assert msgs == set(['msg %d' % i for i in range(1000)])
|
Python
| 0
|
@@ -201,42 +201,225 @@
t%22)%0A
-def test_end_to_end(kafka_broker):
+@pytest.mark.parametrize(%22compression%22, %5BNone, 'gzip', 'snappy', 'lz4'%5D)%0Adef test_end_to_end(kafka_broker, compression):%0A%0A # LZ4 requires 0.8.2%0A if compression == 'lz4' and version() %3C (0, 8, 2):%0A return%0A
%0A
@@ -523,32 +523,32 @@
rs=connect_str,%0A
-
@@ -572,32 +572,91 @@
block_ms=10000,%0A
+ compression_type=compression,%0A
|
7c12b82cb410540dfa3b65150ce39924b5793bce
|
handle package.json exceptions
|
python_package_manager/utils/package_json.py
|
python_package_manager/utils/package_json.py
|
import os
import json

def get_dependencies():
	package_file_path = os.path.join(os.getcwd(), 'package.json')
	with open(package_file_path, 'r') as infile:
		package_dict = json.load(infile)
		dependencies = package_dict.get("pythonDependencies", [])
		dependencies_dev = package_dict.get("pythonDevDependencies", [])
	return dependencies

def write_dependencies(dependencies):
	package_file_path = os.path.join(os.getcwd(), 'package.json')
	with open(package_file_path, 'r') as infile:
		package_dict = json.load(infile)
	package_dict["pythonDependencies"] = dependencies
	with open(package_file_path, 'w') as outfile:
		json.dump(package_dict, outfile, indent=2)
|
Python
| 0.000003
|
@@ -95,32 +95,39 @@
'package.json')%0A
+%09try:%0A%09
%09with open(packa
@@ -150,32 +150,33 @@
') as infile:%0A%09%09
+%09
package_dict = j
@@ -192,16 +192,17 @@
infile)%0A
+%09
%09%09depend
@@ -253,16 +253,17 @@
s%22, %5B%5D)%0A
+%09
%09%09depend
@@ -321,16 +321,76 @@
s%22, %5B%5D)%0A
+%09except:%0A%09%09print(%22unable to read package.json%22)%0A%09%09return %5B%5D%0A
%09return
@@ -504,16 +504,23 @@
.json')%0A
+%09try:%0A%09
%09with op
@@ -557,16 +557,17 @@
infile:%0A
+%09
%09%09packag
@@ -595,16 +595,17 @@
file)%0A%09%09
+%09
package_
@@ -646,16 +646,80 @@
dencies%0A
+%09except:%0A%09%09print(%22unable to read package.json%22)%0A%09%09return%0A%09try:%0A%09
%09with op
@@ -759,16 +759,17 @@
file:%0A%09%09
+%09
json.dum
@@ -803,8 +803,66 @@
dent=2)%0A
+%09except:%0A%09%09print(%22unable to write package.json%22)%0A%09%09return%0A
|
b0dd18d4e4e18dafae9d93848f633afc396c91b4
|
remove outdated/misguided meta __variables__, https://mail.python.org/pipermail/python-dev/2001-March/013328.html
|
fastly/__init__.py
|
fastly/__init__.py
|
"""
"""
from fastly import *
__author__ = 'Tyler McMullen <tbmcmullen@gmail.com>'
__copyright__ = 'Copyright (c) 2012 Fastly Inc'
__license__ = 'BSD'
__version__ = '0.0.1'
__url__ = 'http://www.fastly.com/docs/fastly-py'
|
Python
| 0.000065
|
@@ -1,14 +1,4 @@
-%22%22%22%0A%0A%22%22%22%0A%0A
from
@@ -18,197 +18,4 @@
t *%0A
-%0A__author__ = 'Tyler McMullen %3Ctbmcmullen@gmail.com%3E'%0A__copyright__ = 'Copyright (c) 2012 Fastly Inc'%0A__license__ = 'BSD'%0A__version__ = '0.0.1'%0A__url__ = 'http://www.fastly.com/docs/fastly-py'%0A
|
bc02e845f4a8b726f7474efa77753c7de6fe600b
|
Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/300e7ac61cda0eb2ddb13b7f2ad850d80646adcd.
|
third_party/tf_runtime/workspace.bzl
|
third_party/tf_runtime/workspace.bzl
|
"""Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "ed6f666ac14b939d7303607c950b88b7d5607c46"
TFRT_SHA256 = "b99fed746abe39cb0b072e773af53a4c7189056737fc0118ef3b013c187660c9"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
)
|
Python
| 0.000001
|
@@ -210,133 +210,133 @@
= %22
-ed6f666ac14b939d7303607c950b88b7d5607c46%22%0A TFRT_SHA256 = %22b99fed746abe39cb0b072e773af53a4c7189056737fc0118ef3b013c187660c9
+300e7ac61cda0eb2ddb13b7f2ad850d80646adcd%22%0A TFRT_SHA256 = %222b79ada8dbacd5de1b868121822ffde58564a1f8749c4f3d91f8f951e76c3fbc
%22%0A%0A
|
c01cf49ec3cc6423d61e364ee06cc8907db7ba83
|
add utility method for getting sql checkpoints
|
pillowtop/checkpoints/manager.py
|
pillowtop/checkpoints/manager.py
|
from collections import namedtuple
from datetime import datetime
from dateutil import parser
import pytz
from pillowtop.checkpoints.util import get_formatted_current_timestamp
from pillowtop.dao.django import DjangoDocumentStore
from pillowtop.dao.exceptions import DocumentNotFoundError
from pillowtop.exceptions import PillowtopCheckpointReset
from pillowtop.logger import pillow_logging
from pillowtop.models import DjangoPillowCheckpoint
from pillowtop.pillow.interface import ChangeEventHandler


DocGetOrCreateResult = namedtuple('DocGetOrCreateResult', ['document', 'created'])


class PillowCheckpointManager(object):

    def __init__(self, dao):
        self._dao = dao

    def get_or_create_checkpoint(self, checkpoint_id):
        created = False
        try:
            checkpoint_doc = self._dao.get_document(checkpoint_id)
        except DocumentNotFoundError:
            checkpoint_doc = {'seq': '0', 'timestamp': get_formatted_current_timestamp()}
            self._dao.save_document(checkpoint_id, checkpoint_doc)
            created = True
        return DocGetOrCreateResult(checkpoint_doc, created)

    def reset_checkpoint(self, checkpoint_id):
        checkpoint_doc = self.get_or_create_checkpoint(checkpoint_id).document
        checkpoint_doc['old_seq'] = checkpoint_doc['seq']
        checkpoint_doc['seq'] = '0'
        checkpoint_doc['timestamp'] = get_formatted_current_timestamp()
        self._dao.save_document(checkpoint_id, checkpoint_doc)

    def update_checkpoint(self, checkpoint_id, checkpoint_doc):
        self._dao.save_document(checkpoint_id, checkpoint_doc)


class PillowCheckpoint(object):

    def __init__(self, dao, checkpoint_id):
        self._manager = PillowCheckpointManager(dao=dao)
        self.checkpoint_id = checkpoint_id
        self._last_checkpoint = None

    def get_or_create(self, verify_unchanged=False):
        result = self._manager.get_or_create_checkpoint(self.checkpoint_id)
        checkpoint, created = result
        if (verify_unchanged and self._last_checkpoint and
                str(checkpoint['seq']) != str(self._last_checkpoint['seq'])):
            raise PillowtopCheckpointReset(u'Checkpoint {} expected seq {} but found {} in database.'.format(
                self.checkpoint_id, self._last_checkpoint['seq'], checkpoint['seq'],
            ))
        self._last_checkpoint = checkpoint
        return result

    def update_to(self, seq):
        pillow_logging.info(
            "(%s) setting checkpoint: %s" % (self.checkpoint_id, seq)
        )
        checkpoint = self.get_or_create(verify_unchanged=True).document
        checkpoint['seq'] = seq
        checkpoint['timestamp'] = get_formatted_current_timestamp()
        self._manager.update_checkpoint(self.checkpoint_id, checkpoint)
        self._last_checkpoint = checkpoint

    def reset(self):
        return self._manager.reset_checkpoint(self.checkpoint_id)

    def touch(self, min_interval):
        """
        Update the checkpoint timestamp without altering the sequence.
        :param min_interval: minimum interval between timestamp updates
        """
        checkpoint = self.get_or_create(verify_unchanged=True).document
        now = datetime.now(tz=pytz.UTC)
        previous = self._last_checkpoint.get('timestamp')
        do_update = True
        if previous:
            diff = now - parser.parse(previous).replace(tzinfo=pytz.UTC)
            do_update = diff.total_seconds() >= min_interval
        if do_update:
            checkpoint['timestamp'] = now.isoformat()
            self._manager.update_checkpoint(self.checkpoint_id, checkpoint)


class PillowCheckpointEventHandler(ChangeEventHandler):

    def __init__(self, checkpoint, checkpoint_frequency):
        self.checkpoint = checkpoint
        self.checkpoint_frequency = checkpoint_frequency

    def fire_change_processed(self, change, context):
        if context.changes_seen % self.checkpoint_frequency == 0 and context.do_set_checkpoint:
            self.checkpoint.update_to(change['seq'])


def get_django_checkpoint_store():
    return DjangoDocumentStore(
        DjangoPillowCheckpoint, DjangoPillowCheckpoint.to_dict, DjangoPillowCheckpoint.from_dict,
    )
|
Python
| 0
|
@@ -4204,12 +4204,178 @@
dict,%0A )%0A
+%0A%0Adef get_default_django_checkpoint_for_legacy_pillow_class(pillow_class):%0A return PillowCheckpoint(get_django_checkpoint_store(), pillow_class.get_legacy_name())%0A
|
42a147b0dcc24ea51207cca020d2bfc6fa7bde46
|
Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/926650aa8e303d62814e45f709d16673501d96bc.
|
third_party/tf_runtime/workspace.bzl
|
third_party/tf_runtime/workspace.bzl
|
"""Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "d50aae4b79fb4aa5a3c4dd280004313c7f1fda51"
TFRT_SHA256 = "3d02021cbd499d749eeb4e3e6bdcd47a67695bfc145827c5821548c3c6f1494c"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
|
Python
| 0
|
@@ -228,133 +228,133 @@
= %22
-d50aae4b79fb4aa5a3c4dd280004313c7f1fda51%22%0A TFRT_SHA256 = %223d02021cbd499d749eeb4e3e6bdcd47a67695bfc145827c5821548c3c6f1494c
+926650aa8e303d62814e45f709d16673501d96bc%22%0A TFRT_SHA256 = %22f178d137127c3a67962362f596b8015fdcdc58271e1e3d692eba47b09d31402a
%22%0A%0A
|
aad4e64e0619e14fce898898b79d35e5c18ec8a2
|
Fix for version resolution in non-git-controlled directories.
|
fermipy/version.py
|
fermipy/version.py
|
# -*- coding: utf-8 -*-
# Author: Douglas Creager <dcreager@dcreager.net>
# This file is placed into the public domain.

# Calculates the current version number. If possible, this is the
# output of “git describe”, modified to conform to the versioning
# scheme that setuptools uses. If “git describe” returns an error
# (most likely because we're in an unpacked copy of a release tarball,
# rather than in a git working copy), then we fall back on reading the
# contents of the RELEASE-VERSION file.
#
# To use this script, simply import it your setup.py file, and use the
# results of get_git_version() as your package version:
#
# from version import *
#
# setup(
#     version=get_git_version(),
#     .
#     .
#     .
# )
#
# This will automatically update the RELEASE-VERSION file, if
# necessary. Note that the RELEASE-VERSION file should *not* be
# checked into git; please add it to your top-level .gitignore file.
#
# You'll probably want to distribute the RELEASE-VERSION file in your
# sdist tarballs; to do this, just create a MANIFEST.in file that
# contains the following line:
#
# include RELEASE-VERSION

__all__ = ("get_git_version")

import os
from subprocess import check_output

_refname = '$Format: %D$'
_tree_hash = '$Format: %t$'
_commit_info = '$Format:%cd by %aN$'
_commit_hash = '$Format: %h$'


def render_pep440(vcs):
    if vcs is None: return None
    tags = vcs.split('-')
    # Bare version number
    if len(tags) == 1:
        return tags[0]
    else:
        return tags[0] + '+' + '.'.join(tags[1:])


def call_git_describe(abbrev=4):

    try:
        dirname = os.path.abspath(os.path.dirname(__file__))
        line = check_output(['git', 'describe', '--abbrev=%d' % abbrev, '--dirty'],
                            cwd=os.path.join('..',dirname))
        return line.strip().decode('utf-8')
    except:
        return None


def read_release_keywords(keyword):
    refnames = keyword.strip()
    if refnames.startswith("$Format"): return None
    refs = set([r.strip() for r in refnames.strip("()").split(",")])
    TAG = "tag: "
    tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
    if not tags: return None
    return sorted(tags)[-1]


def read_release_version():
    import re
    dirname = os.path.abspath(os.path.dirname(__file__))
    try:
        f = open(os.path.join(dirname,"_version.py"), "rt")
        for line in f.readlines():
            m = re.match("__version__ = '([^']+)'", line)
            if m:
                ver = m.group(1)
                return ver
    except:
        return None
    return None


def write_release_version(version):
    dirname = os.path.abspath(os.path.dirname(__file__))
    f = open(os.path.join(dirname,"_version.py"), "wt")
    f.write("__version__ = '%s'\n" % version)
    f.close()


def get_git_version(abbrev=4):

    # Read in the version that's currently in _version.py.
    release_version = read_release_version()

    # First try to get the current version using “git describe”.
    git_version = call_git_describe(abbrev)
    git_version = render_pep440(git_version)

    # Try to deduce the version from keyword expansion
    keyword_version = read_release_keywords(_refname)
    keyword_version = render_pep440(keyword_version)

    # If that doesn't work, fall back on the value that's in
    # _version.py.
    if git_version is not None:
        version = git_version
    elif release_version is not None:
        version = release_version
    elif keyword_version is not None:
        version = keyword_version
    else:
        version = 'unknown'

    # If we still don't have anything, that's an error.
    if version is None:
        raise ValueError("Cannot find the version number!")

    # If the current version is different from what's in the
    # _version.py file, update the file to be current.
    if version != release_version and version != 'unknown':
        write_release_version(version)

    # Finally, return the current version.
    return version


if __name__ == "__main__":
    print(get_git_version())
|
Python
| 0
|
@@ -1162,16 +1162,34 @@
port os%0A
+import subprocess%0A
from sub
@@ -1339,16 +1339,349 @@
: %25h$'%0A%0A
+def capture_output(cmd,dirname):%0A %0A p = subprocess.Popen(cmd,%0A stdout=subprocess.PIPE,%0A stderr=subprocess.PIPE,%0A cwd=dirname)%0A p.stderr.close()%0A%0A output = p.stdout.readlines()%0A %0A if not output: return None%0A else: return output%5B0%5D.strip()%0A%0A
def rend
@@ -1932,29 +1932,16 @@
ev=4):%0A%0A
- try:%0A
dirn
@@ -1984,24 +1984,196 @@
e(__file__))
+%0A%0A has_git_tree = capture_output(%5B'git','rev-parse',%0A '--is-inside-work-tree'%5D,dirname)%0A%0A if not has_git_tree: return None%0A%0A try:
%0A lin
@@ -2236,20 +2236,8 @@
rev,
- '--dirty'%5D,
%0A
@@ -2265,38 +2265,32 @@
-cwd=os.path.join('..',
+ '--dirty'%5D,cwd=
dirname)
)%0A%0A
@@ -2285,17 +2285,16 @@
dirname)
-)
%0A%0A
|
4cbbe7c3ab891a11492f368d780a1416d37358ff
|
Change the method of generating content of GUID element
|
feedzilla/syndication.py
|
feedzilla/syndication.py
|
# -*- coding: utf-8 -*-
# Copyright: 2011, Grigoriy Petukhov
# Author: Grigoriy Petukhov (http://lorien.name)
# License: BSD
from django.contrib.syndication.views import Feed
from django.conf import settings

from feedzilla.models import Post


class PostFeed(Feed):
    title_template = 'feedzilla/feed/post_title.html'
    description_template = 'feedzilla/feed/post_description.html'

    title = settings.FEEDZILLA_SITE_TITLE
    description = settings.FEEDZILLA_SITE_DESCRIPTION
    link = '/'

    def items(self, obj):
        return Post.active_objects.all()\
                   .order_by('-created')[:settings.FEEDZILLA_PAGE_SIZE]

    #def item_title(self, item):
        #return item.name

    #def item_description(self, item):
        #return item.description

    def item_pubdate(self, item):
        return item.created

    def item_guid(self, item):
        return str(item.guid)
|
Python
| 0.000003
|
@@ -881,19 +881,14 @@
urn
-str(
item.
-guid)
+link
%0A
|
7744aca1edc6afd263ac386efa9a1e92a41c30aa
|
Add variable PY2
|
file_metadata/_compat.py
|
file_metadata/_compat.py
|
# -*- coding: utf-8 -*-
"""
Provides utilities to handle the python2 and python3 differences.
"""
from __future__ import (division, absolute_import, unicode_literals,
                        print_function)

import json
import re
import subprocess

try:  # Python 3
    from urllib.request import urlopen  # flake8: noqa (unused import)
except ImportError:  # Python 2
    from urllib2 import urlopen  # flake8: noqa (unused import)

try:  # pragma: no cover
    JSONDecodeError = json.decoder.JSONDecodeError
except AttributeError:  # pragma: no cover
    JSONDecodeError = ValueError


def check_output(*popenargs, **kwargs):
    """
    Run command with arguments and return its output.

    If the exit code was non-zero it raises a CalledProcessError. The
    CalledProcessError object will have the returncode and output attributes.

    The arguments are the same as for the Popen constructor. Example::

        >>> check_output(["echo", "hello world"]).strip()
        'hello world'

    The stdout argument is not allowed as it is used internally.
    To capture standard error in the result, use ``stderr=subprocess.STDOUT``::

        >>> check_output(["non_existent_file"], stderr=subprocess.STDOUT)
        Traceback (most recent call last):
        ...
        OSError: [Errno 2] No such file or directory
    """
    if 'stdout' in kwargs:
        raise ValueError('stdout argument not allowed, it will be overridden.')
    process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs)
    output, unused_err = process.communicate()
    retcode = process.poll()
    if retcode:
        exc = subprocess.CalledProcessError(retcode,
                                            kwargs.get("args", popenargs[0]))
        exc.output = output  # output attrib not there in python2.6
        raise exc
    return output


def ffprobe_parser(output):
    """
    Parse output from the older versions of avprode/ffprobe. The -of or
    -print_format argument was added in versions 0.9+. This allows json
    output. But in older versions like 0.8.17 which is used in ubuntu
    precise, json output is not possible. In such cases, this function
    can be used to parse the output.

    :param output: The INI like syntax from ffprobe.
    :return:       The parsed dict.
    """
    streams = re.findall('\[STREAM\](.*?)\[\/STREAM\]', output, re.S)
    _format = re.findall('\[FORMAT\](.*?)\[\/FORMAT\]', output, re.S)

    def parse_section(section):
        section_dict = {}
        for line in section.strip().splitlines():
            key, val = line.strip().split("=", 1)
            section_dict[key.strip()] = val.strip()
        return section_dict

    data = {}
    if streams:
        parsed_streams = [parse_section(stream) for stream in streams]
        data['streams'] = parsed_streams
    if _format:
        parsed_format = parse_section(_format[0])
        data['format'] = parsed_format
    return data
|
Python
| 0.000272
|
@@ -241,16 +241,27 @@
bprocess
+%0Aimport sys
%0A%0Atry:
@@ -592,16 +592,48 @@
eError%0A%0A
+PY2 = sys.version_info%5B0%5D == 2%0A%0A
%0Adef che
|
05451dd584f7d06da1d8e7fda16044c86b4615e1
|
Fix a bug in extracting subnetwork.
|
SIF.py
|
SIF.py
|
"""Module to work with network in SIF format.
http://wiki.cytoscape.org/Cytoscape_User_Manual/Network_Formats#SIF_Format
"""
class SIFNode(object):
def __init__(self, id):
self.id = id
self.edges = []
self.reverse= []
class SIFEdge(object):
def __init__(self, type, target):
self.type = type
self.target = target
class SIFNetwork(object):
def __init__(self):
self.nodes = {}
def parse(self, fp):
if isinstance(fp, basestring):
fp = open(fp, 'r')
for l in fp:
tmp = l.split()
source = tmp[0]
edge_type = tmp[1]
if source not in self.nodes:
source_node = SIFNode(source)
self.nodes[source] = source_node
else:
source_node = self.nodes[source]
for target in tmp[2:]:
if target not in self.nodes:
target_node = SIFNode(target)
self.nodes[target] = target_node
else:
target_node = self.nodes[target]
edge = SIFEdge(edge_type, target_node)
source_node.edges.append(edge)
r_edge = SIFEdge(edge_type, source_node)
target_node.reverse.append(r_edge)
fp.close()
def subnetwork(self, center_id, degree=1):
"""Find the subnetwork with up to a given degree neighbors,
regardless of direction.
"""
return self._subnetwork(center_id, degree)
def _subnetwork(self, id, degree=1, sub=None, visited=None):
if degree == 0:
return None
if visited is None:
visited = set()
if id in visited:
return None
if sub is None:
sub = SIFNetwork()
if id not in sub.nodes:
center = SIFNode(id)
sub.nodes[id] = center
else:
center = sub.nodes[id]
visited.add(id)
for edge in self.nodes[id].edges:
target_node = edge.target
if target_node.id in visited:
continue
if target_node.id not in sub.nodes:
new_target = SIFNode(target_node.id)
sub.nodes[target_node.id] = new_target
else:
new_target = sub.nodes[target_node.id]
center.edges.append(SIFEdge(edge.type, new_target))
new_target.reverse.append(SIFEdge(edge.type, center))
self._subnetwork(target_node.id, degree-1, sub, visited)
for edge in self.nodes[id].reverse:
target_node = edge.target
if target_node.id in visited:
continue
if target_node.id not in sub.nodes:
new_target = SIFNode(target_node.id)
sub.nodes[target_node.id] = new_target
else:
new_target = sub.nodes[target_node.id]
center.reverse.append(SIFEdge(edge.type, new_target))
new_target.edges.append(SIFEdge(edge.type, center))
self._subnetwork(target_node.id, degree-1, sub, visited)
return sub
def write(self, filename):
with open(filename, 'w') as fp:
for node in self.nodes.itervalues():
for edge in node.edges:
fp.write('%s\t%s\t%s\n' %
(node.id, edge.type, edge.target.id))
def to_json(self):
"""For use with Cytoscape.js"""
import json
js = []
for node in self.nodes.itervalues():
js.append({'group': 'nodes',
'data': {'id': node.id}
})
for edge in node.edges:
js.append({'group': 'edges',
'data': {
'source': node.id,
'target': edge.target.id,
'type': edge.type
}
})
return json.dumps(js)
# vim: ts=4 expandtab sw=4 sts=4 tw=78
|
Python
| 0
|
@@ -1331,17 +1331,16 @@
lose()%0A%0A
-%0A
def
@@ -1503,51 +1503,886 @@
-return self._subnetwork(center_id, degree)%0A
+sub = self._subnetwork(center_id, degree)%0A for n in sub.nodes.itervalues():%0A for edge in n.edges:%0A complement_flag = False%0A for r_edge in edge.target.reverse:%0A if n.id == r_edge.target.id and edge.type == r_edge.type:%0A complement_flag = True%0A break%0A if not complement_flag:%0A edge.target.reverse.append(SIFEdge(edge.type, n))%0A for edge in n.reverse:%0A complement_flag = False%0A for r_edge in edge.target.edges:%0A if n.id == r_edge.target.id and edge.type == r_edge.type:%0A complement_flag = True%0A break%0A if not complement_flag:%0A edge.target.edges.append(SIFEdge(edge.type, n))%0A%0A return sub
%0A%0A
@@ -2805,32 +2805,8 @@
%5Bid%5D
-%0A visited.add(id)
%0A%0A
@@ -3247,74 +3247,8 @@
t))%0A
- new_target.reverse.append(SIFEdge(edge.type, center))%0A
@@ -3761,72 +3761,8 @@
t))%0A
- new_target.edges.append(SIFEdge(edge.type, center))%0A
@@ -3819,32 +3819,56 @@
sub, visited)%0A%0A
+ visited.add(id)%0A
return s
|
26ad97dc7e6cdc6afa9d819665b35ca78e095811
|
sha1 ok
|
fillSimHBaseNewFormat.py
|
fillSimHBaseNewFormat.py
|
import os,sys
# http://happybase.readthedocs.org/en/latest/user.html
import happybase
import MySQLdb
import json
import time
import requests
import shutil
import hashlib
#import numpy as np

tmp_img_dl_dir = 'tmp_img_dl'

# MySQL connection infos
global_var = json.load(open('../conf/global_var_all.json'))
localhost=global_var['local_db_host']
localuser=global_var['local_db_user']
localpwd=global_var['local_db_pwd']
localdb=global_var['local_db_dbname']

# HBase connection infos
connection = happybase.Connection('10.1.94.57')
# use fields: meta:columbia_near_dups, meta:columbia_near_dups_dist
tab_aaron = connection.table('aaron_memex_ht-images')
#use field: image:hash
tab_hash = connection.table('image_hash')
# use field: images:images_doc
tab_samples = connection.table('dig_isi_cdr2_ht_images_sample')

# save sha1 in 'ht_images_cdrid_to_sha1_sample'
# save similarities in 'ht_columbia_similar_images_sample'
# save all image info in 'ht_images_infos_sample' with sha1 as rowkey and
# a JSON of all_cdr_ids, all_parents_cdr_ids, all_cdr_docs, all_images_htid, all_images_htadsid.
# [check if column exist, if id already there, append]

def mkpath(outpath):
    pos_slash=[pos for pos,c in enumerate(outpath) if c=="/"]
    for pos in pos_slash:
        try:
            os.mkdir(outpath[:pos])
        except:
            pass

def dlImage(url):
    pos_slash=[pos for pos,c in enumerate(url) if c=="/"]
    file_img=url[pos_slash[-1]:]
    outpath=os.path.join(tmp_img_dl_dir,file_img)
    mkpath(outpath)
    try:
        r = requests.get(url, stream=True, timeout=5)
        if r.status_code == 200:
            with open(outpath, 'wb') as f:
                r.raw.decode_content = True
                shutil.copyfileobj(r.raw, f)
        return outpath
    except Exception as inst:
        print "Download failed for img that should be saved at {} from url {}.".format(outpath,url)
        print inst
        return None

def getSHA1FromMySQL(image_id):
    sha1 = None
    db=MySQLdb.connect(host=localhost,user=localuser,passwd=localpwd,db=localdb)
    c=db.cursor()
    sql='SELECT sha1 FROM uniqueIds WHERE htid={}'.format(image_id)
    c.execute(sql)
    res=c.fetchall()
    if res:
        sha1=res[0][0]
    return sha1

def getSHA1FromFile(filepath):
    sha1 = hashlib.sha1()
    f = open(filepath, 'rb')
    try:
        sha1.update(f.read())
    finally:
        f.close()
    return sha1.hexdigest()

def computeSHA1(cdr_id):
    sha1hash = None
    # get image url
    one_row = tab_samples.row(cdr_id)
    one_url = one_row[1]['obj_stored_url']
    if not one_url:
        print "Could not get URL from cdrid {}.".format(cdr_id)
    else: # download
        localpath = dlImage(one_url)
        # compute sha1
        if localpath:
            sha1hash = getSHA1FromFile(localpath)
        else:
            print "Could not download image from URL {} of cdrid {}.".format(one_url,cdr_id)
    return sha1hash

def saveSHA1(image_id,cdr_id,sha1hash):
    # save in the two tables
    pass
    # old table indexed by htid 'tab_hash'
    # new table indexed by cdrid

def getSHA1(image_id,cdr_id):
    hash_row = tab_hash.row(str(image_id))
    sha1hash = None
    if hash_row:
        sha1hash = hash_row['image:hash']
    else:
        print "HBase Hash row is empty. Trying to get SHA1 from MySQL."
        # Get hash from MySQL...
        sha1hash = getSHA1FromMySQL(image_id)
        # or recompute from image if failed.
        if not sha1hash:
            print "Could not get SHA1 from MYSQL. Recomputing..."
            sha1hash = computeSHA1(cdr_id)
        if sha1hash:
            print "Saving SHA1 {} for image ({},{}) in HBase".format(sha1hash,cdr_id,image_id)
            saveSHA1(image_id,cdr_id,sha1hash.upper())
        else:
            print "Could not get/compute SHA1..."
    return sha1hash

def getSimIds(image_id):
    sim_row = tab_aaron.row(str(image_id))
    sim_ids = None
    if not sim_row:
        print "Sim row is empty. Skipping."
        return sim_ids # Should compute similarity from API?
    if 'meta:columbia_near_dups' in sim_row:
        sim_ids=(sim_row['meta:columbia_near_dups'], sim_row['meta:columbia_near_dups_dist'])
    else:
        print "Similarity not yet computed. Skipping"
    return sim_ids

if __name__ == '__main__':
    for one_row in tab_samples.scan():
        doc = one_row[1]['images:images_doc']
        jd = json.loads(doc)
        image_id=jd['crawl_data']['image_id']
        print image_id
        # TODO also get obj_parent, one_row[0] i.e. CDR_ID, crawl_data.memex_ht_id
        sha1 = getSHA1(image_id,one_row[0])
        print sha1
        if not sha1:
            time.sleep(1)
            continue
        sim_ids = getSimIds(image_id)
        if not sim_ids:
            time.sleep(1)
            continue
|
Python
| 0.99877
|
@@ -2543,28 +2543,102 @@
-one_url = one_row%5B1%5D
+print one_row%0A doc = one_row%5B'images:images_doc'%5D%0A jd = json.loads(doc)%0A one_url = jd
%5B'ob
|
9ad9eb083be4e1a834f2a91c581259d86a8f493a
|
fix integration test NBO encoding
|
tests/integration/wallet/test_transactions.py
|
tests/integration/wallet/test_transactions.py
|
import asyncio
from binascii import hexlify

from orchstr8.testcase import IntegrationTestCase, d2f
from lbryschema.claim import ClaimDict
from torba.constants import COIN
from lbrynet.wallet.transaction import Transaction
from lbrynet.wallet.account import generate_certificate

import lbryschema
lbryschema.BLOCKCHAIN_NAME = 'lbrycrd_regtest'


example_claim_dict = {
    "version": "_0_0_1",
    "claimType": "streamType",
    "stream": {
        "source": {
            "source": "d5169241150022f996fa7cd6a9a1c421937276a3275eb912790bd07ba7aec1fac5fd45431d226b8fb402691e79aeb24b",
            "version": "_0_0_1",
            "contentType": "video/mp4",
            "sourceType": "lbry_sd_hash"
        },
        "version": "_0_0_1",
        "metadata": {
            "license": "LBRY Inc",
            "description": "What is LBRY? An introduction with Alex Tabarrok",
            "language": "en",
            "title": "What is LBRY?",
            "author": "Samuel Bryan",
            "version": "_0_1_0",
            "nsfw": False,
            "licenseUrl": "",
            "preview": "",
            "thumbnail": "https://s3.amazonaws.com/files.lbry.io/logo.png"
        }
    }
}


class BasicTransactionTest(IntegrationTestCase):

    VERBOSE = False

    async def test_creating_updating_and_abandoning_claim_with_channel(self):
        await d2f(self.account.ensure_address_gap())

        address1, address2 = await d2f(self.account.receiving.get_addresses(2, only_usable=True))
        sendtxid1 = await self.blockchain.send_to_address(address1, 5)
        sendtxid2 = await self.blockchain.send_to_address(address2, 5)
        await self.blockchain.generate(1)
        await asyncio.wait([
            self.on_transaction_id(sendtxid1),
            self.on_transaction_id(sendtxid2),
        ])

        self.assertEqual(round(await d2f(self.account.get_balance(0))/COIN, 1), 10.0)

        cert, key = generate_certificate()
        cert_tx = await d2f(Transaction.claim(b'@bar', cert, 1*COIN, address1, [self.account], self.account))
        claim = ClaimDict.load_dict(example_claim_dict)
        claim = claim.sign(key, address1, hexlify(cert_tx.get_claim_id(0)))
        claim_tx = await d2f(Transaction.claim(b'foo', claim, 1*COIN, address1, [self.account], self.account))

        await self.broadcast(cert_tx)
        await self.broadcast(claim_tx)
        await asyncio.wait([  # mempool
            self.on_transaction(claim_tx),
            self.on_transaction(cert_tx),
        ])
        await self.blockchain.generate(1)
        await asyncio.wait([  # confirmed
            self.on_transaction(claim_tx),
            self.on_transaction(cert_tx),
        ])

        self.assertEqual(round(await d2f(self.account.get_balance(0))/COIN, 1), 8.0)
        self.assertEqual(round(await d2f(self.account.get_balance(0, True))/COIN, 1), 10.0)

        response = await d2f(self.ledger.resolve(0, 10, 'lbry://@bar/foo'))
        self.assertIn('lbry://@bar/foo', response)

        abandon_tx = await d2f(Transaction.abandon([claim_tx.outputs[0]], [self.account], self.account))
        await self.broadcast(abandon_tx)
        await self.on_transaction(abandon_tx)
        await self.blockchain.generate(1)
        await self.on_transaction(abandon_tx)

        # should not resolve, but does, why?
        # response = await d2f(self.ledger.resolve(0, 10, 'lbry://@bar/foo'))
        # self.assertNotIn('lbry://@bar/foo', response)
|
Python
| 0.000001
|
@@ -2175,16 +2175,22 @@
im_id(0)
+%5B::-1%5D
))%0A
|
20dc4b6d80842579740ed91ebb848446a0cecdbf
|
fix test_settings
|
test_settings.py
|
test_settings.py
|
from settings import *

ROOT_URLCONF = 'urls'

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': rel('mirosubs.sqlite3'),
    }
}

INSTALLED_APPS += ('django_nose', )
INSTALLED_APPS = list(INSTALLED_APPS)
INSTALLED_APPS.remove('mirosubs')
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
|
Python
| 0.000002
|
@@ -15,16 +15,75 @@
import *
+%0A__import__('dev-settings', globals(), locals(), %5B'*'%5D, -1)
%0A%0AROOT_U
|
555981d288b1e3970e2cb9432db3e72f57ba48b4
|
deal with zero args corner case and return correct type
|
finat/pyop2_interface.py
|
finat/pyop2_interface.py
|
try:
    from pyop2.pyparloop import Kernel
except:
    Kernel = None
from .interpreter import evaluate


def pyop2_kernel(kernel, kernel_args, interpreter=False):
    """Return a :class:`pyop2.Kernel` from the recipe and kernel data
    provided.

    :param kernel: The :class:`~.utils.Kernel` to map to PyOP2.
    :param kernel_args: The ordered list of Pymbolic variables constituting
        the kernel arguments, excluding the result of the recipe (the latter
        should be prepended to the argument list).
    :param interpreter: If set to ``True``, the kernel will be
        evaluated using the FInAT interpreter instead of generating a
        compiled kernel.
    :result: The :class:`pyop2.Kernel`
    """

    if Kernel is None:
        raise ImportError("pyop2 was not imported. Is it installed?")

    if set(kernel_args) != kernel.kernel_data.kernel_args:
        raise ValueError("Incomplete value list")

    if interpreter:
        def kernel_function(*args):
            context = {kernel_args: args[1:]}
            args[0][:] = evaluate(kernel.recipe, context, kernel.kernel_data)
        return (Kernel(kernel_function), kernel_args)
    else:
        raise NotImplementedError
|
Python
| 0
|
@@ -810,16 +810,41 @@
%0A if
+kernel_args and %5C%0A
set(kern
@@ -1136,17 +1136,16 @@
return
-(
Kernel(k
@@ -1163,22 +1163,8 @@
ion)
-, kernel_args)
%0A%0A
|
bc7899c989eae19812f4f527d89ccea138748477
|
Exclude data previously outside reliable_window
|
tests/test_sourcefinder/test_L15_12h_const.py
|
tests/test_sourcefinder/test_L15_12h_const.py
|
"""
Tests for simulated LOFAR datasets.
"""
import os
import unittest2 as unittest
import tkp.accessors.fitsimage
import tkp.sourcefinder.image as image
import tkp.utility.coordinates as coords
from tkp.testutil.decorators import requires_data
from tkp.testutil.data import DATAPATH
# The simulation code causes a factor of 2 difference in the
# measured flux.
FUDGEFACTOR = 0.5
# The different sections (observed, corrected, model) of the
# MeasurementSet contain different simulations.
corrected_fits = os.path.join(DATAPATH, 'L15_12h_const/corrected-all.fits')
observed_fits = os.path.join(DATAPATH, 'L15_12h_const/observed-all.fits')
all_fits = os.path.join(DATAPATH, 'L15_12h_const/model-all.fits')
class L15_12hConstObs(unittest.TestCase):
# Single, constant 1 Jy source at centre of image.
def setUp(self):
# Beam here is a random beam, in this case the WENSS beam
# without the declination dependence.
fitsfile = tkp.accessors.fitsimage.FitsImage(observed_fits,
beam=(54./3600, 54./3600, 0.))
self.image = image.ImageData(fitsfile.data, fitsfile.beam, fitsfile.wcs)
self.results = self.image.extract(det=10)
@requires_data(observed_fits)
def testNumSources(self):
self.assertEqual(len(self.results), 1)
@requires_data(observed_fits)
def testSourceProperties(self):
mysource = self.results.closest_to(1440, 1440)[0]
self.assertAlmostEqual(mysource.peak, 1.0*FUDGEFACTOR, 1)
@requires_data(observed_fits)
def tearDown(self):
del(self.results)
del(self.image)
class L15_12hConstCor(unittest.TestCase):
# Cross shape of 5 sources, 2 degrees apart, at centre of image.
def setUp(self):
# Beam here is a random beam, in this case the WENSS beam
# without the declination dependence.
fitsfile = tkp.accessors.fitsimage.FitsImage(corrected_fits,
beam=(54./3600, 54./3600, 0.))
self.image = image.ImageData(fitsfile.data, fitsfile.beam, fitsfile.wcs)
self.results = self.image.extract(det=10)
@requires_data(corrected_fits)
def testNumSources(self):
self.assertEqual(len(self.results), 5)
@requires_data(corrected_fits)
def testFluxes(self):
# All sources in this image are supposed to have the same flux.
# But they don't, because the simulation is broken, so this test
# checks they fall in a vaguely plausible range.
for mysource in self.results:
self.assert_(mysource.peak.value > 0.35)
self.assert_(mysource.peak.value < 0.60)
@requires_data(corrected_fits)
def testSeparation(self):
centre = self.results.closest_to(1440, 1440)[0]
# How accurate should the '2 degrees' be?
for mysource in filter(lambda src: src != centre, self.results):
self.assertAlmostEqual(round(
coords.angsep(centre.ra, centre.dec, mysource.ra, mysource.dec) /
60**2), 2)
def tearDown(self):
del(self.results)
del(self.image)
class L15_12hConstMod(unittest.TestCase):
# 1 Jy constant source at centre; 1 Jy (peak) transient 3 degrees away.
def setUp(self):
# This image is of the whole sequence, so obviously we won't see the
# transient varying. In fact, due to a glitch in the simulation
# process, it will appear smeared out & shouldn't be identified at
# all.
# Beam here is a random beam, in this case the WENSS beam
# without the declination dependence.
fitsfile = tkp.accessors.fitsimage.FitsImage(all_fits,
beam=(54./3600, 54./3600, 0.))
self.image = image.ImageData(fitsfile.data, fitsfile.beam, fitsfile.wcs)
self.results = self.image.extract(det=5)
@requires_data(all_fits)
def testNumSources(self):
self.assertEqual(len(self.results), 1)
@requires_data(all_fits)
def testFluxes(self):
self.results.sort(lambda x, y: cmp(y.peak, x.peak))
self.assertAlmostEqual(self.results[0].peak.value, 1.0*FUDGEFACTOR, 1)
def tearDown(self):
del(self.results)
del(self.image)
class FitToPointTestCase(unittest.TestCase):
def setUp(self):
# Beam here is a random beam, in this case the WENSS beam
# without the declination dependence.
fitsfile = tkp.accessors.fitsimage.FitsImage(corrected_fits,
beam=(54./3600, 54./3600, 0.))
self.my_im = image.ImageData(fitsfile.data, fitsfile.beam,
fitsfile.wcs)
@requires_data(corrected_fits)
def testFixed(self):
d = self.my_im.fit_to_point(1379.00938273, 1438.38801493, 20,
threshold=2, fixed='position')
self.assertAlmostEqual(d.x.value, 1379.00938273)
self.assertAlmostEqual(d.y.value, 1438.38801493)
@requires_data(corrected_fits)
def testUnFixed(self):
d = self.my_im.fit_to_point(1379.00938273, 1438.38801493, 20,
threshold=2, fixed=None)
self.assertAlmostEqual(d.x.value, 1379.00938273, 0)
self.assertAlmostEqual(d.y.value, 1438.38801493, 0)
if __name__ == '__main__':
unittest.main()
|
Python
| 0
|
@@ -3768,32 +3768,44 @@
am, fitsfile.wcs
+, radius=100
)%0A self.r
|
c676eb448a51c89b0f1aefed1c2328ec9b88b97a
|
Update pages_page_studio.py
|
regression/pages/studio/pages_page_studio.py
|
regression/pages/studio/pages_page_studio.py
|
"""
Extended Pages page for a course.
"""
from edxapp_acceptance.pages.common.utils import click_css
from edxapp_acceptance.pages.studio.utils import drag
from regression.pages.studio.utils import (
click_css_with_animation_enabled,
sync_on_notification
)
from regression.pages.studio.course_page_studio import CoursePageExtended
class PagesPageExtended(CoursePageExtended):
"""
Extended Pages page for a course.
"""
url_path = "tabs"
def is_browser_on_page(self):
return self.q(css='body.view-static-pages').visible
def add_page(self):
"""
Adds a new empty page.
"""
click_css_with_animation_enabled(
page=self,
css='.button.new-button.new-tab',
source_index=0,
require_notification=False
)
self.wait_for_element_visibility(
'.component.course-tab.is-movable', 'New page is not visible'
)
def edit_page(self, new_content, index=0):
"""
Edits the page present at the index passed.
Arguments:
new_content (str): New content to set.
index (int): Index of page
"""
click_css_with_animation_enabled(
page=self,
css='.action-button-text',
source_index=index,
require_notification=False
)
self.browser.execute_script(
'tinyMCE.activeEditor.setContent("{}")'.format(new_content)
)
self.browser.execute_script(
'document.querySelectorAll(".button.action-primary'
'.action-save")[0].click();'
)
sync_on_notification(self)
def delete_page(self, index=0):
"""
Deletes the page present at the index passed.
Arguments:
index (int): Index of page
"""
click_css_with_animation_enabled(
page=self,
css='.delete-button.action-button',
source_index=index,
require_notification=False
)
self.q(css='.prompt.warning button.action-primary ').first.click()
sync_on_notification(self)
def delete_all_pages(self):
"""
Deletes all pages.
"""
while self.get_custom_page_count() > 0:
self.delete_page()
def reload_and_wait_for_page(self):
"""
Reloads and waits for the newly added page to appear.
"""
self.browser.refresh()
self.wait_for_the_visibility_of_new_page()
def wait_for_the_visibility_of_new_page(self):
"""
Ensures that newly added page is rendered and is visible.
"""
self.wait_for_element_visibility(
'.delete-button.action-button', 'Added pages have been loaded.'
)
def get_custom_page_count(self):
"""
Returns the count of custom pages
"""
return len(self.q(css='.component.course-tab.is-movable'))
def get_page_content(self, index=0):
"""
Get the contents of a page present at the index passed.
Arguments:
index (int): Index of page
Returns:
str: Content of page.
"""
click_css(
page=self,
css='.action-button-text',
source_index=index,
require_notification=False
)
content = self.browser.execute_script(
'return tinyMCE.activeEditor.getContent()'
)
click_css(
page=self,
css='.button.action-cancel',
source_index=0,
require_notification=False
)
return content
def click_view_live_button(self):
"""
Clicks view live button on pages page and switches to new window
"""
self.q(css='.view-live-button').click()
self.browser.switch_to_window(self.browser.window_handles[-1])
def click_and_verify_see_an_example(self):
"""
Clicks see an example pop up on pages page and verifies pop up displays
"""
self.q(css='a[href="#preview-lms-staticpages"]').click()
self.wait_for_element_visibility(
'img[alt="Preview of Pages in your course"]', 'Pop up visibility'
)
def click_hide_show_toggle(self):
"""
Clicks hide/show toggle button
"""
toggle_checkbox_css = '.is-movable[data-tab-id="wiki"] ' \
'.action-visible [type="checkbox"]'
self.wait_for_element_presence(
toggle_checkbox_css, 'Toggle button presence'
)
self.browser.execute_script(
"$('{}').click()".format(toggle_checkbox_css)
)
sync_on_notification(self)
return 'Wiki'
def get_all_pages(self):
"""
Returns all pages
"""
temp = self.q(css='.course-tab .course-nav-item-header').text
all_pages = temp + self.q(css='.course-tab .xblock').text
return all_pages
def get_all_pages_count(self):
"""
Returns the count of all pages.
"""
return len(
self.q(
css='.course-tab'
).results
)
def is_page_configured_to_show(self):
"""
Check whether a page is configured to show of not.
Arguments:
index (int): Index of the page.
Returns:
bool: True if shown otherwise False.
"""
toggle_value = self.q(
css='.is-movable[data-tab-id="wiki"] '
'.action-visible [type="checkbox"]'
).results[0].get_attribute('checked')
if toggle_value:
return False
return True
def drag_and_drop(self, source_index, target_index):
"""
Drags and drops the page at source_index to the page at target_index
Args:
source_index (int):The index of element to be dragged and dropped
target_index (int):The index element to be dragged and dropped upon
"""
drag(self, source_index, target_index)
|
Python
| 0.000002
|
@@ -4699,32 +4699,61 @@
_css)%0A )%0A
+ self.wait_for_ajax()%0A
sync_on_
|
dfff8bf474663d4efc02fdd3344054e20a6385ca
|
Update test_smoothing.py
|
tests/test_smoothing.py
|
tests/test_smoothing.py
|
try:
    from . import generic as g
except BaseException:
    import generic as g


class SmoothTest(g.unittest.TestCase):
    def test_smooth(self):
        """
        Load a collada scene with pycollada.
        """
        m = g.trimesh.creation.icosahedron()
        m.vertices, m.faces = g.trimesh.remesh.subdivide_to_size(
            m.vertices, m.faces, 0.1)

        s = m.copy()
        f = m.copy()
        d = m.copy()

        assert m.is_volume

        # Equal Weights
        lap = g.trimesh.smoothing.laplacian_calculation(
            mesh=m, equal_weight=True)

        g.trimesh.smoothing.filter_laplacian(s, 0.5, 10, lap)
        g.trimesh.smoothing.filter_humphrey(f, 0.1, 0.5, 10, lap)
        g.trimesh.smoothing.filter_taubin(d, 0.5, 0.53, 10, lap)

        assert s.is_volume
        assert f.is_volume
        assert d.is_volume

        assert g.np.isclose(s.volume, m.volume, rtol=0.1)
        assert g.np.isclose(f.volume, m.volume, rtol=0.1)
        assert g.np.isclose(d.volume, m.volume, rtol=0.1)

        s = m.copy()
        f = m.copy()
        d = m.copy()

        # umbrella Weights
        lap = g.trimesh.smoothing.laplacian_calculation(m, equal_weight=False)

        g.trimesh.smoothing.filter_laplacian(s, 0.5, 10, lap)
        g.trimesh.smoothing.filter_humphrey(f, 0.1, 0.5, 10, lap)
        g.trimesh.smoothing.filter_taubin(d, 0.5, 0.53, 10, lap)

        assert s.is_volume
        assert f.is_volume
        assert d.is_volume

        assert g.np.isclose(s.volume, m.volume, rtol=0.1)
        assert g.np.isclose(f.volume, m.volume, rtol=0.1)
        assert g.np.isclose(d.volume, m.volume, rtol=0.1)


if __name__ == '__main__':
    g.trimesh.util.attach_to_log()
    g.unittest.main()
|
Python
| 0.000001
|
@@ -375,32 +375,53 @@
s = m.copy()%0A
+ q = m.copy()%0A
f = m.co
@@ -417,32 +417,32 @@
f = m.copy()%0A
-
d = m.co
@@ -644,32 +644,119 @@
cian(s, 0.5, 10,
+ False, True, lap)%0A g.trimesh.smoothing.filter_laplacian(q, 0.5, 10, True, True,
lap)%0A g.
@@ -896,32 +896,59 @@
ert s.is_volume%0A
+ assert q.is_volume%0A
assert f
@@ -1036,32 +1036,90 @@
lume, rtol=0.1)%0A
+ assert g.np.isclose(q.volume, m.volume, rtol=0.1)%0A
assert g
@@ -1232,32 +1232,53 @@
s = m.copy()%0A
+ q = m.copy()%0A
f = m.co
@@ -1459,32 +1459,119 @@
cian(s, 0.5, 10,
+ False, True, lap)%0A g.trimesh.smoothing.filter_laplacian(q, 0.5, 10, True, True,
lap)%0A g.
@@ -1683,33 +1683,41 @@
0.53, 10, lap)%0A
+
%0A
-
assert s
@@ -1719,32 +1719,59 @@
ert s.is_volume%0A
+ assert q.is_volume%0A
assert f
@@ -1801,32 +1801,32 @@
rt d.is_volume%0A%0A
-
assert g
@@ -1859,32 +1859,90 @@
lume, rtol=0.1)%0A
+ assert g.np.isclose(q.volume, m.volume, rtol=0.1)%0A
assert g
|
3f85183da738e337d51a8523524eb992e2dd29bf
|
Reorder tests
|
tests/test_tictactoe.py
|
tests/test_tictactoe.py
|
import unittest

from games import TicTacToe

class TestTicTacToe(unittest.TestCase):
    def setUp(self):
        self.game = TicTacToe()

    def test_cur_player_start(self):
        self.assertEqual(self.game.cur_player, 0)

    def test_cur_player_after_one_move(self):
        self.game.make_move(3)
        self.assertEqual(self.game.cur_player, 1)

    def test_cur_player_after_two_moves(self):
        self.game.make_move(3)
        self.game.make_move(7)
        self.assertEqual(self.game.cur_player, 0)

    def test_is_not_over_at_start(self):
        self.assertFalse(self.game.is_over())

    def test_is_over_at_end_of_game(self):
        self.game.make_moves([1, 4, 2, 5, 3])
        self.assertTrue(self.game.is_over())

    def test_make_move_returns_self(self):
        self.assertIs(self.game.make_move(1), self.game)

    def test_make_moves(self):
        self.game.make_moves([1, 2, 3])
        actual = self.game.legal_moves()
        expected = [4, 5, 6, 7, 8, 9]
        self.assertItemsEqual(actual, expected)

    def test_make_moves_returns_self(self):
        actual = self.game.make_moves([1, 2, 3])
        expected = self.game
        self.assertEquals(actual, expected)

    def test_legal_moves_start(self):
        actual = self.game.legal_moves()
        expected = [1, 2, 3, 4, 5, 6, 7, 8, 9]
        self.assertItemsEqual(actual, expected)

    def test_legal_moves_after_one_move(self):
        self.game.make_move(1)
        actual = self.game.legal_moves()
        expected = [2, 3, 4, 5, 6, 7, 8, 9]
        self.assertItemsEqual(actual, expected)

    def test_legal_moves_after_two_moves(self):
        self.game.make_move(3)
        self.game.make_move(7)
        actual = self.game.legal_moves()
        expected = [1, 2, 4, 5, 6, 8, 9]
        self.assertItemsEqual(actual, expected)

    def test_legal_moves_are_empty_when_is_over(self):
        self.game.make_moves([1, 4, 2, 5, 3])
        self.assertTrue(len(self.game.legal_moves()) == 0)

    def test_outcomes_win_first_player(self):
        self.game.make_moves([1, 4, 2, 5, 3])
        self.assertItemsEqual(self.game.outcomes(), ['W', 'L'])

    def test_outcomes_win_second_player(self):
        self.game.make_moves([1, 4, 2, 5, 9, 6])
        self.assertItemsEqual(self.game.outcomes(), ['L', 'W'])

    def test_outcomes_draw(self):
        self.game.make_moves([1, 3, 2, 4, 6, 5, 7, 8, 9])
        self.assertItemsEqual(self.game.outcomes(), ['D', 'D'])

    def test_copy(self):
        self.game.make_moves([1, 3, 2])
        clone = self.game.copy()
        self.assertItemsEqual(self.game.legal_moves(), clone.legal_moves())
        self.assertEqual(self.game.cur_player, clone.cur_player)
        self.assertEqual(self.game, clone)
|
Python
| 0
|
@@ -134,16 +134,299 @@
cToe()%0A%0A
+ def test_copy(self):%0A self.game.make_moves(%5B1, 3, 2%5D)%0A clone = self.game.copy()%0A self.assertItemsEqual(self.game.legal_moves(), clone.legal_moves())%0A self.assertEqual(self.game.cur_player, clone.cur_player)%0A self.assertEqual(self.game, clone)%0A%0A
def
@@ -2750,287 +2750,4 @@
'%5D)%0A
-%0A def test_copy(self):%0A self.game.make_moves(%5B1, 3, 2%5D)%0A clone = self.game.copy()%0A self.assertItemsEqual(self.game.legal_moves(), clone.legal_moves())%0A self.assertEqual(self.game.cur_player, clone.cur_player)%0A self.assertEqual(self.game, clone)%0A
|
379822ab16229174071172792132cbb549c5f841
|
celery task for requests
|
ekanalyzer.py
|
ekanalyzer.py
|
import os
from flask import Flask
from flask import render_template
from flask import request, redirect, url_for
from werkzeug import secure_filename
import hashlib
from pymongo import Connection
import dpkt
import sys
import socket
from celery import Celery
from requests import Request, Session
# FIXME: move to config.py
ALLOWED_EXTENSIONS = set(['pcap'])
def create_app():
return Flask("ekanalyzer")
app = create_app()
app.config.from_pyfile('config.py')
connection = Connection(app.config['MONGODB_SERVER'] , app.config['MONGODB_PORT'])
db = connection.ekanalyzer
app.debug = True
celery = Celery('ekanalyzer', broker=app.config['BROKER_URL'] )
@celery.task
def perform_results(hash):
try:
pcap = {'hash' : hash}
result = db.pcap.find(pcap)
if result.count() > 0:
return
else:
db.pcap.insert(pcap)
f = open(app.config['UPLOAD_FOLDER'] + hash)
pcap = dpkt.pcap.Reader(f)
for ts, buf in pcap:
eth = dpkt.ethernet.Ethernet(buf)
ip = eth.data
tcp = ip.data
# FIXME: assuming only http traffic on port 80
if tcp.dport == 80 and len(tcp.data) > 0:
http = dpkt.http.Request(tcp.data)
ipaddress = socket.inet_ntoa(ip.dst)
data = { 'ip' : ipaddress,
'uri' : http.uri,
'method' : http.method,
'data' : http.data,
'headers' : http.headers,
'hash': hash
}
db.requests.insert(data)
print "Data imported"
status = process_requests(hash)
except NameError as e:
print e
except :
print "Unexpected error:", sys.exc_info()
pass
def process_requests(hash):
request = { 'hash' : hash}
result = db.requests.find(request)
for r in result:
print process_request(r['ip'], r['uri'], r['method'], r['headers'], r['data'])
def process_request(ip, uri, method, headers, data):
#FIXME: port 80
url = "http://{0}:80{1}".format(ip, uri)
s = Session()
req = Request(method, url,
data=data,
headers=headers
)
prepped = req.prepare()
resp = s.send(prepped)
return resp.status_code
def allowed_file(filename):
return '.' in filename and \
filename.rsplit('.', 1)[1] in ALLOWED_EXTENSIONS
@app.route('/upload-ek/', methods=['POST'])
def upload_file():
file = request.files['pcap']
if file and allowed_file(file.filename):
hash = hashlib.sha256()
try:
for chunk in file.chunks():
hash.update(chunk)
finally:
file.seek(0)
hash_name = "%s" % (hash.hexdigest())
file.save(os.path.join(app.config['UPLOAD_FOLDER'], hash_name))
return redirect(url_for('launch', hash=hash_name))
@app.route('/launch/<hash>/')
def launch(hash):
perform_results.delay(hash)
return render_template('launch.html', hash=hash)
@app.route('/')
def index():
return render_template('index.html')
if __name__ == "__main__":
app.run(debug=True)
|
Python
| 0.999885
|
@@ -2029,16 +2029,22 @@
_request
+.delay
(r%5B'ip'%5D
@@ -2094,16 +2094,29 @@
ata'%5D)%0A%0A
+@celery.task%0A
def proc
|
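The change in this record decorates process_request with @celery.task and calls it through .delay(), so each captured request is replayed by a worker instead of blocking the web process. A minimal sketch of the same pattern, assuming a local Redis broker at redis://localhost:6379/0 (the broker URL is illustrative; the record reads BROKER_URL from config.py):

from celery import Celery

app = Celery('sketch', broker='redis://localhost:6379/0')  # assumed broker

@app.task
def replay_request(ip, uri):
    # stand-in body; the record rebuilds and resends the captured HTTP request
    return 'http://{0}:80{1}'.format(ip, uri)

replay_request('127.0.0.1', '/index.html')        # runs synchronously in-process
replay_request.delay('127.0.0.1', '/index.html')  # enqueued for a Celery worker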
3efda93bfd4a4245b15a712a0d4bdefeb111710f
|
make server ip a variable
|
misc/build-windows.py
|
misc/build-windows.py
|
#!/usr/bin/env python
# start up windows vm
# some script should start on bootup and try to connect back to this script (through some port)
# this script should send commands for building the tree and creating the exe
# could also have a simple file transfer mechanism so I can get the exe without having to muck around with ftp or whatever
port = 15421
quit_message = '**quit**'
# higher verbose levels output more stuff
verbose = 1
def log_debug(str, level = 2):
if verbose >= level:
print str
def log_info(str):
log_debug(str, 1)
def log_error(str):
log_debug(str, 0)
def client_side():
def connect(address):
import socket
connection = socket.socket()
res = socket.getaddrinfo(address, port)
af, socktype, proto, canonname, socket_address = res[0]
log_info("Connecting to %s:%d.." % (address, port))
connection.connect(socket_address)
log_info("Connected!")
return connection
# execute a command
def do_command(command, connection):
import subprocess
args = command.split(' ')
if args[0] == 'cd':
import os
os.chdir(args[1])
connection.send('changed directory to ' + args[1])
else:
process = subprocess.Popen(command.split(' '), stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.STDOUT)
stdout = process.stdout
out = stdout.readline()
while out != None and out != "":
log_debug("Sending line '%s'" % out)
connection.send(out)
out = stdout.readline()
process.wait()
def read_commands(connection):
import re
try:
line = re.compile('(.*)\n\n')
data = ""
while True:
more = connection.recv(4096)
if not more:
break
data += more
log_debug("Buffer is now '%s'" % data)
get = line.match(data)
while get != None:
command = get.group(1)
if command == quit_message:
connection.close()
return
log_debug("Got command '%s'" % command)
out = do_command(command, connection)
                # chop off the command from the buffer
data = data[(len(command) + 2):]
get = line.match(data)
except Exception, e:
log_error("Got an error.. closing the connection: " + str(e))
connection.close()
def run():
read_commands(connect('192.168.90.2'))
run()
def server_side():
def start_windows_vm():
def start_virtualbox():
import subprocess
# specific to jon's setup
vm_name = "xp-dev"
executable = "VBoxSDL"
return subprocess.Popen([executable, "-startvm", vm_name])
start_virtualbox()
# returns a connection
def wait_for_connect():
import socket
server = socket.socket()
server.bind(('0.0.0.0', port))
server.listen(1)
log_info("Waiting for a connection on port %d.." % port)
(client, address) = server.accept()
log_info("Got a connection from %s!" % str(address))
return client
# write the command and two newlines
def send_command(connection, command):
connection.send(command)
connection.send("\n\n")
# gets the text output from sending commands
def send_build_commands(connection):
# send_command(connection, 'ls')
send_command(connection, 'cd c:/svn/paintown')
send_command(connection, 'svn update')
send_command(connection, 'make win')
send_command(connection, quit_message)
size = 4096
data = connection.recv(size)
while data:
print data.strip()
data = connection.recv(size)
connection.close()
def run():
# start_windows_vm()
send_build_commands(wait_for_connect())
log_info("All done")
run()
import sys
if len(sys.argv) < 2:
log_error("""valid arguments:
client - run as the client
server - run as the server
 verbose=# - set verbose level. 1 is the default. higher numbers are more verbose
""")
else:
import re
verbose_arg = re.compile('verbose=(\d+)')
for arg in sys.argv[1:]:
if arg == 'client':
client_side()
elif arg == 'server':
server_side()
elif verbose_arg.match(arg) != None:
out = verbose_arg.match(arg)
verbose = int(out.group(1))
|
Python
| 0.000315
|
@@ -349,16 +349,84 @@
= 15421%0A
+# network settings in jon's virtual box %0Aserver_ip = '192.168.90.2'%0A
quit_mes
@@ -2764,30 +2764,25 @@
connect(
-'192.168.90.2'
+server_ip
))%0A%0A
|
337f6d67bce331a26b44e65671c63f223f0c5ebc
|
add help
|
ckstyle/command/ConsoleCommandParser.py
|
ckstyle/command/ConsoleCommandParser.py
|
import sys
import os
import getopt
import string
from ckstyle.doCssCheck import checkFile, checkDir, checkDirRecursively
import CommandFileParser
def usage():
print '''
[Usage]
ckstyle -h / ckstyle --help
ckstyle file.css
ckstyle dir
ckstyle -r dir
ckstyle -p file.css
ckstyle -p -r dir
ckstyle -c config_file_path
ckstyle -c config_file_path -r -p
'''
def getDefaultConfigPath():
homedir = os.getenv('USERPROFILE') or os.getenv('HOME')
return os.path.realpath(os.path.join(homedir, 'ckstyle.ini'))
def getErrorLevel(value):
if value.strip() == '':
return None
try:
realValue = string.atoi(value)
errorLevel = realValue
if errorLevel > 2:
errorLevel = 2
elif errorLevel < 0:
errorLevel = 0
return errorLevel
except ValueError:
        print '[error] --errorLevel option should be a number\n'
return None
def getExtension(value):
if value.strip() == '':
return None
value = value.strip()
if not value.startswith('.'):
value = '.' + value
return value
def getValue(value):
if value.strip() == '':
return None
return value.strip()
def getConfigFile(value):
value = value.strip()
if value == '':
print '[error] no config file, ckstyle.ini path should be after -c.\n'
return None
if os.path.exists(value) and value.endswith('.ini'):
return value
else:
print '[error] %s does not exist, or is not a ".ini" file' % value
return None
def parseCmdArgs(config, opts, args, parser):
recur = False
printFlag = False
configFile = None
errorLevel = None
extension = None
include = None
exclude = None
for op, value in opts:
if op == "-r":
recur = True
elif op == '-p':
printFlag = True
        elif op == '-c' or op == '--config':
configFile = getConfigFile(value)
elif op == "--help" or op == '-h':
usage()
sys.exit()
elif op == '--extension':
extension = getExtension(value)
elif op == '--errorLevel':
errorLevel = getErrorLevel(value)
elif op == '--include':
include = getValue(value)
elif op == '--exclude':
exclude = getValue(value)
if configFile is not None :
parser.load(configFile)
config = parser.args
if recur: config.recursive = True
if printFlag: config.printFlag = True
if errorLevel: config.errorLevel = errorLevel
if extension: config.extension = extension
if include: config.include = include
if exclude: config.exclude = exclude
return config
def handleCmdArgs():
try:
opts, args = getopt.getopt(sys.argv[1:], "hrpc:", ["help", "config=", "errorLevel=", "extension=", "include=", "exclude="])
except getopt.GetoptError, e:
print '[option error] %s ' % e.msg
return
configFile = getDefaultConfigPath()
parser = CommandFileParser.CommandFileParser(configFile)
config = parser.args
if len(args) == 0 and len(opts) == 0:
checkDir(os.getcwd(), config = config)
return
config = parseCmdArgs(config, opts, args, parser)
filePath = None
if len(args) == 0:
filePath = os.getcwd()
else:
filePath = args[0]
if not os.path.exists(filePath):
print '[error] %s not exist' % filePath
return
if filePath.endswith('.css'):
checkFile(filePath, config = config)
return
checkDir(filePath, config = config)
|
Python
| 0
|
@@ -1,16 +1,50 @@
+#/usr/bin/python%0A#encoding=utf-8%0A%0A
import sys%0Aimpor
@@ -241,16 +241,28 @@
--help%0A
+ ckstyle%0A
ckst
@@ -329,108 +329,749 @@
-p
-file.css%0A ckstyle -p -r dir%0A ckstyle -c config_file_path%0A ckstyle -c config_file_path -r -p
+-r dir%0A ckstyle -c xxx.ini %0A ckstyle -c xxx.ini -r -p%0A%0A%5BExample%5D%0A ckstyle -c xxx.ini -r -p -c xxx.ini --extension=.test.txt --include=all --exclude=none --errorLevel=2 dirpath%0A%0A%5BOptions%5D%0A -h / --help show help%0A -r check files in directory recursively%0A -p print check result to console(delete result files at the same time)%0A -c / --config specify the config file name(use %22~/ckstyle.ini%22 as default)%0A --include specify rules(can be configed in .ini file)%0A --exclude specify exclude rules(can be configed in .ini file)%0A --extension specify check result file extension(use %22.ckstyle.txt%22 as default)%0A --errorLevel specify error level(0-error, 1-warning, 2-log)
%0A
|
64bae35c0c2d6132b1ba5b2fa73e8bc2df541c33
|
swap x-y size for library call
|
ani.py
|
ani.py
|
# Python ctypes wrapper and circle test for fast anisotropic 2-D Gaussian filter
# wrapper for C library by J. M. Geusebroek: http://staff.science.uva.nl/~mark
# compile original C function as: gcc -fPIC -shared -o anigauss.so anigauss.c
#
# Lindy L Blackburn (lindylam@gmail.com)
# Jan 29, 2014
import numpy as np
import ctypes as C
# load in external library
aglib = C.cdll.LoadLibrary("./anigauss.so")
# inarr: input array
# v-axis = short axis
# u-axis = long axis
# (sigv, sigu) = sigmas for Gaussian filter
# phi = orientation angle in degrees (from x)
# (derv, deru) = for line and edge detection (default zero)
def anigauss(inarr, sigv, sigu, phi=0., derv=0, deru=0):
# make sure we have a C-order array of doubles
if inarr.dtype is not np.double:
inarr = inarr.astype(np.double, order='C')
elif not inarr.flags.c_contiguous:
inarr = np.ascontiguousarray(inarr)
# create output array
outarr = np.zeros_like(inarr, dtype=np.double)
# size parameters for array
(sizex, sizey) = inarr.shape
# call external function
aglib.anigauss(inarr.ctypes.data_as(C.POINTER(C.c_double)),
outarr.ctypes.data_as(C.POINTER(C.c_double)),
C.c_int(sizex), C.c_int(sizey), C.c_double(sigv), C.c_double(sigu),
C.c_double(phi), C.c_int(derv), C.c_int(deru))
# return filtered image
return outarr
# plot circle and line in noise, and compare various Gaussian filters in 2D
def circletest():
import matplotlib.pyplot as plt
cm = plt.get_cmap('binary')
kw = {'cmap':cm, 'interpolation':'none'}
(xx, yy) = np.mgrid[0:101, 0:101] - 50.
r = np.sqrt(xx**2 + yy**2)
zz = np.exp(-(r - 36.)**2/8.)
zz = np.maximum(zz, np.exp(-(xx+yy + 80.)**2/8.))
zz = np.maximum(zz, 0.5 * ((xx**2 + yy**2) < 20**2))
zz += np.random.randn(*zz.shape)
plt.subplot(2, 3, 1)
plt.imshow(zz, **kw)
plt.title('original')
plt.subplot(2, 3, 2)
zzf = anigauss(zz, 2, 2)
plt.imshow(zzf, **kw)
plt.title('isotropic gaussian')
plt.subplot(2, 3, 3)
zz1 = anigauss(zz, 4, 4, derv=0, deru=2)
zz2 = anigauss(zz, 4, 4, derv=2, deru=0)
zzf = np.sqrt((zz1 + zz2)**2)
plt.imshow(zzf, **kw)
plt.title('laplace magnitude')
zzfs = []
zzds = []
nangles = 32
angles = np.arange(0, 180, 180./nangles)
for phi in angles:
zzf = anigauss(zz, 2, 6, phi=phi)
zzd = anigauss(zz, 4, 6, derv=2, deru=0, phi=phi)
zzfs.append(zzf)
zzds.append(-zzd)
plt.subplot(2, 3, 4)
plt.imshow(zzfs[4], **kw)
plt.title(u'anisotropic gaussian %d\u00b0' % angles[4])
plt.subplot(2, 3, 5)
plt.imshow(np.max(np.array(zzfs), axis=0), **kw)
plt.title('maximum of %d anisotropic' % nangles)
plt.subplot(2, 3, 6)
plt.imshow(np.max(np.array(zzds), axis=0), **kw)
plt.title('maximum of %d laplace' % nangles)
|
Python
| 0
|
@@ -1023,16 +1023,16 @@
size
-x
+y
, size
-y
+x
) =
|
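The one-character fix above is a numpy axis-order correction: arr.shape is (rows, columns), i.e. (y, x), while the anigauss C call expects the x size first. A quick check of the convention:

import numpy as np

a = np.zeros((3, 5))     # 3 rows (y) by 5 columns (x)
sizey, sizex = a.shape   # unpack in (y, x) order, as the fixed code does
assert (sizey, sizex) == (3, 5)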
12a85a17194610f81c9ff0c73ea69f4adfc2b307
|
remove old routine
|
floss/render/sanitize.py
|
floss/render/sanitize.py
|
import string
def sanitize_string_for_printing(s: str) -> str:
"""
Return sanitized string for printing to cli.
"""
sanitized_string = s.replace("\\\\", "\\") # print single backslashes
sanitized_string = "".join(c for c in sanitized_string if c in string.printable)
return sanitized_string
def sanitize_string_for_script(s: str) -> str:
"""
Return sanitized string that is added to IDA script source.
"""
sanitized_string = sanitize_string_for_printing(s)
sanitized_string = sanitized_string.replace("\\", "\\\\")
sanitized_string = sanitized_string.replace('"', '\\"')
return sanitized_string
|
Python
| 0.000654
|
@@ -314,339 +314,4 @@
ring
-%0A%0A%0Adef sanitize_string_for_script(s: str) -%3E str:%0A %22%22%22%0A Return sanitized string that is added to IDA script source.%0A %22%22%22%0A sanitized_string = sanitize_string_for_printing(s)%0A sanitized_string = sanitized_string.replace(%22%5C%5C%22, %22%5C%5C%5C%5C%22)%0A sanitized_string = sanitized_string.replace('%22', '%5C%5C%22')%0A return sanitized_string%0A
|
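The helper kept by this record collapses escaped backslashes and then drops anything outside string.printable. A standalone usage sketch:

import string

def sanitize_string_for_printing(s):
    s = s.replace("\\\\", "\\")  # print single backslashes
    return "".join(c for c in s if c in string.printable)

assert sanitize_string_for_printing("C:\\\\temp\\\\a\x00b") == "C:\\temp\\ab"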
8379d56ac1be68c9c1d255893644813df8300ed8
|
add verbose name
|
awesomepose/categories/models/category.py
|
awesomepose/categories/models/category.py
|
from django.db import models
from mptt.models import MPTTModel, TreeForeignKey
class Category(models.Model):
name = models.CharField(max_length=50, unique=True)
parent = TreeForeignKey('self', null=True, blank=True, related_name="children", db_index=True)
class MPTTMeta:
order_insertion_by = ['name']
def __str__(self):
return self.name
|
Python
| 0.999996
|
@@ -89,23 +89,20 @@
ategory(
-models.
+MPTT
Model):%0A
@@ -316,16 +316,106 @@
name'%5D%0A%0A
+ class Meta:%0A verbose_name = %22%EC%B9%B4%ED%85%8C%EA%B3%A0%EB%A6%AC%22%0A verbose_name_plural = verbose_name%0A%0A
def
|
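The diff subclasses MPTTModel and adds a Meta whose verbose_name is the Korean "카테고리" ("category"), reusing it for the plural. A sketch of the resulting model, assuming a configured Django project with django-mptt installed:

from django.db import models
from mptt.models import MPTTModel, TreeForeignKey

class Category(MPTTModel):
    name = models.CharField(max_length=50, unique=True)
    parent = TreeForeignKey('self', null=True, blank=True,
                            related_name="children", db_index=True)

    class MPTTMeta:
        order_insertion_by = ['name']

    class Meta:
        verbose_name = "category"            # the record uses the Korean label
        verbose_name_plural = verbose_name   # the Korean plural matches the singular

    def __str__(self):
        return self.name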
502ab3c29d66840b030f2bd277b11e9bdd33e297
|
version bump
|
diffantom/info.py
|
diffantom/info.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
Base module variables
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from datetime import datetime
__packagename__ = 'diffantom'
__version__ = '1.0.1'
__author__ = 'Oscar Esteban'
__affiliation__ = 'Psychology Department, Stanford University'
__credits__ = ['Oscar Esteban']
__license__ = 'MIT License'
__maintainer__ = 'Oscar Esteban'
__email__ = 'code@oscaresteban.es'
__status__ = 'Prototype'
__copyright__ = 'Copyright {}, {}'.format(datetime.now().year, __author__)
__description__ = """\
Diffantom: Whole-Brain Diffusion MRI Phantoms Derived from Real Datasets of the \
Human Connectome Project"""
__longdesc__ = """\
Diffantom is a whole-brain diffusion MRI (dMRI) phantom publicly available through the \
Dryad Digital Repository (doi:10.5061/dryad.4p080). The dataset contains two single-shell \
dMRI images, along with the corresponding gradient information, packed following the BIDS \
standard (Brain Imaging Data Structure, Gorgolewski et al., 2015). \
The released dataset is designed for the evaluation of the impact of susceptibility \
distortions and benchmarking existing correction methods.\
This project contains the software instruments involved in generating \
diffantoms, so that researchers are able to generate new phantoms derived \
from different subjects, and apply these data in other applications like \
investigating diffusion sampling schemes, the assessment of dMRI processing methods, \
the simulation of pathologies and imaging artifacts, etc. In summary, Diffantom is \
intended for unit testing of novel methods, cross-comparison of established methods, \
and integration testing of partial or complete processing flows to extract connectivity \
networks from dMRI.
"""
CLASSIFIERS = [
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering :: Image Recognition',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
]
DOWNLOAD_URL = (
'https://pypi.python.org/packages/source/{name[0]}/{name}/{name}-{ver}.tar.gz'.format(
name=__packagename__, ver=__version__))
URL = 'https://github.com/oesteban/{}'.format(__packagename__)
REQUIRES = [
'future',
'numpy',
'nipype',
'nibabel',
'nipy',
'scipy',
'phantomas',
]
LINKS_REQUIRES = [
'git+https://github.com/oesteban/phantomas.git#egg=phantomas',
'git+https://github.com/oesteban/nipype.git#egg=nipype',
]
TESTS_REQUIRES = ['mock', 'codecov', 'pytest-xdist']
EXTRA_REQUIRES = {
'doc': ['sphinx'],
'tests': TESTS_REQUIRES,
'duecredit': ['duecredit']
}
# Enable a handle to install all extra dependencies at once
EXTRA_REQUIRES['all'] = [val for _, val in list(EXTRA_REQUIRES.items())]
|
Python
| 0.000001
|
@@ -346,17 +346,19 @@
= '1.0.
-1
+2a0
'%0A__auth
|
bcd8d27194131e48d73d843bdae9930e6720130f
|
Update Vartype
|
dimod/vartypes.py
|
dimod/vartypes.py
|
"""
Vartype is an enumeration of the valid types for variables in a binary quadratic models.
Examples:
>>> vartype = dimod.Vartype.SPIN
>>> print(vartype)
Vartype.SPIN
>>> isinstance(vartype, dimod.Vartype)
True
Access can also be by value or name.
>>> print(dimod.Vartype({0, 1}))
Vartype.BINARY
>>> print(dimod.Vartype['SPIN'])
Vartype.SPIN
To check correctness, use the `.value` parameter.
>>> sample = {'u': -1, 'v': 1}
>>> vartype = dimod.Vartype.SPIN
>>> all(val in vartype.value for val in sample.values())
True
The different Vartypes are also in the main namespace
for easy access.
>>> vartype = dimod.SPIN
>>> print(vartype)
Vartype.SPIN
>>> vartype = dimod.BINARY
>>> print(vartype)
Vartype.BINARY
"""
import enum
__all__ = ['Vartype', 'SPIN', 'BINARY']
class Vartype(enum.Enum):
"""An :py:class:`~enum.Enum` over the types of variables for the binary quadratic model.
Attributes:
SPIN (:class:`.Vartype`): The vartype for spin-valued models. That
is the variables of the model are either -1 or 1.
BINARY (:class:`.Vartype`): The vartype for binary models. That is
the variables of the model are either 0 or 1.
"""
SPIN = frozenset({-1, 1})
BINARY = frozenset({0, 1})
SPIN = Vartype.SPIN
BINARY = Vartype.BINARY
|
Python
| 0
|
@@ -1,23 +1,9 @@
%22%22%22%0A
-Vartype is an e
+E
nume
@@ -16,18 +16,23 @@
of
-the
valid
+variable
type
@@ -40,23 +40,8 @@
for
- variables in a
bin
@@ -69,24 +69,262 @@
%0A%0AExamples:%0A
+ This example shows easy access to different Vartypes, which are in the main%0A namespace.%0A%0A %3E%3E%3E vartype = dimod.SPIN%0A %3E%3E%3E print(vartype)%0A Vartype.SPIN%0A %3E%3E%3E vartype = dimod.BINARY%0A %3E%3E%3E print(vartype)%0A Vartype.BINARY%0A
%3E%3E%3E vart
@@ -449,26 +449,33 @@
-Access can also be
+This example shows access
by
@@ -610,32 +610,24 @@
T
-o check correctness,
+his example
use
+s
the
@@ -645,16 +645,28 @@
arameter
+ to validate
.%0A%0A %3E
@@ -807,231 +807,8 @@
ue%0A%0A
- The different Vartypes are also in the main namespace%0A for easy access.%0A%0A %3E%3E%3E vartype = dimod.SPIN%0A %3E%3E%3E print(vartype)%0A Vartype.SPIN%0A %3E%3E%3E vartype = dimod.BINARY%0A %3E%3E%3E print(vartype)%0A Vartype.BINARY%0A%0A
%22%22%22%0A
@@ -1024,37 +1024,33 @@
ss:%60.Vartype%60):
-The v
+V
artype for spin-
@@ -1066,33 +1066,9 @@
dels
-. That%0A is the
+;
var
@@ -1068,32 +1068,43 @@
ls; variables of
+%0A
the model are e
@@ -1158,13 +1158,9 @@
%60):
-The v
+V
arty
@@ -1183,33 +1183,9 @@
dels
-. That is%0A the
+;
var
@@ -1193,24 +1193,35 @@
ables of the
+%0A
model are e
|
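Beyond the docstring rewrite, the enum-with-frozenset pattern in this record is the useful part: each member's .value is a frozenset, so validating a sample is a membership test, and lookup by value still works because Enum falls back to an equality scan for unhashable arguments. A standalone check:

import enum

class Vartype(enum.Enum):
    SPIN = frozenset({-1, 1})
    BINARY = frozenset({0, 1})

sample = {'u': -1, 'v': 1}
assert all(v in Vartype.SPIN.value for v in sample.values())
assert Vartype({0, 1}) is Vartype.BINARY  # by-value lookup, via equality scan
assert Vartype['SPIN'] is Vartype.SPIN    # by-name lookup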
8ed7b3e4367f5ed43a901fee048228c6e6aeeb8c
|
fix for edge case with invalid user logged in
|
backend/unpp_api/apps/management/views.py
|
backend/unpp_api/apps/management/views.py
|
from django.db.models import Q
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework.generics import CreateAPIView, ListAPIView, UpdateAPIView
from account.models import User
from agency.models import AgencyOffice
from agency.permissions import AgencyPermission
from common.pagination import SmallPagination
from common.permissions import HasUNPPPermission
from management.filters import AgencyUserFilter, PartnerUserFilter
from management.serializers import AgencyUserManagementSerializer, PartnerOfficeManagementSerializer, \
AgencyOfficeManagementSerializer, PartnerUserManagementSerializer
from partner.models import Partner
from partner.permissions import PartnerPermission
class UserViewSet(CreateAPIView, ListAPIView, UpdateAPIView):
permission_classes = (
HasUNPPPermission(
agency_permissions=[
AgencyPermission.MANAGE_OWN_AGENCY_USERS,
],
partner_permissions=[
PartnerPermission.MANAGE_OFFICE_USERS,
]
),
)
pagination_class = SmallPagination
filter_backends = (DjangoFilterBackend,)
@property
def filter_class(self):
if self.request.agency_member:
return AgencyUserFilter
elif self.request.partner_member:
return PartnerUserFilter
def get_serializer_class(self):
if self.request.agency_member:
return AgencyUserManagementSerializer
elif self.request.partner_member:
return PartnerUserManagementSerializer
def get_queryset(self):
if self.request.agency_member:
queryset = User.objects.filter(agency_members__office__agency=self.request.user.agency).distinct('id')
elif self.request.partner_member:
query = Q(partner_members__partner=self.request.partner_member.partner)
if self.request.partner_member.partner.is_hq:
query |= Q(partner_members__partner__hq=self.request.partner_member.partner)
queryset = User.objects.filter(query).distinct('id')
# We don't want user to edit own account
return queryset.exclude(id=self.request.user.id)
class OfficeListView(ListAPIView):
permission_classes = (
HasUNPPPermission(
agency_permissions=[
AgencyPermission.MANAGE_OWN_AGENCY_USERS,
],
partner_permissions=[
PartnerPermission.MANAGE_OFFICE_USERS,
]
),
)
def get_queryset(self):
if self.request.agency_member:
return AgencyOffice.objects.filter(agency=self.request.user.agency)
elif self.request.partner_member:
query = Q(id=self.request.partner_member.partner_id)
if self.request.partner_member.partner.is_hq:
query |= Q(hq=self.request.partner_member.partner)
return Partner.objects.filter(query)
def get_serializer_class(self):
if self.request.agency_member:
return AgencyOfficeManagementSerializer
elif self.request.partner_member:
return PartnerOfficeManagementSerializer
|
Python
| 0
|
@@ -1571,32 +1571,71 @@
queryset(self):%0A
+ queryset = User.objects.none()%0A
if self.
@@ -2969,16 +2969,54 @@
r(query)
+%0A return Partner.objects.none()
%0A%0A de
@@ -3150,38 +3150,10 @@
el
-if self.request.partner_member
+se
:%0A
|
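The fix above covers a logged-in user who is neither an agency member nor a partner member: without the User.objects.none() default, get_queryset would hit an unbound local, and the office view could return None instead of an empty queryset. The shape of the fix, sketched without Django:

def get_items(user):
    items = []  # safe empty default, like User.objects.none() in the fix
    if getattr(user, 'is_agency', False):
        items = ['agency data']
    elif getattr(user, 'is_partner', False):
        items = ['partner data']
    return items  # never None and never unbound

class Visitor:
    pass

assert get_items(Visitor()) == []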
9077b4e319807fd2791a28c55d41cf4fcd149365
|
Fix Member.colour to use the new algorithm for determining colour.
|
discord/member.py
|
discord/member.py
|
# -*- coding: utf-8 -*-
"""
The MIT License (MIT)
Copyright (c) 2015-2016 Rapptz
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
from .user import User
from .game import Game
from . import utils
from .enums import Status
from .colour import Colour
class Member(User):
"""Represents a Discord member to a :class:`Server`.
This is a subclass of :class:`User` that extends more functionality
that server members have such as roles and permissions.
Attributes
----------
deaf : bool
Indicates if the member is currently deafened by the server.
mute : bool
Indicates if the member is currently muted by the server.
self_mute : bool
Indicates if the member is currently muted by their own accord.
self_deaf : bool
Indicates if the member is currently deafened by their own accord.
is_afk : bool
Indicates if the member is currently in the AFK channel in the server.
voice_channel : :class:`Channel`
The voice channel that the member is currently connected to. None if the member
is not currently in a voice channel.
roles
A list of :class:`Role` that the member belongs to. Note that the first element of this
list is always the default '@everyone' role.
joined_at : `datetime.datetime`
A datetime object that specifies the date and time in UTC that the member joined the server for
the first time.
status : :class:`Status`
The member's status. There is a chance that the status will be a ``str``
if it is a value that is not recognised by the enumerator.
game : :class:`Game`
The game that the user is currently playing. Could be None if no game is being played.
server : :class:`Server`
The server that the member belongs to.
nick : Optional[str]
The server specific nickname of the user.
"""
__slots__ = [ 'deaf', 'mute', 'self_mute', 'self_deaf', 'is_afk',
'voice_channel', 'roles', 'joined_at', 'status', 'game',
'server', 'nick' ]
def __init__(self, **kwargs):
super().__init__(**kwargs.get('user'))
self.deaf = kwargs.get('deaf')
self.mute = kwargs.get('mute')
self.joined_at = utils.parse_time(kwargs.get('joined_at'))
self.roles = kwargs.get('roles', [])
self.status = Status.offline
game = kwargs.get('game', {})
self.game = Game(**game) if game else None
self.server = kwargs.get('server', None)
self.nick = kwargs.get('nick', None)
self._update_voice_state(mute=self.mute, deaf=self.deaf)
def _update_voice_state(self, **kwargs):
self.self_mute = kwargs.get('self_mute', False)
self.self_deaf = kwargs.get('self_deaf', False)
self.is_afk = kwargs.get('suppress', False)
self.mute = kwargs.get('mute', False)
self.deaf = kwargs.get('deaf', False)
old_channel = getattr(self, 'voice_channel', None)
self.voice_channel = kwargs.get('voice_channel')
if old_channel is None and self.voice_channel is not None:
# we joined a channel
self.voice_channel.voice_members.append(self)
elif old_channel is not None:
try:
# we either left a channel or we switched channels
old_channel.voice_members.remove(self)
except ValueError:
pass
finally:
# we switched channels
if self.voice_channel is not None:
self.voice_channel.voice_members.append(self)
@property
def colour(self):
"""A property that returns a :class:`Colour` denoting the rendered colour
for the member. If the default colour is the one rendered then an instance
of :meth:`Colour.default` is returned.
There is an alias for this under ``color``.
"""
# highest order of the colour is the one that gets rendered.
if self.roles:
role = max(self.roles, key=lambda r: r.position)
return role.colour
else:
return Colour.default()
color = colour
@property
def mention(self):
if self.nick:
return '<@!{}>'.format(self.id)
return '<@{}>'.format(self.id)
def mentioned_in(self, message):
mentioned = super().mentioned_in(message)
if mentioned:
return True
for role in message.role_mentions:
has_role = utils.get(self.roles, id=role.id) is not None
if has_role:
return True
return False
|
Python
| 0
|
@@ -4903,24 +4903,66 @@
%22%22%22%0A%0A
+ default_colour = Colour.default()%0A
# hi
@@ -5014,24 +5014,131 @@
s rendered.%0A
+ # if the highest is the default colour then the next one with a colour%0A # is chosen instead%0A
if s
@@ -5168,14 +5168,18 @@
role
+s
=
-max
+sorted
(sel
@@ -5215,41 +5215,142 @@
tion
-)%0A return role.colour%0A
+, reverse=True)%0A for role in roles:%0A if role.colour == default_colour:%0A continue%0A
@@ -5375,31 +5375,65 @@
+
+
return
-Colour.default()
+role.colour%0A%0A return default_colour
%0A%0A
|
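The new algorithm sorts the member's roles by position, highest first, and returns the first colour that differs from the default, so an uncoloured top role no longer hides a coloured role below it. A plain-Python sketch of the resolution order (Role and the default value here are stand-ins, not discord.py's types):

from collections import namedtuple

Role = namedtuple('Role', ['position', 'colour'])
DEFAULT = 0x000000  # stand-in for Colour.default()

def rendered_colour(roles):
    for role in sorted(roles, key=lambda r: r.position, reverse=True):
        if role.colour != DEFAULT:
            return role.colour
    return DEFAULT

roles = [Role(1, 0xFF0000), Role(5, DEFAULT), Role(3, 0x00FF00)]
assert rendered_colour(roles) == 0x00FF00  # highest non-default role wins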
0cb9b65fc0030922fea122a82451fef0d6d3653b
|
update version 1.0.0
|
flyingpigeon/__init__.py
|
flyingpigeon/__init__.py
|
from .wsgi import application
from .demo import main
__version__ = "0.11.0"
|
Python
| 0.000001
|
@@ -66,12 +66,11 @@
= %22
-0.11
+1.0
.0%22%0A
|
402e350e2dd39f1b60743771a8b27a73fe664112
|
Change update race to use HTTP PUT method
|
api.py
|
api.py
|
"""API around S3, SQS, and MK64 database"""
import boto
import json
import multiprocessing
import os
import requests
import time
import uuid
import syslog
from subprocess import call
from boto.sqs.message import Message
from boto.sqs.message import RawMessage
from boto.exception import *
import boto.utils
from const import *
class EC2(object):
def killself(self):
asg = boto.connect_autoscale()
instanceID = boto.utils.get_instance_identity()['document']['instanceId']
asg.terminate_instance(instanceID)
def get_launch_time(self):
#TODO This.
pass
class SQS(object):
def __init__(self, queue_names):
self.conn = boto.connect_sqs()
self.queues = {q : self.conn.get_queue(q) for q in queue_names}
def delete_message(self, msg):
return msg.delete()
def listen(self, queue_name):
try:
q = self.queues[queue_name]
raw_msg = q.get_messages(wait_time_seconds=WAIT)[0]
msg = json.loads(raw_msg.get_body())
url = str(msg['video_url'])
video_id = str(msg['id'])
rv = {'msg':raw_msg,'id':video_id,'url':url}
return rv
except UnicodeDecodeError:
filename = None
except Exception as ex:
# No messages
return None
def write(self, queue_name, payload):
try:
msg = Message()
msg.set_body(payload)
return self.queues[queue_name].write(msg)
except SQSError:
return None
class S3(object):
def __init__(self, bucket_names):
self.conn = boto.connect_s3()
self.buckets = {b : self.conn.get_bucket(b) for b in bucket_names}
def upload(self, bucket, file_path):
k = boto.s3.key.Key(self.buckets[bucket])
name = file_path.split('/')[-1]
k.key = 'raw/%s/%s' % (str(uuid.uuid1()), name)
k.set_contents_from_filename(file_path)
return k.key
def download_url(self, data_type, url, data_id):
filename = None
if 'race' in data_type:
name = 'race-videos'
elif 'session' in data_type:
name = 'session-videos'
elif 'audio' in data_type:
name = 'race-audio'
else:
raise ValueError("Invalid video type")
try:
key_name = url.split('.com/')[-1]
bucket = self.buckets[name]
key = bucket.get_key(key_name)
ext = url.split('.')[-1]
filename = '%s%s.%s' % (name, data_id, ext)
key.get_contents_to_filename(filename)
except:
filename = None
finally:
return filename
class DB(object):
def __init__(self):
self.database = 'http://n64storageflask-env.elasticbeanstalk.com'
self.port = 80
def get_regions(self, race_id):
url = '%s:%d/races/%d' % (self.database, self.port, race_id)
res = requests.get(url)
if res.ok:
return res.json()['player_regions']
else:
print 'DB Error: ' + res.json()['message']
return None
def post_events(self, race_id, events):
responses = []
url = '%s:%d/races/%s/events' % (self.database, self.port, race_id)
for e in events:
payload = json.dumps(e)
header = {'Content-Type': 'application/json'}
res = requests.post(url, data=payload, headers=header)
if res.ok:
responses.append(res.json()['id'])
else:
responses.append(None)
return responses
def post_race(self, session_id, payload):
"""Sends race JSON object to database for storage"""
url = '%s:%d/sessions/%d/races' % (self.database, self.port, session_id)
headers = {'content-type': 'application/json'}
json_payload = json.dumps(payload)
res = requests.post(url, data=json.dumps(payload), headers=headers)
if res.ok:
return res.json()['id']
else:
return None
def update_race(self, race_id, payload):
"""Sends race JSON object to database for storage"""
url = '%s:%d/races/%d' % (self.database, self.port, race_id)
headers = {'content-type': 'application/json'}
json_payload = json.dumps(payload)
res = requests.post(url, data=json.dumps(payload), headers=headers)
if res.ok:
return res.json()['id']
else:
return None
|
Python
| 0
|
@@ -4372,34 +4372,33 @@
res = requests.p
-os
+u
t(url, data=json
|
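The one-letter change gives update_race proper REST semantics: POST to /sessions/<id>/races creates a race, PUT to /races/<id> updates an existing one. A sketch of the two calls (the host mirrors the record; the session and race ids are made up for illustration):

import json
import requests

base = 'http://n64storageflask-env.elasticbeanstalk.com:80'
headers = {'content-type': 'application/json'}
payload = json.dumps({'winner': 1})

requests.post(base + '/sessions/7/races', data=payload, headers=headers)  # create
requests.put(base + '/races/42', data=payload, headers=headers)           # update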
ba78e359c709c8d70c1f68f3e9eaee455408de06
|
Update tests to v1.6.0 (#1532)
|
exercises/phone-number/phone_number_test.py
|
exercises/phone-number/phone_number_test.py
|
import unittest
from phone_number import Phone
# Tests adapted from `problem-specifications//canonical-data.json` @ v1.4.0
class PhoneNumberTest(unittest.TestCase):
def test_cleans_number(self):
number = Phone("(223) 456-7890").number
self.assertEqual(number, "2234567890")
def test_cleans_number_with_dots(self):
number = Phone("223.456.7890").number
self.assertEqual(number, "2234567890")
def test_cleans_number_with_multiple_spaces(self):
number = Phone("223 456 7890 ").number
self.assertEqual(number, "2234567890")
def test_invalid_when_9_digits(self):
with self.assertRaisesWithMessage(ValueError):
Phone("123456789")
def test_invalid_when_11_digits_and_first_not_1(self):
with self.assertRaisesWithMessage(ValueError):
Phone("22234567890")
def test_valid_when_11_digits_and_first_is_1(self):
number = Phone("12234567890").number
self.assertEqual(number, "2234567890")
def test_valid_when_11_digits_and_first_is_1_with_punctuation(self):
number = Phone("+1 (223) 456-7890").number
self.assertEqual(number, "2234567890")
def test_invalid_when_more_than_11_digits(self):
with self.assertRaisesWithMessage(ValueError):
Phone("321234567890")
def test_invalid_with_letters(self):
with self.assertRaisesWithMessage(ValueError):
Phone("123-abc-7890")
def test_invalid_with_punctuation(self):
with self.assertRaisesWithMessage(ValueError):
Phone("123-@:!-7890")
def test_invalid_if_area_code_starts_with_0(self):
with self.assertRaisesWithMessage(ValueError):
Phone("(023) 456-7890")
def test_invalid_if_area_code_starts_with_1(self):
with self.assertRaisesWithMessage(ValueError):
Phone("(123) 456-7890")
def test_invalid_if_exchange_code_starts_with_0(self):
with self.assertRaisesWithMessage(ValueError):
Phone("(223) 056-7890")
def test_invalid_if_exchange_code_starts_with_1(self):
with self.assertRaisesWithMessage(ValueError):
Phone("(223) 156-7890")
# Track specific tests
def test_area_code(self):
number = Phone("2234567890")
self.assertEqual(number.area_code, "223")
def test_pretty_print(self):
number = Phone("2234567890")
self.assertEqual(number.pretty(), "(223) 456-7890")
def test_pretty_print_with_full_us_phone_number(self):
number = Phone("12234567890")
self.assertEqual(number.pretty(), "(223) 456-7890")
# Utility functions
def setUp(self):
try:
self.assertRaisesRegex
except AttributeError:
self.assertRaisesRegex = self.assertRaisesRegexp
def assertRaisesWithMessage(self, exception):
return self.assertRaisesRegex(exception, r".+")
if __name__ == '__main__':
unittest.main()
|
Python
| 0
|
@@ -2183,24 +2183,716 @@
156-7890%22)%0A%0A
+ def test_invalid_if_area_code_starts_with_0_on_valid_11_digit_number(self):%0A with self.assertRaisesWithMessage(ValueError):%0A Phone(%221 (023) 456-7890%22)%0A%0A def test_invalid_if_area_code_starts_with_1_on_valid_11_digit_number(self):%0A with self.assertRaisesWithMessage(ValueError):%0A Phone(%221 (123) 456-7890%22)%0A%0A def test_invalid_exchange_code_starts_with_0_valid_11_digit_number(self):%0A with self.assertRaisesWithMessage(ValueError):%0A Phone(%221 (223) 056-7890%22)%0A%0A def test_invalid_exchange_code_starts_with_1_valid_11_digit_number(self):%0A with self.assertRaisesWithMessage(ValueError):%0A Phone(%221 (223) 156-7890%22)%0A%0A
# Track
|
3806de4adf5ebe25a8e15fe76657d10a1b1c01c6
|
Fix path to pip
|
dodo_commands/dodo_system_commands/install-commands.py
|
dodo_commands/dodo_system_commands/install-commands.py
|
from argparse import ArgumentParser
from dodo_commands.framework import Dodo, CommandError
from dodo_commands.framework.config import Paths
from dodo_commands.framework.util import is_using_system_dodo, symlink
from dodo_commands.framework.config import load_global_config_parser
import sys
import os
import tempfile
def _args():
parser = ArgumentParser(
description=("Install command packages into the global " +
"commands directory. " + _packages_in_extra_dir()))
parser.add_argument("paths",
nargs='*',
help='Create symlinks to these command directories')
parser.add_argument("--pip",
nargs='*',
help='Pip install the commands in these packages')
parser.add_argument("--git",
nargs='*',
help='Clone a git repo into the commands directory')
parser.add_argument("--remove",
action='store_true',
help='Remove commands from the commands directory')
parser.add_argument("--to-defaults",
action='store_true',
help='Install into the default commands directory')
group = parser.add_mutually_exclusive_group()
group.add_argument(
"--make-default",
help=
'Create a symlink to a global commands package in the default commands directory'
)
group.add_argument(
"--remove-default",
help=
'Remove a symlink to a global commands package from the default commands directory'
)
args = Dodo.parse_args(parser)
return args
def _packages_in_extra_dir():
extra_dir = Paths().extra_dir()
packages = [
x for x in os.listdir(extra_dir)
if os.path.isdir(os.path.join(extra_dir, x)) and not x.startswith('__')
]
if len(packages) == 0:
return ""
if len(packages) == 1:
msg = " The %s package is found automagically " % packages[0]
else:
packages[-1] = "and " + packages[-1]
msg = " The %s packages are found automagically " % ", ".join(packages)
return (msg + " in the dodo_commands.extra package" +
", e.g. the following works: dodo install-default-commands %s." %
packages[0])
def _report_error(msg):
sys.stderr.write(msg + os.linesep)
def _remove_package(package, only_from_defaults=False):
"""Install the dir with the default commands."""
if not only_from_defaults:
dest_dir = os.path.join(Paths().global_commands_dir(), package)
if not os.path.exists(dest_dir):
raise CommandError("Not installed: %s" % dest_dir)
Dodo.run(['rm', '-rf', dest_dir])
defaults_dest_dir = os.path.join(Paths().default_commands_dir(), package)
if os.path.exists(defaults_dest_dir):
Dodo.run(['rm', defaults_dest_dir])
def _install_package(package, install_commands_function, add_to_defaults):
"""Install the dir with the global commands."""
dest_dir = os.path.join(Paths().global_commands_dir(), package)
defaults_dest_dir = os.path.join(Paths().default_commands_dir(), package)
if add_to_defaults and os.path.exists(defaults_dest_dir):
_report_error("Error: already installed: %s" % defaults_dest_dir)
return False
if not install_commands_function():
return False
if add_to_defaults:
if not os.path.exists(dest_dir):
_report_error("Error: not found: %s" % dest_dir)
return False
if os.name == 'nt' and not args.confirm:
symlink(dest_dir, defaults_dest_dir)
else:
Dodo.run(['ln', '-s', dest_dir, defaults_dest_dir])
return True
def _install_commands_from_path(path, mv=False):
"""Install the dir with the global commands."""
dest_dir = os.path.join(Paths().global_commands_dir(),
os.path.basename(path))
if not os.path.exists(path):
alt_path = os.path.join(Paths().extra_dir(), path)
if os.path.exists(alt_path):
path = alt_path
else:
_report_error("Error: path not found: %s" % path)
return False
if os.path.exists(dest_dir):
_report_error("Error: already installed: %s" % dest_dir)
return False
if mv:
Dodo.run(['mv', path, dest_dir])
else:
try:
if os.name == 'nt' and not args.confirm:
symlink(os.path.abspath(path), dest_dir)
else:
Dodo.run(['ln', '-s', os.path.abspath(path), dest_dir])
except:
_report_error("Error: could not create a symlink in %s." %
dest_dir)
return True
def _install_commands_from_package(package):
config = load_global_config_parser()
python_path_parts = os.path.split(
config.get("settings", "python_interpreter"))
pip_path_parts = python_path_parts[:-1] + python_path_parts[-1].replace(
'python', 'pip')
pip = os.path.join(*pip_path_parts)
Dodo.run([
pip, 'install', '--upgrade', '--target',
Paths().global_commands_dir(), package
])
return True
def _clone_git_repo(repo_path):
tmp_dir = tempfile.mkdtemp()
Dodo.run(['git', 'clone', repo_path], cwd=tmp_dir)
package = os.listdir(tmp_dir)[0]
return tmp_dir, package
if Dodo.is_main(__name__):
args = _args()
if args.pip and not is_using_system_dodo():
raise CommandError(
"Please deactivate your dodo project first by running 'deactivate'."
)
if args.make_default:
_install_package(args.make_default, lambda: True, True)
sys.exit(0)
if args.remove_default:
_remove_package(args.remove_default, only_from_defaults=True)
sys.exit(0)
if args.paths:
for path in args.paths:
package = os.path.basename(path)
if args.remove:
_remove_package(package)
else:
_install_package(
package, lambda: _install_commands_from_path(path),
args.to_defaults)
if args.pip:
for package in args.pip:
if args.remove:
_remove_package(package)
else:
_install_package(
package, lambda: _install_commands_from_package(package),
args.to_defaults)
if args.git:
for repo_path in args.git:
if args.remove:
raise CommandError(
"The --git option is not supported when removing a package."
+ " Please use dodo install-commands --remove <package>.")
tmp_dir, package = _clone_git_repo(repo_path)
_install_package(
package, lambda: _install_commands_from_path(
os.path.join(tmp_dir, package), mv=True), args.to_defaults)
Dodo.run(['rm', '-rf', tmp_dir])
|
Python
| 0.000068
|
@@ -4968,16 +4968,21 @@
parts =
+list(
python_p
@@ -4995,18 +4995,29 @@
rts%5B:-1%5D
+)
+
+ %5B%0A
python_
@@ -5039,25 +5039,16 @@
replace(
-%0A
'python'
@@ -5056,16 +5056,22 @@
'pip')%0A
+ %5D%0A
pip
|
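The bug fixed here is a classic: os.path.split returns a tuple, and a tuple can only be concatenated with another tuple, so tuple[:-1] + str raises TypeError. The fix converts the head to a list and wraps the rewritten basename in a one-element list. A minimal reproduction:

import os

parts = os.path.split('/usr/bin/python')  # ('/usr/bin', 'python')
try:
    parts[:-1] + parts[-1].replace('python', 'pip')  # tuple + str
except TypeError as exc:
    print(exc)  # can only concatenate tuple (not "str") to tuple

pip_parts = list(parts[:-1]) + [parts[-1].replace('python', 'pip')]
assert os.path.join(*pip_parts) == '/usr/bin/pip'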
cfc6083c58d151934403ccf55444b122fec46604
|
Resolve here
|
takeyourmeds/utils/test.py
|
takeyourmeds/utils/test.py
|
from django.test import TestCase
from django.shortcuts import resolve_url
from django.contrib.auth import get_user_model
from django.core.urlresolvers import reverse
User = get_user_model()
class TestCase(TestCase):
def setUp(self):
self.user = self.create_user('testuser')
def assertStatusCode(self, status_code, fn, urlconf, *args, **kwargs):
if kwargs.pop('login', False):
user = kwargs.pop('user', self.user)
self.client.login(email=user.email, password='password')
response = fn(resolve_url(urlconf, *args, **kwargs))
self.assertEqual(
response.status_code,
status_code,
"Got HTTP %d but expected HTTP %d. Response:\n%s" % (
response.status_code,
status_code,
response,
)
)
return response
def assertGET(self, status_code, urlconf, *args, **kwargs):
return self.assertStatusCode(
status_code,
self.client.get,
urlconf,
*args,
**kwargs
)
def assertPOST(self, status_code, data, *args, **kwargs):
return self.assertStatusCode(
status_code, lambda x: self.client.post(x, data), *args, **kwargs
)
def assertRedirectsTo(self, response, urlconf, *args, **kwargs):
status_code = kwargs.pop('status_code', 302)
target_status_code = kwargs.pop('target_status_code', 200)
return self.assertRedirects(
response,
reverse(urlconf, args=args, kwargs=kwargs),
status_code,
target_status_code,
)
def create_user(self, email):
return User.objects.create_user(email, 'password')
class SuperuserTestCase(TestCase):
def setUp(self):
super(SuperuserTestCase, self).setUp()
self.user.is_staff = True
self.user.is_superuser = True
self.user.save()
|
Python
| 0
|
@@ -117,53 +117,8 @@
odel
-%0Afrom django.core.urlresolvers import reverse
%0A%0AUs
@@ -1510,13 +1510,17 @@
re
-verse
+solve_url
(url
@@ -1529,26 +1529,17 @@
nf,
-args=
+*
args,
-kwargs=
+**
kwar
|
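Swapping reverse() for resolve_url() lets assertRedirectsTo accept anything resolve_url understands: a literal URL, a view name with args/kwargs, or a model with get_absolute_url(). A self-contained sketch of that contract (the throwaway settings and URL pattern are illustrative, not from the record):

import django
from django.conf import settings

settings.configure(ROOT_URLCONF=__name__)
django.setup()

from django.shortcuts import resolve_url
from django.urls import path

def detail(request, pk):
    pass

urlpatterns = [path('items/<int:pk>/', detail, name='item-detail')]

assert resolve_url('/already/a/url/') == '/already/a/url/'  # passed through
assert resolve_url('item-detail', pk=3) == '/items/3/'      # reversed by name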
d9ca3d7113423a84026ad59e1369321baa54d532
|
Add a simple neutron_handler
|
drcontroller/replication/controller/neutron_handler.py
|
drcontroller/replication/controller/neutron_handler.py
|
import logging
def post_handle(message):
pass
def delete_handle(message):
pass
def put_handle(message):
pass
class NeutronHandler(object):
def __init__(self):
self.logger = logging.getLogger("NeutronHandler")
self.logger.info('Init NeutronHandler')
def accept(self, *req, **kwargs):
self.logger = logging.getLogger("NeutronHandler:accept")
self.logger.info("--- Hello Neutron ---")
return ['Hello Neutron']
|
Python
| 0.999996
|
@@ -12,316 +12,291 @@
ing%0A
-%0Adef post_handle(message):%0A pass%0A%0Adef delete_handle(message):%0A pass%0A%0Adef put_handle(mesage):%0A pass%0A%0A%0Aclass NeutronHandler(object):%0A def __init__(self):%0A self.logger = logging.getLogger(%22NeutronHandler%22)%0A self.logger.info('Init NeutronHandler')%0A%0A def accept(self, *req, **kwargs):
+import base_handler%0A%0Aclass NeutronHandler(base_handler.BaseHandler):%0A def __init__(self, set_conf, handle_type):%0A '''%0A set_conf: the configuration file path of keystone authorization%0A handle_type: the handle service type, eg, glance, nova, neutron%0A '''
%0A
@@ -351,19 +351,11 @@
dler
-:accept
%22)%0A
-%0A
@@ -379,62 +379,96 @@
nfo(
-%22--- Hello Neutron ---%22)%0A return %5B'Hello Neutron'%5D
+'Init NeutronHandler')%0A super(NeutronHandler, self).__init__(set_conf, handle_type)%0A
%0A
|
ca8622f5af66ef01c9c185065f2e77fca30bef79
|
Remove unused update method
|
irctk/nick.py
|
irctk/nick.py
|
import re
class Nick(object):
IRC_USERHOST_REGEX = re.compile(r'^(.*)!(.*)@(.*)$')
@classmethod
def parse(cls, client, userhost):
m = cls.IRC_USERHOST_REGEX.match(userhost)
if m:
return cls(client, m.group(1), m.group(2), m.group(3))
return cls(client, host=userhost)
def __init__(self, client, nick='', ident='', host=''):
self.client = client
self.nick = nick
self.ident = ident
self.host = host
def __str__(self):
return self.nick
def __repr__(self):
return '<Nick %s!%s@%s>' % (self.nick, self.ident, self.host)
def __eq__(self, other):
return self.client.irc_equal(str(other), self.nick)
@property
def channels(self):
"""
Returns all the Channels that both the nick and the client has joined.
"""
return [channel for channel in self.client.channels if channel.has_nick(self)]
def update(self):
if self == self.client.nick:
self.client.nick.ident = self.ident
self.client.nick.host = self.host
for channel in self.client.channels:
n = channel.find_nick(self)
if n:
n.ident = self.ident
n.host = self.host
|
Python
| 0.000001
|
@@ -946,335 +946,4 @@
f)%5D%0A
-%0A def update(self):%0A if self == self.client.nick:%0A self.client.nick.ident = self.ident%0A self.client.nick.host = self.host%0A%0A for channel in self.client.channels:%0A n = channel.find_nick(self)%0A if n:%0A n.ident = self.ident%0A n.host = self.host%0A%0A
|
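Aside from the removed method, the classmethod's regex is the standard IRC idiom for splitting a "nick!ident@host" prefix; a bare hostname (a server prefix) simply fails to match. A standalone check:

import re

IRC_USERHOST_REGEX = re.compile(r'^(.*)!(.*)@(.*)$')

m = IRC_USERHOST_REGEX.match('doug!ident@example.com')
assert m.groups() == ('doug', 'ident', 'example.com')
assert IRC_USERHOST_REGEX.match('irc.example.com') is None  # server, not a user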
52c17672d73a9461771c3ec09465d91992160fc5
|
Fix quota init migration
|
src/nodeconductor_saltstack/exchange/migrations/0004_init_quotas.py
|
src/nodeconductor_saltstack/exchange/migrations/0004_init_quotas.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from uuid import uuid4
from django.contrib.contenttypes.models import ContentType
from django.db import models, migrations
GLOBAL_MAILBOX_SIZE_QUOTA = 'global_mailbox_size'
USER_COUNT_QUOTA = 'user_count'
def convert_mailbox_size_to_mb(apps, schema_editor):
Tenant = apps.get_model('exchange', 'Tenant')
for tenant in Tenant.objects.all():
tenant.mailbox_size *= 1024
tenant.save()
def init_quotas(apps, schema_editor):
Quota = apps.get_model('quotas', 'Quota')
Tenant = apps.get_model('exchange', 'Tenant')
tenant_ct = ContentType.objects.get_for_model(Tenant)
for tenant in Tenant.objects.all():
if not Quota.objects.filter(content_type_id=tenant_ct.id, object_id=tenant.id, name=GLOBAL_MAILBOX_SIZE_QUOTA):
Quota.objects.create(
uuid=uuid4(), name=GLOBAL_MAILBOX_SIZE_QUOTA, limit=tenant.max_users*tenant.mailbox_size, usage=0,
content_type_id=tenant_ct.id, object_id=tenant.id)
if not Quota.objects.filter(content_type_id=tenant_ct.id, object_id=tenant.id, name=USER_COUNT_QUOTA):
Quota.objects.create(
uuid=uuid4(), name=USER_COUNT_QUOTA, limit=tenant.max_users, usage=0,
content_type_id=tenant_ct.id, object_id=tenant.id)
class Migration(migrations.Migration):
dependencies = [
('exchange', '0003_rename_tenant_model'),
]
operations = [
migrations.AlterField(
model_name='tenant',
name='mailbox_size',
field=models.PositiveSmallIntegerField(help_text=b'Maximum size of single mailbox, MB'),
preserve_default=True,
),
migrations.RunPython(convert_mailbox_size_to_mb),
migrations.RunPython(init_quotas),
]
|
Python
| 0.000002
|
@@ -354,32 +354,40 @@
el('exchange', '
+Exchange
Tenant')%0A for
@@ -604,16 +604,24 @@
ange', '
+Exchange
Tenant')
@@ -1548,16 +1548,24 @@
l_name='
+exchange
tenant',
|
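The fix renames the historical model lookups to 'ExchangeTenant' (and the AlterField target to 'exchangetenant'): a data migration must ask apps.get_model for the model under the name it carried at that point in migration history, not the name in current code. A minimal RunPython skeleton under the same assumption of an 'exchange' app:

from django.db import migrations

def init_quotas(apps, schema_editor):
    # historical model state as of this migration, not the live model class
    Tenant = apps.get_model('exchange', 'ExchangeTenant')
    for tenant in Tenant.objects.all():
        pass  # create the missing Quota rows here, as the record does

class Migration(migrations.Migration):
    dependencies = [('exchange', '0003_rename_tenant_model')]
    operations = [migrations.RunPython(init_quotas)]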
cd98b91fecd6735254133649ec39c19877aabbc5
|
Update celery settings to avoid a warning about pickle, and disable the now-useless GZIP compression.
|
oneflow/settings/snippets/celery.py
|
oneflow/settings/snippets/celery.py
|
# -*- coding: utf-8 -*-
#
# NOTE: this snippet should come *after* the other celery_*
# because it uses the BROKER_URL that should have been
# defined in these.
#
"""
Copyright 2013 Olivier Cortès <oc@1flow.io>
This file is part of the 1flow project.
1flow is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of
the License, or (at your option) any later version.
1flow is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public
License along with 1flow. If not, see http://www.gnu.org/licenses/
"""
#from datetime import timedelta
import djcelery
djcelery.setup_loader()
from celery.schedules import crontab
from kombu import Exchange, Queue
# Avoid sharing the same celery states file
# when multiple workers run on the same machine.
try:
index = sys.argv.index('--hostname')
except:
CELERYD_STATE_DB = 'celery.states'
else:
# get 'medium' from 'medium.worker-03.1flow.io'
CELERYD_STATE_DB = 'celery.states.{0}'.format(
sys.argv[index + 1].split('.', 1)[0])
del index
CELERYD_PREFETCH_MULTIPLIER = 1
CELERY_DEFAULT_QUEUE = 'medium'
CELERY_QUEUES = (
Queue('high', Exchange('high'), routing_key='high'),
Queue('medium', Exchange('medium'), routing_key='medium'),
Queue('low', Exchange('low'), routing_key='low'),
Queue('fetch', Exchange('fetch'), routing_key='fetch'),
Queue('swarm', Exchange('swarm'), routing_key='swarm'),
Queue('clean', Exchange('clean'), routing_key='clean'),
Queue('background', Exchange('background'), routing_key='background'),
)
BROKER_URL = os.environ.get('BROKER_URL')
# Disabling the heartbeat because workers often seem disabled in flower,
# thanks to http://stackoverflow.com/a/14831904/654755
BROKER_HEARTBEAT = 0
CELERY_RESULT_BACKEND = BROKER_URL
CELERY_RESULT_PERSISTENT = True
# Allow to recover from any unknown crash.
CELERY_ACKS_LATE = True
# Sometimes, Ask asks us to enable this to debug issues.
# BTW, it will save some CPU cycles.
CELERY_DISABLE_RATE_LIMITS = True
# Allow our remote workers to get tasks faster if they have a
# slow internet connection (yes Gurney, I'm thinking of you).
CELERY_MESSAGE_COMPRESSION = 'gzip'
# Avoid long-running and retried tasks being run over and over again.
BROKER_TRANSPORT_OPTIONS = {'visibility_timeout': 86400}
# Half a day is enough
CELERY_TASK_RESULT_EXPIRES = 43200
# The default being 5000, we need more than this.
CELERY_MAX_CACHED_RESULTS = 32768
# NOTE: I don't know if this is compatible with upstart.
CELERYD_POOL_RESTARTS = True
# I use these to debug kombu crashes; we get a more informative message.
#CELERY_TASK_SERIALIZER = 'json'
#CELERY_RESULT_SERIALIZER = 'json'
#CELERY_ALWAYS_EAGER=True
CELERY_TRACK_STARTED = True
CELERY_SEND_TASK_SENT_EVENT = True
# Disabled by default and I like it, because we use Sentry for this.
#CELERY_SEND_TASK_ERROR_EMAILS = False
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
CELERYBEAT_SCHEDULE = {
# 'celery-beat-test': {
# 'task': 'oneflow.base.tasks.celery_beat_test',
# 'schedule': timedelta(seconds=15),
# 'schedule': timedelta(seconds=5),
# 'schedule': crontab(minute='*'),
# },
#
# •••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••• Core tasks
'refresh-all-feeds': {
'task': 'oneflow.core.tasks.refresh_all_feeds',
'schedule': crontab(hour='*', minute='*'),
},
'global-checker-task': {
'task': 'oneflow.core.tasks.global_checker_task',
'schedule': crontab(hour='1', minute='1'),
},
# •••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••• Statistics
    # We update stats regularly to avoid losing data and desynchronization.
# UDP packets are not reliable. But that's the point of it, isn't it?
'synchronize-statsd-gauges': {
'task': 'oneflow.core.stats.synchronize_statsd_gauges',
'schedule': crontab(minute='59'),
'args': (True, ),
},
# •••••••••••••••••••••••••••••••••••••••••••••••••••••••••• Cleaning tasks
'clean-obsolete-redis-keys': {
'task': 'oneflow.core.tasks.clean_obsolete_redis_keys',
'schedule': crontab(hour='2', minute='2'),
},
# ••••••••••••••••••••••••••••••••••••••••••••••••••••• Social auth refresh
'refresh-access-tokens-00': {
'task': 'oneflow.base.tasks.refresh_access_tokens',
'schedule': crontab(hour='*/4', minute='0,48'),
},
'refresh-access-tokens-12': {
'task': 'oneflow.base.tasks.refresh_access_tokens',
'schedule': crontab(hour='3,7,11,15,19,23', minute=12),
},
'refresh-access-tokens-24': {
'task': 'oneflow.base.tasks.refresh_access_tokens',
'schedule': crontab(hour='2,6,10,14,18,22', minute=24),
},
'refresh-access-tokens-36': {
'task': 'oneflow.base.tasks.refresh_access_tokens',
'schedule': crontab(hour='1,5,9,13,17,21', minute=36),
},
}
|
Python
| 0
|
@@ -2540,16 +2540,145 @@
f you).%0A
+#%0A# 20140309: no more remote worker and we have very small messages (only%0A# IDs, no full instance), so stop wasting CPU cycles.%0A#
CELERY_M
@@ -3066,16 +3066,133 @@
= True%0A%0A
+# Since Celery 3.1/3.2, no 'pickle' anymore.%0A# JSON is my prefered option, anyway.%0ACELERY_ACCEPT_CONTENT = %5B'json'%5D%0A%0A
# I use
|
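The two additions read together: gzip compression is dropped because the queue now carries tiny ID-only messages, and CELERY_ACCEPT_CONTENT is pinned to JSON, which silences the Celery 3.1+ pickle deprecation warning. The resulting settings, as a sketch (the explicit serializer lines are an optional extra, not part of the record):

# CELERY_MESSAGE_COMPRESSION = 'gzip'  # removed: ID-only payloads, gzip wastes CPU

CELERY_ACCEPT_CONTENT = ['json']   # refuse pickle, accept only JSON
CELERY_TASK_SERIALIZER = 'json'    # optional: also emit JSON
CELERY_RESULT_SERIALIZER = 'json'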
7d8517ff714ca501500e610d6cc59e69c43ff5bd
|
use zmq API for json for py2/3 compatibility (#15); ENH: provide std. deserialize_data function; REG: use json-serializable commands only
|
online_monitor/receiver/receiver.py
|
online_monitor/receiver/receiver.py
|
from pyqtgraph.Qt import QtCore
import zmq
import logging
from threading import Event
from online_monitor.utils import utils
class DataWorker(QtCore.QObject):
data = QtCore.pyqtSignal(dict)
finished = QtCore.pyqtSignal()
def __init__(self, deserializer):
QtCore.QObject.__init__(self)
self.deserializer = deserializer
self._stop_readout = Event()
self._send_data = None
def connect_zmq(self, frontend_address, socket_type):
self.context = zmq.Context()
self.receiver = self.context.socket(socket_type) # subscriber
self.socket_type = socket_type
# A subscriber has to set to not filter any data
if self.socket_type == zmq.SUB:
self.receiver.setsockopt_string(zmq.SUBSCRIBE, u'')
        # Buffer only 10 messages, then throw data away
self.receiver.set_hwm(10)
self.receiver.connect(frontend_address)
    def receive_data(self):  # pragma: no cover; not covered since qt event loop
''' Infinite loop via QObject.moveToThread(), does not block event loop
'''
while(not self._stop_readout.wait(0.01)): # use wait(), do not block
if self._send_data:
if self.socket_type != zmq.DEALER:
raise RuntimeError('You send data without a bidirectional '
'connection! Define a bidirectional '
'connection.')
self.receiver.send(self._send_data)
self._send_data = None
try:
data_serialized = self.receiver.recv(flags=zmq.NOBLOCK)
data = self.deserializer(data_serialized)
self.data.emit(data)
except zmq.Again:
pass
self.finished.emit()
def shutdown(self):
self._stop_readout.set()
def send_data(self, data): # FIXME: not thread safe
self._send_data = data
class Receiver(QtCore.QObject):
    '''The receiver connects to a converter and visualizes the data according
to the specified data type.
Usage:
'''
def __init__(self, frontend, kind, name='Undefined', loglevel='INFO', **kwarg):
QtCore.QObject.__init__(self)
self.kind = kind
self.frontend_address = frontend
self.name = name # name of the DAQ/device
self.config = kwarg
        # Flag to tell receiver if it's active (viewed in the foreground)
self._active = False
# Standard is unidirectional communication with PUB/SUB pattern
self.socket_type = zmq.SUB
self.frontend_address = self.frontend_address
utils.setup_logging(loglevel)
logging.debug("Initialize %s receiver %s at %s", self.kind, self.name,
self.frontend_address)
self.setup_receiver_device()
self.setup_receiver()
def set_bidirectional_communication(self):
self.socket_type = zmq.DEALER
def setup_receiver_device(self): # start new receiver thread
logging.info("Start %s receiver %s at %s", self.kind, self.name,
self.frontend_address)
self.thread = QtCore.QThread() # no parent
self.worker = DataWorker(self.deserialize_data) # no parent
# move worker instance to new thread
self.worker.moveToThread(self.thread)
# Slot called if the receiver tab widget gets active
def active(self, value):
self._active = value
def start(self):
# Connect to ZMQ publisher
self.worker.connect_zmq(self.frontend_address, self.socket_type)
# Quit thread on worker finished
self.worker.finished.connect(self.thread.quit)
# Activate data handle
self.worker.data.connect(self.handle_data_if_active)
# Start receive data loop when thread starts
self.thread.started.connect(self.worker.receive_data)
# Print on thread finished info
self.thread.finished.connect(self.finished_info)
self.thread.start() # start thread
def shutdown(self):
# Set signal to quit receive loop; can take some time
self.worker.shutdown()
# Tell thread to exit, loop is/should be terminated already
self.thread.exit()
# Delay needed if thread did not exit yet, otherwise message:
# QThread: Destroyed while thread is still running
self.thread.wait(500)
def finished_info(self): # called when thread finished successfully
logging.info("Close %s receiver %s at %s", self.kind, self.name,
self.frontend_address)
def handle_data_if_active(self, data):
        ''' Forwards data to data handling function if receiver is active'''
if self._active:
self.handle_data(data)
def setup_receiver(self):
''' Method can be defined to setup receiver specific parameters
(e.g. bidirectional communication)
'''
pass
def setup_widgets(self, parent, name):
raise NotImplementedError('You have to implement a setup_widgets '
'method!')
def handle_data(self, data):
''' Handle data
Receives a dictionary with data and sets the visualization
        accordingly. It is only called if the receiver is active.
'''
raise NotImplementedError('You have to implement a handle_data '
'method!')
def send_command(self, command):
self.worker.send_data(command)
def deserialize_data(self, data):
''' Has to convert the data do a python dict '''
raise NotImplementedError('You have to implement a deserialize_data '
'method. Look at the examples!')
|
Python
| 0.000001
|
@@ -1496,16 +1496,21 @@
ver.send
+_json
(self._s
@@ -5524,16 +5524,109 @@
mmand):%0A
+ ''' Send command to transceiver%0A%0A Has to be json serializable%0A '''%0A
@@ -5765,140 +5765,79 @@
r
-aise NotImplementedError('You have to implement a deserialize_data '%0A 'method. Look at the examples!'
+eturn zmq.utils.jsonapi.loads(data, object_hook=utils.json_numpy_obj_hook
)%0A
|
e91eb6aaad52010b7441595cc55695e6ee21b360
|
Add support for setting login_service
|
oauthenticator/azuread.py
|
oauthenticator/azuread.py
|
"""
Custom Authenticator to use Azure AD with JupyterHub
"""
import json
import jwt
import os
import urllib
from tornado.auth import OAuth2Mixin
from tornado.log import app_log
from tornado.httpclient import HTTPRequest, AsyncHTTPClient
from jupyterhub.auth import LocalAuthenticator
from traitlets import Unicode, default
from .oauth2 import OAuthLoginHandler, OAuthenticator
def azure_token_url_for(tentant):
return 'https://login.microsoftonline.com/{0}/oauth2/token'.format(tentant)
def azure_authorize_url_for(tentant):
return 'https://login.microsoftonline.com/{0}/oauth2/authorize'.format(
tentant)
class AzureAdMixin(OAuth2Mixin):
tenant_id = os.environ.get('AAD_TENANT_ID', '')
_OAUTH_ACCESS_TOKEN_URL = azure_token_url_for(tenant_id)
_OAUTH_AUTHORIZE_URL = azure_authorize_url_for(tenant_id)
class AzureAdLoginHandler(OAuthLoginHandler, AzureAdMixin):
pass
class AzureAdOAuthenticator(OAuthenticator):
login_service = "Azure AD"
login_handler = AzureAdLoginHandler
tenant_id = Unicode(config=True)
username_claim = Unicode(config=True)
@default('tenant_id')
def _tenant_id_default(self):
return os.environ.get('AAD_TENANT_ID', '')
@default('username_claim')
def _username_claim_default(self):
return 'name'
async def authenticate(self, handler, data=None):
code = handler.get_argument("code")
http_client = AsyncHTTPClient()
params = dict(
client_id=self.client_id,
client_secret=self.client_secret,
grant_type='authorization_code',
code=code,
resource=self.client_id,
redirect_uri=self.get_callback_url(handler))
data = urllib.parse.urlencode(
params, doseq=True, encoding='utf-8', safe='=')
url = azure_token_url_for(self.tenant_id)
headers = {
'Content-Type':
'application/x-www-form-urlencoded; ; charset=UTF-8"'
}
req = HTTPRequest(
url,
method="POST",
headers=headers,
body=data # Body is required for a POST...
)
resp = await http_client.fetch(req)
resp_json = json.loads(resp.body.decode('utf8', 'replace'))
# app_log.info("Response %s", resp_json)
access_token = resp_json['access_token']
id_token = resp_json['id_token']
decoded = jwt.decode(id_token, verify=False)
userdict = {"name": decoded[self.username_claim]}
userdict["auth_state"] = auth_state = {}
auth_state['access_token'] = access_token
# results in a decoded JWT for the user data
auth_state['user'] = decoded
return userdict
class LocalAzureAdOAuthenticator(LocalAuthenticator, AzureAdOAuthenticator):
"""A version that mixes in local system user creation"""
pass
|
Python
| 0
|
@@ -977,18 +977,139 @@
e =
-%22Azure AD%22
+Unicode(%0A%09%09os.environ.get('LOGIN_SERVICE', 'Azure AD'),%0A%09%09config=True,%0A%09%09help=%22%22%22Azure AD domain name string, e.g. My College%22%22%22%0A%09)
%0A%0A
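With login_service now a configurable trait, a deployment can relabel the login button from jupyterhub_config.py; the value below is the diff's own example:
c.AzureAdOAuthenticator.login_service = 'My College'   # or export LOGIN_SERVICE in the env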
|
586dd06a429e5017d798ca55c20f1d095e5171d4
|
fix scaling/stacking defaulting to podi footprint
|
odi_scalestack_process.py
|
odi_scalestack_process.py
|
#!/usr/bin/env python
import sys, os, glob, string
import numpy as np
import astropy as ast
import matplotlib.pyplot as plt
from pyraf import iraf
from tqdm import tqdm
import odi_config as odi
import glob
import shutil
import pandas as pd
try:
object_str, filters, instrument, images, illcor_flag, skyflat_src, wcs_flag, reproject_flag, scale_flag, scale_ref, stack_flag, align_flag, gaia_flag, cluster_flag, ra_center, dec_center, min_radius = odi.cfgparse('config.yaml')
except IOError:
print 'config.yaml does not exist, quitting...'
exit()
source = 'sdss'
inst = odi.instrument(images[filters[0]][0])
imgnum,fwhm_d,zp_med, zp_std, bg_mean, bg_median, bg_std = np.loadtxt('derived_props.txt',usecols=(0,3,4,5,6,7,8),unpack=True)
ota_d, filt_d = np.loadtxt('derived_props.txt',usecols=(1,2),unpack=True,dtype=str)
id_d = zip(imgnum,ota_d,filt_d)
fwhm_dict = dict(zip(id_d,fwhm_d))
run_detect = False
align_these = []
for filter in filters:
# Scaling with all sources
images_ = images[filter]
print 'Scaling images for filter ',filter
for img in images_:
# img = images_[dith]
dither = img.dither()+'_'
print 'Gathering sources for {:s}'.format(img.f)
for key in tqdm(odi.OTA_dictionary):
ota = odi.OTA_dictionary[key]
# if not os.path.isfile(odi.sourcepath+'source_'+ota+'.'+img.base()+'.csv'):
if not os.path.isfile(odi.sourcepath+'source_'+ota+'.'+img.base()+'.totphot'):
if run_detect == True:
odi.source_find(img,ota,inst)
gaps = odi.get_gaps_rep(img, ota)
odi.source_xy(img,ota,gaps,filter,inst)
fwhm = odi.getfwhm_source(img,ota)
#fwhm = fwhm_dict[img_id]
tqdm.write('GWFM in {:s}: {:5.3f}'.format(ota, fwhm))
else:
fwhm_file = odi.coordspath+img.nofits()+'.'+ota+'.fwhm.log'
gfwhm = np.loadtxt(fwhm_file, usecols=(10,), unpack=True)
fwhm = np.median(gfwhm[np.where(gfwhm < 900.0)])
tqdm.write('GWFM in {:s}: {:5.3f}'.format(ota, fwhm))
odi.phot_sources(img, ota, fwhm, run_detect = run_detect)
odi.phot_combine(img, ota, run_detect = run_detect)
if not os.path.isfile(odi.sourcepath+dither+filter+'.allsource'):
dither_total = odi.sourcepath+dither+filter+'.allsource'
cat_command = 'cat sources/*SCI.'+dither+'*'+filter+'*.totphot' + '>' + dither_total
os.system(cat_command)
# choose the initial reference image (lowest airmass to start, unless we've specified one)
# print images_.values()
if filter not in scale_ref.keys():
refimg_ = odi.find_ref_image(images_)
ref_img = images_[refimg_]
else:
ref_img = scale_ref[filter]
# calculate scaling factors
scales_ = {}
stds_ = {}
n_ = {}
iters = 1
for img in images_:
# img = images_[dith]
scale,std,n = odi.source_scale(img,ref_img,filter)
scales_[img] = scale
stds_[img] = std
n_[img] = n
# recalculate scaling factors IF the highest scaling factor is not the initial reference image
# BUT ONLY IF we haven't specifically selected a reference image
# print the scaling factors out to a file for review
# iterate
# print np.array(scales_.values()) > 1.002
if filter not in scale_ref.keys():
while (np.array(scales_.values()) > 1.002).any() and iters < 6:
iters += 1
ims = scales_.keys()
scls = scales_.values()
new_ref = ims[np.argmax(scls)]
if new_ref != ref_img:
ref_img = new_ref
for img in images_:
# img = images_[dith]
scale,std,n = odi.source_scale(img,ref_img,filter)
scales_[img] = scale
stds_[img] = std
n_[img] = n
with open(filter+'_scales.txt','w+') as sclfile:
print >> sclfile, '# image'+' '*(len(images_[0].stem())-4)+'scale std n (iters = '+repr(iters)+')'
for img in images_:
# img = images_[dith]
print >> sclfile, img.stem(), '{0:7.5f} {1:7.5f} {2:5d}'.format(scales_[img], stds_[img], n_[img])
# actually apply the scaling factors to the images
if scale_flag:
for img in images_:
# img = images_[dith]
for key in odi.OTA_dictionary:
ota = odi.OTA_dictionary[key]
if not os.path.isfile(odi.scaledpath+'scaled_'+ota+'.'+img.stem()):
# gaps = odi.get_gaps_rep(img, ota)
odi.scale_ota(img, ota, scales_[img])
odi.force_update_bpm(img, ota)
else:
print 'scaling not performed, set flag in config.yaml'
# finally stack the images
if stack_flag:
stacked_img = odi.stack_images(object_str, ref_img)
align_these.append(odi.StackedImage(stacked_img))
else:
print 'stacking not performed, set flag in config.yaml'
# if the option is turned on, align the images with pixel shifts
if align_flag:
odi.imalign(align_these)
|
Python
| 0
|
@@ -596,28 +596,17 @@
nt(i
-mages%5Bfilters%5B0%5D%5D%5B0%5D
+nstrument
)%0A%0Ai
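The scaling loop above re-picks its reference image whenever a factor exceeds 1.002; a toy version of that convergence with made-up numbers, not ODI data:
scales = {'img_a': 1.004, 'img_b': 0.997, 'img_c': 1.000}
iters = 1
while any(s > 1.002 for s in scales.values()) and iters < 6:
    ref = max(scales, key=scales.get)                   # brightest frame becomes the reference
    norm = scales[ref]
    scales = {k: v / norm for k, v in scales.items()}   # rescale against the new reference
    iters += 1
print(scales, iters)                                    # all factors now <= 1.0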
|
ce01340348dd99faf9fc6c06b3c96e23f7714252
|
FIX __str__ of Event for python 3.5
|
gcsa/event.py
|
gcsa/event.py
|
from tzlocal import get_localzone
from datetime import datetime, date, timedelta
from .attachment import Attachment
from .reminder import PopupReminder, EmailReminder
from util.date_time_util import insure_localisation
class Visibility:
""" Possible values of the event visibility.
DEFAULT - Uses the default visibility for events on the calendar. This is the default value.
PUBLIC - The event is public and event details are visible to all readers of the calendar.
PRIVATE - The event is private and only event attendees may view event details.
"""
DEFAULT = "default"
PUBLIC = "public"
PRIVATE = "private"
class Event:
def __init__(self,
summary,
start,
end=None,
timezone=str(get_localzone()),
event_id=None,
description=None,
location=None,
recurrence=None,
color=None,
visibility=Visibility.DEFAULT,
gadget=None,
attachments=None,
reminders=None,
default_reminders=False,
minutes_before_popup_reminder=None,
minutes_before_email_reminder=None,
**other):
"""
:param summary:
title of the event.
:param start:
starting date/datetime.
:param end:
ending date/datetime. If 'end' is not specified, event is considered as a 1-day or 1-hour event
if 'start' is date or datetime respectively.
:param timezone:
timezone formatted as an IANA Time Zone Database name, e.g. "Europe/Zurich". By default,
                the computer's configured local timezone (if any) is used.
:param event_id:
opaque identifier of the event. By default is generated by the server. You can specify id as a
5-1024 long string of characters used in base32hex ([a-vA-V0-9]). The ID must be unique per
calendar.
:param description:
description of the event.
:param location:
geographic location of the event as free-form text.
:param recurrence:
RRULE/RDATE/EXRULE/EXDATE string or list of such strings. TODO link to code.
:param color:
color id referring to an entry from colors endpoint (list_event_colors)
:param visibility:
visibility of the event. Default is default visibility for events on the calendar.
:param gadget:
a gadget that extends the event. TODO link to code.
:param attachments:
attachment or list of attachments. TODO link to code.
:param reminders:
reminder or list of reminder objects. TODO link to code.
:param default_reminders:
whether the default reminders of the calendar apply to the event.
:param minutes_before_popup_reminder:
minutes before popup reminder or None if reminder is not needed.
:param minutes_before_email_reminder:
minutes before email reminder or None if reminder is not needed.
:param other:
Other fields that should be included in request json. Will be included as they are.
"""
def assure_list(obj):
return obj if isinstance(obj, list) else obj or []
self.timezone = timezone
self.start = start
if end:
self.end = end
elif isinstance(start, datetime):
self.end = start + timedelta(hours=1)
elif isinstance(start, date):
self.end = start + timedelta(days=1)
if isinstance(self.start, datetime) and isinstance(self.end, datetime):
self.start = insure_localisation(self.start, timezone)
self.end = insure_localisation(self.end, timezone)
elif isinstance(self.start, datetime) or isinstance(self.end, datetime):
raise TypeError('Start and end must either both be date or both be datetime.')
reminders = assure_list(reminders)
if len(reminders) > 5:
raise ValueError('The maximum number of override reminders is 5.')
if default_reminders and reminders:
raise ValueError('Cannot specify both default reminders and overrides at the same time.')
self.event_id = event_id and event_id.lower()
self.summary = summary
self.description = description
self.location = location
self.recurrence = assure_list(recurrence)
self.color_id = color
self.visibility = visibility
self.gadget = gadget
self.attachments = assure_list(attachments)
self.reminders = reminders
self.default_reminders = default_reminders
self.other = other
if minutes_before_popup_reminder:
self.add_popup_reminder(minutes_before_popup_reminder)
if minutes_before_email_reminder:
self.add_email_reminder(minutes_before_email_reminder)
def get_id(self):
return self.event_id
def add_attachment(self, file_url, title, mime_type):
self.attachments.append(Attachment(title=title, file_url=file_url, mime_type=mime_type))
def add_email_reminder(self, minutes_before_start=60):
self.add_reminder(EmailReminder(minutes_before_start))
def add_popup_reminder(self, minutes_before_start=30):
self.add_reminder(PopupReminder(minutes_before_start))
def add_reminder(self, reminder):
if len(self.reminders) > 4:
raise ValueError('The maximum number of override reminders is 5.')
self.reminders.append(reminder)
def __str__(self):
return f'{self.start} - {self.summary}'
|
Python
| 0.999753
|
@@ -5836,11 +5836,25 @@
urn
-f
'%7B
+%7D - %7B%7D'.format(
self
@@ -5859,21 +5859,18 @@
lf.start
-%7D - %7B
+,
self.sum
@@ -5873,11 +5873,10 @@
.summary
-%7D'
+)
%0A
|
edebc05d3df68faadc6c0547de7cc06f1469915e
|
make click example do stuff
|
fosscon2015/cli_click.py
|
fosscon2015/cli_click.py
|
import click
@click.command()
def cli():
click.echo("I'm a click CLI.")
if __name__ == '__main__':
cli()
|
Python
| 0.000001
|
@@ -1,80 +1,791 @@
-import click%0A%0A@click.command()%0Adef cli():%0A click.echo(%22I'm a click CLI.%22)
+#!/usr/bin/env python%0A%0Aimport click%0Aimport json%0A%0Atry:%0A from collections import Counter%0Aexcept ImportError:%0A # backport_collections needed for python 2.6 compatibility%0A from backport_collections import Counter%0A%0A%0A@click.command()%0A@click.argument('infile', type=click.File('r'), default='-')%0A@click.argument('outfile', type=click.File('w'), default='-')%0A@click.option('--verbose', '-v')%0Adef cli(infile, outfile, verbose):%0A %22%22%22 Count the occurances of characters in INFILE and save results in OUTFILE. %22%22%22%0A%0A click.echo(%22Hi!%22)%0A click.secho(%22infile: %7B0%7D%22.format(infile))%0A click.secho(%22outfile: %7B0%7D%22.format(outfile))%0A text = infile.read()%0A char_counts = Counter(text)%0A click.secho(json.dumps(dict(char_counts.most_common())), file=outfile,%0A fg='green')%0A
%0A%0Aif
|
514c6c145e6e2f2c327fb89cfe780eb196508f79
|
change absolute site url
|
publishconf.py
|
publishconf.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = 'http://3-strand-code.github.io/3sc-blog/'
RELATIVE_URLS = False
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = True
# Following items are often useful when publishing
DISQUS_SITENAME = "3strandcode"
#GOOGLE_ANALYTICS = ""
|
Python
| 0.000001
|
@@ -279,40 +279,28 @@
p://
+blog.
3
--
strand
--
code.
-github.io/3sc-blog
+com
/'%0AR
|
ca863134d20cda67c6e7f4abf1df595d5d549952
|
Fix agent changelog command (#3233)
|
datadog_checks_dev/datadog_checks/dev/tooling/commands/agent/common.py
|
datadog_checks_dev/datadog_checks/dev/tooling/commands/agent/common.py
|
# (C) Datadog, Inc. 2019
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from ...git import git_tag_list
def get_agent_tags(since, to):
"""
Return a list of tags from integrations-core representing an Agent release,
sorted by more recent first.
"""
agent_tags = git_tag_list(r'^\d+\.\d+\.\d+$')
# default value for `to` is the latest tag
if not to:
to = agent_tags[-1]
# filter out versions according to the interval [since, to]
agent_tags = [t for t in agent_tags if since <= t <= to]
# reverse so we have descendant order
return agent_tags[::-1]
|
Python
| 0
|
@@ -100,16 +100,55 @@
ICENSE)%0A
+from semver import parse_version_info%0A%0A
from ...
@@ -349,16 +349,54 @@
t_tags =
+ sorted(parse_version_info(t) for t in
git_tag
@@ -420,16 +420,17 @@
%5C.%5Cd+$')
+)
%0A%0A #
@@ -481,43 +481,124 @@
if
-not to:%0A to = agent_tags%5B-1%5D
+to:%0A to = parse_version_info(to)%0A else:%0A to = agent_tags%5B-1%5D%0A%0A since = parse_version_info(since)
%0A%0A
@@ -774,16 +774,42 @@
return
+%5Bstr(t) for t in reversed(
agent_ta
@@ -814,11 +814,7 @@
tags
-%5B::-1
+)
%5D%0A
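The change replaces lexical tag sorting with semantic sorting, which matters once a version component reaches two digits; a standalone check against the semver 2.x API the diff imports:
from semver import parse_version_info
tags = ['6.2.1', '6.10.0', '6.9.0']
assert sorted(tags)[-1] == '6.9.0'                      # string sort picks the wrong latest
ordered = sorted(parse_version_info(t) for t in tags)
assert str(list(reversed(ordered))[0]) == '6.10.0'      # semver sort: newest first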
|
2988d50cdef4a2c617f20817911826f2f7863f0e
|
Fix string comparison in the fedimg proc.
|
fedmsg_meta_fedora_infrastructure/fedimg.py
|
fedmsg_meta_fedora_infrastructure/fedimg.py
|
# This file is part of fedmsg.
# Copyright (C) 2014 Red Hat, Inc.
#
# fedmsg is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# fedmsg is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with fedmsg; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Authors: David Gay <oddshocks@riseup.net>
from fedmsg_meta_fedora_infrastructure import BaseProcessor
class FedimgProcessor(BaseProcessor):
__name__ = "fedimg"
__description__ = "The Fedora cloud image service"
__link__ = "https://github.com/oddshocks/fedimg"
# TODO: Create an icon and set its URL to __icon__
__docs__ = "https://fedoraproject.org/wiki/Features/" + \
"FirstClassCloudImages/KojiPlan"
__obj__ = "New cloud image upload"
def subtitle(self, msg, **config):
if 'image.upload' in msg['topic']:
if msg['msg']['status'] is "started":
name = msg['msg']['image_name']
dest = msg['msg']['destination']
tmpl = self._('Image {image_name} started uploading to {dest}')
return tmpl.format(image_name=name, dest=dest)
elif msg['msg']['status'] is "completed":
name = msg['msg']['image_name']
dest = msg['msg']['destination']
                tmpl = self._('Image {image_name} finished uploading to {dest}')
return tmpl.format(image_name=name, dest=dest)
elif msg['msg']['status'] is "failed":
name = msg['msg']['image_name']
dest = msg['msg']['destination']
tmpl = self._('Image {image_name} failed to upload to {dest}')
return tmpl.format(image_name=name, dest=dest)
if 'image.test' in msg['topic']:
if msg['msg']['status'] is "started":
name = msg['msg']['image_name']
dest = msg['msg']['destination']
tmpl = self._('Image {image_name} started testing on {dest}')
return tmpl.format(image_name=name, dest=dest)
elif msg['msg']['status'] is "completed":
name = msg['msg']['image_name']
dest = msg['msg']['destination']
tmpl = self._('Image {image_name} finished testing on {dest}')
return tmpl.format(image_name=name, dest=dest)
elif msg['msg']['status'] is "failed":
name = msg['msg']['image_name']
dest = msg['msg']['destination']
tmpl = self._('Image {image_name} failed testing on {dest}')
return tmpl.format(image_name=name, dest=dest)
|
Python
| 0.000266
|
@@ -1363,34 +1363,34 @@
msg'%5D%5B'status'%5D
-is
+==
%22started%22:%0A
@@ -1655,34 +1655,34 @@
msg'%5D%5B'status'%5D
-is
+==
%22completed%22:%0A
@@ -1953,34 +1953,34 @@
msg'%5D%5B'status'%5D
-is
+==
%22failed%22:%0A
@@ -2290,18 +2290,18 @@
tatus'%5D
-is
+==
%22starte
@@ -2580,18 +2580,18 @@
tatus'%5D
-is
+==
%22comple
@@ -2873,18 +2873,18 @@
tatus'%5D
-is
+==
%22failed
|
7b585baab70e85e4d28f8827a11c0be9c6cc2938
|
fix travis
|
_unittests/ut_cli/test_pymy_install_cli_tool.py
|
_unittests/ut_cli/test_pymy_install_cli_tool.py
|
# coding: latin-1
"""
@brief test log(time=1s)
"""
import sys
import os
import unittest
try:
import src
except ImportError:
path = os.path.normpath(
os.path.abspath(
os.path.join(
os.path.split(__file__)[0],
"..",
"..")))
if path not in sys.path:
sys.path.append(path)
import src
try:
import pyquickhelper as skip_
except ImportError:
path = os.path.normpath(
os.path.abspath(
os.path.join(
os.path.split(__file__)[0],
"..",
"..",
"..",
"pyquickhelper",
"src")))
if path not in sys.path:
sys.path.append(path)
if "PYQUICKHELPER" in os.environ and len(os.environ["PYQUICKHELPER"]) > 0:
sys.path.append(os.environ["PYQUICKHELPER"])
import pyquickhelper as skip_
from src.pymyinstall.installhelper.install_cmd_helper import run_cmd
from pyquickhelper.loghelper import fLOG
from pyquickhelper.pycode import get_temp_folder
class TestPyMyInstallCliTool(unittest.TestCase):
def test_install_tool(self):
fLOG(
__file__,
self._testMethodName,
OutputPrint=__name__ == "__main__")
temp = get_temp_folder(__file__, "temp_install_tool")
this = os.path.abspath(os.path.dirname(__file__))
script = os.path.normpath(os.path.join(
this, "..", "..", "src", "pymyinstall", "cli", "pymy_install.py"))
cmd = "{0} {1} {2} --force --folder={3}".format(
sys.executable, script, "graphviz --task=tool --source=zip", temp)
out, err = run_cmd(cmd, wait=True, do_not_log=True)
fLOG("----", cmd)
fLOG(out.replace("\r", "").replace("\n\n", "\n"))
fLOG("-----")
fLOG(err.replace("\r", "").replace("\n\n", "\n"))
content = os.listdir(temp)
assert content
if __name__ == "__main__":
unittest.main()
|
Python
| 0.000002
|
@@ -86,16 +86,32 @@
unittest
+%0Aimport warnings
%0A%0Atry:%0A
@@ -1081,16 +1081,39 @@
p_folder
+, is_travis_or_appveyor
%0A%0A%0Aclass
@@ -1963,22 +1963,209 @@
-assert content
+if not content:%0A if is_travis_or_appveyor():%0A warnings.warn(%22content is empty for: %22 + temp)%0A else:%0A raise Exception(%22content is empty for: %22 + temp)
%0A%0A%0Ai
|
07e12dd0942329aadc8fb3ed47b6f088779800b9
|
fix logcollector
|
src/bots/outputs/logcollector/logcollector.py
|
src/bots/outputs/logcollector/logcollector.py
|
import sys
import time
import socket
from lib.bot import *
from lib.utils import *
from lib.event import *
try:
import simplejson as json
except ImportError:
import json
class LogCollectorBot(Bot):
def process(self):
event = self.receive_message()
if event:
data = ''
for key, value in event.items():
data += key.replace(' ','_') + '=' + json.dumps(value) + ' '
data += "\n"
self.send_data(data)
self.acknowledge_message()
def connect(self):
address = (self.parameters.ip, int(self.parameters.port))
self.con = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
while True:
try:
self.con.connect(address)
break
except socket.error, e:
self.logger.error(e.args[1] + ". Retrying in 10 seconds.")
time.sleep(10)
self.logger.info("Connected successfully to %s:%i", address[0], address[1])
def send_data(self, data):
while True:
try:
self.con.send(unicode(data).encode("utf-8"))
self.con.sendall("")
break
except socket.error, e:
self.logger.error(e.args[1] + ". Reconnecting..")
self.con.close()
self.connect()
except AttributeError:
self.connect()
if __name__ == "__main__":
bot = LogCollectorBot(sys.argv[1])
bot.start()
|
Python
| 0.000002
|
@@ -105,80 +105,8 @@
*%0A%0A
-try:%0A import simplejson as json%0Aexcept ImportError:%0A import json%0A%0A
clas
@@ -346,33 +346,23 @@
+ '=
+%22
' +
-json.dumps(
value
-)
+ '
+%22
'%0A
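After the change each event is flattened to quoted key=value pairs instead of JSON-encoded values; the formatting in isolation, with sample data:
event = {'source ip': '192.0.2.1', 'feed': 'shadowserver'}   # sample values
line = ''.join('{}="{}" '.format(k.replace(' ', '_'), v) for k, v in event.items()) + '\n'
print(line)   # source_ip="192.0.2.1" feed="shadowserver"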
|
2de1effbf33ea3aae0bbc36a4c89fad432975e9e
|
Fix tests #3
|
tests/web_cache_test.py
|
tests/web_cache_test.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import collections
import logging
import os
import pickle
import random
import string
import sys
import time
import unittest
import mkstemp_ctx
import google_speech.web_cache as web_cache
web_cache.DISABLE_PERSISTENT_CACHING = True
INFINITY = sys.maxsize
def get_random_string(length, chars=string.ascii_letters + string.digits):
return "".join(random.choice(chars) for _ in range(length))
class TestWebCache(unittest.TestCase):
def test_getSetDelete(self):
""" Get/set/delete cache items using all cache parameter combinations. """
for cache_class in (web_cache.WebCache, web_cache.ThreadedWebCache):
for compression in (None,) + tuple(web_cache.Compression):
for compression_level in range(1, 9):
for caching_strategy in web_cache.CachingStrategy:
for expiration in (None, 0, INFINITY):
for sql_crash_safe in (True, False):
table_name = get_random_string(16, string.ascii_letters)
with mkstemp_ctx.mkstemp(suffix=".sqlite") as cache_filepath:
# init cache
cache = cache_class(table_name,
caching_strategy=caching_strategy,
expiration=expiration,
db_filepath=cache_filepath,
compression=compression,
compression_level=compression_level,
safe_mode=sql_crash_safe)
already_used_keys = set()
item_count = 0
for req_type in ("get", "post"):
for item_count in range(item_count + 1, item_count + 4):
while True:
# generate cache key
key = get_random_string(16)
if req_type == "post":
key = key, collections.OrderedDict(((k, v) for k, v in zip((get_random_string(8) for _ in range(4)),
(get_random_string(16) for _ in range(4)))))
# ensure key is unique for this cache
bin_key = pickle.dumps(key)
if bin_key not in already_used_keys:
already_used_keys.add(bin_key)
break
# generate cache data
data = os.urandom(2 ** 13)
# check cache size
self.assertEqual(len(cache), item_count - 1)
# check key is not in cache
self.assertNotIn(key, cache)
with self.assertRaises(KeyError):
_ = cache[key]
with self.assertRaises(KeyError):
del cache[key]
# add data to cache
cache[key] = data
# check key is in cache
self.assertIn(key, cache)
self.assertEqual(cache[key], data)
# check cache size
self.assertEqual(len(cache), item_count)
# delete cache item
del cache[key]
# check it is not in cache anymore
self.assertNotIn(key, cache)
with self.assertRaises(KeyError):
_ = cache[key]
with self.assertRaises(KeyError):
del cache[key]
# check cache size
self.assertEqual(len(cache), item_count - 1)
# check other keys are still here
for old_key in map(pickle.loads, already_used_keys):
if old_key != key:
self.assertIn(old_key, cache)
# add cache item again
cache[key] = data
def test_getCacheHitStats(self):
""" Get cache stats using all cache parameter combinations. """
for cache_class in (web_cache.WebCache, web_cache.ThreadedWebCache):
for compression in (None,) + tuple(web_cache.Compression):
for compression_level in range(1, 9):
for caching_strategy in web_cache.CachingStrategy:
for expiration in (None, 0, INFINITY):
for sql_crash_safe in (True, False):
table_name = get_random_string(16, string.ascii_letters)
with mkstemp_ctx.mkstemp(suffix=".sqlite") as cache_filepath:
# init cache
cache = cache_class(table_name,
caching_strategy=caching_strategy,
expiration=expiration,
db_filepath=cache_filepath,
compression=compression,
compression_level=compression_level,
safe_mode=sql_crash_safe)
i = 0
for req_type in ("get", "post"):
for i in range(i + 1, 5):
# generate item
key = "%s_%u" % (req_type, i)
if req_type == "post":
key = key, collections.OrderedDict(((k, v) for k, v in zip((get_random_string(4) for _ in range(2)),
(get_random_string(8) for _ in range(2)))))
data = os.urandom(2 ** 13)
# add item
cache[key] = data
# check cache hit stats
self.assertEqual(cache.getCacheHitStats(), (i - 1, i - 1))
self.assertIn(key, cache)
self.assertEqual(cache.getCacheHitStats(), (i, i - 1))
self.assertNotIn("(o_o)", cache)
self.assertEqual(cache.getCacheHitStats(), (i, i))
def test_purge(self):
""" Purge obsolete cache entries. """
for cache_class in (web_cache.WebCache, web_cache.ThreadedWebCache):
for caching_strategy in web_cache.CachingStrategy:
for expiration in (None, 2, INFINITY):
table_name = get_random_string(16, string.ascii_letters)
with mkstemp_ctx.mkstemp(suffix=".sqlite") as cache_filepath:
# init cache
cache = cache_class(table_name,
caching_strategy=caching_strategy,
expiration=expiration,
db_filepath=cache_filepath)
# add items
for req_type in ("get", "post"):
for i in range(5):
key = "%s_%u" % (req_type, i)
if req_type == "post":
key = key, collections.OrderedDict(((k, v) for k, v in zip((get_random_string(4) for _ in range(2)),
(get_random_string(8) for _ in range(2)))))
data = os.urandom(2 ** 13)
cache[key] = data
# purge
purged_count = cache.purge()
if expiration and (expiration != INFINITY):
# before expiration, nothing should have been purged
time.sleep(1)
self.assertEqual(purged_count, 0)
self.assertEqual(len(cache), 10)
# wait for expiration
time.sleep(expiration)
# after expiration, all should have been purged
purged_count = cache.purge()
self.assertEqual(purged_count, 10)
self.assertEqual(len(cache), 0)
else:
# nothing should have been purged
self.assertEqual(purged_count, 0)
self.assertEqual(len(cache), 10)
if __name__ == "__main__":
# disable logging
logging.basicConfig(level=logging.CRITICAL + 1)
# run tests
unittest.main()
|
Python
| 0.000001
|
@@ -167,16 +167,23 @@
ittest%0A%0A
+from .
import m
|
cc1d72d68fb46cccdf22e08d416a49b18e4a39b2
|
Disable cache during CLI tests
|
tests/whack_cli_test.py
|
tests/whack_cli_test.py
|
import os
import subprocess
import contextlib
from nose.tools import istest, assert_equal
from whack import cli
from whack.sources import SourceTarball
from . import whack_test
@istest
def params_are_passed_to_install_command_as_dict():
argv = [
"whack", "install", "hello=1", "apps/hello",
"-p", "version=1.2.4", "-p", "pcre_version=8.32"
]
expected_params = {"version": "1.2.4", "pcre_version": "8.32"}
_test_install_arg_parse(argv, params=expected_params)
@istest
def param_values_can_contain_equals_sign():
argv = [
"whack", "install", "hello=1", "apps/hello",
"-p", "version_range===1.2.4"
]
expected_params = {"version_range": "==1.2.4"}
_test_install_arg_parse(argv, params=expected_params)
@istest
def param_without_equal_sign_has_value_of_empty_string():
argv = [
"whack", "install", "hello=1", "apps/hello",
"-p", "verbose"
]
expected_params = {"verbose": ""}
_test_install_arg_parse(argv, params=expected_params)
def _test_install_arg_parse(argv, **expected_kwargs):
args = cli.parse_args(argv)
for key, value in expected_kwargs.iteritems():
assert_equal(value, getattr(args, key))
class CliOperations(object):
def install(self, package_name, install_dir, params):
self._command("install", package_name, install_dir, params)
def build(self, package_name, target_dir, params):
self._command("build", package_name, target_dir, params)
def deploy(self, package_dir, target_dir=None):
if target_dir is None:
self._whack("deploy", package_dir, "--in-place")
else:
self._whack("deploy", package_dir, target_dir)
def create_source_tarball(self, source_dir, tarball_dir):
output = self._whack(
"create-source-tarball",
source_dir, tarball_dir,
)
return SourceTarball(output.strip())
def _command(self, command_name, package_name, target_dir, params):
params_args = [
"-p{0}={1}".format(key, value)
for key, value in params.iteritems()
]
self._whack(command_name, package_name, target_dir, *params_args)
def _whack(self, *args):
return subprocess.check_output(["whack"] + list(args))
def _run_cli_operations_test(test_func):
ops = CliOperations()
test_func(ops)
WhackCliOperationsTest = whack_test.create(
"WhackCliOperationsTest",
_run_cli_operations_test,
)
@contextlib.contextmanager
def _updated_env(env):
original_env = os.environ.copy()
for key, value in env.iteritems():
os.environ[key] = value
yield
for key in env:
if key in original_env:
os.environ[key] = original_env[value]
else:
del os.environ[key]
|
Python
| 0.000001
|
@@ -2334,16 +2334,33 @@
st(args)
+ + %5B%22--no-cache%22%5D
)%0A
|
e383d309483af6c8f33494b80d766d471def9773
|
Update test tolerance
|
tests/chainer_tests/functions_tests/array_tests/test_scatter_add.py
|
tests/chainer_tests/functions_tests/array_tests/test_scatter_add.py
|
import unittest
import numpy
import chainer
from chainer import cuda
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
from chainer.utils import type_check
@testing.parameterize(
{'slices': (0, slice(0, 1), numpy.array(-1)), 'b_data': numpy.array([1])},
{'slices': (slice(None), 0, [0, 2]),
'b_data': numpy.random.uniform(size=(4, 2))},
{'slices': ([1, 0], [0, 0], [2, 0]),
'b_data': numpy.random.uniform(size=(2,))},
{'slices': 1, 'b_data': numpy.random.uniform(size=(2, 3))},
{'slices': numpy.array([False, True, False, True]),
'b_data': numpy.random.uniform(size=(2, 2, 3))},
{'slices': [], 'b_data': numpy.empty(shape=(0, 2, 3))},
)
class TestScatterAdd(unittest.TestCase):
def setUp(self):
self.shape = (4, 2, 3)
self.a_data = numpy.random.uniform(
-1, 1, self.shape).astype(numpy.float32)
self.a_data_original = self.a_data.copy()
self.gy_data = numpy.random.uniform(
-1, 1, self.shape).astype(numpy.float32)
self.b_data = self.b_data.astype(numpy.float32)
self.gga_data = numpy.random.uniform(
-1, 1, self.a_data.shape).astype(numpy.float32)
self.ggb_data = numpy.random.uniform(
-1, 1, self.b_data.shape).astype(numpy.float32)
def check_forward(self, a_data, b_data):
a = chainer.Variable(a_data)
b = chainer.Variable(b_data)
y = functions.scatter_add(a, self.slices, b)
self.assertEqual(y.data.dtype, numpy.float32)
# Test to make sure that the input values are not changed
numpy.testing.assert_equal(cuda.to_cpu(a.data), self.a_data_original)
a_data_copy = cuda.to_cpu(a_data).copy()
numpy.add.at(a_data_copy, self.slices, cuda.to_cpu(b_data))
numpy.testing.assert_equal(a_data_copy, cuda.to_cpu(y.data))
def test_forward_cpu(self):
self.check_forward(self.a_data, self.b_data)
@attr.gpu
def test_forward_gpu(self):
self.check_forward(cuda.to_gpu(self.a_data), cuda.to_gpu(self.b_data))
def check_backward(self, a_data, b_data, y_grad):
def f(a, b):
return functions.scatter_add(a, self.slices, b)
gradient_check.check_backward(
f, (a_data, b_data), y_grad, dtype='f', atol=1e-3, rtol=1e-3)
def test_backward_cpu(self):
self.check_backward(self.a_data, self.b_data, self.gy_data)
@attr.gpu
def test_backward_gpu(self):
self.check_backward(cuda.to_gpu(self.a_data), cuda.to_gpu(self.b_data),
cuda.to_gpu(self.gy_data))
def check_double_backward(self, a_data, b_data, y_grad, a_grad_grad,
b_grad_grad):
def f(a, b):
y = functions.scatter_add(a, self.slices, b)
return y * y
gradient_check.check_double_backward(
f, (a_data, b_data), y_grad, (a_grad_grad, b_grad_grad), rtol=1e-3)
def test_double_backward_cpu(self):
self.check_double_backward(self.a_data, self.b_data, self.gy_data,
self.gga_data, self.ggb_data)
@attr.gpu
def test_double_backward_gpu(self):
self.check_double_backward(cuda.to_gpu(self.a_data),
cuda.to_gpu(self.b_data),
cuda.to_gpu(self.gy_data),
cuda.to_gpu(self.gga_data),
cuda.to_gpu(self.ggb_data))
class TestInvalidScatterAdd(unittest.TestCase):
def setUp(self):
self.default_debug = chainer.is_debug()
chainer.set_debug(True)
self.a_data = numpy.random.uniform(-1, 1, (4, 3, 2))
self.b_data = numpy.random.uniform(-1, 1, (2, 2))
def tearDown(self):
chainer.set_debug(self.default_debug)
def test_multiple_ellipsis(self):
with self.assertRaises(ValueError):
functions.scatter_add(
self.a_data, (Ellipsis, Ellipsis), self.b_data)
def test_too_many_indices(self):
with self.assertRaises(type_check.InvalidType):
functions.scatter_add(self.a_data, (0, 0, 0, 0), self.b_data)
def test_requires_broadcasting(self):
with self.assertRaises(ValueError):
functions.scatter_add(self.a_data, slice(0, 2), self.b_data)
testing.run_module(__name__, __file__)
|
Python
| 0
|
@@ -1355,16 +1355,97 @@
float32)
+%0A self.check_backward_options = %7B'atol': 5e-4, 'rtol': 1e-4, 'dtype': 'f'%7D
%0A%0A de
@@ -3078,26 +3078,58 @@
d_grad),
- rtol=1e-3
+%0A **self.check_backward_options
)%0A%0A d
|
5098d48598a3e57b00d1c813d0ff95b9b15a74cb
|
Update azure_recorded_testcase.py (#21771)
|
tools/azure-sdk-tools/devtools_testutils/azure_recorded_testcase.py
|
tools/azure-sdk-tools/devtools_testutils/azure_recorded_testcase.py
|
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import functools
import logging
import os
import os.path
import six
import sys
import time
from typing import TYPE_CHECKING
from dotenv import load_dotenv, find_dotenv
from azure_devtools.scenario_tests.config import TestConfig
from azure_devtools.scenario_tests.utilities import trim_kwargs_from_test_function
from . import mgmt_settings_fake as fake_settings
from .azure_testcase import _is_autorest_v3, get_resource_name, get_qualified_method_name
try:
# Try to import the AsyncFakeCredential, if we cannot assume it is Python 2
from .fake_async_credential import AsyncFakeCredential
except SyntaxError:
pass
if TYPE_CHECKING:
from typing import Any
load_dotenv(find_dotenv())
def is_live():
"""A module version of is_live, that could be used in pytest marker."""
if not hasattr(is_live, "_cache"):
is_live._cache = TestConfig().record_mode
return is_live._cache
class AzureRecordedTestCase(object):
"""Test class for use by data-plane tests that use the azure-sdk-tools test proxy.
For more details and usage examples, refer to
https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/test_proxy_migration_guide.md
"""
@property
def settings(self):
if self.is_live:
return os.environ
else:
return fake_settings
def _load_settings(self):
return fake_settings, os.environ
@property
def is_live(self):
return is_live()
@property
def qualified_test_name(self):
return get_qualified_method_name(self, "method_name")
@property
def in_recording(self):
return self.is_live
# TODO: This needs to be removed, recording processors are handled on the proxy side, but
# this is needed for the preparers
@property
def recording_processors(self):
return []
def is_playback(self):
return not self.is_live
def get_settings_value(self, key):
key_value = os.environ.get("AZURE_" + key, None)
if not key_value or self.is_playback:
try:
key_value = getattr(self.settings, key)
except Exception as ex:
six.raise_from(ValueError("Could not get {}".format(key)), ex)
return key_value
def get_credential(self, client_class, **kwargs):
tenant_id = os.environ.get("AZURE_TENANT_ID", getattr(os.environ, "TENANT_ID", None))
client_id = os.environ.get("AZURE_CLIENT_ID", getattr(os.environ, "CLIENT_ID", None))
secret = os.environ.get("AZURE_CLIENT_SECRET", getattr(os.environ, "CLIENT_SECRET", None))
is_async = kwargs.pop("is_async", False)
if tenant_id and client_id and secret and self.is_live:
if _is_autorest_v3(client_class):
# Create azure-identity class
from azure.identity import ClientSecretCredential
if is_async:
from azure.identity.aio import ClientSecretCredential
return ClientSecretCredential(tenant_id=tenant_id, client_id=client_id, client_secret=secret)
else:
# Create msrestazure class
from msrestazure.azure_active_directory import (
ServicePrincipalCredentials,
)
return ServicePrincipalCredentials(tenant=tenant_id, client_id=client_id, secret=secret)
else:
if _is_autorest_v3(client_class):
if is_async:
if self.is_live:
raise ValueError(
"Async live doesn't support mgmt_setting_real, please set AZURE_TENANT_ID, "
"AZURE_CLIENT_ID, AZURE_CLIENT_SECRET"
)
return AsyncFakeCredential()
else:
return self.settings.get_azure_core_credentials()
else:
return self.settings.get_credentials()
def create_client_from_credential(self, client_class, credential, **kwargs):
# Real client creation
# TODO decide what is the final argument for that
# if self.is_playback():
# kwargs.setdefault("polling_interval", 0)
if _is_autorest_v3(client_class):
kwargs.setdefault("logging_enable", True)
client = client_class(credential=credential, **kwargs)
else:
client = client_class(credentials=credential, **kwargs)
if self.is_playback():
try:
client._config.polling_interval = 0 # FIXME in azure-mgmt-core, make this a kwargs
except AttributeError:
pass
if hasattr(client, "config"): # Autorest v2
if self.is_playback():
client.config.long_running_operation_timeout = 0
client.config.enable_http_logger = True
return client
def create_random_name(self, name):
unique_test_name = os.getenv("PYTEST_CURRENT_TEST").encode("utf-8")
return get_resource_name(name, unique_test_name)
def get_resource_name(self, name):
"""Alias to create_random_name for back compatibility."""
return self.create_random_name(name)
def get_replayable_random_resource_name(self, name):
"""In a replay scenario (not live), gives the static moniker. In the random scenario, gives generated name."""
if self.is_live:
created_name = self.create_random_name(name)
self.scrubber.register_name_pair(created_name, name)
return name
def get_preparer_resource_name(self, prefix):
"""Random name generation for use by preparers.
If prefix is a blank string, use the fully qualified test name instead.
This is what legacy tests do for resource groups."""
return self.get_resource_name(prefix)
@staticmethod
def await_prepared_test(test_fn):
"""Synchronous wrapper for async test methods. Used to avoid making changes
upstream to AbstractPreparer, which only awaits async tests that use preparers.
(Add @AzureTestCase.await_prepared_test decorator to async tests without preparers)
# Note: this will only be needed so long as we maintain unittest.TestCase in our
test-class inheritance chain.
"""
if sys.version_info < (3, 5):
raise ImportError("Async wrapper is not needed for Python 2.7 code.")
import asyncio
@functools.wraps(test_fn)
def run(test_class_instance, *args, **kwargs):
trim_kwargs_from_test_function(test_fn, kwargs)
loop = asyncio.get_event_loop()
return loop.run_until_complete(test_fn(test_class_instance, **kwargs))
return run
def sleep(self, seconds):
if self.is_live:
time.sleep(seconds)
def generate_sas(self, *args, **kwargs):
sas_func = args[0]
sas_func_pos_args = args[1:]
fake_value = kwargs.pop("fake_value", "fake_token_value")
token = sas_func(*sas_func_pos_args, **kwargs)
fake_token = self._create_fake_token(token, fake_value)
if self.is_live:
return token
return fake_token
def _create_fake_token(self, token, fake_value):
parts = token.split("&")
for idx, part in enumerate(parts):
if part.startswith("sig"):
key = part.split("=")
key[1] = fake_value
parts[idx] = "=".join(key)
elif part.startswith("st"):
key = part.split("=")
key[1] = "start"
parts[idx] = "=".join(key)
elif part.startswith("se"):
key = part.split("=")
key[1] = "end"
parts[idx] = "=".join(key)
return "&".join(parts)
|
Python
| 0.000001
|
@@ -2360,16 +2360,18 @@
playback
+()
:%0A
|
3ba71de7d03b99376b70cd40de6dfcd45f1d35c0
|
replace distutils with which
|
tmscoring/tests/test.py
|
tmscoring/tests/test.py
|
from __future__ import division
import subprocess
import distutils
import tmscoring
import numpy as np
from numpy.testing import assert_almost_equal, TestCase
from nose.exc import SkipTest
class TestAligningBase(TestCase):
def test_matrix(self):
align_object = tmscoring.Aligning('pdb1.pdb', 'pdb2.pdb')
np.random.seed(124)
for _ in range(100):
theta, phi, psi = 2 * np.pi * np.random.random(3)
dx, dy, dz = 10 * np.random.random(3)
matrix = align_object.get_matrix(theta, phi, psi, dx, dy, dz)
rotation = matrix[:3, :3]
assert_almost_equal(1, np.linalg.det(rotation), 6)
assert_almost_equal(1, np.linalg.det(matrix), 6)
def test_tm_valuex(self):
align_object = tmscoring.Aligning('pdb1.pdb', 'pdb2.pdb')
np.random.seed(124)
for _ in range(100):
theta, phi, psi = 2 * np.pi * np.random.random(3)
dx, dy, dz = 10 * np.random.random(3)
tm = align_object._tm(theta, phi, psi, dx, dy, dz)
assert 0 <= -tm / align_object.N <= 1
def test_load_data_alignment(self):
align_object = tmscoring.Aligning('pdb1.pdb', 'pdb2.pdb', mode='align')
assert align_object.coord1.shape[0] == 4
assert align_object.coord2.shape[0] == 4
assert align_object.coord1.shape == align_object.coord2.shape
def test_load_data_index(self):
align_object = tmscoring.Aligning('pdb1.pdb', 'pdb2.pdb', mode='index')
assert align_object.coord1.shape[0] == 4
assert align_object.coord2.shape[0] == 4
assert align_object.coord1.shape == align_object.coord2.shape
def test_identity():
sc = tmscoring.TMscoring('pdb1.pdb', 'pdb1.pdb')
assert sc.tmscore(0, 0, 0, 0, 0, 0) == 1
sc = tmscoring.RMSDscoring('pdb1.pdb', 'pdb1.pdb')
assert sc.rmsd(0, 0, 0, 0, 0, 0) == 0.0
def test_tm_output():
if not distutils.spawn.find_executable('TMscore'):
raise SkipTest('TMscore is not installed in the system.')
pdb1, pdb2 = 'pdb1.pdb', 'pdb2.pdb'
sc = tmscoring.TMscoring(pdb1, pdb2)
_, tm, rmsd = sc.optimise()
p = subprocess.Popen('TMscore {} {} | grep TM-score | grep d0'.format(pdb1, pdb2),
stdout=subprocess.PIPE, shell=True)
ref_tm = float(p.communicate()[0].split('=')[1].split('(')[0])
assert_almost_equal(ref_tm, tm, decimal=2)
p = subprocess.Popen('TMscore {} {} | grep RMSD | grep common'.format(pdb1, pdb2),
stdout=subprocess.PIPE, shell=True)
ref_rmsd = float(p.communicate()[0].split('=')[1])
assert abs(ref_rmsd - rmsd) < 0.1
def test_repeated():
pdb1, pdb2 = 'pdbrep_1.pdb', 'pdbrep_2.pdb'
sc = tmscoring.TMscoring(pdb1, pdb2)
_, tm, rmsd = sc.optimise()
assert_almost_equal(tm, 0.27426501120343644)
assert_almost_equal(rmsd, 15.940038528551929)
|
Python
| 0.000011
|
@@ -48,26 +48,8 @@
ess%0A
-import distutils%0A%0A
impo
@@ -61,17 +61,16 @@
scoring%0A
-%0A
import n
@@ -166,16 +166,41 @@
kipTest%0A
+from shutil import which%0A
%0A%0Aclass
@@ -1076,16 +1076,23 @@
assert
+np.all(
0 %3C= -tm
@@ -1112,13 +1112,9 @@
ct.N
- %3C= 1
+)
%0A%0A
@@ -1939,54 +1939,32 @@
if
-not distutils.spawn.find_executable('TMscore')
+which(%22TMscore%22) is None
:%0A
@@ -2232,33 +2232,8 @@
b2),
-%0A
std
@@ -2295,32 +2295,48 @@
ommunicate()%5B0%5D.
+decode('utf-8').
split('=')%5B1%5D.sp
@@ -2536,32 +2536,32 @@
PE, shell=True)%0A
-
ref_rmsd = f
@@ -2584,16 +2584,32 @@
te()%5B0%5D.
+decode('utf-8').
split('=
|
d451814584318ac45cdefa9702c72eb5c15fe690
|
Fix Flakes Errors: openspending/ui/controllers/home.py
|
openspending/ui/controllers/home.py
|
openspending/ui/controllers/home.py
|
import logging
import os
import random
import subprocess
from datetime import datetime
from pylons import request, response, tmpl_context as c, url, config
from pylons.controllers.util import redirect
from pylons.decorators.cache import beaker_cache
from pylons.i18n import _
from openspending.model import Dataset
from openspending.lib.solr_util import dataset_entries
from openspending.ui.i18n import set_session_locale
from openspending.ui.lib import views
from openspending.ui.lib.base import BaseController, require
from openspending.ui.lib.helpers import flash_success, flash_error
from openspending.ui.lib import helpers as h
from openspending.ui.alttemplates import templating
log = logging.getLogger(__name__)
class HomeController(BaseController):
def index(self):
# Get all of the datasets available to the account of the logged in
# or an anonymous user (if c.account is None)
c.datasets = Dataset.all_by_account(c.account)
c.num_entries = dataset_entries(None)
return templating.render('home/index.html')
def set_locale(self):
locale = request.params.get('locale')
if locale is not None:
set_session_locale(locale)
def version(self):
cwd = os.path.dirname(__file__)
process = subprocess.Popen('git rev-parse --verify HEAD'.split(' '),
cwd=cwd,
stdout=subprocess.PIPE)
output = process.communicate()[0]
if process.returncode == 0:
return output
else:
import openspending.version
return openspending.version.__version__
def favicon(self):
return redirect('/static/img/favicon.ico', code=301)
def ping(self):
from openspending.tasks import ping
ping.delay()
flash_success(_("Sent ping!"))
redirect('/')
|
Python
| 0.000013
|
@@ -29,62 +29,18 @@
ort
-random%0Aimport subprocess%0Afrom datetime import datetime
+subprocess
%0A%0Afr
@@ -68,18 +68,8 @@
est,
- response,
tmp
@@ -86,21 +86,8 @@
as c
-, url, config
%0Afro
@@ -132,57 +132,8 @@
ect%0A
-from pylons.decorators.cache import beaker_cache%0A
from
@@ -305,46 +305,8 @@
ale%0A
-from openspending.ui.lib import views%0A
from
@@ -356,17 +356,8 @@
ller
-, require
%0Afro
@@ -410,66 +410,8 @@
cess
-, flash_error%0Afrom openspending.ui.lib import helpers as h
%0Afro
|
7db11fa7aad4b53a1f50988e83de2abfbae61dde
|
Fix the senddeletionnotices command to take into account the new default SMS limit.
|
hc/accounts/management/commands/senddeletionnotices.py
|
hc/accounts/management/commands/senddeletionnotices.py
|
from datetime import timedelta
import time
from django.conf import settings
from django.core.management.base import BaseCommand
from django.utils.timezone import now
from hc.accounts.models import Profile, Member
from hc.api.models import Ping
from hc.lib import emails
class Command(BaseCommand):
help = """Send deletion notices to inactive user accounts.
Conditions for sending the notice:
- deletion notice has not been sent recently
- last login more than a year ago
- none of the owned projects has invited team members
"""
def handle(self, *args, **options):
year_ago = now() - timedelta(days=365)
q = Profile.objects.order_by("id")
# Exclude accounts with logins in the last year_ago
q = q.exclude(user__last_login__gt=year_ago)
# Exclude accounts less than a year_ago old
q = q.exclude(user__date_joined__gt=year_ago)
# Exclude accounts with the deletion notice already sent
q = q.exclude(deletion_notice_date__gt=year_ago)
# Exclude paid accounts
q = q.exclude(sms_limit__gt=0)
sent = 0
for profile in q:
members = Member.objects.filter(project__owner_id=profile.user_id)
if members.exists():
print("Skipping %s, has team members" % profile)
continue
pings = Ping.objects
pings = pings.filter(owner__project__owner_id=profile.user_id)
pings = pings.filter(created__gt=year_ago)
if pings.exists():
print("Skipping %s, has pings in last year" % profile)
continue
self.stdout.write("Sending notice to %s" % profile.user.email)
profile.deletion_notice_date = now()
profile.save()
ctx = {"email": profile.user.email, "support_email": settings.SUPPORT_EMAIL}
emails.deletion_notice(profile.user.email, ctx)
# Throttle so we don't send too many emails at once:
time.sleep(1)
sent += 1
return "Done! Sent %d notices" % sent
|
Python
| 0
|
@@ -1104,17 +1104,17 @@
mit__gt=
-0
+5
)%0A%0A
|
43ab753c4a9892c55f115a4dd5345e94c4bb5d41
|
Fix auth initialization logging
|
opwen_email_server/services/auth.py
|
opwen_email_server/services/auth.py
|
from ast import literal_eval
from os import environ
from typing import Callable
from typing import Mapping
from opwen_email_server.utils.log import LogMixin
class EnvironmentAuth(LogMixin):
def __init__(self, client_to_domain: Mapping[str, str]=None,
envgetter: Callable[[str, str], str]=environ.get,
envkey: str='LOKOLE_CLIENTS') -> None:
self.__client_to_domain = dict(client_to_domain or {})
self._envgetter = envgetter
self._envkey = envkey
@property
def _client_to_domain(self):
if not self.__client_to_domain:
self.log_debug('initialized auth to %r', self.__client_to_domain)
self.__client_to_domain = self._create_client_to_domain()
return self.__client_to_domain
def _create_client_to_domain(self) -> Mapping[str, str]:
client_to_domain = literal_eval(self._envgetter(self._envkey, '{}'))
if not client_to_domain:
raise ValueError('environment key {} not set'.format(self._envkey))
return client_to_domain
def __contains__(self, client: str) -> bool:
return client in self._client_to_domain
def domain_for(self, client: str) -> str:
return self._client_to_domain[client]
|
Python
| 0.000004
|
@@ -614,50 +614,41 @@
elf.
-log_debug('initialized auth to %25r',
+__client_to_domain =
self._
+create
_cli
@@ -660,16 +660,17 @@
o_domain
+(
)%0A
@@ -684,41 +684,50 @@
elf.
-__client_to_domain =
+log_debug('initialized auth to %25r',
self._
-create
_cli
@@ -731,33 +731,32 @@
client_to_domain
-(
)%0A return
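The fix is pure ordering: the debug line ran before the lazy cache was filled, so it always logged an empty dict. The corrected lazy-init shape, reduced to a standalone sketch:
import logging
logging.basicConfig(level=logging.DEBUG)
_cache = {}
def client_to_domain():
    global _cache
    if not _cache:
        _cache = {'client-1': 'example.lokole.ca'}         # placeholder mapping
        logging.debug('initialized auth to %r', _cache)    # log *after* assignment
    return _cache
client_to_domain()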
|
5118104dad921128e4dec0cd1ea00aa7d854c0a9
|
fix token create params to match new keystone - this change will need to be ported to novaclient
|
openstackx/auth/tokens.py
|
openstackx/auth/tokens.py
|
from openstackx.api import base
class Tenant(base.Resource):
def __repr__(self):
return "<Tenant %s>" % self._info
@property
def id(self):
return self._info['id']
@property
def description(self):
return self._info['description']
@property
def enabled(self):
return self._info['enabled']
class Token(base.Resource):
def __repr__(self):
return "<Token %s>" % self._info
@property
def id(self):
return self._info['token']['id']
@property
def username(self):
try:
return self._info['user']['username']
except:
return "?"
@property
def tenant_id(self):
try:
return self._info['user']['tenantId']
except:
return "?"
def delete(self):
self.manager.delete(self)
class TokenManager(base.ManagerWithFind):
resource_class = Token
def create(self, tenant, username, password):
params = {"passwordCredentials": {"username": username,
"password": password,
"tenantId": tenant}}
return self._create('tokens', params, "auth")
class TenantManager(base.ManagerWithFind):
resource_class = Tenant
def for_token(self, token):
# FIXME(ja): now that tenants & tokens are separate managers we shouldn't
# need the uglyness of setting token this way?
orig = self.api.connection.auth_token
self.api.connection.auth_token = token
rval = self._list('tenants', "tenants")
self.api.connection.auth_token = orig
return rval
|
Python
| 0
|
@@ -994,16 +994,25 @@
params =
+ %7B%22auth%22:
%7B%22passw
@@ -1113,16 +1113,17 @@
password
+%7D
,%0A
@@ -1228,19 +1228,21 @@
rams, %22a
-uth
+ccess
%22)%0A%0A%0Acla
|
180b7810cdc9a2a17a13c184fc2143f9a5f10cd6
|
change var name article to container (opps 0.2) in sitemap generator class
|
opps/sitemaps/sitemaps.py
|
opps/sitemaps/sitemaps.py
|
# -*- coding: utf-8 -*-
from django.contrib.sitemaps import GenericSitemap as DjangoGenericSitemap
from django.contrib.sitemaps import Sitemap as DjangoSitemap
from django.utils import timezone
from opps.containers.models import Container
def InfoDisct(googlenews=False):
article = Container.objects.filter(date_available__lte=timezone.now(),
published=True)
if googlenews:
article = article[:1000]
return {
'queryset': article,
'date_field': 'date_available',
}
class BaseSitemap(DjangoSitemap):
priority = 0.6
def items(self):
return Container.objects.filter(date_available__lte=timezone.now(),
published=True)
def lastmod(self, obj):
return obj.date_available
class GenericSitemap(DjangoGenericSitemap):
limit = 1000
priority = 0.6
|
Python
| 0
|
@@ -272,23 +272,25 @@
e):%0A
-article
+container
= Conta
@@ -376,32 +376,34 @@
+
published=True)%0A
@@ -433,25 +433,29 @@
-article = article
+container = container
%5B:10
@@ -495,15 +495,17 @@
t':
-article
+container
,%0A
|
ba92d4b5854f31b97255c98b83d8dfb8874c8668
|
Fix arg list for thumbnailpath override
|
frontend/src/indexing.py
|
frontend/src/indexing.py
|
#!/usr/bin/env python
from pymongo import MongoClient
#from subprocess import Popen, PIPE
import FindVid as fv
from sys import argv, exit
import hashlib
import os
def hashFile(filename, blocksize):
hash = hashlib.sha1()
with open(filename, 'rb') as f:
buffer = f.read(blocksize)
while len(buffer) > 0:
hash.update(buffer)
buffer = f.read(blocksize)
return hash.hexdigest()
# returns the configuration dictionary
def config(db="findvid", collection="videos", config={"_id": "config"}):
client = MongoClient()
db = client[db]
videos = db[collection]
return videos.find(config).next()
CONFIG = config() # abs, thumbnail, video
VIDEOPATH = CONFIG["abspath"] + CONFIG["videopath"]
# path to shotbounds program
SHOTBOUNDS = "{0}main/impl/shotbounds".format(CONFIG["abspath"])
THUMBNAILER = "{0}main/impl/thumbnails".format(CONFIG["abspath"])
#Index the given videofile (abs. path), create thumbnails in the
def index_video(videofile, searchable=False, uploaded=True, thumbpath = None):
#Get PyMongo client
client = MongoClient()
db = client["findvid"]
videos = db["videos"]
#Get Hash
fileHash = str(hashFile(videofile, 65536))
#Check if this exact video exists already
video = videos.find_one({'_id': fileHash})
if (video):
return False
#Use C-Lib to get cuts in the video
cuts = fv.getCuts(videofile)
#Heuristic approach: Suitable keyframe between 2 cuts
keyframes = [(cuts[i-1] + cuts[i])/2 for i in range(1, len(cuts))]
#extract features from videofile given the keyframes array, use the middle keyframe as videothumb and save to default folder
features = fv.getFeatures(videofile, keyframes[len(keyframes)/2], keyframes, thumbpath)
prev = 0
scenes = [] # scenes collection
for i, c in enumerate(cuts[1:]):
scene = {} # scene document
scene["_id"] = str(i)
scene["startframe"] = prev
scene["endframe"] = c
# save features
scene["colorhist"] = []
for v in features[i][0]:
scene["colorhist"].append(v)
scene["edges"] = []
for v in features[i][1]:
scene["edges"].append(v)
# TinyIMG
# scene["tinyimg"]
# for v in features[i][2]:
# scene["tinyimg"].append(v)
# GIST
# scene["gist"]
# for v in features[i][2]:
# scene["gist"].append(v)
scenes.append(scene)
prev = c
video = {}
# TODO sequence counter
video["_id"] = fileHash
video["filename"] = videofile
fps = fv.getFramerate(videofile)
video["fps"] = fps
video["framecount"] = cuts[-1:][0] # last entry
video["scenes"] = scenes
video["upload"] = uploaded
video["searchable"] = searchable
videos.insert(video)
return True
if __name__ == "__main__":
if len(argv) < 2:
print "ERROR: file missing!"
exit(1)
videofile = argv[1]
index_video(videofile)
|
Python
| 0.000002
|
@@ -1583,16 +1583,128 @@
folder%0A
+%09if (thumbpath == None):%0A%09%09features = fv.getFeatures(videofile, keyframes%5Blen(keyframes)/2%5D, keyframes)%0A%09else:%0A%09
%09feature
|
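The fix above branches on `thumbpath` so the C extension `fv.getFeatures` only receives the extra argument when one was actually supplied. When the callee is plain Python (a C extension may not tolerate an explicit `None`), the same effect can be had with conditional keyword expansion — a hypothetical sketch, not the project's API:

```python
def get_features(videofile, keyframe, keyframes, thumbpath=None):
    """Stand-in for a feature extractor that treats thumbpath as optional."""
    return (videofile, keyframe, keyframes, thumbpath)

def extract(videofile, keyframe, keyframes, thumbpath=None):
    # Build kwargs only for arguments that were actually provided,
    # so the callee's own default applies otherwise.
    extra = {} if thumbpath is None else {"thumbpath": thumbpath}
    return get_features(videofile, keyframe, keyframes, **extra)
```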
964fcba7cc91bd84bec234943a90ecbfa33caa8f
|
Fix response handling bugs
|
pynba/pynba.py
|
pynba/pynba.py
|
import requests
import json
from os import path
from abc import ABCMeta, abstractmethod
_REQUEST_DATA_PATH = path.join(path.dirname(path.abspath(__file__)), "requests.json")
class WebInterface:
def __init__(self):
with open(_REQUEST_DATA_PATH, 'r') as f:
request_data = json.load(f)
self._param_types = {x: _construct_param_type_from_json(x, y)
for x, y in request_data['params'].items()}
self._request_types = {x: _RequestType(x, y, self._param_types)
for x, y in request_data['requests'].items()}
def request(self, request_name, params={}):
self._request_types[request_name].send(params)
class _RequestType:
def __init__(self, name, data, param_types):
self.name = name
self.endpoint = data['endpoint']
self.params = [param_types[x] for x in data['params']]
self.response_format = data['response-format']
if self.response_format == 'result-set':
self.outputs = data['returns']
self.url_param = data.get('url-param')
def send(self, params):
params_composed = self._compose_params(params)
url = 'http://stats.nba.com/{0}'.format(self.endpoint)
if self.url_param is not None:
url = url.format(params_composed[self.url_param])
response = requests.get(url, params=params_composed)
print("URL: {0}".format(url))
print("Params: {0}".format(params_composed))
#if self.response_format == 'result-set':
# return self._label_result_sets(response['resultSets'])
#else:
# return response
def _compose_params(self, param_values_provided):
params_composed = {}
for param in self.params:
if param.name in param_values_provided:
value_provided = param_values_provided[param.name]
params_composed[param.name] = \
param.format_value(value_provided)
else:
if param.has_default:
params_composed[param.name] = param.default_formatted
else:
raise ValueError("Request {0} is missing parameter {1}."
.format(self.name, param.name))
return params_composed
def _label_result_sets(self, result_set_list):
results = {}
for output, index in zip(self.outputs, range(len(self.outputs))):
headers = result_set_list[index]['headers']
values = result_set_list[index]['rowSet']
results[output] = [dict(zip(headers, x)) for x in values]
return results
class _ParamType(metaclass=ABCMeta):
def __init__(self, name, data):
self.name = name
self.description = data.get('description', '')
self.default_string = data.get('default')
self.has_default = (self.default_string is not None)
if self.has_default:
if self.default_string:
self.default_value = self._parse(self.default_string)
self.default_formatted = self.format_value(self.default_value)
else:
self.default_value = None
self.default_formatted = ""
@abstractmethod
def _parse(self, text):
"""Parse argument value from string"""
@abstractmethod
def format_value(self, value):
"""Format argument value into string to be used in HTTP request"""
class _IntParamType(_ParamType):
def _parse(self, text):
return int(text)
def format_value(self, value):
return value
class _SeasonParamType(_IntParamType):
def format_value(self, value):
if value < 1000 or value > 9999:
raise ValueError("Seasons should be four digit integers")
next_year_two_digits = str(int(value) % 100 + 1)[-2:].zfill(2)
return '{0}-{1}'.format(value, next_year_two_digits)
class _SeasonIDParamType(_IntParamType):
def format_value(self, value):
if value < 1000 or value > 9999:
raise ValueError("Seasons should be four digit integers")
return '2{0}'.format(value)
class _BooleanParamType(_ParamType):
def _parse(self, text):
return {'True': True, 'False': False}[text]
class _BooleanYNParamType(_BooleanParamType):
def format_value(self, value):
return 'y' if value else 'n'
class _Boolean01ParamType(_BooleanParamType):
def format_value(self, value):
return '1' if value else '0'
class _EnumParamType(_ParamType):
def __init__(self, name, data):
self.options = data['options']
super().__init__(name, data)
def _parse(self, text):
return text
def format_value(self, value):
if value not in self.options:
raise ValueError(("Unrecognized value '{0}' for option "
"'{1}'. Options are [{2}]")
.format(value, self.name, self.options))
return value
class _MappedEnumParamType(_EnumParamType):
def format_value(self, value):
if value not in self.options:
raise ValueError(("Unrecognized value '{0}' for option "
"'{1}'. Options are [{2}]")
.format(value, self.name,
list(self.options.keys())))
return self.options[value]
class _DateParamType(_ParamType):
def _parse(self, text):
if not text:
return None
else:
raise NotImplementedError
def format_value(self, value):
return str(value)
_PARAM_TYPE_NAME_MAP = {
'int': _IntParamType,
'int-season': _SeasonParamType,
'int-season-id': _SeasonIDParamType,
'boolean-yn': _BooleanYNParamType,
'boolean-01': _Boolean01ParamType,
'enum': _EnumParamType,
'enum-mapped': _MappedEnumParamType,
'date': _DateParamType,
}
def _construct_param_type_from_json(name, data):
return _PARAM_TYPE_NAME_MAP[data['type']](name, data)
|
Python
| 0.000005
|
@@ -617,32 +617,39 @@
ms=%7B%7D):%0A
+return
self._request_ty
@@ -1404,109 +1404,8 @@
-print(%22URL: %7B0%7D%22.format(url))%0A print(%22Params: %7B0%7D%22.format(params_composed))%0A %0A #
if s
@@ -1441,33 +1441,32 @@
t-set':%0A
-#
return self.
@@ -1492,16 +1492,23 @@
response
+.json()
%5B'result
@@ -1523,17 +1523,16 @@
-#
else:%0A
@@ -1541,9 +1541,8 @@
-#
@@ -1556,16 +1556,23 @@
response
+.json()
%0A%0A de
|
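The fix above restores the missing `return` and calls `.json()` on the `requests` response before indexing into `resultSets`. The `_label_result_sets` helper in the record zips each result set's `headers` with its `rowSet` rows; a self-contained restatement of that transformation (sample data invented):

```python
def label_result_sets(outputs, result_set_list):
    """Turn stats.nba.com-style result sets into lists of row dicts."""
    results = {}
    for index, output in enumerate(outputs):
        headers = result_set_list[index]['headers']
        values = result_set_list[index]['rowSet']
        results[output] = [dict(zip(headers, row)) for row in values]
    return results

raw = [{'headers': ['PLAYER', 'PTS'], 'rowSet': [['A. Example', 30]]}]
print(label_result_sets(['scores'], raw))
# {'scores': [{'PLAYER': 'A. Example', 'PTS': 30}]}
```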
0d01756c1db9a6c19d263edadeda775adf5291af
|
Add unit tests on the sphinxext indent function
|
oslo_policy/tests/test_sphinxext.py
|
oslo_policy/tests/test_sphinxext.py
|
# Copyright 2017 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import textwrap
from oslotest import base
from oslo_policy import policy
from oslo_policy import sphinxext
class FormatPolicyTest(base.BaseTestCase):
def test_minimal(self):
results = '\n'.join(list(sphinxext._format_policy_section(
'foo', [policy.RuleDefault('rule_a', '@')])))
self.assertEqual(textwrap.dedent("""
foo
===
``rule_a``
:Default: ``@``
(no description provided)
""").lstrip(), results)
def test_with_description(self):
results = '\n'.join(list(sphinxext._format_policy_section(
'foo', [policy.RuleDefault('rule_a', '@', 'My sample rule')]
)))
self.assertEqual(textwrap.dedent("""
foo
===
``rule_a``
:Default: ``@``
My sample rule
""").lstrip(), results)
def test_with_operations(self):
results = '\n'.join(list(sphinxext._format_policy_section(
'foo', [policy.DocumentedRuleDefault(
'rule_a', '@', 'My sample rule', [
{'method': 'GET', 'path': '/foo'},
{'method': 'POST', 'path': '/some'}])]
)))
self.assertEqual(textwrap.dedent("""
foo
===
``rule_a``
:Default: ``@``
:Operations:
- **GET** ``/foo``
- **POST** ``/some``
My sample rule
""").lstrip(), results)
def test_with_scope_types(self):
operations = [
{'method': 'GET', 'path': '/foo'},
{'method': 'POST', 'path': '/some'}
]
scope_types = ['bar']
rule = policy.DocumentedRuleDefault(
'rule_a', '@', 'My sample rule', operations,
scope_types=scope_types
)
results = '\n'.join(list(sphinxext._format_policy_section(
'foo', [rule]
)))
self.assertEqual(textwrap.dedent("""
foo
===
``rule_a``
:Default: ``@``
:Operations:
- **GET** ``/foo``
- **POST** ``/some``
:Scope Types:
- **bar**
My sample rule
""").lstrip(), results)
|
Python
| 0.000029
|
@@ -683,16 +683,990 @@
nxext%0A%0A%0A
+class IndentTest(base.BaseTestCase):%0A%0A def test_indent(self):%0A result = sphinxext._indent(%22foo%5Cnbar%22)%0A self.assertEqual(%22 foo%5Cn bar%22, result)%0A%0A result = sphinxext._indent(%22%22)%0A self.assertEqual(%22%22, result)%0A%0A result = sphinxext._indent(%22%5Cn%22)%0A self.assertEqual(%22%5Cn%22, result)%0A%0A result = sphinxext._indent(%22test%5Cntesting%5Cn%5Cnafter blank%22)%0A self.assertEqual(%22 test%5Cn testing%5Cn%5Cn after blank%22, result)%0A%0A result = sphinxext._indent(%22%5Ctfoo%5Cnbar%22)%0A self.assertEqual(%22 %5Ctfoo%5Cn bar%22, result)%0A%0A result = sphinxext._indent(%22 foo%5Cnbar%22)%0A self.assertEqual(%22 foo%5Cn bar%22, result)%0A%0A result = sphinxext._indent(%22foo%5Cn bar%22)%0A self.assertEqual(%22 foo%5Cn bar%22, result)%0A%0A result = sphinxext._indent(%22foo%5Cn%5Cn bar%22)%0A self.assertEqual(%22 foo%5Cn%5Cn bar%22, result)%0A%0A self.assertRaises(AttributeError, sphinxext._indent, None)%0A%0A%0A
class Fo
|
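The tests added above pin down the behaviour of `sphinxext._indent`: every non-blank line gains a fixed leading pad, blank lines stay blank, and passing `None` raises `AttributeError`. A minimal implementation consistent with those cases — the real oslo.policy helper and its exact pad width may differ (runs of spaces in the dump may have been collapsed):

```python
def _indent(text, pad=' '):
    """Prefix every non-blank line of `text` with `pad`; keep blank lines."""
    # text.split() raises AttributeError for None, matching the last test.
    return '\n'.join(
        pad + line if line.strip() else line
        for line in text.split('\n')
    )
```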
f127f0e9bb0b8778feafbdbc1fa68e79a923d639
|
Update product listing test to use product ids rather than index
|
whats_fresh/whats_fresh_api/tests/views/entry/test_list_products.py
|
whats_fresh/whats_fresh_api/tests/views/entry/test_list_products.py
|
from django.test import TestCase
from django.core.urlresolvers import reverse
from whats_fresh_api.models import *
from django.contrib.gis.db import models
import json
class ListProductTestCase(TestCase):
fixtures = ['test_fixtures']
def test_url_endpoint(self):
url = reverse('entry-list-products')
self.assertEqual(url, '/entry/products')
def test_list_items(self):
"""
Tests to see if the list of products contains the proper productss and
proper product data
"""
response = self.client.get(reverse('entry-list-products'))
items = response.context['item_list']
for product in Product.objects.all():
self.assertEqual(
items[product.id-1]['description'], product.description)
self.assertEqual(
items[product.id-1]['name'], product.name)
self.assertEqual(
items[product.id-1]['link'],
reverse('edit-product', kwargs={'id': product.id}))
self.assertEqual(
items[product.id-1]['modified'],
product.modified.strftime("%I:%M %P, %d %b %Y"))
self.assertEqual(
sort(items[product.id-1]['preparations']),
sort([prep.name for prep in product.preparations.all()]))
|
Python
| 0
|
@@ -478,17 +478,16 @@
products
-s
and%0A
@@ -633,24 +633,190 @@
tem_list'%5D%0A%0A
+ product_dict = %7B%7D%0A%0A for product in items:%0A product_id = product%5B'link'%5D.split('/')%5B-1%5D%0A product_dict%5Bstr(product_id)%5D = product%0A%0A
for
@@ -887,38 +887,49 @@
-items%5B
+product_dict%5Bstr(
product.id-1%5D%5B'd
@@ -922,18 +922,17 @@
oduct.id
--1
+)
%5D%5B'descr
@@ -940,16 +940,32 @@
ption'%5D,
+%0A
product
@@ -1016,38 +1016,49 @@
-items%5B
+product_dict%5Bstr(
product.id-1%5D%5B'n
@@ -1051,18 +1051,17 @@
oduct.id
--1
+)
%5D%5B'name'
@@ -1115,38 +1115,49 @@
-items%5B
+product_dict%5Bstr(
product.id-1%5D%5B'l
@@ -1150,18 +1150,17 @@
oduct.id
--1
+)
%5D%5B'link'
@@ -1276,22 +1276,33 @@
-items%5B
+product_dict%5Bstr(
product.
@@ -1303,18 +1303,17 @@
oduct.id
--1
+)
%5D%5B'modif
@@ -1439,14 +1439,25 @@
ort(
-items%5B
+product_dict%5Bstr(
prod
@@ -1462,18 +1462,17 @@
oduct.id
--1
+)
%5D%5B'prepa
|
3229cd37fab3d453798b7440f5519b0be17a1345
|
"mode" option
|
pyspeedtest.py
|
pyspeedtest.py
|
#!/usr/bin/python
'''
TODO:
- improve upload() test to match speedtest.net flash app results (dns cache, keep-alive?)
- choose server based on latency (http://www.speedtest.net/speedtest-servers.php / http://SERVER/speedtest/latency.txt)
'''
import urllib, urllib2
import getopt, sys
from time import time
from random import random
from threading import Thread, currentThread
###############
HOST = 'http://speedtest-po.vodafone.pt'
RUNS = 2
###############
VERBOSE = 0
DOWNLOAD_FILES = [
('/speedtest/random350x350.jpg',245388),
('/speedtest/random500x500.jpg',505544),
('/speedtest/random1500x1500.jpg',4468241),
]
UPLOAD_FILES = [
132884,
493638
]
def printv(msg):
if VERBOSE : print msg
def download():
total_start_time = time()
total_downloaded = 0
for (current_file, current_file_size) in DOWNLOAD_FILES:
threads = []
for run in range(RUNS):
total_downloaded += current_file_size
thread = Thread(target=urllib.urlretrieve, args = (HOST + current_file + '?x=' + str(int(time() * 1000)), '/dev/null'))
thread.run_number = run
thread.start()
threads.append(thread)
for thread in threads:
thread.join()
printv('Run %d for %s finished' % (thread.run_number, current_file))
total_ms = (time() - total_start_time) * 1000
printv('Took %d ms to download %d bytes' % (total_ms, total_downloaded))
return (total_downloaded * 8000 / total_ms)
def uploadthread(req):
response = urllib2.urlopen(req)
reply = response.read()
self_thread = currentThread()
self_thread.uploaded = int(reply.split('=')[1])
def upload():
url = HOST + '/speedtest/upload.php?x=' + str(random())
total_start_time = time()
total_uploaded = 0
for current_file_size in UPLOAD_FILES:
values = {'content0' : open('/dev/random').read(current_file_size) }
data = urllib.urlencode(values)
req = urllib2.Request(url, data)
threads = []
for run in range(RUNS):
thread = Thread(target = uploadthread, kwargs = { 'req': req })
thread.run_number = run
thread.start()
threads.append(thread)
for thread in threads:
thread.join()
printv('Run %d for %d bytes finished' % (thread.run_number, thread.uploaded))
total_uploaded += thread.uploaded
total_ms = (time() - total_start_time) * 1000
printv('Took %d ms to upload %d bytes' % (total_ms, total_uploaded))
return (total_uploaded * 8000 / total_ms)
def usage():
print '''
usage: pyspeedtest.py [-h] [-v] [-r N] [-m N]
Test your bandwidth speed using Speedtest.net servers.
optional arguments:
-h, --help show this help message and exit
-v enabled verbose mode
-r N, --runs=N use N runs (default is 2).
-m N, --mode=N test mode: 1 - download only, 2 - upload only, 3 - both (default)
'''
def main():
global VERBOSE, RUNS
mode = 3
try:
opts, args = getopt.getopt(sys.argv[1:], "hr:vm:", ["help", "runs=","mode="])
except getopt.GetoptError, err:
print str(err)
usage()
sys.exit(2)
for o, a in opts:
if o == "-v":
VERBOSE = 1
elif o in ("-h", "--help"):
usage()
sys.exit()
elif o in ("-r", "--runs"):
RUNS = a
elif o in ("-m", "--mode"):
mode = a
if mode & 1 == 1:
print 'Download speed: ' + pretty_speed(download())
if mode & 2 == 2:
print 'Upload speed: ' + pretty_speed(upload())
def pretty_speed(speed):
units = [ 'bps', 'Kbps', 'Mbps', 'Gbps' ]
unit = 0
while speed >= 1024:
speed /= 1024
unit += 1
return '%0.2f %s' % (speed, units[unit])
if __name__ == '__main__':
main()
|
Python
| 0.999349
|
@@ -3110,16 +3110,95 @@
%0A%09%09%09
-mode = a
+try:%0A%09%09%09%09mode = int(a)%0A%09%09%09except ValueError:%0A%09%09%09%09print 'Bad mode value'%0A%09%09%09%09sys.exit(2)
%0A%09if
|
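Without the fix above, `mode` stays a string from getopt and the later `mode & 1` raises `TypeError`; the patch converts it with `int()` and exits cleanly on bad input. The same guard as a small Python 3 function (message text is illustrative):

```python
import sys

def parse_mode(raw):
    """Convert a CLI option value to int, exiting with a message on failure."""
    try:
        return int(raw)
    except ValueError:
        print('Bad mode value: {!r}'.format(raw))
        sys.exit(2)
```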
266ee5a6798c1fe09e9c6b36ee5831a1060b4624
|
Fix lust.py
|
python/lust.py
|
python/lust.py
|
#!/usr/bin/env python
class LustObject(object):
# executes the command
def handle(self, arguments): pass
def print_help(self): pass
class FactorialCommand(LustObject):
def handle(self, arguments):
try: argument = int(arguments[0])
except (ValueError, IndexError):
print("fact: could not read integer argument.")
return
if argument < 0:
print("fact: argument has to be non-negative!")
return
print(self.__calculate_factorial(argument))
def print_help(self):
print(" fact <integer>")
print(" Calculates the factorial of <integer>.")
def __calculate_factorial(self, argument):
# Hmmm...
result = 0
for i in range(1, argument+1):
result *= i
return result
class QuitCommand(LustObject):
def handle(self, arguments = None):
print("Bye!")
exit()
def print_help(self):
print(" quit")
print(" Quits.")
class HelpCommand(LustObject):
def __init__(self, commands):
self.commands = commands
def handle(self, arguments = None):
print("List of all commands")
print("--------------------")
for command in sorted(self.commands):
self.commands[command].print_help()
def print_help(self):
print(" help")
print(" Prints help for all commands.")
print("Hello! Welcome to the LARICS Universal Shell Terminal (LUST)!")
print("Enter 'help' for a list of commands. Press Ctrl-D or enter 'quit' to quit.")
# dictionary for storing all commands
commands = { }
commands["fact"] = FactorialCommand()
commands["quit"] = QuitCommand()
# help command needs a reference to the parent dictionary in order to call each
# command's print_help() function
commands["help"] = HelpCommand(commands)
# input from Python 3 is raw_input in Python 2
try: input = raw_input
except NameError: pass
while True:
# read current line and try to extract command name
try:
cmd_line = input(">> ")
except (EOFError):
break
arguments = cmd_line.split()
try: cmd_name = arguments[0].lower()
except IndexError: continue
# look up the appropriate command in commands dictionary
if cmd_name not in commands:
print("lust: no such command '{}'.".format(cmd_name))
continue
else:
# command found, pass its handler the rest of the read arguments
commands[cmd_name].handle(arguments[1:])
print
commands["quit"].handle()
|
Python
| 0.000004
|
@@ -662,17 +662,17 @@
esult =
-0
+1
%0A for
|
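The one-character fix above is the classic accumulator bug: a product loop seeded with 0 multiplies everything away and always returns 0. Seeded with 1 (the multiplicative identity) it matches the standard library:

```python
import math

def factorial(n):
    result = 1               # 0 here would zero out every product
    for i in range(1, n + 1):
        result *= i
    return result

assert factorial(5) == math.factorial(5) == 120
```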
3312bd93f42aad077bb3c580c40e918bbff73f66
|
fix issue#160
|
pywren/wait.py
|
pywren/wait.py
|
from __future__ import absolute_import
import time
from multiprocessing.pool import ThreadPool
from pywren.future import JobState
import pywren.storage as storage
import pywren.wrenconfig as wrenconfig
ALL_COMPLETED = 1
ANY_COMPLETED = 2
ALWAYS = 3
def wait(fs, return_when=ALL_COMPLETED, THREADPOOL_SIZE=64,
WAIT_DUR_SEC=5):
"""
this will eventually provide an optimization for checking if a large
number of futures have completed without too much network traffic
by exploiting the callset
From python docs:
Wait for the Future instances (possibly created by different Executor
instances) given by fs to complete. Returns a named 2-tuple of
sets. The first set, named "done", contains the futures that completed
(finished or were cancelled) before the wait completed. The second
set, named "not_done", contains uncompleted futures.
http://pythonhosted.org/futures/#concurrent.futures.wait
"""
N = len(fs)
if return_when == ALL_COMPLETED:
result_count = 0
while result_count < N:
fs_dones, fs_notdones = _wait(fs, THREADPOOL_SIZE)
result_count = len(fs_dones)
if result_count == N:
return fs_dones, fs_notdones
else:
time.sleep(WAIT_DUR_SEC)
elif return_when == ANY_COMPLETED:
while True:
fs_dones, fs_notdones = _wait(fs, THREADPOOL_SIZE)
if len(fs_dones) != 0:
return fs_dones, fs_notdones
else:
time.sleep(WAIT_DUR_SEC)
elif return_when == ALWAYS:
return _wait(fs, THREADPOOL_SIZE)
else:
raise ValueError()
def _wait(fs, THREADPOOL_SIZE):
"""
internal function that performs the majority of the WAIT task
work.
"""
# get all the futures that are not yet done
not_done_futures = [f for f in fs if f._state not in [JobState.success,
JobState.error]]
if len(not_done_futures) == 0:
return fs, []
# check if the not-done ones have the same callset_id
present_callsets = set([f.callset_id for f in not_done_futures])
if len(present_callsets) > 1:
raise NotImplementedError()
# get the list of all objects in this callset
callset_id = present_callsets.pop() # FIXME assume only one
storage_config = wrenconfig.extract_storage_config(wrenconfig.default())
storage_handler = storage.Storage(storage_config)
callids_done = storage_handler.get_callset_status(callset_id)
callids_done = set(callids_done)
fs_dones = []
fs_notdones = []
f_to_wait_on = []
for f in fs:
if f._state in [JobState.success, JobState.error]:
# done, don't need to do anything
fs_dones.append(f)
else:
if f.call_id in callids_done:
f_to_wait_on.append(f)
fs_dones.append(f)
else:
fs_notdones.append(f)
def test(f):
f.result(throw_except=False, storage_handler=storage_handler)
pool = ThreadPool(THREADPOOL_SIZE)
pool.map(test, f_to_wait_on)
pool.close()
pool.join()
return fs_dones, fs_notdones
|
Python
| 0
|
@@ -33,16 +33,30 @@
import%0A%0A
+import random%0A
import t
@@ -2529,111 +2529,1628 @@
ig)%0A
- callids_done = storage_handler.get_callset_status(callset_id)%0A%0A callids_done = set(callids_done)
+%0A # Signal the completion of tasks by probing status files.%0A # Because S3 does not provide list-after-write consistency (which might also apply%0A # to other storage backend). List() can be only used as an optimization%0A # but not a timely way to signal completion. Thus, our strategy is to:%0A # 1) do list()%0A # 2) use get() to signal N tasks that do not show up in 1)%0A # 3) repeat 2) if all N tasks completed, otherwise stop%0A # Note: a small N is probably preferred here.%0A%0A%0A callids_done = storage_handler.get_callset_status(callset_id)%0A callids_done = set(callids_done)%0A%0A num_samples = 4%0A still_not_done_futures = %5Bf for f in not_done_futures if (f.call_id not in callids_done)%5D%0A def fetch_status(f):%0A return storage_handler.get_callset_status(f.callset_id, f.call_id)%0A%0A pool = ThreadPool(num_samples)%0A # repeat util all futures are done%0A while still_not_done_futures:%0A fs_samples = random.sample(still_not_done_futures,%0A min(num_samples, len(still_not_done_futures)))%0A fs_statuses = pool.map(fetch_status, fs_samples)%0A%0A callids_found = %5Bfs_samples%5Bi%5D.call_id for i in range(len(fs_samples))%0A if (fs_statuses%5Bi%5D is not None)%5D%0A%0A # update done call_ids%0A callids_done.update(callids_found)%0A%0A # break if not all N tasks completed%0A if (len(fs_found) %3C len(fs_samples)):%0A break%0A # calculate new still_not_done_futures%0A still_not_done_futures = %5Bf for f in not_done_futures if (f.call_id not in callids_done)%5D%0A pool.close()%0A pool.join()%0A
%0A%0A
|
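The patch above works around the lack of list-after-write consistency in S3-style stores: a cheap `list()` is only a hint, so a small random sample of still-pending futures is confirmed with direct `get()` probes, repeating while every probe in a batch comes back done. A simplified, storage-agnostic sketch of that strategy (the `fetch_status` callable and sample size are placeholders; note the committed hunk itself references an undefined `fs_found`, written as `found` here):

```python
import random
from multiprocessing.pool import ThreadPool

def confirm_done(pending_ids, fetch_status, num_samples=4):
    """Probe random samples of pending ids until a probe batch misses."""
    done = set()
    pool = ThreadPool(num_samples)
    try:
        while pending_ids:
            sample = random.sample(pending_ids,
                                   min(num_samples, len(pending_ids)))
            statuses = pool.map(fetch_status, sample)
            found = [cid for cid, st in zip(sample, statuses) if st is not None]
            done.update(found)
            if len(found) < len(sample):
                break  # a probe is still pending; stop burning requests
            pending_ids = [cid for cid in pending_ids if cid not in done]
    finally:
        pool.close()
        pool.join()
    return done
```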
0c0613f8b5719e396d6a053201c32482f95bdb7f
|
Simplified the dict merging; small fix
|
qface/utils.py
|
qface/utils.py
|
def merge(a, b, path=None):
"merges b into a"
# import pdb; pdb.set_trace()
path = path or []
for key in b:
if key in a:
if isinstance(a[key], dict) and isinstance(b[key], dict):
merge(a[key], b[key], path + [str(key)])
else:
a[key] = b[key]
else:
a[key] = b[key]
return a
|
Python
| 0.999987
|
@@ -13,19 +13,8 @@
a, b
-, path=None
):%0A
@@ -36,65 +36,42 @@
to a
-%22%0A # import pdb; pdb.set_trace()%0A path = path or %5B%5D
+ recursively if a and b are dicts%22
%0A
@@ -89,33 +89,8 @@
b:%0A
- if key in a:%0A
@@ -108,21 +108,25 @@
stance(a
-%5Bkey%5D
+.get(key)
, dict)
@@ -141,21 +141,25 @@
stance(b
-%5Bkey%5D
+.get(key)
, dict):
@@ -171,20 +171,16 @@
-
merge(a%5B
@@ -195,78 +195,9 @@
key%5D
-, path + %5Bstr(key)%5D)%0A else:%0A a%5Bkey%5D = b%5Bkey%5D
+)
%0A
|
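Applying the diff above yields this in-place merge: it recurses only when both sides hold dicts at a key (`a.get(key)` also covers keys absent from `a`) and otherwise lets `b` win. Reconstructed here for readability, with a small usage example:

```python
def merge(a, b):
    "merges b into a recursively if a and b are dicts"
    for key in b:
        if isinstance(a.get(key), dict) and isinstance(b.get(key), dict):
            merge(a[key], b[key])
        else:
            a[key] = b[key]
    return a

base = {'ui': {'theme': 'dark', 'font': 10}, 'debug': False}
merge(base, {'ui': {'font': 12}, 'debug': True})
# base is now {'ui': {'theme': 'dark', 'font': 12}, 'debug': True}
```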
12b34fc09baa5060495e25e57680d1f6170559c5
|
Enable estimation reports for FPBŻ
|
addons/bestja_configuration_fpbz/__openerp__.py
|
addons/bestja_configuration_fpbz/__openerp__.py
|
# -*- coding: utf-8 -*-
{
'name': "Bestja: FBŻ",
'summary': "Installation configuration for FPBŻ",
'description': "Installation configuration for Federacja Polskich Banków Żywności",
'author': "Laboratorium EE",
'website': "http://www.laboratorium.ee",
'version': '0.1',
'category': 'Specific Industry Applications',
'depends': [
'base',
'bestja_base',
'bestja_volunteer',
'bestja_volunteer_notes',
'bestja_account_deletion',
'bestja_organization',
'bestja_organization_hierarchy',
'bestja_project',
'bestja_project_hierarchy',
'bestja_stores',
'bestja_requests',
'bestja_detailed_reports',
'bestja_offers',
'bestja_offers_by_org',
'bestja_files',
'quizzes',
'bestja_organization_warehouse',
'bestja_age_verification',
'bestja_frontend_fpbz',
'bestja_page_fixtures_fpbz',
],
'data': [
'data.xml',
],
'application': True,
}
|
Python
| 0
|
@@ -708,24 +708,61 @@
d_reports',%0A
+ 'bestja_estimation_reports',%0A
'bes
|
a0b488490ad32f7251e0ec2a35607a78c3022695
|
Fix Entity.save signature in alias loader.
|
grano/service/aliases.py
|
grano/service/aliases.py
|
import logging
from unicodecsv import DictReader, DictWriter
from grano.core import db
from grano.model import Entity, Schema
log = logging.getLogger(__name__)
## Import commands
def import_aliases(path):
with open(path, 'r') as fh:
reader = DictReader(fh)
for row in reader:
data = {}
for k, v in row.items():
k = k.lower().strip()
data[k] = v
assert 'canonical' in data, 'No "canonical" column!'
assert 'alias' in data, 'No "alias" column!'
import_alias(data)
db.session.commit()
def import_alias(data):
# TODO: this actually deleted old entities, i.e. makes invalid
# entities - we should try and either re-direct them, or keep
# old entities whenever that makes sense.
canonical = Entity.by_name(data.get('canonical'))
if canonical is None:
schema = Schema.cached(Entity, 'base')
prop = {
'name': 'name',
'value': data.get('canonical'),
'active': True,
'schema': schema,
'source_url': data.get('source_url')
}
canonical = Entity.save([schema], [prop], [])
db.session.flush()
alias = Entity.by_name(data.get('alias'))
if alias is None:
Entity.PROPERTIES.save(canonical, 'name', {
'schema': Schema.cached(Entity, 'base'),
'value': data.get('alias'),
'active': False,
'source_url': data.get('source_url')
})
elif alias.id != canonical.id:
alias.merge_into(canonical)
if alias.id != canonical.id:
log.info("Mapped: %s -> %s", alias.id, canonical.id)
## Export commands
def export_aliases(path):
with open(path, 'w') as fh:
writer = DictWriter(fh, ['entity_id', 'alias', 'canonical'])
writer.writeheader()
for entity in Entity.all():
#print entity
export_entity(entity, writer)
def export_entity(entity, writer):
canonical = None
aliases = []
for prop in entity.properties.filter_by(name='name'):
aliases.append(prop.value)
if prop.active:
canonical = prop.value
for alias in aliases:
writer.writerow({
'entity_id': entity.id,
'alias': alias,
'canonical': canonical
})
|
Python
| 0
|
@@ -954,37 +954,8 @@
= %7B%0A
- 'name': 'name', %0A
@@ -1161,14 +1161,22 @@
a%5D,
-%5B
+%7B'name':
prop
-%5D
+%7D
, %5B%5D
|
4a170c7ea5fb67d15d04485973ce46b112b8a0ac
|
Make CDN regression handle missing files in rebaseline.
|
tools/cdn/regression.py
|
tools/cdn/regression.py
|
#!/usr/bin/env python
#
# regression.py - Checks for visual regressions in content loaded from
# the CDN by rendering before and after versions and comparing them.
#
# regression.py [-n num_assets] [-t type] url command
#
# regression.py is a utility for finding visual regressions in
# meshes. It can be used to check for regressions when making bug
# fixes to the model loading code, model rendering code, or different
# processed versions of models from the CDN (e.g. original
# vs. optimized).
#
# The basic approach is to grab a listing of content from the CDN (you
# pick how many models to evaluate) and two images of each. This is
# split into two phases so you don't have to be able to generate both
# versions at the same time (e.g. if you are updating the rendering
# code) and so you can create baseline images once and continue to
# compare against them.
#
# The command always takes two basic arguments, the url of the CDN to
# work with and a command. There are 4 commands:
#
# baseline: generate baseline images
#
# new: generate images based on new code (or by specifying a
# different -t type)
#
# compare: generate a report comparing the baseline and new images
#
# rebaseline: replace the current baseline images with the current
# new images, accepting any differences as fixes rather than
# errors
#
# There are also these additional options:
#
# -n num_assets - The number of asset listings to grab from the
# CDN. Defaults to 10.
#
# -t type - The type of mesh to get.
# original - the original uploaded mesh [default]
# optimized - version optimized for streaming
#
from list import grab_list
import sys, os.path, subprocess, shutil
import Image, ImageChops, ImageStat
def usage():
print 'regression.py [-n num_assets] [-t type] url command'
exit(-1)
def name_from_url(url):
name = url.replace('meerkat:///', '')
idx = max(name.find('original'), name.find('optimized'))
name = name[:idx-1]
name = name.replace('/', '_').replace('.', '_')
return name
def data_directory():
return 'regression'
def filename_from_url(url, klass):
return os.path.join(data_directory(), name_from_url(url) + '.' + klass + '.' + 'png')
def generate_images(items, klass):
if not os.path.exists(data_directory()): os.makedirs(data_directory())
for item in items:
sshot = filename_from_url(item, klass)
# FIXME requiring running from a specific directory sucks,
# currently required to be in root directory
# FIXME how to decide between debug/release meshview?
subprocess.Popen(['build/cmake/meshview_d', '--mesh=' + item, '--screenshot=' + sshot]).communicate()
def compute_diffs(items):
for item in items:
try:
sshot_baseline = filename_from_url(item, 'baseline')
sshot_new = filename_from_url(item, 'new')
sshot_diff = filename_from_url(item, 'diff')
im_base = Image.open(sshot_baseline)
im_new = Image.open(sshot_new)
# Note that this comparison isn't great, its RGB based but something like LAB would be preferable.
im_diff = ImageChops.difference(im_base, im_new)
diff_stats = ImageStat.Stat(im_diff)
print item, max(diff_stats.rms) # max of rgb RMSs
if max([channel[1] for channel in diff_stats.extrema]) > 0: # max of RGBs maxes
im_diff.save(sshot_diff)
except IOError:
print item, 'skipped due to missing files.'
def rebaseline(items):
# This is simple, just copy over the new images to baseline
for item in items:
sshot_baseline = filename_from_url(item, 'baseline')
sshot_new = filename_from_url(item, 'new')
shutil.copy(sshot_new, sshot_baseline)
def main():
if len(sys.argv) < 3:
usage()
url = sys.argv[-2]
command = sys.argv[-1]
args = sys.argv[:-2]
if command not in ['baseline', 'new', 'compare', 'rebaseline']:
usage()
num = 10
model_type = 'original'
x = 1
while x < len(args):
if args[x] == '-n':
x += 1
num = int(args[x])
elif args[x] == '-t':
x += 1
model_type = args[x]
else:
usage()
x += 1
items = grab_list(url, num, 'model', model_type, 'meerkat')
if command in ['baseline', 'new']:
generate_images(items, command)
elif command == 'compare':
compute_diffs(items)
elif command == 'rebaseline':
rebaseline(items)
if __name__ == "__main__":
main()
|
Python
| 0
|
@@ -2342,32 +2342,80 @@
item in items:%0A
+ print 'Generating screenshot for', item%0A
sshot =
@@ -3682,32 +3682,49 @@
item in items:%0A
+ try:%0A
sshot_ba
@@ -3768,32 +3768,36 @@
eline')%0A
+
sshot_new = file
@@ -3815,32 +3815,36 @@
rl(item, 'new')%0A
+
shutil.c
@@ -3873,16 +3873,57 @@
aseline)
+%0A except IOError:%0A pass
%0A%0Adef ma
|
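The comparison in the record rests on two imaging calls: `ImageChops.difference` for a per-pixel delta and `ImageStat.Stat(...)` for its RMS and per-channel extrema, with the fix wrapping both passes in `try/except IOError` so a missing screenshot just skips the item. A self-contained sketch of the same check — the record uses classic PIL imports (`import Image`); under modern Pillow the spelling below applies, and both images must share mode and size:

```python
from PIL import Image, ImageChops, ImageStat

def images_differ(path_a, path_b, diff_path=None):
    """Return True (and optionally save a diff image) when pixels differ."""
    im_a, im_b = Image.open(path_a), Image.open(path_b)
    diff = ImageChops.difference(im_a, im_b)   # per-channel |a - b|
    stats = ImageStat.Stat(diff)
    # extrema is [(min, max), ...] per channel; any nonzero max means a change
    changed = max(hi for _, hi in stats.extrema) > 0
    if changed and diff_path:
        diff.save(diff_path)
    return changed
```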
7856a9f7e63c6a5146dc6976686e22420155f80c
|
If running on Heroku, use debug=False
|
app.py
|
app.py
|
from flask import Flask, render_template, jsonify, request
from get_solution import Solution
app = Flask(__name__)
@app.route('/')
@app.route('/index')
def index():
return render_template('base.html', title="Codeforces Solution by Handle Name")
@app.route('/get_solution', methods=['POST', 'GET'])
def get_solution():
user = request.args.get('user')
contest = request.args.get('contest')
pid = request.args.get('problem')
_solution = Solution(user, contest, pid)
return _solution.compute()
LOCAL = True
if __name__ == '__main__':
app.run(debug=LOCAL)
|
Python
| 0.999999
|
@@ -85,16 +85,26 @@
Solution
+%0Aimport os
%0A%0Aapp =
@@ -515,22 +515,8 @@
()%0A%0A
-LOCAL = True%0A%0A
if _
@@ -542,26 +542,90 @@
_':%0A
- app.run(debug=LOCAL)
+%0A if os.environ.get('HEROKU') is None:%0A app.run(debug=True)%0A else:%0A app.run()%0A
|
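The fix above keys Flask's debug mode off a `HEROKU` environment variable so the deployed app never runs with the interactive debugger enabled. The same toggle condensed into one line (variable name follows the record; it must be set in the Heroku config for this to work):

```python
import os
from flask import Flask

app = Flask(__name__)

if __name__ == '__main__':
    # Debug locally; never in the deployed environment.
    app.run(debug=os.environ.get('HEROKU') is None)
```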
aca7362a3056045f869375797cbaef58b76c6f3b
|
Return airlines as array instead of map
|
app.py
|
app.py
|
from flask import Flask, flash, request, redirect, url_for, render_template, jsonify
from werkzeug.utils import secure_filename
import sys
import os
import json
import logging
import urllib
import location_analyzer
import suggester
import db
import precomputed
app = Flask(__name__, static_url_path='/static')
model = db.model(os.environ['PSQL_URI'])
######################################################################
### Constants
ALLOWED_EXTENSIONS = set(['json'])
app.config['UPLOAD_FOLDER'] = 'upload/'
######################################################################
### Routes
@app.route("/static/<path:path>")
def get_static(path):
"""
Endpoint that serves static files
"""
return send_from_directory('static', path)
@app.route('/', methods=['GET'])
def home():
return render_template('upload.html', airports=precomputed.airports)
@app.route('/', methods=['POST'])
def upload_location():
raw_data = ''
# get location data
if len(request.form) != 0 and request.form['use_example'] == "1":
file = open('./data/example_locations.json', 'r')
raw_data = ''.join(file.readlines())
else:
# check if the post request has the file part
if 'file' not in request.files:
flash('No file part')
return redirect(request.url)
file = request.files['file']
if file.filename == '':
flash('No selected file')
return redirect(request.url)
if not allowed_file(file.filename):
flash('Filetype not allowed')
return redirect(request.url)
if not file:
flash('Problems with uploaded file')
return redirect(request.url)
raw_data = ''.join(file.stream.readlines())
# analyze location data to get Points of Interests and their
# probabilities
pois, data_size = location_analyzer.analyze(raw_data)
for i, poi in enumerate(pois):
lat, lon = poi['position']['lat'], poi['position']['lng']
nearest = model.getNearestAirports(lat, lon)
if poi['label'] > 30:
for j in xrange(len(nearest)):
nearest[j]['is_home'] = True
nearest.sort(key=lambda x: x['distance'])
pois[i]['nearbyAirports'] = nearest
return render_template('map.html', pois=pois, data_size=data_size)
@app.route('/map', methods=['GET'])
def show_map_from_manual_entry():
home_iatas, other_iatas = get_airport_params(request)
pois = []
selected = []
pos = model.get_airport_locations(other_iatas)
for iata in other_iatas:
label, is_home = 5, False
if iata in home_iatas:
label, is_home = 33, True
pois.append({'position': pos[iata], 'label': label})
selected.append({'name': precomputed.iata2name[iata], 'iata': iata, 'is_home': is_home})
for i, poi in enumerate(pois):
lat, lon = poi['position']['lat'], poi['position']['lng']
nearest = model.getNearestAirports(lat, lon)
if poi['label'] > 30:
for j in xrange(len(nearest)):
nearest[j]['is_home'] = True
nearest.sort(key=lambda x: x['distance'])
pois[i]['nearbyAirports'] = nearest
return render_template('map.html', pois=pois, data_size=len(pois), selected=selected)
@app.route('/suggestions', methods=['GET'])
def show_suggestion():
home_iatas, other_iatas = get_airport_params(request)
return jsonify({'suggestions': suggester.get_suggestion(home_iatas, other_iatas)})
@app.route('/airlines', methods=['GET'])
def show_airlines():
home_iatas, other_iatas = get_airport_params(request)
coverage = model.getAirlinesCoveringAirports(home_iatas, other_iatas)
prices = model.getPricesCoveringAirports(home_iatas, other_iatas)
airlines = {}
for cov in coverage:
airlines[cov['iata']] = {'num_routes': cov['num_routes'], 'share': cov['p']}
for price in prices:
iata = price['airline_iata'].strip()
if 'name' not in airlines[iata]:
airlines[iata]['name'] = price['airline']
if 'routes' not in airlines[iata]:
airlines[iata]['routes'] = []
airlines[iata]['routes'].append(price)
return jsonify({'airlines': airlines})
@app.route('/airlines/<code>', methods=['GET'])
def show_airline_details(code):
reviews = model.get_airline_reviews(code)
details = model.get_airline_data(code)
return render_template('airline.html', code=code, reviews=reviews, details=details)
######################################################################
### Helpers
def get_airport_params(request):
home_iatas = request.args.get('home_iatas').split(',')
other_iatas = request.args.get('other_iatas').split(',')
home_iatas = map(lambda x: urllib.unquote(x).decode('utf8'), home_iatas)
other_iatas = map(lambda x: urllib.unquote(x).decode('utf8'), other_iatas)
# Home airports must be in the list
for h in home_iatas:
if h not in other_iatas:
other_iatas.append(h)
return home_iatas, other_iatas
def allowed_file(filename):
return '.' in filename and filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS
######################################################################
### Main
if __name__ == "__main__":
app.secret_key = 'It was the best of times, it was the worst of times. Turbo-the-tardigrade'
app.config['SESSION_TYPE'] = 'filesystem'
# This is needed to log errors to heroku
app.logger.addHandler(logging.StreamHandler(sys.stdout))
app.logger.setLevel(logging.ERROR)
app.run(debug=True)
|
Python
| 0.000058
|
@@ -3827,16 +3827,37 @@
cov%5B'p'%5D
+, 'iata': cov%5B'iata'%5D
%7D%0A%0A f
@@ -4191,16 +4191,25 @@
airlines
+.values()
%7D)%0A%0A@app
|
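The change above first copies each IATA code into its own record, then returns `airlines.values()` so the JSON payload is an array rather than a code-keyed map, losing no information. One caveat: the record is Python 2, where `dict.values()` is a plain list; on Python 3 it is a view and needs wrapping before `jsonify`, as in this sketch with invented data:

```python
airlines = {
    'BA': {'num_routes': 12, 'share': 0.4},
    'LH': {'num_routes': 9, 'share': 0.3},
}

# Copy the key into each record, then emit an array instead of a map.
for iata, rec in airlines.items():
    rec['iata'] = iata
payload = {'airlines': list(airlines.values())}  # list() needed on Python 3
```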
2dc3e7eb3e6e5b32347d24d5353f9a5f0f6915c2
|
Create app.py
|
app.py
|
app.py
|
#!/usr/bin/env python
import urllib
import json
import os
import time
from flask import Flask
from flask import request
from flask import make_response
from datetime import datetime
# Flask app should start in global layout
app = Flask(__name__)
@app.route('/webhook', methods=['POST'])
def webhook():
req = request.get_json(silent=True, force=True)
print("Request:")
print(json.dumps(req, indent=4))
res = makeWebhookResult(req)
res = json.dumps(res, indent=4)
print(res)
r = make_response(res)
r.headers['Content-Type'] = 'application/json'
return r
def makeWebhookResult(req):
if req.get("result").get("action") != "time.get":
return {}
result = req.get("result")
parameters = result.get("parameters")
zone = parameters.get("sys.location")
// cost = {'Europe':100, 'North America':200, 'South America':300, 'Asia':400, 'Africa':500}
// speech = "The cost of shipping to " + zone + " is " + str(cost[zone]) + " euros."
localtime = time.localtime(time.time())
print "Local current time :", localtime
// print("Response:")
// print(speech)
return {
"speech": speech,
"displayText": speech,
#"data": {},
# "contextOut": [],
"source": "apiai-onlinestore-shipping"
}
if __name__ == '__main__':
port = int(os.getenv('PORT', 5000))
print "Starting app on port %d" % port
app.run(debug=True, port=port, host='0.0.0.0')
|
Python
| 0.000003
|
@@ -809,18 +809,16 @@
n%22)%0A%0A
-//
cost =
@@ -904,18 +904,16 @@
00%7D%0A%0A
-//
speech
@@ -1078,26 +1078,24 @@
caltime%0A%0A
-//
print(%22Resp
@@ -1109,10 +1109,8 @@
%0A
-//
pri
|
7cae85a5a0da8a49297eade0dd2ebdca0259e6ff
|
Support jpeg tiles.
|
app.py
|
app.py
|
#!/usr/bin/python
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at: http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distrib-
# uted under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, either express or implied. See the License for
# specific language governing permissions and limitations under the License.
"""The main WSGI application, which dispatches to all the handlers.
URL paths take one of these two forms:
<root_path>[/<domain>]/<publication_label>
<root_path>[/<domain>]/.<handler_name>[/<args>]
We want the entire app to work under any root path, which is determined by
the 'root_path' Config setting. The domain part is optional; domains and
handler names are distinguishable because domains cannot begin with a dot.
The domain turns into a 'domain' argument to the handler; see BaseHandler.
"""
__author__ = 'kpy@google.com (Ka-Ping Yee)'
import re
import webapp2
import config
import utils
class RootPathRoute(webapp2.BaseRoute):
"""A Route that prepends a root path to its child routes."""
def __init__(self, routes):
webapp2.BaseRoute.__init__(self, None)
self.router = webapp2.Router(routes)
def match(self, request): # pylint: disable=g-bad-name
root_path = config.Get('root_path') or ''
if request.path.startswith(root_path):
return self.router.match(utils.Struct(
get=request.get, path=request.path[len(root_path):]))
class OptionalDomainRoute(webapp2.BaseRoute):
"""A Route that accepts an optional domain name in the path or query string.
The domain name can appear as the first component of the path, or as the
"domain" query parameter, or not at all, e.g.:
/.blah
/example.com/.blah
/.blah?domain=example.com
"""
DOMAIN_PREFIX_RE = re.compile(r'^/([a-z0-9.-]+\.[a-z]+)(/.*)')
def __init__(self, routes):
webapp2.BaseRoute.__init__(self, None)
self.router = webapp2.Router(routes)
def match(self, request): # pylint: disable=g-bad-name
domain = request.get('domain', None)
match = self.DOMAIN_PREFIX_RE.match(request.path)
if match:
domain = domain or match.group(1) # query param overrides domain in path
request = utils.Struct(get=request.get, path=match.group(2))
result = self.router.match(request)
if result and domain:
result[2]['domain'] = domain # add an extra 'domain' keyword argument
return result
def Route(template, handler):
"""Make a Route whose placeholders accept only allowable map IDs or labels."""
return webapp2.Route(template.replace('>', r':[\w-]+>'), handler)
app = webapp2.WSGIApplication([
Route('/', 'index.Index'),
RootPathRoute([
OptionalDomainRoute([
# User-facing request handlers
Route('', 'index.Index'),
Route('/', 'index.Index'),
Route('/<label>', 'maps.MapByLabel'),
Route('/.admin', 'admin.Admin'),
Route('/.admin/<map_id>', 'admin.AdminMap'),
Route('/.catalog', 'catalog.Catalog'),
Route('/.create', 'create.Create'),
Route('/.delete', 'delete.Delete'),
Route('/.login', 'login.Login'),
Route('/.maps', 'maps.MapList'),
Route('/.maps/<map_id>', 'maps.MapById'),
Route('/.prefs', 'prefs.Prefs'),
Route('/.publish', 'publish.Publish'),
Route('/.redirect/<label>', 'redirect.Redirect'),
Route('/.wms/tiles/<tileset_id>/<z>/<x>/<y>.png',
'wmscache.tilecache_main.Tiles'),
# XHR or JSONP request handlers
Route('/.api/maps', 'api.PublishedMaps'),
Route('/.api/maps/<map_id>', 'api.MapById'),
Route('/.diff/<map_id>', 'diff.Diff'),
Route('/.legend', 'legend_item_extractor.GetLegendItems'),
Route('/.jsonp', 'jsonp.Jsonp'),
Route('/.metadata', 'metadata.Metadata'),
Route('/.share/<map_id>', 'share.Share'),
Route('/.wms/configure',
'wmscache.tileset_config.ConfigureTileset'),
Route('/.wms/query', 'wmscache.wms_query.WmsQuery'),
# Tasks executed by cron or taskqueue
Route('/.metadata_fetch', 'metadata_fetch.MetadataFetch'),
Route('/.wms/cleanup', 'wmscache.tileworker.CleanupOldWorkers'),
Route('/.wms/tileworker', 'wmscache.tileworker.StartWorker'),
])
]),
])
|
Python
| 0.000001
|
@@ -3752,11 +3752,13 @@
%3Cy%3E.
-png
+%3Cfmt%3E
',%0A
|
cb5aa965254c6abe3a865747e823fbe90f894a2c
|
return addresses as 'items'
|
app.py
|
app.py
|
from flask import Flask, Response, url_for, request, json
import os
import urllib2
import datetime
app = Flask(__name__)
@app.route('/hello')
def hello():
return 'Hello world'
@app.route('/')
def index():
response_data = json.dumps({ 'closures_href': url_for('closures') })
response = Response(response_data, status=200, mimetype='application/json')
return response
@app.route('/closures')
def closures():
d = datetime.datetime.today().strftime('%Y-%m-%d')
scraperwiki_query = "https://api.scraperwiki.com/api/1.0/datastore/sqlite?format=jsondict&name=denver_streets_and_sidewalks&query=select%20*%20from%20%60swdata%60%20where%20start_date%20%3C%20date('"+ d +"')%20and%20end_date%20%3E%20date('" + d + "')"
scraperwiki_response = urllib2.urlopen(scraperwiki_query).read()
response = Response(scraperwiki_response, status=200, mimetype='application/json')
#return url_for('closure_id', closure_id=1)
return response
@app.route('/closures/<int:closure_id>')
def closure_id():
return ""
# find closure with closure_id
if __name__ == '__main__':
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
|
Python
| 0.999977
|
@@ -744,16 +744,38 @@
ponse =
+%7B 'items': json.loads(
urllib2.
@@ -807,16 +807,19 @@
).read()
+) %7D
%0A respo
@@ -833,16 +833,27 @@
esponse(
+json.dumps(
scraperw
@@ -864,16 +864,17 @@
response
+)
, status
@@ -912,54 +912,8 @@
n')%0A
- #return url_for('closure_id', closure_id=1)%0A
re
|
1f0261486710cebc1d7f279b5cada22b9a92c30f
|
Correct URL expressions
|
app.py
|
app.py
|
from flask import Flask, render_template, request, url_for, redirect, abort
import flask_bootstrap
import db
import os
import rpi
import psycopg2.extras
import json
app = Flask(__name__)
flask_bootstrap.Bootstrap(app)
# This method is used by typeahead.js
@app.route("/api/v1/player")
def api_list_players():
conn = db.database_connection()
cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
cur.execute("SELECT name FROM player")
names = [rec['name'] for rec in cur.fetchall()]
return json.dumps(names)
@app.route("/about/")
def show_about():
githubLink = "https://github.com/JamesLaverack/scoreboard"
return render_template('about.html', githubLink=githubLink)
@app.route("/")
def show_index():
conn = db.database_connection()
cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
cur.execute("SELECT name FROM (SELECT game.name, count(score.id) AS num_games FROM game LEFT JOIN score ON score.game_id = game.id GROUP BY game.name) AS games WHERE num_games > 3 ORDER BY num_games")
popularGames = [x['name'] for x in cur.fetchall()]
cur.execute("SELECT game.name AS game_name, winner.name AS winner_name, loser.name AS loser_name FROM score JOIN game ON score.game_id = game.id JOIN player winner ON winner.id = score.winner_id JOIN player loser ON loser.id = score.loser_id ORDER BY score.happened DESC LIMIT 5")
recentScores = cur.fetchall()
return render_template('index.html',
popularGames=popularGames,
recentScores=recentScores)
@app.route("/player/")
def show_players():
cur = db.database_connection().cursor()
cur.execute("SELECT name FROM player")
players = [x[0] for x in cur.fetchall()]
return render_template('players.html', players=players)
@app.route("/player/<name>")
def show_player(name):
if not player_exists(name):
abort(404)
conn = db.database_connection()
cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
cur.execute("SELECT game.name AS game_name, winner.name AS winner_name, loser.name AS loser_name FROM score JOIN game ON score.game_id = game.id JOIN player winner ON winner.id = score.winner_id JOIN player loser ON loser.id = score.loser_id WHERE winner.name = %s OR loser.name = %s ORDER BY score.happened DESC LIMIT 5", [name, name])
recentScores = cur.fetchall()
return render_template('player.html',
name=name,
recentScores=recentScores)
@app.route("/game/<gamename>")
def show_game(gamename):
conn = db.database_connection()
cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
cur.execute("SELECT id FROM game WHERE name = %s", [gamename])
id = cur.fetchone()
if id is None:
abort(404)
id = id['id']
cur.execute("SELECT winner.name AS winner_name, loser.name AS loser_name FROM score JOIN player winner ON winner.id = score.winner_id JOIN player loser ON loser.id = score.loser_id WHERE score.game_id = %s ORDER BY score.happened DESC LIMIT 3", [id])
scores = cur.fetchall()
rankings = rpi.calculate_rpi(gamename)
leaderboard = rpi.generate_leaderboard(rankings)
return render_template('game.html',
gamename=gamename,
scores=scores,
leaderboard=leaderboard)
@app.route("/game/")
def show_games():
cur = db.database_connection().cursor()
cur.execute("SELECT name FROM game")
return render_template('games.html', games=cur.fetchall())
@app.route("/game/", methods=['POST'])
def add_game():
conn = db.database_connection()
cur = conn.cursor()
cur.execute("INSERT INTO game (name) VALUES (%s)",
[request.form['gameName']])
conn.commit()
return redirect(url_for('show_game', gamename=request.form['gameName']))
@app.route("/game/<gamename>/submit")
def show_submit_score(gamename):
if not game_exists(gamename):
abort(404)
return render_template('submit.html', gamename=gamename)
def get_or_create_id_for_player(playerName):
conn = db.database_connection()
cur = conn.cursor()
cur.execute("SELECT id FROM player WHERE name = %s",
[playerName])
playerId = cur.fetchone()
# If it doesn't exist, make it
if playerId is None:
cur.execute("INSERT INTO player (name) VALUES (%s)",
[playerName])
conn.commit()
cur.execute("SELECT id FROM player WHERE name = %s", [playerName])
playerId = cur.fetchone()
return playerId
@app.route("/game/<gamename>/submit", methods=['POST'])
def submit_score(gamename):
if not game_exists(gamename):
abort(404)
winnerId = get_or_create_id_for_player(request.form['winnerName'])
loserId = get_or_create_id_for_player(request.form['loserName'])
conn = db.database_connection()
cur = conn.cursor()
cur.execute("""INSERT INTO score (winner_id, loser_id, game_id)
VALUES (%s, %s, (SELECT id FROM game WHERE name = %s))""",
(winnerId, loserId, gamename))
conn.commit()
return redirect(url_for('show_game', gamename=gamename))
def game_exists(gamename):
cur = db.database_connection().cursor()
cur.execute("SELECT id FROM game WHERE name = %s", [gamename])
return cur.fetchone() is not None
def player_exists(name):
cur = db.database_connection().cursor()
cur.execute("SELECT id FROM player WHERE name = %s", [name])
return cur.fetchone() is not None
@app.route("/game/<gamename>/leaderboard")
def show_leaderboard(gamename):
if not game_exists(gamename):
abort(404)
scoreThreashold = int(os.environ["LEADERBOARD_SCORE_THREASHOLD"])
leaderboard = rpi.generate_leaderboard(rpi.calculate_rpi(gamename))
print("Final leaderboard: %s" % leaderboard)
return render_template('leaderboard.html',
gamename=gamename,
leaderboard=leaderboard,
scoreThreashold=scoreThreashold)
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html')
if __name__ == "__main__":
app.run(debug=True)
|
Python
| 0
|
@@ -2558,27 +2558,28 @@
e/%3Cgamename%3E
+/
%22)%0A
-
def show_gam
@@ -3942,16 +3942,17 @@
%3E/submit
+/
%22)%0Adef s
@@ -4664,16 +4664,17 @@
%3E/submit
+/
%22, metho
@@ -5581,16 +5581,16 @@
None%0A%0A%0A
-
@app.rou
@@ -5621,16 +5621,17 @@
derboard
+/
%22)%0Adef s
|
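The fix adds trailing slashes to several route rules. In Flask, a rule ending with `/` behaves like a directory: requests for the bare path are redirected to the canonical slashed URL, while a rule without the slash returns 404 when a slash is appended. A minimal illustration:

```python
from flask import Flask

app = Flask(__name__)

@app.route('/game/<gamename>/')   # /game/chess redirects to /game/chess/
def show_game(gamename):
    return gamename

@app.route('/about')              # /about/ would 404 under this rule
def about():
    return 'about'
```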
86fc8afe0a8eed0f9f61372c19f9655abfb8c556
|
fix for not caching when mergeable_state==unknown
|
app.py
|
app.py
|
#!/usr/bin/env python
import json
import os
import hashlib
import requests
from werkzeug.contrib.cache import MemcachedCache
from flask import Flask, request, make_response, jsonify, send_file
from flask.views import MethodView
MEMCACHE_URL = os.environ.get('MEMCACHE_URL', '127.0.0.1:11211').split(',')
DEBUG = os.environ.get('DEBUG', False) in ('true', '1', 'y', 'yes')
GITHUB_OAUTH_TOKEN = os.environ.get('GITHUB_OAUTH_TOKEN')
APP_LOCATION = 'app'
if os.path.isdir('./dist') and os.listdir('./dist'):
print "Note: Serving files from ./dist"
APP_LOCATION = 'dist'
app = Flask(
__name__,
static_folder=os.path.join(APP_LOCATION, 'static')
)
cache = MemcachedCache(MEMCACHE_URL)
class ProxyView(MethodView):
# when we serve straight from memcache
short_expires = 60 * 5 * (1 + 3 * int(DEBUG))
# store long term
long_expires = 60 * 60 * 24
def _attach_auth(self, headers):
if GITHUB_OAUTH_TOKEN:
headers['Authorization'] = 'token %s' % GITHUB_OAUTH_TOKEN
def get(self, path):
if '://' in path:
assert path.startswith(self.base)
path = path.replace(self.base, '')
path = '%s?%s' % (path, request.query_string)
key = self.prefix + hashlib.md5(path).hexdigest()
short_key = 'short-' + key
long_key = 'long-' + key
short_value, long_value = cache.get_many(*[short_key, long_key])
if short_value:
value = json.loads(short_value)
elif long_value:
value = json.loads(long_value)
if value.get('_etag'):
headers = {'If-None-Match': value['_etag']}
self._attach_auth(headers)
# print path
# print headers
print "CONDITIONAL GET", self.base + path
response = requests.get(self.base + path, headers=headers)
if response.status_code == 304:
# it's still fresh!
cache.set(
short_key,
json.dumps(value),
self.short_expires
)
value['_ratelimit_limit'] = (
response.headers.get('X-RateLimit-Limit')
)
value['_ratelimit_remaining'] = (
response.headers.get('X-RateLimit-Remaining')
)
else:
value = None
else:
value = None
else:
value = None
if not value:
print "GET", self.base + path
headers = {}
self._attach_auth(headers)
response = requests.get(self.base + path, headers=headers)
assert response.status_code == 200, response.status_code
value = response.json()
if not isinstance(value, dict):
# if the JSON response is a list or something we can't
# attach extra stuff to it
value = {'_data': value}
cache.set(short_key, json.dumps(value), self.short_expires)
# we only need these for the long-storage stuff
value['_etag'] = response.headers.get('ETag')
# often when pulling down a pull request, the state of
# whether the pull request is mergeable takes a while to figure
# out so we don't want to cache that.
if value.get('mergeable_state') != 'unknown':
cache.set(long_key, json.dumps(value), self.long_expires)
# these values aren't worth storing in the cache but
# useful to return as part of the response
value['_ratelimit_limit'] = (
response.headers.get('X-RateLimit-Limit')
)
value['_ratelimit_remaining'] = (
response.headers.get('X-RateLimit-Remaining')
)
return make_response(jsonify(value))
class GithubProxyView(ProxyView):
prefix = 'github'
base = 'https://api.github.com/'
class BugzillaProxyView(ProxyView):
prefix = 'bugzilla'
base = 'https://bugzilla.mozilla.org/rest/'
class Webhook(MethodView):
def post(self):
# print "Incoming webhook"
payload = json.loads(request.form['payload'])
# from pprint import pprint
# pprint(payload)
paths = []
if payload.get('action') == 'opened' and payload.get('repository'):
repo_full_name = payload['repository']['full_name']
# print "FULL_NAME", repr(repo_full_name)
paths.append('repos/%s/pulls?state=open' % repo_full_name)
elif payload.get('action') == 'synchronize' and payload.get('pull_request'):
# repo_full_name = payload['repository']['full_name']
commits_url = payload.get('pull_request').get('commits_url')
path = commits_url.replace(GithubProxyView.base, '')
paths.append(path)
paths.append(path + '?')
comments_url = payload.get('pull_request').get('comments_url')
path = comments_url.replace(GithubProxyView.base, '')
paths.append(path)
paths.append(path + '?')
if payload.get('pull_request', {}).get('statuses_url'):
statuses_url = payload['pull_request']['statuses_url']
path = statuses_url.replace(GithubProxyView.base, '')
paths.append(path)
paths.append(path + '?')
for path in paths:
cache_key = self._path_to_cache_key(path)
# print "CACHE_KEY", cache_key
if cache.get(cache_key):
print "\tDELETED", cache_key, 'FOR', path
cache.delete(cache_key)
if not paths:
return make_response("No action\n")
return make_response('OK\n')
def _path_to_cache_key(self, path):
return 'short-' + GithubProxyView.prefix + hashlib.md5(path).hexdigest()
app.add_url_rule(
'/webhook',
view_func=Webhook.as_view('webhook')
)
@app.route('/')
def index_html():
return catch_all('index.html')
@app.route('/<path:path>')
def catch_all(path):
if path == 'favicon.ico':
path = 'static/favicon.ico'
path = path or 'index.html'
path = os.path.join(APP_LOCATION, path)
# print "PATH", path
if not (os.path.isdir(path) or os.path.isfile(path)):
path = os.path.join(APP_LOCATION, 'index.html')
return send_file(path)
app.add_url_rule(
'/githubproxy/<path:path>',
view_func=GithubProxyView.as_view('githubproxy')
)
app.add_url_rule(
'/bugzillaproxy/<path:path>',
view_func=BugzillaProxyView.as_view('bugzillaproxy')
)
if __name__ == '__main__':
app.debug = DEBUG
port = int(os.environ.get('PORT', 5000))
host = os.environ.get('HOST', '0.0.0.0')
app.run(host=host, port=port)
|
Python
| 0.000001
|
@@ -3077,16 +3077,271 @@
value%7D%0A
+ # often when pulling down a pull request, the state of%0A # whether the pull request is mergeable takes a while to figure%0A # out so we don't want to cache that.%0A if value.get('mergeable_state') != 'unknown':%0A
@@ -3542,186 +3542,77 @@
#
-often when pulling down a pull request, the state of%0A # whether the pull request is mergeable takes a while to figure%0A # out so we don't want to cache that.
+see comment about about possibly not caching based on mergeable_state
%0A
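The hunk above moves the mergeable_state check so that pull requests whose mergeability is still 'unknown' never reach the long-lived cache. The conditional-GET path it protects is plain ETag revalidation; a minimal standalone sketch (URL and token handling here are placeholders, not taken from the commit):

import requests

def fetch_with_etag(url, etag=None, token=None):
    # Conditional GET: GitHub answers 304 (without charging the rate
    # limit) when the stored ETag still matches the resource.
    headers = {}
    if etag:
        headers['If-None-Match'] = etag
    if token:
        headers['Authorization'] = 'token %s' % token
    response = requests.get(url, headers=headers)
    if response.status_code == 304:
        return None, etag  # cached body is still fresh
    response.raise_for_status()
    return response.json(), response.headers.get('ETag')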
|
32cb09df8f8c62bdc2ba5331b46b217abed49705
|
Falling back to batch mode should be a WARNING not ERROR
|
rootpy/logger/roothandler.py
|
rootpy/logger/roothandler.py
|
import ctypes
import logging
import re
import sys
from . import root_logger, log
from .magic import DANGER, set_error_handler, re_execute_with_exception
class SHOWTRACE:
enabled = False
SANE_REGEX = re.compile("^[^\x80-\xFF]*$")
class Initialized:
value = False
ABORT_LEVEL = log.ERROR
def python_logging_error_handler(level, root_says_abort, location, msg):
"""
A python error handler for ROOT which maps ROOT's errors and warnings on
to python's.
"""
import rootpy.util.quickroot as QROOT
if not Initialized.value:
QROOT.kInfo, QROOT.kWarning, QROOT.kError, QROOT.kFatal, QROOT.kSysError
QROOT.kTRUE
QROOT.gErrorIgnoreLevel
Initialized.value = True
try:
QROOT.kTRUE
except RuntimeError:
# Note: If the above causes us problems, it's because this logging
# handler has been called multiple times already with an
# exception. In that case we need to force upstream to raise it.
_, exc, traceback = sys.exc_info()
caller = sys._getframe(2)
re_execute_with_exception(caller, exc, traceback)
if level < QROOT.gErrorIgnoreLevel:
# Needed to silence some "normal" startup warnings
# (copied from PyROOT Utility.cxx)
return
log = root_logger.getChild(location.replace("::", "."))
if level >= QROOT.kSysError or level >= QROOT.kFatal:
lvl = logging.CRITICAL
elif level >= QROOT.kError:
lvl = logging.ERROR
elif level >= QROOT.kWarning:
lvl = logging.WARNING
elif level >= QROOT.kInfo:
lvl = logging.INFO
else:
lvl = logging.DEBUG
if not SANE_REGEX.match(msg):
# Not ASCII characters. Escape them.
msg = repr(msg)[1:-1]
log.log(lvl, msg)
# String checks are used because we need a way of (un)forcing abort without
# modifying a global variable (gErrorAbortLevel) for the multithread tests
abort = lvl >= ABORT_LEVEL or "rootpy.ALWAYSABORT" in msg or root_says_abort
if abort and not "rootpy.NEVERABORT" in msg:
caller = sys._getframe(1)
try:
# We can't raise an exception from here because ctypes/PyROOT swallows it.
# Hence the need for dark magic, we re-raise it within a trace.
from rootpy import ROOTError
raise ROOTError(level, location, msg)
except RuntimeError:
_, exc, traceback = sys.exc_info()
if SHOWTRACE.enabled:
from traceback import print_stack
print_stack(caller)
if DANGER.enabled:
# Avert your eyes, dark magic be within...
re_execute_with_exception(caller, exc, traceback)
if root_says_abort:
log.CRITICAL("abort().. expect a stack trace")
ctypes.CDLL(None).abort()
|
Python
| 0.999601
|
@@ -293,16 +293,433 @@
.ERROR%0A%0A
+def fixup_msg(lvl, msg):%0A %0A # Fixup for this ERROR to a WARNING because it has a reasonable fallback.%0A # WARNING:ROOT.TGClient.TGClient%5D can't open display %22localhost:10.0%22, switching to batch mode...%0A # In case you run from a remote ssh session, reconnect with ssh -Y%0A if %22switching to batch mode...%22 in msg and lvl == logging.ERROR:%0A return logging.WARNING, msg%0A %0A return lvl, msg%0A%0A
def pyth
@@ -2197,16 +2197,117 @@
g)%5B1:-1%5D
+%0A %0A # Apply fixups to improve consistency of errors/warnings%0A lvl, msg = fixup_msg(lvl, msg)
%0A%0A lo
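The added fixup_msg demotes one known-benign ROOT error (the display fallback to batch mode) from ERROR to WARNING before the abort check runs. The pattern generalizes to a table of demotions; a sketch under that assumption, not part of the commit:

import logging

# (substring, level to demote to); the single entry mirrors the commit
DEMOTIONS = [
    ("switching to batch mode...", logging.WARNING),
]

def fixup_msg(lvl, msg):
    for substring, demoted in DEMOTIONS:
        if substring in msg and lvl > demoted:
            return demoted, msg
    return lvl, msg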
|
070589ee7dba86dd3d0a8928ebdd331d4faaa0c6
|
Remove pdf_file field from plugin
|
addons/plugin_thunderbird/plugin_thunderbird.py
|
addons/plugin_thunderbird/plugin_thunderbird.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import fields
from osv import osv
class plugin_thunderbird_installer(osv.osv_memory):
_name = 'plugin_thunderbird.installer'
_inherit = 'res.config.installer'
_columns = {
'thunderbird': fields.boolean('Thunderbird Plug-in', help="Allows you to select an object that you would like to add to your email and its attachments."),
'plugin_name': fields.char('File name', size=64),
'plugin_file': fields.char('Thunderbird Plug-in', size=256, readonly=True, help="Thunderbird plug-in file. Save this file and install it in Thunderbird."),
}
_defaults = {
'thunderbird': True,
'plugin_name': 'openerp_plugin.xpi',
'pdf_file' : 'http://doc.openerp.com/v6.1/book/2/3_CRM_Contacts/communicate.html#managing-your-crm-from-mozilla-thunderbird',
}
def default_get(self, cr, uid, fields, context=None):
res = super(plugin_thunderbird_installer, self).default_get(cr, uid, fields, context)
base_url = self.pool.get('ir.config_parameter').get_param(cr, uid, 'web.base.url')
res['plugin_file'] = base_url + '/plugin_thunderbird/static/openerp_plugin.xpi'
return res
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Python
| 0
|
@@ -1655,142 +1655,8 @@
i',%0A
- 'pdf_file' : 'http://doc.openerp.com/v6.1/book/2/3_CRM_Contacts/communicate.html#managing-your-crm-from-mozilla-thunderbird',%0A
|
8dbe0a3f0b9371ef63b099389fc2a8e12b2632c6
|
test button
|
app.py
|
app.py
|
#!/usr/bin/env python
from __future__ import print_function
from future.standard_library import install_aliases
install_aliases()
from urllib.parse import urlparse, urlencode
from urllib.request import urlopen, Request
from urllib.error import HTTPError
import json
import os
from flask import Flask
from flask import request
from flask import make_response
# Flask app should start in global layout
app = Flask(__name__)
@app.route('/webhook', methods=['POST'])
def webhook():
req = request.get_json(silent=True, force=True)
print("Request:")
print(json.dumps(req, indent=4))
res = processRequest(req)
res = json.dumps(res, indent=4)
# print(res)
r = make_response(res)
r.headers['Content-Type'] = 'application/json'
return r
def processRequest(req):
if req.get("result").get("action") != "yahooWeatherForecast":
return {}
baseurl = "https://query.yahooapis.com/v1/public/yql?"
yql_query = makeYqlQuery(req)
if yql_query is None:
return {}
yql_url = baseurl + urlencode({'q': yql_query}) + "&format=json"
result = urlopen(yql_url).read()
data = json.loads(result)
res = makeWebhookResult(data)
return res
def makeYqlQuery(req):
result = req.get("result")
parameters = result.get("parameters")
city = parameters.get("geo-city")
if city is None:
return None
return "select * from weather.forecast where woeid in (select woeid from geo.places(1) where text='" + city + "')"
def makeWebhookResult(data):
query = data.get('query')
if query is None:
return {}
result = query.get('results')
if result is None:
return {}
channel = result.get('channel')
if channel is None:
return {}
item = channel.get('item')
location = channel.get('location')
units = channel.get('units')
if (location is None) or (item is None) or (units is None):
return {}
condition = item.get('condition')
if condition is None:
return {}
# print(json.dumps(item, indent=4))
speech = "Today in " + location.get('city') + ": " + condition.get('text') + \
", the temperature is " + condition.get('temp') + " " + units.get('temperature')
print("Response:")
print(speech)
return {
"speech": speech,
"displayText": speech,
# "data": data,
# "contextOut": [],
"source": "apiai-weather-webhook-sample"
}
if __name__ == '__main__':
port = int(os.getenv('PORT', 5000))
print("Starting app on port %d" % port)
app.run(debug=False, port=port, host='0.0.0.0')
|
Python
| 0.000002
|
@@ -20,117 +20,8 @@
on%0A%0A
-from __future__ import print_function%0Afrom future.standard_library import install_aliases%0Ainstall_aliases()%0A%0A
from
@@ -2345,16 +2345,330 @@
sample%22%0A
+ %22buttons%22:%5B%0A %7B%0A %22type%22:%22web_url%22,%0A %22url%22:%22https://petersapparel.parseapp.com%22,%0A %22title%22:%22Show Website%22%0A %7D,%0A %7B%0A %22type%22:%22postback%22,%0A %22title%22:%22Start Chatting%22,%0A %22payload%22:%22USER_DEFINED_PAYLOAD%22%0A %7D%0A %5D%0A
%7D%0A%0A%0A
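A caveat on the hunk above: it drops a top-level "buttons" list straight into the api.ai webhook response, which the fulfillment format does not define, so clients will most likely ignore it. If the goal is a Facebook Messenger button template, the usual shape (sketched from the public Messenger docs; treat the exact nesting as an assumption) rides inside the platform-specific "data" field:

messenger_response = {
    "speech": "fallback text",
    "displayText": "fallback text",
    "data": {
        "facebook": {  # platform block passed through to Messenger
            "attachment": {
                "type": "template",
                "payload": {
                    "template_type": "button",
                    "text": "What would you like to do?",
                    "buttons": [
                        {"type": "web_url",
                         "url": "https://petersapparel.parseapp.com",
                         "title": "Show Website"},
                        {"type": "postback",
                         "title": "Start Chatting",
                         "payload": "USER_DEFINED_PAYLOAD"},
                    ],
                },
            },
        },
    },
}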
|
41792170e33d1904a8ed4f7021fe57ed9fd96dc1
|
test insert into DB by param
|
app.py
|
app.py
|
#!/usr/bin/env python
from Data import Database
import urllib
import json
import os
from flask import Flask
from flask import request
from flask import make_response
import psycopg2
import urlparse
global name
global singletonObject
singletonObject = None
# Flask app should start in global layout
app = Flask(__name__)
@app.route('/webhook', methods=['POST','GET'])
def webhook():
req = request.get_json(silent=True, force=True)
print("Request:")
print(json.dumps(req, indent=4))
res = makeWebhookResult(req)
res = json.dumps(res, indent=4)
print(res)
r = make_response(res)
r.headers['Content-Type'] = 'application/json'
return r
def requestGame(req):
originalRequest = req.get("originalRequest")
data = originalRequest.get("data")
sender = data.get("sender")
id = sender.get("id")
parameter = req.get("result").get("parameters").get("requestParam")
print "-------------- " + parameter + " ------------"
parameter = "hello game"
print "-------------- " + parameter + " ------------"
return {
"speech" : "",
"displayText": "",
"data": {},
"contextOut": [],
"source": "test-python",
"followupEvent":{
"name":"param",
"data":{
"event":parameter
}
}
}
def connectDB():
urlparse.uses_netloc.append("postgres")
url = urlparse.urlparse(os.environ["DATABASE_URL"])
conn = psycopg2.connect(
database=url.path[1:],
user=url.username,
password=url.password,
host=url.hostname,
port=url.port
)
### conn = psycopg2.connect(database="testpgdp", user="postgres", password="pgAdmin_postgreSQL", host="127.0.0.1", port="5432")
print "Opened database successfully"
return conn
def createTable(conn):
cur = conn.cursor()
cur.execute('''CREATE TABLE "USER"
(ID INT PRIMARY KEY NOT NULL,
NAME TEXT NOT NULL,
AGE INT NOT NULL);''')
print "Table created successfully"
def createTable_Answers(conn):
print "--------in Database createTable_Answers--------"
cur = conn.cursor()
cur.execute('''CREATE TABLE "AnswersOut"
(ID SERIAL PRIMARY KEY NOT NULL,
Answer TEXT NOT NULL);''')
conn.commit()
print "--------Table Answers created successfully--------"
def insertIntoDB(conn, Name):
cur = conn.cursor()
cur.execute("INSERT INTO \"USER\" (ID,NAME,AGE) \
VALUES (5, " + (str)(Name) + ", 20)");
conn.commit()
print "Records created successfully";
def selectDB(conn):
cur = conn.cursor()
cur.execute("SELECT id, name from \"USER\"")
rows = cur.fetchall()
for row in rows:
global name
name = row[1]
print "ID = ", row[0]
print "NAME = ", row[1], "\n"
print "Operation done successfully";
return name
def requestDB(req):
name = "Empty";
conn = connectDB()
### createTable_Answers(conn)
###createTable(conn)
insertIntoDB(conn, "Nany")
print "before " + name
name = selectDB(conn)
print "after " + name
conn.close()
return {
"speech" : name,
"displayText": "",
"data": {},
"contextOut": [],
"source": "test-python"
}
def requestEvent(req):
return {
"speech" : "",
"displayText": "",
"data": {},
"contextOut": [],
"source": "test-python",
"followupEvent":{
"name":"test-event",
"data":{
"event":"inside event"
}
}
}
def requestSingleton(req):
global singletonObject
if singletonObject is None:
print "---- not singleton -----"
singletonObject = "updated"
print singletonObject
return {
"speech" : singletonObject,
"displayText": "",
"data": {},
"contextOut": [],
"source": "test-python"
}
def getRandomName(conn):
cur = conn.cursor()
cur.execute("SELECT * FROM \"USER\" OFFSET floor(random()*(SELECT COUNT(*) FROM \"USER\")) LIMIT 1")
rows = cur.fetchall()
for row in rows:
global name
name = row[1]
print "ID = ", row[0]
print "NAME = ", row[1], "\n"
print "Operation done successfully";
return name
def requestRandomName():
name = "Empty";
conn = connectDB()
print "before " + name
name = getRandomName(conn)
print "after " + name
conn.close()
return {
"speech" : name,
"displayText": "",
"data": {},
"contextOut": [],
"source": "test-python"
}
def makeWebhookResult(req):
if req.get("result").get("action") == "request-game":
return requestGame(req)
elif req.get("result").get("action") == "get-from-db":
return requestDB(req)
elif req.get("result").get("action") == "test-event":
return requestEvent(req)
elif req.get("result").get("action") == "test-singleton":
return requestSingleton(req)
elif req.get("result").get("action") == "createDB":
requestDB(req)
conn = Database.Database()
return conn.__createTables__()
elif req.get("result").get("action") == "get-random-name":
return requestRandomName()
else:
return {}
if __name__ == '__main__':
port = int(os.getenv('PORT', 5000))
#print "Starting app on port %d" % port
app.run(debug=True, port=port, host='0.0.0.0')
|
Python
| 0
|
@@ -2426,16 +2426,21 @@
nn, Name
+, age
):%0A c
@@ -2540,27 +2540,41 @@
%22 +
-(str)(Name) + %22, 20
+%22'%22 + Name + %22'%22 + %22, %22 + age + %22
)%22);
@@ -3107,16 +3107,20 @@
, %22Nany%22
+, 20
)%0A %0A
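Worth flagging: both the old concatenation and the new "'" + Name + "'" form build SQL by pasting strings, which breaks on any quote in the data and is an injection risk. psycopg2 binds parameters server-side via %s placeholders; a safe equivalent of the insert (table and columns as in the record above):

def insert_into_db(conn, user_id, name, age):
    cur = conn.cursor()
    # psycopg2 quotes and escapes the bound values itself; never
    # interpolate user data into the SQL text.
    cur.execute(
        'INSERT INTO "USER" (ID, NAME, AGE) VALUES (%s, %s, %s)',
        (user_id, name, age),
    )
    conn.commit()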
|
052dc22a82267d381636f5f5fbbf4b5149ffb518
|
check if the values are empty before adding the data
|
gui/specieslistdialog.py
|
gui/specieslistdialog.py
|
# -*- coding: utf8 -*-
from PyQt4 import QtGui, QtCore
class speciesListDialog(QtGui.QDialog):
_tableview = None
def __init__(self, parent, app):
QtGui.QDialog.__init__(self, parent)
self._app = app
self._parent = parent
self.initUI()
self.setWindowTitle('List species')
self.show()
def initUI(self):
layout = QtGui.QVBoxLayout(self)
tablemodel = SpeciesTableModel(self._app.map.species, self)
self._tableview = QtGui.QTableView()
self._tableview.setModel(tablemodel)
form = QtGui.QGridLayout()
nameLabel = QtGui.QLabel("Species name")
self._nameField = QtGui.QLineEdit()
descriptionLabel = QtGui.QLabel("Species Description")
self._descriptionField = QtGui.QTextEdit()
self._saveButton = QtGui.QPushButton("Create")
self._saveButton.clicked.connect(self.createSpecies)
closeButton = QtGui.QPushButton("Close")
closeButton.clicked.connect(self.close)
form.addWidget(nameLabel, 0, 0)
form.addWidget(self._nameField, 0, 1)
form.addWidget(descriptionLabel, 1, 0)
form.addWidget(self._descriptionField, 1, 1)
form.addWidget(self._saveButton, 2, 1)
layout.addWidget(self._tableview)
layout.addLayout(form)
layout.addWidget(closeButton)
self.setLayout(layout)
def createSpecies(self):
self._app.addSpecies(self._nameField.text(), self._descriptionField.toPlainText())
tablemodel = SpeciesTableModel(self._app.map.species, self)
self._tableview.setModel(tablemodel)
class SpeciesTableModel(QtCore.QAbstractTableModel):
def __init__(self, datain, parent = None, *args):
QtCore.QAbstractTableModel.__init__(self, parent, *args)
self.dataChanged.connect(self.saveChange)
self.arraydata = datain
def rowCount(self, parent):
return len(self.arraydata)
def columnCount(self, parent):
if len(self.arraydata) == 0:
return 0
return len(self.arraydata[0])
def data(self, index, role):
if not index.isValid():
return None
elif role != QtCore.Qt.DisplayRole:
return None
return (self.arraydata[index.row()][index.column()])
def saveChange(self, x, y):
print x, y
def flags(self, index):
return QtCore.Qt.ItemIsEditable | QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
|
Python
| 0.000005
|
@@ -1250,28 +1250,18 @@
:%0A%09%09
-self._app.addSpecies
+name = str
(sel
@@ -1279,18 +1279,38 @@
d.text()
-,
+)%0A%09%09description = str(
self._de
@@ -1340,16 +1340,114 @@
ext())%0A%0A
+%09%09if name is %22%22 or description is %22%22:%0A%09%09%09return False%0A%0A%09%09self._app.addSpecies(name, description)%0A%0A
%09%09tablem
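One wrinkle in the hunk above: if name is "" or description is "" compares object identity, which only passes because CPython happens to intern the empty string; equality or truthiness is the reliable spelling. A minimal sketch of the same guard (field objects as in the dialog above):

def create_species(app, name_field, description_field):
    name = str(name_field.text()).strip()
    description = str(description_field.toPlainText()).strip()
    if not name or not description:  # truthiness, not 'is ""'
        return False
    app.addSpecies(name, description)
    return True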
|
fda5277442f62ff9a81dcc853a8de86b825751b0
|
use rq.SimpleWorker (execute job in main process)
|
app.py
|
app.py
|
#! /usr/bin/env python3.6
# coding: utf-8
from kamera.logger import log
import sys
import os
from hashlib import sha256
import hmac
import json
import datetime as dt
from pathlib import Path
from flask import Flask, request, abort, Response, g
import redis
import rq
import rq_dashboard
import redis_lock
import dropbox
from kamera.task import Task
from kamera import config
from typing import Optional, Generator, Tuple
app = Flask(__name__)
redis_client = redis.from_url(config.redis_url)
queue = rq.Queue(connection=redis_client)
listen = ['default']
running_jobs_registry = rq.registry.StartedJobRegistry(connection=redis_client)
def get_redis_client():
redis_client = getattr(g, '_redis_client', None)
if redis_client is None:
redis_client = redis.from_url(config.redis_url)
g._redis_client = redis_client
return redis_client
# Define and apply rq-dashboard authenication,
# from https://github.com/eoranged/rq-dashboard/issues/75
def check_auth(username, password) -> bool:
return (
username == config.rq_dashboard_username and
password == config.rq_dashboard_password
)
def basic_auth() -> Optional[Response]:
"""Ensure basic authorization."""
error_resp = Response(
'Could not verify your access level for that URL.\n'
'You have to login with proper credentials', 401,
{'WWW-Authenticate': 'Basic realm="Login Required"'}
)
auth = request.authorization
return (error_resp
if not (auth or check_auth(auth.username, auth.password))
else None)
app.config.from_object(rq_dashboard.default_settings)
app.config["REDIS_URL"] = config.redis_url
rq_dashboard.blueprint.before_request(basic_auth)
app.register_blueprint(rq_dashboard.blueprint, url_prefix="/rq")
def set_time_of_request(account_id: str):
now = dt.datetime.utcnow()
get_redis_client().hset(f"user:{account_id}", "last_request_at", now.timestamp())
def time_since_last_request_greater_than_limit(account_id: str) -> bool:
timestamp = get_redis_client().hget(f"user:{account_id}", "last_request_at")
if timestamp is None:
return True
last_request_at = dt.datetime.fromtimestamp(float(timestamp))
delta = dt.datetime.utcnow() - last_request_at
if delta >= dt.timedelta(seconds=config.flask_rate_limit):
return True
return False
@app.route('/')
def hello_world() -> str:
return f"{config.app_id}.home"
@app.route('/kamera', methods=['GET'])
def verify() -> str:
'''Respond to the webhook verification (GET request) by echoing back the challenge parameter.'''
return request.args.get('challenge')
def check_enqueue_entries(account_id: str):
queued_and_running_jobs = (
set(queue.job_ids) | set(running_jobs_registry.get_job_ids())
)
log.debug(queued_and_running_jobs)
token = config.get_dbx_token(get_redis_client(), account_id)
dbx = dropbox.Dropbox(token)
for entry, metadata in dbx_list_entries(dbx, config.uploads_path):
job_id = f"{account_id}:{entry.name}"
if job_id in queued_and_running_jobs:
continue
log.info(f"enqueing entry: {entry}")
task = Task(
account_id,
entry,
metadata,
config.review_path,
config.backup_path,
config.errors_path
)
queue.enqueue_call(
func=task.process_entry,
result_ttl=600,
job_id=job_id
)
@app.route('/kamera', methods=['POST'])
def webhook() -> str:
log.info("request incoming")
signature = request.headers.get('X-Dropbox-Signature')
digest = hmac.new(config.APP_SECRET, request.data, sha256).hexdigest()
if not hmac.compare_digest(signature, digest):
abort(403)
accounts = json.loads(request.data)["list_folder"]["accounts"]
for account_id in accounts:
if not time_since_last_request_greater_than_limit(account_id):
log.info(f"rate limit exceeded: {account_id}")
continue
lock = redis_lock.Lock(get_redis_client(), name=account_id, expire=60)
if not lock.acquire(blocking=False):
log.info(f"User request already being processed: {account_id}")
continue
try:
check_enqueue_entries(account_id)
except Exception:
log.exception(f"Exception occured, when handling request: {account_id}")
finally:
set_time_of_request(account_id)
lock.release()
log.info("request finished")
return ""
def dbx_list_entries(
dbx: dropbox.Dropbox,
path: Path
) -> Generator[Tuple[dropbox.files.FileMetadata, Optional[dropbox.files.PhotoMetadata]], None, None]:
result = dbx.files_list_folder(
path=path.as_posix(),
include_media_info=True
)
while True:
log.info(f"Entries in upload folder: {len(result.entries)}")
log.debug(result.entries)
for entry in result.entries:
# Ignore deleted files, folders
if not (entry.path_lower.endswith(config.media_extensions) and
isinstance(entry, dropbox.files.FileMetadata)):
continue
metadata = entry.media_info.get_metadata() if entry.media_info else None
yield entry, metadata
# Repeat only if there's more to do
if result.has_more:
result = dbx.files_list_folder_continue(result.cursor)
else:
break
def main(mode: str) -> None:
if mode == "server":
redis_lock.reset_all()
app.run()
else:
if mode == "worker":
with rq.Connection(redis_client):
worker = rq.Worker(list(map(rq.Queue, listen)))
worker.work()
elif mode == "run_once":
account_id = sys.argv[2]
dbx = dropbox.Dropbox(config.get_dbx_token(redis_client, account_id))
Task.dbx_cache[account_id] = dbx
for entry, metadata in dbx_list_entries(dbx, config.uploads_path):
task = Task(
account_id,
entry,
metadata,
config.review_path,
config.backup_path,
config.errors_path
)
task.process_entry()
if __name__ == '__main__':
main(sys.argv[1])
|
Python
| 0
|
@@ -5733,42 +5733,35 @@
rq.
+Simple
Worker(
-list(map(rq.Queue, listen))
+queues=%5Bqueue%5D
)%0A
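For context on the replacement: rq.Worker forks a child process per job, while rq.SimpleWorker runs jobs in the worker process itself, which keeps debuggers usable and lets in-process state such as Task.dbx_cache survive between jobs. A minimal hedged sketch (Redis URL is a placeholder):

import redis
import rq

redis_client = redis.from_url("redis://localhost:6379")
queue = rq.Queue(connection=redis_client)

with rq.Connection(redis_client):
    # no fork: each job executes inline in this process
    worker = rq.SimpleWorker(queues=[queue])
    worker.work()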
|
ffe23433056e0a710f81bb22a9161cdaf3ff2c12
|
fix format call for python 2.6
|
moban/filters/repr.py
|
moban/filters/repr.py
|
from moban.extensions import JinjaFilter
@JinjaFilter('repr')
def repr_function(string):
if isinstance(string, list):
return ["'{}'".format(str(element)) for element in string]
else:
return "'{}'".format(str(string))
|
Python
| 0.000003
|
@@ -136,16 +136,17 @@
urn %5B%22'%7B
+0
%7D'%22.form
@@ -213,16 +213,17 @@
turn %22'%7B
+0
%7D'%22.form
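Background for the two one-character hunks: auto-numbered fields ('{}') arrived in Python 2.7/3.1, and on Python 2.6 str.format() raises ValueError: zero length field name, so the index must be explicit:

# "'{}'".format(x)   -> ValueError on Python 2.6
# "'{0}'".format(x)  -> works on 2.6 and later alike
def quote(value):
    return "'{0}'".format(str(value))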
|
73029fe3a4cb40063820fdab0fe61a4adbe56e01
|
use tempfile instead of /tmp
|
thinc/linear/tests/test_avgtron.py
|
thinc/linear/tests/test_avgtron.py
|
from __future__ import division
import pytest
import pickle
import io
import tempfile
from thinc.linear.avgtron import AveragedPerceptron
from thinc.extra.eg import Example
def assert_near_eq(float1, float2):
assert abs(float1 - float2) < 0.001
def test_basic():
nr_class = 3
model = AveragedPerceptron(((1,), (2,), (3,), (4,), (5,)))
instances = [
(1, {1: 1, 3: -5}),
(2, {2: 4, 3: 5})
]
for clas, feats in instances:
eg = Example(nr_class)
eg.features = feats
model(eg)
eg.costs = [i != clas for i in range(nr_class)]
model.update(eg)
eg = Example(nr_class)
eg.features = {1: 2, 2: 1}
model(eg)
assert eg.guess == 2
eg = Example(nr_class)
eg.features = {0: 2, 2: 1}
model(eg)
assert eg.scores[1] == 0
eg = Example(nr_class)
eg.features = {1: 2, 2: 1}
model(eg)
assert eg.scores[2] > 0
eg = Example(nr_class)
eg.features = {1: 2, 1: 1}
model(eg)
assert eg.scores[1] > 0
eg = Example(nr_class)
eg.features = {0: 3, 3: 1}
model(eg)
assert eg.scores[1] < 0
eg = Example(nr_class)
eg.features = {0: 3, 3: 1}
model(eg)
assert eg.scores[2] > 0
@pytest.fixture
def instances():
instances = [
[
(1, {1: -1, 2: 1}),
(2, {1: 5, 2: -5}),
(3, {1: 3, 2: -3}),
],
[
(1, {1: -1, 2: 1}),
(2, {1: -1, 2: 2}),
(3, {1: 3, 2: -3})
],
[
(1, {1: -1, 2: 2}),
(2, {1: 5, 2: -5}),
(3, {4: 1, 5: -7, 2: 1})
]
]
return instances
@pytest.fixture
def model(instances):
templates = []
for batch in instances:
for _, feats in batch:
for key in feats:
templates.append((key,))
templates = tuple(set(templates))
model = AveragedPerceptron(templates)
for batch in instances:
model.time += 1
for clas, feats in batch:
for key, value in feats.items():
model.update_weight(key, clas, value)
return model
def get_score(nr_class, model, feats, clas):
eg = Example(nr_class)
eg.features = feats
eg.costs = [i != clas for i in range(nr_class)]
model(eg)
return eg.scores[clas]
def get_scores(nr_class, model, feats):
eg = Example(nr_class)
eg.features = feats
model(eg)
return list(eg.scores)
def test_averaging(model):
model.end_training()
nr_class = 4
# Feature 1
assert_near_eq(get_score(nr_class, model, {1: 1}, 1), sum([-1, -2, -3]) / 3.0)
assert_near_eq(get_score(nr_class, model, {1: 1}, 2), sum([5, 4, 9]) / 3.0)
assert_near_eq(get_score(nr_class, model, {1: 1}, 3), sum([3, 6, 6]) / 3.0)
# Feature 2
assert_near_eq(get_score(nr_class, model, {2: 1}, 1), sum([1, 2, 4]) / 3.0)
assert_near_eq(get_score(nr_class, model, {2: 1}, 2), sum([-5, -3, -8]) / 3.0)
assert_near_eq(get_score(nr_class, model, {2: 1}, 3), sum([-3, -6, -5]) / 3.0)
# Feature 3 (absent)
assert_near_eq(get_score(nr_class, model, {3: 1}, 1), 0)
assert_near_eq(get_score(nr_class, model, {3: 1}, 2), 0)
assert_near_eq(get_score(nr_class, model, {3: 1}, 3), 0)
# Feature 4
assert_near_eq(get_score(nr_class, model, {4: 1}, 1), sum([0, 0, 0]) / 3.0)
assert_near_eq(get_score(nr_class, model, {4: 1}, 2), sum([0, 0, 0]) / 3.0)
assert_near_eq(get_score(nr_class, model, {4: 1}, 3), sum([0, 0, 1]) / 3.0)
# Feature 5
assert_near_eq(get_score(nr_class, model, {5: 1}, 1), sum([0, 0, 0]) / 3.0)
assert_near_eq(get_score(nr_class, model, {5: 1}, 2), sum([0, 0, 0]) / 3.0)
assert_near_eq(get_score(nr_class, model, {5: 1}, 3), sum([0, 0, -7]) / 3.0)
def test_dump_load(model):
loc = tempfile.mkstemp()
model.end_training()
model.dump(loc)
string = open(loc, 'rb').read()
assert string
new_model = AveragedPerceptron([(1,), (2,), (3,), (4,)])
nr_class = 5
assert get_scores(nr_class, model, {1: 1, 3: 1, 4: 1}) != \
get_scores(nr_class, new_model, {1:1, 3:1, 4:1})
assert get_scores(nr_class, model, {2:1, 5:1}) != \
get_scores(nr_class, new_model, {2:1, 5:1})
assert get_scores(nr_class, model, {2:1, 3:1, 4:1}) != \
get_scores(nr_class, new_model, {2:1, 3:1, 4:1})
new_model.load(loc)
assert get_scores(nr_class, model, {1:1, 3:1, 4:1}) == \
get_scores(nr_class, new_model, {1:1, 3:1, 4:1})
assert get_scores(nr_class, model, {2:1, 5:1}) == \
get_scores(nr_class, new_model, {2:1, 5:1})
assert get_scores(nr_class, model, {2:1, 3:1, 4:1}) == \
get_scores(nr_class, new_model, {2:1, 3:1, 4:1})
## TODO: Need a test that exercises multiple lines. Example bug:
## in gather_weights, don't increment f_i per row, only per feature
## (so overwrite some lines we're gathering)
|
Python
| 0.000001
|
@@ -3815,16 +3815,19 @@
kstemp()
+%5B1%5D
%0A mod
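The [1] exists because tempfile.mkstemp() returns a pair (open file descriptor, path) rather than a bare path; taking only the path leaks the descriptor. A tidier pattern for the test would be:

import os
import tempfile

fd, loc = tempfile.mkstemp()
os.close(fd)           # mkstemp opens the file; release the raw fd
try:
    pass               # ... exercise model.dump(loc) / model.load(loc)
finally:
    os.remove(loc)     # remove the temporary file afterwards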
|
9255fd2c34a403b14b423628f47b7a7419c0d526
|
update language lexer translations
|
packages/wakatime/wakatime/stats.py
|
packages/wakatime/wakatime/stats.py
|
# -*- coding: utf-8 -*-
"""
wakatime.stats
~~~~~~~~~~~~~~
Stats about files
:copyright: (c) 2013 Alan Hamlett.
:license: BSD, see LICENSE for more details.
"""
import logging
import os
import sys
if sys.version_info[0] == 2:
sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), 'packages', 'pygments2'))
else:
sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), 'packages', 'pygments3'))
from pygments.lexers import guess_lexer_for_filename
log = logging.getLogger(__name__)
# force file name extensions to be recognized as a certain language
EXTENSIONS = {
'md': 'Markdown',
}
def guess_language(file_name):
if file_name:
language = guess_language_from_extension(file_name.rsplit('.', 1)[-1])
if language:
return language
lexer = None
try:
with open(file_name) as f:
lexer = guess_lexer_for_filename(file_name, f.read(512000))
except:
pass
if lexer:
return str(lexer.name)
else:
return None
def guess_language_from_extension(extension):
if extension:
if extension in EXTENSIONS:
return EXTENSIONS[extension]
if extension.lower() in EXTENSIONS:
return mapping[EXTENSIONS.lower()]
return None
def number_lines_in_file(file_name):
lines = 0
try:
with open(file_name) as f:
for line in f:
lines += 1
except IOError:
return None
return lines
def get_file_stats(file_name):
stats = {
'language': guess_language(file_name),
'lines': number_lines_in_file(file_name),
}
return stats
|
Python
| 0
|
@@ -658,16 +658,251 @@
own',%0A%7D%0A
+TRANSLATIONS = %7B%0A 'CSS+Genshi Text': 'CSS',%0A 'CSS+Lasso': 'CSS',%0A 'HTML+Django/Jinja': 'HTML',%0A 'HTML+Lasso': 'HTML',%0A 'JavaScript+Genshi Text': 'JavaScript',%0A 'JavaScript+Lasso': 'JavaScript',%0A 'Perl6': 'Perl',%0A%7D%0A
%0A%0Adef gu
@@ -1259,16 +1259,35 @@
return
+translate_language(
str(lexe
@@ -1293,16 +1293,17 @@
er.name)
+)
%0A els
@@ -1559,16 +1559,16 @@
ower()%5D%0A
-
retu
@@ -1573,24 +1573,155 @@
turn None%0A%0A%0A
+def translate_language(language):%0A if language in TRANSLATIONS:%0A language = TRANSLATIONS%5Blanguage%5D%0A return language%0A%0A%0A
def number_l
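The added translate_language() is a plain normalization table plus lookup. The same behaviour in the usual one-line idiom, shown only as an equivalent sketch:

TRANSLATIONS = {
    'Perl6': 'Perl',  # entries as in the commit
}

def translate_language(language):
    # dict.get falls back to the key itself when no mapping exists
    return TRANSLATIONS.get(language, language)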
|
b91c15f745d18bba4a884666cd3bd7eb87f82943
|
Bind to 0.0.0.0.
|
bot.py
|
bot.py
|
import functools
import os
from bottle import Bottle, request, jinja2_view
view = functools.partial(jinja2_view, template_lookup=['templates'])
app = Bottle()
@app.get('/')
@view('home.html')
def instructions():
return {}
@app.post('/')
def do_correlation():
data = request.json
print data
return "Here's your data!"
if __name__ == '__main__':
PORT = os.environ.get("PORT", 80)
DEBUG = os.environ.get("DEBUG_ON", False)
app.run(host='localhost', port=PORT, debug=DEBUG)
|
Python
| 0.997724
|
@@ -466,17 +466,15 @@
st='
-localhost
+0.0.0.0
', p
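Rationale for the one-word change: 'localhost' binds only the loopback interface, so a bot running behind a PaaS router or inside a container never receives outside traffic; '0.0.0.0' listens on every interface. Making the host configurable, mirroring the existing PORT handling, is a natural follow-up (a sketch, not from the commit):

import os
from bottle import Bottle

app = Bottle()

if __name__ == '__main__':
    host = os.environ.get("HOST", "0.0.0.0")  # all interfaces by default
    port = int(os.environ.get("PORT", 80))
    app.run(host=host, port=port)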
|
324cae7bc3d8d758205a0760dffd8d78ce611d48
|
Increase tweet length
|
bot.py
|
bot.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import tweepy
import datetime
import os
import os.path
import requests
import json
from models import Aufmacher, Author, Image, TweetJob
from config import db
from playhouse.shortcuts import model_to_dict
from secrets import twitter_secrets
def tweet(tweetjob):
href = tweetjob.aufmacher.unique_id.replace("http://xml", "http://www")
tweet_text = """
{supertitle}: {title}
{subtitle}
""".format(**model_to_dict(tweetjob.aufmacher)).strip()
if len(tweet_text) > 115:
tweet_text = "{:.115}…".format(tweet_text)
tweet = """
{tweet_text}
{href}
""".format(tweet_text=tweet_text,
href=href).strip()
auth = tweepy.OAuthHandler(twitter_secrets["CONSUMER_KEY"], twitter_secrets["CONSUMER_SECRET"])
auth.set_access_token(twitter_secrets["ACCESS_TOKEN"], twitter_secrets["ACCESS_TOKEN_SECRET"])
api = tweepy.API(auth)
api.update_status(status=tweet)
tweetjob.tweeted_at = datetime.datetime.now()
tweetjob.save()
def go():
tweetjobs = TweetJob.select().where(TweetJob.tweeted_at == None)
for tweetjob in tweetjobs:
tweet(tweetjob)
if __name__ == "__main__":
go()
# #media_upload_response = api.media_upload(image_filename)
# #print(media_upload_response.media_id_string)
# #api.update_status(status="test with image", media_ids=[media_upload_response.media_id_string])
# with open("last_tweeted", 'w') as file:
# file.write(todays_date)
|
Python
| 0.998431
|
@@ -517,19 +517,19 @@
text) %3E
-115
+250
:%0A
@@ -551,11 +551,11 @@
%22%7B:.
-115
+250
%7D%E2%80%A6%22.
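Context for 115 -> 250: Twitter doubled the tweet limit to 280 characters in late 2017, and t.co wraps every link at a fixed length (23 characters at the time), so roughly 280 - 24 = 256 characters remain for text; 250 leaves slack for the newline and the appended ellipsis. The budget made explicit, as a sketch:

TWEET_LIMIT = 280
TCO_URL_LENGTH = 23            # fixed t.co link length (assumed value)

def truncate_for_tweet(text, reserved=TCO_URL_LENGTH + 1):  # +1 newline
    budget = TWEET_LIMIT - reserved
    if len(text) > budget:
        return text[:budget - 1] + "…"   # ellipsis counted in the budget
    return text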
|
4d131e93f61f879307e92e1fcabe38ff4f670e64
|
add missing )
|
controllers/api/api_district_controller.py
|
controllers/api/api_district_controller.py
|
import json
import webapp2
from controllers.api.api_base_controller import ApiBaseController
from consts.district_type import DistrictType
from consts.event_type import EventType
from datetime import datetime
from google.appengine.ext import ndb
from helpers.district_helper import DistrictHelper
from helpers.event_helper import EventHelper
from helpers.model_to_dict import ModelToDict
from models.event import Event
from models.event_team import EventTeam
from models.team import Team
class ApiDistrictControllerBase(ApiBaseController):
def _set_district(self, district):
self.district_abbrev = district
self.district = DistrictType.abbrevs[self.district_abbrev]
@property
def _validators(self):
return []
class ApiDistrictListController(ApiDistrictControllerBase):
CACHE_KEY_FORMAT = "apiv2_district_list_controller_{}" # year
CACHE_VERSION = 1
CACHE_HEADER_LENGTH = 61
def __init__(self, *args, **kw):
super(ApiDistrictListController, self).__init__(*args, **kw)
self.year = int(self.request.route_kwargs["year"] or datetime.now().year)
self._partial_cache_key = self.CACHE_KEY_FORMAT.format(self.year)
def _track_call(self, year=None):
if year is None:
year = datetime.now().year
self._track_call_defer('district/list', year
def _render(self, year=None):
all_cmp_event_keys = Event.query(Event.year == int(self.year), Event.event_type_enum == EventType.DISTRICT_CMP).fetch(None, keys_only=True)
events = ndb.get_multi(all_cmp_event_keys
district_keys = [DistrictType.type_abbrevs[event.event_district_enum] for event in events]
return json.dumps(district_keys, ensure_ascii=True)
class ApiDistrictEventsController(ApiDistrictControllerBase):
CACHE_KEY_FORMAT = "apiv2_district_events_controller_{}_{}" # (district_short, year)
CACHE_VERSION = 0
CACHE_HEADER_LENGTH = 61
def __init__(self, *args, **kw):
super(ApiDistrictEventsController, self).__init__(*args, **kw)
self.district_abbrev = self.request.route_kwargs["district_abbrev"]
self.year = int(self.request.route_kwargs["year"] or datetime.now().year)
self._partial_cache_key = self.CACHE_KEY_FORMAT.format(self.district_abbrev, self.year)
def _track_call(self, district_abbrev, year=None):
if year is None:
year = datetime.now().year
self._track_call_defer('district/events', '{}{}'.format(year, district_abrev))
def _render(self, district_abbrev, year=None):
self._set_district(district_abbrev)
event_keys = Event.query(Event.year == self.year, Event.event_district_enum == self.district).fetch(None, keys_only=True)
events = ndb.get_multi(event_keys)
events = [ModelToDict.eventConverter(event) for event in events]
return json.dumps(events, ensure_ascii=True)
class ApiDistrictRankingsController(ApiDistrictControllerBase):
CACHE_KEY_FORMAT = "apiv2_district_rankings_controller_{}_{}" # (district_short, year)
CACHE_VERSION = 1
CACHE_HEADER_LENGTH = 61
def __init__(self, *args, **kw):
super(ApiDistrictRankingsController, self).__init__(*args, **kw)
self.district_abbrev = self.request.route_kwargs["district_abbrev"]
self.year = int(self.request.route_kwargs["year"] or datetime.now().year)
self._partial_cache_key = self.CACHE_KEY_FORMAT.format(self.district_abbrev, self.year)
def _track_call(self, district_abbrev, year=None):
if year is None:
year = datetime.now().year
self._track_call_defer('district/rankings', '{}{}'.format(year, district_abbrev))
def _render(self, district_abbrev, year=None):
self._set_district(district_abbrev)
if year < 2014:
return json.dumps([], ensure_ascii=True)
event_keys = Event.query(Event.year == self.year, Event.event_district_enum == self.district).fetch(None, keys_only=True)
events = ndb.get_multi(event_keys)
district_cmp_keys_future = Event.query(Event.year == self.year, Event.event_type_enum == EventType.DISTRICT_CMP).fetch_async(None, keys_only=True)
event_futures = ndb.get_multi_async(event_keys)
event_team_keys_future = EventTeam.query(EventTeam.event.IN(event_keys)).fetch_async(None, keys_only=True)
if self.year == 2014: # TODO: only 2014 has accurate rankings calculations
team_futures = ndb.get_multi_async(set([ndb.Key(Team, et_key.id().split('_')[1]) for et_key in event_team_keys_future.get_result()]))
events = [event_future.get_result() for event_future in event_futures]
EventHelper.sort_events(events)
if self.year == 2014: # TODO: only 2014 has accurate rankings calculations
team_totals = DistrictHelper.calculate_rankings(events, team_futures, self.year)
else:
return json.dumps([])
rankings = []
current_rank = 1
for key, points in team_totals:
point_detail = {}
point_detail["rank"] = current_rank
point_detail["team_key"] = key
point_detail["event_points"] = {}
for event in points["event_points"]:
event_key = event[0].key_name
point_detail["event_points"][event_key] = event[1]
event_details = Event.get_by_id(event_key)
point_detail["event_points"][event[0].key_name]['district_cmp'] = True if event_details.event_type_enum == EventType.DISTRICT_CMP else False
if "rookie_bonus" in points:
point_detail["rookie_bonus"] = points["rookie_bonus"]
else:
point_detail["rookie_bonus"] = 0
point_detail["point_total"] = points["point_total"]
rankings.append(point_detail)
current_rank += 1
return json.dumps(rankings)
|
Python
| 0.000018
|
@@ -1341,24 +1341,25 @@
/list', year
+)
%0A%0A def _r
|
afc1c7331e683aeffe05a914780a5ec60cdbf81b
|
use 4GB RAM for garmin
|
conversion_service/converters/converter.py
|
conversion_service/converters/converter.py
|
import os
import shutil
import subprocess
import time
from converters import garmin_converter
from converters import gis_converter
from converters.gis_converter.bootstrap import bootstrap
from utils import chg_dir_with
class Conversion(object):
def __init__(self, formats, output_dir, osm_pbf_path, basename='osmaxx_excerpt'):
self.formats = formats
self.output_dir = output_dir
self.filename_prefix = '_'.join([
basename,
time.strftime("%Y-%m-%d_%H%M%S"),
])
self.tmp_statistics_filename = self.filename_prefix + '_tmp'
self.pbf_path = osm_pbf_path
def start_format_extraction(self):
garmin_formats, gis_formats = self._split_formats()
self._create_garmin_export(garmin_formats)
self._extract_postgis_formats(gis_formats)
def _extract_postgis_formats(self, formats):
if len(formats) > 0:
bootstrap.boostrap(self.pbf_path)
with chg_dir_with(os.path.dirname(__file__)):
# only create statistics once and remove it when done with all formats
self._get_statistics(self.tmp_statistics_filename)
for format in formats:
file_basename = '_'.join([self.filename_prefix, format])
self._copy_statistics_file_to_format_dir(file_basename)
self._export_from_db_to_format(file_basename, format)
# remove the temporary statistics file
os.remove(os.path.join(self.output_dir, 'tmp', self.tmp_statistics_filename + '_STATISTICS.csv'))
def _create_garmin_export(self, formats):
if len(formats) == 1:
garmin_format = formats[0]
path_to_mkgmap = os.path.abspath(
os.path.join(os.path.dirname(__file__), 'garmin_converter', 'command_line_utils', 'mkgmap', 'mkgmap.jar')
)
garmin_out_dir = os.path.join(self.output_dir, garmin_format)
os.makedirs(garmin_out_dir, exist_ok=True)
subprocess.check_call(['java', '-jar', path_to_mkgmap, '--output-dir={0}'.format(garmin_out_dir), '--input-file={0}'.format(self.pbf_path)])
subprocess.check_call(["zip", "-r", "--move", '.'.join([garmin_out_dir, 'zip']), garmin_out_dir])
# Export files of the specified format (file_format) from existing database
def _export_from_db_to_format(self, file_basename, file_format): # pragma: nocover
dbcmd = 'sh', './extract/extract_format.sh', self.output_dir, file_basename, file_format
dbcmd = [str(arg) for arg in dbcmd]
subprocess.check_call(dbcmd)
# Extract Statistics
def _get_statistics(self, filename): # pragma: nocover
statcmd = 'bash', './extract/extract_statistics.sh', self.output_dir, filename
statcmd = [str(arg) for arg in statcmd]
subprocess.check_call(statcmd)
def _copy_statistics_file_to_format_dir(self, file_basename): # pragma: nocover
shutil.copyfile(
os.path.join(self.output_dir, 'tmp', self.tmp_statistics_filename + '_STATISTICS.csv'),
os.path.join(self.output_dir, 'tmp', file_basename + '_STATISTICS.csv')
)
def _split_formats(self):
garmin_formats = [garmin_format for garmin_format in self.formats
if garmin_format in garmin_converter.options.get_output_formats()]
gis_formats = [gis_format for gis_format in self.formats
if gis_format in gis_converter.options.get_output_formats()]
return garmin_formats, gis_formats
|
Python
| 0.000001
|
@@ -2065,16 +2065,40 @@
%5B'java',
+ '-Xms32m', '-Xmx4096m',
'-jar',
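On the inserted flags: -Xms sets the JVM's initial heap and -Xmx its ceiling (4096m matching the 4GB in the subject), and both must precede -jar or java would hand them to mkgmap as program arguments. The pattern in isolation:

import subprocess

def run_jar(jar_path, *args, min_heap="32m", max_heap="4096m"):
    # JVM options go before -jar; everything after the jar is argv
    cmd = ["java", "-Xms" + min_heap, "-Xmx" + max_heap, "-jar", jar_path]
    cmd.extend(args)
    subprocess.check_call(cmd)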
|
22c9ac7807ad493e9b2b0b97f4299cc87b316ada
|
Fix copy_resources overwrite.
|
modelmanager/utils.py
|
modelmanager/utils.py
|
"""All handy, general utility functionality used throughout the package."""
import os
import os.path as osp
import fnmatch
import shutil
def load_module_path(name, path):
"""Load a python module source file python version aware."""
if True: # PY==27
import imp
m = imp.load_source(name, path)
elif False: # PY==33/34
from importlib.machinery import SourceFileLoader
srcloader = SourceFileLoader(name, path)
m = srcloader.load_module()
else: # PY 35
import importlib.util as iu
spec = iu.spec_from_file_location(name, path)
m = iu.module_from_spec(spec)
spec.loader.exec_module(m)
return m
def get_paths_pattern(pattern, startdir):
"""
Get all paths (including in subdirectories) matching pattern.
Returns list of relative paths from startdir.
"""
matches = []
for root, dirnames, filenames in os.walk(startdir):
fpaths = [os.path.relpath(os.path.join(root, fn), startdir)
for fn in filenames]
matches += fnmatch.filter(fpaths, pattern)
return matches
def copy_resources(sourcedir, destinationdir, overwrite=False,
ignorepatterns=[], linkpatterns=[], verbose=False):
"""
Copy/sync resource file tree from sourcedir to destinationdir.
overwrite: Overwrite existing files.
"""
def printverbose(args):
if verbose:
print(args)
return
pj = osp.join
if not osp.exists(destinationdir):
printverbose('mkdir %s' % destinationdir)
os.mkdir(destinationdir)
walker = os.walk(sourcedir, topdown=True)
for path, dirs, files in walker:
rpath = osp.relpath(path, sourcedir).replace('.', '')
# dirs
subsetdirs = []
for d in dirs:
rdir = pj(rpath, d)
dest = pj(destinationdir, rpath, d)
if any(fnmatch.fnmatch(rdir, p) for p in ignorepatterns):
printverbose('Ignoring %s' % rdir)
# dir to symlink with relative path
elif any(fnmatch.fnmatch(rdir, p) for p in linkpatterns):
rsrc = osp.relpath(pj(path, d), osp.dirname(dest))
printverbose('Linking %s to %s' % (dest, rsrc))
os.symlink(rsrc, dest)
# create new dir
else:
if not osp.exists(dest):
printverbose('mkdir %s' % dest)
os.mkdir(dest)
subsetdirs.append(d)
# update dirs (change in place will prevent walking into them)
dirs[:] = subsetdirs
# files
for f in files:
rfil = osp.join(rpath, f)
dest = pj(destinationdir, rpath, f)
src = pj(path, f)
# ignored files
if any(fnmatch.fnmatch(rfil, p) for p in ignorepatterns):
printverbose('Ignoring %s' % rfil)
continue
# file to symlink with relative path
elif any(fnmatch.fnmatch(rfil, p) for p in linkpatterns):
rsrc = osp.relpath(pj(path, f), osp.dirname(dest))
printverbose('Linking %s to %s' % (dest, rsrc))
os.symlink(rsrc, dest)
# copy/relink existing symlinks
elif osp.islink(src):
linkto = os.readlink(src)
lnabs = osp.abspath(pj(path, linkto))
rsrc = osp.relpath(lnabs, osp.dirname(dest))
printverbose('Linking %s to %s' % (dest, rsrc))
os.symlink(rsrc, dest)
# copy file
else:
printverbose('cp %s to %s' % (src, dest))
shutil.copy(src, dest)
return
|
Python
| 0
|
@@ -3592,33 +3592,67 @@
e%0A el
-s
+if not osp.exists(dest) or overwrit
e:%0A
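The repaired branch now reads: copy when the destination is missing, or regardless when overwrite=True; the previous bare else silently clobbered existing files even with overwrite=False, contradicting the docstring. The guard on its own, as a sketch:

import os.path as osp
import shutil

def copy_file(src, dest, overwrite=False):
    # touch dest only when absent or when overwriting was requested
    if not osp.exists(dest) or overwrite:
        shutil.copy(src, dest)
        return True
    return False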
|
3d52eca5b9a7cddcd1d2b67547c22c28847aa085
|
fix print format for python3
|
tools/run_tests/start_port_server.py
|
tools/run_tests/start_port_server.py
|
#!/usr/bin/env python2.7
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Wrapper around port server starting code.
Used by developers who wish to run individual C/C++ tests outside of the
run_tests.py infrastructure.
The path to this file is called out in test/core/util/port.c, and printed as
an error message to users.
"""
import python_utils.start_port_server as start_port_server
start_port_server.start_port_server()
print "Port server started successfully"
|
Python
| 0.000027
|
@@ -959,17 +959,17 @@
)%0A%0Aprint
-
+(
%22Port se
@@ -994,9 +994,10 @@
ssfully%22
+)
%0A
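print "..." is a statement that Python 3 removed; the parenthesized call works under both major versions (on Python 2 a single parenthesized argument prints identically). For files that must stay bilingual, the future import makes print a real function everywhere:

from __future__ import print_function  # print() is a function on Python 2 too

print("Port server started successfully")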
|
091ce8a1249a5e4675e4cde6b51e9e551956f299
|
Remove python version consideration in pacman commands
|
tools/check_python_dependencies.py
|
tools/check_python_dependencies.py
|
#!/usr/bin/env python
#
# Copyright 2018 Espressif Systems (Shanghai) PTE LTD
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import os
import re
import sys
try:
import pkg_resources
except Exception:
print('pkg_resources cannot be imported probably because the pip package is not installed and/or using a '
'legacy Python interpreter. Please refer to the Get Started section of the ESP-IDF Programming Guide for '
'setting up the required packages.')
sys.exit(1)
def escape_backslash(path):
if sys.platform == "win32":
# escaped backslashes are necessary in order to be able to copy-paste the printed path
return path.replace("\\", "\\\\")
else:
return path
if __name__ == "__main__":
idf_path = os.getenv("IDF_PATH")
default_requirements_path = os.path.join(idf_path, 'requirements.txt')
parser = argparse.ArgumentParser(description='ESP-IDF Python package dependency checker')
parser.add_argument('--requirements', '-r',
help='Path to the requirements file',
default=default_requirements_path)
args = parser.parse_args()
not_satisfied = []
with open(args.requirements) as f:
for line in f:
line = line.strip()
# pkg_resources.require() cannot handle the full requirements file syntax so we need to make
# adjustments for options which we use.
if line.startswith('file://'):
line = os.path.basename(line)
if line.startswith('-e') and '#egg=' in line: # version control URLs, take the egg= part at the end only
line = re.search(r'#egg=([^\s]+)', line).group(1)
try:
pkg_resources.require(line)
except Exception:
not_satisfied.append(line)
if len(not_satisfied) > 0:
print('The following Python requirements are not satisfied:')
for requirement in not_satisfied:
print(requirement)
if os.path.realpath(args.requirements) != os.path.realpath(default_requirements_path):
# we're using this script to check non-default requirements.txt, so tell the user to run pip
print('Please check the documentation for the feature you are using, or run "%s -m pip install -r %s"' % (sys.executable, args.requirements))
elif os.environ.get('IDF_PYTHON_ENV_PATH'):
# We are running inside a private virtual environment under IDF_TOOLS_PATH,
# ask the user to run install.bat again.
if sys.platform == "win32" and not os.environ.get("MSYSTEM"):
install_script = 'install.bat'
else:
install_script = 'install.sh'
print('To install the missing packages, please run "%s"' % os.path.join(idf_path, install_script))
elif sys.platform == "win32" and os.environ.get("MSYSTEM", None) == "MINGW32" and "/mingw32/bin/python" in sys.executable:
print("The recommended way to install a packages is via \"pacman\". Please run \"pacman -Ss <package_name>\" for"
" searching the package database and if found then "
"\"pacman -S mingw-w64-i686-python{}-<package_name>\" for installing it.".format(sys.version_info[0],))
print("NOTE: You may need to run \"pacman -Syu\" if your package database is older and run twice if the "
"previous run updated \"pacman\" itself.")
print("Please read https://github.com/msys2/msys2/wiki/Using-packages for further information about using "
"\"pacman\"")
# Special case for MINGW32 Python, needs some packages
# via MSYS2 not via pip or system breaks...
for requirement in not_satisfied:
if requirement.startswith('cryptography'):
print("WARNING: The cryptography package have dependencies on system packages so please make sure "
"you run \"pacman -Syu\" followed by \"pacman -S mingw-w64-i686-python{}-cryptography\"."
"".format(sys.version_info[0],))
continue
elif requirement.startswith('setuptools'):
print("Please run the following command to install MSYS2's MINGW Python setuptools package:")
print("pacman -S mingw-w64-i686-python{}-setuptools".format(sys.version_info[0],))
continue
else:
print('Please follow the instructions found in the "Set up the tools" section of '
'ESP-IDF Getting Started Guide')
print('Diagnostic information:')
idf_python_env_path = os.environ.get('IDF_PYTHON_ENV_PATH')
print(' IDF_PYTHON_ENV_PATH: {}'.format(idf_python_env_path or '(not set)'))
print(' Python interpreter used: {}'.format(sys.executable))
if idf_python_env_path not in sys.executable:
print(' Warning: python interpreter not running from IDF_PYTHON_ENV_PATH')
print(' PATH: {}'.format(os.getenv('PATH')))
sys.exit(1)
print('Python requirements from {} are satisfied.'.format(args.requirements))
|
Python
| 0
|
@@ -3751,18 +3751,16 @@
6-python
-%7B%7D
-%3Cpackag
@@ -3792,37 +3792,8 @@
it.%22
-.format(sys.version_info%5B0%5D,)
)%0A
@@ -4904,18 +4904,16 @@
6-python
-%7B%7D
-setupto
@@ -4920,37 +4920,8 @@
ols%22
-.format(sys.version_info%5B0%5D,)
)%0A
|
c0e9c1bdcfb01e409e0b0ded5302fe258ee75cdb
|
Fix git method call (#149)
|
models/runbot_repo.py
|
models/runbot_repo.py
|
# Copyright <2017> <Vauxoo info@vauxoo.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
import logging
import os
import re
import urllib.parse
import requests
from odoo import fields, models
_logger = logging.getLogger(__name__)
def _get_url(url, base):
"""When get is URL_GITHUB/api/v3/User/keys must be convert to
URL_GITLAB/User.keys Because the api of gitlab required admin token
for get the ssh keys
https://docs.gitlab.com/ee/api/users.html#list-ssh-keys"""
match_object = re.search('([^/]+)/([^/]+)/([^/.]+(.git)?)', base)
if match_object:
prefix = ('https://%s/api/v3%s'
if not url.endswith('/keys') else 'https://%s%s')
project_name = (match_object.group(2) + '/' + match_object.group(3))
url = url.replace(':owner', match_object.group(2))
url = url.replace(':repo', match_object.group(3))
url = prefix % (match_object.group(1), url)
url = url.replace('/repos/', '/projects/')
url = url.replace('/commits/', '/repository/commits/')
url = url.replace(project_name,
urllib.parse.quote(project_name, safe=''))
if url.endswith('/keys'):
url = url.replace('users/', '').replace('/keys', '')
url = url + '.keys'
if '/pulls/' in url:
urls = url.split('/pulls/')
url = urls[0] + '/merge_requests?iid=' + urls[1]
return url
def _get_session(token):
session = requests.Session()
session.auth = (token, 'x-oauth-basic')
session.headers.update({'PRIVATE-TOKEN': token})
return session
class RunbotRepo(models.Model):
_inherit = "runbot.repo"
uses_gitlab = fields.Boolean(help='Enable the ability to use gitlab '
'instead of github')
def _git(self, cmd):
"""Rewriting the parent method to avoid deleting the merge_request the
gitlab"""
if cmd == ['fetch', '-p', 'origin', '+refs/pull/*/head:refs/pull/*']:
cmd.remove('-p')
return super(RunbotRepo, self)._git(cmd)
def _update_git(self):
"""Download the gitlab merge request references to work with the
referrals of pull github"""
self.ensure_one()
repo = self
if os.path.isdir(os.path.join(repo.path, 'refs')) and repo.uses_gitlab:
repo.git(['fetch', '-p', 'origin',
'+refs/merge-requests/*/head:refs/pull/*'])
return super(RunbotRepo, self)._update_git()
def _github(self, url, payload=None, ignore_errors=False):
"""This method is the same as the one in the odoo-extra/runbot.py
file but with the translation of each request github to gitlab format
- Get information from merge requests
input: URL_GITLAB/projects/... instead of URL_GITHUB/repos/...
output: res['base']['ref'] = res['gitlab_base_mr']
res['head']['ref'] = res['gitlab_head_mr']
- Get user public keys
input: URL_GITLAB/User.keys... instead of
URL_GITHUB/users/User/keys...
output: res['author'] = {'login': data['username']}
res['commiter'] = {'login': data['username']}
- Report statutes
input: URL_GITLABL/... instead of URL_GITHUB/statuses/...
output: N/A
"""
records_gitlab = self.filtered('uses_gitlab')
super(RunbotRepo, self - records_gitlab)._github(
url, payload=payload, ignore_errors=ignore_errors)
for repo in records_gitlab.filtered('token'):
try:
url = _get_url(url, repo.base)
if not url:
return
is_url_keys = url.endswith('.keys')
session = _get_session(repo.token)
if payload:
response = session.post(url, data=payload)
else:
response = session.get(url)
response.raise_for_status()
json = (response.json() if not is_url_keys
else response._content)
if 'merge_requests?iid=' in url:
json = json[0]
json['head'] = {'ref': json['target_branch']}
json['base'] = {'ref': json['source_branch']}
if '/commits/' in url:
for own_key in ['author', 'committer']:
key_email = '%s_email' % own_key
if json[key_email]:
url = _get_url('/users?search=%s' %
json[key_email],
repo.base)
response = session.get(url)
response.raise_for_status()
data = response.json()
json[own_key] = {
'login':
len(data) and data[0]['username'] or {}
}
if is_url_keys:
json = [{'key': ssh_rsa} for ssh_rsa in json.split('\n')]
return json
except Exception:
if ignore_errors:
_logger.exception('Ignored gitlab error %s %r', url,
payload)
else:
raise
|
Python
| 0
|
@@ -2375,16 +2375,17 @@
repo.
+_
git(%5B'fe
@@ -2406,16 +2406,17 @@
rigin',%0A
+
|
6561ea0b329b2f42126dc23eab59676de305dd73
|
remove unused imports
|
flask_wtf/forms.py
|
flask_wtf/forms.py
|
from __future__ import absolute_import
import jinja2
import wtforms
from flask import request, session, current_app
from wtforms.ext.csrf.session import SessionSecureForm
from wtforms.fields import HiddenField
class Form(SessionSecureForm):
"Implements a SessionSecureForm using app.SECRET_KEY and flask.session obj"
def __init__(self, formdata=None, obj=None, prefix='', csrf_enabled=None, **kwargs):
self.csrf_enabled = csrf_enabled
if csrf_enabled is None:
self.csrf_enabled = current_app.config.get('CSRF_ENABLED', True)
self.SECRET_KEY = current_app.config.get('CSRF_SESSION_KEY', '_csrf_token')
super(Form, self).__init__(formdata, obj, prefix, session, **kwargs)
def is_submitted(self):
"Check if request method is either PUT or POST"
return request and request.method in ("PUT", "POST")
def validate_on_submit(self):
"Call `form.validate()` if request method was either PUT or POST"
return self.is_submitted() and self.validate()
def validate_csrf_token(self, field):
if not self.csrf_enabled:
return True
return super(Form, self).validate_csrf_token(field)
def hidden_fields(self, *fields):
"hidden fields in a hidden DIV tag, in order to keep XHTML compliance."
if not fields:
fields = [f for f in self if isinstance(f, HiddenField)]
rv = [u'<div style="display:none;">']
for field in fields:
if isinstance(field, basestring):
field = getattr(self, field)
rv.append(unicode(field))
rv.append(u"</div>")
return jinja2.Markup(u"".join(rv))
def process(self, formdata=None, obj=None, **kwargs):
try:
if formdata is None:
formdata = request.form
except AttributeError:
pass
super(Form, self).process(formdata, obj, **kwargs)
|
Python
| 0.000001
|
@@ -50,23 +50,8 @@
nja2
-%0Aimport wtforms
%0A%0Afr
|
733afa0e339028da9f26c213f980b2c151f8c5c9
|
optimize worker count according to profiling result.
|
processor/emblem_processor.py
|
processor/emblem_processor.py
|
import logging
import operator
import os
from mapreduce.driver import MapReduceDriver
from mapreduce.emblem_finals import EmblemFinals
from mapreduce.emblem_freq import EmblemFreq
from nlp.emblem import Emblem
from processor.songci_dao import MongoDAO
class EmblemProcessor:
"""
Processor that deals with emblems.
It is normally used to extract emblems from a list of songci contents,
    and then save them back to the data access object,
along with some other fields such as the term-frequency of emblems.
The schema of an emblem:
- name
- freq_rate
- finals
- pinyin
- rhyme
- tones
"""
logger = logging.getLogger('EmblemProcessor')
logging.basicConfig(level=logging.INFO)
    def __init__(self, emblem_dao):
        self.emblem_dao = emblem_dao
        self.songci_list = emblem_dao.load_songci_list()
        # Always initialize the cache so the emblem_list property cannot
        # hit an unset attribute when the DAO has no load_emblem_list.
        self._emblem_list = None
        if 'load_emblem_list' in dir(emblem_dao):
            self._emblem_list = emblem_dao.load_emblem_list()
@property
def emblem_list(self):
if not self._emblem_list:
self.gen_freq_rate()
return self._emblem_list
def gen_freq_rate(self):
"""
Generate frequency rates for all the emblems from songci_list.
The field freq_rate is the term-frequency rate of a word (raw_emblem),
and this field determines whether a word is recognized as an emblem.
:return: list of tuples(emblem_name, freq_rate)
"""
raw_emblem_list = Emblem(self.songci_list).raw_emblem_list()
self.logger.info(
'Generating frequency rates, total=%d', len(raw_emblem_list))
map_reduce_driver = MapReduceDriver(
EmblemFreq.map_fn, EmblemFreq.reduce_fn)
emblem_stat_list = map_reduce_driver(raw_emblem_list)
emblem_stat_list.sort(key=operator.itemgetter(1), reverse=True)
def map_to_freq_rate(freq_stat_list):
min_freq_allowed = 2
total_len = len(freq_stat_list)
ret = []
            # cache the previous freq and its quotient (freq_rate) so equal
            # frequencies reuse the rate instead of recomputing the division
prev_freq_rate = prev_freq = 0
for freq_stat in freq_stat_list:
name, freq = freq_stat
if freq < min_freq_allowed:
break
freq_rate = prev_freq_rate if freq == prev_freq \
else freq / total_len
ret.append((name, freq_rate))
prev_freq = freq
prev_freq_rate = freq_rate
return ret
result_to_be_saved = map_to_freq_rate(emblem_stat_list)
self._emblem_list = [name for (name, freq_rate) in result_to_be_saved]
return result_to_be_saved
def gen_finals(self):
"""
Generate finals for emblems.
Finals are dictionaries whose keys are pinyin, rhyme, tones, etc.
:return: list of tuples(emblem_name, finals)
"""
emblem_list = self.emblem_list
self.logger.info(
'Generating finals, total=%d', len(emblem_list))
workers = 4 * (os.cpu_count() or 1)
map_reduce_driver = MapReduceDriver(
EmblemFinals.map_fn, EmblemFinals.reduce_fn, workers=workers)
emblem_finals_stat = map_reduce_driver(emblem_list)
result_to_be_saved = [(name, {
'pinyin': finals.pinyin,
'rhyme': finals.rhyme,
'tones': finals.tones,
}) for (name, finals) in emblem_finals_stat]
return result_to_be_saved
if __name__ == '__main__':
mongo_dao = MongoDAO()
processor = EmblemProcessor(mongo_dao)
mongo_dao.save_emblems_field(processor.gen_freq_rate(), 'freq_rate')
mongo_dao.save_emblems_field(processor.gen_finals(), 'finals')
|
Python
| 0
|
@@ -3109,12 +3109,8 @@
rs =
- 4 *
(os
@@ -3127,16 +3127,21 @@
() or 1)
+ %3C%3C 3
%0A
|
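The diff in the emblem_processor row swaps "workers = 4 * (os.cpu_count() or 1)" for "(os.cpu_count() or 1) << 3": a left shift by 3 multiplies by 2**3 = 8, so the pool grows from four to eight workers per CPU. A tiny worked check of the arithmetic, with no assumptions beyond os.cpu_count():

# Worked check of the worker-count change in the diff above; purely
# illustrative numbers.
import os

cpus = os.cpu_count() or 1      # fall back to 1 when the count is unknown
workers_before = 4 * cpus       # old: 4 workers per CPU
workers_after = cpus << 3       # new: cpus * 2**3, i.e. 8 workers per CPU
assert workers_after == 8 * cpus
print(workers_before, workers_after)  # e.g. 16 and 32 on a 4-core machine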
7041baaf713e7c3bf7b764aaddd439faae2b5a3b
|
Fix URL in api.get_workspaces().
|
floo/common/api.py
|
floo/common/api.py
|
import sys
import base64
import json
import subprocess
import traceback
from functools import wraps
try:
import ssl
except ImportError:
ssl = False
try:
import __builtin__
str_instances = (str, __builtin__.basestring)
except Exception:
str_instances = (str, )
try:
import urllib
from urllib.request import Request, urlopen
HTTPError = urllib.error.HTTPError
URLError = urllib.error.URLError
except (AttributeError, ImportError, ValueError):
import urllib2
from urllib2 import Request, urlopen
HTTPError = urllib2.HTTPError
URLError = urllib2.URLError
try:
from .. import editor
from . import msg, shared as G, utils
except ImportError:
import editor
import msg
import shared as G
import utils
def get_basic_auth(host):
username = G.AUTH.get(host, {}).get('username')
secret = G.AUTH.get(host, {}).get('secret')
if username is None or secret is None:
return
basic_auth = ('%s:%s' % (username, secret)).encode('utf-8')
basic_auth = base64.encodestring(basic_auth)
return basic_auth.decode('ascii').replace('\n', '')
class APIResponse():
def __init__(self, r):
self.body = None
if isinstance(r, bytes):
r = r.decode('utf-8')
if isinstance(r, str_instances):
lines = r.split('\n')
self.code = int(lines[0])
if self.code != 204:
self.body = json.loads('\n'.join(lines[1:]))
else:
self.code = r.code
if self.code != 204:
self.body = json.loads(r.read().decode("utf-8"))
def proxy_api_request(host, url, data, method):
args = ['python', '-m', 'floo.proxy', '--host', host, '--url', url]
if data:
args += ["--data", json.dumps(data)]
if method:
args += ["--method", method]
msg.log('Running ', ' '.join(args), ' (', G.PLUGIN_PATH, ')')
proc = subprocess.Popen(args, cwd=G.PLUGIN_PATH, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
(stdout, stderr) = proc.communicate()
if stderr:
raise IOError(stderr)
if proc.poll() != 0:
raise IOError(stdout)
r = APIResponse(stdout)
return r
def user_agent():
return 'Floobits Plugin %s %s %s py-%s.%s' % (
editor.name(),
G.__PLUGIN_VERSION__,
editor.platform(),
sys.version_info[0],
sys.version_info[1]
)
def hit_url(host, url, data, method):
if data:
data = json.dumps(data).encode('utf-8')
r = Request(url, data=data)
r.method = method
r.get_method = lambda: method
auth = get_basic_auth(host)
if auth:
r.add_header('Authorization', 'Basic %s' % auth)
r.add_header('Accept', 'application/json')
r.add_header('Content-type', 'application/json')
r.add_header('User-Agent', user_agent())
return urlopen(r, timeout=5)
def api_request(host, url, data=None, method=None):
if data:
method = method or 'POST'
else:
method = method or 'GET'
if ssl is False:
return proxy_api_request(host, url, data, method)
try:
r = hit_url(host, url, data, method)
except HTTPError as e:
r = e
return APIResponse(r)
def create_workspace(host, post_data):
api_url = 'https://%s/api/workspace' % host
return api_request(host, api_url, post_data)
def delete_workspace(host, owner, workspace):
api_url = 'https://%s/api/workspace/%s/%s' % (host, owner, workspace)
return api_request(host, api_url, method='DELETE')
def update_workspace(workspace_url, data):
result = utils.parse_url(workspace_url)
api_url = 'https://%s/api/workspace/%s/%s' % (result['host'], result['owner'], result['workspace'])
return api_request(result['host'], api_url, data, method='PUT')
def get_workspace_by_url(url):
result = utils.parse_url(url)
api_url = 'https://%s/api/workspace/%s/%s' % (result['host'], result['owner'], result['workspace'])
return api_request(result['host'], api_url)
def get_workspace(host, owner, workspace):
api_url = 'https://%s/api/workspace/%s/%s' % (host, owner, workspace)
return api_request(host, api_url)
def get_workspaces(host):
api_url = 'https://%s/api/workspace/can/view' % (host)
return api_request(host, api_url)
def get_orgs(host):
api_url = 'https://%s/api/orgs' % (host)
return api_request(host, api_url)
def get_orgs_can_admin(host):
api_url = 'https://%s/api/orgs/can/admin' % (host)
return api_request(host, api_url)
def send_error(description=None, exception=None):
G.ERROR_COUNT += 1
if G.ERRORS_SENT >= G.MAX_ERROR_REPORTS:
msg.warn('Already sent ', G.ERRORS_SENT, ' errors this session. Not sending any more.\n', description, exception)
return
data = {
'jsondump': {
'error_count': G.ERROR_COUNT
},
'message': {},
'dir': G.COLAB_DIR,
}
if G.AGENT:
data['owner'] = getattr(G.AGENT, "owner", None)
data['username'] = getattr(G.AGENT, "username", None)
data['workspace'] = getattr(G.AGENT, "workspace", None)
if exception:
data['message'] = {
'description': str(exception),
            'stack': traceback.format_exc()  # format_exc takes no exception argument
}
msg.log('Floobits plugin error! Sending exception report: ', data['message'])
if description:
data['message']['description'] = description
try:
# TODO: use G.AGENT.proto.host?
api_url = 'https://%s/api/log' % (G.DEFAULT_HOST)
r = api_request(G.DEFAULT_HOST, api_url, data)
G.ERRORS_SENT += 1
return r
except Exception as e:
print(e)
def send_errors(f):
@wraps(f)
def wrapped(*args, **kwargs):
try:
return f(*args, **kwargs)
except Exception as e:
send_error(None, e)
raise
return wrapped
|
Python
| 0.000002
|
@@ -4246,16 +4246,17 @@
orkspace
+s
/can/vie
|
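The floobits diff is a one-character fix: get_workspaces() must hit /api/workspaces/can/view (plural) rather than /api/workspace/can/view. A hedged sketch of the corrected URL construction follows; the host is an invented example value and the endpoint shape is taken from the row's diff, not verified against a live Floobits server.

# Sketch of the corrected list-workspaces URL; 'floobits.example.com'
# is an invented host and nothing here talks to a real server.
host = 'floobits.example.com'
api_url = 'https://%s/api/workspaces/can/view' % host  # note the plural
print(api_url)

# With the plugin on the import path, the equivalent call would be:
#   from floo.common import api
#   response = api.get_workspaces(host)  # APIResponse with .code and .body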