| commit (string, length 40) | subject (string, 1–3.25k chars) | old_file (string, 4–311 chars) | new_file (string, 4–311 chars) | old_contents (string, 0–26.3k chars) | lang (string, 3 classes) | proba (float64, 0–1) | diff (string, 0–7.82k chars) |
|---|---|---|---|---|---|---|---|
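As a minimal sketch of how one of the rows below could be consumed programmatically: this assumes rows arrive as plain dicts keyed by the column names above, and that the `diff` field is URL-style percent-encoded (the `%0A`, `%22`, and `%25` sequences in the rows suggest this, but it is an inference, not documented here). The sample row is abridged from the "version 0.5.13" record further down.

```python
from urllib.parse import unquote

def decode_diff(row: dict) -> str:
    """Decode the percent-encoded diff field of one dataset row.

    Assumed escaping: %0A for newlines, %22 for double quotes,
    %7B/%7D for braces, %25 for a literal percent sign.
    """
    return unquote(row["diff"])

# Hypothetical row, abridged from the "version 0.5.13" record below:
row = {
    "commit": "adf8c09588c0e95207738a72dd1ec3f5fa3f7338",
    "subject": "version 0.5.13",
    "old_file": "api/info.py",
    "new_file": "api/info.py",
    "lang": "Python",
    "proba": 0.000001,
    "diff": "@@ -147,11 +147,9 @@%0A .5.1%0A-2.2%0A+3%0A '%0A%0A%0A",
}
print(decode_diff(row))
# @@ -147,11 +147,9 @@
#  .5.1
# -2.2
# +3
#  '
```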
030b23f4b7a0c0729e87d3cc712c37198ff9c6e2
|
Add setters + use own name.
|
pynessus/models/group.py
|
pynessus/models/group.py
|
"""
Copyright 2014 Quentin Kaiser
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from nessusobject import NessusObject
from user import User
class Group(NessusObject):
"""
A Nessus group.
Attributes:
id(int): identification
name(str): group's name
_Google Python Style Guide:
http://google-styleguide.googlecode.com/svn/trunk/pyguide.html
"""
def __init__(self, server):
"""Constructor"""
super(Group, self).__init__(server)
self._name = None
self._users = []
def create(self, name):
"""
Create a group.
Params:
Returns:
"""
if self._server.server_version[0] == "6":
response = self._server._api_request(
"POST",
"/groups",
{"name": name}
)
if response is not None:
self.id = response["id"]
self._name = response["name"]
self._permissions = response["permissions"]
return True
else:
return False
else:
raise Exception("Not supported.")
def edit(self, name):
"""
Edit a group
Params:
Returns:
"""
if self._server.server_version[0] == "6":
response = self._server._api_request(
"POST",
"/groups/%d" % self.id,
{"name": name}
)
if response is not None:
self.id = response["id"]
self._name = response["name"]
self._permissions = response["permissions"]
return True
else:
return False
else:
raise Exception("Not supported.")
def delete(self):
"""
Delete a group.
Params:
Returns:
"""
if self._server.server_version[0] == "6":
response = self._server._api_request(
"DELETE",
"/groups/%d" % self.id,
""
)
if response is None:
return True
else:
return False
else:
raise Exception("Not supported.")
def list_user(self):
if self._server.server_version[0] == "6":
response = self._server._api_request(
"GET",
"/groups/%d/users" % (self.id),
""
)
if response is not None:
self._users = []
if "users" in response and response["users"] is not None:
for u in response["users"]:
user = User(self._server)
user.id = u["id"]
user.username = u["username"]
user.name = u["name"]
user.email = u["email"]
user.permissions = u["permissions"]
user.lastlogin = u["lastlogin"]
user.type = u["type"]
self._users.append(user)
return True
else:
return False
else:
raise Exception("Not supported.")
def add_user(self, user):
if self._server.server_version[0] == "6":
if type(user) is User:
response = self._server._api_request(
"POST",
"/groups/%d/users/%d" % (self.id, user.id),
""
)
if response is None:
return True
else:
return False
else:
raise Exception("Invalid user format.")
else:
raise Exception("Not supported.")
def delete_user(self, user):
if self._server.server_version[0] == "6":
if type(user) is User:
response = self._server._api_request(
"DELETE",
"/groups/%d/users/%d" % (self.id, user.id),
""
)
if response is None:
return True
else:
return False
else:
raise Exception("Invalid user format.")
else:
raise Exception("Not supported.")
@property
def users(self):
"""
Return the group user list.
Params:
Returns:
"""
return self._users
@users.setter
def users(self, value):
if type(value) == list:
self._users = value
else:
raise Exception("Invalid format.")
|
Python
| 0
|
@@ -1027,33 +1027,27 @@
create(self
-, name
):%0A
+
%22%22%22%0A
@@ -1283,32 +1283,37 @@
%7B%22name%22:
+self.
name%7D%0A
@@ -1657,14 +1657,8 @@
self
-, name
):%0A
@@ -1923,16 +1923,21 @@
%22name%22:
+self.
name%7D%0A
@@ -2731,32 +2731,32 @@
t supported.%22)%0A%0A
-
def list_use
@@ -2752,24 +2752,25 @@
ef list_user
+s
(self):%0A%0A
@@ -5196,16 +5196,625 @@
nvalid format.%22)
+%0A%0A @property%0A def id(self):%0A return self._id%0A%0A @id.setter%0A def id(self, value):%0A self._id = int(value)%0A%0A @property%0A def name(self):%0A return self._name%0A%0A @name.setter%0A def name(self, value):%0A self._name = str(value)%0A%0A @property%0A def permissions(self):%0A return self._permissions%0A%0A @permissions.setter%0A def permissions(self, value):%0A self._permissions = int(value)%0A%0A @property%0A def user_count(self):%0A return self._user_count%0A%0A @user_count.setter%0A def user_count(self, value):%0A self._user_count = int(value)%0A%0A
|
d7249e710be3da451b4ca752780e5a86501f6198
|
update version number to 1.8.3
|
python/flame/__init__.py
|
python/flame/__init__.py
|
from collections import OrderedDict
from ._internal import (Machine as MachineBase,
GLPSPrinter, _GLPSParse,
_pyapi_version, _capi_version,
FLAME_ERROR, FLAME_WARN,
FLAME_INFO, FLAME_DEBUG,
setLogLevel, getLoggerName)
def _list2odict(L):
'Recursively turn list of tuples into OrderedDict'
for i in range(len(L)):
K,V = L[i]
if isinstance(V, list):
L[i] = (K,list(map(OrderedDict, V)))
return OrderedDict(L)
class GLPSParser(object):
"""GLPS parser context
"""
def parse(self, *args, **kws):
"""parse(file_or_buf, path=None, extra=None)
parse(file_or_buf, path="/dir/", extra={'VAR':'value'})
Parse the provided buffer or file-like object.
'path' is used to expand relative paths found while parsing.
        If 'path' is None then either PWD or the .name of
        the file-like object is used.
'extra' may be used to provide additional variable definitions when parsing.
Returns an OrderedDict.
"""
return _GLPSParse(*args, **kws)
class Machine(MachineBase):
def conf(self, *args, **kws):
return _list2odict(super(Machine, self).conf(*args, **kws))
# by default pass all but DEBUG to python logger.
# May set to FLAME_WARN for performance
setLogLevel(FLAME_WARN)
__all__ = ['Machine',
'GLPSPrinter',
'GLPSParser',
]
__version__ = '1.8.1'
|
Python
| 0.000006
|
@@ -1502,11 +1502,11 @@
= '1.8.
-1
+3
'%0A
|
7a82076b8c7fb61a317053749957cf735721808a
|
Fix spelling error in recordio module description (#5892)
|
python/mxnet/recordio.py
|
python/mxnet/recordio.py
|
"""Read and write for the RecrodIO data format."""
from __future__ import absolute_import
from collections import namedtuple
import ctypes
import struct
import numbers
import numpy as np
from .base import _LIB
from .base import RecordIOHandle
from .base import check_call
from .base import c_str
try:
import cv2
except ImportError:
cv2 = None
class MXRecordIO(object):
"""Read/write RecordIO format data.
Parameters
----------
uri : string
uri path to recordIO file.
flag : string
"r" for reading or "w" writing.
"""
def __init__(self, uri, flag):
self.uri = c_str(uri)
self.handle = RecordIOHandle()
self.flag = flag
self.is_open = False
self.open()
def open(self):
"""Open record file."""
if self.flag == "w":
check_call(_LIB.MXRecordIOWriterCreate(self.uri, ctypes.byref(self.handle)))
self.writable = True
elif self.flag == "r":
check_call(_LIB.MXRecordIOReaderCreate(self.uri, ctypes.byref(self.handle)))
self.writable = False
else:
raise ValueError("Invalid flag %s"%self.flag)
self.is_open = True
def __del__(self):
self.close()
def close(self):
"""Close record file."""
if not self.is_open:
return
if self.writable:
check_call(_LIB.MXRecordIOWriterFree(self.handle))
else:
check_call(_LIB.MXRecordIOReaderFree(self.handle))
self.is_open = False
def reset(self):
"""Reset pointer to first item. If record is opened with 'w',
this will truncate the file to empty."""
self.close()
self.open()
def write(self, buf):
"""Write a string buffer as a record.
Parameters
----------
buf : string (python2), bytes (python3)
Buffer to write.
"""
assert self.writable
check_call(_LIB.MXRecordIOWriterWriteRecord(self.handle,
ctypes.c_char_p(buf),
ctypes.c_size_t(len(buf))))
def read(self):
"""Read a record as string.
Returns
----------
buf : string
Buffer read.
"""
assert not self.writable
buf = ctypes.c_char_p()
size = ctypes.c_size_t()
check_call(_LIB.MXRecordIOReaderReadRecord(self.handle,
ctypes.byref(buf),
ctypes.byref(size)))
if buf:
buf = ctypes.cast(buf, ctypes.POINTER(ctypes.c_char*size.value))
return buf.contents.raw
else:
return None
class MXIndexedRecordIO(MXRecordIO):
"""Read/write RecordIO format data supporting random access.
Parameters
----------
idx_path : str
Path to index file.
uri : str
Path to record file. Only support file types that are seekable.
flag : str
'w' for write or 'r' for read
key_type : type
Data type for keys.
"""
def __init__(self, idx_path, uri, flag, key_type=int):
self.idx_path = idx_path
self.idx = {}
self.keys = []
self.key_type = key_type
self.fidx = None
super(MXIndexedRecordIO, self).__init__(uri, flag)
def open(self):
super(MXIndexedRecordIO, self).open()
self.idx = {}
self.keys = []
self.fidx = open(self.idx_path, self.flag)
if not self.writable:
for line in iter(self.fidx.readline, ''):
line = line.strip().split('\t')
key = self.key_type(line[0])
self.idx[key] = int(line[1])
self.keys.append(key)
def close(self):
if not self.is_open:
return
super(MXIndexedRecordIO, self).close()
self.fidx.close()
def seek(self, idx):
"""Query current read head position."""
assert not self.writable
pos = ctypes.c_size_t(self.idx[idx])
check_call(_LIB.MXRecordIOReaderSeek(self.handle, pos))
def tell(self):
"""Query current write head position."""
assert self.writable
pos = ctypes.c_size_t()
check_call(_LIB.MXRecordIOWriterTell(self.handle, ctypes.byref(pos)))
return pos.value
def read_idx(self, idx):
"""Read record with index."""
self.seek(idx)
return self.read()
def write_idx(self, idx, buf):
"""Write record with index."""
key = self.key_type(idx)
pos = self.tell()
self.write(buf)
self.fidx.write('%s\t%d\n'%(str(key), pos))
self.idx[key] = pos
self.keys.append(key)
IRHeader = namedtuple('HEADER', ['flag', 'label', 'id', 'id2'])
_IR_FORMAT = 'IfQQ'
_IR_SIZE = struct.calcsize(_IR_FORMAT)
def pack(header, s):
"""Pack a string into MXImageRecord.
Parameters
----------
header : IRHeader
Header of the image record.
``header.label`` can be a number or an array.
s : str
string to pack
"""
header = IRHeader(*header)
if isinstance(header.label, numbers.Number):
header = header._replace(flag=0)
else:
label = np.asarray(header.label, dtype=np.float32)
header = header._replace(flag=label.size, label=0)
s = label.tostring() + s
s = struct.pack(_IR_FORMAT, *header) + s
return s
def unpack(s):
"""Unpack a MXImageRecord to string.
Parameters
----------
s : str
String buffer from ``MXRecordIO.read``.
Returns
-------
header : IRHeader
Header of the image record.
s : str
Unpacked string.
"""
header = IRHeader(*struct.unpack(_IR_FORMAT, s[:_IR_SIZE]))
s = s[_IR_SIZE:]
if header.flag > 0:
header = header._replace(label=np.fromstring(s, np.float32, header.flag))
s = s[header.flag*4:]
return header, s
def unpack_img(s, iscolor=-1):
"""Unpack a MXImageRecord to image.
Parameters
----------
s : str
String buffer from ``MXRecordIO.read``.
iscolor : int
image format option for ``cv2.imdecode``.
Returns
-------
header : IRHeader
Header of the image record.
img : numpy.ndarray
Unpacked image.
"""
header, s = unpack(s)
img = np.fromstring(s, dtype=np.uint8)
assert cv2 is not None
img = cv2.imdecode(img, iscolor)
return header, img
def pack_img(header, img, quality=95, img_fmt='.jpg'):
"""Pack an image into ``MXImageRecord``.
Parameters
----------
header : IRHeader
Header of the image record.
``header.label`` can be a number or an array.
img : numpy.ndarray
image to pack
quality : int
Quality for JPEG encoding in range 1-100, or compression for PNG encoding in range 1-9.
img_fmt : str
Encoding of the image (.jpg for JPEG, .png for PNG).
Returns
-------
s : str
The packed string.
"""
assert cv2 is not None
jpg_formats = ['.JPG', '.JPEG']
png_formats = ['.PNG']
encode_params = None
if img_fmt.upper() in jpg_formats:
encode_params = [cv2.IMWRITE_JPEG_QUALITY, quality]
elif img_fmt.upper() in png_formats:
encode_params = [cv2.IMWRITE_PNG_COMPRESSION, quality]
ret, buf = cv2.imencode(img_fmt, img, encode_params)
assert ret, 'failed to encode image'
return pack(header, buf.tostring())
|
Python
| 0.000001
|
@@ -22,18 +22,18 @@
the Rec
-r
o
+r
dIO data
|
1809f2d3c73bff910fb7e538ace0e2584d1bd857
|
remove debug print
|
python/smurff/prepare.py
|
python/smurff/prepare.py
|
import numpy as np
import scipy as sp
import pandas as pd
import scipy.sparse
import numbers
from .helper import SparseTensor
def make_sparse(Y, nnz, shape = None, seed = None):
Ytr, Yte = make_train_test(Y, nnz, shape, seed)
return Yte
def make_train_test(Y, ntest, shape = None, seed = None):
"""Splits a sparse matrix Y into a train and a test matrix.
Parameters
----------
Y : :class:`scipy.spmatrix`, (coo_matrix, csr_matrix or csc_matrix) or
:class:`numpy.ndarray` or
:class:`pandas.DataFrame` or
:class:`smurff.SparseTensor`
Matrix/Array/Tensor to split
ntest : float <1.0 or integer.
- if float, then indicates the ratio of test cells
- if integer, then indicates the number of test cells
Returns
-------
Ytrain : csr_matrix
train part
Ytest : csr_matrix
test part
"""
if isinstance(Y, pd.DataFrame):
return make_train_test(SparseTensor(Y), ntest, Y.shape, seed)
if isinstance(Y, np.ndarray):
nmodes = len(Y.shape)
if (nmodes > 2):
Ysparse = SparseTensor(Y)
else:
Ysparse = sp.sparse.coo_matrix(Y)
return make_train_test(Ysparse, ntest, shape, seed)
if sp.sparse.issparse(Y):
Y = Y.tocoo(copy = False)
elif not isinstance(Y, SparseTensor):
raise TypeError("Unsupported Y type: " + str(type(Y)))
if not isinstance(ntest, numbers.Real) or ntest < 0:
raise TypeError("ntest has to be a non-negative number (number or ratio of test samples).")
if ntest < 1:
ntest = Y.nnz * ntest
ntest = int(round(ntest))
ntest = max(1,ntest)
if seed is not None:
np.random.seed(seed)
rperm = np.random.permutation(Y.nnz)
train = rperm[ntest:]
test = rperm[0:ntest]
if shape is None:
shape = Y.shape
if sp.sparse.issparse(Y):
Ytrain = sp.sparse.coo_matrix( (Y.data[train], (Y.row[train], Y.col[train])), shape=shape )
Ytest = sp.sparse.coo_matrix( (Y.data[test], (Y.row[test], Y.col[test])), shape=shape )
else:
assert isinstance(Y, SparseTensor)
print("train = ", train)
print("test = ", test)
print("Y.columns = ", Y.columns)
print("Y.columns[0] = ", Y.columns[0])
print("np.array(Y.columns[0])[train] = ", np.array(Y.columns[0])[train])
Ytrain = SparseTensor(
( Y.values[train], [ np.array(idx)[train] for idx in Y.columns ] ),
Y.shape)
Ytest = SparseTensor(
( Y.values[test], [ np.array(idx)[test] for idx in Y.columns ] ),
Y.shape)
return Ytrain, Ytest
|
Python
| 0.000001
|
@@ -121,16 +121,38 @@
seTensor
+%0Afrom . import wrapper
%0A%0Adef ma
@@ -2247,242 +2247,8 @@
r)%0A%0A
- print(%22train = %22, train)%0A print(%22test = %22, test)%0A print(%22Y.columns = %22, Y.columns)%0A print(%22Y.columns%5B0%5D = %22, Y.columns%5B0%5D)%0A print(%22np.array(Y.columns%5B0%5D)%5Btrain%5D = %22, np.array(Y.columns%5B0%5D)%5Btrain%5D)%0A%0A
|
2573670f0875e48cfacfb96f61a69b63c80cbec7
|
debug flag
|
analysis.py
|
analysis.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#Author: Tim Henderson
#Email: tim.tadh@hackthology.com
#For licensing see the LICENSE file in the top level directory.
import itertools
from gram_parser import parse, EmptyString, EoS, NonTerminal
def first(productions, sym):
if isinstance(sym, tuple):
symbols = set()
for s in sym:
first_s = first(productions, s)
symbols |= first_s
if EmptyString() not in first_s:
break
return symbols
if sym.terminal: return set([sym])
symbols = set()
for p in productions[sym]:
all_e = True
for psym in p:
psym_first = first(productions, psym)
symbols |= psym_first
if EmptyString() not in psym_first:
all_e = False
break
if all_e:
symbols.add(EmptyString())
return symbols
class follow(object):
cache = dict()
def __new__(cls, productions, sym):
if sym in cls.cache: return cls.cache[sym]
if sym.terminal: raise Exception, "Follow does not accept terminal symbols."
symbols = set()
if sym == productions[0]:
symbols |= set([EoS()])
for nt, p in productions.containing(sym):
if sym not in p: raise Exception, "Symbol not in production"
i = p.index(sym)
if i+1 < len(p):
f = first(productions, p[i+1])
if EmptyString() in f:
f.remove(EmptyString())
symbols |= follow(productions, nt)
symbols |= f
elif i+1 == len(p) and sym != nt:
symbols |= follow(productions, nt)
cls.cache[sym] = symbols
return symbols
def LL1(productions):
ret = True
for nt, nt_productions in productions.iteritems():
follow_nt = follow(productions, nt)
for a,b in itertools.product(nt_productions, nt_productions):
if a == b: continue
first_a = first(productions, a)
first_b = first(productions, b)
if (first_a & first_b) != set():
print
print 'Error 1 @%s' % nt.sym
print ' '*4, a
print ' '*8, first_a
print ' '*4, b
print ' '*8, first_b
ret = False
if (EmptyString() in first_a) and ((first_a & follow_nt) != set()):
print
print 'Error 2 @%s' % nt.sym
print ' '*4, a
print ' '*4, b
ret = False
if (EmptyString() in first_b) and ((first_b & follow_nt) != set()):
print
print 'Error 3 @%s' % nt.sym
print ' '*4, a
print ' '*4, b
ret = False
return ret
def build_table(productions):
pass
|
Python
| 0
|
@@ -1779,24 +1779,37 @@
(productions
+, DEBUG=False
):%0A ret =
@@ -2145,24 +2145,54 @@
) != set():%0A
+ if DEBUG:%0A
@@ -2193,32 +2193,36 @@
print%0A
+
@@ -2258,32 +2258,36 @@
+
print ' '*4, a%0A
@@ -2277,32 +2277,36 @@
print ' '*4, a%0A
+
@@ -2334,32 +2334,36 @@
+
print ' '*4, b%0A
@@ -2353,32 +2353,36 @@
print ' '*4, b%0A
+
@@ -2502,32 +2502,62 @@
_nt) != set()):%0A
+ if DEBUG:%0A
@@ -2554,32 +2554,36 @@
print%0A
+
@@ -2619,32 +2619,36 @@
+
print ' '*4, a%0A
@@ -2638,32 +2638,36 @@
print ' '*4, a%0A
+
@@ -2781,32 +2781,62 @@
_nt) != set()):%0A
+ if DEBUG:%0A
@@ -2833,32 +2833,36 @@
print%0A
+
@@ -2898,32 +2898,36 @@
+
print ' '*4, a%0A
@@ -2917,32 +2917,36 @@
print ' '*4, a%0A
+
|
a26e735796534c34b31eef0d8f19eb400d137b9c
|
allow for empty string argument
|
pywebdata/baseservice.py
|
pywebdata/baseservice.py
|
import copy
import json
import requests
from itertools import product, imap
from xml.etree import ElementTree as ET
from parameter import Input, Output
from parsers import parse_query
output_parsers = {'json': json.loads, 'xml': ET.parse}
class ServiceMount(type):
def __init__(self, name, bases, attrs):
if not hasattr(self, 'services'):
self.services = {}
else:
self.services[self.name] = self
class BaseService(object):
__metaclass__ = ServiceMount
def update_parameters(self, **kwargs):
for param_name, param_value in kwargs.items():
getattr(self, param_name).update(param_value)
def convert_url(self):
inputs = self.get_input_values()
return self.url.substitute(inputs)
def query(self, param_dict={}, **kwargs):
if param_dict:
self.update_parameters(**param_dict)
else:
self.update_parameters(**kwargs)
url = self.convert_url()
r = requests.get(url)
results = output_parsers.get('json', lambda x:x)(r.text)
return self.parse_results(results)
def query_many(self, dict_list=[]):
results = []
for d in dict_list:
res = self.query(d)
results.extend(res)
return results
def conditional_query(self, qry_string=''):
outputs = self.get_outputs()
if qry_string:
conditions = parse_query(qry_string)
def attach_input_name(qry):
return dict(zip(inputs.keys(), qry))
input_ranges = []
inputs = self.get_inputs()
for input_name, input_obj in inputs.items():
input_range = input_obj.get_range(conditions[input_name])
input_ranges.append(input_range)
queries = imap(attach_input_name, product(*input_ranges))
return self.query_many(queries)
def parse_results(self, results):
return map(self.parse_row, self.f_iter(results))
def parse_row(self, row):
result_row = {}
for name, output in self.get_outputs().items():
if getattr(self, name).f_parse:
result_row[name] = getattr(self, name).f_parse(row)
else:
result_row[name] = row.get(name)
return result_row
def filter(self, *args, **kwargs):
raise NotImplementedError
@classmethod
def get_inputs(cls):
return cls.get_params(Input)
@classmethod
def get_outputs(cls):
return cls.get_params(Output)
@classmethod
def get_params(cls, param_type, f=lambda x:x):
param_dict = {}
for name, obj in cls.__dict__.items():
if isinstance(obj, param_type):
param_dict[name] = f(obj)
return param_dict
@classmethod
def get_input_values(cls):
return cls.get_params(Input, lambda x:x.value)
@classmethod
def get_output_values(cls):
return cls.get_params(Output, lambda x:x.value)
@staticmethod
def f_iter(x):
return x
def copy(self):
return copy.deepcopy(self)
|
Python
| 0.000223
|
@@ -1358,10 +1358,12 @@
ing=
-''
+None
):%0A
@@ -1369,19 +1369,18 @@
-out
+in
puts = s
@@ -1391,45 +1391,17 @@
get_
-out
+in
puts()%0A
- if qry_string:%0A
@@ -1528,32 +1528,36 @@
qry))%0A%0A
+get_
input_ranges = %5B
@@ -1555,138 +1555,27 @@
ange
-s
=
-%5B%5D%0A inputs = self.get_inputs()%0A for input_name, input_obj in inputs.items():%0A input_range = input_obj
+lambda (x, y): y
.get
@@ -1596,25 +1596,12 @@
ons%5B
-input_name%5D)%0A
+x%5D)%0A
@@ -1620,29 +1620,47 @@
nges
-.append(input_range)%0A
+ = map(get_input_range, inputs.items())
%0A
@@ -1722,16 +1722,17 @@
anges))%0A
+%0A
|
b42f1a89ad849274d2b6e78dfb772e95b49d23de
|
remove unused variable
|
qa_tests/bcr_unittest.py
|
qa_tests/bcr_unittest.py
|
# Copyright (c) 2010-2011, GEM Foundation.
#
# OpenQuake is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License version 3
# only, as published by the Free Software Foundation.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License version 3 for more details
# (a copy is included in the LICENSE file that accompanied this code).
#
# You should have received a copy of the GNU Lesser General Public License
# version 3 along with OpenQuake. If not, see
# <http://www.gnu.org/licenses/lgpl-3.0.txt> for a copy of the LGPLv3 License.
import os
import shutil
import unittest
from lxml import etree
from nose.plugins.attrib import attr
from openquake.db.models import OqCalculation
from openquake.nrml import nrml_schema_file
from tests.utils import helpers
BCR_DEMO_BASE = 'demos/benefit_cost_ratio'
CONFIG = '%s/config.gem' % BCR_DEMO_BASE
COMPUTED_OUTPUT = '%s/computed_output' % BCR_DEMO_BASE
RESULT = '%s/bcr-map.xml' % COMPUTED_OUTPUT
NRML = 'http://openquake.org/xmlns/nrml/0.3'
GML = 'http://www.opengis.net/gml'
class BCRQATestCase(unittest.TestCase):
@attr('qa')
def test_bcr(self):
        # Verify the EAL (Original and Retrofitted) and BCR values against
        # hand-computed results.
# For the EAL values, a delta of 0.0009 (3 decimal places of precision)
# is considered reasonable.
# For the BCR, a delta of 0.009 (2 decimal places of precision) is
# considered reasonable.
expected_result = {
# site location
(-122.0, 38.225): {
# assetRef eal_orig eal_retrof bcr
'a1': (0.009379, 0.006586, 0.483091)
}
}
delta = 1e-5
helpers.run_job(CONFIG)
calc_record = OqCalculation.objects.latest("id")
self.assertEqual('succeeded', calc_record.status)
result = self._parse_bcr_map(RESULT)
try:
self._assert_bcr_results_equal(expected_result, result)
finally:
shutil.rmtree(COMPUTED_OUTPUT)
def _assert_bcr_results_equal(self, expected, actual, eal_delta=0.0009,
bcr_delta=0.009):
"""Given a pair of dicts assert that they are equal.
Result values do not have to be exact and the following default deltas
are used:
For EAL values, a delta of
0.0009 (3 decimal places of precision) is allowed. For BCR values, a
delta of 0.009 (2 decimal places of precision) is allowed."""
self.assertEqual(len(expected), len(actual))
for site, exp_value in expected.items():
for asset_ref, (eal_orig, eal_retrof, bcr) in exp_value.items():
act_eal_orig, act_eal_retrof, act_bcr = (
actual[site][asset_ref])
# Verify 'EAL, Original'
self.assertAlmostEqual(eal_orig, act_eal_orig,
delta=eal_delta)
# Verify 'EAL, Retrofitted'
self.assertAlmostEqual(eal_retrof, act_eal_retrof,
delta=eal_delta)
# Verify BCR
self.assertAlmostEqual(bcr, act_bcr, delta=bcr_delta)
def _parse_bcr_map(self, filename):
self.assertTrue(os.path.exists(filename))
schema = etree.XMLSchema(file=nrml_schema_file())
parser = etree.XMLParser(schema=schema)
tree = etree.parse(filename, parser=parser)
bcrnodes = tree.getroot().findall(
'{%(ns)s}riskResult/{%(ns)s}benefitCostRatioMap/{%(ns)s}BCRNode' %
{'ns': NRML}
)
result = {}
for bcrnode in bcrnodes:
[site] = bcrnode.findall('{%s}site/{%s}Point/{%s}pos' %
(NRML, GML, GML))
assets = {}
valuenodes = bcrnode.findall('{%s}benefitCostRatioValue' % NRML)
for valuenode in valuenodes:
values = []
for tag in ('expectedAnnualLossOriginal',
'expectedAnnualLossRetrofitted',
'benefitCostRatio'):
[node] = valuenode.findall('{%s}%s' % (NRML, tag))
values.append(float(node.text))
assets[valuenode.attrib['assetRef']] = tuple(values)
result[tuple(map(float, site.text.split()))] = assets
return result
|
Python
| 0.001259
|
@@ -1899,29 +1899,8 @@
%7D
-%0A delta = 1e-5
%0A%0A
|
6708830ab2bde841bbc3da2befbbe5ab9f3d21aa
|
Put test stuff inside `if __name__ == '__main__'`
|
ansi_str.py
|
ansi_str.py
|
import re
_ansi_re = re.compile('\033\[((?:\d|;)*)([a-zA-Z])')
def strip_ansi(value):
return _ansi_re.sub('', value)
def len_exclude_ansi(value):
return len(strip_ansi(value))
class ansi_str(str):
"""A str subclass, specialized for strings containing ANSI escapes.
When you call the ``len`` method, it discounts ANSI color escape codes.
This is beneficial, because ANSI color escape codes won't mess up code
that tries to do alignment, padding, printing in columns, etc.
"""
_stripped = None
def __len__(self, exclude_ansi=True):
if exclude_ansi is False:
return len(self[:])
if self._stripped is None:
self._stripped = strip_ansi(self[:])
return len(self._stripped)
# s = ansi_str('abc')
# print s
# print len(s)
s = ansi_str(u'\x1b[32m\x1b[1mSUCCESS\x1b[0m')
print s
print len(s)
print s.__len__()
print s.__len__(exclude_ansi=False)
print(len_exclude_ansi(u'\x1b[32m\x1b[1mSUCCESS\x1b[0m'))
|
Python
| 0.00001
|
@@ -755,16 +755,48 @@
ipped)%0A%0A
+%0Aif __name__ == '__main__':%0A
# s = an
@@ -809,16 +809,20 @@
('abc')%0A
+
# print
@@ -823,16 +823,20 @@
print s%0A
+
# print
@@ -843,16 +843,20 @@
len(s)%0A%0A
+
s = ansi
@@ -894,24 +894,28 @@
1b%5B0m')%0A
+
print s%0A
print le
@@ -906,16 +906,20 @@
print s%0A
+
print le
@@ -923,16 +923,20 @@
len(s)%0A
+
print s.
@@ -945,16 +945,20 @@
len__()%0A
+
print s.
@@ -985,16 +985,20 @@
=False)%0A
+
print(le
|
adf8c09588c0e95207738a72dd1ec3f5fa3f7338
|
version 0.5.13
|
api/info.py
|
api/info.py
|
from collections import OrderedDict
from rest_framework import viewsets, mixins, response, reverse
NAME = 'vsemionov.boomerang.api'
VERSION = '0.5.12.2'
class ApiInfoViewSet(mixins.ListModelMixin,
viewsets.GenericViewSet):
view_name = 'Api Info'
@staticmethod
def _get_user_url(request):
return request.user.id and reverse.reverse('user-detail', request=request, args=[request.user.username])
def get_view_name(self):
return self.view_name
def list(self, request, *args, **kwargs):
app = OrderedDict((('name', NAME),
('version', VERSION)))
user = OrderedDict((('username', request.user.username),
('url', self._get_user_url(request))))
info = OrderedDict((('app', app),
('user', user)))
return response.Response(info)
|
Python
| 0.000001
|
@@ -147,11 +147,9 @@
.5.1
-2.2
+3
'%0A%0A%0A
|
0a4652c7221c16aa2a95e33dd9742e1d64fa45d5
|
Fix person search
|
api/view.py
|
api/view.py
|
# -*- coding: utf-8 -*-
from flask import abort, make_response, request
from flask.ext.sqlalchemy import BaseQuery
from flask.views import MethodView
from api.models.api_key import ApiKey
from utils.jsonify import jsonify
class ApiView(MethodView):
'''Create basic REST HTTP endpoints for a single resource type.
To create the endpoints, an API view class may inherit this class.
The view subclass should have :attr:`model` which inherits :class:`ApiModel`.
class PersonApi(ApiView):
model = Person
...
class Person(ApiModel):
...
'''
model = None
def get(self, _type=None, **kwargs):
'''Dispatch GET request to an appropriate handler based on the `type`'''
if not self.is_valid_api_key(request.args.get('api_key')):
abort(401)
response = None
if _type == 'single':
response = self.get_single(**kwargs)
elif _type == 'search':
response = self.get_list(self._search(), **kwargs)
elif _type == 'list':
response = self.get_list(self._query, **kwargs)
else:
raise Exception('unknown api request type: %s' % _type)
response = make_response(response)
response.headers['Content-Type'] = 'application/json'
return response
def is_valid_api_key(self, api_key):
if not api_key:
return False
record = ApiKey.query.filter_by(key=api_key).first()
return record is not None
def get_single(self, id, **kwargs):
'''Find a entry with `id` and return in JSON format.'''
query = self._query.filter_by(id=id)
return self._jsonify_single(query)
def get_list(self, query, **kwargs):
'''Return filtered/sorted entry list'''
if request.args.get('sort'):
key = request.args.get('sort')
order = request.args.get('order', 'desc')
query = self._sort(query, key, order)
return self._jsonify_list(query)
def _sort(self, query, key, order):
if not hasattr(self.model, key):
raise Exception('unknown sorting criteria: %s' % key)
if order not in ['asc', 'desc']:
raise Exception('unknown sorting order: %s' % order)
key = getattr(self.model, key)
if order == 'desc':
key = key.desc()
return query.order_by(key)
def _search(self):
if not self.model or not hasattr(self.model, 'name'):
raise NotImplementedError()
q = request.args.get('q', '')
s = request.args.get('s', '')
return self._query.filter(\
self.model.name.like(u'%{q}%'.format(q=q)),
self.model.sponsor.like(u'%{s}%'.format(s=s)))
@property
def _query(self):
if not self.model:
raise NotImplementedError()
return BaseQuery(self.model, self.model.query.session)
def _to_dict(self, entity):
return entity.to_dict(projection=request.args.get('projection'))
def _jsonify_single(self, query):
'''Compose a `single`-typed response data.'''
entity = query.first()
if not entity:
abort(404)
result = self._to_dict(entity)
result['kind'] = self.model.kind('single')
return jsonify(result)
def _jsonify_list(self, query):
'''Compose a `list`/`search`-typed response data.'''
page = query.paginate(int(request.args.get('page', 1)),
int(request.args.get('per_page', 20)))
result = {}
result['kind'] = self.model.kind('list')
result['items'] = [self._to_dict(entity) for entity in page.items]
if page.has_prev:
result['prev_page'] = page.prev_num
if page.has_next:
result['next_page'] = page.next_num
return jsonify(result)
|
Python
| 0.000004
|
@@ -2576,24 +2576,85 @@
et('q', '')%0A
+%0A if self.model.__module__=='popong_models.bill':%0A
s =
@@ -2679,32 +2679,36 @@
s', '')%0A
+
+
return self._que
@@ -2719,16 +2719,20 @@
ilter(%5C%0A
+
@@ -2799,32 +2799,36 @@
+
self.model.spons
@@ -2857,16 +2857,138 @@
t(s=s)))
+%0A else:%0A return self._query.filter(%5C%0A self.model.name.like(u'%25%7Bq%7D%25'.format(q=q)))
%0A%0A @p
|
17812e091bcf2a0bc8ad54fc76bb8c9bfeb97148
|
Clean up
|
ab_state.py
|
ab_state.py
|
#!/usr/bin/python
import board
import game_state
import alpha_beta
import nearby_filter
import game
import gui
from board_strip import *
from length_counter import *
from take_counter import *
import pdb
CAPTURE_SCORE_BASE = 120 ** 3
TAKE_SCORE_BASE = 190
class ABState():
""" Bridge for state, for use by alpha_beta code """
def __init__(self, parent=None):
if parent == None:
self.black_lines = LengthCounter()
self.white_lines = LengthCounter()
self.takes = [0, 0, 0]
self.search_filter = None
else:
self.black_lines = LengthCounter(parent.black_lines) # TODO: clone method
self.white_lines = LengthCounter(parent.white_lines)
self.takes = parent.takes[:]
self.search_filter = parent.search_filter.clone()
def get_black_line_counts(self):
return self.black_lines
def get_white_line_counts(self):
return self.white_lines
def get_iter(self):
return self.search_filter
def set_state(self, s):
self.state = s
self.board().add_observer(self)
if self.search_filter is None:
self.search_filter = nearby_filter.NearbyFilter(self.board())
# TODO: Remove us as an observer from previous self.state?
def to_move_colour(self):
return self.state.to_move_colour()
def to_move(self):
""" This is only to keep the AB code unchanged; the value is unused. """
return None
def __repr__(self):
ret = str(self.black_lines) + str(self.white_lines) + self.state.__repr__()
return ret
def search_player_colour(self):
""" The AI player who is performing the search """
game = self.game()
return game.to_move_colour()
def game(self):
return self.state.game
# TODO: Cache stuff somehow?
def utility(self, unused=None):
""" The search_colour is the colour of the AI player doing the search """
# The turn_colour is the colour of the player to move at the leaf state
# of the search.
turn_colour = self.to_move_colour()
search_colour = self.search_player_colour()
black_contrib = self.utility_contrib(self.black_lines, BLACK)
white_contrib = self.utility_contrib(self.white_lines, WHITE)
# Having the move is worth a lot.
if turn_colour == BLACK:
black_contrib *= 100
else:
white_contrib *= 100
#print "B/W contrib: %s, %s, %s" % (black_contrib, white_contrib, self)
if search_colour == BLACK:
return black_contrib - white_contrib
else:
return white_contrib - black_contrib
def utility_contrib(self, lines, colour):
# Check for a win first
# TODO: check rules
captured = self.state.get_all_captured()
if captured[colour] >= 10:
return alpha_beta.infinity
if lines[4] > 0:
return alpha_beta.infinity
# No win by "colour" found, fudge up a score
score = 0
for i in range(len(lines)):
score *= 100
rev = 4 - i
score += lines[rev]
cc = self.captured_contrib(captured[colour])
score += cc
tc = self.take_contrib(self.takes[colour])
score += tc
#print "black: %s, white: %s, score: %s" % (self.black_lines, self.white_lines, \
# score)
return score
def captured_contrib(self, captures):
""" TODO captures become increasingly important as we approach 5 """
# TODO: Use rules
contrib = captures * CAPTURE_SCORE_BASE
return contrib
def take_contrib(self, takes):
""" TODO takes become increasingly important as we approach 5 captures """
# TODO: Use rules
contrib = takes * TAKE_SCORE_BASE
return contrib
def board(self):
return self.state.board
def before_set_occ(self, pos, colour):
self._set_or_reset_occs(pos, -1)
def after_set_occ(self, pos, colour):
self._set_or_reset_occs(pos, 1)
# Update the move filtering
if colour == EMPTY:
self.search_filter.capture(pos)
else:
self.search_filter.move(pos)
def _set_or_reset_occs(self, pos, inc):
# update substrips
brd = self.board()
for ds in brd.get_direction_strips():
# Keep track of the lengths of lines that can form 5
# in a row
brd_size = brd.get_size()
ca = CandidateAccumulator() # TEMP HACK
bs, s_num = ds.get_strip(pos)
ind = ds.get_index(pos)
strip_min, strip_max = ds.get_bounds(s_num, brd_size)
# These are the absolute indices that bound the strip
# we want to use to adjust length stats.
min_ind = max(strip_min, ind-4) # TODO: constants
max_ind = min(ind+4, strip_max) # inclusive
process_substrips(bs, min_ind, max_ind,
ca, self.black_lines, self.white_lines, inc)
# TODO: brd_size may need changing due to some diagonal captures?
process_takes(bs, ind, brd_size, self.takes, inc)
def create_state(self, move_pos):
ab_child = ABState(self)
# clone the base level state object
base_child = game_state.GameState(self.state.game, self.state)
# connect the two (including move hook)
ab_child.set_state(base_child)
# make the move for the base (which updates ab_child)
base_child.make_move(move_pos)
return ab_child
def terminal(self):
return self.state.get_won_by() != game.EMPTY
'''
class ABGame():
""" This class acts as a bridge between the AlphaBeta code and my code """
def __init__(self, base_game):
s = self.current_state = ABState()
s.set_state(base_game.current_state)
self.base_game = base_game
def to_move(self, state=None):
if state is None:
state = self.current_state
return state.to_move()
def utility(self, state, player):
return state.utility(player)
# TODO: unit test
def successors(self, state):
pos_iter = state.get_iter()
for pos in pos_iter.get_iter():
# create a AB_State for each possible move from state
try:
succ = state.create_state(pos)
yield gui.MoveAction(pos), succ
except game_state.IllegalMoveException:
# Ignore these
pass
def terminal_test(self, state):
return state.terminal()
'''
|
Python
| 0.000002
|
@@ -5004,16 +5004,89 @@
lusive%0A%0A
+ length_counters = %5BNone, self.black_lines, self.white_lines%5D%0A
@@ -5158,41 +5158,22 @@
ca,
-self.black_lines, self.white_line
+length_counter
s, i
@@ -5737,32 +5737,32 @@
terminal(self):%0A
+
return s
@@ -5803,957 +5803,4 @@
TY%0A%0A
-'''%0Aclass ABGame():%0A %22%22%22 This class acts as a bridge between the AlphaBeta code and my code %22%22%22%0A def __init__(self, base_game):%0A s = self.current_state = ABState()%0A s.set_state(base_game.current_state)%0A self.base_game = base_game%0A%0A def to_move(self, state=None):%0A if state is None:%0A state = self.current_state%0A return state.to_move()%0A%0A def utility(self, state, player):%0A return state.utility(player)%0A%0A # TODO: unit test%0A def successors(self, state):%0A pos_iter = state.get_iter()%0A for pos in pos_iter.get_iter():%0A # create a AB_State for each possible move from state%0A try:%0A succ = state.create_state(pos)%0A yield gui.MoveAction(pos), succ%0A except game_state.IllegalMoveException:%0A # Ignore these%0A pass%0A%0A def terminal_test(self, state):%0A return state.terminal()%0A'''%0A%0A
|
b9e12e6bb1d4d4cdb337cbf3d3cd7a41f57b4d24
|
Use a more standard RPM query format
|
JsonStats/FetchStats/Plugins/RPM.py
|
JsonStats/FetchStats/Plugins/RPM.py
|
import datetime
from JsonStats.FetchStats import Fetcher
class RPM(Fetcher):
def __init__(self):
"""
        Returns an rpm manifest (all rpms installed on the system).
**Note**: This takes more than a few seconds!!
"""
self.context = 'rpm'
self._load_data()
def _load_data(self):
self._refresh_time = datetime.datetime.utcnow()
self._rpms = {}
cmd = 'rpm -qa --queryformat "%{NAME} %{VERSION}\n"'
try:
for line in self._exec(cmd).split('\n')[:-1]:
(rpm_name, rpm_version) = line.split()
self._rpms[rpm_name] = rpm_version
self._loaded(True)
except Exception, e:
self._loaded(False, str(e))
def dump(self):
# poor mans cache, refresh cache in an hour
if (datetime.datetime.utcnow() -
datetime.timedelta(minutes=1)) > self._refresh_time:
self._load_data()
return self._rpms
def dump_json(self):
return self.json.dumps(self.dump())
|
Python
| 0.999989
|
@@ -462,16 +462,35 @@
VERSION%7D
+-%25%7BRELEASE%7D.%25%7BARCH%7D
%5Cn%22'%0A%0A
|
93e40e791153ee07dad3410388e662de99efcbb0
|
fix goodrain run error
|
app/blog.py
|
app/blog.py
|
#!/usr/bin/env python3
# coding=utf-8
"""
@version:0.1
@author: ysicing
@file: blog/blog.py
@time: 2017/9/10 22:46
"""
from flask_frozen import Freezer
from flask_flatpages import FlatPages
from flask import current_app as app
flatpages = FlatPages(app)
freezer = Freezer(app)
class Post(object):
def __init__(self, ext, post_dir):
self.ext = ext
self.post_dir = post_dir
def get_posts_list(self):
try:
posts = [post for post in flatpages if post.path.startwith(self.post_dir)]
except:
posts = [post for post in flatpages if post.path]
try:
posts.sort(key=lambda item: item['date'], reverse=True)
except:
posts = sorted(posts, reverse=True, key=lambda post: post['date'])
return posts
def recent_post(self):
posts = self.get_posts_list()
if len(posts) >= 10:
recent_post = posts[:10]
else:
recent_post = posts
return recent_post
def get_tags(self):
"""
:return: all tag info
"""
dkey = {}
for post in self.get_posts_list():
for i in post.__getitem__('tags').strip().split():
dkey.setdefault(i.lower(), 0)
dkey[i.lower()] += 1
return dkey
def get_tag(self, tag):
"""
:param tag:
        :return: list of posts related to the tag
"""
tag = tag.lower()
tags = {}
for post in self.get_posts_list():
for itag in post.__getitem__('tags').strip().split():
if itag == tag:
tags[post.path] = {'title': post.__getitem__('title'), 'date': post.__getitem__('date'), 'blog': post.path}
return tags
def get_post_info(self, postname):
post = flatpages.get_or_404(postname)
postindex = self.get_posts_list().index(post)
postpre = None if postindex == 0 else self.get_posts_list()[postindex - 1]
postnex = None if postindex == len(self.get_posts_list()) -1 else self.get_posts_list()[postindex + 1]
post_info = {'post': post, 'postpre': postpre, 'postnex': postnex}
return post_info
|
Python
| 0.000011
|
@@ -1403,32 +1403,91 @@
%E7%AB%A0%E5%88%97%E8%A1%A8%0A %22%22%22%0A
+ if isinstance(tag, int):%0A tag = str(tag)
%0A tag = t
|
9ec8aa9fbb9b8c6656e5fe8920787f2c03a93683
|
create Cell class and method add_neighbor that returns a list of neighbor positions
|
app/life.py
|
app/life.py
|
Python
| 0
|
@@ -0,0 +1,484 @@
+class Cell(object):%0A%0A def __init__(self, pos):%0A self.neighbors = 0%0A self.neighbor_list = %5B%5D%0A self.pos = pos%0A self.posx = pos%5B0%5D%0A self.posy = pos%5B1%5D%0A%0A def add_neighbors(self):%0A self.neighbor_list = %5B%5D%0A%0A for x in xrange(self.posx-1, self.posx+1):%0A for y in xrange(self.posy-1, self.posy+1):%0A self.neighbor_list.append((x,y))%0A%0A self.neighbor_list.remove(self.pos)%0A%0A return self.neighbor_list%0A
|
|
777eaf01586b330b976c2691bf73b9a2053ff978
|
Store real non-stemmed texts
|
app/main.py
|
app/main.py
|
from flask import *
import collect_hs
import collections
import nltk
import numpy
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.decomposition import LatentDirichletAllocation
app = Flask(__name__)
## common constructs
stem = nltk.stem.snowball.SnowballStemmer('finnish')
@app.route("/")
def index():
return send_from_directory( 'static' , 'index.html' )
@app.route('/topicmodel', methods=['POST'] )
def analyze():
path1 = request.form['url'].split('/')[-1]
path = '1296808743968/' + path1
comments = collect_hs.comment( path )
_texts = []
for c in comments:
text = nltk.word_tokenize( c['text'] )
text = map( lambda x: stem.stem( x ) , text )
_texts.append( ' '.join( text ) )
tf_vectorizer = CountVectorizer(
max_df=0.95,
min_df=2,
max_features= 10000 )
texts = tf_vectorizer.fit_transform( _texts )
## test between 2 and 20 topics
topics = {}
for k in range(2, 21):
model = LatentDirichletAllocation(
n_topics= k ,
max_iter=5,
learning_method='online',
learning_offset=50.,
random_state=0
)
fit = model.fit( texts )
ll = model.score( texts )
topics[ ll ] = fit
topic = max( topics.keys() )
ret = collections.defaultdict( list )
## ugly, rewrite some day
new_topics = topics[ topic ].transform( texts )
for i, topic in enumerate( new_topics ):
topic = numpy.argmax( topic )
text = _texts[ i ].encode('utf8')
print text
ret[ topic ].append( text )
return jsonify( ret )
if __name__ == "__main__":
app.run( debug = True)
|
Python
| 0.000005
|
@@ -590,16 +590,31 @@
xts = %5B%5D
+%0A texts = %5B%5D
%0A%0A fo
@@ -630,16 +630,53 @@
mments:%0A
+%0A _texts.append( c%5B'text'%5D )%0A%0A
@@ -768,33 +768,32 @@
text )%0A
-_
texts.append( '
|
1056c3f489b162d77b6c117fad2b45bfa06beee1
|
Revert "Added a post view"
|
app/urls.py
|
app/urls.py
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.conf import settings
#from . import views
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'app.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', 'app.views.splash', name='splash'),
url(r'^feed', 'app.views.feed', name='feed'),
url(r'^about', 'app.views.about', name='about'),
url(r'^explore', 'app.views.explore', name='explore'),
url(r'^profile_picture', 'app.views.profile_picture', name='profile_picture'),
url(r'^dashboard', 'app.views.dashboard', name='dashboard'),
url(r'^login', 'app.views.login', name='login'),
url(r'^logout', 'app.views.logout', name='logout'),
url(r'^temp', 'app.views.temp', name='temp'), #delete eventually
url(r'^posts', 'app.views.posts', name='posts'),
url(r'^admin/', include(admin.site.urls))
)
|
Python
| 0
|
@@ -133,18 +133,16 @@
views%0A%0A
-%0A%0A
urlpatte
@@ -793,80 +793,9 @@
p'),
- #delete eventually%0A url(r'%5Eposts', 'app.views.posts', name='posts'),
+%0A
%0A
@@ -839,9 +839,8 @@
rls))%0A)%0A
-%0A
|
1165c923145be18d40fda1fc4303cac3e1613078
|
Update cached_function wrapper to set qualname instead of name
|
app/util.py
|
app/util.py
|
# Various utility functions
import os
SHOULD_CACHE = os.environ.get('ENV', 'development') == 'production'
def cached_function(func):
data = {}
def wrapper(*args):
if not SHOULD_CACHE:
return func(*args)
cache_key = ' '.join([str(x) for x in args])
if cache_key not in data:
data[cache_key] = func(*args)
return data[cache_key]
wrapper.__name__ = func.__name__
return wrapper
|
Python
| 0
|
@@ -404,16 +404,20 @@
apper.__
+qual
name__ =
@@ -424,16 +424,20 @@
func.__
+qual
name__%0A
|
48f517b0b6639bcecd5bac6e2c2e750441ffc347
|
reset compass fully
|
nodes/compass.py
|
nodes/compass.py
|
#!/usr/bin/env python
"""
Copyright (c) 2012, Michael Koval
Copyright 2012, Cody Schafer <cpschafer --- gmail.com>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import roslib; roslib.load_manifest('fieldforce_tcm')
import rospy
import math
from fieldforce_tcm import Calibration, Component, Configuration, FieldforceTCM, Orientation, TimeoutException
from geometry_msgs.msg import Quaternion, Vector3
from std_msgs.msg import Header
from sensor_msgs.msg import Imu
from tf import transformations
inf = float('+inf')
var = 0.034906585 ** 2
def start_compass(compass):
#compass.setConfig(Configuration.kMountingRef, Orientation.Y_UP_180)
compass.setDataComponents([
Component.kHeading,
Component.kPAngle,
Component.kRAngle,
Component.kDistortion,
Component.kCalStatus
])
compass.startStreaming()
def main():
rospy.init_node('fieldforce_tcm')
pub = rospy.Publisher('compass', Imu)
path = rospy.get_param('~path', '/dev/ttyUSB0')
baud = rospy.get_param('~baud', 38400)
frame = rospy.get_param('~frame_id', '/base_link')
cov = rospy.get_param('~covariance', [
inf, 0.0, 0.0,
0.0, inf, 0.0,
0.0, 0.0, var
])
compass = FieldforceTCM(path, baud)
start_compass(compass)
warn_distortion = False
warn_calibration = False
timeout_ct = 0
try:
while True:
try:
datum = compass.readData(2)
except TimeoutException as e:
                rospy.logwarn('Wait for data timed out, resetting compass.')
timeout_ct += 1
compass.stopStreaming()
start_compass(compass)
continue
now = rospy.get_rostime()
if datum.Distortion and not warn_distortion:
rospy.logwarn('Magnometer has exceeded its linear range.')
warn_distortion = True
if not datum.CalStatus and not warn_calibration:
rospy.logwarn('Compass is not calibrated.')
warn_calibration = True
ax = math.radians(datum.RAngle)
ay = math.radians(datum.PAngle)
az = math.radians(datum.Heading)
quaternion = transformations.quaternion_from_euler(ax, ay, az)
pub.publish(
header = Header(stamp=now, frame_id=frame),
orientation = Quaternion(*quaternion),
orientation_covariance = [ 0.0 ] * 9,
angular_velocity = Vector3(0, 0, 0),
angular_velocity_covariance = [ -1, 0, 0, 0, 0, 0, 0, 0, 0 ],
linear_acceleration = Vector3(0, 0, 0),
linear_acceleration_covariance = [ -1, 0, 0, 0, 0, 0, 0, 0, 0 ]
)
except Exception as e:
compass.stopStreaming()
compass.close()
raise e
if __name__ == '__main__':
try:
main()
except rospy.ROSInterruptException:
pass
# vim: set et sw=4 ts=4:
|
Python
| 0
|
@@ -1860,16 +1860,38 @@
UP_180)%0A
+ compass.stopAll()%0A
comp
@@ -2850,48 +2850,8 @@
= 1%0A
- compass.stopStreaming()%0A
|
7e8e7ff7438bad167a64beeda0d4e66562886841
|
Improve JSON rendering for invoice item [WAL-2168]
|
src/waldur_mastermind/invoices/admin.py
|
src/waldur_mastermind/invoices/admin.py
|
from __future__ import unicode_literals
from django.conf.urls import url
from django.contrib import admin
from django.forms import ModelForm, ModelChoiceField
from django.http import HttpResponse
from django.shortcuts import redirect
from django.urls import reverse
from django.utils.html import format_html
from django.utils.translation import ugettext_lazy as _
from waldur_core.core import admin as core_admin
from waldur_core.core.admin_filters import RelatedOnlyDropdownFilter
from waldur_mastermind.packages import models as package_models
from . import executors, models, tasks
class InvoiceItemInline(core_admin.UpdateOnlyModelAdmin, admin.TabularInline):
model = models.InvoiceItem
readonly_fields = ('name', 'price', 'unit_price', 'unit', 'start', 'end',
'project_name', 'project_uuid', 'product_code', 'article_code')
exclude = ('project',)
class GenericItemInline(InvoiceItemInline):
model = models.GenericInvoiceItem
readonly_fields = InvoiceItemInline.readonly_fields + ('details', 'quantity')
exclude = InvoiceItemInline.exclude + ('content_type', 'object_id')
class InvoiceAdmin(core_admin.ExtraActionsMixin,
core_admin.UpdateOnlyModelAdmin,
admin.ModelAdmin):
inlines = [GenericItemInline]
fields = ['tax_percent', 'invoice_date', 'customer', 'state', 'total', 'year', 'month', 'pdf_file']
readonly_fields = ('customer', 'total', 'year', 'month', 'pdf_file')
list_display = ('customer', 'total', 'year', 'month', 'state')
list_filter = ('state', 'customer')
search_fields = ('customer', 'uuid')
actions = ('create_pdf',)
class CreatePDFAction(core_admin.ExecutorAdminAction):
executor = executors.InvoicePDFCreateExecutor
short_description = _('Create PDF')
create_pdf = CreatePDFAction()
def get_urls(self):
my_urls = [
url(r'^(.+)/change/pdf_file/$', self.admin_site.admin_view(self.pdf_file_view)),
]
return my_urls + super(InvoiceAdmin, self).get_urls()
def pdf_file_view(self, request, pk=None):
invoice = models.Invoice.objects.get(id=pk)
file_response = HttpResponse(invoice.file, content_type='application/pdf')
filename = invoice.get_filename()
file_response['Content-Disposition'] = 'attachment; filename="{filename}"'.format(filename=filename)
return file_response
def pdf_file(self, obj):
if not obj.file:
return ''
return format_html('<a href="./pdf_file">download</a>')
pdf_file.short_description = "File"
def get_extra_actions(self):
return [
self.send_invoice_report,
self.update_current_cost,
self.create_pdf_for_all,
]
def send_invoice_report(self, request):
tasks.send_invoice_report.delay()
message = _('Invoice report task has been scheduled')
self.message_user(request, message)
return redirect(reverse('admin:invoices_invoice_changelist'))
send_invoice_report.short_description = _('Send invoice report as CSV to email')
def update_current_cost(self, request):
tasks.update_invoices_current_cost.delay()
message = _('Task has been scheduled.')
self.message_user(request, message)
return redirect(reverse('admin:invoices_invoice_changelist'))
send_invoice_report.short_description = _('Update current cost for invoices')
def create_pdf_for_all(self, request):
tasks.create_pdf_for_all_invoices.delay()
message = _('PDF creation has been scheduled')
self.message_user(request, message)
return redirect(reverse('admin:invoices_invoice_changelist'))
create_pdf_for_all.name = _('Create PDF for all invoices')
class PackageChoiceField(ModelChoiceField):
def label_from_instance(self, obj):
return '%s > %s > %s' % (
obj.tenant.service_project_link.project.customer,
obj.tenant.service_project_link.project.name,
obj.tenant.name
)
class ServiceDowntimeForm(ModelForm):
package = PackageChoiceField(
queryset=package_models.OpenStackPackage.objects.order_by(
'tenant__service_project_link__project__customer__name',
'tenant__service_project_link__project__name',
'tenant__name',
)
)
class ServiceDowntimeAdmin(admin.ModelAdmin):
list_display = ('get_customer', 'get_project', 'get_name', 'start', 'end')
list_display_links = ('get_name',)
list_filter = (
('package__tenant__service_project_link__project__customer', RelatedOnlyDropdownFilter),
('package__tenant__service_project_link__project', RelatedOnlyDropdownFilter),
)
search_fields = ('package__tenant__name',)
date_hierarchy = 'start'
form = ServiceDowntimeForm
def get_readonly_fields(self, request, obj=None):
# Downtime record is protected from modifications
if obj is not None:
return self.readonly_fields + ('start', 'end', 'package')
return self.readonly_fields
def get_customer(self, downtime):
return downtime.package.tenant.service_project_link.project.customer
get_customer.short_description = _('Organization')
get_customer.admin_order_field = 'package__tenant__service_project_link__project__customer'
def get_project(self, downtime):
return downtime.package.tenant.service_project_link.project
get_project.short_description = _('Project')
get_project.admin_order_field = 'package__tenant__service_project_link__project'
def get_name(self, downtime):
return downtime.package.tenant.name
get_name.short_description = _('Resource')
get_name.admin_order_field = 'package__tenant__name'
admin.site.register(models.Invoice, InvoiceAdmin)
admin.site.register(models.ServiceDowntime, ServiceDowntimeAdmin)
|
Python
| 0
|
@@ -589,23 +589,23 @@
%0A%0Aclass
-Invo
+Gener
ic
-e
ItemInli
@@ -677,24 +677,31 @@
el = models.
+Generic
InvoiceItem%0A
@@ -723,16 +723,25 @@
elds = (
+%0A
'name',
@@ -787,31 +787,16 @@
'end',%0A
-
@@ -861,272 +861,299 @@
ode'
-)%0A exclude = ('project',)%0A%0A%0Aclass GenericItemInline(InvoiceItemInline):%0A model = models.GenericInvoiceItem%0A readonly_fields = InvoiceItemInline.readonly_fields + ('details', 'quantity'
+,%0A 'format_details', 'quantity'%0A )%0A exclude = ('details', 'project', 'content_type', 'object_id')%0A%0A def format_details(self, obj):%0A return core_admin.format_json_field(obj.details
)%0A
+%0A
-exclude = InvoiceItemInline.exclude + ('content_type', 'object_id
+format_details.allow_tags = True%0A format_details.short_description = _('Details
')%0A%0A
|
a6ad8491e8e8625acb3eee0bf703848a94f1cad8
|
Use title-case header name to request value
|
src/weitersager/http.py
|
src/weitersager/http.py
|
"""
weitersager.http
~~~~~~~~~~~~~~~~
HTTP server to receive messages
:Copyright: 2007-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from dataclasses import dataclass
from http import HTTPStatus
from http.server import BaseHTTPRequestHandler, HTTPServer
import json
import sys
from typing import Optional
from .config import HttpConfig
from .signals import message_received
from .util import log, start_thread
@dataclass(frozen=True)
class Message:
channel: str
text: str
def parse_json_message(json_data: str) -> Message:
"""Extract message from JSON."""
data = json.loads(json_data)
channel = data['channel']
text = data['text']
return Message(channel=channel, text=text)
class RequestHandler(BaseHTTPRequestHandler):
"""Handler for messages submitted via HTTP."""
def do_POST(self) -> None:
valid_api_tokens = self.server.api_tokens
if valid_api_tokens:
api_token = self._get_api_token()
if not api_token:
self.send_response(HTTPStatus.UNAUTHORIZED)
self.end_headers()
return
if api_token not in valid_api_tokens:
self.send_response(HTTPStatus.FORBIDDEN)
self.end_headers()
return
try:
content_length = int(self.headers.get('Content-Length', 0))
data = self.rfile.read(content_length).decode('utf-8')
message = parse_json_message(data)
except (KeyError, ValueError):
log(f'Invalid message received from {self.address_string()}.')
self.send_error(HTTPStatus.BAD_REQUEST)
return
self.send_response(HTTPStatus.ACCEPTED)
self.end_headers()
message_received.send(
channel_name=message.channel,
text=message.text,
source_address=self.client_address,
)
def _get_api_token(self) -> Optional[str]:
authorization_value = self.headers.get('authorization')
if not authorization_value:
return None
prefix = 'Token '
if not authorization_value.startswith(prefix):
return None
return authorization_value[len(prefix) :]
def version_string(self) -> str:
"""Return custom server version string."""
return 'Weitersager'
class ReceiveServer(HTTPServer):
"""HTTP server that waits for messages."""
def __init__(self, config: HttpConfig) -> None:
address = (config.host, config.port)
HTTPServer.__init__(self, address, RequestHandler)
log('Listening for HTTP requests on {}:{:d}.', *address)
self.api_tokens = config.api_tokens
def start_receive_server(config: HttpConfig) -> None:
"""Start in a separate thread."""
try:
receiver = ReceiveServer(config)
except OSError as e:
sys.stderr.write(f'Error {e.errno:d}: {e.strerror}\n')
sys.stderr.write(
f'Probably no permission to open port {config.port}. '
'Try to specify a port number above 1,024 (or even '
'4,096) and up to 65,535.\n'
)
sys.exit(1)
thread_name = receiver.__class__.__name__
start_thread(receiver.serve_forever, thread_name)
|
Python
| 0
|
@@ -2019,17 +2019,17 @@
rs.get('
-a
+A
uthoriza
|
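The one-character change requests the header under its canonical `Authorization` spelling. `BaseHTTPRequestHandler.headers` is an `email.message.Message`, whose lookups are already case-insensitive, so this reads as a readability fix rather than a behaviour change; a standard-library check of the extraction logic:

from email.message import Message
from typing import Optional


def get_api_token(headers: Message) -> Optional[str]:
    """Extract the token from an 'Authorization: Token <value>' header."""
    value = headers.get('Authorization')
    prefix = 'Token '
    if not value or not value.startswith(prefix):
        return None
    return value[len(prefix):]


headers = Message()
headers['authorization'] = 'Token s3cr3t'  # lower-case on purpose
assert get_api_token(headers) == 's3cr3t'  # the lookup still matches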
d39eb13f555daa429838b76de2f4088a46f36237
|
tweak `do`
|
amino/do.py
|
amino/do.py
|
from types import GeneratorType
from typing import TypeVar, Callable, Any, Generator, cast, Optional, Type
import functools
from amino.tc.base import F
from amino.tc.monad import Monad
A = TypeVar('A')
B = TypeVar('B')
G = TypeVar('G', bound=F)
Do = Generator
def untyped_do(f: Callable[..., Generator[G, B, None]]) -> Callable[..., G]:
@functools.wraps(f)
def do_loop(*a: Any, **kw: Any) -> F[B]:
itr = f(*a, **kw)
if not isinstance(itr, GeneratorType):
raise Exception(f'function `{f.__qualname__}` decorated with `do` does not produce a generator')
c: Optional[F] = None
m: Optional[Monad[F]] = None
def send(val: B) -> F[B]:
nonlocal c, m
try:
c = itr.send(val)
if m is None:
m = Monad.fatal_for(c)
return c.flat_map(send)
except StopIteration:
return m.pure(val)
return send(cast(B, None))
return do_loop
def tdo(tpe: Type[A]) -> Callable[[Callable[..., Generator]], Callable[..., A]]:
def deco(f: Callable[..., Generator]) -> Callable[..., A]:
return cast(Callable[[Callable[..., Generator]], Callable[..., A]], untyped_do)(f)
return deco
do = tdo
__all__ = ('do', 'F', 'tdo', 'untyped_do', 'Do')
|
Python
| 0.000001
|
@@ -88,18 +88,8 @@
ast,
- Optional,
Typ
@@ -587,29 +587,29 @@
-c: Optional%5BF%5D =
+init = itr.send(
None
+)
%0A
@@ -618,35 +618,32 @@
m
-: Optional%5BMonad%5BF%5D%5D = None
+ = Monad.fatal_for(init)
%0A
@@ -677,34 +677,8 @@
B%5D:%0A
- nonlocal c, m%0A
@@ -710,11 +710,14 @@
-c =
+return
itr
@@ -730,106 +730,8 @@
val)
-%0A if m is None:%0A m = Monad.fatal_for(c)%0A return c
.fla
@@ -776,16 +776,43 @@
ration:%0A
+ nonlocal m%0A
@@ -857,26 +857,26 @@
urn
-send(cast(B, None)
+init.flat_map(send
)%0A
@@ -898,17 +898,16 @@
p%0A%0A%0Adef
-t
do(tpe:
@@ -1149,14 +1149,14 @@
co%0A%0A
+t
do =
-t
do%0A%0A
|
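The trampoline that `untyped_do` builds works by feeding the result of each `yield`ed monadic value back into the generator via `flat_map`. The same machinery stripped to its core, over a toy Maybe class (illustrative only, not amino's API):

import functools


class Maybe:
    def __init__(self, value, empty=False):
        self.value, self.empty = value, empty

    def flat_map(self, f):
        return self if self.empty else f(self.value)

    @staticmethod
    def pure(value):
        return Maybe(value)


def do(f):
    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        gen = f(*args, **kwargs)

        def send(val):
            try:
                # feed the previous result in, get the next monadic step out
                return gen.send(val).flat_map(send)
            except StopIteration:
                return Maybe.pure(val)

        return send(None)
    return wrapper


@do
def add_two(ma, mb):
    a = yield ma
    b = yield mb
    yield Maybe.pure(a + b)


print(add_two(Maybe(1), Maybe(2)).value)                  # 3
print(add_two(Maybe(1), Maybe(None, empty=True)).empty)   # True: short-circuits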
9d52ed3867192ec47d7c218fa9bb9d4e5d16f1cc
|
Update src/compas_rhino/interop/primitives.py
|
src/compas_rhino/interop/primitives.py
|
src/compas_rhino/interop/primitives.py
|
from compas.geometry import Point
from compas.geometry import Vector
from compas.geometry import Line
from compas.geometry import Plane
from compas.geometry import Frame
from compas.geometry import Circle
from compas.geometry import Ellipse
from compas.geometry import Polyline
from compas.geometry import Polygon
from Rhino.Geometry import Point3d
from Rhino.Geometry import Vector3d
from Rhino.Geometry import Line as RhinoLine
from Rhino.Geometry import Plane as RhinoPlane
from Rhino.Geometry import Circle as RhinoCircle
from Rhino.Geometry import Ellipse as RhinoEllipse
from Rhino.Geometry import Polyline as RhinoPolyline
def rhino_point_to_compas_point(point):
"""Convert a Rhino point to a COMPAS point.
Parameters
----------
point : :class:`Rhino.Geometry.Point3d`
Returns
-------
:class:`compas.geometry.Point`
"""
return Point(point.X, point.Y, point.Z)
def compas_point_to_rhino_point(point):
"""Convert a COMPAS point to a Rhino point.
Parameters
----------
point : :class:`compas.geometry.Point`
Returns
-------
:class:`Rhino.Geometry.Point3d`
"""
return Point3d(point.x, point.y, point.z)
def rhino_vector_to_compas_vector(vector):
"""Convert a Rhino vector to a COMPAS vector.
Parameters
----------
vector : :class:`Rhino.Geometry.Vector3d`
Returns
-------
:class:`compas.geometry.Vector`
"""
return Vector(vector.X, vector.Y, vector.Z)
def compas_vector_to_rhino_vector(vector):
"""Convert a COMPAS vector to a Rhino vector.
Parameters
----------
vector : :class:`compas.geometry.Vector`
Returns
-------
:class:`Rhino.Geometry.Vector3d`
"""
return Vector3d(vector.x, vector.y, vector.z)
def rhino_line_to_compas_line(line):
"""Convert a Rhino line to a COMPAS line.
Parameters
----------
line : :class:`Rhino.Geometry.Line`
Returns
-------
:class:`compas.geometry.Line`
"""
return Line(rhino_point_to_compas_point(line.From),
rhino_point_to_compas_point(line.To))
def compas_line_to_rhino_line(line):
"""Convert a COMPAS line to a Rhino line.
Parameters
----------
line : :class:`compas.geometry.Line`
Returns
-------
:class:`Rhino.Geometry.Line`
"""
return RhinoLine(compas_point_to_rhino_point(line.start),
compas_point_to_rhino_point(line.end))
def rhino_plane_to_compas_plane(plane):
"""Convert a Rhino plane to a COMPAS plane.
Parameters
----------
plane : :class:`Rhino.Geometry.Plane`
Returns
-------
:class:`compas.geometry.Plane`
"""
return Plane(rhino_point_to_compas_point(plane.Origin),
rhino_vector_to_compas_vector(plane.Normal))
def compas_plane_to_rhino_plane(plane):
"""Convert a COMPAS plane to a Rhino plane.
Parameters
----------
plane : :class:`compas.geometry.Plane`
Returns
-------
:class:`Rhino.Geometry.Plane`
"""
return RhinoPlane(compas_point_to_rhino_point(plane.point),
compas_vector_to_rhino_vector(plane.normal))
def rhino_plane_to_compas_frame(plane):
"""Convert a Rhino plane to a COMPAS frame.
Parameters
----------
plane : :class:`Rhino.Geometry.Plane`
Returns
-------
:class:`compas.geometry.Frame`
"""
return Frame(rhino_point_to_compas_point(plane.Origin),
rhino_vector_to_compas_vector(plane.XAxis),
rhino_vector_to_compas_vector(plane.YAxis))
def compas_frame_to_rhino_plane(frame):
"""Convert a COMPAS frame to a Rhino plane.
Parameters
----------
frame : :class:`compas.geometry.Frame`
Returns
-------
:class:`Rhino.Geometry.Plane`
"""
return RhinoPlane(compas_point_to_rhino_point(frame.point),
compas_vector_to_rhino_vector(frame.xaxis),
compas_vector_to_rhino_vector(frame.yaxis))
def rhino_circle_to_compas_circle(circle):
"""Convert a Rhino circle to a COMPAS circle.
Parameters
----------
circle : :class:`Rhino.Geometry.Circle`
Returns
-------
:class:`compas.geometry.Circle`
"""
return Circle(rhino_plane_to_compas_plane(circle.Plane), circle.Radius)
def compas_circle_to_rhino_circle(circle):
"""Convert a COMPAS circle to a Rhino circle.
Parameters
----------
circle : :class:`compas.geometry.Circle`
Returns
-------
:class:`Rhino.Geometry.Circle`
"""
return RhinoCircle(compas_plane_to_rhino_plane(circle.plane), circle.radius)
def rhino_ellipse_to_compas_ellipse(ellipse):
"""Convert a Rhino ellipse to a COMPAS ellipse.
Parameters
----------
ellipse : :class:`Rhino.Geometry.Ellipse`
Returns
-------
:class:`compas.geometry.Ellipse`
"""
return Ellipse(rhino_plane_to_compas_plane(ellipse.Plane), ellipse.major, ellipse.minor)
def compas_ellipse_to_rhino_ellipse(ellipse):
"""Convert a COMPAS ellipse to a Rhino ellipse.
Parameters
----------
ellipse : :class:`compas.geometry.Ellipse`
Returns
-------
:class:`Rhino.Geometry.Ellipse`
"""
return RhinoEllipse(compas_plane_to_rhino_plane(ellipse.plane), ellipse.major, ellipse.minor)
def rhino_polyline_to_compas_polyline(polyline):
"""Convert a Rhino polyline to a COMPAS polyline.
Parameters
----------
polyline : :class:`Rhino.Geometry.Polyline`
Returns
-------
:class:`compas.geometry.Polyline`
"""
return Polyline([rhino_point_to_compas_point(point) for point in polyline])
def compas_polyline_to_rhino_polyline(polyline):
"""Convert a COMPAS polyline to a Rhino polyline.
Parameters
----------
polyline : :class:`compas.geometry.Ellipse`
Returns
-------
    :class:`Rhino.Geometry.Polyline`
"""
return RhinoPolyline([compas_point_to_rhino_point(point) for point in polyline])
def rhino_polygon_to_compas_polygon(polygon):
"""Convert a Rhino polygon to a COMPAS polygon.
Parameters
----------
polygon : :class:`Rhino.Geometry.Polygon`
Returns
-------
    :class:`compas.geometry.Polygon`
"""
return Polygon([rhino_point_to_compas_point(point) for point in polygon])
def compas_polygon_to_rhino_polygon(polygon):
"""Convert a COMPAS polygon to a Rhino polygon.
Parameters
----------
    polygon : :class:`compas.geometry.Polygon`
Returns
-------
:class:`Rhino.Geometry.Ellipse`
"""
raise NotImplementedError
|
Python
| 0
|
@@ -5815,38 +5815,39 @@
compas.geometry.
-Ellips
+Polylin
e%60%0A%0A Returns%0A
|
2d1466b10f74b05c32b1c2efb560515cceba4869
|
Fix YML export titles
|
rbm2m/action/exporter.py
|
rbm2m/action/exporter.py
|
# -*- coding: utf-8 -*-
"""
Record export functions
"""
from __future__ import unicode_literals
import datetime
import logging
from sqlalchemy import func, or_
import jinja2
from jinja2.filters import do_truncate
from ..models import Scan, Record, Genre, RecordFlag, Image, scan_records
from . import user_settings, export_manager
from rbm2m.action import genre_manager
BATCH_SIZE = 10000
logger = logging.getLogger(__name__)
class Exporter(object):
"""
Base class for exports. Builds record sets for export.
"""
fmt = 'base'
def __init__(self, session):
self.session = session
self.settings = user_settings.UserSettings(session)
def log_export(self, ip, user_agent):
"""
Save export entry and emit log message
"""
expman = export_manager.ExportManager(self.session)
expdata = {
'user_agent': user_agent,
'ip': ip,
'format': self.fmt
}
exp = expman.from_dict(expdata)
message = "{} export #{} for {}@{} completed"
logger.info(message.format(self.fmt, exp.id, user_agent, ip))
return exp
def latest_scans(self):
"""
List of ids of last successful scans for each export-enabled genre
"""
subquery = (
self.session.query(Scan.id)
.filter(Scan.status == 'success')
.filter(Scan.genre_id == Genre.id)
.order_by(Scan.started_at.desc())
.limit(1)
.as_scalar()
)
rows = (
self.session.query(Genre.id, subquery)
.filter(subquery.isnot(None))
.filter(Genre.export_enabled.is_(True))
.all()
)
return [scan_id for genre_id, scan_id in rows]
def records(self, scan_ids):
"""
Returns all records from scans in scan_ids, excluding the ones with
'missing_images' and 'skip' status
:param scan_ids: list of scan ids
:return: generator producing Record values
"""
batch_no = 0
while True:
records = (
self.session.query(
scan_records.c.record_id.label('id'),
Record.artist, Record.title,
Record.label, Record.notes, Record.grade, Record.format,
Record.price, Record.genre_id,
Genre.title.label('genre_title'),
func.group_concat(Image.id, ' ').label('images'))
.join(Record, Record.id == scan_records.c.record_id)
.join(Genre, Genre.id == Record.genre_id)
.outerjoin(Image, Image.record_id == scan_records.c.record_id)
.outerjoin(RecordFlag,
RecordFlag.record_id == scan_records.c.record_id)
.filter(scan_records.c.scan_id.in_(scan_ids))
.filter(or_(
RecordFlag.name.is_(None),
~RecordFlag.name.in_(['skip', 'missing_images'])))
.order_by(scan_records.c.record_id)
.group_by(scan_records.c.record_id)
.offset(batch_no * BATCH_SIZE).limit(BATCH_SIZE).all())
if not records:
break
for row in records:
yield dict(zip(row.keys(), row))
batch_no += 1
def category_list(self):
"""
List of exported categories
"""
genman = genre_manager.GenreManager(self.session)
return genman.exported_list()
class YMLExporter(Exporter):
fmt = 'yml'
def __init__(self, session):
super(YMLExporter, self).__init__(session)
self.limit = self.export_limit()
env = jinja2.Environment()
template_string = self.settings['yml_description_template']['value']
self.description_template = env.from_string(template_string)
def export_limit(self):
lim = self.settings['yml_export_limit']['value']
return int(lim) if lim else False
def generation_date(self):
"""
Export set build date
"""
return datetime.datetime.utcnow()
def offers(self):
"""
Generates a sequence of offers for YML export
"""
scans = self.latest_scans()
for num, rec in enumerate(self.records(scans)):
if self.limit and num == self.limit:
break
yield self.make_offer(rec)
def make_offer(self, rec):
"""
Generates offer for YML from result row dictionary
"""
offer = rec.copy()
offer['title'] = format_title(rec['artist'], rec['title'], rec['format'])
offer['description'] = self.format_description(rec)
offer['price'] = self.make_yml_price(rec['price'])
offer['images'] = self.format_yml_images(rec['images'])
return offer
def format_description(self, rec):
"""
        Format lot description according to template
"""
return self.description_template.render(**rec)
def make_yml_price(self, price):
"""
Calculate lot price
"""
formula = self.settings['formula_yml']['value']
price = eval(formula, {'x': price})
return int(round(price))
def format_yml_images(self, image_ids):
"""
Accepts list of space-separated image ids, returns list of image paths
"""
rv = []
if not image_ids:
return rv
for img_id in image_ids.replace(' ', '').split(','):
rv.append(Image(id=img_id).make_filename('.jpg'))
return rv
class TableExporter(Exporter):
fmt = 'table'
def rows(self):
"""
Generates a sequence of rows for table export
"""
scans = self.latest_scans()
for num, rec in enumerate(self.records(scans)):
yield self.make_row(rec)
def make_row(self, rec):
"""
Make table row from query result row
"""
rec['price'] = self.make_price(rec['price'])
return rec
def make_price(self, price):
"""
Calculate record price according to formula
"""
formula = self.settings['formula_table']['value']
return int(round(eval(formula, {'x': price})))
def format_title(artist, title, fmt, max_length=50):
"""
Format offer title string according to format
{artist} - {title} {format}
truncating title if necessary
"""
title_maxlength = max_length - len("{} - {}".format(artist, fmt))
truncated_title = do_truncate(title, title_maxlength)
return '{} - {} {}'.format(artist, truncated_title, fmt)
|
Python
| 0.000002
|
@@ -4739,23 +4739,8 @@
tle(
-rec%5B'artist'%5D,
rec%5B
@@ -6436,24 +6436,16 @@
t_title(
-artist,
title, f
@@ -6464,16 +6464,16 @@
th=50):%0A
+
%22%22%22%0A
@@ -6537,19 +6537,8 @@
- %7Bartist%7D -
%7Bti
@@ -6640,39 +6640,16 @@
len(
-%22%7B%7D - %7B%7D%22.format(artist, fmt))
+fmt) - 1
%0A
@@ -6721,13 +6721,8 @@
'%7B%7D
- - %7B%7D
%7B%7D'
@@ -6729,24 +6729,16 @@
.format(
-artist,
truncate
|
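After this change the YML title is just `{title} {format}`, truncated to fit the length budget. The formatter in isolation, with a plain truncation helper standing in for jinja2's `do_truncate` (whose signature varies across Jinja2 versions):

def truncate(text, length, end='...'):
    # naive stand-in for jinja2.filters.do_truncate
    if len(text) <= length:
        return text
    return text[: max(length - len(end), 0)].rstrip() + end


def format_title(title, fmt, max_length=50):
    """Format an offer title as '{title} {format}', truncating if needed."""
    title_maxlength = max_length - len(fmt) - 1
    truncated_title = truncate(title, title_maxlength)
    return '{} {}'.format(truncated_title, fmt)


print(format_title('A Very Long Record Title ' * 3, 'LP'))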
90c79493f2b039b9bc3bea93dffe10cc17b08282
|
print overall result
|
test/mavsdk_tests/mavsdk_test_runner.py
|
test/mavsdk_tests/mavsdk_test_runner.py
|
#!/usr/bin/env python3
import argparse
import atexit
import datetime
import errno
import os
import psutil
import subprocess
import sys
test_matrix = [
{
"model": "iris",
"test_filter": "[multicopter]",
"timeout_min": 10,
},
# {
# "model": "standard_vtol",
# "test_filter": "[vtol]",
# "timeout_min": 10,
# }
]
class Runner:
def __init__(self, log_dir):
self.cmd = ""
self.cwd = None
self.args = []
self.env = {}
self.log_prefix = ""
self.log_dir = log_dir
def start(self, config):
if self.log_dir:
f = open(self.log_dir + os.path.sep +
"log-{}-{}-{}-{}.txt".format(
self.log_prefix,
config['model'],
config['test_filter'],
datetime.datetime.now().strftime("%Y-%m-%dT%H-%M-%SZ")
), 'w')
else:
f = sys.stdout
print("Running: {}".format(" ".join([self.cmd] + self.args)))
self.process = subprocess.Popen(
[self.cmd] + self.args,
cwd=self.cwd,
env=self.env,
stdout=f, stderr=f
)
atexit.register(self.stop)
def wait(self, timeout_min):
try:
return self.process.wait(timeout=timeout_min*60)
except subprocess.TimeoutExpired:
print("Timeout of {} min{} reached, stopping...".
format(timeout_min, "s" if timeout_min > 1 else ""))
self.stop()
print("stopped.")
return errno.ETIMEDOUT
def stop(self):
atexit.unregister(self.stop)
returncode = self.process.poll()
if returncode is not None:
return returncode
print("Sending terminate to {}".format(self.process.pid))
self.process.terminate()
try:
return self.process.wait(timeout=3)
except subprocess.TimeoutExpired:
print("Sending kill to {}".format(self.process.pid))
self.process.kill()
return self.process.returncode
class Px4Runner(Runner):
def __init__(self, workspace_dir, log_dir, speed_factor):
super().__init__(log_dir)
self.cmd = workspace_dir + "/build/px4_sitl_default/bin/px4"
self.cwd = workspace_dir + "/build/px4_sitl_default/tmp/rootfs"
self.args = [
workspace_dir + "/ROMFS/px4fmu_common",
"-s",
"etc/init.d-posix/rcS",
"-t",
workspace_dir + "/test_data",
"-d"
]
self.env = {"PATH": os.environ['PATH'],
"PX4_SIM_MODEL": "iris",
"PX4_SIM_SPEED_FACTOR": speed_factor}
self.log_prefix = "px4"
class GazeboRunner(Runner):
def __init__(self, workspace_dir, log_dir, speed_factor):
super().__init__(log_dir)
self.env = {"PATH": os.environ['PATH'],
"HOME": os.environ['HOME'],
"GAZEBO_PLUGIN_PATH":
workspace_dir + "/build/px4_sitl_default/build_gazebo",
"GAZEBO_MODEL_PATH":
workspace_dir + "/Tools/sitl_gazebo/models",
"PX4_SIM_SPEED_FACTOR": speed_factor}
self.cmd = "gzserver"
self.args = ["--verbose",
workspace_dir + "/Tools/sitl_gazebo/worlds/iris.world"]
self.log_prefix = "gazebo"
class TestRunner(Runner):
def __init__(self, workspace_dir, log_dir, config):
super().__init__(log_dir)
self.env = {"PATH": os.environ['PATH']}
self.cmd = workspace_dir + \
"/build/px4_sitl_default/test_mission_multicopter"
self.args = [config['test_filter']]
self.log_prefix = "test_runner"
def is_everything_ready():
result = True
for proc in psutil.process_iter(attrs=['name']):
if proc.info['name'] == 'gzserver':
print("gzserver process already running\n"
"run `killall gzserver` and try again")
result = False
elif proc.info['name'] == 'px4':
print("px4 process already running\n"
"run `killall px4` and try again")
result = False
return result
def main():
parser = argparse.ArgumentParser()
parser.add_argument("--log-dir",
help="Directory for log files, stdout if not provided")
parser.add_argument("--speed-factor", default=1,
help="How fast to run the simulation")
args = parser.parse_args()
if not is_everything_ready():
return
for group in test_matrix:
print("Running test group for '{}' with filter '{}'"
.format(group['model'], group['test_filter']))
px4_runner = Px4Runner(
os.getcwd(), args.log_dir, args.speed_factor)
px4_runner.start(group)
gazebo_runner = GazeboRunner(
os.getcwd(), args.log_dir, args.speed_factor)
gazebo_runner.start(group)
test_runner = TestRunner(os.getcwd(), args.log_dir, group)
test_runner.start(group)
returncode = test_runner.wait(group['timeout_min'])
print("Test exited with {}".format(returncode))
returncode = gazebo_runner.stop()
print("Gazebo exited with {}".format(returncode))
px4_runner.stop()
print("PX4 exited with {}".format(returncode))
if __name__ == '__main__':
main()
|
Python
| 0.000019
|
@@ -4733,16 +4733,44 @@
return%0A%0A
+ overall_success = True%0A%0A
for
@@ -5330,16 +5330,56 @@
_min'%5D)%0A
+ was_success = (returncode == 0)%0A
@@ -5390,27 +5390,22 @@
t(%22Test
-exited with
+group:
%7B%7D%22.for
@@ -5404,36 +5404,126 @@
%7B%7D%22.format(
-returncode))
+%22Success%22 if was_success else %22Fail%22))%0A if not was_success:%0A overall_success = False
%0A%0A re
@@ -5695,16 +5695,148 @@
code))%0A%0A
+ print(%22Overall result: %7B%7D%22.%0A format(%22SUCCESS%22 if overall_success else %22FAIL%22))%0A return 0 if overall_success else 1%0A%0A
%0Aif __na
|
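The added logic is a common test-runner pattern: fold per-group success into one flag and surface it as the process exit code. Reduced to its essentials:

import sys


def run_groups(groups):
    overall_success = True
    for name, returncode in groups:
        was_success = (returncode == 0)
        print("Test group: {}".format("Success" if was_success else "Fail"))
        if not was_success:
            overall_success = False
    print("Overall result: {}".format("SUCCESS" if overall_success else "FAIL"))
    return 0 if overall_success else 1


if __name__ == '__main__':
    # hypothetical (group, returncode) pairs standing in for real test runs
    sys.exit(run_groups([("multicopter", 0), ("vtol", 1)]))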
b613ac1b3bef1ff639bee16d874c55c4da6948f6
|
add comment
|
src/wormhole/cli/cli.py
|
src/wormhole/cli/cli.py
|
from __future__ import print_function
import os
import time
start = time.time()
import traceback
from textwrap import fill, dedent
from sys import stdout, stderr
from . import public_relay
from .. import __version__
from ..timing import DebugTiming
from ..errors import WrongPasswordError, WelcomeError, KeyFormatError
from twisted.internet.defer import inlineCallbacks, maybeDeferred
from twisted.internet.task import react
import click
top_import_finish = time.time()
class Config(object):
"""
Union of config options that we pass down to (sub) commands.
"""
def __init__(self):
# common options
self.timing = DebugTiming()
self.tor = None
self.listen = None
self.relay_url = u""
self.transit_helper = u""
self.cwd = os.getcwd()
# send/receive commands
self.code = None
self.code_length = 2
self.verify = False
self.hide_progress = False
self.dump_timing = False
self.stdout = stdout
self.stderr = stderr
self.zeromode = False
self.accept_file = None
self.output_file = None
# send only
self.text = None
self.what = None
ALIASES = {
"tx": "send",
"rx": "receive",
}
class AliasedGroup(click.Group):
def get_command(self, ctx, cmd_name):
cmd_name = ALIASES.get(cmd_name, cmd_name)
return click.Group.get_command(self, ctx, cmd_name)
# top-level command ("wormhole ...")
@click.group(cls=AliasedGroup)
@click.option(
"--relay-url", default=public_relay.RENDEZVOUS_RELAY,
metavar="URL",
help="rendezvous relay to use",
)
@click.option(
"--transit-helper", default=public_relay.TRANSIT_RELAY,
metavar="tcp:HOST:PORT",
help="transit relay to use",
)
@click.option(
"-c", "--code-length", default=2,
metavar="NUMWORDS",
help="length of code (in bytes/words)",
)
@click.option(
"-v", "--verify", is_flag=True, default=False,
help="display (and wait for acceptance of) verification string",
)
@click.option(
"--hide-progress", is_flag=True, default=False,
help="supress progress-bar display",
)
@click.option(
"--dump-timing", type=type(u""), # TODO: hide from --help output
default=None,
metavar="FILE.json",
help="(debug) write timing data to file",
)
@click.option(
"--no-listen", is_flag=True, default=False,
help="(debug) don't open a listening socket for Transit",
)
@click.option(
"--tor", is_flag=True, default=True,
help="use Tor when connecting",
)
@click.version_option(
message="magic-wormhole %(version)s",
version=__version__,
)
@click.pass_context
def wormhole(ctx, tor, no_listen, dump_timing, hide_progress,
verify, code_length, transit_helper, relay_url):
"""
Create a Magic Wormhole and communicate through it.
Wormholes are created by speaking the same magic CODE in two
different places at the same time. Wormholes are secure against
anyone who doesn't use the same code.
"""
ctx.obj = cfg = Config()
ctx.tor = tor
if no_listen:
cfg.listen = False
cfg.relay_url = relay_url
cfg.transit_helper = transit_helper
cfg.code_length = code_length
cfg.verify = verify
cfg.hide_progress = hide_progress
cfg.dump_timing = dump_timing
@inlineCallbacks
def _dispatch_command(reactor, cfg, command):
"""
Internal helper. This calls the given command (a no-argument
callable) with the Config instance in cfg and interprets any
errors for the user.
"""
cfg.timing.add("command dispatch")
cfg.timing.add("import", when=start, which="top").finish(when=top_import_finish)
try:
yield maybeDeferred(command)
except WrongPasswordError as e:
msg = fill("ERROR: " + dedent(e.__doc__))
print(msg, file=stderr)
except WelcomeError as e:
msg = fill("ERROR: " + dedent(e.__doc__))
print(msg, file=stderr)
print(file=stderr)
print(str(e), file=stderr)
except KeyFormatError as e:
msg = fill("ERROR: " + dedent(e.__doc__))
print(msg, file=stderr)
except Exception as e:
traceback.print_exc()
print("ERROR:", e, file=stderr)
raise SystemExit(1)
cfg.timing.add("exit")
if cfg.dump_timing:
cfg.timing.write(cfg.dump_timing, stderr)
# wormhole send (or "wormhole tx")
@wormhole.command()
@click.option(
"-0", "zeromode", default=False, is_flag=True,
help="enable no-code anything-goes mode",
)
@click.option(
"--code", metavar="CODE",
help="human-generated code phrase",
)
@click.option(
"--text", default=None, metavar="MESSAGE",
help="text message to send, instead of a file. Use '-' to read from stdin.",
)
@click.argument("what", default=u'')
@click.pass_obj
def send(cfg, what, text, code, zeromode):
"""Send a text message, file, or directory"""
with cfg.timing.add("import", which="cmd_send"):
from . import cmd_send
cfg.what = what
cfg.text = text
cfg.zeromode = zeromode
cfg.code = code
return react(_dispatch_command, (cfg, lambda: cmd_send.send(cfg)))
# wormhole receive (or "wormhole rx")
@wormhole.command()
@click.option(
"-0", "zeromode", default=False, is_flag=True,
help="enable no-code anything-goes mode",
)
@click.option(
"--only-text", "-t", is_flag=True,
help="refuse file transfers, only accept text transfers",
)
@click.option(
"--accept-file", is_flag=True,
help="accept file transfer without asking for confirmation",
)
@click.option(
"--output-file", "-o",
metavar="FILENAME|DIRNAME",
help=("The file or directory to create, overriding the name suggested"
" by the sender."),
)
@click.argument(
"code", nargs=-1, default=None,
# help=("The magic-wormhole code, from the sender. If omitted, the"
# " program will ask for it, using tab-completion."),
)
@click.pass_obj
def receive(cfg, code, zeromode, output_file, accept_file, only_text):
"""
Receive a text message, file, or directory (from 'wormhole send')
"""
with cfg.timing.add("import", which="cmd_receive"):
from . import cmd_receive
cfg.zeromode = zeromode
cfg.output_file = output_file
cfg.accept_file = accept_file
cfg.only_text = only_text
if len(code) == 1:
cfg.code = code[0]
elif len(code) > 1:
print(
"Pass either no code or just one code; you passed"
" {}: {}".format(len(code), ', '.join(code))
)
raise SystemExit(1)
else:
cfg.code = None
return react(_dispatch_command, (cfg, lambda: cmd_receive.receive(cfg)))
|
Python
| 0
|
@@ -5089,16 +5089,52 @@
= code%0A%0A
+ # note: react() does not return%0A
retu
@@ -6646,16 +6646,52 @@
= None%0A%0A
+ # note: react() does not return%0A
retu
|
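The new comments record a Twisted subtlety: `task.react` runs the reactor to completion and then raises `SystemExit`, so nothing placed after the call executes. A minimal demonstration:

from twisted.internet.defer import succeed
from twisted.internet.task import react


def main(reactor):
    print("doing async work")
    return succeed(None)


react(main, [])          # runs the reactor, then raises SystemExit(0)
print("never reached")   # dead code: react() does not return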
0200eedc7c76ba0cf38f53a8fd213e55349aa7c0
|
return name under which existing resource was registered
|
rctk/resourceregistry.py
|
rctk/resourceregistry.py
|
import os
import stat
import sys
import time
import mimetypes
from rctk.compat import OrderedDict
class BaseResource(object):
type = "application/data"
counter = 0
def __init__(self, data, name=None, type=None, timestamp=None):
self._data = data
if type is not None:
self.type = type
self._timestamp = timestamp or time.time()
self.name = name
if self.name is None:
self.name = "resource%d" % BaseResource.counter
BaseResource.counter += 1 ## class attribute!
@property
def data(self):
return self._data
def timestamp(self):
return self._timestamp
def __eq__(self, other):
## what if timestamp differs?
return self.data == other.data and self.type == other.type
def __repr__(self):
return '<%s name="%s" type="%s" ts="%s" len=%d bytes>' % \
(self.__class__.__name__, self.name, self.type, self.timestamp(),
len(self.data))
class FileResource(BaseResource):
def __init__(self, path, name=None, type=None):
if name is None:
name = os.path.basename(path)
## some magic to allow paths relative to calling module
if path.startswith('/'):
self.path = path
else:
frame = sys._getframe(1)
base = os.path.dirname(frame.f_globals['__file__'])
self.path = os.path.join(base, path)
if type is None:
type, encoding = mimetypes.guess_type(self.path)
data = open(self.path, "r").read()
timestamp = os.stat(self.path)[stat.ST_MTIME]
super(FileResource, self).__init__(data, name, type, timestamp)
@property
def data(self):
if self.debug:
return open(self.path, "r").read()
else:
return self._data
def timestamp(self):
return os.stat(self.path)[stat.ST_MTIME]
def __eq__(self, other):
if isinstance(other, FileResource):
## again, what about timestamp? or name?
return self.path == other.path
return False
def __repr__(self):
return '<%s name="%s" type="%s" ts="%s" path="%s">' % \
(self.__class__.__name__, self.name, self.type, self.timestamp(),
self.path)
class JSResource(BaseResource):
type = "text/javascript"
class CSSResource(BaseResource):
type = "text/css"
class JSFileResource(FileResource):
type = "text/javascript"
class CSSFileResource(FileResource):
type = "text/css"
class DynamicResource(BaseResource):
def __init__(self, name=None):
super(DynamicResource, self).__init__("dummy data", name=name)
def __call__(self, path):
pass
def __eq__(self, other):
""" dynamic resources aren't as equal as simple ones """
return self is other
class ResourceRegistry(object):
""" The resource registry is used to register javascript and
css that is used by rctk. It allows the main page to be
built dynamically and allows certain optimizations such as
- merging
- compression
- caching
- keep resources local to the code
- possibility to render inline
Currently, the ResourceRegistry can't properly handle @import in css.
It would probably need to load and merge these imports itself, or load
the imported css into the registry itself, possibly renaming the css in
the process.
At this point, this is only an issue with jqueryui, which we'll keep
as a static dependency for now.
"""
def __init__(self, debug=True):
self.resources = OrderedDict()
self.debug = debug
def add(self, resource):
## avoid duplicates
if resource in self.resources.values():
return None
name = resource.name
counter = 1
while name in self.resources:
name = "%s%d" % (resource.name, counter)
counter += 1
self.resources[name] = resource
return name
##
## (also) provide a way to filter on mimetype, allow matching against
## image/* as well.
def names(self):
return self.resources.keys()
def css_resources(self):
""" return references to css resources. They may be merged so it
may be just a single resource """
return [k for (k,v) in self.resources.items()
if isinstance(v, (CSSFileResource, CSSResource))]
def js_resources(self):
""" return references to css resources. They may be merged so it
may be just a single resource """
return [k for (k,v) in self.resources.items()
if isinstance(v, (JSFileResource, JSResource))]
def get_resource(self, name, elements=[]):
"""
return a (type, data) tuple containing the mimetype and resource
data
"""
r = self.resources[name]
## XXX this is rather ugly way of passing the RR's debug setting
r.debug = self.debug
if isinstance(r, DynamicResource):
r = r(elements)
return r
def header(self):
""" return html usable for injection into <head></head> """
res = []
for css in self.css_resources():
o = self.resources[css]
if self.debug:
timestamp = "?%d" % o.timestamp()
res.append('<link type="text/css" href="resources/%s%s"'
'rel="stylesheet" />' % (css, timestamp))
for js in self.js_resources():
o = self.resources[js]
if self.debug:
timestamp = "?%d" % o.timestamp()
res.append('<script type="text/javascript"'
'src="resources/%s%s"></script>' % (js, timestamp))
return '<!-- dynamic resources -->\n%s\n<!-- end dynamic resources -->' % '\n'.join(res)
_instance = None
def getResourceRegistry():
""" singleton-ish """
global _instance
if _instance is None:
_instance = ResourceRegistry()
mimetypes.init()
return _instance
def addResource(r):
return getResourceRegistry().add(r)
|
Python
| 0.000049
|
@@ -3800,16 +3800,39 @@
licates%0A
+ ## XXX optimze%0A
@@ -3882,24 +3882,27 @@
+ ##
return
None%0A%0A
@@ -3893,20 +3893,190 @@
return
-None
+its name nonetheless%0A for k, v in self.resources.iteritems():%0A if v == resource:%0A return k%0A assert %22This can't happen%22
%0A%0A
|
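The patched `add()` now reports the name under which an equal resource already lives instead of returning `None`. The same dedup-and-return-name behaviour in a self-contained miniature:

from collections import OrderedDict


class Registry:
    def __init__(self):
        self.resources = OrderedDict()

    def add(self, resource, name):
        # if an equal resource exists, return its registered name
        for key, value in self.resources.items():
            if value == resource:
                return key
        # otherwise pick a free name and register under it
        candidate, counter = name, 1
        while candidate in self.resources:
            candidate = "%s%d" % (name, counter)
            counter += 1
        self.resources[candidate] = resource
        return candidate


r = Registry()
assert r.add("body { color: red }", "style.css") == "style.css"
assert r.add("body { color: red }", "other.css") == "style.css"  # duplicate content
assert r.add("h1 { margin: 0 }", "style.css") == "style.css1"    # name collision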
98a82f084c6693dbd7cd44774f52e1bbdd835d05
|
Fix urls.py
|
rdmo/projects/urls/v1.py
|
rdmo/projects/urls/v1.py
|
from django.urls import include, path
from rest_framework_extensions.routers import ExtendedDefaultRouter
from ..viewsets import (ProjectQuestionSetViewSet, ProjectSnapshotViewSet, ProjectMembershipViewSet,
ProjectValueViewSet, ProjectViewSet, MembershipViewSet, SnapshotViewSet,
ValueViewSet)
app_name = 'v1-projects'
router = ExtendedDefaultRouter()
project_route = router.register(r'projects', ProjectViewSet, basename='project')
project_route.register(r'memberships', ProjectMembershipViewSet, basename='project-membership',
parents_query_lookups=['project'])
project_route.register(r'snapshots', ProjectSnapshotViewSet, basename='project-snapshot',
parents_query_lookups=['project'])
project_route.register(r'values', ProjectValueViewSet, basename='project-value',
parents_query_lookups=['project'])
project_route.register(r'questionsets', ProjectQuestionSetViewSet, basename='project-questionset',
parents_query_lookups=['project'])
router.register(r'memberships', MembershipViewSet, basename='membership')
router.register(r'snapshots', SnapshotViewSet, basename='snapshot')
router.register(r'values', ValueViewSet, basename='value')
urlpatterns = [
path('', include(router.urls)),
]
|
Python
| 0.999857
|
@@ -128,50 +128,84 @@
rt (
-ProjectQuestionSetViewSet, ProjectSnapshot
+CatalogViewSet, MembershipViewSet,%0A ProjectMembership
View
@@ -212,34 +212,35 @@
Set, Project
-Membership
+QuestionSet
ViewSet,%0A
@@ -267,29 +267,77 @@
Project
-ValueViewSet,
+SnapshotViewSet, ProjectValueViewSet,%0A
Project
@@ -341,34 +341,35 @@
ectViewSet,
-Membership
+QuestionSet
ViewSet, Sna
@@ -421,17 +421,16 @@
ewSet)%0A%0A
-%0A
app_name
@@ -1360,16 +1360,158 @@
value')%0A
+router.register(r'questionsets', QuestionSetViewSet, basename='questionset')%0Arouter.register(r'catalogs', CatalogViewSet, basename='catalog')%0A
%0A%0Aurlpat
|
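The corrected import block groups the viewsets the nested routes below actually use; the registration pattern itself is unchanged. For orientation, the drf-extensions idiom the file relies on, in miniature (viewset names stand in for the project's own):

from rest_framework_extensions.routers import ExtendedDefaultRouter

router = ExtendedDefaultRouter()
# top-level /projects/ route; nested routes filter by the parent lookup
project_route = router.register(r'projects', ProjectViewSet, basename='project')
project_route.register(r'values', ProjectValueViewSet, basename='project-value',
                       parents_query_lookups=['project'])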
0addd5540bfc24bff3aa2f66d78c24d83b6d275e
|
Use env in uninstall_hook (#677)
|
base_multi_image/hooks.py
|
base_multi_image/hooks.py
|
# -*- coding: utf-8 -*-
# © 2016 Antiun Ingeniería S.L. - Jairo Llopis
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import api, SUPERUSER_ID
import logging
_logger = logging.getLogger(__name__)
def pre_init_hook_for_submodules(cr, model, field):
"""Moves images from single to multi mode.
Feel free to use this as a ``pre_init_hook`` for submodules.
:param str model:
Model name, like ``product.template``.
:param str field:
Binary field that had the images in that :param:`model`, like
``image``.
"""
env = api.Environment(cr, SUPERUSER_ID, dict())
with cr.savepoint():
table = env[model]._table
column_exists = table_has_column(cr, table, field)
# fields.Binary(), extract the binary content directly from the table
if column_exists:
extract_query = """
SELECT id, '%(model)s', '%(model)s,' || id, 'db', %(field)s
FROM %(table)s
WHERE %(field)s IS NOT NULL
""" % {
"table": table,
"field": field,
"model": model,
}
image_field = 'file_db_store'
# fields.Binary(attachment=True), get the ir_attachment record ID
else:
extract_query = """
SELECT
res_id,
res_model,
CONCAT_WS(',', res_model, res_id),
'filestore',
id
FROM ir_attachment
WHERE res_field='%(field)s' AND res_model='%(model)s'
""" % {"model": model, "field": field}
image_field = 'attachment_id'
cr.execute(
"""
INSERT INTO base_multi_image_image (
owner_id,
owner_model,
owner_ref_id,
storage,
%s
)
%s
""" % (image_field, extract_query)
)
def uninstall_hook_for_submodules(cr, registry, model):
"""Remove multi-images for a given model.
:param odoo.sql_db.Cursor cr:
Database cursor.
:param odoo.modules.registry.RegistryManager registry:
Database registry, using v7 api.
:param str model:
Model technical name, like "res.partner". All multi-images for that
model will be deleted
"""
Image = registry["base_multi_image.image"]
ids = Image.search(cr, SUPERUSER_ID, [("owner_model", "=", model)])
Image.unlink(cr, SUPERUSER_ID, ids)
def table_has_column(cr, table, field):
query = """
SELECT %(field)s
FROM information_schema.columns
WHERE table_name=%(table)s and column_name=%(field)s;
"""
cr.execute(query, {'table': table, 'field': field})
return bool(cr.fetchall())
|
Python
| 0.000001
|
@@ -2506,17 +2506,20 @@
%22%5D%0A i
-d
+mage
s = Imag
@@ -2527,34 +2527,16 @@
.search(
-cr, SUPERUSER_ID,
%5B(%22owner
@@ -2566,21 +2566,22 @@
-I
+i
mage
+s
.unlink(
cr,
@@ -2580,29 +2580,8 @@
ink(
-cr, SUPERUSER_ID, ids
)%0A%0A%0A
|
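With this change the hook goes through an environment instead of the old `cr, SUPERUSER_ID` calling convention, mirroring `pre_init_hook_for_submodules` above. One way the rewritten hook can look (a sketch, not the exact committed code):

from odoo import api, SUPERUSER_ID


def uninstall_hook_for_submodules(cr, registry, model):
    """Remove multi-images for a given model, via a new-style env."""
    env = api.Environment(cr, SUPERUSER_ID, {})
    images = env["base_multi_image.image"].search(
        [("owner_model", "=", model)])
    images.unlink()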
15a5958a92b7a1a5034cb821da0c0eb1e6b14b5c
|
Rename router.log *attribute* to .logger so it doesn't conflict with the router.log() method.
|
lib/rapidsms/router.py
|
lib/rapidsms/router.py
|
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
import time
import threading
import log
class Router (object):
incoming_phases = ('parse', 'handle', 'cleanup')
outgoing_phases = ('outgoing',)
def __init__(self):
self.backends = []
self.apps = []
self.log = log.Log()
def log(self, level, message):
# call the function "level" on self.log
getattr(self.log, level)(message)
def add_app (self, app):
self.apps.append(app)
def add_backend (self, backend):
self.backends.append(backend)
def start_backend (self, backend):
while True:
try:
# start the backend
backend.start()
# if backend execution completed normally, end the thread
break
except Exception, e:
# an exception was raised in backend.start()
# sleep for 5 seconds, then loop and restart it
self.log.error("%s raised exception: %s" % (backend,e))
time.sleep(5)
self.log.error("restarting %s" % (backend,))
def start (self):
# dump some debug info for now
self.log.info("BACKENDS: %r" % (self.backends))
self.log.info("APPS: %r" % (self.apps))
self.log.info("SERVING FOREVER...")
workers = []
# launch each backend in its own thread
for backend in self.backends:
            worker = threading.Thread(target=self.start_backend, args=(backend,))
worker.start()
workers.append(worker)
# wait until we're asked to stop
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
pass
except SystemExit:
pass
for backend in self.backends:
backend.stop()
for worker in workers:
worker.join()
def incoming(self, message):
# loop through all of the apps and notify them of
# the incoming message so that they all get a
# chance to do what they will with it
for phase in self.incoming_phases:
for app in self.apps:
getattr(app, phase)(message)
def outgoing(self, message):
# first notify all of the apps that want to know
# about outgoing messages so that they can do what
# they will before the message is actually sent
for phase in self.outgoing_phases:
for app in self.apps:
getattr(app, phase)(message)
# now send the message out
self.log.info("SENT MESSAGE %s to %s" % (message, message.backend))
message.backend.send(message)
|
Python
| 0
|
@@ -290,16 +290,19 @@
self.log
+ger
= log.L
@@ -414,16 +414,19 @@
self.log
+ger
, level)
|
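Why the rename is needed: assigning `self.log = log.Log()` in `__init__` puts an instance attribute in front of the `log()` method during attribute lookup, so every call to `router.log(...)` hits the Log object, not the method. A minimal reproduction:

class Router(object):
    def __init__(self):
        self.log = "a Log instance"     # instance attribute...

    def log(self, level, message):      # ...shadows this method
        print(level, message)


router = Router()
try:
    router.log("info", "hello")
except TypeError as exc:
    print("TypeError:", exc)            # 'str' object is not callable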
8b7ef1066abefae83876607fd1a9153662463185
|
add try for obnl version loading in init
|
obnl/__init__.py
|
obnl/__init__.py
|
import pkg_resources # part of setuptools
__version__ = pkg_resources.require("obnl")[0].version
|
Python
| 0
|
@@ -36,16 +36,25 @@
uptools%0A
+try:%0A
__versio
@@ -95,12 +95,30 @@
)%5B0%5D.version
+%0Aexcept:%0A pass%0A
|
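The bare `except: pass` keeps an uninstalled source checkout importable, at the cost of leaving `__version__` undefined. A variant that catches the specific error and pins a fallback instead:

import pkg_resources  # part of setuptools

try:
    __version__ = pkg_resources.require("obnl")[0].version
except pkg_resources.DistributionNotFound:
    # running from a source tree that was never pip-installed
    __version__ = "unknown"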
e27c74754eb8afebe36d982edba96383a8aa9d86
|
Remove redundant call to super(__init__)
|
lib/rapidsms/router.py
|
lib/rapidsms/router.py
|
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
import time, datetime
import threading
import component
import log
class Router (component.Receiver):
incoming_phases = ('parse', 'handle', 'cleanup')
outgoing_phases = ('outgoing',)
def __init__(self):
component.Receiver.__init__(self)
self.backends = []
self.apps = []
self.running = False
self.logger = None
super(component.Receiver,self).__init__()
def log(self, level, msg, *args):
self.logger.write(self, level, msg, *args)
def set_logger(self, level, file):
self.logger = log.Logger(level, file)
def build_component (self, class_template, conf):
"""Imports and instantiates an module, given a dict with
the config key/value pairs to pass along."""
# break the class name off the end of the module template
# i.e. "apps.%s.app.App" -> ("apps.%s.app", "App")
module_template, class_name = class_template.rsplit(".",1)
# make a copy of the conf dict so we can delete from it
conf = conf.copy()
# resolve the component name into a real class
module_name = module_template % (conf.pop("type"))
module = __import__(module_name, {}, {}, [''])
component_class = getattr(module, class_name)
# create the component with an instance of this router
# and keep hold of it here, so we can communicate both ways
title = conf.pop("title")
component = component_class(title, self)
try:
component.configure(**conf)
except TypeError, e:
# "__init__() got an unexpected keyword argument '...'"
if "unexpected keyword" in e.message:
missing_keyword = e.message.split("'")[1]
raise Exception("Component '%s' does not support a '%s' option."
% (title, missing_keyword))
else:
raise
return component
def add_backend (self, conf):
backend = self.build_component("rapidsms.backends.%s.Backend", conf)
self.backends.append(backend)
def add_app (self, conf):
app = self.build_component("apps.%s.app.App", conf)
self.apps.append(app)
def start_backend (self, backend):
while self.running:
try:
# start the backend
backend.start()
# if backend execution completed normally, end the thread
break
except Exception, e:
# an exception was raised in backend.start()
# sleep for 5 seconds, then loop and restart it
self.error("%s failed: %s" % (backend.name,e))
if not self.running: break
time.sleep(5.0)
self.error("restarting %s" % (backend.name,))
def start_all_apps (self):
# call the "start" method of each app
for app in self.apps:
try:
app.start()
except Exception, e:
self.error("%s failed on start: %r", app, e)
def start_all_backends (self):
# launch each backend in its own thread
for backend in self.backends:
worker = threading.Thread(target=self.start_backend, args=(backend,))
worker.start()
def stop_all_backends (self):
for backend in self.backends:
try:
backend.stop()
except Exception, e:
self.error("%s failed on stop: %s" % (backend.name,e))
def start (self):
self.running = True
# dump some debug info for now
self.info("BACKENDS: %r" % (self.backends))
self.info("APPS: %r" % (self.apps))
self.info("SERVING FOREVER...")
self.start_all_backends()
self.start_all_apps()
# wait until we're asked to stop
while self.running:
try:
self.run()
except KeyboardInterrupt:
break
except SystemExit:
break
self.stop_all_backends()
self.running = False
def stop (self):
self.running = False
def run(self):
msg = self.next_message(timeout=1.0)
if msg is not None:
self.incoming(msg)
def incoming(self, message):
self.info("Incoming message via %s: %s ->'%s'" %\
(message.backend.name, message.caller, message.text))
# loop through all of the apps and notify them of
# the incoming message so that they all get a
# chance to do what they will with it
for phase in self.incoming_phases:
for app in self.apps:
self.debug('IN' + ' ' + phase + ' ' + app.name)
responses = len(message.responses)
handled = False
try:
handled = getattr(app, phase)(message)
except Exception, e:
self.error("%s failed on %s: %r", app, phase, e)
if phase == 'handle':
if handled is True:
self.debug("%s short-circuited handle phase", app.name)
break
elif responses != len(message.responses):
self.warn("App '%s' shouldn't send responses in %s()!",
app.name, phase)
# now send the message's responses
message.flush_responses()
def outgoing(self, message):
self.info("Outgoing message via %s: %s <- '%s'" %\
(message.backend.name, message.caller, message.text))
# first notify all of the apps that want to know
# about outgoing messages so that they can do what
# they will before the message is actually sent
for phase in self.outgoing_phases:
continue_sending = True
# call outgoing phases in the opposite order of the
# incoming phases so that, for example, the first app
# called with an incoming message is the last app called
# with an outgoing message
for app in reversed(self.apps):
self.debug('OUT' + ' ' + phase + ' ' + app.name)
try:
continue_sending = getattr(app, phase)(message)
except Exception, e:
self.error("%s failed on %s: %r", app, phase, e)
if continue_sending is False:
self.info("App '%s' cancelled outgoing message", app.name)
return False
# now send the message out
message.backend.send(message)
self.debug("SENT message '%s' to %s via %s" % (message.text,\
message.caller, message.backend.name))
return True
|
Python
| 0.000126
|
@@ -414,58 +414,8 @@
None
-%0A super(component.Receiver,self).__init__()
%0A%0A
|
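The deleted line was a no-op beyond `object.__init__`: `component.Receiver.__init__(self)` already ran at the top of the constructor, and `super(component.Receiver, self)` starts the MRO walk *after* `Receiver`. Illustrated:

class Receiver(object):
    def __init__(self):
        print("Receiver.__init__")


class Router(Receiver):
    def __init__(self):
        Receiver.__init__(self)            # explicit base call, runs once
        super(Receiver, self).__init__()   # MRO after Receiver -> object.__init__


Router()  # prints "Receiver.__init__" exactly once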
f4d8ffbbaea5a1155540b167b740ab7bbaa4fd0f
|
load balance ICDS warehouse reads
|
corehq/sql_db/routers.py
|
corehq/sql_db/routers.py
|
from __future__ import absolute_import
from django.conf import settings
from .config import partition_config
PROXY_APP = 'sql_proxy_accessors'
FORM_PROCESSOR_APP = 'form_processor'
SQL_ACCESSORS_APP = 'sql_accessors'
ICDS_REPORTS_APP = 'icds_reports'
ICDS_MODEL = 'icds_model'
SCHEDULING_PARTITIONED_APP = 'scheduling_partitioned'
WAREHOUSE_APP = 'warehouse'
class PartitionRouter(object):
def db_for_read(self, model, **hints):
return db_for_read_write(model)
def db_for_write(self, model, **hints):
return db_for_read_write(model)
def allow_migrate(self, db, app_label, model=None, **hints):
return allow_migrate(db, app_label)
def allow_relation(self, obj1, obj2, **hints):
from corehq.sql_db.models import PartitionedModel
obj1_partitioned = isinstance(obj1, PartitionedModel)
obj2_partitioned = isinstance(obj2, PartitionedModel)
if obj1_partitioned and obj2_partitioned:
return obj1.db == obj2.db
elif not obj1_partitioned and not obj2_partitioned:
return True
return False
class MonolithRouter(object):
def allow_migrate(self, db, app_label, model=None, **hints):
return app_label != PROXY_APP
def allow_migrate(db, app_label):
if app_label == ICDS_REPORTS_APP:
return hasattr(settings, "ICDS_UCR_DATABASE_ALIAS") and db == settings.ICDS_UCR_DATABASE_ALIAS
if not settings.USE_PARTITIONED_DATABASE:
return app_label != PROXY_APP
if app_label == PROXY_APP:
return db == partition_config.get_proxy_db()
elif app_label in (FORM_PROCESSOR_APP, SCHEDULING_PARTITIONED_APP):
return (
db == partition_config.get_proxy_db() or
db in partition_config.get_form_processing_dbs()
)
elif app_label == SQL_ACCESSORS_APP:
return db in partition_config.get_form_processing_dbs()
elif app_label == WAREHOUSE_APP:
return hasattr(settings, "WAREHOUSE_DATABASE_ALIAS") and db == settings.WAREHOUSE_DATABASE_ALIAS
else:
return db == partition_config.get_main_db()
def db_for_read_write(model):
if not settings.USE_PARTITIONED_DATABASE:
return 'default'
app_label = model._meta.app_label
if app_label == FORM_PROCESSOR_APP:
return partition_config.get_proxy_db()
elif app_label == WAREHOUSE_APP:
error_msg = 'Cannot read/write to warehouse db without warehouse database defined'
assert hasattr(settings, "WAREHOUSE_DATABASE_ALIAS"), error_msg
return settings.WAREHOUSE_DATABASE_ALIAS
elif app_label == ICDS_MODEL:
assert hasattr(settings, "ICDS_UCR_TEST_DATABASE_ALIAS")
return settings.ICDS_UCR_TEST_DATABASE_ALIAS
else:
return partition_config.get_main_db()
|
Python
| 0
|
@@ -66,16 +66,93 @@
ttings%0A%0A
+from corehq.sql_db.connections import connection_manager, ICDS_UCR_ENGINE_ID%0A
from .co
@@ -538,32 +538,45 @@
read_write(model
+, write=False
)%0A%0A def db_fo
@@ -644,16 +644,28 @@
te(model
+, write=True
)%0A%0A d
@@ -2229,18 +2229,217 @@
te(model
-):
+, write=True):%0A %22%22%22%0A :param model: Django model being queried%0A :param write: Default to True since the DB for writes can also handle reads%0A :return: Django DB alias to use for query%0A %22%22%22
%0A if
@@ -2923,118 +2923,221 @@
-assert hasattr(settings, %22ICDS_UCR_TEST_DATABASE_ALIAS%22)%0A return settings.ICDS_UCR_TEST_DATABASE_ALIAS%0A
+engine_id = ICDS_UCR_ENGINE_ID%0A if not write:%0A engine_id = connection_manager.get_load_balanced_read_engine_id(ICDS_UCR_ENGINE_ID)%0A return connection_manager.get_django_db_alias(engine_id)
%0A
|
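The patch routes ICDS reads through `connection_manager.get_load_balanced_read_engine_id`, while writes keep the primary engine. The general Django shape of such a router, sketched without CommCare's connection-manager machinery (database aliases are hypothetical):

import random


class ReadReplicaRouter(object):
    """Route one app's reads to a random replica; writes stay on the primary."""
    app_label = 'icds_model'
    primary = 'icds'
    read_replicas = ['icds', 'icds_replica_1']

    def db_for_read(self, model, **hints):
        if model._meta.app_label == self.app_label:
            return random.choice(self.read_replicas)
        return None  # None lets Django fall through to the next router

    def db_for_write(self, model, **hints):
        if model._meta.app_label == self.app_label:
            return self.primary
        return None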
32d49946279cab868b493aae432b431fa9d5e2bc
|
Add wrap at wrap_width unless it's 0.
|
autowrap.py
|
autowrap.py
|
import sublime, sublime_plugin, re, sys
if sys.version >= '3':
long = int
class AutoWrapListener(sublime_plugin.EventListener):
saved_sel = 0
def on_modified(self, view):
if view.is_scratch() or view.settings().get('is_widget'): return
if not view.settings().get('auto_wrap', False): return
sel = view.sel()
if not sel or len(sel)>1 or sel[0].begin()!=sel[0].end(): return
rulers = view.settings().get('rulers')
if not rulers: rulers = [80]
pt = sel[0].end()
if pt<=self.saved_sel or pt-self.saved_sel>1 or view.rowcol(pt)[1]<=rulers[0] \
or view.substr(pt-1)==" ":
activate = False
else: activate = True
self.saved_sel = sel[0].end()
if not activate: return
# to obtain the insert point
line = view.substr(view.line(pt))
m = re.match('.*\s(\S*\s*)$',line)
if not m: return
insertpt = view.line(pt).end()-len(m.group(1))
if pt<insertpt: return
if view.settings().get("wrap_style") != "classic" and view.rowcol(insertpt)[1]<=rulers[0]:
return
# insert enter
view.run_command('auto_wrap_insert', {'insertpt': insertpt})
if view.settings().get('auto_indent'):
view.run_command('reindent', {'force_indent': False})
class AutoWrapInsertCommand(sublime_plugin.TextCommand):
def run(self, edit, insertpt):
self.view.insert(edit, long(insertpt), "\n")
class ToggleAutoWrap(sublime_plugin.WindowCommand):
def run(self):
view = self.window.active_view()
view.settings().set("auto_wrap", not view.settings().get("auto_wrap", False))
onoff = "on" if view.settings().get("auto_wrap") else "off"
sublime.status_message("Auto (Hard) Wrap %s" % onoff)
|
Python
| 0
|
@@ -425,83 +425,263 @@
-rulers = view.settings().get('rulers')%0A if not rulers: rulers
+wrap_width = view.settings().get('wrap_width')%0A if not wrap_width or wrap_width == 0:%0A rulers = view.settings().get('rulers')%0A if rulers:%0A wrap_width = rulers%5B0%5D%0A else:%0A wrap_width
=
-%5B
80
-%5D
%0A
@@ -779,25 +779,26 @@
pt)%5B1%5D%3C=
-rulers%5B0%5D
+wrap_width
%5C%0A
@@ -1282,25 +1282,26 @@
pt)%5B1%5D%3C=
-rulers%5B0%5D
+wrap_width
:%0A
|
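The new lookup order — an explicit `wrap_width` setting, else the first ruler, else 80 — extracted into a plain function for clarity (a dict stands in for a `sublime.Settings` object):

def effective_wrap_width(settings):
    """Resolve the wrap column: wrap_width setting, else first ruler, else 80."""
    wrap_width = settings.get('wrap_width')
    if not wrap_width or wrap_width == 0:
        rulers = settings.get('rulers')
        wrap_width = rulers[0] if rulers else 80
    return wrap_width


assert effective_wrap_width({'wrap_width': 72}) == 72
assert effective_wrap_width({'wrap_width': 0, 'rulers': [100]}) == 100
assert effective_wrap_width({}) == 80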
2cf95fe7f2bec746cc4c76dd716d299754bf1491
|
implement click.prompt, and unicode literals for py2/py3 compatibility
|
stackit/stackit_core.py
|
stackit/stackit_core.py
|
#!/usr/bin/env python
from __future__ import print_function
import sys
import stackexchange
from stackexchange import Sort
# A good testing URL: http://stackoverflow.com/questions/16800049/changepassword-test
# The approved answer ID: 16800090
import requests
import webbrowser
import subprocess
import click
import bs4
import os
if sys.version_info[:2] < (3, 0):
input = raw_input
NUM_RESULTS = 5
# API key is public, according to SO documentation
# (link?)
API_KEY = "3GBT2vbKxgh*ati7EBzxGA(("
VERSION_NUM = "0.1.3"
# HTML to markdown parsing
# https://github.com/aaronsw/html2text
import html2text
h = html2text.HTML2Text()
user_api_key = API_KEY
so = stackexchange.Site(stackexchange.StackOverflow, app_key=user_api_key, impose_throttling=True)
so.be_inclusive()
class Config():
""" Main configuration object """
def __init__(self):
self.search = False
self.stderr = False
self.tag = False
self.verbose = False
pass_config = click.make_pass_decorator(Config, ensure=True)
def select(questions, num):
print_full_question(questions[num - 1])
working = True
while working:
user_input = input("Enter b to launch browser, x to return to search, or q to quit: ")
if user_input == 'b':
webbrowser.open(questions[num - 1].json['link'], new=0, autoraise=True)
elif user_input == 'q':
sys.exit()
elif user_input == 'x':
click.echo("\n" * 12)
# Ranging over the 5 questions including the user's choice
for j in range(5 * int((num - 1) / 5), 5 * int((num - 1) / 5) + 5):
print_question(questions[j], j + 1)
working = False
else:
click.echo(click.style(
"The input entered was not recognized as a valid choice.",
fg="red"))
def focus_question(questions):
working = True
while working:
user_input = input("Enter m for more, a question number to select, or q to quit: ")
if user_input == 'm':
working = False
elif user_input == 'q':
sys.exit()
elif user_input.isnumeric() and int(user_input) <= len(questions):
select(questions, int(user_input))
else:
click.echo(click.style(
"The input entered was not recognized as a valid choice.",
fg="red"))
def _search(config):
# inform user
click.echo('Searching for: {0}...'.format(config.term))
click.echo('Tags: {0}'.format(config.tag))
questions = so.search_advanced(
q=config.term,
tagged=config.tag.split(),
sort=Sort.Votes)
count = 0
question_logs = []
# quicker way for appending to list
add_to_logs = question_logs.append
for question in questions:
if 'accepted_answer_id' in question.json:
count += 1
add_to_logs(question)
print_question(question, count)
if count % NUM_RESULTS == 0:
focus_question(question_logs)
def print_question(question, count):
# questionurl gives the url of the SO question
# the answer is under id "answer-answerid", and text of answer is in class post-text
questionurl = question.json['link']
answerid = question.json['accepted_answer_id']
# Pulls the html from the StackOverflow site, converts to Beautiful Soup
response = requests.get(questionurl)
soup = bs4.BeautifulSoup(response.text)
    # Prints the accepted answer div, concatenating "answer-" and answerid
    # Gets the p string -- do all answers follow this format, or do some have more info?
answer = h.handle(soup.find("div", {"id": "answer-" + str(answerid)}).p.prettify())
click.echo(''.join([
click.style(''.join([str(count), '\nQuestion: ', question.title]), fg='blue'),
''.join(['\nAnswer', answer]),
]))
def get_term(config):
if config.search:
return config.search
elif config.stderr:
commandlist = config.stderr.split()
command = commandlist[0]
# Get current working directory and replace spaces with '\ ' to stop errors
filename = (os.getcwd()).replace(' ', '\ ') + "/" + commandlist[1]
process = subprocess.Popen(command + " " + filename, stderr=subprocess.PIPE, shell=True)
output = process.communicate()[1]
return (str(output.splitlines()[-1]) + " ")
return ""
def print_full_question(question):
questionurl = question.json['link']
answerid = question.json['accepted_answer_id']
response = requests.get(questionurl)
soup = bs4.BeautifulSoup(response.text)
# Focuses on the single div with the matching answerid--necessary b/c bs4 is quirky
for answerdiv in soup.find_all('div', attrs={'id': 'answer-' + str(answerid)}):
# Return printable text div--the contents of the answer
# This isn't perfect; things like code indentation aren't pretty at all
# print(answerdiv.find('div', attrs={'class': 'post-text'}))
answertext = h.handle(answerdiv.find('div', attrs={'class': 'post-text'}).prettify())
for cell in soup.find_all('td', attrs={'class': 'postcell'}):
questiontext = h.handle(cell.find('div', attrs={'class': 'post-text'}).prettify())
click.echo(''.join([
click.style(''.join([
"-------------------------QUESTION------------------------\n",
question.title, '\n', questiontext,
]), fg='blue'),
''.join([
"\n\n-------------------------------ANSWER------------------------------------\n",
answertext,
]),
]))
def search_verbose(term):
questions = so.search_advanced(q=term, sort=Sort.Votes)
question = questions[0]
print_full_question(question)
@click.command()
@click.option("-s", "--search", default="", help="Searches StackOverflow for your query")
@click.option("-e", "--stderr", default="", help="Runs an executable command (i.e. python script.py) and automatically inputs error message to StackOverflow")
@click.option("-t", "--tag", default="", help="Searches StackOverflow for your tags")
@click.option("--verbose", is_flag=True, help="displays full text of most relevant question and answer")
@click.option("--version", is_flag=True, help="displays the version")
@pass_config
def main(config, search, stderr, tag, verbose, version):
""" Parses command-line arguments for StackIt """
config.search = search
config.stderr = stderr
config.tag = tag
config.verbose = verbose
config.term = get_term(config)
if search or stderr:
_search(config)
elif verbose:
search_verbose(config.term)
elif version:
click.echo("Version {VERSION_NUM}".format(**globals()))
if __name__ == '__main__':
main()
|
Python
| 0
|
@@ -53,16 +53,56 @@
unction%0A
+from __future__ import unicode_literals%0A
import s
@@ -1189,36 +1189,43 @@
user_input =
-inpu
+click.promp
t(%22Enter b to la
@@ -1265,34 +1265,32 @@
ch, or q to quit
-:
%22)%0A if us
@@ -1986,20 +1986,27 @@
input =
-inpu
+click.promp
t(%22Enter
@@ -2059,18 +2059,16 @@
to quit
-:
%22)%0A
|
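`click.prompt` replaces the `input`/`raw_input` shim and appends the `: ` suffix itself, which is why the diff also drops the trailing colon from the prompt strings. Minimal usage:

import click


@click.command()
def menu():
    choice = click.prompt(
        "Enter b to launch browser, x to return to search, or q to quit")
    click.echo("you chose {0}".format(choice))


if __name__ == '__main__':
    menu()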
0f85b39fcca84b60815c54201f5f52eb9a2840c7
|
Split the normalize function into two.
|
avena/np.py
|
avena/np.py
|
#!/usr/bin/env python2
from numpy import around, empty as _empty, mean, std
from numpy import int8, int16, int32, int64
from numpy import uint8, uint16, uint32, uint64
from numpy import float32, float64
from sys import float_info as _float_info
_eps = 10.0 * _float_info.epsilon
# Map of NumPy array type strings to types
_np_dtypes = {
'int8': int8,
'int16': int16,
'int32': int32,
'int64': int64,
'uint8': uint8,
'uint16': uint16,
'uint32': uint32,
'uint64': uint64,
'float32': float32,
'float64': float64,
}
def from_uint8(array, dtype):
new_array = array.astype(dtype)
return new_array
def to_uint8(array):
uint8_array = _empty(array.shape, dtype=uint8)
around(array * 255, out=uint8_array)
return uint8_array
def normalize(array):
'''Normalize an array to the interval [0,1].'''
mu = mean(array)
rho2 = std(array)
min = mu - 3.0 * rho2
max = mu + 3.0 * rho2
array -= min
array /= max - min
negs = array < 0.0 + _eps
array[negs] = 0.0
bigs = array > 1.0 - _eps
array[bigs] = 1.0
return
if __name__ == '__main__':
pass
|
Python
| 0.999846
|
@@ -797,24 +797,219 @@
nt8_array%0A%0A%0A
+def clip(array, (min, max)):%0A '''Clip the values of an array to the given interval.'''%0A x = array %3C min + _eps%0A y = array %3E max - _eps%0A array%5Bx%5D = min%0A array%5By%5D = max%0A return%0A%0A%0A
def normaliz
@@ -1209,112 +1209,8 @@
min%0A
- negs = array %3C 0.0 + _eps%0A array%5Bnegs%5D = 0.0%0A bigs = array %3E 1.0 - _eps%0A array%5Bbigs%5D = 1.0%0A
|
bdcafd0c5af46e88ae06e6bbb853d415a30f8d26
|
test algo affine
|
testing/test_sct_register_multimodal.py
|
testing/test_sct_register_multimodal.py
|
#!/usr/bin/env python
#########################################################################################
#
# Test function for sct_register_multimodal script
#
# replace the shell test script in sct 1.0
#
# ---------------------------------------------------------------------------------------
# Copyright (c) 2014 Polytechnique Montreal <www.neuro.polymtl.ca>
# Author: Augustin Roux
# modified: 2014/09/28
#
# About the license: see the file LICENSE.TXT
#########################################################################################
#import sct_utils as sct
import commands
def test(path_data):
folder_data = 'mt/'
file_data = ['mt0.nii.gz', 'mt1.nii.gz']
output = ''
status = 0
cmd = 'sct_register_multimodal -i ' + path_data + folder_data + file_data[0] \
+ ' -d ' + path_data + folder_data + file_data[1] \
+ ' -o data_reg.nii.gz' \
+ ' -p step=1,algo=syn,iter=1,smooth=0,shrink=4,metric=MeanSquares' \
+ ' -x linear' \
+ ' -r 0' \
+ ' -v 1'
output += cmd+'\n' # copy command
s, o = commands.getstatusoutput(cmd)
status += s
output += o
# check other method
cmd = 'sct_register_multimodal -i ' + path_data + folder_data + file_data[0] \
+ ' -d ' + path_data + folder_data + file_data[1] \
+ ' -o data_reg.nii.gz' \
+ ' -p step=1,algo=slicereg,iter=1,smooth=0,shrink=4,metric=MeanSquares' \
+ ' -x linear' \
+ ' -r 0' \
+ ' -v 1'
output += cmd+'\n' # copy command
s, o = commands.getstatusoutput(cmd)
status += s
output += o
return status, output
if __name__ == "__main__":
# call main function
test()
|
Python
| 0.000001
|
@@ -1633,24 +1633,497 @@
utput += o%0A%0A
+ # check other method%0A cmd = 'sct_register_multimodal -i ' + path_data + folder_data + file_data%5B0%5D %5C%0A + ' -d ' + path_data + folder_data + file_data%5B1%5D %5C%0A + ' -o data_reg.nii.gz' %5C%0A + ' -p step=1,algo=affine,iter=1,smooth=0,shrink=4,metric=MeanSquares' %5C%0A + ' -x linear' %5C%0A + ' -r 0' %5C%0A + ' -v 1'%0A output += cmd+'%5Cn' # copy command%0A s, o = commands.getstatusoutput(cmd)%0A status += s%0A output += o%0A%0A
return s
|
1e704b4ac648d06a05d8c97e3ca38b64ea931c0a
|
Fix version number
|
ooni/__init__.py
|
ooni/__init__.py
|
# -*- encoding: utf-8 -*-
__author__ = "Arturo Filastò"
__version__ = "1.0.0-rc3"
__all__ = ['config', 'inputunit', 'kit',
'lib', 'nettest', 'oonicli', 'reporter',
'templates', 'utils']
|
Python
| 0.000041
|
@@ -77,9 +77,9 @@
0-rc
-3
+5
%22%0A%0A_
|
83c26b1b05df1b9593ef2e2f82764ee527806395
|
decrease noise level in the initial image
|
neural_artistic_style.py
|
neural_artistic_style.py
|
#!/usr/bin/env python
import os
import argparse
import numpy as np
import scipy.misc
import deeppy as dp
from matconvnet import vgg19_net
from style_network import StyleNetwork
def weight_tuple(s):
try:
conv_idx, weight = map(float, s.split(','))
return conv_idx, weight
except:
raise argparse.ArgumentTypeError('weights must by "int,float"')
def float_range(x):
x = float(x)
if x < 0.0 or x > 1.0:
raise argparse.ArgumentTypeError("%r not in range [0, 1]" % x)
return x
def weight_array(weights):
array = np.zeros(19)
for idx, weight in weights:
array[idx] = weight
norm = np.sum(array)
if norm > 0:
array /= norm
return array
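# A small illustration (hedged; values mirror the CLI defaults below): the
# arguments '0,1 2,1' parse via weight_tuple into [(0.0, 1.0), (2.0, 1.0)],
# and weight_array spreads them into a 19-element vector that sums to 1.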
def imread(path):
return scipy.misc.imread(path).astype(dp.float_)
def imsave(path, img):
img = np.clip(img, 0, 255).astype(np.uint8)
scipy.misc.imsave(path, img)
def to_bc01(img):
return np.transpose(img, (2, 0, 1))[np.newaxis, ...]
def to_rgb(img):
return np.transpose(img[0], (1, 2, 0))
def run():
parser = argparse.ArgumentParser(
description='Neural artistic style. Generates an image by combining '
'the subject from one image and the style from another.',
formatter_class=argparse.ArgumentDefaultsHelpFormatter
)
parser.add_argument('--subject', required=True, type=str,
help='Subject image.')
parser.add_argument('--style', required=True, type=str,
help='Style image.')
parser.add_argument('--output', default='out.png', type=str,
help='Output image.')
parser.add_argument('--init', default=None, type=str,
help='Initial image. Subject is chosen as default.')
parser.add_argument('--init-noise', default=0.0, type=float_range,
help='Weight between [0, 1] to adjust the noise level '
'in the initial image.')
parser.add_argument('--random-seed', default=None, type=int,
help='Random state.')
parser.add_argument('--animation', default='animation', type=str,
help='Output animation directory.')
parser.add_argument('--iterations', default=500, type=int,
help='Number of iterations to run.')
parser.add_argument('--learn-rate', default=2.0, type=float,
help='Learning rate.')
parser.add_argument('--smoothness', type=float, default=2e-7,
help='Weight of smoothing scheme.')
parser.add_argument('--subject-weights', nargs='*', type=weight_tuple,
default=[(9, 1)],
help='List of subject weights (conv_idx,weight).')
parser.add_argument('--style-weights', nargs='*', type=weight_tuple,
default=[(0, 1), (2, 1), (4, 1), (8, 1), (12, 1)],
help='List of style weights (conv_idx,weight).')
parser.add_argument('--subject-ratio', type=float, default=2e-2,
help='Weight of subject relative to style.')
parser.add_argument('--pool-method', default='avg', type=str,
choices=['max', 'avg'], help='Subsampling scheme.')
parser.add_argument('--vgg19', default='imagenet-vgg-verydeep-19.mat',
type=str, help='VGG-19 .mat file.')
args = parser.parse_args()
if args.random_seed is not None:
np.random.seed(args.random_seed)
layers, img_mean = vgg19_net(args.vgg19, pool_method=args.pool_method)
# Inputs
pixel_mean = np.mean(img_mean, axis=(0, 1))
style_img = imread(args.style) - pixel_mean
subject_img = imread(args.subject) - pixel_mean
if args.init is None:
init_img = subject_img
else:
init_img = imread(args.init) - pixel_mean
noise = np.random.normal(size=init_img.shape, scale=np.std(init_img))
init_img = init_img * (1 - args.init_noise) + noise * args.init_noise
# Setup network
subject_weights = weight_array(args.subject_weights) * args.subject_ratio
style_weights = weight_array(args.style_weights)
net = StyleNetwork(layers, to_bc01(init_img), to_bc01(subject_img),
to_bc01(style_img), subject_weights, style_weights,
args.smoothness)
# Repaint image
def net_img():
return to_rgb(net.image) + pixel_mean
if not os.path.exists(args.animation):
os.mkdir(args.animation)
params = net._params
learn_rule = dp.Adam(learn_rate=args.learn_rate)
learn_rule_states = [learn_rule.init_state(p) for p in params]
for i in range(args.iterations):
imsave(os.path.join(args.animation, '%.4d.png' % i), net_img())
cost = np.mean(net._update())
for param, state in zip(params, learn_rule_states):
learn_rule.step(param, state)
print('Iteration: %i, cost: %.4f' % (i, cost))
imsave(args.output, net_img())
if __name__ == "__main__":
run()
|
Python
| 0
|
@@ -2517,12 +2517,12 @@
ult=
-2e-7
+5e-8
,%0A
@@ -3944,16 +3944,21 @@
nit_img)
+*1e-1
)%0A in
|
c4701ae084ae35ec4b9f9acd6e7cb7c114adcb3c
|
Fix ocr reader (#2346)
|
dygraph/ocr_recognition/data_reader.py
|
dygraph/ocr_recognition/data_reader.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import cv2
import tarfile
import numpy as np
from PIL import Image
from os import path
from paddle.dataset.image import load_image
import paddle
SOS = 0
EOS = 1
NUM_CLASSES = 95
DATA_SHAPE = [1, 48, 512]
DATA_MD5 = "7256b1d5420d8c3e74815196e58cdad5"
DATA_URL = "http://paddle-ocr-data.bj.bcebos.com/data.tar.gz"
SAVED_FILE_NAME = "data.tar.gz"
DATA_DIR_NAME = "data"
TRAIN_DATA_DIR_NAME = "train_images"
TEST_DATA_DIR_NAME = "test_images"
TRAIN_LIST_FILE_NAME = "train.list"
TEST_LIST_FILE_NAME = "test.list"
class DataGenerator(object):
def __init__(self):
pass
def train_reader(self,
img_root_dir,
img_label_list,
batchsize,
cycle,
shuffle=True):
'''
Reader interface for training.
:param img_root_dir: The root path of the image for training.
:type img_root_dir: str
:param img_label_list: The path of the <image_name, label> file for training.
:type img_label_list: str
        :param cycle: If the number of iterations is greater than dataset_size / batch_size,
                      the reader iterates over the dataset as many times as necessary.
:type cycle: bool
'''
img_label_lines = []
to_file = "tmp.txt"
if not shuffle:
cmd = "cat " + img_label_list + " | awk '{print $1,$2,$3,$4;}' > " + to_file
elif batchsize == 1:
cmd = "cat " + img_label_list + " | awk '{print $1,$2,$3,$4;}' | shuf > " + to_file
else:
#cmd1: partial shuffle
cmd = "cat " + img_label_list + " | awk '{printf(\"%04d%.4f %s\\n\", $1, rand(), $0)}' | sort | sed 1,$((1 + RANDOM % 100))d | "
#cmd2: batch merge and shuffle
cmd += "awk '{printf $2\" \"$3\" \"$4\" \"$5\" \"; if(NR % " + str(
batchsize) + " == 0) print \"\";}' | shuf | "
#cmd3: batch split
cmd += "awk '{if(NF == " + str(
batchsize
) + " * 4) {for(i = 0; i < " + str(
batchsize
) + "; i++) print $(4*i+1)\" \"$(4*i+2)\" \"$(4*i+3)\" \"$(4*i+4);}}' > " + to_file
os.system(cmd)
print("finish batch shuffle")
img_label_lines = open(to_file, 'r').readlines()
def reader():
sizes = len(img_label_lines) // batchsize
if sizes == 0:
raise ValueError('batchsize is bigger than the dataset size.')
while True:
for i in range(sizes):
result = []
sz = [0, 0]
max_len = 0
for k in range(batchsize):
line = img_label_lines[i * batchsize + k]
items = line.split(' ')
label = [int(c) for c in items[-1].split(',')]
max_len = max(max_len, len(label))
for j in range(batchsize):
line = img_label_lines[i * batchsize + j]
items = line.split(' ')
label = [int(c) for c in items[-1].split(',')]
mask = np.zeros((max_len)).astype('float32')
mask[:len(label) + 1] = 1.0
#mask[ j, :len(label) + 1] = 1.0
if max_len > len(label) + 1:
extend_label = [EOS] * (max_len - len(label) - 1)
label.extend(extend_label)
else:
label = label[0:max_len - 1]
img = Image.open(os.path.join(img_root_dir, items[
2])).convert('L')
if j == 0:
sz = img.size
img = img.resize((sz[0], sz[1]))
img = np.array(img) - 127.5
img = img[np.newaxis, ...]
result.append([img, [SOS] + label, label + [EOS], mask])
yield result
if not cycle:
break
return reader
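# A usage sketch (hedged; paths are hypothetical): train_reader returns a
# generator function, so iterate over its call.
#   gen = DataGenerator().train_reader('train_images', 'train.list',
#                                      batchsize=8, cycle=False, shuffle=False)
#   for batch in gen():
#       pass  # each entry is [img, [SOS] + label, label + [EOS], mask]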
def num_classes():
'''Get classes number of this dataset.
'''
return NUM_CLASSES
def data_shape():
'''Get image shape of this dataset. It is a dummy shape for this dataset.
'''
return DATA_SHAPE
def data_reader(batch_size,
images_dir=None,
list_file=None,
cycle=False,
shuffle=False,
data_type="train"):
generator = DataGenerator()
if data_type == "train":
if images_dir is None:
data_dir = download_data()
images_dir = path.join(data_dir, TRAIN_DATA_DIR_NAME)
if list_file is None:
list_file = path.join(data_dir, TRAIN_LIST_FILE_NAME)
elif data_type == "test":
if images_dir is None:
data_dir = download_data()
images_dir = path.join(data_dir, TEST_DATA_DIR_NAME)
if list_file is None:
list_file = path.join(data_dir, TEST_LIST_FILE_NAME)
else:
print("data type only support train | test")
raise Exception("data type only support train | test")
return generator.train_reader(
images_dir, list_file, batch_size, cycle, shuffle=shuffle)
def download_data():
'''Download train and test data.
'''
tar_file = paddle.dataset.common.download(
DATA_URL, CACHE_DIR_NAME, DATA_MD5, save_name=SAVED_FILE_NAME)
data_dir = path.join(path.dirname(tar_file), DATA_DIR_NAME)
if not path.isdir(data_dir):
t = tarfile.open(tar_file, "r:gz")
t.extractall(path=path.dirname(tar_file))
t.close()
return data_dir
|
Python
| 0.000001
|
@@ -426,16 +426,50 @@
tar.gz%22%0A
+CACHE_DIR_NAME = %22attention_data%22%0A
SAVED_FI
|
0c244f0b295785378c85dfdf7a70c238d0a4f20b
|
Add a warning to prevent people from running nipy from the source directory.
|
neuroimaging/__init__.py
|
neuroimaging/__init__.py
|
# -*- coding: utf-8 -*-
"""
Neuroimaging tools for Python (NIPY).
The aim of NIPY is to produce a platform-independent Python environment for
the analysis of brain imaging data using an open development model.
While
the project is still in its initial stages, packages for file I/O and script
support, as well as single-subject fMRI and random-effects group comparison
models, are currently available.
Specifically, we aim to:
1. Provide an open source, mixed language scientific programming
environment suitable for rapid development.
   2. Create software components in this environment to make it easy
to develop tools for MRI, EEG, PET and other modalities.
3. Create and maintain a wide base of developers to contribute to
this platform.
4. To maintain and develop this framework as a single, easily
installable bundle.
Package Organization
====================
The neuroimaging package contains the following subpackages and modules:
.. packagetree::
:style: UML
"""
__docformat__ = 'restructuredtext en'
from version import version as __version__
# FIXME
#__revision__ = int("$Rev$".split()[-2])
__status__ = 'alpha'
__date__ = "$LastChangedDate$"
__url__ = 'http://neuroimaging.scipy.org'
packages = (
'neuroimaging',
'neuroimaging.algorithms',
'neuroimaging.algorithms.tests',
'neuroimaging.algorithms.statistics',
'neuroimaging.algorithms.statistics.tests',
'neuroimaging.core',
'neuroimaging.core.image',
'neuroimaging.core.image.tests',
'neuroimaging.core.reference',
'neuroimaging.core.reference.tests',
'neuroimaging.io',
'neuroimaging.io.tests',
'neuroimaging.modalities',
'neuroimaging.modalities.fmri',
'neuroimaging.modalities.fmri.tests',
'neuroimaging.modalities.fmri.fmristat',
'neuroimaging.modalities.fmri.fmristat.tests',
'neuroimaging.utils',
'neuroimaging.utils.tests',
'neuroimaging.utils.tests.data',
'neuroimaging.testing')
def import_from(modulename, objectname):
"""Import and return objectname from modulename."""
module = __import__(modulename, {}, {}, (objectname,))
try:
return getattr(module, objectname)
except AttributeError:
return None
from neuroimaging.testing import Tester
test = Tester().test
bench = Tester().bench
|
Python
| 0
|
@@ -2243,16 +2243,17 @@
Tester%0A
+%0A
test = T
@@ -2288,8 +2288,434 @@
).bench%0A
+%0Adef _test_local_install():%0A %22%22%22 Warn the user that running with neuroimaging being%0A imported locally is a bad idea.%0A %22%22%22%0A import os%0A if os.getcwd() == os.sep.join(%0A os.path.abspath(__file__).split(os.sep)%5B:-2%5D):%0A import warnings%0A warnings.warn('Running the tests from the install directory may '%0A 'trigger some failures')%0A%0A_test_local_install()%0A
|
d1b6235413ffd81266e101673facef08c699f37b
|
Update openid.kvform
|
openid/kvform.py
|
openid/kvform.py
|
__all__ = ['seqToKV', 'kvToSeq', 'dictToKV', 'kvToDict']
import types
import logging
class KVFormError(ValueError):
pass
def seqToKV(seq, strict=False):
"""Represent a sequence of pairs of strings as newline-terminated
key:value pairs. The pairs are generated in the order given.
@param seq: The pairs
@type seq: [(str, (unicode|str))]
@return: A string representation of the sequence
@rtype: str
"""
def err(msg):
formatted = 'seqToKV warning: %s: %r' % (msg, seq)
if strict:
raise KVFormError(formatted)
else:
logging.warn(formatted)
lines = []
for k, v in seq:
if isinstance(k, bytes):
k = k.decode('UTF8')
elif not isinstance(k, str):
err('Converting key to string: %r' % k)
k = str(k)
if '\n' in k:
raise KVFormError(
'Invalid input for seqToKV: key contains newline: %r' % (k,))
if ':' in k:
raise KVFormError(
'Invalid input for seqToKV: key contains colon: %r' % (k,))
if k.strip() != k:
err('Key has whitespace at beginning or end: %r' % (k,))
if isinstance(v, bytes):
v = v.decode('UTF8')
elif not isinstance(v, str):
err('Converting value to string: %r' % (v,))
v = str(v)
if '\n' in v:
raise KVFormError(
'Invalid input for seqToKV: value contains newline: %r' % (v,))
if v.strip() != v:
err('Value has whitespace at beginning or end: %r' % (v,))
lines.append(k + ':' + v + '\n')
return ''.join(lines).encode('UTF8')
def kvToSeq(data, strict=False):
"""
After one parse, seqToKV and kvToSeq are inverses, with no warnings::
seq = kvToSeq(s)
        kvToSeq(seqToKV(seq)) == seq
"""
def err(msg):
formatted = 'kvToSeq warning: %s: %r' % (msg, data)
if strict:
raise KVFormError(formatted)
else:
logging.warn(formatted)
lines = data.split('\n')
if lines[-1]:
err('Does not end in a newline')
else:
del lines[-1]
pairs = []
line_num = 0
for line in lines:
line_num += 1
# Ignore blank lines
if not line.strip():
continue
pair = line.split(':', 1)
if len(pair) == 2:
k, v = pair
k_s = k.strip()
if k_s != k:
fmt = ('In line %d, ignoring leading or trailing '
'whitespace in key %r')
err(fmt % (line_num, k))
if not k_s:
err('In line %d, got empty key' % (line_num,))
v_s = v.strip()
if v_s != v:
fmt = ('In line %d, ignoring leading or trailing '
'whitespace in value %r')
err(fmt % (line_num, v))
pairs.append((k_s.decode('UTF8'), v_s.decode('UTF8')))
else:
err('Line %d does not contain a colon' % line_num)
return pairs
def dictToKV(d):
seq = list(d.items())
seq.sort()
return seqToKV(seq)
def kvToDict(s):
return dict(kvToSeq(s))
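# A round-trip sketch per the docstring above (hedged; values illustrative):
#   seq = [('mode', 'id_res')]
#   seqToKV(seq)                  # -> b'mode:id_res\n'
#   kvToSeq(seqToKV(seq)) == seq  # inverses after one parse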
|
Python
| 0.000001
|
@@ -1,8 +1,24 @@
+import logging%0A%0A
__all__
@@ -71,36 +71,8 @@
'%5D%0A%0A
-import types%0Aimport logging%0A
%0Acla
@@ -109,16 +109,17 @@
pass%0A%0A
+%0A
def seqT
@@ -695,35 +695,36 @@
k = k.decode('
-UTF
+utf-
8')%0A elif
@@ -1679,16 +1679,18 @@
de('
-UTF
+utf-
8')%0A%0A
+%0A
def
@@ -2053,32 +2053,102 @@
arn(formatted)%0A%0A
+ if isinstance(data, bytes):%0A data = bytes.decode(%22utf-8%22)%0A%0A
lines = data
@@ -3036,43 +3036,13 @@
(k_s
-.decode('UTF8'), v_s.decode('UTF8')
+, v_s
))%0A
@@ -3136,16 +3136,17 @@
pairs%0A%0A
+%0A
def dict
@@ -3158,49 +3158,8 @@
d):%0A
- seq = list(d.items())%0A seq.sort()%0A
@@ -3174,19 +3174,34 @@
eqToKV(s
-eq)
+orted(d.items()))%0A
%0A%0Adef kv
|
0b6ced2e048d4538db68abe356b8a4719a830fa0
|
Check the needed env vars are provided to the backfill script
|
backfill.py
|
backfill.py
|
#!/usr/bin/env python
import json
from os import environ
import boto3
bucket = boto3.resource('s3').Bucket(environ.get('AWS_S3_BUCKET'))
queue = boto3.resource('sqs').Queue(environ.get('AWS_SQS_URL'))
items_queued = 0
for item in bucket.objects.all():
if not item.key.endswith('.json.gz'):
continue
queue.send_message(
MessageBody=json.dumps({
'Message': json.dumps({
's3Bucket': environ.get('AWS_S3_BUCKET'),
's3ObjectKey': [item.key]
})
})
)
items_queued += 1
print('Done! {} items were backfilled'.format(items_queued))
|
Python
| 0
|
@@ -67,16 +67,276 @@
boto3%0A%0A%0A
+if not all(%5Benviron.get('AWS_S3_BUCKET'), environ.get('AWS_SQS_URL')%5D):%0A print('You have to specify the AWS_S3_BUCKET and AWS_SQS_URL environment variables.')%0A print('Check the %22Backfilling data%22 section of the README file for more info.')%0A exit(1)%0A%0A%0A
bucket =
|
d16cdad0fd12dcab26d670e83a746fede085d085
|
fix the test .tac to use new createService arguments
|
opennsa-test.tac
|
opennsa-test.tac
|
#!/usr/bin/env python # syntax highlightning
import os, sys
from twisted.python import log
from twisted.python.log import ILogObserver
from twisted.application import internet, service
from opennsa import setup, registry, logging
from opennsa.backends import dud
from opennsa.topology import gole
DEBUG = False
PROFILE = False
TOPOLOGY = 'test-topology.owl'
MAPPING = 'test-mapping.nrm'
HOST = 'localhost'
SERVICES = [ ('Aruba', 9080), ('Bonaire', 9081), ('Curacao',9082) ]
WSDL_DIR = os.path.join(os.getcwd(), 'wsdl')
## Log messages before "real" logging infrastructure comes up
#earlyObserver = logging.EarlyObserver()
#log.startLoggingWithObserver(earlyObserver.emit, setStdout=0)
#log.defaultObserver = earlyObserver # This will make the log system plug it out when the real logging starts
logObserver = logging.DebugLogObserver(sys.stdout, DEBUG, PROFILE)
application = service.Application("OpenNSA")
application.setComponent(ILogObserver, logObserver.emit)
topo, _ = gole.parseTopology( [ open(TOPOLOGY) ], open(MAPPING))
for network, port in SERVICES:
backend = dud.DUDNSIBackend(network)
es = registry.ServiceRegistry()
factory = setup.createService(network, topo, backend, es, HOST, port, WSDL_DIR)
internet.TCPServer(port, factory, interface='localhost').setServiceParent(application)
|
Python
| 0.000001
|
@@ -1114,44 +1114,8 @@
rk)%0A
- es = registry.ServiceRegistry()%0A
@@ -1156,14 +1156,8 @@
ork,
- topo,
bac
@@ -1162,18 +1162,20 @@
ackend,
-es
+topo
, HOST,
|
f662fafd2f69d64306ab89a1360a3cadda072b59
|
clean up pylint ignores to be more specific
|
functional/util.py
|
functional/util.py
|
# pylint: disable=no-name-in-module,unused-import,too-many-instance-attributes,too-many-arguments, too-few-public-methods
import collections
import six
import builtins
if six.PY2:
from itertools import ifilterfalse as filterfalse
def dict_item_iter(dictionary):
return dictionary.viewitems()
else:
from itertools import filterfalse
def dict_item_iter(dictionary):
return dictionary.items()
def is_primitive(val):
"""
Checks if the passed value is a primitive type.
>>> is_primitive(1)
True
>>> is_primitive("abc")
True
>>> is_primitive(True)
True
>>> is_primitive({})
False
>>> is_primitive([])
False
>>> is_primitive(set([]))
:param val: value to check
:return: True if value is a primitive, else False
"""
return isinstance(val, str) \
or isinstance(val, bool) \
or isinstance(val, six.string_types + (six.text_type,)) \
or isinstance(val, six.integer_types) \
or isinstance(val, float) \
or isinstance(val, complex) \
or isinstance(val, bytes)
def is_iterable(val):
if isinstance(val, list):
return False
return isinstance(val, collections.Iterable)
class LazyFile(object):
def __init__(self, path, delimiter=None, mode='r', buffering=-1, encoding=None,
errors=None, newline=None):
self.path = path
self.delimiter = delimiter
self.mode = mode
self.buffering = buffering
self.encoding = encoding
self.errors = errors
self.newline = newline
self.file = None
def __iter__(self):
if self.file is not None:
self.file.close()
self.file = builtins.open(self.path, mode=self.mode, buffering=self.buffering,
encoding=self.encoding, errors=self.errors, newline=self.newline)
return self
def next(self):
line = self.file.readline()
if line:
return line
else:
self.file.close()
raise StopIteration
def __next__(self):
return self.next()
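# A usage sketch (hypothetical path): each call to __iter__ reopens the
# underlying file, so one LazyFile instance can be consumed more than once.
#   lines = LazyFile('data.txt')
#   first_pass = list(lines)
#   second_pass = list(lines)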
|
Python
| 0
|
@@ -46,80 +46,8 @@
port
-,too-many-instance-attributes,too-many-arguments, too-few-public-methods
%0A%0Aim
@@ -1175,24 +1175,98 @@
le(object):%0A
+ # pylint: disable=too-few-public-methods,too-many-instance-attributes%0A
def __in
@@ -1382,16 +1382,61 @@
=None):%0A
+ # pylint: disable=too-many-arguments%0A
|
f5679597865a9dd9607fda0a25c7ed31299c45c8
|
enforce backend in tests; clear cache in tests
|
tests/cupy_tests/core_tests/test_raw.py
|
tests/cupy_tests/core_tests/test_raw.py
|
import os
import pytest
import shutil
from tempfile import mkdtemp
import unittest
import cupy
from cupy import testing
_test_source1 = r'''
extern "C" __global__
void test_sum(const float* x1, const float* x2, float* y) {
int tid = blockDim.x * blockIdx.x + threadIdx.x;
y[tid] = x1[tid] + x2[tid];
}
'''
# test compiling and invoking multiple kernels in one single .cubin
_test_source2 = r'''
extern "C"{
__global__ void test_sum(const float* x1, const float* x2, float* y, \
unsigned int N)
{
unsigned int tid = blockDim.x * blockIdx.x + threadIdx.x;
if (tid < N)
{
y[tid] = x1[tid] + x2[tid];
}
}
__global__ void test_multiply(const float* x1, const float* x2, float* y, \
unsigned int N)
{
unsigned int tid = blockDim.x * blockIdx.x + threadIdx.x;
if (tid < N)
{
y[tid] = x1[tid] * x2[tid];
}
}
}
'''
# test C macros
_test_source3 = r'''
#ifndef PRECISION
#define PRECISION 2
#endif
#if PRECISION == 2
#define TYPE double
#elif PRECISION == 1
#define TYPE float
#else
#error precision not supported
#endif
extern "C"{
__global__ void test_sum(const TYPE* x1, const TYPE* x2, TYPE* y, \
unsigned int N)
{
unsigned int tid = blockDim.x * blockIdx.x + threadIdx.x;
if (tid < N)
{
y[tid] = x1[tid] + x2[tid];
}
}
__global__ void test_multiply(const TYPE* x1, const TYPE* x2, TYPE* y, \
unsigned int N)
{
unsigned int tid = blockDim.x * blockIdx.x + threadIdx.x;
if (tid < N)
{
y[tid] = x1[tid] * x2[tid];
}
}
}
'''
if 'CUPY_CACHE_DIR' in os.environ:
_old_cache_dir = os.environ['CUPY_CACHE_DIR']
_is_cache_env_var_set = True
else:
_old_cache_dir = os.path.expanduser('~/.cupy/kernel_cache')
_is_cache_env_var_set = False
_test_cache_dir = None
@testing.parameterize(*testing.product({
'backend': ('nvrtc', 'nvcc'),
}))
class TestRaw(unittest.TestCase):
def setUp(self):
global _test_cache_dir
_test_cache_dir = mkdtemp()
os.environ['CUPY_CACHE_DIR'] = _test_cache_dir
self.kern = cupy.RawKernel(_test_source1, 'test_sum',
backend=self.backend)
self.mod2 = cupy.RawModule(_test_source2, backend=self.backend)
self.mod3 = cupy.RawModule(_test_source3, ("-DPRECISION=2",),
backend=self.backend)
def tearDown(self):
# To avoid cache interference, we remove cached files after every test,
# and restore users' old setting
global _test_cache_dir
shutil.rmtree(_test_cache_dir)
if _is_cache_env_var_set:
os.environ['CUPY_CACHE_DIR'] = _old_cache_dir
else:
os.environ.pop('CUPY_CACHE_DIR')
def _helper(self, kernel, dtype):
N = 10
x1 = cupy.arange(N**2, dtype=dtype).reshape(N, N)
x2 = cupy.ones((N, N), dtype=dtype)
y = cupy.zeros((N, N), dtype=dtype)
kernel((N,), (N,), (x1, x2, y, N**2))
return x1, x2, y
def test_basic(self):
x1, x2, y = self._helper(self.kern, cupy.float32)
assert cupy.allclose(y, x1 + x2)
def test_kernel_attributes(self):
attrs = self.kern.attributes
for attribute in ['binary_version',
'cache_mode_ca',
'const_size_bytes',
'local_size_bytes',
'max_dynamic_shared_size_bytes',
'max_threads_per_block',
'num_regs',
'preferred_shared_memory_carveout',
'ptx_version',
'shared_size_bytes']:
assert attribute in attrs
assert self.kern.num_regs > 0
assert self.kern.max_threads_per_block > 0
assert self.kern.shared_size_bytes == 0
def test_module(self):
module = self.mod2
ker_sum = module.get_function('test_sum')
ker_times = module.get_function('test_multiply')
x1, x2, y = self._helper(ker_sum, cupy.float32)
assert cupy.allclose(y, x1 + x2)
x1, x2, y = self._helper(ker_times, cupy.float32)
assert cupy.allclose(y, x1 * x2)
def test_compiler_flag(self):
module = self.mod3
ker_sum = module.get_function('test_sum')
ker_times = module.get_function('test_multiply')
x1, x2, y = self._helper(ker_sum, cupy.float64)
assert cupy.allclose(y, x1 + x2)
x1, x2, y = self._helper(ker_times, cupy.float64)
assert cupy.allclose(y, x1 * x2)
def test_invalid_compiler_flag(self):
with pytest.raises(cupy.cuda.compiler.CompileException) as ex:
cupy.RawModule(_test_source3, ("-DPRECISION=3",))
assert 'precision not supported' in str(ex.value)
def test_module_load_failure(self):
# in principle this test is better done in test_driver.py, but
# this error is more likely to appear when using RawModule, so
# let us do it here
with pytest.raises(cupy.cuda.driver.CUDADriverError) as ex:
cupy.RawModule(os.path.expanduser("~/this_does_not_exist.cubin"))
assert 'CUDA_ERROR_FILE_NOT_FOUND' in str(ex.value)
def test_get_function_failure(self):
# in principle this test is better done in test_driver.py, but
# this error is more likely to appear when using RawModule, so
# let us do it here
with pytest.raises(cupy.cuda.driver.CUDADriverError) as ex:
self.mod2.get_function("no_such_kernel")
assert 'CUDA_ERROR_NOT_FOUND' in str(ex.value)
|
Python
| 0
|
@@ -114,16 +114,47 @@
testing%0A
+from cupy.cuda import compiler%0A
%0A%0A_test_
@@ -2873,16 +2873,67 @@
HE_DIR')
+%0A compiler._empty_file_preprocess_cache = %7B%7D
%0A%0A de
@@ -4952,16 +4952,65 @@
ION=3%22,)
+,%0A backend=self.backend
)%0A
@@ -5416,16 +5416,65 @@
.cubin%22)
+,%0A backend=self.backend
)%0A
|
2ed48a7ac268f3de781b77843420343452e012f6
|
Fix to DTypeLike
|
cupyx/jit/_cuda_types.py
|
cupyx/jit/_cuda_types.py
|
from typing import Mapping, Optional, Sequence, Union, TYPE_CHECKING
import numpy
import numpy.typing as npt
import cupy
from cupy._core._scalar import get_typename
if TYPE_CHECKING:
from cupyx.jit._internal_types import Data
# Base class for cuda types.
class TypeBase:
def __str__(self) -> str:
raise NotImplementedError
def declvar(self, x: str, init: 'Data') -> str:
if init is None:
return f'{self} {x}'
return f'{self} {x} = {init.code}'
def assign(self, var: 'Data', value: 'Data') -> str:
return f'{var.code} = {value.code}'
class Void(TypeBase):
def __init__(self) -> None:
pass
def __str__(self) -> str:
return 'void'
class Scalar(TypeBase):
def __init__(self, dtype: npt.DTypeLike) -> None:
self.dtype = numpy.dtype(dtype)
def __str__(self) -> str:
dtype = self.dtype
if dtype == numpy.float16:
# For the performance
dtype = numpy.dtype('float32')
return get_typename(dtype)
def __eq__(self, other: TypeBase) -> bool: # type: ignore[override]
return isinstance(other, Scalar) and self.dtype == other.dtype
def __hash__(self) -> int:
return hash(self.dtype)
class PtrDiff(Scalar):
def __init__(self) -> None:
super().__init__('q')
def __str__(self) -> str:
return 'ptrdiff_t'
class ArrayBase(TypeBase):
def __init__(self, child_type: TypeBase, ndim: int) -> None:
assert isinstance(child_type, TypeBase)
self.child_type = child_type
self.ndim = ndim
class CArray(ArrayBase):
def __init__(
self,
dtype: numpy.dtype,
ndim: int,
is_c_contiguous: bool,
index_32_bits: bool,
) -> None:
self.dtype = dtype
self._c_contiguous = is_c_contiguous
self._index_32_bits = index_32_bits
super().__init__(Scalar(dtype), ndim)
@classmethod
def from_ndarray(cls, x: cupy.ndarray) -> 'CArray':
return CArray(x.dtype, x.ndim, x._c_contiguous, x._index_32_bits)
def __str__(self) -> str:
ctype = get_typename(self.dtype)
c_contiguous = get_cuda_code_from_constant(self._c_contiguous, bool_)
index_32_bits = get_cuda_code_from_constant(self._index_32_bits, bool_)
return f'CArray<{ctype}, {self.ndim}, {c_contiguous}, {index_32_bits}>'
def __eq__(self, other: TypeBase) -> bool: # type: ignore[override]
return (
isinstance(other, CArray) and
self.dtype == other.dtype and
self.ndim == other.ndim and
self._c_contiguous == other._c_contiguous and
self._index_32_bits == other._index_32_bits
)
def __hash__(self) -> int:
return hash(
(self.dtype, self.ndim, self._c_contiguous, self._index_32_bits))
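# An illustration of the rendered template type (hedged; typename as produced
# by get_typename): CArray(numpy.dtype('float32'), 2, True, True) prints as
#   CArray<float, 2, true, true>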
class SharedMem(ArrayBase):
def __init__(
self,
child_type: TypeBase,
size: Optional[int],
alignment: Optional[int] = None,
) -> None:
if not (isinstance(size, int) or size is None):
            raise TypeError('size of shared_memory must be integer or `None`')
if not (isinstance(alignment, int) or alignment is None):
            raise TypeError('alignment must be integer or `None`')
self._size = size
self._alignment = alignment
super().__init__(child_type, 1)
def declvar(self, x: str, init: 'Data') -> str:
assert init is None
if self._alignment is not None:
code = f'__align__({self._alignment})'
else:
code = ''
if self._size is None:
code = f'extern {code} __shared__ {self.child_type} {x}[]'
else:
code = f'{code} __shared__ {self.child_type} {x}[{self._size}]'
return code
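# A declaration sketch (hedged): SharedMem(int32, 256).declvar('buf', None)
# yields ' __shared__ int buf[256]', while size=None produces the dynamic
# 'extern ... __shared__ ... []' form.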
class Ptr(ArrayBase):
def __init__(self, child_type: TypeBase) -> None:
super().__init__(child_type, 1)
def __str__(self) -> str:
return f'{self.child_type}*'
class Tuple(TypeBase):
def __init__(self, types: Sequence[TypeBase]) -> None:
self.types = types
def __str__(self) -> str:
types = ', '.join([str(t) for t in self.types])
return f'thrust::tuple<{types}>'
def __eq__(self, other: TypeBase) -> bool: # type: ignore[override]
return isinstance(other, Tuple) and self.types == other.types
void: Void = Void()
bool_: Scalar = Scalar(numpy.bool_)
int32: Scalar = Scalar(numpy.int32)
uint32: Scalar = Scalar(numpy.uint32)
uint64: Scalar = Scalar(numpy.uint64)
class Dim3(TypeBase):
"""
An integer vector type based on uint3 that is used to specify dimensions.
Attributes:
x (uint32)
y (uint32)
z (uint32)
"""
def x(self, code: str) -> 'Data':
from cupyx.jit import _internal_types # avoid circular import
return _internal_types.Data(f'{code}.x', uint32)
def y(self, code: str) -> 'Data':
from cupyx.jit import _internal_types # avoid circular import
return _internal_types.Data(f'{code}.y', uint32)
def z(self, code: str) -> 'Data':
from cupyx.jit import _internal_types # avoid circular import
return _internal_types.Data(f'{code}.z', uint32)
def __str__(self) -> str:
return 'dim3'
dim3: Dim3 = Dim3()
_suffix_literals_dict: Mapping[str, str] = {
'float64': '',
'float32': 'f',
'int64': 'll',
'int32': '',
'uint64': 'ull',
'uint32': 'u',
'bool': '',
}
def get_cuda_code_from_constant(
x: Union[bool, int, float, complex],
ctype: Scalar,
) -> str:
dtype = ctype.dtype
suffix_literal = _suffix_literals_dict.get(dtype.name)
if suffix_literal is not None:
s = str(x).lower()
return f'{s}{suffix_literal}'
ctype_str = str(ctype)
if dtype.kind == 'c':
return f'{ctype_str}({x.real}, {x.imag})'
if ' ' in ctype_str:
return f'({ctype_str}){x}'
return f'{ctype_str}({x})'
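# A few illustrative mappings (hedged; suffixes per the table above):
#   get_cuda_code_from_constant(1.0, Scalar(numpy.float32))  # -> '1.0f'
#   get_cuda_code_from_constant(3, Scalar(numpy.int64))      # -> '3ll'
#   get_cuda_code_from_constant(True, bool_)                 # -> 'true'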
|
Python
| 0.000015
|
@@ -1687,25 +1687,27 @@
dtype: n
-umpy.dtyp
+pt.DTypeLik
e,%0A
@@ -1828,21 +1828,34 @@
dtype =
-dtype
+numpy.dtype(dtype)
%0A
|
dc3ef62120b9199a798243f4d3e5716838c9a8d5
|
Delete removed files from S3 on upload
|
staticpy/s3_uploader.py
|
staticpy/s3_uploader.py
|
from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.s3.key import Key
import os
import Queue
import threading
class BulkUploader:
'''Multi-Threaded s3 directory BulkUploader
    Walks a directory, uploading every file to s3; you can apply filters/transforms to file paths.
    On start we spawn up to max_threads threads that each upload independently.
    Before uploading a file we verify that the local version is different from the version already
    in s3; this avoids unneeded uploads.
params:
aws_keys: ('access_key', 'secret_key')
bucket: amazon s3 bucket name
file_filter: a function that accepts a file path and returns true if you want it uploaded
        key_transform: a function that accepts a file path and returns the appropriate s3 key
max_threads: the max number of threads you want to spawn
'''
def __init__(self, aws_keys, bucket, file_filter = None, key_transform = None, max_threads = 10):
self.max_threads = max_threads
self.aws_keys = aws_keys
self.bucket = bucket
self.key_transform = key_transform
self.file_filter = file_filter
def start(self, path):
#gets things started
self.path = path
queue = Queue.Queue()
for (dir_path, dir_name, file_names) in os.walk(path, followlinks = True):
for file_name in file_names:
file_path = os.path.join(dir_path, file_name)
if not self.file_filter or self.file_filter(file_path):
key = self.transform(file_path)
queue.put((key, file_path))
threads = []
for i in range(self.max_threads):
thread = Worker(self.aws_keys, self.bucket, queue)
threads.append(thread)
thread.start()
        # wait until everything is done
queue.join()
for i in threads:
i.join()
def transform(self, path):
        '''Standard transform from path to key
Calls your custom transform if it exists, strips off the base directory path from
the file path and replaces \ with / so they are valid urls.
params:
path: path to file
returns:
key: the s3 key
'''
if self.key_transform:
path = self.key_transform(path)
path = path[len(self.path)+1::]
return path.replace('\\', '/')
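# A usage sketch (hypothetical credentials, bucket, and path):
#   uploader = BulkUploader(('ACCESS', 'SECRET'), 'my-bucket',
#                           file_filter=lambda p: not p.endswith('.tmp'))
#   uploader.start('/srv/site/build')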
class Worker(threading.Thread):
'''A threaded s3 upload Worker
    Uploads files to s3 that are passed to it through the queue; returns
once the queue is empty.
Won't upload a specified file if the local MD5 hash is the same as the MD5
of the file already on s3.
params:
aws_keys: ('access_key', 'secret_key')
bucket: amazon s3 bucket name
        queue: the Queue to pull the (key, file_path) tuples from
'''
def __init__(self, aws_keys, bucket, queue):
self.queue = queue
self.aws_keys = aws_keys
self.bucket = bucket
threading.Thread.__init__(self)
self.daemon = True
def run(self):
self.bucket = Bucket(
connection = S3Connection(*self.aws_keys),
name = self.bucket
)
for key, file_path in IterQueue(self.queue):
self.upload(key, file_path)
def upload(self, key, file_path):
s3_key = self.bucket.get_key(key)
if s3_key:
old_hash = s3_key.etag.strip('"')
else:
s3_key = Key(self.bucket)
s3_key.key = key
old_hash = None
with open(file_path) as fh:
new_hash, _ = s3_key.compute_md5(fh)
if new_hash == old_hash:
print "File %s unchanged" % key
else:
print "Uploading: %s " % key
s3_key.set_contents_from_file(fh)
class IterQueue:
'''Iterate through a Queue.Queue instance
    This allows for easy iteration through a queue. You don't need
    to worry about setting anything up. Runs until the queue is empty.
Calls Queue.task_done when you get the next item. If you exit the loop
early you must call task_done yourself on the queue or queue.join will block.
use:
for i in IterQueue(queue):
do_something(i)
'''
def __init__(self, queue):
self.queue = queue
def __iter__(self):
self.task_in_progress = False
return self
def next(self):
if self.task_in_progress:
self.queue.task_done()
try:
r = self.queue.get_nowait()
self.task_in_progress = True
return r
except:
self.task_in_progress = False
raise StopIteration
|
Python
| 0
|
@@ -1334,24 +1334,53 @@
eue.Queue()%0A
+ current_keys = set()%0A
for
@@ -1701,34 +1701,124 @@
-queue.put((key, file_path)
+current_keys.add(key)%0A queue.put((key, file_path))%0A%0A self.delete_removed_keys(current_keys
)%0A
@@ -2644,16 +2644,368 @@
, '/')%0A%0A
+ def delete_removed_keys(self, current_keys):%0A conn = S3Connection(*self.aws_keys)%0A bucket = conn.get_bucket(self.bucket)%0A old_keys = set(%5Bx.name for x in bucket.list()%5D)%0A to_delete = old_keys - current_keys%0A for i in to_delete:%0A print 'Deleting %60%25s%60 from S3' %25 i%0A bucket.delete_keys(to_delete)%0A%0A
class Wo
|
4e74723aac53956fb0316ae0d438da623de133d5
|
Add and update tests for video renderer
|
tests/extensions/video/test_renderer.py
|
tests/extensions/video/test_renderer.py
|
import pytest
from mfr.core.provider import ProviderMetadata
from mfr.extensions.video import VideoRenderer
@pytest.fixture
def metadata():
return ProviderMetadata('test', '.mp4', 'text/plain', '1234', 'http://wb.osf.io/file/test.mp4?token=1234')
@pytest.fixture
def file_path():
return '/tmp/test.mp4'
@pytest.fixture
def url():
return 'http://osf.io/file/test.mp4'
@pytest.fixture
def assets_url():
return 'http://mfr.osf.io/assets'
@pytest.fixture
def export_url():
return 'http://mfr.osf.io/export?url=' + url()
@pytest.fixture
def renderer(metadata, file_path, url, assets_url, export_url):
return VideoRenderer(metadata, file_path, url, assets_url, export_url)
class TestVideoRenderer:
def test_render_video(self, renderer, url):
body = renderer.render()
assert '<video controls' in body
assert 'src="{}"'.format(metadata().download_url) in body
def test_render_video_file_required(self, renderer):
assert renderer.file_required is False
def test_render_video_cache_result(self, renderer):
assert renderer.cache_result is False
|
Python
| 0
|
@@ -55,17 +55,16 @@
etadata%0A
-%0A
from mfr
@@ -201,16 +201,44 @@
'1234',
+%0A
'http:/
@@ -940,16 +940,99 @@
in body
+%0A assert '%3Cstyle%3Ebody%7Bmargin:0;padding:0;%7D%3C/style%3E' in ''.join(body.split())
%0A%0A de
|
e6024d35e1b6717ba360fe98bc55ca1d0c9b250e
|
Add optional hide parameter to send_mail
|
stalker.py
|
stalker.py
|
#!/usr/bin/python3
# This file is part of the URL Stalker.
# URL Stalker is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# URL Stalker is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with URL Stalker. If not, see <http://www.gnu.org/licenses/>.
# SETTINGS
# Customize these strings for your setup.
stalked_file = ""
saved_name = ""
wait_time = 60
email_address = ""
email_imap_server = ""
email_smtp_server = ""
email_password = ""
email_subject = ""
sysadmin_name = ""
sysadmin_email = ""
import smtplib, os
from email.mime.multipart import MIMEMultipart
from email.mime.base import MIMEBase
from email.mime.text import MIMEText
from email.utils import formatdate
from email import encoders as Encoders
def send_mail(send_from, send_to, subject, text, files=[], server=email_smtp_server):
assert type(send_to)==list
assert type(files)==list
msg = MIMEMultipart()
msg['From'] = send_from
# Basically BCC the messages by leaving this out.
# msg['To'] = ', '.join(send_to)
msg['Date'] = formatdate(localtime=True)
msg['Subject'] = subject
msg.attach( MIMEText(text) )
for f in files:
part = MIMEBase('application', "octet-stream")
part.set_payload( open(f,"rb").read() )
Encoders.encode_base64(part)
part.add_header('Content-Disposition', 'attachment; filename="%s"' % os.path.basename(f))
msg.attach(part)
smtp = smtp = smtplib.SMTP_SSL(server, 465)
smtp.login(send_from, email_password)
smtp.sendmail(send_from, send_to, msg.as_string())
smtp.close()
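# A call sketch (hypothetical addresses; the files argument is optional):
#   send_mail(email_address, ['sub@example.com'], email_subject + ' - Update!',
#             'See attached', files=['latest.html'])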
import imaplib
import email
def get_mail_subjects(server, user, password):
def extract_body(payload):
if isinstance(payload,str):
return payload
else:
return '\n'.join([extract_body(part.get_payload()) for part in payload])
emails = []
conn = imaplib.IMAP4_SSL(server, 993)
conn.login(user, password)
conn.select()
typ, data = conn.search(None, 'UNSEEN')
try:
for num in data[0].split():
typ, msg_data = conn.fetch(num, '(RFC822)')
for response_part in msg_data:
if isinstance(response_part, tuple):
msg = email.message_from_string(response_part[1].decode())
s = msg['from']
emails.append((msg['subject'], s[s.rfind("<")+1:s.rfind(">")]))
typ, response = conn.store(num, '+FLAGS', r'(\Seen)')
finally:
try:
conn.close()
except:
pass
conn.logout()
return emails
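# A polling sketch (hedged): each unseen message yields a (subject, sender)
# pair, which the main loop below treats as a command.
#   for subject, sender in get_mail_subjects(email_imap_server, email_address,
#                                            email_password):
#       print(subject.lower().strip(), sender)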
import hashlib
from functools import partial
def md5sum(filename):
with open(filename, mode='rb') as f:
d = hashlib.md5()
for buf in iter(partial(f.read, 128), b''):
d.update(buf)
return d.hexdigest()
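# A quick sketch (hypothetical file name): reading in 128-byte chunks keeps
# memory use flat even for large files.
#   digest = md5sum('saved.html')  # 32-character hex string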
import urllib.request
import time
def main():
currenthash = ""
currentfilename = ""
subscribers = []
# Retrieve list of users from file, and current hash from another file
try:
with open('users.txt', 'r') as f:
subscribers = f.readlines()
subscribers = [l.strip() for l in subscribers]
except FileNotFoundError:
pass
try:
with open('hash.txt', 'r') as f:
currenthash = f.readline().strip()
currentfilename = f.readline().strip()
except FileNotFoundError:
pass
try:
while True:
# Download the file
try:
urllib.request.urlretrieve(stalked_file, saved_name)
newhash = md5sum(saved_name)
if newhash != currenthash:
currenthash = newhash
# Keep a copy
new_name = time.strftime("%Y%m%d-%H%M_", time.gmtime()) + saved_name
currentfilename = new_name
os.rename(saved_name, new_name)
# And send everyone a copy
print("Sending emails!")
send_mail(email_address, subscribers, email_subject + " - Update!", "", files=[new_name])
else:
os.unlink(saved_name)
except:
pass
for i in range(10):
try:
# Check for new subscribers
tasks = get_mail_subjects(email_imap_server, email_address, email_password)
for (task, address) in tasks:
task = task.lower().strip()
if task == "unsubscribe":
print("Unsubscribing", address)
subscribers = list(filter(lambda a: a != address, subscribers))
send_mail(email_address, [address], email_subject + " - Unsubscribed!",
"You are now unsubscribed!")
elif task == "subscribe":
print("Subscribing", address)
subscribers.append(address)
send_mail(email_address, [address], email_subject + " - Subscribed!",
"You are now subscribed! Send a similar message saying UNSUBSCRIBE to cancel.\nSysadmin:\n"+sysadmin_name+'\n'+sysadmin_email,
files=[currentfilename])
else:
send_mail(email_address, [address], email_subject + " - Huh?", "Valid subjects are SUBSCRIBE and UNSUBSCRIBE. Messages must have an empty body.")
except:
pass
# And sleep
time.sleep(wait_time)
except KeyboardInterrupt:
with open('hash.txt', 'w') as f:
f.write(currenthash + '\n')
f.write(currentfilename + '\n')
with open('users.txt', 'w') as f:
for address in subscribers:
f.write(address + '\n')
print("Got Ctrl+C, saved info!")
if __name__ == "__main__":
main()
|
Python
| 0
|
@@ -1296,16 +1296,28 @@
p_server
+, hide=False
):%0A a
@@ -1426,16 +1426,37 @@
nd_from%0A
+ if not hide:%0A
# Ba
@@ -1505,17 +1505,19 @@
ut.%0A
-#
+
msg%5B'To
@@ -4613,24 +4613,35 @@
s=%5Bnew_name%5D
+, hide=True
)%0A
|
e1c359fab8c351c77556e34731cd677b4c0cc99b
|
Update mono to 4.0.1
|
packages/mono.py
|
packages/mono.py
|
class MonoPackage (Package):
def __init__ (self):
Package.__init__ (self, 'mono', '4.0.0',
sources = [
'http://download.mono-project.com/sources/%{name}/%{name}-%{version}.tar.bz2'
],
configure_flags = [
'--with-jit=yes',
'--with-ikvm=no',
'--with-mcs-docs=no',
'--with-moonlight=no',
'--enable-nls=no',
'--enable-quiet-build'
]
)
if Package.profile.name == 'darwin' and not Package.profile.m64:
self.configure_flags.extend ([
# fix build on lion, it uses 64-bit host even with -m32
#'--build=i386-apple-darwin11.2.0',
])
# Mono (in libgc) likes to fail to build randomly
self.make = 'for i in 1 2 3 4 5 6 7 8 9 10; do make && break; done'
def install (self):
Package.install (self)
if Package.profile.name == 'darwin':
self.sh ('sed -ie "s/libcairo.so.2/libcairo.2.dylib/" "%{prefix}/etc/mono/config"')
MonoPackage ()
|
Python
| 0
|
@@ -83,17 +83,17 @@
', '4.0.
-0
+1
',%0A%09%09%09so
@@ -546,17 +546,16 @@
-#
'--build
|
366501cd91f2988d793b050297b76a89082f72ef
|
Store stashed files in a private directory.
|
nimp/commands/fileset.py
|
nimp/commands/fileset.py
|
# -*- coding: utf-8 -*-
# Copyright © 2014—2016 Dontnod Entertainment
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
''' Fileset related commands '''
import abc
import os.path
import logging
import nimp.command
import nimp.system
class FilesetCommand(nimp.command.Command):
''' Perforce command base class '''
def __init__(self):
super(FilesetCommand, self).__init__()
def configure_arguments(self, env, parser):
parser.add_argument('fileset',
help = 'Set name to load (e.g. binaries, version...)',
metavar = '<fileset>')
nimp.command.add_common_arguments(parser,
'platform',
'configuration',
'target',
'free_parameters')
return True
def is_available(self, env):
return True, ''
def run(self, env):
files = nimp.system.map_files(env)
files_chain = files
files_chain.load_set(env.fileset)
return self._run_fileset(env, files_chain)
@abc.abstractmethod
def _run_fileset(self, env, file_mapper):
pass
class Fileset(nimp.command.CommandGroup):
''' Fileset related commands '''
def __init__(self):
super(Fileset, self).__init__([_List(),
_Delete(),
_Stash(),
_Unstash(),])
def is_available(self, env):
return True, ''
class _Delete(FilesetCommand):
''' Loads a fileset and delete mapped files '''
def __init__(self):
super(_Delete, self).__init__()
def _run_fileset(self, env, file_mapper):
for path, _ in file_mapper():
logging.info("Deleting %s", path)
nimp.system.force_delete(path)
return True
class _List(FilesetCommand):
''' Loads a fileset and prints mapped files '''
def __init__(self):
super(_List, self).__init__()
def _run_fileset(self, env, file_mapper):
for source, destination in file_mapper():
logging.info("%s => %s", source, destination)
return True
class _Stash(FilesetCommand):
''' Loads a fileset and moves files out of the way '''
def __init__(self):
super(_Stash, self).__init__()
def _run_fileset(self, env, file_mapper):
stash_file = '.stash-%s.txt' % (env.fileset)
nimp.system.force_delete(stash_file)
with open(stash_file, 'w') as stash:
for src, _ in file_mapper():
src = nimp.system.sanitize_path(src)
if not os.path.isfile(src):
continue
if src.endswith('.stash'):
continue
dst = src + '.stash'
os.replace(src, dst)
logging.info('Stashing %s', src)
stash.write('%s\n' % (src))
return True
class _Unstash(FilesetCommand):
''' Restores a stashed fileset; does not actually use the fileset '''
def __init__(self):
super(_Unstash, self).__init__()
def _run_fileset(self, env, file_mapper):
stash_file = '.stash-%s.txt' % (env.fileset)
success = True
with open(stash_file, 'r') as stash:
for dst in stash.readlines():
dst = dst.strip()
src = dst + '.stash'
try:
logging.info('Unstashing %s', dst)
os.replace(src, dst)
except Exception as ex:
logging.error(ex)
success = False
nimp.system.force_delete(stash_file)
return success
|
Python
| 0
|
@@ -1164,16 +1164,31 @@
ort abc%0A
+import hashlib%0A
import o
@@ -3503,34 +3503,142 @@
ash_
-file = '.stash-%25s.txt' %25 (
+dir = env.format('%7Broot_dir%7D/.nimp/stash')%0A nimp.system.safe_makedirs(stash_dir)%0A%0A stash_file = os.path.join(stash_dir,
env.
@@ -3638,33 +3638,32 @@
r, env.fileset)%0A
-%0A
nimp.sys
@@ -3691,16 +3691,17 @@
h_file)%0A
+%0A
@@ -3996,28 +3996,57 @@
-dst = src + '.stash'
+md5 = hashlib.md5(src.encode('utf8')).hexdigest()
%0A
@@ -4066,35 +4066,60 @@
os.replace(src,
-dst
+os.path.join(stash_dir, md5)
)%0A
@@ -4147,16 +4147,22 @@
ing
+%25s as
%25s', src
)%0A
@@ -4157,16 +4157,21 @@
%25s', src
+, md5
)%0A
@@ -4199,15 +4199,23 @@
('%25s
+ %25s
%5Cn' %25 (
+md5,
src)
@@ -4474,34 +4474,96 @@
ash_
-file = '.stash-%25s.txt' %25 (
+dir = env.format('%7Broot_dir%7D/.nimp/stash')%0A stash_file = os.path.join(stash_dir,
env.
@@ -4702,83 +4702,118 @@
-dst = dst.strip()%0A src = dst + '.stash'%0A try:
+try:%0A md5, dst = dst.strip().split()%0A src = os.path.join(stash_dir, md5)
%0A
@@ -4856,17 +4856,28 @@
shing %25s
-'
+ as %25s', md5
, dst)%0A
|
a376b080779c6c09e6c8f4705ef3947645433151
|
Increase timeout for File.upload_from_url test
|
tests/integration/test_api_resources.py
|
tests/integration/test_api_resources.py
|
# coding: utf-8
from __future__ import unicode_literals
try:
import unittest2 as unittest
except ImportError:
import unittest
from tempfile import NamedTemporaryFile
from datetime import datetime
import time
from pyuploadcare import conf
from pyuploadcare.api_resources import File, FileGroup
from .utils import upload_tmp_txt_file, create_file_group
class FileUploadTest(unittest.TestCase):
def setUp(self):
conf.pub_key = 'demopublickey'
self.tmp_config_file = NamedTemporaryFile(mode='w+t')
def tearDown(self):
conf.pub_key = None
self.tmp_config_file.close()
def test_successful_upload(self):
file_ = File.upload(self.tmp_config_file)
self.assertIsInstance(file_, File)
class FileUploadFromUrlTest(unittest.TestCase):
def setUp(self):
conf.pub_key = 'demopublickey'
def tearDown(self):
conf.pub_key = None
def test_get_some_token(self):
file_from_url = File.upload_from_url(
'https://github.com/images/error/angry_unicorn.png'
)
self.assertTrue(file_from_url.token)
def test_successful_upload_from_url(self):
file_from_url = File.upload_from_url(
'https://github.com/images/error/angry_unicorn.png'
)
timeout = 5
time_started = time.time()
while time.time() - time_started < timeout:
status = file_from_url.update_info()['status']
if status in ('success', 'failed', 'error'):
break
time.sleep(1)
self.assertIsInstance(file_from_url.get_file(), File)
class FileInfoTest(unittest.TestCase):
file_ = upload_tmp_txt_file(content='hello')
def setUp(self):
conf.pub_key = 'demopublickey'
conf.secret = 'demoprivatekey'
def tearDown(self):
conf.pub_key = None
conf.secret = None
def test_info_is_non_empty_dict(self):
self.assertIsInstance(self.file_.info(), dict)
self.assertTrue(self.file_.info())
def test_original_filename_starts_with_tmp(self):
self.assertTrue(self.file_.filename().startswith('tmp'))
def test_datetime_stored_is_none(self):
self.assertIsNone(self.file_.datetime_stored())
def test_datetime_removed_is_none(self):
self.assertIsNone(self.file_.datetime_removed())
def test_datetime_uploaded_is_datetime_instance(self):
self.assertIsInstance(self.file_.datetime_uploaded(), datetime)
def test_file_is_not_stored(self):
self.assertFalse(self.file_.is_stored())
def test_file_is_not_removed(self):
self.assertFalse(self.file_.is_removed())
def test_file_is_not_image(self):
self.assertFalse(self.file_.is_image())
def test_file_should_be_ready_in_5_seconds_after_upload(self):
timeout = 5
time_started = time.time()
while time.time() - time_started < timeout:
if self.file_.is_ready():
break
time.sleep(1)
self.file_.update_info()
self.assertTrue(self.file_.is_ready())
def test_file_size_is_5_bytes(self):
# "hello" + new line
self.assertEqual(self.file_.size(), 5)
def test_mime_type_is_application_octet_stream(self):
self.assertEqual(self.file_.mime_type(), 'application/octet-stream')
class FileStoreTest(unittest.TestCase):
def setUp(self):
conf.pub_key = 'demopublickey'
conf.secret = 'demoprivatekey'
self.file_ = upload_tmp_txt_file(content='hello')
def tearDown(self):
conf.pub_key = None
conf.secret = None
def test_successful_store(self):
self.assertFalse(self.file_.is_stored())
self.file_.store()
self.assertTrue(self.file_.is_stored())
class FileDeleteTest(unittest.TestCase):
def setUp(self):
conf.pub_key = 'demopublickey'
conf.secret = 'demoprivatekey'
self.file_ = upload_tmp_txt_file(content='hello')
def tearDown(self):
conf.pub_key = None
conf.secret = None
def test_successful_delete(self):
self.assertFalse(self.file_.is_removed())
self.file_.delete()
self.assertTrue(self.file_.is_removed())
class FileGroupCreateTest(unittest.TestCase):
def setUp(self):
conf.pub_key = 'demopublickey'
def tearDown(self):
conf.pub_key = None
conf.secret = None
def test_successful_create(self):
files = [
upload_tmp_txt_file(content='hello'),
]
group = FileGroup.create(files)
self.assertIsInstance(group, FileGroup)
class FileGroupInfoTest(unittest.TestCase):
group = create_file_group(files_qty=1)
def setUp(self):
conf.pub_key = 'demopublickey'
conf.secret = 'demoprivatekey'
def tearDown(self):
conf.pub_key = None
conf.secret = None
def test_info_is_non_empty_dict(self):
self.assertIsInstance(self.group.info(), dict)
self.assertTrue(self.group.info())
def test_datetime_stored_is_none(self):
self.assertIsNone(self.group.datetime_stored())
def test_datetime_created_is_datetime_instance(self):
self.assertIsInstance(self.group.datetime_created(), datetime)
def test_group_is_not_stored(self):
self.assertFalse(self.group.is_stored())
class FileGroupStoreTest(unittest.TestCase):
def setUp(self):
conf.pub_key = 'demopublickey'
conf.secret = 'demoprivatekey'
self.group = create_file_group(files_qty=1)
def tearDown(self):
conf.pub_key = None
conf.secret = None
def test_successful_store(self):
self.assertFalse(self.group.is_stored())
self.group.store()
self.assertTrue(self.group.is_stored())
|
Python
| 0.000001
|
@@ -1288,33 +1288,34 @@
timeout =
-5
+30
%0A time_st
|
aed0cd2d9f82d5e028f3d98d08f0e27826765a4e
|
update decorators.py
|
stoplight/decorators.py
|
stoplight/decorators.py
|
import inspect
from functools import wraps
import stoplight
from stoplight.exceptions import *
from stoplight.rule import *
def validate(**rules):
"""Validates a function's input using the specified set of rules."""
def _validate(f):
@wraps(f)
def wrapper(*args, **kwargs):
funcparams = inspect.getargspec(f)
# Holds the list of validated values. Only
# these values are passed to the endpoint
outargs = dict()
# Create dictionary that maps parameters passed
# to their values passed
param_values = dict(zip(funcparams.args, args))
# Bring in kwargs so that we can validate as well
param_values.update(kwargs)
for param, rule in rules.items():
# Where can we get the value? It's either
# the getter on the rule or we default
# to verifying parameters.
getval = rule.getter or param_values.get
# Call the validation function, passing
                # the value that was retrieved from the getter
try:
value = getval(param)
# Ensure that this validation function
                    # did not return a function. This
# checks that the user did not forget to
# execute the outer function of a closure
# in the rule declaration
resp = rule.vfunc(value)
if inspect.isfunction(resp):
msg = 'Val func returned a function. Rule={0}'
msg = msg.format(rule.__class__.__name__)
raise ValidationProgrammingError(msg)
# Now validate any nested rules that are part of this
# request.
for nested_rule in rule.nested_rules:
# Note: for 'nested' rules, the value
# passed into the getter is the value
# of the 'super' parameter.
nested_getter = nested_rule.getter
nested_val = nested_getter(value)
try:
resp = nested_rule.vfunc(nested_val) # throws
except ValidationFailed as ex:
nested_rule.errfunc()
val_failure = stoplight.ValidationFailureInfo()
val_failure.function = f
val_failure.parameter = param
val_failure.parameter_value = value
val_failure.rule = rule
val_failure.nested_rule = nested_rule
val_failure.nested_value = nested_val
val_failure.ex = ex
stoplight.failure_dispach(val_failure)
return
if inspect.isfunction(resp):
                            msg = 'Nested rule validation function ' + \
'returned a function'
raise ValidationProgrammingError(msg)
# If this is a param rule, add the
# param to the list of out args
if rule.getter is None:
outargs[param] = value
except ValidationFailed as ex:
rule.errfunc()
val_failure = stoplight.ValidationFailureInfo()
val_failure.function = f
val_failure.parameter = param
val_failure.parameter_value = value
val_failure.rule = rule
val_failure.ex = ex
stoplight.failure_dispach(val_failure)
return
assert funcparams.args[0] == 'self'
# Validation was successful, call the wrapped function
return f(*args, **outargs)
return wrapper
return _validate
def validation_function(func):
"""Decorator for creating a validation function"""
@wraps(func)
def inner(none_ok=False, empty_ok=False):
def wrapper(value, **kwargs):
if none_ok and value is None:
return
if not none_ok and value is None:
msg = 'None value not permitted'
raise ValidationFailed(msg)
if empty_ok and value == '':
return
if not empty_ok and value == '':
msg = 'Empty value not permitted'
raise ValidationFailed(msg)
func(value)
return wrapper
return inner
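# Illustrative sketch (not part of this module): exercising the
# validation_function decorator on its own. The validator below is a
# hypothetical example.
@validation_function
def is_positive(value):
    if int(value) <= 0:
        raise ValidationFailed('value must be positive')
check = is_positive(none_ok=True)  # build a concrete validator
check(None)  # permitted because none_ok=True
check('3')  # passes
try:
    check('-1')
except ValidationFailed:
    pass  # negative values are rejected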
|
Python
| 0.000001
|
@@ -4082,19 +4082,18 @@
args, **
-out
+kw
args)%0A
|
7e627a16c85a9ffa88833176201351908a5458c2
|
Fix (#795)
|
stripe/api_resources/terminal/reader.py
|
stripe/api_resources/terminal/reader.py
|
# File generated from our OpenAPI spec
from __future__ import absolute_import, division, print_function
from stripe import util
from stripe.api_resources.abstract import APIResourceTestHelpers
from stripe.api_resources.abstract import CreateableAPIResource
from stripe.api_resources.abstract import DeletableAPIResource
from stripe.api_resources.abstract import ListableAPIResource
from stripe.api_resources.abstract import UpdateableAPIResource
from stripe.api_resources.abstract import custom_method
from stripe.api_resources.abstract import test_helpers
@test_helpers
@custom_method("cancel_action", http_verb="post")
@custom_method("process_payment_intent", http_verb="post")
@custom_method("process_setup_intent", http_verb="post")
@custom_method("set_reader_display", http_verb="post")
class Reader(
CreateableAPIResource,
DeletableAPIResource,
ListableAPIResource,
UpdateableAPIResource,
):
OBJECT_NAME = "terminal.reader"
def cancel_action(self, idempotency_key=None, **params):
url = self.instance_url() + "/cancel_action"
headers = util.populate_headers(idempotency_key)
self.refresh_from(self.request("post", url, params, headers))
return self
def process_payment_intent(self, idempotency_key=None, **params):
url = self.instance_url() + "/process_payment_intent"
headers = util.populate_headers(idempotency_key)
self.refresh_from(self.request("post", url, params, headers))
return self
def process_setup_intent(self, idempotency_key=None, **params):
url = self.instance_url() + "/process_setup_intent"
headers = util.populate_headers(idempotency_key)
self.refresh_from(self.request("post", url, params, headers))
return self
def set_reader_display(self, idempotency_key=None, **params):
url = self.instance_url() + "/set_reader_display"
headers = util.populate_headers(idempotency_key)
self.refresh_from(self.request("post", url, params, headers))
return self
@custom_method("present_payment_method", http_verb="post")
class TestHelpers(APIResourceTestHelpers):
def present_payment_method(self, idempotency_key=None, **params):
url = self.instance_url() + "/present_payment_method"
headers = util.populate_headers(idempotency_key)
resp = self.resource.request("post", url, params, headers)
stripe_object = util.convert_to_stripe_object(resp)
return stripe_object
|
Python
| 0
|
@@ -2364,14 +2364,51 @@
-resp =
+self.resource.refresh_from(%0A
sel
@@ -2472,58 +2472,8 @@
-stripe_object = util.convert_to_stripe_object(resp
)%0A
@@ -2494,17 +2494,17 @@
rn s
-tripe_object
+elf.resource
%0A
|
f6e570627cf513acb6b2de35a4ecabbadd8cfae7
|
Remove node-based dashboard nav
|
stores/dashboard/app.py
|
stores/dashboard/app.py
|
from django.conf.urls.defaults import patterns, url
from django.utils.translation import ugettext_lazy as _
from oscar.core.application import Application
from oscar.apps.dashboard.nav import register, Node
from oscar.views.decorators import staff_member_required
from stores.dashboard import views
node = Node(_('Store Manager'))
node.add_child(Node(_('Stores'), 'stores-dashboard:store-list'))
node.add_child(Node(_('Store Groups'), 'stores-dashboard:store-group-list'))
register(node, 100)
class StoresDashboardApplication(Application):
name = 'stores-dashboard'
store_list_view = views.StoreListView
store_create_view = views.StoreCreateView
store_update_view = views.StoreUpdateView
store_delete_view = views.StoreDeleteView
store_group_list_view = views.StoreGroupListView
store_group_create_view = views.StoreGroupCreateView
store_group_update_view = views.StoreGroupUpdateView
store_group_delete_view = views.StoreGroupDeleteView
def get_urls(self):
urlpatterns = patterns('',
url(
r'^$',
self.store_list_view.as_view(),
name='store-list'
),
url(
r'^create/$',
self.store_create_view.as_view(),
name='store-create'
),
url(
r'^update/(?P<pk>[\d]+)/$',
self.store_update_view.as_view(),
name='store-update'
),
url(
r'^delete/(?P<pk>[\d]+)/$',
self.store_delete_view.as_view(),
name='store-delete'
),
url(
r'^groups/$',
self.store_group_list_view.as_view(),
name='store-group-list'
),
url(
r'^groups/create/$',
self.store_group_create_view.as_view(),
name='store-group-create'
),
url(
r'^groups/update/(?P<pk>[\d]+)/$',
self.store_group_update_view.as_view(),
name='store-group-update'
),
url(
r'^groups/delete/(?P<pk>[\d]+)/$',
self.store_group_delete_view.as_view(),
name='store-group-delete'
),
)
return self.post_process_urls(urlpatterns)
def get_url_decorator(self, url_name):
return staff_member_required
application = StoresDashboardApplication()
|
Python
| 0
|
@@ -48,64 +48,8 @@
url
-%0Afrom django.utils.translation import ugettext_lazy as _
%0A%0Afr
@@ -97,60 +97,8 @@
ion%0A
-from oscar.apps.dashboard.nav import register, Node%0A
from
@@ -192,204 +192,8 @@
s%0A%0A%0A
-node = Node(_('Store Manager'))%0Anode.add_child(Node(_('Stores'), 'stores-dashboard:store-list'))%0Anode.add_child(Node(_('Store Groups'), 'stores-dashboard:store-group-list'))%0Aregister(node, 100)%0A%0A%0A
clas
|
38de1280ff97d468dcb0214e6c1037ee12d9676b
|
Add another action
|
dashboard/controllers.py
|
dashboard/controllers.py
|
import cherrypy
class Dashboard:
@cherrypy.expose
def index(self):
return "Dashboard!"
|
Python
| 0.000015
|
@@ -97,8 +97,99 @@
board!%22%0A
+ @cherrypy.expose%0A def edit(self, number):%0A return %22Dashboard edit %22 + number%0A
|
cd3f94c7574825812d4e0fea6fda20f9e4432495
|
Test GetApplication
|
tests/registryd/test_root_accessible.py
|
tests/registryd/test_root_accessible.py
|
# Pytest will pick up this module automatically when running just "pytest".
#
# Each test_*() function gets passed test fixtures, which are defined
# in conftest.py. So, a function "def test_foo(bar)" will get a bar()
# fixture created for it.
import pytest
import dbus
from utils import get_property, check_unknown_property_yields_error
ACCESSIBLE_IFACE = 'org.a11y.atspi.Accessible'
ATSPI_ROLE_DESKTOP_FRAME = 14 # see atspi-constants.h
def test_accessible_iface_properties(registry_root, session_manager):
values = [
('Name', 'main'),
('Description', ''),
('Parent', ('', '/org/a11y/atspi/null')),
('ChildCount', 0),
]
for prop_name, expected in values:
assert get_property(registry_root, ACCESSIBLE_IFACE, prop_name) == expected
def test_unknown_property_yields_error(registry_root, session_manager):
check_unknown_property_yields_error(registry_root, ACCESSIBLE_IFACE)
def test_root_get_interfaces(registry_root, session_manager):
ifaces = registry_root.GetInterfaces(dbus_interface=ACCESSIBLE_IFACE)
assert ifaces.signature == 's'
assert 'org.a11y.atspi.Accessible' in ifaces
assert 'org.a11y.atspi.Application' in ifaces
assert 'org.a11y.atspi.Component' in ifaces
assert 'org.a11y.atspi.Socket' in ifaces
def test_root_get_index_in_parent(registry_root, session_manager):
# The registry root is always index 0
assert registry_root.GetIndexInParent(dbus_interface=ACCESSIBLE_IFACE) == 0
def test_root_get_relation_set(registry_root, session_manager):
# The registry root has an empty relation set
assert len(registry_root.GetRelationSet(dbus_interface=ACCESSIBLE_IFACE)) == 0
def test_root_get_role(registry_root, session_manager):
# Hardcoded to ATSPI_ROLE_DESKTOP_FRAME
assert registry_root.GetRole(dbus_interface=ACCESSIBLE_IFACE) == ATSPI_ROLE_DESKTOP_FRAME
def test_root_get_role_name(registry_root, session_manager):
assert registry_root.GetRoleName(dbus_interface=ACCESSIBLE_IFACE) == "desktop frame"
def test_root_get_localized_role_name(registry_root, session_manager):
# FIXME: see the corresponding FIXME in registry.c, to actually localize this
assert registry_root.GetLocalizedRoleName(dbus_interface=ACCESSIBLE_IFACE) == "desktop frame"
def test_root_get_state(registry_root, session_manager):
assert registry_root.GetState(dbus_interface=ACCESSIBLE_IFACE) == [0, 0]
def test_root_get_attributes(registry_root, session_manager):
assert len(registry_root.GetAttributes(dbus_interface=ACCESSIBLE_IFACE)) == 0
|
Python
| 0
|
@@ -2555,28 +2555,219 @@
ce=ACCESSIBLE_IFACE)) == 0%0A%0A
+def test_root_get_application(registry_root, session_manager):%0A (name, path) = registry_root.GetApplication(dbus_interface=ACCESSIBLE_IFACE)%0A assert path == '/org/a11y/atspi/null'%0A %0A
|
2ee9e4200c90eae9739a44cb56270d0e873907e9
|
Add more example
|
pandas/pandas.py
|
pandas/pandas.py
|
import pandas as pd
# Reading csv without header
inp = pd.read_csv('data.txt', header=None)
# Retrieving particular columns by indexes, since column headers are not there
X_df = inp[inp.columns[0:2]]
# Converting dataframe to numpy ndarray
X_nd = X_df.values
|
Python
| 0
|
@@ -87,16 +87,372 @@
=None)%0A%0A
+# Reading csv and set name of columns%0Ainp = pd.read_csv('data.txt', names=%5B'column1', 'column2'%5D)%0A%0A# Reading csv and set index%0Ainp = pd.read_csv('data.txt', index_col=%5B'column1'%5D)%0Ainp = pd.read_csv('data.txt', index_col=0)%0A%0A# Reset index%0Ainp = inp.reset_index()%0Ainp.reset_index(inplace = True)%0A%0A# Show top 5 row%0Ainp.head(5)%0A%0A# Show last 5 row%0Ainp.tail(5)%0A%0A
# Retrie
|
1bf7b678a0420f471c2af2e943e65d7bad756ae3
|
Make the requests test use unittest2, rather than the system unittest.
|
tests/regressiontests/requests/tests.py
|
tests/regressiontests/requests/tests.py
|
from datetime import datetime, timedelta
import time
from StringIO import StringIO
import unittest
from django.http import HttpRequest, HttpResponse, parse_cookie
from django.core.handlers.wsgi import WSGIRequest, LimitedStream
from django.core.handlers.modpython import ModPythonRequest
from django.utils.http import cookie_date
class RequestsTests(unittest.TestCase):
def test_httprequest(self):
request = HttpRequest()
self.assertEqual(request.GET.keys(), [])
self.assertEqual(request.POST.keys(), [])
self.assertEqual(request.COOKIES.keys(), [])
self.assertEqual(request.META.keys(), [])
def test_wsgirequest(self):
request = WSGIRequest({'PATH_INFO': 'bogus', 'REQUEST_METHOD': 'bogus', 'wsgi.input': StringIO('')})
self.assertEqual(request.GET.keys(), [])
self.assertEqual(request.POST.keys(), [])
self.assertEqual(request.COOKIES.keys(), [])
self.assertEqual(set(request.META.keys()), set(['PATH_INFO', 'REQUEST_METHOD', 'SCRIPT_NAME', 'wsgi.input']))
self.assertEqual(request.META['PATH_INFO'], 'bogus')
self.assertEqual(request.META['REQUEST_METHOD'], 'bogus')
self.assertEqual(request.META['SCRIPT_NAME'], '')
def test_modpythonrequest(self):
class FakeModPythonRequest(ModPythonRequest):
def __init__(self, *args, **kwargs):
super(FakeModPythonRequest, self).__init__(*args, **kwargs)
self._get = self._post = self._meta = self._cookies = {}
class Dummy:
def get_options(self):
return {}
req = Dummy()
req.uri = 'bogus'
request = FakeModPythonRequest(req)
self.assertEqual(request.path, 'bogus')
self.assertEqual(request.GET.keys(), [])
self.assertEqual(request.POST.keys(), [])
self.assertEqual(request.COOKIES.keys(), [])
self.assertEqual(request.META.keys(), [])
def test_parse_cookie(self):
self.assertEqual(parse_cookie('invalid:key=true'), {})
def test_httprequest_location(self):
request = HttpRequest()
self.assertEqual(request.build_absolute_uri(location="https://www.example.com/asdf"),
'https://www.example.com/asdf')
request.get_host = lambda: 'www.example.com'
request.path = ''
self.assertEqual(request.build_absolute_uri(location="/path/with:colons"),
'http://www.example.com/path/with:colons')
def test_near_expiration(self):
"Cookie will expire when an near expiration time is provided"
response = HttpResponse()
        # There is a timing weakness in this test; the
        # expected result for max-age requires that there be
# a very slight difference between the evaluated expiration
# time, and the time evaluated in set_cookie(). If this
# difference doesn't exist, the cookie time will be
# 1 second larger. To avoid the problem, put in a quick sleep,
# which guarantees that there will be a time difference.
expires = datetime.utcnow() + timedelta(seconds=10)
time.sleep(0.001)
response.set_cookie('datetime', expires=expires)
datetime_cookie = response.cookies['datetime']
self.assertEqual(datetime_cookie['max-age'], 10)
def test_far_expiration(self):
"Cookie will expire when an distant expiration time is provided"
response = HttpResponse()
response.set_cookie('datetime', expires=datetime(2028, 1, 1, 4, 5, 6))
datetime_cookie = response.cookies['datetime']
self.assertEqual(datetime_cookie['expires'], 'Sat, 01-Jan-2028 04:05:06 GMT')
def test_max_age_expiration(self):
"Cookie will expire if max_age is provided"
response = HttpResponse()
response.set_cookie('max_age', max_age=10)
max_age_cookie = response.cookies['max_age']
self.assertEqual(max_age_cookie['max-age'], 10)
self.assertEqual(max_age_cookie['expires'], cookie_date(time.time()+10))
def test_limited_stream(self):
# Read all of a limited stream
stream = LimitedStream(StringIO('test'), 2)
self.assertEqual(stream.read(), 'te')
# Read a number of characters greater than the stream has to offer
stream = LimitedStream(StringIO('test'), 2)
self.assertEqual(stream.read(5), 'te')
# Read sequentially from a stream
stream = LimitedStream(StringIO('12345678'), 8)
self.assertEqual(stream.read(5), '12345')
self.assertEqual(stream.read(5), '678')
# Read lines from a stream
stream = LimitedStream(StringIO('1234\n5678\nabcd\nefgh\nijkl'), 24)
# Read a full line, unconditionally
self.assertEqual(stream.readline(), '1234\n')
# Read a number of characters less than a line
self.assertEqual(stream.readline(2), '56')
# Read the rest of the partial line
self.assertEqual(stream.readline(), '78\n')
# Read a full line, with a character limit greater than the line length
self.assertEqual(stream.readline(6), 'abcd\n')
# Read the next line, deliberately terminated at the line end
self.assertEqual(stream.readline(4), 'efgh')
# Read the next line... just the line end
self.assertEqual(stream.readline(), '\n')
# Read everything else.
self.assertEqual(stream.readline(), 'ijkl')
def test_stream(self):
request = WSGIRequest({'REQUEST_METHOD': 'POST', 'wsgi.input': StringIO('name=value')})
self.assertEqual(request.read(), 'name=value')
def test_read_after_value(self):
"""
Reading from request is allowed after accessing request contents as
POST or raw_post_data.
"""
request = WSGIRequest({'REQUEST_METHOD': 'POST', 'wsgi.input': StringIO('name=value')})
self.assertEqual(request.POST, {u'name': [u'value']})
self.assertEqual(request.raw_post_data, 'name=value')
self.assertEqual(request.read(), 'name=value')
def test_value_after_read(self):
"""
Construction of POST or raw_post_data is not allowed after reading
from request.
"""
request = WSGIRequest({'REQUEST_METHOD': 'POST', 'wsgi.input': StringIO('name=value')})
self.assertEqual(request.read(2), 'na')
self.assertRaises(Exception, lambda: request.raw_post_data)
self.assertEqual(request.POST, {})
def test_read_by_lines(self):
request = WSGIRequest({'REQUEST_METHOD': 'POST', 'wsgi.input': StringIO('name=value')})
self.assertEqual(list(request), ['name=value'])
|
Python
| 0.999814
|
@@ -1,12 +1,24 @@
+import time%0A
from datetim
@@ -46,28 +46,16 @@
medelta%0A
-import time%0A
from Str
@@ -80,210 +80,228 @@
gIO%0A
-import unittest%0A%0Afrom django.http import HttpRequest, HttpResponse, parse_cookie%0Afrom django.core.handlers.wsgi import WSGIRequest, LimitedStream%0Afrom django.core.handlers.modpython import ModPythonRequ
+%0Afrom django.core.handlers.modpython import ModPythonRequest%0Afrom django.core.handlers.wsgi import WSGIRequest, LimitedStream%0Afrom django.http import HttpRequest, HttpResponse, parse_cookie%0Afrom django.utils import unitt
est%0A
@@ -383,17 +383,16 @@
tCase):%0A
-%0A
def
|
7aab5bcf7195526c37d556872c1530051e0dc8b6
|
Fix summary tests
|
summary/test_summary.py
|
summary/test_summary.py
|
# -*- coding: utf-8 -*-
import unittest
from jinja2.utils import generate_lorem_ipsum
# generate one paragraph, enclosed with <p>
TEST_CONTENT = str(generate_lorem_ipsum(n=1))
TEST_SUMMARY = generate_lorem_ipsum(n=1, html=False)
from pelican.contents import Page
import summary
class TestSummary(unittest.TestCase):
def setUp(self):
super(TestSummary, self).setUp()
summary.register()
summary.initialized(None)
self.page_kwargs = {
'content': TEST_CONTENT,
'context': {
'localsiteurl': '',
},
'metadata': {
'summary': TEST_SUMMARY,
'title': 'foo bar',
'author': 'Blogger',
},
}
def _copy_page_kwargs(self):
# make a deep copy of page_kwargs
page_kwargs = dict([(key, self.page_kwargs[key]) for key in
self.page_kwargs])
for key in page_kwargs:
if not isinstance(page_kwargs[key], dict):
break
page_kwargs[key] = dict([(subkey, page_kwargs[key][subkey])
for subkey in page_kwargs[key]])
return page_kwargs
def test_end_summary(self):
page_kwargs = self._copy_page_kwargs()
del page_kwargs['metadata']['summary']
page_kwargs['content'] = (
TEST_SUMMARY + '<!-- PELICAN_END_SUMMARY -->' + TEST_CONTENT)
page = Page(**page_kwargs)
# test both the summary and the marker removal
self.assertEqual(page.summary, TEST_SUMMARY)
self.assertEqual(page.content, TEST_SUMMARY + TEST_CONTENT)
def test_begin_summary(self):
page_kwargs = self._copy_page_kwargs()
del page_kwargs['metadata']['summary']
page_kwargs['content'] = (
'FOOBAR<!-- PELICAN_BEGIN_SUMMARY -->' + TEST_CONTENT)
page = Page(**page_kwargs)
# test both the summary and the marker removal
self.assertEqual(page.summary, TEST_CONTENT)
self.assertEqual(page.content, 'FOOBAR' + TEST_CONTENT)
def test_begin_end_summary(self):
page_kwargs = self._copy_page_kwargs()
del page_kwargs['metadata']['summary']
page_kwargs['content'] = (
'FOOBAR<!-- PELICAN_BEGIN_SUMMARY -->' + TEST_SUMMARY +
'<!-- PELICAN_END_SUMMARY -->' + TEST_CONTENT)
page = Page(**page_kwargs)
# test both the summary and the marker removal
self.assertEqual(page.summary, TEST_SUMMARY)
self.assertEqual(page.content, 'FOOBAR' + TEST_SUMMARY + TEST_CONTENT)
|
Python
| 0.000013
|
@@ -260,16 +260,40 @@
ort Page
+%0Aimport pelican.settings
%0A%0Aimport
@@ -401,16 +401,85 @@
.setUp()
+%0A pelican.settings.DEFAULT_CONFIG%5B'SUMMARY_MAX_LENGTH'%5D = None
%0A%0A
@@ -1573,32 +1573,70 @@
(**page_kwargs)%0A
+ summary.extract_summary(page)%0A
# test b
@@ -2053,32 +2053,70 @@
(**page_kwargs)%0A
+ summary.extract_summary(page)%0A
# test b
@@ -2601,32 +2601,70 @@
(**page_kwargs)%0A
+ summary.extract_summary(page)%0A
# test b
@@ -2747,32 +2747,32 @@
, TEST_SUMMARY)%0A
-
self.ass
@@ -2814,28 +2814,77 @@
EST_SUMMARY + TEST_CONTENT)%0A
+%0A%0Aif __name__ == '__main__':%0A unittest.main()%0A
|
5db44f4a2aa60ef26607d81d6e6d7d5f4723d349
|
Remove irellevant test cases
|
tests/test_fms_api_event_list_parser.py
|
tests/test_fms_api_event_list_parser.py
|
import datetime
import json
import unittest2
from datafeeds.parsers.fms_api.fms_api_event_list_parser import FMSAPIEventListParser
from google.appengine.ext import ndb
from google.appengine.ext import testbed
from consts.district_type import DistrictType
from consts.event_type import EventType
from models.event import Event
class TestFMSAPIEventListParser(unittest2.TestCase):
def setUp(self):
self.testbed = testbed.Testbed()
self.testbed.activate()
self.testbed.init_datastore_v3_stub()
self.testbed.init_memcache_stub()
def tearDown(self):
self.testbed.deactivate()
def test_parse_event_list(self):
with open('test_data/fms_api/2015_event_list.json', 'r') as f:
events = FMSAPIEventListParser(2015).parse(json.loads(f.read()))
self.assertTrue(isinstance(events, list))
# File has 5 events, but we ignore CMP events, so only 3 are expected back
self.assertEquals(len(events), 3)
def test_parse_regional_event(self):
with open('test_data/fms_api/2015_event_list.json', 'r') as f:
events = FMSAPIEventListParser(2015).parse(json.loads(f.read()))
event = events[0]
self.assertEquals(event.key_name, "2015nyny")
self.assertEquals(event.name, "New York City Regional")
self.assertEquals(event.short_name, "New York City")
self.assertEquals(event.event_short, "nyny")
self.assertEquals(event.official, True)
self.assertEquals(event.start_date, datetime.datetime(year=2015, month=3, day=12, hour=0, minute=0, second=0))
self.assertEquals(event.end_date, datetime.datetime(year=2015, month=3, day=15, hour=23, minute=59, second=59))
self.assertEquals(event.venue, "Jacob K. Javits Convention Center")
self.assertEquals(event.location, "New York, NY, USA")
self.assertEquals(event.venue_address, "Jacob K. Javits Convention Center, New York, NY, USA")
self.assertEquals(event.year, 2015)
self.assertEquals(event.event_type_enum, EventType.REGIONAL)
self.assertEquals(event.event_district_enum, DistrictType.NO_DISTRICT)
def test_parse_district_event(self):
with open('test_data/fms_api/2015_event_list.json', 'r') as f:
events = FMSAPIEventListParser(2015).parse(json.loads(f.read()))
event = events[1]
self.assertEquals(event.key_name, "2015cthar")
self.assertEquals(event.name, "NE District - Hartford Event")
self.assertEquals(event.short_name, "Hartford")
self.assertEquals(event.event_short, "cthar")
self.assertEquals(event.official, True)
self.assertEquals(event.start_date, datetime.datetime(year=2015, month=3, day=27, hour=0, minute=0, second=0))
self.assertEquals(event.end_date, datetime.datetime(year=2015, month=3, day=29, hour=23, minute=59, second=59))
self.assertEquals(event.venue, "Hartford Public High School")
self.assertEquals(event.location, "Hartford, CT, USA")
self.assertEquals(event.venue_address, "Hartford Public High School, Hartford, CT, USA")
self.assertEquals(event.year, 2015)
self.assertEquals(event.event_type_enum, EventType.DISTRICT)
self.assertEquals(event.event_district_enum, DistrictType.NEW_ENGLAND)
def test_parse_district_cmp(self):
with open('test_data/fms_api/2015_event_list.json', 'r') as f:
events = FMSAPIEventListParser(2015).parse(json.loads(f.read()))
event = events[2]
self.assertEquals(event.key_name, "2015necmp")
self.assertEquals(event.name, "NE FIRST District Championship presented by United Technologies")
self.assertEquals(event.short_name, "NE FIRST")
self.assertEquals(event.event_short, "necmp")
self.assertEquals(event.official, True)
self.assertEquals(event.start_date, datetime.datetime(year=2015, month=4, day=8, hour=0, minute=0, second=0))
self.assertEquals(event.end_date, datetime.datetime(year=2015, month=4, day=11, hour=23, minute=59, second=59))
self.assertEquals(event.venue, "Sports and Recreation Center, WPI")
self.assertEquals(event.location, "Worcester, MA, USA")
self.assertEquals(event.venue_address, "Sports and Recreation Center, WPI, Worcester, MA, USA")
self.assertEquals(event.year, 2015)
self.assertEquals(event.event_type_enum, EventType.DISTRICT_CMP)
self.assertEquals(event.event_district_enum, DistrictType.NEW_ENGLAND)
|
Python
| 0
|
@@ -1912,115 +1912,8 @@
A%22)%0A
- self.assertEquals(event.venue_address, %22Jacob K. Javits Convention Center, New York, NY, USA%22)%0A
@@ -1948,32 +1948,32 @@
ent.year, 2015)%0A
+
self
@@ -3028,109 +3028,8 @@
A%22)%0A
- self.assertEquals(event.venue_address, %22Hartford Public High School, Hartford, CT, USA%22)%0A
@@ -4111,16 +4111,16 @@
, WPI%22)%0A
+
@@ -4183,116 +4183,8 @@
A%22)%0A
- self.assertEquals(event.venue_address, %22Sports and Recreation Center, WPI, Worcester, MA, USA%22)%0A
|
abd32bb9e79d771cae2117f9e754b4a7e38434bf
|
Add storage nominal capacity to parameters dict in storage_at_hvmv_substation
|
edisgo/flex_opt/storage_integration.py
|
edisgo/flex_opt/storage_integration.py
|
from edisgo.grid.components import Storage, Line
from edisgo.grid.tools import select_cable
import logging
def integrate_storage(network, position, operational_mode, parameters):
"""
    Integrate a storage unit into the grid and specify its operational mode
Parameters
----------
network: :class:`~.grid.network.Network`
The eDisGo container object
position : str
Specify storage location. Available options are
* 'hvmv_substation_busbar': places a storage unit directly at the
HV/MV station's bus bar, see :func:`storage_at_hvmv_substation`
operational_mode : str
        Operational mode. See :class:`~.grid.components.StorageOperation` for
possible options and more information.
parameters : dict
Parameters specifying characteristics of storage in detail
        The format looks like the following example and requires the
        parameters given below
.. code-block:: python
{
'soc_initial': <float>, # in kWh,
'efficiency_in': <float>, # in per unit 0..1
'efficiency_out': <float>, # in per unit 0..1
'standing_loss': <float> # in per unit 0..1
}
"""
if position == 'hvmv_substation_busbar':
storage_at_hvmv_substation(network.mv_grid, parameters,
operational_mode)
else:
logging.error("{} is not a valid storage positioning mode".format(
position))
raise ValueError("Unknown parameter for storage posisitioning: {} is "
"not a valid storage positioning mode".format(
position))
def storage_at_hvmv_substation(mv_grid, parameters, mode, nominal_capacity=1000):
"""
Place 1 MVA battery at HV/MV substation bus bar
As this is currently a dummy implementation the storage operation is as
simple as follows:
* Feedin > 50 % -> charge at full power
* Feedin < 50 % -> discharge at full power
Parameters
----------
mv_grid : :class:`~.grid.grids.MVGrid`
MV grid instance
parameters : dict
Parameters specifying characteristics of storage in detail
nominal_capacity : float
Storage's apparent rated power
mode : str
        Operational mode. See :class:`~.grid.components.StorageOperation` for
possible options and more information.
"""
    # create the storage instance and define its operational mode
storage_id = len(mv_grid.graph.nodes_by_attribute('storage')) + 1
storage = Storage(operation={'mode': mode},
id=storage_id,
nominal_capacity=nominal_capacity,
grid=mv_grid,
soc_initial=parameters['soc_initial'],
efficiency_in=parameters['efficiency_in'],
efficiency_out=parameters['efficiency_out'],
standing_loss=parameters['standing_loss'],
geom=mv_grid.station.geom)
# add storage itself to graph
mv_grid.graph.add_node(storage, type='storage')
# add 1m connecting line to hv/mv substation bus bar
line_type, _ = select_cable(mv_grid.network, 'mv', nominal_capacity)
line = Line(
id=storage_id,
type=line_type,
kind='cable',
length=1e-3,
grid=mv_grid)
mv_grid.graph.add_edge(mv_grid.station, storage, line=line, type='line')
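# Illustrative sketch (not part of the module): a parameters dict of the
# shape the docstring above describes, passed through integrate_storage.
# The operational mode string and network object are hypothetical.
# parameters = {
#     'soc_initial': 0.0,       # kWh
#     'efficiency_in': 0.95,    # per unit
#     'efficiency_out': 0.95,   # per unit
#     'standing_loss': 0.005,   # per unit
# }
# integrate_storage(network, 'hvmv_substation_busbar', 'fifty-fifty',
#                   parameters)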
|
Python
| 0.000012
|
@@ -969,16 +969,70 @@
%7B%0A
+ 'nominal_capacity': %3Cfloat%3E, # in kWh%0A
@@ -1071,17 +1071,16 @@
# in kWh
-,
%0A
@@ -1785,31 +1785,8 @@
mode
-, nominal_capacity=1000
):%0A
@@ -2230,76 +2230,8 @@
ail%0A
- nominal_capacity : float%0A Storage's apparent rated power%0A
@@ -2631,16 +2631,28 @@
apacity=
+parameters%5B'
nominal_
@@ -2659,16 +2659,18 @@
capacity
+'%5D
,%0A
@@ -3205,17 +3205,57 @@
k, 'mv',
-
+%0A storage.
nominal_
|
54fab13f466d17acfa4f9b3d67d777de8d34f67f
|
Remove interdependence from get_path_category()
|
src/core/templatetags/pycontw_tools.py
|
src/core/templatetags/pycontw_tools.py
|
import re
from django.template import Library
register = Library()
@register.filter
def message_bootstrap_class_str(message):
return ' '.join('alert-' + tag for tag in message.tags.split(' '))
@register.filter
def get_path_category(url):
lang = '\/(zh\-hant|en\-us)'
category_pattern_mapping = {
'about': '\/about/pycontw',
'sponsor': '\/sponsor/sponsor',
'speaking': '\/speaking\/(cfp|talk|tutorial|recording)',
'conference': '\/(events\/(overview|schedule)|portal)',
'event': '\/events\/(keynotes|talks|open-spaces)',
'registration': '\/registration/(financial-aid|ticket-info|registration)',
'venue': '\/venue'
}
end = '\/?$'
for category, pattern in category_pattern_mapping.items():
if re.match(lang + pattern + end, url):
return category
return 'uncategorized'
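# Illustrative examples (not part of the original file) of how the filter
# resolves localized URL paths:
#   get_path_category('/en-us/about/pycontw/')  -> 'about'
#   get_path_category('/zh-hant/nowhere/')      -> 'uncategorized'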
|
Python
| 0.000005
|
@@ -250,19 +250,30 @@
-lang = '%5C/(
+pattern = r'/(?P%3Clang%3E
zh%5C-
@@ -288,22 +288,21 @@
-us)
-'%0A
+/(?P%3C
category
_pat
@@ -301,292 +301,77 @@
gory
-_pattern_mapping = %7B%0A 'about': '%5C/about/pycontw',%0A 'sponsor': '%5C/sponsor/sponsor',%0A 'speaking': '%5C/speaking%5C/(cfp%7Ctalk%7Ctutorial%7Crecording)',%0A 'conference': '%5C/(events%5C/(overview%7Cschedule)%7Cportal)',%0A 'event': '%5C/events%5C/(keynotes%7Ctalks%7Copen-spaces)',
+%3E%5B0-9a-z-%5D*)/'%0A result = re.match(pattern, url)%0A if not result:
%0A
@@ -379,282 +379,72 @@
-'
re
-gistration': '%5C/registration/(financial-aid%7Cticket-info%7Cregistration)',%0A 'venue': '%5C/venue'%0A %7D%0A end = '%5C/?$'%0A for category, pattern in category_pattern_mapping.items():%0A if re.match(lang + pattern + end, url):%0A return category%0A return
+turn 'unmatched'%0A return result.groupdict().get('category',
'un
@@ -455,9 +455,10 @@
gorized'
+)
%0A
|
0bffe17c50c41f85a8dea42a468d282248c75ef9
|
Make static url absolute
|
svenv/svenv/settings.py
|
svenv/svenv/settings.py
|
"""
Django settings for svenv project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '!k4%c0+yuy2^zu@l_uk2g7h$ya9*m#zfow*0@kv15s0l776%@3'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'blog',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'svenv.urls'
WSGI_APPLICATION = 'svenv.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
mysql_password = open('/random.txt').read().splitlines()[0]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'svenv_nl',
'USER': 'admin',
'PASSWORD': mysql_password,
'HOST': 'localhost',
'PORT': '3306',
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_ROOT = os.path.join('static')
STATIC_URL = 'static/'
TEMPLATE_DIRS = [os.path.join(BASE_DIR, 'templates')]
REST_FRAMEWORK = {
'DEFAULT_FILTER_BACKENDS': ('rest_framework.filters.OrderingFilter',),
'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.PageNumberPagination',
'PAGE_SIZE': 3,
'DEFAULT_PERMISSION_CLASSES': [
'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly'
],
}
|
Python
| 0.999822
|
@@ -2240,16 +2240,17 @@
_URL = '
+/
static/'
|
435547a747b5085076bdc6ddf1f687e146f8e071
|
Fix another unit test stacktrace in pkg_resource
|
tests/unit/modules/pkg_resource_test.py
|
tests/unit/modules/pkg_resource_test.py
|
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Rahul Handay <rahulha@saltstack.com>`
'''
# Import Python Libs
from __future__ import absolute_import
import yaml
# Import Salt Testing Libs
from salttesting import TestCase, skipIf
from salttesting.mock import (
MagicMock,
patch,
NO_MOCK,
NO_MOCK_REASON
)
# Import Salt Libs
import salt.utils
from salt.modules import pkg_resource
import salt.ext.six as six
# Globals
pkg_resource.__grains__ = {}
pkg_resource.__salt__ = {}
@skipIf(NO_MOCK, NO_MOCK_REASON)
class PkgresTestCase(TestCase):
'''
Test cases for salt.modules.pkg_resource
'''
def test_pack_sources(self):
'''
        Test that pack_sources accepts a list of dicts (or a string
        representing a list of dicts) and packs the key/value pairs into a
        single dict.
'''
with patch.object(yaml,
'safe_load',
MagicMock(side_effect=yaml.parser.ParserError('f'))):
with patch.dict(pkg_resource.__salt__,
{'pkg.normalize_name': MagicMock()}):
self.assertDictEqual(pkg_resource.pack_sources('sources'), {})
self.assertDictEqual(pkg_resource.pack_sources(['A', 'a']), {})
self.assertTrue(pkg_resource.pack_sources([{'A': 'a'}]))
def test_parse_targets(self):
'''
        Test that parse_targets parses the input to pkg.install and
        returns the package(s) to be installed: a list of packages, as
        well as a string noting whether the packages are to come from a
        repository or a binary package.
'''
with patch.dict(pkg_resource.__grains__, {'os': 'A'}):
self.assertEqual(pkg_resource.parse_targets(pkgs='a',
sources='a'),
(None, None))
with patch.object(pkg_resource, '_repack_pkgs',
return_value=False):
self.assertEqual(pkg_resource.parse_targets(pkgs='a'),
(None, None))
with patch.object(pkg_resource, '_repack_pkgs',
return_value='A'):
self.assertEqual(pkg_resource.parse_targets(pkgs='a'),
('A', 'repository'))
with patch.dict(pkg_resource.__grains__, {'os': 'MacOS1'}):
with patch.object(pkg_resource, 'pack_sources',
return_value=False):
self.assertEqual(pkg_resource.parse_targets(sources='s'),
(None, None))
with patch.object(pkg_resource, 'pack_sources',
return_value={'A': '/a'}):
with patch.dict(pkg_resource.__salt__,
{'config.valid_fileproto':
MagicMock(return_value=False)}):
self.assertEqual(pkg_resource.parse_targets(sources='s'),
(['/a'], 'file'))
with patch.object(pkg_resource, 'pack_sources',
return_value={'A': 'a'}):
with patch.dict(pkg_resource.__salt__,
{'config.valid_fileproto':
MagicMock(return_value=False)}):
self.assertEqual(pkg_resource.parse_targets(name='n'),
({'n': None}, 'repository'))
self.assertEqual(pkg_resource.parse_targets(),
(None, None))
def test_version(self):
'''
        Test the common interface for obtaining the version
of installed packages.
'''
with patch.object(salt.utils, 'is_true', return_value=True):
mock = MagicMock(return_value={'A': 'B'})
with patch.dict(pkg_resource.__salt__,
{'pkg.list_pkgs': mock}):
self.assertEqual(pkg_resource.version('A'), 'B')
self.assertDictEqual(pkg_resource.version(), {})
mock = MagicMock(return_value={})
with patch.dict(pkg_resource.__salt__, {'pkg.list_pkgs': mock}):
with patch('builtins.next' if six.PY3 else '__builtin__.next') as mock_next:
mock_next.side_effect = StopIteration()
self.assertEqual(pkg_resource.version('A'), '')
def test_add_pkg(self):
'''
Test to add a package to a dict of installed packages.
'''
self.assertIsNone(pkg_resource.add_pkg('pkgs', 'name', 'version'))
def test_sort_pkglist(self):
'''
        Test that sort_pkglist accepts a dict obtained from pkg.list_pkgs() and sorts
in place the list of versions for any packages that have multiple
versions installed, so that two package lists can be compared
to one another.
'''
self.assertIsNone(pkg_resource.sort_pkglist({}))
def test_stringify(self):
'''
        Test that stringify takes a dict of package name/version information
and joins each list of
installed versions into a string.
'''
self.assertIsNone(pkg_resource.stringify({}))
def test_version_clean(self):
'''
        Test that version_clean removes extra data from the version string.
'''
with patch.dict(pkg_resource.__salt__, {'pkg.version_clean':
MagicMock(return_value='A')}):
self.assertEqual(pkg_resource.version_clean('version'), 'A')
self.assertEqual(pkg_resource.version_clean('v'), 'v')
def test_check_extra_requirements(self):
'''
Test to check if the installed package already
has the given requirements.
'''
with patch.dict(pkg_resource.__salt__, {'pkg.check_extra_requirements':
MagicMock(return_value='A')}):
self.assertEqual(pkg_resource.check_extra_requirements('a', 'b'),
'A')
self.assertTrue(pkg_resource.check_extra_requirements('a', False))
if __name__ == '__main__':
from integration import run_tests
run_tests(PkgresTestCase, needs_daemon=False)
|
Python
| 0.000002
|
@@ -4670,14 +4670,20 @@
pkg(
+%7B
'pkgs'
+: %5B%5D%7D
, 'n
|
61627e72f4f3f0f9c7058264cb605650152192b5
|
Remove extra comments description for Google
|
geocoder/google.py
|
geocoder/google.py
|
#!/usr/bin/python
# coding: utf8
import ratelim
import requests
from .base import Base
class Google(Base):
"""
Google Geocoding API
====================
Geocoding is the process of converting addresses (like "1600 Amphitheatre Parkway,
Mountain View, CA") into geographic coordinates (like latitude 37.423021 and
longitude -122.083739), which you can use to place markers or position the map.
API Reference
-------------
https://developers.google.com/maps/documentation/geocoding/
OSM Quality (6/6)
-----------------
- [x] addr:housenumber
- [x] addr:street
- [x] addr:city
- [x] addr:state
- [x] addr:country
- [x] addr:postal
Attributes (26/26)
------------------
- [x] accuracy
- [x] address
- [x] bbox
- [x] city
- [x] city_long
- [x] confidence
- [x] country
- [x] country_long
- [x] county
- [x] encoding
- [x] housenumber
- [x] lat
- [x] lng
- [x] location
- [x] neighborhood
- [x] ok
- [x] postal
- [x] provider
- [x] quality
- [x] road_long
- [x] state
- [x] state_long
- [x] status
- [x] street
- [x] sublocality
- [x] subpremise
"""
provider = 'google'
method = 'geocode'
def __init__(self, location, **kwargs):
self.url = 'https://maps.googleapis.com/maps/api/geocode/json'
self.location = location
self.params = {
'sensor': 'false',
'address': location,
'key': kwargs.get('key', ''),
}
self._initialize(**kwargs)
self._google_catch_errors()
@staticmethod
@ratelim.greedy(2500, 60*60*24)
@ratelim.greedy(5, 1)
def rate_limited_get(*args, **kwargs):
return requests.get(*args, **kwargs)
def _google_catch_errors(self):
status = self.parse.get('status')
if not status == 'OK':
self.error = status
def _exceptions(self):
        # Build initial tree with results
if self.parse['results']:
self._build_tree(self.parse.get('results')[0])
# Build Geometry
self._build_tree(self.parse.get('geometry'))
# Parse address components with short & long names
for item in self.parse['address_components']:
for category in item['types']:
self.parse[category]['long_name'] = self._encode(item['long_name'])
self.parse[category]['short_name'] = self._encode(item['short_name'])
@property
def lat(self):
return self.parse['location'].get('lat')
@property
def lng(self):
return self.parse['location'].get('lng')
@property
def quality(self):
quality = self.parse.get('types')
if quality:
return quality[0]
@property
def accuracy(self):
return self.parse.get('location_type')
@property
def bbox(self):
south = self.parse['southwest'].get('lat')
west = self.parse['southwest'].get('lng')
north = self.parse['northeast'].get('lat')
east = self.parse['northeast'].get('lng')
return self._get_bbox(south, west, north, east)
@property
def address(self):
return self.parse.get('formatted_address')
@property
def postal(self):
return self.parse['postal_code'].get('short_name')
@property
def subpremise(self):
return self.parse['subpremise'].get('short_name')
@property
def housenumber(self):
return self.parse['street_number'].get('short_name')
@property
def street(self):
return self.parse['route'].get('short_name')
@property
def road_long(self):
return self.parse['route'].get('long_name')
@property
def neighborhood(self):
return self.parse['neighborhood'].get('short_name')
@property
def sublocality(self):
return self.parse['sublocality'].get('short_name')
@property
def city(self):
return self.parse['locality'].get('short_name')
@property
def city_long(self):
return self.parse['locality'].get('long_name')
@property
def county(self):
return self.parse['administrative_area_level_2'].get('short_name')
@property
def state(self):
return self.parse['administrative_area_level_1'].get('short_name')
@property
def state_long(self):
return self.parse['administrative_area_level_1'].get('long_name')
@property
def country(self):
return self.parse['country'].get('short_name')
@property
def country_long(self):
return self.parse['country'].get('long_name')
if __name__ == '__main__':
g = Google('11 Wall Street, New York')
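    # Illustrative addition (not in the original file): once the request
    # succeeds, the geocoded fields are exposed as properties.
    print(g.address)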
|
Python
| 0
|
@@ -217,66 +217,8 @@
sses
- (like %221600 Amphitheatre Parkway,%0A Mountain View, CA%22)
int
@@ -229,16 +229,20 @@
ographic
+%0A
coordin
@@ -274,20 +274,16 @@
3021 and
-%0A
longitu
@@ -298,16 +298,20 @@
083739),
+%0A
which y
@@ -462,714 +462,247 @@
ding
-/
%0A%0A
-OSM Quality (6/6)%0A -----------------%0A - %5Bx%5D addr:housenumber%0A - %5Bx%5D addr:street%0A - %5Bx%5D addr:city%0A - %5Bx%5D addr:state%0A - %5Bx%5D addr:country%0A - %5Bx%5D addr:postal%0A%0A Attributes (26/26)%0A ------------------%0A - %5Bx%5D accuracy%0A - %5Bx%5D address%0A - %5Bx%5D bbox%0A - %5Bx%5D city%0A - %5Bx%5D city_long%0A - %5Bx%5D confidence%0A - %5Bx%5D country%0A - %5Bx%5D country_long%0A - %5Bx%5D county%0A - %5Bx%5D encoding%0A - %5Bx%5D housenumber%0A - %5Bx%5D lat%0A - %5Bx%5D l
+Parameters%0A ----------%0A :param location: Your search location you want geocoded.%0A :param method: (default=geocode) Use the followi
ng
+:
%0A
-- %5Bx%5D location%0A - %5Bx%5D neighborho
+ %3E geoc
od
+e
%0A
-- %5Bx%5D ok%0A - %5Bx%5D postal%0A - %5Bx%5D provider%0A - %5Bx%5D quality%0A - %5Bx%5D road_long%0A - %5Bx%5D state%0A - %5Bx%5D state_long
+ %3E reverse%0A %3E batch%0A %3E timezone
%0A
-- %5Bx%5D status%0A - %5Bx%5D street%0A - %5Bx%5D sublocality%0A - %5Bx%5D subpremise
+ %3E elevation
%0A
@@ -1164,12 +1164,16 @@
, 60
-*60*
+ * 60 *
24)%0A
@@ -4257,9 +4257,8 @@
York')%0A
-%0A
|
09dc63bc729d3415277cd66c78cb7acc615b791c
|
add real time mean
|
Sound/core.py
|
Sound/core.py
|
#! /usr/bin/python3
import alsaaudio
import time
import audioop
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from scipy.ndimage.filters import gaussian_laplace
import pylab
import multiprocessing
class MyProcess(multiprocessing.Process):
def __init__(self, array):
multiprocessing.Process.__init__(self)
self.exit = multiprocessing.Event()
self.array = array
def run(self):
sns.set_style('darkgrid')
plt.ion()
while not(self.exit.is_set()):
plt.clf()
a = 20 * np.log10(self.array)
a[np.isinf(a)] = 0
plt.plot(a, label="decibel volume")
plt.plot([np.mean(a[a > 0])] * len(a), label="mean(a)", color="r")
plt.grid(True)
plt.legend()
plt.draw()
pylab.waitforbuttonpress(timeout=0.1)
print("Stopped")
def shutdown(self):
self.exit.set()
class Micro():
def __init__(self, alsaaudio_capture, alsaaudio_nonblock):
self.capture = alsaaudio_capture
self.nonblock = alsaaudio_nonblock
def __enter__(self):
self.inp = alsaaudio.PCM(self.capture, self.nonblock)
return self.inp
def __exit__(self, capture, nonblock, inpt):
self.inp.close()
def main(time_seconds, to_file):
"""
    Where time_seconds is the number of seconds.
"""
# Open the device in nonblocking capture mode. The last argument could
# just as well have been zero for blocking mode. Then we could have
    # left out the sleep call at the bottom of the loop
with Micro(alsaaudio.PCM_CAPTURE, alsaaudio.PCM_NONBLOCK) as inp:
# Set attributes: Mono, 8000 Hz, 16 bit little endian samples
inp.setchannels(1)
inp.setrate(8000)
inp.setformat(alsaaudio.PCM_FORMAT_S16_LE)
# The period size controls the internal number of frames per period.
# The significance of this parameter is documented in the ALSA api.
        # For our purposes, it is sufficient to know that reads from the device
# will return this many frames. Each frame being 2 bytes long.
# This means that the reads below will return either 320 bytes of data
# or 0 bytes of data. The latter is possible because we are in nonblocking
# mode.
inp.setperiodsize(160)
i = 0
interval = 0.001
n = int(time_seconds / interval)
df = multiprocessing.Array('i', n)
t1 = MyProcess(df)
t1.start()
while i < n:
# Read data from device
l, data = inp.read()
if l:
                # Return the maximum of the absolute value of all samples in a fragment.
df[i] = audioop.max(data, 2)
i += 1
time.sleep(interval)
print(i, "s")
if to_file:
df[400:].tofile("out.dat", sep=',')
input("Waiting input")
t1.shutdown()
return np.array(df[400:])
def plotting(df):
sns.set_style('darkgrid')
plt.subplot(2, 2, 1)
plt.plot(df, label="raw data")
plt.grid(True)
plt.legend()
a = 20 * np.log10(df)
a[np.isinf(a)] = 0
plt.subplot(2, 2, 2)
plt.plot(a, label="decibel volume")
plt.grid(True)
plt.legend()
plt.subplot(2, 2, 3)
plt.plot(np.fft.fft(a), label="fft(raw data)")
plt.grid(True)
plt.legend()
plt.subplot(2, 2, 4)
plt.plot(gaussian_laplace(a, sigma=max(a)), label="gaussian(raw data)")
plt.grid(True)
plt.legend()
plt.show()
if __name__ == '__main__':
df = main(10, False)
# plotting(df)
|
Python
| 0.000628
|
@@ -290,16 +290,42 @@
f, array
+, seconde_size, total_loop
):%0A
@@ -441,78 +441,245 @@
ray%0A
-%0A def run(self):%0A sns.set_style('darkgrid')%0A plt.ion(
+ self.seconde_size = seconde_size%0A self.total_loop = total_loop%0A%0A def run(self):%0A sns.set_style('darkgrid')%0A plt.ion()%0A%0A seconde_size = int(self.seconde_size)%0A total_loop = int(self.total_loop
)%0A%0A
@@ -743,16 +743,46 @@
t.clf()%0A
+ # decibel part
%0A
@@ -923,25 +923,301 @@
-plt.plot(
+# mean parts%0A # create th vector and fill it%0A mean = np.zeros(len(a))%0A for ele in range(total_loop):%0A # get the interval%0A res = a%5Bele * seconde_size: (ele + 1) * seconde_size%5D%0A res =
%5Bnp.mean
@@ -1221,11 +1221,15 @@
ean(
-a%5Ba
+res%5Bres
%3E 0
@@ -1238,30 +1238,186 @@
%5D *
-len(a), label=%22mean(a)
+seconde_size%0A # assign it%0A mean%5Bele * seconde_size: (ele + 1) * seconde_size%5D = res%0A plt.plot(mean, label=%22mean every seconds
%22, c
@@ -3203,16 +3203,44 @@
ocess(df
+, 1 / interval, time_seconds
)%0A
@@ -3553,24 +3553,26 @@
%0A
+ #
print(i, %22s
|
320a96337c55d770ed032520ecb75155e2d124e5
|
Update version
|
geoip2/__init__.py
|
geoip2/__init__.py
|
#pylint:disable=C0111
__title__ = 'geoip2'
__version__ = '0.1.0'
__author__ = 'Gregory Oschwald'
__license__ = 'LGPLv2+'
__copyright__ = 'Copyright 2013 Maxmind, Inc.'
|
Python
| 0
|
@@ -56,17 +56,17 @@
= '0.1.
-0
+1
'%0A__auth
|
d38fd28c47f3749ed3fb7a64827768108a413c78
|
introduce ec2.volume handling
|
src/main/python/monocyte/handler/ec2.py
|
src/main/python/monocyte/handler/ec2.py
|
from __future__ import print_function
import boto
import boto.ec2
from boto.exception import EC2ResponseError
from monocyte.handler import Resource, aws_handler
@aws_handler
class Handler(object):
VALID_TARGET_STATES = ["terminated", "shutting-down"]
def __init__(self, region_filter, dry_run=True):
self.regions = [region for region in boto.ec2.regions() if region_filter(region.name)]
self.dry_run = dry_run
self.name = __name__.rsplit(".", 1)[1]
self.order = 2
def fetch_unwanted_resources(self):
for region in self.regions:
connection = boto.ec2.connect_to_region(region.name)
resources = connection.get_only_instances() or []
for resource in resources:
yield Resource(resource, region.name)
def to_string(self, resource):
return "ec2 instance found in {region.name}\n\t" \
"{id} [{image_id}] - {instance_type}, since {launch_time}" \
"\n\tdnsname {public_dns_name}, key {key_name}, state {_state}".format(**vars(resource.wrapped))
def delete(self, resource):
if resource.wrapped.state in Handler.VALID_TARGET_STATES:
print("\tstate '{}' is a valid target state ({}), skipping".format(
resource.wrapped.state, ", ".join(Handler.VALID_TARGET_STATES)))
return []
connection = boto.ec2.connect_to_region(resource.region)
if self.dry_run:
try:
connection.terminate_instances([resource.wrapped.id], dry_run=True)
except EC2ResponseError as e:
if e.status == 412: # Precondition Failed
print("\tTermination {message}".format(**vars(e)))
return [resource.wrapped]
raise
else:
instances = connection.terminate_instances([resource.wrapped.id], dry_run=False)
print("\tInitiating shutdown sequence for {0}".format(instances))
return instances
|
Python
| 0
|
@@ -176,23 +176,24 @@
r%0Aclass
-Handler
+Instance
(object)
@@ -456,34 +456,22 @@
e =
-__name__.rsplit(%22.%22, 1)%5B1%5D
+%22ec2.instance%22
%0A
@@ -1140,23 +1140,24 @@
tate in
-Handler
+Instance
.VALID_T
@@ -1304,15 +1304,16 @@
oin(
-Handler
+Instance
.VAL
@@ -1970,16 +1970,16 @@
ances))%0A
-
@@ -1999,8 +1999,798 @@
stances%0A
+%0A%0A@aws_handler%0Aclass Volume(object):%0A def __init__(self, region_filter, dry_run=True):%0A self.regions = %5Bregion for region in boto.ec2.regions() if region_filter(region.name)%5D%0A self.dry_run = dry_run%0A self.name = %22ec2.volume%22%0A self.order = 3%0A%0A def fetch_unwanted_resources(self):%0A for region in self.regions:%0A connection = boto.ec2.connect_to_region(region.name)%0A resources = connection.get_all_volumes() or %5B%5D%0A for resource in resources:%0A yield Resource(resource, region.name)%0A%0A def to_string(self, resource):%0A return %22ebs volume found in %7Bregion.name%7D%5Cn%5Ct%22 %5C%0A %22%7Bid%7D %7Bstatus%7D, since %7Bcreate_time%7D%22.format(**vars(resource.wrapped))%0A%0A def delete(self, resource):%0A pass%0A
|
87e83a29dbdc0c656fffee92ac8b90556e540e19
|
Rename variable
|
tests/unit/son_analyze/cli/main_test.py
|
tests/unit/son_analyze/cli/main_test.py
|
# Copyright (c) 2015 SONATA-NFV, Thales Communications & Security
# ALL RIGHTS RESERVED.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Neither the name of the SONATA-NFV, Thales Communications & Security
# nor the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# This work has been performed in the framework of the SONATA project,
# funded by the European Commission under Grant number 671517 through
# the Horizon 2020 and 5G-PPP programmes. The authors would like to
# acknowledge the contributions of their colleagues of the SONATA
# partner consortium (www.sonata-nfv.eu).
# pylint: disable=missing-docstring
from time import sleep
import logging
from multiprocessing import Process
import typing # noqa pylint: disable=unused-import
import pytest # type: ignore
import requests
from docker import APIClient # type: ignore
import son_analyze.cli.main
from son_analyze import __version__
_LOGGER = logging.getLogger(__name__)
@pytest.fixture(scope="session")
def docker_cli():
return APIClient(base_url='unix://var/run/docker.sock')
def test_version(capsys) -> None:
with pytest.raises(SystemExit):
son_analyze.cli.main.dispatch(['version'])
out, _ = capsys.readouterr()
assert out == 'son-analyze version: {}\n'.format(__version__)
with pytest.raises(SystemExit) as boxed_ex:
son_analyze.cli.main.dispatch(['version', '--short'])
out, _ = capsys.readouterr()
assert out == __version__ + '\n'
assert boxed_ex.value.code == 0
@pytest.fixture(scope="function")
def run_bg(request):
run_process = Process(target=son_analyze.cli.main.dispatch, # type: ignore
args=(['run'],))
run_process.start() # type: ignore
def fin():
run_process.terminate() # type: ignore
request.addfinalizer(fin)
@pytest.mark.usefixtures("run_bg")
def test_run(docker_cli) -> None: # pylint: disable=redefined-outer-name
req = None
for _ in range(30):
try:
filters = {'label': 'com.sonata.analyze'}
targets = docker_cli.containers(filters=filters)
if len(targets) == 1:
container_id = targets[0].get('Id')
inspection = docker_cli.inspect_container(container_id)
container_ip = inspection.get('NetworkSettings') \
.get('IPAddress')
req = requests.get('http://{}:8888'.format(container_ip))
else:
_ = 'The son-analyze container wasn\'t found, targets=%s'
_LOGGER.debug(_, targets)
except requests.exceptions.ConnectionError as exc:
_LOGGER.warning('Unable to connect to the son-analyze ui: %s', exc)
if req and hasattr(req, 'status_code') and req.status_code == 200:
break
sleep(0.2)
assert req and hasattr(req, 'status_code')
assert req.status_code == 200
base = '/son-analyze'
# Verify that the source is here
cmd = 'find {} -ipath "*son_analyze*" -iname "main.py"'.format(base)
exec_cmd = docker_cli.exec_create(container=container_id, cmd=cmd)
exec_out = docker_cli.exec_start(exec_cmd)
assert exec_out.startswith(str.encode(base))
# Verify that weird directories were not created
cmd = 'find {} -ipath "*home*"'.format(base)
exec_cmd = docker_cli.exec_create(container=container_id, cmd=cmd)
exec_out = docker_cli.exec_start(exec_cmd)
assert not exec_out
|
Python
| 0.000003
|
@@ -3067,17 +3067,19 @@
-_
+msg
= 'The
@@ -3157,17 +3157,19 @@
R.debug(
-_
+msg
, target
|
01dd6198cba28623e3d2a72bc9b1f720a70112f0
|
Bump version to 0.2.1
|
geomet/__init__.py
|
geomet/__init__.py
|
# Copyright 2013 Lars Butler & individual contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = '0.2.0-2'
class InvalidGeoJSONException(Exception):
"""
Simple exception class to indicate if invalid GeoJSON is encountered.
"""
|
Python
| 0.000001
|
@@ -627,11 +627,9 @@
0.2.
-0-2
+1
'%0A%0A%0A
|
ab8d6fc2163e7170e8d184f1321119bbcd469709
|
Update ipc_lista1.9.py
|
lista1/ipc_lista1.9.py
|
lista1/ipc_lista1.9.py
|
#ipc_lista1.9
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Write a program that asks for the temperature in degrees Fahrenheit, converts it, and displays the temperature in degrees Celsius.
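#
# A minimal sketch of the requested program (not part of the original
# file, which contains only the problem statement):
fahrenheit = float(input("Temperature in degrees Fahrenheit: "))
celsius = (fahrenheit - 32) * 5.0 / 9.0
print("Temperature in degrees Celsius:", celsius)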
|
Python
| 0
|
@@ -190,8 +190,9 @@
elsius.%0A
+%0A
|
c9ba6d141e356b48caf1820a309e554f21e016c4
|
Transpose guards against None result
|
sympy/matrices/expressions/transpose.py
|
sympy/matrices/expressions/transpose.py
|
from sympy import Basic, Q
from sympy.functions import adjoint, conjugate
from sympy.matrices.expressions.matexpr import MatrixExpr
from sympy.matrices import MatrixBase
class Transpose(MatrixExpr):
"""
The transpose of a matrix expression.
This is a symbolic object that simply stores its argument without
evaluating it. To actually compute the transpose, use the ``transpose()``
function, or the ``.T`` attribute of matrices.
Examples
========
>>> from sympy.matrices import MatrixSymbol, Transpose
>>> from sympy.functions import transpose
>>> A = MatrixSymbol('A', 3, 5)
>>> B = MatrixSymbol('B', 5, 3)
>>> Transpose(A)
A'
>>> A.T == transpose(A) == Transpose(A)
True
>>> Transpose(A*B)
(A*B)'
>>> transpose(A*B)
B'*A'
"""
is_Transpose = True
def doit(self, **hints):
arg = self.arg
if hints.get('deep', True) and isinstance(arg, Basic):
arg = arg.doit(**hints)
try:
return arg._eval_transpose()
except AttributeError:
return Transpose(arg)
@property
def arg(self):
return self.args[0]
@property
def shape(self):
return self.arg.shape[::-1]
def _entry(self, i, j):
return self.arg._entry(j, i)
def _eval_adjoint(self):
return conjugate(self.arg)
def _eval_conjugate(self):
return adjoint(self.arg)
def _eval_transpose(self):
return self.arg
def _eval_trace(self):
from trace import Trace
return Trace(self.arg) # Trace(X.T) => Trace(X)
def _eval_determinant(self):
from sympy.matrices.expressions.determinant import det
return det(self.arg)
def transpose(expr):
""" Matrix transpose """
return Transpose(expr).doit()
|
Python
| 0.000244
|
@@ -1009,20 +1009,22 @@
re
-turn
+sult =
arg._ev
@@ -1038,16 +1038,84 @@
spose()%0A
+ return result if result is not None else Transpose(arg)%0A
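Decoded from the URL-escaped hunk above, the patched doit reads roughly as follows (a reconstruction; indentation inferred from the listing):
    def doit(self, **hints):
        arg = self.arg
        if hints.get('deep', True) and isinstance(arg, Basic):
            arg = arg.doit(**hints)
        try:
            result = arg._eval_transpose()
            # _eval_transpose() may return None; fall back to a symbolic Transpose
            return result if result is not None else Transpose(arg)
        except AttributeError:
            return Transpose(arg)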
|
6a0fd67cbe50ee952c0b8ab1a7dc29fa7b3449f5
|
Log task name more succinctly
|
nodepool/task_manager.py
|
nodepool/task_manager.py
|
#!/usr/bin/env python
# Copyright (C) 2011-2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import threading
from six.moves import queue as Queue
import logging
import time
import requests.exceptions
import stats
class ManagerStoppedException(Exception):
pass
class Task(object):
def __init__(self, **kw):
self._wait_event = threading.Event()
self._exception = None
self._traceback = None
self._result = None
self.args = kw
def done(self, result):
self._result = result
self._wait_event.set()
def exception(self, e, tb):
self._exception = e
self._traceback = tb
self._wait_event.set()
def wait(self):
self._wait_event.wait()
if self._exception:
raise self._exception, None, self._traceback
return self._result
def run(self, client):
try:
self.done(self.main(client))
except requests.exceptions.ProxyError as e:
raise e
except Exception as e:
self.exception(e, sys.exc_info()[2])
class TaskManager(threading.Thread):
log = logging.getLogger("nodepool.TaskManager")
def __init__(self, client, name, rate):
super(TaskManager, self).__init__(name=name)
self.daemon = True
self.queue = Queue.Queue()
self._running = True
self.name = name
self.rate = float(rate)
self._client = None
self.statsd = stats.get_client()
def stop(self):
self._running = False
self.queue.put(None)
def run(self):
last_ts = 0
try:
while True:
task = self.queue.get()
if not task:
if not self._running:
break
continue
while True:
delta = time.time() - last_ts
if delta >= self.rate:
break
time.sleep(self.rate - delta)
self.log.debug("Manager %s running task %s (queue: %s)" %
(self.name, task, self.queue.qsize()))
start = time.time()
self.runTask(task)
last_ts = time.time()
dt = last_ts - start
self.log.debug("Manager %s ran task %s in %ss" %
(self.name, task, dt))
if self.statsd:
#nodepool.task.PROVIDER.subkey
subkey = type(task).__name__
key = 'nodepool.task.%s.%s' % (self.name, subkey)
self.statsd.timing(key, int(dt * 1000))
self.statsd.incr(key)
self.queue.task_done()
except Exception:
self.log.exception("Task manager died.")
raise
def submitTask(self, task):
if not self._running:
raise ManagerStoppedException(
"Manager %s is no longer running" % self.name)
self.queue.put(task)
return task.wait()
def runTask(self, task):
task.run(self._client)
|
Python
| 0.999999
|
@@ -2671,20 +2671,67 @@
.name, t
-ask,
+ype(task).__name__,%0A
self.qu
@@ -2999,19 +2999,34 @@
.name, t
-ask
+ype(task).__name__
, dt))%0A
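Decoded, the two hunks log the task's class name instead of the task object itself (a reconstruction of the patched lines):
    self.log.debug("Manager %s running task %s (queue: %s)" %
                   (self.name, type(task).__name__,
                    self.queue.qsize()))
    ...
    self.log.debug("Manager %s ran task %s in %ss" %
                   (self.name, type(task).__name__, dt))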
|
b4d3d4a5ec2986e119590b9aa498d958c633171e
|
Change era interim to gldas
|
gldas/reshuffle.py
|
gldas/reshuffle.py
|
# -*- coding: utf-8 -*-
# The MIT License (MIT)
#
# Copyright (c) 2016, TU Wien
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
'''
Module for a command line interface to convert the GLDAS data into a
time series format using the repurpose package
'''
import os
import sys
import argparse
from datetime import datetime
from pygeogrids import BasicGrid
from repurpose.img2ts import Img2Ts
from gldas.interface import GLDAS_Noah_v1_025Ds,GLDAS_Noah_v21_025Ds
def get_filetype(inpath):
'''
Tries to find out the file type by searching for
grib or nc files two subdirectories into the passed input path.
If function fails, grib is assumed.
Parameters
------------
input_root: string
input path where era interim data was downloaded
'''
onedown=os.path.join(inpath,os.listdir(inpath)[0])
twodown=os.path.join(onedown,os.listdir(onedown)[0])
filelist=[]
for path, subdirs,files in os.walk(twodown):
for name in files:
filename,extension=os.path.splitext(name)
filelist.append(extension)
if '.nc4' in filelist and '.grb' not in filelist:
return 'netCDF'
elif '.grb' in filelist and '.nc4' not in filelist:
return 'grib'
else:
#if file type cannot be detected, guess grib
return 'grib'
def mkdate(datestring):
if len(datestring) == 10:
return datetime.strptime(datestring, '%Y-%m-%d')
if len(datestring) == 16:
return datetime.strptime(datestring, '%Y-%m-%dT%H:%M')
def reshuffle(input_root, outputpath,
startdate, enddate,
parameters,
imgbuffer=50):
"""
Reshuffle method applied to ERA-Interim data.
Parameters
----------
input_root: string
input path where era interim data was downloaded
outputpath : string
Output path.
startdate : datetime
Start date.
enddate : datetime
End date.
parameters: list
parameters to read and convert
imgbuffer: int, optional
How many images to read at once before writing time series.
"""
if get_filetype(input_root) == 'grib':
input_dataset = GLDAS_Noah_v1_025Ds(input_root, parameters,
array_1D=True)
else:
input_dataset = GLDAS_Noah_v21_025Ds(input_root, parameters,
array_1D=True)
if not os.path.exists(outputpath):
os.makedirs(outputpath)
global_attr = {'product': 'GLDAS'}
# get time series attributes from first day of data.
data = input_dataset.read(startdate)
ts_attributes = data.metadata
grid = BasicGrid(data.lon, data.lat)
reshuffler = Img2Ts(input_dataset=input_dataset, outputpath=outputpath,
startdate=startdate, enddate=enddate,
input_grid=grid,
imgbuffer=imgbuffer, cellsize_lat=5.0, cellsize_lon=5.0,
global_attr=global_attr,
ts_attributes=ts_attributes)
reshuffler.calc()
def parse_args(args):
"""
Parse command line parameters for conversion from image to timeseries
:param args: command line parameters as list of strings
:return: command line parameters as :obj:`argparse.Namespace`
"""
parser = argparse.ArgumentParser(
description="Convert GLDAS data to time series format.")
parser.add_argument("dataset_root",
help='Root of local filesystem where the data is stored.')
parser.add_argument("timeseries_root",
help='Root of local filesystem where the timeseries should be stored.')
parser.add_argument("start", type=mkdate,
help=("Startdate. Either in format YYYY-MM-DD or YYYY-MM-DDTHH:MM."))
parser.add_argument("end", type=mkdate,
help=("Enddate. Either in format YYYY-MM-DD or YYYY-MM-DDTHH:MM."))
parser.add_argument("parameters", metavar="parameters",
nargs="+",
help=("Parameters to download in numerical format. e.g."
"086_L1 086_L2 086_L3 086_L4 for Volumetric soil water layers 1 to 4."))
parser.add_argument("--imgbuffer", type=int, default=50,
help=("How many images to read at once. Bigger numbers make the "
"conversion faster but consume more memory."))
args = parser.parse_args(args)
# set defaults that can not be handled by argparse
print("Converting data from {} to {} into folder {}.".format(args.start.isoformat(),
args.end.isoformat(),
args.timeseries_root))
return args
def main(args):
args = parse_args(args)
reshuffle(args.dataset_root,
args.timeseries_root,
args.start,
args.end,
args.parameters,
imgbuffer=args.imgbuffer)
def run():
main(sys.argv[1:])
if __name__ == '__main__':
run()
|
Python
| 0.000002
|
@@ -1740,35 +1740,29 @@
path where
-era interim
+GLDAS
data was do
|
e2f49a941ac34d86be2fbea177e7f84685787c91
|
Add rdfs:label and make first type less confusing For #7-pure-papers
|
src/main/python/dot/rural/sepake/oai.py
|
src/main/python/dot/rural/sepake/oai.py
|
'''
Created on 2 Dec 2014
@author: Niels Christensen
'''
from dot.rural.sepake.xml_to_rdf import XMLGraph
import urllib2
from rdflib.term import URIRef
_PATH_TO_RESUMPTION_TOKEN = URIRef(u'http://www.openarchives.org/OAI/2.0/#resumptionToken') / URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#value')
_CONSTRUCT_PAPERS = '''
PREFIX oai_hash: <http://www.openarchives.org/OAI/2.0/#>
PREFIX oai_dc_hash: <http://www.openarchives.org/OAI/2.0/oai_dc/#>
PREFIX dc_hash: <http://purl.org/dc/elements/1.1/#>
PREFIX dc: <http://purl.org/dc/elements/1.1/>
PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
PREFIX sepake: <http://dot.rural/sepake/>
PREFIX sepakecode: <http://dot.rural/sepake/code>
PREFIX prov: <http://www.w3.org/ns/prov/>
CONSTRUCT {
?sepakeuri rdf:type sepake:Unverified .
?sepakeuri dc:title ?title .
?sepakeuri dc:description ?description .
?sepakeuri dc:subject ?subject .
?sepakeuri sepake:wasDetailedByData ?pureurl .
?sepakeuri sepake:wasDetailedByCode sepakecode:PureRestPublication .
}
WHERE {
?record oai_hash:header / oai_hash:identifier / rdf:value ?identifier .
?record oai_hash:metadata / oai_dc_hash:dc / dc_hash:title / rdf:value ?title .
?record oai_hash:metadata / oai_dc_hash:dc / dc_hash:description / rdf:value ?description .
?record oai_hash:metadata / oai_dc_hash:dc / dc_hash:subject / rdf:value ?subject .
BIND ( ( STRAFTER ( ?identifier, "/" ) ) AS ?uuid )
BIND ( URI ( CONCAT (str ( sepake:PurePublication ), "#", ENCODE_FOR_URI( ?uuid ) ) ) AS ?sepakeuri )
BIND ( puredomain: AS ?pd)
BIND ( ( URI ( CONCAT ( STR( ?pd ), "ws/rest/publication?uuids.uuid=", ?uuid) ) ) AS ?pureurl )
FILTER ( CONTAINS ( LCASE ( ?subject ), "environment" ) )
}
'''
class OAIHarvester(object):
def __init__(self, location, pureset):
self._location = location
self._url = 'http://{}/ws/oai?verb=ListRecords&set={}&metadataPrefix=oai_dc'.format(self._location, pureset)
self._more = True
def _next(self):
xml_input = urllib2.urlopen(self._url, timeout=20)
page = XMLGraph(xml_input)
self._handle_resumption_token(page)
return page.query(_CONSTRUCT_PAPERS, initNs={'puredomain' : URIRef('http://{}/'.format(self._location))})
def _handle_resumption_token(self, page):
resumptionToken = list(page.objects(predicate = _PATH_TO_RESUMPTION_TOKEN))
assert len(resumptionToken) <= 1, 'OAI page had {} resumptionTokens'.format(len(resumptionToken))
if len(resumptionToken) == 0:
self._more = False
self._url = 'No more pages'
else:
self._more = True
self._url = 'http://{}/ws/oai?verb=ListRecords&resumptionToken={}'.format(self._location, resumptionToken[0])
def __iter__(self):
while (self._more):
yield self._next()
|
Python
| 0.000001
|
@@ -744,61 +744,142 @@
v/%3E%0A
-CONSTRUCT %7B%0A ?sepakeuri rdf:type sepake:Unverified
+PREFIX rdfs: %3Chttp://www.w3.org/2000/01/rdf-schema#%3E %0ACONSTRUCT %7B%0A ?sepakeuri rdf:type prov:Entity .%0A ?sepakeuri dc:title ?title
.%0A
@@ -888,32 +888,34 @@
?sepakeuri
-dc:title
+rdfs:label
?title .%0A
|
92699fa0ac8c97a5a54da2a4155b08145c524d5d
|
revert the previous change: regression found
|
web_seven/openerpweb.py
|
web_seven/openerpweb.py
|
# -*- coding: utf-8 -*-
def patch_web7():
import babel
import os.path
import sys
import openerp.addons.web
try:
from openerp.addons.web import http as openerpweb
except ImportError:
# OpenERP Web 6.1
return
# Self-reference for 6.1 modules which import 'web.common.http'
openerp.addons.web.common = openerp.addons.web
sys.modules['openerp.addons.web.common'] = openerp.addons.web
# Adapt the OpenERP Web 7.0 method for OpenERP 6.1 server
@openerpweb.jsonrequest
def translations(self, req, mods, lang):
res_lang = req.session.model('res.lang')
ids = res_lang.search([("code", "=", lang)])
lang_params = None
if ids:
lang_params = res_lang.read(ids[0], ["direction", "date_format", "time_format",
"grouping", "decimal_point", "thousands_sep"])
separator = '_' if '_' in lang else '@'
langs = lang.split(separator)
langs = [separator.join(langs[:x]) for x in range(1, len(langs) + 1)]
translations_per_module = {}
for addon_name in mods:
translations_per_module[addon_name] = transl = {"messages": []}
addons_path = openerpweb.addons_manifest[addon_name]['addons_path']
for l in langs:
f_name = os.path.join(addons_path, addon_name, "i18n", l + ".po")
try:
with open(f_name) as t_file:
po = babel.messages.pofile.read_po(t_file)
except Exception:
continue
for x in po:
if x.id and x.string and "openerp-web" in x.auto_comments:
transl["messages"].append({'id': x.id, 'string': x.string})
return {"modules": translations_per_module,
"lang_parameters": lang_params}
openerp.addons.web.controllers.main.WebClient.translations = translations
|
Python
| 0.000306
|
@@ -76,23 +76,8 @@
path
-%0A import sys
%0A%0A
@@ -241,194 +241,8 @@
rn%0A%0A
- # Self-reference for 6.1 modules which import 'web.common.http'%0A openerp.addons.web.common = openerp.addons.web%0A sys.modules%5B'openerp.addons.web.common'%5D = openerp.addons.web%0A%0A
|
d091f28028af0d303dc0e8fe76f18b9aa82fda81
|
Tidy up comments in Method according to PEP 8
|
malcolm/core/method.py
|
malcolm/core/method.py
|
#!/bin/env dls-python
from collections import OrderedDict
from malcolm.core.loggable import Loggable
class Method(Loggable):
"""Exposes a function with metadata for arguments and return values"""
def __init__(self, name):
super(Method, self).__init__(logger_name=name)
self.name = name
self.func = None
self.takes = None
self.returns = None
self.defaults = None
def set_function(self, func):
"""Set the function to expose.
Function must accept a dictionary of keyword arguments
and return either a single value or dictionary of results.
"""
self.func = func
def set_function_takes(self, arg_meta, defaults=None):
"""Set the arguments and default values for the method
Args:
arg_meta (MapMeta): Arguments to the function
default (dict): Default values for arguments (default None)
"""
self.takes = arg_meta
if defaults is not None:
self.defaults = OrderedDict(defaults)
else:
self.defaults = OrderedDict()
def set_function_returns(self, return_meta):
"""Set the return parameters for the method to validate against"""
self.returns = return_meta
def __call__(self, *args, **kwargs):
"""Call the exposed function using regular keyword argument parameters.
Will validate the output against provided return parameters.
"""
#Assumes positional arguments represent arguments *before* any kw-args
#in the ordered dictionary.
for arg, arg_val in zip(self.takes.elements.keys(), args):
kwargs[arg] = arg_val
for arg in self.takes.elements:
if arg not in kwargs.keys():
if arg in self.defaults.keys():
kwargs[arg] = self.defaults[arg]
elif arg in self.takes.required:
raise ValueError(
"Argument %s is required but was not provided" % arg)
return_val = self.func(kwargs)
if self.returns is not None:
if return_val.keys() != self.returns.elements.keys():
raise ValueError(
"Return result did not match specified return structure")
for r_name, r_val in return_val.iteritems():
self.returns.elements[r_name].validate(r_val)
return return_val
def handle_request(self, request):
"""Call exposed function using request parameters and respond with the
result"""
result = self(**request.parameters)
request.respond_with_return(result)
def to_dict(self):
pass
|
Python
| 0
|
@@ -1485,16 +1485,17 @@
#
+
Assumes
@@ -1565,16 +1565,17 @@
#
+
in the o
|
f03b47e987ed2259b10e21123ed8bca711b8bf15
|
Add -D_REENTRANT to cflags as per Gemfire docs
|
binding.gyp
|
binding.gyp
|
# vim: set ft=javascript
{
# NOTE: 'module_name' and 'module_path' come from the 'binary' property in package.json
# node-pre-gyp handles passing them down to node-gyp when you build from source
"targets": [
{
"target_name": "<(module_name)",
"include_dirs": [ "include" ],
"sources": [ "src/binding.cpp" ],
'conditions': [
['OS=="mac"', {
'xcode_settings': {
'GCC_ENABLE_CPP_RTTI': 'YES',
'GCC_ENABLE_CPP_EXCEPTIONS': 'YES'
}
}],
['OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris"', {
'cflags_cc!': ['-fno-rtti', '-fno-exceptions'],
'cflags_cc+': ['-frtti'],
"libraries": [ "<(module_root_dir)/lib/libgfcppcache.so" ]
}]
]
},
{
"target_name": "action_after_build",
"type": "none",
"dependencies": [ "<(module_name)" ],
"copies": [
{
"files": [ "<(PRODUCT_DIR)/<(module_name).node" ],
"destination": "<(module_path)"
}
]
}
]
}
|
Python
| 0
|
@@ -358,17 +358,16 @@
mac%22', %7B
-
%0A
@@ -386,17 +386,16 @@
ings': %7B
-
%0A
@@ -481,17 +481,16 @@
%7D
-
%0A %7D
@@ -654,16 +654,32 @@
'-frtti'
+, '-D_REENTRANT'
%5D,%0A
|
726e70910e72f68085ecc7cdcc2d474c2ba99c6a
|
add source dir to include_dirs
|
binding.gyp
|
binding.gyp
|
{
"targets": [
{
"target_name": "node_mifare",
"dependencies": ["node_modules/libfreefare-pcsc/binding.gyp:freefare_pcsc"],
"conditions": [
['OS=="linux"', {
"defines": [
"USE_LIBNFC",
],
}]
],
"sources": [
"src/mifare.cc",
"src/reader.cc",
"src/desfire.cc",
"src/utils.cc"
],
"cflags": [
"-Wall",
"-Wextra",
"-Wno-unused-parameter",
"-fPIC",
"-fno-strict-aliasing",
"-fno-exceptions",
"-pedantic"
],
}
]
}
|
Python
| 0.000001
|
@@ -1,11 +1,61 @@
%7B%0D%0A
+ %22variables%22: %7B%0D%0A %22source_dir%22: %22src%22,%0D%0A %7D,%0D%0A
%22targe
@@ -323,24 +323,66 @@
%0D%0A %5D,%0D%0A
+ %22include_dirs%22: %5B%22%3C(source_dir)%22%5D,%0D%0A
%22sourc
|
2efa169358051980279abee57fda495273326735
|
add ORIGIN/lib
|
binding.gyp
|
binding.gyp
|
{
'includes': [ 'common.gypi' ],
'targets': [
{
'target_name': 'make_vector_tile',
'hard_dependency': 1,
'type': 'none',
'actions': [
{
'action_name': 'generate_setting',
'inputs': [
'gen_settings.py'
],
'outputs': [
'<(SHARED_INTERMEDIATE_DIR)/mapnik_settings.js'
],
'action': ['python', 'gen_settings.py', '<(SHARED_INTERMEDIATE_DIR)/mapnik_settings.js']
}
],
'copies': [
{
'files': [ '<(SHARED_INTERMEDIATE_DIR)/mapnik_settings.js' ],
'destination': '<(module_path)'
}
]
},
{
'target_name': '<(module_name)',
'dependencies': [ 'make_vector_tile' ],
'product_dir': '<(module_path)',
'sources': [
"src/mapnik_logger.cpp",
"src/node_mapnik.cpp",
"src/blend.cpp",
"src/mapnik_map.cpp",
"src/mapnik_color.cpp",
"src/mapnik_geometry.cpp",
"src/mapnik_feature.cpp",
"src/mapnik_image.cpp",
"src/mapnik_image_view.cpp",
"src/mapnik_grid.cpp",
"src/mapnik_grid_view.cpp",
"src/mapnik_memory_datasource.cpp",
"src/mapnik_palette.cpp",
"src/mapnik_projection.cpp",
"src/mapnik_layer.cpp",
"src/mapnik_datasource.cpp",
"src/mapnik_featureset.cpp",
"src/mapnik_expression.cpp",
"src/mapnik_cairo_surface.cpp",
"src/mapnik_vector_tile.cpp",
"deps/clipper/clipper.cpp"
],
"msvs_disabled_warnings": [
4267
],
'include_dirs': [
'./deps/clipper/',
'./mason_packages/.link/include/',
'./mason_packages/.link/include/freetype2',
'./mason_packages/.link/include/cairo',
'./src',
"<!(node -e \"require('nan')\")",
"<!(node -e \"require('mapnik-vector-tile')\")"
],
'defines': [
'MAPNIK_GIT_REVISION="<!@(<(module_root_dir)/mason_packages/.link/bin/mapnik-config --git-describe)"',
'CLIPPER_INTPOINT_IMPL=mapnik::geometry::point<cInt>',
'CLIPPER_PATH_IMPL=mapnik::geometry::line_string<cInt>',
'CLIPPER_PATHS_IMPL=mapnik::geometry::multi_line_string<cInt>',
'CLIPPER_IMPL_INCLUDE=<mapnik/geometry.hpp>'
],
'conditions': [
['OS=="win"',
{
'include_dirs':[
'<!@(<(module_root_dir)/mason_packages/.link/bin/mapnik-config --includes)',
'<!@(<(module_root_dir)/mason_packages/.link/bin/mapnik-config --dep-includes)'
],
'defines': ['NOMINMAX','<!@(<(module_root_dir)/mason_packages/.link/bin/mapnik-config --defines)'],
'defines!': ["_HAS_EXCEPTIONS=0"],
'libraries': [
'<!@(<(module_root_dir)/mason_packages/.link/bin/mapnik-config --libs)',
'mapnik-wkt.lib',
'mapnik-json.lib',
'<!@(<(module_root_dir)/mason_packages/.link/bin/mapnik-config --dep-libs)',
],
'msvs_disabled_warnings': [ 4244,4005,4506,4345,4804,4805 ],
'msvs_settings': {
'VCLinkerTool': {
'AdditionalLibraryDirectories': [
'<!@(<(module_root_dir)/mason_packages/.link/bin/mapnik-config --ldflags)'
],
},
}
},
{
'cflags_cc!': ['-fno-rtti', '-fno-exceptions'],
'cflags_cc' : [
'<!@(<(module_root_dir)/mason_packages/.link/bin/mapnik-config --cflags)',
'-D_GLIBCXX_USE_CXX11_ABI=0'
],
'libraries':[
'<!@(<(module_root_dir)/mason_packages/.link/bin/mapnik-config --libs)',
'-lmapnik-wkt',
'-lmapnik-json',
'<!@(<(module_root_dir)/mason_packages/.link/bin/mapnik-config --ldflags)',
],
'ldflags': [
'-Wl,-z,now',
],
'xcode_settings': {
'OTHER_CPLUSPLUSFLAGS':[
'<!@(<(module_root_dir)/mason_packages/.link/bin/mapnik-config --cflags)',
],
'OTHER_CFLAGS':[
'<!@(<(module_root_dir)/mason_packages/.link/bin/mapnik-config --cflags)'
],
'OTHER_LDFLAGS':[
'-Wl,-bind_at_load'
],
'GCC_ENABLE_CPP_RTTI': 'YES',
'GCC_ENABLE_CPP_EXCEPTIONS': 'YES',
'MACOSX_DEPLOYMENT_TARGET':'10.8',
'CLANG_CXX_LIBRARY': 'libc++',
'CLANG_CXX_LANGUAGE_STANDARD':'c++11',
'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0'
}
},
]
]
},
{
'target_name': 'action_after_build',
'type': 'none',
'dependencies': [ '<(module_name)' ],
'hard_dependency': 1,
'actions': [
{
'action_name': 'postinstall',
'inputs': ['./scripts/postinstall.sh'],
'outputs': ['./lib/binding/mapnik'],
'action': ['./scripts/postinstall.sh']
}
]
},
]
}
|
Python
| 0
|
@@ -3971,16 +3971,88 @@
z,now',%0A
+ %22-Wl,-z,origin%22,%0A %22-Wl,-rpath=%5C$$ORIGIN/lib%22%0A
|
7dc9a5518db6a8176f04ee63681d65a9ad6f7974
|
fix python 3.2 compat
|
teamcity/nose_report.py
|
teamcity/nose_report.py
|
# coding=utf-8
import os
from teamcity import is_running_under_teamcity
from teamcity.unittestpy import TeamcityTestResult
from teamcity.common import is_string, split_output, limit_output
# from nose.util.ln
def _ln(label):
label_len = len(label) + 2
chunk = (70 - label_len) // 2
out = '%s %s %s' % ('-' * chunk, label, '-' * chunk)
pad = 70 - len(out)
if pad > 0:
out = out + ('-' * pad)
return out
_captured_output_start_marker = _ln(u'>> begin captured stdout <<') + "\n"
_captured_output_end_marker = "\n" + _ln(u'>> end captured stdout <<')
class TeamcityReport(TeamcityTestResult):
name = 'teamcity-report'
score = 10000
def __init__(self):
super(TeamcityReport, self).__init__()
self.enabled = False
def configure(self, options, conf):
self.enabled = is_running_under_teamcity()
def options(self, parser, env=os.environ):
pass
def report_fail(self, test, fail_type, err):
test_id = self.get_test_id(test)
details = self.convert_error_to_string(err)
start_index = details.find(_captured_output_start_marker)
end_index = details.find(_captured_output_end_marker)
if 0 <= start_index < end_index:
captured_output = details[start_index + len(_captured_output_start_marker):end_index]
details = details[:start_index] + details[end_index + len(_captured_output_end_marker):]
for chunk in split_output(limit_output(captured_output)):
self.messages.testStdOut(test_id, chunk, flowId=test_id)
self.messages.testFailed(test_id, message=fail_type, details=details, flowId=test_id)
def is_doctest_class_name(self, fqn):
return super(TeamcityReport, self).is_doctest_class_name(fqn) or fqn == "nose.plugins.doctests.DocTestCase"
def addDeprecated(self, test):
test_id = self.get_test_id(test)
self.messages.testIgnored(test_id, message="Deprecated", flowId=test_id)
def _lastPart(self, name):
nameParts = name.split('.')
return nameParts[-1]
def setOutputStream(self, stream):
self.output = stream
self.create_messages()
class dummy:
def write(self, *arg):
pass
def writeln(self, *arg):
pass
def flush(self):
pass
d = dummy()
return d
|
Python
| 0.99899
|
@@ -464,25 +464,24 @@
arker = _ln(
-u
'%3E%3E begin ca
@@ -547,17 +547,16 @@
%22 + _ln(
-u
'%3E%3E end
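The two hunks drop the u'' prefixes, which Python 3.2 rejects (the prefix was removed in Python 3.0 and only reintroduced in 3.3 by PEP 414). Decoded, the patched lines read:
_captured_output_start_marker = _ln('>> begin captured stdout <<') + "\n"
_captured_output_end_marker = "\n" + _ln('>> end captured stdout <<')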
|
e24c74aa08c1be4e196001f3511a3742acc61e81
|
Add basic read-only tests for heat cli
|
tempest/cli/__init__.py
|
tempest/cli/__init__.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import shlex
import subprocess
from oslo.config import cfg
import tempest.cli.output_parser
from tempest.openstack.common import log as logging
import tempest.test
LOG = logging.getLogger(__name__)
cli_opts = [
cfg.BoolOpt('enabled',
default=True,
help="enable cli tests"),
cfg.StrOpt('cli_dir',
default='/usr/local/bin',
help="directory where python client binaries are located"),
cfg.IntOpt('timeout',
default=15,
help="Number of seconds to wait on a CLI timeout"),
]
CONF = cfg.CONF
cli_group = cfg.OptGroup(name='cli', title="cli Configuration Options")
CONF.register_group(cli_group)
CONF.register_opts(cli_opts, group=cli_group)
class ClientTestBase(tempest.test.BaseTestCase):
@classmethod
def setUpClass(cls):
if not CONF.cli.enabled:
msg = "cli testing disabled"
raise cls.skipException(msg)
cls.identity = cls.config.identity
super(ClientTestBase, cls).setUpClass()
def __init__(self, *args, **kwargs):
self.parser = tempest.cli.output_parser
super(ClientTestBase, self).__init__(*args, **kwargs)
def nova(self, action, flags='', params='', admin=True, fail_ok=False):
"""Executes nova command for the given action."""
return self.cmd_with_auth(
'nova', action, flags, params, admin, fail_ok)
def nova_manage(self, action, flags='', params='', fail_ok=False,
merge_stderr=False):
"""Executes nova-manage command for the given action."""
return self.cmd(
'nova-manage', action, flags, params, fail_ok, merge_stderr)
def keystone(self, action, flags='', params='', admin=True, fail_ok=False):
"""Executes keystone command for the given action."""
return self.cmd_with_auth(
'keystone', action, flags, params, admin, fail_ok)
def glance(self, action, flags='', params='', admin=True, fail_ok=False):
"""Executes glance command for the given action."""
return self.cmd_with_auth(
'glance', action, flags, params, admin, fail_ok)
def ceilometer(self, action, flags='', params='', admin=True,
fail_ok=False):
"""Executes ceilometer command for the given action."""
return self.cmd_with_auth(
'ceilometer', action, flags, params, admin, fail_ok)
def cinder(self, action, flags='', params='', admin=True, fail_ok=False):
"""Executes cinder command for the given action."""
return self.cmd_with_auth(
'cinder', action, flags, params, admin, fail_ok)
def neutron(self, action, flags='', params='', admin=True, fail_ok=False):
"""Executes neutron command for the given action."""
return self.cmd_with_auth(
'neutron', action, flags, params, admin, fail_ok)
def cmd_with_auth(self, cmd, action, flags='', params='',
admin=True, fail_ok=False):
"""Executes given command with auth attributes appended."""
# TODO(jogo) make admin=False work
creds = ('--os-username %s --os-tenant-name %s --os-password %s '
'--os-auth-url %s ' %
(self.identity.admin_username,
self.identity.admin_tenant_name,
self.identity.admin_password,
self.identity.uri))
flags = creds + ' ' + flags
return self.cmd(cmd, action, flags, params, fail_ok)
def cmd(self, cmd, action, flags='', params='', fail_ok=False,
merge_stderr=False):
"""Executes specified command for the given action."""
cmd = ' '.join([os.path.join(CONF.cli.cli_dir, cmd),
flags, action, params])
LOG.info("running: '%s'" % cmd)
cmd_str = cmd
cmd = shlex.split(cmd)
result = ''
result_err = ''
try:
stdout = subprocess.PIPE
stderr = subprocess.STDOUT if merge_stderr else subprocess.PIPE
proc = subprocess.Popen(
cmd, stdout=stdout, stderr=stderr)
result, result_err = proc.communicate()
if not fail_ok and proc.returncode != 0:
raise CommandFailed(proc.returncode,
cmd,
result,
stderr=result_err)
finally:
LOG.debug('output of %s:\n%s' % (cmd_str, result))
if not merge_stderr and result_err:
LOG.debug('error output of %s:\n%s' % (cmd_str, result_err))
return result
def assertTableStruct(self, items, field_names):
"""Verify that all items has keys listed in field_names."""
for item in items:
for field in field_names:
self.assertIn(field, item)
def assertFirstLineStartsWith(self, lines, beginning):
self.assertTrue(lines[0].startswith(beginning),
msg=('Beginning of first line has invalid content: %s'
% lines[:3]))
class CommandFailed(subprocess.CalledProcessError):
# adds output attribute for python2.6
def __init__(self, returncode, cmd, output, stderr=""):
super(CommandFailed, self).__init__(returncode, cmd)
self.output = output
self.stderr = stderr
|
Python
| 0.000001
|
@@ -3119,32 +3119,274 @@
dmin, fail_ok)%0A%0A
+ def heat(self, action, flags='', params='', admin=True,%0A fail_ok=False):%0A %22%22%22Executes heat command for the given action.%22%22%22%0A return self.cmd_with_auth(%0A 'heat', action, flags, params, admin, fail_ok)%0A%0A
def cinder(s
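Decoded, the inserted helper mirrors the other per-client wrappers in this class (a reconstruction; whitespace inferred):
    def heat(self, action, flags='', params='', admin=True,
             fail_ok=False):
        """Executes heat command for the given action."""
        return self.cmd_with_auth(
            'heat', action, flags, params, admin, fail_ok)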
|
d060b5b7daba4a62174f6ad3fbacd6cdd4da11ef
|
change spawn rate
|
asteroid.py
|
asteroid.py
|
# Permite usar o modulo do Sense HAT
from sense_hat import SenseHat
from time import sleep
import pygame,sys,random
sense = SenseHat()
red = [255,0,0]
green = [0,255,0]
yellow = [255,255,0]
black = [0,0,0]
white = [255,255,255]
pygame.init()
pygame.display.set_mode()
sense.clear(0,0,0)
#direction, 0-right, 1-up, etc
#initial postition
player = [3,4]
IDIndex = pygame.USEREVENT + 1
#list of enemy : [x,y,direction]
enemies = {}
def movePlayer(direction):
global player
sense.set_pixel(player[0],player[1], black)
if direction == 0:
player[0] -= 1
#up
elif direction == 1:
player[1] += 1
#left
elif direction == 2:
player[0] += 1
#down
elif direction == 3:
player[1] -= 1
for key,value in enemies.items():
if value != None:
gameover(value)
sense.set_pixel(player[0],player[1], green)
def moveEnemy(enemy):
delete = True
for key,value in enemies.items():
if value != None:
if enemy[3] != value[3]:
if enemy[0] == value[0] and enemy[1] == value[1]:
delete = False
break
if delete:
sense.set_pixel(enemy[0],enemy[1], black) #clear
#right
if enemy[2] == 0:
enemy[0] -= 1
#up
elif enemy[2] == 1:
enemy[1] += 1
#left
elif enemy[2] == 2:
enemy[0] += 1
#down
elif enemy[2] == 3:
enemy[1] -= 1
if (enemy[0] < 0 or enemy[0] > 7) or (enemy[1] < 0 or enemy[1] > 7):
removeEnemy(enemy)
else:
#gameover here
gameover(enemy)
drawEnemy(enemy)
def drawEnemy(enemy):
sense.set_pixel(enemy[0],enemy[1], red) #draw
def gameover(enemy):
global start
if player[0] == enemy[0] and player[1] == enemy[1]:
end = pygame.time.get_ticks()
for x in range(0,3):
sense.set_pixel(enemy[0],enemy[1], yellow)
sleep(0.5)
sense.set_pixel(enemy[0],enemy[1], white)
sleep(0.5)
sense.set_rotation(180)
sense.show_message("Score = " + str((end - start)/1000))
sys.exit()
def getIndex():
global IDIndex
if IDIndex < 32:
index = IDIndex
IDIndex += 1
return index
else:
for key, value in enemies.items():
if value == None:
return key
def removeEnemy(enemy):
pygame.time.set_timer(enemy[3],0)
enemies[enemy[3]] = None
def createEnemy():
global createEnemyID
randomDirection = random.randrange(4)
if randomDirection == 0:
x = 7
y = random.randrange(8)
#up
elif randomDirection == 1:
y = 0
x = random.randrange(8)
#left
elif randomDirection == 2:
x = 0
y = random.randrange(8)
#down
elif randomDirection == 3:
y = 7
x = random.randrange(8)
index = getIndex()
enemy = [x,y,randomDirection, index]
enemies[index] = enemy
pygame.time.set_timer(index,1000)
drawEnemy(enemy)
pygame.time.set_timer(createEnemyID,random.randrange(1500,2500))
createEnemyID = IDIndex
IDIndex += 1
pygame.time.set_timer(createEnemyID,random.randrange(1500,2500))
movePlayer(0)
start = pygame.time.get_ticks()
#game cycle
while 1:
for event in pygame.event.get():
if event.type == pygame.QUIT:
sys.exit()
elif event.type == pygame.KEYDOWN:
if event.key == pygame.K_UP and player[1] < 7:
movePlayer(1)
elif event.key == pygame.K_DOWN and player[1] > 0:
movePlayer(3)
elif event.key == pygame.K_RIGHT and player[0] > 0:
movePlayer(0)
elif event.key == pygame.K_LEFT and player[0] < 7:
movePlayer(2)
elif event.type in enemies:
moveEnemy(enemies[event.type])
elif event.type == createEnemyID:
createEnemy()
|
Python
| 0
|
@@ -3031,21 +3031,42 @@
r(index,
-10
+random.randrange(250,5
00)
+)
%0A dra
@@ -3132,30 +3132,29 @@
m.randrange(
-1
500,
-2
+1
500))%0A%0Acreat
@@ -3226,35 +3226,11 @@
yID,
-random.randrange(1500,
250
-0)
)%0Amo
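Decoded, the three hunks retune the pygame timers (reconstructed after-state, previous values noted in comments):
pygame.time.set_timer(index, random.randrange(250, 500))           # per-enemy move timer, was 1000
pygame.time.set_timer(createEnemyID, random.randrange(500, 1500))  # respawn inside createEnemy, was randrange(1500, 2500)
pygame.time.set_timer(createEnemyID, 250)                          # initial spawn timer, was randrange(1500, 2500)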
|
0b7eca03c652b5afefb7eabd48011310b122acbc
|
Fix #47 contect handling
|
envelope/templatetags/envelope_tags.py
|
envelope/templatetags/envelope_tags.py
|
# -*- coding: utf-8 -*-
"""
Template tags related to the contact form.
"""
from __future__ import unicode_literals
from django import template
register = template.Library()
try:
import honeypot
# Register antispam_fields as an inclusion tag
t = template.Template('{% load honeypot %}{% render_honeypot_field %}')
register.inclusion_tag(t, name='antispam_fields')(lambda: {})
except ImportError: # pragma: no cover
# Register antispam_fields as an empty tag
register.simple_tag(name='antispam_fields')(lambda: '')
@register.inclusion_tag('envelope/contact_form.html', takes_context=True)
def render_contact_form(context):
"""
Renders the contact form which must be in the template context.
The most common use case for this template tag is to call it in the
template rendered by :class:`~envelope.views.ContactView`. The template
tag will then render a sub-template ``envelope/contact_form.html``.
"""
try:
form = context['form']
except KeyError:
raise template.TemplateSyntaxError("There is no 'form' variable in the template context.")
return {
'form': form,
}
|
Python
| 0
|
@@ -960,63 +960,32 @@
-try:%0A form = context%5B'form'%5D%0A except KeyError
+if 'form' not in context
:%0A
@@ -1025,16 +1025,29 @@
axError(
+%0A
%22There i
@@ -1096,47 +1096,34 @@
xt.%22
-)
%0A
-return %7B%0A
+)%0A
-'form': form,%0A %7D
+return context
%0A
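Decoded, the patched tag now validates the context and returns it whole instead of rebuilding a one-key dict, presumably so other context variables stay visible to the sub-template (a reconstruction; indentation inferred):
    if 'form' not in context:
        raise template.TemplateSyntaxError(
            "There is no 'form' variable in the template context.")
    return context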
|
444baf986cf90a952f5d2406b5aba60113494349
|
Add FloatingIP object implementation
|
nova/objects/__init__.py
|
nova/objects/__init__.py
|
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def register_all():
# NOTE(danms): You must make sure your object gets imported in this
# function in order for it to be registered by services that may
# need to receive it via RPC.
__import__('nova.objects.dns_domain')
__import__('nova.objects.instance')
__import__('nova.objects.instance_info_cache')
__import__('nova.objects.security_group')
__import__('nova.objects.migration')
__import__('nova.objects.quotas')
__import__('nova.objects.virtual_interface')
__import__('nova.objects.network')
__import__('nova.objects.block_device')
__import__('nova.objects.fixed_ip')
|
Python
| 0.000002
|
@@ -1220,12 +1220,55 @@
.fixed_ip')%0A
+ __import__('nova.objects.floating_ip')%0A
|
7dd346572cdccfdd2d1b0fae04241e252bb39e74
|
Add CLI test for invalid format
|
numba/tests/test_help.py
|
numba/tests/test_help.py
|
from __future__ import print_function
import sys
import subprocess
import types as pytypes
import os.path
import numpy as np
from numba.six.moves import builtins
from numba import types
from .support import TestCase, temp_directory
from numba.help.inspector import inspect_function, inspect_module
class TestInspector(TestCase):
def check_function_descriptor(self, info, must_be_defined=False):
self.assertIsInstance(info, dict)
self.assertIn('numba_type', info)
numba_type = info['numba_type']
if numba_type is None:
self.assertFalse(must_be_defined)
else:
self.assertIsInstance(numba_type, types.Type)
self.assertIn('explained', info)
self.assertIsInstance(info['explained'], str)
self.assertIn('source_infos', info)
self.assertIsInstance(info['source_infos'], dict)
def test_inspect_function_on_range(self):
info = inspect_function(range)
self.check_function_descriptor(info, must_be_defined=True)
def test_inspect_function_on_np_all(self):
info = inspect_function(np.all)
self.check_function_descriptor(info, must_be_defined=True)
source_infos = info['source_infos']
self.assertGreater(len(source_infos), 0)
c = 0
for srcinfo in source_infos.values():
self.assertIsInstance(srcinfo['kind'], str)
self.assertIsInstance(srcinfo['name'], str)
self.assertIsInstance(srcinfo['sig'], str)
self.assertIsInstance(srcinfo['filename'], str)
self.assertIsInstance(srcinfo['lines'], tuple)
self.assertIn('docstring', srcinfo)
c += 1
self.assertEqual(c, len(source_infos))
def test_inspect_module(self):
c = 0
for it in inspect_module(builtins):
self.assertIsInstance(it['module'], pytypes.ModuleType)
self.assertIsInstance(it['name'], str)
self.assertTrue(callable(it['obj']))
self.check_function_descriptor(it)
c += 1
self.assertGreater(c, 0)
def test_inspect_cli(self):
# Try CLI on math module
cmdbase = [sys.executable, '-m', 'numba.help.inspector']
# Try default format "html"
dirpath = temp_directory('{}.{}'.format(__name__,
self.__class__.__name__))
filename = os.path.join(dirpath, 'out')
expected_file = filename + '.html'
cmds = cmdbase + ['--file', filename, 'math']
# File shouldn't exist yet
self.assertFalse(os.path.isfile(expected_file))
# Run CLI
subprocess.check_output(cmds)
# File should exist now
self.assertTrue(os.path.isfile(expected_file))
# Try changing the format to "rst"
cmds = cmdbase + ['--file', filename, '--format', 'rst', 'math']
expected_file = filename + '.rst'
# File shouldn't exist yet
self.assertFalse(os.path.isfile(expected_file))
# Run CLI
subprocess.check_output(cmds)
# File should exist now
self.assertTrue(os.path.isfile(expected_file))
|
Python
| 0
|
@@ -3157,28 +3157,370 @@
path.isfile(expected_file))%0A
+%0A # Try unsupported format%0A cmds = cmdbase + %5B'--file', filename, '--format', 'foo', 'math'%5D%0A # Run CLI%0A with self.assertRaises(subprocess.CalledProcessError) as raises:%0A subprocess.check_output(cmds, stderr=subprocess.STDOUT)%0A self.assertIn(%22foo is not supported%22, str(raises.exception.stdout))%0A
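Decoded, the appended block is the new negative test for an unknown --format value (a reconstruction; indentation inferred):
        # Try unsupported format
        cmds = cmdbase + ['--file', filename, '--format', 'foo', 'math']
        # Run CLI
        with self.assertRaises(subprocess.CalledProcessError) as raises:
            subprocess.check_output(cmds, stderr=subprocess.STDOUT)
        self.assertIn("foo is not supported", str(raises.exception.stdout))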
|
3b642056d6761133bb27676993a83650c91b4057
|
Fix parameters differ from overridden in StratisPoolDevice
|
blivet/devices/stratis.py
|
blivet/devices/stratis.py
|
# devices/stratis.py
#
# Copyright (C) 2020 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# Red Hat Author(s): Vojtech Trefny <vtrefny@redhat.com>
#
import os
import logging
log = logging.getLogger("blivet")
from .storage import StorageDevice
from ..static_data import stratis_info
from ..storage_log import log_method_call
from ..errors import DeviceError, StratisError
from .. import devicelibs
class StratisPoolDevice(StorageDevice):
""" A stratis pool device """
_type = "stratis pool"
_resizable = False
_packages = ["stratisd", "stratis-cli"]
_dev_dir = "/dev/stratis"
_format_immutable = True
def __init__(self, *args, **kwargs):
"""
:encrypted: whether this pool is encrypted or not
:type encrypted: bool
:keyword passphrase: device passphrase
:type passphrase: str
:keyword key_file: path to a file containing a key
:type key_file: str
"""
self._encrypted = kwargs.pop("encrypted", False)
self.__passphrase = kwargs.pop("passphrase", None)
self._key_file = kwargs.pop("key_file", None)
super(StratisPoolDevice, self).__init__(*args, **kwargs)
@property
def blockdevs(self):
""" A list of this pool block devices """
return self.parents[:]
@property
def size(self):
""" The size of this pool """
# sum up the sizes of the block devices
return sum(parent.size for parent in self.parents)
@property
def encrypted(self):
""" True if this device is encrypted. """
return self._encrypted
@encrypted.setter
def encrypted(self, encrypted):
self._encrypted = encrypted
@property
def key_file(self):
""" Path to key file to be used in /etc/crypttab """
return self._key_file
def _set_passphrase(self, passphrase):
""" Set the passphrase used to access this device. """
self.__passphrase = passphrase
passphrase = property(fset=_set_passphrase)
@property
def has_key(self):
return ((self.__passphrase not in ["", None]) or
(self._key_file and os.access(self._key_file, os.R_OK)))
def _pre_create(self, **kwargs):
super(StratisPoolDevice, self)._pre_create(**kwargs)
if self.encrypted and not self.has_key:
raise StratisError("cannot create encrypted stratis pool without key")
def _create(self):
""" Create the device. """
log_method_call(self, self.name, status=self.status)
bd_list = [bd.path for bd in self.parents]
devicelibs.stratis.create_pool(name=self.name,
devices=bd_list,
encrypted=self.encrypted,
passphrase=self.__passphrase,
key_file=self._key_file)
def _post_create(self):
super(StratisPoolDevice, self)._post_create()
self.format.exists = True
pool_info = stratis_info.get_pool_info(self.name)
if not pool_info:
raise DeviceError("Failed to get information about newly created pool %s" % self.name)
self.uuid = pool_info.uuid
for parent in self.parents:
parent.format.pool_name = self.name
parent.format.pool_uuid = self.uuid
def _destroy(self):
""" Destroy the device. """
log_method_call(self, self.name, status=self.status)
devicelibs.stratis.remove_pool(self.uuid)
def add_hook(self, new=True):
super(StratisPoolDevice, self).add_hook(new=new)
if new:
return
for parent in self.parents:
parent.format.pool_name = self.name
parent.format.pool_uuid = self.uuid
def remove_hook(self, modparent=True):
if modparent:
for parent in self.parents:
parent.format.pool_name = None
parent.format.pool_uuid = None
super(StratisPoolDevice, self).remove_hook(modparent=modparent)
def dracut_setup_args(self):
return set(["stratis.rootfs.pool_uuid=%s" % self.uuid])
class StratisFilesystemDevice(StorageDevice):
""" A stratis pool device """
_type = "stratis filesystem"
_resizable = False
_packages = ["stratisd", "stratis-cli"]
_dev_dir = "/dev/stratis"
def __init__(self, *args, **kwargs):
if kwargs.get("size") is None and not kwargs.get("exists"):
kwargs["size"] = devicelibs.stratis.STRATIS_FS_SIZE
super(StratisFilesystemDevice, self).__init__(*args, **kwargs)
def _get_name(self):
""" This device's name. """
if self.pool is not None:
return "%s/%s" % (self.pool.name, self._name)
else:
return super(StratisFilesystemDevice, self)._get_name()
@property
def fsname(self):
""" The Stratis filesystem name (not including pool name). """
return self._name
@property
def pool(self):
if not self.parents:
# this should never happen but just to be sure
return None
return self.parents[0]
def _create(self):
""" Create the device. """
log_method_call(self, self.name, status=self.status)
devicelibs.stratis.create_filesystem(self.fsname, self.pool.uuid)
def _post_create(self):
super(StratisFilesystemDevice, self)._post_create()
fs_info = stratis_info.get_filesystem_info(self.pool.name, self.fsname)
if not fs_info:
raise DeviceError("Failed to get information about newly created filesystem %s" % self.name)
self.uuid = fs_info.uuid
self.format.pool_uuid = fs_info.pool_uuid
def _destroy(self):
""" Destroy the device. """
log_method_call(self, self.name, status=self.status)
devicelibs.stratis.remove_filesystem(self.pool.uuid, self.uuid)
def dracut_setup_args(self):
return set(["root=%s" % self.path])
|
Python
| 0
|
@@ -3124,26 +3124,16 @@
ate(self
-, **kwargs
):%0A
@@ -3178,24 +3178,16 @@
_create(
-**kwargs
)%0A%0A
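Decoded, both hunks simply drop the unused **kwargs so the override matches the parent signature (a reconstruction; the body is unchanged from the listing above):
    def _pre_create(self):
        super(StratisPoolDevice, self)._pre_create()
        if self.encrypted and not self.has_key:
            raise StratisError("cannot create encrypted stratis pool without key")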
|
97ec23d61d4aa78097e50328a2a3509ab3f6fb0d
|
Fix #4, oauth_callback_confirmed now returns 'error' if not True, not anymore hardcoded to ease debugging
|
oauth_provider/models.py
|
oauth_provider/models.py
|
import urllib
import urlparse
from time import time
from django.db import models
from django.contrib.auth.models import User
from managers import TokenManager, ConsumerManager, ResourceManager
from consts import KEY_SIZE, SECRET_SIZE, CONSUMER_KEY_SIZE, CONSUMER_STATES,\
PENDING, VERIFIER_SIZE, MAX_URL_LENGTH
generate_random = User.objects.make_random_password
class Nonce(models.Model):
token_key = models.CharField(max_length=KEY_SIZE)
consumer_key = models.CharField(max_length=CONSUMER_KEY_SIZE)
key = models.CharField(max_length=255)
def __unicode__(self):
return u"Nonce %s for %s" % (self.key, self.consumer_key)
class Resource(models.Model):
name = models.CharField(max_length=255)
url = models.TextField(max_length=MAX_URL_LENGTH)
is_readonly = models.BooleanField(default=True)
objects = ResourceManager()
def __unicode__(self):
return u"Resource %s with url %s" % (self.name, self.url)
class Consumer(models.Model):
name = models.CharField(max_length=255)
description = models.TextField()
key = models.CharField(max_length=CONSUMER_KEY_SIZE)
secret = models.CharField(max_length=SECRET_SIZE, blank=True)
status = models.SmallIntegerField(choices=CONSUMER_STATES, default=PENDING)
user = models.ForeignKey(User, null=True, blank=True)
objects = ConsumerManager()
def __unicode__(self):
return u"Consumer %s with key %s" % (self.name, self.key)
def generate_random_codes(self):
"""
Used to generate random key/secret pairings.
Use this after you've added the other data in place of save().
"""
key = generate_random(length=KEY_SIZE)
secret = generate_random(length=SECRET_SIZE)
while Consumer.objects.filter(models.Q(key__exact=key) | models.Q(secret__exact=secret)).count():
key = generate_random(length=KEY_SIZE)
secret = generate_random(length=SECRET_SIZE)
self.key = key
self.secret = secret
self.save()
class Token(models.Model):
REQUEST = 1
ACCESS = 2
TOKEN_TYPES = ((REQUEST, u'Request'), (ACCESS, u'Access'))
key = models.CharField(max_length=KEY_SIZE, null=True, blank=True)
secret = models.CharField(max_length=SECRET_SIZE, null=True, blank=True)
token_type = models.SmallIntegerField(choices=TOKEN_TYPES)
timestamp = models.IntegerField(default=long(time()))
is_approved = models.BooleanField(default=False)
user = models.ForeignKey(User, null=True, blank=True, related_name='tokens')
consumer = models.ForeignKey(Consumer)
resource = models.ForeignKey(Resource)
## OAuth 1.0a stuff
verifier = models.CharField(max_length=VERIFIER_SIZE)
callback = models.CharField(max_length=MAX_URL_LENGTH, null=True, blank=True)
callback_confirmed = models.BooleanField(default=False)
objects = TokenManager()
def __unicode__(self):
return u"%s Token %s for %s" % (self.get_token_type_display(), self.key, self.consumer)
def to_string(self, only_key=False):
token_dict = {
'oauth_token': self.key,
'oauth_token_secret': self.secret,
'oauth_callback_confirmed': 'true',
}
if self.verifier:
token_dict.update({ 'oauth_verifier': self.verifier })
if only_key:
del token_dict['oauth_token_secret']
del token_dict['oauth_callback_confirmed']
return urllib.urlencode(token_dict)
def generate_random_codes(self):
"""
Used to generate random key/secret pairings.
Use this after you've added the other data in place of save().
"""
key = generate_random(length=KEY_SIZE)
secret = generate_random(length=SECRET_SIZE)
while Token.objects.filter(models.Q(key__exact=key) | models.Q(secret__exact=secret)).count():
key = generate_random(length=KEY_SIZE)
secret = generate_random(length=SECRET_SIZE)
self.key = key
self.secret = secret
self.save()
def get_callback_url(self):
"""
OAuth 1.0a, append the oauth_verifier.
"""
if self.callback and self.verifier:
parts = urlparse.urlparse(self.callback)
scheme, netloc, path, params, query, fragment = parts[:6]
if query:
query = '%s&oauth_verifier=%s' % (query, self.verifier)
else:
query = 'oauth_verifier=%s' % self.verifier
return urlparse.urlunparse((scheme, netloc, path, params,
query, fragment))
return self.callback
|
Python
| 0.000001
|
@@ -3274,15 +3274,53 @@
d':
-'true',
+self.callback_confirmed and 'true' or 'error'
%0A
@@ -3378,18 +3378,9 @@
dict
-.update(%7B
+%5B
'oau
@@ -3391,17 +3391,19 @@
erifier'
-:
+%5D =
self.ve
@@ -3412,11 +3412,8 @@
fier
- %7D)
%0A%0A
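Decoded, to_string now reports the real confirmation state instead of a hardcoded 'true', and assigns the verifier directly (a reconstruction of the patched lines):
        token_dict = {
            'oauth_token': self.key,
            'oauth_token_secret': self.secret,
            'oauth_callback_confirmed': self.callback_confirmed and 'true' or 'error'
        }
        if self.verifier:
            token_dict['oauth_verifier'] = self.verifier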
|
7a5c344e29e0f3760c312a43d03f8b665c155ba9
|
Add more HTTP helper methods to RESTOAuthClient.
|
rightscale/httpclient.py
|
rightscale/httpclient.py
|
from functools import partial
import requests
DEFAULT_ROOT_RES_PATH = '/'
class RESTOAuthClient(object):
"""
HTTP client that is aware of REST and OAuth semantics.
Provides CRUD methods for API resources using HTTP verbs. It also
interrogates the API server for links to related resources and exposes
discovered resources as attributes on this root object.
For additional flexibility, helper methods like :meth:`get`, :meth:`post`,
and the generic :meth:`request` allow the caller to call any routes that
are not automatically exposed as attributes of this object.
:param str endpoint: URL for the API endpoint. E.g. ``https://blah.org``.
:param str root_path: The initial path to use for discovering the rest of
the resources. E.g. ``/api/``.
:param dict hints: Hints for URL paths that should be added or removed from
the set of discovered paths. This allows users to work around
inconsistencies in a vendor's REST implementation, or just ignore large
swaths of discovered routes that they don't need.
"""
def __init__(
self,
endpoint='',
root_path=DEFAULT_ROOT_RES_PATH,
hints=None,
):
self.endpoint = endpoint
self.root_path = root_path
self.hints = hints
self.headers = {'Accept': 'application/json'}
# convenience methods
self.get = partial(self.request, 'get')
self.post = partial(self.request, 'post')
self.reset_cache()
def request(self, method, path='/', url=None, ignore_codes=[], **kwargs):
"""
Performs HTTP request.
:param str method: An HTTP method (e.g. 'get', 'post', 'PUT', etc...)
:param str path: A path component of the target URL. This will be
appended to the value of ``self.endpoint``. If both :attr:`path`
and :attr:`url` are specified, the value in :attr:`url` is used and
the :attr:`path` is ignored.
:param str url: The target URL (e.g. ``http://server.tld/somepath/``).
If both :attr:`path` and :attr:`url` are specified, the value in
:attr:`url` is used and the :attr:`path` is ignored.
:param list of int ignore_codes: List of HTTP error codes (e.g.
404, 500) that should be ignored. If an HTTP error occurs and it
is *not* in :attr:`ignore_codes`, then an exception is raised.
:param kwargs: Any other kwargs to pass to :meth:`requests.request()`.
Returns a :class:`requests.Response` object.
"""
_url = url if url else (self.endpoint + path)
# merge with defaults in headers attribute. incoming 'headers' take
# priority over values in self.headers to allow last-minute overrides
# if the caller really knows what they're doing.
if 'headers' in kwargs:
headers = kwargs.pop('headers')
for k, v in self.headers.items():
headers.setdefault(k, v)
else:
headers = self.headers
kwargs['headers'] = headers
r = requests.request(method, _url, **kwargs)
if not r.ok and r.status_code not in ignore_codes:
r.raise_for_status()
return r
def reset_cache(self):
self._links = None
@property
def _unfiltered_links(self):
if self._links is None:
response = self.get(self.root_path)
if not response.ok:
return {}
blob = response.json()
self._links = dict(
(raw['rel'], raw['href']) for raw in blob.get('links', [])
)
return self._links
@property
def links(self):
hinted_links = self._unfiltered_links.copy()
if self.hints:
for r in self.hints.get('remove', []):
hinted_links.pop(r, None)
hinted_links.update(self.hints.get('add', {}))
return hinted_links
def __getattr__(self, name):
if name not in self.links:
raise AttributeError('%s object has no attribute %s' % (
self.__class__.__name__,
name,
))
path = self.links[name]
response = self.get(path)
# TODO: construct appropriate objects based on content-type
return response.json()
|
Python
| 0
|
@@ -1438,89 +1438,241 @@
elf.
-get = partial(self.request, 'get')%0A self.post = partial(self.request, 'pos
+delete = partial(self.request, 'delete')%0A self.get = partial(self.request, 'get')%0A self.head = partial(self.request, 'head')%0A self.post = partial(self.request, 'post')%0A self.put = partial(self.request, 'pu
t')%0A
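Decoded, the constructor now registers one convenience partial per HTTP verb (a reconstruction):
        self.delete = partial(self.request, 'delete')
        self.get = partial(self.request, 'get')
        self.head = partial(self.request, 'head')
        self.post = partial(self.request, 'post')
        self.put = partial(self.request, 'put')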
|
d3aae51f37321e1c73261284e057a569116ce5cc
|
revert changes to cvc code
|
rpmimport/recipemaker.py
|
rpmimport/recipemaker.py
|
#!/usr/bin/python
#
# Copyright (c) 2006,2008 rPath, Inc.
#
#
# This program is distributed under the terms of the Common Public License,
# version 1.0. A copy of this license should have been distributed with this
# source file in a file called LICENSE. If it is not present, the license
# is always available at http://www.rpath.com/permanent/licenses/CPL-1.0.
#
# This program is distributed in the hope that it will be useful, but
# without any warranty; without even the implied warranty of merchantability
# or fitness for a particular purpose. See the Common Public License for
# full details.
#
"""
Module for creating factory manifest and managing source components
"""
import os
import shutil
from conary import cvc
class RecipeMaker(object):
"""
Class for creating and managing rpm factory based source components.
"""
def __init__(self, cfg, repos, rpmSource):
self.cfg = cfg
self.repos = repos
self.rpmSource = rpmSource
def _cvc(self, *args, **kwargs):
"""
Run cvc command.
"""
cvc.sourceCommand(self.cfg, *args, **kwargs)
def _updateSourceComponent(self, pkgname, manifestContents,
comment):
"""
Update the manifest file in the current working directory,
preform a test cook, and commit
Assumptions: current working directory is a checkout
"""
f = open('manifest', 'w')
f.write(manifestContents)
f.close()
try:
self._cvc('cook', no-deps=None)
except Exception, e:
print '++++++ error building', pkgname, str(e)
return
self._cvc('commit', message='%s of %s:source' % (comment, pkgname))
def _newpkg(self, pkgname):
"""
Run the "cvc newpkg" related tasks when creating a new :source
component.
Assumption: current working directory is where the new checkout
should be created
Side effect: current working directory will be the checkout
directory when this method returns
"""
print 'creating initial template for', pkgname
try:
shutil.rmtree(pkgname)
except OSError:
pass
self._cvc('newpkg', pkgname, factory='sle-rpm')
os.chdir(pkgname)
f = open('manifest', 'w')
f.close()
self._cvc('add', 'manifest', text=True)
def _checkout(self, pkgname):
"""
Check out an existing :source component
Assumption: current working directory is where the new checkout
should be created
Side effect: current working directory will be the checkout
directory when this method returns
"""
print 'updating', pkgname
try:
shutil.rmtree(pkgname)
except OSError:
pass
self._cvc('co', pkgname)
os.chdir(pkgname)
def _createOrUpdate(self, pkgname, srpm, create=False, update=False):
"""
Manage a package manifest file.
NOTE: either create or update must be True.
@param pkgname: name of the package
@type pkgname: string
@param srpm: name of the source RPM file
@type srpm: string
@param create: create a package
@type create: boolean
@param update: update a package
@type update: boolean
"""
assert(create or update)
manifest = self.rpmSource.createManifest(srpm)
cwd = os.getcwd()
try:
if create:
self._newpkg(pkgname)
comment = 'Automated initial commit'
else:
self._checkout(pkgname)
comment = 'Automated update'
self._updateSourceComponent(pkgname, manifest, comment)
finally:
os.chdir(cwd)
def createManifest(self, pkgname, srpm):
"""
Create a manifest file.
@param pkgname: name of the package
@type pkgname: string
@param srpm: name of the source RPM file
@type srpm: string
"""
self._createOrUpdate(pkgname, srpm, create=True)
def updateManifest(self, pkgname, srpm):
"""
Update a manifest file.
@param pkgname: name of the package
@type pkgname: string
@param srpm: name of the source RPM file
@type srpm: string
"""
self._createOrUpdate(pkgname, srpm, update=True)
|
Python
| 0
|
@@ -976,149 +976,8 @@
ce%0A%0A
- def _cvc(self, *args, **kwargs):%0A %22%22%22%0A Run cvc command.%0A %22%22%22%0A%0A cvc.sourceCommand(self.cfg, *args, **kwargs)%0A%0A
@@ -1036,16 +1036,16 @@
ntents,%0A
+
@@ -1384,38 +1384,63 @@
-self._cvc(
+cvc.sourceCommand(self.cfg, %5B
'cook'
+%5D
,
+%7B'
no-deps
-=
+':
None
+%7D
)%0A
@@ -1556,36 +1556,140 @@
-self._cvc('commit', message=
+cvc.sourceCommand(self.cfg,%0A %5B 'commit' %5D,%0A %7B 'message':%0A
'%25s
@@ -1722,16 +1722,17 @@
pkgname)
+%7D
)%0A%0A d
@@ -2262,26 +2262,46 @@
-self._cvc('
+cvc.sourceCommand(self.cfg, %5B %22
newpkg
-'
+%22
, pk
@@ -2309,18 +2309,49 @@
name
-,
+%5D,%0A %7B'
factory
-=
+':
'sle
@@ -2355,16 +2355,17 @@
sle-rpm'
+%7D
)%0A
@@ -2444,26 +2444,46 @@
-self._cvc(
+cvc.sourceCommand(self.cfg, %5B
'add', '
@@ -2495,19 +2495,25 @@
est'
-,
+%5D, %7B'
text
-=
+':
True
+%7D
)%0A%0A
@@ -2986,18 +2986,38 @@
-self._cvc(
+cvc.sourceCommand(self.cfg, %5B
'co'
@@ -3021,24 +3021,30 @@
co', pkgname
+ %5D, %7B%7D
)%0A os
|
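The revert above undoes a wrapper that could not survive contact with conary's option names: 'no-deps' is not a valid Python identifier, so it cannot travel as a bare keyword argument, which is why cvc.sourceCommand takes an argument list plus an option dict instead. A sketch of that calling convention, with a stand-in function whose shape is inferred from the diff (not conary's actual implementation):

def source_command(cfg, argv, arg_set):
    # Same shape the diff restores: a config object, a list of
    # positional arguments, and a dict of option flags.
    print(cfg, argv, arg_set)

cfg = object()  # placeholder for a conary configuration object
source_command(cfg, ['cook'], {'no-deps': None})
source_command(cfg, ['commit'], {'message': 'Automated update of pkg:source'})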
4f7590ea19036ceb358c323d796be0046f33327e
|
move config.json to correct location
|
omtk/core/preferences.py
|
omtk/core/preferences.py
|
"""
Provide a Preferences class to store the user preferences of the local installation.
"""
import os
import inspect
import json
import logging
log = logging.getLogger('omtk')
CONFIG_FILENAME = 'config.json'
def get_path_preferences():
"""
:return: The search path of the configuration file.
"""
current_dir = os.path.dirname(inspect.getfile(inspect.currentframe()))
config_dir = os.path.abspath(os.path.join(current_dir, '..', '..', '..'))
config_path = os.path.join(config_dir, CONFIG_FILENAME)
return config_path
class Preferences(object):
def __init__(self):
self.default_rig = None
def save(self, path=None):
if path is None:
path = get_path_preferences()
data = self.__dict__
with open(path, 'w') as fp:
json.dump(data, fp)
def load(self, path=None):
if path is None:
path = get_path_preferences()
if not path or not os.path.exists(path):
log.warning("Can't find config file. Using default config.")
return
with open(path, 'r') as fp:
data = json.load(fp)
self.__dict__.update(data)
def get_default_rig_class(self):
from omtk.core import plugin_manager
if self.default_rig:
for plugin in plugin_manager.plugin_manager.iter_loaded_plugins_by_type('rigs'):
if plugin.cls.__name__ == self.default_rig:
return plugin.cls
log.warning("Can't find default rig type {0}.".format(self.default_rig))
# If no match is found, return the base implementation
from omtk.core import classRig
return classRig.Rig
preferences = Preferences()
preferences.load()
|
Python
| 0.000002
|
@@ -442,22 +442,16 @@
r, '..',
- '..',
'..'))%0A
@@ -1730,8 +1730,9 @@
s.load()
+%0A
|
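Dropping one '..' from the join moves the resolved config location down a level, next to the package instead of above the repository. A quick check with illustrative paths:

import os

current_dir = '/repo/omtk/core'  # directory of preferences.py (illustrative)
old_path = os.path.abspath(os.path.join(current_dir, '..', '..', '..'))
new_path = os.path.abspath(os.path.join(current_dir, '..', '..'))
print(old_path)  # '/'     -> config.json resolved above the repository
print(new_path)  # '/repo' -> config.json now sits at the repository root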
e27741a4a15c81151e8922da26b14b80afc579a5
|
use packet library
|
ryu/app/simple_switch.py
|
ryu/app/simple_switch.py
|
# Copyright (C) 2011 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import struct
from ryu.base import app_manager
from ryu.controller import mac_to_port
from ryu.controller import ofp_event
from ryu.controller.handler import MAIN_DISPATCHER
from ryu.controller.handler import set_ev_cls
from ryu.ofproto import ofproto_v1_0
from ryu.lib.mac import haddr_to_str
# TODO: we should split the handler into two parts, protocol
# independent and dependent parts.
# TODO: can we use dpkt python library?
# TODO: we need to move the followings to something like db
class SimpleSwitch(app_manager.RyuApp):
OFP_VERSIONS = [ofproto_v1_0.OFP_VERSION]
def __init__(self, *args, **kwargs):
super(SimpleSwitch, self).__init__(*args, **kwargs)
self.mac_to_port = {}
def add_flow(self, datapath, in_port, dst, actions):
ofproto = datapath.ofproto
match = datapath.ofproto_parser.OFPMatch(
in_port=in_port, dl_dst=dst)
mod = datapath.ofproto_parser.OFPFlowMod(
datapath=datapath, match=match, cookie=0,
command=ofproto.OFPFC_ADD, idle_timeout=0, hard_timeout=0,
priority=ofproto.OFP_DEFAULT_PRIORITY,
flags=ofproto.OFPFF_SEND_FLOW_REM, actions=actions)
datapath.send_msg(mod)
@set_ev_cls(ofp_event.EventOFPPacketIn, MAIN_DISPATCHER)
def _packet_in_handler(self, ev):
msg = ev.msg
datapath = msg.datapath
ofproto = datapath.ofproto
dst, src, _eth_type = struct.unpack_from('!6s6sH', buffer(msg.data), 0)
dpid = datapath.id
self.mac_to_port.setdefault(dpid, {})
self.logger.info("packet in %s %s %s %s",
dpid, haddr_to_str(src), haddr_to_str(dst),
msg.in_port)
# learn a mac address to avoid FLOOD next time.
self.mac_to_port[dpid][src] = msg.in_port
if dst in self.mac_to_port[dpid]:
out_port = self.mac_to_port[dpid][dst]
else:
out_port = ofproto.OFPP_FLOOD
actions = [datapath.ofproto_parser.OFPActionOutput(out_port)]
# install a flow to avoid packet_in next time
if out_port != ofproto.OFPP_FLOOD:
self.add_flow(datapath, msg.in_port, dst, actions)
out = datapath.ofproto_parser.OFPPacketOut(
datapath=datapath, buffer_id=msg.buffer_id, in_port=msg.in_port,
actions=actions)
datapath.send_msg(out)
@set_ev_cls(ofp_event.EventOFPPortStatus, MAIN_DISPATCHER)
def _port_status_handler(self, ev):
msg = ev.msg
reason = msg.reason
port_no = msg.desc.port_no
ofproto = msg.datapath.ofproto
if reason == ofproto.OFPPR_ADD:
self.logger.info("port added %s", port_no)
elif reason == ofproto.OFPPR_DELETE:
self.logger.info("port deleted %s", port_no)
elif reason == ofproto.OFPPR_MODIFY:
self.logger.info("port modified %s", port_no)
else:
self.logger.info("Illeagal port state %s %s", port_no, reason)
|
Python
| 0
|
@@ -916,211 +916,81 @@
_to_
-str%0A%0A%0A# TODO: we should split the handler into two parts, protocol%0A# independent and dependant parts.%0A%0A# TODO: can we use dpkt python library?%0A%0A# TODO: we need to move the followings to something like db
+bin%0Afrom ryu.lib.packet import packet%0Afrom ryu.lib.packet import ethernet
%0A%0A%0Ac
@@ -1386,20 +1386,34 @@
dl_dst=
+haddr_to_bin(
dst)
+)
%0A%0A
@@ -1928,79 +1928,132 @@
-dst, src, _eth_type = struct.unpack_from('!6s6sH', buffer(msg.data), 0)
+pkt = packet.Packet(msg.data)%0A eth = pkt.get_protocol(ethernet.ethernet)%0A%0A dst = eth.dst%0A src = eth.src
%0A%0A
@@ -2177,102 +2177,24 @@
%25s%22,
-%0A dpid, haddr_to_str(
+ dpid,
src
-)
,
-haddr_to_str(dst),%0A
+dst,
msg
|
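The struct.unpack_from call gives way to two packet-library calls in the diff above; a minimal sketch of the parsing step, assuming ryu is installed:

from ryu.lib.packet import ethernet, packet


def extract_eth_addrs(data):
    # Parse the raw frame and pull out the Ethernet header, as the diff
    # does, instead of struct.unpack_from('!6s6sH', buffer(data), 0).
    pkt = packet.Packet(data)
    eth = pkt.get_protocol(ethernet.ethernet)
    return eth.dst, eth.src

Note the matching change on the flow-mod side: the match becomes dl_dst=haddr_to_bin(dst), since the packet library yields addresses in text form rather than raw bytes.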
cae6e403efdef67af23a3b8a6c80082fa9efe4bd
|
Fix test
|
bluebottle/files/views.py
|
bluebottle/files/views.py
|
import mimetypes
import magic
from django.conf import settings
from django.core.exceptions import ValidationError
from django.http import HttpResponse
from rest_framework.parsers import FileUploadParser
from rest_framework.permissions import IsAuthenticated
from rest_framework_json_api.views import AutoPrefetchMixin
from rest_framework_jwt.authentication import JSONWebTokenAuthentication
from sorl.thumbnail.shortcuts import get_thumbnail
from bluebottle.bluebottle_drf2.renderers import BluebottleJSONAPIRenderer
from bluebottle.files.serializers import DocumentSerializer, ImageSerializer
from bluebottle.initiatives.models import Initiative
from bluebottle.utils.views import CreateAPIView, RetrieveAPIView
mime = magic.Magic(mime=True)
class FileList(AutoPrefetchMixin, CreateAPIView):
queryset = Initiative.objects.all()
serializer_class = DocumentSerializer
renderer_classes = (BluebottleJSONAPIRenderer, )
parser_classes = (FileUploadParser,)
permission_classes = (IsAuthenticated, )
authentication_classes = (
JSONWebTokenAuthentication,
)
prefetch_for_includes = {
'owner': ['owner'],
}
def perform_create(self, serializer):
serializer.save(owner=self.request.user)
class FileContentView(RetrieveAPIView):
def retrieve(self, *args, **kwargs):
instance = self.get_object()
file = getattr(instance, self.field).file
thumbnail = get_thumbnail(file, self.kwargs['size'])
content_type = mimetypes.guess_type(file.name)[0]
if settings.DEBUG:
response = HttpResponse(content=thumbnail.read())
else:
response = HttpResponse()
response['X-Accel-Redirect'] = thumbnail.url
response['Content-Type'] = content_type
return response
class ImageList(FileList):
queryset = Initiative.objects.all()
serializer_class = ImageSerializer
def perform_create(self, serializer):
uploaded_file = self.request.FILES['file']
if not mime.from_buffer(uploaded_file.read()) == uploaded_file.content_type:
raise ValidationError('Mime-type does not match Content-Type')
serializer.save(owner=self.request.user)
|
Python
| 0.000004
|
@@ -73,83 +73,86 @@
ngo.
-core.exceptions import ValidationError%0Afrom django.http import HttpResponse
+http import HttpResponse%0Afrom rest_framework.exceptions import ValidationError
%0Afro
@@ -2173,17 +2173,16 @@
-Type')%0A
-%0A
|
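The import swap in the diff is the actual fix: django.core.exceptions.ValidationError is not translated by DRF's exception handler and would surface as a server error, while rest_framework.exceptions.ValidationError yields an HTTP 400 response. A minimal sketch of the check, with the mime sniffing abstracted into a parameter:

from rest_framework.exceptions import ValidationError


def check_upload(uploaded_file, sniffed_mime):
    # DRF's ValidationError becomes a 400 response via the framework's
    # exception handler; Django's core ValidationError would not.
    if sniffed_mime != uploaded_file.content_type:
        raise ValidationError('Mime-type does not match Content-Type')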
01e7a5b6880badaa0bd0dfe667972de53628d4e0
|
version 0.1.4
|
openapi_core/__init__.py
|
openapi_core/__init__.py
|
"""OpenAPI core module"""
from openapi_core.shortcuts import create_spec
from openapi_core.wrappers import RequestParametersFactory, RequestBodyFactory
__author__ = 'Artur Maciąg'
__email__ = 'maciag.artur@gmail.com'
__version__ = '0.1.3'
__url__ = 'https://github.com/p1c2u/openapi-core'
__license__ = 'BSD 3-Clause License'
__all__ = ['create_spec', 'request_parameters_factory', 'request_body_factory']
request_parameters_factory = RequestParametersFactory()
request_body_factory = RequestBodyFactory()
|
Python
| 0.000002
|
@@ -230,17 +230,17 @@
= '0.1.
-3
+4
'%0A__url_
|
ab4351afae1f1a16206cce6801d114b047babf76
|
Update main to use tag and reuse_mesage
|
bot/main.py
|
bot/main.py
|
#!/usr/bin/env python3
import logging
from telegram.ext import Updater, CommandHandler, CallbackQueryHandler, InlineQueryHandler
from game.api.server import start_api_server
from game.chooser import inline_query_game_chooser_handler
from game.launch import callback_query_game_launcher_handler
from tools import config, commands
from tools.logger import Logger
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
updater = Updater(token=config.Key.AUTH_TOKEN.read())
bot = updater.bot
bot.updater = updater
dispatcher = updater.dispatcher
logger = Logger(bot)
logger.debug("Starting bot...")
dispatcher.add_handler(CommandHandler("config", commands.config_editor_command, pass_args=True, allow_edited=True))
dispatcher.add_handler(CommandHandler("restart", commands.restart_command, pass_args=True, allow_edited=True))
dispatcher.add_handler(CallbackQueryHandler(callback_query_game_launcher_handler))
dispatcher.add_handler(InlineQueryHandler(inline_query_game_chooser_handler))
updater.start_polling()
logger.debug("Starting api server...")
start_api_server()
logger.info("Running!")
|
Python
| 0
|
@@ -609,16 +609,45 @@
gger(bot
+, %22START%22, reuse_message=True
)%0Alogger
|
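Logger is the project's own class; the diff only shows that it now receives a tag ("START") and a reuse_message flag. A hypothetical sketch of such a wrapper -- the names and bot calls below are assumptions for illustration, not the project's API:

class Logger(object):
    """Hypothetical: prefix every line with a tag, optionally edit one
    message in place instead of sending a new one each time."""

    def __init__(self, bot, tag, reuse_message=False):
        self.bot = bot
        self.tag = tag                      # e.g. "START", as in the diff
        self.reuse_message = reuse_message  # assumed meaning of the flag
        self._last = None

    def debug(self, text):
        line = '[{}] {}'.format(self.tag, text)
        if self.reuse_message and self._last is not None:
            self._last = self.bot.edit(self._last, line)  # assumed bot method
        else:
            self._last = self.bot.send(line)              # assumed bot method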
23a1922afac917b06dbd6772fefc3b8a7c53c5ff
|
fix linespacing in footnotes (part of #3)
|
backends.py
|
backends.py
|
class LaTeX:
def preamble(self, typeface):
return """
\\documentclass[a4paper,12pt]{{article}}
\\usepackage{{fontspec}}
\\usepackage{{dblfnote}}
\\usepackage{{pfnote}}
\\setromanfont{{{typeface}}}
\\linespread{{1.5}}
\\spaceskip 0.5em
\\begin{{document}}
""".format(typeface=typeface)
def chapter_verse(self, chapter, verse):
return "\\textbf{{\Large {}.{}}}".format(chapter, verse)
def verse(self, verse):
return "\\textbf{{{}}}".format(verse)
def word(self, text, headword=None, parse=None, gloss=None):
if headword is None and parse is None and gloss is None:
return text
else:
footnote = []
if headword:
footnote.append(headword)
if parse:
footnote.append("\\textendash\\ {}".format(parse))
if gloss:
footnote.append("\\textendash\\ \\textit{{{}}}".format(gloss))
return "{}\\footnote{{{}}}".format(text, " ".join(footnote))
def comment(self, text):
return "% {}".format(text)
def postamble(self):
return "\\end{document}"
|
Python
| 0
|
@@ -102,16 +102,41 @@
icle%7D%7D%0A%0A
+%5C%5Cusepackage%7B%7Bsetspace%7D%7D%0A
%5C%5Cusepac
@@ -257,23 +257,22 @@
%7D%0A%5C%5C
-spaceskip 0.5em
+onehalfspacing
%0A%0A%5C%5C
|
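Applied to the code above, the commit loads setspace and swaps the manual \spaceskip for its \onehalfspacing command; setspace keeps footnotes single-spaced, which is what the subject's "fix linespacing in footnotes" refers to. The post-commit preamble, reconstructed from the diff:

def preamble(self, typeface):
    return """
\\documentclass[a4paper,12pt]{{article}}

\\usepackage{{setspace}}
\\usepackage{{fontspec}}
\\usepackage{{dblfnote}}
\\usepackage{{pfnote}}

\\setromanfont{{{typeface}}}
\\linespread{{1.5}}
\\onehalfspacing

\\begin{{document}}
""".format(typeface=typeface)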
a24d41a3868cca15b3682590eb033f492a9b8293
|
Remove multi-step decoding process before parsing HTML meta.
|
openid/yadis/discover.py
|
openid/yadis/discover.py
|
# -*- test-case-name: openid.test.test_yadis_discover -*-
__all__ = ['discover', 'DiscoveryResult', 'DiscoveryFailure']
from io import StringIO
import urllib.error
from openid import fetchers
from openid.yadis import etxrd
from openid.yadis.constants import \
YADIS_HEADER_NAME, YADIS_CONTENT_TYPE, YADIS_ACCEPT_HEADER
from openid.yadis.parsehtml import MetaNotFound, findHTMLMeta
class DiscoveryFailure(Exception):
"""Raised when a YADIS protocol error occurs in the discovery process"""
identity_url = None
def __init__(self, message, http_response):
Exception.__init__(self, message)
self.http_response = http_response
class DiscoveryResult(object):
"""Contains the result of performing Yadis discovery on a URI"""
# Normalized request uri
uri = None
# The URI from which the response text was returned (set to
# None if there was no XRDS document found)
xrds_uri = None
# The document returned from the xrds_uri
response_text = None
def __init__(self, uri):
self.uri = uri
def isXRDS(self):
return self.xrds_uri is not None
def is_xrds(body):
try:
et = etxrd.parseXRDS(body)
return True
except etxrd.XRDSError:
return False
def discover(uri):
"""Discover services for a given URI.
@param uri: The identity URI as a well-formed http or https
URI. The well-formedness and the protocol are not checked, but
the results of this function are undefined if those properties
do not hold.
@return: DiscoveryResult object
"""
resp = fetchers.fetch(uri, headers={'Accept': YADIS_ACCEPT_HEADER})
result = DiscoveryResult(resp.url)
result.response_text = resp.read() # MAX_RESPONSE
if is_xrds(result.response_text):
result.xrds_uri = result.uri
return result
location = whereIsYadis(resp, result.response_text)
return discover(location) if location else result
def whereIsYadis(resp, body):
"""Given a HTTPResponse, return the location of the Yadis document.
May be the URL just retrieved, another URL, or None if no suitable URL can
be found.
[non-blocking]
@returns: str or None
"""
location = resp.getheader(YADIS_HEADER_NAME)
if location:
return location
# Parse as HTML if the header is missing.
#
# XXX: do we want to do something with content-type, like
# have a whitelist or a blacklist (for detecting that it's
# HTML)?
# Decode the body using the charset declared in the Content-Type header
content_type = resp.getheader('content-type') or ''
encoding = content_type.rsplit(';', 1)
if (len(encoding) == 2 and
encoding[1].strip().startswith('charset=')):
encoding = encoding[1].split('=', 1)[1].strip()
else:
encoding = 'utf-8'
try:
content = body.decode(encoding)
except UnicodeError:
# All right, the detected encoding has failed. Try with
# UTF-8 (even if there was no detected encoding and we've
# defaulted to UTF-8, it's not that expensive an operation)
try:
content = body.decode('utf-8')
except UnicodeError:
# At this point the content cannot be decoded to a str
# using the detected encoding or falling back to utf-8,
# so we have to resort to replacing undecodable chars.
# This *will* result in broken content but there isn't
# anything else that can be done.
content = body.decode(encoding, 'replace')
try:
return findHTMLMeta(StringIO(content))
except (MetaNotFound, UnicodeError):
# UnicodeError: Response body could not be encoded and xrds
# location could not be found before troubles occur.
pass
|
Python
| 0
|
@@ -114,16 +114,27 @@
lure'%5D%0A%0A
+import cgi%0A
from io
@@ -2322,1194 +2322,133 @@
-# Parse as HTML if the header is missing.%0A #%0A # XXX: do we want to do something with content-type, like%0A # have a whitelist or a blacklist (for detecting that it's%0A # HTML)?%0A%0A # Decode body by encoding of file%0A content_type = resp.getheader('content-type') or ''%0A encoding = content_type.rsplit(';', 1)%0A if (len(encoding) == 2 and%0A encoding%5B1%5D.strip().startswith('charset=')):%0A encoding = encoding%5B1%5D.split('=', 1)%5B1%5D.strip()%0A else:%0A encoding = 'utf-8'%0A%0A try:%0A content = body.decode(encoding)%0A except UnicodeError:%0A # All right, the detected encoding has failed. Try with%0A # UTF-8 (even if there was no detected encoding and we've%0A # defaulted to UTF-8, it's not that expensive an operation)%0A try:%0A content = body.decode('utf-8')%0A except UnicodeError:%0A # At this point the content cannot be decoded to a str%0A # using the detected encoding or falling back to utf-8,%0A # so we have to resort to replacing undecodable chars.%0A # This *will* result in broken content but there isn't%0A # anything else that can be done.%0A
+content_type = resp.getheader('content-type') or ''%0A encoding = cgi.parse_header(content_type)%5B1%5D.get('charset', 'utf-8')%0A
@@ -2490,17 +2490,16 @@
place')%0A
-%0A
try:
@@ -2557,17 +2557,16 @@
except
-(
MetaNotF
@@ -2573,153 +2573,32 @@
ound
-, UnicodeError):%0A # UnicodeError: Response body could not be encoded and xrds%0A # location could not be found before troubles occur.
+:%0A pass%0A%0A%0A%0A%0A%0A%0A%0A%0A%0A
%0A
@@ -2594,21 +2594,19 @@
%0A%0A%0A%0A%0A%0A%0A%0A
-pa
ss%0A
|
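The replacement charset detection collapses the hand-rolled rsplit/startswith dance into one stdlib call. cgi.parse_header splits a header into its main value and a dict of parameters:

import cgi

content_type = 'text/html; charset=ISO-8859-1'  # example header value
value, params = cgi.parse_header(content_type)
encoding = params.get('charset', 'utf-8')
print(value, encoding)  # text/html ISO-8859-1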
96d17640a1aef57f35f22620fe45028bf1c0f6fb
|
Fix error with Django admin urls
|
openquake/server/urls.py
|
openquake/server/urls.py
|
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2014-2018 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
from django.conf import settings
from django.conf.urls import url, include
from django.views.generic.base import RedirectView
from openquake.server import views
urlpatterns = [
url(r'^$', RedirectView.as_view(url='/engine/', permanent=True)),
url(r'^v1/engine_version$', views.get_engine_version),
url(r'^v1/engine_latest_version$', views.get_engine_latest_version),
url(r'^v1/calc/', include('openquake.server.v1.calc_urls')),
url(r'^v1/valid/', views.validate_nrml),
url(r'^v1/available_gsims$', views.get_available_gsims),
url(r'^v1/on_same_fs$', views.on_same_fs, name="on_same_fs"),
url(r'^engine/?$', views.web_engine, name="index"),
url(r'^engine/(\d+)/outputs$',
views.web_engine_get_outputs, name="outputs"),
url(r'^engine/license$', views.license,
name="license"),
]
for app in settings.STANDALONE_APPS:
app_name = app.split('_')[1]
urlpatterns.append(url(r'^%s/' % app_name, include('%s.urls' % app,
namespace='%s' % app_name)))
if settings.LOCKDOWN:
from django.contrib import admin
from django.contrib.auth.views import login, logout
admin.autodiscover()
urlpatterns += [
url(r'^admin/', include(admin.site.urls)),
url(r'^accounts/login/$', login,
{'template_name': 'account/login.html'}, name="login"),
url(r'^accounts/logout/$', logout,
{'template_name': 'account/logout.html'}, name="logout"),
url(r'^accounts/ajax_login/$', views.ajax_login),
url(r'^accounts/ajax_logout/$', views.ajax_logout),
]
# To enable gunicorn debug without Nginx (to serve static files)
# uncomment the following lines
# from django.contrib.staticfiles.urls import staticfiles_urlpatterns
# urlpatterns += staticfiles_urlpatterns()
|
Python
| 0.000072
|
@@ -1973,24 +1973,16 @@
dmin/',
-include(
admin.si
@@ -1989,17 +1989,16 @@
te.urls)
-)
,%0A
|
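The error the commit fixes comes from a Django API change: admin.site.urls is already a (urls, app_namespace, instance_namespace) tuple, and newer Django versions refuse a 3-tuple passed to include(), so the admin is mounted directly:

from django.conf.urls import url
from django.contrib import admin

urlpatterns = [
    # Pass admin.site.urls directly; include(admin.site.urls) raises
    # ImproperlyConfigured on recent Django versions.
    url(r'^admin/', admin.site.urls),
]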
b5416d4e68e273ce7164fc177d99f7c5b29e8ca4
|
Handle OSError on sendall, re-connect socket when error occurred.
|
opentsdb/tsdb_connect.py
|
opentsdb/tsdb_connect.py
|
import threading
import logging
import socket
import time
logger = logging.getLogger('opentsdb-py')
class TSDBConnect:
def __init__(self, host: str, port: int, check_tsdb_alive: bool=False):
self.tsdb_host = host
self.tsdb_port = int(port)
if check_tsdb_alive:
self.is_alive(raise_error=True)
self._connect = None
self.stopped = threading.Event()
def is_alive(self, timeout=3, raise_error=False) -> bool:
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(timeout)
sock.connect((self.tsdb_host, self.tsdb_port))
sock.close()
except (ConnectionRefusedError, socket.timeout):
if raise_error is True:
raise
return False
else:
return True
@property
def connect(self) -> socket.socket:
if not self._connect or getattr(self._connect, '_closed', False) is True:
logger.debug("Connect to OpenTSDB: %s:%s", self.tsdb_host, self.tsdb_port)
self.stopped.clear()
self._make_connection()
return self._connect
def _make_connection(self, timeout=2):
self._connect = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self._connect.settimeout(timeout)
attempt = 0
while not self.stopped.is_set():
try:
self._connect.connect((self.tsdb_host, self.tsdb_port))
return
except (ConnectionRefusedError, socket.timeout):
time.sleep(min(15, 2 ** attempt))
attempt += 1
def disconnect(self):
logger.debug("Disconnecting from %s:%s", self.tsdb_host, self.tsdb_port)
self.stopped.set()
if self._connect:
self._connect.close()
self._connect = None
def sendall(self, line: bytes):
try:
self.connect.sendall(line)
except (BrokenPipeError, IOError) as error:
logger.error("Close connection to handle exception: %s", error)
self._connect.close()
|
Python
| 0
|
@@ -1995,16 +1995,25 @@
IOError
+, OSError
) as err
|
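A note on the one-token fix: on Python 3, IOError is an alias of OSError and BrokenPipeError is one of its subclasses, so adding OSError widens the except clause to the whole family (socket.timeout included). The reconnect itself is indirect: closing the socket marks it _closed, and the connect property rebuilds the connection on the next send. A condensed sketch of the post-fix behavior:

import logging

logger = logging.getLogger('opentsdb-py')


def send_line(sock, line):
    """Post-fix sketch: OSError alone covers BrokenPipeError and IOError on
    Python 3; closing the socket lets the caller's lazy connect property
    re-establish the connection on the next send."""
    try:
        sock.sendall(line)
    except OSError as error:
        logger.error("Close connection to handle exception: %s", error)
        sock.close()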