commit
stringlengths 40
40
| subject
stringlengths 1
3.25k
| old_file
stringlengths 4
311
| new_file
stringlengths 4
311
| old_contents
stringlengths 0
26.3k
| lang
stringclasses 3
values | proba
float64 0
1
| diff
stringlengths 0
7.82k
|
|---|---|---|---|---|---|---|---|
207fbc64fc9001abc62d0e687beefb3e3a25ef73
|
fix exception error
|
orator/exceptions/orm.py
|
orator/exceptions/orm.py
|
# -*- coding: utf-8 -*-
class ModelNotFound(RuntimeError):
def __init__(self, model):
self._model = model
self.message = 'No query results found for model [%s]' % self._model.__name__
def __str__(self):
return self.message
class MassAssignmentError(RuntimeError):
pass
class RelatedClassNotFound(RuntimeError):
def __init__(self, related):
self._related = related
self.message = 'The related class for "%s" does not exists' % related
def __str__(self):
return self.message
class ValidationError(ValueError):
default_detail = 'Invalid input.'
def __init__(self, detail=None):
if detail is None:
self.detail = self.default_detail if detail is None else detail
def __str__(self):
return self.detail
|
Python
| 0.000054
|
@@ -587,24 +587,16 @@
r):%0A
-default_
detail =
@@ -668,24 +668,28 @@
f detail is
+not
None:%0A
@@ -711,51 +711,8 @@
il =
- self.default_detail if detail is None else
det
|
a26ad106dba7ce2f00a0b9438629abf32a15a061
|
Improve reliability of test by clearing cache
|
orgviz/tests/test_web.py
|
orgviz/tests/test_web.py
|
import os
import tempfile
import shutil
import unittest
import textwrap
import json
import datetime
from .. import web
TMP_PREFIX = 'orgviz-test-'
def totimestamp(dt):
zero = datetime.datetime.fromtimestamp(0)
return (dt - zero).total_seconds()
class TestWebEventsData(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.tmpdir = tempfile.mkdtemp(prefix=TMP_PREFIX)
cls.org_file = os.path.join(cls.tmpdir, 'test.org')
web.app.config['ORG_FILE_COMMON'] = [cls.org_file]
cls.app = web.app.test_client()
@classmethod
def tearDownClass(cls):
shutil.rmtree(cls.tmpdir)
def write_org_file(self, text):
with open(self.org_file, 'w') as f:
f.write(textwrap.dedent(text))
def test_single_event(self):
self.write_org_file("""
* Node title
SCHEDULED: <2012-10-23 Tue>
""")
rv = self.app.get('/events_data?start=1349042400&end=1352674800')
events_data = json.loads(rv.data)
self.assertEqual(len(events_data), 1)
self.assertEqual(events_data[0]['title'], 'Node title')
def get_events_data(self, start, end):
start = totimestamp(datetime.datetime(*start))
end = totimestamp(datetime.datetime(*end))
return self.app.get(
'/events_data?start={0:.0f}&end={1:.0f}'.format(start, end))
def test_start_end(self):
self.write_org_file("""
* Node 1
SCHEDULED: <2012-10-21 Tue>
* Node 2
SCHEDULED: <2012-10-22 Wed>
* Node 3
SCHEDULED: <2012-10-24 Fri>
""")
# FIXME: clarify boundary condition 2012-10-23 in Node 3 does not work!
rv = self.get_events_data(start=(2012, 10, 20), end=(2012, 10, 23))
events_data = json.loads(rv.data)
self.assertEqual(len(events_data), 2)
self.assertEqual(events_data[0]['title'], 'Node 1')
self.assertEqual(events_data[1]['title'], 'Node 2')
|
Python
| 0.000001
|
@@ -636,16 +636,64 @@
mpdir)%0A%0A
+ def setUp(self):%0A web.cache.clear()%0A%0A
def
|
82c0698a0d1ff39a18a6ba7192a18e6f08c8aa77
|
add define_martian_year
|
planet4/stats.py
|
planet4/stats.py
|
import numpy as np
from math import pi
tau = 2 * pi
def get_fan_and_blotch_nunique_cids(data):
f1 = data.marking == 'fan'
f2 = data.marking == 'blotch'
return data[f1 | f2].classification_id.nunique()
def get_fb_to_all_ratio(data):
n_classifications = data.classification_id.nunique()
n_class_fb = get_fan_and_blotch_nunique_cids(data)
ratio = (n_class_fb / n_classifications)
return ratio
def size_of_unique(x):
return x.unique().size
def classification_counts_per_user(df):
res = df.classification_id.groupby(df.user_name,
sort=False).agg(size_of_unique).sort_values(ascending=False)
return res
def get_top_ten_users(df):
users_work = classification_counts_per_user(df)
return users_work.order(ascending=False)[:10]
def classification_counts_per_image(df):
"""Main function to help defining status of P4"""
return df.classification_id.groupby(df.image_id,
sort=False).agg(size_of_unique)
def get_no_tiles_done(df, limit=30):
counts = classification_counts_per_image(df)
no_done = counts[counts >= limit].size
return no_done
def get_status_per_classifications(df, limit=30):
"Returns status in percent of limit*n_unique_image_ids."
no_all = df.image_id.nunique()
sum_classifications = classification_counts_per_image(df).sum()
try:
return np.round(100.0 * sum_classifications / (limit * no_all), 1)
except ZeroDivisionError:
return np.nan
def get_status_per_completed_tile(df, limit=30):
no_all = len(df.image_id.unique())
no_done = get_no_tiles_done(df, limit)
try:
return np.round(100.0 * no_done / no_all, 1)
except ZeroDivisionError:
return np.nan
def classification_counts_for_user(username, df):
return df[df.user_name == username].classification_id.value_counts()
def no_of_classifications_per_user(df):
return df.user_name.groupby(df.image_id,
sort=False).agg(size_of_unique)
def get_blotch_area(record):
if record.marking != 'blotch':
return 0
else:
return 0.5 * tau * record.radius_1 * record.radius_2
###
# Season related stuff
###
def unique_image_ids_per_season(df):
return df.image_id.groupby(df.season, sort=False).agg(size_of_unique)
def define_season_column(df, colname='image_name'):
"""Create new column that indicates the MRO season.
Seasons 1,2, and 3 are MY28, 29, and 30 respectively.
Parameters:
----------
df : {pandas.DataFrame}
Dataframe that should have a column with name `colname` as deciding factor.
colname : str
Name of column to be used as HiRISE observation ID.
"""
thousands = df[colname].str[5:7].astype('int')
df['season'] = 0
df.loc[df[colname].str.startswith('PSP'), 'season'] = 1
df.loc[(thousands > 10) & (thousands < 15), 'season'] = 2
df.loc[(thousands > 15) & (thousands < 25), 'season'] = 3
df.loc[(thousands > 25) & (thousands < 35), 'season'] = 4
df.loc[(thousands > 35), 'season'] = 5
|
Python
| 0.000134
|
@@ -1,55 +1,352 @@
-import numpy as np%0Afrom math import pi%0Atau = 2 * pi
+from math import tau%0A%0Aimport numpy as np%0Aimport pandas as pd%0Afrom pandas import to_datetime%0A%0Amars_years = %7B28: %222006-01-23%22,%0A 29: %222007-12-10%22,%0A 30: %222009-10-27%22,%0A 31: %222011-09-15%22,%0A 32: %222013-08-01%22,%0A 33: %222015-06-19%22,%0A 34: %222017-05-05%22,%0A 35: %222019-03-24%22%7D
%0A%0A%0Ad
@@ -3412,8 +3412,251 @@
n'%5D = 5%0A
+%0A%0Adef define_martian_year(df, time_col_name):%0A mars_timestamps = %7Bk: pd.to_datetime(v)%0A for k, v in mars_years.items()%7D%0A df%5B'MY'%5D = 0%0A for yr, t in mars_timestamps.items():%0A df.loc%5Bdf.time %3E t, 'MY'%5D = yr%0A
|
8fa744582291aa45fcc5c8101b8f2c24744a399f
|
Fix #67 - print error message and exit when started without params.
|
sacredboard/bootstrap.py
|
sacredboard/bootstrap.py
|
# coding=utf-8
"""
Bootstrap module parses command line arguments and initializes the app.
Configures the database connection and starts the web application.
"""
import locale
import sys
import click
from flask import Flask
from gevent.pywsgi import WSGIServer
from sacredboard.app.config import jinja_filters
from sacredboard.app.data.filestorage import FileStorage
from sacredboard.app.data.mongodb import PyMongoDataAccess
from sacredboard.app.webapi import routes, metrics
locale.setlocale(locale.LC_ALL, '')
app = Flask(__name__)
@click.command()
@click.option("-m", default=None, metavar="HOST:PORT:DATABASE",
help="Connect to MongoDB using the format"
" host:port:database_name or just the database_name. "
"Default: sacred"
" Mutually exclusive with -mu")
@click.option("-mu", default=(None, None),
metavar="CONNECTION_STRING DATABASE", type=(str, str),
help="Connect to MongoDB using mongodb://..."
" and specify the database name."
" Mutually exclusive with -m")
@click.option("-mc", default="runs", metavar="COLLECTION",
help="The collection containing the Sacred's list of runs. "
"You might need it if you use a custom collection name "
"or Sacred v0.6 (which used default.runs). "
"Default: runs")
@click.option("-F", default="",
help="Path to directory containing experiment results of the"
"File Storage observer. (experimental)")
@click.option("--no-browser", is_flag=True, default=False,
help="Do not open web browser automatically.")
@click.option("--debug", is_flag=True, default=False,
help="Run the application in Flask debug mode "
"(for development).")
@click.version_option()
def run(debug, no_browser, m, mu, mc, f):
"""
Sacredboard.
\b
Sacredboard is a monitoring dashboard for Sacred.
Homepage: http://github.com/chovanecm/sacredboard
Example usage:
\b
sacredboard -m sacred
Starts Sacredboard on default port (5000) and connects to
a local MongoDB database called 'sacred'. Opens web browser.
Note: MongoDB must be listening on localhost.
\b
sacredboard -m 192.168.1.1:27017:sacred
Starts Sacredboard on default port (5000) and connects to
a MongoDB database running on 192.168.1.1 on port 27017
to a database called 'sacred'. Opens web browser.
\b
sacredboard -mu mongodb://user:pwd@host/admin?authMechanism=SCRAM-SHA-1 sacred
Starts Sacredboard on default port (5000) and connects to
a MongoDB database running on localhost on port 27017
to a database called 'sacred'. Opens web browser.
\b
sacredboard -m sacred -mc default.runs
Starts Sacredboard on default port (5000) and connects to
a local MongoDB database called 'sacred' and uses the Sacred's 0.6
default collection 'default.runs' to search the runs in.
Opens web browser.
Note: MongoDB must be listening on localhost.
"""
if m or mu != (None, None):
add_mongo_config(app, m, mu, mc)
app.config["data"].connect()
elif f:
app.config["data"] = FileStorage(f)
else:
print("Must specify either a mongodb instance or \
a path to a file storage.")
app.config['DEBUG'] = debug
app.debug = debug
jinja_filters.setup_filters(app)
routes.setup_routes(app)
metrics.initialize(app)
if debug:
app.run(host="0.0.0.0", debug=True)
else:
for port in range(5000, 5050):
http_server = WSGIServer(('0.0.0.0', port), app)
try:
http_server.start()
except OSError as e:
# try next port
continue
print("Starting sacredboard on port %d" % port)
if not no_browser:
click.launch("http://127.0.0.1:%d" % port)
http_server.serve_forever()
break
def add_mongo_config(app, simple_connection_string,
mongo_uri, collection_name):
"""
Configure the application to use MongoDB.
:param app: Flask application
:param simple_connection_string:
Expects host:port:database_name or database_name
Mutally_exclusive with mongo_uri
:param mongo_uri: Expects mongodb://... as defined
in https://docs.mongodb.com/manual/reference/connection-string/
Mutually exclusive with simple_connection_string (must be None)
:param collection_name: The collection containing Sacred's runs
:return:
"""
if mongo_uri != (None, None):
add_mongo_config_with_uri(app, mongo_uri[0], mongo_uri[1],
collection_name)
if simple_connection_string is not None:
print("Ignoring the -m option. Overridden by "
"a more specific option (-mu).", file=sys.stderr)
else:
# Use the default value 'sacred' when not specified
if simple_connection_string is None:
simple_connection_string = "sacred"
add_mongo_config_simple(app, simple_connection_string, collection_name)
def add_mongo_config_simple(app, connection_string, collection_name):
"""
Configure the app to use MongoDB.
:param app: Flask Application
:type app: Flask
:param connection_string: in format host:port:database or database
(default: sacred)
:type connection_string: str
:param collection_name: Name of the collection
:type collection_name: str
"""
split_string = connection_string.split(":")
config = {"host": "localhost", "port": 27017, "db": "sacred"}
if len(split_string) > 0 and len(split_string[-1]) > 0:
config["db"] = split_string[-1]
if len(split_string) > 1:
config["port"] = int(split_string[-2])
if len(split_string) > 2:
config["host"] = split_string[-3]
app.config["data"] = PyMongoDataAccess.build_data_access(
config["host"], config["port"], config["db"], collection_name)
def add_mongo_config_with_uri(app, connection_string_uri,
database_name, collection_name):
"""
Configure PyMongo with a MongoDB connection string.
:param app: Flask application
:param connection_string_uri: MongoDB connection string
:param database_name: Sacred database name
:param collection_name: Sacred's collection with runs
:return:
"""
app.config["data"] = PyMongoDataAccess.build_data_access_with_uri(
connection_string_uri, database_name, collection_name
)
if __name__ == '__main__':
run()
|
Python
| 0
|
@@ -3305,17 +3305,19 @@
ance or
-%5C
+%22 +
%0A
@@ -3323,18 +3323,17 @@
-
+%22
a path t
@@ -3349,17 +3349,117 @@
storage.
-%22
+%5CnRun sacredboard --help %22%0A %22for more information.%22, file=sys.stderr)%0A sys.exit(1
)%0A%0A a
|
b1d52ad5318532fa07112ab6165099e958008fda
|
fix a little bug in coco datasets loader
|
mmdet/datasets/coco.py
|
mmdet/datasets/coco.py
|
import numpy as np
from pycocotools.coco import COCO
from .custom import CustomDataset
class CocoDataset(CustomDataset):
CLASSES = ('person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus',
'train', 'truck', 'boat', 'traffic_light', 'fire_hydrant',
'stop_sign', 'parking_meter', 'bench', 'bird', 'cat', 'dog',
'horse', 'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe',
'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee',
'skis', 'snowboard', 'sports_ball', 'kite', 'baseball_bat',
'baseball_glove', 'skateboard', 'surfboard', 'tennis_racket',
'bottle', 'wine_glass', 'cup', 'fork', 'knife', 'spoon', 'bowl',
'banana', 'apple', 'sandwich', 'orange', 'broccoli', 'carrot',
'hot_dog', 'pizza', 'donut', 'cake', 'chair', 'couch',
'potted_plant', 'bed', 'dining_table', 'toilet', 'tv', 'laptop',
'mouse', 'remote', 'keyboard', 'cell_phone', 'microwave',
'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock',
'vase', 'scissors', 'teddy_bear', 'hair_drier', 'toothbrush')
def load_annotations(self, ann_file):
self.coco = COCO(ann_file)
self.cat_ids = self.coco.getCatIds()
self.cat2label = {
cat_id: i + 1
for i, cat_id in enumerate(self.cat_ids)
}
self.img_ids = self.coco.getImgIds()
img_infos = []
for i in self.img_ids:
info = self.coco.loadImgs([i])[0]
info['filename'] = info['file_name']
img_infos.append(info)
return img_infos
def get_ann_info(self, idx):
img_id = self.img_infos[idx]['id']
ann_ids = self.coco.getAnnIds(imgIds=[img_id])
ann_info = self.coco.loadAnns(ann_ids)
return self._parse_ann_info(ann_info)
def _filter_imgs(self, min_size=32):
"""Filter images too small or without ground truths."""
valid_inds = []
ids_with_ann = set(_['image_id'] for _ in self.coco.anns.values())
for i, img_info in enumerate(self.img_infos):
if self.img_ids[i] not in ids_with_ann:
continue
if min(img_info['width'], img_info['height']) >= min_size:
valid_inds.append(i)
return valid_inds
def _parse_ann_info(self, ann_info, with_mask=True):
"""Parse bbox and mask annotation.
Args:
ann_info (list[dict]): Annotation info of an image.
with_mask (bool): Whether to parse mask annotations.
Returns:
dict: A dict containing the following keys: bboxes, bboxes_ignore,
labels, masks, mask_polys, poly_lens.
"""
gt_bboxes = []
gt_labels = []
gt_bboxes_ignore = []
# Two formats are provided.
# 1. mask: a binary map of the same size of the image.
# 2. polys: each mask consists of one or several polys, each poly is a
# list of float.
if with_mask:
gt_masks = []
gt_mask_polys = []
gt_poly_lens = []
for i, ann in enumerate(ann_info):
if ann.get('ignore', False):
continue
x1, y1, w, h = ann['bbox']
if ann['area'] <= 0 or w < 1 or h < 1:
continue
bbox = [x1, y1, x1 + w - 1, y1 + h - 1]
if ann['iscrowd']:
gt_bboxes_ignore.append(bbox)
else:
gt_bboxes.append(bbox)
gt_labels.append(self.cat2label[ann['category_id']])
if with_mask:
gt_masks.append(self.coco.annToMask(ann))
mask_polys = [
p for p in ann['segmentation'] if len(p) >= 6
] # valid polygons have >= 3 points (6 coordinates)
poly_lens = [len(p) for p in mask_polys]
gt_mask_polys.append(mask_polys)
gt_poly_lens.extend(poly_lens)
if gt_bboxes:
gt_bboxes = np.array(gt_bboxes, dtype=np.float32)
gt_labels = np.array(gt_labels, dtype=np.int64)
else:
gt_bboxes = np.zeros((0, 4), dtype=np.float32)
gt_labels = np.array([], dtype=np.int64)
if gt_bboxes_ignore:
gt_bboxes_ignore = np.array(gt_bboxes_ignore, dtype=np.float32)
else:
gt_bboxes_ignore = np.zeros((0, 4), dtype=np.float32)
ann = dict(
bboxes=gt_bboxes, labels=gt_labels, bboxes_ignore=gt_bboxes_ignore)
if with_mask:
ann['masks'] = gt_masks
# poly format is not used in the current implementation
ann['mask_polys'] = gt_mask_polys
ann['poly_lens'] = gt_poly_lens
return ann
|
Python
| 0.000001
|
@@ -1905,16 +1905,31 @@
ann_info
+,self.with_mask
)%0A%0A d
|
0cbb38671098b938ce7f6cf66543cb7155a66b7b
|
Add note about default permissions of files.write()
|
osgtest/library/files.py
|
osgtest/library/files.py
|
"""
Functions for dealing with writing and tracking of files.
The most important part of this module is the backup mechanism.
Backups are identified by a (filepath, owner) pair--where owner is an
arbitraty string identifying the test component responsible for the file,
specifically its cleanup.
preserve() does the backup. It raises an error if a pair has been used.
Normally you would not call it directly, but call write(), append() or
replace(), and pass either the value of owner, or backup=False.
restore() puts the backup back to its original location.
Since one owner cannot back up the same file twice, if you modify a file
twice in one module then you must either call preserve() directly, or pass
an owner for the first append/replace/write call, and backup=False for all
subsequent calls to append/replace/write.
(You could also use a different owner for each change, but then you would
have to restore it for each owner you used else the cleanup test will fail).
"""
import glob
import os
import re
import shutil
import tempfile
import osgtest.library.core as core
_backup_directory = '/usr/share/osg-test/backups'
_backups = {}
def read(path, as_single_string=False):
"""Read the file at the path and return its contents as a list or string."""
the_file = open(path, 'r')
if as_single_string:
contents = the_file.read()
else:
contents = the_file.readlines()
the_file.close()
return contents
def preserve(path, owner):
"""Backup the file at path and remember it with the given owner.
owner must be specified. The (path, owner) pair must not have been
previously used in a call to preserve. Raises ValueError if either
of these are not true.
"""
if owner is None:
raise ValueError('Must have owner string')
backup_id = (path, owner)
if backup_id in _backups:
raise ValueError("Already have a backup of '%s' for '%s'" % (path, owner))
backup_path = os.path.join(_backup_directory, os.path.basename(path) + '#' + owner)
if os.path.exists(backup_path):
raise ValueError("Backup already exists at '%s'" % (backup_path))
if os.path.exists(path):
if not os.path.isdir(_backup_directory):
os.mkdir(_backup_directory)
shutil.copy2(path, backup_path)
_backups[backup_id] = backup_path
core.log_message("Backed up '%s' as '%s'" % (path, backup_path))
else:
_backups[backup_id] = None
def write(path, contents, owner=None, backup=True):
"""Write the contents to a file at the path.
The 'owner' argument (default: None), is a string that identifies the owner
of the file. If the 'backup' argument is True (default), then any existing
file at the path will be backed up for later restoration. However, because
backups are identified in part by 'owner', if 'backup' is True, then 'owner'
must be defined. Typically, a caller specifies either 'backup=False' to
turn off backups (not recommended) or 'owner=[some string]' to set the owner
for the backup.
"""
# The default arguments are invalid: Either "backup" must be false or the
# "owner" must be specified.
if (owner is None) and backup:
raise ValueError('Must specify an owner or backup=False')
# Write temporary file
temp_fd, temp_path = tempfile.mkstemp(prefix=os.path.basename(path) + '.', suffix='.osgtest-new',
dir=os.path.dirname(path))
temp_file = os.fdopen(temp_fd, 'w')
if isinstance(contents, list) or isinstance(contents, tuple):
temp_file.writelines(contents)
else:
temp_file.write(contents)
temp_file.close()
# Copy ownership and permissions
if os.path.exists(path):
old_stat = os.stat(path)
os.chown(temp_path, old_stat.st_uid, old_stat.st_gid)
os.chmod(temp_path, old_stat.st_mode)
# Back up existing file
if backup:
preserve(path, owner)
# Atomically move temporary file into final location
os.rename(temp_path, path)
core.log_message('Wrote %d bytes to %s' % (os.stat(path).st_size, path))
def replace(path, old_line, new_line, owner=None, backup=True):
"""Replace an old line with a new line in given path.
The 'owner' and 'backup' arguments are passed to write().
"""
lines_to_write = []
lines = read(path)
for line in lines:
if line.rstrip('\n') == old_line.rstrip('\n'):
lines_to_write.append(new_line + '\n')
else:
lines_to_write.append(line.rstrip('\n') + '\n')
write(path, lines_to_write, owner, backup)
def append(path, contents, force=False, owner=None, backup=True):
"""Append the contents to the given file.
Normally, if the contents already exist in the file, no action is taken.
However, if the force argument is True, then the extra contents are always
appended.
The 'owner' and 'backup' arguments are the same as in write().
"""
# The default arguments are invalid: Either "backup" must be false or the
# "owner" must be specified.
if backup:
if owner is None:
raise ValueError('Must specify an owner or backup=False')
preserve(path, owner)
if os.path.exists(path):
old_contents = read(path)
else:
old_contents = []
if (not force) and (contents in old_contents):
return
new_contents = old_contents + [contents]
write(path, new_contents, backup=False)
def restore(path, owner):
"""Restores the path to its state prior to being written by its owner."""
backup_id = (path, owner)
if backup_id not in _backups:
raise ValueError("No backup of '%s' for '%s'" % (path, owner))
if os.path.exists(path):
os.remove(path)
core.log_message('Removed test %s' % (path))
backup_path = _backups[backup_id]
if (backup_path is not None) and os.path.exists(backup_path):
shutil.move(backup_path, path)
core.log_message('Restored original %s' % (path))
del _backups[backup_id]
def remove(path, force=False):
"""Remove the path, which could be a file, empty directory, or file glob.
If the force argument is True, then this function will remove non-empty directories.
"""
if re.search(r'[\]*?]', path):
for glob_path in glob.glob(path):
if os.path.isfile(glob_path):
os.unlink(glob_path)
elif os.path.isdir(glob_path):
if force:
shutil.rmtree(glob_path)
else:
os.rmdir(glob_path)
elif os.path.isdir(path):
if not os.listdir(path):
os.rmdir(path)
else:
if force:
shutil.rmtree(path)
else:
# Go ahead and try the rmdir to raise an exception
os.rmdir(path)
elif os.path.isfile(path):
os.unlink(path)
|
Python
| 0.000001
|
@@ -3061,16 +3061,233 @@
backup.
+%0A%0A NOTE: If the file doesn't exist, the default permissions are '0600' and%0A owned by root/root. Make sure to properly set the permissions of the file%0A afterwards if you need specific permissions or ownership.
%0A %22%22%22
|
79c8707965603551b1ff386c17d99467f48b6035
|
determine units of loaded SVG and also scale the display units
|
svg2plt.py
|
svg2plt.py
|
import xml.dom.minidom
import re
from svg.path import parse_path
class SVG2PLT:
# the plt code to output
plt = ''
delimiter = 'z' # path delimiter (mM -> zZ)
# factors to transform the SVG as it is read
scale = 1.0
x_offset = 0.0
y_offset = 0.0
divisions = 30.0 # the number of point divisions on an element
overcut = 0.2 # how much to overcut the next shape (TODO: units for now as percentage. could be a percentage of the line, could be mm?)
# SVG properties that may be useful
min_x = 100000
min_y = 100000
max_x = 0
max_y = 0
width = 0
height = 0
# real world display measurements
unit = 0.01 # a unit value for the number of pixels per inch
display_width = 0
display_height = 0
display_units = "in"
def start(self):
self.plt += 'ST0;\n'
self.plt += "U"+str(int(self.x_offset))+","+str(int(self.y_offset))+";\n"
self.plt += 'LED255,64,0;\n'
def end(self):
self.plt += 'ST0;\n'
self.plt += "U"+str(int(self.x_offset))+","+str(int(self.y_offset))+";\n"
self.plt += 'LED128,128,128;\n'
# open a file with name 'filename', extract the path elements, and convert them to PLT code
def parse_file(self, filename):
self.start()
#read the svg doc as a DOM to extract the XML <path> element
doc = xml.dom.minidom.parse(filename)
# determine the ratio of each pixel to real world units
svg = doc.getElementsByTagName('svg')[0]
#get the units for this file
height = svg.getAttribute('height')
width = svg.getAttribute('width')
if(height.find("in")):
self.display_units = "in"
elif(height.find("mm")):
self.display_units = "in"
elif(height.find("cm")):
self.display_units = "cm"
height = height.replace(self.display_units, "")
width = width.replace(self.display_units, "")
viewbox = svg.getAttribute('viewBox').rsplit(" ")
self.unit = (float(width)/float(viewbox[2]) + float(height)/float(viewbox[3]))/2
# extract the path elements
path_strings = [path.getAttribute('d') for path in doc.getElementsByTagName('path')]
# iterate over each path that is found
for path_string in path_strings:
# break up each path shape into the individual lines (mM -> zZ)
lines = re.split('z|Z', path_string)
for line in lines:
if(len(line)>2):
line += self.delimiter
path = parse_path(line) # convert the string to a path using svg.path library
self.parse_path(path) # parse the path from SVG to PLT
# add overcut to the item
if(self.overcut and path.closed==True):
self.plt += self.parse_overcut(path)
self.end()
# parse a path (mM -> zZ)
def parse_path(self, path):
first = True
for item in path:
self.plt += self.parse_item(item, first)
if(first):
first = False;
def parse_overcut(self, path):
item = path[0]
output = ''
for i in range(0, int(self.divisions*self.overcut)):
loc = i/self.divisions
point = item.point(loc)
output += self.command('D', point.real, point.imag)
return output
def parse_item(self, item, first):
output = ''
for i in range(0, int(self.divisions)):
loc = i/self.divisions
point = item.point(loc)
if(first and i==0):
output += self.command('U', point.real, point.imag)
output += self.command('D', point.real, point.imag)
self.calc_bounding_box(point.real, point.imag)
return output
def command(self, dir, x, y):
output = dir + str(int(x*self.scale+self.x_offset)) +","+ str(int(y*self.scale+self.y_offset)) +";\n"
return(output)
# calculate the if x, y are the bounding box
def calc_bounding_box(self, x, y):
if(x<self.min_x):
self.min_x = x
if(x>self.max_x):
self.max_x = x
if(y<self.min_y):
self.min_y = y
if(y>self.max_y):
self.max_y = y
self.width = self.max_x - self.min_x
self.height = self.max_y - self.min_y
self.display_width = float("{0:.2f}".format(self.width*self.unit))
self.display_height = float("{0:.2f}".format(self.height*self.unit))
|
Python
| 0
|
@@ -1519,16 +1519,20 @@
nd(%22in%22)
+!=-1
):%0A%09%09%09se
@@ -1579,16 +1579,20 @@
nd(%22mm%22)
+!=-1
):%0A%09%09%09se
@@ -1639,16 +1639,20 @@
nd(%22cm%22)
+!=-1
):%0A%09%09%09se
@@ -1675,16 +1675,76 @@
= %22cm%22%0A
+%09%09elif(height.find(%22px%22)!=-1):%0A%09%09%09self.display_units = %22px%22%0A
%09%09%09%0A%09%09he
@@ -3958,16 +3958,27 @@
elf.unit
+*self.scale
))%0A%09%09sel
@@ -4040,10 +4040,21 @@
elf.unit
+*self.scale
))
|
8a71e75501bc7965df86f4e31ffb200edba876ee
|
Change history API endpoints so we can get the main page's history.
|
wikked/views/history.py
|
wikked/views/history.py
|
import os.path
from flask import g, jsonify, request, abort
from pygments import highlight
from pygments.lexers import get_lexer_by_name
from pygments.formatters import get_formatter_by_name
from wikked.page import PageLoadingError
from wikked.scm.base import ACTION_NAMES
from wikked.utils import PageNotFoundError
from wikked.views import (is_page_readable, get_page_meta, get_page_or_404,
url_from_viewarg,
CHECK_FOR_READ)
from wikked.web import app
def get_history_data(history, needs_files=False):
hist_data = []
for i, rev in enumerate(reversed(history)):
rev_data = {
'index': i + 1,
'rev_id': rev.rev_id,
'rev_name': rev.rev_name,
'author': rev.author.name,
'timestamp': rev.timestamp,
'description': rev.description
}
if needs_files:
rev_data['pages'] = []
for f in rev.files:
url = None
path = os.path.join(g.wiki.root, f['path'])
try:
page = g.wiki.db.getPage(path=path)
# Hide pages that the user can't see.
if not is_page_readable(page):
continue
url = page.url
except PageNotFoundError:
pass
except PageLoadingError:
pass
if not url:
url = os.path.splitext(f['path'])[0]
rev_data['pages'].append({
'url': url,
'action': ACTION_NAMES[f['action']]
})
rev_data['num_pages'] = len(rev_data['pages'])
if len(rev_data['pages']) > 0:
hist_data.append(rev_data)
else:
hist_data.append(rev_data)
return hist_data
@app.route('/api/history')
def api_site_history():
after_rev = request.args.get('rev')
history = g.wiki.getHistory(limit=10, after_rev=after_rev)
hist_data = get_history_data(history, needs_files=True)
result = {'history': hist_data}
return jsonify(result)
@app.route('/api/history/<path:url>')
def api_page_history(url):
page = get_page_or_404(url, check_perms=CHECK_FOR_READ)
history = page.getHistory()
hist_data = get_history_data(history)
result = {'url': url, 'meta': get_page_meta(page), 'history': hist_data}
return jsonify(result)
@app.route('/api/revision/<path:url>')
def api_read_page_rev(url):
rev = request.args.get('rev')
if rev is None:
abort(400)
page = get_page_or_404(url, check_perms=CHECK_FOR_READ)
page_rev = page.getRevision(rev)
meta = dict(get_page_meta(page, True), rev=rev)
result = {'meta': meta, 'text': page_rev}
return jsonify(result)
@app.route('/api/diff/<path:url>')
def api_diff_page(url):
rev1 = request.args.get('rev1')
rev2 = request.args.get('rev2')
if rev1 is None:
abort(400)
page = get_page_or_404(url, check_perms=CHECK_FOR_READ)
diff = page.getDiff(rev1, rev2)
if 'raw' not in request.args:
lexer = get_lexer_by_name('diff')
formatter = get_formatter_by_name('html')
diff = highlight(diff, lexer, formatter)
if rev2 is None:
meta = dict(get_page_meta(page, True), change=rev1)
else:
meta = dict(get_page_meta(page, True), rev1=rev1, rev2=rev2)
result = {'meta': meta, 'diff': diff}
return jsonify(result)
@app.route('/api/revert/<path:url>', methods=['POST'])
def api_revert_page(url):
if not 'rev' in request.form:
abort(400)
rev = request.form['rev']
author = request.remote_addr
if 'author' in request.form and len(request.form['author']) > 0:
author = request.form['author']
message = 'Reverted %s to revision %s' % (url, rev)
if 'message' in request.form and len(request.form['message']) > 0:
message = request.form['message']
url = url_from_viewarg(url)
page_fields = {
'rev': rev,
'author': author,
'message': message
}
g.wiki.revertPage(url, page_fields)
result = {'reverted': 1}
return jsonify(result)
|
Python
| 0
|
@@ -1876,24 +1876,29 @@
route('/api/
+site-
history')%0Ade
@@ -2139,32 +2139,153 @@
onify(result)%0A%0A%0A
+@app.route('/api/history/')%0Adef api_main_page_history():%0A return api_page_history(g.wiki.main_page_url.lstrip('/'))%0A%0A%0A
@app.route('/api
|
6147a5229f67874179f371ded2e835c318a2bd56
|
correct prob formulation for secondary user beamforming
|
examples/secondary_user_beamforming.py
|
examples/secondary_user_beamforming.py
|
#!/usr/bin/python
# Secondary user multicast beamforming
# minimize ||w||^2
# subject to |h_i^H w|^2 >= tau
# |g_i^H w|^2 <= eta
# with variable w in complex^n
import numpy as np
import cvxpy as cvx
import qcqp
n = 10
m = 8
l = 2
tau = 10
eta = 1
np.random.seed(1)
H = np.random.randn(m, n)
G = np.random.randn(l, n)
w = cvx.Variable(n)
obj = cvx.Minimize(cvx.sum_squares(w))
cons = [cvx.square(H*w) >= tau, cvx.square(G*w) <= eta]
prob = cvx.Problem(obj, cons)
# SDP-based lower bound
lb = prob.solve(method='sdp-relax', solver=cvx.MOSEK)
print ('Lower bound: %.3f' % lb)
# Upper bounds
ub_cd = prob.solve(method='coord-descent', solver=cvx.MOSEK, num_samples=10)
ub_admm = prob.solve(method='qcqp-admm', solver=cvx.MOSEK, num_samples=10)
ub_dccp = prob.solve(method='qcqp-dccp', solver=cvx.MOSEK, num_samples=10, tau=1)
print ('Lower bound: %.3f' % lb)
print ('Upper bounds:')
print (' Coordinate descent: %.3f' % ub_cd)
print (' Nonconvex ADMM: %.3f' % ub_admm)
print (' Convex-concave programming: %.3f' % ub_dccp)
|
Python
| 0
|
@@ -170,16 +170,151 @@
omplex%5En
+.%0A# Data vectors h_i and g_i are also in complex%5En.%0A# The script below expands out the complex part and%0A# works with real numbers only.
%0A%0Aimport
@@ -359,16 +359,39 @@
t qcqp%0A%0A
+# n, m, l: 100, 30, 10%0A
n = 10%0Am
@@ -397,9 +397,9 @@
m =
-8
+3
%0Al =
@@ -439,16 +439,17 @@
eed(1)%0AH
+R
= np.ra
@@ -468,10 +468,114 @@
, n)
+/np.sqrt(2);%0AHI = np.random.randn(m, n)/np.sqrt(2);%0AH1 = np.hstack((HR, HI))%0AH2 = np.hstack((-HI, HR))%0A
%0AG
+R
= n
@@ -594,16 +594,118 @@
dn(l, n)
+/np.sqrt(2);%0AGI = np.random.randn(l, n)/np.sqrt(2);%0AG1 = np.hstack((GR, GI))%0AG2 = np.hstack((-GI, GR))
%0A%0Aw = cv
@@ -715,16 +715,18 @@
ariable(
+2*
n)%0Aobj =
@@ -767,16 +767,21 @@
cons = %5B
+%0A
cvx.squa
@@ -784,16 +784,36 @@
square(H
+1*w) + cvx.square(H2
*w) %3E= t
@@ -815,16 +815,20 @@
%3E= tau,
+%0A
cvx.squ
@@ -832,16 +832,36 @@
square(G
+1*w) + cvx.square(G2
*w) %3C= e
@@ -862,16 +862,17 @@
) %3C= eta
+%0A
%5D%0Aprob =
@@ -1027,158 +1027,181 @@
nds%0A
-ub_cd = prob.solve(method='coord-descent', solver=cvx.MOSEK, num_samples=10)%0Aub_admm = prob.solve(method='qcqp-admm', solver=cvx.MOSEK, num_samples=10
+print ('Upper bounds:')%0Aub_admm = prob.solve(method='qcqp-admm', use_sdp=False, solver=cvx.MOSEK, num_samples=10, rho=np.sqrt(m+l))%0Aprint (' Nonconvex ADMM: %25.3f' %25 ub_admm
)%0Aub
@@ -1238,16 +1238,31 @@
p-dccp',
+ use_sdp=False,
solver=
@@ -1307,181 +1307,175 @@
t ('
-Lower bound: %25.3f' %25 lb)%0Aprint ('Upper bounds:')%0Aprint (' Coordinate descent: %25.3f' %25 ub_cd)%0Aprint (' Nonconvex ADMM: %25.3f' %25 ub_admm)%0Aprint (' Convex-concave programming
+ Convex-concave programming: %25.3f' %25 ub_dccp)%0Aub_cd = prob.solve(method='coord-descent', use_sdp=False, solver=cvx.MOSEK, num_samples=10)%0Aprint (' Coordinate descent
: %25.
@@ -1471,26 +1471,24 @@
descent: %25.3f' %25 ub_
+c
d
-ccp
)%0A
|
183e08be99fae2ba521c5fb60b7205d3c3c5b520
|
Add grappelli styles to make inlines collapsible
|
winthrop/books/admin.py
|
winthrop/books/admin.py
|
from django.contrib import admin
from winthrop.common.admin import NamedNotableAdmin
from .models import Subject, Language, Publisher, OwningInstitution, \
Book, Catalogue, BookSubject, BookLanguage, CreatorType, Creator, \
PersonBook, PersonBookRelationshipType
class NamedNotableBookCount(NamedNotableAdmin):
list_display = NamedNotableAdmin.list_display + ('book_count', )
class OwningInstitutionAdmin(admin.ModelAdmin):
list_display = ('short_name', 'name', 'place', 'has_notes', 'book_count')
fields = ('name', 'short_name', 'contact_info', 'place', 'notes')
search_fields = ('name', 'short_name', 'contact_info', 'notes')
class CatalogueInline(admin.TabularInline):
model = Catalogue
fields = ('institution', 'call_number', 'start_year', 'end_year',
'is_current', 'is_sammelband', 'bound_order', 'notes')
class SubjectInline(admin.TabularInline):
model = BookSubject
fields = ('subject', 'is_primary', 'notes')
class LanguageInline(admin.TabularInline):
model = BookLanguage
fields = ('language', 'is_primary', 'notes')
class CreatorInline(admin.TabularInline):
model = Creator
fields = ('creator_type', 'person', 'notes')
class BookAdmin(admin.ModelAdmin):
list_display = ('short_title', 'author_names', 'pub_year',
'catalogue_call_numbers', 'is_extant', 'is_annotated',
'is_digitized')
# NOTE: fields are specified here so that notes input will be displayed last
fields = ('title', 'short_title', 'original_pub_info', 'publisher',
'pub_place', 'pub_year', 'is_extant', 'is_annotated', 'is_digitized',
'red_catalog_number', 'ink_catalog_number', 'pencil_catalog_number',
'dimensions', 'notes')
inlines = [CreatorInline, LanguageInline, SubjectInline, CatalogueInline]
list_filter = ('subjects', 'languages')
admin.site.register(Subject, NamedNotableBookCount)
admin.site.register(Language, NamedNotableBookCount)
admin.site.register(Publisher, NamedNotableBookCount)
admin.site.register(OwningInstitution, OwningInstitutionAdmin)
admin.site.register(Book, BookAdmin)
admin.site.register(Catalogue)
admin.site.register(CreatorType)
# NOTE: these will probably be inlines, but register for testing for now
admin.site.register(Creator)
admin.site.register(PersonBook)
admin.site.register(PersonBookRelationshipType)
|
Python
| 0
|
@@ -648,24 +648,195 @@
'notes')%0A%0A%0A
+class CollapsibleTabularInline(admin.TabularInline):%0A 'Django admin tabular inline with grappelli collapsible classes added'%0A classes = ('grp-collapse grp-open',)%0A%0A%0A
class Catalo
@@ -837,38 +837,43 @@
CatalogueInline(
-admin.
+Collapsible
TabularInline):%0A
@@ -1028,16 +1028,17 @@
otes')%0A%0A
+%0A
class Su
@@ -1045,30 +1045,35 @@
bjectInline(
-admin.
+Collapsible
TabularInlin
@@ -1167,30 +1167,35 @@
guageInline(
-admin.
+Collapsible
TabularInlin
@@ -1286,38 +1286,43 @@
s CreatorInline(
-admin.
+Collapsible
TabularInline):%0A
@@ -1575,16 +1575,29 @@
gitized'
+, 'has_notes'
)%0A #
@@ -2318,39 +2318,8 @@
in)%0A
-admin.site.register(Catalogue)%0A
admi
@@ -2345,16 +2345,35 @@
atorType
+, NamedNotableAdmin
)%0A# NOTE
@@ -2443,37 +2443,8 @@
now%0A
-admin.site.register(Creator)%0A
admi
|
f0fab0c0add34e6b74583f524c86a3b42a7c7c54
|
remove 'fml'
|
plugins/games.py
|
plugins/games.py
|
"""
games.py: Create a bot that provides game functionality (dice, 8ball, etc).
"""
import random
import urllib.request
import urllib.error
from xml.etree import ElementTree
from pylinkirc import utils
from pylinkirc.log import log
mydesc = "The \x02Games\x02 plugin provides simple games for IRC."
gameclient = utils.registerService("Games", manipulatable=True, desc=mydesc)
reply = gameclient.reply # TODO find a better syntax for ServiceBot.reply()
error = gameclient.error # TODO find a better syntax for ServiceBot.error()
# commands
def dice(irc, source, args):
"""<num>d<sides>
Rolls a die with <sides> sides <num> times.
"""
if not args:
reply(irc, "No string given.")
return
try:
# Split num and sides and convert them to int.
num, sides = map(int, args[0].split('d', 1))
except ValueError:
# Invalid syntax. Show the command help.
gameclient.help(irc, source, ['dice'])
return
assert 1 < sides <= 100, "Invalid side count (must be 2-100)."
assert 1 <= num <= 100, "Cannot roll more than 100 dice at once."
results = []
for _ in range(num):
results.append(random.randint(1, sides))
# Convert results to strings, join them, format, and reply.
s = 'You rolled %s: %s (total: %s)' % (args[0], ' '.join([str(x) for x in results]), sum(results))
reply(irc, s)
gameclient.add_cmd(dice, 'd')
gameclient.add_cmd(dice, featured=True)
eightball_responses = ["It is certain.",
"It is decidedly so.",
"Without a doubt.",
"Yes, definitely.",
"You may rely on it.",
"As I see it, yes.",
"Most likely.",
"Outlook good.",
"Yes.",
"Signs point to yes.",
"Reply hazy, try again.",
"Ask again later.",
"Better not tell you now.",
"Cannot predict now.",
"Concentrate and ask again.",
"Don't count on it.",
"My reply is no.",
"My sources say no.",
"Outlook not so good.",
"Very doubtful."]
def eightball(irc, source, args):
"""[<question>]
Asks the Magic 8-ball a question.
"""
reply(irc, random.choice(eightball_responses))
gameclient.add_cmd(eightball, featured=True)
gameclient.add_cmd(eightball, '8ball')
gameclient.add_cmd(eightball, '8b')
def fml(irc, source, args):
"""[<id>]
Displays an entry from fmylife.com. If <id> is not given, fetch a random entry from the API."""
try:
query = args[0]
except IndexError:
# Get a random FML from the API.
query = 'random'
# TODO: configurable language?
url = ('http://api.betacie.com/view/%s/nocomment'
'?key=4be9c43fc03fe&language=en' % query)
try:
data = urllib.request.urlopen(url).read()
except urllib.error as e:
error(irc, '%s' % e)
return
tree = ElementTree.fromstring(data.decode('utf-8'))
tree = tree.find('items/item')
try:
category = tree.find('category').text
text = tree.find('text').text
fmlid = tree.attrib['id']
url = tree.find('short_url').text
except AttributeError as e:
log.debug("games.FML: Error fetching FML %s from URL %s: %s",
query, url, e)
error(irc, "That FML does not exist or there was an error "
"fetching data from the API.")
return
if not fmlid:
error(irc, "That FML does not exist.")
return
# TODO: customizable formatting
votes = "\x02[Agreed: %s / Deserved: %s]\x02" % \
(tree.find('agree').text, tree.find('deserved').text)
s = '\x02#%s [%s]\x02: %s - %s \x02<\x0311%s\x03>\x02' % \
(fmlid, category, text, votes, url)
reply(irc, s)
gameclient.add_cmd(fml, featured=True)
def die(irc=None):
utils.unregisterService('games')
|
Python
| 0
|
@@ -17,161 +17,63 @@
eate
+s
a bot
-that
provid
-es game functionality (dice, 8ball, etc).%0A%22%22%22%0Aimport random%0Aimport urllib.request%0Aimport urllib.error%0Afrom xml.etree import ElementTree
+ing a few simple games.%0A%22%22%22%0Aimport random
%0A%0Afr
@@ -2325,1481 +2325,8 @@
')%0A%0A
-def fml(irc, source, args):%0A %22%22%22%5B%3Cid%3E%5D%0A%0A Displays an entry from fmylife.com. If %3Cid%3E is not given, fetch a random entry from the API.%22%22%22%0A try:%0A query = args%5B0%5D%0A except IndexError:%0A # Get a random FML from the API.%0A query = 'random'%0A%0A # TODO: configurable language?%0A url = ('http://api.betacie.com/view/%25s/nocomment'%0A '?key=4be9c43fc03fe&language=en' %25 query)%0A try:%0A data = urllib.request.urlopen(url).read()%0A except urllib.error as e:%0A error(irc, '%25s' %25 e)%0A return%0A%0A tree = ElementTree.fromstring(data.decode('utf-8'))%0A tree = tree.find('items/item')%0A%0A try:%0A category = tree.find('category').text%0A text = tree.find('text').text%0A fmlid = tree.attrib%5B'id'%5D%0A url = tree.find('short_url').text%0A except AttributeError as e:%0A log.debug(%22games.FML: Error fetching FML %25s from URL %25s: %25s%22,%0A query, url, e)%0A error(irc, %22That FML does not exist or there was an error %22%0A %22fetching data from the API.%22)%0A return%0A%0A if not fmlid:%0A error(irc, %22That FML does not exist.%22)%0A return%0A%0A # TODO: customizable formatting%0A votes = %22%5Cx02%5BAgreed: %25s / Deserved: %25s%5D%5Cx02%22 %25 %5C%0A (tree.find('agree').text, tree.find('deserved').text)%0A s = '%5Cx02#%25s %5B%25s%5D%5Cx02: %25s - %25s %5Cx02%3C%5Cx0311%25s%5Cx03%3E%5Cx02' %25 %5C%0A (fmlid, category, text, votes, url)%0A reply(irc, s)%0Agameclient.add_cmd(fml, featured=True)%0A%0A
def
|
850cc5b6401fd4d72e69b1f52050bfeef28e2132
|
add room configuration, groupchat and some clean up, still needs documentation and a bit more stuff to add re #24
|
wokkel/test/test_muc.py
|
wokkel/test/test_muc.py
|
# Copyright (c) 2003-2008 Ralph Meijer
# See LICENSE for details.
"""
Tests for L{wokkel.muc}
"""
from zope.interface import verify
from twisted.trial import unittest
from twisted.internet import defer
from twisted.words.xish import domish, xpath
from twisted.words.protocols.jabber import error
from twisted.words.protocols.jabber.jid import JID
from wokkel import data_form, iwokkel, muc, shim, disco
from wokkel.generic import parseXml
from wokkel.test.helpers import XmlStreamStub
try:
from twisted.words.protocols.jabber.xmlstream import toResponse
except ImportError:
from wokkel.compat import toResponse
def calledAsync(fn):
"""
Function wrapper that fires a deferred upon calling the given function.
"""
d = defer.Deferred()
def func(*args, **kwargs):
try:
result = fn(*args, **kwargs)
except:
d.errback()
else:
d.callback(result)
return d, func
class MucClientTest(unittest.TestCase):
timeout = 2
def setUp(self):
self.stub = XmlStreamStub()
self.protocol = muc.MUCClient()
self.protocol.xmlstream = self.stub.xmlstream
self.protocol.connectionInitialized()
def test_interface(self):
"""
Do instances of L{muc.MUCClient} provide L{iwokkel.IMUCClient}?
"""
verify.verifyObject(iwokkel.IMUCClient, self.protocol)
def test_discoServerSupport(self):
"""Test for disco support from a server.
"""
test_srv = 'shakespeare.lit'
def cb(query):
# check namespace
self.failUnless(query.uri==disco.NS_INFO, 'Wrong namespace')
d = self.protocol.disco(test_srv)
d.addCallback(cb)
iq = self.stub.output[-1]
# send back a response
response = toResponse(iq, 'result')
response.addElement('query', disco.NS_INFO)
# need to add information to response
response.query.addChild(disco.DiscoFeature(muc.NS))
response.query.addChild(disco.DiscoIdentity(category='conference',
name='Macbeth Chat Service',
type='text'))
self.stub.send(response)
return d
def test_joinRoom(self):
"""Test joining a room
"""
test_room = 'test'
test_srv = 'conference.example.org'
test_nick = 'Nick'
def cb(room):
self.assertEquals(test_room, room.name)
d = self.protocol.join(test_srv, test_room, test_nick)
d.addCallback(cb)
prs = self.stub.output[-1]
self.failUnless(prs.name=='presence', "Need to be presence")
self.failUnless(getattr(prs, 'x', None), 'No muc x element')
# send back user presence, they joined
response = muc.UserPresence(frm=test_room+'@'+test_srv+'/'+test_nick)
self.stub.send(response)
return d
def test_joinRoomForbidden(self):
"""Test joining a room and getting an error
"""
test_room = 'test'
test_srv = 'conference.example.org'
test_nick = 'Nick'
# p = muc.BasicPresenc(to=)
def cb(error):
self.failUnless(isinstance(error.value,muc.PresenceError), 'Wrong type')
self.failUnless(error.value['type']=='error', 'Not an error returned')
d = self.protocol.join(test_srv, test_room, test_nick)
d.addBoth(cb)
prs = self.stub.output[-1]
self.failUnless(prs.name=='presence', "Need to be presence")
self.failUnless(getattr(prs, 'x', None), 'No muc x element')
# send back user presence, they joined
response = muc.PresenceError(error=muc.MUCError('auth',
'forbidden'
),
frm=test_room+'@'+test_srv+'/'+test_nick)
self.stub.send(response)
return d
|
Python
| 0
|
@@ -1397,16 +1397,883 @@
ocol)%0A%0A%0A
+ def test_presence(self):%0A %22%22%22Test receiving room presence%0A %22%22%22%0A p = muc.UserPresence()%0A%09%0A def userPresence(prs):%0A self.failUnless(len(prs.children)==1, 'Not enough children')%0A self.failUnless(getattr(prs,'x',None), 'No x element')%0A%09 %0A%09%0A d, self.protocol.receivedUserPresence = calledAsync(userPresence)%0A self.stub.send(p)%0A return d%0A%0A%0A def test_groupChat(self):%0A %22%22%22Test receiving room presence%0A %22%22%22%0A m = muc.GroupChat('test@test.com',body='test')%0A%09%0A def groupChat(elem):%0A self.failUnless(elem.name=='message','Wrong stanza')%0A self.failUnless(elem%5B'type'%5D == 'groupchat', 'Wrong attribute')%0A %09 %0A%09%0A d, self.protocol.receivedGroupChat = calledAsync(groupChat)%0A self.stub.send(m)%0A return d%0A%0A
%0A def
@@ -4090,45 +4090,8 @@
k'%0A%0A
- # p = muc.BasicPresenc(to=)%0A%0A
@@ -4913,32 +4913,32 @@
.send(response)%0A
-
return d
@@ -4938,16 +4938,207 @@
eturn d %0A
+%0A def test_roomConfigure(self):%0A%0A test_room = 'test'%0A test_srv = 'conference.example.org'%0A test_nick = 'Nick' %0A%0A self.fail('Not Implemented')%0A %0A%0A
|
46d58dc9ae6db2dc8566cbc7bc9cc1399283f90f
|
Fix OSError when roots file hash hasn't been built
|
salt/fileserver/roots.py
|
salt/fileserver/roots.py
|
'''
The default file server backend
Based on the environments in the :conf_master:`file_roots` configuration
option.
'''
# Import python libs
import os
# Import salt libs
import salt.fileserver
import salt.utils
def find_file(path, env='base', **kwargs):
'''
Search the environment for the relative path
'''
fnd = {'path': '',
'rel': ''}
if os.path.isabs(path):
return fnd
if env not in __opts__['file_roots']:
return fnd
if 'index' in kwargs:
try:
root = __opts__['file_roots'][env][int(kwargs['index'])]
except IndexError:
# An invalid index was passed
return fnd
except ValueError:
# An invalid index option was passed
return fnd
full = os.path.join(root, path)
if os.path.isfile(full) and not salt.fileserver.is_file_ignored(__opts__, full):
fnd['path'] = full
fnd['rel'] = path
return fnd
for root in __opts__['file_roots'][env]:
full = os.path.join(root, path)
if os.path.isfile(full) and not salt.fileserver.is_file_ignored(__opts__, full):
fnd['path'] = full
fnd['rel'] = path
return fnd
return fnd
def envs():
'''
Return the file server environments
'''
return __opts__['file_roots'].keys()
def serve_file(load, fnd):
'''
Return a chunk from a file based on the data received
'''
ret = {'data': '',
'dest': ''}
if 'path' not in load or 'loc' not in load or 'env' not in load:
return ret
if not fnd['path']:
return ret
ret['dest'] = fnd['rel']
gzip = load.get('gzip', None)
with salt.utils.fopen(fnd['path'], 'rb') as fp_:
fp_.seek(load['loc'])
data = fp_.read(__opts__['file_buffer_size'])
if gzip and data:
data = salt.utils.gzip_util.compress(data, gzip)
ret['gzip'] = gzip
ret['data'] = data
return ret
def update():
'''
When we are asked to update (regular interval) lets reap the cache
'''
salt.fileserver.reap_fileserver_cache_dir(os.path.join(__opts__['cachedir'], 'roots/hash'), find_file)
def file_hash(load, fnd):
'''
Return a file hash, the hash type is set in the master config file
'''
if 'path' not in load or 'env' not in load:
return ''
path = fnd['path']
ret = {}
# if the file doesn't exist, we can't get a hash
if not path or not os.path.isfile(path):
return ret
# set the hash_type as it is determined by config-- so mechanism won't change that
ret['hash_type'] = __opts__['hash_type']
# check if the hash is cached
# cache file's contents should be "hash:mtime"
cache_path = os.path.join(__opts__['cachedir'],
'roots/hash',
load['env'],
'{0}.hash.{1}'.format(fnd['rel'],
__opts__['hash_type']))
# if we have a cache, serve that if the mtime hasn't changed
if os.path.exists(cache_path):
with salt.utils.fopen(cache_path, 'rb') as fp_:
hsum, mtime = fp_.read().split(':')
if os.path.getmtime(path) == mtime:
# check if mtime changed
ret['hsum'] = hsum
return ret
# if we don't have a cache entry-- lets make one
ret['hsum'] = salt.utils.get_hash(path, __opts__['hash_type'])
cache_dir = os.path.dirname(cache_path)
# make cache directory if it doesn't exist
if not os.path.exists(cache_dir):
os.makedirs(cache_dir)
# save the cache object "hash:mtime"
with salt.utils.fopen(cache_path, 'w') as fp_:
fp_.write('{0}:{1}'.format(ret['hsum'], os.path.getmtime(path)))
return ret
def file_list(load):
'''
Return a list of all files on the file server in a specified
environment
'''
ret = []
if load['env'] not in __opts__['file_roots']:
return ret
for path in __opts__['file_roots'][load['env']]:
prefix = load['prefix'].strip('/')
for root, dirs, files in os.walk(os.path.join(path, prefix), followlinks=True):
for fname in files:
rel_fn = os.path.relpath(
os.path.join(root, fname),
path
)
if not salt.fileserver.is_file_ignored(__opts__, rel_fn):
ret.append(rel_fn)
return ret
def file_list_emptydirs(load):
'''
Return a list of all empty directories on the master
'''
ret = []
if load['env'] not in __opts__['file_roots']:
return ret
for path in __opts__['file_roots'][load['env']]:
prefix = load['prefix'].strip('/')
for root, dirs, files in os.walk(os.path.join(path, prefix), followlinks=True):
if len(dirs) == 0 and len(files) == 0:
rel_fn = os.path.relpath(root, path)
if not salt.fileserver.is_file_ignored(__opts__, rel_fn):
ret.append(rel_fn)
return ret
def dir_list(load):
'''
Return a list of all directories on the master
'''
ret = []
if load['env'] not in __opts__['file_roots']:
return ret
for path in __opts__['file_roots'][load['env']]:
prefix = load['prefix'].strip('/')
for root, dirs, files in os.walk(os.path.join(path, prefix), followlinks=True):
ret.append(os.path.relpath(root, path))
return ret
|
Python
| 0
|
@@ -2001,24 +2001,25 @@
return ret%0A%0A
+%0A
def update()
@@ -2103,24 +2103,37 @@
che%0A '''%0A
+ try:%0A
salt.fil
@@ -2166,16 +2166,29 @@
che_dir(
+%0A
os.path.
@@ -2232,19 +2232,143 @@
h'),
- find_file)
+%0A find_file%0A )%0A except os.error:%0A # Hash file won't exist if no files have yet been served up%0A pass%0A
%0A%0Ade
@@ -3992,16 +3992,17 @@
rn ret%0A%0A
+%0A
def file
|
4745f1a0dd24913869daf641fc5c920561279aef
|
fix Python tests compute_dt_matrices call to Interaction constructor
|
wrap/utils/functions.py
|
wrap/utils/functions.py
|
"""Set of functions used in python tests"""
import siconos.kernel as SK
import numpy as np
def compute_dt_matrices(A, B, h, TV=False):
# variable declaration
t0 = 0.0 # start time
T = 1 # end time
n, m = B.shape
# Matrix declaration
x0 = np.random.random(n)
Csurface = np.random.random((m, n))
# Declaration of the Dynamical System
if TV:
process_ds = SK.FirstOrderLinearDS(x0, A)
else:
process_ds = SK.FirstOrderLinearTIDS(x0, A)
# Model
process = SK.Model(t0, T)
process.nonSmoothDynamicalSystem().insertDynamicalSystem(process_ds)
# time discretisation
process_time_discretisation = SK.TimeDiscretisation(t0, h)
# Creation of the Simulation
process_simu = SK.TimeStepping(process_time_discretisation, 0)
process_simu.setName("plant simulation")
# Declaration of the integrator
process_integrator = SK.ZeroOrderHoldOSI()
process_simu.insertIntegrator(process_integrator)
rel = SK.FirstOrderLinearTIR(Csurface, B)
nslaw = SK.RelayNSL(m)
inter = SK.Interaction(m, nslaw, rel)
#process.nonSmoothDynamicalSystem().insertInteraction(inter, True)
process.nonSmoothDynamicalSystem().link(inter, process_ds)
process.nonSmoothDynamicalSystem().setControlProperty(inter, True)
# Initialization
process.setSimulation(process_simu)
process.initialize()
# Main loop
process_simu.computeOneStep()
Ad = SK.getMatrix(process_integrator.Ad(process_ds)).copy()
Bd = SK.getMatrix(process_integrator.Bd(process_ds)).copy()
return (Ad, Bd)
def pole_placement(A, B, P):
""" Compute the column vector K such that the eigenvalues of A - B*F are
the ones given by the vector P
A is an nxn matrix, B and P are vectors
Please note that if you want to specify complex eigenvalues, P is a matrix
with 2 rows: the first for the real part of the eigenvalues and the second
for the imaginary part.
To use this function, you need a python interface to the SEVAS fortran
routine, also known as algorithm 718 in TOMS. You can freely obtain the
code at http://calgo.acm.org/718.gz or http://www.netlib.org/toms/718.
Then extract the file DSEVAS.F from the main file.
Please take a look at the license of all the algorithm on ACM ToMS
http://toms.acm.org/AlgPolicy.html
To generate a python interface, use f2py from numpy. A quick and dirty way
to do it is to use the following command:
f2py -c DSEVAS.F -m DSEVAS -l<your blas lib> --noopt
replace <your blas lib> with your favorite blas library. Please note that
there are some glitches with this method and then the optimisations have
to be disabled (--noopt switch).
"""
import DSEVAS
n = A.shape[0]
k = np.int(np.ceil((n**2 - 2*n + 1.0)/4))+1
w = np.int(np.ceil((n**2 + 3*n - 4.0)/2))+1
AA = np.array(A, order='F', dtype='f8')
BB = np.array(B, order='F', dtype='f8')
ieigal = np.zeros((1), dtype=int)
rstor = np.zeros((4, k), dtype='f8', order='F')
istor = np.zeros((k), dtype=int)
cstor = np.zeros((w), dtype='f8')
if P.ndim == 1:
PP = np.zeros((2, n), dtype='f8', order='F')
PP[0, :] = P
else:
PP = np.array(P, dtype='f8', order='F')
K = np.zeros((n), dtype='f8', order='F')
DSEVAS.dsevas(AA, BB, PP, rstor, istor, cstor, 0, 0, ieigal, K, n, n)
errPoles = np.linalg.norm(P-np.linalg.eig(A - B*K)[0], ord=np.inf)
if errPoles > 1e-10:
print("Error, the poles are not placed correctly")
print("The error is: " + str(errPoles))
print("Desired poles: " + str(P))
print("Obtained poles: " + str(np.linalg.eig(A - B*K)[0]))
return (K, errPoles)
|
Python
| 0.000004
|
@@ -1074,19 +1074,16 @@
raction(
-m,
nslaw, r
|
4b0d846fc782ad80399adcbeb6bbea725e0b68f5
|
Fix find and read up
|
salt/wheel/file_roots.py
|
salt/wheel/file_roots.py
|
'''
Read in files from the file_root and save files to the file root
'''
# Import python libs
import os
# Import salt libs
import salt.utils
def find(path, env='base'):
'''
Return a dict of the files located with the given path and environment
'''
# Return a list of paths + text or bin
ret = []
if env not in __opts__['file_roots']:
return ret
for root in __opts__['file_roots'][env]:
full = os.path.join(root, path)
if os.path.isfile(full):
# Add it to the dict
with open(path, 'rb') as fp_:
if salt.utils.istextfile(fp_):
ret.append({full: 'txt'})
else:
ret.append({full: 'bin'})
return ret
def list_env(env='base'):
'''
Return all of the file paths found in an environment
'''
ret = {}
if not env in __opts__['file_roots']:
return ret
for f_root in __opts__['file_roots'][env]:
ret[f_root] = {}
for root, dirs, files in os.walk(f_root):
sub = ret[f_root]
if root != f_root:
# grab subroot ref
sroot = root
above = []
# Populate the above dict
while not os.path.samefile(sroot, f_root):
base = os.path.basename(sroot)
if base:
above.insert(0, base)
sroot = os.path.dirname(sroot)
for aroot in above:
sub = sub[aroot]
for dir_ in dirs:
sub[dir_] = {}
for fn_ in files:
sub[fn_] = 'f'
return ret
def list_roots():
'''
Return all of the files names in all available environments
'''
ret = {}
for env in __opts__['file_roots']:
ret[env] = []
ret[env].append(list_env(env))
return ret
def read(path, env='base'):
'''
Read the contents of a text file, if the file is binary then
'''
# Return a dict of paths + content
ret = []
files = find(path, env)
for fn_ in files:
if fn_ == 'txt':
with open(fn_, 'rb') as fp_:
ret.append({fn_: fp_.read()})
return ret
def write(data, path, env='base', index=0):
'''
Write the named file, by default the first file found is written, but the
index of the file can be specified to write to a lower priority file root
'''
if not env in __opts__['file_roots']:
return 'Named environment {0} is not present'.format(env)
if not len(__opts__['file_roots'][env]) > index:
return 'Specified index {0} in environment {1} is not present'.format(
index, env)
if os.path.isabs(path):
return ('The path passed in {0} is not relative to the environment '
'{1}').format(path, env)
dest = os.path.join(__opts__['file_roots'][env][index], path)
dest_dir = os.path.dirname(dest)
if not os.path.isdir(dest_dir):
os.makedirs(dest_dir)
with open(dest, 'w+') as fp_:
fp_.write(data)
return 'Wrote data to file {0}'.format(dest)
|
Python
| 0
|
@@ -547,20 +547,20 @@
th open(
-path
+full
, 'rb')
@@ -2134,14 +2134,69 @@
-if fn_
+full = fn_.keys()%5B0%5D%0A form = fn_%5Bfull%5D%0A if form
==
@@ -2225,18 +2225,19 @@
h open(f
-n_
+ull
, 'rb')
@@ -2273,18 +2273,19 @@
ppend(%7Bf
-n_
+ull
: fp_.re
|
a40c80eea715626616ef280b87de6bbcc7b73b7f
|
use relative imports
|
transport_information/model/__init__.py
|
transport_information/model/__init__.py
|
# -*- coding: utf-8 -*-
import transport_mode
import transport_vehicle
|
Python
| 0.000001
|
@@ -17,16 +17,23 @@
f-8 -*-%0A
+from .
import t
@@ -46,16 +46,23 @@
rt_mode%0A
+from .
import t
|
08cf82852ab19417f9521af45f2fb296d9e223d6
|
Update batch processing example
|
batch_eg.py
|
batch_eg.py
|
"""Example of using nbparameterise API to substitute variables in 'batch mode'
"""
from nbparameterise import code
from IPython.nbformat import current as nbformat
from IPython.nbconvert.preprocessors.execute import ExecutePreprocessor
from IPython.nbconvert.exporters.notebook import NotebookExporter
from IPython.nbconvert.writers import FilesWriter
stock_names = ['YHOO', 'MSFT', 'GOOG']
with open("Stock display.ipynb") as f:
nb = nbformat.read(f, 'ipynb')
definitions = code.extract_parameters(nb)
for name in stock_names:
print("Rendering for stock", name)
defined = []
for inp in definitions:
if inp.name =='stock':
# Fill in the current value
defined.append(inp.with_value(name))
else:
defined.append(inp)
code.replace_definitions(nb, defined)
# Run
resources = {}
nb, resources = ExecutePreprocessor().preprocess(nb, resources)
# Save
output, resources = NotebookExporter().from_notebook_node(nb, resources)
nbname = "Stock display %s" % name
FilesWriter().write(output, resources, notebook_name=nbname)
|
Python
| 0
|
@@ -113,47 +113,14 @@
ode%0A
-from IPython.nbformat import current as
+import
nbf
@@ -126,32 +126,24 @@
format%0Afrom
-IPython.
nbconvert.pr
@@ -190,32 +190,24 @@
cessor%0Afrom
-IPython.
nbconvert.ex
@@ -256,16 +256,8 @@
rom
-IPython.
nbco
@@ -399,15 +399,20 @@
(f,
-'ipynb'
+as_version=4
)%0A%0Ad
@@ -1062,12 +1062,13 @@
name=nbname)
+%0A
|
97939c334543d9ca4d717a7bc75ae30e848c8a09
|
Replace native.git_repository with skylark rule
|
bazlets.bzl
|
bazlets.bzl
|
NAME = "com_googlesource_gerrit_bazlets"
def load_bazlets(
commit,
local_path = None):
if not local_path:
native.git_repository(
name = NAME,
remote = "https://gerrit.googlesource.com/bazlets",
commit = commit,
)
else:
native.local_repository(
name = NAME,
path = local_path,
)
|
Python
| 0.000381
|
@@ -1,12 +1,83 @@
+load(%22@bazel_tools//tools/build_defs/repo:git.bzl%22, %22git_repository%22)%0A%0A
NAME = %22com_
@@ -199,23 +199,16 @@
-native.
git_repo
|
36cc738308b8ae4435d6becac38fa3c4e96dc491
|
Remove useless code
|
patchboard/patchboard.py
|
patchboard/patchboard.py
|
# patchboard.py
#
# Copyright 2014 BitVault.
from __future__ import print_function
import json
from api import API
from schema_manager import SchemaManager
from client import Client
from util import to_camel_case
def discover(url):
"""
Retrieve the API definition from the given URL and construct
a Patchboard to interface with it.
"""
# Retrieve JSON data from server
# Treat url like a file and read mock JSON for now
with open(url, u"r") as file:
api_spec = json.load(file)
return Patchboard(api_spec)
class Patchboard(object):
"""
The primary client interface to a patchboard server.
"""
def __init__(self, api_spec):
self.api = API(api_spec)
self.schema_manager = SchemaManager(self.api.schemas)
self.endpoint_classes = self.create_endpoint_classes()
client = self.spawn()
# Appears to be unused
#self.resources = client.resources
self.context = client.context
def create_endpoint_classes(self):
classes = {}
for resource_name, mapping in self.api.mappings.iteritems():
if resource_name not in classes:
schema = self.schema_manager.find_name(resource_name)
resource_def = mapping.resource
cls = self.create_class(
resource_name,
resource_def,
schema,
mapping)
classes[resource_name] = cls
return classes
def create_class(self, resource_name, definition, schema, mapping):
# Cannot use unicode for class names
class_name = to_camel_case(str(resource_name))
class_parents = (object,)
# TODO: fill in stub class definition
class_body = """
def __init__(self):
pass
"""
class_dict = {}
exec(class_body, globals(), class_dict)
cls = type(class_name, class_parents, class_dict)
return cls
def spawn(self, context={}):
return Client(context, self.api, self.endpoint_classes)
|
Python
| 0.001027
|
@@ -93,16 +93,46 @@
t json%0A%0A
+from resource import Resource%0A
from api
@@ -1745,200 +1745,42 @@
= (
-object,)%0A # TODO: fill in stub class definition%0A class_body = %22%22%22%0Adef __init__(self):%0A pass%0A %22%22%22%0A class_dict = %7B%7D%0A exec(class_body, globals(), class_dict)
+Resource,)%0A class_dict = %7B%7D
%0A
|
382a715ec78d9bcc53e949e9536bdb1077d3ed98
|
Update docstring
|
pathvalidate/__init__.py
|
pathvalidate/__init__.py
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com>
"""
import re
import dataproperty
__INVALID_PATH_CHARS = '\:*?"<>|'
def validate_filename(filename):
"""
:param str filename: Filename to validate.
:raises ValueError:
If ``filename`` is empty or include invalid char
(``\``, ``:``, ``*``, ``?``, ``"``, ``<``, ``>``, ``|``).
"""
if dataproperty.is_empty_string(filename):
raise ValueError("null path")
match = re.search("[%s]" % (
re.escape(__INVALID_PATH_CHARS)), filename)
if match is not None:
raise ValueError(
"invalid char found in the file path: '%s'" % (
re.escape(match.group())))
def sanitize_filename(filename, replacement_text=""):
filename = filename.strip()
re_replace = re.compile("[%s]" % re.escape(__INVALID_PATH_CHARS))
return re_replace.sub(replacement_text, filename)
def replace_symbol(filename, replacement_text=""):
fname = sanitize_filename(filename, replacement_text)
if fname is None:
return None
re_replace = re.compile("[%s]" % re.escape(" ,.%()/"))
return re_replace.sub(replacement_text, fname)
|
Python
| 0.000001
|
@@ -269,16 +269,20 @@
If
+the
%60%60filena
@@ -305,16 +305,17 @@
include
+s
invalid
@@ -319,16 +319,20 @@
lid char
+(s):
%0A
@@ -777,24 +777,276 @@
t_text=%22%22):%0A
+ %22%22%22%0A Replace invalid chars within the %60%60filename%60%60 with%0A the %60%60replacement_text%60%60.%0A%0A :param str filename: Filename to validate.%0A :param str replacement_text: Replacement text.%0A :return: A replacement string.%0A :rtype: str%0A %22%22%22%0A%0A
filename
|
8065d5677e63d17a231e362115738bc278f00a77
|
Add new tests cases for signature and orders parsing.
|
payu/tests/test_forms.py
|
payu/tests/test_forms.py
|
import pytest
from payu.forms import PayULiveUpdateForm, OrdersField
@pytest.mark.parametrize("payload,signature", [
({
'ORDER_REF': 112457,
'ORDER_DATE': '2012-05-01 15:51:35',
'ORDER': [
{
'PNAME': 'MacBook Air 13 inch',
'PCODE': 'MBA13',
'PINFO': 'Extended Warranty - 5 Years',
'PRICE': 1750,
'PRICE_TYPE': 'GROSS',
'QTY': 1,
'VAT': 24
},
],
'BILL_FNAME': 'Joe',
'BILL_LNAME': 'Doe',
'BILL_COUNTRYCODE': 'RO',
'BILL_PHONE': '+040000000000',
'BILL_EMAIL': 'joe.doe@gmail.com',
'BILL_COMPANY': 'ACME Inc',
'BILL_FISCALCODE': None,
'PRICES_CURRENCY': 'RON',
'CURRENCY': 'RON',
'PAY_METHOD': 'CCVISAMC'
}, 'c6e9b0135191e9103beaf1e0f5ab6096')
])
def test_calculate_correct_hash(payload, signature):
payu_form = PayULiveUpdateForm(initial=payload)
assert payu_form.signature == signature
assert payu_form.fields['ORDER_HASH'].initial == signature
@pytest.mark.parametrize("payload,orders", [
({
'ORDER_REF': 112457,
'ORDER_DATE': '2012-05-01 15:51:35',
'ORDER': [
{
'PNAME': 'MacBook Air 13 inch',
'PCODE': 'MBA13',
'PINFO': 'Extended Warranty - 5 Years',
'PRICE': 1750,
'PRICE_TYPE': 'GROSS',
'QTY': 1,
'VAT': 24
},
],
'BILL_FNAME': 'Joe',
'BILL_LNAME': 'Doe',
'BILL_COUNTRYCODE': 'RO',
'BILL_PHONE': '+040000000000',
'BILL_EMAIL': 'joe.doe@gmail.com',
'BILL_COMPANY': 'ACME Inc',
'BILL_FISCALCODE': None,
'PRICES_CURRENCY': 'RON',
'CURRENCY': 'RON',
'PAY_METHOD': 'CCVISAMC'
}, [{
'VER': None,
'PRICE_TYPE': 'GROSS',
'PRICE': 1750,
'QTY': 1,
'PINFO': 'Extended Warranty - 5 Years',
'PCODE': 'MBA13',
'PNAME': 'MacBook Air 13 inch',
'PGROUP': None,
'VAT': 24
}])
])
def test_orders_parsing(payload, orders):
payu_form = PayULiveUpdateForm(initial=payload)
assert payu_form._prepare_orders(payload['ORDER']) == orders
|
Python
| 0
|
@@ -896,270 +896,9 @@
96')
-%0A%5D)%0Adef test_calculate_correct_hash(payload, signature):%0A payu_form = PayULiveUpdateForm(initial=payload)%0A assert payu_form.signature == signature%0A assert payu_form.fields%5B'ORDER_HASH'%5D.initial == signature%0A%0A%0A@pytest.mark.parametrize(%22payload,orders%22, %5B
+,
%0A
@@ -1120,35 +1120,8 @@
': '
-Extended Warranty - 5 Years
',%0A
@@ -1584,181 +1584,1321 @@
'
-PAY_METHOD': 'CCVISAMC'%0A %7D, %5B%7B%0A 'VER': None,%0A 'PRICE_TYPE': 'GROSS',%0A 'PRICE': 1750,%0A 'QTY': 1,%0A 'PINFO': 'Extended Warranty - 5 Years'
+LANGUAGE': 'RO',%0A 'TEST': True,%0A 'PAY_METHOD': 'CCVISAMC'%0A %7D, '8d6acdf75aa76eb5da0fe6fdefd04723')%0A%5D)%0Adef test_calculate_correct_hash(payload, signature):%0A payu_form = PayULiveUpdateForm(initial=payload)%0A assert payu_form.signature == signature%0A assert payu_form.fields%5B'ORDER_HASH'%5D.initial == signature%0A%0A%0A@pytest.mark.parametrize(%22payload,orders%22, %5B%0A (%7B%0A 'ORDER': %5B%0A %7B%0A 'PNAME': 'MacBook Air 13 inch',%0A 'PCODE': 'MBA13',%0A 'PINFO': 'Extended Warranty - 5 Years',%0A 'PRICE': 1750,%0A 'PRICE_TYPE': 'GROSS',%0A 'QTY': 1,%0A 'VAT': 24%0A %7D,%0A %5D,%0A %7D, %5B%7B%0A 'VER': None,%0A 'PRICE_TYPE': 'GROSS',%0A 'PRICE': 1750,%0A 'QTY': 1,%0A 'PINFO': 'Extended Warranty - 5 Years',%0A 'PCODE': 'MBA13',%0A 'PNAME': 'MacBook Air 13 inch',%0A 'PGROUP': None,%0A 'VAT': 24%0A %7D%5D),%0A (%7B%0A 'ORDER': %5B%0A %7B%0A 'PNAME': 'MacBook Air 13 inch',%0A 'PCODE': 'MBA13',%0A 'PRICE': 1750,%0A 'PRICE_TYPE': 'GROSS',%0A %7D,%0A %5D,%0A %7D, %5B%7B%0A 'VER': None,%0A 'PRICE_TYPE': 'GROSS',%0A 'PRICE': 1750,%0A 'QTY': 1,%0A 'PINFO': None
,%0A
|
ace0fa76496fe59f38debf858d9ad24ace69882a
|
remove the expm_multiply from linalg because I am too noob at python to figure out the cyclic import issues
|
scipy/linalg/__init__.py
|
scipy/linalg/__init__.py
|
"""
====================================
Linear algebra (:mod:`scipy.linalg`)
====================================
.. currentmodule:: scipy.linalg
Linear algebra functions.
.. seealso::
`numpy.linalg` for more linear algebra functions. Note that
although `scipy.linalg` imports most of them, identically named
functions from `scipy.linalg` may offer more or slightly differing
functionality.
Basics
======
.. autosummary::
:toctree: generated/
inv - Find the inverse of a square matrix
solve - Solve a linear system of equations
solve_banded - Solve a banded linear system
solveh_banded - Solve a Hermitian or symmetric banded system
solve_triangular - Solve a triangular matrix
det - Find the determinant of a square matrix
norm - Matrix and vector norm
lstsq - Solve a linear least-squares problem
pinv - Pseudo-inverse (Moore-Penrose) using lstsq
pinv2 - Pseudo-inverse using svd
pinvh - Pseudo-inverse of hermitian matrix
kron - Kronecker product of two arrays
tril - Construct a lower-triangular matrix from a given matrix
triu - Construct an upper-triangular matrix from a given matrix
Eigenvalue Problems
===================
.. autosummary::
:toctree: generated/
eig - Find the eigenvalues and eigenvectors of a square matrix
eigvals - Find just the eigenvalues of a square matrix
eigh - Find the e-vals and e-vectors of a Hermitian or symmetric matrix
eigvalsh - Find just the eigenvalues of a Hermitian or symmetric matrix
eig_banded - Find the eigenvalues and eigenvectors of a banded matrix
eigvals_banded - Find just the eigenvalues of a banded matrix
Decompositions
==============
.. autosummary::
:toctree: generated/
lu - LU decomposition of a matrix
lu_factor - LU decomposition returning unordered matrix and pivots
lu_solve - Solve Ax=b using back substitution with output of lu_factor
svd - Singular value decomposition of a matrix
svdvals - Singular values of a matrix
diagsvd - Construct matrix of singular values from output of svd
orth - Construct orthonormal basis for the range of A using svd
cholesky - Cholesky decomposition of a matrix
cholesky_banded - Cholesky decomp. of a sym. or Hermitian banded matrix
cho_factor - Cholesky decomposition for use in solving a linear system
cho_solve - Solve previously factored linear system
cho_solve_banded - Solve previously factored banded linear system
qr - QR decomposition of a matrix
qr_multiply - QR decomposition and multiplication by Q
qz - QZ decomposition of a pair of matrices
schur - Schur decomposition of a matrix
rsf2csf - Real to complex Schur form
hessenberg - Hessenberg form of a matrix
Matrix Functions
================
.. autosummary::
:toctree: generated/
expm - Matrix exponential using Pade approximation
expm2 - Matrix exponential using eigenvalue decomposition
expm3 - Matrix exponential using Taylor-series expansion
logm - Matrix logarithm
cosm - Matrix cosine
sinm - Matrix sine
tanm - Matrix tangent
coshm - Matrix hyperbolic cosine
sinhm - Matrix hyperbolic sine
tanhm - Matrix hyperbolic tangent
signm - Matrix sign
sqrtm - Matrix square root
funm - Evaluating an arbitrary matrix function
expm_frechet - Frechet derivative of the matrix exponential
expm_multiply - Product of a matrix exponential and a matrix
Matrix Equation Solvers
=======================
.. autosummary::
:toctree: generated/
solve_sylvester - Solve the Sylvester matrix equation
solve_continuous_are - Solve the continuous-time algebraic Riccati equation
solve_discrete_are - Solve the discrete-time algebraic Riccati equation
solve_discrete_lyapunov - Solve the discrete-time Lyapunov equation
solve_lyapunov - Solve the (continous-time) Lyapunov equation
Special Matrices
================
.. autosummary::
:toctree: generated/
block_diag - Construct a block diagonal matrix from submatrices
circulant - Circulant matrix
companion - Companion matrix
hadamard - Hadamard matrix of order 2**n
hankel - Hankel matrix
hilbert - Hilbert matrix
invhilbert - Inverse Hilbert matrix
leslie - Leslie matrix
pascal - Pascal matrix
toeplitz - Toeplitz matrix
tri - Construct a matrix filled with ones at and below a given diagonal
Low-level routines
==================
.. autosummary::
:toctree: generated/
get_blas_funcs
get_lapack_funcs
find_best_blas_type
scipy.linalg.blas
scipy.linalg.lapack
"""
from __future__ import division, print_function, absolute_import
from .linalg_version import linalg_version as __version__
from .misc import *
from .basic import *
from .decomp import *
from .decomp_lu import *
from .decomp_cholesky import *
from .decomp_qr import *
from ._decomp_qz import *
from .decomp_svd import *
from .decomp_schur import *
from .matfuncs import *
from .blas import *
from .lapack import *
from .special_matrices import *
from ._solvers import *
from scipy.sparse.linalg import expm_multiply
__all__ = [s for s in dir() if not s.startswith('_')]
from numpy.dual import register_func
for k in ['norm', 'inv', 'svd', 'solve', 'det', 'eig', 'eigh', 'eigvals',
'eigvalsh', 'lstsq', 'cholesky']:
try:
register_func(k, eval(k))
except ValueError:
pass
try:
register_func('pinv', pinv2)
except ValueError:
pass
del k, register_func
from numpy.testing import Tester
test = Tester().test
bench = Tester().bench
|
Python
| 0
|
@@ -3347,72 +3347,8 @@
ial%0A
- expm_multiply - Product of a matrix exponential and a matrix%0A
%0A%0AMa
@@ -4954,54 +4954,8 @@
rt *
-%0Afrom scipy.sparse.linalg import expm_multiply
%0A%0A__
|
3064bdeaf75a0bec4644340fe06a7bac07bb2e7c
|
Fix pydocstyle
|
f8a_jobs/handlers/invoke_graph_sync.py
|
f8a_jobs/handlers/invoke_graph_sync.py
|
"""Sync all pending packages to Graph DB."""
from os import environ
import urllib
import concurrent.futures
import requests
from .base import BaseHandler
class InvokeGraphSync(BaseHandler):
"""Sync all finished analyses to Graph DB."""
_SERVICE_HOST = environ.get("BAYESIAN_DATA_IMPORTER_SERVICE_HOST", "bayesian-data-importer")
_SERVICE_PORT = environ.get("BAYESIAN_DATA_IMPORTER_SERVICE_PORT", "9192")
_PENDING_API_ENDPOINT = "api/v1/pending"
_SYNC_ALL_ENDPOINT = "api/v1/sync_all"
_INGEST_SERVICE_ENDPOINT = "api/v1/ingest_to_graph"
_PENDING_API_URL = "http://{host}:{port}/{endpoint}".format(
host=_SERVICE_HOST, port=_SERVICE_PORT, endpoint=_PENDING_API_ENDPOINT)
_SYNC_ALL_API_URL = "http://{host}:{port}/{endpoint}".format(
host=_SERVICE_HOST, port=_SERVICE_PORT, endpoint=_SYNC_ALL_ENDPOINT)
_INGEST_API_URL = "http://{host}:{port}/{endpoint}".format(
host=_SERVICE_HOST, port=_SERVICE_PORT, endpoint=_INGEST_SERVICE_ENDPOINT)
BATCH_SIZE = 10
def _fetch_all_counts(self, params={}):
url = "%s?%s" % (self._PENDING_API_URL, urllib.parse.urlencode(params))
self.log.info(url)
response = requests.get(url)
self.log.info(response)
data = response.json()
all_counts = data["all_counts"]
self.log.info(data)
return all_counts
def _fetch_package_versions(self, params={}, offset=None, limit=None):
request_params = params.copy()
request_params.update({"offset": offset, "limit": limit})
url = "%s?%s" % (self._PENDING_API_URL, urllib.parse.urlencode(request_params))
self.log.info(url)
response = requests.get(url)
self.log.info(response)
data = response.json()
return data["pending_list"]
def _perform_sync(self, packages_list):
self.log.info("Invoke graph importer at url: '%s' for %s", self._INGEST_API_URL,
packages_list)
response = requests.post(self._INGEST_API_URL, json=packages_list)
if response.status_code != 200:
raise RuntimeError("Failed to invoke graph import at '%s' for %s" % (
self._INGEST_API_URL, packages_list))
self.log.info("Graph import succeeded with response: %s", response.text)
def execute(self, **kwargs):
"""Start the synchronization of all finished analyses to Graph database."""
# fetch count of pending list
#
# fetch windowed pending list from backend
# start with offset at 0, batch_size = BATCH_SIZE
# while there are more records
# batch = fetch current batch
# schedule graph sync for current batch
# increase offset by query_slice
# end
self.log.info(kwargs)
future_to_params_map = {}
with concurrent.futures.ThreadPoolExecutor(max_workers=4) as executor:
all_counts = self._fetch_all_counts(params=kwargs)
self.log.info(all_counts)
offset = 0
while all_counts > offset:
# for each batch of packages, send request to backend for ingesting this batch
# package_list = [
# {'ecosystem': 'npm', 'name': 'serve-static', 'version': '1.1.7'},...
# ]
packages_list = self._fetch_package_versions(params=kwargs, offset=offset,
limit=self.BATCH_SIZE)
executor.submit(self._perform_sync, packages_list)
offset += self.BATCH_SIZE
for future in concurrent.futures.as_completed(future_to_params_map):
request_data = future_to_params_map[future]
try:
response_data = future.result()
except Exception as exc:
print("FAILURE: %s" % response_data)
print('%r generated an exception: %s' % (response_data, exc))
else:
print("SUCCESS: %s" % request_data)
|
Python
| 0.000488
|
@@ -2414,25 +2414,24 @@
atabase.%22%22%22%0A
-%0A
# fe
|
58d701a99eae46464c280478a7949f964e05f3d2
|
Load testing database URI from environmental variable if set
|
portal/config.py
|
portal/config.py
|
"""Configuration"""
import os
from flask.ext.script import Server
from flask import Config
class BaseConfig(object):
"""Base configuration - override in subclasses"""
ANONYMOUS_USER_ACCOUNT = True
CELERY_BROKER_URL = 'redis://localhost:6379/0'
CELERY_IMPORTS = ('portal.tasks', )
CELERY_RESULT_BACKEND = 'redis://localhost:6379/0'
DEBUG = False
DEFAULT_MAIL_SENDER = 'dontreply@truenth-demo.cirg.washington.edu'
LOG_FOLDER = os.path.join('/var/log', __package__)
LOG_LEVEL = 'DEBUG'
MAIL_USERNAME = 'portal@truenth-demo.cirg.washington.edu'
MAIL_DEFAULT_SENDER = '"TrueNTH" <noreply@truenth-demo.cirg.washington.edu'
CONTACT_SENDTO_EMAIL = MAIL_USERNAME
ERROR_SENDTO_EMAIL = MAIL_USERNAME
OAUTH2_PROVIDER_TOKEN_EXPIRES_IN = 4 * 60 * 60 # units: seconds
PIWIK_DOMAINS = ""
PIWIK_SITEID = 0
PROJECT = "portal"
PROJECT_ROOT =\
os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
SQLALCHEMY_TRACK_MODIFICATIONS = False
SECRET_KEY = 'override this secret key'
TESTING = False
USER_APP_NAME = 'TrueNTH' # used by email templates
USER_AFTER_LOGIN_ENDPOINT = 'auth.next_after_login'
USER_AFTER_CONFIRM_ENDPOINT = USER_AFTER_LOGIN_ENDPOINT
class DefaultConfig(BaseConfig):
"""Default configuration"""
DEBUG = True
SQLALCHEMY_ECHO = False
class TestConfig(BaseConfig):
"""Testing configuration - used by unit tests"""
TESTING = True
SERVER_NAME = 'localhost'
SQLALCHEMY_ECHO = False
SQLALCHEMY_DATABASE_URI =\
'postgresql://test_user:4tests_only@localhost/portal_unit_tests'
WTF_CSRF_ENABLED = False
def early_app_config_access():
"""Workaround to bootstrap configuration problems
Some extensions require config values before the flask app can be
initialized. Expose the same configuration used by the app by
direct access.
Avoid use of this approach, as the app has its own config with a
chain of overwrites. i.e. use app.config whenever possible.
"""
root_path = os.path.join(os.path.dirname(__file__), "..")
_app_config = Config(root_path=root_path)
_app_config.from_pyfile(os.path.join(\
os.path.dirname(__file__), 'application.cfg'))
return _app_config
class ConfigServer(Server): # pragma: no cover
"""Correctly read Flask configuration values when running Flask
Flask-Script 2.0.5 does not read host and port specified in
SERVER_NAME. This subclass fixes that.
Bug: https://github.com/smurfix/flask-script/blob/7dfaf2898d648761632dc5b3ba6654edff67ec57/flask_script/commands.py#L343
Values passed in when instance is called as a function override
those passed during initialization which override configured values
See https://github.com/smurfix/flask-script/issues/108
"""
def __init__(self, port=None, host=None, **kwargs):
"""Override default port and host
Allow fallback to configured values
"""
super(ConfigServer, self).__init__(port=port, host=host, **kwargs)
def __call__(self, app=None, host=None, port=None, *args, **kwargs):
"""Call app.run() with highest precedent configuration values"""
# Fallback to initialized value if None is passed
port = self.port if port is None else port
host = self.host if host is None else host
super(ConfigServer, self).__call__(app=app, host=host,
port=port, *args, **kwargs)
|
Python
| 0
|
@@ -1548,17 +1548,67 @@
SE_URI =
-%5C
+ os.environ.get(%0A 'SQLALCHEMY_DATABASE_URI',
%0A
@@ -1673,16 +1673,23 @@
_tests'%0A
+ )%0A%0A
WTF_
|
798639f4d22bec341667a4067db7a18095d36beb
|
Add missing doc string values.
|
flask_jsondash/data_utils/wordcloud.py
|
flask_jsondash/data_utils/wordcloud.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
flask_jsondash.data_utils.wordcloud
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Utilities for working with wordcloud formatted data.
:copyright: (c) 2016 by Chris Tabor.
:license: MIT, see LICENSE for more details.
"""
from collections import Counter
# Py2/3 compat.
try:
_unicode = unicode
except NameError:
_unicode = str
# NLTK stopwords
stopwords = [
'i', 'me', 'my', 'myself', 'we', 'our', 'ours', 'ourselves', 'you', 'your',
'yours', 'yourself', 'yourselves', 'he', 'him', 'his', 'himself', 'she',
'her', 'hers', 'herself', 'it', 'its', 'itself', 'they', 'them', 'their',
'theirs', 'themselves', 'what', 'which', 'who', 'whom', 'this', 'that',
'these', 'those', 'am', 'is', 'are', 'was', 'were', 'be', 'been', 'being',
'have', 'has', 'had', 'having', 'do', 'does', 'did', 'doing', 'a', 'an',
'the', 'and', 'but', 'if', 'or', 'because', 'as', 'until', 'while', 'of',
'at', 'by', 'for', 'with', 'about', 'against', 'between', 'into',
'through', 'during', 'before', 'after', 'above', 'below', 'to', 'from',
'up', 'down', 'in', 'out', 'on', 'off', 'over', 'under', 'again',
'further', 'then', 'once', 'here', 'there', 'when', 'where', 'why',
'how', 'all', 'any', 'both', 'each', 'few', 'more', 'most', 'other',
'some', 'such', 'no', 'nor', 'not', 'only', 'own', 'same', 'so', 'than',
'too', 'very', 's', 't', 'can', 'will', 'just', 'don', 'should', 'now',
]
def get_word_freq_distribution(words):
"""Get the counted word frequency distribution of all words.
Arg:
words (list): A list of strings indicating words.
Returns:
collections.Counter: The Counter object with word frequencies.
"""
return Counter([w for w in words if w not in stopwords])
def format_4_wordcloud(words, size_multiplier=2):
"""Format words in a way suitable for wordcloud plugin.
Args:
words (list): A list of strings indicating words.
size_multiplier (int, optional): The size multiplier to scale
word sizing. Can improve visual display of word cloud.
"""
return [
{'text': word, 'size': size * size_multiplier}
for (word, size) in words if word
]
|
Python
| 0.000001
|
@@ -2111,16 +2111,81 @@
d cloud.
+%0A%0A Returns:%0A list: A list of dicts w/ appropriate keys.
%0A %22%22%22
|
584e5edc27d8f63f3d780ba72abdd30e4781cb3e
|
fix FeaturedTheme icon, set icon_src for all text_enabled plugins
|
foundation/organisation/cms_plugins.py
|
foundation/organisation/cms_plugins.py
|
from django.utils.translation import ugettext_lazy as _
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from cms.models.pluginmodel import CMSPlugin
from cms.extensions.extension_pool import extension_pool
from sorl.thumbnail import get_thumbnail
from .models import (Project, Theme, FeaturedTheme, FeaturedProject,
ProjectList, NetworkGroup, NetworkGroupList, WorkingGroup,
SignupForm, SideBarExtension)
class FeaturedThemePlugin(CMSPluginBase):
model = FeaturedTheme
module = "OKF"
name = _("Featured Theme")
text_enabled = True
render_template = "organisation/theme_featured.html"
def icon_alt(self, instance):
return 'Theme: %s' % instance.theme.name
def icon_src(self, instance):
im = get_thumbnail(instance.theme.picture, '50x50', quality=99)
return im.url
def render(self, context, instance, placeholder):
context = super(FeaturedThemePlugin, self)\
.render(context, instance, placeholder)
context['object'] = instance.theme
return context
plugin_pool.register_plugin(FeaturedThemePlugin)
class FeaturedProjectPlugin(CMSPluginBase):
model = FeaturedProject
module = "OKF"
name = _("Featured Project")
render_template = "organisation/project_featured.html"
def render(self, context, instance, placeholder):
context = super(FeaturedProjectPlugin, self)\
.render(context, instance, placeholder)
context['project'] = instance.project
return context
plugin_pool.register_plugin(FeaturedProjectPlugin)
class ProjectListPlugin(CMSPluginBase):
model = ProjectList
module = "OKF"
name = _("Project List")
render_template = "organisation/project_list_plugin.html"
def render(self, context, instance, placeholder):
context = super(ProjectListPlugin, self)\
.render(context, instance, placeholder)
results = Project.objects.all()
if instance.theme:
results = results.filter(themes=instance.theme)
if instance.project_type:
results = results.filter(types=instance.project_type)
context['projects'] = results
return context
plugin_pool.register_plugin(ProjectListPlugin)
class ThemesPlugin(CMSPluginBase):
model = CMSPlugin
module = "OKF"
name = _("Theme list")
render_template = "organisation/theme_list.html"
def render(self, context, instance, placeholder):
context = super(ThemesPlugin, self)\
.render(context, instance, placeholder)
context['object_header'] = _("Themes")
context['object_list'] = Theme.objects.all()
return context
plugin_pool.register_plugin(ThemesPlugin)
class NetworkGroupFlagsPlugin(CMSPluginBase):
model = NetworkGroupList
module = "OKF"
name = _("Network Group Flags")
render_template = "organisation/networkgroup_flags.html"
text_enabled = True
def render(self, context, instance, placeholder):
context = super(NetworkGroupFlagsPlugin, self)\
.render(context, instance, placeholder)
context['title'] = instance.get_group_type_display()
context['countries'] = NetworkGroup.objects.countries().filter(
group_type=instance.group_type).order_by('name')
return context
plugin_pool.register_plugin(NetworkGroupFlagsPlugin)
class WorkingGroupPlugin(CMSPluginBase):
model = CMSPlugin
module = "OKF"
name = _("Working Groups")
render_template = "organisation/workinggroup_shortlist.html"
text_enabled = True
def render(self, context, instance, placeholder):
context = super(WorkingGroupPlugin, self)\
.render(context, instance, placeholder)
context['workinggroups'] = WorkingGroup.objects.active()
return context
plugin_pool.register_plugin(WorkingGroupPlugin)
class SignupFormPlugin(CMSPluginBase):
model = SignupForm
module = "OKF"
name = _("Signup Form")
render_template = "organisation/signup_form.html"
def render(self, context, instance, placeholder):
context = super(SignupFormPlugin, self)\
.render(context, instance, placeholder)
context['title'] = instance.title
context['description'] = instance.description
return context
plugin_pool.register_plugin(SignupFormPlugin)
extension_pool.register(SideBarExtension)
|
Python
| 0.000002
|
@@ -1,12 +1,45 @@
+from django.conf import settings%0A
from django.
@@ -271,50 +271,8 @@
ol%0A%0A
-from sorl.thumbnail import get_thumbnail%0A%0A
from
@@ -799,93 +799,72 @@
-im = get_thumbnail(instance.theme.picture, '50x50', quality=99)%0A return im.url
+return settings.STATIC_URL + %22cms/img/icons/plugins/snippet.png%22
%0A%0A
@@ -2967,32 +2967,238 @@
enabled = True%0A%0A
+ def icon_alt(self, instance):%0A return 'Network Group Flags: %25s' %25 instance.theme.name%0A%0A def icon_src(self, instance):%0A return settings.STATIC_URL + %22cms/img/icons/plugins/snippet.png%22%0A%0A
def render(s
@@ -3814,32 +3814,207 @@
enabled = True%0A%0A
+ def icon_alt(self, instance):%0A return 'Working Groups'%0A%0A def icon_src(self, instance):%0A return settings.STATIC_URL + %22cms/img/icons/plugins/snippet.png%22%0A%0A
def render(s
|
e442f6874d71bf876116da6add66b0f099c0887f
|
Fix Tempy Factory parser with void tags
|
tempy/t.py
|
tempy/t.py
|
# -*- coding: utf-8 -*-
# @author: Federico Cerchiari <federicocerchiari@gmail.com>
import importlib
from html.parser import HTMLParser
from .elements import Tag, VoidTag, TagAttrs
class TempyParser(HTMLParser):
"""Custom parser used to translate an html into Tempy Tags.
See https://docs.python.org/3/library/html.parser.html for details on how parsing is performed.
Every tag found by the parser is converted into a TempyTag, every subsequent element will be added
inside this one.
As a result of this, unclosed tags in imput will be closed in the resulting Tempy Tree right before
the parent element is closed.
This behaviour is accidental and should not be used a s a html sanitizing feature.
"""
def __init__(self):
super().__init__()
self.unknown_tag_maker = TempyFactory()
self.tempy_tags = importlib.import_module('.tags', package='tempy')
self._reset()
def _reset(self):
self.result = []
self.current_tag = None
return self
def _make_tempy_tag(self, tag, attrs, void):
"""Searches in tempy.tags for the correct tag to use, if does not exists uses the TempyFactory to
create a custom tag."""
tempy_tag_cls = getattr(self.tempy_tags, tag.title(), None)
if not tempy_tag_cls:
unknow_maker = [self.unknown_tag_maker, self.unknown_tag_maker.Void][void]
tempy_tag_cls = unknow_maker[tag]
attrs = {TagAttrs._TO_SPECIALS.get(k, k): v or True for k, v in attrs}
tempy_tag = tempy_tag_cls(**attrs)
if not self.current_tag:
self.result.append(tempy_tag)
if not void:
self.current_tag = tempy_tag
else:
self.current_tag(tempy_tag)
self.current_tag = self.current_tag.childs[-1]
def handle_starttag(self, tag, attrs):
self._make_tempy_tag(tag, attrs, False)
def handle_startendtag(self, tag, attrs):
self._make_tempy_tag(tag, attrs, True)
def handle_endtag(self, tag):
self.current_tag = self.current_tag.parent
def handle_data(self, data):
if self.current_tag and data.strip():
self.current_tag(data)
def handle_comment(self, data):
pass
def handle_decl(self, decl):
pass
class TempyFactory:
def __init__(self, void_maker=False):
self._void = void_maker
if not self._void:
self.Void = TempyFactory(void_maker=True)
def make_tempy(self, tage_name):
base_class = [Tag, VoidTag][self._void]
return type(tage_name, (base_class, ), {'_%s__tag' % tage_name: tage_name.lower(), '_from_factory': True})
def __getattribute__(self, attr):
try:
return object.__getattribute__(self, attr)
except AttributeError:
return self.make_tempy(attr)
def __getitem__(self, key):
tag = self.make_tempy(key)
return tag
class TempyGod(TempyFactory):
def __init__(self):
super().__init__()
self._parser = TempyParser()
def from_string(self, html_string):
"""Parses an html string and returns a list of Tempy trees."""
self._parser._reset().feed(html_string)
return self._parser.result
def dump(self, tempy_tree_list, filename, pretty=False):
"""Dumps a Tempy object to a python file"""
if not filename:
raise ValueError('"filename" argument should not be none.')
if len(filename.split('.')) > 1 and not filename.endswith('.py'):
raise ValueError('"filename" argument should be a .py extension, if given.')
if not filename.endswith('.py'):
filename += '.py'
with open(filename, 'w') as f:
f.write('# -*- coding: utf-8 -*-\nfrom tempy import T\nfrom tempy.tags import *\n')
for tempy_tree in tempy_tree_list:
f.write(tempy_tree.to_code(pretty=pretty))
return filename
T = TempyGod()
|
Python
| 0
|
@@ -1725,16 +1725,56 @@
else:%0A
+ if not tempy_tag._void:%0A
@@ -1797,32 +1797,36 @@
_tag(tempy_tag)%0A
+
self
|
57a263f8671afd0c0b05443acb192bcccaf3eef7
|
Remove remote_field from guardian.compat (Django < 1.9)
|
guardian/compat.py
|
guardian/compat.py
|
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import handler404, handler500, include, url
from django.contrib.auth.models import AnonymousUser, Group, Permission
from importlib import import_module
import django
import six
import sys
__all__ = [
'User',
'Group',
'Permission',
'AnonymousUser',
'get_user_model',
'import_string',
'user_model_label',
'url',
'patterns',
'include',
'handler404',
'handler500',
]
# Django 1.5 compatibility utilities, providing support for custom User models.
# Since get_user_model() causes a circular import if called when app models are
# being loaded, the user_model_label should be used when possible, with calls
# to get_user_model deferred to execution time
user_model_label = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
try:
from django.contrib.auth import get_user_model
except ImportError:
from django.contrib.auth.models import User
get_user_model = lambda: User
def get_user_model_path():
"""
Returns 'app_label.ModelName' for User model. Basically if
``AUTH_USER_MODEL`` is set at settings it would be returned, otherwise
``auth.User`` is returned.
"""
return getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
def get_user_permission_full_codename(perm):
"""
Returns 'app_label.<perm>_<usermodulename>'. If standard ``auth.User`` is
used, for 'change' perm this would return ``auth.change_user`` and if
``myapp.CustomUser`` is used it would return ``myapp.change_customuser``.
"""
User = get_user_model()
model_name = User._meta.model_name
return '%s.%s_%s' % (User._meta.app_label, perm, model_name)
def get_user_permission_codename(perm):
"""
Returns '<perm>_<usermodulename>'. If standard ``auth.User`` is
used, for 'change' perm this would return ``change_user`` and if
``myapp.CustomUser`` is used it would return ``change_customuser``.
"""
return get_user_permission_full_codename(perm).split('.')[1]
def import_string(dotted_path):
"""
Import a dotted module path and return the attribute/class designated by the
last name in the path. Raise ImportError if the import failed.
Backported from Django 1.7
"""
try:
module_path, class_name = dotted_path.rsplit('.', 1)
except ValueError:
msg = "%s doesn't look like a module path" % dotted_path
six.reraise(ImportError, ImportError(msg), sys.exc_info()[2])
module = import_module(module_path)
try:
return getattr(module, class_name)
except AttributeError:
msg = 'Module "%s" does not define a "%s" attribute/class' % (
dotted_path, class_name)
six.reraise(ImportError, ImportError(msg), sys.exc_info()[2])
# Python 3
try:
unicode = unicode # pyflakes:ignore
basestring = basestring # pyflakes:ignore
str = str # pyflakes:ignore
except NameError:
basestring = unicode = str = str
# Django 1.7 compatibility
# create_permission API changed: skip the create_models (second
# positional argument) if we have django 1.7+ and 2+ positional
# arguments with the second one being a list/tuple
def create_permissions(*args, **kwargs):
from django.contrib.auth.management import create_permissions as original_create_permissions
if len(args) > 1 and isinstance(args[1], (list, tuple)):
args = args[:1] + args[2:]
return original_create_permissions(*args, **kwargs)
__all__ = ['User', 'Group', 'Permission', 'AnonymousUser']
def template_debug_setter(value):
if hasattr(settings, 'TEMPLATE_DEBUG'):
settings.TEMPLATE_DEBUG = value
settings.TEMPLATES[0]['OPTIONS']['DEBUG'] = value
def template_debug_getter():
if hasattr(settings, 'TEMPLATE_DEBUG'):
return settings.TEMPLATE_DEBUG
return settings.TEMPLATES[0]['OPTIONS'].get('DEBUG', False)
# Django 1.9 compatibility
def remote_field(field):
"""
https://docs.djangoproject.com/en/1.9/releases/1.9/#field-rel-changes
"""
if django.VERSION < (1, 9):
return field.rel
return field.remote_field
def remote_model(field):
if django.VERSION < (1, 9):
return remote_field(field).to
return remote_field(field).model
# Django 1.10 compatibility
def is_authenticated(user):
if django.VERSION < (1, 10):
return user.is_authenticated()
return user.is_authenticated
def is_anonymous(user):
if django.VERSION < (1, 10):
return user.is_anonymous()
return user.is_anonymous
try:
from django.urls import reverse, reverse_lazy
except ImportError:
from django.core.urlresolvers import reverse, reverse_lazy
|
Python
| 0
|
@@ -3947,29 +3947,29 @@
%0Adef remote_
-fi
+mod
el
-d
(field):%0A
@@ -3973,289 +3973,81 @@
-%22%22%22%0A https://docs.djangoproject.com/en/1.9/releases/1.9/#field-rel-changes%0A %22%22%22%0A if django.VERSION %3C (1, 9):%0A return field.rel%0A return field.remote_field%0A%0A%0Adef remote_model(field):%0A if django.VERSION %3C (1, 9):%0A return remote_field(field).to%0A return
+if django.VERSION %3C (1, 9):%0A return field.rel.to%0A return field.
remo
@@ -4054,23 +4054,16 @@
te_field
-(field)
.model%0A%0A
|
253361e56ad2b1e331691f0bf3c9010c22c0c9aa
|
Fix tests
|
test/test_blacklists.py
|
test/test_blacklists.py
|
#!/usr/bin/env python3
from glob import glob
from helpers import only_blacklists_changed
def test_blacklist_integrity():
for bl_file in glob('bad_*.txt') + glob('blacklisted_*.txt'):
with open(bl_file, 'r') as lines:
seen = dict()
for lineno, line in enumerate(lines, 1):
if line.endswith('\r\n'):
raise(ValueError('{0}:{1}:DOS line ending'.format(bl_file, lineno)))
if not line.endswith('\n'):
raise(ValueError('{0}:{1}:No newline'.format(bl_file, lineno)))
if line == '\n':
raise(ValueError('{0}:{1}:Empty line'.format(bl_file, lineno)))
if line in seen:
raise(ValueError('{0}:{1}:Duplicate entry {2} (also on line {3})'.format(
bl_file, lineno, line.rstrip('\n'), seen[line])))
seen[line] = lineno
def test_blacklist_pull_diff():
only_blacklists_diff = """helpers.py
test/test_blacklists.py
blacklisted_usernames.txt"""
assert only_blacklists_changed(only_blacklists_diff)
mixed_files_diff = """blacklisted_websites.txt
watched_keywords.txt
bad_keywords.txt"""
assert not only_blacklists_changed(mixed_files_diff)
|
Python
| 0.000003
|
@@ -985,18 +985,28 @@
%22%22%22
-helpers.py
+watched_keywords.txt
%0A
@@ -1036,31 +1036,24 @@
-test/test_blacklists.py
+bad_keywords.txt
%0A
@@ -1095,15 +1095,14 @@
ted_
-usernam
+websit
es.t
@@ -1194,77 +1194,72 @@
%22%22%22
-blacklisted_websites.txt%0A watched_keywords.txt%0A
+helpers.py%0A test/test_blacklists.py%0A
@@ -1274,34 +1274,43 @@
b
-ad_keyword
+lacklisted_username
s.txt%22%22%22%0A
|
e13cfe7a7e215f43e8210fb6d116ccafe80c8756
|
fix names of functions
|
gary/observation/tests/test_rrlyrae.py
|
gary/observation/tests/test_rrlyrae.py
|
# coding: utf-8
"""
Test the RR Lyrae helper functions.
"""
from __future__ import absolute_import, unicode_literals, division, print_function
__author__ = "adrn <adrn@astro.columbia.edu>"
# Standard library
import os, sys
# Third-party
import astropy.units as u
import numpy as np
import pytest
from ..core import *
from ..rrlyrae import *
def test_gaia_rv_error():
d = np.linspace(1.,50.,100)*u.kpc
rv_errs = gaia_rv_error(d)
def test_gaia_pm_error():
d = np.linspace(1.,50.,100)*u.kpc
pm_errs = gaia_pm_error(d)
vtan_errs = pm_errs.to(u.rad/u.yr).value/u.yr*d
vtan_errs = vtan_errs.to(u.km/u.s)
|
Python
| 0.999104
|
@@ -424,25 +424,38 @@
rrs = gaia_r
-v
+adial_velocity
_error(d)%0A%0Ad
@@ -533,25 +533,36 @@
rrs = gaia_p
-m
+roper_motion
_error(d)%0A%0A
|
9681ad615487c282a70e85ded1e3020bc47e71bf
|
modify demonstration.py
|
scripts/demonstration.py
|
scripts/demonstration.py
|
#!/usr/bin/env python
from SPyFFI.Observation import Observation, default
# initialize to default settings
inputs = default
'''
------------_--------------_--------------_--------------_--------------_-------
"camera" inputs change many of the basics of the observations.
------------_--------------_--------------_--------------_--------------_-------
'''
# provide a label, that sets the directory in which outputs will be stored
inputs['camera']['label'] = 'demonstration'
# what is the commanded central ra and dec of the field?
#inputs['camera']['ra'] = 82.0
#inputs['camera']['dec'] = 1.0
# if subarray = an integer number
# create a square subarray, with that many pixels on a side
# if subarray = None,
# simply creating four separate CCDs, with their default sizes
inputs['camera']['subarray'] = None
'''
------------_--------------_--------------_--------------_--------------_-------
"catalog" inputs affect what stars will be used to populate the images.
------------_--------------_--------------_--------------_--------------_-------
'''
# if the catalog name is set to 'sky', draw stars from the real sky (UCAC4)
#inputs['catalog']['name'] = 'sky'
#inputs['catalog']['skykw']['faintlimit'] = 10.0
# if the catalog name is set to 'testpattern', create a uniformly space grid
inputs['catalog']['name'] = 'testpattern'
'''
------------_--------------_--------------_--------------_--------------_-------
"jitter" inputs change the jitter, with an effect on both exposure-to-exposure
nudges and intra-exposure blurring. They may require (expsensive) recomputation
of the PSF library.
------------_--------------_--------------_--------------_--------------_-------
'''
# the code looks for a jitter timeseries (sampled at any cadence faster than 2s)
# located in '$SPYFFIDATA/inputs/{rawjitterbasename}'
# inputs['jitter']['rawjitterbasename'] = "AttErrTimeArcsec_80k.dat"
# if jitterrms is set to a numerical value,
# the code will rescale so that sqrt(dx**2 + dy**2) = jitterrms (in arcsec)
# if jitterrms is set to None,
# the code will use the input jitter timeseries as is
inputs['jitter']['jitterrms'] = None
# this will amplify the jitter between exposures (without reblurring the PSFs)
inputs['jitter']['amplifyinterexposurejitter'] = 1.0
'''
------------_--------------_--------------_--------------_--------------_-------
"expose" keywords determine how individual exposures are generated.
------------_--------------_--------------_--------------_--------------_-------
'''
# should the exposures write out to file(s)?
inputs['expose']['writesimulated'] = True
# should we write an image of the cosmic rays?
inputs['expose']['writecosmics'] = False
# should we write an image with no noise?
inputs['expose']['writenoiseless'] = True
# down to what magnitudes should we include? (for fast testing)
inputs['expose']['magnitudethreshold'] = 999
# should the exposures be jittered?
inputs['expose']['jitter'] = True
# should readout smear be included?
inputs['expose']['smear'] = False
# should we skip cosmic injection?
inputs['expose']['skipcosmics'] = True
# should we pretend cosmics don't exist?
# (if skipcosmics is false and correctcosmics is true,
# a cosmic ray image will be made but not added to the final image)
inputs['expose']['correctcosmics'] = True
# should we display images in ds9, as they're created?
inputs['expose']['display'] = True
'''
------------_--------------_--------------_--------------_--------------_-------
"observation" keywords set the overall group of exposures to be made.
------------_--------------_--------------_--------------_--------------_-------
'''
# cadencestodo should be a dictionary of cadences to expose, for example:
# "{2:3, 120:3, 1800:3}" generates (3 each of 2s, 120s, 1800s exposures)
inputs['observation']['cadencestodo'] = {1800:3, 2:3, 120:3}
# (this links closely to cadences to do)
# stamps should be a dictionary, with cadences as keys
# if a cadence's entry is:
# None -- a full-frame image will be produced
# an integer -- this number of postage stamps will be randomly places
# a string -- this will be interpreted as a filename pointing to a
# three-column ascii text file to define where the stamps
# should be placed. the columns should be:
# [1] RA (in degrees)
# [2] Dec (in degrees)
# [3] radius (in pixels) of postage stamp
# (NOT YET IMPLEMENTED! JUNE 2,2016)
inputs['camera']['stamps'] = {2:None, 120:None, 1800:None}
'''
------------_--------------_--------------_--------------_--------------_-------
finally, create an observation object, using all these inputs, and make images!
------------_--------------_--------------_--------------_--------------_-------
'''
# generate the observation object
o = Observation(inputs)
# use that object to perform all the exposures
o.create()
|
Python
| 0.000001
|
@@ -4582,16 +4582,171 @@
:None%7D%0A%0A
+# a dictionary, like those above%0A# should exposures of a particular cadence be compressed?%0Ainputs%5B'expose'%5D%5B'compress'%5D = %7B2:True, 120:True, 1800:False%7D%0A%0A%0A
%0A%0A'''%0A--
|
ff2cfb51b1fa30d0103bc782843f69fea08e0d51
|
Fix formatting for table declaration
|
girder/utility/assetstore_utilities.py
|
girder/utility/assetstore_utilities.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright 2013 Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
from .filesystem_assetstore_adapter import FilesystemAssetstoreAdapter
from .gridfs_assetstore_adapter import GridFsAssetstoreAdapter
from .s3_assetstore_adapter import S3AssetstoreAdapter
from girder.constants import AssetstoreType
from girder import events
assetstoreTable = {AssetstoreType.FILESYSTEM: FilesystemAssetstoreAdapter,
AssetstoreType.GRIDFS: GridFsAssetstoreAdapter,
AssetstoreType.S3: S3AssetstoreAdapter}
def getAssetstoreAdapter(assetstore, instance=True):
"""
This is a factory method that will return the appropriate assetstore adapter
for the specified assetstore. The returned object will conform to
the interface of the AbstractAssetstoreAdapter.
:param assetstore: The assetstore document used to instantiate the adapter.
:type assetstore: dict
:param instance: Whether to return an instance of the adapter or the class.
If you are performing validation, set this to False to avoid throwing
unwanted exceptions during instantiation.
:type instance: bool
:returns: An adapter descending from AbstractAssetstoreAdapter
"""
storeType = assetstore['type']
cls = assetstoreTable.get(storeType)
if cls is None:
e = events.trigger('assetstore.adapter.get', assetstore)
if len(e.responses) > 0:
cls = e.responses[-1]
else:
raise Exception('No AssetstoreAdapter for type: %s.' % storeType)
if instance:
return cls(assetstore)
else:
return cls
def setAssetstoreAdapter(storeType, cls):
if storeType not in assetstoreTable:
raise Exception('Illegal assetstore type code: "%s"' % (storeType))
assetstoreTable[storeType] = cls
def fileIndexFields():
"""
This will return a set of all required index fields from all of the
different assetstore types.
"""
return list(set(
FilesystemAssetstoreAdapter.fileIndexFields() +
GridFsAssetstoreAdapter.fileIndexFields() +
S3AssetstoreAdapter.fileIndexFields()
))
|
Python
| 0.000042
|
@@ -1067,16 +1067,21 @@
able = %7B
+%0A
Assetsto
@@ -1124,39 +1124,24 @@
oreAdapter,%0A
-
Assetsto
@@ -1180,31 +1180,16 @@
dapter,%0A
-
Asse
@@ -1222,16 +1222,17 @@
eAdapter
+%0A
%7D%0A%0A%0Adef
|
0cafc534c1b0398c47fbdb279df196e02e68fb11
|
Fix decoding of PID in URLs
|
gmn/src/d1_gmn/app/views/decorators.py
|
gmn/src/d1_gmn/app/views/decorators.py
|
# -*- coding: utf-8 -*-
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2016 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""View decorators
"""
from __future__ import absolute_import
import functools
import d1_gmn.app.auth
import d1_gmn.app.revision
import d1_gmn.app.sysmeta
import d1_gmn.app.views.asserts
import d1_gmn.app.views.util
import d1_common.const
import d1_common.types
import d1_common.types.exceptions
import d1_common.url
import django.conf
# ------------------------------------------------------------------------------
# Series ID (SID)
# ------------------------------------------------------------------------------
def resolve_sid(f):
"""Decorator that adds SID resolve and PID validation to view handlers.
- For v1 calls, assume that {did} is a pid and raise NotFound exception
if it's not valid.
- For v2 calls, if DID is a valid PID, return it. If not, try to
resolve it as a SID and, if successful, return the new PID. Else, raise
NotFound exception.
"""
@functools.wraps(f)
def wrapper(request, did, *args, **kwargs):
pid = resolve_sid_func(request, did)
return f(request, pid, *args, **kwargs)
return wrapper
def resolve_sid_func(request, did):
if d1_gmn.app.views.util.is_v1_api(request):
d1_gmn.app.views.asserts.is_pid_of_existing_object(did)
return did
elif d1_gmn.app.views.util.is_v2_api(request):
if d1_gmn.app.sysmeta.is_pid(did):
return did
elif d1_gmn.app.revision.is_sid(did):
return d1_gmn.app.revision.resolve_sid(did)
else:
raise d1_common.types.exceptions.NotFound(
0, u'Unknown identifier. id="{}"'.format(did), identifier=did
)
else:
assert False, u'Unable to determine API version'
def decode_id(f):
"""Decorator that decodes the SID or PID extracted from URL path segment
by Django.
"""
# TODO: Currently, Django passes percent-encoded params to views when they
# were extracted from URL path segments by the Django URL regex parser and
# dispatcher. IMO, that's a bug and I'm working with Django devs to see if
# this can be fixed. Update this accordingly.
@functools.wraps(f)
def wrapper(request, did, *args, **kwargs):
# return f(request, d1_common.url.decodeQueryElement(did), *args, **kwargs)
return f(request, did, *args, **kwargs)
return wrapper
# ------------------------------------------------------------------------------
# Auth
# ------------------------------------------------------------------------------
# The following decorators check if the subject in the provided client side
# certificate has the permissions required to perform a given action. If
# the required permissions are not present, a NotAuthorized exception is
# return to the client.
#
# The decorators require the first argument to be request and the second to
# be PID.
def trusted_permission(f):
"""Access only by D1 infrastructure.
"""
@functools.wraps(f)
def wrapper(request, *args, **kwargs):
trusted(request)
return f(request, *args, **kwargs)
return wrapper
def list_objects_access(f):
"""Access to listObjects() controlled by settings.PUBLIC_OBJECT_LIST.
"""
@functools.wraps(f)
def wrapper(request, *args, **kwargs):
if not django.conf.settings.PUBLIC_OBJECT_LIST:
trusted(request)
return f(request, *args, **kwargs)
return wrapper
def get_log_records_access(f):
"""Access to getLogRecords() controlled by settings.PUBLIC_LOG_RECORDS.
"""
@functools.wraps(f)
def wrapper(request, *args, **kwargs):
if not django.conf.settings.PUBLIC_LOG_RECORDS:
trusted(request)
return f(request, *args, **kwargs)
return wrapper
def trusted(request):
if not d1_gmn.app.auth.is_trusted_subject(request):
raise d1_common.types.exceptions.NotAuthorized(
0, u'Access allowed only for trusted subjects. active_subjects="{}", '
u'trusted_subjects="{}"'.format(
d1_gmn.app.auth.format_active_subjects(request),
d1_gmn.app.auth.get_trusted_subjects_string()
)
)
def assert_create_update_delete_permission(f):
"""Access only by subjects with Create/Update/Delete permission and by
trusted infrastructure (CNs).
"""
@functools.wraps(f)
def wrapper(request, *args, **kwargs):
d1_gmn.app.auth.assert_create_update_delete_permission(request)
return f(request, *args, **kwargs)
return wrapper
def authenticated(f):
"""Access only with a valid session.
"""
@functools.wraps(f)
def wrapper(request, *args, **kwargs):
if d1_common.const.SUBJECT_AUTHENTICATED not in request.all_subjects_set:
raise d1_common.types.exceptions.NotAuthorized(
0,
u'Access allowed only for authenticated subjects. Please reconnect with '
u'a valid DataONE session certificate. active_subjects="{}"'.
format(d1_gmn.app.auth.format_active_subjects(request))
)
return f(request, *args, **kwargs)
return wrapper
def verified(f):
"""Access only with a valid session where the primary subject is verified.
"""
@functools.wraps(f)
def wrapper(request, *args, **kwargs):
if d1_common.const.SUBJECT_VERIFIED not in request.all_subjects_set:
raise d1_common.types.exceptions.NotAuthorized(
0,
u'Access allowed only for verified accounts. Please reconnect with a '
u'valid DataONE session certificate in which the identity of the '
u'primary subject has been verified. active_subjects="{}"'
.format(d1_gmn.app.auth.format_active_subjects(request))
)
return f(request, *args, **kwargs)
return wrapper
def required_permission(f, level):
"""Assert that subject has access at given level or higher for object.
"""
@functools.wraps(f)
def wrapper(request, pid, *args, **kwargs):
d1_gmn.app.auth.assert_allowed(request, level, pid)
return f(request, pid, *args, **kwargs)
return wrapper
def changepermission_permission(f):
"""Assert that subject has changePermission or high for object.
"""
return required_permission(f, d1_gmn.app.auth.CHANGEPERMISSION_LEVEL)
def write_permission(f):
"""Assert that subject has write permission or higher for object.
"""
return required_permission(f, d1_gmn.app.auth.WRITE_LEVEL)
def read_permission(f):
"""Assert that subject has read permission or higher for object.
"""
return required_permission(f, d1_gmn.app.auth.READ_LEVEL)
|
Python
| 0.000266
|
@@ -2884,18 +2884,16 @@
gs):%0A
- #
return
@@ -2958,24 +2958,26 @@
*kwargs)%0A
+ #
return f(re
|
38e224c282c62ad358c753eb707cf71ad1f00aff
|
fix os x gyp settings
|
binding.gyp
|
binding.gyp
|
{
"targets": [
{
"target_name": "detection",
"sources": [
"src/detection.cpp",
"src/detection.h",
"src/deviceList.cpp"
],
"include_dirs" : [
"<!(node -e \"require('nan')\")"
],
'conditions': [
['OS=="win"',
{
'sources': [
"src/detection_win.cpp"
],
'include_dirs+':
[
# Not needed now
]
}
],
['OS=="mac"',
{
'sources': [
"src/detection_mac.cpp"
]
}
],
['OS=="linux"',
{
'sources': [
"src/detection_linux.cpp"
],
'link_settings': {
'libraries': [
'-ludev'
]
}
}
]
]
}
]
}
|
Python
| 0
|
@@ -577,24 +577,119 @@
on_mac.cpp%22%0A
+ %5D,%0A %22libraries%22: %5B%0A %22-framework%22, %0A %22IOKit%22%0A
|
de0180ccccdd3e2b83c6a7188fd2688c64c70580
|
add rpath linker option in binding.gyp when --sqlite option is used with a custom sqlite
|
binding.gyp
|
binding.gyp
|
{
'includes': [ 'deps/common-sqlite.gypi' ],
'variables': {
'sqlite%':'internal',
},
'targets': [
{
'target_name': 'node_sqlite3',
'conditions': [
['sqlite != "internal"', {
'libraries': [
'-L<@(sqlite)/lib',
'-lsqlite3'
],
'include_dirs': [ '<@(sqlite)/include' ]
},
{
'dependencies': [
'deps/sqlite3.gyp:sqlite3'
]
}
]
],
'sources': [
'src/database.cc',
'src/node_sqlite3.cc',
'src/statement.cc'
],
}
]
}
|
Python
| 0
|
@@ -359,16 +359,111 @@
include'
+ %5D,%0A 'conditions': %5B %5B 'OS==%22linux%22', %7B'libraries+':%5B'-Wl,-rpath=%3C@(sqlite)/lib'%5D%7D %5D
%5D%0A
|
c908f4f9b602dcfea81847144cfd4e43fe308798
|
Bump OSX minimum to 10.11
|
binding.gyp
|
binding.gyp
|
{
'variables': {
'libsass_ext%': '',
},
'targets': [
{
'target_name': 'binding',
'win_delay_load_hook': 'true',
'sources': [
'src/binding.cpp',
'src/create_string.cpp',
'src/custom_function_bridge.cpp',
'src/custom_importer_bridge.cpp',
'src/sass_context_wrapper.cpp',
'src/sass_types/boolean.cpp',
'src/sass_types/color.cpp',
'src/sass_types/error.cpp',
'src/sass_types/factory.cpp',
'src/sass_types/list.cpp',
'src/sass_types/map.cpp',
'src/sass_types/null.cpp',
'src/sass_types/number.cpp',
'src/sass_types/string.cpp'
],
'msvs_settings': {
'VCLinkerTool': {
'SetChecksum': 'true'
}
},
'xcode_settings': {
'CLANG_CXX_LIBRARY': 'libc++',
'OTHER_LDFLAGS': [],
'GCC_ENABLE_CPP_EXCEPTIONS': 'NO',
'MACOSX_DEPLOYMENT_TARGET': '10.7'
},
'include_dirs': [
'<!(node -e "require(\'nan\')")',
],
'conditions': [
['libsass_ext == "" or libsass_ext == "no"', {
'dependencies': [
'src/libsass.gyp:libsass',
]
}],
['libsass_ext == "auto"', {
'cflags_cc': [
'<!(pkg-config --cflags libsass)',
],
'link_settings': {
'ldflags': [
'<!(pkg-config --libs-only-other --libs-only-L libsass)',
],
'libraries': [
'<!(pkg-config --libs-only-l libsass)',
],
}
}],
['libsass_ext == "yes"', {
'cflags_cc': [
'<(libsass_cflags)',
],
'link_settings': {
'ldflags': [
'<(libsass_ldflags)',
],
'libraries': [
'<(libsass_library)',
],
}
}]
]
}
]
}
|
Python
| 0.000015
|
@@ -954,9 +954,10 @@
'10.
-7
+11
'%0A
|
3fe8dd01906fe2d8e3e52b537acd34f65073ca01
|
remove the cflags
|
binding.gyp
|
binding.gyp
|
{
"targets": [
{
"target_name": "objc",
"sources": [
"src/binding/objc.cc",
"src/binding/Proxy.cc",
"src/binding/utils.cc",
"src/binding/Invocation.cc",
"src/binding/constants.cpp"
],
"include_dirs": [
"<!(node -e \"require('nan')\")",
],
"cflags": [
"-std=c++14",
"-stdlib=libc++"
],
"xcode_settings": {
"OTHER_CFLAGS": [
"-std=c++14",
"-stdlib=libc++"
]
}
}
]
}
|
Python
| 0.998747
|
@@ -320,82 +320,8 @@
%5D,%0A
- %22cflags%22: %5B%0A %22-std=c++14%22,%0A %22-stdlib=libc++%22%0A %5D,%0A
|
0a90376144ee5568e6e140cbf657d7e85070f1f1
|
remove the matplotlib agg setting
|
tests/base/test_view.py
|
tests/base/test_view.py
|
from __future__ import print_function
import matplotlib
matplotlib.use('Agg')
import unittest
import numpy as np
import matplotlib.pyplot as plt
import discretize
from discretize import Tests, utils
import warnings
import pytest
np.random.seed(16)
TOL = 1e-1
class Cyl3DView(unittest.TestCase):
def setUp(self):
self.mesh = discretize.CylMesh([10, 4, 12])
def test_incorrectAxesWarnings(self):
# axes aren't polar
fig, ax = plt.subplots(1, 1)
# test z-slice
with pytest.warns(UserWarning):
self.mesh.plotGrid(slice='z', ax=ax)
# axes aren't right shape
with pytest.warns(UserWarning):
self.mesh.plotGrid(slice='both', ax=ax)
self.mesh.plotGrid(ax=ax)
# this should be fine
self.mesh.plotGrid(slice='theta', ax=ax)
fig, ax = plt.subplots(2, 1)
# axes are right shape, but not polar
with pytest.warns(UserWarning):
self.mesh.plotGrid(slice='both', ax=ax)
self.mesh.plotGrid(ax=ax)
# these should be fine
self.mesh.plotGrid()
ax0 = plt.subplot(121, projection='polar')
ax1 = plt.subplot(122)
self.mesh.plotGrid(slice='z', ax=ax0) # plot z only
self.mesh.plotGrid(slice='theta', ax=ax1) # plot theta only
self.mesh.plotGrid(slice='both', ax=[ax0, ax1]) # plot both
self.mesh.plotGrid(slice='both', ax=[ax1, ax0]) # plot both
self.mesh.plotGrid(ax=[ax1, ax0]) # plot both
def test_plotImage(self):
with self.assertRaises(Exception):
self.mesh.plotImage(np.random.rand(self.mesh.nC))
if __name__ == '__main__':
unittest.main()
|
Python
| 0.000001
|
@@ -52,30 +52,8 @@
tlib
-%0Amatplotlib.use('Agg')
%0A%0Aim
|
a114325be4f81cd34438f96fbc134a7881d5fe7a
|
Add link to get_input_var_names
|
bmi/vars.py
|
bmi/vars.py
|
#! /usr/bin/env python
class BmiVars(object):
"""Defines an interface for converting a standalone model into an
integrated modeling framework component.
"""
def get_var_type(self, long_var_name):
"""Returns the type of the given variable.
Parameters
----------
long_var_name : str
An input or output variable name, a CSDMS Standard Name.
Returns
-------
str
The Python variable type; e.g., `str`, `int`, `float`.
"""
pass
def get_var_units(self, long_var_name):
"""Returns the units of the given variable.
Standard unit names, in lower case, should be used, such as
"meters" or "seconds". Standard abbreviations, like "m" for
meters, are also supported. For variables with compound units,
each unit name is separated by a single space, with exponents
other than 1 placed immediately after the name, as in "m s-1"
for velocity, "W m-2" for an energy flux, or "km2" for an
area.
Parameters
----------
long_var_name : str
An input or output variable name, a CSDMS Standard Name.
Returns
-------
str
The variable units.
Notes
-----
CSDMS uses the UDUNITS standard from Unidata.
"""
pass
def get_var_nbytes(self, long_var_name):
"""Returns the size, in bytes, of the given variable.
Parameters
----------
long_var_name : str
An input or output variable name, a CSDMS Standard Name.
Returns
-------
int
The size of the variable, counted in bytes.
"""
pass
def get_var_grid(self, long_var_name):
"""Returns the identifier of the grid associated with a given
variable.
Parameters
----------
long_var_name : str
An input or output variable name, a CSDMS Standard Name.
Returns
-------
int
The grid identifier.
"""
pass
|
Python
| 0.000001
|
@@ -2082,16 +2082,169 @@
ifier.%0A%0A
+ See Also%0A --------%0A bmi.info.BmiInfo.get_input_var_names : Get %60long_var_name%60 from this method or from **get_output_var_names**.%0A%0A
|
390059007169b964649b3ec8af84503b38e41e97
|
refactor date formatting
|
postweb/utils.py
|
postweb/utils.py
|
import bleach
from django.conf import settings
import dateutil.parser
import markdown
markdown = markdown.Markdown()
# Tags suitable for rendering markdown
# From https://github.com/yourcelf/bleach-allowlist/blob/main/bleach_allowlist/bleach_allowlist.py
MARKDOWN_TAGS = [
"h1",
"h2",
"h3",
"h4",
"h5",
"h6",
"b",
"i",
"strong",
"em",
"tt",
"p",
"br",
"span",
"div",
"blockquote",
"code",
"pre",
"hr",
"ul",
"ol",
"li",
"dd",
"dt",
"img",
"a",
"sub",
"sup",
]
MARKDOWN_ATTRS = {
"*": ["id"],
"img": ["src", "alt", "title"],
"a": ["href", "alt", "title"],
}
def service_url(service, path=""):
"""Construct a URL for accessing the named service."""
if path.startswith("/"):
path = path[1:]
service_def = settings.SERVICES.get(service, None)
if service_def is None:
raise ValueError(f"No service named {service} configured in settings.SERVICES")
endpoint = service_def.get("endpoint", None)
if endpoint is None:
raise ValueError(f"No endpoint configured in settings.SERVICES for {service}")
if not endpoint.endswith("/"):
endpoint = endpoint + "/"
return endpoint + path
def represent_date(iso_datetime_str):
"""Converts ISO-8601 datetime representation to something more readable.
TODO: This should be internationalized.
"""
dt = dateutil.parser.parse(iso_datetime_str)
return dt.strftime("%a %b %d %Y %I:%M %p")
def markdown_to_html(markdown_text):
"""Converts Markdown to HTML.
To avoid XSS vulnerabilities, only certain HTML tags will be allowed
in the Markdown text.
"""
return bleach.clean(
markdown.convert(markdown_text),
tags=MARKDOWN_TAGS,
attributes=MARKDOWN_ATTRS,
)
|
Python
| 0.000001
|
@@ -680,16 +680,65 @@
%22%5D,%0A%7D%0A%0A%0A
+PREFERRED_DATE_FORMAT = %22%25a %25b %25d %25Y %25I:%25M %25p%22%0A%0A%0A
def serv
@@ -1552,38 +1552,37 @@
trftime(
-%22%25a %25b %25d %25Y %25I:%25M %25p%22
+PREFERRED_DATE_FORMAT
)%0A%0A%0Adef
|
d0ac7153cd9a88c5a9c6edef4f6d415b4d88143b
|
make admin classes overridable
|
pinax/referrals/admin.py
|
pinax/referrals/admin.py
|
from django.contrib import admin
from .models import Referral, ReferralResponse
admin.site.register(
Referral,
list_display=[
"user",
"code",
"label",
"redirect_to",
"target_content_type",
"target_object_id"
],
readonly_fields=["code", "created_at"],
list_filter=["target_content_type", "created_at"],
search_fields=["user__first_name", "user__last_name", "user__email", "user__username", "code"]
)
admin.site.register(
ReferralResponse,
list_display=[
"referral",
"session_key",
"user",
"ip_address",
"action"
],
readonly_fields=["referral", "session_key", "user", "ip_address", "action"],
list_filter=["action", "created_at"],
search_fields=["referral__code", "referral__user__username", "ip_address"]
)
|
Python
| 0.000001
|
@@ -75,27 +75,24 @@
sponse%0A%0A
+%0A@
admin.
-site.
register
@@ -96,22 +96,56 @@
ter(
-%0A Referral,
+Referral)%0Aclass ReferralAdmin(admin.ModelAdmin):
%0A
@@ -149,33 +149,35 @@
list_display
-=
+ =
%5B%0A %22user%22
@@ -293,25 +293,24 @@
ct_id%22%0A %5D
-,
%0A readonl
@@ -317,17 +317,19 @@
y_fields
-=
+ =
%5B%22code%22,
@@ -338,25 +338,24 @@
created_at%22%5D
-,
%0A list_fi
@@ -358,17 +358,19 @@
t_filter
-=
+ =
%5B%22target
@@ -390,33 +390,32 @@
%22, %22created_at%22%5D
-,
%0A search_fiel
@@ -416,17 +416,19 @@
h_fields
-=
+ =
%5B%22user__
@@ -504,22 +504,17 @@
e%22%5D%0A
-)
%0A%0A
+@
admin.
-site.
regi
@@ -522,30 +522,72 @@
ter(
-%0A ReferralResponse,
+ReferralResponse)%0Aclass ReferralResponseAdmin(admin.ModelAdmin):
%0A
@@ -599,17 +599,19 @@
_display
-=
+ =
%5B%0A
@@ -707,17 +707,16 @@
n%22%0A %5D
-,
%0A rea
@@ -723,25 +723,27 @@
donly_fields
-=
+ =
%5B%22referral%22,
@@ -789,17 +789,16 @@
action%22%5D
-,
%0A lis
@@ -805,17 +805,19 @@
t_filter
-=
+ =
%5B%22action
@@ -832,17 +832,16 @@
ted_at%22%5D
-,
%0A sea
@@ -850,17 +850,19 @@
h_fields
-=
+ =
%5B%22referr
@@ -914,10 +914,8 @@
dress%22%5D%0A
-)%0A
|
5623eb94b0114c011bc5f9df62c1fc1dd6751b6a
|
Fix some compatibily issue
|
EMSL_api.py
|
EMSL_api.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""EMSL Api.
Usage:
EMSL_api.py list_basis [--basis=<basis_name>...]
[--atom=<atom_name>...]
[--db_path=<db_path>]
[--average_mo_number]
EMSL_api.py list_atoms --basis=<basis_name>
[--db_path=<db_path>]
EMSL_api.py get_basis_data --basis=<basis_name>
[--atom=<atom_name>...]
[--db_path=<db_path>]
[(--save [--path=<path>])]
[--check=<program_name>]
[--treat_l]
EMSL_api.py list_formats
EMSL_api.py create_db --format=<format>
[--db_path=<db_path>]
[--no-contraction]
EMSL_api.py (-h | --help)
EMSL_api.py --version
Options:
-h --help Show this screen.
--version Show version.
--no-contraction Basis functions are not contracted
<db_path> is the path to the SQLite3 file containing the Basis sets.
By default is $EMSL_API_ROOT/db/Gausian_uk.db
Example of use:
./EMSL_api.py list_basis --atom Al --atom U
./EMSL_api.py list_basis --atom S --basis 'cc-pV*' --average_mo_number
./EMSL_api.py list_atoms --basis ANO-RCC
./EMSL_api.py get_basis_data --basis 3-21++G*
"""
version = "0.8.1"
import os
from src.misc.docopt import docopt
from src.EMSL_dump import EMSL_dump
from src.EMSL_local import EMSL_local
if __name__ == '__main__':
arguments = docopt(__doc__, version='EMSL Api ' + version)
# ___
# | ._ o _|_
# _|_ | | | |_
#
if arguments["--db_path"]:
db_path = arguments["--db_path"]
else:
db_path = os.path.dirname(__file__) + "/db/GAMESS-US.db"
# Check the db
try:
if not(arguments['create_db']):
from src.EMSL_local import checkSQLite3
db_path, db_path_changed = checkSQLite3(db_path)
except:
raise
# _ _ _ ______ _
# | | (_) | | | ___ \ (_)
# | | _ ___| |_ | |_/ / __ _ ___ _ ___
# | | | / __| __| | ___ \/ _` / __| / __|
# | |___| \__ \ |_ | |_/ / (_| \__ \ \__ \
# \_____/_|___/\__| \____/ \__,_|___/_|___/
if arguments["list_basis"]:
e = EMSL_local(db_path=db_path)
l = e.list_basis_available(arguments["--atom"],
arguments["--basis"],
arguments["--average_mo_number"])
if arguments["--average_mo_number"]:
for name, des, avg in l:
print "- '{}' ({}) || {:<50}".format(name, avg, des)
else:
for name, des in l:
print "- '{}' || {:<50}".format(name, des)
# _ _ _ _____ _ _
# | | (_) | | | ___| | | |
# | | _ ___| |_ | |__ | | ___ _ __ ___ ___ _ __ | |_ ___
# | | | / __| __| | __|| |/ _ \ '_ ` _ \ / _ \ '_ \| __/ __|
# | |___| \__ \ |_ | |___| | __/ | | | | | __/ | | | |_\__ \
# \_____/_|___/\__| \____/|_|\___|_| |_| |_|\___|_| |_|\__|___/
elif arguments["list_atoms"]:
e = EMSL_local(db_path=db_path)
basis_name = arguments["--basis"]
l = e.get_list_element_available(basis_name)
print ", ".join(l)
# ______ _ _ _
# | ___ \ (_) | | | |
# | |_/ / __ _ ___ _ ___ __| | __ _| |_ __ _
# | ___ \/ _` / __| / __| / _` |/ _` | __/ _` |
# | |_/ / (_| \__ \ \__ \ | (_| | (_| | || (_| |
# \____/ \__,_|___/_|___/ \__,_|\__,_|\__\__,_|
elif arguments["get_basis_data"]:
e = EMSL_local(db_path=db_path)
basis_name = arguments["--basis"][0]
elts = arguments["--atom"]
l_atom_basis = e.get_basis(basis_name, elts,
arguments["--treat_l"],
arguments["--check"])
# Add separation between atoms, and a empty last line
str_ = "\n\n".join(l_atom_basis) + "\n"
if arguments["--save"]:
if arguments["--path"]:
path = arguments["--path"]
else:
# The defaut path is bais
path = "_".join([basis_name, ".".join(elts)])
path = "/tmp/" + path + ".bs"
with open(path, 'w') as f:
f.write(str_ + "\n")
print path
else:
print str_
# _ _ _ __ _
# | | (_) | | / _| | |
# | | _ ___| |_ | |_ ___ _ __ _ __ ___ __ _| |_ ___
# | | | / __| __| | _/ _ \| '__| '_ ` _ \ / _` | __/ __|
# | |___| \__ \ |_ | || (_) | | | | | | | | (_| | |_\__ \
# \_____/_|___/\__| |_| \___/|_| |_| |_| |_|\__,_|\__|___/
elif arguments["list_formats"]:
for i in EMSL_dump.get_list_format():
print i
# _____ _ _ _
# / __ \ | | | | |
# | / \/_ __ ___ __ _| |_ ___ __| | |__
# | | | '__/ _ \/ _` | __/ _ \ / _` | '_ \
# | \__/\ | | __/ (_| | || __/ | (_| | |_) |
# \____/_| \___|\__,_|\__\___| \__,_|_.__/
elif arguments["create_db"]:
db_path = arguments["--db_path"]
format = arguments["--format"]
contraction = not arguments["--no-contraction"]
e = EMSL_dump(db_path=db_path,
format=format,
contraction=contraction)
e.new_db()
# _
# / | _ _. ._ o ._ _
# \_ | (/_ (_| | | | | | (_|
# _|
# Clean up on exit
if not(arguments['create_db']) and db_path_changed:
os.system("rm -f /dev/shm/%d.db" % (os.getpid()))
|
Python
| 0.001068
|
@@ -2649,13 +2649,15 @@
- '%7B
+0
%7D' (%7B
+1
%7D) %7C
@@ -2762,16 +2762,17 @@
nt %22- '%7B
+0
%7D' %7C%7C %7B:
|
042085d6bc964b66086dd0d6931e4ae327833369
|
fix some pylint complains
|
yaranullin/framework.py
|
yaranullin/framework.py
|
# yaranullin/framework.py
#
# Copyright (c) 2012 Marco Scopesi <marco.scopesi@gmail.com>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
'''The event system of Yaranullin.
This module is an simple implementation of an event patter.
'''
import collections
import inspect
import time
import weakref
from yaranullin.events import *
_QUEUE = collections.deque()
_EVENTS = {}
_EVENTS[ANY] = weakref.WeakValueDictionary()
def connect(event, func):
''' Connect an handler '''
if inspect.isfunction(func):
func = {func: func}
elif inspect.ismethod(func):
func = {func.im_func: func.im_self}
else:
return
if event not in _EVENTS:
_EVENTS[event] = weakref.WeakValueDictionary()
_EVENTS[event].update(func)
def disconnect(event=None, func=None):
''' Disconnect an handler '''
if func is not None:
if inspect.ismethod(func):
func = func.im_func
if event in _EVENTS:
del _EVENTS[event][func]
elif event is None:
for ev, handlers in _EVENTS.items():
if func in handlers:
del _EVENTS[ev][func]
for ev in _EVENTS.keys():
if not _EVENTS[ev]:
del _EVENTS[ev]
elif event is None:
_EVENTS.clear()
elif event in _EVENTS:
del _EVENTS[event]
if ANY not in _EVENTS:
_EVENTS[ANY] = weakref.WeakValueDictionary()
def post(__event__, **kargs):
''' Post an event '''
if not isinstance(__event__, int):
return
if __event__ not in _EVENTS and not _EVENTS[ANY]:
return
# Add the id of the dict to the object
id_ = id(kargs)
kargs['__id__'] = id_
# Add a special attribute with the type of the event
kargs['__event__'] = __event__
# Post an event only if there is some handler connected.
_QUEUE.append(kargs)
return id_
def process_queue():
''' Consume the event queue and call all handlers '''
stop = False
while _QUEUE:
ekargs = _QUEUE.popleft()
event = ekargs['__event__']
# Find all handler for this event
handlers = {}
handlers.update(_EVENTS[event])
handlers.update(_EVENTS[ANY])
for handler, self in handlers.iteritems():
hargs, _, hkeywords, _ = inspect.getargspec(handler)
kargs = dict(ekargs)
if 'self' in hargs:
# We assume that this is a bound method
kargs['self'] = self
# Check if the handler has ** magic
if not hkeywords:
# Delete all arguments that the handler cannot take
for key in kargs.keys():
if key not in hargs:
del kargs[key]
handler(**kargs)
if event == QUIT:
stop = True
break
return stop
class Pipe(object):
''' Used for communication between two processes.
To allow sending and receiving events from two different processes,
create an instance of Pipe for each one of them. The in_queue of the
first Pipe must be the out_queue of the second and viceversa.
The default implementation allows all events through the queues.
'''
def __init__(self, in_queue, out_queue):
self.in_queue = in_queue
self.out_queue = out_queue
self.posted_events = set()
connect(ANY, self.handle)
connect(TICK, self.tick)
def handle(self, **kargs):
''' Put given event to the out queue '''
try:
id_ = kargs['__id__']
event = kargs['__event__']
except KeyError:
return
if event == TICK:
# Never post ticks between processes.
return
if id_ in self.posted_events:
# This event was posted by the pipe, so do not have to post it
# back or we will trigger an infinite loop.
# Remove the event from the set (the event will be posted here
# once) and return
self.posted_events.remove(id_)
return
self.out_queue.put(kargs)
def tick(self):
''' Get all the event from the in queue '''
while not self.in_queue.empty():
event = self.in_queue.get()
self.posted_events.add(post(**event))
|
Python
| 0.000017
|
@@ -940,20 +940,8 @@
ect%0A
-import time%0A
impo
@@ -982,17 +982,31 @@
import
-*
+TICK, ANY, QUIT
%0A%0A%0A_QUEU
@@ -1029,17 +1029,16 @@
deque()%0A
-%0A
_EVENTS
@@ -1042,17 +1042,16 @@
TS = %7B%7D%0A
-%0A
_EVENTS%5B
@@ -1708,16 +1708,18 @@
for ev
+nt
, handle
@@ -1733,32 +1733,32 @@
EVENTS.items():%0A
-
@@ -1812,16 +1812,18 @@
VENTS%5Bev
+nt
%5D%5Bfunc%5D%0A
@@ -1836,16 +1836,18 @@
for ev
+nt
in _EVE
@@ -1887,16 +1887,18 @@
VENTS%5Bev
+nt
%5D:%0A
@@ -1922,16 +1922,18 @@
VENTS%5Bev
+nt
%5D%0A el
|
51cf3706504adb6b1772b491c6d9d612a64e49ab
|
fix check mention
|
yubari/bots/qq_watch.py
|
yubari/bots/qq_watch.py
|
#!/usr/bin/env python
# coding: utf-8
import time
import logging
from yubari.config import QQ_GROUP, MENTION_NAME
from yubari.lib.qq import qqbot
logger = logging.getLogger(__name__)
def run():
continue_count = 0
last_msg = ""
last_call = 0
for msg in qqbot.poll():
logger.info(msg)
content = msg.get('msg').strip()
for word in MENTION_NAME:
if word in content:
now = int(time.time())
if now - last_call < 1800:
logger.info("called in last 30min")
return
call_msg = "呀呀呀,召唤一号机[CQ:at,qq=%s]" % QQ_ME
qqbot.sendGroupMsg(call_msg)
last_call = now
return
if msg.get('event') == 'GroupMsg':
if msg.get('group') == QQ_GROUP:
if content != last_msg:
last_msg = content
continue_count = 0
continue
if continue_count < 2:
continue_count += 1
else:
logger.info("repeat: %s", content)
qqbot.sendGroupMsg(content)
continue_count = 0
if __name__ == "__main__":
run()
|
Python
| 0
|
@@ -108,17 +108,24 @@
ION_NAME
+, QQ_ME
%0A
-
from yub
@@ -189,16 +189,150 @@
me__)%0A%0A%0A
+def check_mention_self(content):%0A for word in MENTION_NAME:%0A if word in content:%0A return True%0A return False%0A%0A%0A
def run(
@@ -500,70 +500,40 @@
-for word in MENTION_NAME:%0A if word in
+if check_mention_self(
content
+)
:%0A
-
@@ -575,20 +575,16 @@
-
if now -
@@ -610,36 +610,32 @@
-
-
logger.info(%22cal
@@ -666,39 +666,33 @@
- return%0A
+continue%0A
@@ -739,36 +739,32 @@
_ME%0A
-
qqbot.sendGroupM
@@ -772,28 +772,24 @@
g(call_msg)%0A
-
@@ -816,26 +816,24 @@
- return
+continue
%0A
|
0420ecadeb3632b853510c35e8c557fcf204827f
|
Use stream ID for sender's default sending stream.
|
zerver/lib/addressee.py
|
zerver/lib/addressee.py
|
from typing import Iterable, List, Optional, Sequence, Union, cast
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext as _
from zerver.lib.exceptions import JsonableError
from zerver.lib.request import JsonableError
from zerver.models import (
Realm,
UserProfile,
get_user_including_cross_realm,
get_user_by_id_in_realm_including_cross_realm,
)
def raw_pm_with_emails(email_str: str, my_email: str) -> List[str]:
frags = email_str.split(',')
emails = [s.strip().lower() for s in frags]
emails = [email for email in emails if email]
if len(emails) > 1:
emails = [email for email in emails if email != my_email.lower()]
return emails
def get_user_profiles(emails: Iterable[str], realm: Realm) -> List[UserProfile]:
user_profiles = [] # type: List[UserProfile]
for email in emails:
try:
user_profile = get_user_including_cross_realm(email, realm)
except UserProfile.DoesNotExist:
raise JsonableError(_("Invalid email '%s'") % (email,))
user_profiles.append(user_profile)
return user_profiles
def get_user_profiles_by_ids(user_ids: Iterable[int], realm: Realm) -> List[UserProfile]:
user_profiles = [] # type: List[UserProfile]
for user_id in user_ids:
try:
user_profile = get_user_by_id_in_realm_including_cross_realm(user_id, realm)
except UserProfile.DoesNotExist:
raise JsonableError(_("Invalid user ID {}".format(user_id)))
user_profiles.append(user_profile)
return user_profiles
def validate_topic(topic: str) -> str:
if topic is None:
raise JsonableError(_("Missing topic"))
topic = topic.strip()
if topic == "":
raise JsonableError(_("Topic can't be empty"))
return topic
class Addressee:
# This is really just a holder for vars that tended to be passed
# around in a non-type-safe way before this class was introduced.
#
# It also avoids some nonsense where you have to think about whether
# topic should be None or '' for a PM, or you have to make an array
# of one stream.
#
# Eventually we can use this to cache Stream and UserProfile objects
# in memory.
#
# This should be treated as an immutable class.
def __init__(self, msg_type: str,
user_profiles: Optional[Sequence[UserProfile]]=None,
stream_name: Optional[str]=None,
stream_id: Optional[int]=None,
topic: Optional[str]=None) -> None:
assert(msg_type in ['stream', 'private'])
self._msg_type = msg_type
self._user_profiles = user_profiles
self._stream_name = stream_name
self._stream_id = stream_id
self._topic = topic
def is_stream(self) -> bool:
return self._msg_type == 'stream'
def is_private(self) -> bool:
return self._msg_type == 'private'
def user_profiles(self) -> List[UserProfile]:
assert(self.is_private())
return self._user_profiles # type: ignore # assertion protects us
def stream_name(self) -> Optional[str]:
assert(self.is_stream())
return self._stream_name
def stream_id(self) -> Optional[int]:
assert(self.is_stream())
return self._stream_id
def topic(self) -> str:
assert(self.is_stream())
assert(self._topic is not None)
return self._topic
@staticmethod
def legacy_build(sender: UserProfile,
message_type_name: str,
message_to: Union[Sequence[int], Sequence[str]],
topic_name: str,
realm: Optional[Realm]=None) -> 'Addressee':
# For legacy reason message_to used to be either a list of
# emails or a list of streams. We haven't fixed all of our
# callers yet.
if realm is None:
realm = sender.realm
if message_type_name == 'stream':
if len(message_to) > 1:
raise JsonableError(_("Cannot send to multiple streams"))
if message_to:
stream_name_or_id = message_to[0]
else:
# This is a hack to deal with the fact that we still support
# default streams (and the None will be converted later in the
# callpath).
if sender.default_sending_stream:
# Use the users default stream
stream_name_or_id = sender.default_sending_stream.name
else:
raise JsonableError(_('Missing stream'))
if isinstance(stream_name_or_id, int):
stream_id = cast(int, stream_name_or_id)
return Addressee.for_stream_id(stream_id, topic_name)
stream_name = cast(str, stream_name_or_id)
return Addressee.for_stream(stream_name, topic_name)
elif message_type_name == 'private':
if not message_to:
raise JsonableError(_("Message must have recipients"))
if isinstance(message_to[0], str):
emails = cast(Sequence[str], message_to)
return Addressee.for_private(emails, realm)
elif isinstance(message_to[0], int):
user_ids = cast(Sequence[int], message_to)
return Addressee.for_user_ids(user_ids=user_ids, realm=realm)
else:
raise JsonableError(_("Invalid message type"))
@staticmethod
def for_stream(stream_name: str, topic: str) -> 'Addressee':
topic = validate_topic(topic)
return Addressee(
msg_type='stream',
stream_name=stream_name,
topic=topic,
)
@staticmethod
def for_stream_id(stream_id: int, topic: str) -> 'Addressee':
topic = validate_topic(topic)
return Addressee(
msg_type='stream',
stream_id=stream_id,
topic=topic,
)
@staticmethod
def for_private(emails: Sequence[str], realm: Realm) -> 'Addressee':
assert len(emails) > 0
user_profiles = get_user_profiles(emails, realm)
return Addressee(
msg_type='private',
user_profiles=user_profiles,
)
@staticmethod
def for_user_ids(user_ids: Sequence[int], realm: Realm) -> 'Addressee':
assert len(user_ids) > 0
user_profiles = get_user_profiles_by_ids(user_ids, realm)
return Addressee(
msg_type='private',
user_profiles=user_profiles,
)
@staticmethod
def for_user_profile(user_profile: UserProfile) -> 'Addressee':
user_profiles = [user_profile]
return Addressee(
msg_type='private',
user_profiles=user_profiles,
)
|
Python
| 0
|
@@ -4560,20 +4560,18 @@
_stream.
-name
+id
%0A
|
cf9c7f2f020121c3a4272dd58c8c62baee8679b9
|
fix typo
|
src/py/gopythongo/assemblers/django.py
|
src/py/gopythongo/assemblers/django.py
|
# -* encoding: utf-8 *-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import base64
import shutil
import os
from typing import Any, Type
import configargparse
from gopythongo import utils
from gopythongo.assemblers import BaseAssembler
from gopythongo.utils import highlight, ErrorMessage, get_umasked_mode
class DjangoAssembler(BaseAssembler):
def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
@property
def assembler_name(self) -> str:
return u"django"
@property
def assembler_type(self) -> str:
return BaseAssembler.TYPE_ISOLATED
def add_args(self, parser: configargparse.ArgumentParser) -> None:
gr_django = parser.add_argument_group("Django Assembler options")
gr_django.add_argument("--collect-static", dest="collect_static", action="store_true", default=False,
help="If set, run 'django-admin.py collectstatic' inside the bundle")
gr_django.add_argument("--static-root", dest="static_root", default=None,
help="Where to collect static files from (Django's STATIC_ROOT)")
gr_django.add_argument("--assert-static-root-empty", dest="fresh_static", action="store_true", default=False,
help="If set, this script will make sure that STATIC_ROOT is empty " +
"before running collectstatic by DELETING it (be careful!)")
gr_django.add_argument("--django-settings", dest="django_settings_module", default=None,
env_var="DJANGO_SETTINGS_MODULE",
help="'--settings' argument to pass to django-admin.py when it is called by " +
"this script. If --django-generate-secret-key is set, SECRET_KEY will be set "
"in the environment.")
gr_django.add_argument("--django-settings-envfile", dest="django_settings_envfile", default=None,
help="If set to a path, GoPythonGo will write the value of '--django-settings' to the "
"specified file, resulting in a file that can be read from envdir or systemd. This "
"is useful for shipping environment configuration for projects adhering to "
"12factor. (A common use would be to set the DJANGO_SETTINGS_MODULE environment "
"variable.")
gr_django.add_argument("--django-gen-secret-key", dest="django_secret_key_file", default=None,
env_var="DJANGO_GEN_SECRET_KEY",
help="If set, GoPythonGo will write SECRET_KEY='(random)' to the given filename. The "
"resulting file can be read from envdir or systemd (EnvironmentFile). This is "
"useful for shipping environment configuration for projects adhering to 12factor "
"(and/or using the django12factor library).")
gr_django.add_argument("--envfile-mode", dest="envfile_mode", choices=["envdir", "envfile"], default="envdir",
help="Sets the output format for --django-gen-secret-key and --django-settings-envfile "
"so GoPythonGo wither writes NAME=VALUE (for systemd EnvironmentFiles) to the file "
"or just VALUE (for envdirs).")
def validate_args(self, args: configargparse.Namespace) -> None:
if args.django_secret_key_file:
if os.path.exists(os.path.dirname(args.django_secret_key_file)):
if not os.access(os.path.dirname(args.django_secret_key_file), os.W_OK):
raise ErrorMessage("GoPythonGo can't write to %s" % os.path.dirname(args.django_secret_key_file))
if os.path.exists(args.django_secret_key_file) and not os.access(args.django_secret_key_file, os.W_OK):
raise ErrorMessage("GoPythonGo can't write to %s" % args.django_secret_key_file)
if args.collect_static:
if not args.django_settings_module:
raise ErrorMessage("%s requires %s, you must specify the module path of your settings module" %
(highlight("--collect-static"), highlight("--django-settings")))
def assemble(self, args: configargparse.Namespace) -> None:
if args.django_secret_key_file:
utils.print_info("Creating SECRET_KEY configuration for Django in %s" %
utils.highlight(args.django_secret_key_file))
if not os.path.exists(os.path.dirname(args.django_secret_key_file)):
utils.umasked_makedirs(os.path.dirname(args.django_secret_key_file), 0o755)
secret = base64.b64encode(os.urandom(48)).decode("utf-8")
if args.django_settings_module and 'SECRET_KEY' not in os.environ:
os.environ['SECRET_KEY'] = secret
with open(args.django_secret_key_file, "wt", encoding="utf-8") as sf:
os.chmod(args.django_secret_key_file, get_umasked_mode(0o600))
if args.envfile_mode == "envfile":
sf.write("SECRET_KEY=")
sf.write("%s\n" % secret)
if args.django_settings_envfile:
utils.print_info("Saving DJANGO_SETTINGS_MODULE %s to file %s" %
(utils.highlight(args.django_settings_module),
utils.highlight(args.django_settings_envfile)))
if not os.path.exits(os.path.dirname(args.django_settings_envfile)):
utils.umasked_makedirs(os.path.dirname(args.django_settings_envfile), 0o755)
with open(args.django_settings_envfile, "wt", encoding="utf-8") as sf:
os.chmod(args.django_settings_envfile, get_umasked_mode(0o644))
if args.envfile_mode == "envfile":
sf.write("DJANGO_SETTINGS_MODULE=")
sf.write("%s\n" % args.django_settings_module)
if args.collect_static:
envpy = utils.create_script_path(args.build_path, "python")
utils.print_info("Collecting static artifacts")
if args.static_root and os.path.exists(args.static_root):
utils.print_debug(" %s exists." % args.static_root)
if args.fresh_static:
utils.print_info("removing stale static artifacts in %s" % args.static_root)
shutil.rmtree(args.static_root)
django_admin = utils.create_script_path(args.build_path, 'django-admin.py')
run_dja = [envpy, django_admin, "collectstatic"]
if args.django_settings_module:
run_dja.append('--settings=%s' % args.django_settings_module)
run_dja.append("--noinput")
run_dja.append("--traceback")
utils.run_process(*run_dja)
if args.static_root and not os.path.exists(args.static_root):
raise ErrorMessage("%s should now exist, but it doesn't" % args.static_root)
assembler_class = DjangoAssembler # type: Type[DjangoAssembler]
|
Python
| 0.999991
|
@@ -5875,16 +5875,17 @@
path.exi
+s
ts(os.pa
|
eda12e10ae41dce8a34903709afb7a0c73fcd3e2
|
Add dict merging and fix wrapped
|
src/server/blueprints/rest/restutil.py
|
src/server/blueprints/rest/restutil.py
|
"""
Utilities specific to REST blueprints.
"""
from enum import Enum
from util import get_current_app
from functools import wraps
import re
class ClientType(str, Enum):
BROWSER = "browser" # most useful in debug
CURL = "cURL" # also useful in debug
OTHER = "other" # usually production apps
browsers = re.compile("|".join(("chrome", "firefox", "safari", "opera")), re.IGNORECASE)
def get_implied_client_type(useragent: str) -> ClientType:
"""
Attempts to get the client type based on user-agent. This is by no means exaustive for browser checking,
and may be incorrect if the client lies.
:param useragent: The user-agent that the client provided
:return: The ClientType the user-agent implies
"""
if browsers.search(useragent):
return ClientType.BROWSER
if "curl/" in useragent:
return ClientType.CURL
return ClientType.OTHER
_shared_decorator_key = __name__ + "_shared_decorator"
def _shared_decorator_logic(**response_kwargs):
"""
Shared deco logic, merges decorators that are used together
"""
def make_wrapper(f):
merged_kwargs = response_kwargs.copy()
fn = f
if hasattr(f, _shared_decorator_key):
data = getattr(f, _shared_decorator_key)
merged_kwargs.update(data['kwargs'])
fn = data['wrapped']
@wraps(fn)
def wrapper(*args, **kwargs):
return get_current_app().response_class(fn(*args, **kwargs), **merged_kwargs)
setattr(wrapper, _shared_decorator_key, {'kwargs': merged_kwargs, 'wrapped': f})
return wrapper
return make_wrapper
def content_type(ctype):
return _shared_decorator_logic(content_type=ctype)
def status_code(code):
return _shared_decorator_logic(status=code)
def headers(direct_dict=None, **kwargs):
funneled = direct_dict or dict()
funneled.update(kwargs)
funneled = {k.replace('_', '-').upper(): v for k, v in funneled.items()}
return _shared_decorator_logic(headers=funneled)
|
Python
| 0
|
@@ -71,35 +71,105 @@
rom
-util import get_current_app
+collections.abc import MutableMapping%0Afrom util import get_current_app%0Afrom flask import Response
%0Afro
@@ -1353,135 +1353,628 @@
-merged_kwargs.update(data%5B'kwargs'%5D)%0A fn = data%5B'wrapped'%5D%0A%0A @wraps(fn)%0A def wrapper(*args, **kwargs):
+kwtomerge = data%5B'kwargs'%5D%0A merge_dict = dict()%0A for k, v in kwtomerge.items():%0A if k in merged_kwargs and isinstance(merged_kwargs%5Bk%5D, MutableMapping):%0A merged_kwargs%5Bk%5D.update(v)%0A else:%0A merge_dict%5Bk%5D = v%0A merged_kwargs.update(merge_dict)%0A fn = data%5B'wrapped'%5D%0A%0A @wraps(fn)%0A def wrapper(*args, **kwargs):%0A ret = fn(*args, **kwargs)%0A if isinstance(ret, Response):%0A # ahhhhhh%0A raise ValueError(%22No support for returning response and merging%22)
%0A
@@ -2022,35 +2022,19 @@
e_class(
-fn(*args, **kwargs)
+ret
, **merg
@@ -2131,16 +2131,17 @@
pped': f
+n
%7D)%0A
|
13e22652473eac3d2b8e700f674cfcd03a5f2155
|
Remove the library dependency for the cluster command.
|
haascli/cluster.py
|
haascli/cluster.py
|
import click
import boto3
import executor
from executor.ssh.client import RemoteCommand
class ClusterTopology(object):
@staticmethod
def parse(stack_name):
client = boto3.client('cloudformation')
client_autoscaling = boto3.client('autoscaling')
client_ec2 = boto3.client('ec2')
response = client.describe_stacks(
StackName=stack_name,
# NextToken='test',
)
assert len(response['Stacks']) == 1
# not sure whether the following implementation supports multiple nodes in autoscaling groups
master_ip = None
stack_record = response['Stacks'][0]
result = client.list_stack_resources(StackName=stack_record['StackName'])
for stack_resource in result['StackResourceSummaries']:
if not (stack_resource['ResourceType'] == 'AWS::AutoScaling::AutoScalingGroup' and
stack_resource['LogicalResourceId'] == 'MasterASG'):
continue
instance_list = client_autoscaling.describe_auto_scaling_instances()
instance_id_list = []
for instance in instance_list['AutoScalingInstances']:
if instance['AutoScalingGroupName'] == stack_resource['PhysicalResourceId']:
instance_id_list.append(instance['InstanceId'])
ec2_instance_list = client_ec2.describe_instances(InstanceIds=instance_id_list)
# not sure any issue in this way
master_ip = ec2_instance_list['Reservations'][0]['Instances'][0]['NetworkInterfaces'][0]['Association']['PublicIp']
return ClusterTopology(master_ip)
def __init__(self, master_ip):
self.master_ip = master_ip
def get_master_ip(self):
return self.master_ip
@click.group(context_settings=dict(help_option_names=['-h', '--help']))
@click.option('-s', '--stack_name', default='myhpcc')
@click.pass_context
def cli(ctx, **kwargs):
"""Cluster related operations
"""
ctx.obj.update(kwargs)
@cli.command()
@click.pass_context
def start(ctx):
# @TODO: a cache mechanism would be better
topology = ClusterTopology.parse(ctx.obj['stack_name'])
# @TODO: after we finalize the AMI, we don't need to switch to the user's directory
RemoteCommand(topology.get_master_ip(), 'source ~/project-aws/init.sh; cd ~/project-aws; hpcc service --action start').start()
@cli.command()
@click.pass_context
def stop(ctx):
topology = ClusterTopology.parse(ctx.obj['stack_name'])
RemoteCommand(topology.get_master_ip(), 'source ~/project-aws/init.sh; cd ~/project-aws; hpcc service --action stop').start()
@cli.command()
@click.pass_context
def restart(ctx):
ctx.invoke(stop)
ctx.invoke(start)
@cli.command()
@click.pass_context
def status(ctx):
topology = ClusterTopology.parse(ctx.obj['stack_name'])
RemoteCommand(topology.get_master_ip(), 'source ~/project-aws/init.sh; cd ~/project-aws; hpcc service --action status').start()
|
Python
| 0
|
@@ -2265,32 +2265,41 @@
RemoteCommand(
+%0A
topology.get_mas
@@ -2311,86 +2311,236 @@
p(),
- 'source ~/project-aws/init.sh; cd ~/project-aws; hpcc service --action start'
+%0A 'sudo bash -c %22/opt/HPCCSystems/sbin/hpcc-run.sh -a dafilesrv start%22'%0A ).start()%0A RemoteCommand(%0A topology.get_master_ip(),%0A 'sudo bash -c %22/opt/HPCCSystems/sbin/hpcc-run.sh -a hpcc-init start%22'%0A
).st
@@ -2538,33 +2538,32 @@
%0A ).start()%0A%0A
-%0A
@cli.command()%0A@
@@ -2666,32 +2666,41 @@
RemoteCommand(
+%0A
topology.get_mas
@@ -2712,191 +2712,651 @@
p(),
- 'source ~/project-aws/init.sh; cd ~/project-aws; hpcc service --action stop').start()%0A%0A%0A@cli.command()%0A@click.pass_context%0Adef restart(ctx):%0A ctx.invoke(stop)%0A ctx.invoke(
+%0A 'sudo bash -c %22/opt/HPCCSystems/sbin/hpcc-run.sh -a dafilesrv stop%22'%0A ).start()%0A RemoteCommand(%0A topology.get_master_ip(),%0A 'sudo bash -c %22/opt/HPCCSystems/sbin/hpcc-run.sh -a hpcc-init stop%22'%0A ).start()%0A%0A%0A@cli.command()%0A@click.pass_context%0Adef restart(ctx):%0A topology = ClusterTopology.parse(ctx.obj%5B'stack_name'%5D)%0A RemoteCommand(%0A topology.get_master_ip(),%0A 'sudo bash -c %22/opt/HPCCSystems/sbin/hpcc-run.sh -a dafilesrv restart%22'%0A ).start()%0A RemoteCommand(%0A topology.get_master_ip(),%0A 'sudo bash -c %22/opt/HPCCSystems/sbin/hpcc-run.sh -a hpcc-init restart%22'%0A ).
start
+(
)%0A%0A@
@@ -3484,16 +3484,25 @@
Command(
+%0A
topology
@@ -3522,87 +3522,238 @@
p(),
- 'source ~/project-aws/init.sh; cd ~/project-aws; hpcc service --action status'
+%0A 'sudo bash -c %22/opt/HPCCSystems/sbin/hpcc-run.sh -a dafilesrv status%22'%0A ).start()%0A RemoteCommand(%0A topology.get_master_ip(),%0A 'sudo bash -c %22/opt/HPCCSystems/sbin/hpcc-run.sh -a hpcc-init status%22'%0A
).st
|
77921afc66e68a65d5cd8992a49550896b491082
|
Load random state test is fixed.
|
tests/core/test_base.py
|
tests/core/test_base.py
|
import unittest
import os
import numpy as np
from examples.game_of_life import GameOfLife, GOLExperiment
class TestCellularAutomaton(unittest.TestCase):
num_steps = 1000
num_runs = 3
def test_single_ca(self):
for i in range(self.num_runs):
ca = GameOfLife(GOLExperiment)
for j in range(self.num_steps):
ca.step()
self.assertEqual(584, np.sum(ca.cells_gpu.get()[:ca.cells_num]),
"Wrong field checksum.")
def test_multiple_ca(self):
ca1 = GameOfLife(GOLExperiment)
ca2 = GameOfLife(GOLExperiment)
for j in range(self.num_steps):
ca1.step()
ca2.step()
self.assertEqual(584, np.sum(ca1.cells_gpu.get()[:ca1.cells_num]),
"Wrong field checksum (CA #1).")
self.assertEqual(584, np.sum(ca2.cells_gpu.get()[:ca2.cells_num]),
"Wrong field checksum (CA #2).")
def test_render(self):
experiment = GOLExperiment
experiment.zoom = 1
ca = GameOfLife(experiment)
ca.set_viewport(experiment.size)
for j in range(self.num_steps):
ca.step()
img = ca.render()
self.assertEqual(584 * 3, np.sum(img / 255),
"Wrong image checksum.")
def test_pause(self):
ca = GameOfLife(GOLExperiment)
ca.paused = False
checksum_before = np.sum(ca.cells_gpu.get()[:ca.cells_num])
ca.step()
checksum_after = np.sum(ca.cells_gpu.get()[:ca.cells_num])
self.assertNotEqual(checksum_before, checksum_after,
"CA is paused.")
ca.paused = True
checksum_before = checksum_after
ca.step()
checksum_after = np.sum(ca.cells_gpu.get()[:ca.cells_num])
self.assertEqual(checksum_before, checksum_after,
"CA is not paused.")
def test_save_load(self):
ca1 = GameOfLife(GOLExperiment)
for i in range(self.num_steps // 2):
ca1.step()
ca1.save("test.ca")
ca2 = GameOfLife(GOLExperiment)
ca2.load("test.ca")
for i in range(self.num_steps // 2):
ca2.step()
self.assertEqual(584, np.sum(ca2.cells_gpu.get()[:ca2.cells_num]),
"Wrong field checksum.")
os.remove("test.ca")
def test_load_random(self):
ca1 = GameOfLife(GOLExperiment)
ca1.save("test.ca")
ca2 = GameOfLife(GOLExperiment)
ca2.load("test.ca")
self.assertEqual(ca1.random.std.randint(1, 1000),
ca2.random.std.randint(1, 1000),
"Wrong standard RNG state.")
self.assertEqual(ca1.random.np.randint(1, 1000),
ca2.random.np.randint(1, 1000),
"Wrong numpy RNG state.")
|
Python
| 0
|
@@ -2882,20 +2882,49 @@
numpy RNG state.%22)%0A
+ os.remove(%22test.ca%22)%0A
|
74ad5c935abd69b7408a0c1ba2d7cc4ed57e3bd9
|
test solely running linkcheck as it requires the html to be built
|
tests/docs/test_docs.py
|
tests/docs/test_docs.py
|
import subprocess
import unittest
import os
import platform
class Doc_Test(unittest.TestCase):
@property
def path_to_docs(self):
dirname, file_name = os.path.split(os.path.abspath(__file__))
return dirname.split(os.path.sep)[:-2] + ["docs"]
def test_html(self):
wd = os.getcwd()
os.chdir(os.path.sep.join(self.path_to_docs))
if platform.system() != "Windows":
response = subprocess.run(["make", "html"])
self.assertTrue(response.returncode == 0)
else:
response = subprocess.call(["make", "html"], shell=True)
self.assertTrue(response == 0)
os.chdir(wd)
def test_linkcheck(self):
wd = os.getcwd()
os.chdir(os.path.sep.join(self.path_to_docs))
if platform.system() != "Windows":
response = subprocess.run(["make", "linkcheck"])
self.assertTrue(response.returncode == 0)
else:
response = subprocess.call(["make", "linkcheck"], shell=True)
self.assertTrue(response == 0)
os.chdir(wd)
if __name__ == "__main__":
unittest.main()
|
Python
| 0
|
@@ -260,24 +260,26 @@
%22docs%22%5D%0A%0A
+ #
def test_ht
@@ -283,32 +283,34 @@
_html(self):%0A
+ #
wd = os.get
@@ -310,32 +310,34 @@
os.getcwd()%0A
+ #
os.chdir(os
@@ -363,36 +363,43 @@
.path_to_docs))%0A
+ #
%0A
+ #
if platform
@@ -417,32 +417,34 @@
= %22Windows%22:%0A
+ #
respons
@@ -479,24 +479,26 @@
%22html%22%5D)%0A
+ #
sel
@@ -531,32 +531,34 @@
rncode == 0)%0A
+ #
else:%0A
@@ -547,32 +547,34 @@
# else:%0A
+ #
respons
@@ -618,32 +618,34 @@
shell=True)%0A
+ #
self.as
@@ -660,36 +660,43 @@
(response == 0)%0A
+ #
%0A
+ #
os.chdir(wd
|
a80ac141b7341e867f1395858e0bdccaa9a83b37
|
Fix for Py2 test.
|
tests/filesys_test_7.py
|
tests/filesys_test_7.py
|
# -*- coding: utf-8 -*-
##==============================================================#
## SECTION: Imports #
##==============================================================#
from testlib import *
from auxly.filesys import File
##==============================================================#
## SECTION: Global Definitions #
##==============================================================#
UTF8_STR = "ÁÍÓÚÀÈÌÒÙAEIOU"
##==============================================================#
## SECTION: Class Definitions #
##==============================================================#
class TestCase(BaseTest):
def test_file_1(test):
"""Basic File usage."""
p = FNAME[0]
f = File(p)
test.assertFalse(f.exists())
test.assertTrue(f.write(UTF8_STR))
test.assertTrue(f.exists())
test.assertEqual(UTF8_STR, f.read())
test.assertEqual(None, f.read(encoding="ascii"))
def test_file_2(test):
"""Basic File usage."""
p = FNAME[0]
f = File(p)
test.assertFalse(f.exists())
test.assertFalse(f.write(UTF8_STR, encoding="ascii"))
##==============================================================#
## SECTION: Main Body #
##==============================================================#
if __name__ == '__main__':
unittest.main()
|
Python
| 0
|
@@ -481,16 +481,17 @@
8_STR =
+u
%22%C3%81%C3%8D%C3%93%C3%9A%C3%80%C3%88%C3%8C
|
3a5b8344364306d6f4af266fb8485269bbc8383e
|
remove device parameter
|
hoomd/hpmc/pytest/test_remove_drift.py
|
hoomd/hpmc/pytest/test_remove_drift.py
|
# Copyright (c) 2009-2021 The Regents of the University of Michigan
# This file is part of the HOOMD-blue project, released under the BSD 3-Clause
# License.
"""Test hoomd.hpmc.update.RemoveDrift."""
import hoomd
from hoomd.conftest import operation_pickling_check
import pytest
import hoomd.hpmc.pytest.conftest
import numpy as np
# note: The parameterized tests validate parameters so we can't pass in values
# here that require preprocessing
valid_constructor_args = [
dict(trigger=hoomd.trigger.Periodic(10),
reference_positions=[(0, 0, 0), (1, 0, 1)]),
dict(trigger=hoomd.trigger.After(10),
reference_positions=[(0, 0, 0), (1, 0, 1)]),
dict(trigger=hoomd.trigger.Before(10),
reference_positions=[(0, 0, 0), (1, 0, 1)])
]
valid_attrs = [('trigger', hoomd.trigger.Periodic(10000)),
('trigger', hoomd.trigger.After(100)),
('trigger', hoomd.trigger.Before(12345)),
('reference_positions', [(0, 0, 0), (1, 0, 1)])]
@pytest.mark.parametrize("constructor_args", valid_constructor_args)
def test_valid_construction(device, constructor_args):
"""Test that RemoveDrift can be constructed with valid arguments."""
remove_drift = hoomd.hpmc.update.RemoveDrift(**constructor_args)
# validate the params were set properly
for attr, value in constructor_args.items():
assert np.all(getattr(remove_drift, attr) == value)
@pytest.mark.parametrize("constructor_args", valid_constructor_args)
def test_valid_construction_and_attach(simulation_factory,
two_particle_snapshot_factory,
constructor_args, valid_args):
"""Test that RemoveDrift can be attached with valid arguments."""
integrator = valid_args[0]
args = valid_args[1]
# Need to unpack union integrators
if isinstance(integrator, tuple):
inner_integrator = integrator[0]
integrator = integrator[1]
inner_mc = inner_integrator()
for i in range(len(args["shapes"])):
# This will fill in default values for the inner shape objects
inner_mc.shape["A"] = args["shapes"][i]
args["shapes"][i] = inner_mc.shape["A"]
mc = integrator()
mc.shape["A"] = args
mc.shape["B"] = args
remove_drift = hoomd.hpmc.update.RemoveDrift(**constructor_args)
dim = 2 if 'polygon' in integrator.__name__.lower() else 3
sim = simulation_factory(
two_particle_snapshot_factory(particle_types=['A', 'B'],
dimensions=dim,
d=2,
L=50))
sim.operations.updaters.append(remove_drift)
sim.operations.integrator = mc
sim.run(0)
# validate the params were set properly
for attr, value in constructor_args.items():
assert np.all(getattr(remove_drift, attr) == value)
@pytest.mark.parametrize("attr,value", valid_attrs)
def test_valid_setattr(attr, value):
"""Test that RemoveDrift can get and set attributes."""
remove_drift = hoomd.hpmc.update.RemoveDrift(
trigger=hoomd.trigger.Periodic(10),
reference_positions=[(0, 0, 1), (-1, 0, 1)])
setattr(remove_drift, attr, value)
assert np.all(getattr(remove_drift, attr) == value)
@pytest.mark.parametrize("attr,value", valid_attrs)
def test_valid_setattr_attached(attr, value, simulation_factory,
two_particle_snapshot_factory, valid_args):
"""Test that RemoveDrift can get and set attributes while attached."""
integrator = valid_args[0]
args = valid_args[1]
# Need to unpack union integrators
if isinstance(integrator, tuple):
inner_integrator = integrator[0]
integrator = integrator[1]
inner_mc = inner_integrator()
for i in range(len(args["shapes"])):
# This will fill in default values for the inner shape objects
inner_mc.shape["A"] = args["shapes"][i]
args["shapes"][i] = inner_mc.shape["A"]
mc = integrator()
mc.shape["A"] = args
mc.shape["B"] = args
remove_drift = hoomd.hpmc.update.RemoveDrift(
trigger=hoomd.trigger.Periodic(10),
reference_positions=[(0, 0, 1), (-1, 0, 1)])
dim = 2 if 'polygon' in integrator.__name__.lower() else 3
sim = simulation_factory(
two_particle_snapshot_factory(particle_types=['A', 'B'],
dimensions=dim,
d=2,
L=50))
sim.operations.updaters.append(remove_drift)
sim.operations.integrator = mc
sim.run(0)
setattr(remove_drift, attr, value)
assert np.all(getattr(remove_drift, attr) == value)
def test_remove_drift(simulation_factory, lattice_snapshot_factory):
"""Test that RemoveDrift modifies positions correctly."""
dev = hoomd.device.CPU()
sim = hoomd.simulation.Simulation(device=dev, seed=10234)
snap = hoomd.snapshot.Snapshot(communicator=dev.communicator)
reference_positions = np.array([[2, 0, 0.1], [-2, 0, 0.1]])
box = np.array([10, 10, 10, 0, 0, 0])
if snap.communicator.rank == 0:
snap.particles.N = 2
snap.particles.position[:] = reference_positions
snap.configuration.box = box
snap.particles.types = ["A"]
sim.create_state_from_snapshot(snap)
mc = hoomd.hpmc.integrate.Sphere(default_d=0.5)
mc.shape["A"] = dict(diameter=1.0)
sim.operations.integrator = mc
# randomize a bit
sim.run(500)
# make sure only the updater is acting on the system
sim.operations.integrator.d["A"] = 0
# remove the drift from the previous run
remove_drift = hoomd.hpmc.update.RemoveDrift(
trigger=hoomd.trigger.Periodic(1),
reference_positions=reference_positions)
sim.operations.updaters.append(remove_drift)
sim.run(1)
s = sim.state.snapshot
if s.communicator.rank == 0:
new_positions = s.particles.position
drift = np.mean(new_positions - reference_positions, axis=0)
assert np.allclose(drift, [0, 0, 0])
def test_pickling(simulation_factory, two_particle_snapshot_factory):
"""Test that RemoveDrift objects are picklable."""
sim = simulation_factory(two_particle_snapshot_factory())
mc = hoomd.hpmc.integrate.Sphere(default_d=0.1, default_a=0.1)
mc.shape['A'] = dict(diameter=1.1)
mc.shape['B'] = dict(diameter=1.3)
sim.operations.integrator = mc
remove_drift = hoomd.hpmc.update.RemoveDrift(
trigger=hoomd.trigger.Periodic(5),
reference_positions=[(0, 0, 1), (-1, 0, 1)])
operation_pickling_check(remove_drift, sim)
|
Python
| 0.000003
|
@@ -1099,16 +1099,8 @@
ion(
-device,
cons
|
2f9a0e326b120b5bad886417ba338314b26da4ea
|
version bump to 0.4.4.1
|
hangups/version.py
|
hangups/version.py
|
__version__ = '0.4.4'
|
Python
| 0.000001
|
@@ -13,10 +13,20 @@
= '0.4.4
+.1+das7pad
'%0A
|
6f7908294cd69e419c0be3f1561febed6b673f6e
|
fix user feeds
|
muckrock/foia/feeds.py
|
muckrock/foia/feeds.py
|
"""
Feeds for the FOIA application
"""
# Django
# pylint: disable=no-name-in-module
from django.contrib.auth.models import User
from django.contrib.syndication.views import Feed
from django.http import Http404
from django.shortcuts import get_object_or_404
from django.template.defaultfilters import escape, linebreaks
# MuckRock
from muckrock.foia.models import FOIACommunication, FOIARequest
class LatestSubmittedRequests(Feed):
"""An RSS Feed for submitted FOIA requests"""
title = 'Muckrock Submitted Requests'
link = '/foi/'
description = 'Recently submitted FOI requests on MuckRock'
def items(self):
"""Return the items for the rss feed"""
return (
FOIARequest.objects.get_public()
.order_by('-composer__datetime_submitted')
.select_related('agency__jurisdiction')
.prefetch_related('communications')[:25]
)
def item_description(self, item):
"""The description of each rss item"""
return linebreaks(escape(item.first_request_text()))
class LatestDoneRequests(Feed):
"""An RSS Feed for completed FOIA requests"""
title = 'Muckrock Completed Requests'
link = '/foi/'
description = 'Recently completed FOI requests on MuckRock'
def items(self):
"""Return the items for the rss feed"""
return (
FOIARequest.objects.get_done().get_public()
.order_by('-datetime_done').select_related('agency__jurisdiction')
.prefetch_related('communications')[:25]
)
def item_description(self, item):
"""The description of each rss item"""
return linebreaks(escape(item.first_request_text()))
class FOIAFeed(Feed):
"""Feed for an individual FOI request"""
def get_object(self, request, idx):
"""Get the FOIA Request for this feed"""
# pylint: disable=arguments-differ
foia = get_object_or_404(FOIARequest, pk=idx)
if not foia.is_public():
raise Http404()
return foia
def title(self, obj):
"""The title of this feed"""
return 'MuckRock FOI Request: %s' % obj.title
def link(self, obj):
"""The link for this feed"""
return obj.get_absolute_url()
def description(self, obj):
"""The description of this feed"""
return 'Updates on FOI Request %s from MuckRock' % obj.title
def items(self, obj):
"""The communications are the items for this feed"""
return obj.communications.all()[:25]
def item_description(self, item):
"""The description of each rss item"""
return linebreaks(escape(item.communication))
class UserSubmittedFeed(Feed):
"""Feed for a user's new submitted requests"""
def get_object(self, request, username):
"""Get the user for this feed"""
# pylint: disable=arguments-differ
return get_object_or_404(User, username=username)
def title(self, obj):
"""The title of this feed"""
return 'MuckRock user %s\'s submitted requests' % obj.username
def link(self, obj):
"""The link for this feed"""
return obj.profile.get_absolute_url()
def description(self, obj):
"""The description of this feed"""
return 'Newly submitted requests by %s' % obj.username
def items(self, obj):
"""The submitted requests are the items for this feed"""
return (
FOIARequest.objects.filter(
user=obj,
embargo=False,
).order_by(
'-composer__datetime_submitted',
).select_related('agency__jurisdiction')
.prefetch_related('communications')[:25]
)
def item_description(self, item):
"""The description of each rss item"""
return linebreaks(escape(item.first_request_text()))
class UserDoneFeed(Feed):
"""Feed for a user's completed requests"""
def get_object(self, request, username):
"""Get the user for this feed"""
# pylint: disable=arguments-differ
return get_object_or_404(User, username=username)
def title(self, obj):
"""The title of this feed"""
return 'MuckRock user %s\'s completed requests' % obj.username
def link(self, obj):
"""The link for this feed"""
return obj.profile.get_absolute_url()
def description(self, obj):
"""The description of this feed"""
return 'Completed requests by %s' % obj.username
def items(self, obj):
"""The completed requests are the items for this feed"""
return (
FOIARequest.objects.get_done().filter(user=obj, embargo=False)
.order_by('-datetime_submitted'
).select_related('agency__jurisdiction')
.prefetch_related('communications')[:25]
)
def item_description(self, item):
"""The description of each rss item"""
return linebreaks(escape(item.first_request_text()))
class UserUpdateFeed(Feed):
"""Feed for updates to all of user's requests"""
def get_object(self, request, username):
"""Get the user for this feed"""
# pylint: disable=arguments-differ
return get_object_or_404(User, username=username)
def title(self, obj):
"""The title of this feed"""
return 'MuckRock user %s\'s request updates' % obj.username
def link(self, obj):
"""The link for this feed"""
return obj.profile.get_absolute_url()
def description(self, obj):
"""The description of this feed"""
return 'All request updates by %s' % obj.username
def items(self, obj):
"""The communications are the items for this feed"""
communications = (
FOIACommunication.objects.filter(foia__user=obj)
.exclude(foia__embargo=True)
.select_related('foia__agency__jurisdiction').order_by('-date')
)
return communications[:25]
def item_description(self, item):
"""The description of each rss item"""
return linebreaks(escape(item.communication))
|
Python
| 0.000008
|
@@ -3487,16 +3487,26 @@
+composer__
user=obj
@@ -3507,16 +3507,16 @@
er=obj,%0A
-
@@ -4673,25 +4673,68 @@
.filter(
-user=obj,
+%0A composer__user=obj,%0A
embargo
@@ -4739,17 +4739,17 @@
go=False
-)
+,
%0A
@@ -4745,32 +4745,33 @@
se,%0A
+)
.order_by('-date
@@ -4756,32 +4756,49 @@
).order_by(
+%0A
'-datetime_submi
@@ -4802,27 +4802,18 @@
bmitted'
+,
%0A
-
@@ -5828,16 +5828,16 @@
ons = (%0A
-
@@ -5879,16 +5879,26 @@
r(foia__
+composer__
user=obj
|
cc7ca20a4f86cedc4e0b20c50f6cbb9a3f512a20
|
Fix FullTrigger conditions list attribute instantiation
|
hawkular/alerts.py
|
hawkular/alerts.py
|
"""
Copyright 2015-2016 Red Hat, Inc. and/or its affiliates
and other contributors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from client import ApiOject, HawkularBaseClient
class Trigger(ApiOject):
__slots__ = [
'id', 'name', 'description', 'type', 'event_type', 'event_category',
'event_text', 'event_category', 'event_text', 'severity', 'context',
'tags', 'actions', 'auto_disable', 'auto_enable', 'auto_resolve',
'auto_resolve_alerts', 'auto_resolve_match', 'data_id_map', 'member_of',
'enabled', 'firing_match', 'source'
]
class Condition(ApiOject):
__slots__ = [
'trigger_id', 'trigger_mode', 'type', 'condition_set_size',
'condition_set_index', 'condition_id', 'context', 'data_id',
'operator', 'data2_id', 'data2_multiplier', 'pattern', 'ignore_case',
'threshold', 'operator_low', 'operator_high', 'threshold_low', 'threshold_high',
'in_range', 'alerter_id', 'expression', 'direction', 'period', 'interval'
]
class Dampening(ApiOject):
__slots__ = [
'trigger_id', 'trigger_mode', 'type', 'eval_true_setting',
'eval_total_setting', 'eval_time_setting', 'dampening_id'
]
class FullTrigger(ApiOject):
defaults = {
'conditions': [],
'dampenings': []
}
__slots__ = [
'trigger', 'dampenings', 'conditions'
]
def __init__(self, dictionary=dict()):
udict = FullTrigger.transform_dict_to_underscore(dictionary)
self.trigger = Trigger(udict.get('trigger'))
self.dampenings = Dampening.list_to_object_list(udict.get('dampenings'))
self.conditions = Dampening.list_to_object_list(udict.get('conditions'))
class GroupMemberInfo(ApiOject):
__slots__ = [
'group_id', 'member_id', 'member_name', 'member_description', 'member_context',
'member_tags', 'data_id_map'
]
class GroupConditionsInfo(ApiOject):
__slots__ = [
'conditions', 'data_id_member_map'
]
defaults = {
'conditions': []
}
def addCondition(self, c):
self.conditions.append(c)
class TriggerType:
STANDARD = 'STANDARD'
GROUP = 'GROUP'
DATA_DRIVEN_GROUP = 'DATA_DRIVEN_GROUP'
MEMBER = 'MEMBER'
ORPHAN = 'ORPHAN'
class TriggerMode:
FIRING = 'FIRING'
AUTORESOLVE = 'AUTORESOLVE'
class DampeningType:
STRICT = 'STRICT'
RELAXED_COUNT = 'RELAXED_COUNT'
RELAXED_TIME = 'RELAXED_TIME'
STRICT_TIME = 'STRICT_TIME'
STRICT_TIMEOUT = 'STRICT_TIMEOUT'
class ConditionType:
AVAILABILITY = 'AVAILABILITY'
COMPARE = 'COMPARE'
STRING = 'STRING'
THRESHOLD = 'THRESHOLD'
RANGE = 'RANGE'
EXTERNAL = 'EXTERNAL'
EVENT = 'EVENT'
RATE = 'RATE'
MISSING = 'MISSING'
class Operator:
LT = 'LT'
GT = 'GT'
LTE = 'LTE'
GTE = 'GTE'
class Severity:
LOW = 'LOW'
MEDIUM = 'MEDIUM'
HIGH = 'HIGH'
CRITICAL = 'CRITICAL'
class HawkularAlertsClient(HawkularBaseClient):
def list_triggers(self, ids=[], tags=[]):
ids = ','.join(ids)
tags = ','.join(tags)
url = self._service_url('triggers', {'tags': tags, 'ids': ids})
triggers_dict = self._get(url)
return Trigger.list_to_object_list(triggers_dict)
def create_trigger(self, trigger):
data = self._serialize_object(trigger)
if isinstance(trigger, FullTrigger):
returned_dict = self._post(self._service_url(['triggers', 'trigger']), data)
return FullTrigger(returned_dict)
else:
returned_dict = self._post(self._service_url('triggers'), data)
return Trigger(returned_dict)
def get_trigger(self, trigger_id, full=False):
if full:
returned_dict = self._get(self._service_url(['triggers', 'trigger', trigger_id]))
return FullTrigger(returned_dict)
else:
returned_dict = self._get(self._service_url(['triggers', trigger_id]))
return Trigger(returned_dict)
def create_group_trigger(self, trigger):
data = self._serialize_object(trigger)
return Trigger(self._post(self._service_url(['triggers', 'groups']), data))
def create_group_member(self, member):
data = self._serialize_object(member)
return Trigger(self._post(self._service_url(['triggers', 'groups', 'members']), data))
def create_group_conditions(self, group_id, trigger_mode, conditions):
data = self._serialize_object(conditions)
url = self._service_url(['triggers', 'groups', group_id, 'conditions', trigger_mode])
response = self._put(url, data)
return Condition.list_to_object_list(response)
def list_dampenings(self, trigger_id):
url = self._service_url(['triggers', trigger_id, 'dampenings'])
data = self._get(url)
return Dampening.list_to_object_list(data)
|
Python
| 0
|
@@ -2161,33 +2161,33 @@
onditions =
-Dampening
+Condition
.list_to_obj
|
e883d625bf78c52d4f1206f13ef64e53df23c3dd
|
Add a tool for getting the current schema. Not sure if this could break things. Concurrency might be a bitch.
|
multi_schema/schema.py
|
multi_schema/schema.py
|
Python
| 0.000001
|
@@ -0,0 +1,257 @@
+from django.db import models%0Afrom .models import Schema%0A%0Adef get_schema():%0A cursor = models.connection.cursor()%0A cursor.execute('SHOW search_path')%0A search_path = cursor.fetchone()%5B0%5D%0A return Schema.objects.get(schema=search_path.split(',')%5B0%5D)%0A
|
|
6306288b7b65481a7e0706d3515d673b2344d2f0
|
Bump version
|
hbmqtt/__init__.py
|
hbmqtt/__init__.py
|
# Copyright (c) 2015 Nicolas JOUANIN
#
# See the file license.txt for copying permission.
VERSION = (0, 2, 0, 'final', 0)
|
Python
| 0
|
@@ -102,20 +102,20 @@
(0,
-2
+3
, 0, '
-final
+alpha
', 0
|
6076b6a7824072b97936aaa3da3ba1acf2bc87d6
|
Bump version
|
mythril/__version__.py
|
mythril/__version__.py
|
"""This file contains the current Mythril version.
This file is suitable for sourcing inside POSIX shell, e.g. bash as well
as for importing into Python.
"""
__version__ = "v0.21.6"
|
Python
| 0
|
@@ -178,7 +178,7 @@
.21.
-6
+7
%22%0A
|
f7a02791db913353a7bef7afbe71d19a0bbd645b
|
make logging work
|
chatdemo.py
|
chatdemo.py
|
#!/usr/bin/env python
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import tornado.escape
import tornado.ioloop
import tornado.web
import os.path
import uuid
from tornado.concurrent import Future
from tornado import gen
from tornado.options import define, options, parse_command_line
define("port", default=8888, help="run on the given port", type=int)
define("debug", default=False, help="run in debug mode")
class MessageBuffer(object):
def __init__(self):
self.waiters = set()
self.cache = []
self.cache_size = 200
def wait_for_messages(self, cursor=None):
# Construct a Future to return to our caller. This allows
# wait_for_messages to be yielded from a coroutine even though
# it is not a coroutine itself. We will set the result of the
# Future when results are available.
result_future = Future()
if cursor:
new_count = 0
for msg in reversed(self.cache):
if msg["id"] == cursor:
break
new_count += 1
if new_count:
result_future.set_result(self.cache[-new_count:])
return result_future
self.waiters.add(result_future)
return result_future
def cancel_wait(self, future):
self.waiters.remove(future)
# Set an empty result to unblock any coroutines waiting.
future.set_result([])
def new_messages(self, messages):
logging.info("Sending new message to %r listeners", len(self.waiters))
for future in self.waiters:
future.set_result(messages)
self.waiters = set()
self.cache.extend(messages)
if len(self.cache) > self.cache_size:
self.cache = self.cache[-self.cache_size:]
# Making this a non-singleton is left as an exercise for the reader.
global_message_buffer = MessageBuffer()
class MainHandler(tornado.web.RequestHandler):
def get(self):
self.render("index.html", messages=global_message_buffer.cache)
class MessageNewHandler(tornado.web.RequestHandler):
def post(self):
message = {
"id": str(uuid.uuid4()),
"body": self.get_argument("body"),
}
# to_basestring is necessary for Python 3's json encoder,
# which doesn't accept byte strings.
message["html"] = tornado.escape.to_basestring(
self.render_string("message.html", message=message))
if self.get_argument("next", None):
self.redirect(self.get_argument("next"))
else:
self.write(message)
global_message_buffer.new_messages([message])
class MessageUpdatesHandler(tornado.web.RequestHandler):
@gen.coroutine
def post(self):
cursor = self.get_argument("cursor", None)
# Save the future returned by wait_for_messages so we can cancel
# it in wait_for_messages
self.future = global_message_buffer.wait_for_messages(cursor=cursor)
messages = yield self.future
if self.request.connection.stream.closed():
return
self.write(dict(messages=messages))
def on_connection_close(self):
global_message_buffer.cancel_wait(self.future)
def main():
logging.info("Starting hello app")
parse_command_line()
app = tornado.web.Application(
[
(r"/", MainHandler),
(r"/a/message/new", MessageNewHandler),
(r"/a/message/updates", MessageUpdatesHandler),
],
cookie_secret="__TODO:_GENERATE_YOUR_OWN_RANDOM_VALUE_HERE__",
template_path=os.path.join(os.path.dirname(__file__), "templates"),
static_path=os.path.join(os.path.dirname(__file__), "static"),
xsrf_cookies=True,
debug=options.debug,
)
app.listen(options.port)
tornado.ioloop.IOLoop.current().start()
if __name__ == "__main__":
main()
|
Python
| 0.000001
|
@@ -3796,47 +3796,8 @@
():%0A
- logging.info(%22Starting hello app%22)%0A
@@ -4305,16 +4305,56 @@
)
+%0A%0A logging.info(%22Starting hello app%22)
%0A app
|
4d072bf3e1042bd7996f54c9a173bf39ed67c283
|
Remove unused imports
|
promgen/forms.py
|
promgen/forms.py
|
import datetime
from django import forms
from django.core.exceptions import ValidationError
from promgen import models, plugins, prometheus
class ImportConfigForm(forms.Form):
config = forms.CharField(
widget=forms.Textarea(attrs={'rows': 5, 'class': 'form-control'}),
required=False)
url = forms.CharField(
widget=forms.TextInput(attrs={'class': 'form-control'}),
required=False)
file_field = forms.FileField(
widget=forms.FileInput(attrs={'class': 'form-control'}),
required=False)
class ImportRuleForm(forms.Form):
rules = forms.CharField(widget=forms.Textarea, required=True)
class MuteForm(forms.Form):
def validate_datetime(value):
try:
datetime.datetime.strptime(value, '%Y-%m-%d %H:%M')
except:
raise forms.ValidationError('Invalid timestamp')
next = forms.CharField(required=False)
duration = forms.CharField(required=False)
start = forms.CharField(required=False, validators=[validate_datetime])
stop = forms.CharField(required=False, validators=[validate_datetime])
def clean(self):
duration = self.data.get('duration')
start = self.data.get('start')
stop = self.data.get('stop')
if duration:
# No further validation is required if only duration is set
return
if not all([start, stop]):
raise forms.ValidationError('Both start and end are required')
elif datetime.datetime.strptime(start, '%Y-%m-%d %H:%M') > datetime.datetime.strptime(stop, '%Y-%m-%d %H:%M'):
raise forms.ValidationError('Start time and end time is mismatch')
class ExporterForm(forms.ModelForm):
class Meta:
model = models.Exporter
exclude = ['project']
class ServiceForm(forms.ModelForm):
class Meta:
model = models.Service
exclude = []
class ProjectForm(forms.ModelForm):
class Meta:
model = models.Project
exclude = ['service', 'farm']
class ProjectMove(forms.ModelForm):
class Meta:
model = models.Project
exclude = ['farm']
class URLForm(forms.ModelForm):
class Meta:
model = models.URL
exclude = ['project']
class NewRuleForm(forms.ModelForm):
class Meta:
model = models.Rule
exclude = ['service']
widgets = {
'name': forms.TextInput(attrs={'class': 'form-control'}),
'clause': forms.Textarea(attrs={'rows': 5, 'class': 'form-control'}),
}
class RuleForm(forms.ModelForm):
class Meta:
model = models.Rule
exclude = []
widgets = {
'name': forms.TextInput(attrs={'class': 'form-control'}),
'clause': forms.Textarea(attrs={'rows': 5, 'class': 'form-control'}),
}
class RuleCopyForm(forms.Form):
def _choices():
return sorted([
(rule.pk, '<{}> {}'.format(rule.service.name, rule.name)) for rule in models.Rule.objects.all()
], key=lambda r: r[1])
rule_id = forms.TypedChoiceField(coerce=int, choices=_choices)
class FarmForm(forms.ModelForm):
class Meta:
model = models.Farm
exclude = ['source']
class SenderForm(forms.ModelForm):
sender = forms.ChoiceField(choices=[
(entry.module_name, entry.module_name) for entry in plugins.senders()
])
class Meta:
model = models.Sender
exclude = ['content_type', 'object_id']
class HostForm(forms.Form):
hosts = forms.CharField(widget=forms.Textarea)
|
Python
| 0.000001
|
@@ -38,59 +38,8 @@
orms
-%0Afrom django.core.exceptions import ValidationError
%0A%0Afr
@@ -75,20 +75,8 @@
gins
-, prometheus
%0A%0A%0Ac
|
4e6fd7f491adb94e2503443828bd143b17d382c5
|
Fix typo
|
nbs/models/document.py
|
nbs/models/document.py
|
# -*- coding: utf-8 -*-
from nbs.models import db
from nbs.models.misc import TimestampMixin
class PurchaseDocument(db.Model, TimestampMixin):
__tablename__ = 'purchase_document'
__table_args__ = (
db.UniqueConstraint('pos_no', 'number', 'supplier_id'),
)
TYPE_FACTURA_A = 'TYPE_FACTURA_A'
TYPE_PRESUPUESTO = 'TYPE_PRESUPUESTO'
_doc_type = {
TYPE_FACTURA_A: 'Factura A',
TYPE_PRESUPUESTO: 'Presupuesto',
}
STATUS_PENDING = 'STATUS_PENDING'
STATUS_EXPIRED = 'STATUS_EXPIRED'
STATUS_PAID = 'STATUS_PAID'
_doc_status = {
STATUS_PENDING: 'Pendiente',
STATUS_EXPIRED: 'Vencida',
STATUS_PAID: 'Pagada',
}
id = db.Column(db.Integer, primary_key=True)
document_type = db.Column(db.Enum(*_doc_type.keys(), name='doc_type'),
default=TYPE_FACTURA_A)
#: invoice point of sale number
pos_no = db.Column(db.Integer, nullable=True)
number = db.Column(db.Integer, nullable=True)
amount = db.Column(db.Numeric(10, 2), nullable=False)
issue_date = db.Column(db.Date)
expiration_date = db.Column(db.Date)
status = db.Column(db.Enum(*_doc_status.keys(), name='doc_status'),
default=STATUS_PENDING)
supplier_id = db.Column(db.Integer, db.ForeignKey('supplier.supplier_id'),
nullable=False)
supplier = db.relationship('Supplier', backref=db.backref('documents',
lazy='dynamic'))
@property
def type_str(self):
return self._doc_type[self.document_type]
@property
def status_str(self):
return self._doc_status[self.status]
@property
def number_display(self):
retval = "%08d" % self.document_number
if self.pos_no:
retval = "%04d-%s" % (self.pos_no, retval)
return retval
class PurchaseOrder(db.Model, TimestampMixin):
__tablename__ = 'purchase_order'
STATUS_CANCELLED = 'STATUS_CANCELLED'
STATUS_QUOTING = 'STATUS_QUOTING'
STATUS_PENDING = 'STATUS_PENDING'
STATUS_PARTIAL = 'STATUS_PARTIAL'
STATUS_CONFIRMED = 'STATUS_CONFIRMED'
STATUS_CLOSED = 'STATUS_CLOSED'
STATUS_DRAFT = 'STATUS_DRAFT'
_order_status = {
STATUS_CANCELLED: 'Cancelada',
STATUS_QUOTING: 'Presupuestando',
STATUS_PENDING: 'Pendiente',
STATUS_PARTIAL: 'Parcial',
STATUS_CONFIRMED: 'Confirmada',
STATUS_CLOSED: 'Cerrada',
STATUS_DRAFT: 'Borrador',
}
NOTIFY_EMAIL = 'NOTIFY_EMAIL'
NOTIFY_FAX = 'NOTIFY_FAX'
NOTIFY_PHONE = 'NOTIFY_PHONE'
NOTIFY_PERSONALLY = 'NOTIFY_PERSONALLY'
_notify = {
NOTIFY_EMAIL: 'Correo Electrónico',
NOTIFY_FAX: 'Fax',
NOTIFY_PHONE: 'Telefónico',
NOTIFY_PERSONALLY: 'Personalmente',
}
id = db.Column(db.Integer, primary_key=True)
number = db.Column(db.Integer)
issue_date = db.Column(db.DateTime)
notes = db.Column(db.UnicodeText)
status = db.Column(db.Enum(*_order_status.keys(), name='order_status'),
default=STATUS_DRAFT)
notify = db.Column(db.Enum(*_notify.keys(), name='notify'),
default=NOTIFY_EMAIL)
supplier_id = db.Column(db.Integer, db.ForeignKey('supplier.supplier_id'))
supplier = db.relationship('Supplier',
backref=db.backref('orders', lazy='dynamic'))
def add_item(self, item, position=None):
assert isinstance(item, PurchaseOrderItem)
if position is not None and position <= self.items.count():
position = max(position, 1)
self.reindex_items(position, 1)
item.order_index = position
else:
item.order_index = self.items.count() + 1
self.items.append(item)
def reindex_items(self, start=1, shift=0):
items = self.items.filter(PurchaseOrderItem.order_index>=start).all()
for idx, item in reversed(list(enumerate(items, start=(start+shift)))):
# This modifies unique key so do in subtransactions
with db.session.begin(subtransactions=True):
item.order_index = idx
@property
def status_str(self):
return self._order_status[self.status]
@property
def notify_str(self):
return self._notify[self.notify]
class PurchaseOrderItem(db.Model):
__tablename__ = 'purchase_orderitem'
__table_args__ = (
db.UniqueConstraint('order_index', 'order_id'),
db.UniqueConstraint('sku', 'order_id'),
)
id = db.Column(db.Integer, primary_key=True)
sku = db.Column(db.Unicode) # codigo producto
description = db.Column(db.Unicode)
quantity = db.Column(db.Integer, nullable=False)
received_quantity = db.Column(db.Integer, default=0)
order_index = db.Column(db.Integer, nullable=False)
order_id = db.Column(db.Integer, db.ForeignKey('purchase_order.id'),
nullable=False)
order = db.relationship(PurchaseOrder,
backref=db.backref('items', lazy='dynamic',
order_by=order_index))
def __repr__(self):
return "<PurchaseOrderItem {} '{} {} * {}' of PO{}>".format(
self.order_index, self.sku, self.description, self.quantity
self.order.number)
|
Python
| 0.999999
|
@@ -5406,16 +5406,17 @@
quantity
+,
%0A
|
e906e108ab5118ec1c8856a54b8ebe1fd69484ac
|
Add shebang to and update permissions of servefiles.py
|
servefiles/servefiles.py
|
servefiles/servefiles.py
|
import os
import socket
import struct
import sys
import threading
import time
import urllib
try:
from SimpleHTTPServer import SimpleHTTPRequestHandler
from SocketServer import TCPServer
from urlparse import urljoin
from urllib import pathname2url, quote
except ImportError:
from http.server import SimpleHTTPRequestHandler
from socketserver import TCPServer
from urllib.parse import urljoin, quote
from urllib.request import pathname2url
if len(sys.argv) < 3:
print("Usage: " + sys.argv[0] + " <ip> <file/directory>")
sys.exit(1)
ip = sys.argv[1]
directory = sys.argv[2]
if not os.path.exists(directory):
print(directory + ": No such file or directory.")
sys.exit(1)
print("Preparing data...")
baseUrl = [(s.connect(('8.8.8.8', 53)), s.getsockname()[0], s.close()) for s in [socket.socket(socket.AF_INET, socket.SOCK_DGRAM)]][0][1] + ":8080/"
payload = ""
if os.path.isfile(directory):
if directory.endswith(('.cia', '.tik')):
payload += baseUrl + quote(os.path.basename(directory))
directory = os.path.dirname(directory)
else:
for file in [ file for file in next(os.walk(directory))[2] if file.endswith(('.cia', '.tik')) ]:
payload += baseUrl + quote(file) + "\n"
if len(payload) == 0:
print("No files to serve.")
sys.exit(1)
if not directory == "":
os.chdir(directory)
print("")
print("URLS:")
print(payload)
print("")
print("Opening HTTP server on port 8080...")
server = TCPServer(("", 8080), SimpleHTTPRequestHandler)
thread = threading.Thread(target=server.serve_forever)
thread.start()
try:
print("Sending URL(s) to " + ip + ":5000...")
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((ip, 5000))
try:
payloadBytes = bytes(payload, "ascii")
except:
payloadBytes = payload.encode("ascii")
networkPayload = struct.pack('!L', len(payloadBytes)) + payloadBytes
sentLength = 0
while sentLength < len(networkPayload):
sent = sock.send(networkPayload[sentLength:])
if sent == 0:
raise RuntimeError("Socket connection broken.")
sentLength += sent
while len(sock.recv(1)) < 1:
time.sleep(0.05)
sock.close()
except Exception as e:
print("Error: " + str(e))
print("Shutting down HTTP server...")
server.shutdown()
|
Python
| 0
|
@@ -1,12 +1,26 @@
+#!/bin/python%0A
import os%0Aim
|
d45e2237cccf9a29db93fd485de34b9cc4dc3cfe
|
Update gpio.py
|
02traffic_python/gpio.py
|
02traffic_python/gpio.py
|
##########################################################
# * Python GPIO Functions for Traffic Signal Simulation
# * using Baglebone Black running Debian 7 Linux distribution
##########################################################
# * Developed by MicroEmbedded Technologies
##########################################################
import sys
import os
SYSFS_GPIO_DIR = "/sys/class/gpio"
def gpioUnexport (gpio):
try:
fo = open(SYSFS_GPIO_DIR + "/unexport","w")
fo.write(gpio)
fo.close()
return
except IOError:
return
def gpioExport (gpio):
try:
fo = open(SYSFS_GPIO_DIR + "/export","w")
fo.write(gpio)
fo.close()
return
except IOError:
return
def gpioSetDir (gpio, flag):
try:
fo = open(SYSFS_GPIO_DIR + "/gpio" + gpio + "/direction" ,"w")
fo.write(flag)
fo.close()
return
except IOError:
return
def gpioSetVal (gpio, val):
try:
fo = open(SYSFS_GPIO_DIR + "/gpio" + gpio + "/value" ,"w")
fo.write(val)
fo.close()
return
except IOError:
return
|
Python
| 0.000001
|
@@ -232,111 +232,8 @@
####
-%0A# * Developed by MicroEmbedded Technologies%0A##########################################################
%0A%0Aim
|
1aa45fa857a6af15a7e29eab08a63ac594738c6e
|
clean up
|
html_cluster/commands/download_html.py
|
html_cluster/commands/download_html.py
|
import os
import json
import base64
import click
import requests
import htmlmin
from html_cluster.settings import (
HTML_CLUSTER_DATA_DIRECTORY, SPLASH_URL, USER_AGENT, SPLASH_TIMEOUT
)
from html_cluster.utils.common import file_name
from html_cluster.utils.html import is_html_page_from_string
from html_cluster.utils.url import FileUrlsReader
HELP = '''
'''
SHORT_HELP = 'Download the html from the urls and store it in a folder.'
def splash_request(url, splash_url):
splash_url = splash_url.rstrip('/') + '/render.json'
headers = {
'content-type': 'application/json',
'user-agent': USER_AGENT
}
params = {
'html': 1,
'png': 1,
'width': 400,
'height': 300,
'timeout': SPLASH_TIMEOUT,
'images': 0,
'url': url
}
return requests.get(splash_url, headers=headers, params=params)
def make_request(url, **kwargs):
if 'splash' in kwargs and kwargs['splash']:
if 'splash_url' in kwargs:
return splash_request(url, kwargs['splash_url'])
else:
return splash_request(url, SPLASH_URL)
return requests.get(url, headers={'user-agent': USER_AGENT})
def download_html(urls_file, output_directory, is_splash_request_enable=False, splash_url=SPLASH_URL):
if not os.path.isfile(urls_file):
click.echo('The {} file does not exits.'.format(urls_file))
click.Context.exit(1)
if not os.path.exists(output_directory):
os.makedirs(output_directory)
urls = FileUrlsReader(urls_file).read()
for url in urls:
click.echo(click.style('Downloading {}'.format(url)))
try:
r = make_request(
url, splash=is_splash_request_enable, splash_url=splash_url
)
html = r.text
if 'text/html' not in r.headers['Content-Type']:
click.echo(
click.style(' --> The url {} is not an html file. Content-Type: {}'.format(url, r.headers['Content-Type']), fg='red')
)
continue
if r.status_code == requests.codes.ok:
html_file_name = file_name(url)
click.echo(
click.style(
' --> Saving {}/{}'.format(output_directory, html_file_name), fg='green'
)
)
if is_splash_request_enable:
json_response = json.loads(html)
html = json_response['html']
if is_html_page_from_string(html):
with open('{}/{}.html'.format(output_directory, html_file_name), 'w') as html_file:
html_file.write(htmlmin.minify(html, remove_comments=True))
if is_splash_request_enable:
with open('{}/{}.png'.format(output_directory, html_file_name), 'wb') as png_file:
png_file.write(base64.b64decode(json_response['png']))
else:
click.echo(
click.style(' --> The url {} is not a html file'.format(url), fg='red')
)
else:
click.echo(
click.style(
' --> The {} return a bad status code ({}).'.format(url, r.status_code), fg='red'
)
)
except Exception as e:
print(' --> Oh noes! {}'.format(e))
@click.command(help=HELP, short_help=SHORT_HELP)
@click.argument('urls_file')
@click.option('--output-directory', default=HTML_CLUSTER_DATA_DIRECTORY)
@click.option('--splash-enabled/--no-splash-enabled', default=False)
@click.option('--splash-url', default=SPLASH_URL)
def cli(urls_file, output_directory, splash_enabled, splash_url):
download_html(urls_file, output_directory, splash_enabled, splash_url)
|
Python
| 0.000001
|
@@ -384,20 +384,16 @@
ownload
-the
html fro
|
358f200348b06096dcd2562a8d377efe706ff8ce
|
clean up
|
classify.py
|
classify.py
|
""" Classify the output based on the choice neuron in the last layer with the highest probability."""
import numpy as np
#import chainer
from chainer import cuda, Variable, Chain, optimizers, serializers
import chainer.links as L
import chainer.functions as F
class BaseNetwork(Chain):
"""
BaseNetwork is the underlying NN. Takes in the
following parameters:
- inputSize: Total length of an input vector
- hidden_layers: An array of ints, corresponding to number of neurons in each layer.
- choices: Total number of possible choices.
"""
def __init__(self, num_input_nodes, hidden_layers, num_exit_nodes):
if len(hidden_layers) == 0:
raise Exception("Net must have hidden layers")
layers = [L.Linear(num_input_nodes, hidden_layers[0])];
for i in range(0, len(hidden_layers)-1):
print hidden_layers[i]
print hidden_layers[i+1]
layers.append(L.Linear(hidden_layers[i], hidden_layers[i+1]))
layers.append(L.Linear(hidden_layers[-1], num_exit_nodes))
super(BaseNetwork, self).__init__(
l1 = layers[0],
l2 = layers[1]
);
for i in range(2, len(layers)):
super(BaseNetwork, self).add_link(i, layers[i])
"""Call the network -- given x (an input vector), call each layer and return
the final result."""
def __call__(self, x):
h1 = F.relu(self.l1(x))
h2 = F.relu(self.l2(h1))
h3 = F.relu(self.l3(h2))
h4 = F.relu(self.l4(h3))
y = self.l5(h2)
return y
class ClassificationTrainer(object):
"""Train a classifier on some labeled data.
"""
def __init__(self, data, target, nn_size, model_filename="", optimizer_filename=""):
""" Must submit either a net configuration, or something to load from """
if nn_sizes == [] and model_filename == "":
raise Exception("Must provide a net configuration or a file to load from")
""" Divide the data into training and test """
self.trainsize = int(len(data) * 5 / 6)
self.testsize = len(data) - self.trainsize
self.x_train, self.x_test = np.split(data, [self.trainsize])
self.y_train, self.y_test = np.split(target, [self.trainsize])
""" Create the underlying neural network model """
print set(target)
self.model = L.Classifier(BaseNetwork(len(data[0]), nn_sizes, len(set(target))))
if (model_filename != ""):
serializers.load_hdf5(model_filename, self.model)
""" Create the underlying optimizer """
self.optimizer = optimizers.Adam()
self.optimizer.setup(self.model)
if (optimizer_filename != ""):
serializers.load_hdf5(optimizer_filename, self.optimizer)
def learn(self, numEpochs, batchsize):
"""Train the classifier for a given number of epochs, with a given batchsize"""
for epoch in range(numEpochs):
print('epoch %d' % epoch)
indexes = np.random.permutation(self.trainsize)
for i in range(0, self.trainsize, batchsize):
x = Variable(self.x_train[indexes[i: i + batchsize]])
t = Variable(self.y_train[indexes[i: i + batchsize]])
self.optimizer.update(self.model, x, t)
def eval(self, batchsize):
"""Evaluate how well the classifier is doing. Return mean loss and mean accuracy"""
sum_loss, sum_accuracy = 0, 0
for i in range(0, self.testsize, batchsize):
x = Variable(self.x_test[i: i + batchsize])
y = Variable(self.y_test[i: i + batchsize])
loss = self.model(x, y)
sum_loss += loss.data * batchsize
sum_accuracy += self.model.accuracy.data * batchsize
return sum_loss / self.testsize, sum_accuracy / self.testsize
def save(self, model_filename, optimizer_filename):
""" Save the state of the model & optimizer to disk """
serializers.save_hdf5(model_filename, self.model)
serializers.save_hdf5(optimizer_filename, self.optimizer)
def classify(self, vector):
""" Run this over a phrase and see the result """
# XXX: this is kind of a hack.
x = Variable(np.asarray([vector]))
return self.model.predictor(x).data
|
Python
| 0
|
@@ -860,80 +860,8 @@
1):%0A
- print hidden_layers%5Bi%5D%0A print hidden_layers%5Bi+1%5D%0A
@@ -1053,17 +1053,17 @@
l
-1
+0
= layer
@@ -1081,17 +1081,17 @@
l
-2
+1
= layer
@@ -1192,17 +1192,22 @@
dd_link(
-i
+str(i)
, layers
@@ -1358,153 +1358,116 @@
-h1 = F.relu(self.l1(x))%0A h2 = F.relu(self.l2(h1))%0A h3 = F.relu(self.l3(h2))%0A h4 = F.relu(self.l4(h3))%0A y = self.l5(h2
+layer_result = x%0A for layer in self.children():%0A layer_result = F.relu(layer(layer_result)
)%0A
@@ -1479,17 +1479,28 @@
return
-y
+layer_result
%0A%0Aclass
@@ -1623,23 +1623,29 @@
target,
-nn_size
+hidden_layers
, model_
@@ -1774,23 +1774,28 @@
if
-nn_size
+hidden_layer
s == %5B%5D
@@ -2352,15 +2352,20 @@
%5D),
-nn_size
+hidden_layer
s, l
|
eb1c75dd167ed66d60762d722e496e756efc7e38
|
fix bug in Model.py
|
ARCCSSive/CMIP5/Model.py
|
ARCCSSive/CMIP5/Model.py
|
#!/usr/bin/env python
"""
Copyright 2015 ARC Centre of Excellence for Climate Systems Science
author: Scott Wales <scott.wales@unimelb.edu.au>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import print_function
from sqlalchemy import Column, Integer, String, Boolean, ForeignKey, Date, UniqueConstraint
from sqlalchemy.orm import relationship
from sqlalchemy.ext.declarative import declarative_base
from ARCCSSive.data import *
import os
import glob
Base = declarative_base()
class Instance(Base):
"""
A model variable from a specific run
Search through these using :func:`ARCCSSive.CMIP5.Session.outputs()`
.. attribute:: variable
Variable name
.. attribute:: experiment
CMIP experiment
.. attribute:: mip
MIP table specifying output frequency
.. attribute:: model
Model that generated the dataset
.. attribute:: ensemble
Ensemble member
.. attribute:: versions
List of :class:`Version` available for this output
"""
__tablename__ = 'instances'
id = Column(Integer, name='instance_id', primary_key = True)
variable = Column(String, index=True)
experiment = Column(String, index=True)
mip = Column(String, index=True)
model = Column(String, index=True)
ensemble = Column(String)
realm = Column(String)
# there will be new versions labelled 've' (version estimate) in drstree so using only timestamp to order them
# order doesn't work if version NA
versions = relationship('Version', order_by='Version.version', backref='variable')
__table_args__ = (
UniqueConstraint('variable','experiment','mip','model','ensemble'),
)
def latest(self):
"""
Returns latest version/s available on raijin, first check in any version is_latest, then checks date stamp
"""
if len(self.versions)==1: return self.versions
vlatest=[v for v in self.versions if v.is_latest]
if vlatest==[]:
valid=[v for v in self.versions if v.version!="NA"]
if valid==[]: return self.versions
valid.sort(key=lambda x: x.version[:-8])
vlatest.append(valid[-1])
i=-2
while i>=-len(valid) and valid[i].version[:-8]==vlatest[0].version[:-8]:
vlatest.append(valid[i])
i+=-1
return vlatest
def filenames(self):
"""
Returns the file names from the latest version of this variable
:returns: List of file names
"""
return self.latest()[0].filenames()
def drstree_path(self):
"""
Returns the drstree path for this instance, if one is not yet available returns None
"""
drstreep="/g/data1/ua6/drstree/CMIP5/GCM/" # this should be passed as DRSTREE env var
frequency=mip_dict[self.mip][0]
return drstreep + "/".join([ self.model,
self.experiment,
frequency,
self.realm,
self.variable,
self.ensemble])
# Add alias to deprecated name
Variable = Instance
class Version(Base):
"""
A version of a model run's variable
.. attribute:: version
Version identifier
.. attribute:: path
Path to the output directory
.. attribute:: variable
:class:`Variable` associated with this version
.. attribute:: warnings
List of :class:`VersionWarning` available for this output
.. attribute:: files
List of :class:`VersionFile` available for this output
.. testsetup::
>>> cmip5 = getfixture('session')
>>> version = cmip5.query(Version).first()
"""
__tablename__ = 'versions'
id = Column(Integer, name='version_id', primary_key = True)
instance_id = Column(Integer, ForeignKey('instances.instance_id'), index=True)
version = Column(String)
path = Column(String)
dataset_id = Column(String)
is_latest = Column(Boolean)
checked_on = Column(String)
to_update = Column(Boolean)
warnings = relationship('VersionWarning', order_by='VersionWarning.id',
backref='version', cascade="all, delete-orphan", passive_deletes=True)
files = relationship('VersionFile', order_by='VersionFile.id',
backref='version', cascade="all, delete-orphan", passive_deletes=True)
def glob(self):
"""
Get the glob string matching the CMIP5 filename
.. testsetup::
>>> import six
>>> cmip5 = getfixture('session')
>>> version = cmip5.query(Version).first()
>>> six.print_(version.glob())
a_6hrLev_c_d_e*.nc
"""
return '%s_%s_%s_%s_%s*.nc'%(
self.variable.variable,
self.variable.mip,
self.variable.model,
self.variable.experiment,
self.variable.ensemble)
def build_filepaths(self):
"""
Returns the list of files matching this version
:returns: List of file names
.. testsetup::
>>> cmip5 = getfixture('session')
>>> version = cmip5.query(Version).first()
>>> version.build_filepaths()
[]
"""
g = os.path.join(self.path, self.glob())
return glob.glob(g)
def filenames(self):
"""
Returns the list of filenames for this version
:returns: List of file names
.. testsetup::
>>> cmip5 = getfixture('session')
>>> version = cmip5.query(Version).first()
>>> version.filenames()
[]
"""
return [x.filename for x in self.files]
def tracking_ids(self):
"""
Returns the list of tracking_ids for files in this version
:returns: List of tracking_ids
.. testsetup::
>>> cmip5 = getfixture('session')
>>> version = cmip5.query(Version).first()
>>> version.tracking_ids()
[]
"""
return [x.tracking_id for x in self.files]
class VersionWarning(Base):
"""
Warnings associated with a output version
"""
__tablename__ = 'warnings'
id = Column(Integer, name='warning_id', primary_key = True)
warning = Column(String)
added_by = Column(String)
added_on = Column(Date)
version_id = Column(Integer, ForeignKey('versions.version_id'), index=True)
def __str__(self):
return u'%s (%s): %s'%(self.added_on, self.added_by, self.warning)
class VersionFile(Base):
"""
Files associated with a output version
"""
__tablename__ = 'files'
id = Column(Integer, name='file_id', primary_key = True)
filename = Column(String)
tracking_id = Column(String)
md5 = Column(String)
sha256 = Column(String)
version_id = Column(Integer, ForeignKey('versions.version_id'), index = True)
def __str__(self):
return '%s'%(self.filename)
|
Python
| 0
|
@@ -2683,19 +2683,19 @@
version%5B
-:
-8
+:
%5D)%0A
@@ -2798,19 +2798,19 @@
version%5B
-:
-8
+:
%5D==vlate
@@ -2827,11 +2827,11 @@
ion%5B
-:
-8
+:
%5D:%0A
|
ef59a8e94b4cd38906e62e2f9832a37cbf65c399
|
Add note on heuristic for A*
|
transit.py
|
transit.py
|
#!/usr/bin/env python3
import argparse
import collections
import re
import string
import sys
#
# Hex Grid Calculations
#
# http://www.redblobgames.com/grids/hexagons/#coordinates
# SWN hex grids are an "odd-q" layout (flat bottoms, high top-left corner) with "offset" coordinates.
def offset_to_cube(col, row):
x = col
z = row - (col - (col % 2)) // 2
y = -1 * x - z
return CubeCoord(x, z, y)
def cube_distance(a, b):
return max(abs(a.x - b.x), abs(a.y - b.y), abs(a.z - b.z))
#
# Read
#
grid_pattern = re.compile(r'GRID (?P<x>[0-9]{2})(?P<y>[0-9]{2})')
CubeCoord = collections.namedtuple("CubeCoord", ["x", "z", "y"])
OffsetCoord = collections.namedtuple("OffsetCoord", ["x", "y"])
System = collections.namedtuple("System", ["name", "offset", "cube"])
def read_tiddlywiki(input):
import bs4
systems = []
soup = bs4.BeautifulSoup(input)
for d in soup.find_all("div"):
if "title" in d.attrs and d.attrs["title"].startswith("System:"):
# Get system name
name = string.capwords(d.attrs["title"][7:])
# Get coords from hex number
match = grid_pattern.search(d.text)
raw_coords = match.groupdict()
x = int(raw_coords["x"])
y = int(raw_coords["y"])
# Store both coord systems
offset_coords = OffsetCoord(x, y)
cube_coords = offset_to_cube(*offset_coords)
systems.append(System(
name=name,
offset=offset_coords,
cube=cube_coords,
))
return systems
#
# Write
#
def write_tsv(output, systems):
for system in systems:
prefix = ""
for e in system:
output.write(prefix)
prefix = "\t"
if isinstance(e, tuple):
output.write(",".join(str(p) for p in e))
else:
output.write(str(e))
output.write("\n")
#def write_graphml():
# pass
#
# Main
#
def convert(input, output, drive_level, output_func):
# Open streams if required
if input == "-":
convert(sys.stdin, output, drive_level, output_func)
elif isinstance(input, str):
with open(input, "r") as i:
convert(i, output, drive_level, output_func)
elif output == "-":
convert(input, sys.stdout, drive_level, output_func)
elif isinstance(output, str):
with open(output, "w") as o:
convert(input, o, drive_level, output_func)
# Do actual conversion
else:
systems = read_tiddlywiki(input)
output_func(output, systems)
def main():
output_fmts = {n[6:]: f for n,f in globals().items() if n.startswith("write_")}
# TODO add subcommands? drive isn't always needed if tsv won't contain distance info (any readable way to encode that info in tsv?).
# TODO probably better to use a table rather than a graph for distance info output? ==> Can't show the route with a table, but could have alternative output mode to show the minimum distance that needs to be travelled between two systems for a particular spike drive level.
# TODO is there any way to show all drive levels at once? ==> Color coding? Note: lower drive levels may not be able to get everywhere in the sector, higher drive levels may be able to take shorter paths
# TODO need to include elapsed time information? The only time it isn't 6 days per jump is when jumping less than the maximum distance the spike drive can travel, ex: drive level 3 jumps 1 hex in (6*1)/3=2 days, 2 hexes in (6*2)/3=4 days, 3 hexes in (6*3)/3=6 days
# TODO: note that the visualisation of a spike drive level >1 is there are more connections added between the nodes (Sectors), with different time weights attached to them (drive level =1, at most one connection between nodes, of six day length). Should be able to use standard shortest-path algos on this problem.
parser = argparse.ArgumentParser(description="Convert system data from a TiddlyWiki created by SWN Sector Generator into ship transit data.")
parser.add_argument("-o", "--output-format", default="tsv", choices=sorted(output_fmts), help="Format to use for the output. Default: tsv")
parser.add_argument("drive", type=int, choices=range(1,6+1), help="Starship drive level.")
parser.add_argument("input", help="TiddlyWiki html to read. Use - for stdin.")
parser.add_argument("output", help="graphml file to write. Use - for stdout.")
args = parser.parse_args()
convert(args.input, args.output, args.drive, output_fmts[args.output_format])
if __name__ == "__main__":
main()
|
Python
| 0
|
@@ -3924,16 +3924,147 @@
problem.
+ Heuristic is point-to-point (single-hop, infinite drive level) distance (can precomputed easily)? ==%3E Admissiblity of that for A*?
%0A%0A pa
|
f76a2896eb806cf8a8f5c35e579abe0c22461824
|
Use nicer Event repr
|
indico/modules/events/models/events.py
|
indico/modules/events/models/events.py
|
# This file is part of Indico.
# Copyright (C) 2002 - 2015 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from contextlib import contextmanager
from sqlalchemy.dialects.postgresql import JSON
from indico.core.db.sqlalchemy import db
from indico.core.db.sqlalchemy.protection import ProtectionManagersMixin
from indico.modules.events.logs import EventLogEntry
from indico.util.caching import memoize_request
from indico.util.decorators import classproperty
from indico.util.string import return_ascii, to_unicode
from indico.web.flask.util import url_for
class Event(ProtectionManagersMixin, db.Model):
"""An Indico event
This model contains the most basic information related to an event.
Note that the ACL is currently only used for managers but not for
view access!
"""
__tablename__ = 'events'
__table_args__ = (db.CheckConstraint("(logo IS NULL) = (logo_metadata::text = 'null')", 'valid_logo'),
db.CheckConstraint("(stylesheet IS NULL) = (stylesheet_metadata::text = 'null')",
'valid_stylesheet'),
{'schema': 'events'})
disallowed_protection_modes = frozenset()
inheriting_have_acl = True
__logging_disabled = False
#: The ID of the event
id = db.Column(
db.Integer,
primary_key=True
)
#: If the event has been deleted
is_deleted = db.Column(
db.Boolean,
nullable=False,
default=False
)
#: The ID of the user who created the event
creator_id = db.Column(
db.Integer,
db.ForeignKey('users.users.id'),
nullable=False
)
#: The metadata of the logo (hash, size, filename, content_type)
logo_metadata = db.Column(
JSON,
nullable=False,
default=None
)
#: The logo's raw image data
logo = db.deferred(db.Column(
db.LargeBinary,
nullable=True
))
#: The metadata of the stylesheet (hash, size, filename)
stylesheet_metadata = db.Column(
JSON,
nullable=False,
default=None
)
#: The stylesheet's raw image data
stylesheet = db.deferred(db.Column(
db.Text,
nullable=True
))
#: The ID of the event's default page (conferences only)
default_page_id = db.Column(
db.Integer,
db.ForeignKey('events.pages.id'),
index=True,
nullable=True
)
#: The user who created the event
creator = db.relationship(
'User',
lazy=True,
backref=db.backref(
'created_events',
lazy='dynamic'
)
)
#: The event's default page (conferences only)
default_page = db.relationship(
'EventPage',
lazy=True,
foreign_keys=[default_page_id],
# don't use this backref. we just need it so SA properly NULLs
# this column when deleting the default page
backref=db.backref('_default_page_of_event', lazy=True)
)
#: The ACL entries for the event
acl_entries = db.relationship(
'EventPrincipal',
backref='event_new',
cascade='all, delete-orphan',
collection_class=set
)
# relationship backrefs:
# - layout_images (ImageFile.event_new)
# - menu_entries (MenuEntry.event_new)
# - custom_pages (EventPage.event_new)
# - surveys (Survey.event_new)
@property
@memoize_request
def as_legacy(self):
"""Returns a legacy `Conference` object (ZODB)"""
from MaKaC.conference import ConferenceHolder
return ConferenceHolder().getById(self.id, None)
@property
def protection_parent(self):
return self.as_legacy.getOwner()
@property
def has_logo(self):
return self.logo_metadata is not None
@property
def logo_url(self):
return url_for('event_images.logo_display', self, slug=self.logo_metadata['hash'])
@property
def has_stylesheet(self):
return self.stylesheet_metadata is not None
@property
def locator(self):
return {'confId': self.id}
@property
def title(self):
return to_unicode(self.as_legacy.getTitle())
@property
@contextmanager
def logging_disabled(self):
"""Temporarily disables event logging
This is useful when performing actions e.g. during event
creation or at other times where adding entries to the event
log doesn't make sense.
"""
self.__logging_disabled = True
try:
yield
finally:
self.__logging_disabled = False
def can_access(self, user, allow_admin=True):
if not allow_admin:
raise NotImplementedError('can_access(..., allow_admin=False) is unsupported until ACLs are migrated')
from MaKaC.accessControl import AccessWrapper
return self.as_legacy.canAccess(AccessWrapper(user.as_avatar if user else None))
def can_manage(self, user, role=None, allow_key=False, *args, **kwargs):
# XXX: Remove this method once modification keys are gone!
return (super(Event, self).can_manage(user, role, *args, **kwargs) or
(allow_key and self.as_legacy.canKeyModify()))
def log(self, realm, kind, module, summary, user=None, type_='simple', data=None):
"""Creates a new log entry for the event
:param realm: A value from :class:`.EventLogRealm` indicating
the realm of the action.
:param kind: A value from :class:`.EventLogKind` indicating
the kind of the action that was performed.
:param module: A human-friendly string describing the module
related to the action.
:param summary: A one-line summary describing the logged action.
:param user: The user who performed the action.
:param type_: The type of the log entry. This is used for custom
rendering of the log message/data
:param data: JSON-serializable data specific to the log type.
In most cases the ``simple`` log type is fine. For this type,
any items from data will be shown in the detailed view of the
log entry. You may either use a dict (which will be sorted)
alphabetically or a list of ``key, value`` pairs which will
be displayed in the given order.
"""
if self.__logging_disabled:
return
db.session.add(EventLogEntry(event_id=self.id, user=user, realm=realm, kind=kind, module=module, type=type_,
summary=summary, data=data or {}))
@return_ascii
def __repr__(self):
# TODO: add self.protection_repr once we use it and the title once we store it here
return '<Event({})>'.format(self.id)
# TODO: Remove the next block of code once event acls (read access) are migrated
def _fail(self, *args, **kwargs):
raise NotImplementedError('These properties are not usable until event ACLs are in the new DB')
is_public = classproperty(classmethod(_fail))
is_inheriting = classproperty(classmethod(_fail))
is_protected = classproperty(classmethod(_fail))
protection_repr = property(_fail)
del _fail
|
Python
| 0
|
@@ -1169,16 +1169,29 @@
_unicode
+, format_repr
%0Afrom in
@@ -7460,36 +7460,48 @@
urn
-'%3CEvent(%7B%7D)%3E'.format(self.id
+format_repr(self, 'id', is_deleted=False
)%0A%0A
|
22ab27f9966c19c1f3496e445e460f9ac6400de7
|
Fix doubling order admin when custom order model used
|
shop/admin/orderadmin.py
|
shop/admin/orderadmin.py
|
#-*- coding: utf-8 -*-
from django.contrib import admin
from django.contrib.admin.options import ModelAdmin
from django.utils.translation import ugettext_lazy as _
from shop.models.ordermodel import (Order, OrderItem,
OrderExtraInfo, ExtraOrderPriceField, OrderPayment)
class OrderExtraInfoInline(admin.TabularInline):
model = OrderExtraInfo
extra = 0
class OrderPaymentInline(admin.TabularInline):
model = OrderPayment
extra = 0
class ExtraOrderPriceFieldInline(admin.TabularInline):
model = ExtraOrderPriceField
extra = 0
class OrderItemInline(admin.TabularInline):
model = OrderItem
extra = 0
#TODO: add ExtraOrderItemPriceField inline, ideas?
class OrderAdmin(ModelAdmin):
list_display = ('id', 'user', 'shipping_name', 'status','order_total',
'payment_method', 'created')
list_filter = ('status', 'payment_method', )
search_fields = ('id', 'shipping_name', 'user__username')
date_hierarchy = 'created'
inlines = (OrderItemInline, OrderExtraInfoInline,
ExtraOrderPriceFieldInline, OrderPaymentInline)
readonly_fields = ('created', 'modified',)
fieldsets = (
(None, {'fields': ('user', 'status', 'order_total',
'order_subtotal', 'payment_method', 'created', 'modified')}),
(_('Shipping'), {
'fields': ('shipping_name', 'shipping_address',
'shipping_address2', 'shipping_city', 'shipping_zip_code',
'shipping_state', 'shipping_country',)
}),
(_('Billing'), {
'fields': ('billing_name', 'billing_address',
'billing_address2', 'billing_city', 'billing_zip_code',
'billing_state', 'billing_country',)
}),
)
admin.site.register(Order, OrderAdmin)
|
Python
| 0
|
@@ -156,16 +156,49 @@
azy as _
+%0Afrom django.conf import settings
%0A%0Afrom s
@@ -1835,16 +1835,98 @@
)%0A%0A%0A
+ORDER_MODEL = getattr(settings, 'SHOP_ORDER_MODEL', None)%0Aif not ORDER_MODEL:%0A
admin.si
|
1b3e5d52911f3c623b8f320adadea2d8f3ee226a
|
Implement web service based Pathway Commons client
|
indra/biopax/pathway_commons_client.py
|
indra/biopax/pathway_commons_client.py
|
from indra.java_vm import autoclass, JavaException
def run_pc_query(query_type, source_genes, target_genes=None, neighbor_limit=1):
cpath_client = autoclass('cpath.client.CPathClient').\
newInstance('http://www.pathwaycommons.org/pc2/')
query = cpath_client.createGraphQuery()
query.kind(query_type)
query.sources(source_genes)
query.targets(target_genes)
query.organismFilter(['homo sapiens'])
query.mergeEquivalentInteractions(True)
query.limit(autoclass('java.lang.Integer')(neighbor_limit))
# Execute query
print 'Sending Pathway Commons query...'
model = query.result()
if model is not None:
print 'Pathway Commons query returned model...'
else:
print 'Pathway Commons query returned blank model...'
return model
def owl_to_model(fname):
io_class = autoclass('org.biopax.paxtools.io.SimpleIOHandler')
io = io_class(autoclass('org.biopax.paxtools.model.BioPAXLevel').L3)
try:
file_is = autoclass('java.io.FileInputStream')(fname)
except JavaException:
print 'Could not open data file %s' % fname
return
try:
biopax_model = io.convertFromOWL(file_is)
except JavaException:
print 'Could not convert data file %s to BioPax model' % data_file
return
file_is.close()
return biopax_model
def model_to_owl(model, fname):
io_class = autoclass('org.biopax.paxtools.io.SimpleIOHandler')
io = io_class(autoclass('org.biopax.paxtools.model.BioPAXLevel').L3)
try:
fileOS = autoclass('java.io.FileOutputStream')(fname)
except JavaException:
print 'Could not open data file %s' % fname
return
l3_factory = autoclass('org.biopax.paxtools.model.BioPAXLevel').L3.getDefaultFactory()
model_out = l3_factory.createModel()
for r in model.getObjects().toArray():
model_out.add(r)
io.convertToOWL(model_out, fileOS)
fileOS.close()
|
Python
| 0
|
@@ -1,8 +1,31 @@
+import urllib, urllib2%0A
from ind
@@ -72,43 +72,85 @@
on%0A%0A
-def run_pc_query(query_type
+pc2_url = 'http://www.pathwaycommons.org/pc2/'%0A%0Adef send_request(kind
, source
_gen
@@ -149,22 +149,16 @@
urce
-_genes
, target
_gen
@@ -157,533 +157,845 @@
rget
-_genes=None, neighbor_limit=1):%0A cpath_client = autoclass('cpath.client.CPathClient').%5C%0A newInstance('http://www.pathwaycommons.org/pc2/')%0A query = cpath_client.createGraphQuery()%0A query.kind(query_type)%0A query.sources(source_genes)%0A query.targets(target_genes)%0A query.organismFilter(%5B'homo sapiens'%5D)%0A query.mergeEquivalentInteractions(True)%0A query.limit(autoclass('java.lang.Integer')(neighbor_limit))%0A # Execute query%0A print 'Sending Pathway Commons query...'%0A model = query.result(
+=None):%0A kind_str = kind.lower()%0A if kind not in %5B'neighborhood', 'pathsbetween', 'pathsfromto'%5D:%0A print 'Invalid query type %25s' %25 kind_str%0A return None%0A organism = '9606'%0A if isinstance(source, basestring):%0A source_str = source%0A else:%0A source_str = ','.join(source)%0A params = %7B'kind': kind_str,%0A 'organism': organism,%0A 'source': ','.join(source),%0A 'format': 'BIOPAX'%7D%0A if target is not None:%0A if isinstance(target, basestring):%0A target_str = target%0A else:%0A target_str = ','.join(target)%0A params%5B'target'%5D = target_str%0A %0A print 'Sending Pathway Commons query...'%0A res = urllib2.urlopen(pc2_url + 'graph', data=urllib.urlencode(params))%0A owl_str = res.read()%0A model = owl_str_to_model(owl_str
)%0A
@@ -1064,16 +1064,18 @@
eturned
+a
model...
@@ -1084,75 +1084,444 @@
-else:%0A print 'Pathway Commons query returned blank model...'
+return model%0A%0Adef owl_str_to_model(owl_str):%0A io_class = autoclass('org.biopax.paxtools.io.SimpleIOHandler')%0A io = io_class(autoclass('org.biopax.paxtools.model.BioPAXLevel').L3)%0A bais = autoclass('java.io.ByteArrayInputStream')%0A scs = autoclass('java.nio.charset.StandardCharsets')%0A jstr = autoclass('java.lang.String')%0A istream = bais(jstr(owl_str).getBytes(scs.UTF_8));%0A biopax_model = io.convertFromOWL(istream)
%0A
@@ -1520,32 +1520,39 @@
eam)%0A return
+biopax_
model%0A%0Adef owl_t
|
a7062bb3d87954478f4be23a8ac2cc3d125804e7
|
resolve #13: consecutive blank row are preserved
|
tests/test_bug_fixes.py
|
tests/test_bug_fixes.py
|
#!/usr/bin/python
# -*- encoding: utf-8 -*-
import os
from pyexcel_ods import get_data, save_data
from nose.tools import raises
def test_bug_fix_for_issue_1():
data = get_data(os.path.join("tests", "fixtures", "repeated.ods"))
assert data["Sheet1"] == [['repeated', 'repeated', 'repeated', 'repeated']]
def test_bug_fix_for_issue_2():
data = {}
data.update({"Sheet 1": [[1, 2, 3], [4, 5, 6]]})
data.update({"Sheet 2": [[u"row 1", u"Héllô!", u"HolÁ!"]]})
save_data("your_file.ods", data)
new_data = get_data("your_file.ods")
assert new_data["Sheet 2"] == [[u'row 1', u'H\xe9ll\xf4!', u'Hol\xc1!']]
def test_date_util_parse():
from pyexcel_ods.ods import date_value
value = "2015-08-17T19:20:00"
d = date_value(value)
assert d.strftime("%Y-%m-%dT%H:%M:%S") == "2015-08-17T19:20:00"
value = "2015-08-17"
d = date_value(value)
assert d.strftime("%Y-%m-%d") == "2015-08-17"
value = "2015-08-17T19:20:59.999999"
d = date_value(value)
assert d.strftime("%Y-%m-%dT%H:%M:%S") == "2015-08-17T19:20:59"
value = "2015-08-17T19:20:59.99999"
d = date_value(value)
assert d.strftime("%Y-%m-%dT%H:%M:%S") == "2015-08-17T19:20:59"
value = "2015-08-17T19:20:59.999999999999999"
d = date_value(value)
assert d.strftime("%Y-%m-%dT%H:%M:%S") == "2015-08-17T19:20:59"
@raises(Exception)
def test_invalid_date():
from pyexcel_ods.ods import date_value
value = "2015-08-"
date_value(value)
@raises(Exception)
def test_fake_date_time_10():
from pyexcel_ods.ods import date_value
date_value("1234567890")
@raises(Exception)
def test_fake_date_time_19():
from pyexcel_ods.ods import date_value
date_value("1234567890123456789")
@raises(Exception)
def test_fake_date_time_20():
from pyexcel_ods.ods import date_value
date_value("12345678901234567890")
|
Python
| 0.005453
|
@@ -120,16 +120,21 @@
t raises
+, eq_
%0A%0A%0Adef t
@@ -1868,12 +1868,297 @@
67890%22)%0A
-
+%0A%0Adef test_issue_13():%0A test_file = %22test_issue_13.ods%22%0A data = %5B%0A %5B1,2%5D,%0A %5B%5D,%0A %5B%5D,%0A %5B%5D,%0A %5B3,4%5D%0A %5D%0A save_data(test_file, %7Btest_file: data%7D)%0A written_data = get_data(test_file)%0A eq_(data, written_data%5Btest_file%5D)%0A os.unlink(test_file)
|
2fb41f6e9e6ba69181707b0699a4c6735737c7fa
|
Add CLI test for imxim create and extract boot image
|
tests/test_cli_imxim.py
|
tests/test_cli_imxim.py
|
# Copyright (c) 2019 Martin Olejar
#
# SPDX-License-Identifier: BSD-3-Clause
# The BSD-3-Clause license for this file can be found in the LICENSE file included with this distribution
# or at https://spdx.org/licenses/BSD-3-Clause.html#licenseText
import os
import pytest
import shutil
# Used Directories
DATA_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data')
TEMP_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'temp')
def setup_module(module):
# Create temp directory
os.makedirs(TEMP_DIR, exist_ok=True)
def teardown_module(module):
# Delete created files
shutil.rmtree(TEMP_DIR)
@pytest.mark.script_launch_mode('subprocess')
def test_imxim_create2a(script_runner):
ret = script_runner.run('imxim',
'create2a',
'--version', '0x41',
'--dcd', os.path.join(DATA_DIR, 'dcd_test.bin'),
'0x877FF000',
os.path.join(DATA_DIR, 'dcd_test.bin'),
os.path.join(TEMP_DIR, 'test_image.imx'))
assert ret.success
@pytest.mark.script_launch_mode('subprocess')
def test_imxim_create2b(script_runner):
pass
@pytest.mark.script_launch_mode('subprocess')
def test_imxim_create3a(script_runner):
pass
@pytest.mark.script_launch_mode('subprocess')
def test_imxim_create3b(script_runner):
pass
@pytest.mark.script_launch_mode('subprocess')
def test_imxim_create(script_runner):
pass
@pytest.mark.script_launch_mode('subprocess')
def test_imxim_extract(script_runner):
pass
@pytest.mark.script_launch_mode('subprocess')
def test_imxim_info(script_runner):
ret = script_runner.run('imxim', 'info', os.path.join(TEMP_DIR, 'test_image.imx'))
assert ret.success
@pytest.mark.script_launch_mode('subprocess')
def test_imxim_dcdfc(script_runner):
# convert DCD in TXT format to Binary format (default conversion)
ret = script_runner.run('imxim',
'dcdfc',
os.path.join(TEMP_DIR, 'dcd_test.bin'),
os.path.join(DATA_DIR, 'dcd_test.txt'))
assert ret.success
# convert DCD in Binary format to TXT format
ret = script_runner.run('imxim',
'dcdfc',
'-o', 'txt',
'-i', 'bin',
os.path.join(TEMP_DIR, 'dcd_test.txt'),
os.path.join(DATA_DIR, 'dcd_test.bin'))
assert ret.success
|
Python
| 0
|
@@ -1513,106 +1513,418 @@
-pass%0A%0A%0A@pytest.mark.script_launch_mode('subprocess')%0Adef test_imxim_extract(script_runner):%0A pa
+ret = script_runner.run('imxim', 'create',%0A os.path.join(DATA_DIR, 'imx7d_bootimg.yaml'),%0A os.path.join(TEMP_DIR, 'imx7d_bootimg.imx'))%0A assert ret.success%0A%0A%0A@pytest.mark.script_launch_mode('subprocess')%0Adef test_imxim_extract(script_runner):%0A ret = script_runner.run('imxim', 'extract', os.path.join(TEMP_DIR, 'test_image.imx'))%0A assert ret.succe
ss%0A%0A
|
b7cb5fc036cc2715a184f099fd403a4f6b722969
|
Add output writing to single pop model
|
single-pop/single_pop.py
|
single-pop/single_pop.py
|
import sys
import numpy as np
import numpy.random as npr
from sklearn.neighbors.kde import KernelDensity
from scipy.special import gammaln
import matplotlib.pyplot as plt
from calculate_phist import read_counts
from calculate_phist import normalize_haplotypes
def log_factorial(n):
return gammaln(n+1)
def log_multinomial(xs, ps):
n = np.sum(xs)
log_prob = log_factorial(n) - np.sum(log_factorial(xs)) + np.sum(xs * np.log(ps + 0.0000000000001))
return log_prob
class KDE_MCMC_Sampler(object):
def __init__(self, observed_counts):
"""
Observed counts is 3D matrix of pop, locus, haplotype
"""
self.observed_counts = observed_counts
self.individual_counts = observed_counts.sum(axis=2)
self.observed_frequencies = normalize_haplotypes(observed_counts)
self.n_loci, self.n_pop, self.n_haplotypes = self.observed_counts.shape
# from bamova
self.DWEIGHT = 1.0
self.DADD = 0.00001
self.SMALL_NUM = 0.0000000000001
print "initializing frequencies"
self.freq = np.zeros((self.n_loci, self.n_haplotypes))
for l in xrange(self.n_loci):
self.freq[l, :] = self.sample_locus_freq(self.observed_frequencies[l, 0, :])
def sample_locus_freq(self, freq):
alphas = self.DWEIGHT * freq + self.DADD + self.SMALL_NUM
return npr.dirichlet(alphas)
def locus_prob(self, locus_obs_counts, locus_freq):
log_prob_sum = 0.0
for p in xrange(self.n_pop):
log_prob_sum += log_multinomial(locus_obs_counts[p], locus_freq)
return log_prob_sum
def step(self):
total_log_prob = 0.0
for l in xrange(self.n_loci):
locus_indiv_counts = self.individual_counts[l, :]
locus_obs_counts = self.observed_counts[l, :, :]
log_prob = self.locus_prob(locus_obs_counts, self.freq[l, :])
proposed_locus_freq = self.sample_locus_freq(self.freq[l, :])
proposed_log_prob = self.locus_prob(locus_obs_counts, proposed_locus_freq)
log_prob_ratio = proposed_log_prob - log_prob
log_r = np.log(npr.random())
if proposed_log_prob >= log_prob or log_r <= log_prob_ratio:
self.freq[l, :] = proposed_locus_freq
log_prob = proposed_log_prob
total_log_prob += log_prob
locus_prob = []
for l in xrange(self.n_loci):
log_prob = self.locus_prob(locus_obs_counts, self.freq[l, :])
locus_prob.append(log_prob)
return self.freq, total_log_prob, locus_prob
def plot_log_prob(flname, log_probs):
plt.clf()
plt.hold(True)
plt.hist(log_probs, bins=30)
plt.xlabel("Log Probability", fontsize=16)
plt.xlim([min(log_probs), 0.0])
plt.ylabel("Occurrences (Loci)", fontsize=16)
plt.savefig(flname, DPI=200)
def simulate(occur_fl, n_steps, plot_basename):
print "reading occurrences"
observed_counts = read_counts(occur_fl)
individual_counts = observed_counts.sum(axis=2)
observed_frequencies = normalize_haplotypes(observed_counts)
sampler = KDE_MCMC_Sampler(observed_counts)
locus_log_prob = []
for i in xrange(n_steps):
freq, log_prob, locus_log_prob = sampler.step()
print "step", i, "log prob", log_prob
plot_log_prob(plot_basename + "_log_prob.pdf", locus_log_prob)
if __name__ == "__main__":
occur_fl = sys.argv[1]
n_steps = int(sys.argv[2])
plot_basename = sys.argv[3]
simulate(occur_fl, n_steps, plot_basename)
|
Python
| 0.000027
|
@@ -2592,28 +2592,39 @@
steps, plot_
-base
+flname, prob_fl
name):%0A%09prin
@@ -2846,16 +2846,46 @@
ounts)%0A%0A
+%09fl = open(prob_flname, %22w%22)%0A%0A
%09locus_l
@@ -3020,53 +3020,154 @@
b%0A%0A%09
-plot_log_prob(plot_basename + %22_log_prob.pdf%22
+%09if i %25 100 == 0:%0A%09%09%09for j, prob in enumerate(locus_log_prob):%0A%09%09%09%09fl.write(%22%25s %25s %25s%5Cn%22 %25 (i, j, prob))%0A%0A%09fl.close()%0A%0A%0A%09plot_log_prob(plot_flname
, lo
@@ -3262,28 +3262,53 @@
v%5B2%5D)%0A%09plot_
-base
+flname = sys.argv%5B3%5D%0A%09prob_fl
name = sys.a
@@ -3311,17 +3311,17 @@
ys.argv%5B
-3
+4
%5D%0A%0A%09simu
@@ -3353,12 +3353,23 @@
lot_
-base
+flname, prob_fl
name
|
cc32b41359c044556c76007dbb1772e3564acfc6
|
Add more tests
|
tests/test_endpoints.py
|
tests/test_endpoints.py
|
from tests import SuperTestCase
from flask import jsonify
from manage import app
import json
class TestEndPoints(SuperTestCase):
""" Endpoints being tested:
/api/v1/bucketlists/
/api/v1/bucketlists/<bucket_list_id>
/api/v1/bucketlists/<bucketlist_id>/items/
/api/v1/bucketlists/<bucketlist_id>/items/<item ID>
Methods: GET, PUT, POST, DELETE
"""
def test_creation_of_a_bucketlist(self):
""" Test for creation of a bucketlist """
self.buck = {"name":"tomorrowland", "description":"dance time"}
response = self.client.get("/api/v1/bucketlists/", headers=self.make_token(), content_type='application/json')
self.assertEqual(response.status_code, 200)
def test_editing_a_bucketlist(self):
""" Test for editing an existent bucketlist """
self.bucketlist = {"name":"tomorrowland", "description":"party time"}
response = self.app.put("/api/v1/bucketlists/2",data=self.bucketlist, headers=self.make_token())
self.assertEqual(response.status_code, 201)
def test_deletion_of_a_bucketlist(self):
""" Test deletion of a bucketlist """
response = self.app.delete("/api/v1/bucketlists/1", headers=self.make_token(), content_type='application/json')
self.assertEqual(response.status_code, 200)
def test_get_bucketlists(self):
""" Test listing all bucketlists via a get request """
response = self.app.get("/api/v1/bucketlists/", headers=self.make_token(), content_type='application/json')
self.assertEqual(response.status_code, 200)
def test_get_single_bucketlist(self):
""" Test listing a single bucketlist item """
response = self.app.get("/api/v1/bucketlists/1", headers=self.make_token())
self.assertEqual(response.status_code, 200)
def test_get_none_existent_bucketlist(self):
""" Test get request on a none existent bucketlist """
response = self.app.get("/api/v1/bucketlists/350", headers=self.make_token())
self.assertEqual(response.status_code, 404)
def test_item_creation(self):
""" Test for response on new item creation """
item_data = {"name":"invite guyz", "description":"call up the hommies"}
response = self.app.post("/api/v1/bucketlists/2/items/", data=item_data, headers=self.make_token())
self.assertEqual(response.status_code, 201)
def test_editing_an_item(self):
""" Test for editing an item """
self.item = {"name":"learn the guitar", "description":"take guitar classes"}
response = self.app.put("/api/v1/bucketlists/1/items/1", headers=self.make_token())
self.assertEqual(response.status_code, 201)
def test_deletion_of_an_item(self):
""" Test for deletion of an item """
response = self.app.delete("/api/v1/bucketlists/2/items/1", headers=self.make_token(), content_type='application/json')
self.assertEqual(response.status_code, 200)
|
Python
| 0
|
@@ -2705,33 +2705,390 @@
.status_code, 20
-1
+0)%0A%0A def test_editing_a_none_existing_item(self):%0A %22%22%22 Test for editing an item %22%22%22%0A self.item = %7B%22name%22:%22learn the guitar%22, %22description%22:%22take guitar classes%22%7D%0A response = self.app.put(%22/api/v1/bucketlists/100/items/650%22, headers=self.make_token(), content_type='application/json')%0A self.assertEqual(response.status_code, 404
)%0A%0A def test_
@@ -3320,28 +3320,264 @@
(response.status_code, 200)%0A
+%0A def test_deletion_of_a_none_existent_item(self):%0A response = self.app.delete(%22/api/v1/bucketlists/2/items/300%22, headers=self.make_token(), content_type='application/json')%0A self.assertEqual(response.status_code, 404)%0A
|
12d244ce9bd15d95817d4c4d774a1ab1758db894
|
fix broken build
|
tests/test_extension.py
|
tests/test_extension.py
|
from nose.tools import raises
class TestExt:
def test_test(self):
"""test test"""
from pyexcel.ext import test
from pyexcel.io import READERS
from pyexcel.io import WRITERS
assert READERS['test'] == 'test'
assert WRITERS['test'] == 'test'
@raises(ImportError)
def test_unknown(self):
"""test unknown"""
from pyexcel.ext import unknown
def test_tabulate(self):
import pyexcel as pe
from pyexcel.ext import presentation
a = [[1,1]]
m = pe.sheets.Matrix(a)
print(str(m))
assert str(m) == "pyexcel.sheets.matrix.Matrix"
|
Python
| 0.000001
|
@@ -647,8 +647,119 @@
Matrix%22%0A
+%0A def tearDown(self):%0A from pyexcel.presentation import STRINGIFICATION%0A STRINGIFICATION = %7B%7D%0A
|
12786677d454de5bccee6f0461021727481fbf24
|
use simplified integral definition in tests/test_functions.py
|
tests/test_functions.py
|
tests/test_functions.py
|
# c: 14.04.2008, r: 14.04.2008
import numpy as nm
from sfepy import data_dir
filename_mesh = data_dir + '/meshes/2d/square_unit_tri.mesh'
def get_pars(ts, coors, mode=None, region=None, ig=None, extra_arg=None):
if mode == 'special':
if extra_arg == 'hello!':
ic = 0
else:
ic = 1
return {('x_%s' % ic) : coors[:,ic]}
def get_p_edge(ts, coors, bc=None):
if bc.name == 'p_left':
return nm.sin(nm.pi * coors[:,1])
else:
return nm.cos(nm.pi * coors[:,1])
def get_circle(coors, domain=None):
r = nm.sqrt(coors[:,0]**2.0 + coors[:,1]**2.0)
return nm.where(r < 0.2)[0]
functions = {
'get_pars1' : (lambda ts, coors, mode=None, region=None, ig=None:
get_pars(ts, coors, mode, region, ig, extra_arg='hello!'),),
'get_p_edge' : (get_p_edge,),
'get_circle' : (get_circle,),
}
# Just another way of adding a function, besides 'functions' keyword.
function_1 = {
'name' : 'get_pars2',
'function' : lambda ts, coors,mode=None, region=None, ig=None:
get_pars(ts, coors, mode, region, ig, extra_arg='hi!'),
}
materials = {
'mf1' : (None, 'get_pars1'),
'mf2' : 'get_pars2',
# Dot denotes a special value, that is not propagated to all QP.
'mf3' : ({'a' : 10.0, 'b' : 2.0, '.c' : 'ahoj'},),
}
fields = {
'pressure' : ((1,1), 'real', 'Omega', {'Omega' : '2_3_P2'}),
}
variables = {
'p' : ('unknown field', 'pressure', 0),
'q' : ('test field', 'pressure', 'p'),
}
wx = 0.499
regions = {
'Omega' : ('all', {}),
'Left' : ('nodes in (x < -%.3f)' % wx, {}),
'Right' : ('nodes in (x > %.3f)' % wx, {}),
'Circle' : ('nodes by get_circle', {}),
}
integrals = {
'i1' : ('v', 'gauss_o2_d2'),
}
ebcs = {
'p_left' : ('Left', {'p.all' : 'get_p_edge'}),
'p_right' : ('Right', {'p.all' : 'get_p_edge'}),
}
equations = {
'e1' : """dw_laplace.i1.Omega( mf3.a, q, p ) = 0""",
}
solver_0 = {
'name' : 'ls',
'kind' : 'ls.scipy_direct',
}
solver_1 = {
'name' : 'newton',
'kind' : 'nls.newton',
}
fe = {
'chunk_size' : 1000
}
from sfepy.base.testing import TestCommon, assert_
from sfepy.base.base import pause, debug
class Test( TestCommon ):
def from_conf( conf, options ):
from sfepy.fem import ProblemDefinition
problem = ProblemDefinition.from_conf(conf, init_variables=False)
test = Test(problem = problem, conf = conf, options = options)
return test
from_conf = staticmethod( from_conf )
def test_material_functions(self):
problem = self.problem
ts = problem.get_default_ts(step=0)
problem.materials.time_update(ts,
problem.domain,
problem.equations,
problem.variables)
coors = problem.domain.get_mesh_coors()
mat1 = problem.materials['mf1']
assert_(nm.all(coors[:,0] == mat1.get_data(None, None, 'x_0')))
mat2 = problem.materials['mf2']
assert_(nm.all(coors[:,1] == mat2.get_data(None, None, 'x_1')))
mat3 = problem.materials['mf3']
assert_(nm.all(mat3.get_data(('Omega', 'i1'), 0, 'a') == 10.0))
assert_(nm.all(mat3.get_data(('Omega', 'i1'), 0, 'b') == 2.0))
assert_(mat3.get_data(None, None, 'c') == 'ahoj')
return True
# mat.time_update(ts, problem)
def test_ebc_functions(self):
import os.path as op
problem = self.problem
problem.set_variables(self.conf.variables)
problem.set_equations(self.conf.equations)
problem.time_update()
vec = problem.solve()
name = op.join(self.options.out_dir,
op.splitext(op.basename(__file__))[0] + '_ebc.vtk')
problem.save_state(name, vec)
ok = True
domain = problem.domain
iv = domain.regions['Left'].get_vertices(0)
coors = domain.get_mesh_coors()[iv]
ok = ok and self.compare_vectors(vec[iv], nm.sin(nm.pi * coors[:,1]),
label1='state_left', label2='bc_left')
iv = domain.regions['Right'].get_vertices(0)
coors = domain.get_mesh_coors()[iv]
ok = ok and self.compare_vectors(vec[iv], nm.cos(nm.pi * coors[:,1]),
label1='state_right', label2='bc_right')
return ok
def test_region_functions(self):
import os.path as op
problem = self.problem
name = op.join(self.options.out_dir,
op.splitext(op.basename(__file__))[0])
problem.save_regions(name, ['Circle'])
return True
|
Python
| 0.000001
|
@@ -1712,58 +1712,8 @@
%0A%7D%0A%0A
-integrals = %7B%0A 'i1' : ('v', 'gauss_o2_d2'),%0A%7D%0A%0A
ebcs
@@ -1863,18 +1863,17 @@
laplace.
-i1
+2
.Omega(
@@ -3118,16 +3118,68 @@
%5B'mf3'%5D%0A
+ key = mat3.get_keys(region_name='Omega')%5B0%5D%0A
@@ -3203,39 +3203,27 @@
t3.get_data(
-('Omega', 'i1')
+key
, 0, 'a') ==
@@ -3271,23 +3271,11 @@
ata(
-('Omega', 'i1')
+key
, 0,
|
ba6c18a95a01153f142b1da46e7b803e8f5923da
|
Add options to plot_eigvals to specify the symbol to use and the axes to plot onto.
|
Plotting.py
|
Plotting.py
|
import scipy
import SloppyCell
# We've had yet more trouble running in parallel, but these errors were actually
# killing the job without raising any error. So let's just only even try
# importing if we're the master node.
if SloppyCell.my_rank != 0:
raise ImportError
try:
from pylab import *
except RuntimeError:
# When running in parallel we found that this import could raise a
# 'RuntimeError: could not open display' rather than an ImportError, so
# we catch and raise an error we know how to handle
raise ImportError
import Residuals
rc('lines', linewidth=2)
basic_colors = ('b', 'g', 'r', 'c', 'm', 'k')
basic_symbols = ('o', 's', '^', 'v', '<', ">", 'x', 'D', 'h', 'p')
basic_lines = ('-', '--', '-.', ':')
def ColorWheel(colors = basic_colors, symbols = basic_symbols,
lines = basic_lines):
"""
ColorWheel()
Returns a generator that cycles through a selection of colors, symbols, and
line styles for matlibplot.matlab.plot.
"""
if not colors:
colors = ('',)
if not symbols:
symbols = ('',)
if not lines:
lines = ('',)
while 1:
for l in lines:
for s in symbols:
for c in colors:
yield (c, s, l)
vals_cW = 0
def reset_vals_cw():
"""
Reset the ColorWheel used for plotting eigenvalues.
"""
global vals_cW
vals_cW = ColorWheel(colors = ('b', 'r', 'g', 'c', 'm', 'y', 'k'),
lines = None)
reset_vals_cw()
def plot_eigvals(vals, label=None, offset=0, indicate_neg=True, join=False):
posVals = abs(scipy.compress(scipy.real(vals) > 0, vals))
posRange = scipy.compress(scipy.real(vals) > 0, range(len(vals)))
negVals = abs(scipy.compress(scipy.real(vals) < 0, vals))
negRange = scipy.compress(scipy.real(vals) < 0, range(len(vals)))
sym = vals_cW.next()
if indicate_neg:
if sym[0] == 'r':
sym = vals_cW.next()
if len(negVals) > 0:
semilogy(negRange+offset, negVals, color = 'r', marker=sym[1],
linestyle='', mfc = 'r', zorder=1)
line = semilogy(posRange+offset, posVals, color=sym[0], marker=sym[1],
label = label, zorder=0, markerfacecolor=sym[0],
linestyle='')
if join:
plot(scipy.arange(len(vals)) + offset, abs(vals), color = sym[0],
linestyle='-', zorder=-1)
a = axis()
axis([-0.05*len(vals) + offset, 1.05*(len(vals) - 1) + offset, a[2], a[3]])
return line
def plot_singvals(vals, label=None, offset=0, join=False):
return plot_eigvals(vals, label, offset, indicate_neg=False, join=join)
PlotEigenvalueSpectrum = plot_eigvals
def plot_eigvect(vect, labels=None, bottom = 0, num_label = 5):
"""
Plot a given eigenvector.
If a list of labels is passed in, the largest (in magnitude) num_label bars
will be labeled on the plot.
"""
# The 0.4 centers the bars on their numbers, accounting for the default
# bar width of 0.8
vect = scipy.real(vect)
max_index = scipy.argmax(abs(vect))
if vect[max_index] < 0:
vect = -vect
bar(scipy.arange(len(vect)) - 0.4, vect/scipy.linalg.norm(vect),
bottom=bottom)
a = list(axis())
a[0:2] = [-.03*len(vect) - 0.4, (len(vect) - 1)*1.03 + 0.4]
if labels is not None:
mags = zip(abs(vect), range(len(vect)), vect)
mags.sort()
mags.reverse()
for mag, index, val in mags[:num_label]:
name = labels[index]
text(index, val + scipy.sign(val)*0.05, name,
horizontalalignment='center', verticalalignment='center')
a[2] -= 0.1
a[3] += 0.1
axis(a)
def plot_priors(model,priorIDs=None,params=None,sameScale=False):
"""
Plots specified priors and parameter values.
If no priors are specified, plots them all.
If no params are provided, uses the model params.
If sameScale is true, recenters everything so all prior optima are at 0.
Labeling is awkward and hence avoided here. I suggest using the
pylab.text command with parameter names after the plot has been generated.
"""
if params is None:
params=model.get_params()
residuals = model.GetResiduals()
if priorIDs is None:
priorIDs = residuals.keys()
priorVals=[]
priorErrs=[]
parVals=[]
for resID in priorIDs:
res = residuals.getByKey(resID)
if isinstance(res, Residuals.PriorInLog):
priorVals.append(res.logPVal)
priorErrs.append(res.sigmaLogPVal)
parVals.append(params.getByKey(res.pKey))
if sameScale is False:
errorbar(scipy.arange(len(priorVals)),priorVals,yerr=priorErrs,fmt='bo',ecolor='k',capsize=6)
errorbar(scipy.arange(len(priorVals)),scipy.log(parVals),fmt='go')
else:
errorbar(scipy.arange(len(priorVals)),scipy.zeros(len(priorVals)),yerr=priorErrs,fmt=None,ecolor='k',capsize=6)
errorbar(scipy.arange(len(priorVals)),scipy.log(parVals)-priorVals,fmt='go')
|
Python
| 0
|
@@ -1583,24 +1583,61 @@
, join=False
+, %0A sym=None, ax=None
):%0A posVa
@@ -1888,24 +1888,89 @@
en(vals)))%0A%0A
+ if axis is None:%0A ax = gca()%0A%0A if sym is None:%0A
sym = va
@@ -2103,16 +2103,19 @@
+ax.
semilogy
@@ -2169,16 +2169,19 @@
sym%5B1%5D,%0A
+
@@ -2240,16 +2240,19 @@
line =
+ax.
semilogy
@@ -2328,16 +2328,19 @@
+
+
label =
@@ -2392,32 +2392,35 @@
+
linestyle='')%0A%0A
@@ -2439,16 +2439,19 @@
+ax.
plot(sci
@@ -2500,32 +2500,35 @@
color = sym%5B0%5D,%0A
+
lin
|
47b32b1b2d5fe81dcf86c78d61690c1f0572b8ea
|
Add failing name and docstring test for things exported
|
tests/test_interface.py
|
tests/test_interface.py
|
import pkgutil
import pytest
import funcy
from funcy.cross import PY2, PY3
from funcy.py2 import cat
from funcy import py2, py3
py = py2 if PY2 else py3
# Introspect all modules
exclude = ('cross', '_inspect', 'py2', 'py3', 'simple_funcs', 'funcmakers')
module_names = list(name for _, name, _ in pkgutil.iter_modules(funcy.__path__)
if name not in exclude)
modules = [getattr(funcy, name) for name in module_names]
def test_match():
assert funcy.__all__ == py.__all__
@pytest.mark.skipif(PY3, reason="modules use python 2 internally")
def test_full_py2():
assert sorted(funcy.__all__) == sorted(cat(m.__all__ for m in modules))
def test_full():
assert len(py2.__all__) == len(py3.__all__)
def test_name_clashes():
counts = py2.count_reps(py2.icat(m.__all__ for m in modules))
clashes = [name for name, c in counts.items() if c > 1]
assert not clashes, 'names clash for ' + ', '.join(clashes)
def test_renames():
inames = [n for n in py2.__all__ if n.startswith('i')]
ipairs = [n[1:] for n in inames if n[1:] in py2.__all__]
for name in inames:
if name != 'izip':
assert name in py3.__all__ or name[1:] in py3.__all__
for name in ipairs:
assert name in py3.__all__
assert 'l' + name in py3.__all__
lnames = [n for n in py3.__all__ if n.startswith('l')]
lpairs = [n[1:] for n in lnames if n[1:] in py3.__all__]
for name in lnames:
if name != 'lzip':
assert name in py2.__all__ or name[1:] in py2.__all__
for name in lpairs:
assert name in py2.__all__
assert 'i' + name in py2.__all__
# Only inames a renamed
assert set(py2.__all__) - set(py3.__all__) <= set(inames)
# Only lnames a new, and zip_values/zip_dicts
assert set(py3.__all__) - set(py2.__all__) <= set(lnames) | set(['zip_values', 'zip_dicts'])
|
Python
| 0
|
@@ -1881,8 +1881,474 @@
icts'%5D)%0A
+%0A%0Adef test_docs():%0A exports = %5B(name, getattr(funcy, name)) for name in funcy.__all__%0A if name not in ('print_errors', 'print_durations') and%0A getattr(funcy, name).__module__ != 'funcy.types'%5D%0A # NOTE: we are testing this way and not with all() to immediately get a list of offenders%0A assert %5Bname for name, f in exports if f.__name__ == '%3Clambda%3E'%5D == %5B%5D%0A assert %5Bname for name, f in exports if f.__doc__ is None%5D == %5B%5D%0A
|
1e8f1882a357f11925e79395409716620f91677c
|
Fix import error python_2_unicode_compatible
|
hitcount/models.py
|
hitcount/models.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from datetime import timedelta
from django.db import models
from django.conf import settings
from django.db.models import F
from django.utils import timezone
from django.dispatch import receiver
from django.utils.encoding import python_2_unicode_compatible
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.utils.translation import ugettext_lazy as _
AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
from .managers import HitCountManager, HitManager
from .signals import delete_hit_count
@receiver(delete_hit_count)
def delete_hit_count_handler(sender, instance, save_hitcount=False, **kwargs):
"""
Custom callback for the Hit.delete() method.
Hit.delete(): removes the hit from the associated HitCount object.
Hit.delete(save_hitcount=True): preserves the hit for the associated
HitCount object.
"""
if not save_hitcount:
instance.hitcount.decrease()
@python_2_unicode_compatible
class HitCount(models.Model):
"""
Model that stores the hit totals for any content object.
"""
hits = models.PositiveIntegerField(default=0)
modified = models.DateTimeField(auto_now=True)
content_type = models.ForeignKey(
ContentType, related_name="content_type_set_for_%(class)s", on_delete=models.CASCADE)
object_pk = models.TextField('object ID')
content_object = GenericForeignKey('content_type', 'object_pk')
objects = HitCountManager()
class Meta:
ordering = ('-hits',)
get_latest_by = "modified"
verbose_name = _("hit count")
verbose_name_plural = _("hit counts")
unique_together = ("content_type", "object_pk")
db_table = "hitcount_hit_count"
def __str__(self):
return '%s' % self.content_object
def increase(self):
self.hits = F('hits') + 1
self.save()
def decrease(self):
self.hits = F('hits') - 1
self.save()
def hits_in_last(self, **kwargs):
"""
Returns hit count for an object during a given time period.
This will only work for as long as hits are saved in the Hit database.
If you are purging your database after 45 days, for example, that means
that asking for hits in the last 60 days will return an incorrect
number as that the longest period it can search will be 45 days.
For example: hits_in_last(days=7).
Accepts days, seconds, microseconds, milliseconds, minutes,
hours, and weeks. It's creating a datetime.timedelta object.
"""
assert kwargs, "Must provide at least one timedelta arg (eg, days=1)"
period = timezone.now() - timedelta(**kwargs)
return self.hit_set.filter(created__gte=period).count()
# def get_content_object_url(self):
# """
# Django has this in its contrib.comments.model file -- seems worth
# implementing though it may take a couple steps.
#
# """
# pass
@python_2_unicode_compatible
class Hit(models.Model):
"""
Model captures a single Hit by a visitor.
None of the fields are editable because they are all dynamically created.
Browsing the Hit list in the Admin will allow one to blacklist both
IP addresses as well as User Agents. Blacklisting simply causes those
hits to not be counted or recorded.
Depending on how long you set the HITCOUNT_KEEP_HIT_ACTIVE, and how long
you want to be able to use `HitCount.hits_in_last(days=30)` you can choose
to clean up your Hit table by using the management `hitcount_cleanup`
management command.
"""
created = models.DateTimeField(editable=False, auto_now_add=True, db_index=True)
ip = models.CharField(max_length=40, editable=False, db_index=True)
session = models.CharField(max_length=40, editable=False, db_index=True)
user_agent = models.CharField(max_length=255, editable=False)
user = models.ForeignKey(AUTH_USER_MODEL, null=True, editable=False, on_delete=models.CASCADE)
hitcount = models.ForeignKey(HitCount, editable=False, on_delete=models.CASCADE)
objects = HitManager()
class Meta:
ordering = ('-created',)
get_latest_by = 'created'
verbose_name = _("hit")
verbose_name_plural = _("hits")
def __str__(self):
return 'Hit: %s' % self.pk
def save(self, *args, **kwargs):
"""
The first time the object is created and saved, we increment
the associated HitCount object by one. The opposite applies
if the Hit is deleted.
"""
if self.pk is None:
self.hitcount.increase()
super(Hit, self).save(*args, **kwargs)
def delete(self, save_hitcount=False):
"""
If a Hit is deleted and save_hitcount=True, it will preserve the
HitCount object's total. However, under normal circumstances, a
delete() will trigger a subtraction from the HitCount object's total.
NOTE: This doesn't work at all during a queryset.delete().
"""
delete_hit_count.send(
sender=self, instance=self, save_hitcount=save_hitcount)
super(Hit, self).delete()
@python_2_unicode_compatible
class BlacklistIP(models.Model):
ip = models.CharField(max_length=40, unique=True)
class Meta:
db_table = "hitcount_blacklist_ip"
verbose_name = _("Blacklisted IP")
verbose_name_plural = _("Blacklisted IPs")
def __str__(self):
return '%s' % self.ip
@python_2_unicode_compatible
class BlacklistUserAgent(models.Model):
user_agent = models.CharField(max_length=255, unique=True)
class Meta:
db_table = "hitcount_blacklist_user_agent"
verbose_name = _("Blacklisted User Agent")
verbose_name_plural = _("Blacklisted User Agents")
def __str__(self):
return '%s' % self.user_agent
class HitCountMixin(object):
"""
HitCountMixin provides an easy way to add a `hit_count` property to your
model that will return the related HitCount object.
"""
@property
def hit_count(self):
ctype = ContentType.objects.get_for_model(self.__class__)
hit_count, created = HitCount.objects.get_or_create(
content_type=ctype, object_pk=self.pk)
return hit_count
|
Python
| 0.000001
|
@@ -262,29 +262,11 @@
rom
-django.utils.encoding
+six
imp
|
5baa216b615b39fe5d9bf5bb71e9ae8048ef4dc0
|
delete samples on metric delete
|
holodeck/models.py
|
holodeck/models.py
|
import uuid
from django.db import models
from holodeck.utils import get_widget_type_choices, load_class_by_string, \
metric_to_shard_mapper, sample_to_shard_mapper
from django.contrib.auth.models import User
class Dashboard(models.Model):
name = models.CharField(max_length=255)
owner = models.ForeignKey(User, null=True)
def __unicode__(self):
return self.name
class Metric(models.Model):
name = models.CharField(max_length=255)
dashboard = models.ForeignKey('holodeck.Dashboard')
widget_type = models.CharField(
max_length=64,
choices=get_widget_type_choices()
)
api_key = models.CharField(
max_length=32,
unique=True,
blank=True,
null=True
)
def __unicode__(self):
return self.name
@classmethod
def generate_api_key(cls):
return uuid.uuid4().hex
def render(self):
return load_class_by_string(self.widget_type)().render(self)
@property
def sample_set(self):
return Sample.objects.filter(metric_id=self.id).using(
'shard_%s' % metric_to_shard_mapper(self))
def save(self, *args, **kwargs):
if not self.api_key:
self.api_key = Metric.generate_api_key()
super(Metric, self).save(*args, **kwargs)
class Sample(models.Model):
metric_id = models.IntegerField(max_length=64)
integer_value = models.IntegerField()
string_value = models.CharField(max_length=64)
timestamp = models.DateTimeField()
def save(self, *args, **kwargs):
self.full_clean()
kwargs.update({'using': 'shard_%s' % sample_to_shard_mapper(self)})
super(Sample, self).save(*args, **kwargs)
|
Python
| 0
|
@@ -22,24 +22,154 @@
ngo.
-db import models
+contrib.auth.models import User%0Afrom django.db import models%0Afrom django.db.models.signals import post_delete%0Afrom django.dispatch import receiver
%0Afro
@@ -296,52 +296,8 @@
per%0A
-from django.contrib.auth.models import User%0A
%0A%0Acl
@@ -1764,28 +1764,315 @@
self).save(*args, **kwargs)%0A
+%0A%0A@receiver(post_delete, sender=Metric)%0Adef metric_post_delete_handler(sender, instance, **kwargs):%0A %22%22%22%0A Because relation between sample and metric is handled on the application%0A level ensure deletion of samples on metric delete.%0A %22%22%22%0A instance.sample_set.all().delete()%0A
|
62150ec45c9c062397f0ac0270466b4497d459de
|
Fix world time plugin
|
will/plugins/productivity/world_time.py
|
will/plugins/productivity/world_time.py
|
import datetime
import requests
from will.plugin import WillPlugin
from will.decorators import respond_to, periodic, hear, randomly, route, rendered_template, require_settings
from will import settings
class TimePlugin(WillPlugin):
@respond_to("what time is it in (?P<place>.*)")
def what_time_is_it_in(self, message, place):
"""what time is it in ___: Say the time in almost any city on earth."""
if (
not hasattr(settings, "WORLD_WEATHER_ONLINE_KEY") and
not hasattr(settings, "WORLD_WEATHER_ONLINE_V2_KEY")
):
self.say(
"I need a world weather online key to do that.\n"
"You can get one at http://developer.worldweatheronline.com, "
"and then set the key as WORLD_WEATHER_ONLINE_V2_KEY",
message=message
)
else:
if hasattr(settings, "WORLD_WEATHER_ONLINE_V2_KEY"):
r = requests.get(
"http://api2.worldweatheronline.com/free/v2/tz.ashx?q=%s&format=json&key=%s" %
(place, settings.WORLD_WEATHER_ONLINE_V2_KEY)
)
elif hasattr(settings, "WORLD_WEATHER_ONLINE_KEY"):
r = requests.get(
"http://api2.worldweatheronline.com/free/v1/tz.ashx?q=%s&format=json&key=%s" %
(place, settings.WORLD_WEATHER_ONLINE_KEY)
)
resp = r.json()
if "request" in resp["data"] and len(resp["data"]["request"]) > 0:
place = resp["data"]["request"][0]["query"]
current_time = self.parse_natural_time(resp["data"]["time_zone"][0]["localtime"])
self.say("It's %s in %s." % (self.to_natural_day_and_time(current_time), place), message=message)
else:
self.say("I couldn't find anywhere named %s." % (place, ), message=message)
@respond_to("what time is it(\?)?$", multiline=False)
def what_time_is_it(self, message):
"""what time is it: Say the time where I am."""
now = datetime.datetime.now()
self.say("It's %s." % self.to_natural_day_and_time(now, with_timezone=True), message=message)
|
Python
| 0
|
@@ -16,24 +16,49 @@
%0Aimport
-requests
+pytz%0Aimport requests%0Aimport time%0A
%0Afrom wi
@@ -226,1516 +226,921 @@
s%0A%0A%0A
-class TimePlugin(WillPlugin):%0A%0A @respond_to(%22what time is it in (?P%3Cplace%3E.*)%22)%0A def what_time_is_it_in(self, message, place):%0A %22%22%22what time is it in ___: Say the time in almost any city on earth.%22%22%22%0A if (%0A not hasattr(settings, %22WORLD_WEATHER_ONLINE_KEY%22) and%0A not hasattr(settings, %22WORLD_WEATHER_ONLINE_V2_KEY%22)%0A ):%0A self.say(%0A %22I need a world weather online key to do that.%5Cn%22%0A %22You can get one at http://developer.worldweatheronline.com, %22%0A %22and then set the key as WORLD_WEATHER_ONLINE_V2_KEY%22,%0A message=message%0A )%0A else:%0A if hasattr(settings, %22WORLD_WEATHER_ONLINE_V2_KEY%22):%0A r = requests.get(%0A %22
+def get_location(place):%0A payload = %7B'address': place, 'sensor': False%7D%0A r = requests.get('http://maps.googleapis.com/maps/api/geocode/json', params=payload)%0A resp = r.json()%0A location = resp%5B%22results%22%5D%5B0%5D%5B%22geometry%22%5D%5B%22location%22%5D%0A return location%0A%0A%0Adef get_timezone(lat, lng):%0A payload = %7B'location': %22%25s,%25s%22 %25 (lat, lng), 'timestamp': int(time.time()), 'sensor': False%7D%0A r = requests.get('
http
+s
://
+m
ap
-i2.worldweatheronline.com/free/v2/tz.ashx?q=%25s&format=json&key=%25s%22 %25%0A (place, settings.WORLD_WEATHER_ONLINE_V2_KEY)%0A )%0A elif hasattr(settings, %22WORLD_WEATHER_ONLINE_KEY%22):%0A r = requests.get(%0A %22http://api2.worldweatheronline.com/free/v1/tz.ashx?q=%25s&format=json&key=%25s%22 %25%0A (place, settings.WORLD_WEATHER_ONLINE_KEY)%0A )%0A resp = r.json()%0A if %22request%22 in resp%5B%22data%22%5D and len(resp%5B%22data%22%5D%5B%22request%22%5D) %3E 0:%0A place = resp%5B%22data%22%5D%5B%22request%22%5D%5B0%5D%5B%22query%22%5D%0A current_time = self.parse_natural_time(resp%5B%22data%22%5D%5B%22
+s.googleapis.com/maps/api/timezone/json', params=payload)%0A resp = r.json()%0A tz = resp%5B'timeZoneId'%5D%0A return tz%0A%0A%0Aclass TimePlugin(WillPlugin):%0A%0A @respond_to(%22what time is it in (?P%3Cplace%3E.*)%22)%0A def what_time_is_it_in(self, message, place):%0A %22%22%22what time is it in ___: Say the time in almost any city on earth.%22%22%22%0A location = get_location(place)%0A tz = get_timezone(location%5B'lat'%5D, location%5B'lng'%5D)%0A ct = datetime.datetime.now(tz=pytz.
time
-_
zone
-%22%5D%5B0%5D%5B%22localtime%22%5D)%0A%0A
+(tz))%0A
@@ -1206,137 +1206,17 @@
me(c
-urrent_time), place), message=message)%0A else:%0A self.say(%22I couldn't find anywhere named %25s.%22 %25 (place,
+t), place
), m
|
47bb5b64dfec5ea4718d8eac4c204f8e61dd60f8
|
Add test that checks relative variable initialisation
|
tests/test_particles.py
|
tests/test_particles.py
|
from parcels import Grid, ScipyParticle, JITParticle, Variable
import numpy as np
import pytest
ptype = {'scipy': ScipyParticle, 'jit': JITParticle}
@pytest.fixture
def grid(xdim=100, ydim=100):
U = np.zeros((xdim, ydim), dtype=np.float32)
V = np.zeros((xdim, ydim), dtype=np.float32)
lon = np.linspace(0, 1, xdim, dtype=np.float32)
lat = np.linspace(0, 1, ydim, dtype=np.float32)
return Grid.from_data(U, lon, lat, V, lon, lat, mesh='flat')
@pytest.mark.parametrize('mode', ['scipy', 'jit'])
def test_variable_init(grid, mode, npart=10):
class TestParticle(ptype[mode]):
p_float = Variable('p_float', dtype=np.float32, default=10.)
p_double = Variable('p_double', dtype=np.float64, default=11.)
p_int = Variable('p_int', dtype=np.int32, default=12)
pset = grid.ParticleSet(npart, pclass=TestParticle,
lon=np.linspace(0, 1, npart, dtype=np.float32),
lat=np.linspace(1, 0, npart, dtype=np.float32))
assert np.array([isinstance(p.p_float, np.float32) for p in pset]).all()
assert np.allclose([p.p_float for p in pset], 10., rtol=1e-12)
assert np.array([isinstance(p.p_double, np.float64) for p in pset]).all()
assert np.allclose([p.p_double for p in pset], 11., rtol=1e-12)
assert np.array([isinstance(p.p_int, np.int32) for p in pset]).all()
assert np.allclose([p.p_int for p in pset], 12., rtol=1e-12)
|
Python
| 0.000008
|
@@ -89,16 +89,48 @@
pytest%0A
+from operator import attrgetter%0A
%0A%0Aptype
@@ -586,24 +586,94 @@
npart=10):%0A
+ %22%22%22Test that checks correct initialisation of custom variables%22%22%22%0A
class Te
@@ -897,16 +897,17 @@
fault=12
+.
)%0A ps
@@ -1463,32 +1463,32 @@
in pset%5D).all()%0A
-
assert np.al
@@ -1521,16 +1521,764 @@
set%5D, 12
+, rtol=1e-12)%0A%0A%0A@pytest.mark.parametrize('mode', %5B'scipy', 'jit'%5D)%0Adef test_variable_init_relative(grid, mode, npart=10):%0A %22%22%22Test that checks relative initialisation of custom variables%22%22%22%0A class TestParticle(ptype%5Bmode%5D):%0A p_base = Variable('p_base', dtype=np.float32, default=10.)%0A p_relative = Variable('p_relative', dtype=np.float32,%0A default=attrgetter('p_base'))%0A pset = grid.ParticleSet(npart, pclass=TestParticle,%0A lon=np.linspace(0, 1, npart, dtype=np.float32),%0A lat=np.linspace(1, 0, npart, dtype=np.float32))%0A assert np.allclose(%5Bp.p_base for p in pset%5D, 10., rtol=1e-12)%0A assert np.allclose(%5Bp.p_relative for p in pset%5D, 10
., rtol=
|
def480fd6b44e85cb85bcb3ed8cc0b98d771ee97
|
Rework test.
|
src/test/ed/lang/python/import4_test.py
|
src/test/ed/lang/python/import4_test.py
|
'''
Copyright (C) 2008 10gen Inc.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License, version 3,
as published by the Free Software Foundation.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
try:
import google.atom.service
except ImportError, e:
# This is OK -- can't import from outside a site with a _config packages map
pass
else:
raise AssertionError
import import3_help
m = __import__('import3_help', {}, {}, [])
assert m == import3_help
m = __import__('import3_help', {}, {}, ['foo'])
assert m == import3_help
m = __import__('import3_help', None, None, [])
assert m == import3_help
m = __import__('import3_help', None, None, ['foo'])
assert m == import3_help
|
Python
| 0
|
@@ -858,273 +858,203 @@
elp%0A
-m = __import__('import3_help', %7B%7D, %7B%7D, %5B%5D)%0Aassert m == import3_help%0Am = __import__('import3_help', %7B%7D, %7B%7D, %5B'foo'%5D)%0Aassert m == import3_help%0Am = __import__('import3_help', None, None, %5B%5D)%0Aassert m == import3_help%0Am = __import__('import3_help', None, None, %5B'foo'%5D)%0A
+for globals in %5B%7B%7D, None%5D:%0A for locals in %5B%7B%7D, None%5D:%0A for fromlist in %5B%5B%5D%5D: # could try None here too%0A m = __import__('import3_help', globals, locals, fromlist)%0A
asse
|
519ad83f47ead62549c2e0a533ffd3ff5488e384
|
Add lint test and format generated code (#4114)
|
java-asset/google-cloud-asset/synth.py
|
java-asset/google-cloud-asset/synth.py
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This script is used to synthesize generated parts of this library."""
import synthtool as s
import synthtool.gcp as gcp
gapic = gcp.GAPICGenerator()
common_templates = gcp.CommonTemplates()
# tasks has two product names, and a poorly named artman yaml
v1beta1_library = gapic.java_library(
service='asset',
version='v1beta1',
config_path='artman_cloudasset_v1beta1.yaml',
artman_output_name='')
s.copy(v1beta1_library / 'gapic-google-cloud-asset-v1beta1/src', 'src')
s.copy(v1beta1_library / 'grpc-google-cloud-asset-v1beta1/src', '../../google-api-grpc/grpc-google-cloud-asset-v1beta1/src')
s.copy(v1beta1_library / 'proto-google-cloud-asset-v1beta1/src', '../../google-api-grpc/proto-google-cloud-asset-v1beta1/src')
|
Python
| 0
|
@@ -696,150 +696,220 @@
gcp%0A
-%0Agapic = gcp.GAPICGenerator()%0Acommon_templates = gcp.CommonTemplates()%0A%0A# tasks has two product names, and a poorly named artman yaml%0Av1beta1_
+import synthtool.languages.java as java%0A%0Agapic = gcp.GAPICGenerator()%0A%0Aservice = 'asset'%0Aversions = %5B'v1beta1'%5D%0Aconfig_pattern = '/google/cloud/asset/artman_cloudasset_%7Bversion%7D.yaml'%0A%0Afor version in versions:%0A
libr
@@ -942,16 +942,18 @@
+
service=
'ass
@@ -952,17 +952,19 @@
ice=
-'asset',%0A
+service,%0A
@@ -975,19 +975,19 @@
ion=
-'v1beta1',%0A
+version,%0A
@@ -1002,42 +1002,50 @@
ath=
-'artman_cloudasset_v1beta1.yaml',%0A
+config_pattern.format(version=version),%0A
@@ -1068,31 +1068,25 @@
me='')%0A%0A
+
s.copy(
-v1beta1_
library
@@ -1087,16 +1087,17 @@
brary /
+f
'gapic-g
@@ -1104,37 +1104,43 @@
oogle-cloud-
-asset-v1beta1
+%7Bservice%7D-%7Bversion%7D
/src', 'src'
@@ -1137,39 +1137,33 @@
rc', 'src')%0A
+
s.copy(
-v1beta1_
library / 'g
@@ -1160,16 +1160,17 @@
brary /
+f
'grpc-go
@@ -1180,36 +1180,43 @@
e-cloud-
-asset-v1beta1
+%7Bservice%7D-%7Bversion%7D
/src',
+f
'../../g
@@ -1252,43 +1252,43 @@
oud-
-asset-v1beta1
+%7Bservice%7D-%7Bversion%7D
/src')%0A
+
s.copy(
-v1beta1_
libr
@@ -1293,16 +1293,17 @@
brary /
+f
'proto-g
@@ -1318,28 +1318,35 @@
oud-
-asset-v1beta1
+%7Bservice%7D-%7Bversion%7D
/src',
+f
'../
@@ -1387,21 +1387,231 @@
oud-
-asset-v1beta1
+%7Bservice%7D-%7Bversion%7D/src')%0A%0A java.format_code('./src')%0A java.format_code(f'../../google-api-grpc/grpc-google-cloud-%7Bservice%7D-%7Bversion%7D/src')%0A java.format_code(f'../../google-api-grpc/proto-google-cloud-%7Bservice%7D-%7Bversion%7D
/src
|
5555b8d93e127d92a577bdc33a2865dd2f1b81e7
|
Add a couple descriptive comments
|
tests/test_sim_setup.py
|
tests/test_sim_setup.py
|
"""This contains a set of tests for ParaTemp.sim_setup"""
########################################################################
# #
# This script was written by Thomas Heavey in 2018. #
# theavey@bu.edu thomasjheavey@gmail.com #
# #
# Copyright 2017-18 Thomas J. Heavey IV #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or #
# implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
########################################################################
from __future__ import absolute_import
import os
import pytest
import re
def test_job_info_from_qsub():
from paratemp.sim_setup import _job_info_from_qsub
job_info = _job_info_from_qsub('Your job 2306551 ("PT-NTD-CG") '
'has been submitted')
assert job_info == ('2306551', 'PT-NTD-CG', '2306551 ("PT-NTD-CG")')
class TestUpdateNum(object):
@pytest.fixture
def match_10(self):
return re.search(r'([,=])(\d+)', '=10')
@pytest.fixture
def match_text(self):
return re.search(r'([,=])(\w+)', '=text')
@pytest.fixture
def match_float(self):
return re.search(r'([,=])(\d+\.\d+)', '=2.1')
@pytest.fixture
def match_bad_few(self):
return re.search(r'([,=])', '=10')
@pytest.fixture
def match_bad_many(self):
return re.search(r'([,=])(\d+)(\d+)', '=10')
@pytest.fixture
def rd1030(self):
return {10: 30}
def test_update_num(self, match_10, rd1030):
from paratemp.sim_setup import _update_num
assert '=30' == _update_num(match_10, shift=10, cat_repl_dict=rd1030)
assert '=1' == _update_num(match_10, shift=9, cat_repl_dict=dict())
assert '=1' == _update_num(match_10, shift=9, cat_repl_dict=rd1030)
def test_update_num_raises(self, match_10, match_text, match_float,
match_bad_few, match_bad_many):
from paratemp.sim_setup import _update_num
with pytest.raises(KeyError):
_update_num(match_10, shift=10, cat_repl_dict=dict())
with pytest.raises(ValueError,
match='cannot be converted to a valid int'):
_update_num(match_text, cat_repl_dict=dict())
with pytest.raises(ValueError,
match='cannot be converted to a valid int'):
_update_num(match_float, cat_repl_dict=dict())
with pytest.raises(ValueError, match='unpack'):
_update_num(match_bad_few, cat_repl_dict=dict())
with pytest.raises(ValueError, match='too many.*unpack'):
_update_num(match_bad_many, cat_repl_dict=dict())
@pytest.fixture
def n_top_dc():
path = 'tests/test-data/ptad-cin-cg.top'
b_path = os.path.join(os.path.dirname(path),
'unequal-'+os.path.basename(path))
yield os.path.abspath(path)
if os.path.isfile(b_path):
os.rename(b_path, path)
@pytest.fixture
def folder_dc(n_top_dc):
return os.path.dirname(n_top_dc)
class TestGetSolvCountTop(object):
def test_get_solv_count_top(self, n_top_dc, folder_dc):
from paratemp.sim_setup import get_solv_count_top
assert get_solv_count_top(n_top_dc) == 361
assert get_solv_count_top(folder=folder_dc) == 361
class TestSetSolvCountTop(object):
def test_set_solv_count_top_n(self, n_top_dc):
from paratemp.sim_setup import set_solv_count_top, get_solv_count_top
set_solv_count_top(n_top_dc, s_count=100)
assert get_solv_count_top(n_top_dc) == 100
def test_set_solv_count_top_folder(self, folder_dc, n_top_dc):
from paratemp.sim_setup import set_solv_count_top, get_solv_count_top
set_solv_count_top(folder=folder_dc, s_count=50)
assert get_solv_count_top(n_top_dc) == 50
|
Python
| 0
|
@@ -3960,16 +3960,108 @@
h(path)%0A
+ # If a backup of the original was made, copy the backup over the updated%0A # version:%0A
if o
@@ -4363,50 +4363,153 @@
-assert get_solv_count_top(n_top_dc) == 361
+# Test giving the file name as input%0A assert get_solv_count_top(n_top_dc) == 361%0A # Test giving only the containing folder as input
%0A
|
a24d0879c4722ffa2349fbd322d11ced430b3434
|
Fix test
|
tests/test_transform.py
|
tests/test_transform.py
|
import unittest
from singer import transform
from singer.transform import *
class TestTransform(unittest.TestCase):
def test_integer_transform(self):
schema = {'type': 'integer'}
self.assertEqual(123, transform(123, schema))
self.assertEqual(123, transform('123', schema))
self.assertEqual(1234, transform('1,234', schema))
def test_nested_transform(self):
schema = {"type": "object",
"properties": {"addrs": {"type": "array",
"items": {"type": "object",
"properties": {"addr1": {"type": "string"},
"city": {"type": "string"},
"state": {"type": "string"},
'amount': {'type': 'integer'}}}}}}
data = {'addrs': [{'amount': '123'}, {'amount': '456'}]}
expected = {'addrs': [{'amount': 123}, {'amount': 456}]}
self.assertDictEqual(expected, transform(data, schema))
def test_null_transform(self):
self.assertEqual('', transform('', {'type': ['null', 'string']}))
self.assertEqual('', transform('', {'type': [ 'string', 'null']}))
self.assertEqual(None, transform(None, {'type': [ 'string', 'null']}))
self.assertEqual(None, transform('', {'type': ['null']}))
self.assertEqual(None, transform(None, {'type': ['null']}))
def test_datetime_transform(self):
schema = {"type": "string", "format": "date-time"}
string_datetime = "2017-01-01T00:00:00Z"
self.assertEqual(string_datetime, transform(string_datetime, schema, NO_INTEGER_DATETIME_PARSING))
self.assertEqual('1970-01-02T00:00:00Z', transform(86400, schema, UNIX_SECONDS_INTEGER_DATETIME_PARSING))
self.assertEqual(string_datetime, transform(string_datetime, schema, UNIX_SECONDS_INTEGER_DATETIME_PARSING))
self.assertEqual('1970-01-01T00:01:26Z', transform(86400, schema, UNIX_MILLISECONDS_INTEGER_DATETIME_PARSING))
self.assertEqual(string_datetime, transform(string_datetime, schema, UNIX_MILLISECONDS_INTEGER_DATETIME_PARSING))
with self.assertRaises(Exception):
transform('cat', schema, NO_INTEGER_DATETIME_PARSING)
with self.assertRaises(Exception):
transform('cat', schema, UNIX_SECONDS_INTEGER_DATETIME_PARSING)
with self.assertRaises(Exception):
transform(0, schema, NO_INTEGER_DATETIME_PARSING)
def test_anyof_datetime(self):
schema = {'anyOf': [{'type': 'null'}, {'format': 'date-time', 'type': 'string'}]}
string_datetime = '2016-03-10T18:47:20Z'
self.assertEqual(string_datetime, transform(string_datetime, schema))
self.assertIsNone(transform(None, schema))
def test_error_path(self):
schema = {"type": "object",
"properties": {"foo": {"type": "integer"},
"baz": {"type": "integer"}}}
data = {"foo": "bar", "baz": 1}
trans = Transformer(NO_INTEGER_DATETIME_PARSING)
success, data = trans.transform_recur(data, schema, [])
self.assertFalse(success)
self.assertIsNone(data)
self.assertListEqual([["foo"]], sorted(e.path for e in trans._errors))
def test_nested_error_path_throws(self):
schema = {
"type": "object",
"properties": {
"key1": {
"type": "object",
"properties": {
"key2": {
"type": "object",
"properties": {
"key3": {
"type": "object",
"properties": {
"key4": {"type": "integer"},
},
},
},
},
},
},
},
}
data = {"key1": {"key2": {"key3": {"key4": "not an integer"}}}}
trans = Transformer()
success, _ = trans.transform_recur(data, schema, [])
self.assertFalse(success)
self.assertListEqual([['key1', 'key2', 'key3', 'key4']], sorted(e.path for e in trans._errors))
def test_nested_error_path_no_throw(self):
schema = {
"type": "object",
"properties": {
"key1": {
"type": "object",
"properties": {
"key2": {
"type": "object",
"properties": {
"key3": {
"type": "object",
"properties": {
"key4": {"type": "string"},
"key5": {"type": "string"},
},
},
},
},
},
},
},
}
data = {"key1": {"key2": {"key3": {"key4": None, "key5": None}}}}
trans = Transformer()
success, data = trans.transform_recur(data, schema, [])
self.assertFalse(success)
self.assertIsNone(data)
# NB> error_paths may be returned in any order, so we sort here to be deterministic
self.assertListEqual([['key1', 'key2', 'key3', 'key4'], ['key1', 'key2', 'key3', 'key5']],
sorted(e.path for e in trans._errors))
def test_error_path_array(self):
schema = {"type": "object",
"properties": {"integers": {"type": "array",
"items": {"type": "integer"}}}}
data = {"integers": [1, 2, "not an integer", 4, "also not an integer"]}
trans = Transformer()
success, data = trans.transform_recur(data, schema, [])
self.assertFalse(success)
# NB> error_paths may be returned in any order, so we sort here to be deterministic
self.assertListEqual([["integers", 2], ["integers", 4]], sorted(e.path for e in trans._errors))
def test_nested_error_path_array(self):
schema = {"type": "object",
"properties": {"lists_of_integers": {"type": "array",
"items": {"type": "array",
"items": {"type": "integer"}}}}}
data = {"lists_of_integers": [[1, "not an integer"], [2, 3], ["also not an integer", 4]]}
trans = Transformer()
success, transformed_data = trans.transform_recur(data, schema, [])
self.assertFalse(success)
self.assertListEqual(
[["lists_of_integers", 0, 1], ["lists_of_integers", 2, 0]],
sorted(e.path for e in trans._errors))
def test_error_path_datetime(self):
schema = {"type": "object",
"properties": {"good_datetime": {"type": "string", "format": "date-time"},
"bad_datetime1": {"type": "string", "format": "date-time"},
"bad_datetime2": {"type": "string", "format": "date-time"}}}
data = {"good_datetime": "2017-04-11T16:07:00Z",
"bad_datetime1": "not a datetime",
"bad_datetime2": 1}
trans = Transformer()
success, transformed_data = trans.transform_recur(data, schema, [])
self.assertFalse(success)
self.assertListEqual([["bad_datetime1"], ["bad_datetime2"]], sorted(e.path for e in trans._errors))
def test_unexpected_object_properties(self):
schema = {"type": "object",
"properties": {"good_property": {"type": "string"}}}
data = {"good_property": "expected data",
"bad_property": "unexpected data"}
trans = Transformer()
success, transformed_data = trans.transform_recur(data, schema, [])
self.assertFalse(success)
self.assertIsNone(transformed_data)
|
Python
| 0.000004
|
@@ -8277,36 +8277,35 @@
self.assert
-Fals
+Tru
e(success)%0A
@@ -8310,39 +8310,48 @@
self.assert
-IsNone(
+DictEqual(data,
transformed_data
|
286d0d577921126512263e8d16a01a75878ee453
|
Add missing import
|
jenkinsapi/utils/urlopener_kerberos.py
|
jenkinsapi/utils/urlopener_kerberos.py
|
import urllib2
import kerberos as krb
class KerberosAuthHandler(urllib2.BaseHandler):
"""
A BaseHandler class that will add Kerberos Auth headers to a request
"""
def __init__(self,tgt):
self.tgt = tgt
def http_request(self,req):
req.add_unredirected_header('Authorization', 'Negotiate %s' % self.tgt)
return req
def https_request(self,req):
return self.http_request(req)
def mkkrbopener( jenkinsurl ):
"""
Creates an url opener that works with kerberos auth
:param jenkinsurl: jenkins url, str
:return: urllib2.opener configured for kerberos auth
"""
handlers = []
for handler in get_kerberos_auth_handler(jenkinsurl=jenkinsurl):
handlers.append(handler)
opener = urllib2.build_opener(*handlers)
return opener.open
def get_kerberos_auth_handler(jenkinsurl):
"""
Get a handler which enabled authentication over GSSAPI
:param jenkinsurl: jenkins base url, str
:return: a list of handlers
"""
jenkinsnetloc = urlparse(jenkinsurl).netloc
assert type( jenkinsnetloc ) == str, "Jenkins network location should be a string, got %s" % repr( jenkinsnetloc )
_ignore, ctx = krb.authGSSClientInit('HTTP@%s' % jenkinsnetloc, gssflags=krb.GSS_C_DELEG_FLAG|krb.GSS_C_MUTUAL_FLAG|krb.GSS_C_SEQUENCE_FLAG)
rc = krb.authGSSClientStep(ctx,'')
if rc != krb.AUTH_GSS_CONTINUE:
return []
tgt = krb.authGSSClientResponse(ctx)
if not tgt:
return []
krb_handler = KerberosAuthHandler(tgt)
return [ krb_handler ]
|
Python
| 0.000466
|
@@ -30,16 +30,46 @@
s as krb
+%0Afrom urlparse import urlparse
%0A%0Aclass
@@ -1601,8 +1601,9 @@
andler %5D
+%0A
|
9fa4543e9f6c6c5bb0954954649b7c691e462e3c
|
improve the volume export - sleep & check export
|
nova/volume/service.py
|
nova/volume/service.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Nova Storage manages creating, attaching, detaching, and
destroying persistent storage volumes, ala EBS.
Currently uses Ata-over-Ethernet.
"""
import logging
from twisted.internet import defer
from nova import db
from nova import exception
from nova import flags
from nova import process
from nova import service
from nova import validate
FLAGS = flags.FLAGS
flags.DEFINE_string('storage_dev', '/dev/sdb',
'Physical device to use for volumes')
flags.DEFINE_string('volume_group', 'nova-volumes',
'Name for the VG that will contain exported volumes')
flags.DEFINE_string('aoe_eth_dev', 'eth0',
'Which device to export the volumes on')
flags.DEFINE_string('aoe_export_dir',
'/var/lib/vblade-persist/vblades',
'AoE directory where exports are created')
flags.DEFINE_integer('blades_per_shelf',
16,
'Number of AoE blades per shelf')
flags.DEFINE_string('storage_availability_zone',
'nova',
'availability zone of this service')
flags.DEFINE_boolean('fake_storage', False,
'Should we make real storage volumes to attach?')
class VolumeService(service.Service):
"""
There is one VolumeNode running on each host.
However, each VolumeNode can report on the state of
*all* volumes in the cluster.
"""
def __init__(self):
super(VolumeService, self).__init__()
self._exec_init_volumes()
@defer.inlineCallbacks
# @validate.rangetest(size=(0, 1000))
def create_volume(self, volume_id, context=None):
"""
Creates an exported volume (fake or real),
restarts exports to make it available.
Volume at this point has size, owner, and zone.
"""
logging.info("volume %s: creating" % (volume_id))
volume_ref = db.volume_get(context, volume_id)
# db.volume_update(context, volume_id, {'node_name': FLAGS.node_name})
size = volume_ref['size']
logging.debug("volume %s: creating lv of size %sG" % (volume_id, size))
yield self._exec_create_volume(volume_id, size)
logging.debug("volume %s: allocating shelf & blade" % (volume_id))
(shelf_id, blade_id) = db.volume_allocate_shelf_and_blade(context,
volume_id)
logging.debug("volume %s: exporting shelf %s & blade %s" % (volume_id,
shelf_id, blade_id))
yield self._exec_create_export(volume_id, shelf_id, blade_id)
# TODO(joshua): We need to trigger a fanout message
# for aoe-discover on all the nodes
db.volume_update(context, volume_id, {'status': 'available'})
logging.debug("volume %s: re-exporting all values" % (volume_id))
yield self._exec_ensure_exports()
logging.debug("volume %s: created successfully" % (volume_id))
defer.returnValue(volume_id)
@defer.inlineCallbacks
def delete_volume(self, volume_id, context=None):
logging.debug("Deleting volume with id of: %s" % (volume_id))
volume_ref = db.volume_get(context, volume_id)
if volume_ref['attach_status'] == "attached":
raise exception.Error("Volume is still attached")
if volume_ref['node_name'] != FLAGS.node_name:
raise exception.Error("Volume is not local to this node")
shelf_id, blade_id = db.volume_get_shelf_and_blade(context,
volume_id)
yield self._exec_remove_export(volume_id, shelf_id, blade_id)
yield self._exec_delete_volume(volume_id)
db.volume_destroy(context, volume_id)
defer.returnValue(True)
@defer.inlineCallbacks
def _exec_create_volume(self, volume_id, size):
if FLAGS.fake_storage:
defer.returnValue(None)
if int(size) == 0:
sizestr = '100M'
else:
sizestr = '%sG' % size
yield process.simple_execute(
"sudo lvcreate -L %s -n %s %s" % (sizestr,
volume_id,
FLAGS.volume_group),
terminate_on_stderr=False)
@defer.inlineCallbacks
def _exec_delete_volume(self, volume_id):
if FLAGS.fake_storage:
defer.returnValue(None)
yield process.simple_execute(
"sudo lvremove -f %s/%s" % (FLAGS.volume_group,
volume_id),
terminate_on_stderr=False)
@defer.inlineCallbacks
def _exec_create_export(self, volume_id, shelf_id, blade_id):
if FLAGS.fake_storage:
defer.returnValue(None)
yield process.simple_execute(
"sudo vblade-persist setup %s %s %s /dev/%s/%s" %
(shelf_id,
blade_id,
FLAGS.aoe_eth_dev,
FLAGS.volume_group,
volume_id),
terminate_on_stderr=False)
@defer.inlineCallbacks
def _exec_remove_export(self, _volume_id, shelf_id, blade_id):
if FLAGS.fake_storage:
defer.returnValue(None)
yield process.simple_execute(
"sudo vblade-persist stop %s %s" % (shelf_id, blade_id),
terminate_on_stderr=False)
yield process.simple_execute(
"sudo vblade-persist destroy %s %s" % (shelf_id, blade_id),
terminate_on_stderr=False)
@defer.inlineCallbacks
def _exec_ensure_exports(self):
if FLAGS.fake_storage:
defer.returnValue(None)
# NOTE(vish): these commands sometimes sends output to stderr for warnings
yield process.simple_execute("sudo vblade-persist auto all",
terminate_on_stderr=False)
yield process.simple_execute("sudo vblade-persist start all",
terminate_on_stderr=False)
@defer.inlineCallbacks
def _exec_init_volumes(self):
if FLAGS.fake_storage:
defer.returnValue(None)
yield process.simple_execute(
"sudo pvcreate %s" % (FLAGS.storage_dev))
yield process.simple_execute(
"sudo vgcreate %s %s" % (FLAGS.volume_group,
FLAGS.storage_dev))
|
Python
| 0
|
@@ -6522,24 +6522,25 @@
e(None)%0A
+%0A
# NOTE(v
@@ -6535,82 +6535,75 @@
-# NOTE(vish): these commands sometimes sends output to stderr for warnings
+yield process.simple_execute(%22sleep 5%22) # wait for blades to appear
%0A
@@ -6705,35 +6705,31 @@
-terminate_on_stderr
+check_exit_code
=False)%0A
@@ -6835,35 +6835,31 @@
-terminate_on_stderr
+check_exit_code
=False)%0A
|
e1e76fefbd5a41ae14308a43245b4ecdf3099252
|
Simplify and still reuse out with float16 inputs.
|
numpy/core/_methods.py
|
numpy/core/_methods.py
|
"""
Array methods which are called by both the C-code for the method
and the Python code for the NumPy-namespace function
"""
from __future__ import division, absolute_import, print_function
import warnings
from numpy.core import multiarray as mu
from numpy.core import umath as um
from numpy.core.numeric import asanyarray
from numpy.core import numerictypes as nt
# save those O(100) nanoseconds!
umr_maximum = um.maximum.reduce
umr_minimum = um.minimum.reduce
umr_sum = um.add.reduce
umr_prod = um.multiply.reduce
umr_any = um.logical_or.reduce
umr_all = um.logical_and.reduce
# avoid keyword arguments to speed up parsing, saves about 15%-20% for very
# small reductions
def _amax(a, axis=None, out=None, keepdims=False):
return umr_maximum(a, axis, None, out, keepdims)
def _amin(a, axis=None, out=None, keepdims=False):
return umr_minimum(a, axis, None, out, keepdims)
def _sum(a, axis=None, dtype=None, out=None, keepdims=False):
return umr_sum(a, axis, dtype, out, keepdims)
def _prod(a, axis=None, dtype=None, out=None, keepdims=False):
return umr_prod(a, axis, dtype, out, keepdims)
def _any(a, axis=None, dtype=None, out=None, keepdims=False):
return umr_any(a, axis, dtype, out, keepdims)
def _all(a, axis=None, dtype=None, out=None, keepdims=False):
return umr_all(a, axis, dtype, out, keepdims)
def _count_reduce_items(arr, axis):
if axis is None:
axis = tuple(range(arr.ndim))
if not isinstance(axis, tuple):
axis = (axis,)
items = 1
for ax in axis:
items *= arr.shape[ax]
return items
def _mean(a, axis=None, dtype=None, out=None, keepdims=False):
arr = asanyarray(a)
rcount = _count_reduce_items(arr, axis)
orig_dtype = dtype
# Make this warning show up first
if rcount == 0:
warnings.warn("Mean of empty slice.", RuntimeWarning, stacklevel=2)
# Cast bool, unsigned int, and int to float64 by default
if dtype is None:
if issubclass(arr.dtype.type, (nt.integer, nt.bool_)):
dtype = mu.dtype('f8')
elif issubclass(arr.dtype.type, nt.float16):
dtype = mu.dtype('f4')
ret = umr_sum(arr, axis, dtype, out, keepdims)
if isinstance(ret, mu.ndarray):
ret = um.true_divide(
ret, rcount, out=ret, casting='unsafe', subok=False)
if orig_dtype is None and issubclass(arr.dtype.type, nt.float16):
ret = a.dtype.type(ret)
elif hasattr(ret, 'dtype'):
if orig_dtype is None and issubclass(arr.dtype.type, nt.float16):
ret = a.dtype.type(ret / rcount)
else:
ret = ret.dtype.type(ret / rcount)
else:
ret = ret / rcount
return ret
def _var(a, axis=None, dtype=None, out=None, ddof=0, keepdims=False):
arr = asanyarray(a)
rcount = _count_reduce_items(arr, axis)
# Make this warning show up on top.
if ddof >= rcount:
warnings.warn("Degrees of freedom <= 0 for slice", RuntimeWarning,
stacklevel=2)
# Cast bool, unsigned int, and int to float64 by default
if dtype is None and issubclass(arr.dtype.type, (nt.integer, nt.bool_)):
dtype = mu.dtype('f8')
# Compute the mean.
# Note that if dtype is not of inexact type then arraymean will
# not be either.
arrmean = umr_sum(arr, axis, dtype, keepdims=True)
if isinstance(arrmean, mu.ndarray):
arrmean = um.true_divide(
arrmean, rcount, out=arrmean, casting='unsafe', subok=False)
else:
arrmean = arrmean.dtype.type(arrmean / rcount)
# Compute sum of squared deviations from mean
# Note that x may not be inexact and that we need it to be an array,
# not a scalar.
x = asanyarray(arr - arrmean)
if issubclass(arr.dtype.type, nt.complexfloating):
x = um.multiply(x, um.conjugate(x), out=x).real
else:
x = um.multiply(x, x, out=x)
ret = umr_sum(x, axis, dtype, out, keepdims)
# Compute degrees of freedom and make sure it is not negative.
rcount = max([rcount - ddof, 0])
# divide by degrees of freedom
if isinstance(ret, mu.ndarray):
ret = um.true_divide(
ret, rcount, out=ret, casting='unsafe', subok=False)
elif hasattr(ret, 'dtype'):
ret = ret.dtype.type(ret / rcount)
else:
ret = ret / rcount
return ret
def _std(a, axis=None, dtype=None, out=None, ddof=0, keepdims=False):
ret = _var(a, axis=axis, dtype=dtype, out=out, ddof=ddof,
keepdims=keepdims)
if isinstance(ret, mu.ndarray):
ret = um.sqrt(ret, out=ret)
elif hasattr(ret, 'dtype'):
ret = ret.dtype.type(um.sqrt(ret))
else:
ret = um.sqrt(ret)
return ret
|
Python
| 0
|
@@ -1654,32 +1654,62 @@
asanyarray(a)%0A%0A
+ is_float16_result = False%0A
rcount = _co
@@ -1740,31 +1740,8 @@
is)%0A
- orig_dtype = dtype%0A
@@ -2139,16 +2139,53 @@
pe('f4')
+%0A is_float16_result = True
%0A%0A re
@@ -2379,69 +2379,41 @@
if
-orig_dtype is None and issubclass(arr.dtype.type, nt.float16)
+is_float16_result and out is None
:%0A
@@ -2493,69 +2493,25 @@
if
-orig_dtype is None and issubclass(arr.dtype.type, nt.float16)
+is_float16_result
:%0A
@@ -2522,36 +2522,34 @@
ret =
-a.dtype.type
+nt.float16
(ret / rcoun
|
5044b8049e667630301262836eba82902ce33d09
|
Allow overriding the 404 page for missing files
|
private_storage/views.py
|
private_storage/views.py
|
"""
Views to send private files.
"""
import os
from django.http import Http404, HttpResponseForbidden
from django.utils.module_loading import import_string
from django.views.generic import View
from django.views.generic.detail import SingleObjectMixin
from . import appconfig
from .models import PrivateFile
from .servers import get_server_class
from .storage import private_storage
try:
from urllib.parse import quote
except ImportError:
from urllib import quote # Python 2
class PrivateStorageView(View):
"""
Return the uploaded files
"""
#: The storage class to retrieve files from
storage = private_storage
#: The authorisation rule for accessing
can_access_file = staticmethod(import_string(appconfig.PRIVATE_STORAGE_AUTH_FUNCTION))
#: Import the server class once
server_class = get_server_class(appconfig.PRIVATE_STORAGE_SERVER)
#: Whether the file should be displayed ``inline`` or show a download box (``attachment``).
content_disposition = None
#: The filename to use when :attr:`content_disposition` is set.
content_disposition_filename = None
def get_path(self):
"""
Determine the path for the object to provide.
This can be overwritten to combine the view with a different object retrieval.
"""
return self.kwargs['path']
def get_private_file(self):
"""
Return all relevant data in a single object, so this is easy to extend
and server implementations can pick what they need.
"""
return PrivateFile(
request=self.request,
storage=self.storage,
relative_name=self.get_path()
)
def get(self, request, *args, **kwargs):
"""
Handle incoming GET requests
"""
private_file = self.get_private_file()
if not self.can_access_file(private_file):
return HttpResponseForbidden('Private storage access denied')
if not private_file.exists():
raise Http404("File not found")
return self.serve_file(private_file)
def serve_file(self, private_file):
"""
Serve the file that was retrieved from the storage.
The relative path can be found with ``private_file.relative_name``.
:type private_file: :class:`private_storage.models.PrivateFile`
:rtype: django.http.HttpResponse
"""
response = self.server_class().serve(private_file)
if self.content_disposition:
filename = self.get_content_disposition_filename(private_file)
response['Content-Disposition'] = '{}; {}'.format(
self.content_disposition, self._encode_filename_header(filename)
)
return response
def get_content_disposition_filename(self, private_file):
"""
Return the filename in the download header.
"""
return self.content_disposition_filename or os.path.basename(private_file.relative_name)
def _encode_filename_header(self, filename):
"""
The filename, encoded to use in a ``Content-Disposition`` header.
"""
# Based on https://www.djangosnippets.org/snippets/1710/
user_agent = self.request.META.get('HTTP_USER_AGENT', None)
if 'WebKit' in user_agent:
# Support available for UTF-8 encoded strings.
utf8_filename = filename.encode("utf-8")
return 'filename={}'.format(utf8_filename)
elif 'MSIE' in user_agent:
# IE does not support internationalized filename at all.
# It can only recognize internationalized URL, so we should perform a trick via URL names.
return ''
else:
# For others like Firefox, we follow RFC2231 (encoding extension in HTTP headers).
rfc2231_filename = quote(filename.encode("utf-8"))
return "filename*=UTF-8''{}".format(rfc2231_filename)
class PrivateStorageDetailView(SingleObjectMixin, PrivateStorageView):
"""
Download a document based on an object ID.
This view can by used by third-party apps to implement their own download view.
Implement access controls by overriding :meth`get_queryset` or redefining :meth:`can_access_file`.
"""
#: Define the model to fetch.
model = None
#: Define which field the file name is stored at.
model_file_field = 'file'
def get(self, request, *args, **kwargs):
self.object = self.get_object()
return super(PrivateStorageDetailView, self).get(request, *args, **kwargs)
def get_path(self):
file = getattr(self.object, self.model_file_field)
return file.name
def get_private_file(self):
# Provide the parent object as well.
return PrivateFile(
request=self.request,
storage=self.storage,
relative_name=self.get_path(),
parent_object=self.object
)
def can_access_file(self, private_file):
"""
The authorization rule for this view.
By default it reuses the ``PRIVATE_STORAGE_AUTH_FUNCTION`` setting,
but this should likely be redefined.
"""
return PrivateStorageView.can_access_file(private_file)
|
Python
| 0.000001
|
@@ -2024,83 +2024,654 @@
r
-aise Http404(%22File not found%22)%0A%0A return self.serve_file(private_file
+eturn self.serve_file_not_found(private_file)%0A else:%0A return self.serve_file(private_file)%0A%0A def serve_file_not_found(self, private_file):%0A %22%22%22%0A Display a response message telling that the file is not found.%0A This can be overwritten to improve the customer experience.%0A For example%0A - redirect the user, and show a message.%0A - render the message in the expected media type (e.g. PNG).%0A - show a custom 404 page.%0A%0A :type private_file: :class:%60private_storage.models.PrivateFile%60%0A :rtype: django.http.HttpResponse%0A %22%22%22%0A raise Http404(%22File not found%22
)%0A%0A
|
f95555ee63323d4046444f14395813a415aa0683
|
implement just enough of mtrand to make tests start passing
|
numpy/random/mtrand.py
|
numpy/random/mtrand.py
|
random_sample = None
|
Python
| 0
|
@@ -1,21 +1,409 @@
-random_sample = None
+import random%0Afrom numpy import zeros%0Adef random_sample(length=0):%0A if length == 0:%0A return random.random()%0A ret = zeros((length,))%0A for x in xrange(length):%0A ret%5Bx%5D = random.random()%0A return ret%0A%0Adef randn(length=0):%0A if length == 0:%0A return random.gauss(0., 1.)%0A ret = zeros((length,))%0A for x in xrange(length):%0A ret%5Bx%5D = random.gauss(0., 1.)%0A return ret
%0A
|
92162564fb95ba20090b230695b52d4ddb41a38b
|
Add support for reading SSH keys from file.
|
cog/util.py
|
cog/util.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2013, Activision Publishing, Inc.
# the cog project is free software under 3-clause BSD licence
# see the LICENCE file in the project root for copying terms
# miscellaneous utility functions, mostly borrowed
import os
import pwd
import string
import random
import keyring
import getpass
from itertools import chain
from passlib.hash import sha512_crypt
def randomized_string(size=16, chars=string.letters + string.digits + string.punctuation):
"""
Generate randomized string using printable character. (Not using
string.printable here because it does produce more than we can eat,
unfortunately.)
"""
return ''.join(random.choice(chars) for x in range(size))
def make_pass(passwd=None):
"""
generate password using SHA-512 method, randomized salt and randomized
number of rounds.
"""
if passwd is None:
passwd = randomized_string(17)
salt = randomized_string(16, ('./' + string.letters + string.digits))
iterations = random.randint(40000, 80000)
return '{CRYPT}' + sha512_crypt.encrypt(passwd, salt=salt, rounds=iterations)
def get_pass(username, service, prompt, use_keyring=False):
"""
get a password string, either from user input or from system key/password
store
"""
password = None
if use_keyring:
password = keyring.get_password(service, username)
if not password:
password = getpass.getpass(prompt)
if password:
keyring.set_password(service, username, password)
else:
password = getpass.getpass(prompt)
return password
def get_current_uid():
return pwd.getpwuid(os.getuid()).pw_name
def loop_on(input):
if isinstance(input, basestring):
yield input
else:
try:
for item in input:
yield item
except TypeError:
yield input
def flatten(list_of_lists):
"""
Flatten one level of nesting
"""
return chain.from_iterable(list_of_lists)
def merge(d1, d2):
"""
Merge two dictionaries recursively. Merge the lists embedded within
dictionary at the same positions too (with caveats).
"""
for k1, v1 in d1.iteritems():
if k1 not in d2:
d2[k1] = v1
elif isinstance(v1, list):
d2[k1] = list(set(d2[k1] + v1))
elif isinstance(v1, dict):
merge(v1, d2[k1])
return d2
class Singleton(type):
_instances = {}
def __call__(cls, *args, **kwargs):
if cls not in cls._instances:
cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
return cls._instances[cls]
|
Python
| 0
|
@@ -324,16 +324,64 @@
getpass%0A
+import sshpubkeys%0A%0Afrom sshpubkeys import SSHKey
%0Afrom it
@@ -437,24 +437,422 @@
512_crypt%0A%0A%0A
+def read_ssh_key(path):%0A %22%22%22%0A Read an SSH key given path, bail out when bad. Limit the keyfile length.%0A %22%22%22%0A key = None%0A try:%0A with open(path) as key_fh:%0A key = SSHKey(key_fh.read(262144)).keydata.strip()%0A except IOError as io_exc:%0A print io_exc.message%0A except sshpubkeys.InvalidKeyException as key_exc:%0A print key_exc.message%0A return key%0A%0A%0A
def randomiz
|
422e5e71767df502e504d3f0482e6c8981c7eb25
|
Update rng.py
|
cogs/rng.py
|
cogs/rng.py
|
# -*- coding: utf-8 -*-
from discord.ext import commands
import random as rng
import copy
class RNG:
"""Utilities that provide pseudo-RNG."""
el_fractions=['necro', 'walkers', 'xyuecro', 'lords']
def __init__(self, bot):
self.bot = bot
self.el_pull = copy.copy(RNG.el_frections)
@commands.command()
async def random(self, minimum=0, maximum=100):
"""Выбрать случайное число в заданном диапазоне.
Минимум должен быть меньше максимума, а максимум — меньше 1000.
"""
maximum = min(maximum, 1000)
if minimum >= maximum:
await self.bot.say('Максимум меньше минимума.')
return
await self.bot.say(rng.randint(minimum, maximum))
#@random.command()
#async def lenny(self):
# """Displays a random lenny face."""
# lenny = rng.choice([
# "( ͡° ͜ʖ ͡°)", "( ͠° ͟ʖ ͡°)", "ᕦ( ͡° ͜ʖ ͡°)ᕤ", "( ͡~ ͜ʖ ͡°)",
# "( ͡o ͜ʖ ͡o)", "͡(° ͜ʖ ͡ -)", "( ͡͡ ° ͜ ʖ ͡ °)", "(ง ͠° ͟ل͜ ͡°)ง",
# "ヽ༼ຈل͜ຈ༽ノ"
# ])
# await self.bot.say(lenny)
@commands.group(pass_context=True, aliases=[])
async def el(self, ctx):
r_answer = ''
for fract in self.el_pull:
str_answer += '{}\n'.format(fract)
await self.bot.say(str_answer)
@el.command(pass_context=True)
async def repull(self, ctx):
self.el_pull = copy.copy(RNG.el_fractions)
ctx.invoke(self.el)
@commands.command(aliases=['выбери', 'вибери'])
async def choose(self, *, choices : str):
"""Есть два стула...
Варианты должны быть разделены с помощью `or` или `или`
"""
choices_list = list()
for choice in choices.split('or'):
choices_list += choice.split('или')
if len(choices_list) < 2:
await self.bot.say('Шо то хуйня, шо это хуйня.')
else:
await self.bot.say(rng.choice(choices_list).lstrip())
def setup(bot):
bot.add_cog(RNG(bot))
|
Python
| 0.000001
|
@@ -1089,378 +1089,8 @@
y)%0A%0A
- @commands.group(pass_context=True, aliases=%5B%5D)%0A async def el(self, ctx):%0A r_answer = ''%0A for fract in self.el_pull:%0A str_answer += '%7B%7D%5Cn'.format(fract)%0A await self.bot.say(str_answer)%0A%0A @el.command(pass_context=True)%0A async def repull(self, ctx):%0A self.el_pull = copy.copy(RNG.el_fractions)%0A ctx.invoke(self.el)
%0A%0A
|
34f4d6c095c112ed0c8a64c9ab3f28e22827c4f0
|
Fix randomfact + fmt
|
cogs/rng.py
|
cogs/rng.py
|
import discord
import random
import json
from discord.ext import commands
from bs4 import BeautifulSoup
from utils import aiohttp_wrap as aw
class RNG(commands.Cog):
def __init__(self, bot):
self.bot = bot
self.session = bot.aio_session
self.fact_url = "http://www.unkno.com/"
self.react_url = "http://api.chew.pro/trbmb"
@commands.command(aliases=["facts"])
async def fact(self, ctx):
""" Get a random fun fact (potentially NSFW) """
html = await aw.aio_get_text(self.session, self.fact_url)
soup = BeautifulSoup(html, "lxml")
try:
fun_fact = soup.find("div", id="content").text.strip()
except AttributeError:
return await ctx.send(
random.choice(
[
"Sorry, I get nervous in front of crowds",
"Oh god, I'm blanking",
"Just a second, I'll think of something...",
"This is no time for fun or facts.",
]
)
)
# Create embed
em = discord.Embed(description=fun_fact)
em.set_thumbnail(url="https://i.imgur.com/c36rUx9.png")
await ctx.send(embed=em)
@commands.command()
async def react(self, ctx):
""" Have qtbot react with something inane """
# Have to get text because it has the wrong content-type
word1 = "bites,highs,burns,ruins,humids,leans,quiets,traffics,homes,crashes,trumps,backs,salts,xboxs,closes,records,stops,sevens,pollutes,kills,rents,cleans,extras,boggles,Taylor's,snaps,questions,coffee's,clicks,pops,ticks,maintains,stars,ties,nys,bills,defends,opens,airs,Americans,steals,drinks,yous,businesses,teleys,invents,thanks,students,computers,frees,weathers,vends,severs,allergies,silences,fires,ambers,pushes,screws,smokes,mrs,reds,consumes,let's,classes,makes,draws,lights,butters,celebrates,drives,pulls,toxics,finds,waters,pets,lags,types,environments,grows,builds,moos,tunas,confuses,classifies,births,fails,breaks,emotionals,booms,calls,taxes,burgers,4s,gases,potatoes,pre owns,sends,mows,tickles,lefts,Saharas,nals,unites,camps,roses,shuts down,macs,apples,cheeses,turns,flexes,moves,trucks,necks,swallows,Harry's,flushes,pays,eyes,cities,increases,trains,cooks,i's,cringes,unders,folds,enters,speeds,roads,spends,tacos,pumps,hearts,Willows,reads,suhs,dogs,rocks,cookies,grinds".split(',')
word2 = "bites,voices,rubber,jokes,weather,dabs,time,jams,depots,parties,country,Clinton,fires,grasses,one,door,videos,signs,elevens,air,mood,movie,rooms,roads,brain cells,points,mind,Swifts,chats,vibe,motives,mugs,pens,buttons,sanity,tocks,office,scouts,shoes,keys,nyes,freedom,will to live,force,flags,Gatorade,sprite,tubes,service,phones,wheel,yous,services,labs,tuition,ford,machines,warnings,alert,phone,extinguishers,dexterious,driver,detector,jos,cross,M&Ms,goes,days,pictures,poles,biscuit,75 years,cars,levers,waters,ways out,burgers,dogs,minecraft,emojis,sciences,trees,legos,buildings,cows,fish,conversation,animals,certificates,science classes,hearts,issues,roasted,horns,friends,kings,Gs,birthdays,stations,chips,vehicles,texts,lawns,pickles,lanes,deserts,genes,rocks,states,outs,coffee,reds,computers,books,watches,milk,steaks,teens,wheels,muscles,homes,stops,self,tattoos,food,Potters,toilets,brows,limits,toasts,towers,volume,tracks,wears,bones,oragamies,zones,kills,money,bells,ups,radios,ways,Donald's,springs,elections,walls,corn,dudes,filters,rolls,tongues,gears".split(',')
await ctx.send(f"That really {random.choice(word1)} my {random.choice(word2)}!")
def setup(bot):
bot.add_cog(RNG(bot))
|
Python
| 0.000119
|
@@ -26,20 +26,8 @@
dom%0A
-import json%0A
from
@@ -59,38 +59,8 @@
nds%0A
-from bs4 import BeautifulSoup%0A
from
@@ -243,78 +243,56 @@
http
+s
://
-www.unkno.com/%22%0A self.react_url = %22http://api.chew.pro/trbmb
+uselessfacts.jsph.pl/random.json?language=en
%22%0A%0A
@@ -427,20 +427,20 @@
-html
+fact
= await
@@ -451,20 +451,20 @@
aio_get_
-text
+json
(self.se
@@ -489,161 +489,28 @@
rl)%0A
-
+%0A
-soup = BeautifulSoup(html, %22lxml%22)%0A%0A try:%0A fun_fact = soup.find(%22div%22, id=%22content%22).text.strip()%0A except AttributeError
+ if not fact
:%0A
@@ -961,15 +961,19 @@
on=f
-un_fact
+act%5B'text'%5D
)%0A
@@ -2248,35 +2248,57 @@
s,grinds%22.split(
-','
+%0A %22,%22%0A
)%0A%0A word2
@@ -3386,11 +3386,33 @@
lit(
-','
+%0A %22,%22%0A
)%0A%0A
|
65998c13dfa7f1b13cc2332eeceec186e9d6af1c
|
fix syntax
|
channels.py
|
channels.py
|
channels = {
"#huggle":
lambda x: x.get("X-Bugzilla-Product", None) == "Huggle",
"#pywikibot":
lambda x: x.get("X-Bugzilla-Product", None) == "Pywikibot",
"#wikimedia-corefeatures":
lambda x: (x.get("X-Bugzilla-Product", None) == "MediaWiki extensions") and \
(x.get("X-Bugzilla-Component", None) in ["Echo", "Flow", "PageCuration", "Thanks", "WikiLove"]),
"#mediawiki-i18n":
lambda x: (x.get("X-Bugzilla-Component", None) in ["ContentTranslation"]),
"#wikimedia-labs":
lambda x: x.get("X-Bugzilla-Product", None) in ["Tool Labs tools", "Wikimedia Labs"],
"#wikimedia-mobile":
lambda x: x.get("X-Bugzilla-Product", None) in ["Wikimedia Mobile", "Commons App", "Wikipedia App", "MobileFrontend"],
"#wikimedia-qa":
lambda x: (
(x.get("X-Bugzilla-Product", None) == "Wikimedia") and \
(x.get("X-Bugzilla-Component", None) in ["Continuous integration", "Quality Assurance"])
) or \
(
(x.get("X-Bugzilla-Product", None) == "Wikimedia Labs") and \
(x.get("X-Bugzilla-Component", None) == ""deployment-prep (beta)")
),
"#mediawiki-visualeditor":
lambda x: x.get("X-Bugzilla-Product", None) in ["VisualEditor", "OOjs", "OOjs UI"] or \
(
(x.get("X-Bugzilla-Product", None) == "MediaWiki extensions") and \
(x.get("X-Bugzilla-Component", None) in ["TemplateData", "Cite", "WikiEditor"])
) or \
(
(x.get("X-Bugzilla-Product", None) == "MediaWiki") and \
(x.get("X-Bugzilla-Component", None) in ["Page editing", "ResourceLoader"])
),
"#mediawiki-parsoid":
lambda x: x.get("X-Bugzilla-Product", None) in ["Parsoid"],
"#wikimedia-multimedia":
lambda x: \
(
x.get("X-Bugzilla-Product", None) in ["MediaWiki extensions"] and
x.get("X-Bugzilla-Component", None) in
["UploadWizard", "TimedMediaHandler", "VipsScaler", "GlobalUsage", "MultimediaViewer", "GWToolset",
"Score", "PagedTiffHandler", "PdfHandler", "ImageMap", "CommonsMetadata", "OggHandler"]
) or \
(
x.get("X-Bugzilla-Product", None) in ["MediaWiki"] and
x.get("X-Bugzilla-Component", None) in
["File management", "Uploading"]
),
"#wikimedia-growth":
lambda x:
(
x.get("X-Bugzilla-Product", None) in ["MediaWiki extensions"] and
x.get("X-Bugzilla-Component", None) in ["GuidedTour", "GettingStarted"]
),
"#wikimedia-analytics":
lambda x: x.get("X-Bugzilla-Product", None) == "Analytics",
# The following changes should ALWAYS be in #wikimedia-dev, even if the bugs
# are also reported elsewhere.
"#wikimedia-dev":
lambda x: x.get("X-Bugzilla-Product", None) == "MediaWiki"
}
default_channel = "#wikimedia-dev"
firehose_channel = "#mediawiki-feed"
|
Python
| 0.999704
|
@@ -1201,17 +1201,16 @@
one) ==
-%22
%22deploym
|
0b048cef1f0efd190d8bf8f50c69df35c59b91a3
|
Add verbosity on JSON compare fail
|
xdc-plugin/tests/compare_output_json.py
|
xdc-plugin/tests/compare_output_json.py
|
#!/usr/bin/env python3
"""
This script extracts the top module cells and their corresponding parameters
from json files produced by Yosys.
The return code of this script is used to check if the output is equivalent.
"""
import sys
import json
def read_cells(json_file):
with open(json_file) as f:
data = json.load(f)
f.close()
cells = data['modules']['top']['cells']
cells_parameters = dict()
for cell, opts in cells.items():
cells_parameters[cell] = opts['parameters']
return cells_parameters
def main():
if len(sys.argv) < 3:
print("Incorrect number of arguments")
exit(1)
cells1 = read_cells(sys.argv[1])
cells2 = read_cells(sys.argv[2])
if cells1 == cells2:
exit(0)
else:
exit(1)
if __name__ == "__main__":
main()
|
Python
| 0
|
@@ -239,16 +239,73 @@
t json%0A%0A
+parameters = %5B%22IOSTANDARD%22, %22DRIVE%22, %22SLEW%22, %22IN_TERM%22%5D%0A%0A
def read
@@ -503,24 +503,173 @@
ls.items():%0A
+ attributes = opts%5B'parameters'%5D%0A if len(attributes.keys()):%0A if any(%5Bx in parameters for x in attributes.keys()%5D):%0A
cell
@@ -689,34 +689,26 @@
cell%5D =
-opts%5B'parameters'%5D
+attributes
%0A ret
@@ -946,16 +946,16 @@
exit(0)%0A
-
else
@@ -956,16 +956,124 @@
else:%0A
+ print(json.dumps(cells1, indent=4))%0A print(%22VS%22)%0A print(json.dumps(cells2, indent=4))%0A
|
fa28919d3d968fead742138484fcc81a6fca46d4
|
Add tests for hubsync.sync.yesno_as_boolean
|
tests/unit/sync_test.py
|
tests/unit/sync_test.py
|
"""Sync module tests"""
import unittest
from hubsync import sync
class ZipPairsTestCase(unittest.TestCase):
def test_empty_lists(self):
self.assertEqual(
[],
list(sync.zip_pairs([], []))
)
def test_empty_first_list(self):
self.assertEqual(
[(1, None)],
list(sync.zip_pairs([1], []))
)
def test_empty_second_list(self):
self.assertEqual(
[(None, 1)],
list(sync.zip_pairs([], [1]))
)
def test_single_element_lists(self):
self.assertEqual(
set([(1, 1), (1, None)]),
set(sync.zip_pairs([1, 1], [1]))
)
def test_non_matching_elements(self):
self.assertEqual(
set([(None, 2), (1, None)]),
set(sync.zip_pairs([1], [2]))
)
def test_unordered_matching(self):
self.assertEqual(
set([(1, 1), (2, 2)]),
set(sync.zip_pairs([1, 2], [2, 1]))
)
def test_diff_length_non_matching_lower(self):
self.assertEqual(
set([('etcaterva', 'etcaterva'), ('aa', None)]),
set(sync.zip_pairs(['aa', 'etcaterva'], ['etcaterva']))
)
def test_diff_length_non_matching_higher(self):
self.assertEqual(
set([('zz', None), ('etcaterva', 'etcaterva')]),
set(sync.zip_pairs(['zz', 'etcaterva'], ['etcaterva']))
)
if __name__ == '__main__':
unittest.main()
|
Python
| 0.001036
|
@@ -61,16 +61,247 @@
sync%0A%0A%0A
+class SyncTestCase(unittest.TestCase):%0A def test_yesno_as_boolean_yes(self):%0A self.assertTrue(sync.yesno_as_boolean(%22yes%22))%0A%0A def test_yesno_as_boolean_no(self):%0A self.assertFalse(sync.yesno_as_boolean(%22no%22))%0A%0A%0A
class Zi
|
816d6bcd5660d539c4482ea76f1adcf69c23cc92
|
add inverse test
|
streams/coordinates/tests/test_core.py
|
streams/coordinates/tests/test_core.py
|
# coding: utf-8
"""
Test conversions in core.py
"""
from __future__ import absolute_import, division, print_function
__author__ = "adrn <adrn@astro.columbia.edu>"
import os
import pytest
import numpy as np
import astropy.coordinates as coord
import astropy.units as u
from astropy.io import ascii
from ..core import *
this_path = os.path.split(__file__)[0]
data = ascii.read(os.path.join(this_path, "idl_vgsr_vhel.txt"))
def test_gsr_to_hel():
for row in data:
l = row["lon"] * u.degree
b = row["lat"] * u.degree
v_gsr = row["vgsr"] * u.km/u.s
v_sun_lsr = [row["vx"],row["vy"],row["vz"]]*u.km/u.s
v_hel = vgsr_to_vhel(l, b, v_gsr,
v_sun_lsr=v_sun_lsr,
v_circ=row["vcirc"]*u.km/u.s)
np.testing.assert_almost_equal(v_hel, row['vhelio'], decimal=2)
|
Python
| 0.999869
|
@@ -852,16 +852,22 @@
al(v_hel
+.value
, row%5B'v
@@ -887,9 +887,471 @@
mal=
-2
+4)%0A%0Adef test_hel_to_gsr():%0A for row in data:%0A l = row%5B%22lon%22%5D * u.degree%0A b = row%5B%22lat%22%5D * u.degree%0A v_hel = row%5B%22vhelio%22%5D * u.km/u.s%0A v_sun_lsr = %5Brow%5B%22vx%22%5D,row%5B%22vy%22%5D,row%5B%22vz%22%5D%5D*u.km/u.s%0A %0A v_gsr = vhel_to_vgsr(l, b, v_hel, %0A v_sun_lsr=v_sun_lsr, %0A v_circ=row%5B%22vcirc%22%5D*u.km/u.s)%0A %0A np.testing.assert_almost_equal(v_gsr.value, row%5B'vgsr'%5D, decimal=4
)%0A
|
675f3cdb2080fb71136b1da18a47321111a34fd9
|
Fix Python version check, fix formatting
|
updates.py
|
updates.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = "Mikhail Fedosov (tbs.micle@gmail.com)"
__version__ = "0.1.4"
# http://code.activestate.com/recipes/577708-check-for-package-updates-on-pypi-works-best-in-pi/
# http://stackoverflow.com/questions/287871/print-in-terminal-with-colors-using-python
import pip
import sys
import socket
from multiprocessing import Pool
if sys.version < '3':
from xmlrpclib import ServerProxy
else:
from xmlrpc.client import ServerProxy
if sys.version < '3':
reload(sys)
sys.setdefaultencoding("utf-8")
else:
# http://stackoverflow.com/questions/3828723/why-we-need-sys-setdefaultencodingutf-8-in-a-py-script
pass
class colors:
""" Colored terminal text
"""
OKGREEN = "\033[92m"
FAIL = "\033[91m"
BOLD = "\033[1m"
ENDC = "\033[0m"
@classmethod
def disable(cls):
colors.OKGREEN = ""
colors.FAIL = ""
colors.BOLD = ""
colors.ENDC = ""
class symbols:
""" Status symbols
"""
FAIL = u"✖ "
UPDATE = u"+ "
OK = u"✓ "
@classmethod
def disable(cls):
symbols.FAIL = ""
symbols.UPDATE = ""
symbols.OK = ""
@classmethod
def simplify(cls):
symbols.FAIL = "x "
symbols.UPDATE = "+ "
symbols.OK = ""
# disable colors and simplify status symbols for Windows console
if sys.platform == "win32":
colors.disable()
symbols.simplify()
def check_package(dist):
pypi = xmlrpclib.ServerProxy("http://pypi.python.org/pypi")
try:
available = pypi.package_releases(dist.project_name)
if not available:
# try to capitalize pkg name
available = pypi.package_releases(dist.project_name.capitalize())
if not available:
# try to replace "-" by "_" (as in "django_compressor")
available = pypi.package_releases(dist.project_name.replace("-", "_"))
if not available:
msg = u"{colors.FAIL}{symbols.FAIL}not found at PyPI{colors.ENDC}".format(colors=colors, symbols=symbols)
elif available[0] != dist.version:
msg = u"{colors.OKGREEN}{symbols.UPDATE}{colors.BOLD}{version}{colors.ENDC}".format(colors=colors, symbols=symbols, version=available[0])
else:
if "-v" in sys.argv:
msg = u"{symbols.OK}up to date".format(colors=colors, symbols=symbols)
else:
msg = ""
except socket.timeout:
msg = u"{colors.FAIL}{symbols.FAIL}timeout{colors.ENDC}".format(colors=colors, symbols=symbols)
except KeyboardInterrupt:
return False
if msg:
print((u"{dist.project_name:26} {colors.BOLD}{dist.version:16}{colors.ENDC} {msg}".format(colors=colors, dist=dist, msg=msg)).encode("utf-8", "replace"))
def main():
    """Check every installed distribution against PyPI for newer releases."""
    # bound every network call so a dead PyPI mirror cannot hang the run
    socket.setdefaulttimeout(5.0)

    # do not use multiprocessing under Windows
    if sys.platform == "win32":
        # Iterate explicitly: on Python 3 a bare map() is lazy, so the
        # original call never actually invoked check_package.
        for dist in pip.get_installed_distributions():
            check_package(dist)
    else:
        pypi_pool = Pool()
        pypi_pool_map = pypi_pool.map_async(check_package, pip.get_installed_distributions())
        try:
            # .get() with a timeout (instead of a bare wait) keeps Ctrl-C responsive
            pypi_pool_map.get(0xFFFF)
        except KeyboardInterrupt:
            print("Aborted")
            return
# Run only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main()
|
Python
| 0.000001
|
@@ -388,20 +388,25 @@
sion
- %3C '3':%0A
+_info %3C (3, 0):%0A%09
from
@@ -445,12 +445,9 @@
se:%0A
-
+%09
from
@@ -499,14 +499,22 @@
sion
- %3C '3'
+_info %3C (3, 0)
:%0A%09r
@@ -671,17 +671,16 @@
%0A%09pass%0A%0A
-%09
%0Aclass c
@@ -1356,18 +1356,8 @@
i =
-xmlrpclib.
Serv
@@ -1395,16 +1395,17 @@
/pypi%22)%0A
+%0A
%09try:%0A%09%09
|
2a4adaa7e5bca8bc8bd3f552f23a46d73ef04a86
|
fix authentication bug in token_Data and start_session
|
cheezapi.py
|
cheezapi.py
|
from flask import url_for, session
import os
import requests
def redirect_uri():
    """Build the absolute OAuth redirect URI for the 'cheez' endpoint."""
    uri = url_for('cheez', _external=True)
    # API includes protocol as part of URL matching. Use this to force HTTPS:
    if os.environ.get('FORCE_HTTPS') == 'True':
        uri = uri.replace('http://', 'https://')
    return uri
def client_id():
    """Return the Cheezburger OAuth client id from the environment (or None)."""
    return os.getenv('CHZ_CLIENT_ID')
def client_secret():
    """Return the Cheezburger OAuth client secret from the environment (or None)."""
    return os.getenv('CHZ_CLIENT_SECRET')
def token_data(code):
    """Exchange an OAuth authorization *code* for the access-token payload.

    Returns the decoded JSON body of the Cheezburger token endpoint response.
    """
    # 'code' must be a parameter: the original body referenced an undefined
    # name and raised NameError on every token exchange (the caller,
    # start_session, already passes it in).
    r = requests.post("https://api.cheezburger.com/oauth/access_token",
                      data={'client_id': client_id(), 'client_secret': client_secret(),
                            'code': code, 'grant_type': 'authorization_code'})
    return r.json()
def start_session(code):
    """Exchange *code* for an access token and store it in the Flask session."""
    # Use a distinct local name: assigning to 'token_data' made it a local
    # variable, so calling token_data(code) raised UnboundLocalError.
    data = token_data(code)
    session['access_token'] = data['access_token']
def user():
    """Return the authenticated user's profile dict, or None.

    Requires an 'access_token' in the Flask session; returns None when no
    token is present or the API response carries no 'items'.
    """
    if 'access_token' not in session:
        return None
    r = requests.get('https://api.cheezburger.com/v1/me',
                     params={'access_token': session['access_token']})
    payload = r.json()
    if 'items' in payload:
        return payload['items'][0]
    return None
|
Python
| 0.000001
|
@@ -490,16 +490,20 @@
en_data(
+code
):%0A r
@@ -731,17 +731,16 @@
.json()%0A
-
%0Adef sta
@@ -762,21 +762,16 @@
):%0A t
-oken_
data = t
@@ -813,29 +813,24 @@
_token'%5D = t
-oken_
data%5B'access
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.