Columns: commit (string, 40 chars), subject (string, 1–3.25k chars), old_file (string, 4–311 chars), new_file (string, 4–311 chars), old_contents (string, 0–26.3k chars), lang (3 classes), proba (float64, 0–1), diff (string, 0–7.82k chars).

| commit | subject | old_file | new_file | old_contents | lang | proba | diff |
|---|---|---|---|---|---|---|---|
b92d9e3b05f975ad34f8c62a462eee93377e8435
|
Update wrappers.py
|
gym_minigrid/wrappers.py
|
gym_minigrid/wrappers.py
|
import math
import operator
from functools import reduce
import numpy as np
import gym
from gym import error, spaces, utils
class ActionBonus(gym.core.Wrapper):
"""
Wrapper which adds an exploration bonus.
This is a reward to encourage exploration of less
visited (state,action) pairs.
"""
def __init__(self, env):
super().__init__(env)
self.counts = {}
def step(self, action):
obs, reward, done, info = self.env.step(action)
env = self.unwrapped
tup = (env.agentPos, env.agentDir, action)
# Get the count for this (s,a) pair
preCnt = 0
if tup in self.counts:
preCnt = self.counts[tup]
# Update the count for this (s,a) pair
newCnt = preCnt + 1
self.counts[tup] = newCnt
bonus = 1 / math.sqrt(newCnt)
reward += bonus
return obs, reward, done, info
class StateBonus(gym.core.Wrapper):
"""
Adds an exploration bonus based on which positions
are visited on the grid.
"""
def __init__(self, env):
super().__init__(env)
self.counts = {}
def step(self, action):
obs, reward, done, info = self.env.step(action)
# Tuple based on which we index the counts
# We use the position after an update
env = self.unwrapped
tup = (env.agentPos)
# Get the count for this key
preCnt = 0
if tup in self.counts:
preCnt = self.counts[tup]
# Update the count for this key
newCnt = preCnt + 1
self.counts[tup] = newCnt
bonus = 1 / math.sqrt(newCnt)
reward += bonus
return obs, reward, done, info
class ImgObsWrapper(gym.core.ObservationWrapper):
"""
Use rgb image as the only observation output
"""
def __init__(self, env):
super().__init__(env)
# Hack to pass values to super wrapper
self.__dict__.update(vars(env))
self.observation_space = env.observation_space.spaces['image']
def observation(self, obs):
return obs['image']
class FullyObsWrapper(gym.core.ObservationWrapper):
"""
Fully observable gridworld using a compact grid encoding
"""
def __init__(self, env):
super().__init__(env)
self.__dict__.update(vars(env)) # hack to pass values to super wrapper
self.observation_space = spaces.Box(
low=0,
high=255,
shape=(self.env.width, self.env.height, 3), # number of cells
dtype='uint8'
)
def observation(self, obs):
full_grid = self.env.grid.encode()
full_grid[self.env.agent_pos[0]][self.env.agent_pos[1]] = np.array([255, self.env.agent_dir, 0])
return full_grid
class FlatObsWrapper(gym.core.ObservationWrapper):
"""
Encode mission strings using a one-hot scheme,
and combine these with observed images into one flat array
"""
def __init__(self, env, maxStrLen=64):
super().__init__(env)
self.maxStrLen = maxStrLen
self.numCharCodes = 27
imgSpace = env.observation_space.spaces['image']
imgSize = reduce(operator.mul, imgSpace.shape, 1)
self.observation_space = spaces.Box(
low=0,
high=255,
shape=(1, imgSize + self.numCharCodes * self.maxStrLen),
dtype='uint8'
)
self.cachedStr = None
self.cachedArray = None
def observation(self, obs):
image = obs['image']
mission = obs['mission']
# Cache the last-encoded mission string
if mission != self.cachedStr:
assert len(mission) <= self.maxStrLen, "mission string too long"
mission = mission.lower()
strArray = np.zeros(shape=(self.maxStrLen, self.numCharCodes), dtype='float32')
for idx, ch in enumerate(mission):
if ch >= 'a' and ch <= 'z':
chNo = ord(ch) - ord('a')
elif ch == ' ':
chNo = ord('z') - ord('a') + 1
assert chNo < self.numCharCodes, '%s : %d' % (ch, chNo)
strArray[idx, chNo] = 1
self.cachedStr = mission
self.cachedArray = strArray
obs = np.concatenate((image.flatten(), self.cachedArray.flatten()))
return obs
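
A minimal usage sketch for the wrappers above, not part of the original file: it assumes gym and gym_minigrid are installed, that 'MiniGrid-Empty-8x8-v0' is a registered environment id, and that the chosen environment exposes the agentPos attribute StateBonus reads.

import gym
import gym_minigrid  # noqa: F401  (importing registers the MiniGrid envs)
from gym_minigrid.wrappers import StateBonus, ImgObsWrapper

env = ImgObsWrapper(StateBonus(gym.make('MiniGrid-Empty-8x8-v0')))
obs = env.reset()  # just the image array, per ImgObsWrapper
obs, reward, done, info = env.step(env.action_space.sample())
print(obs.shape, reward)  # reward includes the 1/sqrt(visit count) bonus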
|
Python
| 0.000001
|
@@ -1773,11 +1773,11 @@
Use
-rgb
+the
ima
@@ -1809,16 +1809,38 @@
n output
+, no language/mission.
%0A %22%22%22
|
31caf3d6366cdc3669eb72007a1a6a45bffe2ce3
|
Update at 2017-07-23 11-30-32
|
plot.py
|
plot.py
|
from sys import argv
from pathlib import Path
import matplotlib as mpl
mpl.use('Agg')
import seaborn as sns
sns.set_style("darkgrid")
import matplotlib.pyplot as plt
import pandas as pd
# from keras.utils import plot_model
# plot_model(model, to_file='model.png', show_shapes=True, show_layer_names=False)
def plot_svg(log, name):
df = pd.read_csv(log)
graph = Path('./graph/')
loss_path = graph / (name + '_loss.svg')
acc_path = graph / (name + '_acc.svg')
keys = ['loss', 'val_loss']
ax = df[keys].plot(kind='line')
ax.set_xlabel('epoch')
ax.set_ylabel('loss(binary crossentropy)')
plt.savefig(str(loss_path))
keys = ['binary_accuracy', 'val_binary_accuracy']
ax = df[keys].plot(kind='line')
ax.set_xlabel('epoch')
ax.set_ylabel('accuracy')
plt.savefig(str(acc_path))
if __name__ == '__main__':
log, name = argv[1], argv[2]
plot_svg(log, name)
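
plot_svg expects a Keras CSVLogger-style file with loss, val_loss, binary_accuracy, and val_binary_accuracy columns, and saves SVGs into a ./graph/ directory that must already exist. A hedged driver sketch (the CSV values are made up):

import os
import pandas as pd

os.makedirs('graph', exist_ok=True)  # plot_svg assumes ./graph/ exists
pd.DataFrame({
    'loss': [0.9, 0.5, 0.3],
    'val_loss': [1.0, 0.6, 0.4],
    'binary_accuracy': [0.55, 0.70, 0.80],
    'val_binary_accuracy': [0.50, 0.65, 0.75],
}).to_csv('training.csv', index=False)

plot_svg('training.csv', 'demo')  # same as: python plot.py training.csv demo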
|
Python
| 0
|
@@ -513,32 +513,37 @@
ax = df%5Bkeys%5D
+%5B:22%5D
.plot(kind='line
@@ -723,16 +723,21 @@
df%5Bkeys%5D
+%5B:22%5D
.plot(ki
|
c78f864c41d85762a307ced808d6a220a0893805
|
[middleware] convert path to unicode
|
webnotes/middlewares.py
|
webnotes/middlewares.py
|
from __future__ import unicode_literals
import webnotes
import os
from werkzeug.wsgi import SharedDataMiddleware
from webnotes.utils import get_site_name, get_site_path, get_site_base_path, get_path
class StaticDataMiddleware(SharedDataMiddleware):
def __call__(self, environ, start_response):
self.environ = environ
return super(StaticDataMiddleware, self).__call__(environ, start_response)
def get_directory_loader(self, directory):
def loader(path):
import conf
fail = True
if hasattr(conf, 'sites_dir'):
site = get_site_name(self.environ.get('HTTP_HOST'))
possible_site_path = get_path(directory, path, base=os.path.join(conf.sites_dir, site))
if os.path.isfile(possible_site_path):
path = possible_site_path
fail = False
if fail and os.path.isfile(get_path(directory, path)):
path = get_path(directory, path)
fail = False
if fail:
return None, None
return os.path.basename(path), self._opener(path)
return loader
|
Python
| 0.999999
|
@@ -193,16 +193,22 @@
get_path
+, cstr
%0A%0Aclass
@@ -479,16 +479,37 @@
rt conf%0A
+%09%09%09path = cstr(path)%0A
%09%09%09fail
|
e2330caffae04bc31376a2e0f66f0e86ebf92532
|
Add my own K-nearest-neighbor algorithm
|
kNearestNeighbors/howItWorksKNearestNeighbors.py
|
kNearestNeighbors/howItWorksKNearestNeighbors.py
|
Python
| 0.000008
|
@@ -0,0 +1,2606 @@
+# -*- coding: utf-8 -*-
+"""K Nearest Neighbors classification for machine learning.
+
+This file demonstrate knowledge of K Nearest Neighbors classification. By
+building the algorithm from scratch.
+The idea of K Nearest Neighbors classification is to best divide and separate
+the data based on clustering the data and classifying based on the proximity
+to it's K closest neighbors and their classifications.
+
+'Closeness' is measured by the euclidean distance.
+
+dataset is breast cancer data from: http://archive.ics.uci.edu/ml/datasets.html
+
+Example:
+
+    $ python howItWorksKNearestNeighbors.py
+
+Todo:
+    *
+"""
+from collections import Counter
+import numpy as np
+# import matplotlib.pyplot as plt
+from matplotlib import style
+# from math import sqrt
+import warnings
+style.use('fivethirtyeight')
+
+# hardcoded testdata
+dataset = {'k': [[1, 2], [2, 3], [3, 1]], 'r': [[6, 5], [7, 7], [8, 6]]}
+new_features = [5, 7]
+
+# [[plt.scatter(ii[0], ii[1], s=100, color=i) for ii in dataset[i]] for i in dataset]
+# plt.scatter(new_features[0], new_features[1], s=100)
+# plt.show()
+
+
+def k_nearest_neighbors(data, predict, k=3):
+    """Function to calculate k nearest neighbors.
+
+    Based on the parameter 'predict' we find the points in the local proximity
+    of the training data and their label. In a larger dataset it would make
+    sense to specify a radius to avoid going over all data points each time,
+    but with the current dataset it does not matter so I avoid it to simplify.
+
+    Args:
+        data (dictionary): a dictionary where the keys are labels and the
+            values are a list of lists of features.
+        predict (list): a list of features that we will classify
+        k (int): an int that is the amount of neighbors to be counted. Should
+            be an odd number and higher than len(data) to avoid errors.
+
+    Returns:
+        str: The return value. The label that the predicted parameter has.
+
+
+    """
+    if len(data) >= k:
+        warnings.warn('K is set to a value less than total voting groups')
+    distances = []
+    for group in data:
+        for features in data[group]:
+            # euclidean_distance = np.sqrt(np.sum((np.array(features)-np.array(predict))**2))
+            euclidean_distance = np.linalg.norm(np.array(features) - np.array(predict))  # faster
+            distances.append([euclidean_distance, group])
+
+    votes = [i[1] for i in sorted(distances)[:k]]
+    print(Counter(votes).most_common(1))
+    vote_result = Counter(votes).most_common(1)[0][0]
+    return vote_result
+
+result = k_nearest_neighbors(dataset, new_features, k=5)
+print(result)
|
|
5286417cf60742528cbf63d620f7696abdab927f
|
Fix TMAO recipients
|
reporter/reporter/__init__.py
|
reporter/reporter/__init__.py
|
#!/usr/bin/env python3
import pymssql
import smtplib
import markdown
import os
import logging
from enum import Enum
from email.mime.multipart import MIMEMultipart
from email.mime.base import MIMEBase
from mimetypes import guess_type
from email.encoders import encode_base64
from email.mime.text import MIMEText
import matplotlib
matplotlib.use('Agg')
SQL_REPORTING_HOST = os.environ["SQL_REPORTING_HOST"]
SQL_REPORTING_USER = os.environ["SQL_REPORTING_USER"]
SQL_REPORTING_PASSWORD = os.environ["SQL_REPORTING_PASSWORD"]
SQL_REPORTING_DATABASE = os.environ["SQL_REPORTING_DATABASE"]
SQL_DWBRICCS_HOST = os.environ["SQL_DWBRICCS_HOST"]
SQL_DWBRICCS_USER = os.environ["SQL_DWBRICCS_USER"]
SQL_DWBRICCS_PASSWORD = os.environ["SQL_DWBRICCS_PASSWORD"]
SQL_DWBRICCS_DATABASE = os.environ["SQL_DWBRICCS_DATABASE"]
EMAIL_FROM_ADDRESS = os.environ["EMAIL_FROM_ADDRESS"]
EMAIL_SMTP_SERVER = os.environ["EMAIL_SMTP_SERVER"]
DEFAULT_RECIPIENT = os.environ["DEFAULT_RECIPIENT"]
RECIPIENT_IT_DWH = 'RECIPIENT_IT_DWH'
RECIPIENT_IT_DQ = 'RECIPIENT_IT_DQ'
RECIPIENT_LAB_MANAGER = 'RECIPIENT_LAB_MANAGER'
RECIPIENT_BIORESOURCE_MANAGER = 'RECIPIENT_BIORESOURCE_MANAGER'
RECIPIENT_BIORESOURCE_ADMIN = 'RECIPIENT_BIORESOURCE_ADMIN'
RECIPIENT_BRICCS_MANAGER = 'RECIPIENT_BRICCS_MANAGER'
RECIPIENT_BRICCS_ADMIN = 'RECIPIENT_BRICCS_ADMIN'
RECIPIENT_BRICCS_DQ = 'RECIPIENT_BRICCS_DQ'
RECIPIENT_GENVASC_MANAGER = 'RECIPIENT_GENVASC_MANAGER'
RECIPIENT_GENVASC_ADMIN = 'RECIPIENT_GENVASC_ADMIN'
RECIPIENT_GRAPHIC2_MANAGER = 'RECIPIENT_GRAPHIC2_MANAGER'
RECIPIENT_AS_MANAGER = 'RECIPIENT_AS_MANAGER'
RECIPIENT_AS_ADMIN = 'RECIPIENT_AS_ADMIN'
RECIPIENT_BRAVE_MANAGER = 'RECIPIENT_BRAVE_MANAGER'
RECIPIENT_BRAVE_ADMIN = 'RECIPIENT_BRAVE_ADMIN'
RECIPIENT_DREAM_MANAGER = 'RECIPIENT_DREAM_MANAGER'
RECIPIENT_DREAM_ADMIN = 'RECIPIENT_DREAM_ADMIN'
RECIPIENT_SCAD_MANAGER = 'RECIPIENT_SCAD_MANAGER'
RECIPIENT_SCAD_ADMIN = 'RECIPIENT_SCAD_ADMIN'
RECIPIENT_TMAO_MANAGER = 'RECIPIENT_SCAD_MANAGER'
RECIPIENT_TMAO_ADMIN = 'RECIPIENT_SCAD_ADMIN'
RECIPIENT_LENTEN_MANAGER = 'RECIPIENT_LENTEN_MANAGER'
RECIPIENT_LENTEN_ADMIN = 'RECIPIENT_LENTEN_ADMIN'
RECIPIENT_FAST_MANAGER = 'RECIPIENT_FAST_MANAGER'
RECIPIENT_FAST_ADMIN = 'RECIPIENT_FAST_ADMIN'
RECIPIENT_INDAPAMIDE_MANAGER = 'RECIPIENT_INDAPAMIDE_MANAGER'
RECIPIENT_INDAPAMIDE_ADMIN = 'RECIPIENT_INDAPAMIDE_ADMIN'
RECIPIENT_MARI_MANAGER = 'RECIPIENT_MARI_MANAGER'
RECIPIENT_MARI_ADMIN = 'RECIPIENT_MARI_ADMIN'
logging.basicConfig(
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
level=logging.INFO
)
class DatabaseConnection(Enum):
def reporting():
return pymssql.connect(
SQL_REPORTING_HOST,
SQL_REPORTING_USER,
SQL_REPORTING_PASSWORD,
SQL_REPORTING_DATABASE
)
def dwbriccs():
return pymssql.connect(
SQL_DWBRICCS_HOST,
SQL_DWBRICCS_USER,
SQL_DWBRICCS_PASSWORD,
SQL_DWBRICCS_DATABASE
)
def send_markdown_email(
report_name,
recipients,
mkdn,
attachments=None
):
to_recipients = get_recipients(recipients)
msg = MIMEMultipart()
msg['Subject'] = report_name
msg['To'] = ','.join(to_recipients)
msg['From'] = EMAIL_FROM_ADDRESS
html = markdown.markdown(mkdn)
msg.attach(MIMEText(html, 'html'))
for a in attachments or []:
mimetype, encoding = guess_type(a['filename'])
mimetype = mimetype.split('/', 1)
part = MIMEBase(mimetype[0], mimetype[1])
part.set_payload(a['stream'].read())
encode_base64(part)
if a['inline'] or False:
part.add_header('Content-Disposition',
'inline; filename="{}"'.format(a['filename']))
part.add_header('Content-ID', a['filename'])
else:
part.add_header('Content-Disposition',
'attachment; filename="{}"'.format(a['filename']))
msg.attach(part)
s = smtplib.SMTP(EMAIL_SMTP_SERVER)
s.send_message(msg)
s.quit()
logging.info("{} Email Sent to {}".format(report_name, to_recipients))
def get_recipients(recipients):
result = set()
list_of_recs = [os.getenv(r) for r in recipients]
for lr in list_of_recs:
if lr:
result |= set(lr.split(','))
if len(result) == 0:
result = set([DEFAULT_RECIPIENT])
return result
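
To make the resolution rule concrete, a small sketch (the addresses are invented): each name in recipients is looked up as an environment variable holding a comma-separated address list, unset names are skipped, and an empty result falls back to DEFAULT_RECIPIENT.

import os

os.environ['RECIPIENT_LAB_MANAGER'] = 'alice@example.com,bob@example.com'
os.environ.pop('RECIPIENT_IT_DQ', None)  # unset names are simply skipped

print(get_recipients([RECIPIENT_LAB_MANAGER, RECIPIENT_IT_DQ]))
# -> {'alice@example.com', 'bob@example.com'}
print(get_recipients([RECIPIENT_IT_DQ]))
# -> {DEFAULT_RECIPIENT}, the fallback when nothing resolves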
def get_case_link(link_text, case_id, contact_id):
CIVICRM_CASE_URL = ('[{}]('
'http://lcbru.xuhl-tr.nhs.uk/civicrm/contact/view/case'
'?id={}&cid={})')
return (CIVICRM_CASE_URL.format(
link_text,
case_id,
contact_id))
def get_contact_link(link_text, contact_id):
CIVICRM_CONTACT_URL = (
'[{}]('
'http://lcbru.xuhl-tr.nhs.uk/civicrm/contact/view'
'?cid={})')
return (CIVICRM_CONTACT_URL.format(
link_text,
contact_id))
def get_contact_id_search_link(link_text, contact_id):
CIVICRM_SEARCH_URL = (
'[{}]('
'http://lcbru.xuhl-tr.nhs.uk/content/participant_search/{})')
return (CIVICRM_SEARCH_URL.format(
link_text,
contact_id))
|
Python
| 0.000016
|
@@ -1988,36 +1988,36 @@
ER = 'RECIPIENT_
-SCAD
+TMAO
_MANAGER'%0D%0ARECIP
@@ -2037,36 +2037,36 @@
IN = 'RECIPIENT_
-SCAD
+TMAO
_ADMIN'%0D%0ARECIPIE
|
2f8ebebce8374b42a2e7c9963a35fe908439468a
|
fix error on different result
|
l10n_it_sale/wizard/stock_partial_picking.py
|
l10n_it_sale/wizard/stock_partial_picking.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2012-2014 Didotech (<http://www.didotech.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import orm, fields
from openerp.tools.translate import _
class stock_partial_picking(orm.TransientModel):
_inherit = "stock.partial.picking"
_columns = {
'tracking_code': fields.char('Pack', size=64),
'ddt_in_reference': fields.char('In DDT', size=32),
'ddt_in_date': fields.date('In DDT Date'),
'type': fields.selection([('out', 'Sending Goods'), ('in', 'Getting Goods'), ('internal', 'Internal')],
'Shipping Type', required=True,),
}
def save_partial(self, cr, uid, ids, context=None):
res = super(stock_partial_picking, self).save_partial(cr, uid, ids, context=None)
partial = self.browse(cr, uid, ids[0], context=context)
vals = {}
if partial.ddt_in_reference:
vals.update({'ddt_in_reference': partial.ddt_in_reference})
if partial.ddt_in_date:
vals.update({'ddt_in_date': partial.ddt_in_date})
if vals:
partial.picking_id.write(vals)
return res
def default_get(self, cr, uid, fields, context=None):
if context is None:
context = {}
res = super(stock_partial_picking, self).default_get(cr, uid, fields, context=context)
picking_ids = context.get('active_ids', [])
picking_id, = picking_ids
picking = self.pool['stock.picking'].browse(cr, uid, picking_id, context)
if 'type' in fields:
res.update(type=picking.type)
if 'ddt_in_date':
if picking.ddt_in_date:
res.update(ddt_in_date=picking.ddt_in_date)
if 'ddt_in_reference':
if picking.ddt_in_reference:
res.update(ddt_in_reference=picking.ddt_in_reference)
return res
def do_partial(self, cr, uid, ids, context=None):
if not context:
context = {}
result = super(stock_partial_picking, self).do_partial(cr, uid, ids, context)
partial = self.browse(cr, uid, ids, context=context)[0]
vals = {}
if partial.ddt_in_reference:
vals.update({'ddt_in_reference': partial.ddt_in_reference})
if partial.ddt_in_date:
vals.update({'ddt_in_date': partial.ddt_in_date})
if vals:
self.pool['stock.picking'].write(cr, uid, result['res_id'], vals, context)
if result.get('res_id', False) != context.get('active_id', False):
context.update({
'active_id': result.get('res_id', False),
'active_ids': [result.get('res_id', False)],
'old_result': result
})
if context.get('no_auto_ddt', False) or partial.type == 'in':
return result
else:
return {
'type': 'ir.actions.act_window',
'name': _('Assign DDT'),
'view_mode': 'form',
'view_type': 'form',
'res_model': 'wizard.assign.ddt',
# 'res_id': res_id,
'target': 'new',
'context': context,
}
|
Python
| 0.000002
|
@@ -3239,32 +3239,115 @@
if vals:%0A
+ if result.get('res_id', False) or context.get('active_id', False):%0A
self
@@ -3394,17 +3394,21 @@
sult
-%5B
+.get(
'res_id'
%5D, v
@@ -3403,17 +3403,59 @@
'res_id'
-%5D
+, False) or context.get('active_id', False)
, vals,
|
1d2d19a464f5e8c407e1d63130b852faf4799c70
|
Fix bug when workspace contains special characters (#536)
|
catkin_tools/verbs/catkin_config/cli.py
|
catkin_tools/verbs/catkin_config/cli.py
|
# Copyright 2014 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
from catkin_tools.argument_parsing import add_cmake_and_make_and_catkin_make_args
from catkin_tools.argument_parsing import add_context_args
from catkin_tools.context import Context
from catkin_tools.terminal_color import ColorMapper
color_mapper = ColorMapper()
clr = color_mapper.clr
def prepare_arguments(parser):
parser.description = "This verb is used to configure a catkin workspace's\
configuration and layout. Calling `catkin config` with no arguments will\
display the current config and affect no changes if a config already exists\
for the current workspace and profile."
# Workspace / profile args
add_context_args(parser)
behavior_group = parser.add_argument_group('Behavior', 'Options affecting argument handling.')
add = behavior_group.add_mutually_exclusive_group().add_argument
add('--append-args', '-a', action='store_true', default=False,
help='For list-type arguments, append elements.')
add('--remove-args', '-r', action='store_true', default=False,
help='For list-type arguments, remove elements.')
context_group = parser.add_argument_group('Workspace Context', 'Options affecting the context of the workspace.')
add = context_group.add_argument
add('--init', action='store_true', default=False,
help='Initialize a workspace if it does not yet exist.')
add = context_group.add_mutually_exclusive_group().add_argument
add('--extend', '-e', dest='extend_path', type=str,
help='Explicitly extend the result-space of another catkin workspace, '
'overriding the value of $CMAKE_PREFIX_PATH.')
add('--no-extend', dest='extend_path', action='store_const', const='',
help='Un-set the explicit extension of another workspace as set by --extend.')
add = context_group.add_argument
add('--mkdirs', action='store_true', default=False,
help='Create directories required by the configuration (e.g. source space) if they do not already exist.')
lists_group = parser.add_argument_group(
'Package Build Defaults', 'Packages to include or exclude from default build behavior.')
add = lists_group.add_mutually_exclusive_group().add_argument
add('--whitelist', metavar="PKG", dest='whitelist', nargs="+", required=False, type=str, default=None,
help='Set the packages on the whitelist. If the whitelist is non-empty, '
'only the packages on the whitelist are built with a bare call to '
'`catkin build`.')
add('--no-whitelist', dest='whitelist', action='store_const', const=[], default=None,
help='Clear all packages from the whitelist.')
add = lists_group.add_mutually_exclusive_group().add_argument
add('--blacklist', metavar="PKG", dest='blacklist', nargs="+", required=False, type=str, default=None,
help='Set the packages on the blacklist. Packages on the blacklist are '
'not built with a bare call to `catkin build`.')
add('--no-blacklist', dest='blacklist', action='store_const', const=[], default=None,
help='Clear all packages from the blacklist.')
spaces_group = parser.add_argument_group('Spaces', 'Location of parts of the catkin workspace.')
Context.setup_space_keys()
for space, space_dict in Context.SPACES.items():
add = spaces_group.add_mutually_exclusive_group().add_argument
flags = ['--{}-space'.format(space)]
flags.extend([space_dict['short_flag']] if 'short_flag' in space_dict else [])
add(*flags, default=None,
help='The path to the {} space.'.format(space))
add('--default-{}-space'.format(space),
action='store_const', dest='{}_space'.format(space), default=None, const=space_dict['default'],
help='Use the default path to the {} space ("{}")'.format(space, space_dict['default']))
add = spaces_group.add_argument
add('-x', '--space-suffix',
help='Suffix for build, devel, and install space if they are not otherwise explicitly set.')
devel_group = parser.add_argument_group(
'Devel Space', 'Options for configuring the structure of the devel space.')
add = devel_group.add_mutually_exclusive_group().add_argument
add('--link-devel', dest='devel_layout', action='store_const', const='linked', default=None,
help='Build products from each catkin package into isolated spaces,'
' then symbolically link them into a merged devel space.')
add('--merge-devel', dest='devel_layout', action='store_const', const='merged', default=None,
help='Build products from each catkin package into a single merged devel spaces.')
add('--isolate-devel', dest='devel_layout', action='store_const', const='isolated', default=None,
help='Build products from each catkin package into isolated devel spaces.')
install_group = parser.add_argument_group(
'Install Space', 'Options for configuring the structure of the install space.')
add = install_group.add_mutually_exclusive_group().add_argument
add('--install', action='store_true', default=None,
help='Causes each package to be installed to the install space.')
add('--no-install', dest='install', action='store_false', default=None,
help='Disables installing each package into the install space.')
add = install_group.add_mutually_exclusive_group().add_argument
add('--isolate-install', action='store_true', default=None,
help='Install each catkin package into a separate install space.')
add('--merge-install', dest='isolate_install', action='store_false', default=None,
help='Install each catkin package into a single merged install space.')
build_group = parser.add_argument_group('Build Options', 'Options for configuring the way packages are built.')
add_cmake_and_make_and_catkin_make_args(build_group)
return parser
def main(opts):
try:
# Determine if the user is trying to perform some action, in which
# case, the workspace should be automatically initialized
ignored_opts = ['main', 'verb']
actions = [v for k, v in vars(opts).items() if k not in ignored_opts]
no_action = not any(actions)
# Try to find a metadata directory to get context defaults
# Otherwise use the specified directory
context = Context.load(
opts.workspace,
opts.profile,
opts,
append=opts.append_args,
remove=opts.remove_args)
do_init = opts.init or not no_action
summary_notes = []
if not context.initialized() and do_init:
summary_notes.append(clr('@!@{cf}Initialized new catkin workspace in `%s`@|' % context.workspace))
if context.initialized() or do_init:
Context.save(context)
if opts.mkdirs and not context.source_space_exists():
os.makedirs(context.source_space_abs)
print(context.summary(notes=summary_notes))
except IOError as exc:
# Usually happens if workspace is already underneath another catkin_tools workspace
print('error: could not configure catkin workspace: %s' % exc.message)
return 1
return 0
|
Python
| 0
|
@@ -880,16 +880,26 @@
orMapper
+, sanitize
%0A%0Acolor_
@@ -7350,16 +7350,25 @@
s%60@%7C' %25
+sanitize(
context.
@@ -7378,16 +7378,17 @@
kspace))
+)
%0A%0A
|
43bbf64879ad0567805b0bab2fac123cfbc9c5f2
|
Add scenario image endpoint
|
cea/interfaces/dashboard/api/project.py
|
cea/interfaces/dashboard/api/project.py
|
from flask_restplus import Namespace, Resource, fields, abort
import cea.config
import os
import re
api = Namespace('Project', description='Current project for CEA')
# PATH_REGEX = r'(^[a-zA-Z]:\\[\\\S|*\S]?.*$)|(^(/[^/ ]*)+/?$)'
PROJECT_PATH_MODEL = api.model('Project Path', {
'path': fields.String(description='Path of Project'),
'scenario': fields.String(description='Path of Project')
})
PROJECT_MODEL = api.inherit('Project', PROJECT_PATH_MODEL, {
'name': fields.String(description='Name of Project'),
'scenario': fields.String(description='Name of Current Scenario'),
'scenarios': fields.List(fields.String, description='Name of Current Scenario')
})
@api.route('/')
class Project(Resource):
@api.marshal_with(PROJECT_MODEL)
def get(self):
config = cea.config.Configuration()
name = os.path.basename(config.project)
return {'name': name, 'path': config.project, 'scenario': config.scenario_name, 'scenarios': config.get_parameter('general:scenario-name')._choices}
@api.doc(body=PROJECT_PATH_MODEL, responses={200: 'Success', 400: 'Invalid Path given'})
def post(self):
config = cea.config.Configuration()
payload = api.payload
if 'path' in payload:
path = payload['path']
if os.path.exists(path):
config.project = path
if 'scenario' not in payload:
config.scenario_name = ''
config.save()
return {'message': 'Project path changed'}
else:
abort(400, 'Path given does not exist')
if 'scenario' in payload:
scenario = payload['scenario']
choices = config.get_parameter(
'general:scenario-name')._choices
if scenario in choices:
config.scenario_name = scenario
config.save()
return {'message': 'Scenario changed'}
else:
abort(
400, 'Scenario does not exist', choices=choices)
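
A hedged client sketch for the two handlers above, using requests; the host, port, and mount path are assumptions (adjust to wherever the dashboard API is actually served), and the project path must exist on the server side.

import requests

base = 'http://localhost:5050/api/project/'  # hypothetical mount point

print(requests.get(base).json())  # name, path, scenario, scenarios

# Change the current project (path must exist on the server):
print(requests.post(base, json={'path': '/home/user/reference-case'}).json())

# Switch scenario (must be one of the known scenario names):
print(requests.post(base, json={'scenario': 'baseline'}).json())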
|
Python
| 0.000042
|
@@ -1,8 +1,36 @@
+import os%0A%0Aimport geopandas%0A
from fla
@@ -87,46 +87,174 @@
ort%0A
-%0Aimport cea.config%0Aimport os%0Aimport re
+from staticmap import StaticMap, Polygon%0A%0Aimport cea.config%0Aimport cea.inputlocator%0Afrom cea.utilities.standardize_coordinates import get_geographic_coordinate_system
%0A%0Aap
@@ -1111,16 +1111,32 @@
io_name,
+%0A
'scenar
@@ -1910,33 +1910,16 @@
rameter(
-%0A
'general
@@ -2153,37 +2153,16 @@
abort(
-%0A
400, 'Sc
@@ -2202,8 +2202,2102 @@
hoices)%0A
+
+
+@api.route('/<string:scenario>/image')
+class ScenarioImage(Resource):
+    def get(self, scenario):
+        config = cea.config.Configuration()
+        choices = config.get_parameter('general:scenario-name')._choices
+        if scenario in choices:
+            locator = cea.inputlocator.InputLocator(os.path.join(config.project, scenario))
+            zone_path = locator.get_zone_geometry()
+            if os.path.isfile(zone_path):
+                cache_path = os.path.join(config.project, '.cache')
+                image_path = os.path.join(cache_path, scenario + '.png')
+
+                zone_modified = os.path.getmtime(zone_path)
+                if not os.path.isfile(image_path):
+                    image_modified = 0
+                else:
+                    image_modified = os.path.getmtime(image_path)
+
+                if zone_modified > image_modified:
+                    # Make sure .cache folder exists
+                    if not os.path.exists(cache_path):
+                        os.makedirs(cache_path)
+
+                    try:
+                        zone_df = geopandas.read_file(zone_path)
+                        zone_df = zone_df.to_crs(get_geographic_coordinate_system())
+                        polygons = zone_df['geometry']
+
+                        polygons = [list(polygons.geometry.exterior[row_id].coords) for row_id in range(polygons.shape[0])]
+
+                        m = StaticMap(256, 160)
+                        for polygon in polygons:
+                            out = Polygon(polygon, 'blue', 'black', False)
+                            m.add_polygon(out)
+
+                        image = m.render()
+                        image.save(image_path)
+                    except Exception as e:
+                        abort(400, str(e))
+
+                import base64
+                with open(image_path, 'rb') as imgFile:
+                    image = base64.b64encode(imgFile.read())
+
+                return {'image': image}
+            abort(400, 'Zone file not found')
+        else:
+            abort(400, 'Scenario does not exist', choices=choices)
+
|
c5bf44204d1c9e53686053d45006782e77305fab
|
Fix mysql_engine option type
|
ceilometer/storage/sqlalchemy/models.py
|
ceilometer/storage/sqlalchemy/models.py
|
# -*- encoding: utf-8 -*-
#
# Author: John Tran <jhtran@att.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
SQLAlchemy models for nova data.
"""
import json
from sqlalchemy import Column, Integer, String, Table
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import ForeignKey, DateTime
from sqlalchemy.orm import relationship, backref
from sqlalchemy.types import TypeDecorator, VARCHAR
from urlparse import urlparse
import ceilometer.openstack.common.cfg as cfg
from ceilometer.openstack.common import timeutils
sql_opts = [
cfg.IntOpt('mysql_engine',
default='InnoDB',
help='MySQL engine')
]
cfg.CONF.register_opts(sql_opts)
def table_args():
engine_name = urlparse(cfg.CONF.database_connection).scheme
if engine_name == 'mysql':
return {'mysql_engine': cfg.CONF.mysql_engine}
return None
class JSONEncodedDict(TypeDecorator):
"Represents an immutable structure as a json-encoded string."
impl = VARCHAR
def process_bind_param(self, value, dialect):
if value is not None:
value = json.dumps(value)
return value
def process_result_value(self, value, dialect):
if value is not None:
value = json.loads(value)
return value
class CeilometerBase(object):
"""Base class for Ceilometer Models."""
__table_args__ = table_args()
__table_initialized__ = False
def __setitem__(self, key, value):
setattr(self, key, value)
def __getitem__(self, key):
return getattr(self, key)
Base = declarative_base(cls=CeilometerBase)
sourceassoc = Table('sourceassoc', Base.metadata,
Column('meter_id', Integer, ForeignKey("meter.id")),
Column('project_id', String(255), ForeignKey("project.id")),
Column('resource_id', String(255), ForeignKey("resource.id")),
Column('user_id', String(255), ForeignKey("user.id")),
Column('source_id', String(255), ForeignKey("source.id"))
)
class Source(Base):
__tablename__ = 'source'
id = Column(String(255), primary_key=True)
class Meter(Base):
"""Metering data"""
__tablename__ = 'meter'
id = Column(Integer, primary_key=True)
counter_name = Column(String(255))
sources = relationship("Source", secondary=lambda: sourceassoc)
user_id = Column(String(255), ForeignKey('user.id'))
project_id = Column(String(255), ForeignKey('project.id'))
resource_id = Column(String(255), ForeignKey('resource.id'))
resource_metadata = Column(JSONEncodedDict)
counter_type = Column(String(255))
counter_volume = Column(Integer)
timestamp = Column(DateTime, default=timeutils.utcnow)
message_signature = Column(String)
message_id = Column(String)
class User(Base):
__tablename__ = 'user'
id = Column(String(255), primary_key=True)
sources = relationship("Source", secondary=lambda: sourceassoc)
resources = relationship("Resource", backref='user')
meters = relationship("Meter", backref='user')
class Project(Base):
__tablename__ = 'project'
id = Column(String(255), primary_key=True)
sources = relationship("Source", secondary=lambda: sourceassoc)
resources = relationship("Resource", backref='project')
meters = relationship("Meter", backref='project')
class Resource(Base):
__tablename__ = 'resource'
id = Column(String(255), primary_key=True)
sources = relationship("Source", secondary=lambda: sourceassoc)
timestamp = Column(DateTime)
resource_metadata = Column(JSONEncodedDict)
received_timestamp = Column(DateTime, default=timeutils.utcnow)
user_id = Column(String(255), ForeignKey('user.id'))
project_id = Column(String(255), ForeignKey('project.id'))
meters = relationship("Meter", backref='resource')
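
A standalone sketch of the JSONEncodedDict round-trip against an in-memory SQLite database; the Sample table and its values are invented for illustration, and JSONEncodedDict, Column, Integer, and declarative_base are assumed in scope from the module above.

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

DemoBase = declarative_base()

class Sample(DemoBase):
    __tablename__ = 'sample'
    id = Column(Integer, primary_key=True)
    resource_metadata = Column(JSONEncodedDict)

engine = create_engine('sqlite://')
DemoBase.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

session.add(Sample(resource_metadata={'flavor': 'm1.small', 'vcpus': 1}))
session.commit()
print(session.query(Sample).one().resource_metadata)
# -> {'flavor': 'm1.small', 'vcpus': 1}, stored as JSON text in a VARCHAR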
|
Python
| 0.000016
|
@@ -1064,19 +1064,19 @@
cfg.
-Int
+Str
Opt('mys
|
72a633793b30a87b6affa528459185d46fc37007
|
Update getJob signature
|
shared/api.py
|
shared/api.py
|
from __future__ import print_function
import boto3
import json
import os
import btr3baseball
jobTable = os.environ['JOB_TABLE']
jobQueue = os.environ['JOB_QUEUE']
repo = btr3baseball.JobRepository(jobTable)
queue = boto3.resource('sqs').get_queue_by_name(QueueName=jobQueue)
def submitJob(event, context):
# Put initial entry in dynamo db
jobId = repo.createJob(event)
# Put the job ID on the SQS queue
response = queue.send_message(MessageBody=jobId)
# Update the DB entry with sqs message ID for traceability
repo.updateWithMessageId(jobId, response.get('MessageId'))
def getJobInfo():
repo.getJob(event['jobId'])
|
Python
| 0
|
@@ -608,13 +608,23 @@
tJob
-Info(
+(event, context
):%0A
|
5692bb1c893182e5aac7271161e64fa9d1a03f2f
|
Remove test for backend.is_successful
|
celery/tests/test_backends/test_base.py
|
celery/tests/test_backends/test_base.py
|
import sys
import types
import unittest2 as unittest
from celery.serialization import subclass_exception
from celery.serialization import find_nearest_pickleable_exception as fnpe
from celery.serialization import UnpickleableExceptionWrapper
from celery.serialization import get_pickleable_exception as gpe
from celery import states
from celery.backends.base import BaseBackend, KeyValueStoreBackend
class wrapobject(object):
def __init__(self, *args, **kwargs):
self.args = args
Oldstyle = types.ClassType("Oldstyle", (), {})
Unpickleable = subclass_exception("Unpickleable", KeyError, "foo.module")
Impossible = subclass_exception("Impossible", object, "foo.module")
Lookalike = subclass_exception("Lookalike", wrapobject, "foo.module")
b = BaseBackend()
class TestBaseBackendInterface(unittest.TestCase):
def test_get_status(self):
self.assertRaises(NotImplementedError,
b.is_successful, "SOMExx-N0Nex1stant-IDxx-")
def test_store_result(self):
self.assertRaises(NotImplementedError,
b.store_result, "SOMExx-N0nex1stant-IDxx-", 42, states.SUCCESS)
def test_get_result(self):
self.assertRaises(NotImplementedError,
b.get_result, "SOMExx-N0nex1stant-IDxx-")
def test_restore_taskset(self):
self.assertRaises(NotImplementedError,
b.restore_taskset, "SOMExx-N0nex1stant-IDxx-")
def test_save_taskset(self):
self.assertRaises(NotImplementedError,
b.save_taskset, "SOMExx-N0nex1stant-IDxx-", "blergh")
def test_get_traceback(self):
self.assertRaises(NotImplementedError,
b.get_traceback, "SOMExx-N0nex1stant-IDxx-")
class TestPickleException(unittest.TestCase):
def test_oldstyle(self):
self.assertIsNone(fnpe(Oldstyle()))
def test_BaseException(self):
self.assertIsNone(fnpe(Exception()))
def test_get_pickleable_exception(self):
exc = Exception("foo")
self.assertEqual(gpe(exc), exc)
def test_unpickleable(self):
self.assertIsInstance(fnpe(Unpickleable()), KeyError)
self.assertIsNone(fnpe(Impossible()))
class TestPrepareException(unittest.TestCase):
def test_unpickleable(self):
x = b.prepare_exception(Unpickleable(1, 2, "foo"))
self.assertIsInstance(x, KeyError)
y = b.exception_to_python(x)
self.assertIsInstance(y, KeyError)
def test_impossible(self):
x = b.prepare_exception(Impossible())
self.assertIsInstance(x, UnpickleableExceptionWrapper)
y = b.exception_to_python(x)
self.assertEqual(y.__class__.__name__, "Impossible")
if sys.version_info < (2, 5):
self.assertTrue(y.__class__.__module__)
else:
self.assertEqual(y.__class__.__module__, "foo.module")
def test_regular(self):
x = b.prepare_exception(KeyError("baz"))
self.assertIsInstance(x, KeyError)
y = b.exception_to_python(x)
self.assertIsInstance(y, KeyError)
class TestKeyValueStoreBackendInterface(unittest.TestCase):
def test_get(self):
self.assertRaises(NotImplementedError, KeyValueStoreBackend().get,
"a")
def test_set(self):
self.assertRaises(NotImplementedError, KeyValueStoreBackend().set,
"a", 1)
def test_cleanup(self):
self.assertFalse(KeyValueStoreBackend().cleanup())
|
Python
| 0.000152
|
@@ -923,21 +923,18 @@
b.
-is_successful
+get_status
, %22S
|
50c30f51925f40a7579e36e0ede59d4a584339ae
|
Fix lint in subunit to junit tools script (#6614)
|
tools/subunit_to_junit.py
|
tools/subunit_to_junit.py
|
#!/usr/bin/env python3
# This code is part of Qiskit.
#
# (C) Copyright IBM 2017, 2018.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
# Based heavily on subunit2junit filter script from the upstream subunit python
# package, https://github.com/testing-cabal/subunit/ just adapted to run on
# python3 more reliably.
import argparse
import sys
from junitxml import JUnitXmlResult
from subunit.filters import run_tests_from_stream
from testtools import StreamToExtendedDecorator
def filter_by_result(
result_factory,
output_path,
passthrough,
forward,
input_stream=sys.stdin,
protocol_version=1,
passthrough_subunit=True,
):
"""Filter an input stream using a test result.
:param result_factory: A callable that when passed an output stream
returns a TestResult. It is expected that this result will output
to the given stream.
:param output_path: A path send output to. If None, output will be go
to ``sys.stdout``.
:param passthrough: If True, all non-subunit input will be sent to
``sys.stdout``. If False, that input will be discarded.
:param forward: If True, all subunit input will be forwarded directly to
``sys.stdout`` as well as to the ``TestResult``.
:param input_stream: The source of subunit input. Defaults to
``sys.stdin``.
:param protocol_version: The subunit protocol version to expect.
:param passthrough_subunit: If True, passthrough should be as subunit.
:return: A test result with the results of the run.
"""
if protocol_version == 1:
sys.stderr.write("Subunit protocol version 2 must be used")
sys.exit(1)
if passthrough:
passthrough_stream = sys.stdout
else:
passthrough_stream = None
if forward:
forward_stream = sys.stdout
else:
forward_stream = None
if output_path is None:
output_to = sys.stdout
else:
output_to = open(output_path, "w")
try:
result = result_factory(output_to)
run_tests_from_stream(
input_stream,
result,
passthrough_stream,
forward_stream,
protocol_version=protocol_version,
passthrough_subunit=passthrough_subunit,
)
finally:
if output_path:
output_to.close()
return result
def run_filter_script(
result_factory, description, post_run_hook=None, protocol_version=1, passthrough_subunit=True
):
"""Main function for simple subunit filter scripts.
Many subunit filter scripts take a stream of subunit input and use a
TestResult to handle the events generated by that stream. This function
wraps a lot of the boiler-plate around that by making a script with
options for handling passthrough information and stream forwarding
:param result_factory: A callable that takes an output stream and returns
a test result that outputs to that stream.
:param description: A description of the filter script.
:param protocol_version: What protocol version to consume/emit.
:param passthrough_subunit: If True, passthrough should be as subunit.
"""
parser = argparse.ArgumentParser(description=description)
parser.add_argument(
"--no-passthrough",
action="store_true",
help="Hide all non subunit input.",
default=False,
dest="no_passthrough",
)
parser.add_argument("-o", "--output-to", help="Send the output to this path rather than stdout")
parser.add_argument(
"-f",
"--forward",
action="store_true",
default=False,
help="Forward subunit stream on stdout. When set, "
"received non-subunit output will be encapsulated"
" in subunit.",
)
args = parser.parse_args()
result = filter_by_result(
result_factory,
args.output_to,
not args.no_passthrough,
args.forward,
protocol_version=protocol_version,
passthrough_subunit=passthrough_subunit,
input_stream=sys.stdin,
)
if post_run_hook:
post_run_hook(result)
if not hasattr(result, "wasSuccessful"):
result = result.decorated
def main():
run_filter_script(
lambda output: StreamToExtendedDecorator(JUnitXmlResult(output)),
"Convert to junitxml",
protocol_version=2,
)
if __name__ == "__main__":
main()
|
Python
| 0
|
@@ -683,16 +683,122 @@
iably.%0A%0A
+%22%22%22Script to convert subunit stream returned by stestr to junitxml for processing by azure-pipelines.%22%22%22%0A%0A
import a
@@ -852,16 +852,48 @@
mlResult
+ # pylint: disable=import-error
%0Afrom su
@@ -1214,32 +1214,41 @@
lt.%0A%0A :param
+callable
result_factory:
@@ -1407,16 +1407,20 @@
:param
+str
output_p
@@ -1505,32 +1505,37 @@
t%60%60.%0A :param
+bool
passthrough: If
@@ -1654,16 +1654,21 @@
:param
+bool
forward:
@@ -1793,16 +1793,21 @@
:param
+file
input_st
@@ -1880,32 +1880,36 @@
n%60%60.%0A :param
+int
protocol_version
@@ -1953,32 +1953,37 @@
ect.%0A :param
+bool
passthrough_subu
@@ -2026,24 +2026,25 @@
as subunit.%0A
+%0A
:return:
@@ -2087,16 +2087,43 @@
he run.%0A
+ :rtype: JUnitXmlResult%0A
%22%22%22%0A
@@ -3424,16 +3424,25 @@
:param
+callable
result_f
@@ -3562,16 +3562,20 @@
:param
+str
descript
@@ -3626,16 +3626,216 @@
:param
+callable post_run_hook: A callback function that runs after the test run%0A finishes. It will be passed a single positional argument the result%0A object returned by the run.%0A :param int
protocol
@@ -3894,16 +3894,21 @@
:param
+bool
passthro
@@ -5013,16 +5013,17 @@
d%0A%0A%0Adef
+_
main():%0A
@@ -5217,15 +5217,16 @@
_%22:%0A
+_
main()%0A
|
e6f63a90292f0c38ea91032741869d9036b542df
|
Disable autoescape for template rendering
|
mods/email.py
|
mods/email.py
|
#!/usr/bin/env python3
#
# Copyright 2016 Red Hat, Inc.
#
# Authors:
# Fam Zheng <famz@redhat.com>
#
# This work is licensed under the MIT License. Please see the LICENSE file or
# http://opensource.org/licenses/MIT.
from django.conf.urls import url
from django.http import HttpResponse, Http404
from django.core.urlresolvers import reverse
from django.core.exceptions import PermissionDenied
from django.template import Template, Context
from mod import PatchewModule
import smtplib
import email
import uuid
import traceback
from api.models import Message, Project
from event import register_handler, get_events_info
from schema import *
_default_config = """
[smtp]
server = smtp.example.com
ssl = True
port = 465
username = youruser
password = yourpassword
from = your@email.com
"""
class EmailModule(PatchewModule):
"""
Documentation
-------------
Email information is configured in "INI" style:
""" + _default_config
name = "email" # The notify method name
default_config = _default_config
email_schema = \
ArraySchema("email_notification", "Email Notification",
desc="Email notification",
members=[
EnumSchema("event", "Event",
enums=lambda: get_events_info(),
required=True,
desc="Which event to trigger the email notification"),
BooleanSchema("enabled", "Enabled",
desc="Whether this event is enabled",
default=True),
BooleanSchema("reply_to_all", "Reply to all",
desc='Whether to "reply to all" if the event has an associated email message',
default=False),
BooleanSchema("in_reply_to", "Set In-Reply-To",
desc='Whether to set In-Reply-To to the message id, if the event has an associated email message',
default=True),
BooleanSchema("reply_subject", "Set replying subject",
desc='Whether to set Subject to "Re: xxx", if the event has an associated email message',
default=True),
StringSchema("to", "To", desc="Send email to"),
StringSchema("cc", "Cc", desc="Cc list"),
StringSchema("subject_template", "Subject template",
desc="""The django template for subject""",
required=True),
StringSchema("body_template", "Body template",
desc="The django template for email body.",
multiline=True,
required=True),
])
project_property_schema = \
ArraySchema("email", desc="Configuration for email module",
members=[
MapSchema("notifications", "Email notifications",
desc="Email notifications",
item=email_schema),
])
def __init__(self):
register_handler(None, self.on_event)
def _get_smtp(self):
server = self.get_config("smtp", "server")
port = self.get_config("smtp", "port")
username = self.get_config("smtp", "username")
password = self.get_config("smtp", "password")
ssl = self.get_config("smtp", "ssl", "getboolean")
if ssl:
smtp = smtplib.SMTP_SSL(server, port)
else:
smtp = smtplib.SMTP(server, port)
smtp.login(username, password)
return smtp
def _send_series_recurse(self, sendmethod, s):
sendmethod(s)
for i in s.get_replies():
self._send_series_recurse(sendmethod, i)
def _smtp_send(self, to, cc, message):
from_addr = self.get_config("smtp", "from")
message["Resent-From"] = message["From"]
try:
message.replace_header("from", from_addr)
except KeyError:
message["from"] = from_addr
smtp = self._get_smtp()
smtp.sendmail(from_addr, to, message.as_string())
def www_view_email_bounce(self, request, message_id):
if not request.user.is_authenticated():
raise PermissionDenied()
m = Message.objects.find_series(message_id)
if not m:
raise Http404("Series not found: " + message_id)
def send_one(m):
msg = m.get_mbox()
message = email.message_from_string(msg)
self._smtp_send(request.user.email, None, message)
self._send_series_recurse(send_one, m)
return HttpResponse("email bounced")
def www_url_hook(self, urlpatterns):
urlpatterns.append(url(r"^email-bounce/(?P<message_id>.*)/",
self.www_view_email_bounce,
name="email-bounce"))
def prepare_message_hook(self, request, message):
if message.is_series_head and request.user.is_authenticated():
message.extra_ops.append({"url": reverse("email-bounce",
kwargs={"message_id": message.message_id}),
"title": "Bounce to me"})
def _sections_by_event(self, event):
conf = self.get_config_obj()
for sec in conf.sections():
if sec.startswith("mail ") and conf.get(sec, "event") == event:
yield sec
def _send_email(self, to, cc, headers, body):
message = email.message.Message()
for k, v in headers.items():
message[k] = v
message.set_payload(body, charset="utf-8")
self._smtp_send(to, cc, message)
def gen_message_id(self):
return "<%s@patchew.org>" % uuid.uuid1()
def get_notifications(self, project):
ret = {}
for k, v in project.get_properties().items():
if not k.startswith("email.notifications."):
continue
tn = k[len("email.notifications."):]
if "." not in tn:
continue
an = tn[tn.find(".") + 1:]
tn = tn[:tn.find(".")]
ret.setdefault(tn, {})
ret[tn][an] = v
ret[tn]["name"] = tn
return ret
def on_event(self, event, **params):
class GitEmailCancelled(Exception):
pass
po = None
mo = None
for v in list(params.values()):
if isinstance(v, Message):
mo = v
po = mo.project
break
elif isinstance(v, Project):
po = v
break
if not po:
return
for nt in list(self.get_notifications(po).values()):
headers = {}
if not nt["enabled"]:
continue
if nt["event"] != event:
continue
def cancel_email():
raise GitEmailCancelled
params["cancel"] = cancel_email
ctx = Context(params)
try:
subject = Template(nt["subject_template"]).render(ctx)
body = Template(nt["body_template"]).render(ctx)
to = [x.strip() for x in Template(nt["to"]).render(ctx).split()]
except GitEmailCancelled:
continue
cc = []
if nt["reply_to_all"] and mo:
to += [mo.get_sender_addr()]
cc = [x[1] for x in mo.get_receivers()]
if mo and nt["in_reply_to"]:
headers["In-Reply-To"] = "<%s>" % mo.message_id
if nt["reply_subject"] and mo:
subject = "Re: " + mo.subject if not mo.subject.startswith("Re:") else mo.subject
if not (subject and body and to):
continue
headers["Subject"] = subject
self._send_email(to, cc, headers, body)
def prepare_project_hook(self, request, project):
if not project.maintained_by(request.user):
return
project.extra_info.append({"title": "Email notifications",
"class": "info",
"content": self.build_config_html(request,
project)})
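
The on_event handler above renders subject and body templates for plain-text email, where Django's default HTML autoescaping mangles characters such as & and quotes; the commit's fix passes autoescape=False to Context. A self-contained sketch of the difference (assumes only that Django is installed; settings are configured inline):

import django
from django.conf import settings

settings.configure(TEMPLATES=[
    {'BACKEND': 'django.template.backends.django.DjangoTemplates'},
])
django.setup()

from django.template import Template, Context

params = {'subject': 'fix for "foo" & friends'}
print(Template('{{ subject }}').render(Context(params)))
# -> fix for &quot;foo&quot; &amp; friends  (escaped: wrong for plain text)
print(Template('{{ subject }}').render(Context(params, autoescape=False)))
# -> fix for "foo" & friends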
|
Python
| 0
|
@@ -7361,16 +7361,34 @@
t(params
+, autoescape=False
)%0A%0A
|
06d0287a8fef0679b281296e6ed76e0b6c803acb
|
Improve management command to clear or clean kvstore
|
sorl/thumbnail/management/commands/thumbnail.py
|
sorl/thumbnail/management/commands/thumbnail.py
|
from django.core.management.base import BaseCommand, CommandError
from sorl.thumbnail.conf import settings
from sorl.thumbnail import default
class Command(BaseCommand):
help = (
u'Handles thumbnails and key value store'
)
args = '[cleanup, clear]'
option_list = BaseCommand.option_list
def handle(self, *labels, **options):
verbosity = int(options.get('verbosity'))
if len(labels) != 1:
raise CommandError('`%s` is not a valid argument' % labels)
label = labels[0]
if label not in ['cleanup', 'clear']:
raise CommandError('`%s` unknown action' % label)
if label == 'cleanup':
if verbosity >= 1:
self.stdout.write("Cleanup thumbnails ... ")
default.kvstore.cleanup()
if verbosity >= 1:
self.stdout.write("[Done]\n")
if label == 'clear':
if verbosity >= 1:
self.stdout.write("Clear the Key Value Store ... ")
default.kvstore.clear()
if verbosity >= 1:
self.stdout.write("[Done]\n")
|
Python
| 0
|
@@ -1,12 +1,23 @@
+import sys%0A
from django.
@@ -74,49 +74,8 @@
ror%0A
-from sorl.thumbnail.conf import settings%0A
from
@@ -369,16 +369,116 @@
sity'))%0A
+%0A if not labels:%0A print self.print_help('thumbnail', '')%0A sys.exit(1)%0A%0A
@@ -824,24 +824,40 @@
bnails ... %22
+, ending=' ... '
)%0A%0A
@@ -958,18 +958,16 @@
(%22%5BDone%5D
-%5Cn
%22)%0A%0A
@@ -962,32 +962,34 @@
one%5D%22)%0A%0A
+el
if label == 'cle
@@ -1084,22 +1084,33 @@
ue Store
+%22, ending='
...
-%22
+'
)%0A%0A
@@ -1217,9 +1217,7 @@
one%5D
-%5Cn
%22)%0A
|
a8805982ff5b92a59d25a28e2acd63af3c210f65
|
Add brute force sol
|
lc0945_minimum_increment_to_make_array_unique.py
|
lc0945_minimum_increment_to_make_array_unique.py
|
"""Leetcode 945. Minimum Increment to Make Array Unique
Medium
URL: https://leetcode.com/problems/minimum-increment-to-make-array-unique/
Given an array of integers A, a move consists of choosing any A[i], and
incrementing it by 1.
Return the least number of moves to make every value in A unique.
Example 1:
Input: [1,2,2]
Output: 1
Explanation: After 1 move, the array could be [1, 2, 3].
Example 2:
Input: [3,2,1,2,1,7]
Output: 6
Explanation: After 6 moves, the array could be [3, 4, 1, 2, 5, 7].
It can be shown with 5 or less moves that it is impossible for the array to
have all unique values.
Note:
- 0 <= A.length <= 40000
- 0 <= A[i] < 40000
"""
class SolutionSortPrevPlusOne(object):
def minIncrementForUnique(self, A):
"""
:type A: List[int]
:rtype: int
Time complexity: O(n*logn), where n is A's length.
Space complexity: O(1).
"""
if not A:
return 0
# Sort the input array, compare current number with previous one.
moves = need = 0
for num in sorted(A):
# Current number need to be at least previous + 1.
moves += max(need - num, 0)
need = max(num, need) + 1
return moves
def main():
# Output: 1
A = [1, 2, 2]
print SolutionSortPrevPlusOne().minIncrementForUnique(A)
# Output: 6
A = [3, 2, 1, 2, 1, 7]
[1, 1, 2, 2, 3, 7]
print SolutionSortPrevPlusOne().minIncrementForUnique(A)
if __name__ == '__main__':
main()
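
A short worked trace of SolutionSortPrevPlusOne on the second example, to make the need bookkeeping concrete (each number must land at need or above):

# Trace for A = [3, 2, 1, 2, 1, 7]; sorted(A) = [1, 1, 2, 2, 3, 7]:
#   num=1: moves += max(0 - 1, 0) = 0; need = max(1, 0) + 1 = 2
#   num=1: moves += max(2 - 1, 0) = 1; need = max(1, 2) + 1 = 3
#   num=2: moves += max(3 - 2, 0) = 1  (total 2); need = 4
#   num=2: moves += max(4 - 2, 0) = 2  (total 4); need = 5
#   num=3: moves += max(5 - 3, 0) = 2  (total 6); need = 6
#   num=7: moves += max(6 - 7, 0) = 0  (total 6); need = 8
# Final answer: 6, matching the expected output.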
|
Python
| 0.99996
|
@@ -655,16 +655,1197 @@
00%0A%22%22%22%0A%0A
+class SolutionBruteForce(object):
+    def minIncrementForUnique(self, A):
+        """
+        :type A: List[int]
+        :rtype: int
+
+        Note: Time limit exceeded.
+
+        Time complexity: O(n^2), where n is A's length.
+        Space complexity: O(n).
+        """
+        from collections import defaultdict
+
+        if not A:
+            return 0
+
+        # Create a dict:number->count.
+        num_count_d = defaultdict(int)
+        for num in A:
+            num_count_d[num] += 1
+
+        # While exists repeated numbers, move number by incrementing it.
+        moves = 0
+        repeated_nums = set([num for num, count in num_count_d.items()
+                             if count > 1])
+        while repeated_nums:
+            num = repeated_nums.pop()
+            while num_count_d[num] > 1:
+                num_count_d[num] -= 1
+                num_count_d[num + 1] += 1
+                moves += 1
+
+            # If num's or num + 1's counts > 1, add back to set.
+            if num_count_d[num] > 1:
+                repeated_nums.add(num)
+            if num_count_d[num + 1] > 1:
+                repeated_nums.add(num + 1)
+
+        return moves
+
+
class So
@@ -1867,32 +1867,32 @@
lusOne(object):%0A
-
def minIncre
@@ -2213,24 +2213,48 @@
= need = 0%0A
+ print sorted(A)%0A
for
@@ -2482,16 +2482,72 @@
, 2, 2%5D%0A
+ print SolutionBruteForce().minIncrementForUnique(A)%0A
prin
@@ -2651,30 +2651,59 @@
- %5B1, 1, 2, 2, 3, 7%5D
+print SolutionBruteForce().minIncrementForUnique(A)
%0A
|
435dd1ce0b2b53bcc2287e811aed8ed76db03011
|
Add missing assignment to netid
|
shrunk/app.py
|
shrunk/app.py
|
""" shRUnk - Rutgers University URL Shortener
Sets up a Flask application for shRUnk.
"""
from flask import Flask, render_template, request, redirect, g
from flask_login import LoginManager, login_required, current_user, logout_user
from flask_auth import Auth
from shrunk.client import ShrunkCursor
from shrunk.forms import BlockLinksForm
from shrunk.forms import LinkForm, RULoginForm, BlacklistUserForm, AddAdminForm
from shrunk.user import User, get_user, admin_required
from shrunk.util import get_db_client, set_logger, formattime
# Create application
app = Flask(__name__)
# Import settings in config.py
app.config.from_pyfile("config.py", silent=True)
app.secret_key = app.config['SECRET_KEY']
# Initialize logging
set_logger(app)
# Initialize login manager
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.login_view = '/login'
# Allows us to use the function in our templates
app.jinja_env.globals.update(formattime=formattime)
@login_manager.user_loader
def load_user(userid):
"""Loads user object for login.
:Parameters:
- `userid`: An id for the user (typically a NetID).
"""
return User(userid)
def render_login(**kwargs):
"""Renders the login template.
Takes a WTForm in the keyword arguments.
"""
return render_template('login.html', **kwargs)
def login_success(user):
"""Function executed on successful login.
Redirects the user to the homepage.
:Parameters:
- `user`: The user that has logged in.
"""
return redirect('/')
def unauthorized_admin():
return redirect("/")
### Views ###
@app.route("/")
def render_index(**kwargs):
"""Renders the homepage.
Renders the homepage for the current user. By default, this renders all of
the links owned by them. If a search has been made, then only the links
matching their search query are shown.
"""
client = get_db_client(app, g)
# Grab the current page number
try:
page = int(request.args["p"])
except:
page = 0
# If this exists, execute a search query
try:
query = request.args["search"]
except:
query = ""
# Depending on the type of user, get info from the database
is_admin = not current_user.is_anonymous() and current_user.is_admin()
if not hasattr(current_user, "netid"):
cursor = ShrunkCursor(None)
app.logger.info("render index: anonymous user")
elif is_admin:
if query:
cursor = client.search(query)
else:
cursor = client.get_all_urls(query)
else:
netid = current_user.netid
if query:
cursor = client.search(query, netid=netid)
app.logger.info("search: {}, '{}'".format(netid, query))
else:
cursor = client.get_urls(current_user.netid)
app.logger.info("render index: {}".format(netid))
# Perform pagination and get the results
count = cursor.cursor.count()
page, lastpage = cursor.paginate(page, app.config["MAX_DISPLAY_LINKS"])
links = cursor.get_results()
return render_template("index.html",
admin=is_admin,
links=links,
count=count,
linkserver_url=app.config["LINKSERVER_URL"],
netid=netid,
page=page,
lastpage=lastpage,
query=query,
**kwargs)
@app.route("/login", methods=['GET', 'POST'])
def login():
"""Handles authentication."""
a = Auth(app.config['AUTH'], get_user)
return a.login(request, RULoginForm, render_login, login_success)
@app.route("/logout")
@login_required
def logout():
"""Handles logging out."""
logout_user()
return redirect('/')
@app.route("/add", methods=["GET", "POST"])
@login_required
def add_link():
"""Adds a new link for the current user."""
form = LinkForm(request.form)
client = get_db_client(app, g)
if request.method == "POST":
if form.validate():
# TODO Handle an error on db insert
kwargs = form.to_json()
response = client.create_short_url(
netid=current_user.netid,
**kwargs
)
if not response:
return render_template("add.html", errors=["Blocked Link"])
return render_index(new_url=response,
new_target_url=kwargs["long_url"])
else:
return render_template("add.html",
errors=form.errors,
netid=current_user.netid)
if not request.form:
form = LinkForm()
return render_template("add.html", netid=current_user.netid)
@app.route("/delete", methods=["GET", "POST"])
@login_required
def delete_link():
"""Deletes a link."""
client = get_db_client(app, g)
# TODO Handle the response intelligently, or put that logic somewhere else
if request.method == "POST":
app.logger.info("Deleting URL: {}".format(request.form["short_url"]))
client.delete_url(request.form["short_url"])
return render_index(deleted_url=request.form["short_url"])
@app.route("/admin/")
@app.route("/admin/<action>", methods=["GET", "POST"])
@login_required
@admin_required(unauthorized_admin)
def admin_sub(action=None):
"""Renders the admin interface.
:Parameters:
- `action`: Which action to take. This can be one of the following:
1. blacklist - Go to blacklist panel used for blacklisting users
2. add - Go to add panel used for adding additional admins
3. blocklink - Go to block link panel, used for blacklisting long urls
"""
netid = current_user.netid
if action == None:
return render_template("admin.html", netid=netid)
client = get_db_client(app, g)
if action == 'blacklist':
form = BlacklistUserForm(request.form)
if request.method == "POST" and form.validate():
if form.action.data == 'ban':
res = client.blacklist_user(form.netid.data, netid)
else:
res = client.allow_user(form.netid.data)
return render_template('admin_blacklist.html', form=form,
netid=netid, msg='Success!')
return render_template('admin_blacklist.html', netid=netid, form=form)
elif action == 'add':
form = AddAdminForm(request.form)
if request.method == "POST" and form.validate():
res = client.add_admin(form.netid.data, current_user.netid)
return render_template('admin_add.html', form=form, netid=netid,
msg='Success!')
return render_template('admin_add.html', form=form, netid=netid)
elif action == 'blocklink':
form = BlockLinksForm(request.form)
if request.method == "POST" and form.validate():
if form.action.data == 'block':
res = client.block_link(form.link.data, netid)
else:
res = client.allow_link(form.link.data)
return render_template('admin_block_links.html', form=form,
msg='Success!', netid=netid)
return render_template('admin_block_links.html', form=form,
netid=netid)
else:
return redirect('/')
|
Python
| 0
|
@@ -2343,16 +2343,37 @@
etid%22):%0A
+ netid = None%0A
@@ -2475,16 +2475,51 @@
_admin:%0A
+ netid = current_user.netid%0A
|
b3a0178f0f6a2eadc823bacd91545c5ee861b25b
|
stop collecting the unmerged commits from unmerged branches
|
lib/markdownify.py
|
lib/markdownify.py
|
#!/usr/bin/env python3
import sys
import os
from os.path import exists as path_exists, join as path_join
from textwrap import indent
from .flatten import flatten
from .run import run_command as run
from .find_unmerged_branches import find_unmerged_branches_in_cwd
def indent4(string):
return indent(string, ' ')
def unicode_truncate(s, length, encoding='utf-8'):
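    # Truncating the byte string at `length` may split a multibyte character;
    # decoding with errors='ignore' silently drops any trailing partial character.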
encoded = s.encode(encoding)[:length]
return encoded.decode(encoding, 'ignore')
def process_file(filename, steps, spec, cwd):
steps = steps if type(steps) is list else [steps]
output = []
header = '### ' + filename
options = {
'timeout': 4,
'truncate_after': 10000, # 10K
'truncate_contents': False,
}
options.update(spec.get('options', {}).get(filename, {}))
file_status, file_contents = run(['cat', filename])
if file_status == 'success':
_, last_edit = run(['git', 'log',
'-n', '1',
r'--pretty=format:%cd',
'--', filename])
header += ' ({})'.format(last_edit)
output.extend([header, '\n'])
if options['truncate_contents']:
file_contents = unicode_truncate(file_contents, options['truncate_contents'])
if file_status != 'success':
output.append('**the file %s does not exist**\n' % filename)
output.append('`ls .` says that these files exist:\n')
output.append(indent4('\n'.join(os.listdir('.'))) + '\n\n')
return '\n'.join(output)
output.extend(['**contents of %s**\n' % filename, indent4(file_contents)])
output.append('\n')
any_step_failed = False
for step in steps:
if step and not any_step_failed:
command = step.replace('$@', filename)
status, compilation = run(command.split())
if compilation:
warnings_header = '**warnings: `%s`**\n' % (command)
output.extend([warnings_header, indent4(compilation)])
else:
warnings_header = '**no warnings: `%s`**' % (command)
output.extend([warnings_header])
if status != 'success':
any_step_failed = True
output.append('\n')
elif any_step_failed:
break
if not steps or any_step_failed:
return '\n'.join(output)
inputs = spec.get('inputs', {})
tests = spec.get('tests', {}).get(filename, [])
if type(tests) is not list:
tests = [tests]
for test in tests:
if not test:
continue
test = test.replace('$@', './%s' % filename)
test_string = test
test = test.split(' | ')
input_for_test = None
for cmd in test[:-1]:
# decode('unicode_escape') de-escapes the backslash-escaped strings.
# like, it turns the \n from "echo Hawken \n 26" into an actual newline,
# like a shell would.
cmd = bytes(cmd, 'utf-8').decode('unicode_escape')
cmd = cmd.split(' ')
status, input_for_test = run(cmd, input=input_for_test)
input_for_test = input_for_test.encode('utf-8')
test_cmd = test[-1].split(' ')
if path_exists(path_join(cwd, filename)):
status, full_result = run(test_cmd,
input=input_for_test,
timeout=options['timeout'])
result = unicode_truncate(full_result, options['truncate_after'])
truncate_msg = 'output truncated after %d bytes' % (options['truncate_after']) \
if full_result != result else ''
items = [item for item in [status, truncate_msg] if item]
status = '; '.join(items)
output.append('**results of `%s`** (status: %s)\n' % (test_string, status))
output.append(indent4(result))
else:
output.append('%s could not be found.\n' % filename)
output.append('\n')
output.extend(["\n\n"])
return '\n'.join(output)
def find_unmerged_branches():
# approach taken from https://stackoverflow.com/a/3602022/2347774
unmerged_branches = find_unmerged_branches_in_cwd()
if not unmerged_branches:
return ''
result = 'Unmerged branches:\n'
for b in unmerged_branches:
_, commits = run(['git', 'cherry', '-v', 'master', b])
commits = [c.strip() for c in commits.split('\n') if c.strip()]
branch_msg = ' {}\n{}'.format(b, '\n'.join([' {}'.format(c) for c in commits]))
result += branch_msg
return result + '\n\n\n'
def markdownify_throws(hw_id, username, spec):
cwd = os.getcwd()
results = []
inputs = spec.get('inputs', {})
for filename, contents in inputs.items():
with open(path_join(cwd, filename), 'w') as outfile:
outfile.write(contents)
files = [(filename, steps)
for file in spec['files']
for filename, steps in file.items()]
for filename, steps in files:
result = process_file(filename, steps, spec, cwd)
results.append(result)
[run(['rm', '-f', '%s.exec' % file]) for file, steps in files]
[os.remove(path_join(cwd, inputfile)) for inputfile in inputs]
unmerged = find_unmerged_branches()
result_string = ''.join(results)
return '# {} — {} \n\n{}{}'.format(hw_id, username, unmerged, result_string)
def markdownify(*args, **kwargs):
try:
return markdownify_throws(*args, **kwargs)
except Exception as err:
return str(err)
|
Python
| 0
|
@@ -4370,154 +4370,16 @@
-_, commits = run(%5B'git', 'cherry', '-v', 'master', b%5D)%0A commits = %5Bc.strip() for c in commits.split('%5Cn') if c.strip()%5D%0A branch_msg
+result +
= '
@@ -4385,18 +4385,16 @@
%7B%7D%5Cn
-%7B%7D
'.format
@@ -4399,90 +4399,9 @@
at(b
-, '%5Cn'.join(%5B' %7B%7D'.format(c) for c in commits%5D))%0A result += branch_msg
+)
%0A%0A
|
ca3b1c09705d65307851711dca71714915e4525a
|
Fix the formatting of log message
|
ipaqe_provision_hosts/__main__.py
|
ipaqe_provision_hosts/__main__.py
|
#!/usr/bin/env python
from __future__ import print_function
import argparse
import logging
import sys
from ipaqe_provision_hosts.runner import create, delete
from ipaqe_provision_hosts.errors import IPAQEProvisionerError
CONFIG_HELP_MSG = (
'Configuration file for the topology. Must contain core configuration as '
    'well as configuration for backend. If not specified, the tool checks '
'the configuration from /etc/ipaqe-provision-hosts/config.yaml')
def main():
parser = argparse.ArgumentParser(description='FreeIPA provisioning')
parser.add_argument('-d', '--debug', dest='loglevel',
help='Set logging level. Default level is ERROR',
metavar='LEVEL')
subparsers = parser.add_subparsers(dest="command")
parser_create = subparsers.add_parser("create")
parser_create.add_argument("--topology", required=True, metavar='FILE',
help="The topology template file")
parser_create.add_argument("--output", required=True, metavar='FILE',
help="File to print final configuration into")
parser_create.add_argument("--config", required=False, metavar='FILE',
help=CONFIG_HELP_MSG)
parser_delete = subparsers.add_parser("delete")
parser_delete.add_argument("--config", required=False, metavar='FILE',
help=CONFIG_HELP_MSG)
args = parser.parse_args()
loglevel = None
if args.loglevel:
try:
loglevel = getattr(logging, args.loglevel.upper())
except AttributeError:
loglevel = logging.ERROR
finally:
logging.basicConfig(level=loglevel)
log = logging.getLogger(__name__)
log.debug('Setting log level to {}'.format(logging.getLevelName(loglevel)))
try:
if args.command == "create":
create(args.topology, args.config, args.output)
elif args.command == "delete":
delete(args.config)
except IPAQEProvisionerError:
# Backend exception should be handled by now
sys.exit(1)
except Exception as e:
log.error("Unhandled exception: %s", e)
sys.exit(1)
if __name__ == "__main__":
main()
|
Python
| 0.999999
|
@@ -1799,19 +1799,13 @@
to
-%7B%7D'.format(
+%25s',
logg
@@ -1831,17 +1831,16 @@
glevel))
-)
%0A%0A tr
|
0a524042190bed746cd4805d7868a9e5efbae20b
|
Add validator WSDL to service
|
sii/server.py
|
sii/server.py
|
# -*- coding: UTF-8 -*-
from sii.resource import SII
from zeep import Client
from requests import Session
from zeep.transports import Transport
wsdl_files = {
'emitted_invoice': 'http://www.agenciatributaria.es/static_files/AEAT/Contenidos_Comunes/La_Agencia_Tributaria/Modelos_y_formularios/Suministro_inmediato_informacion/FicherosSuministros/V_07/SuministroFactEmitidas.wsdl',
'received_invoice': 'http://www.agenciatributaria.es/static_files/AEAT/Contenidos_Comunes/La_Agencia_Tributaria/Modelos_y_formularios/Suministro_inmediato_informacion/FicherosSuministros/V_07/SuministroFactRecibidas.wsdl',
}
def get_dict_data(invoice):
return SII.generate_object(invoice)
class Service(object):
def __init__(self, certificate, key, test_mode=False):
self.certificate = certificate
self.key = key
        self.test_mode = True  # For now, force the service to work in test mode
self.emitted_service = None
self.received_service = None
self.result = {}
def send(self, invoice):
if invoice.type.startswith('out_'):
if self.emitted_service is None:
self.emitted_service = self.create_service(invoice.type)
else:
if self.received_service is None:
self.received_service = self.create_service(invoice.type)
self.send_invoice(invoice)
def create_service(self, i_type):
proxy_address = 'https://sii-proxy.gisce.net:4443'
session = Session()
session.cert = (self.certificate, self.key)
session.verify = False
transport = Transport(session=session)
if i_type.startswith('out_'):
wsdl = wsdl_files['emitted_invoice']
port_name = 'SuministroFactEmitidas'
binding_name = '{https://www2.agenciatributaria.gob.es/static_files/common/internet/dep/aplicaciones/es/aeat/ssii/fact/ws/SuministroFactEmitidas.wsdl}siiBinding'
type_address = '/wlpl/SSII-FACT/ws/fe/SiiFactFEV1SOAP'
else:
wsdl = wsdl_files['received_invoice']
port_name = 'SuministroFactRecibidas'
binding_name = '{https://www2.agenciatributaria.gob.es/static_files/common/internet/dep/aplicaciones/es/aeat/ssii/fact/ws/SuministroFactRecibidas.wsdl}siiBinding'
type_address = '/wlpl/SSII-FACT/ws/fr/SiiFactFRV1SOAP'
if self.test_mode:
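            # The SII WSDLs define separate test endpoints; appending the
            # 'Pruebas' suffix selects the test port instead of the production one.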
port_name += 'Pruebas'
client = Client(wsdl=wsdl, port_name=port_name, transport=transport,
service_name='siiService')
address = '{0}{1}'.format(proxy_address, type_address)
service = client.create_service(binding_name, address)
return service
def send_invoice(self, invoice):
msg_header, msg_invoice = self.get_msg(invoice)
try:
if invoice.type == 'out_invoice':
res = self.emitted_service.SuministroLRFacturasEmitidas(
msg_header, msg_invoice)
elif invoice.type == 'in_invoice':
res = self.received_service.SuministroLRFacturasRecibidas(
msg_header, msg_invoice)
self.result['sii_sent'] = res['EstadoEnvio'] == 'Correcto'
self.result['sii_return'] = res
except Exception as fault:
self.result['sii_return'] = fault
def list_invoice(self, invoice):
msg_header, msg_invoice = self.get_msg(invoice)
try:
if invoice.type == 'in_invoice':
res = self.received_service.ConsultaLRFacturasRecibidas(
msg_header, msg_invoice)
if res['EstadoEnvio'] == 'Correcto':
self.result['sii_sent'] = True
self.result['sii_return'] = res
except Exception as fault:
self.result['sii_return'] = fault
def get_msg(self, invoice):
dict_from_marsh = get_dict_data(invoice=invoice)
res_header = res_invoices = None
if invoice.type.startswith('out_'):
res_header = dict_from_marsh['SuministroLRFacturasEmitidas'][
'Cabecera']
res_invoices = dict_from_marsh['SuministroLRFacturasEmitidas'][
'RegistroLRFacturasEmitidas']
elif invoice.type.startswith('in_'):
res_header = dict_from_marsh['SuministroLRFacturasRecibidas'][
'Cabecera']
res_invoices = dict_from_marsh['SuministroLRFacturasRecibidas'][
'RegistroLRFacturasRecibidas']
return res_header, res_invoices
|
Python
| 0
|
@@ -606,16 +606,157 @@
.wsdl',%0A
+ 'ids_validator': 'https://www2.agenciatributaria.gob.es/static_files/common/internet/dep/aplicaciones/es/aeat/burt/jdit/ws/VNifV1.wsdl',%0A
%7D%0A%0A%0Adef
|
551c33b0f366d6df1fc6752f79f6ef68fc818851
|
Fix default dict construction
|
python/smqtk/web/nearestneighbor_service/__init__.py
|
python/smqtk/web/nearestneighbor_service/__init__.py
|
import mimetypes
import multiprocessing
import os
import flask
import requests
from smqtk.algorithms.descriptor_generator import get_descriptor_generator_impls
from smqtk.algorithms.nn_index import NearestNeighborsIndex, get_nn_index_impls
from smqtk.representation import DescriptorElementFactory
from smqtk.representation.data_element.file_element import DataFileElement
from smqtk.representation.data_element.memory_element import DataMemoryElement
from smqtk.representation.data_element.url_element import DataUrlElement
from smqtk.utils import SimpleTimer
from smqtk.utils import plugin
from smqtk.utils.configuration import merge_configs
from smqtk.web import SmqtkWebApp
MIMETYPES = mimetypes.MimeTypes()
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
class NearestNeighborServiceServer (SmqtkWebApp):
"""
Simple server that takes in a specification of the following form:
/nn/<path:uri>[?...]
Computes the nearest neighbor index for the given data and returns a list
of nearest neighbors in the following format
Standard return JSON::
{
"success": <bool>,
"neighbors": [ <float>, ... ]
"message": <string>,
"reference_uri": <uri>
}
"""
@classmethod
def is_usable(cls):
return True
@classmethod
def get_default_config(cls):
"""
Generate and return a default configuration dictionary for this class.
This will be primarily used for generating what the configuration
dictionary would look like for this class without instantiating it.
:return: Default configuration dictionary for the class.
:rtype: dict
"""
c = super(NearestNeighborServiceServer, cls).get_default_config()
merge_configs(c, {
"descriptor_factory": DescriptorElementFactory.get_default_config(),
"descriptor_generator": {
plugin.make_config(get_descriptor_generator_impls)
},
"nn_index": plugin.make_config(get_nn_index_impls),
})
return c
def __init__(self, json_config):
"""
Initialize application based of supplied JSON configuration
:param json_config: JSON configuration dictionary
:type json_config: dict
"""
super(NearestNeighborServiceServer, self).__init__(json_config)
# Descriptor factory setup
self.log.info("Initializing DescriptorElementFactory")
self.descr_elem_factory = DescriptorElementFactory.from_config(
self.json_config['descriptor_factory']
)
# Descriptor generator configuration labels
#: :type: dict[str, dict]
self.generator_config = self.json_config['descriptor_generator']
#: :type: smqtk.algorithms.NearestNeighborsIndex
self.nn_index = plugin.from_plugin_config(
json_config['nn_index'],
get_nn_index_impls
)
#: :type: smqtk.algorithms.DescriptorGenerator
self.descriptor_generator_inst = plugin.from_plugin_config(
self.generator_config,
get_descriptor_generator_impls)
@self.route("/nn/<path:uri>")
def compute_nearest_neighbors(uri):
"""
Data modes for upload/use::
- local filepath
- base64
- http/s URL
The following sub-sections detail how different URI's can be used.
Local Filepath
--------------
The URI string must be prefixed with ``file://``, followed by the
full path to the data file to describe.
Base 64 data
------------
The URI string must be prefixed with "base64://", followed by the
base64 encoded string. This mode also requires an additional
``?content_type=`` to provide data content type information. This
mode saves the encoded data to temporary file for processing.
HTTP/S address
--------------
This is the default mode when the URI prefix is none of the above.
This uses the requests module to locally download a data file
for processing.
JSON Return format::
{
"success": <bool>
"message": <str>
"neighbors": <None|list[float]>
"reference_uri": <str>
}
:type uri: str
"""
message = "execution nominal"
descriptor = None
de = None
try:
de = self.resolve_data_element(uri)
except ValueError, ex:
message = "URI resolution issue: %s" % str(ex)
if de:
try:
descriptor = self.descriptor_generator_inst.\
compute_descriptor(de, self.descr_elem_factory)
except RuntimeError, ex:
message = "Descriptor extraction failure: %s" % str(ex)
except ValueError, ex:
message = "Data content type issue: %s" % str(ex)
# fail here if de is None
# Default is 8
num_neighbors = flask.request.args.get("num_neighbors", 8)
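            # NOTE: flask.request.args values arrive as strings, so a query
            # such as ?num_neighbors=5 yields '5' here; nn() implementations
            # may therefore need int(num_neighbors).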
neighbors = []
dists = []
if descriptor is not None:
try:
neighbors, dists = \
self.nn_index.nn(descriptor, n=num_neighbors)
except ValueError, ex:
message = "Descriptor or index related issue: %s" % str(ex)
# TODO: Return the optional descriptor vectors for the neighbors
return flask.jsonify({
"success": descriptor is not None,
"message": message,
"neighbors": [n.uuid() for n in neighbors],
"distances": dists,
"reference_uri": uri
})
def get_config(self):
return self.json_config
def resolve_data_element(self, uri):
"""
Given the URI to some data, resolve it down to a DataElement instance.
:raises ValueError: Issue with the given URI regarding either URI source
resolution or data resolution.
:param uri: URI to data
:type uri: str
:return: DataElement instance wrapping given URI to data.
:rtype: smqtk.representation.DataElement
"""
# Resolve URI into appropriate DataElement instance
if uri[:7] == "file://":
self.log.debug("Given local disk filepath")
filepath = uri[7:]
if not os.path.isfile(filepath):
raise ValueError("File URI did not point to an existing file "
"on disk.")
else:
de = DataFileElement(filepath)
elif uri[:9] == "base64://":
self.log.debug("Given base64 string")
content_type = flask.request.args.get('content_type', None)
self.log.debug("Content type: %s", content_type)
if not content_type:
raise ValueError("No content-type with given base64 data")
else:
b64str = uri[9:]
de = DataMemoryElement.from_base64(b64str, content_type)
else:
self.log.debug("Given URL")
try:
de = DataUrlElement(uri)
except requests.HTTPError, ex:
raise ValueError("Failed to initialize URL element due to "
"HTTPError: %s" % str(ex))
return de
APPLICATION_CLASS = NearestNeighborServiceServer
|
Python
| 0.000001
|
@@ -1899,18 +1899,16 @@
erator%22:
- %7B
%0A
@@ -1966,30 +1966,16 @@
r_impls)
-%0A %7D
,%0A
|
d3c068ea7e240326235f3ac567354708246881de
|
Remove UnicodeWriter.
|
pybossa/exporter/csv_export.py
|
pybossa/exporter/csv_export.py
|
# -*- coding: utf8 -*-
# This file is part of PYBOSSA.
#
# Copyright (C) 2015 Scifabric LTD.
#
# PYBOSSA is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PYBOSSA is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PYBOSSA. If not, see <http://www.gnu.org/licenses/>.
# Cache global variables for timeouts
"""
CSV Exporter module for exporting tasks and task results out of PYBOSSA
"""
import tempfile
from pybossa.exporter import Exporter
from pybossa.core import uploader, task_repo
from pybossa.model.task import Task
from pybossa.model.task_run import TaskRun
from pybossa.util import UnicodeWriter
from werkzeug.datastructures import FileStorage
from werkzeug.utils import secure_filename
import pandas as pd
class CsvExporter(Exporter):
def _respond_csv(self, table, project_id, info_only=False):
flat_data = self._get_data(table, project_id,
flat=True, info_only=info_only)
return pd.DataFrame(flat_data)
def _make_zip(self, project, ty):
name = self._project_name_latin_encoded(project)
dataframe = self._respond_csv(ty, project.id)
if dataframe is not None:
info_dataframe = self._respond_csv(ty, project.id, info_only=True)
datafile = tempfile.NamedTemporaryFile()
info_datafile = tempfile.NamedTemporaryFile()
try:
dataframe.to_csv(datafile, index=False,
encoding='utf-8')
info_dataframe.to_csv(
info_datafile, index=False, encoding='utf-8')
datafile.flush()
info_datafile.flush()
zipped_datafile = tempfile.NamedTemporaryFile()
try:
_zip = self._zip_factory(zipped_datafile.name)
_zip.write(
datafile.name, secure_filename('%s_%s.csv' % (name, ty)))
_zip.write(
info_datafile.name, secure_filename('%s_%s_info_only.csv' % (name, ty)))
_zip.close()
container = "user_%d" % project.owner_id
_file = FileStorage(
filename=self.download_name(project, ty), stream=zipped_datafile)
uploader.upload_file(_file, container=container)
finally:
zipped_datafile.close()
finally:
datafile.close()
info_datafile.close()
def download_name(self, project, ty):
return super(CsvExporter, self).download_name(project, ty, 'csv')
def pregenerate_zip_files(self, project):
print("%d (csv)" % project.id)
self._make_zip(project, "task")
self._make_zip(project, "task_run")
self._make_zip(project, "result")
|
Python
| 0.000001
|
@@ -1030,47 +1030,8 @@
Run%0A
-from pybossa.util import UnicodeWriter%0A
from
|
01afa5bdbdf1900b5d67ffb6b0bb880d257a1869
|
Update server.py
|
src/server.py
|
src/server.py
|
"""Server for http-server echo assignment."""
import socket # pragma: no cover
import sys # pragma: no cover
from email.utils import formatdate
def server(): # pragma: no cover
"""
    Opens the server and waits for input from a client.
    Closes the connection on a completed message.
    Closes the server with Ctrl-C.
"""
server = socket.socket(socket.AF_INET,
socket.SOCK_STREAM,
socket.IPPROTO_TCP)
address = ('127.0.0.1', 5000)
server.bind(address)
server.listen(1)
while True:
try:
connection, address = server.accept()
message = b''
buffer_length = 8
message_complete = False
while not message_complete:
part = connection.recv(buffer_length)
message += part
if b'\r\n\r\n' in message:
message_complete = True
print(message)
connection.sendall(response_ok())
connection.close()
except KeyboardInterrupt:
print('\nServer closed good bye.')
server.shutdown(socket.SHUT_WR)
server.close()
sys.exit(0)
def response_ok():
"""Send a response OK."""
msg = b'HTTP/1.1 200 OK\r\nMessage recieved.\r\n'
msg += u'Date: {}\r\n\r\n'.format(formatdate(usegmt=True)).encode('utf8')
return msg
def response_error():
"""Send a response erorr."""
return b'HTTP/1.1 500 Internal Server Error\r\nError!'
if __name__ == '__main__': # pragma: no cover
print('Server ready and waiting...\n')
server()
|
Python
| 0.000001
|
@@ -1233,24 +1233,31 @@
%22Send a
+200 OK
response
OK.%22%22%22%0A
@@ -1248,19 +1248,16 @@
response
- OK
.%22%22%22%0A
@@ -1287,29 +1287,8 @@
K%5Cr%5C
-nMessage recieved.%5Cr%5C
n'%0A
@@ -1417,24 +1417,41 @@
%22Send a
+500 server error
response
erorr.%22
@@ -1446,14 +1446,8 @@
onse
- erorr
.%22%22%22
@@ -1498,18 +1498,8 @@
rror
-%5Cr%5CnError!
'%0A%0A%0A
|
5c97b9911a2dafde5fd1e4c40cda4e84974eb855
|
Allow keys to be set (in anticipation of write commands). Better object __repr__() for spaces and tickets.
|
assembla/lib.py
|
assembla/lib.py
|
from functools import wraps
class AssemblaObject(object):
"""
Proxies getitem calls (eg: `instance['id']`) to a dictionary `instance.data['id']`.
"""
def __init__(self, data):
self.data = data
def __getitem__(self, key):
return self.data[key]
def keys(self):
return self.data.keys()
def values(self):
return self.data.values()
def get(self, *args, **kwargs):
return self.data.get(*args, **kwargs)
def assembla_filter(func):
"""
Filters :data for the objects in it which possess attributes equal in
name/value to a key/value in kwargs.
Each key/value combination in kwargs is compared against the object, so
multiple keyword arguments can be passed in to constrain the filtering.
"""
@wraps(func)
def wrapper(class_instance, **kwargs):
results = func(class_instance)
if not kwargs:
return results
else:
return filter(
# Find the objects who have an equal number of matching attr/value
# combinations as `len(kwargs)`
lambda obj: len(kwargs) == len(
filter(
lambda boolean: boolean,
[obj.get(attr_name) == value
for attr_name, value in kwargs.iteritems()]
)
),
results
)
return wrapper
|
Python
| 0
|
@@ -276,16 +276,87 @@
a%5Bkey%5D%0A%0A
+ def __setitem__(self, key, value):%0A self.data%5Bkey%5D = value%0A%0A
def
@@ -540,16 +540,370 @@
wargs)%0A%0A
+ def __repr__(self):%0A if 'name' in self.data:%0A return %22%3C%25s: %25s%3E%22 %25 (type(self).__name__, self.data%5B'name'%5D)%0A%0A if ('number' in self.data) and ('summary' in self.data):%0A return %22%3C%25s: #%25s - %25s%3E%22 %25 (type(self).__name__, self.data%5B'number'%5D, self.data%5B'summary'%5D)%0A%0A return super(AssemblaObject, self).__repr__()%0A%0A
%0Adef ass
|
d9db8ff917cabe6bdddbd4bb52cd7965d3e91d50
|
Fix indentation of last commit
|
pyglet/media/drivers/silent.py
|
pyglet/media/drivers/silent.py
|
#!/usr/bin/env python
'''
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id$'
import time
from pyglet.media import AbstractAudioPlayer, AbstractAudioDriver, \
MediaThread, MediaEvent
import pyglet
_debug = pyglet.options['debug_media']
class SilentAudioPacket(object):
def __init__(self, timestamp, duration):
self.timestamp = timestamp
self.duration = duration
def consume(self, dt):
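        # Advance the packet's start timestamp by dt seconds and shrink its
        # remaining duration accordingly, i.e. dt seconds of silence "played".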
self.timestamp += dt
self.duration -= dt
class SilentAudioPlayerPacketConsumer(AbstractAudioPlayer):
# When playing video, length of audio (in secs) to buffer ahead.
_buffer_time = 0.4
# Minimum number of bytes to request from source
_min_update_bytes = 1024
# Maximum sleep time
_sleep_time = 0.2
def __init__(self, source_group, player):
super(SilentAudioPlayerPacketConsumer, self).__init__(source_group, player)
# System time of first timestamp
self._timestamp_time = None
# List of buffered SilentAudioPacket
self._packets = []
self._packets_duration = 0
self._events = []
# Actual play state.
self._playing = False
# TODO Be nice to avoid creating this thread if user doesn't care
# about EOS events and there's no video format.
# NOTE Use thread.condition as lock for all instance vars used by worker
self._thread = MediaThread(target=self._worker_func)
if source_group.audio_format:
self._thread.start()
def delete(self):
if _debug:
print 'SilentAudioPlayer.delete'
self._thread.stop()
def play(self):
if _debug:
print 'SilentAudioPlayer.play'
self._thread.condition.acquire()
if not self._playing:
self._playing = True
self._timestamp_time = time.time()
self._thread.condition.notify()
self._thread.condition.release()
def stop(self):
if _debug:
print 'SilentAudioPlayer.stop'
self._thread.condition.acquire()
if self._playing:
timestamp = self.get_time()
if self._packets:
packet = self._packets[0]
self._packets_duration -= timestamp - packet.timestamp
packet.consume(timestamp - packet.timestamp)
self._playing = False
self._thread.condition.release()
def clear(self):
if _debug:
print 'SilentAudioPlayer.clear'
self._thread.condition.acquire()
del self._packets[:]
self._packets_duration = 0
del self._events[:]
self._thread.condition.release()
def get_time(self):
if _debug:
print 'SilentAudioPlayer.get_time()'
self._thread.condition.acquire()
packets = self._packets
if self._playing:
# Consume timestamps
result = None
offset = time.time() - self._timestamp_time
while packets:
packet = packets[0]
if offset > packet.duration:
del packets[0]
self._timestamp_time += packet.duration
offset -= packet.duration
self._packets_duration -= packet.duration
else:
packet.consume(offset)
self._packets_duration -= offset
self._timestamp_time += offset
result = packet.timestamp
break
else:
# Paused
if packets:
result = packets[0].timestamp
else:
result = None
self._thread.condition.release()
if _debug:
print 'SilentAudioPlayer.get_time() -> ', result
return result
# Worker func that consumes audio data and dispatches events
def _worker_func(self):
thread = self._thread
#buffered_time = 0
eos = False
events = self._events
while True:
thread.condition.acquire()
if thread.stopped or (eos and not events):
thread.condition.release()
break
# Use up "buffered" audio based on amount of time passed.
timestamp = self.get_time()
if _debug:
print 'timestamp: %r' % timestamp
# Dispatch events
while events and timestamp is not None:
if (events[0].timestamp is None or
events[0].timestamp <= timestamp):
events[0]._sync_dispatch_to_player(self.player)
del events[0]
# Calculate how much data to request from source
secs = self._buffer_time - self._packets_duration
bytes = secs * self.source_group.audio_format.bytes_per_second
if _debug:
print 'Trying to buffer %d bytes (%r secs)' % (bytes, secs)
while bytes > self._min_update_bytes and not eos:
# Pull audio data from source
audio_data = self.source_group.get_audio_data(int(bytes))
if not audio_data and not eos:
events.append(MediaEvent(timestamp, 'on_eos'))
events.append(MediaEvent(timestamp, 'on_source_group_eos'))
eos = True
break
# Pretend to buffer audio data, collect events.
if self._playing and not self._packets:
self._timestamp_time = time.time()
self._packets.append(SilentAudioPacket(audio_data.timestamp,
audio_data.duration))
self._packets_duration += audio_data.duration
for event in audio_data.events:
event.timestamp += audio_data.timestamp
events.append(event)
events.extend(audio_data.events)
bytes -= audio_data.length
sleep_time = self._sleep_time
if not self._playing:
sleep_time = None
elif events and events[0].timestamp and timestamp:
sleep_time = min(sleep_time, events[0].timestamp - timestamp)
if _debug:
print 'SilentAudioPlayer(Worker).sleep', sleep_time
thread.sleep(sleep_time)
thread.condition.release()
class SilentTimeAudioPlayer(AbstractAudioPlayer):
# Note that when using this player (automatic if playing back video with
    # unsupported audio codec) no events are dispatched (because they are
    # normally encoded in the audio packet) -- so no EOS events are delivered.
# This is a design flaw.
#
# Also, seeking is broken because the timestamps aren't synchronized with
# the source group.
_time = 0.0
_systime = None
def play(self):
self._systime = time.time()
def stop(self):
self._time = self.get_time()
self._systime = None
def delete(self):
pass
def clear(self):
pass
def get_time(self):
if self._systime is None:
return self._time
else:
return time.time() - self._systime + self._time
class SilentAudioDriver(AbstractAudioDriver):
def create_audio_player(self, source_group, player):
if source_group.audio_format:
return SilentAudioPlayerPacketConsumer(source_group, player)
else:
return SilentTimeAudioPlayer(source_group, player)
def create_audio_driver():
return SilentAudioDriver()
|
Python
| 0.000182
|
@@ -4694,24 +4694,28 @@
+
del events%5B0
|
e0d5c5b7b27d47f6a771634a0f36e9b30234f0a6
|
add more duplicate code
|
pygmsh/opencascade/geometry.py
|
pygmsh/opencascade/geometry.py
|
import gmsh
from .. import common
from .ball import Ball
from .boolean import Boolean
from .box import Box
from .cone import Cone
from .cylinder import Cylinder
from .disk import Disk
from .rectangle import Rectangle
from .torus import Torus
from .wedge import Wedge
class Geometry(common.CommonGeometry):
def __init__(self):
super().__init__(gmsh.model.occ)
self._AFTER_SYNC_QUEUE = []
self._EMBED_QUEUE = []
self._COMPOUND_ENTITIES = []
self._RECOMBINE_ENTITIES = []
self._TRANSFINITE_CURVE_QUEUE = []
self._TRANSFINITE_SURFACE_QUEUE = []
self._SIZE_QUEUE = []
def __exit__(self, *a):
# TODO remove once gmsh 4.7.0 is out
# <https://gitlab.onelab.info/gmsh/gmsh/-/issues/1001>
gmsh.option.setNumber("Mesh.CharacteristicLengthMin", 0.0)
gmsh.option.setNumber("Mesh.CharacteristicLengthMax", 1.0e22)
gmsh.finalize()
@property
def characteristic_length_min(self):
return gmsh.option.getNumber("Mesh.CharacteristicLengthMin")
@property
def characteristic_length_max(self):
return gmsh.option.getNumber("Mesh.CharacteristicLengthMax")
@characteristic_length_min.setter
def characteristic_length_min(self, val):
gmsh.option.setNumber("Mesh.CharacteristicLengthMin", val)
@characteristic_length_max.setter
def characteristic_length_max(self, val):
gmsh.option.setNumber("Mesh.CharacteristicLengthMax", val)
def add_rectangle(self, *args, mesh_size=None, **kwargs):
entity = Rectangle(*args, **kwargs)
if mesh_size is not None:
self._SIZE_QUEUE.append((entity, mesh_size))
return entity
def add_disk(self, *args, mesh_size=None, **kwargs):
entity = Disk(*args, **kwargs)
if mesh_size is not None:
self._SIZE_QUEUE.append((entity, mesh_size))
return entity
def add_ball(self, *args, mesh_size=None, **kwargs):
obj = Ball(*args, **kwargs)
if mesh_size is not None:
self._SIZE_QUEUE.append((obj, mesh_size))
return obj
def add_box(self, *args, mesh_size=None, **kwargs):
box = Box(*args, **kwargs)
if mesh_size is not None:
self._SIZE_QUEUE.append((box, mesh_size))
return box
def add_cone(self, *args, mesh_size=None, **kwargs):
cone = Cone(*args, **kwargs)
if mesh_size is not None:
self._SIZE_QUEUE.append((cone, mesh_size))
return cone
def add_cylinder(self, *args, mesh_size=None, **kwargs):
cyl = Cylinder(*args, **kwargs)
if mesh_size is not None:
self._SIZE_QUEUE.append((cyl, mesh_size))
return cyl
def add_ellipsoid(self, center, radii, mesh_size=None):
obj = Ball(center, 1.0)
self.dilate(obj, center, radii)
if mesh_size is not None:
self._SIZE_QUEUE.append((obj, mesh_size))
return obj
def add_torus(self, *args, mesh_size=None, **kwargs):
obj = Torus(*args, **kwargs)
if mesh_size is not None:
self._SIZE_QUEUE.append((obj, mesh_size))
return obj
def add_wedge(self, *args, mesh_size=None, **kwargs):
obj = Wedge(*args, **kwargs)
if mesh_size is not None:
self._SIZE_QUEUE.append((obj, mesh_size))
return obj
def boolean_intersection(self, entities):
"""Boolean intersection, see
https://gmsh.info/doc/texinfo/gmsh.html#Boolean-operations input_entity
and tool_entity are called object and tool in gmsh documentation.
"""
ent = entities[0].dim_tags
# form subsequent intersections
# https://gitlab.onelab.info/gmsh/gmsh/-/issues/999
for e in entities[1:]:
out, _ = gmsh.model.occ.intersect(
[ent],
[e.dim_tags],
removeObject=True,
removeTool=True,
)
assert all(out[0] == item for item in out)
ent = out[0]
return Boolean([ent], "Intersection")
def boolean_union(self, entities):
"""Boolean union, see
https://gmsh.info/doc/texinfo/gmsh.html#Boolean-operations input_entity
and tool_entity are called object and tool in gmsh documentation.
"""
out, _ = gmsh.model.occ.fuse(
[entities[0].dim_tags],
[e.dim_tags for e in entities[1:]],
removeObject=True,
removeTool=True,
)
return Boolean(out, "Union")
def boolean_difference(self, d0, d1, delete_first=True, delete_other=True):
"""Boolean difference, see
https://gmsh.info/doc/texinfo/gmsh.html#Boolean-operations input_entity
and tool_entity are called object and tool in gmsh documentation.
"""
out, _ = gmsh.model.occ.cut(
d0.dim_tags,
d1.dim_tags,
removeObject=delete_first,
removeTool=delete_other,
)
return Boolean(out, "Difference")
def boolean_fragments(self, d0, d1):
"""Boolean fragments, see
https://gmsh.info/doc/texinfo/gmsh.html#Boolean-operations input_entity
and tool_entity are called object and tool in gmsh documentation.
"""
out, _ = gmsh.model.occ.fragment(d0.dim_tags, d1.dim_tags)
return Boolean(out, "Fragments")
def add_physical(self, entities, label=None):
if not isinstance(entities, list):
entities = [entities]
dim = entities[0].dimension
for e in entities:
assert e.dimension == dim
tag = gmsh.model.addPhysicalGroup(dim, [e._ID for e in entities])
if label is not None:
assert isinstance(label, str)
gmsh.model.setPhysicalName(dim, tag, label)
def add_polygon(self, X, mesh_size=None, holes=None, make_surface=True):
class Polygon:
def __init__(self, points, lines, curve_loop, surface, mesh_size=None):
self.points = points
self.lines = lines
self.num_edges = len(lines)
self.curve_loop = curve_loop
self.surface = surface
self.mesh_size = mesh_size
if surface is not None:
self._ID = self.surface._ID
self.dimension = 2
self.dim_tags = [(2, surface)]
if holes is None:
holes = []
else:
assert make_surface
if isinstance(mesh_size, list):
assert len(X) == len(mesh_size)
else:
mesh_size = len(X) * [mesh_size]
# Create points.
p = [self.add_point(x, mesh_size=l) for x, l in zip(X, mesh_size)]
# Create lines
lines = [self.add_line(p[k], p[k + 1]) for k in range(len(p) - 1)]
lines.append(self.add_line(p[-1], p[0]))
ll = self.add_curve_loop(lines)
surface = self.add_plane_surface(ll, holes) if make_surface else None
return Polygon(p, lines, ll, surface, mesh_size=mesh_size)
|
Python
| 0.000001
|
@@ -5409,441 +5409,8 @@
%22)%0A%0A
- def add_physical(self, entities, label=None):%0A if not isinstance(entities, list):%0A entities = %5Bentities%5D%0A%0A dim = entities%5B0%5D.dimension%0A for e in entities:%0A assert e.dimension == dim%0A%0A tag = gmsh.model.addPhysicalGroup(dim, %5Be._ID for e in entities%5D)%0A if label is not None:%0A assert isinstance(label, str)%0A gmsh.model.setPhysicalName(dim, tag, label)%0A%0A
|
d013f50b92e968258b14b67ebea9e70b4c35dcb0
|
Fix completion
|
pylibs/ropemode/environment.py
|
pylibs/ropemode/environment.py
|
class Environment(object):
def ask(self, prompt, default=None, starting=None):
pass
def ask_values(self, prompt, values, default=None, starting=None):
pass
def ask_directory(self, prompt, default=None, starting=None):
pass
def ask_completion(self, prompt, values, starting=None):
pass
def message(self, message):
pass
def yes_or_no(self, prompt):
pass
def y_or_n(self, prompt):
pass
def get(self, name, default=None):
pass
def get_offset(self):
pass
def get_text(self):
pass
def get_region(self):
pass
def filename(self):
pass
def is_modified(self):
pass
def goto_line(self, lineno):
pass
def insert_line(self, line, lineno):
pass
def insert(self, text):
pass
def delete(self, start, end):
pass
def filenames(self):
pass
def save_files(self, filenames):
pass
def reload_files(self, filenames, moves=None):
pass
def find_file(self, filename, readonly=False, other=False):
pass
def create_progress(self, name):
pass
def current_word(self):
pass
def push_mark(self):
pass
def pop_mark(self):
pass
def prefix_value(self, prefix):
pass
def show_occurrences(self, locations):
pass
def show_doc(self, docs, altview=False):
pass
def preview_changes(self, diffs):
pass
def local_command(self, name, callback, key=None, prefix=False):
pass
def global_command(self, name, callback, key=None, prefix=False):
pass
def add_hook(self, name, callback, hook):
pass
@staticmethod
def _completion_text(proposal):
return proposal.name
def _completion_data(self, proposal):
return self._completion_text(proposal)
|
Python
| 0.00144
|
@@ -1840,16 +1840,34 @@
sal.name
+.partition(':')%5B0%5D
%0A%0A de
|
1563c35f10ac4419d6c732e0e25c3d2d62fcd3fd
|
send all available output to client if multiple lines are available
|
hey/server.py
|
hey/server.py
|
from twisted.internet import protocol, reactor
from twisted.internet.endpoints import TCP4ServerEndpoint
try:
from Queue import Queue, Empty
except ImportError:
# python 3.x
from queue import Queue, Empty
class HeyQueueFactory(protocol.Factory, object):
def __init__(self, outQueue, *args, **kwargs):
self.outQueue = outQueue
super(HeyQueueFactory, self).__init__(*args, **kwargs)
def buildProtocol(self, addr):
return HeyQueueProtocol(self.outQueue)
class HeyQueueProtocol(protocol.Protocol, object):
def __init__(self, outQueue, *args, **kwargs):
self.outQueue = outQueue
super(HeyQueueProtocol, self).__init__(*args, **kwargs)
def dataReceived(self, data):
if data == 'whatsup':
self.whatsup()
if data == 'stopit':
self.stopit()
def stopit(self):
self.transport.write('stopping server')
reactor.callLater(1, reactor.stop)
def whatsup(self):
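        # Pops a single buffered output chunk from outQueue;
        # Queue.get_nowait() raises Empty when the child process has
        # produced no unread output.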
try:
output = self.outQueue.get_nowait()
except Empty:
output = "nothing to report, sir"
self.transport.write(output)
class HeyProcessProtocol(protocol.ProcessProtocol, object):
def __init__(self, outQueue, *args, **kwargs):
self.outQueue = outQueue
self.status = 'open'
super(HeyProcessProtocol, self).__init__(*args, **kwargs)
def outReceived(self, data):
self.outQueue.put(data)
def processExited(self, reason):
self.status = 'closed'
def processEnded(self, reason):
self.status = 'closed'
class HeyServer(object):
def __init__(self, command, port):
outQueue = Queue()
self.proc = HeyProcessProtocol(outQueue)
reactor.spawnProcess(self.proc, command[0], command, usePTY=True)
endpoint = TCP4ServerEndpoint(reactor, port)
endpoint.listen(HeyQueueFactory(outQueue))
def run(self):
reactor.run()
def start(command):
host, port = "localhost", 9999
server = HeyServer(command, port)
server.run()
|
Python
| 0
|
@@ -970,32 +970,76 @@
whatsup(self):%0A
+ output = %22%22%0A while True:%0A
try:%0A
@@ -1047,23 +1047,28 @@
+
output
++
= self.o
@@ -1096,16 +1096,20 @@
+
+
except E
@@ -1114,16 +1114,57 @@
Empty:%0A
+ if output == %22%22:%0A
@@ -1200,16 +1200,38 @@
rt, sir%22
+%0A break
%0A%0A
|
bd8c4ce81340901cd7322f84cd1911581ce910dd
|
Use OptionParser for win32 build script.
|
tools/win32build/build.py
|
tools/win32build/build.py
|
"""Python script to build windows binaries to be fed to the "superpack".
The script is pretty dumb: it assumes python executables are installed the
standard way, and the location for blas/lapack/atlas is harcoded."""
# TODO:
# - integrate the x86analysis script to check built binaries
# - make the config configurable with a file
import sys
import subprocess
import os
import shutil
PYEXECS = {"2.5" : "C:\python25\python.exe",
"2.4" : "C:\python24\python2.4.exe"}
_SSE3_CFG = r"""[atlas]
library_dirs = C:\local\lib\yop\sse3"""
_SSE2_CFG = r"""[atlas]
library_dirs = C:\local\lib\yop\sse2"""
_NOSSE_CFG = r"""[DEFAULT]
library_dirs = C:\local\lib\yop\nosse"""
SITECFG = {"sse2" : _SSE2_CFG, "sse3" : _SSE3_CFG, "nosse" : _NOSSE_CFG}
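# write_site_cfg() below dumps one of these snippets into site.cfg so that
# numpy's build links against the ATLAS libraries matching the chosen arch.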
def get_python_exec(ver):
"""Return the executable of python for the given version."""
# XXX Check that the file actually exists
try:
return PYEXECS[ver]
except KeyError:
raise ValueError("Version %s not supported/recognized" % ver)
def get_clean():
if os.path.exists("build"):
shutil.rmtree("build")
if os.path.exists("dist"):
shutil.rmtree("dist")
def write_site_cfg(arch):
if os.path.exists("site.cfg"):
os.remove("site.cfg")
f = open("site.cfg", 'w')
f.writelines(SITECFG[arch])
f.close()
def build(arch, pyver):
print "Building numpy binary for python %s, arch is %s" % (get_python_exec(pyver), arch)
get_clean()
write_site_cfg(arch)
cmd = "%s setup.py build -c mingw32 bdist_wininst" % get_python_exec(pyver)
build_log = "build-%s-%s.log" % (arch, pyver)
f = open(build_log, 'w')
try:
try:
subprocess.check_call(cmd, shell = True, stderr = subprocess.STDOUT, stdout = f)
finally:
f.close()
except subprocess.CalledProcessError, e:
msg = """
There was an error while executing the following command:
%s
Error was : %s
Look at the build log (%s).""" % (cmd, str(e), build_log)
raise Exception(msg)
move_binary(arch, pyver)
def move_binary(arch, pyver):
if not os.path.exists("binaries"):
os.makedirs("binaries")
shutil.move(os.path.join('dist', get_windist_exec(pyver)),
os.path.join("binaries", get_binary_name(arch)))
def get_numpy_version():
import __builtin__
__builtin__.__NUMPY_SETUP__ = True
from numpy.version import version
return version
def get_binary_name(arch):
return "numpy-%s-%s.exe" % (get_numpy_version(), arch)
def get_windist_exec(pyver):
"""Return the name of the installer built by wininst command."""
    # Yeah, the name logic is hardcoded in distutils. We have to reproduce it
# here
name = "numpy-%s.win32-py%s.exe" % (get_numpy_version(), pyver)
return name
USAGE = """build.py ARCH PYTHON_VERSION
Example: build.py sse2 2.4."""
if __name__ == '__main__':
if len(sys.argv) < 3:
raise ValueError(USAGE)
sys.exit(-1)
arch = sys.argv[1]
pyver = sys.argv[2]
#build(arch, pyver)
for arch in SITECFG.keys():
build(arch, pyver)
|
Python
| 0
|
@@ -392,16 +392,77 @@
shutil%0D
+%0Afrom os.path import join as pjoin, split as psplit, dirname%0D
%0A%0D%0APYEXE
@@ -2949,195 +2949,399 @@
%0D%0A%0D%0A
-USAGE = %22%22%22build.py ARCH PYTHON_VERSION%0D%0A%0D%0AExample: build.py sse2 2.4.%22%22%22%0D%0A%0D%0Aif __name__ == '__main__':%0D%0A if len(sys.argv) %3C 3:%0D%0A raise ValueError(USAGE)%0D%0A sys.exit(-1)%0D%0A
+if __name__ == '__main__':%0D%0A from optparse import OptionParser%0D%0A parser = OptionParser()%0D%0A parser.add_option(%22-a%22, %22--arch%22, dest=%22arch%22, %0D%0A help = %22Architecture to build (sse2, sse3, nosse, etc...)%22)%0D%0A parser.add_option(%22-p%22, %22--pyver%22, dest=%22pyver%22,%0D%0A help = %22Python version (2.4, 2.5, etc...)%22)%0D%0A%0D%0A opts, args = parser.parse_args()
%0D%0A
@@ -3353,19 +3353,17 @@
h =
-sy
+opt
s.ar
-gv%5B1%5D
+ch
%0D%0A
@@ -3376,26 +3376,110 @@
r =
-sys.argv%5B2%5D
+opts.pyver%0D%0A%0D%0A if not arch:%0D%0A arch = %22nosse%22%0D%0A if not pyver:%0D%0A pyver = %222.5%22
%0D%0A
-#
buil
@@ -3498,16 +3498,17 @@
r)%0D%0A
+#
for arch
@@ -3528,24 +3528,25 @@
eys():%0D%0A
+#
build(ar
|
0dce50c77963ef0d2cdb168f85c2588d62f43220
|
Remove duplicate import
|
yunity/stores/models.py
|
yunity/stores/models.py
|
from django.db import models
from config import settings
from yunity.base.base_models import BaseModel
from django.db import models
class PickupDate(BaseModel):
date = models.DateTimeField()
collectors = models.ManyToManyField(settings.AUTH_USER_MODEL)
store = models.ForeignKey('stores.store', related_name='pickupdates', on_delete=models.CASCADE)
max_collectors = models.IntegerField(null=True)
class Store(BaseModel):
group = models.ForeignKey('groups.Group', on_delete=models.CASCADE)
name = models.TextField()
description = models.TextField(null=True)
|
Python
| 0.000008
|
@@ -1,33 +1,4 @@
-from django.db import models%0A
from
|
f55182fc2b3e05b154e82ae904cc1a6079b1c4a0
|
Add (empty) unit tests for the OmicsUnitType model
|
apps/core/tests/test_models.py
|
apps/core/tests/test_models.py
|
import datetime
from django.core.exceptions import ValidationError
from django.db import IntegrityError, transaction
from django.test import TestCase
from apps.data.factories import EntryFactory
from .. import models
class SpeciesTestCase(TestCase):
def test_can_create_species(self):
name = 'Saccharomyces cerevisiae'
reference = EntryFactory()
description = 'lorem ipsum'
qs = models.Species.objects.all()
self.assertEqual(qs.count(), 0)
species = models.Species.objects.create(
name=name,
reference=reference,
description=description,
)
self.assertEqual(species.name, name)
self.assertEqual(species.reference.id, reference.id)
self.assertEqual(species.description, description)
self.assertEqual(qs.count(), 1)
def test_can_create_species_without_description(self):
name = 'Saccharomyces cerevisiae'
reference = EntryFactory()
qs = models.Species.objects.all()
self.assertEqual(qs.count(), 0)
species = models.Species.objects.create(
name=name,
reference=reference,
)
self.assertEqual(species.name, name)
self.assertEqual(species.reference.id, reference.id)
self.assertEqual(qs.count(), 1)
def test_model_representation(self):
name = 'Saccharomyces cerevisiae'
reference = EntryFactory()
species = models.Species.objects.create(
name=name,
reference=reference,
)
self.assertEqual(str(species), name)
class StrainTestCase(TestCase):
def setUp(self):
self.species_name = 'Saccharomyces cerevisiae'
self.species_reference = EntryFactory()
self.species_description = 'lorem ipsum'
self.species = models.Species.objects.create(
name=self.species_name,
reference=self.species_reference,
description=self.species_description
)
def test_can_create_strain(self):
name = 'S288c / XJ24-24a'
description = 'lorem ipsum'
qs = models.Strain.objects.all()
self.assertEqual(qs.count(), 0)
strain = models.Strain.objects.create(
name=name,
description=description,
species=self.species
)
self.assertEqual(strain.name, name)
self.assertEqual(strain.description, description)
self.assertEqual(strain.species.id, self.species.id)
self.assertEqual(qs.count(), 1)
def test_model_representation(self):
name = 'S288c / XJ24-24a'
strain = models.Strain.objects.create(
name=name,
species=self.species
)
self.assertEqual(str(strain), name)
def test_cannot_create_two_strains_with_identical_names_for_species(self):
name = 'S288c / XJ24-24a'
qs = models.Strain.objects.all()
self.assertEqual(qs.count(), 0)
models.Strain.objects.create(
name=name,
species=self.species
)
with self.assertRaises(IntegrityError):
with transaction.atomic():
models.Strain.objects.create(
name=name,
species=self.species
)
self.assertEqual(qs.count(), 1)
|
Python
| 0
|
@@ -3343,28 +3343,367 @@
.assertEqual(qs.count(), 1)%0A
+%0A%0Aclass OmicsUnitTypeTestCase(TestCase):%0A%0A def test_can_create_omics_unit_type(self):%0A # TODO%0A # raise NotImplementedError('You have work to do @thomasdenecker!')%0A pass%0A%0A def test_model_representation(self):%0A # TODO%0A # raise NotImplementedError('You have work to do @thomasdenecker!')%0A pass%0A
|
bc63b8f19742277ad96c2427405f1430687430d1
|
expire JWT in 1 day
|
hbapi/settings/heroku.py
|
hbapi/settings/heroku.py
|
import dj_database_url
import os
from .base import *
DEBUG = False
TEMPLATE_DEBUG = DEBUG
ADMINS = [('znotdead', 'zhirafchik@gmail.com')]
DATABASES['default'] = dj_database_url.config()
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATIC_URL = '/static/'
ALLOWED_HOSTS = [".herokuapp.com", ]
REDIS_POOL = redis.ConnectionPool.from_url(os.environ.get('REDISCLOUD_URL'))
REDIS_CONN = redis.Redis(connection_pool=REDIS_POOL)
if not DEBUG:
SECURE_SSL_REDIRECT = True
|
Python
| 0.000001
|
@@ -548,8 +548,80 @@
= True%0A
+%0AJWT_AUTH = %7B%0A 'JWT_EXPIRATION_DELTA': datetime.timedelta(days=1),%0A%7D%0A
|
0d7cab10d66fb13d5ea9ddd4fd048ff29def5ba2
|
Fix F811 redefinition of unused '_ast_py3'
|
astroid/_ast.py
|
astroid/_ast.py
|
import ast
import sys
from collections import namedtuple
from functools import partial
from typing import Optional
import astroid
_ast_py3 = None
try:
import typed_ast.ast3 as _ast_py3
except ImportError:
pass
PY38 = sys.version_info[:2] >= (3, 8)
if PY38:
# On Python 3.8, typed_ast was merged back into `ast`
_ast_py3 = ast
FunctionType = namedtuple("FunctionType", ["argtypes", "returns"])
class ParserModule(
namedtuple(
"ParserModule",
[
"module",
"unary_op_classes",
"cmp_op_classes",
"bool_op_classes",
"bin_op_classes",
"context_classes",
],
)
):
def parse(self, string: str, type_comments=True):
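        # typed_ast.ast3 (or the stdlib ast on 3.8+, where typed_ast was merged
        # back) accepts parse() keyword arguments that the plain stdlib parser
        # on older versions does not, hence the branching below.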
if self.module is _ast_py3:
if PY38:
parse_func = partial(self.module.parse, type_comments=type_comments)
else:
parse_func = partial(
self.module.parse, feature_version=sys.version_info.minor
)
else:
parse_func = self.module.parse
return parse_func(string)
def parse_function_type_comment(type_comment: str) -> Optional[FunctionType]:
"""Given a correct type comment, obtain a FunctionType object"""
if _ast_py3 is None:
return None
func_type = _ast_py3.parse(type_comment, "<type_comment>", "func_type")
return FunctionType(argtypes=func_type.argtypes, returns=func_type.returns)
def get_parser_module(type_comments=True) -> ParserModule:
if not type_comments:
parser_module = ast
else:
parser_module = _ast_py3
parser_module = parser_module or ast
unary_op_classes = _unary_operators_from_module(parser_module)
cmp_op_classes = _compare_operators_from_module(parser_module)
bool_op_classes = _bool_operators_from_module(parser_module)
bin_op_classes = _binary_operators_from_module(parser_module)
context_classes = _contexts_from_module(parser_module)
return ParserModule(
parser_module,
unary_op_classes,
cmp_op_classes,
bool_op_classes,
bin_op_classes,
context_classes,
)
def _unary_operators_from_module(module):
return {module.UAdd: "+", module.USub: "-", module.Not: "not", module.Invert: "~"}
def _binary_operators_from_module(module):
binary_operators = {
module.Add: "+",
module.BitAnd: "&",
module.BitOr: "|",
module.BitXor: "^",
module.Div: "/",
module.FloorDiv: "//",
module.MatMult: "@",
module.Mod: "%",
module.Mult: "*",
module.Pow: "**",
module.Sub: "-",
module.LShift: "<<",
module.RShift: ">>",
}
return binary_operators
def _bool_operators_from_module(module):
return {module.And: "and", module.Or: "or"}
def _compare_operators_from_module(module):
return {
module.Eq: "==",
module.Gt: ">",
module.GtE: ">=",
module.In: "in",
module.Is: "is",
module.IsNot: "is not",
module.Lt: "<",
module.LtE: "<=",
module.NotEq: "!=",
module.NotIn: "not in",
}
def _contexts_from_module(module):
return {
module.Load: astroid.Load,
module.Store: astroid.Store,
module.Del: astroid.Del,
module.Param: astroid.Store,
}
|
Python
| 0.000008
|
@@ -129,24 +129,8 @@
id%0A%0A
-_ast_py3 = None%0A
try:
@@ -196,12 +196,23 @@
-pass
+_ast_py3 = None
%0A%0A%0AP
|
cee2368dac250ef9655a3df9af3188b8abd095dc
|
Disable slow test. Not intended to run.
|
spec/puzzle/examples/gph/a_basic_puzzle_spec.py
|
spec/puzzle/examples/gph/a_basic_puzzle_spec.py
|
from data import warehouse
from puzzle.examples.gph import a_basic_puzzle
from puzzle.problems import number_problem
from puzzle.puzzlepedia import prod_config
from spec.mamba import *
with description('a_basic_puzzle'):
with before.all:
warehouse.save()
prod_config.init()
self.subject = a_basic_puzzle.get()
with after.all:
prod_config.reset()
warehouse.restore()
with it('parses'):
problems = self.subject.problems()
expect(problems).to(have_len(len(a_basic_puzzle.SOURCE.split('\n')) - 2))
for problem in problems:
expect(problem).to(be_a(number_problem.NumberProblem))
with it('solves first problem'):
expect(self.subject.problem(0).solution).not_to(be_empty)
with it('gets some solutions right'):
solutions = self.subject.solutions()
matches = []
expect(solutions).to(equal([
'decimal +25',
'octal +12',
'sept e nary +1',
'binary +1',
None,
'qui nary +9',
None,
None,
'quaternary +12',
None
]))
|
Python
| 0
|
@@ -184,16 +184,17 @@
*%0A%0Awith
+_
descript
|
fccfdbcdae7af60ae0773ad03f6a52f367e6cd48
|
Add switch to disable datastore writes
|
hn-archive.py
|
hn-archive.py
|
import functools
import logging
import os
from google.appengine.api import memcache
from google.appengine.api import taskqueue
from google.appengine.ext import deferred
from google.appengine.ext import ndb
import jinja2
import webapp2
from webapp2_extras import routes
import models
IS_LOCAL_DEV_SERVER = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')
JINJA_ENVIRONMENT = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.join(os.path.dirname(__file__), 'templates')),
autoescape=True)
JINJA_ENVIRONMENT.filters['add_thousands_separator'] = lambda value: '{0:,}'.format(value)
TASK_RUNNING_KEY = 'fetcher_running'
FETCH_BATCH_SIZE = 10
def guarded_internal_callback(f):
"""
Checks that the method invocation indeed came from the GAE cron service as described
in https://cloud.google.com/appengine/docs/python/config/cron#Python_app_yaml_Securing_URLs_for_cron
"""
@functools.wraps(f)
def wrapper(self, *args, **kwargs):
assert self.request.headers['X-Appengine-Cron'] == 'true'
assert self.request.remote_addr == '0.1.0.1'
return f(self, *args, **kwargs)
return wrapper
@ndb.tasklet
def fetch_raw(url):
result = yield ndb.get_context().urlfetch(
url,
headers={
'User-Agent': "Grey Panther's Hacker News Archiver - https://hn-archive.appspot.com/ "
})
assert not result.content_was_truncated
raise ndb.Return(result)
@ndb.tasklet
def fetch(url):
result = yield fetch_raw(url)
assert result.status_code == 200
raise ndb.Return(result.content)
@ndb.toplevel
def fetch_items_batch():
memcache.set(TASK_RUNNING_KEY, 1, time=600)
last_retrieved_id, max_item_id = yield [models.LastRetrievedId.get(default=0), models.MaxItemId.get()]
batch_start_id = last_retrieved_id + 1
batch_end_id = last_retrieved_id + FETCH_BATCH_SIZE
batch_end_id = min(batch_end_id, max_item_id)
if batch_end_id < batch_start_id:
logging.info("Pausing since all %d items have been retrieved" % max_item_id)
raise ndb.Return()
logging.info("Retrieving items %d..%d (inclusive)", batch_start_id, batch_end_id)
ids_to_retrieve = range(batch_start_id, batch_end_id + 1)
fetch_results = yield [
fetch_raw('https://hacker-news.firebaseio.com/v0/item/%d.json' % i)
for i in ids_to_retrieve
]
inaccessible_entries = len([r for r in fetch_results if r.status_code != 200])
if inaccessible_entries == FETCH_BATCH_SIZE:
logging.error("All %d entries returned an error, pausing for a while" % FETCH_BATCH_SIZE)
raise ndb.Return()
logging.info("Retrieved items, storing them")
futures = []
for i, result in zip(ids_to_retrieve, fetch_results):
if result.status_code == 200:
m = models.HNEntry(id=i, body=result.content)
else:
m = models.InaccessibleHNEntry(id=i, error_code=result.status_code)
futures.append(m.put_async())
yield futures
logging.info("Updating counters")
yield [
models.LastRetrievedId.set(batch_end_id),
models.InaccessibleEntryCount.increment(inaccessible_entries),
]
deferred.defer(fetch_items_batch, _countdown=2, _queue='fetch')
class FetchMaxItemId(webapp2.RequestHandler):
@guarded_internal_callback
@ndb.toplevel
def get(self):
result = yield fetch('https://hacker-news.firebaseio.com/v0/maxitem.json')
assert int(result) > 0
logging.info("MaxItemId: %d" % int(result))
yield models.MaxItemId.set(int(result))
class FetchItemsKickoff(webapp2.RequestHandler):
@guarded_internal_callback
def get(self):
if memcache.get(TASK_RUNNING_KEY) is not None:
return
memcache.set(TASK_RUNNING_KEY, 1, time=600)
deferred.defer(fetch_items_batch, _countdown=1, _queue='fetch')
class Placeholder(webapp2.RequestHandler):
@ndb.toplevel
def get(self):
values = {
'last_retrieved_id': models.LastRetrievedId.get(),
'max_item_id': models.MaxItemId.get(),
'inaccessible_entries': models.InaccessibleEntryCount.get(),
}
retrieved_values = yield values.values()
for k, v in zip(values.keys(), retrieved_values):
values[k] = v
template = JINJA_ENVIRONMENT.get_template('index.html')
self.response.write(template.render(values))
app = webapp2.WSGIApplication([
('/', Placeholder),
], debug=IS_LOCAL_DEV_SERVER)
cron_app = webapp2.WSGIApplication([
routes.PathPrefixRoute('/tasks', [
webapp2.Route('/fetch_max_item_id', FetchMaxItemId),
webapp2.Route('/kickoff_fetch_items', FetchItemsKickoff),
]),
], debug=IS_LOCAL_DEV_SERVER)
|
Python
| 0.000001
|
@@ -279,16 +279,48 @@
models%0A%0A
+ENABLE_DATASTORE_WRITES = True%0A%0A
IS_LOCAL
@@ -1697,24 +1697,135 @@
, time=600)%0A
+ if not ENABLE_DATASTORE_WRITES:%0A logging.info(%22Datastore writes disabled, sleeping%22)%0A return%0A
last_ret
@@ -3497,32 +3497,155 @@
def get(self):%0A
+ if not ENABLE_DATASTORE_WRITES:%0A logging.info(%22Datastore writes disabled, sleeping%22)%0A return%0A
result =
@@ -3920,24 +3920,24 @@
al_callback%0A
-
def get(
@@ -3935,32 +3935,155 @@
def get(self):%0A
+ if not ENABLE_DATASTORE_WRITES:%0A logging.info(%22Datastore writes disabled, sleeping%22)%0A return%0A
if memca
|
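
The hunks above wire a module-level kill switch into each handler: every write path is skipped (with a log line) whenever the constant is False. A minimal, self-contained sketch of that guard pattern — the constant name comes from the hunks, while the plain function and its body are stand-ins for the ndb tasklets:

import logging
logging.basicConfig(level=logging.INFO)

ENABLE_DATASTORE_WRITES = True  # flip to False to make the write paths no-ops

def fetch_items_batch():
    # Guard from the hunks: bail out before any datastore work happens.
    if not ENABLE_DATASTORE_WRITES:
        logging.info("Datastore writes disabled, sleeping")
        return
    logging.info("fetching and storing a batch...")  # real work would go here

fetch_items_batch()
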
fe6ba23a7c27ca7e82b73f17602ae0239db16584
|
Update views.py
|
vio/vio/swagger/views/volume/views.py
|
vio/vio/swagger/views/volume/views.py
|
# Copyright (c) 2017 VMware, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
import json
import logging
from rest_framework import status
from rest_framework.response import Response
from rest_framework.views import APIView
from vio.pub.msapi import extsys
from vio.pub.vim.vimapi.cinder import OperateVolume
from vio.pub.vim.vimapi.glance import OperateImage
from vio.swagger import volume_utils
logger = logging.getLogger(__name__)
class GetDeleteVolumeView(APIView):
def get(self, request, vimid, tenantid, volumeid):
vim_info = extsys.get_vim_by_id(vimid)
volume_op = OperateVolume.OperateVolume(vim_info)
try:
volume = volume_op.get_vim_volume(volumeid)
vim_rsp = volume_utils.vim_formatter(vim_info, tenantid)
rsp = volume_utils.volume_formatter(volume)
rsp.update(vim_rsp)
return Response(data=rsp, status=status.HTTP_200_OK)
except Exception as e:
return Response(data={'error': str(e)},
status=status.HTTP_500_INTERNAL_SERVER_ERROR)
def delete(self, request, vimid, tenantid, volumeid):
vim_info = extsys.get_vim_by_id(vimid)
volume_op = OperateVolume.OperateVolume(vim_info)
try:
volume_op.delete_vim_volume(volumeid)
return Response(status=status.HTTP_204_NO_CONTENT)
except Exception as e:
return Response(data={'error': str(e)},
status=status.HTTP_500_INTERNAL_SERVER_ERROR)
class CreateListVolumeView(APIView):
def get(self, request, vimid, tenantid):
vim_info = extsys.get_vim_by_id(vimid)
query_data = dict(request.query_params)
volume_op = OperateVolume.OperateVolume(vim_info)
try:
volumes = volume_op.get_vim_volumes(**query_data)
rsp = {}
rsp['volumes'] = []
vim_rsp = volume_utils.vim_formatter(vim_info, tenantid)
for volume in volumes:
volume_info = volume_op.get_vim_volume(volume.id)
rsp['volumes'].append(volume_utils.volume_formatter(volume_info))
rsp.update(vim_rsp)
return Response(data=rsp, status=status.HTTP_200_OK)
except Exception as e:
return Response(data={'error': str(e)},
status=status.HTTP_500_INTERNAL_SERVER_ERROR)
def post(self, request, vimid, tenantid):
vim_info = extsys.get_vim_by_id(vimid)
volume_op = OperateVolume.OperateVolume(vim_info)
image_op = OperateImage.OperateImage(vim_info)
try:
volumes_detail = volume_op.get_vim_volumes()
json_body = json.loads(request.body)
vim_rsp = volume_utils.vim_formatter(vim_info, tenantid)
for volume in volumes_detail:
if volume.name == json_body.get('name'):
volume_info = volume_op.get_vim_volume(volume.id)
rsp = volume_utils.volume_formatter(volume_info)
rsp['returnCode'] = 0
rsp.update(vim_rsp)
return Response(data=rsp, status=status.HTTP_200_OK)
imageName = json_body.get('imageName')
image = image_op.find_vim_image(imageName)
json_body['imageName'] = image.id
param = volume_utils.req_body_formatter(json_body)
volume_info = volume_op.create_vim_volume(**param)
rsp = volume_utils.volume_formatter(volume_info)
rsp['returnCode'] = 1
rsp.update(vim_rsp)
return Response(data=rsp, status=status.HTTP_202_ACCEPTED)
except Exception as e:
return Response(data={'error': str(e)},
status=status.HTTP_500_INTERNAL_SERVER_ERROR)
|
Python
| 0
|
@@ -2960,63 +2960,8 @@
nfo)
-%0A image_op = OperateImage.OperateImage(vim_info)
%0A%0A
@@ -3542,16 +3542,75 @@
00_OK)%0A%0A
+ image_op = OperateImage.OperateImage(vim_info)%0A
@@ -4154,32 +4154,32 @@
rror': str(e)%7D,%0A
-
@@ -4215,28 +4215,29 @@
P_500_INTERNAL_SERVER_ERROR)
+%0A
|
e1f808c4c854d92c8a37f0a75ce49a26398ddc52
|
Fix an error in the encode/decode functions for PKCS#1 DSA public keys
|
asyncssh/dsa.py
|
asyncssh/dsa.py
|
# Copyright (c) 2013-2014 by Ron Frederick <ronf@timeheart.net>.
# All rights reserved.
#
# This program and the accompanying materials are made available under
# the terms of the Eclipse Public License v1.0 which accompanies this
# distribution and is available at:
#
# http://www.eclipse.org/legal/epl-v10.html
#
# Contributors:
# Ron Frederick - initial implementation, API, and documentation
"""DSA public key encryption handler based on PyCrypto"""
import random
from Crypto.Hash import SHA
from Crypto.PublicKey import DSA
from .asn1 import *
from .misc import *
from .packet import *
from .public_key import *
class _DSAKey(SSHKey):
"""Handler for DSA public key encryption"""
algorithm = b'ssh-dss'
pem_name = b'DSA'
pkcs8_oid = ObjectIdentifier('1.2.840.10040.4.1')
def __init__(self, key):
self._key = key
def __eq__(self, other):
return isinstance(other, self.__class__) and self._key == other._key
def __hash__(self):
return hash((self._key.p, self._key.q, self._key.g, self._key.y,
self._key.x if hasattr(self._key, 'x') else None))
@classmethod
def decode_pkcs1_private(cls, key_data):
if (isinstance(key_data, tuple) and len(key_data) == 6 and
all_ints(key_data) and key_data[0] == 0):
_, p, q, g, y, x = key_data
return cls(DSA.construct((y, g, p, q, x)))
else:
raise KeyImportError('Invalid DSA private key')
@classmethod
def decode_pkcs1_public(cls, key_data):
if (isinstance(key_data, tuple) and len(key_data) == 5 and
all_ints(key_data) and key_data[0] == 0):
_, p, q, g, y = key_data
return cls(DSA.construct((y, g, p, q)))
else:
raise KeyImportError('Invalid DSA public key')
@classmethod
def decode_pkcs8_private(cls, alg_params, data):
try:
x = der_decode(data)
except ASN1DecodeError:
x = None
if len(alg_params) == 3 and all_ints(alg_params) and isinstance(x, int):
p, q, g = alg_params
y = pow(g, x, p)
return cls(DSA.construct((y, g, p, q, x)))
else:
raise KeyImportError('Invalid DSA private key')
@classmethod
def decode_pkcs8_public(cls, alg_params, data):
try:
y = der_decode(data)
except ASN1DecodeError:
y = None
if len(alg_params) == 3 and all_ints(alg_params) and isinstance(y, int):
p, q, g = alg_params
return cls(DSA.construct((y, g, p, q)))
else:
raise KeyImportError('Invalid DSA public key')
@classmethod
def decode_ssh_public(cls, packet):
try:
p = packet.get_mpint()
q = packet.get_mpint()
g = packet.get_mpint()
y = packet.get_mpint()
packet.check_end()
return cls(DSA.construct((y, g, p, q)))
except DisconnectError:
# Fall through and return a key import error
pass
raise KeyImportError('Invalid DSA public key')
def encode_pkcs1_private(self):
if not self._key.has_private():
raise KeyExportError('Key is not private')
return (0, self._key.p, self._key.q, self._key.g,
self._key.y, self._key.x)
def encode_pkcs1_public(self):
return (0, self._key.p, self._key.q, self._key.g, self._key.y)
def encode_pkcs8_private(self):
if not self._key.has_private():
raise KeyExportError('Key is not private')
return (self._key.p, self._key.q, self._key.g), der_encode(self._key.x)
def encode_pkcs8_public(self):
return (self._key.p, self._key.q, self._key.g), der_encode(self._key.y)
def encode_ssh_public(self):
return b''.join((String(self.algorithm), MPInt(self._key.p),
MPInt(self._key.q), MPInt(self._key.g),
MPInt(self._key.y)))
def sign(self, data):
if not self._key.has_private():
raise ValueError('Private key needed for signing')
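# NB: DSA requires a fresh, secret k for every signature; reusing k
# (or generating it with a biased, non-cryptographic RNG such as the
# random module here) exposes the private key.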
k = random.randrange(2, self._key.q)
r, s = self._key.sign(SHA.new(data).digest(), k)
sig = r.to_bytes(20, 'big') + s.to_bytes(20, 'big')
return b''.join((String(self.algorithm), String(sig)))
def verify(self, data, sig):
sig = SSHPacket(sig)
if sig.get_string() != self.algorithm:
return False
sig = sig.get_string()
return self._key.verify(SHA.new(data).digest(),
(int.from_bytes(sig[:20], 'big'),
int.from_bytes(sig[20:], 'big')))
register_public_key_alg(_DSAKey)
|
Python
| 0.998915
|
@@ -1605,9 +1605,9 @@
==
-5
+4
and
@@ -1637,37 +1637,16 @@
ey_data)
- and key_data%5B0%5D == 0
):%0A
@@ -1644,33 +1644,33 @@
)):%0A
-_
+y
, p, q, g, y = k
@@ -1662,19 +1662,16 @@
p, q, g
-, y
= key_d
@@ -3385,33 +3385,43 @@
return (
-0
+self._key.y
, self._key.p, s
@@ -3443,29 +3443,16 @@
f._key.g
-, self._key.y
)%0A%0A d
|
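
For reference, applying these hunks gives public-key codecs with four integers and y leading the PKCS#1 tuple order (a sketch; the class context is as above):

    @classmethod
    def decode_pkcs1_public(cls, key_data):
        # Four integers, y first; no leading version 0 as in private keys.
        if (isinstance(key_data, tuple) and len(key_data) == 4 and
                all_ints(key_data)):
            y, p, q, g = key_data
            return cls(DSA.construct((y, g, p, q)))
        else:
            raise KeyImportError('Invalid DSA public key')

    def encode_pkcs1_public(self):
        return (self._key.y, self._key.p, self._key.q, self._key.g)
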
3066f8f64f185624fe95a696d7fcef102dc61921
|
add gallery models
|
helena/content/models.py
|
helena/content/models.py
|
from django.db import models
from helpers.service import image_path
class Genres(models.Model):
""" class with genres model """
def get_image_path(instace, filename):
return image_path(instace, filename, directory='genres')
title = models.CharField(verbose_name='Заголовок', max_length=200)
description = models.TextField(verbose_name='Описание')
image = models.ImageField(verbose_name='Изображение пример', upload_to=get_image_path)
def __str__(self):
return self.title
class Meta:
verbose_name = 'жанр'
verbose_name_plural = 'жанры'
|
Python
| 0.000001
|
@@ -74,14 +74,20 @@
ass
-Genres
+ImgWithDescr
(mod
@@ -136,16 +136,38 @@
el %22%22%22%0A%0A
+ directory = None%0A%0A
def
@@ -260,16 +260,22 @@
ory=
-'genres'
+self.directory
)%0A%0A
@@ -342,16 +342,31 @@
ngth=200
+, required=True
)%0A de
@@ -478,15 +478,8 @@
%D0%B5%D0%BD%D0%B8%D0%B5
- %D0%BF%D1%80%D0%B8%D0%BC%D0%B5%D1%80
', u
@@ -501,16 +501,31 @@
age_path
+, required=True
)%0A%0A d
@@ -597,64 +597,630 @@
-verbose_name = '%D0%B6%D0%B0%D0%BD%D1%80'%0A verbose_name_plural = '%D0%B6%D0%B0%D0%BD%D1%80%D1%8B
+abstract = True%0A%0A%0Aclass Genres(ImgWithDescr):%0A%0A %22%22%22 class with genres model %22%22%22%0A%0A directory = 'genres'%0A%0A class Meta:%0A verbose_name = '%D0%B6%D0%B0%D0%BD%D1%80'%0A verbose_name_plural = '%D0%B6%D0%B0%D0%BD%D1%80%D1%8B'%0A%0A%0Aclass Gallery(ImgWithDescr):%0A%0A %22%22%22 class with gallery model %22%22%22%0A%0A directory = 'gallery'%0A external_img = models.URLField(verbose_name='%D0%98%D0%B7%D0%BE%D0%B1%D1%80%D0%B0%D0%B6%D0%B5%D0%BD%D0%B8%D0%B5 %D0%B2%D0%BE %D0%B2%D0%BD%D0%B5%D1%88%D0%BD%D0%B5%D0%BC %D0%B8%D1%81%D1%82%D0%BE%D1%87%D0%BD%D0%B8%D0%BA%D0%B5')%0A%0A def img_url(self):%0A %22%22%22 return external img url or self file img %22%22%22%0A return self.external_img or self.image.url%0A%0A class Meta:%0A verbose_name = '%D0%B8%D0%B7%D0%BE%D0%B1%D1%80%D0%B0%D0%B6%D0%B5%D0%BD%D0%B8%D0%B5 %D0%B2 %D0%B3%D0%B0%D0%BB%D0%BB%D0%B5%D1%80%D0%B5%D0%B5'%0A verbose_name_plural = '%D0%B8%D0%B7%D0%BE%D0%B1%D1%80%D0%B0%D0%B6%D0%B5%D0%BD%D0%B8%D1%8F %D0%B2 %D0%B3%D0%B0%D0%BB%D0%BB%D0%B5%D1%80%D0%B5%D0%B5
'%0A
|
3d19af590505edc572d9ea990ccc9bfe63393643
|
Fix landing page in DocGen presubmit (#1140)
|
docgen/docgen.py
|
docgen/docgen.py
|
#!/usr/bin/env python3
#
# Copyright 2021 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import os
import subprocess
import sys
import bazelci
DEFAULT_FLAGS = ["--action_env=PATH=/usr/local/bin:/usr/bin:/bin", "--sandbox_tmpfs_path=/tmp"]
Settings = collections.namedtuple(
"Settings", ["target", "build_flags", "output_dir", "gcs_bucket", "gcs_subdir", "landing_page"]
)
DOCGEN_SETTINGS = {
"bazel-trusted": {
"https://github.com/bazelbuild/bazel.git": Settings(
target="//site",
build_flags=[],
output_dir="bazel-bin/site/site-build",
gcs_bucket="docs.bazel.build",
gcs_subdir="",
landing_page="versions/master/bazel-overview.html",
),
"https://github.com/bazelbuild/bazel-blog.git": Settings(
target="//:site",
build_flags=[],
output_dir="bazel-bin/site-build",
gcs_bucket="blog.bazel.build",
gcs_subdir="",
landing_page="index.html",
),
"https://github.com/bazelbuild/bazel-website.git": Settings(
target="//:site",
build_flags=[],
output_dir="bazel-bin/site-build",
gcs_bucket="www.bazel.build",
gcs_subdir="",
landing_page="index.html",
),
},
"bazel": {
"https://bazel.googlesource.com/bazel.git": Settings(
target="//site",
build_flags=[],
output_dir="bazel-bin/site/site-build",
gcs_bucket="docs-staging.bazel.build",
gcs_subdir=os.getenv("BUILDKITE_BUILD_NUMBER"),
landing_page="versions/master/bazel-overview.html",
),
},
}
def get_destination(bucket, subdir):
if not subdir:
return bucket
return "{}/{}".format(bucket, subdir)
def get_url(settings):
return "https://{}/{}".format(
get_destination(settings.gcs_bucket, settings.gcs_subdir), settings.landing_page
)
def main(argv=None):
org = os.getenv("BUILDKITE_ORGANIZATION_SLUG")
repo = os.getenv("BUILDKITE_REPO")
settings = DOCGEN_SETTINGS.get(org, {}).get(repo)
if not settings:
bazelci.eprint("docgen is not enabled for '%s' org and repository %s", org, repo)
return 1
bazelci.print_expanded_group(":bazel: Building documentation from {}".format(repo))
try:
bazelci.execute_command(
["bazel", "build"] + DEFAULT_FLAGS + settings.build_flags + [settings.target]
)
except subprocess.CalledProcessError as e:
bazelci.eprint("Bazel failed with exit code {}".format(e.returncode))
return e.returncode
bucket = "gs://{}".format(settings.gcs_bucket)
dest = get_destination(bucket, settings.gcs_subdir)
bazelci.print_expanded_group(":bazel: Uploading documentation to {}".format(dest))
try:
bazelci.execute_command(
["gsutil", "-m", "rsync", "-r", "-c", "-d", settings.output_dir, dest]
)
bazelci.execute_command(
["gsutil", "web", "set", "-m", "index.html", "-e", "404.html", bucket]
)
# TODO: does not work with 404 pages in sub directories
except subprocess.CalledProcessError as e:
bazelci.eprint("Upload to GCS failed with exit code {}".format(e.returncode))
return e.returncode
bazelci.print_collapsed_group(":bazel: Publishing documentation URL")
message = "You can find the documentation at {}".format(get_url(settings))
bazelci.execute_command(
["buildkite-agent", "annotate", "--style=info", message, "--context", "doc_url"]
)
bazelci.execute_command(
["buildkite-agent", "meta-data", "set", "message", message]
)
return 0
if __name__ == "__main__":
sys.exit(main())
|
Python
| 0
|
@@ -2204,32 +2204,16 @@
g_page=%22
-versions/master/
bazel-ov
|
a1d3304f993702460077d7f6c70607131aff874b
|
add fix keyword
|
libs/player.py
|
libs/player.py
|
# @Time : 2016/11/11 11:01
# @Author : lixintong
from keywords import keyword, var_cache
@keyword('current_activity')
def current_activity(acticity_desc):
"""
:param acticity_desc:video_player or topic_player or live or vr_live or pic_player or local_player
:return:
"""
return var_cache['proxy'].current_activity(acticity_desc)
@keyword('change_video_state')
def change_video_state(player_name, state):
"""
:param player_name: video_player or topic_player or live or vr_live or pic_player or local_player
:param state: play or pause
:return:
"""
return var_cache['proxy'].change_video_state(player_name, state)
|
Python
| 0
|
@@ -492,35 +492,24 @@
or live or
-vr_live or
pic_player o
|
04d0bb5a32b3e1b66c6ac1e27df656aed607c3cb
|
Test suite: Fix re_util doctest on PyPy
|
python/phonenumbers/re_util.py
|
python/phonenumbers/re_util.py
|
"""Additional regular expression utilities, to make it easier to sync up
with Java regular expression code.
>>> import re
>>> from .re_util import fullmatch
>>> from .util import u
>>> string = 'abcd'
>>> r1 = re.compile('abcd')
>>> r2 = re.compile('bc')
>>> r3 = re.compile('abc')
>>> fullmatch(r1, string) # doctest: +ELLIPSIS
<_sre.SRE_Match object...>
>>> fullmatch(r2, string)
>>> fullmatch(r3, string)
>>> r = re.compile('\\d{8}|\\d{10,11}')
>>> m = fullmatch(r, '1234567890')
>>> m.end()
10
>>> r = re.compile(u('[+\uff0b\\d]'), re.UNICODE)
>>> m = fullmatch(r, u('\uff10'))
>>> m.end()
1
"""
import re
def fullmatch(pattern, string, flags=0):
"""Try to apply the pattern at the start of the string, returning a match
object if the whole string matches, or None if no match was found."""
# Build a version of the pattern with a non-capturing group around it.
# This is needed to get m.end() to correctly report the size of the
# matched expression (as per the final doctest above).
grouped_pattern = re.compile("^(?:%s)$" % pattern.pattern, pattern.flags)
m = grouped_pattern.match(string)
if m and m.end() < len(string):
# Incomplete match (which should never happen because of the $ at the
# end of the regexp), treat as failure.
m = None # pragma no cover
return m
if __name__ == '__main__': # pragma no cover
import doctest
doctest.testmod()
|
Python
| 0
|
@@ -329,12 +329,10 @@
IS%0A%3C
-_sre
+..
.SRE
|
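
The loosened ellipsis works because PyPy's match objects do not live in CPython's _sre module, so a doctest anchored on `<_sre.SRE_Match object...>` fails there, while `<...SRE_Match object...>` matches both. A hedged check (the repr is interpreter-dependent; this targets the Python 2-era interpreters of that commit):

>>> import re
>>> m = re.compile('abcd').match('abcd')
>>> m  # doctest: +ELLIPSIS
<...SRE_Match object...>
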
f373638885a9dbd599117f782613203f251e3bfb
|
add example code to try out the LinearFilter
|
lib/neuroimaging/algorithms/kernel_smooth.py
|
lib/neuroimaging/algorithms/kernel_smooth.py
|
import gc
import numpy as N
import numpy.fft as fft
import numpy.linalg as NL
from neuroimaging.core.image.image import Image
from neuroimaging.algorithms.utils import fwhm2sigma
from neuroimaging.core.reference.mapping import Affine
class LinearFilter(object):
'''
A class to implement some FFT smoothers for VImage objects.
By default, this does a Gaussian kernel smooth. More choices
would be better!
'''
def __init__(self, grid, fwhm=6.0, padding=5, scale=1.0, location=0.0,
cov=None):
self.grid = grid
self.fwhm = fwhm
self.padding = padding
self.scale = scale
self.location = location
self.cov = cov
self.shape = N.array(self.grid.shape) + self.padding
self._setup_kernel()
def _setup_kernel(self):
_normsq = self._normsq() / 2.
self.kernel = N.exp(-N.minimum(_normsq, 15))
norm = N.sqrt((self.kernel**2).sum())
self.kernel = self.kernel / norm
self.kernel = fft.rfftn(self.kernel)
def _normsq(self):
"""
Compute the (periodic, i.e. on a torus) squared distance needed for
FFT smoothing. Assumes coordinate system is linear.
"""
if not isinstance(self.grid.mapping, Affine):
raise ValueError, 'for FFT smoothing, need a regular (affine) grid'
voxels = N.indices(self.shape).astype(N.float64)
for i in range(voxels.shape[0]):
test = N.less(voxels[i], self.shape[i] / 2.)
voxels[i] = test * voxels[i] + (1. - test) * (voxels[i] - self.shape[i])
voxels.shape = (voxels.shape[0], N.product(voxels.shape[1:]))
X = self.grid.mapping(voxels)
if self.fwhm != 1.0:
X = X / fwhm2sigma(self.fwhm)
if self.cov is not None:
_chol = NL.cholesky(self.cov)
X = N.dot(NL.inv(_chol), X)
D2 = N.add.reduce(X**2, 0)
D2.shape = self.shape
return D2
def smooth(self, inimage, clean=False, is_fft=False):
if inimage.ndim == 4:
_out = N.zeros(inimage.shape)
nslice = inimage.shape[0]
elif inimage.ndim == 3:
nslice = 1
else:
raise NotImplementedError, 'expecting either 3 or 4-d image.'
for _slice in range(nslice):
if inimage.ndim == 4:
data = inimage[_slice]
elif inimage.ndim == 3:
data = inimage[:]
if clean:
data = N.nan_to_num(data)
if not is_fft:
data = self._presmooth(data)
data *= self.kernel
else:
data *= self.kernel
data = fft.irfftn(data)
gc.collect()
if self.scale != 1:
data = self.scale * data[0:inimage.shape[0],0:inimage.shape[1],0:inimage.shape[2]]
if self.location != 0.0:
data += self.location
gc.collect()
# Write out data
if inimage.ndim == 4:
_out[_slice] = data
else:
_out = data
_slice += 1
gc.collect()
_out = _out[[slice(0, n) for n in self.grid.shape]]
if inimage.ndim == 3:
return Image(_out, grid=self.grid)
else:
return Image(_out, grid=self.grid.replicate(inimage.grid.shape[0]))
def _presmooth(self, indata):
_buffer = N.zeros(self.shape, N.float64)
_buffer[0:indata.shape[0],0:indata.shape[1],0:indata.shape[2]] = indata
return fft.rfftn(_buffer)
|
Python
| 0
|
@@ -314,17 +314,16 @@
ers for
-V
Image ob
@@ -3621,8 +3621,509 @@
buffer)%0A
+%0Aif __name__ == '__main__':%0A from neuroimaging.core.image.image import Image%0A from pylab import plot, show, imshow, subplot%0A a = 100*N.random.random((100, 100, 100))%0A img = Image(a)%0A filt = LinearFilter(img.grid)%0A smoothed = filt.smooth(img)%0A%0A from neuroimaging.utils.tests.data import repository%0A from neuroimaging.ui.visualization.viewer import BoxViewer%0A%0A view = BoxViewer(img)%0A view.draw()%0A sview = BoxViewer(smoothed, colormap='jet')%0A sview.draw()%0A show()%0A%0A
|
28b856dfe7ba9860b6f0e826f9e6e56a2b0d5c45
|
Tidy up clean module
|
dotbriefs/clean.py
|
dotbriefs/clean.py
|
import re
import sys
import yaml
from collections import OrderedDict
from textsub import Textsub
from util import warning
keyword_dict = {
'(?#QuotedString)' : r'("[^"\\]*(?:\\.[^"\\]*)*"|\'[^\'\\]*(?:\\.[^\'\\]*)*\')',
'(?#QuotedOrSingleWord)' : r'("[^"\\]*(?:\\.[^"\\]*)*"|\'[^\'\\]*(?:\\.[^\'\\]*)*\'|\S+)',
'(?#UpToHash)' : r'([^\s#]+(?:[ \t\v\f]*[^\s#]+)+)',
'(?#UpToSemicolon)' : r'([^\s;]+(?:[ \t\v\f]*[^\s;]+)+)',
}
keyword_sub = Textsub(keyword_dict)
keyword_sub.compile()
class CleanTemplate(object):
def __init__(self, template_type, rules=[]):
self.template_type = template_type
self.rules = rules
self.set_parent_rules()
def __getstate__(self):
state = OrderedDict()
state['type'] = self.template_type
state['rules'] = self.rules
return state
def __setstate__(self, state):
self.template_type = state['type']
self.rules = state['rules']
def set_parent_rules(self):
for rule in self.rules:
rule.template = self
def sub(self, line):
for rule in self.rules:
line = rule.sub(line)
return line
def clean_template_representer(dumper, data):
return dumper.represent_mapping(u'!Template', data.__getstate__().items(), False)
def clean_template_constructor(loader, node):
mapping = loader.construct_mapping(node)
if 'type' in mapping:
mapping['template_type'] = mapping['type']
del mapping['type']
return CleanTemplate(**mapping)
class CleanSecret(object):
def __init__(self, key, regex, substitute, description='', numbered=False):
self.key = key
self.description = description
self.numbered = numbered
self.regex = regex
self.substitute = substitute
self.n = 0
self.template = None
def __getstate__(self):
state = OrderedDict()
state['key'] = self.key
state['description'] = self.description
state['numbered'] = self.numbered
state['regex'] = self._orig_regex
state['substitute'] = self._substitute
return state
def __setstate__(self, state):
self.__dict__.update(state)
self.n = 0
self.template = None
def get_regex(self):
return self._regex
def set_regex(self, regex):
self._orig_regex = regex
regex = keyword_sub.sub(regex)
self._regex = re.compile(regex)
regex = property(get_regex, set_regex)
def sub(self, line):
out = u''
prev_start = -1
prev_end = -1
for m in self.regex.finditer(line):
if prev_start == -1:
out += line[:m.start()]
else:
if m.start() > prev_end:
out += line[prev_end:m.start()]
self.n += 1
out += m.expand(self.substitute)
prev_start = m.start()
prev_end = m.end()
if prev_end != -1:
out += line[prev_end:]
return out
else:
return line
def get_substitute(self):
key = ''
if self.numbered:
key += self.key + '_' + unicode(self.n)
else:
key += self.key
return self._substitute.replace('(?#Key)', '$DotBrief: ' + key + '$')
def set_substitute(self, substitute):
self._substitute = substitute
substitute = property(get_substitute, set_substitute)
def clean_secret_representer(dumper, data):
return dumper.represent_mapping(u'!Secret', data.__getstate__().items(), False)
def clean_secret_constructor(loader, node):
mapping = loader.construct_mapping(node)
return CleanSecret(**mapping)
class CopyTemplate(object):
def sub(self, line):
return line
def create_config():
s = []
s.append(CleanSecret("passwd",
r'password(\s*)=(\s*)(?#QuotedOrSingleWord)',
r'password\1=\2(?#Key)', 'Mutt passwords', True))
t = []
t.append(CleanTemplate('mutt', s))
config_file = open('.dotbrief.yaml', 'w')
yaml.dump_all(t, config_file)
config_file.close()
def load_config(template_type, filename):
if filename is None:
filename = '.dotbrief.yaml'
with open(filename, 'r') as config_file:
for template in yaml.load_all(config_file):
if template.template_type == template_type:
template.set_parent_rules()
return template
return None
def clean(args):
yaml.add_representer(CleanTemplate, clean_template_representer)
yaml.add_constructor(u'!Template', clean_template_constructor)
yaml.add_representer(CleanSecret, clean_secret_representer)
yaml.add_constructor(u'!Secret', clean_secret_constructor)
template = load_config(args.type, args.config)
if template is None:
warning("No template found")
template = CopyTemplate()
while 1:
try:
line = args.input.readline()
except KeyboardInterrupt:
break
if not line:
break
args.output.write(template.sub(line))
|
Python
| 0
|
@@ -14,10 +14,9 @@
ort
-sy
+o
s%0Aim
@@ -117,16 +117,296 @@
rning%0A%0A%0A
+# Configuration file for templates%0ACONFIG_FILE = '.dotbriefs.yaml'%0ACONFIG_PATH = '.dotfiles'%0A%0A# Tag used in regex substitution for secret keys%0ATAG_SECRET_KEY = '(?#Key)'%0A%0A# Used to tag secrets in dot files%0ATAG_SECRET_START = '$DotBriefs: '%0ATAG_SECRET_END = '$'%0A%0A# Regex shortcuts%0A
keyword_
@@ -3601,44 +3601,79 @@
ace(
-'(?#Key)', '$DotBrief: ' + key + '$'
+TAG_SECRET_KEY,%0A TAG_SECRET_START + key + TAG_SECRET_END
)%0A%0A
@@ -4564,35 +4564,206 @@
-filename = '.dotbrief.yaml'
+home_path = os.getenv('HOME', '')%0A conf_path = os.getenv('DOTBRIEFS_CONFIG_PATH',%0A os.path.join(home_path, CONFIG_PATH))%0A filename = os.path.join(conf_path, CONFIG_FILE)
%0A
|
f44b5758f2320021fa607891e97a6f4e438b47a2
|
Add command line flag for tail end cutoff for specificity score experiments
|
train_bert_keras_model.py
|
train_bert_keras_model.py
|
"""
Copyright 2020 Google LLC.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import time
import argparse
import tensorflow as tf
from smart_news_query_embeddings.models.bert_keras_model import BertKerasModel
from smart_news_query_embeddings.preprocessing.bert_tokenizer import *
from tensorflow.keras.optimizers import Adam
from sklearn.model_selection import train_test_split
DATA_PATH = 'data/nyt_articles_with_normalized_scores.pkl'
if __name__ == '__main__':
output_dir = 'bert_keras_output_{}'.format(int(time.time()))
parser = argparse.ArgumentParser()
parser.add_argument('--batch-size', '-b', default=32, type=int)
parser.add_argument('--learning-rate', '-l', default=1e-5, type=float)
parser.add_argument('--max-seq-length', default=128, type=int)
parser.add_argument('--warmup-proportion', default=0.1, type=float)
parser.add_argument('--dropout-rate', default=0.5, type=float)
parser.add_argument('--num-train-epochs', '-n', default=3, type=int)
parser.add_argument('--dense-size', '-d', default=256, type=int)
parser.add_argument('--save-summary-every', default=100, type=int)
parser.add_argument('--output-dir', '-o', default=output_dir, type=str)
parser.add_argument('--training', '-t', default=True, type=bool)
parser.add_argument('--bert-dir', default='uncased_L-12_H-768_A-12', type=str)
args = parser.parse_args()
if not os.path.exists('outputs'):
os.mkdir('outputs')
out_dir = os.path.join('outputs', args.output_dir)
tokenizer = create_tokenizer(args.bert_dir)
df = get_filtered_nyt_data_with_scores(DATA_PATH)
df['category_labels'] = df['section'].astype('category').cat.codes
num_classes = df['category_labels'].max() + 1
CUTOFF = df.shape[0] // 2
train_df, test_df = df[CUTOFF:], df[:CUTOFF]
train_ids, train_labels = tokenize_data(train_df['abstract'], train_df['category_labels'], tokenizer, args.max_seq_length, num_classes)
test_ids, test_labels = tokenize_data(test_df['abstract'], test_df['category_labels'], tokenizer, args.max_seq_length, num_classes)
if not os.path.exists(out_dir):
model = BertKerasModel(num_classes, bert_dir=args.bert_dir,
max_seq_length=args.max_seq_length, dense_size=args.dense_size,
dropout_rate=args.dropout_rate)
model.build(input_shape=(None, args.max_seq_length))
model.compile(loss='categorical_crossentropy', optimizer=Adam(lr=args.learning_rate), metrics=['accuracy'])
else:
model = tf.keras.models.load_model(out_dir)
model.fit(train_ids, train_labels, validation_data=(test_ids, test_labels), epochs=args.num_train_epochs, batch_size=args.batch_size)
model.save(out_dir)
|
Python
| 0
|
@@ -1842,24 +1842,90 @@
, type=str)%0A
+ parser.add_argument('--tail-cutoff', default=0.5, type=float)%0A
args = p
@@ -2302,16 +2302,20 @@
UTOFF =
+int(
df.shape
@@ -2322,12 +2322,27 @@
%5B0%5D
-// 2
+* args.tail_cutoff)
%0A
@@ -2365,16 +2365,17 @@
df = df%5B
+-
CUTOFF:%5D
|
7fcfe4ece5d7b792b2f38b9b0115f590d3fe0e60
|
Fix glu.py for windows
|
autoconf/glu.py
|
autoconf/glu.py
|
from _external import *
from gl import *
if windows:
glu = LibWithHeaderChecker('GLU32', ['windows.h','GL/glu.h'], 'c', dependencies=[gl])
if macos:
glu = LibWithHeaderChecker('OpenGL', ['OpenGL/glu.h'], 'c', name='glu')
else :
glu = LibWithHeaderChecker('GLU', ['GL/glu.h'], 'c', dependencies=[gl])
|
Python
| 0.000002
|
@@ -140,16 +140,18 @@
gl%5D) %0A
+el
if macos
|
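
With the inserted "el", the three platform branches become mutually exclusive; the repaired file reads, in full:

from _external import *
from gl import *

if windows:
    glu = LibWithHeaderChecker('GLU32', ['windows.h', 'GL/glu.h'], 'c', dependencies=[gl])
elif macos:
    glu = LibWithHeaderChecker('OpenGL', ['OpenGL/glu.h'], 'c', name='glu')
else:
    glu = LibWithHeaderChecker('GLU', ['GL/glu.h'], 'c', dependencies=[gl])
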
33017d1266b36635a5f29db5ae6883e1bf97e76e
|
Add fullPath to folders in box metadata
|
waterbutler/providers/box/metadata.py
|
waterbutler/providers/box/metadata.py
|
import os
from waterbutler.core import metadata
class BaseBoxMetadata(metadata.BaseMetadata):
def __init__(self, raw, folder):
super().__init__(raw)
self.folder = folder
@property
def provider(self):
return 'box'
class BoxFolderMetadata(BaseBoxMetadata, metadata.BaseFolderMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
return '/{}/'.format(self.raw['id'])
class BoxFileMetadata(BaseBoxMetadata, metadata.BaseFileMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
return '/{0}/{1}'.format(self.raw['id'], self.raw['name'])
@property
def size(self):
return self.raw.get('size')
@property
def modified(self):
return self.raw.get('modified_at')
@property
def content_type(self):
return None
@property
def extra(self):
return {
'etag': self.raw.get('etag'),
'fullPath': self.materialized_path
}
@property
def materialized_path(self):
if 'path_collection' not in self.raw:
return None
path = []
for entry in reversed(self.raw['path_collection']['entries']):
if self.folder == entry['id']:
break
path.append(entry['name'])
return '/' + os.path.join('/'.join(reversed(path)), self.name)
class BoxRevision(metadata.BaseFileRevisionMetadata):
@property
def version(self):
try:
return self.raw['id']
except KeyError:
return self.raw['path'].split('/')[1]
@property
def version_identifier(self):
return 'revision'
@property
def path(self):
try:
return '/{0}/{1}'.format(self.raw['id'], self.raw['name'])
except KeyError:
return self.raw.get('path')
@property
def modified(self):
try:
return self.raw['modified_at']
except KeyError:
return self.raw.get('modified')
|
Python
| 0.000001
|
@@ -248,16 +248,494 @@
'box'%0A%0A
+ @property%0A def full_path(self):%0A if 'path_collection' not in self.raw:%0A return None%0A%0A path = %5B%5D%0A for entry in reversed(self.raw%5B'path_collection'%5D%5B'entries'%5D):%0A if self.folder == entry%5B'id'%5D:%0A break%0A path.append(entry%5B'name'%5D)%0A%0A return '/' + os.path.join('/'.join(reversed(path)), self.name)%0A%0A @property%0A def extra(self):%0A return %7B%0A 'fullPath': self.full_path%0A %7D%0A%0A
%0Aclass B
@@ -946,16 +946,164 @@
'id'%5D)%0A%0A
+ @property%0A def full_path(self):%0A path = super().full_path%0A if path is None:%0A return None%0A return path + '/'%0A%0A
%0Aclass B
@@ -1671,28 +1671,20 @@
': self.
-materialized
+full
_path%0A
|
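
Per the hunks, the path walk moves into the shared base class as a full_path property (surfaced as 'fullPath' in extra, for folders as well as files), and the folder class overrides it to append a trailing slash. A sketch of that override, assuming the base property from the first hunk:

    @property
    def full_path(self):
        # The base walks path_collection up to self.folder.
        path = super().full_path
        if path is None:
            return None
        return path + '/'  # folder paths are slash-terminated
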
ce9d25400bff704c87038a77ea1a3290e680f8f3
|
Update EN_background_check.py
|
qctests/EN_background_check.py
|
qctests/EN_background_check.py
|
"""
Implements the background check on reported levels from the EN quality control
system, http://www.metoffice.gov.uk/hadobs/en3/OQCpaper.pdf
"""
import EN_spike_and_step_check
import numpy as np
import util.obs_utils as outils
def test(p, *args, **kwargs):
"""
Runs the quality control check on profile p and returns a numpy array
of quality control decisions with False where the data value has
passed the check and True where it failed.
"""
# Define an array to hold results.
qc = np.zeros(p.n_levels(), dtype=bool)
# Check that we have the auxiliary information we need, otherwise we
# will just return.
if kwargs['EN_background_check_aux'] is None: return qc
# If we are here then the auxiliary information is available.
aux = kwargs['EN_background_check_aux']
# Find grid cell nearest to the observation.
lon = p.longitude()
grid = aux['lon']
nlon = len(grid)
ilon = np.mod(np.round((lon - grid[0]) / (grid[1] - grid[0])), nlon)
lat = p.latitude()
grid = aux['lat']
nlat = len(grid)
ilat = np.mod(np.round((lat - grid[0]) / (grid[1] - grid[0])), nlat)
if ilat == nlat: ilat -= 1 # Checks for edge case where lat is ~90.
assert ilon >=0 and ilon < len(grid), 'Longitude is out of range: %f %i' % (lon, ilon)
assert ilat >=0 and ilat < len(grid), 'Latitude is out of range: %f %i' % (lat, ilat)
# Extract the relevant auxiliary data.
imonth = p.month() - 1
clim = aux['clim'][:, ilat, ilon, imonth]
bgev = aux['bgev'][:, ilat, ilon]
obev = aux['obev']
depths = aux['depth']
# Remove missing data points.
iOK = (clim.mask == False) & (bgev.mask == False)
if np.count_nonzero(iOK) == 0: return qc
clim = clim[iOK]
bgev = bgev[iOK]
obev = obev[iOK]
depths = depths[iOK]
# Find which levels have data.
t = p.t()
s = p.s()
z = p.z()
isTemperature = (t.mask==False)
isSalinity = (s.mask==False)
isDepth = (z.mask==False)
isData = isTemperature & isDepth
# Use the EN_spike_and_step_check to find suspect values.
suspect = EN_spike_and_step_check.test(p, suspect=True)
# Loop over levels.
for iLevel in range(p.n_levels()):
if isData[iLevel] == False: continue
# Get the climatology and error variance values at this level.
climLevel = np.interp(z[iLevel], depths, clim, right=99999)
bgevLevel = np.interp(z[iLevel], depths, bgev, right=99999)
obevLevel = np.interp(z[iLevel], depths, obev, right=99999)
if climLevel == 99999:
qc[iLevel] = True # This could reject some good data if the
# climatology is incomplete, but also can act as
# a check that the depth of the profile is
# consistent with the depth of the ocean.
continue
assert bgevLevel > 0, 'Background error variance <= 0'
assert obevLevel > 0, 'Observation error variance <= 0'
# If at low latitudes the background error variance is increased.
# Also, because we are on reported levels instead of standard levels
# the variances are increased. NB multiplication factors are squared
# because we are working with error variances instead of standard
# deviations.
if np.abs(p.latitude()) < 10.0: bgevLevel *= 1.5**2
bgevLevel *= 2.0**2
# Set up an initial estimate of probability of gross error. Information
# from the EN_spike_and_step_check is used here to increase the initial
# estimate if the observation is suspect.
probe_type = p.probe_type()
if probe_type == 1 or probe_type == 2 or probe_type == 3 or probe_type == 13 or probe_type == 16:
pge = 0.05
else:
pge = 0.01
if suspect[iLevel]:
pge = 0.5 + 0.5 * pge
# Calculate potential temperature.
if isSalinity[iLevel]:
sLevel = s[iLevel]
else:
sLevel = 35.0
potm = outils.pottem(t[iLevel], sLevel, z[iLevel], lat=p.latitude())
# Do Bayesian calculation.
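# pge is the prior probability of gross error. A good value is modelled
# as Gaussian about the climatology with variance obev + bgev, a gross
# error as a uniform density of 0.1; pgebk below is then the posterior
# probability of gross error via Bayes' rule.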
evLevel = obevLevel + bgevLevel
sdiff = (potm - climLevel)**2 / evLevel
pdGood = np.exp(-0.5 * np.min([sdiff, 160.0])) / np.sqrt(2.0 * np.pi * evLevel)
pdTotal = 0.1 * pge + pdGood * (1.0 - pge)
pgebk = 0.1 * pge / pdTotal
if pgebk >= 0.5: qc[iLevel] = True
return qc
|
Python
| 0
|
@@ -1267,25 +1267,20 @@
ilon %3C
-len(grid)
+nlon
, 'Longi
@@ -1353,25 +1353,20 @@
ilat %3C
-len(grid)
+nlat
, 'Latit
|
2e3d31dd20936574d238fc61c1d43983d8b9ff1c
|
Add out_path input.
|
qipipe/interfaces/fix_dicom.py
|
qipipe/interfaces/fix_dicom.py
|
from nipype.interfaces.base import (BaseInterface,
BaseInterfaceInputSpec, traits, Directory, TraitedSpec)
import os
from qipipe.staging.fix_dicom import fix_dicom_headers
class FixDicomInputSpec(BaseInterfaceInputSpec):
source = Directory(exists=True, desc='The input patient directory', mandatory=True)
dest = traits.String(desc='The output location', mandatory=True)
class FixDicomOutputSpec(TraitedSpec):
out_path = Directory(exists=True, desc="The output patient directory")
class FixDicom(BaseInterface):
input_spec = FixDicomInputSpec
output_spec = FixDicomOutputSpec
def _run_interface(self, runtime):
fix_dicom_headers(self.inputs.source, self.inputs.dest)
return runtime
def _list_outputs(self):
outputs = self._outputs().get()
outputs['out_path'] = self.inputs.dest
return outputs
|
Python
| 0
|
@@ -421,24 +421,20 @@
c):%0A
-out_path
+dest
= Direc
@@ -457,24 +457,31 @@
, desc=%22The
+target
output patie
@@ -818,16 +818,12 @@
ts%5B'
-out_path
+dest
'%5D =
|
61da0b379efefa5dbbc4341c8fd653cf1411040f
|
fix wenjuan question when it is a checkbox
|
apps/wenjuan/serializer/report.py
|
apps/wenjuan/serializer/report.py
|
# -*- coding:utf-8 -*-
from rest_framework import serializers
from wenjuan.models.question import Job, Report, Answer
# from wenjuan.serializer.question import JobModelSerializer
from wenjuan.serializer.answer import AnswerDetailSerializer
class ReportModelSerializer(serializers.ModelSerializer):
"""
Report Model Serializer
"""
def check_job_answers(self, job):
# print("validate_answers", job)
# 1. Get all of the questionnaire's questions
questions = job.questions.all()
# Answers for this report
answers_list = []
# 2. Start checking the questions
request = self.context["request"]
for question in questions:
# Handle according to the question type
category = question.category
field_name = "question_{}".format(question.id)
field_value = request.data.get(field_name)
# Check whether the submitted value is empty
if not field_value:
raise serializers.ValidationError("问题(ID:{}):{},回答未填写".format(question.id, question.title))
# Check whether the value must be unique
if question.is_unique:
# Look the value up among existing answers (note: only user-typed input can require uniqueness; radio and checkbox options have no notion of it)
if category == "text":
answer_exist = Answer.objects.filter(question=question, answer=field_value).first()
if answer_exist:
raise serializers.ValidationError(
"问题(ID:{}):{},回答值需唯一!值({})已经提交".format(question.id, question.title, field_value))
if category == "radio":
# For radio or checkbox questions the submitted result must be validated
choice = question.choices.filter(option=field_value).first()
if not choice:
raise serializers.ValidationError(
"问题(ID:{}):{},没有{}这个选项".format(question.id, question.title, field_value))
else:
# Build the answer
answer = Answer(question=question, option=field_value, answer=choice.value)
answers_list.append(answer)
# Create the answer
elif category == "checkbox":
# For a checkbox every selected option must be checked
# Check whether the field values are empty
option_field = field_value.join(",")
answer_field_list = []
for v in field_value:
choice = question.choices.filter(option=v).first()
if not choice:
raise serializers.ValidationError(
"问题(ID:{}):{},没有{}这个选项".format(question.id, question.title, v))
else:
answer_field_list.append(choice.value)
# Selected options
answer_field = answer_field_list.join(",")
# Build the answer
answer = Answer(question=question, option=option_field, answer=answer_field)
answers_list.append(answer)
else:
# input
# Build the answer
answer = Answer(question=question, answer=field_value)
answers_list.append(answer)
# Return the answers
return answers_list
def create(self, validated_data):
# 1. get job
job = validated_data["job"]
request = self.context["request"]
if 'HTTP_X_FORWARDED_FOR' in request.META:
ip = request.META['HTTP_X_FORWARDED_FOR']
else:
ip = request.META['REMOTE_ADDR']
validated_data["ip"] = ip
# Check whether the user is logged in
user = request.user
if job.is_authenticated:
# Answering requires a logged-in user
if not user.is_authenticated:
raise serializers.ValidationError("需要登录才可以回答")
validated_data["user"] = user
else:
if user.is_authenticated:
validated_data["user"] = user
# check job answer data
answers = self.check_job_answers(job=job)
instance = super().create(validated_data=validated_data)
for answer in answers:
answer.save()
instance.answers.add(answer)
return instance
class Meta:
model = Report
fields = ("id", "job", "user", "ip", "time_added", "answers")
class ReportDetailSerializer(serializers.ModelSerializer):
"""
Questionnaire answer detail API
"""
# job = JobModelSerializer(read_only=True)
answers = AnswerDetailSerializer(many=True, read_only=True)
class Meta:
model = Report
fields = ("id", "job", "user", "ip", "time_added", "answers")
|
Python
| 0.999999
|
@@ -2148,16 +2148,25 @@
field =
+%22,%22.join(
field_va
@@ -2168,25 +2168,16 @@
ld_value
-.join(%22,%22
)%0A
@@ -2646,16 +2646,25 @@
field =
+%22,%22.join(
answer_f
@@ -2676,17 +2676,8 @@
list
-.join(%22,%22
)%0A
|
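
Both hunks fix the same inversion: in Python, str.join belongs to the separator, not to the sequence being joined. A two-line illustration:

options = ['a', 'b', 'c']
print(",".join(options))  # 'a,b,c' -- the corrected form from the hunks
# options.join(",")       # AttributeError: 'list' object has no attribute 'join'
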
9d3d541faaf993665040d39a5cacb52d7a096cde
|
Add in a model concept for settings
|
drupdates/utils.py
|
drupdates/utils.py
|
import datetime
import requests
import os
from os.path import expanduser
import yaml
def nextFriday():
# Get the date string for the following Friday
today = datetime.date.today()
if datetime.datetime.today().weekday() == 4:
friday = str(today + datetime.timedelta( (3-today.weekday())%7+1 ))
else:
friday = str(today + datetime.timedelta( (4-today.weekday()) % 7 ))
return friday
def apiCall (uri, name, method = 'get', **kwargs):
#user = '', pword = ''):
""" Perform and API call, expecting a JSON response. Largely a wrapper
around the request module
Keyword arguments:
uri -- the uri of the Restful Web Service (required)
name -- the human readable label for the service being called (required)
method -- HTTP method to use (default = 'get')
kwargs -- dictionary of arguments passed directly to requests module method
"""
# FIXME: need to HTML escape passwords
func = getattr(requests, method)
args = {}
for key, value in kwargs.iteritems():
args[key] = value
# if not user == '' and not pword == '':
# args.append("auth=(user, pword)")
r = func(uri, **args)
responseDictionary = r.json()
#If API call errors out print the error and quit the script
if r.status_code != 200:
if 'errors' in responseDictionary:
errors = responseDictionary.pop('errors')
firstError = errors.pop()
elif 'error' in responseDictionary:
firstError = responseDictionary.pop('error')
else:
firstError = {'message': "No error message provided by response"}
print("{0} returned an error, exiting the script.\n Status Code: {1} \n Error: {2}".format(name, r.status_code , firstError['message']))
return False
else:
return responseDictionary
class Settings:
__localFile = expanduser("~") + '/.drupdates/main.yaml'
def __init__(self):
currentDir = os.path.dirname(os.path.realpath(__file__))
default = open(currentDir + "/settings/default.yaml", 'r')
self.__settings = yaml.load(default)
default.close()
if os.path.isfile(self.__localFile):
local = open(self.__localFile, 'r')
self.__local = yaml.load(local)
local.close()
self.__settings = dict( self.__settings + self.__local.items())
def get(self, setting):
return self.__settings[setting]['value']
# Load variables:
settings = Settings()
|
Python
| 0.000001
|
@@ -1825,16 +1825,60 @@
(self):%0A
+ self.__settings = %7B%7D%0A self.__model()%0A
curr
@@ -2226,17 +2226,16 @@
= dict(
-
self.__s
@@ -2241,16 +2241,24 @@
settings
+.items()
+ self.
@@ -2275,16 +2275,178 @@
ems())%0A%0A
+ def __model(self):%0A model = %7B%7D%0A model%5B'default'%5D = ''%0A model%5B'value'%5D = ''%0A model%5B'prompt'%5D = ''%0A model%5B'format'%5D = ''%0A self.__model = model%0A%0A
def ge
@@ -2467,21 +2467,28 @@
g):%0A
-retur
+if setting i
n self._
@@ -2500,17 +2500,106 @@
ings
-%5Bsetting%5D
+:%0A setting = dict(self.__model.items() + self.__settings%5Bsetting%5D.items())%0A return setting
%5B'va
|
3972594787f4ed33d656ff0c097fdb3633a96b14
|
add testcase for #1
|
tsstats/tests/test_log.py
|
tsstats/tests/test_log.py
|
import pytest
from tsstats.exceptions import InvalidLog
from tsstats.log import parse_log, parse_logs
@pytest.fixture
def clients():
return parse_log('tsstats/tests/res/test.log')
def test_log_client_count(clients):
assert len(clients) == 3
def test_log_onlinetime(clients):
assert clients['1'].onlinetime == 402
assert clients['2'].onlinetime == 20
def test_log_kicks(clients):
assert clients['UIDClient1'].kicks == 1
def test_log_pkicks(clients):
assert clients['2'].pkicks == 1
def test_log_bans(clients):
assert clients['UIDClient1'].bans == 1
def test_log_pbans(clients):
assert clients['2'].pbans == 1
def test_log_invalid():
with pytest.raises(InvalidLog):
parse_log('tsstats/tests/res/test.log.broken')
def test_log_multiple():
assert len(parse_log('tsstats/tests/res/test.log')) == \
len(parse_logs('tsstats/tests/res/test.log'))
|
Python
| 0.000001
|
@@ -1,16 +1,40 @@
+from time import sleep%0A%0A
import pytest%0A%0Af
@@ -931,8 +931,267 @@
.log'))%0A
+%0A%0A@pytest.mark.slowtest%0Adef test_log_client_online():%0A clients = parse_log('tsstats/tests/res/test.log')%0A assert clients%5B'1'%5D.onlinetime == 402%0A sleep(2)%0A clients = parse_log('tsstats/tests/res/test.log')%0A assert clients%5B'1'%5D.onlinetime == 404%0A
|
07f86c47c58d6266bd4b42c81521001aca072ff1
|
Add some more rubbish to example string
|
jsonconfigparser/test/__init__.py
|
jsonconfigparser/test/__init__.py
|
import unittest
from jsonconfigparser import JSONConfigParser
class JSONConfigTestCase(unittest.TestCase):
def test_init(self):
JSONConfigParser()
def test_read_string(self):
string = '[section]\n' + \
'foo = "bar"\n'
cf = JSONConfigParser()
cf.read_string(string)
self.assertEqual(cf.get('section', 'foo'), 'bar')
def test_get(self):
cf = JSONConfigParser()
cf.add_section('section')
cf.set('section', 'section', 'set-in-section')
self.assertEqual(cf.get('section', 'section'), 'set-in-section')
cf.set(cf.default_section, 'defaults', 'set-in-defaults')
self.assertEqual(cf.get('section', 'defaults'), 'set-in-defaults')
self.assertEqual(cf.get('section', 'vars',
vars={'vars': 'set-in-vars'}),
'set-in-vars')
self.assertEqual(cf.get('section', 'unset', 'fallback'), 'fallback')
suite = unittest.TestLoader().loadTestsFromTestCase(JSONConfigTestCase)
|
Python
| 0.000173
|
@@ -245,22 +245,165 @@
'
-foo = %22bar%22%5Cn'
+# comment comment%5Cn' + %5C%0A 'foo = %22bar%22%5Cn' + %5C%0A '%5Cn' + %5C%0A '%5Bsection2%5D%5Cn' + %5C%0A 'bar = %22baz%22%5Cn'%0A
%0A%0A
|
39da3725f9b8e6842f06954c562873d0a8ff731a
|
fix silly request header error
|
dvc/remote/http.py
|
dvc/remote/http.py
|
from __future__ import unicode_literals
from dvc.scheme import Schemes
from dvc.utils import LARGE_FILE_SIZE
from dvc.utils.compat import open
import requests
import logging
from dvc.progress import Tqdm
from dvc.exceptions import DvcException
from dvc.config import Config
from dvc.remote.base import RemoteBASE
logger = logging.getLogger(__name__)
class RemoteHTTP(RemoteBASE):
scheme = Schemes.HTTP
REQUEST_TIMEOUT = 10
CHUNK_SIZE = 2 ** 16
PARAM_CHECKSUM = "etag"
def __init__(self, repo, config):
super(RemoteHTTP, self).__init__(repo, config)
url = config.get(Config.SECTION_REMOTE_URL)
self.path_info = self.path_cls(url) if url else None
def _download(self, from_info, to_file, name=None, no_progress_bar=False):
request = self._request("GET", from_info.url, stream=True)
total = self._content_length(request)
with Tqdm(
total=total,
leave=False,
bytes=True,
desc_truncate=from_info.url if name is None else name,
disable=no_progress_bar,
) as pbar:
with open(to_file, "wb") as fd:
for chunk in request.iter_content(chunk_size=self.CHUNK_SIZE):
fd.write(chunk)
fd.flush()
pbar.update(len(chunk))
# print completed progress bar for large file sizes
pbar.n = total or pbar.n
if pbar.n > LARGE_FILE_SIZE:
Tqdm.write(str(pbar))
def exists(self, path_info):
return bool(self._request("HEAD", path_info.url))
def _content_length(self, url_or_request):
headers = getattr(
url_or_request,
"headers",
self._request("HEAD", url_or_request).headers,
)
res = headers.get("Content-Length")
return int(res) if res else None
def get_file_checksum(self, path_info):
url = path_info.url
headers = self._request("HEAD", url).headers
etag = headers.get("ETag") or headers.get("Content-MD5")
if not etag:
raise DvcException(
"could not find an ETag or "
"Content-MD5 header for '{url}'".format(url=url)
)
if etag.startswith("W/"):
raise DvcException(
"Weak ETags are not supported."
" (Etag: '{etag}', URL: '{url}')".format(etag=etag, url=url)
)
return etag
def _request(self, method, url, **kwargs):
kwargs.setdefault("allow_redirects", True)
kwargs.setdefault("timeout", self.REQUEST_TIMEOUT)
try:
return requests.request(method, url, **kwargs)
except requests.exceptions.RequestException:
raise DvcException("could not perform a {} request".format(method))
def gc(self):
raise NotImplementedError
|
Python
| 0.000004
|
@@ -875,23 +875,25 @@
_length(
-request
+from_info
)%0A%0A
|
6215aea4723192bd69632781b43535d846bbfb69
|
Fix delete option
|
solrbackup.py
|
solrbackup.py
|
#!/usr/bin/env python
#
# Solr 4 remote backup tool
#
# URL: https://github.com/nla/solrbackup
# Author: Alex Osborne <aosborne@nla.gov.au>
# License: MIT
#
import json, time, os, struct, zlib, sys, errno
from urllib import urlencode
from urllib2 import urlopen
from contextlib import closing
from optparse import OptionParser
def getjson(url):
f = urlopen(url)
try:
return json.load(f)
finally:
f.close()
def listcores(solr_url):
return getjson(solr_url + '/admin/cores?action=STATUS&wt=json')['status'].keys()
def clusterstate(solr_url):
return json.loads(getjson(solr_url + '/zookeeper?detail=true&path=%2Fclusterstate.json')['znode']['data'])
def indexversion(solr_url, core):
response = getjson(solr_url + '/%s/replication?command=indexversion&wt=json' % core)
return {'generation': response['generation'], 'indexversion': response['indexversion']}
def filelist(solr_url, core, version):
return getjson(solr_url + '/%s/replication?command=filelist&wt=json&%s' % (core, urlencode(version)))['filelist']
class FileStream(object):
def __init__(self, f, use_checksum = False):
self.f = f
self.use_checksum = use_checksum
def __iter__(self):
return self
def unpack(self, fmt):
size = struct.calcsize(fmt)
buf = self.f.read(size)
if buf:
return struct.unpack(fmt, buf)
else:
return None
def next(self):
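# Packet layout: a 4-byte big-endian length, then (when checksumming is
# enabled) an 8-byte big-endian adler32 checksum, then the payload bytes.
# A missing or zero length terminates the stream.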
packed = self.unpack('>i')
size = packed[0] if packed is not None else None
if size is None or size == 0:
self.close()
raise StopIteration
if self.use_checksum:
checksum, = self.unpack('>q')
data = self.f.read(size)
if len(data) < size:
self.close()
raise EOFError('unexpected end of file stream')
if self.use_checksum:
calculated = zlib.adler32(data) & 0xffffffff
if calculated != checksum:
self.close()
raise IOError('checksum mismatch: calculated %d but expected %d' % (calculated, checksum))
return data
def close(self):
self.f.close()
def filestream(solr_url, core, version, file, offset=0, use_checksum=False):
query = {
'command': 'filecontent',
'wt': 'filestream',
'file': file['name'],
'offset': offset,
'checksum': 'true' if use_checksum else 'false',
'generation': version['generation'],
}
f = urlopen('%s/%s/replication?%s' % (solr_url, core, urlencode(query)))
return FileStream(f, use_checksum=use_checksum)
def mkdir_p(path):
try:
os.makedirs(path)
except OSError as e:
if e.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
def is_complete(path, expected_size):
try:
return os.path.getsize(path) >= expected_size
except OSError as e:
if e.errno == errno.ENOENT:
return False
else:
raise
def nicesize(bytes):
if bytes < 1024: return '%dB' % bytes
if bytes < 1024 * 1024: return '%.2fK' % (bytes / 1024.0)
if bytes < 1024 * 1024 * 1024: return '%.2fM' % (bytes / 1024.0 / 1024.0)
return '%.2fG' % (bytes / 1024.0 / 1024.0 / 1024.0)
def download_file(solr_url, core, version, file, destdir, options):
dest = os.path.join(destdir, file['name'])
if is_complete(dest, file['size']):
if options.verbose:
print 'already got', file['name']
return
if options.verbose:
print 'fetching', file['name']
with open(dest, 'a+b') as out:
out.seek(0, 2)
offset = out.tell()
with closing(filestream(solr_url, core, version, file, offset, use_checksum=options.use_checksum)) as stream:
for packet in stream:
out.write(packet)
if options.verbose:
print core, file['name'], nicesize(out.tell()), '/', nicesize(file['size']), '%.2f%%' % (100.0 * out.tell() / file['size'])
def download_core(solr_url, core, dest, options):
version = indexversion(solr_url, core)
files = filelist(solr_url, core, version)
mkdir_p(dest)
for file in files:
download_file(solr_url, core, version, file, dest, options)
keep = set([f['name'] for f in files])
for file in os.listdir(dest):
if file not in keep:
if options.verbose: print 'deleting', file
os.remove(os.path.join(dest, file))
def download_cores(solr_url, outdir, options):
for core in options.cores or listcores(solr_url):
dest = os.path.join(outdir, core)
download_core(solr_url, core, dest, options)
def find_leader(replicas):
for replica in replicas:
if replica.get('leader') == 'true':
return replica
return None
def download_cloud(solr_url, outdir, options):
collections = clusterstate(solr_url)
for colname, coldata in collections.iteritems():
for shardname, sharddata in coldata['shards'].iteritems():
replica = find_leader(sharddata['replicas'].values())
if replica is None:
raise Exception('no leader for shard ' + shardname + ' in ' + colname)
shard_url = replica['base_url']
core = replica['core']
dest = os.path.join(outdir, colname, shardname)
download_core(solr_url, core, dest, options)
def main():
parser = OptionParser(usage='Usage: %prog [options] solr_url outdir')
parser.add_option("-C", "--cloud", action="store_true", dest="cloud", default=False, help="download all shards from a SolrCloud")
parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False, help="show progress")
parser.add_option("-d", "--delete", action="store_true", dest="verbose", default=False, help="expire old segments (use when updating an existing backup)")
parser.add_option("--core", action="append", dest="cores", help="core to download (can be specified multiple times, default is all)")
    parser.add_option("--no-checksum", action="store_false", dest="use_checksum", default=True, help="don't verify adler32 checksums while downloading")
(options, args) = parser.parse_args()
if len(args) < 2:
parser.print_help()
sys.exit(1)
solr_url = args[0].rstrip('/')
outdir = args[1]
if options.cloud:
download_cloud(solr_url, outdir, options)
else:
download_cores(solr_url, outdir, options)
if __name__ == '__main__': main()
|
Python
| 0.000005
|
@@ -4282,16 +4282,43 @@
files%5D)%0A
+ if options.delete:%0A
for
@@ -4351,16 +4351,20 @@
+
+
if file
@@ -4376,16 +4376,20 @@
n keep:%0A
+
@@ -4435,16 +4435,20 @@
', file%0A
+
@@ -5796,38 +5796,37 @@
re_true%22, dest=%22
-verbos
+delet
e%22, default=Fals
|
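A note on the wire format parsed by the FileStream class above: Solr's filestream replication response is a sequence of packets, each carrying a big-endian 4-byte payload length, an optional 8-byte adler32 checksum, and then the payload itself. The sketch below (illustrative only, no network involved) builds one packet in memory and reads it back the same way:

import struct
import zlib
from io import BytesIO

payload = b'segment data'
checksum = zlib.adler32(payload) & 0xffffffff
packet = struct.pack('>i', len(payload)) + struct.pack('>q', checksum) + payload

f = BytesIO(packet)
size, = struct.unpack('>i', f.read(4))      # '>i': big-endian 4-byte length
expected, = struct.unpack('>q', f.read(8))  # '>q': big-endian 8-byte checksum
data = f.read(size)
assert (zlib.adler32(data) & 0xffffffff) == expected
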
546368705e132fcc462f63e40f89eb431c54ec65
|
Remove XMLField (removed from django 1.4)
|
dynamo/settings.py
|
dynamo/settings.py
|
# django imports
from django.conf import settings
# Delete database column after field has been deleted
DYNAMO_DELETE_COLUMNS = getattr(settings,'DYNAMO_DELETE_COLUMNS',True)
# Delete database table after model has been deleted
DYNAMO_DELETE_TABLES = getattr(settings,'DYNAMO_DELETE_TABLES',True)
# Default app to be used when model is generated
DYNAMO_DEFAULT_APP = getattr(settings,'DYNAMO_DEFAULT_APP','dynamo')
# Default module to be used when model is generated
DYNAMO_DEFAULT_MODULE = getattr(settings,'DYNAMO_DEFAULT_MODULE','dynamo.models')
# Available standard Field Types (as of Django 1.3)
from django.db import models
FIELD_TYPES = [('AutoField', models.AutoField),
('BooleanField', models.BooleanField),
('CharField', models.CharField),
('CommaSeparatedIntegerField', models.CommaSeparatedIntegerField),
('DateField', models.DateField),
('DateTimeField', models.DateTimeField),
('DecimalField', models.DecimalField),
('EmailField', models.EmailField),
('FileField', models.FileField),
('FloatField', models.FloatField),
('ImageField', models.ImageField),
('NullBooleanField', models.NullBooleanField),
('SlugField', models.SlugField),
('TimeField', models.TimeField),
('URLField', models.URLField),
('XMLField', models.XMLField),
('BigIntegerField', models.BigIntegerField),
('IntegerField', models.IntegerField),
('PositiveIntegerField', models.PositiveIntegerField),
('PositiveSmallIntegerField', models.PositiveSmallIntegerField),
('SmallIntegerField', models.SmallIntegerField),
('ForeignKey', models.ForeignKey),
('OneToOneField', models.OneToOneField),
('ManyToManyField', models.ManyToManyField)
]
DYNAMO_FIELD_TYPES = getattr(settings,'DYNAMO_FIELD_TYPES',FIELD_TYPES)
STANDARD_FIELD_TYPES = ['AutoField',
'BooleanField',
'CharField',
'CommaSeparatedIntegerField',
'DateField',
'DateTimeField',
'DecimalField',
'EmailField',
'FileField',
'FloatField',
'ImageField',
'NullBooleanField',
'SlugField',
'TimeField',
'URLField',
'XMLField',
'BigIntegerField',
'IntegerField',
'PositiveIntegerField',
'PositiveSmallIntegerField',
'SmallIntegerField'
]
DYNAMO_STANDARD_FIELD_TYPES = getattr(settings, 'DYNAMO_STANDARD_FIELD_TYPES', STANDARD_FIELD_TYPES)
INTEGER_FIELD_TYPES = ['BigIntegerField',
'IntegerField',
'PositiveIntegerField',
'PositiveSmallIntegerField',
'SmallIntegerField'
]
DYNAMO_INTEGER_FIELD_TYPES = getattr(settings, 'DYNAMO_INTEGER_FIELD_TYPES',INTEGER_FIELD_TYPES)
STRING_FIELD_TYPES = ['CommaSeparatedIntegerField',
'EmailField',
'FileField',
'ImageField',
'SlugField',
'URLField',
'XMLField',
]
DYNAMO_STRING_FIELD_TYPES = getattr(settings, 'DYNAMO_STRING_FIELD_TYPES',STRING_FIELD_TYPES)
# Available relationship Field Types (as of Django 1.3)
RELATION_FIELD_TYPES =['ForeignKey','OneToOneField','ManyToManyField']
DYNAMO_RELATION_FIELD_TYPES = getattr(settings, 'DYNAMO_RELATION_FIELD_TYPES',RELATION_FIELD_TYPES)
|
Python
| 0
|
@@ -1427,54 +1427,8 @@
d),%0A
- ('XMLField', models.XMLField),%0A
@@ -2602,44 +2602,8 @@
d',%0A
- 'XMLField',%0A
@@ -3582,44 +3582,8 @@
d',%0A
- 'XMLField',%0A
|
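The breakage fixed above generalizes: hard-coding field classes ties this settings module to a single Django version. A defensive variant (a sketch assuming a Django installation is importable, not the project's actual fix) skips any name the installed Django no longer exports, so a removal like XMLField degrades gracefully:

from django.db import models

CANDIDATE_FIELDS = ['AutoField', 'BooleanField', 'CharField', 'URLField', 'XMLField']

# XMLField is gone as of Django 1.4, so it is silently dropped here.
FIELD_TYPES = [(name, getattr(models, name))
               for name in CANDIDATE_FIELDS
               if hasattr(models, name)]
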
49ff951100b35c63d6b04f4c18f8123240fabec1
|
Check for datetime present in event start and end date.
|
economicpy/gcal.py
|
economicpy/gcal.py
|
import gflags
import httplib2
from apiclient.discovery import build
from oauth2client.file import Storage
from oauth2client.client import OAuth2WebServerFlow
from oauth2client.tools import run
class Calendar(object):
def __init__(self, client_id, client_secret, ignore_events, src_path):
self.user_agent = 'economic-py/0.3'
self.ignore_events = ignore_events
FLAGS = gflags.FLAGS
# The client_id and client_secret can be found in Google Developers Console
flow = OAuth2WebServerFlow(
client_id=client_id,
client_secret=client_secret,
scope='https://www.googleapis.com/auth/calendar',
user_agent=self.user_agent)
# To disable the local server feature, uncomment the following line:
# FLAGS.auth_local_webserver = False
# If the Credentials don't exist or are invalid, run through the native client
# flow. The Storage object will ensure that if successful the good
# Credentials will get written back to a file.
storage = Storage(src_path + '/calendar.dat')
credentials = storage.get()
        if credentials is None or credentials.invalid:
credentials = run(flow, storage)
# Create an httplib2.Http object to handle our HTTP requests and authorize it
# with our good Credentials.
http = httplib2.Http()
http = credentials.authorize(http)
# Build a service object for interacting with the API. Visit
# the Google Developers Console
# to get a developerKey for your own application.
self.service = build(serviceName='calendar', version='v3', http=http,
developerKey='notsosecret')
def get_events(self, start_date, end_date):
"""
Get events from calendar between given dates.
"""
page_token = None
while True:
events = self.service.events().list(calendarId='primary', pageToken=page_token, singleEvents=True,
timeMin=start_date, timeMax=end_date).execute()
for event in events['items']:
if event['status'] != "declined" and event['summary'] not in self.ignore_events:
yield {
'start_date': event['start']['dateTime'],
'end_date': event['end']['dateTime'],
'title': event['summary'].encode('utf8')
}
page_token = events.get('nextPageToken')
if not page_token:
break
|
Python
| 0
|
@@ -2259,16 +2259,106 @@
e_events
+ %5C%0A and 'dateTime' in event%5B'start'%5D and 'dateTime' in event%5B'end'%5D
:%0A
|
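The bug being fixed: all-day events in the Google Calendar v3 API carry a 'date' key in their start/end objects instead of 'dateTime', so indexing 'dateTime' unconditionally raises KeyError. The commit skips such events; an alternative sketch (hypothetical, with plain dicts standing in for API responses) falls back to the 'date' key instead:

events = [
    {'start': {'dateTime': '2014-05-01T09:00:00Z'}},  # timed event
    {'start': {'date': '2014-05-01'}},                # all-day event
]
for event in events:
    start = event['start'].get('dateTime') or event['start'].get('date')
    print(start)
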
60039cd74693982ef38808a63366aa1454b50bd1
|
Bump version to 13.3.2
|
recipe_scrapers/__version__.py
|
recipe_scrapers/__version__.py
|
__version__ = "13.3.1"
|
Python
| 0
|
@@ -13,11 +13,11 @@
= %2213.3.
-1
+2
%22%0A
|
6c293419d490f217a520e6c9fc696b39a46a172c
|
Fix excludes
|
froide_campaign/providers/amenity.py
|
froide_campaign/providers/amenity.py
|
from django.template.defaultfilters import slugify
from django_amenities.models import Amenity
from froide.publicbody.models import PublicBody
from froide.georegion.models import GeoRegion
from ..models import InformationObject
from .base import BaseProvider, first
class AmenityProvider(BaseProvider):
CREATE_ALLOWED = True
ADMIN_LEVELS = [
'borough', 'municipality', 'admin_cooperation',
'district', 'state'
]
def get_queryset(self):
iobs = super().get_queryset()
ident_list = iobs.values_list('ident', flat=True)
osm_ids = [int(ident.split('_')[1])
for ident in ident_list if 'custom' not in ident]
return Amenity.objects.filter(
topics__contains=[self.kwargs.get('amenity_topic', '')],
).exclude(name='').exclude(osm_id__in=osm_ids)
def get_ident_list(self, qs):
return [
obj.ident for obj in qs
]
def filter(self, qs, **filter_kwargs):
if filter_kwargs.get('q'):
qs = qs.filter(name__contains=filter_kwargs['q'])
if filter_kwargs.get('requested') is not None:
qs = qs.none()
return qs
def get_by_ident(self, ident):
try:
pk = ident.split('_')[0]
return self.get_queryset().get(id=pk)
except ValueError:
return super().get_by_ident(ident)
def get_provider_item_data(self, obj, foirequests=None, detail=False):
d = {
'ident': obj.ident,
'request_url': self.get_request_url_redirect(obj.ident),
'title': obj.name,
'address': obj.address,
'description': '',
'lat': obj.geo.y,
'lng': obj.geo.x,
'foirequest': None,
'foirequests': [],
}
if foirequests:
d.update({
'foirequest': first(foirequests[obj.ident]),
'foirequests': foirequests[obj.ident]
})
return d
def _get_publicbody(self, amenity):
pbs = PublicBody.objects.all()
if self.kwargs.get('category'):
pbs = pbs.filter(
categories__name=self.kwargs['category'],
)
regions = GeoRegion.objects.filter(
geom__covers=amenity.geo,
).filter(
kind__in=self.ADMIN_LEVELS
).order_by('kind')
pbs = pbs.filter(
regions__in=regions
)
        if len(pbs) == 0:
            return None
        return pbs[0]
def get_request_url_context(self, obj):
return {
'title': obj.name,
'address': obj.address
}
def connect_request(self, ident, sender):
if not sender.public:
return
try:
amenity = self.get_by_ident(ident)
except Amenity.DoesNotExist:
return
context = self.get_request_url_context(amenity)
iobj, created = InformationObject.objects.get_or_create(
campaign=self.campaign,
ident=ident,
defaults=dict(
title=context['title'],
slug=slugify(context['title']),
publicbody=sender.public_body,
geo=amenity.geo,
foirequest=sender
)
)
if not created:
iobj.publicbody = sender.public_body
iobj.save()
iobj.foirequests.add(sender)
|
Python
| 0.000006
|
@@ -1,16 +1,47 @@
+from django.db.models import Q%0A
from django.temp
@@ -814,17 +814,16 @@
c', '')%5D
-,
%0A
@@ -837,16 +837,18 @@
ude(
+Q(
name='')
.exc
@@ -843,24 +843,20 @@
name='')
-.exclude
+ %7C Q
(osm_id_
@@ -867,16 +867,17 @@
osm_ids)
+)
%0A%0A de
|
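The diff collapses two chained exclude() calls into a single exclude over OR-ed Q objects. For simple single-valued lookups the two spellings select the same rows, but a single call keeps the condition in one WHERE clause and avoids the surprises chained exclude() can produce across multi-valued relations. Q trees can be built and inspected without a database (sketch assumes Django is importable):

from django.db.models import Q

q = Q(name='') | Q(osm_id__in=[1, 2, 3])
print(q)  # (OR: ('name', ''), ('osm_id__in', [1, 2, 3]))
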
87a720dc526efe9732fd1b4633e773ef4a11352a
|
Use earliest consultation if legal date is unavailable
|
mainapp/management/commands/fix-sort-date.py
|
mainapp/management/commands/fix-sort-date.py
|
import datetime
from django.core.management.base import BaseCommand
from django.db.models import F
from mainapp.models import Paper, File
class Command(BaseCommand):
    help = "After the initial import, this command guesses the sort_date attribute of papers and files"
def add_arguments(self, parser):
help_str = (
"The date of the first import in the format YYYY-MM-DD. "
            + "All documents/files created up to this day will have their sort_date attribute modified."
)
parser.add_argument("import_date", type=str, help=help_str)
help_str = "If no date can be determined, this will be used as fallback. Should be far in the past."
parser.add_argument("fallback_date", type=str, help=help_str)
def handle(self, *args, **options):
import_date = datetime.datetime.strptime(
options["import_date"] + " 23:59:59", "%Y-%m-%d %H:%M:%S"
)
fallback_date = datetime.datetime.strptime(options["fallback_date"], "%Y-%m-%d")
self.stdout.write("Fixing papers...")
num = Paper.objects.filter(
created__lte=import_date, legal_date__isnull=False
).update(sort_date=F("legal_date"), modified=F("legal_date"))
self.stdout.write("=> Changed records: ", num)
num = Paper.objects.filter(legal_date__isnull=True).update(
sort_date=fallback_date
)
self.stdout.write("=> Not determinable: ", num)
self.stdout.write("Fixing files...")
num = File.objects.filter(
created__lte=import_date, legal_date__isnull=False
).update(sort_date=F("legal_date"), modified=F("legal_date"))
self.stdout.write("=> Changed records: ", num)
num = File.objects.filter(legal_date__isnull=True).update(
sort_date=fallback_date
)
self.stdout.write("=> Not determinable: ", num)
|
Python
| 0
|
@@ -10,16 +10,40 @@
tetime%0A%0A
+from dateutil import tz%0A
from dja
@@ -116,16 +116,39 @@
import F
+, Subquery, OuterRef, Q
%0A%0Afrom m
@@ -179,16 +179,30 @@
er, File
+, Consultation
%0A%0A%0Aclass
@@ -980,32 +980,61 @@
%25M:%25S%22%0A )
+.replace(tzinfo=tz.tzlocal())
%0A fallbac
@@ -1069,16 +1069,29 @@
trptime(
+%0A
options%5B
@@ -1118,16 +1118,54 @@
Y-%25m-%25d%22
+%0A ).replace(tzinfo=tz.tzlocal()
)%0A%0A
@@ -1392,32 +1392,33 @@
lf.stdout.write(
+f
%22=%3E Changed reco
@@ -1413,32 +1413,32 @@
Changed
-records: %22, num)
+papers: %7Bnum%7D%22)%0A
%0A
@@ -1566,32 +1566,33 @@
tdout.write(
+f
%22=%3E Not
determinable
@@ -1579,36 +1579,731 @@
%22=%3E Not
-determinable: %22,
+fixable due to missing legal date: %7Bnum%7D%22)%0A%0A # Use the date of the earliest consultation%0A earliest_consultation = (%0A Consultation.objects.filter(paper=OuterRef(%22pk%22), meeting__isnull=False)%0A .order_by(%22meeting__start%22)%0A .values(%22meeting__start%22)%5B:1%5D%0A )%0A num = (%0A Paper.objects.filter(%0A Q(sort_date=fallback_date) %7C ~Q(sort_date=F(%22legal_date%22))%0A )%0A .annotate(earliest_consultation=Subquery(earliest_consultation))%0A .filter(earliest_consultation__isnull=False)%0A .update(sort_date=F(%22earliest_consultation%22))%0A )%0A self.stdout.write(f%22=%3E Fix by earliest consultation: %7B
num
+%7D%22
)%0A%0A
@@ -2532,24 +2532,25 @@
tdout.write(
+f
%22=%3E Changed
@@ -2553,23 +2553,21 @@
ged
-records: %22,
+files: %7B
num
+%7D%22
)%0A
@@ -2703,16 +2703,17 @@
t.write(
+f
%22=%3E Not
@@ -2730,12 +2730,12 @@
le:
-%22,
+%7B
num
+%7D%22
)%0A
|
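Besides the Subquery-based fallback, the diff attaches the local timezone to both parsed cut-off dates: with USE_TZ enabled Django stores aware datetimes, and comparing them against naive ones raises TypeError. A minimal sketch of that part, assuming python-dateutil is installed:

from datetime import datetime
from dateutil import tz

naive = datetime.strptime('2020-01-31 23:59:59', '%Y-%m-%d %H:%M:%S')
aware = naive.replace(tzinfo=tz.tzlocal())
print(aware.isoformat())  # e.g. 2020-01-31T23:59:59+01:00
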
f243a972c320292b68923a37b386b1681eee280c
|
fix for pull_request check
|
fuelweb_test/tests/base_test_case.py
|
fuelweb_test/tests/base_test_case.py
|
# Copyright 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from devops.helpers.helpers import SSHClient
from proboscis import test, SkipTest
from fuelweb_test.models.environment import EnvironmentModel
from fuelweb_test.helpers.decorators import debug
from fuelweb_test.settings import *
logging.basicConfig(
format='%(asctime)s - %(levelname)s %(filename)s:'
'%(lineno)d -- %(message)s',
level=logging.DEBUG
)
logger = logging.getLogger(__name__)
logwrap = debug(logger)
class TestBasic(object):
"""Basic test case class for all system tests.
Initializes EnvironmentModel and FuelWebModel.
"""
def __init__(self):
self.env = EnvironmentModel()
self.fuel_web = self.env.fuel_web
def check_run(self, snapshot_name):
"""Checks if run of current test is required.
:param snapshot_name: Name of the snapshot the function should make
:type snapshot_name: str
:raises: SkipTest
"""
if snapshot_name != "" and snapshot_name is not None:
if self.env.get_virtual_environment().has_snapshot(snapshot_name):
raise SkipTest()
@test
class SetupEnvironment(TestBasic):
@test(groups=["setup"])
def setup_master(self):
"""Create environment and set up master node
Snapshot: empty
"""
self.check_run("empty")
self.env.setup_environment()
self.env.make_snapshot("empty")
@test(depends_on=[setup_master])
def prepare_release(self):
"""Prepare master node
Scenario:
1. Revert snapshot "empty"
2. Download the release if needed. Uploads custom manifest.
Snapshot: ready
"""
self.check_run("ready")
self.env.revert_snapshot("empty")
if OPENSTACK_RELEASE == OPENSTACK_RELEASE_REDHAT:
self.fuel_web.update_redhat_credentials()
self.fuel_web.assert_release_state(
OPENSTACK_RELEASE_REDHAT,
state='available'
)
try:
if UPLOAD_MANIFESTS:
logging.info("Uploading new manifests from %s" %
UPLOAD_MANIFESTS_PATH)
remote = SSHClient(self.env.get_admin_node_ip(),
username='root',
password='r00tme')
remote.execute('rm -rf /etc/puppet/modules/*')
remote.upload(UPLOAD_MANIFESTS_PATH, '/etc/puppet/modules/')
logging.info("Copying new site.pp from %s" %
SITEPP_FOR_UPLOAD)
remote.execute("cp %s /etc/puppet/manifests" %
SITEPP_FOR_UPLOAD)
except:
logging.error("Could not upload manifests")
raise
self.env.make_snapshot("ready")
@test(depends_on=[prepare_release])
def prepare_slaves_3(self):
"""Bootstrap 3 slave nodes
Scenario:
1. Revert snapshot "ready"
2. Start 3 slave nodes
Snapshot: ready_with_3_slaves
"""
self.check_run("ready_with_3_slaves")
self.env.revert_snapshot("ready")
self.env.bootstrap_nodes(self.env.nodes().slaves[:3])
self.env.make_snapshot("ready_with_3_slaves")
@test(depends_on=[prepare_release])
def prepare_slaves_5(self):
"""Bootstrap 5 slave nodes
Scenario:
1. Revert snapshot "ready"
2. Start 5 slave nodes
Snapshot: ready_with_5_slaves
"""
self.check_run("ready_with_5_slaves")
self.env.revert_snapshot("ready")
self.env.bootstrap_nodes(self.env.nodes().slaves[:5])
self.env.make_snapshot("ready_with_5_slaves")
|
Python
| 0.000001
|
@@ -1570,43 +1570,32 @@
ame
-!= %22%22 and snapshot_name is not None
+and not UPLOAD_MANIFESTS
:%0A
|
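The one-line diff rewrites the skip condition so that existing snapshots are ignored whenever custom manifests are being uploaded, since the environment must then be rebuilt. A standalone sketch of the guard, with unittest's SkipTest standing in for the proboscis one:

from unittest import SkipTest

def check_run(snapshot_name, has_snapshot, upload_manifests):
    # Skip only when a reusable snapshot exists and no custom manifests
    # need to be applied to a freshly built environment.
    if snapshot_name and not upload_manifests:
        if has_snapshot(snapshot_name):
            raise SkipTest()

check_run('ready', lambda name: False, upload_manifests=False)  # does not skip
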
df255b635517ed1dbcbbf06841a7b7971edd26b5
|
fix import of User
|
registration_couchdb/models.py
|
registration_couchdb/models.py
|
import datetime
import random
import re
from django.conf import settings
from django.template.loader import render_to_string
from django.utils.hashcompat import sha_constructor
from couchdbkit.ext.django.schema import *
from django_couchdb_utils.auth import User
SHA1_RE = re.compile('^[a-f0-9]{40}$')
def activate_user(activation_key):
"""
Validate an activation key and activate the corresponding
``User`` if valid.
If the key is valid and has not expired, return the ``User``
after activating.
If the key is not valid or has expired, return ``False``.
If the key is valid but the ``User`` is already active,
return ``False``.
To prevent reactivation of an account which has been
deactivated by site administrators, the activation key is
reset to the string constant ``RegistrationProfile.ACTIVATED``
after successful activation.
"""
# Make sure the key we're trying conforms to the pattern of a
# SHA1 hash; if it doesn't, no point trying to look it up in
# the database.
if not SHA1_RE.search(activation_key):
return False
user = User.get_by_key(activation_key)
if not user.activation_key_expired():
del user.activation_key
user.is_active = True
user.save()
return user
def create_inactive_user(username, email, password,
site, send_email=True):
"""
Create a new, inactive ``User``, generate a
``RegistrationProfile`` and email its activation key to the
``User``, returning the new ``User``.
By default, an activation email will be sent to the new
user. To disable this, pass ``send_email=False``.
"""
new_user = User()
new_user.username = username
new_user.email = email
new_user.set_password(password)
new_user.is_active = False
create_profile(new_user)
new_user.save()
if send_email:
new_user.send_activation_email(site)
return new_user
def create_profile(user):
"""
Create a ``RegistrationProfile`` for a given
``User``, and return the ``RegistrationProfile``.
The activation key for the ``RegistrationProfile`` will be a
SHA1 hash, generated from a combination of the ``User``'s
username and a random salt.
"""
salt = sha_constructor(str(random.random())).hexdigest()[:5]
username = user.username
if isinstance(username, unicode):
username = username.encode('utf-8')
user.activation_key = sha_constructor(salt+username).hexdigest()
def delete_expired_users():
"""
Remove expired instances of ``RegistrationProfile`` and their
associated ``User``s.
Accounts to be deleted are identified by searching for
instances of ``RegistrationProfile`` with expired activation
keys, and then checking to see if their associated ``User``
instances have the field ``is_active`` set to ``False``; any
``User`` who is both inactive and has an expired activation
key will be deleted.
It is recommended that this method be executed regularly as
part of your routine site maintenance; this application
provides a custom management command which will call this
method, accessible as ``manage.py cleanupregistration``.
Regularly clearing out accounts which have never been
activated serves two useful purposes:
1. It alleviates the ocasional need to reset a
``RegistrationProfile`` and/or re-send an activation email
when a user does not receive or does not act upon the
initial activation email; since the account will be
deleted, the user will be able to simply re-register and
receive a new activation key.
2. It prevents the possibility of a malicious user registering
one or more accounts and never activating them (thus
denying the use of those usernames to anyone else); since
those accounts will be deleted, the usernames will become
available for use again.
If you have a troublesome ``User`` and wish to disable their
account while keeping it in the database, simply delete the
associated ``RegistrationProfile``; an inactive ``User`` which
does not have an associated ``RegistrationProfile`` will not
be deleted.
"""
for user in User.all_users():
if user.activation_key_expired():
if not user.is_active:
user.delete()
def get_migration_user_data(user):
"""
Returns the data that will be merged into the User object
when migrating an ORM-based User to CouchDB
"""
try:
reg_profile = RegistrationProfile.objects.get(user=user)
if reg_profile.activation_key != RegistrationProfile.ACTIVATED and \
not user.is_active:
return {'activation_key': reg_profile.activation_key}
except:
return {}
class User(User):
"""
A simple profile which stores an activation key for use during
user account registration.
Generally, you will not want to interact directly with instances
of this model; the provided manager includes methods
for creating and activating new accounts, as well as for cleaning
out accounts which have never been activated.
While it is possible to use this model as the value of the
``AUTH_PROFILE_MODULE`` setting, it's not recommended that you do
so. This model's sole purpose is to store data temporarily during
account registration and activation.
"""
activation_key = StringProperty()
class Meta:
app_label = 'registration_couchdb'
@classmethod
def get_by_key(cls, key):
r = cls.view('registration_couchdb/users_by_activationkey',
key = key,
include_docs = True,
)
return r.first() if r else None
def activation_key_expired(self):
"""
Determine whether this ``RegistrationProfile``'s activation
key has expired, returning a boolean -- ``True`` if the key
has expired.
Key expiration is determined by a two-step process:
1. If the user has already activated, the key will have been
reset to the string constant ``ACTIVATED``. Re-activating
is not permitted, and so this method returns ``True`` in
this case.
2. Otherwise, the date the user signed up is incremented by
the number of days specified in the setting
``ACCOUNT_ACTIVATION_DAYS`` (which should be the number of
days after signup during which a user is allowed to
activate their account); if the result is less than or
equal to the current date, the key has expired and this
method returns ``True``.
"""
expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)
return bool(getattr(self, 'activation_key', False) and \
(self.date_joined + expiration_date <= datetime.datetime.now()))
activation_key_expired.boolean = True
def send_activation_email(self, site):
"""
Send an activation email to the user associated with this
``RegistrationProfile``.
The activation email will make use of two templates:
``registration/activation_email_subject.txt``
This template will be used for the subject line of the
email. Because it is used as the subject line of an email,
this template's output **must** be only a single line of
text; output longer than one line will be forcibly joined
into only a single line.
``registration/activation_email.txt``
This template will be used for the body of the email.
These templates will each receive the following context
variables:
``activation_key``
The activation key for the new account.
``expiration_days``
The number of days remaining during which the account may
be activated.
``site``
An object representing the site on which the user
registered; depending on whether ``django.contrib.sites``
is installed, this may be an instance of either
``django.contrib.sites.models.Site`` (if the sites
application is installed) or
``django.contrib.sites.models.RequestSite`` (if
not). Consult the documentation for the Django sites
framework for details regarding these objects' interfaces.
"""
ctx_dict = {'activation_key': self.activation_key,
'expiration_days': settings.ACCOUNT_ACTIVATION_DAYS,
'site': site}
subject = render_to_string('registration/activation_email_subject.txt',
ctx_dict)
# Email subject *must not* contain newlines
subject = ''.join(subject.splitlines())
message = render_to_string('registration/activation_email.txt',
ctx_dict)
self.email_user(subject, message, settings.DEFAULT_FROM_EMAIL)
|
Python
| 0.000006
|
@@ -245,16 +245,23 @@
ils.auth
+.models
import
|
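For reference, sha_constructor is Django's pre-hashlib compatibility shim, so the key derivation in create_profile above is plain SHA-1 over a short random salt plus the username. A Python 3 equivalent sketch:

import hashlib
import random

salt = hashlib.sha1(str(random.random()).encode('utf-8')).hexdigest()[:5]
username = 'alice'
activation_key = hashlib.sha1((salt + username).encode('utf-8')).hexdigest()
print(activation_key)  # 40 hex characters, i.e. it matches SHA1_RE above
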
5320f9bd74aeab70849cf288d5da4a94bd98cccd
|
store labels in a separate text field
|
load_corpus.py
|
load_corpus.py
|
#!/usr/bin/env python
from elasticsearch import Elasticsearch
from elasticsearch.client import IndicesClient
import os
es = Elasticsearch()
index = IndicesClient(es)
if index.exists('yso'):
index.delete('yso')
indexconf = {
'mappings': {
'concept': {
'properties': {
'text': {
'type': 'string',
'analyzer': 'finnish'
},
'boost': {
'type': 'double'
}
}
}
}
}
index.create(index='yso', body=indexconf)
files = os.listdir('corpus')
for file in files:
if not file.endswith('.fi'):
continue
f = open('corpus/%s' % file, 'r')
uri, label = f.readline().strip().split(' ', 1)
print file, uri, label
cid = uri.split('p')[-1]
text = "".join(f.readlines())
body = {'uri': uri, 'label': label, 'text': text, 'boost': 1}
es.index(index='yso', doc_type='concept', id=cid, body=body)
f.close()
|
Python
| 0.000001
|
@@ -288,24 +288,151 @@
perties': %7B%0A
+ 'labels': %7B%0A 'type': 'string',%0A 'analyzer': 'finnish'%0A %7D,%0A
@@ -966,14 +966,63 @@
-text =
+labels = f.readline().strip()%0A text = labels + %22 %22 +
%22%22.
@@ -1080,16 +1080,34 @@
: label,
+ 'labels': labels,
'text':
|
0c5310374e7eaeb39fcc3c184b60afa096abf364
|
Add missing methods from ItemWrapper
|
gaphor/core/modeling/presentation.py
|
gaphor/core/modeling/presentation.py
|
"""
Base code for presentation elements
"""
from __future__ import annotations
from typing import (
TYPE_CHECKING,
Callable,
Dict,
Generator,
Generic,
List,
Optional,
TypeVar,
)
from gaphor.core.modeling import Element
from gaphor.core.modeling.properties import association, attribute, relation_one
from gaphor.core.styling import CompiledStyleSheet
if TYPE_CHECKING:
from gaphas.canvas import Canvas # noqa
from gaphas.connector import Handle # noqa
from gaphas.item import Item # noqa
from gaphas.matrix import Matrix # noqa
S = TypeVar("S", bound=Element)
class ItemWrapper:
def __init__(self, item: Item):
self.item = item
self.canvas = item.canvas
def local_name(self) -> str:
return type(self.item).__name__.lower()
def parent(self) -> Optional[ItemWrapper]:
parent = self.canvas.get_parent(self.item)
return ItemWrapper(parent) if parent else None
class StyleSheet(Element):
def __init__(self, id=None, model=None):
super().__init__(id, model)
self._watcher = self.watcher()
self._watcher.watch("styleSheet", self.update_style_sheet)
self._watcher.subscribe_all()
self._compiled_style_sheet = CompiledStyleSheet("")
styleSheet: attribute[str] = attribute("styleSheet", str)
def postload(self):
super().postload()
if self.styleSheet:
self.compile_style_sheet(self.styleSheet)
def update_style_sheet(self, event):
self.compile_style_sheet(event.new_value)
def compile_style_sheet(self, css: str) -> None:
self._compiled_style_sheet = CompiledStyleSheet(css)
def item_style(self, item: Item) -> Dict[str, object]:
return self._compiled_style_sheet.match(ItemWrapper(item))
def unlink(self):
self._watcher.unsubscribe_all()
super().unlink()
class Presentation(Element, Generic[S]):
"""
This presentation is used to link the behaviors of `gaphor.core.modeling` and `gaphas.Item`.
"""
def __init__(self, id=None, model=None):
super().__init__(id, model)
def update(event):
self.request_update()
self._watcher = self.watcher(default_handler=update)
self.watch("subject")
subject: relation_one[S] = association(
"subject", Element, upper=1, opposite="presentation"
)
@property
def styleSheet(self) -> Optional[StyleSheet]:
return next(self.model.select(StyleSheet), None,) # type: ignore[arg-type]
@property
def style(self):
sheet = self.styleSheet
return sheet and sheet.item_style(self) or {}
handles: Callable[[Presentation], List[Handle]]
request_update: Callable[[Presentation], None]
canvas: Optional[Canvas]
matrix: Matrix
def watch(self, path, handler=None):
"""
Watch a certain path of elements starting with the DiagramItem.
The handler is optional and will default to a simple
self.request_update().
Watches should be set in the constructor, so they can be registered
and unregistered in one shot.
This interface is fluent(returns self).
"""
self._watcher.watch(path, handler)
return self
def subscribe_all(self):
"""
Subscribe all watched paths, as defined through `watch()`.
"""
self._watcher.subscribe_all()
def unsubscribe_all(self):
"""
Unsubscribe all watched paths, as defined through `watch()`.
"""
self._watcher.unsubscribe_all()
def unlink(self):
"""
Remove the item from the canvas and set subject to None.
"""
if self.canvas:
self.canvas.remove(self)
super().unlink()
Element.presentation = association(
"presentation", Presentation, composite=True, opposite="subject"
)
|
Python
| 0
|
@@ -190,16 +190,30 @@
tional,%0A
+ Sequence,%0A
Type
@@ -977,16 +977,135 @@
e None%0A%0A
+ def attribute(self, name: str) -%3E str:%0A return %22%22%0A%0A def state(self) -%3E Sequence%5Bstr%5D:%0A return ()%0A%0A
%0Aclass S
|
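The diff fills out ItemWrapper to the full matching protocol the styling code expects: local_name, parent, attribute and state. A self-contained sketch of that protocol (FakeItem and the hard-coded parent are stand-ins; the real wrapper delegates to a gaphas canvas):

from typing import Optional, Sequence

class FakeItem:
    pass

class ItemWrapper:
    def __init__(self, item, parent=None):
        self.item = item
        self._parent = parent

    def local_name(self) -> str:
        return type(self.item).__name__.lower()

    def parent(self) -> Optional["ItemWrapper"]:
        return self._parent

    def attribute(self, name: str) -> str:
        return ""  # no attributes exposed yet

    def state(self) -> Sequence[str]:
        return ()  # no states (hover, active, ...) exposed yet

print(ItemWrapper(FakeItem()).local_name())  # -> fakeitem
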
4a719e275c3639b2a2186711d9d616ce9435d614
|
Update agent for environment
|
reinforcement-learning/play.py
|
reinforcement-learning/play.py
|
"""This is the agent which currently takes the action with highest immediate reward."""
import pandas as pd
import numpy as np
import env
actions = ["left", "right", "stay"]
left = {x: [0]*(env.screen_width - 1) for x in range(2)}
right = {x: [0]*(env.screen_width - 1) for x in range(2)}
table = pd.DataFrame(left)
def max(list):
max = 0
index = 0
for item in list:
item += 1
if item > max:
max = item
return item
"""if np.random.uniform() > epsilon or all_zero:
action = np.random.choice(actions)
else:
action = None"""
for episode in range(10):
env.reset()
episode_reward = 0
for t in range(100):
episode_reward += env.actual_reward
if env.done:
print(
"Episode %d finished after %d timesteps, with reward %d"
% ((episode + 1), (t + 1), episode_reward))
break
max_action = 0
index = -1
for item in actions:
if env.create_reward(item) > max_action:
max_action = env.create_reward(item)
action = [item, index]
else:
index += 1
print(action[0])
episode_reward += env.create_reward(action[0])
env.action(action[0])
env.render()
|
Python
| 0
|
@@ -93,491 +93,28 @@
ort
-pandas as pd%0Aimport numpy as np%0Aimport env%0A%0Aactions = %5B%22left%22, %22right%22, %22stay%22%5D%0A%0Aleft = %7Bx: %5B0%5D*(env.screen_width - 1) for x in range(2)%7D%0Aright = %7Bx: %5B0%5D*(env.screen_width - 1) for x in range(2)%7D%0Atable = pd.DataFrame(left)%0A%0Adef max(list):%0A max = 0%0A index = 0%0A for item in list:%0A item += 1%0A if item %3E max:%0A max = item%0A return item%0A%0A%0A%22%22%22if np.random.uniform() %3E epsilon or all_zero:%0A action = np.random.choice(actions)%0Aelse:%0A action = None%22%22%22
+env%0Aenv.make(%22text%22)
%0A%0Afo
@@ -478,16 +478,16 @@
ex = -1%0A
-
@@ -498,16 +498,20 @@
item in
+env.
actions:
|
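The commented-out epsilon/np.random block in the removed code hints at where the agent was headed: epsilon-greedy selection instead of pure greedy. A minimal standalone sketch of that policy (hypothetical, not part of this repository):

import random

def choose_action(q_values, actions, epsilon=0.1):
    # With probability epsilon explore at random; otherwise exploit the
    # action with the highest estimated value.
    if random.random() < epsilon:
        return random.choice(actions)
    return max(actions, key=lambda a: q_values.get(a, 0.0))

print(choose_action({'left': 0.2, 'right': 0.5}, ['left', 'right', 'stay']))
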
2a2ab3f758facfafe3604325ecec08cfcfa2b6e9
|
Update tests.py
|
image_space_app/tests.py
|
image_space_app/tests.py
|
import datetime
import unittest
from django.utils import timezone
from django.test import TestCase
class ImageSpaceTests(unittest.TestCase):
def setUp(self):
self.url="http://localhost:8000"
self.email="John@doe.com"
self.password="password"
def tearDown(self):
del self.url
def test1(self):
self.assertEqual(self.url,"http://localhost:8000")
        print "After the user inputs the right URL, the home page of the image space opens up"
def test2(self):
self.assertEqual(self.email,"John@doe.com")
self.assertEqual(self.password,"password")
print "If the user enters the correct email and password, log in to the profile of John Doe"
def test3(self):
"""
url is not correct
"""
self.assertNotEqual(self.url,"google.com")
print "If the user did not enter the right URL, URL is not correct"
def test4(self):
"""
Invalid Email/Password
"""
self.assertNotEqual(self.email,"")
self.assertNotEqual(self.password,"")
print "If the user either leaves email or password blank, Invalid Email/Password"
def test5(self):
self.assertNotEqual(self.email,"Johndoe")
self.assertNotEqual(self.password,"pass")
        print "If the user enters the wrong email or password or both, Invalid Email/Password"
def test6(self):
"""
sign up page
"""
self.assertNotEqual(self.email,"")
self.assertNotEqual(self.password,"")
print "At the sign up page if the user leaves either email or password blank then prompt: This field is required"
|
Python
| 0.000001
|
@@ -1447,16 +1447,19 @@
+At
sign up
@@ -1686,16 +1686,226 @@
quired%22%0A
+ self.assertNotEqual(self.email,%22Johndoe%22)%0A print %22At the sign up page if the user enter email address to sign up without @something.com then prompt error to enter the valid email address with @%22%0A
%0A%0A%0A%0A
|
74a7e32f9ca2376fbc2e987f3bedc75570dc8b1d
|
Use name instead of uid in validation error message when trying to pair the same parent a second time
|
kpi/serializers/v2/paired_data.py
|
kpi/serializers/v2/paired_data.py
|
# coding: utf-8
import os
import re
from django.utils.translation import gettext as _
from formpack import FormPack
from rest_framework import serializers
from rest_framework.reverse import reverse
from kpi.constants import (
ASSET_TYPE_SURVEY,
PERM_PARTIAL_SUBMISSIONS,
PERM_VIEW_SUBMISSIONS,
)
from kpi.fields import (
RelativePrefixHyperlinkedRelatedField,
)
from kpi.models import Asset, AssetFile, PairedData
class PairedDataSerializer(serializers.Serializer):
parent = RelativePrefixHyperlinkedRelatedField(
lookup_field='uid',
queryset=Asset.objects.filter(asset_type=ASSET_TYPE_SURVEY),
view_name='asset-detail',
required=True,
style={'base_template': 'input.html'} # Render as a simple text box
)
fields = serializers.ListField(child=serializers.CharField(), required=False)
filename = serializers.CharField()
def create(self, validated_data):
return self.__save(validated_data)
def __get_download_url(self, instance: 'kpi.models.PairedData') -> str:
request = self.context['request']
asset_uid = instance.asset.uid
paired_data_uid = instance.paired_data_uid
return reverse(
'paired-data-detail',
args=[asset_uid, paired_data_uid],
request=request,
)
def __get_parent_asset_url(self, instance: 'kpi.models.PairedData') -> str:
request = self.context['request']
return reverse('asset-detail',
args=[instance.parent_uid],
request=request)
def __save(self, validated_data):
asset = self.context['asset']
parent = validated_data.pop('parent', None)
if not self.instance:
self.instance = PairedData(
parent_uid=parent.uid,
asset=asset,
**validated_data
)
else:
self.instance.update(validated_data)
self.instance.save()
return self.instance
def to_representation(self, instance):
return {
'parent': self.__get_parent_asset_url(instance),
'fields': instance.fields,
'filename': instance.filename,
'url': self.__get_download_url(instance),
}
def validate(self, attrs: dict) -> dict:
# Ensure `parent` has been validated before validating `filename`
# and `fields`. If 'parent' is not present in `attrs`, it should be
# only on update. (`RelativePrefixHyperlinkedRelatedField` validator
# enforces its requirement)
try:
attrs['parent']
except KeyError:
attrs['parent'] = Asset.objects.get(uid=self.instance.parent_uid)
self._validate_filename(attrs)
self._validate_fields(attrs)
return attrs
def _validate_fields(self, attrs: dict):
if 'fields' not in attrs:
# if paired data is created and `fields` does not exist in `POST`
# payload, let's initialize it as an empty list
if not self.instance:
attrs['fields'] = []
return
parent = attrs['parent']
schema = parent.latest_deployed_version.to_formpack_schema()
form_pack = FormPack(versions=schema)
valid_fields = [
f.path for f in form_pack.get_fields_for_versions()
]
parent_fields = parent.data_sharing.get('fields') or valid_fields
posted_fields = attrs['fields']
unknown_fields = set(posted_fields) - set(parent_fields)
if unknown_fields and parent_fields:
raise serializers.ValidationError(
{
'fields': _(
'Some fields are invalid, '
'choices are: `{parent_fields}`'
).format(parent_fields='`,`'.join(parent_fields))
}
)
attrs['fields'] = posted_fields
def _validate_filename(self, attrs: dict):
if self.instance and 'filename' not in attrs:
return
asset = self.context['asset']
parent = attrs['parent']
filename, extension = os.path.splitext(attrs['filename'])
if not re.match(r'^[\w\d-]+$', filename):
raise serializers.ValidationError(
{
'filename': _('Only letters, numbers and `-` are allowed')
}
)
if extension.lower() != '.xml' and extension != '':
raise serializers.ValidationError(
{
'filename': _('Extension must be `xml`')
}
)
# force XML extension
basename = filename
filename = f'{filename}.xml'
# Validate uniqueness of `filename`
# It cannot be used by any other asset files
media_filenames = (
AssetFile.objects.values_list('metadata__filename', flat=True)
.filter(asset_id=asset.pk)
.exclude(file_type=AssetFile.PAIRED_DATA)
)
paired_data_filenames = {}
for p_uid, values in asset.paired_data.items():
paired_data_filenames[p_uid] = values['filename']
pd_filename = paired_data_filenames.get(parent.uid)
is_new = pd_filename is None
if (
filename in media_filenames
or (
filename in paired_data_filenames.values()
and (is_new or not is_new and pd_filename != filename)
)
):
raise serializers.ValidationError(
{
'filename': _(
'`{basename}` is already used'
).format(basename=basename)
}
)
attrs['filename'] = filename
def validate_parent(self, parent):
asset = self.context['asset']
if self.instance and self.instance.parent_uid != parent.uid:
raise serializers.ValidationError(
_('Parent cannot be changed')
)
# Parent data sharing must be enabled before going further
if not parent.data_sharing.get('enabled'):
raise serializers.ValidationError(_(
'Data sharing for `{parent_uid}` is not enabled'
).format(parent_uid=parent.uid))
# Validate whether owner of the asset is allowed to link their form
# with the parent. Validation is made with owner of the asset instead of
# `request.user`
required_perms = [
PERM_PARTIAL_SUBMISSIONS,
PERM_VIEW_SUBMISSIONS,
]
if not parent.has_perms(asset.owner, required_perms):
raise serializers.ValidationError(_(
'Pairing data with `{parent_uid}` is not allowed'
).format(parent_uid=parent.uid))
if not self.instance and parent.uid in asset.paired_data:
raise serializers.ValidationError(_(
'Parent `{parent_uid}` is already paired'
).format(parent_uid=parent.uid))
return parent
def update(self, instance, validated_data):
return self.__save(validated_data)
|
Python
| 0
|
@@ -7015,28 +7015,24 @@
ent %60%7Bparent
-_uid
%7D%60 is alread
@@ -7064,36 +7064,32 @@
ormat(parent
-_uid
=parent.
uid))%0A%0A
@@ -7068,35 +7068,36 @@
t(parent=parent.
-uid
+name
))%0A%0A retu
|
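The filename rules enforced by _validate_filename reduce to a small reusable check: a [\w\d-]+ basename, an optional .xml extension in any case, and a forced .xml suffix on the result. A standalone sketch with the same regex and extension rule:

import os
import re

def validate_filename(raw):
    filename, extension = os.path.splitext(raw)
    if not re.match(r'^[\w\d-]+$', filename):
        raise ValueError('Only letters, numbers and `-` are allowed')
    if extension.lower() not in ('', '.xml'):
        raise ValueError('Extension must be `xml`')
    return filename + '.xml'  # force the XML extension

print(validate_filename('my-data'))      # my-data.xml
print(validate_filename('my-data.XML'))  # my-data.xml
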
7fb829cf17b8274ca67f98356e2d47abedc2df5b
|
Add type information to component registry
|
gaphor/services/componentregistry.py
|
gaphor/services/componentregistry.py
|
"""
A registry for components (e.g. services) and event handling.
"""
from typing import Set, Tuple
from gaphor.abc import Service
from gaphor.application import ComponentLookupError
class ComponentRegistry(Service):
"""
The ComponentRegistry provides a home for application wide components.
"""
def __init__(self):
self._comp: Set[Tuple[object, str]] = set()
def shutdown(self):
pass
def get_service(self, name):
"""Obtain a service used by Gaphor by name.
E.g. service("element_factory")
"""
return self.get(Service, name)
def register(self, component, name):
self._comp.add((component, name))
def unregister(self, component):
        self._comp = {(c, n) for c, n in self._comp if c is not component}
def get(self, base, name):
found = {(c, n) for c, n in self._comp if isinstance(c, base) and n == name}
if len(found) > 1:
raise ComponentLookupError(
f"More than one component matches {base}+{name}: {found}"
)
if len(found) == 0:
raise ComponentLookupError(
f"Component with type {base} and name {name} is not registered"
)
return next(iter(found))[0]
def all(self, base):
return ((c, n) for c, n in self._comp if isinstance(c, base))
|
Python
| 0
|
@@ -87,18 +87,43 @@
ort
-Set, Tuple
+Iterator, Set, Tuple, Type, TypeVar
%0Afro
@@ -204,16 +204,50 @@
Error%0A%0A%0A
+T = TypeVar(%22T%22, bound=Service)%0A%0A%0A
class Co
@@ -382,24 +382,32 @@
init__(self)
+ -%3E None
:%0A se
@@ -471,16 +471,24 @@
wn(self)
+ -%3E None
:%0A
@@ -521,25 +521,41 @@
e(self, name
-)
+: str) -%3E Service
:%0A %22%22
@@ -686,16 +686,38 @@
e, name)
+ # type: ignore%5Bmisc%5D
%0A%0A de
@@ -742,22 +742,35 @@
omponent
+: object
, name
+: str
):%0A
@@ -841,16 +841,24 @@
omponent
+: object
):%0A
@@ -950,23 +950,42 @@
lf, base
-, name)
+: Type%5BT%5D, name: str) -%3E T
:%0A
@@ -1439,17 +1439,53 @@
lf, base
-)
+: Type%5BT%5D) -%3E Iterator%5BTuple%5BT, str%5D%5D
:%0A
|
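The heart of the diff is the bounded TypeVar: get(base: Type[T], name) returns T, so callers get back the concrete type they asked for without casting. A reduced, type-checkable sketch of that pattern (registry internals trimmed to the minimum):

from typing import Set, Tuple, Type, TypeVar

class Service:
    pass

T = TypeVar("T", bound=Service)

class Registry:
    def __init__(self) -> None:
        self._comp: Set[Tuple[Service, str]] = set()

    def register(self, component: Service, name: str) -> None:
        self._comp.add((component, name))

    def get(self, base: Type[T], name: str) -> T:
        # isinstance() against base narrows the element type to T.
        found = {c for c, n in self._comp if isinstance(c, base) and n == name}
        return next(iter(found))  # lookup-error handling elided

class EventManager(Service):
    pass

reg = Registry()
reg.register(EventManager(), "event_manager")
em = reg.get(EventManager, "event_manager")  # inferred as EventManager
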
907165cf323d2492ee2fc2f837a0aff2fec8ef77
|
Update utils.py
|
banpei/utils.py
|
banpei/utils.py
|
import numpy as np
def power_method(A, iter_num=1):
"""
Calculate the first singular vector/value of a target matrix based on the power method.
Parameters
----------
A : numpy array
Target matrix
iter_num : int
Number of iterations
Returns
-------
u : numpy array
first left singular vector of A
s : float
first singular value of A
v : numpy array
first right singular vector of A
"""
# set initial vector q
q = np.random.normal(size=A.shape[1])
q = q / np.linalg.norm(q)
for i in range(iter_num):
q = np.dot(np.dot(A.T, A), q)
v = q / np.linalg.norm(q)
Av = np.dot(A, v)
s = np.linalg.norm(Av)
u = Av / s
return u, s, v
|
Python
| 0
|
@@ -759,8 +759,1693 @@
u, s, v%0A
+%0A%0Adef _rolling_window(a, window):%0A %22%22%22%0A Usage:%0A a = np.random.rand(30, 5)%0A for 2d array:%0A roll aling axis=0: rolling_window(a.T, 3).transpose(1, 2, 0)%0A roll along axis=1: rolling_window(a, 3).transpose(1, 0, 2)%0A for 3d array:%0A roll along height(axis=0): rolling_window(a.transpose(2, 1, 0), 3).transpose(2, 3, 1, 0)%0A roll along width(axis=1): rolling_window(a, 3).transpose(2, 0, 1, 3)%0A roll along depth(axis=2): rolling_window(a.transpose(0, 2, 1), 3).transpose(3, 0, 2, 1)%0A %22%22%22%0A shape = a.shape%5B:-1%5D + (a.shape%5B-1%5D - window + 1, window)%0A strides = a.strides + (a.strides%5B-1%5D,)%0A return np.lib.stride_tricks.as_strided(a, shape=shape, strides=strides)%0A%0A%0Adef rolling_window(arr, window, axis=0):%0A if arr.ndim == 1:%0A return _rolling_window(arr, window)%0A elif arr.ndim == 2:%0A if axis == 0:%0A return _rolling_window(arr.T, window).transpose(1, 2, 0)%0A elif axis == 1:%0A return _rolling_window(arr, window).transpose(1, 0, 2)%0A else:%0A raise Exception('AxisError: axis %7B%7D is out of bounds for array of dimension %7B%7D'.format(axis, arr.ndim))%0A elif arr.ndim == 3:%0A if axis == 0:%0A return _rolling_window(arr.transpose(0, 2, 1), window).transpose(3, 0, 2, 1)%0A elif axis == 1:%0A return _rolling_window(arr, window).transpose(2, 0, 1, 3)%0A elif axis == 2:%0A return _rolling_window(arr.transpose(2, 1, 0), window).transpose(2, 3, 1, 0)%0A else:%0A raise Exception('AxisError: axis %7B%7D is out of bounds for array of dimension %7B%7D'.format(axis, arr.ndim))%0A else:%0A return _rolling_window(arr, window)%0A
|
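The added helper is built on numpy stride tricks: as_strided reinterprets the input buffer with one extra trailing window axis, so every window is a view and nothing is copied. A tiny demo of the 1-D core:

import numpy as np

def rolling_window(a, window):
    # Each output row is a length-`window` view into `a`; no data is copied.
    shape = a.shape[:-1] + (a.shape[-1] - window + 1, window)
    strides = a.strides + (a.strides[-1],)
    return np.lib.stride_tricks.as_strided(a, shape=shape, strides=strides)

print(rolling_window(np.arange(6), 3))
# [[0 1 2]
#  [1 2 3]
#  [2 3 4]
#  [3 4 5]]
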
c793401befa1efed0b5ad1eb77809c23f6855372
|
Fix ES thread mapping.
|
inbox/search/mappings.py
|
inbox/search/mappings.py
|
# TODO[k]: participants as nested, tags too.
THREAD_MAPPING = {
'properties': {
'namespace_id': {'type': 'string'},
'tags': {'type': 'string'},
'last_message_timestamp': {'type': 'date', 'format': 'dateOptionalTime'},
'object': {'type': 'string'},
'message_ids': {'type': 'string'},
'snippet': {'type': 'string'},
'participants': {'type': 'string'},
'first_message_timestamp': {'type': 'date', 'format': 'dateOptionalTime'},
'id': {'type': 'string'},
'subject': {'type': 'string'}
}
}
# TODO[k]:
# from, to, cc, bcc as nested.
# date as {'type': 'date', 'format': 'dateOptionalTime'} for range filters and such?
MESSAGE_MAPPING = {
'_parent': {
'type': 'thread'
},
'properties': {
'id': {'type': 'string'},
'object': {'type': 'string'},
'namespace_id': {'type': 'string'},
'subject': {'type': 'string'},
'from': {'type': 'string'},
'to': {'type': 'string'},
'cc': {'type': 'string'},
'bcc': {'type': 'string'},
'date': {'type': 'string'},
'thread_id': {'type': 'string'},
'snippet': {'type': 'string'},
'body': {'type': 'string'},
'unread': {'type': 'boolean'},
'files': {'type': 'nested', 'properties': {'size': {'type': 'long'}, 'id': {'type': 'string'}, 'content_type': {'type': 'string'}, 'filename': {'type': 'string'}}},
}
}
# TODO[k]: message._parent = thread
NAMESPACE_INDEX_MAPPING = {
'thread': THREAD_MAPPING,
'message': MESSAGE_MAPPING
}
|
Python
| 0
|
@@ -38,16 +38,127 @@
gs too.%0A
+# first/last_message_timestamp as %7B'type': 'date', 'format': 'dateOptionalTime'%7D%0A# for range filters and such?%0A
THREAD_M
@@ -316,42 +316,14 @@
': '
-date', 'format': 'dateOptionalTime
+string
'%7D,%0A
@@ -535,42 +535,14 @@
': '
-date', 'format': 'dateOptionalTime
+string
'%7D,%0A
|
e0dac0a621cbeed615553e5c3544f9c49de96eb2
|
Subtract 1 from model end_year
|
metadata/FrostNumberModel/hooks/pre-stage.py
|
metadata/FrostNumberModel/hooks/pre-stage.py
|
"""A hook for modifying parameter values read from the WMT client."""
import os
import shutil
from wmt.utils.hook import find_simulation_input_file
from topoflow_utils.hook import assign_parameters
file_list = []
def execute(env):
"""Perform pre-stage tasks for running a component.
Parameters
----------
env : dict
A dict of component parameter values from WMT.
"""
env['end_year'] = long(env['start_year']) + long(env['_run_duration'])
env['fn_out_filename'] = 'frostnumber_output.dat'
assign_parameters(env, file_list)
for fname in file_list:
src = find_simulation_input_file(env[fname])
shutil.copy(src, os.curdir)
|
Python
| 0
|
@@ -142,16 +142,27 @@
put_file
+, yaml_dump
%0Afrom to
@@ -479,17 +479,21 @@
ation'%5D)
+ - 1
%0A
-
env%5B
@@ -577,16 +577,17 @@
e_list)%0A
+%0A
for
@@ -659,16 +659,16 @@
fname%5D)%0A
-
@@ -695,8 +695,41 @@
curdir)%0A
+%0A yaml_dump('_env.yaml', env)%0A
|
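The subtraction fixes the usual inclusive-range off-by-one: a run of N model years that starts in start_year ends in start_year + N - 1, not start_year + N. In numbers:

start_year = 2000
run_duration = 10
end_year = start_year + run_duration - 1  # the years 2000..2009 are ten years
assert end_year == 2009
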
be7ee0ba4cdfab1ef03b0d58913cddb00c572c0f
|
Revise descriptive comments
|
lc0131_palindrome_partitioning.py
|
lc0131_palindrome_partitioning.py
|
"""Leetcode 131. Palindrome Partitioning
Medium
URL: https://leetcode.com/problems/palindrome-partitioning/
Given a string s, partition s such that every substring of the partition is a palindrome.
Return all possible palindrome partitioning of s.
Example:
Input: "aab"
Output:
[
["aa","b"],
["a","a","b"]
]
"""
class Solution(object):
def _backtrack(self, result, temps, s, start):
if start == len(s):
result.append(temps[:])
return None
for i in range(start, len(s)):
# Check if palindrome.
partial = s[start:i+1]
if partial == partial[::-1]:
temps.append(s[start:i+1])
self._backtrack(result, temps, s, i + 1)
temps.pop()
def partition(self, s):
"""
:type s: str
:rtype: List[List[str]]
Time complexity: O(n * 2^n), where n is the length of s.
Space complexity: O(n).
"""
# Apply backtracking.
result = []
temps = []
start = 0
self._backtrack(result, temps, s, start)
return result
def main():
s = "aab"
print Solution().partition(s)
if __name__ == '__main__':
main()
|
Python
| 0.000001
|
@@ -143,16 +143,17 @@
ch that
+%0A
every su
@@ -315,17 +315,16 @@
%0A%5D%0A%22%22%22%0A%0A
-%0A
class So
@@ -365,33 +365,32 @@
(self, result, t
-e
mps, s, start):%0A
@@ -413,24 +413,143 @@
== len(s):%0A
+ # Check partial string with start len(s): empty string ''.%0A # Palindrom partition is completed.%0A
@@ -563,17 +563,16 @@
append(t
-e
mps%5B:%5D)%0A
@@ -655,18 +655,46 @@
# Check
-if
+partial string s%5Bstart:i+1%5D is
palindr
@@ -794,34 +794,149 @@
-temps.append(s%5Bstart:i+1%5D)
+# If yes, append it to tmps.%0A tmps.append(partial)%0A%0A # Further check the remaining string is also a palinfrome.
%0A
@@ -965,33 +965,32 @@
ktrack(result, t
-e
mps, s, i + 1)%0A
@@ -988,16 +988,17 @@
i + 1)%0A
+%0A
@@ -1005,18 +1005,74 @@
+# Backtrack by popping out the top tmps.%0A
t
-e
mps.pop(
@@ -1200,11 +1200,9 @@
O(n
- *
+*
2%5En)
@@ -1334,17 +1334,16 @@
t
-e
mps = %5B%5D
@@ -1339,24 +1339,25 @@
tmps = %5B%5D%0A
+%0A
star
@@ -1395,17 +1395,16 @@
esult, t
-e
mps, s,
@@ -1410,16 +1410,17 @@
start)%0A
+%0A
|
797ce2ce387a8b95a1a20e9c09400a6755d3be66
|
Use the settings on the package #49
|
base_command.py
|
base_command.py
|
import sublime
import sublime_plugin
import os.path
is_sublime_text_3 = int(sublime.version()) >= 3000
if is_sublime_text_3:
from .progress_notifier import ProgressNotifier
from .cross_platform_codecs import CrossPlaformCodecs
else:
from progress_notifier import ProgressNotifier
from cross_platform_codecs import CrossPlaformCodecs
# A base for each command
class BaseCommand(sublime_plugin.WindowCommand):
package_name = "Gulp"
def run(self, task_name=None, task_flag=None, silent=False, paths=[]):
self.setup_data_from_settings()
self.task_name = task_name
self.task_flag = task_flag if task_name is not None and task_flag is not None else self.get_flag_from_task_name()
self.silent = silent
self._working_dir = ""
self.sercheable_folders = [os.path.dirname(path) for path in paths] if len(paths) > 0 else self.window.folders()
self.output_view = None
self.work()
def setup_data_from_settings(self):
self.settings = sublime.load_settings("Gulp.sublime-settings")
self.results_in_new_tab = self.settings.get("results_in_new_tab", False)
self.nonblocking = self.settings.get("nonblocking", True)
self.exec_args = self.settings.get('exec_args', False)
self.check_for_gulpfile = self.settings.get('check_for_gulpfile', True)
def get_flag_from_task_name(self):
flags = self.settings.get("flags", {})
return flags[self.task_name] if self.task_name in flags else ""
# Properties
@property
def working_dir(self):
return self._working_dir
@working_dir.setter
def working_dir(self, value):
if self.check_for_gulpfile:
self._working_dir = os.path.dirname(value)
else:
self._working_dir = value
# Main method, override
def work(self):
pass
# Panels and message
def show_quick_panel(self, items, on_done=None, font=sublime.MONOSPACE_FONT):
self.defer_sync(lambda: self.window.show_quick_panel(items, on_done, font))
def show_input_panel(self, caption, initial_text="", on_done=None, on_change=None, on_cancel=None):
self.window.show_input_panel(caption, initial_text, on_done, on_change, on_cancel)
def status_message(self, text):
sublime.status_message("%s: %s" % (self.package_name, text))
def error_message(self, text):
sublime.error_message("%s: %s" % (self.package_name, text))
# Output view
def show_output_panel(self, text):
if self.silent:
self.status_message(text)
return
if self.results_in_new_tab:
new_tab_path = os.path.join(self.gulp_results_path(), "Gulp Results")
self.output_view = self.window.open_file(new_tab_path)
self.output_view.set_scratch(True)
else:
self.output_view = self.window.get_output_panel("gulp_output")
self.show_panel()
self.output_view.settings().set("scroll_past_end", False)
self.add_syntax()
self.append_to_output_view(text)
def gulp_results_path(self):
return next(folder_path for folder_path in self.sercheable_folders if self.working_dir.find(folder_path) != -1) if self.working_dir else ""
def gulp_results_view(self):
if self.output_view is None:
gulp_results = [view for view in sublime.active_window().views() if view.file_name() and os.path.basename(view.file_name()) == "Gulp Results"]
return gulp_results[0] if len(gulp_results) > 0 else None
else:
return self.output_view
def add_syntax(self):
syntax_file = self.settings.get("syntax", "Packages/Gulp/syntax/GulpResults.tmLanguage")
if syntax_file:
self.output_view.set_syntax_file(syntax_file)
def append_to_output_view_in_main_thread(self, text):
self.defer_sync(lambda: self.append_to_output_view(text))
def append_to_output_view(self, text):
if not self.silent:
decoded_text = text if is_sublime_text_3 else CrossPlaformCodecs.force_decode(text)
self._insert(self.output_view, decoded_text)
def _insert(self, view, content):
if view is None:
return
if self.results_in_new_tab and view.is_loading():
self.set_timeout(lambda: self._insert(view, content), 10)
else:
view.set_read_only(False)
view.run_command("view_insert", { "size": view.size(), "content": content })
view.set_viewport_position((0, view.size()), True)
view.set_read_only(True)
def set_output_close_on_timeout(self):
timeout = self.settings.get("results_autoclose_timeout_in_milliseconds", False)
if timeout:
self.set_timeout(self.close_panel, timeout)
def close_panel(self):
if self.results_in_new_tab:
self.output_view = self.gulp_results_view()
if self.output_view and self.output_view.file_name():
self.window.focus_view(self.output_view)
self.window.run_command('close_file')
else:
self.window.run_command("hide_panel", { "panel": "output.gulp_output" })
def show_panel(self):
self.window.run_command("show_panel", { "panel": "output.gulp_output" })
# Sync/async calls
def defer_sync(self, fn):
self.set_timeout(fn, 0)
def defer(self, fn):
self.async(fn, 0)
def set_timeout(self, fn, delay):
sublime.set_timeout(fn, delay)
def async(self, fn, delay):
if is_sublime_text_3:
progress = ProgressNotifier('Gulp: Working')
sublime.set_timeout_async(lambda: self.call(fn, progress), delay)
else:
fn()
def call(self, fn, progress):
fn()
progress.stop()
class ViewInsertCommand(sublime_plugin.TextCommand):
def run(self, edit, size, content):
self.view.insert(edit, int(size), content)
|
Python
| 0
|
@@ -164,32 +164,67 @@
rogressNotifier%0A
+ from .settings import Settings%0A
from .cross_
@@ -314,32 +314,66 @@
rogressNotifier%0A
+ from settings import Settings%0A
from cross_p
@@ -1094,53 +1094,17 @@
s =
-sublime.load_settings(%22Gulp.sublime-settings%22
+Settings(
)%0A
|
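The diff swaps the direct sublime.load_settings call for a Settings class imported from the package's own settings module, which this record does not show. A plausible minimal shape for such a wrapper (hypothetical; it runs only inside Sublime Text's embedded interpreter, and the real class may expose more than get()):

import sublime

class Settings(object):
    # Hypothetical sketch of the wrapper the diff imports.
    def __init__(self):
        self._settings = sublime.load_settings("Gulp.sublime-settings")

    def get(self, key, default=None):
        return self._settings.get(key, default)
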
9d29061f8520506d798ad75aa296be8dc838aaf7
|
Remove leftover print call in paginator
|
resolwe/elastic/pagination.py
|
resolwe/elastic/pagination.py
|
""".. Ignore pydocstyle D400.
==================
Elastic Paginators
==================
Paginator classes used in Elastic app.
.. autoclass:: resolwe.elastic.pagination.LimitOffsetPostPagination
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from rest_framework.pagination import LimitOffsetPagination, _positive_int
def get_query_param(request, key):
"""Get query parameter uniformly for GET and POST requests."""
value = request.query_params.get(key) or request.data.get(key)
if value is None:
raise KeyError()
return value
class LimitOffsetPostPagination(LimitOffsetPagination):
"""Limit/offset paginator.
This is standard limit/offset paginator from Django REST framework,
with difference that it supports passing ``limit`` and ``offset``
attributes also in the body of the request (not just as query
parameter).
"""
def get_limit(self, request):
"""Return limit parameter."""
if self.limit_query_param:
try:
print(get_query_param(request, self.limit_query_param))
return _positive_int(
get_query_param(request, self.limit_query_param),
strict=True,
cutoff=self.max_limit
)
except (KeyError, ValueError):
pass
return self.default_limit
def get_offset(self, request):
"""Return offset parameter."""
try:
return _positive_int(
get_query_param(request, self.offset_query_param),
)
except (KeyError, ValueError):
return 0
|
Python
| 0
|
@@ -1041,80 +1041,8 @@
ry:%0A
- print(get_query_param(request, self.limit_query_param))%0A
|
f7abcfa295565fc1742dccda8ec28f34917320ff
|
Allow server socket reuse
|
soco/events.py
|
soco/events.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
"""
Classes to handle Sonos UPnP Events
"""
try: # python 3
from http.server import SimpleHTTPRequestHandler
from urllib.request import urlopen
from urllib.error import URLError
import socketserver
from queue import Queue
except ImportError: # python 2.7
from SimpleHTTPServer import SimpleHTTPRequestHandler
from urllib2 import urlopen, URLError
import SocketServer as socketserver
from Queue import Queue
import threading
import socket
import logging
import soco
log = logging.getLogger(__name__) # pylint: disable=C0103
class EventServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
""" A TCP server which handles each new request in a new thread """
pass
class EventNotifyHandler(SimpleHTTPRequestHandler):
""" Handles HTTP NOTIFY Verbs sent to the listener server """
def do_NOTIFY(self):
""" Handle a NOTIFY request. See the UPnP Spec for details."""
headers = dict(self.headers)
seq = headers['seq'] # Event sequence number
sid = headers['sid'] # Event Subscription Identifier
content_length = int(headers['content-length'])
content = self.rfile.read(content_length)
# Build an event structure to put on the queue, containing the useful
# information extracted from the request
event = {
'seq': seq,
'sid': sid,
'content': content
}
# put it on the queue for later consumption
self.server.event_queue.put(event)
self.send_response(200)
self.end_headers()
def log_message(self, format, *args):
# We override this to stop the printing of requests and errors
pass
class EventServerThread(threading.Thread):
"""The thread in which the event listener server will run"""
def __init__(self, ip, event_queue):
super(EventServerThread, self).__init__()
#: used to signal that the server should stop
self.stop_flag = threading.Event()
#: The ip address on which the server should listen
self.ip = ip
#: The queue onto which events will be placed
self.event_queue = event_queue
def run(self):
# Start the server on the local IP at port 1400. Any free port could
# be used but this seems appropriate for Sonos, and avoids the need
# to find a free port. Handling of requests is delegated to instances
# of the EventNotifyHandler class
listener = EventServer((self.ip, 1400), EventNotifyHandler)
listener.event_queue = self.event_queue
log.debug("Event listener running on %s", listener.server_address)
        # Listen for events until told to stop
while not self.stop_flag.is_set():
listener.handle_request()
class EventListener(object):
"""The Event Listener.
Runs an http server in a thread which is an endpoint for NOTIFY messages
from sonos devices"""
def __init__(self):
super(EventListener, self).__init__()
#: Indicates whether the server is currently running
self.is_running = False
#: The queue to which events are posted
self.event_queue = Queue()
self._listener_thread = None
#: The address (ip, port) on which the server is listening
self.address = ()
def start(self, any_zone):
"""Start the event listener
any_zone is any Sonos device on the network. It does not matter which
device. It is used only to find a local IP address reachable by the
Sonos net.
"""
# Find our local network IP address which is accessible to the Sonos net
# See http://stackoverflow.com/q/166506
temp_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
temp_sock.connect((any_zone.ip_address, 1400))
ip = temp_sock.getsockname()[0]
temp_sock.close()
# Start the event listener server in a separate thread
self.address = (ip, 1400)
self._listener_thread = EventServerThread(ip, self.event_queue)
self._listener_thread.daemon = True
self._listener_thread.start()
self.is_running = True
log.info("Event listener started")
def stop(self):
"""Stop the event listener"""
# Signal the thread to stop before handling the next request
self._listener_thread.stop_flag.set()
# Send a dummy request in case the http server is currently listening
try:
urlopen(
'http://%s:%s/' % (self.address[0], self.address[1]))
except URLError:
# If the server is already shut down, we receive a socket error,
# which we ignore.
pass
# wait for the thread to finish
self._listener_thread.join()
self.is_running = False
log.info("Event listener stopped")
event_listener = EventListener()
event_queue = event_listener.event_queue
|
Python
| 0.000003
|
@@ -772,28 +772,50 @@
ead %22%22%22%0A
-pass
+allow_reuse_address = True
%0A%0A%0Aclass Eve
@@ -5035,28 +5035,29 @@
= event_listener.event_queue
+%0A
|
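The whole fix above is a one-line class attribute swap; here is a minimal, self-contained sketch of the pattern (handler and message are illustrative, not soco's). allow_reuse_address = True makes TCPServer.server_bind() set SO_REUSEADDR before binding, so a restarted listener can reclaim port 1400 while the previous socket is still in TIME_WAIT, instead of dying with "Address already in use".

import socketserver

class EchoHandler(socketserver.BaseRequestHandler):
    def handle(self):
        # Echo one chunk back to the client.
        self.request.sendall(self.request.recv(1024))

class ReusableServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
    # The attribute the commit adds: checked by server_bind() at bind time.
    allow_reuse_address = True

if __name__ == '__main__':
    with ReusableServer(('127.0.0.1', 1400), EchoHandler) as server:
        server.handle_request()  # serve a single request, then exit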
9971f0d54ccf2046957a86a2cd9c0d104243a607
|
Add option to rebuild test modules before running
|
script/test.py
|
script/test.py
|
#!/usr/bin/env python
import argparse
import os
import shutil
import subprocess
import sys
from lib.util import electron_gyp, rm_rf
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
PROJECT_NAME = electron_gyp()['project_name%']
PRODUCT_NAME = electron_gyp()['product_name%']
def main():
os.chdir(SOURCE_ROOT)
args = parse_args()
config = args.configuration
if sys.platform == 'darwin':
electron = os.path.join(SOURCE_ROOT, 'out', config,
'{0}.app'.format(PRODUCT_NAME), 'Contents',
'MacOS', PRODUCT_NAME)
resources_path = os.path.join(SOURCE_ROOT, 'out', config,
'{0}.app'.format(PRODUCT_NAME), 'Contents',
'Resources')
elif sys.platform == 'win32':
electron = os.path.join(SOURCE_ROOT, 'out', config,
'{0}.exe'.format(PROJECT_NAME))
resources_path = os.path.join(SOURCE_ROOT, 'out', config)
else:
electron = os.path.join(SOURCE_ROOT, 'out', config, PROJECT_NAME)
resources_path = os.path.join(SOURCE_ROOT, 'out', config)
returncode = 0
try:
if args.use_instrumented_asar:
install_instrumented_asar_file(resources_path)
subprocess.check_call([electron, 'spec'] + sys.argv[1:])
except subprocess.CalledProcessError as e:
returncode = e.returncode
except KeyboardInterrupt:
returncode = 0
if args.use_instrumented_asar:
restore_uninstrumented_asar_file(resources_path)
if os.environ.has_key('OUTPUT_TO_FILE'):
output_to_file = os.environ['OUTPUT_TO_FILE']
with open(output_to_file, 'r') as f:
print f.read()
rm_rf(output_to_file)
return returncode
def parse_args():
parser = argparse.ArgumentParser(description='Run Electron tests')
parser.add_argument('--use_instrumented_asar',
help='Run tests with coverage instructed asar file',
action='store_true',
required=False)
parser.add_argument('-c', '--configuration',
help='Build configuration to run tests against',
default='D',
required=False)
return parser.parse_args()
def install_instrumented_asar_file(resources_path):
asar_path = os.path.join(resources_path, '{0}.asar'.format(PROJECT_NAME))
uninstrumented_path = os.path.join(resources_path,
'{0}-original.asar'.format(PROJECT_NAME))
instrumented_path = os.path.join(SOURCE_ROOT, 'out', 'coverage',
'{0}.asar'.format(PROJECT_NAME))
shutil.move(asar_path, uninstrumented_path)
shutil.move(instrumented_path, asar_path)
def restore_uninstrumented_asar_file(resources_path):
asar_path = os.path.join(resources_path, '{0}.asar'.format(PROJECT_NAME))
uninstrumented_path = os.path.join(resources_path,
'{0}-original.asar'.format(PROJECT_NAME))
os.remove(asar_path)
shutil.move(uninstrumented_path, asar_path)
if __name__ == '__main__':
sys.exit(main())
|
Python
| 0
|
@@ -99,39 +99,188 @@
lib.
-util import electron_gyp, rm_rf
+config import PLATFORM, enable_verbose_mode, get_target_arch%0Afrom lib.util import electron_gyp, execute, get_electron_version, rm_rf, %5C%0A update_electron_modules
%0A%0A%0AS
@@ -539,16 +539,309 @@
ration%0A%0A
+ if args.verbose:%0A enable_verbose_mode()%0A%0A spec_modules = os.path.join(SOURCE_ROOT, 'spec', 'node_modules')%0A if args.rebuild_native_modules or not os.path.isdir(spec_modules):%0A rebuild_native_modules(spec_modules,%0A os.path.join(SOURCE_ROOT, 'out', config))%0A%0A
if sys
@@ -2468,16 +2468,366 @@
=False)%0A
+ parser.add_argument('--rebuild_native_modules',%0A help='Rebuild native modules used by specs',%0A action='store_true',%0A required=False)%0A parser.add_argument('-v', '--verbose',%0A action='store_true',%0A help='Prints the output of the subprocesses')%0A
parser
@@ -3813,24 +3813,24 @@
(asar_path)%0A
-
shutil.mov
@@ -3865,16 +3865,729 @@
path)%0A%0A%0A
+def run_python_script(script, *args):%0A script_path = os.path.join(SOURCE_ROOT, 'script', script)%0A return execute(%5Bsys.executable, script_path%5D + list(args))%0A%0A%0Adef rebuild_native_modules(modules_path, out_dir):%0A version = get_electron_version()%0A%0A run_python_script('create-node-headers.py',%0A '--version', version,%0A '--directory', out_dir)%0A%0A if PLATFORM == 'win32':%0A iojs_lib = os.path.join(out_dir, 'Release', 'iojs.lib')%0A atom_lib = os.path.join(out_dir, 'node.dll.lib')%0A shutil.copy2(atom_lib, iojs_lib)%0A%0A update_electron_modules(os.path.dirname(modules_path), get_target_arch(),%0A os.path.join(out_dir, 'node-%7B0%7D'.format(version)))%0A%0A%0A
if __nam
|
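The interesting bit of the diff is the new --rebuild_native_modules flag; a minimal sketch of how argparse's store_true action behaves (parser description and flag name copied from the diff, the rest is illustrative):

import argparse

parser = argparse.ArgumentParser(description='Run Electron tests')
parser.add_argument('--rebuild_native_modules',
                    help='Rebuild native modules used by specs',
                    action='store_true',
                    required=False)

print(parser.parse_args([]).rebuild_native_modules)      # False -- flag absent
print(parser.parse_args(['--rebuild_native_modules'])
      .rebuild_native_modules)                           # True  -- flag present

The False default is what lets main() fall back to rebuilding only when the spec node_modules directory is missing.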
7e42c10472dd8c97649d523bb00c13f952698c46
|
Fix versions
|
myvcs/main.py
|
myvcs/main.py
|
#!/usr/bin/env python
import os
from os.path import exists, isdir, join
import shutil
import sys
META_DIR = '.myvcs'
def backup(path):
backup_dir = join(path, META_DIR)
if exists(backup_dir):
shutil.rmtree(backup_dir)
def is_backup_dir(src, names):
if src == path:
return [META_DIR]
return []
shutil.copytree(path, backup_dir, ignore=is_backup_dir)
def get_next_backup_dir(backup_dir, versions):
return join(backup_dir, str(len(versions) + 1))
def get_versions(path):
backup_dir = join(path, META_DIR)
if exists(backup_dir):
versions = [
name for name in os.listdir(join(path, META_DIR))
if name != 'stash'
]
else:
versions = []
return versions
def stash_exists(path):
return exists(join(path, META_DIR, 'stash'))
def snapshot(path, version=None):
versions = get_versions(path)
def is_backup_dir(src, names):
if src == path:
return [META_DIR]
return []
if version == None:
backup_dir = get_next_backup_dir(join(path, META_DIR), versions)
elif version == 'stash':
backup_dir = join(path, META_DIR, version)
if exists(backup_dir):
shutil.rmtree(backup_dir)
else:
print 'Unknown version'
shutil.copytree(path, backup_dir, ignore=is_backup_dir)
# FIXME: version 21?
def checkout_version(path, version):
snapshot_dir = join(path, META_DIR, version)
for name in os.listdir(path):
if name == META_DIR:
continue
full_path = join(path, name)
if isdir(full_path):
shutil.rmtree(full_path)
else:
os.remove(full_path)
for name in os.listdir(snapshot_dir):
full_path = join(snapshot_dir, name)
if isdir(full_path):
shutil.copytree(full_path, join(path, name))
else:
shutil.copy(full_path, join(path, name))
def checkout(path, version):
versions = get_versions(path)
if version == 'stash':
if not stash_exists(path):
version = max([int(i) for i in versions])
checkout_version(path, str(version))
elif version not in versions:
print 'No such version'
else:
snapshot(path, version='stash')
checkout_version(path, version)
if __name__ == '__main__':
# FIXME: This is a bad idea. It should look for a .myvcs somewhere up in
# the tree, or current directory.
path = os.getcwd()
assert len(sys.argv) > 1, 'Need a command to run'
command = sys.argv[1]
if command == 'commit':
snapshot(path)
elif command == 'checkout':
checkout(path, sys.argv[2])
elif command == 'latest':
checkout(path, 'stash')
else:
print 'Unknown command!'
sys.exit(1)
|
Python
| 0.000001
|
@@ -44,16 +44,26 @@
h import
+ basename,
exists,
@@ -89,16 +89,30 @@
shutil%0A
+import string%0A
import s
@@ -718,19 +718,41 @@
name
- != 'stash'
+.startswith(tuple(string.digits))
%0A
@@ -889,23 +889,21 @@
))%0A%0Adef
-snapsho
+commi
t(path,
@@ -1156,16 +1156,89 @@
ersions)
+%0A print versions%0A track_version(path, basename(backup_dir))
%0A%0A el
@@ -1552,23 +1552,21 @@
n):%0A
-snapsho
+commi
t_dir =
@@ -1858,23 +1858,21 @@
listdir(
-snapsho
+commi
t_dir):%0A
@@ -1896,23 +1896,21 @@
= join(
-snapsho
+commi
t_dir, n
@@ -2066,16 +2066,89 @@
, name))
+%0A %0A if version is not 'stash':%0A track_version(path, version)
%0A%0Adef ch
@@ -2456,31 +2456,29 @@
se:%0A
-snapsho
+commi
t(path, vers
@@ -2531,16 +2531,234 @@
rsion)%0A%0A
+def track_version(path, version):%0A with open(join(path, META_DIR, 'head'), 'w') as f:%0A f.write(version)%0A%0Adef print_version(path):%0A with open(join(path, META_DIR, 'head')) as f:%0A print f.read()%0A %0A
%0Aif __na
@@ -3037,15 +3037,13 @@
-snapsho
+commi
t(pa
@@ -3174,32 +3174,96 @@
(path, 'stash')%0A
+ %0A elif command == 'current':%0A print_version(path)%0A
%0A els
|
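One piece of the diff worth unpacking: versions are now detected by first character rather than by excluding 'stash', using str.startswith with a tuple of digit strings. A self-contained illustration (entry names are made up):

import string

entries = ['1', '2', '10', 'stash', 'head']
versions = [name for name in entries
            if name.startswith(tuple(string.digits))]
print(versions)  # ['1', '2', '10'] -- 'head', added by this commit, is skipped too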
1dcea652d825c86fb6f69a736a81a9ddce056804
|
Remove raven and whitenoise from settings.py
|
resource_tracking/settings.py
|
resource_tracking/settings.py
|
from dbca_utils.utils import env
import dj_database_url
import os
from pathlib import Path
# Project paths
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = str(Path(__file__).resolve().parents[1])
# Application definition
DEBUG = env('DEBUG', False)
SECRET_KEY = env('SECRET_KEY', 'PlaceholderSecretKey')
CSRF_COOKIE_SECURE = env('CSRF_COOKIE_SECURE', False)
SESSION_COOKIE_SECURE = env('SESSION_COOKIE_SECURE', False)
if not DEBUG:
ALLOWED_HOSTS = env('ALLOWED_DOMAINS', 'localhost').split(',')
else:
ALLOWED_HOSTS = ['*']
INTERNAL_IPS = ['127.0.0.1', '::1']
ROOT_URLCONF = 'resource_tracking.urls'
WSGI_APPLICATION = 'resource_tracking.wsgi.application'
CSW_URL = env('CSW_URL', '')
PRINTING_URL = env('PRINTING_URL', '')
TRACPLUS_URL = env('TRACPLUS_URL', False)
KMI_VEHICLE_BASE_URL = env('KMI_VEHICLE_BASE_URL', '')
JQUERY_SOURCE = env('JQUERY_SOURCE', '')
JQUERYUI_SOURCE = env('JQUERYUI_SOURCE', '')
DFES_URL = env('DFES_URL', False)
DFES_USER = env('DFES_USER', False)
DFES_PASS = env('DFES_PASS', False)
DFES_OUT_OF_ORDER_BUFFER = int(env('DFES_OUT_OF_ORDER_BUFFER') or 300)
# Add scary warning on device edit page for prod
PROD_SCARY_WARNING = env('PROD_SCARY_WARNING', False)
DEVICE_HTTP_CACHE_TIMEOUT = env('DEVICE_HTTP_CACHE_TIMEOUT', 60)
HISTORY_HTTP_CACHE_TIMEOUT = env('HISTORY_HTTP_CACHE_TIMEOUT', 60)
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.gis',
'raven.contrib.django.raven_compat',
'tastypie',
'django_extensions',
'djgeojson',
# Sub-app definitions
'tracking',
'weather',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'whitenoise.middleware.WhiteNoiseMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'dbca_utils.middleware.SSOLoginMiddleware',
]
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
# Email settings
ADMINS = ('asi@dbca.wa.gov.au',)
EMAIL_HOST = env('EMAIL_HOST', 'email.host')
EMAIL_PORT = env('EMAIL_PORT', 25)
EMAIL_USER = env('EMAIL_USER', 'username')
EMAIL_PASSWORD = env('EMAIL_PASSWORD', 'password')
SERIALIZATION_MODULES = {
"geojson": "djgeojson.serializers",
}
# Database
DATABASES = {
# Defined in the DATABASE_URL env variable.
'default': dj_database_url.config(),
}
# Project authentication settings
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
)
# Internationalization
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Australia/Perth'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
# Logging settings - log to stdout/stderr
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'console': {'format': '%(asctime)s %(levelname)-8s %(message)s'},
},
'handlers': {
'console': {
'level': 'INFO',
'class': 'logging.StreamHandler',
'formatter': 'console'
},
'sentry': {
'level': 'WARNING',
'class': 'raven.contrib.django.raven_compat.handlers.SentryHandler',
},
},
'loggers': {
'django': {
'handlers': ['console'],
'propagate': True,
},
'django.request': {
'handlers': ['console', 'sentry'],
'level': 'WARNING',
'propagate': False,
},
'weather': {
'handlers': ['console'],
'level': 'INFO'
},
'dafwa': {
'handlers': ['console'],
'level': 'INFO'
},
'dafwa_uploads': {
'handlers': ['console'],
'level': 'INFO'
},
'tracking_points': {
'handlers': ['console'],
'level': 'INFO'
},
}
}
JS_MINIFY = False
RESOURCE_FILES_WITH_AUTO_VERSION = [
os.path.join(BASE_DIR, "tracking", "static", "sss", "sss.js"),
os.path.join(BASE_DIR, "tracking", "static", "sss", "leaflet.dump.js"),
os.path.join(BASE_DIR, "tracking", "static", "sss", "sss.css"),
]
# Tastypie settings
TASTYPIE_DEFAULT_FORMATS = ['json']
# DAFWA config
DAFWA_UPLOAD = env('DAFWA_UPLOAD', False)
DAFWA_UPLOAD_HOST = env('DAFWA_UPLOAD_HOST', 'host')
DAFWA_UPLOAD_USER = env('DAFWA_UPLOAD_USER', 'username')
DAFWA_UPLOAD_PASSWORD = env('DAFWA_UPLOAD_PASSWORD', 'password')
DAFWA_UPLOAD_DIR = env('DAFWA_UPLOAD_DIR', '/inbound')
# Sentry configuration
if env('RAVEN_DSN', False):
RAVEN_CONFIG = {'dsn': env('RAVEN_DSN')}
|
Python
| 0.000003
|
@@ -1591,49 +1591,8 @@
s',%0A
- 'raven.contrib.django.raven_compat',%0A
@@ -1776,58 +1776,8 @@
e',%0A
- 'whitenoise.middleware.WhiteNoiseMiddleware',%0A
@@ -3797,152 +3797,8 @@
%7D,%0A
- 'sentry': %7B%0A 'level': 'WARNING',%0A 'class': 'raven.contrib.django.raven_compat.handlers.SentryHandler',%0A %7D,%0A
@@ -3982,18 +3982,8 @@
ole'
-, 'sentry'
%5D,%0A
@@ -5106,29 +5106,30 @@
ion%0Aif env('
-RAVEN
+SENTRY
_DSN', False
@@ -5135,21 +5135,22 @@
e):%0A
-RAVEN
+SENTRY
_CONFIG
@@ -5168,13 +5168,14 @@
nv('
-RAVEN
+SENTRY
_DSN
|
ee1effb3a91bca7fcf1c590955f45e5b631a0598
|
Revise documentation
|
hanlp/pretrained/ner.py
|
hanlp/pretrained/ner.py
|
# -*- coding:utf-8 -*-
# Author: hankcs
# Date: 2019-12-30 20:07
from hanlp_common.constant import HANLP_URL
MSRA_NER_BERT_BASE_ZH = HANLP_URL + 'ner/ner_bert_base_msra_20200104_185735.zip'
'BERT model (:cite:`devlin-etal-2019-bert`) trained on MSRA with 3 entity types.'
MSRA_NER_ALBERT_BASE_ZH = HANLP_URL + 'ner/ner_albert_base_zh_msra_20200111_202919.zip'
'ALBERT model (:cite:`Lan2020ALBERT:`) trained on MSRA with 3 entity types.'
MSRA_NER_ELECTRA_SMALL_ZH = HANLP_URL + 'ner/msra_ner_electra_small_20210807_154832.zip'
'Electra small model (:cite:`clark2020electra:`) trained on MSRA with 3 entity types. F1 = `95.10`'
CONLL03_NER_BERT_BASE_UNCASED_EN = HANLP_URL + 'ner/ner_conll03_bert_base_uncased_en_20200104_194352.zip'
'BERT model (:cite:`devlin-etal-2019-bert`) trained on CoNLL03.'
ALL = {}
|
Python
| 0.000001
|
@@ -582,33 +582,34 @@
ed on MSRA with
-3
+26
entity types. F
|
6df071a84e4c1c75e93c33e6e8676cf4b618e2a6
|
Add __version__ and remove testing hack
|
bes/__init__.py
|
bes/__init__.py
|
"""
Log actions to Elastic Search (via UDP)
"""
import datetime as _datetime
import json as _json
import logging as _logging
import socket as _socket
LOG = _logging.getLogger(__name__)
DEFAULT = {
'host': 'localhost',
'port': 9700,
'protocol': 'UDP',
'index': 'log',
'datestamp_index': False,
'type': 'record',
}
class Connection(object):
"""A socket connecting to Elastic Search
Use a context manager for PEP 343's 'with' syntax:
>>> with Connection(host='localhost', port=1234) as c:
... c.send(message='hello!')
"""
def __init__(self, host=None, port=None, protocol=None):
if host is None:
host = DEFAULT['host']
if port is None:
port = DEFAULT['port']
if protocol is None:
protocol = DEFAULT['protocol']
self.host = host
self.port = port
if protocol == 'UDP':
self.socket_type = _socket.SOCK_DGRAM
else:
raise NotImplementedError(protocol)
self._sock = None
def __enter__(self):
self._sock = _socket.socket(_socket.AF_INET, self.socket_type)
return self
def __exit__(self, *exc_info):
if self._sock is not None:
try:
self._sock.close()
finally:
self._sock = None
def send(self, message):
LOG.debug(message)
self._sock.sendto(message, (self.host, self.port))
def log(index=None, type=None, **kwargs):
"""Log an arbitrary payload dictionary to Elastic Search
Uses the default connection configuration. If you need to
override any of them, build your payload dict by hand and use
emit() instead.
You can optionally override the index and type of payload, for
later filtering in Elastic Search. This means that `index` and
`type` are not available as payload keys.
"""
kwargs['@timestamp'] = _datetime.datetime.utcnow().isoformat()
kwargs['@version'] = 1
emit(payload=kwargs, index=index, type=type)
def emit(payload, index=None, datestamp_index=None, type=None, **kwargs):
"""Send bulk-upload data to Elastic Search
Uses the 'index' action to add or replace a document as necessary.
http://www.elasticsearch.org/guide/reference/api/bulk/
http://www.elasticsearch.org/guide/reference/api/bulk-udp/
"""
#TODO indexes, types, and what Kibana likes.
#Try it out and adjust
    #throwing all of the payload's **kwargs into an 'additional' or similar field might be
    #required, and I don't know what happens if we send different data types with
    #the same name, i.e. a **kwargs of my_special_key: str and my_special_key: {'foo': 'bar'}
if index is None:
index = DEFAULT['index']
if type is None:
type = DEFAULT['type']
if datestamp_index is None:
datestamp_index = DEFAULT['datestamp_index']
if datestamp_index:
index = '-'.join([
index,
_datetime.date.today().strftime('%Y.%m.%d'),
])
index_data = {
'index': {
'_index': index,
'_type': type,
},
}
message = '\n'.join([
_json.dumps(index_data),
_json.dumps(payload),
'',
])
with Connection(**kwargs) as connection:
connection.send(message)
if __name__ == '__main__':
LOG.addHandler(_logging.StreamHandler())
LOG.setLevel(_logging.DEBUG)
for i in range(3):
log(who='somebody', what='Did something %sx times' % i)
|
Python
| 0.000017
|
@@ -146,16 +146,37 @@
ocket%0A%0A%0A
+__version__ = '0.1'%0A%0A
LOG = _l
@@ -3372,199 +3372,4 @@
ge)%0A
-%0A%0Aif __name__ == '__main__':%0A LOG.addHandler(_logging.StreamHandler())%0A LOG.setLevel(_logging.DEBUG)%0A%0A for i in range(3):%0A log(who='somebody', what='Did something %25sx times' %25 i)%0A
|
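The added attribute follows the PEP 396 convention: a plain module-level string that callers and packaging tools can read without side effects, which is also why the `if __name__ == '__main__'` demo block could go. A hypothetical consumer (module name is a stand-in for bes):

# inside mypkg/__init__.py:
__version__ = '0.1'

# a caller can then introspect the installed version:
# >>> import mypkg
# >>> mypkg.__version__
# '0.1'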
5a7e191f7b2751ede70606c06393cbfbf9f18ec6
|
Fix Python 3 incompatibility
|
skimage/color/colorlabel.py
|
skimage/color/colorlabel.py
|
import warnings
import itertools
import numpy as np
from skimage import img_as_float
from skimage._shared import six
from skimage._shared.six.moves import zip
from .colorconv import rgb2gray, gray2rgb
from . import rgb_colors
__all__ = ['color_dict', 'label2rgb', 'DEFAULT_COLORS']
DEFAULT_COLORS = ('red', 'blue', 'yellow', 'magenta', 'green',
'indigo', 'darkorange', 'cyan', 'pink', 'yellowgreen')
color_dict = dict((k, v) for k, v in rgb_colors.__dict__.iteritems()
if isinstance(v, tuple))
def _rgb_vector(color):
"""Return RGB color as (1, 3) array.
This RGB array gets multiplied by masked regions of an RGB image, which are
partially flattened by masking (i.e. dimensions 2D + RGB -> 1D + RGB).
Parameters
----------
color : str or array
Color name in `color_dict` or RGB float values between [0, 1].
"""
if isinstance(color, six.string_types):
color = color_dict[color]
# Slice to handle RGBA colors.
return np.array(color[:3])
def _match_label_with_color(label, colors, bg_label, bg_color):
"""Return `unique_labels` and `color_cycle` for label array and color list.
Colors are cycled for normal labels, but the background color should only
be used for the background.
"""
# Temporarily set background color; it will be removed later.
if bg_color is None:
bg_color = (0, 0, 0)
bg_color = _rgb_vector([bg_color])
unique_labels = list(set(label.flat))
# Ensure that the background label is in front to match call to `chain`.
if bg_label in unique_labels:
unique_labels.remove(bg_label)
unique_labels.insert(0, bg_label)
# Modify labels and color cycle so background color is used only once.
color_cycle = itertools.cycle(colors)
color_cycle = itertools.chain(bg_color, color_cycle)
return unique_labels, color_cycle
def label2rgb(label, image=None, colors=None, alpha=0.3,
bg_label=-1, bg_color=None, image_alpha=1):
"""Return an RGB image where color-coded labels are painted over the image.
Parameters
----------
label : array
Integer array of labels with the same shape as `image`.
image : array
Image used as underlay for labels. If the input is an RGB image, it's
converted to grayscale before coloring.
colors : list
List of colors. If the number of labels exceeds the number of colors,
then the colors are cycled.
alpha : float [0, 1]
Opacity of colorized labels. Ignored if image is `None`.
bg_label : int
Label that's treated as the background.
bg_color : str or array
Background color. Must be a name in `color_dict` or RGB float values
between [0, 1].
image_alpha : float [0, 1]
Opacity of the image.
"""
if colors is None:
colors = DEFAULT_COLORS
colors = [_rgb_vector(c) for c in colors]
if image is None:
image = np.zeros(label.shape + (3,), dtype=np.float64)
# Opacity doesn't make sense if no image exists.
alpha = 1
else:
if not image.shape[:2] == label.shape:
raise ValueError("`image` and `label` must be the same shape")
if image.min() < 0:
warnings.warn("Negative intensities in `image` are not supported")
image = img_as_float(rgb2gray(image))
image = gray2rgb(image) * image_alpha + (1 - image_alpha)
# Ensure that all labels are non-negative so we can index into
# `label_to_color` correctly.
offset = min(label.min(), bg_label)
if offset != 0:
label = label - offset # Make sure you don't modify the input array.
bg_label -= offset
new_type = np.min_scalar_type(label.max())
if new_type == np.bool:
new_type = np.uint8
label = label.astype(new_type)
unique_labels, color_cycle = _match_label_with_color(label, colors,
bg_label, bg_color)
if len(unique_labels) == 0:
return image
dense_labels = range(max(unique_labels) + 1)
label_to_color = np.array([c for i, c in zip(dense_labels, color_cycle)])
result = label_to_color[label] * alpha + image * (1 - alpha)
# Remove background label if its color was not specified.
remove_background = bg_label in unique_labels and bg_color is None
if remove_background:
result[label == bg_label] = image[label == bg_label]
return result
|
Python
| 0.99992
|
@@ -456,16 +456,30 @@
k, v in
+six.iteritems(
rgb_colo
@@ -489,27 +489,16 @@
__dict__
-.iteritems(
)%0A
|
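The fix in one line: dict.iteritems() exists only on Python 2, and this module runs the comprehension at import time, so Python 3 raised AttributeError before anything else loaded. six.iteritems() dispatches to .iteritems() or .items() as appropriate. A stand-alone repro (toy dict; the real code iterates rgb_colors.__dict__):

import six

namespace = {'red': (1.0, 0.0, 0.0), '__doc__': 'not a color'}

# Python 2 only -- AttributeError under Python 3:
#   dict((k, v) for k, v in namespace.iteritems() if isinstance(v, tuple))

color_dict = dict((k, v) for k, v in six.iteritems(namespace)
                  if isinstance(v, tuple))
print(color_dict)  # {'red': (1.0, 0.0, 0.0)}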
fc1789315811c38e0acf2f34ce00c3eee222dd5e
|
Fix cleanup
|
hassio/dock/__init__.py
|
hassio/dock/__init__.py
|
"""Init file for HassIO docker object."""
import asyncio
from contextlib import suppress
import logging
import docker
from ..tools import get_version_from_env
_LOGGER = logging.getLogger(__name__)
class DockerBase(object):
"""Docker hassio wrapper."""
def __init__(self, config, loop, dock, image=None):
"""Initialize docker base wrapper."""
self.config = config
self.loop = loop
self.dock = dock
self.image = image
self.container = None
self.version = None
self._lock = asyncio.Lock(loop=loop)
@property
def docker_name(self):
"""Return name of docker container."""
return None
@property
def in_progress(self):
"""Return True if a task is in progress."""
return self._lock.locked()
async def install(self, tag):
"""Pull docker image."""
if self._lock.locked():
_LOGGER.error("Can't excute install while a task is in progress")
return False
async with self._lock:
return await self.loop.run_in_executor(None, self._install, tag)
def _install(self, tag):
"""Pull docker image.
Need run inside executor.
"""
try:
_LOGGER.info("Pull image %s tag %s.", self.image, tag)
image = self.dock.images.pull("{}:{}".format(self.image, tag))
image.tag(self.image, tag='latest')
self.version = get_version_from_env(image.attrs['Config']['Env'])
_LOGGER.info("Tag image %s with version %s as latest",
self.image, self.version)
except docker.errors.APIError as err:
_LOGGER.error("Can't install %s:%s -> %s.", self.image, tag, err)
return False
return True
def exists(self):
"""Return True if docker image exists in local repo.
Return a Future.
"""
return self.loop.run_in_executor(None, self._exists)
def _exists(self):
"""Return True if docker image exists in local repo.
Need run inside executor.
"""
try:
image = self.dock.images.get(self.image)
self.version = get_version_from_env(image.attrs['Config']['Env'])
except docker.errors.DockerException:
return False
return True
def is_running(self):
"""Return True if docker is Running.
Return a Future.
"""
return self.loop.run_in_executor(None, self._is_running)
def _is_running(self):
"""Return True if docker is Running.
Need run inside executor.
"""
if not self.container:
try:
self.container = self.dock.containers.get(self.docker_name)
self.version = get_version_from_env(
self.container.attrs['Config']['Env'])
except docker.errors.DockerException:
return False
self.container.reload()
return self.container.status == 'running'
async def attach(self):
"""Attach to running docker container."""
if self._lock.locked():
_LOGGER.error("Can't excute attach while a task is in progress")
return False
async with self._lock:
return await self.loop.run_in_executor(None, self._attach)
def _attach(self):
"""Attach to running docker container.
Need run inside executor.
"""
try:
self.container = self.dock.containers.get(self.docker_name)
self.image = self.container.attrs['Config']['Image']
self.version = get_version_from_env(
self.container.attrs['Config']['Env'])
_LOGGER.info("Attach to image %s with version %s",
self.image, self.version)
except (docker.errors.DockerException, KeyError):
_LOGGER.fatal(
"Can't attach to %s docker container!", self.docker_name)
return False
return True
async def run(self):
"""Run docker image."""
if self._lock.locked():
_LOGGER.error("Can't excute run while a task is in progress")
return False
async with self._lock:
_LOGGER.info("Run docker image %s with version %s",
self.image, self.version)
return await self.loop.run_in_executor(None, self._run)
def _run(self):
"""Run docker image.
Need run inside executor.
"""
raise NotImplementedError()
async def stop(self):
"""Stop/remove docker container."""
if self._lock.locked():
_LOGGER.error("Can't excute stop while a task is in progress")
return False
async with self._lock:
await self.loop.run_in_executor(None, self._stop)
return True
def _stop(self):
"""Stop/remove and remove docker container.
Need run inside executor.
"""
if not self.container:
return
self.container.reload()
if self.container.status == 'running':
with suppress(docker.errors.DockerException):
self.container.stop()
with suppress(docker.errors.DockerException):
self.container.remove(force=True)
self.container = None
async def remove(self):
"""Remove docker container."""
if self._lock.locked():
_LOGGER.error("Can't excute remove while a task is in progress")
return False
async with self._lock:
return await self.loop.run_in_executor(None, self._remove)
def _remove(self):
"""remove docker container.
Need run inside executor.
"""
if self._is_running():
self._stop()
image = "{}:latest".format(self.image)
try:
self.dock.images.remove(image=image, force=True)
except docker.errors.DockerException as err:
_LOGGER.warning("Can't remove image %s -> %s", image, err)
return False
return True
async def update(self, tag):
"""Update a docker image.
Return a Future.
"""
if self._lock.locked():
_LOGGER.error("Can't excute update while a task is in progress")
return False
async with self._lock:
return await self.loop.run_in_executor(None, self._update, tag)
def _update(self, tag):
"""Update a docker image.
Need run inside executor.
"""
old_run = self._is_running()
old_image = "{}:{}".format(self.image, self.version)
_LOGGER.info("Update docker %s with %s:%s",
old_image, self.image, tag)
# update docker image
if self._install(tag):
_LOGGER.info("Cleanup old %s docker.", old_image)
self._stop()
try:
self.dock.images.remove(image=old_image, force=True)
except docker.errors.DockerException as err:
_LOGGER.warning(
"Can't remove old image %s -> %s", old_image, err)
# restore
if old_run:
self._run()
return True
return False
|
Python
| 0.000007
|
@@ -5861,46 +5861,107 @@
-image = %22%7B%7D:latest%22.format(self.image)
+_LOGGER.info(%22Remove docker %25s with latest and %25s%22,%0A self.image, self.version)%0A
%0A
@@ -6143,16 +6143,21 @@
-%3E %25s%22,
+self.
image, e
|
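Unrelated to the log-message fix but central to how _stop() above works: contextlib.suppress is the compact form of try/except-pass for cleanup steps where the exception is expected and safe to ignore. A self-contained illustration (file path is made up):

from contextlib import suppress
import os

with suppress(FileNotFoundError):
    os.remove('/tmp/does-not-exist')  # would raise; suppress() swallows it
print('cleanup continued')  # always reached, mirroring the docker teardown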
c78b0e71bdf2095f4e4a7793615980b5cec1b73c
|
remove auth token from debug log. but it can be uncommented for testing.
|
src/biokbase/narrative/handlers/authhandlers.py
|
src/biokbase/narrative/handlers/authhandlers.py
|
from tornado.escape import url_escape
from notebook.base.handlers import IPythonHandler
from traitlets.config import Application
from notebook.auth.login import LoginHandler
from notebook.auth.logout import LogoutHandler
from biokbase.narrative.common.kblogging import (
get_logger, log_event
)
from biokbase.narrative.common.util import kbase_env
import tornado.log
import os
import urllib
import logging
from biokbase.auth import (
get_user_info,
init_session_env,
set_environ_token
)
"""
KBase handlers for authentication in the Jupyter notebook.
"""
__author__ = 'Bill Riehl <wjriehl@lbl.gov>'
# Set logging up globally.
g_log = get_logger("biokbase.narrative")
app_log = tornado.log.app_log # alias
if Application.initialized:
app_log = Application.instance().log
if os.environ.get('KBASE_DEBUG', False):
app_log.setLevel(logging.DEBUG)
auth_cookie_name = "kbase_session"
class KBaseLoginHandler(LoginHandler):
"""KBase-specific login handler.
This should get the cookie and put it where it belongs.
A (not-so-distant) future version will return a session token.
"""
def get(self):
"""
Initializes the KBase session from the cookie passed into it.
"""
# cookie_regex = re.compile('([^ =|]+)=([^\|]*)')
client_ip = self.request.remote_ip
http_headers = self.request.headers
ua = http_headers.get('User-Agent', 'unknown')
# save client ip in environ for later logging
kbase_env.client_ip = client_ip
auth_cookie = self.cookies.get(auth_cookie_name, None)
if auth_cookie:
token = urllib.unquote(auth_cookie.value)
auth_info = dict()
try:
auth_info = get_user_info(token)
except Exception as e:
app_log.error("Unable to get user information from authentication token!")
if app_log.isEnabledFor(logging.DEBUG):
app_log.debug("kbase cookie = {}".format(cookie_val))
app_log.debug("KBaseLoginHandler.get: user_id={uid} token={tok}"
.format(uid=auth_info.get('user', 'none'),
tok=token))
init_session_env(auth_info, client_ip)
self.current_user = kbase_env.user
log_event(g_log, 'session_start', {'user': kbase_env.user, 'user_agent': ua})
app_log.info("KBaseLoginHandler.get(): user={}".format(kbase_env.user))
app_log.info("KBaseLoginHandler.get(): token={}".format(kbase_env.auth_token))
if self.current_user:
self.redirect(self.get_argument('next', default=self.base_url))
else:
self.write('This is a test?')
def post(self):
pass
@classmethod
def get_user(cls, handler):
user_id = kbase_env.user
if user_id == '':
user_id = 'anonymous'
if user_id is None:
handler.clear_login_cookie()
if not handler.login_available:
user_id = 'anonymous'
return user_id
@classmethod
def password_from_settings(cls, settings):
return u''
@classmethod
def login_available(cls, settings):
"""Whether this LoginHandler is needed - and therefore whether the login page should be displayed."""
return True
class KBaseLogoutHandler(LogoutHandler):
def get(self):
client_ip = self.request.remote_ip
http_headers = self.request.headers
user = kbase_env.user
ua = http_headers.get('User-Agent', 'unknown')
kbase_env.auth_token = 'none'
kbase_env.narrative = 'none'
kbase_env.session = 'none'
kbase_env.user = 'anonymous'
kbase_env.workspace = 'none'
set_environ_token(None)
app_log.info('Successfully logged out')
log_event(g_log, 'session_close', {'user': user, 'user_agent': ua})
self.write(self.render_template('logout.html', message={'info': 'Successfully logged out'}))
|
Python
| 0
|
@@ -1896,24 +1896,83 @@
%0A
+ # re-enable if token logging info is needed.%0A #
if app_log.
@@ -2003,32 +2003,34 @@
UG):%0A
+ #
app_log.deb
@@ -2075,32 +2075,34 @@
al))%0A
+ #
app_log.deb
@@ -2158,32 +2158,34 @@
ok%7D%22%0A
+ #
@@ -2233,32 +2233,34 @@
e'),%0A
+ #
@@ -2463,32 +2463,32 @@
r_agent': ua%7D)%0A%0A
+
app_log.
@@ -2554,95 +2554,8 @@
er))
-%0A app_log.info(%22KBaseLoginHandler.get(): token=%7B%7D%22.format(kbase_env.auth_token))
%0A%0A
|
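The pattern being adjusted above, in isolation: gate sensitive or expensive debug output behind isEnabledFor(), so the message is never built at higher log levels. The commit goes further and comments the token lines out entirely, keeping them only for manual testing. Logger name is illustrative:

import logging

logging.basicConfig(level=logging.INFO)
log = logging.getLogger('example')

if log.isEnabledFor(logging.DEBUG):
    log.debug('token=%s', 'secret-value')  # skipped entirely at INFO
log.info('user=%s', 'somebody')            # still emitted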
cb24a839d39ca606c23869c04112975aca7d0dab
|
Update arch type for 64bits support
|
misc/sampleapp-android-tests/sampleapp/comm.py
|
misc/sampleapp-android-tests/sampleapp/comm.py
|
#!/usr/bin/env python
# coding=utf-8
#
# Copyright (c) 2015 Intel Corporation.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of works must retain the original copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the original copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Intel Corporation nor the names of its contributors
# may be used to endorse or promote products derived from this work without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors:
# Cici,Li<cici.x.li@intel.com>
import unittest
import os
import sys
import commands
import shutil
reload(sys)
sys.setdefaultencoding("utf-8")
script_path = os.path.realpath(__file__)
const_path = os.path.dirname(script_path)
sample_src_pref = "/tmp/crosswalk-samples/"
pack_tools = const_path + "/../tools/crosswalk/"
index_path = "index.html"
def setUp():
global ARCH, MODE, device
device = os.environ.get('DEVICE_ID')
if not device:
print (" get device id error\n")
sys.exit(1)
fp = open(const_path + "/../arch.txt", 'r')
if fp.read().strip("\n\t") != "x86":
ARCH = "arm"
else:
ARCH = "x86"
fp.close()
mode = open(const_path + "/../mode.txt", 'r')
if mode.read().strip("\n\t") != "shared":
MODE = "embedded"
else:
MODE = "shared"
mode.close()
def check_appname():
global app_name
xwalk_version = os.environ.get('XWALK_VERSION')
#xwalk_version = '8.38.208.0'
if int(xwalk_version.split('.')[0]) < 9:
app_name = 'xwalk_echo_app'
else:
app_name = 'Sample'
def pack(cmd, appname, self):
setUp()
os.chdir(const_path + "/../testapp/")
print "Generate APK %s ----------------> START" % appname
print cmd
packstatus = commands.getstatusoutput(cmd)
self.assertEquals(0, packstatus[0])
print "\nGenerate APK %s ----------------> OK\n" % appname
result = commands.getstatusoutput("ls")
self.assertIn(appname, result[1])
os.chdir("../..")
def app_install(cmd, cmdfind, self):
print "Install APK ----------------> START"
inststatus = commands.getstatusoutput(cmd)
self.assertEquals(0, inststatus[0])
print "Install APK ----------------> OK"
pmstatus = commands.getstatusoutput(cmdfind)
self.assertEquals(0, pmstatus[0])
print "Find Package in device ----------------> OK"
def app_launch(cmd, self):
print "Launch APK ----------------> START"
launchstatus = commands.getstatusoutput(cmd)
self.assertNotIn("error", launchstatus[1].lower())
print "Launch APK ----------------> OK"
def app_stop(cmd, self):
print "Stop APK ----------------> START"
stopstatus = commands.getstatusoutput(cmd)
self.assertEquals(0, stopstatus[0])
print "Stop APK ----------------> OK"
def app_uninstall(cmd, self):
print "Uninstall APK ----------------> START"
unistatus = commands.getstatusoutput(cmd)
self.assertEquals(0, unistatus[0])
print "Uninstall APK ----------------> OK"
def others():
if os.path.exists(pack_tools + "/" + AppName):
os.remove(pack_tools + "/" + AppName)
if os.path.exists(const_path + "/../" + AppName):
os.remove(const_path + "/../" + AppName)
|
Python
| 0
|
@@ -2090,18 +2090,22 @@
r')%0A
-if
+ARCH =
fp.read
@@ -2124,70 +2124,8 @@
%5Ct%22)
- != %22x86%22:%0A ARCH = %22arm%22%0A else:%0A ARCH = %22x86%22
%0A
@@ -2191,18 +2191,22 @@
r')%0A
-if
+MODE =
mode.re
@@ -2227,81 +2227,8 @@
%5Ct%22)
- != %22shared%22:%0A MODE = %22embedded%22%0A else:%0A MODE = %22shared%22
%0A
|
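What the diff boils down to: stop collapsing every non-"x86" value to "arm" and trust the file verbatim, so 64-bit identifiers such as "arm64" or "x86_64" pass through to the packaging step unchanged. A self-contained sketch using a temp file in place of arch.txt:

import os
import tempfile

path = os.path.join(tempfile.mkdtemp(), 'arch.txt')
with open(path, 'w') as fp:
    fp.write('arm64\n')

with open(path) as fp:
    ARCH = fp.read().strip('\n\t')  # the new pass-through behavior
print(ARCH)  # arm64 -- the old branching would have reported 'arm'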
b0df73b995a1020aa43a8e9ad85e7bf197744578
|
fix formating
|
updatebot/lib/xobjects.py
|
updatebot/lib/xobjects.py
|
#
# Copyright (c) 2009 rPath, Inc.
#
# This program is distributed under the terms of the Common Public License,
# version 1.0. A copy of this license should have been distributed with this
# source file in a file called LICENSE. If it is not present, the license
# is always available at http://www.rpath.com/permanent/licenses/CPL-1.0.
#
# This program is distributed in the hope that it will be useful, but
# without any warranty; without even the implied warranty of merchantability
# or fitness for a particular purpose. See the Common Public License for
# full details.
#
"""
Module for serializable representations of repository metadata.
"""
from xobj import xobj
import conary
from aptmd.packages import _Package
from aptmd.sources import _SourcePackage
class XDocManager(xobj.Document):
"""
Base class that implements simple freeze/thaw methods.
"""
data = str
freeze = xobj.Document.toxml
@classmethod
def thaw(cls, xml):
"""
Deserialize an xml string into a DocManager instance.
"""
return xobj.parse(xml, documentClass=cls)
@classmethod
def fromfile(cls, fn):
"""
Deserialize from file.
"""
return xobj.parsef(fn, documentClass=cls)
def tofile(self, fn):
"""
Save model to file name.
"""
fObj = open(fn, 'w')
xml = self.toxml()
fObj.write(xml)
fObj.close()
class XMetadata(object):
"""
Representation of repository data.
"""
binaryPackages = [ _Package ]
sourcePackage = _SourcePackage
class XMetadataDoc(XDocManager):
"""
Document class for repository data.
"""
data = XMetadata
def __init__(self, *args, **kwargs):
data = kwargs.pop('data', None)
XDocManager.__init__(self, *args, **kwargs)
if data is not None:
self.data = XMetadata()
self.data.binaryPackages = []
for pkg in data:
if pkg.arch == 'src':
self.data.sourcePackage = pkg
else:
self.data.binaryPackages.append(pkg)
class XDictItem(object):
"""
Object to represent key/value pairs.
"""
key = str
value = str
def __init__(self, key=None, value=None):
self.key = key
self.value = value
def __hash__(self):
return hash(self.key)
def __cmp__(self, other):
if type(other) in (str, unicode):
return cmp(self.key, other)
else:
return cmp(self.key, other.key)
class XDict(object):
"""
String based xobj dict implementation.
"""
items = [ XDictItem ]
def __init__(self):
self.items = []
self._itemClass = self.__class__.__dict__['items'][0]
def __setitem__(self, key, value):
item = self._itemClass(key, value)
if item in self.items:
idx = self.items.index(item)
self.items[idx] = item
else:
self.items.append(item)
def __getitem__(self, key):
if key in self.items:
idx = self.items.index(key)
return self.items[idx].value
raise KeyError, key
def __contains__(self, key):
return key in self.items
class XItemList(object):
"""
List of items.
"""
items = None
def __init__(self):
self.items = []
self._itemClass = self.__class__.__dict__['items'][0]
class XHashableItem(object):
"""
Base class for hashable items.
"""
@property
def key(self):
raise NotImplementedError
def __hash__(self):
return hash(self.key)
def __cmp__(self, other):
return cmp(self.key, other.key)
class XPackageItem(XHashableItem):
"""
Object to represent package data required for group builds with the
managed group factory.
"""
name = str
version = str
flavor = str
byDefault = int
use = str
source = str
def __init__(self, name=None, version=None, flavor=None, byDefault=None,
use=None, source=None):
self.name = name
self.version = version
self.source = source
if byDefault in (True, False):
self.byDefault = int(byDefault)
else:
self.byDefault = byDefault
if use in (True, False):
self.use = int(use)
else:
self.use = use
if type(flavor) == conary.deps.deps.Flavor:
self.flavor = flavor.freeze()
else:
self.flavor = flavor
@property
def key(self):
return (self.name, self.flavor)
class XPackageData(XItemList):
"""
Mapping of package name to package group data.
"""
items = [ XPackageItem ]
class XPackageDoc(XDocManager):
"""
Document class for group data.
"""
data = XPackageData
class XGroup(XHashableItem):
"""
Group file info.
"""
name = str
filename = str
byDefault = int
depCheck = int
def __init__(self, name=None, filename=None, byDefault=True, depCheck=True):
self.name = name
self.filename = filename
self.byDefault = byDefault and 1 or 0
self.depCheck = depCheck and 1 or 0
@property
def key(self):
return self.name
class XGroupList(XItemList):
"""
List of file names to load as groups.
"""
items = [ XGroup ]
class XGroupDoc(XDocManager):
"""
Document for managing group.xml.
"""
data = XGroupList
|
Python
| 0.000039
|
@@ -4652,16 +4652,17 @@
lavor)%0A%0A
+%0A
class XP
|
ca66caa2081a518bb3487775c67cf84e6ae27afa
|
Add auth_enabled=False to a cron trigger test
|
mistral/tests/unit/engine/test_cron_trigger.py
|
mistral/tests/unit/engine/test_cron_trigger.py
|
# Copyright 2015 Alcatel-Lucent, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import mock
from oslo_config import cfg
from mistral.db.v2 import api as db_api
from mistral.services import periodic
from mistral.services import security
from mistral.services import triggers
from mistral.services import workflows
from mistral.tests.unit.engine import base
WORKFLOW_LIST = """
---
version: '2.0'
my_wf:
type: direct
tasks:
task1:
action: std.echo output='Hi!'
"""
class ProcessCronTriggerTest(base.EngineTestCase):
@mock.patch.object(security,
'create_trust',
type('trust', (object,), {'id': 'my_trust_id'}))
def test_start_workflow(self):
cfg.CONF.set_default('auth_enable', True, group='pecan')
wf = workflows.create_workflows(WORKFLOW_LIST)[0]
t = triggers.create_cron_trigger(
'test',
wf.name,
{},
{},
'* * * * * */1',
None,
None,
None
)
self.assertEqual('my_trust_id', t.trust_id)
cfg.CONF.set_default('auth_enable', False, group='pecan')
next_trigger = triggers.get_next_cron_triggers()[0]
next_execution_time_before = next_trigger.next_execution_time
periodic.MistralPeriodicTasks(cfg.CONF).process_cron_triggers_v2(None)
next_trigger = triggers.get_next_cron_triggers()[0]
next_execution_time_after = next_trigger.next_execution_time
# Checking the workflow was executed, by
# verifying that the next execution time changed.
self.assertNotEqual(
next_execution_time_before,
next_execution_time_after
)
def test_workflow_without_auth(self):
cfg.CONF.set_default('auth_enable', False, group='pecan')
wf = workflows.create_workflows(WORKFLOW_LIST)[0]
triggers.create_cron_trigger(
'test',
wf.name,
{},
{},
'* * * * * */1',
None,
None,
None
)
next_triggers = triggers.get_next_cron_triggers()
self.assertEqual(1, len(next_triggers))
next_trigger = next_triggers[0]
next_execution_time_before = next_trigger.next_execution_time
periodic.MistralPeriodicTasks(cfg.CONF).process_cron_triggers_v2(None)
next_triggers = triggers.get_next_cron_triggers()
self.assertEqual(1, len(next_triggers))
next_trigger = next_triggers[0]
next_execution_time_after = next_trigger.next_execution_time
self.assertNotEqual(
next_execution_time_before,
next_execution_time_after
)
@mock.patch('mistral.services.triggers.validate_cron_trigger_input')
def test_create_cron_trigger_with_pattern_and_first_time(self,
validate_mock):
wf = workflows.create_workflows(WORKFLOW_LIST)[0]
        # Make the first_time 1 sec later than the current time, so that it
        # is executed by the next cron-trigger task.
first_time = datetime.datetime.now() + datetime.timedelta(0, 1)
# Creates a cron-trigger with pattern and first time, ensure the
# cron-trigger can be executed more than once, and cron-trigger will
# not be deleted.
cron_trigger = triggers.create_cron_trigger(
'test',
wf.name,
{},
{},
'*/1 * * * *',
first_time,
None,
None
)
self.assertEqual(
first_time,
cron_trigger.next_execution_time
)
periodic.MistralPeriodicTasks(cfg.CONF).process_cron_triggers_v2(None)
next_time = triggers.get_next_execution_time(
cron_trigger.pattern,
cron_trigger.next_execution_time
)
cron_trigger_db = db_api.get_cron_trigger('test')
self.assertIsNotNone(cron_trigger_db)
self.assertEqual(
next_time,
cron_trigger_db.next_execution_time
)
|
Python
| 0.000017
|
@@ -3495,16 +3495,83 @@
_mock):%0A
+ cfg.CONF.set_default('auth_enable', False, group='pecan')%0A%0A
|
1692c2df3c744c6cfd79d78777ab6d9d421d06cd
|
Fix get_lang_class parsing
|
spacy/util.py
|
spacy/util.py
|
import os
import io
import json
import re
import os.path
import six
import sputnik
from sputnik.dir_package import DirPackage
from sputnik.package_list import (PackageNotFoundException,
CompatiblePackageNotFoundException)
from . import about
from .attrs import TAG, HEAD, DEP, ENT_IOB, ENT_TYPE
LANGUAGES = {}
def set_lang_class(name, cls):
global LANGUAGES
LANGUAGES[name] = cls
def get_lang_class(name):
lang = re.split('[^a-zA-Z0-9_]', name, 1)[0]
if lang not in LANGUAGES:
raise RuntimeError('Language not supported: %s' % lang)
return LANGUAGES[lang]
def get_package(data_dir):
if not isinstance(data_dir, six.string_types):
raise RuntimeError('data_dir must be a string')
return DirPackage(data_dir)
def get_package_by_name(name=None, via=None):
if name is None:
return
lang = get_lang_class(name)
try:
return sputnik.package(about.__title__, about.__version__,
name, data_path=via)
except PackageNotFoundException as e:
raise RuntimeError("Model '%s' not installed. Please run 'python -m "
"%s.download' to install latest compatible "
"model." % (name, lang.__module__))
except CompatiblePackageNotFoundException as e:
raise RuntimeError("Installed model is not compatible with spaCy "
"version. Please run 'python -m %s.download "
"--force' to install latest compatible model." %
(lang.__module__))
def normalize_slice(length, start, stop, step=None):
if not (step is None or step == 1):
raise ValueError("Stepped slices not supported in Span objects."
"Try: list(tokens)[start:stop:step] instead.")
if start is None:
start = 0
elif start < 0:
start += length
start = min(length, max(0, start))
if stop is None:
stop = length
elif stop < 0:
stop += length
stop = min(length, max(start, stop))
assert 0 <= start <= stop <= length
return start, stop
def utf8open(loc, mode='r'):
return io.open(loc, mode, encoding='utf8')
def read_lang_data(package):
tokenization = package.load_json(('tokenizer', 'specials.json'))
with package.open(('tokenizer', 'prefix.txt'), default=None) as file_:
prefix = read_prefix(file_) if file_ is not None else None
with package.open(('tokenizer', 'suffix.txt'), default=None) as file_:
suffix = read_suffix(file_) if file_ is not None else None
with package.open(('tokenizer', 'infix.txt'), default=None) as file_:
infix = read_infix(file_) if file_ is not None else None
return tokenization, prefix, suffix, infix
def read_prefix(fileobj):
entries = fileobj.read().split('\n')
expression = '|'.join(['^' + re.escape(piece) for piece in entries if piece.strip()])
return expression
def read_suffix(fileobj):
entries = fileobj.read().split('\n')
expression = '|'.join([piece + '$' for piece in entries if piece.strip()])
return expression
def read_infix(fileobj):
entries = fileobj.read().split('\n')
expression = '|'.join([piece for piece in entries if piece.strip()])
return expression
# def read_tokenization(lang):
# loc = path.join(DATA_DIR, lang, 'tokenization')
# entries = []
# seen = set()
# with utf8open(loc) as file_:
# for line in file_:
# line = line.strip()
# if line.startswith('#'):
# continue
# if not line:
# continue
# pieces = line.split()
# chunk = pieces.pop(0)
# assert chunk not in seen, chunk
# seen.add(chunk)
# entries.append((chunk, list(pieces)))
# if chunk[0].isalpha() and chunk[0].islower():
# chunk = chunk[0].title() + chunk[1:]
# pieces[0] = pieces[0][0].title() + pieces[0][1:]
# seen.add(chunk)
# entries.append((chunk, pieces))
# return entries
# def read_detoken_rules(lang): # Deprecated?
# loc = path.join(DATA_DIR, lang, 'detokenize')
# entries = []
# with utf8open(loc) as file_:
# for line in file_:
# entries.append(line.strip())
# return entries
def align_tokens(ref, indices): # Deprecated, surely?
start = 0
queue = list(indices)
for token in ref:
end = start + len(token)
emit = []
while queue and queue[0][1] <= end:
emit.append(queue.pop(0))
yield token, emit
start = end
assert not queue
def detokenize(token_rules, words): # Deprecated?
"""To align with treebanks, return a list of "chunks", where a chunk is a
sequence of tokens that are separated by whitespace in actual strings. Each
chunk should be a tuple of token indices, e.g.
>>> detokenize(["ca<SEP>n't", '<SEP>!'], ["I", "ca", "n't", "!"])
[(0,), (1, 2, 3)]
"""
string = ' '.join(words)
for subtoks in token_rules:
# Algorithmically this is dumb, but writing a little list-based match
# machine? Ain't nobody got time for that.
string = string.replace(subtoks.replace('<SEP>', ' '), subtoks)
positions = []
i = 0
for chunk in string.split():
subtoks = chunk.split('<SEP>')
positions.append(tuple(range(i, i+len(subtoks))))
i += len(subtoks)
return positions
|
Python
| 0.000001
|
@@ -474,21 +474,9 @@
it('
-%5B%5Ea-zA-Z0-9_%5D
+_
', n
|
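Why the one-character pattern change matters: model names take the form lang + '_' + variant (e.g. 'en_default'), but the old pattern only split on characters outside [a-zA-Z0-9_], so the underscore survived and the LANGUAGES lookup received the whole name. A two-line repro (example name assumed):

import re

name = 'en_default'
print(re.split('[^a-zA-Z0-9_]', name, maxsplit=1)[0])  # 'en_default' -- nothing to split on
print(re.split('_', name, maxsplit=1)[0])              # 'en' -- what get_lang_class needs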
31f2f3671b803e3b08dbfdd33bc58a59beabcb5c
|
Add simple logging config.
|
src/project/settings.py
|
src/project/settings.py
|
"""
Django settings for project project.
Generated by 'django-admin startproject' using Django 1.9.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '3^*gla4yobv6!4l_0r4bb6am=qoe3!-^1+arl@q48=m!b7*sfd'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# ALLOWED_HOSTS = []
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# # MySQL
# DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.mysql',
# 'NAME': os.environ.get('MYSQL_ENV_MYSQL_DATABASE'),
# 'USER': os.environ.get('MYSQL_ENV_MYSQL_USER'),
# 'PASSWORD': os.environ.get('MYSQL_ENV_MYSQL_PASSWORD'),
# 'HOST': os.environ.get('MYSQL_PORT_3306_TCP_ADDR'),
# 'PORT': os.environ.get('MYSQL_PORT_3306_TCP_PORT'),
# 'OPTIONS': {
# 'init_command': "SET sql_mode='STRICT_TRANS_TABLES'",
# },
# },
# }
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.normpath(os.path.join(BASE_DIR, '..', 'static'))
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.normpath(os.path.join(BASE_DIR, '..', 'media'))
# PROJECT_DISPLAY_NAME = 'Project Name'
|
Python
| 0
|
@@ -807,16 +807,95 @@
= True%0A%0A
+ADMINS = (%0A # ('Your Name', 'your_email@example.com'),%0A)%0AMANAGERS = ADMINS%0A%0A
# ALLOWE
@@ -3990,8 +3990,392 @@
t Name'%0A
+%0ALOGGING = %7B%0A 'version': 1,%0A 'disable_existing_loggers': False,%0A 'handlers': %7B%0A 'mail_admins': %7B%0A 'level': 'ERROR',%0A 'class': 'django.utils.log.AdminEmailHandler'%0A %7D%0A %7D,%0A 'loggers': %7B%0A 'django.request': %7B%0A 'handlers': %5B'mail_admins'%5D,%0A 'level': 'ERROR',%0A 'propagate': True,%0A %7D,%0A %7D%0A%7D%0A
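Decoded from the URL-encoded payload above (indentation normalized, so treat this as a readable sketch rather than a byte-exact decode), the two additions in this diff are:

ADMINS = (
    # ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS

LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            # Mails unhandled-exception reports to everyone listed in ADMINS
            'class': 'django.utils.log.AdminEmailHandler'
        }
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
    }
}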
|
26c175cbe77eb2f27f01418e0d52381b12e8d33f
|
Add additional supported platforms to Subject (close #172)
|
snowplow_tracker/subject.py
|
snowplow_tracker/subject.py
|
"""
subject.py
Copyright (c) 2013-2014 Snowplow Analytics Ltd. All rights reserved.
This program is licensed to you under the Apache License Version 2.0,
and you may not use this file except in compliance with the Apache License
Version 2.0. You may obtain a copy of the Apache License Version 2.0 at
http://www.apache.org/licenses/LICENSE-2.0.
Unless required by applicable law or agreed to in writing,
software distributed under the Apache License Version 2.0 is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the Apache License Version 2.0 for the specific
language governing permissions and limitations there under.
Authors: Anuj More, Alex Dean, Fred Blundun
Copyright: Copyright (c) 2013-2014 Snowplow Analytics Ltd
License: Apache License Version 2.0
"""
from contracts import contract, new_contract
SUPPORTED_PLATFORMS = set(["pc", "tv", "mob", "cnsl", "iot"])
DEFAULT_PLATFORM = "pc"
new_contract("subject", lambda x: isinstance(x, Subject))
new_contract("supported_platform", lambda x: x in SUPPORTED_PLATFORMS)
class Subject(object):
"""
Class for an event subject, where we view events as of the form
(Subject) -> (Verb) -> (Object)
"""
def __init__(self):
self.standard_nv_pairs = {"p": DEFAULT_PLATFORM}
@contract
def set_platform(self, value):
"""
:param value: One of ["pc", "tv", "mob", "cnsl", "iot"]
:type value: supported_platform
:rtype: subject
"""
self.standard_nv_pairs["p"] = value
return self
@contract
def set_user_id(self, user_id):
"""
:param user_id: User ID
:type user_id: string
:rtype: subject
"""
self.standard_nv_pairs["uid"] = user_id
return self
@contract
def set_screen_resolution(self, width, height):
"""
:param width: Width of the screen
:param height: Height of the screen
:type width: int,>0
:type height: int,>0
:rtype: subject
"""
self.standard_nv_pairs["res"] = "".join([str(width), "x", str(height)])
return self
@contract
def set_viewport(self, width, height):
"""
:param width: Width of the viewport
:param height: Height of the viewport
:type width: int,>0
:type height: int,>0
:rtype: subject
"""
self.standard_nv_pairs["vp"] = "".join([str(width), "x", str(height)])
return self
@contract
def set_color_depth(self, depth):
"""
:param depth: Depth of the color on the screen
:type depth: int
:rtype: subject
"""
self.standard_nv_pairs["cd"] = depth
return self
@contract
def set_timezone(self, timezone):
"""
:param timezone: Timezone as a string
:type timezone: string
:rtype: subject
"""
self.standard_nv_pairs["tz"] = timezone
return self
@contract
def set_lang(self, lang):
"""
Set language.
:param lang: Language the application is set to
:type lang: string
:rtype: subject
"""
self.standard_nv_pairs["lang"] = lang
return self
@contract
def set_domain_user_id(self, duid):
"""
Set the domain user ID
:param duid: Domain user ID
:type duid: string
:rtype: subject
"""
self.standard_nv_pairs["duid"] = duid
return self
@contract
def set_ip_address(self, ip):
"""
        Set the IP address
:param ip: IP address
:type ip: string
:rtype: subject
"""
self.standard_nv_pairs["ip"] = ip
return self
@contract
def set_useragent(self, ua):
"""
Set the user agent
:param ua: User agent
:type ua: string
:rtype: subject
"""
self.standard_nv_pairs["ua"] = ua
return self
@contract
def set_network_user_id(self, nuid):
"""
Set the network user ID field
This overwrites the nuid field set by the collector
:param nuid: Network user ID
:type nuid: string
:rtype: subject
"""
self.standard_nv_pairs["tnuid"] = nuid
return self
|
Python
| 0
|
@@ -984,16 +984,37 @@
%22, %22iot%22
+, %22web%22, %22srv%22, %22app%22
%5D)%0ADEFAU
@@ -1536,16 +1536,37 @@
%22, %22iot%22
+, %22web%22, %22srv%22, %22app%22
%5D%0A
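A short usage sketch of the widened contract (hypothetical, not from the source): after this diff, "web", "srv" and "app" pass the supported_platform contract alongside the original five values.

from snowplow_tracker.subject import Subject

subject = Subject()
subject.set_platform("srv")   # newly accepted server-side platform
subject.set_platform("mob")   # previously supported values still pass
# Before this change, "srv" would have raised a contract violation.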
|
a5d3c78295d951fd29f00fc8d8480c2a518fd615
|
set srid explicit
|
utm_zone_info/viewsets.py
|
utm_zone_info/viewsets.py
|
from rest_framework import status, viewsets
from rest_framework.response import Response
from utm_zone_info.coordinate_reference_system import utm_zones_for_representing
from utm_zone_info.serializers import GeometrySerializer
class UTMZoneInfoViewSet(viewsets.ViewSet):
"""
    A simple ViewSet that accepts posted Points and returns valid utm_zones.
"""
serializer_class = GeometrySerializer
def create(self, request):
serializer = self.serializer_class(data=request.data)
if serializer.is_valid():
geometry = serializer.validated_data['geom']
if geometry.srid is None:
geometry.srid = serializer.validated_data['srid']
data = dict(
utm_zone_srids=[zone.srid for zone in utm_zones_for_representing(geometry)]
)
return Response(data=data)
else:
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
Python
| 0.000002
|
@@ -584,50 +584,8 @@
m'%5D%0A
- if geometry.srid is None:%0A
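The deleted guard means the serializer's 'srid' field now overwrites whatever SRID the posted geometry already carried. A minimal sketch of the new behavior (the Geometry stand-in below is an assumption; only the srid attribute matters):

class Geometry(object):
    # Stand-in for a GEOS geometry object.
    def __init__(self, srid=None):
        self.srid = srid

geometry = Geometry(srid=4326)
geometry.srid = 32633          # set unconditionally after this diff
assert geometry.srid == 32633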
|
b62423f6ccb47a6f4074ec8e95d9861a3bb06874
|
Change error message
|
ckanext/requestdata/logic/validators.py
|
ckanext/requestdata/logic/validators.py
|
from email_validator import validate_email
from ckan.plugins.toolkit import _
from ckan.plugins.toolkit import get_action
def email_validator(key, data, errors, context):
email = data[key]
try:
validate_email(email)
except Exception:
message = _('Please provide a valid email address.')
errors[key].append(message)
def state_validator(key, data, errors, context):
possible_state = ['new', 'open', 'archive']
if data[key] not in possible_state:
message = _('The state parameter must be new, open or archive.')
errors[key].append(message)
def boolean_validator(key, data, errors, context):
if not isinstance(data[key], bool):
message = _('The {0} parameter must be a Boolean value.'
.format(key[0]))
errors[key].append(message)
def members_in_org_validator(key, data, errors, context):
maintainers = data[key].split(',')
model = context['model']
owner_org = data[('owner_org',)]
data_dict = {
'id': owner_org
}
members_in_org = get_action('member_list')(context, data_dict)
# member_list returns more than just users, so we need to extract only
# users
members_in_org = [member for member in members_in_org
if member[1] == 'user']
for email in maintainers:
user = model.User.by_email(email)
user_found = False
if len(user) > 0:
user = user[0]
for member in members_in_org:
if member[0] == user.id:
user_found = True
if not user_found:
message = _('The user with email "{0}" is not part of this '
'organization.'.format(email))
errors[key].append(message)
else:
message = _('The user with email "{0}" is not part of this '
'organization.'.format(email))
errors[key].append(message)
|
Python
| 0.000001
|
@@ -1854,36 +1854,31 @@
l %22%7B0%7D%22
-i
+doe
s not
-part of this
+exist.
'%0A
@@ -1891,39 +1891,24 @@
-'organization.'
.format(emai
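Decoded, the diff rewrites the fallback message in the else branch to the line below, distinguishing an unknown address from a known user outside the organization:

message = _('The user with email "{0}" does not exist.'.format(email))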
|
6427e4fd0175fd39e70337e12cc662f5f38cb9b5
|
Add more information on the context for the email
|
lowfat/mail.py
|
lowfat/mail.py
|
"""
Send email for some views.
"""
from constance import config
from django.contrib.flatpages.models import FlatPage
from django.core.mail import send_mail, mail_admins
from django.core.urlresolvers import reverse
from django.template import Context, Template
from .models import *
from .settings import DEFAULT_FROM_EMAIL
def reverse_full(*args, **kargs):
"""Return the full address using Django reverse."""
    # FIXME Avoid hard-coding the domain.
return '{}{}'.format(
config.DOMAIN,
reverse(*args, **kargs)
)
def new_notification(admin_url, admin_context, user_url, user_email, user_context):
if config.STAFF_EMAIL_NOTIFICATION:
# Email to admin
flatemail = FlatPage.objects.get(url=admin_url)
mail_admins(
flatemail.title,
flatemail.content.format(**admin_context),
fail_silently=False,
)
if config.CLAIMANT_EMAIL_NOTIFICATION:
# Email to claimant
flatemail = FlatPage.objects.get(url=user_url)
send_mail(
flatemail.title,
flatemail.content.format(**user_context),
DEFAULT_FROM_EMAIL,
[user_email],
fail_silently=False
)
def new_fund_notification(fund):
admin_url = "/email/template/fund/admin/"
admin_context = {
"link": reverse_full("fund_review", args=[fund.id]),
}
user_url = "/email/template/fund/claimant/"
user_context = {
"link": reverse_full("fund_detail", args=[fund.id]),
}
user_email = fund.claimant.email
new_notification(admin_url, admin_context, user_url, user_email, user_context)
def new_expense_notification(expense):
admin_url = "/email/template/expense/admin/"
admin_context = {
"link": reverse_full("expense_review", args=[expense.id]),
}
user_url = "/email/template/expense/claimant/"
user_context = {
"link": reverse_full("expense_detail", args=[expense.id]),
}
user_email = expense.fund.claimant.email
new_notification(admin_url, admin_context, user_url, user_email, user_context)
def new_blog_notification(blog):
admin_url = "/email/template/blog/admin/"
admin_context = {
"link": reverse_full("blog_review", args=[blog.id]),
}
user_url = "/email/template/blog/claimant/"
user_context = {
"link": reverse_full("blog_detail", args=[blog.id]),
}
user_email = blog.fund.claimant.email
new_notification(admin_url, admin_context, user_url, user_email, user_context)
def review_notification(mail, old, new, url):
if config.CLAIMANT_EMAIL_NOTIFICATION:
# Email to claimant
flatemail = FlatPage.objects.get(url=url)
template = Template(flatemail.content)
context = Context({
"old": old,
"new": new,
"new_message": mail.justification,
})
send_mail(
flatemail.title,
template.render(context),
mail.sender.email,
[mail.receiver.email],
fail_silently=False
)
def fund_review_notification(message, sender, old, new):
mail = FundSentMail(
**{
"justification": message,
"sender": sender,
"receiver": new.claimant,
"fund": new,
}
)
review_notification(mail, old, new, "/email/template/fund/claimant/new/")
if message:
mail.save()
def expense_review_notification(message, sender, old, new):
mail = ExpenseSentMail(
**{
"justification": message,
"sender": sender,
"receiver": new.fund.claimant,
"expense": new,
}
)
review_notification(mail, old, new, "/email/template/expense/claimant/new/")
if message:
mail.save()
def blog_review_notification(message, sender, old, new):
mail = BlogSentMail(
**{
"justification": message,
"sender": sender,
"receiver": new.fund.claimant,
"blog": new,
}
)
review_notification(mail, old, new, "/email/template/blog/claimant/new/")
if message:
mail.save()
|
Python
| 0.000723
|
@@ -737,24 +737,112 @@
=admin_url)%0A
+ template = Template(flatemail.content)%0A context = Context(admin_context)%0A
mail
@@ -895,41 +895,24 @@
-flatemail.content.format(**admin_
+template.render(
cont
@@ -948,17 +948,16 @@
ly=False
-,
%0A
@@ -1082,24 +1082,111 @@
l=user_url)%0A
+ template = Template(flatemail.content)%0A context = Context(user_context)%0A
send
@@ -1237,40 +1237,24 @@
-flatemail.content.format(**user_
+template.render(
cont
@@ -1453,32 +1453,54 @@
min_context = %7B%0A
+ %22fund%22: fund,%0A
%22link%22:
@@ -1612,32 +1612,54 @@
ser_context = %7B%0A
+ %22fund%22: fund,%0A
%22link%22:
@@ -1933,32 +1933,60 @@
min_context = %7B%0A
+ %22expense%22: expense,%0A
%22link%22:
@@ -2107,32 +2107,60 @@
ser_context = %7B%0A
+ %22expense%22: expense,%0A
%22link%22:
@@ -2439,32 +2439,54 @@
min_context = %7B%0A
+ %22blog%22: blog,%0A
%22link%22:
@@ -2598,32 +2598,54 @@
ser_context = %7B%0A
+ %22blog%22: blog,%0A
%22link%22:
|
840bbdff4245d39317f8ad64f48999bdf288683f
|
version name/code and wifi timestamp
|
webserver/opentrain/analysis/logic.py
|
webserver/opentrain/analysis/logic.py
|
import models
import json
import reports.models
import common.ot_utils
from django.conf import settings
def analyze_raw_reports(clean=True):
if clean:
delete_all_reports()
COUNT = 20
offset = 0
while True:
cont = analyze_raw_reports_subset(offset,COUNT)
offset += COUNT
if not cont:
return
def analyze_raw_reports_subset(offset,count):
items = _collect_items(offset,count)
if items:
for item in items:
dump_item(item)
return True
return False
from django.db import transaction
@transaction.atomic
def dump_item(item):
if 'wifi' not in item:
return None
wifis = []
locs = []
report_dt = common.ot_utils.get_utc_time_from_timestamp(float(item['time'])/1000)
m = models.Report(device_id=item['device_id'],timestamp=report_dt)
if models.Report.objects.filter(device_id=item['device_id'],timestamp=report_dt).exists():
#print 'Repeated report - skipping'
return None
m.save()
item_loc = item.get('location_api')
if item_loc:
loc = models.LocationInfo(report=m,
lat=item_loc['lat'],
lon=item_loc['long'],
provider=item_loc['provider'],
timestamp = common.ot_utils.get_utc_time_from_timestamp(float(item_loc['time'])/1000),
accuracy = item_loc['accuracy'])
locs.append(loc)
for wifi in item['wifi']:
wifis.append(models.SingleWifiReport(SSID=wifi['SSID'],
signal=wifi['signal'],
frequency=wifi['frequency'],
key=wifi['key'],
report=m))
models.SingleWifiReport.objects.bulk_create(wifis)
models.LocationInfo.objects.bulk_create(locs)
def delete_all_reports():
common.ot_utils.delete_from_model(models.SingleWifiReport)
common.ot_utils.delete_from_model(models.LocationInfo)
common.ot_utils.delete_from_model(models.Report)
def _collect_items(offset,count):
all_reports_count = reports.models.RawReport.objects.count()
print '*** offset = %d count = %d all_reports_count = %d' % (offset,count,all_reports_count)
all_reports = reports.models.RawReport.objects.all().order_by('id')[offset:offset+count]
result = []
for rj in all_reports:
items = json.loads(rj.text)['items']
result.extend(items)
return result
def analyze_single_raw_report(rr):
import algorithm.train_tracker
items = json.loads(rr.text)['items']
reports = []
for item in items:
report = dump_item(item)
if report:
reports.append(report)
for report in reports:
algorithm.train_tracker.add_report(report)
## DEVICES SUMMARY ##
def get_devices_summary():
from django.db import connection
cursor = connection.cursor()
cursor.execute("""
SELECT device_id,MIN(DATE(timestamp)) as device_date,
COUNT(*) from analysis_report
GROUP BY device_id
ORDER BY device_date
""")
tuples = cursor.fetchall()
result = []
for t in tuples:
d = dict(device_id=t[0],
device_date=t[1].isoformat(),
device_count=t[2])
result.append(d)
return result
def get_device_reports(device_id,info):
qs = models.Report.objects.order_by('id').filter(my_loc__isnull=False,
id__gte=info['since_id'],
device_id=device_id)
if info['stops_only']:
qs = qs.filter(wifi_set__SSID='S-ISRAEL-RAILWAYS').distinct().order_by('id')
qs = qs.prefetch_related('my_loc','wifi_set')
info['total_count'] = qs.count()
qs = qs[info['offset']:info['offset'] + info['limit']]
result = []
for obj in qs:
result.append(obj.to_api_dict(full=info['full']))
return result
@common.ot_utils.benchit
def test4():
import gtfs.models
import gtfs.logic
secs = 1391451464.94
dt = common.ot_utils.unix_time_to_localtime(secs)
trip_id = '030214_00089'
trip = gtfs.models.Trip.objects.get(trip_id=trip_id)
exp_shape=gtfs.logic.get_expected_location(trip, dt)
assert exp_shape.shape_pt_lat == 32.10497517
assert exp_shape.shape_pt_lon == 34.80547358
def get_current_trips(dt=None):
import gtfs.logic
if not dt:
dt = common.ot_utils.get_localtime_now()
current_trips = gtfs.logic.get_all_trips_in_datetime(dt)
result = []
for trip in current_trips:
trip_dict = trip.to_json_full(with_shapes=False)
trip_dict['is_live'] = is_live(trip)
result.append(trip_dict)
return result
def get_trips_location(trip_ids):
import gtfs.logic
result = []
dt = common.ot_utils.get_localtime_now()
current_trips = gtfs.models.Trip.objects.filter(trip_id__in=trip_ids)
for trip in current_trips:
trip_id = trip.trip_id
(exp_shape,cur_shape)=gtfs.logic.get_expected_location(trip, dt)
res = dict(trip_id=trip_id,
exp_point = exp_shape)
if cur_shape and settings.FAKE_CUR:
res['cur_point'] = cur_shape
cur_loc = get_current_location(trip)
if cur_loc:
res['cur_point'] = cur_loc
result.append(res)
return result
def get_current_location(trip):
from redis_intf.client import load_by_key
return load_by_key('current_trip_id:coords:%s' % (trip.trip_id))
def is_live(trip):
import gtfs.logic
if get_current_location(trip):
return True
return gtfs.logic.fake_cur_location(trip)
|
Python
| 0.000003
|
@@ -1013,24 +1013,132 @@
return None%0A
+ m.app_version_name = item.get('app_version_name')%0A m.app_version_code = item.get('app_version_code')%0A
m.save()
@@ -1138,16 +1138,16 @@
.save()%0A
-
item
@@ -1654,24 +1654,188 @@
em%5B'wifi'%5D:%0A
+ if wifi.get('timestamp'):%0A ts = common.ot_utils.get_utc_time_from_timestamp(float(wifi%5B'timestamp'%5D) / 1000)%0A else:%0A ts = None%0A
wifi
@@ -2024,24 +2024,24 @@
requency'%5D,%0A
-
@@ -2086,24 +2086,85 @@
ifi%5B'key'%5D,%0A
+ timestamp = ts,%0A
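A standalone sketch of the millisecond-epoch conversion the diff applies to wifi['timestamp'] (the helper name get_utc_time_from_timestamp comes from the source; the implementation below is an assumption):

from datetime import datetime

def get_utc_time_from_timestamp(seconds):
    # Raw reports carry epoch milliseconds; callers divide by 1000 first.
    return datetime.utcfromtimestamp(seconds)

ts = get_utc_time_from_timestamp(float("1391451464940") / 1000)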
|
46f651c7336e4c517c9f4ee19753ec8461757ce8
|
decompose algorithm
|
bin/Flaws_generators/Generator.py
|
bin/Flaws_generators/Generator.py
|
import shutil
from Classes.Manifest import Manifest
from .Input import InputSample
from .Filtering import FilteringSample
from .Sink import SinkSample
from .ExecQuery import ExecQuerySample
from Classes.FileManager import FileManager
from Classes.File import File
import xml.etree.ElementTree as ET
class Generator():
def __init__(self, date):
self.date = date
# TODO
# self.manifest = Manifest(date, flaw)
self.safe_Sample = 0
self.unsafe_Sample = 0
# parse XML files
tree_input = ET.parse(FileManager.getXML("input")).getroot()
self.tab_input = [InputSample(inp) for inp in tree_input]
tree_filtering = ET.parse(FileManager.getXML("filtering")).getroot()
self.tab_filtering = [FilteringSample(filtering) for filtering in tree_filtering]
tree_sink = ET.parse(FileManager.getXML("sink")).getroot()
self.tab_sink = [SinkSample(sink) for sink in tree_sink]
tree_exec_query = ET.parse(FileManager.getXML("exec_queries")).getroot()
self.tab_exec_queries = [ExecQuerySample(exec_query) for exec_query in tree_exec_query]
# set current samples
self.current_input = None
self.current_filtering = None
self.current_sink = None
self.current_exec_queries = None
def is_safe_selection(self):
return self.current_input.is_safe() or self.current_filtering.is_safe(self.current_sink.get_flaw_type()) or self.current_sink.is_safe()
def generate(self):
# TODO check params : ex CWE_XXX, URF, ...
self.select_sink()
    # first step : browse sink
def select_sink(self):
for sink in self.tab_sink:
self.current_sink = sink
# TODO check if sink need filtering or input
self.select_filtering()
# second step : browse filtering
def select_filtering(self):
# select filtering
for filtering in self.tab_filtering:
self.current_filtering = filtering
# check if sink and filtering are compatibles
if self.current_sink.compatible_with_filtering(filtering):
# TODO check if filtering need input
self.select_input()
# third step : browse input
def select_input(self):
# select input
for inp in self.tab_input:
if self.current_filtering.get_input_type() == inp.get_output_type():
self.current_input = inp
self.select_complexities()
# fourth step : browse complexities
def select_complexities(self):
self.select_exec_queries()
# fifth step : browse exec_queries
def select_exec_queries(self):
if self.current_sink.need_exec():
# select exec_queries
for exec_query in self.tab_exec_queries:
if self.current_sink.compatible_with_exec_queries(exec_query):
self.current_exec_queries = exec_query
self.compose()
else:
self.current_exec_queries = None
self.compose()
def compose(self):
print("#########################")
print("safe : "+str(self.is_safe_selection()))
print(self.current_input)
print(self.current_filtering)
print(self.current_sink)
print(self.current_exec_queries)
def getType(self):
pass
def generateFileName(self, params, name):
for param in params:
name += "__"
for dir in param.path:
name += dir+"-"
name = name[:-1]
return name
def onDestroy(self, flaw):
self.manifest.close()
if self.safe_Sample+self.unsafe_Sample > 0:
print(flaw + " generation report:")
print(str(self.safe_Sample) + " safe samples")
print(str(self.unsafe_Sample) + " unsafe samples")
print(str(self.unsafe_Sample + self.safe_Sample) + " total\n")
else:
shutil.rmtree("../CsharpTestSuite_"+self.date+"/"+flaw)
@staticmethod
def findFlaw(fileName):
sample = open(fileName, 'r')
i = 0
for line in sample.readlines():
i += 1
if line[:6] == "//flaw":
break
return i + 1
|
Python
| 0.999875
|
@@ -383,16 +383,51 @@
# TODO
+ readd this for manifest generation
%0A
@@ -1338,16 +1338,49 @@
s = None
+%0A self.current_code = None
%0A%0A de
@@ -2568,17 +2568,16 @@
ties()%0A%0A
-%0A
# fo
@@ -3150,124 +3150,352 @@
-def compose(self):%0A print(%22#########################%22)%0A print(%22safe : %22+str(self.is_safe_selection
+# seventh step : compose previous code chunks%0A def compose(self):%0A # TODO replace placeholder on complexities by input/filtering/sink/exec_queries%0A # temporary code%0A self.current_code = %22%22%0A self.current_code += str(self.current_input.get_code())%0A self.current_code += str(self.current_filtering.get_code
())
-)
%0A
@@ -3491,37 +3491,56 @@
code())%0A
-print
+self.current_code += str
(self.current_in
@@ -3541,126 +3541,512 @@
ent_
+s
in
-put)%0A print(self.current_filtering)%0A print(self.current_sink)%0A print(self.current_exec_queries)
+k.get_code())%0A if self.current_exec_queries:%0A self.current_code += str(self.current_exec_queries.get_code())%0A self.write_files()%0A%0A # eighth step : write on disk and update manifest%0A def write_files(self):%0A # TODO write on file%0A print(%22#########################################%22)%0A print(%22safe : %22+str(self.is_safe_selection()))%0A print(self.current_code)%0A print(%22#########################################%22)%0A # TODO update Manifest
%0A%0A
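The compose() body added above concatenates the get_code() of each selected chunk in order. A stripped-down, self-contained sketch of that idea (all names hypothetical):

class Chunk(object):
    def __init__(self, code):
        self._code = code
    def get_code(self):
        return self._code

def compose(chunks):
    # exec_queries may be None when the sink needs no query execution.
    return "".join(c.get_code() for c in chunks if c is not None)

code = compose([Chunk("input;\n"), Chunk("filter;\n"), Chunk("sink;\n"), None])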
|
09e8dd8ed521105aedeb9d35234998d7fa82bb4d
|
Format max line length to 79.
|
sqlcop/cli.py
|
sqlcop/cli.py
|
from __future__ import print_function
import sys
import sqlparse
import optparse
from sqlcop.checks.cross_join import CrossJoinCheck
from sqlcop.checks.order_by_count import OrderByCountCheck
def parse_file(filename):
try:
return open(filename, 'r').readlines()
except UnicodeDecodeError:
# It's unclear whether or not something still relies on the ascii encoding so I've only changed it to use utf-8
# on exception.
return open(filename, 'r', encoding="utf-8").readlines()
CHECKS = (
(CrossJoinCheck, 'query contains cross join'),
(OrderByCountCheck, 'query contains a count with an order by clause')
)
def check_query(options, el):
"""
Run each of the defined checks on a query.
"""
stmt = sqlparse.parse(el)
checks = (
(check_class(**options), message)
for check_class, message in CHECKS
)
for check in checks:
if check[0](stmt[0]):
return False, check[1]
return True, ''
def main():
parser = optparse.OptionParser('sqlcop')
parser.add_option(
'--db-urls',
help=(
'Comma-separated db urls. '
'Used to fetch schema for the database so sqlcop '
'can make more accurate judgement based on the schema'
)
)
opt, args = parser.parse_args()
if len(args) < 1:
parser.error('SQL file required')
lines = parse_file(args[0])
db_urls = opt.db_urls.split(',')
if not isinstance(db_urls, list):
db_urls = list(db_urls)
failed = False
options = {'db_urls': db_urls}
last_comment = ''
for line in lines:
passed, message = check_query(options, line)
if not passed:
failed = True
print_message(message, line, last_comment)
if line.startswith('-- '):
last_comment = line
exit(failed)
def exit(failed):
sys.exit(255 if failed else 0)
def print_message(message, query, last_comment):
print("FAILED - %s" % (message))
print("-" * 70)
print()
print("Query:")
print("%s" % query)
if last_comment:
print("Preceding SQL Comment:")
print(last_comment)
|
Python
| 0.000351
|
@@ -373,16 +373,26 @@
he ascii
+%0A #
encodin
@@ -429,26 +429,16 @@
se utf-8
-%0A #
on exce
|
26c45d7885d0bc0e31624942ad64b76a6c7ca0cf
|
add default class
|
mpcontribs-client/mpcontribs/client/__init__.py
|
mpcontribs-client/mpcontribs/client/__init__.py
|
# -*- coding: utf-8 -*-
import os
import fido
import warnings
import pandas as pd
from pyisemail import is_email
from pyisemail.diagnosis import BaseDiagnosis
from swagger_spec_validator.common import SwaggerValidationError
from bravado_core.formatter import SwaggerFormat
from bravado.client import SwaggerClient
from bravado.fido_client import FidoClient # async
from bravado.http_future import HttpFuture
from bravado.swagger_model import Loader
from bravado.config import bravado_config_from_config_dict
from bravado_core.spec import Spec
from json2html import Json2Html
from IPython.display import display, HTML
from boltons.iterutils import remap
DEFAULT_HOST = "api.mpcontribs.org"
HOST = os.environ.get("MPCONTRIBS_API_HOST", DEFAULT_HOST)
j2h = Json2Html()
pd.options.plotting.backend = "plotly"
warnings.formatwarning = lambda msg, *args, **kwargs: f"{msg}\n"
warnings.filterwarnings("default", category=DeprecationWarning, module=__name__)
def validate_email(email_string):
d = is_email(email_string, diagnose=True)
if d > BaseDiagnosis.CATEGORIES["VALID"]:
raise SwaggerValidationError(f"{email_string} {d.message}")
email_format = SwaggerFormat(
format="email",
to_wire=str,
to_python=str,
validate=validate_email,
description="e-mail address",
)
class FidoClientGlobalHeaders(FidoClient):
def __init__(self, headers=None):
super().__init__()
self.headers = headers or {}
def request(self, request_params, operation=None, request_config=None):
request_for_twisted = self.prepare_request_for_twisted(request_params)
request_for_twisted["headers"].update(self.headers)
future_adapter = self.future_adapter_class(fido.fetch(**request_for_twisted))
return HttpFuture(
future_adapter, self.response_adapter_class, operation, request_config
)
def visit(path, key, value):
if isinstance(value, dict) and "display" in value:
return key, value["display"]
return key not in ["value", "unit"]
class Dict(dict):
def pretty(self, attrs=""):
return display(
HTML(j2h.convert(json=remap(self, visit=visit), table_attributes=attrs))
)
def load_client(apikey=None, headers=None, host=HOST):
warnings.warn(
"load_client(...) is deprecated, use Client(...) instead", DeprecationWarning
)
# TODO data__ regex doesn't work through bravado/swagger client
class Client(SwaggerClient):
"""client to connect to MPContribs API
    We only want to load the swagger spec from the remote server when needed and not every time the
client is initialized. Hence using the Borg design nonpattern (instead of Singleton): Since the
__dict__ of any instance can be re-bound, Borg rebinds it in its __init__ to a class-attribute
dictionary. Now, any reference or binding of an instance attribute will actually affect all
instances equally.
"""
_shared_state = {}
def __init__(self, apikey=None, headers=None, host=HOST):
# - Kong forwards consumer headers when api-key used for auth
# - forward consumer headers when connecting through localhost
self.__dict__ = self._shared_state
self.apikey = apikey
self.headers = {"x-api-key": apikey} if apikey else headers
self.host = host
if "swagger_spec" not in self.__dict__ or (
self.headers is not None
and self.swagger_spec.http_client.headers != self.headers
):
http_client = FidoClientGlobalHeaders(headers=self.headers)
loader = Loader(http_client)
protocol = "https" if self.apikey else "http"
origin_url = f"{protocol}://{self.host}/apispec.json"
spec_dict = loader.load_spec(origin_url)
spec_dict["host"] = self.host
spec_dict["schemes"] = [protocol]
config = {
"validate_responses": False,
"use_models": False,
"include_missing_properties": False,
"formats": [email_format],
}
bravado_config = bravado_config_from_config_dict(config)
for key in set(bravado_config._fields).intersection(set(config)):
del config[key]
config["bravado"] = bravado_config
swagger_spec = Spec.from_dict(spec_dict, origin_url, http_client, config)
super().__init__(
swagger_spec, also_return_response=bravado_config.also_return_response
)
def get_project(self, project):
"""Convenience function to get full project entry and display as HTML table"""
return Dict(self.projects.get_entry(pk=project, _fields=["_all"]).result())
def get_contribution(self, cid):
"""Convenience function to get full contribution entry and display as HTML table"""
return Dict(self.contributions.get_entry(pk=cid, _fields=["_all"]).result())
def get_table(self, tid):
"""Convenience function to get full Pandas DataFrame for a table."""
page, pages = 1, None
table = {"data": []}
while pages is None or page <= pages:
res = self.tables.get_entry(
pk=tid, _fields=["_all"], data_page=page, data_per_page=1000
).result()
if "columns" not in table:
pages = res["total_data_pages"]
table["columns"] = res["columns"]
table["data"].extend(res["data"])
page += 1
return pd.DataFrame.from_records(
table["data"], columns=table["columns"], index=table["columns"][0]
)
|
Python
| 0.000003
|
@@ -2077,18 +2077,31 @@
, attrs=
-%22%22
+'class=%22table%22'
):%0A
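Usage sketch of the new default (payload values hypothetical; pretty() displays via IPython, so this is meant for a notebook):

from mpcontribs.client import Dict

d = Dict({"formula": "MnO2", "band_gap": {"value": 1.2, "unit": "eV", "display": "1.2 eV"}})
d.pretty()                                      # now renders <table class="table">
d.pretty(attrs='class="table table-striped"')   # explicit attrs still override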
|
51257ca1ebb61d48b8c8dd5b1562fdc73e4ecc99
|
Load .solv file from testdata
|
bindings/python/tests/relation.py
|
bindings/python/tests/relation.py
|
#
# test Relation
#
# Relations are the primary means to specify dependencies.
# Relations combine names and version through an operator.
# Relations can be compared (<=> operator) or matched (=~ operator)
#
# The following operators are defined:
# REL_GT: greater than
# REL_EQ: equals
# REL_GE: greater equal
# REL_LT: less than
# REL_NE: not equal
# REL_LE: less equal
# Future extensions (not fully defined currently)
# REL_AND: and
# REL_OR: or
# REL_WITH: with
# REL_NAMESPACE: namespace
#
#
import unittest
import sys
sys.path.insert(0, '../../../build/bindings/python')
import satsolver
class TestSequenceFunctions(unittest.TestCase):
def setUp(self):
self.pool = satsolver.Pool()
assert self.pool
self.repo = satsolver.Repo( self.pool, "test" )
assert self.repo
self.pool.set_arch("i686")
self.repo = self.pool.add_solv( "os11-biarch.solv" )
assert self.repo.size() > 0
def test_relation_accessors(self):
rel1 = satsolver.Relation( self.pool, "A" )
assert rel1
assert rel1.name() == "A"
assert rel1.op() == 0
assert rel1.evr() == None
rel2 = satsolver.Relation( self.pool, "A", satsolver.REL_EQ, "1.0-0" )
assert rel2
assert rel2.name() == "A"
assert rel2.op() == satsolver.REL_EQ
assert rel2.evr() == "1.0-0"
def test_providers(self):
rel = self.pool.create_relation( "glibc", satsolver.REL_GT, "2.7" )
for s in self.pool.providers(rel):
print s, "provides ", rel
assert True
def test_relation(self):
rel = self.pool.create_relation( "A", satsolver.REL_EQ, "1.0-0" )
assert rel
print "Relation: ", rel
i = 0
for s in self.repo:
i = i + 1
if i > 10:
break
if not s.provides().empty():
print "%s provides %s" % (s, s.provides().get(1))
j = 0
for p in s.provides():
j = j + 1
if j > 3:
break
if p is not None:
res1 = cmp(p, rel)
print p, " cmp ", rel, " => ", res1
res2 = p.match(rel)
                        print p, " match ", rel, " => ", res2
if __name__ == '__main__':
unittest.main()
|
Python
| 0
|
@@ -882,16 +882,31 @@
_solv( %22
+../../testdata/
os11-bia
|
ec235941919c184641f9ab30f2df13ab7fea0414
|
add sni support to LiveConnection.change_server
|
libmproxy/protocol/primitives.py
|
libmproxy/protocol/primitives.py
|
from __future__ import absolute_import
import copy
import uuid
import netlib.tcp
from .. import stateobject, utils, version
from ..proxy.connection import ClientConnection, ServerConnection
KILL = 0 # const for killed requests
class Error(stateobject.StateObject):
"""
An Error.
    This is distinct from a protocol error response (say, an HTTP code 500),
which is represented by a normal HTTPResponse object. This class is
responsible for indicating errors that fall outside of normal protocol
communications, like interrupted connections, timeouts, protocol errors.
Exposes the following attributes:
flow: Flow object
msg: Message describing the error
timestamp: Seconds since the epoch
"""
def __init__(self, msg, timestamp=None):
"""
@type msg: str
@type timestamp: float
"""
self.flow = None # will usually be set by the flow backref mixin
self.msg = msg
self.timestamp = timestamp or utils.timestamp()
_stateobject_attributes = dict(
msg=str,
timestamp=float
)
def __str__(self):
return self.msg
@classmethod
def from_state(cls, state):
# the default implementation assumes an empty constructor. Override
# accordingly.
f = cls(None)
f.load_state(state)
return f
def copy(self):
c = copy.copy(self)
return c
class Flow(stateobject.StateObject):
"""
A Flow is a collection of objects representing a single transaction.
This class is usually subclassed for each protocol, e.g. HTTPFlow.
"""
def __init__(self, type, client_conn, server_conn, live=None):
self.type = type
self.id = str(uuid.uuid4())
self.client_conn = client_conn
"""@type: ClientConnection"""
self.server_conn = server_conn
"""@type: ServerConnection"""
self.live = live
"""@type: LiveConnection"""
self.error = None
"""@type: Error"""
self._backup = None
_stateobject_attributes = dict(
id=str,
error=Error,
client_conn=ClientConnection,
server_conn=ServerConnection,
type=str
)
def get_state(self, short=False):
d = super(Flow, self).get_state(short)
d.update(version=version.IVERSION)
return d
def __eq__(self, other):
return self is other
def copy(self):
f = copy.copy(self)
f.client_conn = self.client_conn.copy()
f.server_conn = self.server_conn.copy()
if self.error:
f.error = self.error.copy()
return f
def modified(self):
"""
Has this Flow been modified?
"""
if self._backup:
return self._backup != self.get_state()
else:
return False
def backup(self, force=False):
"""
Save a backup of this Flow, which can be reverted to using a
call to .revert().
"""
if not self._backup:
self._backup = self.get_state()
def revert(self):
"""
Revert to the last backed up state.
"""
if self._backup:
self.load_state(self._backup)
self._backup = None
class ProtocolHandler(object):
"""
A ProtocolHandler implements an application-layer protocol, e.g. HTTP.
See: libmproxy.protocol.http.HTTPHandler
"""
def __init__(self, c):
self.c = c
"""@type: libmproxy.proxy.server.ConnectionHandler"""
self.live = LiveConnection(c)
"""@type: LiveConnection"""
def handle_messages(self):
"""
This method gets called if a client connection has been made. Depending
on the proxy settings, a server connection might already exist as well.
"""
raise NotImplementedError # pragma: nocover
def handle_server_reconnect(self, state):
"""
This method gets called if a server connection needs to reconnect and
there's a state associated with the server connection (e.g. a
previously-sent CONNECT request or a SOCKS proxy request). This method
        gets called after the connection has been re-established but before SSL is
established.
"""
raise NotImplementedError # pragma: nocover
def handle_error(self, error):
"""
This method gets called should there be an uncaught exception during the
connection. This might happen outside of handle_messages, e.g. if the
initial SSL handshake fails in transparent mode.
"""
raise error # pragma: nocover
class LiveConnection(object):
"""
This facade allows interested parties (FlowMaster, inline scripts) to
interface with a live connection, without exposing the internals
of the ConnectionHandler.
"""
def __init__(self, c):
self.c = c
"""@type: libmproxy.proxy.server.ConnectionHandler"""
self._backup_server_conn = None
"""@type: libmproxy.proxy.connection.ServerConnection"""
def change_server(self, address, ssl=None, force=False, persistent_change=False):
"""
Change the server connection to the specified address.
@returns:
True, if a new connection has been established,
False, if an existing connection has been used
"""
address = netlib.tcp.Address.wrap(address)
ssl_mismatch = (ssl is not None and ssl != self.c.server_conn.ssl_established)
address_mismatch = (address != self.c.server_conn.address)
if persistent_change:
self._backup_server_conn = None
if ssl_mismatch or address_mismatch or force:
self.c.log(
"Change server connection: %s:%s -> %s:%s [persistent: %s]" % (
self.c.server_conn.address.host,
self.c.server_conn.address.port,
address.host,
address.port,
persistent_change
),
"debug"
)
if not self._backup_server_conn and not persistent_change:
self._backup_server_conn = self.c.server_conn
self.c.server_conn = None
else:
# This is at least the second temporary change. We can kill the
# current connection.
self.c.del_server_connection()
self.c.set_server_address(address)
self.c.establish_server_connection(ask=False)
if ssl:
self.c.establish_ssl(server=True)
return True
return False
def restore_server(self):
# TODO: Similar to _backup_server_conn, introduce _cache_server_conn,
        # which keeps the changed connection open. This may be beneficial if a
# user is rewriting all requests from http to https or similar.
if not self._backup_server_conn:
return
self.c.log("Restore original server connection: %s:%s -> %s:%s" % (
self.c.server_conn.address.host,
self.c.server_conn.address.port,
self._backup_server_conn.address.host,
self._backup_server_conn.address.port
), "debug")
self.c.del_server_connection()
self.c.server_conn = self._backup_server_conn
self._backup_server_conn = None
|
Python
| 0
|
@@ -5207,16 +5207,26 @@
sl=None,
+ sni=None,
force=F
@@ -5549,16 +5549,29 @@
atch = (
+%0A
ssl is n
@@ -5581,16 +5581,46 @@
None and
+%0A (%0A
ssl !=
@@ -5653,16 +5653,114 @@
ablished
+%0A or%0A (sni is not None and sni != self.c.sni)%0A )%0A
)%0A
@@ -6789,16 +6789,69 @@
=False)%0A
+ if sni:%0A self.c.sni = sni%0A
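A hypothetical inline-script sketch of the new parameter (hook signature assumed from mitmproxy's inline-script style of this era): redirect a flow upstream and present a matching SNI during the TLS handshake.

def request(context, flow):
    flow.live.change_server(
        ("example.com", 443),   # hypothetical upstream address
        ssl=True,
        sni="example.com",      # newly supported by this commit
    )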
|
aa663a2483c119e3fe6fb6e2f0a160489b63b12b
|
create a course provisioning report method
|
restclients/canvas/reports.py
|
restclients/canvas/reports.py
|
from django.conf import settings
from restclients.canvas import Canvas
from restclients.dao import Canvas_DAO
from restclients.exceptions import DataFailureException
from restclients.models.canvas import Report, ReportType, Attachment
from urllib3 import PoolManager
import json
class Reports(Canvas):
def get_available_reports(self):
"""
Returns the list of reports for the current context.
https://canvas.instructure.com/doc/api/account_reports.html#method.account_reports.available_reports
"""
data = self._get_resource("/api/v1/accounts/%s/reports.json" % (
settings.RESTCLIENTS_CANVAS_ACCOUNT_ID))
report_types = []
for datum in data:
report_type = ReportType()
report_type.name = datum["report"]
report_type.title = datum["title"]
report_type.parameters = datum["parameters"]
if datum["last_run"] is not None:
report = self._report_from_json(datum["last_run"])
report_type.last_run = report
report_types.append(report_type)
return report_types
def get_reports_by_type(self, report_type):
"""
Shows all reports of the passed report_type that have been run
for the account.
https://canvas.instructure.com/doc/api/account_reports.html#method.account_reports.index
"""
data = self._get_resource("/api/v1/accounts/%s/reports/%s.json" % (
settings.RESTCLIENTS_CANVAS_ACCOUNT_ID,
report_type))
reports = []
for datum in data:
reports.append(self._report_from_json(datum))
return reports
def create_report(self, report_type, term_id=None, params={}):
"""
Generates a report instance for the account.
https://canvas.instructure.com/doc/api/account_reports.html#method.account_reports.create
"""
if term_id is not None:
params["enrollment_term_id"] = term_id
url = "/api/v1/accounts/%s/reports/%s" % (
settings.RESTCLIENTS_CANVAS_ACCOUNT_ID, report_type)
headers = {"Accept": "application/json",
"Content-Type": "application/json"}
body = json.dumps({"parameters": params})
dao = Canvas_DAO()
response = dao.postURL(url, headers, body)
if response.status != 200:
raise DataFailureException(url, response.status, response.data)
return self._report_from_json(json.loads(response.data))
def get_report_data(self, report):
"""
Returns a completed report as a list of csv strings.
"""
if report.attachment is None or report.attachment.url is None:
return
url = report.attachment.url
response = PoolManager().request("GET", url, retries=5,
timeout=settings.RESTCLIENTS_TIMEOUT)
if response.status != 200:
raise DataFailureException(url, response.status, response.data)
return response.data.split("\n")
def get_report_status(self, report):
"""
Returns the status of a report.
https://canvas.instructure.com/doc/api/account_reports.html#method.account_reports.show
"""
url = "/api/v1/accounts/%s/reports/%s/%s" % (
settings.RESTCLIENTS_CANVAS_ACCOUNT_ID, report.type,
report.report_id)
data = self._get_resource(url)
return self._report_from_json(data)
def delete_report(self, report):
"""
Deletes a generated report instance.
https://canvas.instructure.com/doc/api/account_reports.html#method.account_reports.destroy
"""
dao = Canvas_DAO()
url = "/api/v1/accounts/%s/reports/%s/%s.json" % (
settings.RESTCLIENTS_CANVAS_ACCOUNT_ID, report.type,
report.report_id)
response = dao.deleteURL(url, {"Accept": "application/json"})
if response.status != 200:
raise DataFailureException(url, response.status, response.data)
return self._report_from_json(json.loads(response.data))
def _report_from_json(self, data):
report = Report()
report.report_id = data["id"]
report.type = data["report"]
report.url = data["file_url"]
report.status = data["status"]
report.progress = data["progress"]
report.parameters = data["parameters"]
if "attachment" in data:
report.attachment = Attachment(
attachment_id=data["attachment"]["id"],
filename=data["attachment"]["filename"],
display_name=data["attachment"]["display_name"],
content_type=data["attachment"]["content-type"],
size=data["attachment"]["size"],
url=data["attachment"]["url"])
return report
|
Python
| 0.000007
|
@@ -2599,32 +2599,342 @@
esponse.data))%0A%0A
+ def create_course_provisioning_report(self, term_id=None, params=%7B%7D)%0A %22%22%22%0A Convenience method for create_report, for creating a course-based%0A provisioning report.%0A %22%22%22%0A params%5B%22courses%22%5D = True%0A return self.create_report(ReportType.PROVISIONING, term_id, params)%0A%0A
def get_repo
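Decoded for readability, the convenience method this diff adds is below. Note that the def line in the payload ends without a colon, which would be a SyntaxError as committed; the colon is restored here, and the mutable default params={} is kept as in the source:

    def create_course_provisioning_report(self, term_id=None, params={}):
        """
        Convenience method for create_report, for creating a course-based
        provisioning report.
        """
        params["courses"] = True
        return self.create_report(ReportType.PROVISIONING, term_id, params)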
|
e64d922a7e7c64921c90d81c44014f7287ba83fa
|
disable logging in travis
|
.travis/localsettings.py
|
.travis/localsettings.py
|
import os
####### Configuration for CommCareHQ Running on Travis-CI #####
from docker.dockersettings import *
USE_PARTITIONED_DATABASE = os.environ.get('USE_PARTITIONED_DATABASE', 'no') == 'yes'
PARTITION_DATABASE_CONFIG = get_partitioned_database_config(USE_PARTITIONED_DATABASE)
BASE_ADDRESS = '{}:8000'.format(os.environ.get('WEB_TEST_PORT_8000_TCP_ADDR', 'localhost'))
####### S3 mock server config ######
S3_BLOB_DB_SETTINGS = {"url": "http://localhost:5000"}
KAFKA_URL = 'kafka:9092'
######## Email setup ########
# email settings: these ones are the custom hq ones
EMAIL_LOGIN = "notifications@dimagi.com"
EMAIL_PASSWORD = "******"
EMAIL_SMTP_HOST = "smtp.gmail.com"
EMAIL_SMTP_PORT = 587
EMAIL_BACKEND='django.core.mail.backends.console.EmailBackend'
####### Bitly ########
BITLY_LOGIN = None
####### Jar signing config ########
_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
JAR_SIGN = dict(
jad_tool = os.path.join(_ROOT_DIR, "corehq", "apps", "app_manager", "JadTool.jar"),
key_store = os.path.join(_ROOT_DIR, "InsecureTestingKeyStore"),
key_alias = "javarosakey",
store_pass = "onetwothreefourfive",
key_pass = "onetwothreefourfive",
)
AUDIT_MODEL_SAVE = ['django.contrib.auth.models.User']
AUDIT_ADMIN_VIEWS = False
SECRET_KEY = 'secrettravis'
# No logging
LOGGING = {
'version': 1,
'handlers': {
'null': {
'level': 'DEBUG',
'class': 'django.utils.log.NullHandler',
},
},
'loggers': {
'': {
'level': 'CRITICAL',
'handler': 'null',
'propagate': False,
},
'south': {
'level': 'CRITICAL',
'handler': 'null',
'propagate': False,
},
'pillowtop': {
'level': 'CRITICAL',
'handler': 'null',
'propagate': False,
}
}
}
PHONE_TIMEZONES_HAVE_BEEN_PROCESSED = True
PHONE_TIMEZONES_SHOULD_BE_PROCESSED = True
ENABLE_PRELOGIN_SITE = True
TESTS_SHOULD_TRACK_CLEANLINESS = True
UNIT_TESTING = True
LOCAL_APPS = (
'testapps.test_elasticsearch',
'testapps.test_pillowtop',
)
PILLOWTOP_MACHINE_ID = 'testhq'
ELASTICSEARCH_VERSION = 1.7
CACHE_REPORTS = True
|
Python
| 0
|
@@ -1312,58 +1312,33 @@
g%0ALO
-GGING = %7B%0A 'version': 1,%0A 'handlers': %7B%0A
+CAL_LOGGING_HANDLERS = %7B%0A
@@ -1343,28 +1343,24 @@
'null': %7B%0A
-
'lev
@@ -1373,20 +1373,16 @@
DEBUG',%0A
-
@@ -1430,43 +1430,40 @@
- %7D,%0A %7D,%0A 'loggers': %7B%0A
+%7D,%0A%7D%0A%0ALOCAL_LOGGING_LOGGERS = %7B%0A
@@ -1468,36 +1468,32 @@
'': %7B%0A
-
-
'level': 'CRITIC
@@ -1497,36 +1497,32 @@
TICAL',%0A
-
'handler': 'null
@@ -1524,36 +1524,32 @@
'null',%0A
-
-
'propagate': Fal
@@ -1549,49 +1549,40 @@
e':
-False,%0A
+True,%0A
-
%7D,%0A
- 'south
+'pillowtop
': %7B%0A
-
@@ -1606,36 +1606,32 @@
TICAL',%0A
-
'handler': 'null
@@ -1633,36 +1633,32 @@
'null',%0A
-
-
'propagate': Fal
@@ -1658,44 +1658,32 @@
e':
-False,%0A
+True,%0A
-
%7D,%0A
-
- 'pillowtop
+'notify
': %7B
@@ -1688,27 +1688,24 @@
%7B%0A
-
'level': 'CR
@@ -1718,27 +1718,24 @@
',%0A
-
'handler': '
@@ -1746,27 +1746,24 @@
',%0A
-
-
'propagate':
@@ -1767,32 +1767,23 @@
e':
-False,%0A %7D%0A
+True,%0A %7D,%0A
%7D%0A
-%7D
%0A%0APH
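Reconstructed from the word diff (whitespace normalized, so treat it as a sketch rather than a byte-exact decode), the LOGGING block becomes two dictionaries, presumably merged into the main logging config elsewhere:

LOCAL_LOGGING_HANDLERS = {
    'null': {
        'level': 'DEBUG',
        'class': 'django.utils.log.NullHandler',
    },
}

LOCAL_LOGGING_LOGGERS = {
    '': {
        'level': 'CRITICAL',
        'handler': 'null',
        'propagate': True,
    },
    'pillowtop': {
        'level': 'CRITICAL',
        'handler': 'null',
        'propagate': True,
    },
    'notify': {
        'level': 'CRITICAL',
        'handler': 'null',
        'propagate': True,
    },
}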
|
5c4870204f5ca14ee4d47541d14532dbe42528b2
|
Version 2.2 - Not really bugs so much as my being an idiot
|
twitch_irc.py
|
twitch_irc.py
|
from twisted.internet import protocol, reactor
from collections import defaultdict
import sys, json
import socketio
import eventlet
from flask import Flask, render_template
from threading import Thread
import bot
import time
import logging
import logging.config
logging.config.fileConfig('logging.conf')
global sio
global raffleEntrants
raffleEntrants = []
class chatSocket(Thread):
def __init__(self):
Thread.__init__(self)
def run(self):
# sio = socketio.Server()
# app = Flask(__name__, static_url_path='/webjs')
# @app.route('/')
# def index():
# """Serve the client-side application."""
# return render_template('index.html')
#
# @sio.on('new message')
# def message(sid, data):
# print('message ', data)
# sio.emit("new message", data)
#
# app = socketio.Middleware(sio, app)
# eventlet.wsgi.server(eventlet.listen(('', 3000)), app)
from flask import Flask, render_template
from flask_socketio import SocketIO, emit
app = Flask(__name__)
socketio = SocketIO(app)
@app.route("/")
def index():
return render_template('imgloader.html',)
@app.route("/mod")
def mod():
return render_template('mod.html',)
@socketio.on('image')
def test_connect(message):
emit('image', message, broadcast=True)
socketio.run(app, port=3000)
class BotFactory(protocol.ClientFactory):
protocol = bot.TwitchBot
tags = defaultdict(dict)
activity = dict()
wait_time = 1
def clientConnectionLost(self, connector, reason):
logging.error("Lost connection, reconnecting")
self.protocol = reload(bot).TwitchBot
connector.connect()
def clientConnectionFailed(self, connector, reason):
msg = "Could not connect, retrying in {}s"
logging.warning(msg.format(self.wait_time))
time.sleep(self.wait_time)
self.wait_time = min(512, self.wait_time * 2)
connector.connect()
if __name__ == "__main__":
thread = chatSocket()
thread.daemon = True
thread.start()
bot.TwitchBot.revlostart()
reactor.connectTCP('irc.twitch.tv', 6667, BotFactory())
reactor.run()
|
Python
| 0
|
@@ -1538,16 +1538,42 @@
tchBot%0A%0A
+ protocol.revlostart()%0A
tags
@@ -2199,39 +2199,8 @@
t()%0A
- bot.TwitchBot.revlostart()%0A
|
a150520c9fd49a3e6cb6b4396694371797de8440
|
make plot tool skip invalid files
|
modules/tools/plot_trace/plot_planning_result.py
|
modules/tools/plot_trace/plot_planning_result.py
|
#!/usr/bin/env python
###############################################################################
# Copyright 2017 The Apollo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import sys
import matplotlib
import matplotlib.animation as animation
import matplotlib.pyplot as plt
from subprocess import call
import numpy as np
import rosbag
import rospy
from std_msgs.msg import String
from google.protobuf import text_format
from mpl_toolkits.mplot3d import Axes3D
from modules.canbus.proto import chassis_pb2
from modules.localization.proto import localization_pb2
from modules.planning.proto import planning_pb2
g_args = None
def plot_planning(ax, planning_file):
fhandle = file(planning_file, 'r')
if not fhandle:
print "Failed to open file %s" % (planning_file)
planning_pb = planning_pb2.ADCTrajectory()
text_format.Merge(fhandle.read(), planning_pb)
x = [p.path_point.x for p in planning_pb.trajectory_point]
y = [p.path_point.y for p in planning_pb.trajectory_point]
z = [p.v for p in planning_pb.trajectory_point]
ax.plot(x, y, z, label=planning_file)
ax.legend()
def press_key(event):
if event.key == 'c':
files = g_args.planning_files
if len(files) != 2:
print "Need more than two files"
return
command = ["cp"]
for f in files:
command.append(f)
if call(command) == 0:
print "command success: %s" % " ".join(command)
sys.exit(0)
else:
print "Failed to run command: %s " % " ".join(command)
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser(
description=
"""A visualization tool that can plot one or multiple planning "
results, so that we can compare the differences.
Example: plot_planning_result.py result_file1.pb.txt result_file2.pb.txt"""
)
parser.add_argument(
"planning_files",
action='store',
nargs="+",
help="The planning results")
g_args = parser.parse_args()
matplotlib.rcParams['legend.fontsize'] = 10
fig = plt.figure()
fig.canvas.mpl_connect('key_press_event', press_key)
ax = fig.gca(projection='3d')
ax.set_xlabel("x")
ax.set_ylabel("y")
ax.set_zlabel("speed")
for planning_file in g_args.planning_files:
plot_planning(ax, planning_file)
plt.show()
|
Python
| 0.000001
|
@@ -1271,16 +1271,29 @@
_file):%0A
+ try:%0A
fhan
@@ -1331,22 +1331,14 @@
-if not fhandle
+except
:%0A
@@ -1388,24 +1388,39 @@
nning_file)%0A
+ return%0A
planning
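Applied to the source above, the diff yields roughly the following function head (indentation normalized; the body keeps the file's Python 2 style), so an unreadable input file is skipped instead of aborting the whole plot:

def plot_planning(ax, planning_file):
    try:
        fhandle = file(planning_file, 'r')
    except:
        print "Failed to open file %s" % (planning_file)
        return
    planning_pb = planning_pb2.ADCTrajectory()
    # ...remainder unchanged...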
|