Columns: commit (string, length 40), subject (string, 1-3.25k chars), old_file (string, 4-311 chars), new_file (string, 4-311 chars), old_contents (string, 0-26.3k chars), lang (string, 3 classes), proba (float64, 0-1), diff (string, 0-7.82k chars)

| commit | subject | old_file | new_file | old_contents | lang | proba | diff |
|---|---|---|---|---|---|---|---|
c4966e274c885da4e5d252143b9feb260c8f78f5
|
Correct config path finding for Linux.
|
pokemon_go_hunter/watch_twitter.py
|
pokemon_go_hunter/watch_twitter.py
|
import logging
import os
import re
import time
import twitter
import yaml
from pushbullet import Pushbullet
def get_config():
config_path = os.path.join(os.path.abspath(__file__), '../../config.yaml')
with open(config_path) as f:
return yaml.load(f)
def get_twitter_api(config):
api_config = config['API']
twitter_api_config = api_config['Twitter']
return twitter.Api(consumer_key=twitter_api_config['consumer key'],
consumer_secret=twitter_api_config['consumer secret'],
access_token_key=twitter_api_config['access token key'],
access_token_secret=twitter_api_config['access token secret'])
def get_pushbullet_api(config):
api_config = config['API']
pushbullet_api_config = api_config['Pushbullet']
return Pushbullet(api_key=pushbullet_api_config['api key'],
encryption_password=pushbullet_api_config['encryption password'])
def get_pushbullet_device(pb, config):
devices = pb.devices
result = None
for d in devices:
if d.nickname == config['API']['Pushbullet']['device name']:
result = d
assert result is not None, "Couldn't find Pushbullet device."
return result
_config = get_config()
_twitter_api = get_twitter_api(_config)
_pb = get_pushbullet_api(_config)
_device = get_pushbullet_device(_pb, _config)
def main(screen_name: str,
pattern,
callback,
period_s: int = 61):
logging.info("Waiting for tweets.")
since_id = None
while True:
statuses = _twitter_api.GetUserTimeline(screen_name=screen_name,
since_id=since_id,
trim_user=True)
for status in statuses:
if since_id is None:
since_id = status.id
else:
since_id = max(since_id, status.id)
text = status.text
m = pattern.search(text)
logging.debug(text)
if m:
callback(status)
time.sleep(period_s)
def notify(status):
text = status.text
for url in status.urls:
text = text.replace(url.url, url.expanded_url)
logging.info("Sending: \"%s\".", text)
_pb.push_sms(_device, 'TODO', text)
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s [%(levelname)s] - %(name)s:%(filename)s:%(funcName)s\n%(message)s',
level=logging.INFO)
# TODO Read from config.
# Example.
main(screen_name='montrealpokemap',
pattern=re.compile(r'\b(Unown)\b', re.IGNORECASE),
callback=notify)
|
Python
| 0
|
@@ -161,16 +161,32 @@
os.path.
+dirname(os.path.
abspath(
@@ -198,17 +198,15 @@
e__)
+)
, '../
-../
conf
|
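The diff above wraps the `abspath(__file__)` call in `os.path.dirname(...)` and drops one `../`: `os.path.abspath(__file__)` points at the file itself, not its directory, so the old join climbed one level too far. A minimal sketch of the corrected lookup (the helper name is illustrative):

```python
import os

def get_config_path():
    # os.path.abspath(__file__) is the module file itself; dirname()
    # strips the filename, so only one '..' is needed from here.
    here = os.path.dirname(os.path.abspath(__file__))
    return os.path.join(here, '../config.yaml')
```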
8b78463ac8d8953dffb3c3ecd5e9e1e4396da106
|
Make sure set_mpl_backend works if qtpy is not installed
|
glue/_mpl_backend.py
|
glue/_mpl_backend.py
|
class MatplotlibBackendSetter(object):
"""
Import hook to make sure the proper Qt backend is set when importing
Matplotlib.
"""
enabled = True
def find_module(self, mod_name, pth):
if self.enabled and 'matplotlib' in mod_name:
self.enabled = False
set_mpl_backend()
def find_spec(self, name, import_path, target_module=None):
pass
def set_mpl_backend():
from matplotlib import rcParams, rcdefaults
# standardize mpl setup
rcdefaults()
from qtpy import PYQT5
if PYQT5:
rcParams['backend'] = 'Qt5Agg'
else:
rcParams['backend'] = 'Qt4Agg'
# The following is a workaround for the fact that Matplotlib checks the
# rcParams at import time, not at run-time. I have opened an issue with
# Matplotlib here: https://github.com/matplotlib/matplotlib/issues/5513
from matplotlib import get_backend
from matplotlib import backends
backends.backend = get_backend()
|
Python
| 0
|
@@ -420,16 +420,176 @@
end():%0A%0A
+ try:%0A from qtpy import PYQT5%0A except:%0A # If Qt isn't available, we don't have to worry about%0A # setting the backend%0A return%0A%0A
from
@@ -679,36 +679,8 @@
()%0A%0A
- from qtpy import PYQT5%0A%0A
|
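The fix moves the `from qtpy import PYQT5` import inside a try/except and returns early when it fails, so importing matplotlib no longer blows up on machines without Qt. A sketch of the patched function; the commit uses a bare `except:`, while this version catches `Exception`, since qtpy can raise its own error type (not just `ImportError`) when no Qt binding is present:

```python
def set_mpl_backend():
    from matplotlib import rcParams, rcdefaults
    rcdefaults()  # standardize mpl setup

    try:
        from qtpy import PYQT5
    except Exception:
        # If Qt isn't available, there is no backend to force.
        return

    rcParams['backend'] = 'Qt5Agg' if PYQT5 else 'Qt4Agg'
```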
1bbd84111b142daf9301842f1cb411983fccedef
|
Comment change.
|
gnuplot-py/gp_mac.py
|
gnuplot-py/gp_mac.py
|
# $Id$
# Copyright (C) 1999 Michael Haggerty <mhagger@alum.mit.edu>
# Thanks to Tony Ingraldi and Noboru Yamamoto for their contributions.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version. This program is distributed in the
# hope that it will be useful, but WITHOUT ANY WARRANTY; without even
# the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details; it is
# available at <http://www.fsf.org/copyleft/gpl.html>, or by writing to
# the Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
"""gp_mac -- an interface to gnuplot for the Macintosh.
"""
__cvs_version__ = '$Revision$'
import os, string
import Errors
# ############ Configuration variables: ################################
class GnuplotOpts:
"""The configuration options for gnuplot on the Macintosh.
See gp.py for details about the meaning of these options. Please
let me know if you know better choices for these settings."""
# The '-persist' option is not supported on the Mac:
recognizes_persist = 0
# Apparently the Mac can use binary data:
recognizes_binary_splot = 1
# Apparently the Mac can not use inline data:
prefer_inline_data = 0
# The default choice for the 'set term' command (to display on screen):
default_term = 'pict'
# I don't know how to print directly to a printer on the Mac:
default_lpr = '| lpr'
# Used the 'enhanced' option of postscript by default? Set to
# None (*not* 0!) if your version of gnuplot doesn't support
# enhanced postscript.
prefer_enhanced_postscript = 1
# ############ End of configuration options ############################
# The Macintosh doesn't support pipes so communication is via
# AppleEvents.
import gnuplot_Suites
import Required_Suite
import aetools
# Mac doesn't recognize persist.
def test_persist():
return 0
class _GNUPLOT(aetools.TalkTo,
Required_Suite.Required_Suite,
gnuplot_Suites.gnuplot_Suite,
gnuplot_Suites.odds_and_ends,
gnuplot_Suites.Standard_Suite,
gnuplot_Suites.Miscellaneous_Events):
"""Start a gnuplot program and emulate a pipe to it."""
def __init__(self):
aetools.TalkTo.__init__(self, '{GP}', start=1)
class GnuplotProcess:
"""Unsophisticated interface to a running gnuplot program.
See gp_unix.GnuplotProcess for usage information.
"""
def __init__(self, persist=0):
"""Start a gnuplot process.
Create a 'GnuplotProcess' object. This starts a gnuplot
program and prepares to write commands to it.
Keyword arguments:
'persist' -- the '-persist' option is not supported on the
Macintosh so this argument must be zero.
"""
if persist:
raise Errors.OptionError(
'-persist is not supported on the Macintosh!')
self.gnuplot = _GNUPLOT()
# forward close method:
self.close = self.gnuplot.quit
def write(self, s):
"""Mac gnuplot apparently requires '\r' to end statements."""
self.gnuplot.gnuexec(string.replace(s, '\n', os.linesep))
def flush(self):
pass
def __call__(self, s):
"""Send a command string to gnuplot, for immediate execution."""
# Apple Script doesn't seem to need the trailing '\n'.
self.write(s)
self.flush()
|
Python
| 0.000012
|
@@ -1539,17 +1539,135 @@
screen)
-:
+.%0A # Terminal types are different in Gnuplot 3.7.1c.%0A # For earlier versions, this was default_term = 'macintosh'
%0A def
@@ -2255,16 +2255,17 @@
turn 0%0A%0A
+%0A
class _G
|
525795adedc79bc77d5c61f238a10255b83078d2
|
Fix import compatability with Python 3 (Fixes #68)
|
gpytorch/__init__.py
|
gpytorch/__init__.py
|
from .module import Module
import models
import means
import kernels
from torch.autograd import Variable
from .contexts import fast_pred_var
from .lazy import LazyVariable
from .functions import AddDiag, DSMM, NormalCDF, LogNormalCDF
from .utils import function_factory
_inv_matmul_class = function_factory.inv_matmul_factory()
_trace_logdet_quad_form_factory_class = function_factory.trace_logdet_quad_form_factory()
_exact_gp_mll_class = function_factory.exact_gp_mll_factory()
def add_diag(input, diag):
"""
Adds a diagonal matrix s*I to the input matrix input.
Args:
- input (matrix nxn) - Variable or LazyVariable wrapping matrix to add diagonal \
component to.
- diag (scalar) - Scalar s so that s*I is added to the input matrix.
Returns:
- matrix nxn - Variable or LazyVariable wrapping a new matrix with the diagonal \
component added.
"""
if not isinstance(diag, Variable):
raise RuntimeError('Expected a variable for the diagonal component.')
if isinstance(input, LazyVariable):
return input.add_diag(diag)
else:
return AddDiag()(input, diag)
def add_jitter(mat):
"""
Adds "jitter" to the diagonal of a matrix.
This ensures that a matrix that *should* be positive definite *is* positive definate.
Args:
- mat (matrix nxn) - Positive definite matrxi
Returns: (matrix nxn)
"""
if isinstance(mat, LazyVariable):
return mat.add_jitter()
elif isinstance(mat, Variable):
diag = Variable(mat.data.new(mat.size(-1)).fill_(1e-3).diag())
if mat.ndimension() == 3:
return mat + diag.unsqueeze(0).expand(mat.size(0), mat.size(1), mat.size(2))
else:
return mat + diag
else:
diag = mat.new(mat.size(-1)).fill_(1e-3).diag()
if mat.ndimension() == 3:
return mat.add_(diag.unsqueeze(0).expand(mat.size(0), mat.size(1), mat.size(2)))
else:
return diag.add_(mat)
def dsmm(sparse_mat, dense_mat):
return DSMM(sparse_mat)(dense_mat)
def exact_gp_marginal_log_likelihood(covar, target):
"""
Computes the log marginal likelihood of the data with a GP prior and Gaussian noise model
given a label vector and covariance matrix.
Args:
- covar (matrix nxn) - Variable or LazyVariable representing the covariance matrix of the observations.
Usually, this is K + s*I, where s is the noise variance, and K is the prior covariance.
- target (vector n) - Training label vector.
Returns:
- scalar - The marginal log likelihood of the data.
"""
if isinstance(covar, LazyVariable):
return covar.exact_gp_marginal_log_likelihood(target)
else:
return _exact_gp_mll_class()(covar, target)
def inv_matmul(mat1, rhs):
"""
Computes a linear solve with several right hand sides.
Args:
- mat1 (matrix nxn) - Matrix to solve with
- rhs (matrix nxk) - rhs matrix or vector
Returns:
- matrix nxk - (mat1)^{-1} rhs
"""
if isinstance(mat1, LazyVariable):
return mat1.inv_matmul(rhs)
else:
return _inv_matmul_class()(mat1, rhs)
def log_normal_cdf(x):
"""
Computes the element-wise log standard normal CDF of an input tensor x.
This function should always be preferred over calling normal_cdf and taking the log
manually, as it is more numerically stable.
"""
return LogNormalCDF()(x)
def normal_cdf(x):
"""
Computes the element-wise standard normal CDF of an input tensor x.
"""
return NormalCDF()(x)
def trace_logdet_quad_form(mean_diffs, chol_covar_1, covar_2):
if isinstance(covar_2, LazyVariable):
return covar_2.trace_log_det_quad_form(mean_diffs, chol_covar_1)
else:
return _trace_logdet_quad_form_factory_class()(mean_diffs, chol_covar_1, covar_2)
__all__ = [
# Submodules
models,
means,
kernels,
# Classes
Module,
# Functions
add_diag,
add_jitter,
dsmm,
exact_gp_marginal_log_likelihood,
inv_matmul,
log_normal_cdf,
normal_cdf,
trace_logdet_quad_form,
# Context managers
fast_pred_var,
]
|
Python
| 0
|
@@ -20,16 +20,23 @@
Module%0A
+from .
import m
@@ -41,16 +41,23 @@
models%0A
+from .
import m
@@ -61,16 +61,23 @@
t means%0A
+from .
import k
|
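The diff prefixes the bare `import models` / `import means` / `import kernels` lines with `from . `: implicit relative imports were a Python 2 feature and fail with `ImportError` on Python 3. The changed lines, in sketch form (they only work from inside the `gpytorch` package):

```python
from __future__ import absolute_import  # no-op on Python 3

# Implicit relative imports ('import models') die on Python 3;
# the explicit relative form works on both interpreters:
from . import models
from . import means
from . import kernels
```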
bb6bdc5e929e6647ae772aec18556a3489911dfc
|
fix to poll.py, in python3 in raspberry, the cmd string to serial must be encode as UTF-8
|
greenery/bin/poll.py
|
greenery/bin/poll.py
|
#!/usr/bin/python3
"""
Poll sensors to get temp/humid/soil-moisture etc...
The Arduino accepts number-based command codes over the usb serial connection.
Like,
002\n = 0=mode(get),0=measurement(temperature),2=address(digital pin 2)
0214\n = 0=mode(get),2=measurement(soil),14=address(analog pin 14, or A0)
Commands MUST be terminated with '\n'!
See command-map in global vars
"""
import os
import sys
import re
import time
import datetime
import logging
import serial
sys.path.append( os.environ.get('GREENERY_WEB','/var/www/greenery') )
from greenery import db
from greenery.apps.measurement.models import MeasurementType, Measurement
from greenery.apps.admin.models import Setting
from greenery.apps.sensor.models import Sensor
logfile = '/var/tmp/greenery.errors.log'
logging.basicConfig(filename=logfile)
logger = logging.getLogger('actions')
logger.setLevel(10)
# global vars
poll = None
fahrenheit = None
sdevice = '/dev/ttyUSB0'
now = datetime.datetime.now().replace(second=0, microsecond=0)
cmd_map = {
# first char
'get': 0,
'set': 1,
'tx': 2,
# second(*) char for get-mode
'temperature': 0,
'humidity': 1,
'soil': 2,
}
def main():
ser = None
try:
ser = serial.Serial(sdevice, 9600, 5)
except Exception as x:
logger.error(x)
sys.stderr.write("Error! see log %s\n" % logfile)
sys.exit(1)
mtypes = MeasurementType.query.all()
sensors = Sensor.query.all()
for s in sensors:
for typ in ('temperature', 'humidity', 'soil'):
if re.search(typ, s.tags):
cmd = "%d%d%d\n" % (cmd_map['get'], cmd_map[typ], s.address)
ser.write(cmd.encode('UTF-8'))
while True:
# returns like;
# line = "sm,14,22" (code, address, value)
line = ser.readline()
if re.search(r'^ok', line, re.IGNORECASE):
# nothing more to read!
break;
if re.search(r'^fail', line, re.IGNORECASE):
logger.warning("sensor '%s' fail result '%s'" % (s.name, line))
break;
atoms = line.split(",")
if len(atoms) != 3:
logger.warning("sensor '%s' garbled output '%s" % (s.name, line))
continue;
code,addr,val = atoms
mt = match_flag_to_object(code, mtypes)
if code == 't' and fahrenheit:
val = val * 1.8 + 32
label = format_label(code, val, fahrenheit)
# adjust for only one decimal place during write to db
m = Measurement(mt.id, s.id, "%0.1f" % float(val), label, now)
db.session.add(m)
db.session.commit()
ser.close()
def match_flag_to_object(f, objects):
label = None
if f == 't':
label = 'temperature'
elif f == 'h':
label = 'humidity'
elif f == 'sm':
label = 'soil moisture'
else:
return None
for o in objects:
if o.name == label:
return o
return None
def format_label(typ, val, fahrenheit=False):
if re.search(r'^t', typ):
label = str(val) + u'\N{DEGREE SIGN}'
if fahrenheit:
label += "F"
else:
label += "C"
return label
if re.search(r'^(h|sm)', typ):
label = str(val) + "%"
return label
return None
if __name__ == '__main__':
poll = Setting.query.filter(Setting.name == 'polling interval minutes').first()
if not poll:
logger.error("could not determine polling interval from db")
sys.stderr.write("error\n")
sys.exit(1)
if now.minute % poll.value > 0:
# not the right time to be running this. exit
sys.exit(0)
fahrenheit = bool(Setting.query.filter(Setting.name == 'store temperature fahrenheit').first().value)
main()
|
Python
| 0.000175
|
@@ -1927,16 +1927,48 @@
dline()%0A
+ print(line)%0A
|
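On Python 3, pyserial's `write()` takes bytes, which is why the command string is built as `str` and then encoded; `readline()` likewise returns bytes. The committed diff only adds the `print(line)`; the `decode` step in this sketch is an assumption, added because the later `re.search(r'^ok', line, ...)` calls use `str` patterns (the helper name `poll_once` is also illustrative):

```python
import serial

def poll_once(ser: serial.Serial) -> str:
    cmd = "%d%d%d\n" % (0, 2, 14)   # e.g. get / soil / analog pin 14
    ser.write(cmd.encode('UTF-8'))  # pyserial on Python 3 wants bytes
    line = ser.readline()           # readline() returns bytes too
    print(line)                     # the committed diff adds this print
    # Assumption: decode before matching with str regexes downstream.
    return line.decode('UTF-8')
```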
247fe732ad71d2db3e664b63636492782a804151
|
Support old Selenium
|
capture/capture.py
|
capture/capture.py
|
#!/bin/python3
from __future__ import print_function
"""
Benchmark creator, for Cassius.
Uses Selenium Webdriver to download new benchmarks for Cassius.
Opens a page in Firefox, causes it to execute get_bench.js, and saves the result.
"""
from selenium import webdriver
import os, sys
import warnings
try:
import urllib.parse as parse
except:
import urlparse as parse
import collections
import argparse
def jsfile(name):
return open(os.path.join(os.path.dirname(__file__), name), "rt").read()
def measure_scrollbar(browser):
browser.get("about:blank");
browser.execute_script(jsfile("scrollbar.js") + "; estimate_scrollbar()");
def make_browser():
from selenium.webdriver.firefox.options import Options
options = Options()
options.set_headless(True)
profile = webdriver.FirefoxProfile()
profile.set_preference("security.mixed_content.block_active_content", False)
profile.set_preference("security.mixed_content.block_display_content", False)
browser = webdriver.Firefox(firefox_profile=profile, firefox_options=options)
measure_scrollbar(browser)
return browser
def capture(browser, url, id, prerun=None):
browser.get(url)
if prerun: browser.execute_script(prerun)
text = browser.execute_script(jsfile("all.js") + "; return page2text(arguments[0]);", id)
return ";; From {}\n\n{}\n\n".format(url, text)
def main(urls, prerun=None, fd=None):
urls = sorted([url if "://" in url else "file://" + os.path.abspath(url)
for url in urls])
for url in urls:
scheme, _, _, _, _, _ = parse.urlparse(url)
if scheme not in ["http", "https", "file"]:
warnings.warn("Only http and file scheme supported (not {})".format(scheme))
try:
browser = make_browser()
print("Saving layout to {}:".format(fd.name), file=sys.stderr, end=" ")
for i, url in enumerate(urls):
id = str(i+1).rjust(len(str(len(urls))), "0")
try:
fd.write(capture(browser, url, "doc-" + id, prerun=prerun))
print(id, file=sys.stderr, end=" ")
except:
import traceback
traceback.print_exc()
continue
print(file=sys.stderr)
finally:
browser.quit()
if __name__ == "__main__":
p = argparse.ArgumentParser(description="Download a website as Cassius test cases")
p.add_argument("urls", metavar="URLs", type=str, nargs="+", help="URLs to dowload")
p.add_argument("--output", type=argparse.FileType('w'), default=sys.stdout, help="File name under bench/.")
p.add_argument("--prerun", type=argparse.FileType('r'), help="JS file to run before capturing.")
args = p.parse_args()
prerun = args.prerun.read() if args.prerun else None
main(args.urls, prerun=prerun, fd=args.output)
|
Python
| 0
|
@@ -767,20 +767,32 @@
ons.
-set_
+add_argument(%22--
headless
(Tru
@@ -791,13 +791,9 @@
less
-(True
+%22
)%0A
|
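`FirefoxOptions.set_headless()` only exists in a narrow band of Selenium releases, which is what "Support old Selenium" refers to: passing the flag via `add_argument` works on both old and new versions. A sketch of the patched browser setup, keeping the record's `firefox_options=` constructor argument:

```python
from selenium import webdriver
from selenium.webdriver.firefox.options import Options

options = Options()
# set_headless(True) only exists in some Selenium releases; the raw
# flag is understood across old and new versions alike:
options.add_argument("--headless")

profile = webdriver.FirefoxProfile()
browser = webdriver.Firefox(firefox_profile=profile, firefox_options=options)
```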
0c0fbfacd83a4949b9d6b4411edb5b59dc613d06
|
clean up __init__ imports
|
blaze/__init__.py
|
blaze/__init__.py
|
from __future__ import absolute_import, division, print_function
from pandas import DataFrame
from into import into, convert, append, resource, drop
from into.backends.csv import CSV
from multipledispatch import halt_ordering, restart_ordering
halt_ordering() # Turn off multipledispatch ordering
from datashape import dshape, discover
from .expr import (Symbol, TableSymbol, symbol)
from .expr import (by, count, count_values, distinct, head, join, label, like,
mean, merge, nunique, relabel, selection, sort, summary, var, transform)
from .expr import (date, datetime, day, hour, microsecond, millisecond, month,
second, time, year)
from .expr.functions import *
from .index import create_index
from .json import *
from .interactive import *
from .compute.csv import *
from .compute.python import *
from .compute.pandas import *
from .compute.numpy import *
from .compute.core import *
from .compute.core import compute
try:
from .server import *
except ImportError:
pass
try:
from .sql import *
except ImportError:
pass
try:
from .spark import *
except (AttributeError, ImportError):
pass
try:
from .compute.sparksql import *
from .sparksql import *
except (ImportError, TypeError):
pass
try:
from dynd import nd
from .compute.dynd import *
except ImportError:
pass
try:
from .h5py import *
from .compute.h5py import *
except ImportError:
pass
try:
from .compute.pytables import *
except ImportError:
pass
try:
import blaze.compute.chunks
except ImportError:
pass
try:
from .bcolz import *
except ImportError:
pass
try:
from .mongo import *
except ImportError:
pass
try:
from .pytables import *
except ImportError:
pass
restart_ordering() # Restart multipledispatch ordering and do ordering
inf = float('inf')
nan = float('nan')
__version__ = '0.6.8'
# If IPython is already loaded, register the Blaze catalog magic
# from . import catalog
# import sys
# if 'IPython' in sys.modules:
# catalog.register_ipy_magic()
# del sys
def print_versions():
"""Print all the versions of software that Blaze relies on."""
import sys, platform
import numpy as np
import datashape
print("-=" * 38)
print("Blaze version: %s" % __version__)
print("Datashape version: %s" % datashape.__version__)
print("NumPy version: %s" % np.__version__)
print("Python version: %s" % sys.version)
(sysname, nodename, release, version, machine, processor) = \
platform.uname()
print("Platform: %s-%s-%s (%s)" % (sysname, release, machine, version))
if sysname == "Linux":
print("Linux dist: %s" % " ".join(platform.linux_distribution()[:-1]))
if not processor:
processor = "not recognized"
print("Processor: %s" % processor)
print("Byte-ordering: %s" % sys.byteorder)
print("-=" * 38)
def test(verbose=False, junitfile=None, exit=False):
"""
Runs the full Blaze test suite, outputting
the results of the tests to sys.stdout.
This uses py.test to discover which tests to
run, and runs tests in any 'tests' subdirectory
within the Blaze module.
Parameters
----------
verbose : int, optional
Value 0 prints very little, 1 prints a little bit,
and 2 prints the test names while testing.
junitfile : string, optional
If provided, writes the test results to an junit xml
style xml file. This is useful for running the tests
in a CI server such as Jenkins.
exit : bool, optional
If True, the function will call sys.exit with an
error code after the tests are finished.
"""
import os
import sys
import pytest
args = []
if verbose:
args.append('--verbose')
# Output an xunit file if requested
if junitfile is not None:
args.append('--junit-xml=%s' % junitfile)
# Add all 'tests' subdirectories to the options
rootdir = os.path.dirname(__file__)
for root, dirs, files in os.walk(rootdir):
if 'tests' in dirs:
testsdir = os.path.join(root, 'tests')
args.append(testsdir)
print('Test dir: %s' % testsdir[len(rootdir) + 1:])
# print versions (handy when reporting problems)
print_versions()
sys.stdout.flush()
# Ask pytest to do its thing
error_code = pytest.main(args=args)
if exit:
return sys.exit(error_code)
return error_code == 0
|
Python
| 0.999728
|
@@ -1017,32 +1017,63 @@
m .sql import *%0A
+ from .compute.sql import *%0A
except ImportErr
@@ -1537,20 +1537,13 @@
-import blaze
+from
.com
@@ -1553,16 +1553,25 @@
e.chunks
+ import *
%0Aexcept
@@ -1606,16 +1606,24 @@
from
+.compute
.bcolz i
@@ -1681,32 +1681,65 @@
.mongo import *%0A
+ from .compute.mongo import *%0A
except ImportErr
@@ -1748,32 +1748,32 @@
:%0A pass%0Atry:%0A
-
from .pytabl
@@ -1776,32 +1776,68 @@
tables import *%0A
+ from .compute.pytables import *%0A
except ImportErr
|
eb1fdf3419bdfd1d5920d73a877f707162b783b0
|
Drop unused and dangerous entrypoint `open_fileindex`
|
cfgrib/__init__.py
|
cfgrib/__init__.py
|
#
# Copyright 2017-2021 European Centre for Medium-Range Weather Forecasts (ECMWF).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = "0.9.9.2.dev0"
# cfgrib core API depends on the ECMWF ecCodes C-library only
from .cfmessage import CfMessage
from .dataset import (
Dataset,
DatasetBuildError,
open_container,
open_file,
open_fileindex,
open_from_index,
)
from .messages import FileStream, Message
# NOTE: xarray is not a hard dependency, but let's provide helpers if it is available.
try:
from .xarray_store import open_dataset, open_datasets
except ImportError:
pass
|
Python
| 0
|
@@ -772,22 +772,16 @@
ort
-(%0A
Dataset,
%0A
@@ -776,20 +776,16 @@
Dataset,
-%0A
Dataset
@@ -795,20 +795,16 @@
ldError,
-%0A
open_co
@@ -811,20 +811,16 @@
ntainer,
-%0A
open_fi
@@ -826,32 +826,8 @@
ile,
-%0A open_fileindex,%0A
ope
@@ -842,11 +842,8 @@
ndex
-,%0A)
%0Afro
|
ad8036e5a21fd29885dc7ebf201e599a0ca79563
|
add charliecloud 0.9.7 (#10661)
|
var/spack/repos/builtin/packages/charliecloud/package.py
|
var/spack/repos/builtin/packages/charliecloud/package.py
|
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Charliecloud(MakefilePackage):
"""Lightweight user-defined software stacks for HPC."""
homepage = "https://hpc.github.io/charliecloud"
url = "https://github.com/hpc/charliecloud/archive/v0.2.4.tar.gz"
version('0.9.6', sha256='50e20d5e2a3710cd06e7c999db22495b07ef0fb15ffbc0af3bccac5387f0fddb')
version('0.9.3', sha256='f1bf032377b8845bc9a93b8a4fad6386161e35900223c0acc61d1f3aa3a87bc7')
version('0.9.2', sha256='8d0e4804d412beef720a66f886a0a78bce42f3269e880ebf11f602581f8047d4')
version('0.9.1', sha256='8e69150a271285da71ece7a09b48251ef6593f72207c5126741d9976aa737d95')
version('0.9.0', sha256='7e74cb16e31fd9d502198f7509bab14d1049ec68ba90b15e277e76f805db9458')
version('0.2.4', 'b112de661c2c360174b42c99022c1967')
@property
def install_targets(self):
return ['install', 'PREFIX=%s' % self.prefix]
|
Python
| 0.000001
|
@@ -439,16 +439,112 @@
ar.gz%22%0A%0A
+ version('0.9.7', sha256='ec80a4b9bef3a2161a783e11d99cc58e09a32dfbc8a6234c8f7ce7fa76e2f62d')%0A
vers
|
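The diff inserts a single `version(...)` directive for 0.9.7 (sha256 taken verbatim from the patch) above the existing ones. Roughly how the package file reads after the change; new releases go first by convention, though Spack picks the highest version number regardless of order:

```python
from spack import *

class Charliecloud(MakefilePackage):
    """Lightweight user-defined software stacks for HPC."""

    homepage = "https://hpc.github.io/charliecloud"
    url = "https://github.com/hpc/charliecloud/archive/v0.2.4.tar.gz"

    # Added by the commit, above the existing versions:
    version('0.9.7', sha256='ec80a4b9bef3a2161a783e11d99cc58e09a32dfbc8a6234c8f7ce7fa76e2f62d')
    version('0.9.6', sha256='50e20d5e2a3710cd06e7c999db22495b07ef0fb15ffbc0af3bccac5387f0fddb')
```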
1533101139ad5497519bf2b25a6b0c8329a01535
|
Fix TypeError when fetching a playlist with a podcast or similar item (#201)
|
spotify_dl/spotify.py
|
spotify_dl/spotify.py
|
import sys
from spotify_dl.scaffold import log
from spotify_dl.utils import sanitize
def fetch_tracks(sp, item_type, url):
"""
Fetches tracks from the provided URL.
:param sp: Spotify client
:param item_type: Type of item being requested for: album/playlist/track
:param url: URL of the item
:return Dictionary of song and artist
"""
songs_list = []
offset = 0
if item_type == 'playlist':
while True:
items = sp.playlist_items(playlist_id=url,
fields='items.track.name,items.track.artists(name, uri),'
'items.track.album(name, release_date, total_tracks, images),'
'items.track.track_number,total, next,offset,'
'items.track.id',
additional_types=['track'], offset=offset)
total_songs = items.get('total')
for item in items['items']:
track_info = item.get('track')
track_album_info = track_info.get('album')
track_num = track_info.get('track_number')
spotify_id = track_info.get('id')
track_name = track_info.get('name')
track_artist = ", ".join([artist['name'] for artist in track_info.get('artists')])
if track_album_info:
track_album = track_album_info.get('name')
track_year = track_album_info.get('release_date')[:4] if track_album_info.get('release_date') else ''
album_total = track_album_info.get('total_tracks')
if len(item['track']['album']['images']) > 0:
cover = item['track']['album']['images'][0]['url']
else:
cover = None
artists = track_info.get('artists')
main_artist_id = artists[0].get('uri', None) if len(artists) > 0 else None
genres = sp.artist(artist_id=main_artist_id).get('genres', []) if main_artist_id else []
if len(genres) > 0:
genre = genres[0]
else:
genre = ""
songs_list.append({"name": track_name, "artist": track_artist, "album": track_album, "year": track_year,
"num_tracks": album_total, "num": track_num, "playlist_num": offset + 1,
"cover": cover, "genre": genre, "spotify_id": spotify_id})
offset += 1
log.info(f"Fetched {offset}/{total_songs} songs in the playlist")
if total_songs == offset:
log.info('All pages fetched, time to leave. Added %s songs in total', offset)
break
elif item_type == 'album':
while True:
album_info = sp.album(album_id=url)
items = sp.album_tracks(album_id=url)
total_songs = items.get('total')
track_album = album_info.get('name')
track_year = album_info.get('release_date')[:4] if album_info.get('release_date') else ''
album_total = album_info.get('total_tracks')
if len(album_info['images']) > 0:
cover = album_info['images'][0]['url']
else:
cover = None
if len(sp.artist(artist_id=album_info['artists'][0]['uri'])['genres']) > 0:
genre = sp.artist(artist_id=album_info['artists'][0]['uri'])['genres'][0]
else:
genre = ""
for item in items['items']:
track_name = item.get('name')
track_artist = ", ".join([artist['name'] for artist in item['artists']])
track_num = item['track_number']
spotify_id = item.get('id')
songs_list.append({"name": track_name, "artist": track_artist, "album": track_album, "year": track_year,
"num_tracks": album_total, "num": track_num, "playlist_num": offset + 1,
"cover": cover, "genre": genre, "spotify_id": spotify_id})
offset += 1
log.info(f"Fetched {offset}/{total_songs} songs in the album")
if total_songs == offset:
log.info('All pages fetched, time to leave. Added %s songs in total', offset)
break
elif item_type == 'track':
items = sp.track(track_id=url)
track_name = items.get('name')
album_info = items.get('album')
track_artist = ", ".join([artist['name'] for artist in items['artists']])
if album_info:
track_album = album_info.get('name')
track_year = album_info.get('release_date')[:4] if album_info.get('release_date') else ''
album_total = album_info.get('total_tracks')
track_num = items['track_number']
spotify_id = items['id']
if len(items['album']['images']) > 0:
cover = items['album']['images'][0]['url']
else:
cover = None
if len(sp.artist(artist_id=items['artists'][0]['uri'])['genres']) > 0:
genre = sp.artist(artist_id=items['artists'][0]['uri'])['genres'][0]
else:
genre = ""
songs_list.append({"name": track_name, "artist": track_artist, "album": track_album, "year": track_year,
"num_tracks": album_total, "num": track_num, "playlist_num": offset + 1,
"cover": cover, "genre": genre, "spotify_id": spotify_id})
return songs_list
def parse_spotify_url(url):
"""
Parse the provided Spotify playlist URL and determine if it is a playlist, track or album.
:param url: URL to be parsed
:return tuple indicating the type and id of the item
"""
if url.startswith("spotify:"):
log.error("Spotify URI was provided instead of a playlist/album/track URL.")
sys.exit(1)
parsed_url = url.replace("https://open.spotify.com/", "")
item_type = parsed_url.split("/")[0]
item_id = parsed_url.split("/")[1]
return item_type, item_id
def get_item_name(sp, item_type, item_id):
"""
Fetch the name of the item.
:param sp: Spotify Client
:param item_type: Type of the item
:param item_id: id of the item
:return String indicating the name of the item
"""
if item_type == 'playlist':
name = sp.playlist(playlist_id=item_id, fields='name').get('name')
elif item_type == 'album':
name = sp.album(album_id=item_id).get('name')
elif item_type == 'track':
name = sp.track(track_id=item_id).get('name')
return sanitize(name)
def validate_spotify_url(url):
"""
Validate the URL and determine if the item type is supported.
:return Boolean indicating whether or not item is supported
"""
item_type, item_id = parse_spotify_url(url)
log.debug(f"Got item type {item_type} and item_id {item_id}")
if item_type not in ['album', 'track', 'playlist']:
log.info("Only albums/tracks/playlists are supported")
return False
if item_id is None:
log.info("Couldn't get a valid id")
return False
return True
|
Python
| 0
|
@@ -1075,16 +1075,306 @@
track')%0A
+ # If the user has a podcast in their playlist, there will be no track%0A # Without this conditional, the program will fail later on when the metadata is fetched%0A if track_info is None:%0A offset += 1%0A continue%0A
|
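The patch guards the playlist loop: podcast episodes and other non-track items come back from `playlist_items` with `track` set to `None`, so dereferencing the metadata raised the `TypeError` in the title. The patched loop head, as a sketch:

```python
for item in items['items']:
    track_info = item.get('track')
    # Podcasts and similar items have no track; skip them before
    # touching the metadata, but keep the offset advancing so the
    # paging bookkeeping stays in sync.
    if track_info is None:
        offset += 1
        continue
    track_album_info = track_info.get('album')
```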
a3a19ab3cad0999cc61fdebe9c6fb1ceca873ab6
|
make it full screen
|
boothpy/widget.py
|
boothpy/widget.py
|
# Copyright 2017 Christian Menard
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from PyQt5.QtWidgets import QWidget, QLabel, QMessageBox
from PyQt5.QtGui import QPixmap
from PyQt5.QtCore import QTimer, Qt
class ErrorMessage(QMessageBox):
def __init__(self, error, description):
super().__init__()
self.setIcon(QMessageBox.Critical)
self.setWindowTitle('PyBooth Error')
self.setText(error)
self.setInformativeText(description)
class BoothPyWidget(QWidget):
def __init__(self, camera):
super().__init__()
self.camera = camera
self.init_ui()
def init_ui(self):
self.setGeometry(300, 300, 300, 220)
self.setWindowTitle('BoothPy')
preview_data = None
try:
preview_data = self.camera.capture_preview()
except BaseException as e:
err = ErrorMessage('Error while capturing preview:', str(e))
err.exec_()
self.close()
self.preview = QLabel(self)
pixmap = QPixmap()
pixmap.loadFromData(preview_data)
self.preview.setPixmap(pixmap)
self.resize(pixmap.width(), pixmap.height())
self.frame_timer = QTimer()
self.frame_timer.timeout.connect(self.on_frame_timeout)
self.frame_timer.setInterval(50)
self.frame_timer.start()
self.show()
def on_frame_timeout(self):
preview_data = None
try:
preview_data = self.camera.capture_preview()
except BaseException as e:
err = ErrorMessage('Error while capturing preview:', str(e))
err.exec_()
self.close()
pixmap = QPixmap()
pixmap.loadFromData(preview_data)
self.preview.setPixmap(pixmap)
def keyPressEvent(self, e):
if e.key() == Qt.Key_Escape:
self.close()
|
Python
| 0.000233
|
@@ -1141,16 +1141,30 @@
ssageBox
+, QApplication
%0Afrom Py
@@ -1945,32 +1945,57 @@
view:', str(e))%0A
+ self.close()%0A
err.
@@ -1998,32 +1998,33 @@
err.exec_()%0A
+%0A
self.clo
@@ -2011,33 +2011,118 @@
- self.close()%0A
+self.preview = QLabel(self)%0A%0A self.preview.setGeometry(QApplication.desktop().screenGeometry())
%0A
@@ -2134,30 +2134,39 @@
.preview
- = QLabel(self
+.setScaledContents(True
)%0A
@@ -2270,61 +2270,8 @@
map)
-%0A self.resize(pixmap.width(), pixmap.height())
%0A%0A
@@ -2460,16 +2460,26 @@
elf.show
+FullScreen
()%0A%0A
@@ -2725,25 +2725,26 @@
-err.exec_
+self.close
()%0A
@@ -2750,26 +2750,25 @@
-self.close
+err.exec_
()%0A%0A
|
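The diff imports `QApplication`, pins the preview label to the full screen geometry, turns on `setScaledContents` so frames stretch to fit, drops the pixmap-based `resize`, and swaps `show()` for `showFullScreen()`. A condensed sketch of the same idea (the class name is illustrative):

```python
import sys
from PyQt5.QtWidgets import QApplication, QLabel, QWidget

class FullScreenPreview(QWidget):  # illustrative stand-in for BoothPyWidget
    def __init__(self):
        super().__init__()
        self.preview = QLabel(self)
        # Pin the label to the whole screen and let Qt scale frames:
        self.preview.setGeometry(QApplication.desktop().screenGeometry())
        self.preview.setScaledContents(True)
        self.showFullScreen()  # replaces the plain show()

app = QApplication(sys.argv)
widget = FullScreenPreview()
sys.exit(app.exec_())
```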
c7322a1ff37c7f2d4c3dfb149c2e36daafae6043
|
Bump to version 0.11.3
|
ckanny/__init__.py
|
ckanny/__init__.py
|
# -*- coding: utf-8 -*-
# vim: sw=4:ts=4:expandtab
"""
ckanny
~~~~~~
Miscellaneous CKAN utility scripts
Examples:
literal blocks::
python example_google.py
Attributes:
module_level_variable1 (int): Module level variables may be documented in
"""
from __future__ import (
absolute_import, division, print_function, with_statement,
unicode_literals)
from manager import Manager
from . import datastorer, filestorer, hdx
__title__ = 'ckanny'
__author__ = 'Reuben Cummings'
__description__ = 'Miscellaneous CKAN utility scripts'
__email__ = 'reubano@gmail.com'
__version__ = '0.11.2'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015 Reuben Cummings'
manager = Manager()
manager.merge(datastorer.manager, namespace='ds')
manager.merge(filestorer.manager, namespace='fs')
manager.merge(hdx.manager, namespace='hdx')
@manager.command
def ver():
"""Show ckanny version"""
from . import __version__ as version
print('v%s' % version)
if __name__ == '__main__':
manager.main()
|
Python
| 0
|
@@ -602,17 +602,17 @@
= '0.11.
-2
+3
'%0A__lice
|
07a74375fabddc9b6fa4de0c345949bfadb54504
|
Revert silly change
|
examples/sync_test.py
|
examples/sync_test.py
|
"""
=============
A-V sync test
=============
This example tests synchronization between the screen and the audio playback.
"""
# Author: Dan McCloy <drmccloy@uw.edu>
#
# License: BSD (3-clause)
print __doc__
import numpy as np
from expyfun import ExperimentController
rng = np.random.RandomState(0)
with ExperimentController('SyncTest', screen_num=0, window_size=[300, 300],
full_screen=False, stim_db=70, noise_db=-np.inf,
stim_fs=44100, participant='s', session='0',
output_dir=None) as ec:
ec.load_buffer(np.r_[0.1, np.zeros(2000)])
white = [1, 1, 1]
black = [-1, -1, -1]
while True:
ec.draw_background_color(white)
t1 = ec.flip_and_play()
ec.draw_background_color(black)
t2 = ec.flip() # expyfun
print 1. / (t2 - t1)
ec.wait_one_press(0.5)
|
Python
| 0.000002
|
@@ -487,13 +487,13 @@
_fs=
+2
441
-00
+4
, pa
|
63ec2f241c219f9a5fea33de63b520d8b0da5fd8
|
Fix initial display update.
|
classes/display.py
|
classes/display.py
|
"""Display Class"""
import time
class Display:
"""Display progress of process.
Attributes:
start_time (float): Seconds since epoch to when progress starts.
elapsed_time (float): Seconds since progress started.
last_updated (float): Seconds since epoch to when progress was
last updated.
"""
def __init__(self):
self.start_time = None
self.elapsed_time = None
self.last_updated = None
def start(self, message=None):
"""Initiates start time. Can display start messages.
Args:
message (string): Optional start message.
Returns:
None
Raises:
None
"""
self.start_time = time.time()
if message:
print(message)
def update_progress_bar(self, step, end):
"""
Args:
step (float): Current iteration of process.
end (float): Final iteration of process.
Returns:
None
Raises:
None
"""
percent = float(step) / float(end)
start_time = self.start_time
current_time = time.time()
if current_time < self.last_updated + 0.017:
return
else:
self.last_updated = current_time
elapsed_time = current_time - start_time
self.elapsed_time = elapsed_time
estimated_time = (elapsed_time / percent) - elapsed_time
hours = int(estimated_time / 3600.0)
minutes = int((estimated_time - (hours * 3600)) / 60.0)
seconds = int(estimated_time - (minutes * 60) - (hours * 3600))
time_remaining = "{:02d}:{:02d}:{:02d}".format(
hours,
minutes,
seconds
)
progress_bar = "{}".format('\u2588' * int(percent * 25.0))
remainder = (percent * 25.0) - len(progress_bar)
if remainder >= 0.75:
progress_bar += '\u258a'
elif remainder >= 0.5:
progress_bar += '\u258c'
elif remainder >= 0.25:
progress_bar += '\u258e'
progress_bar += ' ' * (25 - len(progress_bar))
output = " {:05.2f}% |{}| Time Remaining: {}".format(
percent * 100.0,
progress_bar,
time_remaining
)
print(' ' * 72, end='\r')
print(output, end='\r')
def finish(self):
"""Displays elapsed time of process. Clears attributes.
Args:
None
Returns:
None
Raises:
None
"""
hours = int(self.elapsed_time / 3600.0)
minutes = int(self.elapsed_time / 60.0)
seconds = int(self.elapsed_time - (minutes * 60) - (hours * 3600))
elapsed_time = "{:02d}:{:02d}:{:02d}".format(
hours,
minutes,
seconds
)
print(" 100.00% |{}| Elapsed Time: {} ".format(
'\u2588' * 25,
elapsed_time
))
self.start_time = None
self.elapsed_time = None
self.last_updated = None
|
Python
| 0
|
@@ -1180,16 +1180,38 @@
if
+self.last_updated and
current_
@@ -2352,10 +2352,10 @@
' *
-72
+80
, en
|
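On the very first call `self.last_updated` is still `None`, so `current_time < None + 0.017` raises a `TypeError`; the fix short-circuits on the attribute first (the diff also widens the line-clearing pad from 72 to 80 spaces). The patched throttle check:

```python
# First call: last_updated is still None, and None + 0.017 would
# raise a TypeError, so test the attribute before comparing.
if self.last_updated and current_time < self.last_updated + 0.017:
    return
else:
    self.last_updated = current_time
```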
d008b0cec67a1428a2761b32f8b9cd7fee6372ed
|
Fix hardcoded vsdk branch
|
generator/src/lib/managers.py
|
generator/src/lib/managers.py
|
# -*- coding: utf-8 -*-
import os
import shutil
import threading
from git import Repo, GitCommandError
from printer import Printer
class TaskManager(object):
""" Multi threading manager """
def __init__(self):
""" Initializes a TaskManager
"""
self.threads = list()
def wait_until_exit(self):
""" Wait until all the threads are finished.
"""
[t.join() for t in self.threads]
self.threads = list()
def start_task(self, method, *args, **kwargs):
""" Start a task in a separate thread
Args:
method: the method to start in a separate thread
args: Accept args/kwargs arguments
"""
thread = threading.Thread(target=method, args=args, kwargs=kwargs)
thread.is_daemon = False
thread.start()
self.threads.append(thread)
class GitManager(object):
""" Manager of git repository
"""
def __init__(self, url, branch, directory):
""" Initializes a GitManager
Args:
url: url of the git repository to clone
branch: name of the branch
directory: the directory name
"""
self.url = url
self.directory = directory
self.branch = branch
self.repo = None
self._nb_changes = 0
self.remove_directory()
self.repo = Repo.clone_from(url=self.url, to_path=self.directory)
try:
self.repo.git.checkout('3.0')
Printer.log('Switching to branch %s' % self.branch)
except GitCommandError:
Printer.log('Branch %s does not exist yet. Creating it...' % self.branch)
branch = self.repo.create_head(self.branch)
self.repo.head.reference = branch
# remote = self.repo.remote()
# remote.push(self.repo.head)
def commit(self, message):
""" Add all modification and add a commit message
Args:
message: the message for the commit
Returns:
Returns the number of diffs affected by the commit
No commit are made if no diffs are found
"""
diffs = self.repo.index.diff(None)
nb_diffs = len(diffs)
nb_untracked_files = len(self.repo.untracked_files)
if nb_diffs:
for diff in diffs:
if diff.b_mode == 0 and diff.b_blob is None:
self.repo.index.remove(items=[diff.a_blob.path])
else:
self.repo.index.add(items=[diff.a_blob.path])
if nb_untracked_files > 0:
self.repo.index.add(items=self.repo.untracked_files)
self._nb_changes = nb_diffs + nb_untracked_files
if self._nb_changes > 0:
self.repo.index.commit(message)
return self._nb_changes
def push(self):
""" Push all modififcation to the repository
"""
if self._nb_changes > 0:
remote = self.repo.remote()
remote.push(self.repo.head)
self._nb_changes = 0
def remove_directory(self):
""" Clean the clone repository
"""
if os.path.exists(self.directory):
shutil.rmtree(self.directory)
|
Python
| 0.000335
|
@@ -1293,22 +1293,27 @@
ranch =
+str(
branch
+)
%0A
@@ -1519,13 +1519,19 @@
out(
-'3.0'
+self.branch
)%0A
|
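The diff replaces the hardcoded `checkout('3.0')` with the branch the manager was constructed with, and coerces it with `str()`. A minimal standalone sketch of the corrected behaviour (the helper name is illustrative):

```python
from git import Repo

def checkout_branch(repo: Repo, branch) -> None:
    """Check out the branch the manager was configured with,
    instead of the hardcoded '3.0'."""
    repo.git.checkout(str(branch))  # str() tolerates non-string input
```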
72e71235d0f5e4851b212e4c7fa583eeddce6252
|
Fix QueueUtility to read request from view again
|
src/plone.server/plone/server/async.py
|
src/plone.server/plone/server/async.py
|
# -*- coding: utf-8 -*-
from datetime import datetime
from plone.server.browser import ErrorResponse
from plone.server.browser import UnauthorizedResponse
from plone.server.browser import View
from plone.server import _
from plone.server.transactions import sync
from plone.server.transactions import TransactionProxy
from zope.interface import Interface
from zope.security.interfaces import Unauthorized
import asyncio
import logging
logger = logging.getLogger(__name__)
class IAsyncUtility(Interface):
async def initialize(self):
pass
class IQueueUtility(IAsyncUtility):
pass
class QueueUtility(object):
def __init__(self, settings):
self._queue = asyncio.PriorityQueue()
self._exceptions = False
self._total_queued = 0
async def initialize(self, app=None):
# loop
self.app = app
while True:
got_obj = False
try:
priority, view = await self._queue.get()
got_obj = True
txn = request.conn.transaction_manager.begin(request)
try:
view_result = await view()
if isinstance(view_result, ErrorResponse):
await sync(request)(txn.abort)
elif isinstance(view_result, UnauthorizedResponse):
await sync(request)(txn.abort)
else:
await sync(request)(txn.commit)
except Unauthorized:
await sync(request)(txn.abort)
view_result = UnauthorizedResponse(
_('Not authorized to render operation'))
except Exception as e:
logger.error(
"Exception on writing execution",
exc_info=e)
await sync(request)(txn.abort)
view_result = ErrorResponse(
'ServiceError',
_('Error on execution of operation')
)
except KeyboardInterrupt or MemoryError or SystemExit or asyncio.CancelledError:
self._exceptions = True
raise
except:
self._exceptions = True
logger.error('Worker call failed')
finally:
if got_obj:
self._queue.task_done()
@property
def exceptions(self):
return self._exceptions
@property
def total_queued(self):
return self._total_queued
async def add(self, view, priority=3):
await self._queue.put((priority, view))
self._total_queued += 1
return self._queue.qsize()
class QueueObject(View):
def __init__(self, context, request):
super(QueueObject, self).__init__(context, TransactionProxy(request))
self.time = datetime.now().timestamp()
def __lt__(self, view):
return self.time < view.time
|
Python
| 0
|
@@ -1024,16 +1024,21 @@
txn =
+view.
request.
@@ -1068,16 +1068,21 @@
r.begin(
+view.
request)
@@ -1240,32 +1240,37 @@
await sync(
+view.
request)(txn.abo
@@ -1372,32 +1372,37 @@
await sync(
+view.
request)(txn.abo
@@ -1458,32 +1458,37 @@
await sync(
+view.
request)(txn.com
@@ -1552,32 +1552,37 @@
await sync(
+view.
request)(txn.abo
@@ -1904,16 +1904,21 @@
it sync(
+view.
request)
|
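The queue only carries `(priority, view)` pairs, so every bare `request` inside the loop was an unbound name; the committed diff prefixes each use with `view.`. An equivalent, slightly condensed sketch that binds it once (the original's finer-grained error handling is elided):

```python
priority, view = await self._queue.get()
got_obj = True
request = view.request          # the request rides on the queued view
txn = request.conn.transaction_manager.begin(request)
try:
    view_result = await view()
    if isinstance(view_result, (ErrorResponse, UnauthorizedResponse)):
        await sync(request)(txn.abort)
    else:
        await sync(request)(txn.commit)
except Unauthorized:
    await sync(request)(txn.abort)
```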
85880dbf68718737fa52535326163d9b40adf7f9
|
Add tags to event serializer
|
src/sentry/api/serializers/models/event.py
|
src/sentry/api/serializers/models/event.py
|
from __future__ import absolute_import
from sentry.api.serializers import Serializer, register
from sentry.models import Event
@register(Event)
class EventSerializer(Serializer):
def _get_entries(self, event, user):
# XXX(dcramer): These are called entries for future-proofing
interface_list = []
for key, interface in event.interfaces.iteritems():
if key == 'user':
continue
entry = {
'data': interface.to_json(),
'type': interface.get_alias(),
}
interface_list.append((interface, entry))
interface_list.sort(key=lambda x: x[0].get_display_score(), reverse=True)
return [i[1] for i in interface_list]
def get_attrs(self, item_list, user):
Event.objects.bind_nodes(item_list, 'data')
results = {}
for item in item_list:
user_interface = item.interfaces.get('sentry.interfaces.User')
if user_interface:
user_data = user_interface.to_json()
else:
user_data = None
results[item] = {
'entries': self._get_entries(item, user),
'user': user_data,
}
return results
def serialize(self, obj, attrs, user):
d = {
'id': str(obj.id),
'eventID': str(obj.event_id),
'entries': attrs['entries'],
'message': obj.message,
'user': attrs['user'],
'platform': obj.platform,
'dateCreated': obj.datetime,
'timeSpent': obj.time_spent,
}
return d
|
Python
| 0
|
@@ -1497,24 +1497,60 @@
rs%5B'user'%5D,%0A
+ 'tags': obj.get_tags(),%0A
|
71636292d089f16485691f242edf74fcbd72ff2b
|
Enforce PEP8 on readpdf.py
|
jarviscli/plugins/readpdf.py
|
jarviscli/plugins/readpdf.py
|
# importing the modules
import PyPDF2
import pyttsx3
from plugin import plugin
"""
A tool for reading out the pdf files using the jarvis.Uses PyPDF2 and pyttsx3 libraries
"""
@plugin('readpdf')
class readpdfjarvis():
def __init__(self):
self.path = None
def __call__(self, jarvis, s):
self.read_pdf(jarvis)
def read_pdf(self, jarvis):
filename = jarvis.input("Enter your file path with '/' seperations:")
pdf = open(filename, 'rb')
pdfRead = PyPDF2.PdfFileReader(pdf)
for i in range(pdfRead.getNumPages()):
page = pdfRead.getPage(i)
jarvis.say("Page No: "+str(1 + pdfRead.getPageNumber(page)))
pageContent = page.extractText()
jarvis.say(pageContent)
speak = pyttsx3.init()
speak.say(pageContent)
speak.runAndWait()
|
Python
| 0
|
@@ -77,17 +77,12 @@
in%0A%0A
-%0A
%22%22%22%0A
-
A to
@@ -169,16 +169,14 @@
ies%0A
-
%22%22%22%0A
+%0A%0A
@plu
@@ -216,10 +216,8 @@
s():
-%0A%09
%0A%0A
@@ -428,17 +428,17 @@
'/' sep
-e
+a
rations:
@@ -638,17 +638,19 @@
ge No: %22
-+
+ +
str(1 +
|
08451a470fe52525ae7101a2dc4bf0295c04a044
|
make coastlines gray instead of black
|
examples/warpimage.py
|
examples/warpimage.py
|
import pylab as P
from matplotlib.toolkits.basemap import Basemap
from matplotlib.numerix import ma
from matplotlib.image import pil_to_array
from PIL import Image
# shows how to warp an image from one map projection to another.
# image from http://visibleearth.nasa.gov/
# read in jpeg image to rgba array of normalized floats.
pilImage = Image.open('land_shallow_topo_2048.jpg')
rgba = pil_to_array(pilImage)
rgba = rgba.astype(P.Float32)/255. # convert to normalized floats.
# define lat/lon grid that image spans (projection='cyl').
nlons = rgba.shape[1]; nlats = rgba.shape[0]
delta = 360./float(nlons)
lons = P.arange(-180.+0.5*delta,180.,delta)
lats = P.arange(-90.+0.5*delta,90.,delta)
# define cylindrical equidistant projection.
m = Basemap(projection='cyl',llcrnrlon=-180,llcrnrlat=-90,urcrnrlon=180,urcrnrlat=90,resolution='l')
# plot (unwarped) rgba image.
im = m.imshow(rgba)
# draw coastlines.
m.drawcoastlines(linewidth=0.5)
# draw lat/lon grid lines.
m.drawmeridians(P.arange(-180,180,60),labels=[0,0,0,1])
m.drawparallels(P.arange(-90,90,30),labels=[1,0,0,0])
P.title("Blue Marble image - native 'cyl' projection",fontsize=12)
P.show()
# define orthographic projection centered on North America.
m = Basemap(projection='ortho',lat_0=50,lon_0=-100,resolution='l')
# transform to nx x ny regularly spaced native projection grid
# nx and ny chosen to have roughly the same horizontal res as original image.
dx = 2.*P.pi*m.rmajor/float(nlons)
nx = int((m.xmax-m.xmin)/dx)+1; ny = int((m.ymax-m.ymin)/dx)+1
rgba_warped = ma.zeros((ny,nx,4),P.Float64)
# interpolate rgba values from proj='cyl' (geographic coords) to 'lcc'
# values outside of projection limb will be masked.
for k in range(4):
rgba_warped[:,:,k] = m.transform_scalar(rgba[:,:,k],lons,lats,nx,ny,masked=True)
# make points outside projection limb transparent.
rgba_warped = rgba_warped.filled(0.)
# plot warped rgba image.
im = m.imshow(rgba_warped)
# draw coastlines.
m.drawcoastlines(linewidth=0.5)
# draw lat/lon grid lines every 30 degrees.
m.drawmeridians(P.arange(0,360,30))
m.drawparallels(P.arange(-90,90,30))
P.title("Blue Marble image warped from 'cyl' to 'ortho' projection",fontsize=12)
P.show()
# define Lambert Conformal basemap for North America.
m = Basemap(llcrnrlon=-145.5,llcrnrlat=1.,urcrnrlon=-2.566,urcrnrlat=46.352,\
rsphere=(6378137.00,6356752.3142),lat_1=50.,lon_0=-107.,\
resolution='i',area_thresh=1000.,projection='lcc')
# transform to nx x ny regularly spaced native projection grid
# nx and ny chosen to have roughly the same horizontal res as original image.
dx = 2.*P.pi*m.rmajor/float(nlons)
nx = int((m.xmax-m.xmin)/dx)+1; ny = int((m.ymax-m.ymin)/dx)+1
rgba_warped = P.zeros((ny,nx,4),P.Float64)
# interpolate rgba values from proj='cyl' (geographic coords) to 'lcc'
for k in range(4):
rgba_warped[:,:,k] = m.transform_scalar(rgba[:,:,k],lons,lats,nx,ny)
# plot warped rgba image.
im = m.imshow(rgba_warped)
# draw coastlines.
m.drawcoastlines(linewidth=0.5)
# draw parallels and meridians.
# label on left, right and bottom of map.
parallels = P.arange(0.,80,20.)
m.drawparallels(parallels,labels=[1,1,0,1])
meridians = P.arange(10.,360.,30.)
m.drawmeridians(meridians,labels=[1,1,0,1])
P.title("Blue Marble image warped from 'cyl' to 'lcc' projection",fontsize=12)
P.show()
|
Python
| 0.000006
|
@@ -928,32 +928,44 @@
es(linewidth=0.5
+,color='0.5'
)%0A# draw lat/lon
@@ -1031,16 +1031,28 @@
0,0,0,1%5D
+,color='0.5'
)%0Am.draw
@@ -1097,16 +1097,28 @@
1,0,0,0%5D
+,color='0.5'
)%0AP.titl
@@ -1289,17 +1289,17 @@
t_0=
-5
+4
0,lon_0=
-100
@@ -1294,19 +1294,17 @@
0,lon_0=
--10
+4
0,resolu
@@ -2005,32 +2005,44 @@
es(linewidth=0.5
+,color='0.5'
)%0A# draw lat/lon
@@ -2105,16 +2105,28 @@
,360,30)
+,color='0.5'
)%0Am.draw
@@ -2154,16 +2154,28 @@
0,90,30)
+,color='0.5'
)%0AP.titl
@@ -3071,16 +3071,28 @@
idth=0.5
+,color='0.5'
)%0A# draw
@@ -3233,16 +3233,28 @@
1,1,0,1%5D
+,color='0.5'
)%0Ameridi
@@ -3324,16 +3324,28 @@
1,1,0,1%5D
+,color='0.5'
)%0AP.titl
|
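Matplotlib accepts a grayscale level written as a string float, where `'0.0'` is black and `'1.0'` is white, so `color='0.5'` gives medium gray. The changed calls from the first plot, as a fragment (`m` and `P` as defined in the record):

```python
# '0.5' is a grayscale spec: halfway between black and white.
m.drawcoastlines(linewidth=0.5, color='0.5')
m.drawmeridians(P.arange(-180, 180, 60), labels=[0, 0, 0, 1], color='0.5')
m.drawparallels(P.arange(-90, 90, 30), labels=[1, 0, 0, 0], color='0.5')
```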
bb7fa507a31901819dbc7712b13c4223fe6d3585
|
Correct p tags on system message output
|
src/sentry/templatetags/sentry_activity.py
|
src/sentry/templatetags/sentry_activity.py
|
"""
sentry.templatetags.sentry_activity
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from django import template
from django.utils.html import escape, linebreaks
from django.utils.safestring import mark_safe
from sentry.models import Activity
from sentry.templatetags.sentry_helpers import timesince
register = template.Library()
ACTIVITY_ACTION_STRINGS = {
Activity.COMMENT: 'left a comment',
Activity.SET_RESOLVED: 'marked this event as resolved',
Activity.SET_UNRESOLVED: 'marked this event as unresolved',
Activity.SET_MUTED: 'marked this event as muted',
Activity.SET_PUBLIC: 'made this event public',
Activity.SET_PRIVATE: 'made this event private',
Activity.SET_REGRESSION: 'marked this event as a regression',
}
@register.filter
def render_activity(item):
if not item.group:
# not implemented
return
action_str = ACTIVITY_ACTION_STRINGS[item.type]
if item.user:
name = item.user.first_name or item.user.email
output = '<p><strong>%s</strong> %s' % (escape(name), action_str)
else:
output = 'The system %s' % (action_str,)
output += ' — %s</p>' % (timesince(item.datetime),)
if item.type == Activity.COMMENT:
output += linebreaks(item.data['body'])
return mark_safe(output)
|
Python
| 0.00002
|
@@ -1027,16 +1027,36 @@
.type%5D%0A%0A
+ output = '%3Cp%3E'%0A%0A
if i
@@ -1139,15 +1139,13 @@
put
++
= '%3C
-p%3E%3C
stro
@@ -1217,16 +1217,17 @@
output
++
= 'The s
|
736e1f7f4de56a57df3b51058c5b45455e577cf0
|
Fix flake8
|
busstops/management/commands/import_areas.py
|
busstops/management/commands/import_areas.py
|
"""
Import administrative areas from the NPTG.
Usage:
import_areas < AdminAreas.csv
"""
from ..import_from_csv import ImportFromCSVCommand
from ...models import AdminArea
class Command(ImportFromCSVCommand):
def handle_row(self, row):
AdminArea.objects.update_or_create(
id=row['AdministrativeAreaCode'],
defaults={
'atco_code': row['AtcoAreaCode'],
'name': row['AreaName'],
'short_name': row['ShortName'],
'country': row['Country'],
'region_id': row['RegionCode'],
}
)
def handle(self, *args, **options):
super(Command, self).handle(*args, **options)
# Move Cumbria to the North West.
# There is the legacy of the confusing 'North East and Cumbria' Traveline region,
# but actually Cumbrian bus services are in the North West now
AdminArea.objects.filter(name='Cumbria').update(region_id='NW')
|
Python
| 0
|
@@ -761,27 +761,25 @@
#
-There is the legacy
+Necessary because
of
@@ -830,16 +830,8 @@
line
- region,
%0A
@@ -840,21 +840,20 @@
#
-%C2%A0but actually
+ region, but
Cum
@@ -866,16 +866,17 @@
bus
+*
services
are
@@ -875,13 +875,23 @@
ices
+*
are
+actually
in t
|
8a870c6faf8aa50ad7f8c58458c4af9ddef7cfdc
|
Make authbind check graceful.
|
braid/authbind.py
|
braid/authbind.py
|
import os
from fabric.api import sudo, run, abort
from braid import package, hasSudoCapabilities
def install():
package.install('authbind')
def allow(user, port):
path = os.path.join('/etc/authbind/byport', str(port))
state = run('stat -c %U:%a {}'.format(path))
if state.strip().split(':') != (user, '500'):
if not hasSudoCapabilities():
abort('Trying to give {} access to port {} but have insufficient '
'capabilities.'.format(user, port))
sudo('touch {}'.format(path))
sudo('chown {0}:{0} {1}'.format(user, path))
sudo('chmod 0500 {}'.format(path))
|
Python
| 0
|
@@ -43,16 +43,23 @@
n, abort
+, quiet
%0A%0Afrom b
@@ -228,24 +228,69 @@
str(port))%0A
+ needsUpdate = True%0A with quiet():%0A
state =
@@ -330,18 +330,33 @@
h))%0A
-if
+ needsUpdate =
state.s
@@ -376,17 +376,17 @@
':') !=
-(
+%5B
user, '5
@@ -388,17 +388,36 @@
r, '500'
-)
+%5D%0A if needsUpdate
:%0A
|
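The fix imports Fabric's `quiet()` context manager so a failing `stat` (the byport file not existing yet) no longer aborts the task, and compares against a list instead of a tuple: `split(':')` returns a list, so the old tuple comparison was never equal and the sudo branch always ran. A sketch of the check as its own helper (the function name is illustrative):

```python
from fabric.api import run, quiet

def port_needs_update(path, user):
    # quiet() keeps a failing `stat` from aborting the whole task;
    # a failure just leaves output that won't match below.
    with quiet():
        state = run('stat -c %U:%a {}'.format(path))
    # split(':') yields a list, so compare with a list; the original
    # compared against the tuple (user, '500'), which never matched.
    return state.strip().split(':') != [user, '500']
```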
ad2087daae138d3897fc47f0713c8955352ed6ae
|
add SecretBallotUserIdMiddleware
|
secretballot/middleware.py
|
secretballot/middleware.py
|
# -*- coding: utf-8 -*-
from hashlib import md5
from django.utils.deprecation import MiddlewareMixin
class SecretBallotMiddleware(MiddlewareMixin):
def process_request(self, request):
request.secretballot_token = self.generate_token(request)
def generate_token(self, request):
raise NotImplementedError
class SecretBallotIpMiddleware(SecretBallotMiddleware):
def generate_token(self, request):
return request.META['REMOTE_ADDR']
class SecretBallotIpUseragentMiddleware(SecretBallotMiddleware):
def generate_token(self, request):
s = u"".join((request.META['REMOTE_ADDR'], request.META.get('HTTP_USER_AGENT', '')))
return md5(s.encode('utf-8')).hexdigest()
|
Python
| 0.000001
|
@@ -465,16 +465,295 @@
DDR'%5D%0A%0A%0A
+class SecretBallotUserIdMiddleware(SecretBallotMiddleware):%0A %22%22%22%0A As the token is generated based on the user ID, this middleware%0A should only be used on pages where the user is logged in.%0A %22%22%22%0A def genereate_token(self, request):%0A return request.user.id%0A%0A%0A
class Se
|
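The added middleware derives the token from the logged-in user's id. Note that the committed code spells the method `genereate_token`, so as written it never actually overrides the base class's `generate_token`; this sketch uses the corrected spelling:

```python
class SecretBallotUserIdMiddleware(SecretBallotMiddleware):
    """
    As the token is generated based on the user ID, this middleware
    should only be used on pages where the user is logged in.
    """
    def generate_token(self, request):  # commit spells it 'genereate_token'
        return request.user.id
```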
c81b20dde92979b563228bd35eb218e65139d283
|
fix model param
|
spych/kaldi/decode.py
|
spych/kaldi/decode.py
|
import os
from spych.utils import textfile
class KaldiDecode(object):
def __init__(self, env):
self.env = env
def make_graph(self, language_folder, model_folder, output_folder):
"""
Creates training graph from model, lexicon and language model.
:param language_folder: Path to the lang folder (with lexicon and language model).
:param model_folder: Path to the model folder.
:param output_folder: Path to the folder to put the graph into.
:return:
"""
args = [
os.path.abspath(language_folder),
os.path.abspath(model_folder),
os.path.abspath(output_folder)
]
self.env.run_bash_script('utils/mkgraph.sh', args=args)
def decode(self, graph_folder, data_folder, decode_folder, model=None, number_of_jobs=4):
"""
Decodes the given data with the given decoding graph.
:param graph_folder: Path to folder with the graph.
:param data_folder: Path to the data folder.
:param decode_folder: Path to the folder to put decoding files into.
:param model: Optional path to a specific model file.
:param number_of_jobs: Number of parallel jobs.
:return:
"""
args = [
'--nj', str(number_of_jobs),
os.path.abspath(graph_folder),
os.path.abspath(data_folder),
os.path.abspath(decode_folder)
]
if model is not None:
args.append('--model')
args.append(os.path.abspath(model))
self.env.run_bash_script('steps/decode.sh', args=args)
def create_dummy_reco2file(self, data_folder):
data_folder = os.path.abspath(data_folder)
wav_file = os.path.join(data_folder, 'wav.scp')
wavs = textfile.read_key_value_lines(wav_file, separator=' ', )
out = []
for rec_id, rec_path in wavs.items():
filename = os.path.splitext(os.path.basename(rec_path))[0]
out.append([rec_id, filename, 'A'])
reco_file = os.path.join(data_folder, 'reco2file_and_channel')
textfile.write_separated_lines(reco_file, out, separator=' ')
def get_ctm(self, data_folder, graph_folder, decode_folder):
"""
Create word alignment in CTM format from decoding folder (lattice).
:param data_folder: Path to data folder.
:param graph_folder: Path to graph folder.
:param decode_folder: Path to decode folder.
:return:
"""
self.env.run_bash_script('steps/get_ctm.sh', args=[
os.path.abspath(data_folder),
os.path.abspath(graph_folder),
os.path.abspath(decode_folder)
])
def latgen_faster_mapped(self, word_symbol_table, model, graph, in_path, out_path, min_active=200, max_active=7000, max_mem=50000000, beam=13.0,
lattice_beam=8.0, acoustic_scale=0.08, allow_partial=True, num_threads=1):
cmd = [
'latgen-faster-mapped-parallel' if num_threads > 1 else 'latgen-faster-mapped'
]
if num_threads > 1:
cmd.append('--num-threads={}'.format(num_threads))
cmd.extend([
'--min-active={}'.format(min_active),
'--max-active={}'.format(max_active),
'--max-mem={}'.format(max_mem),
'--beam={}'.format(beam),
'--lattice-beam={}'.format(lattice_beam),
'--acoustic-scale={}'.format(acoustic_scale),
'--allow-partial={}'.format('true' if allow_partial else 'false'),
'--word-symbol-table={}'.format(word_symbol_table),
os.path.abspath(model),
os.path.abspath(graph),
'ark:{}'.format(in_path),
'ark:|gzip -c > {}'.format(out_path)
])
self.env.run_cmd(cmd, stdin=None)
def score(self, data, graph_dir, decode_dir, cmd='utils/run.pl', min_lmwt=4, max_lmwt=15):
self.env.run_bash_script('local/score.sh', args=[
'--min-lmwt', str(min_lmwt),
'--max-lmwt', str(max_lmwt),
'--cmd', cmd,
os.path.abspath(data),
os.path.abspath(graph_dir),
os.path.abspath(decode_dir)
])
|
Python
| 0.000001
|
@@ -1361,94 +1361,84 @@
obs)
-,%0A os.path.abspath(graph_folder),%0A os.path.abspath(data_folder),
+%0A %5D%0A%0A if model is not None:%0A args.append('--model')
%0A
@@ -1438,32 +1438,44 @@
l')%0A
+args.append(
os.path.abspath(
@@ -1478,98 +1478,123 @@
ath(
-decode_folder)%0A %5D%0A%0A if model is not None:%0A args.append('--model')
+model))%0A%0A args.extend(%5B%0A os.path.abspath(graph_folder),%0A os.path.abspath(data_folder),
%0A
@@ -1598,36 +1598,24 @@
-args.append(
os.path.absp
@@ -1610,38 +1610,56 @@
os.path.abspath(
-model)
+decode_folder)%0A %5D
)%0A%0A self.
|
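The fix above reorders the decode() arguments so that --model precedes the positional paths, since steps/decode.sh parses option flags before positionals. A hedged sketch of the corrected ordering (paths are placeholders):

import os

def build_decode_args(graph, data, decode, model=None, nj=4):
    args = ['--nj', str(nj)]
    if model is not None:
        # option flags must come before the positional arguments
        args += ['--model', os.path.abspath(model)]
    args += [os.path.abspath(p) for p in (graph, data, decode)]
    return args

print(build_decode_args('graph', 'data', 'decode', model='final.mdl'))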
17dd75df619519a3a565d6d95d87f8aa20cf8cf7
|
Switch testing grid district
|
test_dingo2.py
|
test_dingo2.py
|
#!/usr/bin/env python3
import matplotlib.pyplot as plt
from oemof import db
from dingo.core import NetworkDingo
from dingo.tools import config as cfg_dingo
plt.close('all')
cfg_dingo.load_config('config_db_tables')
cfg_dingo.load_config('config_calc')
cfg_dingo.load_config('config_files')
cfg_dingo.load_config('config_misc')
# get engine for database connection
#conn = db.connection(db_section='ontohub_wdb', cfg_file='~/.dingo/config') # <-- TODO: include custom config file from given path (+input for oemof)
# instantiate dingo network object
nd = NetworkDingo(name='network')
# get database connection info from config file
conn = db.connection(section='oedb')
# mv_grid_districts=[360, 571, 593, 368, 491, 425, 416, 372, 387, 407, 403, 373, 482] # some MV grid_districts from SPF region
# mv_grid_districts=[360, 571, 593, 368, 491, 416, 372, 387, 407, 403, 373, 482] # some MV grid_districts from SPF region
# mv_grid_districts=[482]
# mv_grid_districts = [386,372,406,371,402,415,480,424,489,367,359,569,591]
mv_grid_districts=[386]
nd.import_mv_grid_districts(conn, mv_grid_districts)
nd.import_generators(conn)
nd.mv_parametrize_grid()
nd.mv_routing(debug=False, animation=False)
nd.connect_generators()
nd.set_branch_ids()
# Open and close all circuit breakers in grid (for testing)
#nd._mv_grid_districts[0].mv_grid.open_circuit_breakers()
#nd._mv_grid_districts[0].mv_grid.close_circuit_breakers()
nd._mv_grid_districts[0].mv_grid.export_to_pypsa(conn, single_half_ring=False)
nd.export_mv_grid(conn, mv_grid_districts)
conn.close()
# for edge in nd._mv_grid_districts[0].mv_grid.graph_edges():
# if edge['branch'].type is not None:
# print(edge['branch'].type['name'])
# else:
# print('None')
# lvrg = []
# for mv_grid_district in nd.mv_grid_districts():
# #print(mv_grid_district._lv_load_area_groups)
# #print(type(mv_grid_district._lv_load_area_groups))
# for lv_load_area_group in iter(mv_grid_district._lv_load_area_groups):
# lvrg.append([str(lv_load_area_group), lv_load_area_group.peak_load_sum, lv_load_area_group.branch_length_sum])
# lvrg = sorted(lvrg, key=lambda x: x[1])
#
# for lvrg_name, lvrg_load, lvrg_length in lvrg:
# print(lvrg_name, lvrg_load, lvrg_length)
#df = nx.to_pandas_dataframe(nd._mv_grid_districts[0].mv_grid._graph)
# import pprint
# for edge in nd._mv_grid_districts[0].mv_grid._graph.edge.keys():
# # print(edge, type(edge))
# pprint.pprint(edge)
# pprint.pprint(nd._mv_grid_districts[0].mv_grid._graph.edge[edge])
#nd._mv_grid_districts[0].mv_grid.graph_draw()
|
Python
| 0
|
@@ -1039,19 +1039,19 @@
tricts=%5B
-386
+489
%5D%0A%0And.im
|
910a715a2543b3e87879937da0aa0a6d63f33d7f
|
remove unused import
|
test_pillow.py
|
test_pillow.py
|
# MIT License
# Copyright (c) 2017 Tuxedo
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from pyseeta import Detector
from pyseeta import Aligner
from pyseeta import Identifier
try:
from PIL import Image, ImageDraw
import numpy as np
except ImportError:
raise ImportError('Pillow can not be found!')
def test_detector():
print('test detector:')
# load model
detector = Detector()
detector.set_min_face_size(30)
image_color = Image.open('data/chloecalmon.png').convert('RGB')
image_gray = image_color.convert('L')
faces = detector.detect(image_gray)
draw = ImageDraw.Draw(image_color)
for i, face in enumerate(faces):
print('({0},{1},{2},{3}) score={4}'.format(face.left, face.top, face.right, face.bottom, face.score))
draw.rectangle([face.left, face.top, face.right, face.bottom])
image_color.show()
detector.release()
def test_aligner():
print('test aligner:')
# load model
detector = Detector()
detector.set_min_face_size(30)
aligner = Aligner()
image_color = Image.open('data/chloecalmon.png').convert('RGB')
image_gray = image_color.convert('L')
faces = detector.detect(image_gray)
draw = ImageDraw.Draw(image_color)
draw.ellipse ((0,0,40,80), fill=128)
for face in faces:
landmarks = aligner.align(image_gray, face)
for point in landmarks:
x1, y1 = point[0] - 2, point[1] - 2
x2, y2 = point[0] + 2, point[1] + 2
draw.ellipse((x1,y1,x2,y2), fill='red')
image_color.show()
aligner.release()
detector.release()
def test_identifier():
print('test identifier:')
detector = Detector()
aligner = Aligner()
identifier = Identifier()
# load image
image_color_A = Image.open('data/single.jpg').convert('RGB')
image_gray_A = image_color_A.convert('L')
image_color_B = Image.open('data/double.jpg').convert('RGB')
image_gray_B = image_color_B.convert('L')
# detect face in image
faces_A = detector.detect(image_gray_A)
faces_B = detector.detect(image_gray_B)
draw_A = ImageDraw.Draw(image_color_A)
draw_B = ImageDraw.Draw(image_color_B)
if len(faces_A) and len(faces_B):
landmarks_A = aligner.align(image_gray_A, faces_A[0])
featA = identifier.extract_feature_with_crop(image_color_A, landmarks_A)
draw_A.rectangle([(faces_A[0].left, faces_A[0].top), (faces_A[0].right, faces_A[0].bottom)], outline='green')
sim_list = []
for face in faces_B:
landmarks_B = aligner.align(image_gray_B, face)
featB = identifier.extract_feature_with_crop(image_color_B, landmarks_B)
sim = identifier.calc_similarity(featA, featB)
sim_list.append(sim)
print('sim: {}'.format(sim_list))
index = np.argmax(sim_list)
for i, face in enumerate(faces_B):
color = 'green' if i == index else 'red'
draw_B.rectangle([(face.left, face.top), (face.right, face.bottom)], outline=color)
image_color_A.show()
image_color_B.show()
identifier.release()
aligner.release()
detector.release()
def test_cropface():
detector = Detector()
detector.set_min_face_size(30)
aligner = Aligner()
identifier = Identifier()
image_color = Image.open('data/chloecalmon.png').convert('RGB')
image_gray = image_color.convert('L')
import cv2
faces = detector.detect(image_gray)
for face in faces:
landmarks = aligner.align(image_gray, face)
crop_face = identifier.crop_face(image_color, landmarks)
Image.fromarray(crop_face).show()
identifier.release()
aligner.release()
detector.release()
if __name__ == '__main__':
test_detector()
# test_aligner()
# test_identifier()
# test_cropface()
|
Python
| 0.000001
|
@@ -4418,18 +4418,8 @@
-import cv2
%0A
|
898e97a38ea0510b743ca79d97444458274426b2
|
Add tests for queue predeclaration.
|
st2common/tests/unit/test_service_setup.py
|
st2common/tests/unit/test_service_setup.py
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tempfile
from st2common import service_setup
from st2tests.base import CleanFilesTestCase
from st2tests import config
__all__ = [
'ServiceSetupTestCase'
]
MOCK_LOGGING_CONFIG_INVALID_LOG_LEVEL = """
[loggers]
keys=root
[handlers]
keys=consoleHandler
[formatters]
keys=simpleConsoleFormatter
[logger_root]
level=invalid_log_level
handlers=consoleHandler
[handler_consoleHandler]
class=StreamHandler
level=DEBUG
formatter=simpleConsoleFormatter
args=(sys.stdout,)
[formatter_simpleConsoleFormatter]
class=st2common.logging.formatters.ConsoleLogFormatter
format=%(asctime)s %(levelname)s [-] %(message)s
datefmt=
""".strip()
class ServiceSetupTestCase(CleanFilesTestCase):
def test_no_logging_config_found(self):
def mock_get_logging_config_path():
return ''
config.get_logging_config_path = mock_get_logging_config_path
expected_msg = "No section: .*"
self.assertRaisesRegexp(Exception, expected_msg,
service_setup.setup, service='api',
config=config,
setup_db=False, register_mq_exchanges=False,
register_signal_handlers=False,
register_internal_trigger_types=False,
run_migrations=False)
def test_invalid_log_level_friendly_error_message(self):
_, mock_logging_config_path = tempfile.mkstemp()
self.to_delete_files.append(mock_logging_config_path)
with open(mock_logging_config_path, 'w') as fp:
fp.write(MOCK_LOGGING_CONFIG_INVALID_LOG_LEVEL)
def mock_get_logging_config_path():
return mock_logging_config_path
config.get_logging_config_path = mock_get_logging_config_path
expected_msg = 'Invalid log level selected. Log level names need to be all uppercase'
self.assertRaisesRegexp(KeyError, expected_msg,
service_setup.setup, service='api',
config=config,
setup_db=False, register_mq_exchanges=False,
register_signal_handlers=False,
register_internal_trigger_types=False,
run_migrations=False)
|
Python
| 0
|
@@ -794,43 +794,206 @@
le%0A%0A
-from st2common import service_setup
+import mock%0Afrom oslo_config import cfg%0A%0Afrom st2common import service_setup%0Afrom st2common.transport.bootstrap_utils import register_exchanges%0Afrom st2common.transport.bootstrap_utils import QUEUES
%0A%0Afr
@@ -3258,28 +3258,777 @@
run_migrations=False)%0A
+%0A @mock.patch('kombu.Queue.declare')%0A def test_register_exchanges_predeclare_queues(self, mock_declare):%0A # Verify that queues are correctly pre-declared if the corresponding config option is set%0A%0A # Pre-declaration is disabled%0A self.assertEqual(mock_declare.call_count, 0)%0A cfg.CONF.set_override(group='messaging', name='predeclare_queues', override=False)%0A%0A register_exchanges()%0A self.assertEqual(mock_declare.call_count, 0)%0A%0A # Pre-declaration is enabled%0A self.assertEqual(mock_declare.call_count, 0)%0A cfg.CONF.set_override(group='messaging', name='predeclare_queues', override=True)%0A%0A register_exchanges()%0A self.assertEqual(mock_declare.call_count, len(QUEUES))%0A
|
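The added test patches kombu's Queue.declare and counts calls under each config setting. A minimal standalone version of that patch-and-count technique, with stand-in classes rather than StackStorm code:

from unittest import mock

class Queue(object):
    def declare(self):
        pass  # pretend to declare the queue on the broker

def register(queues, predeclare):
    if predeclare:
        for q in queues:
            q.declare()

with mock.patch.object(Queue, 'declare') as mock_declare:
    queues = [Queue(), Queue()]
    register(queues, predeclare=False)
    assert mock_declare.call_count == 0            # disabled: nothing declared
    register(queues, predeclare=True)
    assert mock_declare.call_count == len(queues)  # enabled: one call per queue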
ea17679936442d8e5af90dcae72c003f708d7b0c
|
Fix check_user_support for custom user models
|
guardian/backends.py
|
guardian/backends.py
|
from __future__ import unicode_literals
from django.db import models
from guardian.compat import get_user_model
from guardian.conf import settings
from guardian.exceptions import WrongAppError
from guardian.core import ObjectPermissionChecker
def check_object_support(obj):
"""
Returns ``True`` if given ``obj`` is supported
"""
# Backend checks only object permissions (isinstance implies that obj
# is not None)
# Backend checks only permissions for Django models
return isinstance(obj, models.Model)
def check_user_support(user_obj):
"""
Returns a tuple of the check result and the ``user_obj`` that should be
used for permission checks
Checks if the given user is supported. Anonymous users need explicit
activation via ANONYMOUS_USER_NAME
"""
# This is how we support anonymous users - simply try to retrieve User
# instance and perform checks for that predefined user
if not user_obj.is_authenticated():
# If anonymous user permission is disabled then they are always
# unauthorized
if settings.ANONYMOUS_USER_NAME is None:
return False, user_obj
user_obj = get_user_model().objects.get(username=settings.ANONYMOUS_USER_NAME)
return True, user_obj
def check_support(user_obj, obj):
"""
Combination of ``check_object_support`` and ``check_user_support``
"""
obj_support = check_object_support(obj)
user_support, user_obj = check_user_support(user_obj)
return obj_support and user_support, user_obj
class ObjectPermissionBackend(object):
supports_object_permissions = True
supports_anonymous_user = True
supports_inactive_user = True
def authenticate(self, username, password):
return None
def has_perm(self, user_obj, perm, obj=None):
"""
Returns ``True`` if given ``user_obj`` has ``perm`` for ``obj``. If no
``obj`` is given, ``False`` is returned.
.. note::
Remember that if the user is not *active*, all checks return
``False``.
The main difference from Django's ``ModelBackend`` is that we can pass
``obj`` instance here and ``perm`` doesn't have to contain
``app_label`` as it can be retrieved from given ``obj``.
**Inactive user support**
If user is authenticated but inactive at the same time, all checks
always returns ``False``.
"""
# check if user_obj and object are supported
support, user_obj = check_support(user_obj, obj)
if not support:
return False
if '.' in perm:
app_label, perm = perm.split('.')
if app_label != obj._meta.app_label:
raise WrongAppError("Passed perm has app label of '%s' and "
"given obj has '%s'" % (app_label, obj._meta.app_label))
check = ObjectPermissionChecker(user_obj)
return check.has_perm(perm, obj)
def get_all_permissions(self, user_obj, obj=None):
"""
Returns a set of permission strings that the given ``user_obj`` has for ``obj``
"""
# check if user_obj and object are supported
support, user_obj = check_support(user_obj, obj)
if not support:
return set()
check = ObjectPermissionChecker(user_obj)
return check.get_perms(obj)
|
Python
| 0.000001
|
@@ -1153,24 +1153,20 @@
-u
+U
ser
-_obj
= get_u
@@ -1180,58 +1180,122 @@
el()
-.objects.get(username=settings.ANONYMOUS_USER_NAME
+%0A lookup = %7BUser.USERNAME_FIELD: settings.ANONYMOUS_USER_NAME%7D%0A user_obj = User.objects.get(**lookup
)%0A%0A
|
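The fix builds the anonymous-user lookup from User.USERNAME_FIELD instead of hard-coding username, so custom user models keep working. A stand-in sketch (in Django the class would come from get_user_model() and the lookup feed .objects.get()):

class User(object):
    USERNAME_FIELD = 'email'  # a custom user model may rename the field

ANONYMOUS_USER_NAME = 'anon@example.org'
lookup = {User.USERNAME_FIELD: ANONYMOUS_USER_NAME}
assert lookup == {'email': 'anon@example.org'}
# User.objects.get(**lookup) now resolves whatever the username field is called.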
112cb1eb06034f5afb24f9f1c20052a87d8a6374
|
Update pir_test.py
|
sensor_testing/pir_test.py
|
sensor_testing/pir_test.py
|
# parallax_pir_reva.py - write to screen when movement detected
# (c) BotBook.com - Karvinen, Karvinen, Valtokari
# 22.9.2017 modified by Vesa Valli
import time
import botbook_gpio as gpio
learningPeriod = 30
def main():
pirPin = 7
gpio.mode(pirPin,"in")
#Learning period
print ("learning... " + str(learningPeriod) + " seconds")
time.sleep(learningPeriod) # <1>
while (True):
movement = gpio.read(pirPin) # <2>
if(movement == gpio.HIGH):
print ("Movement detected " + time.ctime())
else:
print ("No movement detected " + time.ctime())
time.sleep(0.3)
if __name__ == "__main__":
main()
|
Python
| 0.000004
|
@@ -132,22 +132,21 @@
ied
-by Vesa Valli%0A
+from original
%0Aimp
|
e79b5ec2b31c69c80ea8123720b822ca825dab91
|
add find_or_create_or_refresh
|
client/__init__.py
|
client/__init__.py
|
# -*- encoding: utf-8 -*-
#
# Copyright 2015 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import os
import requests
import simplejson.scanner
import subprocess
import sys
import tempfile
import time
class DCIClient(object):
def __init__(self, end_point=None, login=None, password=None):
if not end_point and not login and not password:
end_point = os.environ['DCI_CONTROL_SERVER']
login = os.environ['DCI_LOGIN']
password = os.environ['DCI_PASSWORD']
self.end_point = end_point
self.s = requests.Session()
self.s.headers.setdefault('Content-Type', 'application/json')
self.s.auth = (login, password)
def delete(self, path):
return self.s.delete("%s%s" % (self.end_point, path))
def patch(self, path, data):
return self.s.patch(
"%s%s" % (self.end_point, path), data=json.dumps(data))
def post(self, path, data):
return self.s.post("%s%s" % (
self.end_point, path), data=json.dumps(data))
def get(self, path, where={}, embedded={}, params=None):
return self.s.get("%s%s?where=%s&embedded=%s" % (
self.end_point, path,
json.dumps(where),
json.dumps(embedded)), params=params)
def list_items(self, item_type, where={}, embedded={},
projection={}, page=1, max_results=10):
"""List the items for a given products.
Return an iterator.
"""
while True:
r = self.s.get(
'%s/%s?where=%s&embedded=%s'
'&projection=%s&page=%d&max_results=%d' % (
self.end_point,
item_type,
json.dumps(where),
json.dumps(embedded),
json.dumps(projection),
page,
max_results))
try:
rd = r.json()
except simplejson.scanner.JSONDecodeError as e:
print(r.text)
raise e
if '_items' in rd:
for item in rd['_items']:
yield item
if '_links' not in rd:
raise Exception
if 'next' not in rd['_links']:
break
page += 1
def upload_file(self, fd, jobstate_id, mime='text/plain', name=None):
fd.seek(0)
output = ""
while True:
s = fd.read(1024).decode("UTF-8")
output += s
if s == '':
break
if output:
data = {"name": name,
"content": output,
"mime": mime,
"jobstate_id": jobstate_id}
return self.post("/files", data)
def call(self, job_id, arg, cwd=None, env=None, ignore_error=False):
state = {"job_id": job_id,
"status": "ongoing",
"comment": "calling: %s" % " ".join(arg)}
jobstate_id = self.post("/jobstates", state).json()["id"]
print("Calling: %s" % arg)
try:
p = subprocess.Popen(arg,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
cwd=cwd,
env=env)
except OSError as e:
state = {"job_id": job_id,
"status": "failure",
"comment": "internal failure: %s" % e}
self.post("/jobstates", state)
raise DCIInternalFailure
f = tempfile.TemporaryFile()
f.write(("starting: %s\n" % " ".join(arg)).encode('utf-8'))
s = True
while p.returncode is None or s:
time.sleep(0.01)
s = os.read(p.stdout.fileno(), 10)
sys.stdout.write(s.decode('utf-8'))
f.write(s)
f.flush()
p.poll()
self.upload_file(f, jobstate_id, name='output.log')
if p.returncode != 0 and not ignore_error:
state = {"job_id": job_id,
"status": "failure",
"comment": "call failure w/ code %s" % (p.returncode)}
self.post("/jobstates", state)
raise DCICommandFailure
return jobstate_id
class DCIInternalFailure(Exception):
pass
class DCICommandFailure(Exception):
"""Raised when a user-defined command has failed"""
pass
|
Python
| 0.00001
|
@@ -598,16 +598,28 @@
cense.%0A%0A
+import copy%0A
import j
@@ -674,16 +674,27 @@
scanner%0A
+import six%0A
import s
@@ -4851,16 +4851,830 @@
ate_id%0A%0A
+ def find_or_create_or_refresh(self, path, data, unicity_key=%5B'name'%5D):%0A # TODO(Gon%C3%A9ri): need a test coverage%0A where = %7Bk: data%5Bk%5D for k in unicity_key%7D%0A items = self.get(path,%0A where=where).json()%0A try:%0A item = items%5B'_items'%5D%5B0%5D%0A data_to_patch = copy.copy(data)%0A for k, v in six.iteritems(data):%0A if json.dumps(item%5Bk%5D, sort_keys=True) %5C%0A == json.dumps(data_to_patch%5Bk%5D, sort_keys=True):%0A del(data_to_patch%5Bk%5D)%0A if len(data_to_patch) %3E 0:%0A self.patch(path + '/' + item%5B'id'%5D,%0A item%5B'etag'%5D,%0A data)%0A except IndexError:%0A item = self.post(path, data).json()%0A return item%0A%0A
%0Aclass D
|
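The find_or_create_or_refresh added in the diff is an idempotent upsert: fetch by a unicity key, drop fields whose canonical JSON is unchanged, and PATCH only if something differs. A hedged sketch of the same pattern over a plain dict store:

import copy
import json

def canon(value):
    return json.dumps(value, sort_keys=True)  # canonical form for comparison

def find_or_create_or_refresh(store, data, unicity_key=('name',)):
    key = tuple(data[k] for k in unicity_key)
    if key in store:
        item = store[key]
        patch = {k: v for k, v in data.items() if canon(item.get(k)) != canon(v)}
        if patch:
            item.update(patch)  # PATCH only the changed fields
    else:
        item = store[key] = copy.copy(data)  # POST on first sight
    return item

store = {}
find_or_create_or_refresh(store, {'name': 'job1', 'status': 'new'})
find_or_create_or_refresh(store, {'name': 'job1', 'status': 'done'})
assert store[('job1',)]['status'] == 'done'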
6c17a81685f4f1b24cefb4760b26e9a33298742c
|
Bump to v1.10.0
|
client/__init__.py
|
client/__init__.py
|
__version__ = 'v1.9.6'
FILE_NAME = 'ok'
import os
import sys
sys.path.insert(0, '')
# Add directory in which the ok.zip is stored to sys.path.
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
|
Python
| 0.000001
|
@@ -15,11 +15,12 @@
'v1.
-9.6
+10.0
'%0A%0AF
|
86d59bbcad5d33e9a4cbad473a36972d29ddbaf0
|
missing comma
|
src/rwtypes/writers/activemq/ActivemqWriter.py
|
src/rwtypes/writers/activemq/ActivemqWriter.py
|
import datetime
import time
import json
from third.stomp import stomp_sender
from __writer import Writer
class ActivemqWriter(Writer):
def write(self, msg):
try:
headers = {'destination' : self.destination,
'eventtype' : self.eventtype
'timestamp' : int(time.time()*1000)}
for item in msg:
if isinstance(msg[item], datetime.datetime):
msg[item] = msg[item].isoformat()
body = json.dumps(msg)
stomp_sender.send_message_via_stomp([(self.host, self.port)], headers, body)
return True
except Exception, e:
print e
return False
|
Python
| 0.999885
|
@@ -279,16 +279,17 @@
venttype
+,
%0A
|
be4535f3671fd1e9e58ac4a82d3a9640ff7e2382
|
remove unused imports
|
fasttld/FastTLDExtract.py
|
fasttld/FastTLDExtract.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@author: Jophy and Wu Tingfeng
@file: psl.py
Copyright (c) 2022 Wu Tingfeng
Copyright (c) 2017-2018 Jophy
"""
import re
import socket
from operator import itemgetter
import idna
from fasttld.psl import getPublicSuffixList, update
IP_RE = re.compile(
r"^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}"
r"([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$"
)
# Characters valid in scheme names
SCHEME_RE = re.compile(r"^[A-Za-z0-9+-.]+://")
def looks_like_ip(maybe_ip):
"""Does the given str look like an IP address?"""
try:
socket.inet_aton(maybe_ip)
return True
except socket.error: # for Python 2 compatibility
pass
except (AttributeError, UnicodeError):
if IP_RE.match(maybe_ip):
return True
return False
class FastTLDExtract(object):
def __init__(self, exclude_private_suffix=False, file_path=''):
self.trie = self._trie_construct(exclude_private_suffix, file_path)
def update(self, *args, **kwargs):
update(*args, **kwargs)
def nested_dict(self, dic, keys):
"""
The idea of this function is based on https://stackoverflow.com/questions/13687924
:param dic:
:param keys:
:return:
"""
for key in keys[:-1]:
dic_bk = dic
if key not in dic:
dic[key] = {}
dic = dic[key]
if isinstance(dic, bool):
dic = dic_bk
dic[keys[-2]] = {
'_END': True,
keys[-1]: True
}
dic[keys[-1]] = True
def _trie_construct(self, exclude_private_suffix, file_path=''):
"""
This function builds a trie structure based on the Mozilla Public Suffix List.
To construct it, all suffixes are stored with their labels in reverse order.
For example, www.google.com -> com.google.www
:return: a trie dict
"""
tld_trie = {}
PublicSuffixList, PrivateSuffixList, AllSuffixList = getPublicSuffixList(file_path)
SuffixList = PublicSuffixList if exclude_private_suffix else AllSuffixList
for suffix in SuffixList:
if '.' in suffix:
sp = suffix.split('.')
sp.reverse()
self.nested_dict(tld_trie, sp)
else:
tld_trie[suffix] = {'_END': True}
for key, val in tld_trie.items():
if len(val) == 1 and '_END' in val:
tld_trie[key] = True
return tld_trie
def __call__(self, *args, **kwargs):
return self.extract(*args, **kwargs)
def extract(self, raw_url, subdomain=True, format=False):
"""
Extract suffix and subdomain from a Domain.
:param raw_url:
:param subdomain: Whether to also extract the subdomain. Enabling this reduces efficiency by roughly 10%.
:param format: Whether to normalise the raw_url string before extraction.
:return: Tuple(subdomain, domain, suffix, domain_name)
>>> FastTLDExtract.extract('www.google.com.hk', subdomain=True)
>>> ('www', 'google', 'com.hk', 'google.com.hk')
>>> FastTLDExtract.extract('127.0.0.1', subdomain=True)
>>> ('', '127.0.0.1', '', '127.0.0.1')
"""
ret_subdomain = ret_domain = ret_suffix = ret_domain_name = ''
if format:
raw_url = self.format(raw_url)
# Borrowed from tldextract library (https://github.com/john-kurkowski/tldextract)
# Use regex to strip raw_url of scheme subcomponent and anything after host subcomponent
# Reference: https://en.wikipedia.org/wiki/Uniform_Resource_Identifier#Syntax
netloc = (
SCHEME_RE.sub("", raw_url)
.partition("/")[0]
.partition("?")[0]
.partition("#")[0]
.split("@")[-1]
.partition(":")[0]
.strip()
.rstrip(".")
)
# Determine if raw_url is an IP address
if len(netloc) != 0 and looks_like_ip(netloc):
return ("", netloc, "", netloc)
labels = netloc.split(".")
labels.reverse()
node = self.trie # define the root node
suffix = []
for label in labels:
if node is True: # or alternatively if type(node) is not dict:
# This node is an end node.
ret_domain = label
break
# This node has sub-nodes and maybe an end-node.
# eg. cn -> (cn, gov.cn)
if '_END' in node:
# check if there is a sub node
# eg. gov.cn
if label in node:
suffix.append(label)
node = node[label]
continue
if '*' in node:
# check if there is a sub node
# eg. www.ck
if ("!%s" % label) in node:
ret_domain = label
else:
suffix.append(label)
break
# check a TLD in PSL
if label in node:
suffix.append(label)
node = node[label]
else:
break
suffix.reverse()
len_suffix = len(suffix)
len_labels = len(labels)
ret_suffix = ".".join(suffix)
if 0 < len_suffix < len_labels:
ret_domain = labels[len_suffix]
if subdomain:
if len_suffix + 1 < len_labels:
ret_subdomain = netloc[:-(len(ret_domain) + len(ret_suffix) + 2)]
if ret_domain and ret_suffix:
ret_domain_name = "%s.%s" % (ret_domain, ret_suffix)
return (ret_subdomain,
ret_domain,
ret_suffix,
ret_domain_name
)
def format(self, raw_url):
"""
Applies simple normalisation rules to the string,
e.g. lower-casing and a punycode transform.
Todo:
1.URL Parser to extract domain.
2.idna domain parser
:param raw_url:
:return: input
"""
# idna_url = idna.encode(raw_url.strip().lower()).decode()
# input_ = urlparse.urlparse(idna_url).netloc
# if '//' in input_:
# _, _, input_ = input_.rpartition('//')
# if '/' in input_:
# input_, _, _ = input_.lpartition('//')
# return input_
# Punycode costs too much time! Make sure you really need it.
return idna.encode(raw_url.strip().lower()).decode()
|
Python
| 0.000001
|
@@ -182,40 +182,8 @@
cket
-%0Afrom operator import itemgetter
%0A%0Aim
|
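A tiny standalone version of the reversed-label suffix trie described in _trie_construct above: 'co.uk' is stored as uk -> co, so extraction can walk a hostname's labels right to left:

def build_trie(suffixes):
    trie = {}
    for suffix in suffixes:
        node = trie
        for label in reversed(suffix.split('.')):
            node = node.setdefault(label, {})
        node['_END'] = True  # a complete public suffix ends here
    return trie

trie = build_trie(['com', 'co.uk', 'uk'])
assert '_END' in trie['uk']['co']  # co.uk is a suffix
assert '_END' in trie['com']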
dac03e2a04e69622c3afa6d3386a8b3f5a39b9a4
|
Fix one-day all-day events
|
features/content/forms.py
|
features/content/forms.py
|
import django.db.transaction
from django import forms
import core.forms
from . import models
from features.associations import models as associations
from features.contributions import forms as contributions
from features.groups import models as groups
class Comment(contributions.Text):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.helper.layout.append(core.forms.Submit('Kommentieren'))
class Create(forms.ModelForm):
class Meta:
model = associations.Association
fields = ('pinned', 'public')
group = forms.ModelChoiceField(
label='Veröffentlichen als', queryset=groups.Group.objects.none(), required=False,
widget=core.forms.GroupSelect)
text = forms.CharField(label='Text', widget=core.forms.EditorTextarea)
title = forms.CharField(label='Titel')
place = forms.CharField(label='Veranstaltungsort / Anschrift', max_length=255)
time = forms.DateTimeField(label='Beginn')
until_time = forms.DateTimeField(label='Ende')
all_day = forms.BooleanField(
label='ganztägig', help_text='Das Ereignis dauert den ganzen Tag.', required=False)
def __init__(self, **kwargs):
self.author = kwargs.pop('author')
with_time = kwargs.pop('with_time')
super().__init__(**kwargs)
if self.instance.entity.is_group:
del self.fields['group']
else:
self.fields['group'].queryset = groups.Group.objects.filter(
memberships__member=self.author)
if not with_time:
del self.fields['place']
del self.fields['time']
del self.fields['until_time']
del self.fields['all_day']
def save(self, commit=True):
with django.db.transaction.atomic():
if not self.instance.entity.is_group and self.cleaned_data['group']:
self.instance.entity = self.cleaned_data['group']
self.instance.slug = core.models.get_unique_slug(
associations.Association, {
'entity_id': self.instance.entity_id,
'entity_type': self.instance.entity_type,
'slug': core.text.slugify(self.cleaned_data['title']),
})
self.instance.container = models.Content.objects.create(
title=self.cleaned_data['title'],
place=self.cleaned_data.get('place', ''),
time=self.cleaned_data.get('time'),
until_time=self.cleaned_data.get('until_time'),
all_day=self.cleaned_data.get('all_day', False))
self.instance.container.versions.create(
author=self.author, text=self.cleaned_data['text'])
return super().save(commit)
class Update(forms.ModelForm):
class Meta:
model = associations.Association
fields = ('pinned', 'public', 'slug')
title = forms.CharField(label='Titel')
text = forms.CharField(label='Text', widget=core.forms.EditorTextarea())
place = forms.CharField(label='Veranstaltungsort / Anschrift', max_length=255)
time = forms.DateTimeField(label='Beginn')
until_time = forms.DateTimeField(label='Ende')
all_day = forms.BooleanField(
label='ganztägig', help_text='Das Ereignis dauert den ganzen Tag.', required=False)
def __init__(self, **kwargs):
self.author = kwargs.pop('author')
super().__init__(**kwargs)
if not self.instance.entity.is_group:
del self.fields['pinned']
if self.instance.public:
del self.fields['public']
if not self.initial['time']:
del self.fields['place']
del self.fields['time']
del self.fields['until_time']
del self.fields['all_day']
def clean_slug(self):
q = associations.Association.objects.filter(
entity_type=self.instance.entity_type, entity_id=self.instance.entity_id,
slug=self.cleaned_data['slug'])
if q.exists() and q.get() != self.instance:
raise forms.ValidationError('Der Kurzname ist bereits vergeben.', code='unique')
return self.cleaned_data['slug']
def save(self, commit=True):
association = super().save(commit)
association.container.title = self.cleaned_data['title']
if self.initial['time']:
association.container.place = self.cleaned_data['place']
association.container.time = self.cleaned_data['time']
association.container.until_time = self.cleaned_data['until_time']
association.container.all_day = self.cleaned_data['all_day']
association.container.save()
association.container.versions.create(author=self.author, text=self.cleaned_data['text'])
return association
|
Python
| 0.999999
|
@@ -1029,32 +1029,48 @@
eld(label='Ende'
+, required=False
)%0A all_day =
@@ -3291,16 +3291,32 @@
l='Ende'
+, required=False
)%0A al
|
4b54488dd2b40254f6217d98c37690dcb37cf783
|
fix false origin on replies
|
halibot/halmodule.py
|
halibot/halmodule.py
|
from .halobject import HalObject
from .message import Message
class HalModule(HalObject):
def reply(self, msg0=None, **kwargs):
# Create the reply message
body = kwargs.get('body', msg0.body)
mtype = kwargs.get('type', msg0.type)
author = kwargs.get('author', msg0.author)
origin = kwargs.get('origin', msg0.origin)
msg = Message(body=body, type=mtype, author=author, origin=origin)
# Synchronous reply?
if msg0.sync:
self.sync_replies[msg0.uuid].append(msg)
else:
self.send_to(msg, [ msg.origin ])
def hasPermission(self, msg, perm):
return self._hal.auth.hasPermission(msg.origin, msg.identity, perm)
|
Python
| 0.000003
|
@@ -313,19 +313,17 @@
n',
-msg0.origin
+self.name
)%0A%0A%09
@@ -508,16 +508,17 @@
g, %5B msg
+0
.origin
|
13b5a5ade126b39c683f3f81968b9350fc0f6ba2
|
Remove redundant FakeSchemaAPI __init__ method
|
tests/utils.py
|
tests/utils.py
|
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import json
import six
import six.moves.urllib.parse as urlparse
import testtools
from glanceclient.v2.schemas import Schema
class FakeAPI(object):
def __init__(self, fixtures):
self.fixtures = fixtures
self.calls = []
def _request(self, method, url, headers=None, data=None,
content_length=None):
call = build_call_record(method, sort_url_by_query_keys(url),
headers or {}, data)
if content_length is not None:
call = tuple(list(call) + [content_length])
self.calls.append(call)
fixture = self.fixtures[sort_url_by_query_keys(url)][method]
data = fixture[1]
if isinstance(fixture[1], six.string_types):
try:
data = json.loads(fixture[1])
except ValueError:
data = six.StringIO(fixture[1])
return FakeResponse(fixture[0], fixture[1]), data
def get(self, *args, **kwargs):
return self._request('GET', *args, **kwargs)
def post(self, *args, **kwargs):
return self._request('POST', *args, **kwargs)
def put(self, *args, **kwargs):
return self._request('PUT', *args, **kwargs)
def patch(self, *args, **kwargs):
return self._request('PATCH', *args, **kwargs)
def delete(self, *args, **kwargs):
return self._request('DELETE', *args, **kwargs)
def head(self, *args, **kwargs):
return self._request('HEAD', *args, **kwargs)
class FakeSchemaAPI(FakeAPI):
def __init__(cls, *args):
super(FakeSchemaAPI, cls).__init__(*args)
def get(self, *args, **kwargs):
_, raw_schema = self._request('GET', *args, **kwargs)
return Schema(raw_schema)
class RawRequest(object):
def __init__(self, headers, body=None,
version=1.0, status=200, reason="Ok"):
"""
:param headers: dict representing HTTP response headers
:param body: file-like object
:param version: HTTP Version
:param status: Response status code
:param reason: Status code related message.
"""
self.body = body
self.status = status
self.reason = reason
self.version = version
self.headers = headers
def getheaders(self):
return copy.deepcopy(self.headers).items()
def getheader(self, key, default):
return self.headers.get(key, default)
def read(self, amt):
return self.body.read(amt)
class FakeResponse(object):
def __init__(self, headers=None, body=None,
version=1.0, status_code=200, reason="Ok"):
"""
:param headers: dict representing HTTP response headers
:param body: file-like object
:param version: HTTP Version
:param status: Response status code
:param reason: Status code related message.
"""
self.body = body
self.reason = reason
self.version = version
self.headers = headers
self.status_code = status_code
self.raw = RawRequest(headers, body=body, reason=reason,
version=version, status=status_code)
@property
def ok(self):
return (self.status_code < 400 or
self.status_code >= 600)
def read(self, amt):
return self.body.read(amt)
def close(self):
pass
@property
def content(self):
if hasattr(self.body, "read"):
return self.body.read()
return self.body
@property
def text(self):
if isinstance(self.content, six.binary_type):
return self.content.decode('utf-8')
return self.content
def json(self, **kwargs):
return self.body and json.loads(self.text) or ""
def iter_content(self, chunk_size=1, decode_unicode=False):
while True:
chunk = self.raw.read(chunk_size)
if not chunk:
break
yield chunk
class TestCase(testtools.TestCase):
TEST_REQUEST_BASE = {
'config': {'danger_mode': False},
'verify': True}
class FakeTTYStdout(six.StringIO):
"""A Fake stdout that try to emulate a TTY device as much as possible."""
def isatty(self):
return True
def write(self, data):
# When a CR (carriage return) is found reset file.
if data.startswith('\r'):
self.seek(0)
data = data[1:]
return six.StringIO.write(self, data)
class FakeNoTTYStdout(FakeTTYStdout):
"""A Fake stdout that is not a TTY device."""
def isatty(self):
return False
def sort_url_by_query_keys(url):
"""A helper function which sorts the keys of the query string of a url.
For example, an input of '/v2/tasks?sort_key=id&sort_dir=asc&limit=10'
returns '/v2/tasks?limit=10&sort_dir=asc&sort_key=id'. This is to
prevent non-deterministic ordering of the query string causing
problems with unit tests.
:param url: url which will be ordered by query keys
:returns url: url with ordered query keys
"""
parsed = urlparse.urlparse(url)
queries = urlparse.parse_qsl(parsed.query, True)
sorted_query = sorted(queries, key=lambda x: x[0])
encoded_sorted_query = urlparse.urlencode(sorted_query, True)
url_parts = (parsed.scheme, parsed.netloc, parsed.path,
parsed.params, encoded_sorted_query,
parsed.fragment)
return urlparse.urlunparse(url_parts)
def build_call_record(method, url, headers, data):
"""Key the request body be ordered if it's a dict type.
"""
if isinstance(data, dict):
data = sorted(data.items())
if isinstance(data, six.string_types):
# NOTE(flwang): For image update, the data will be a 'list' which
# contains operation dict, such as: [{"op": "remove", "path": "/a"}]
try:
data = json.loads(data)
except ValueError:
return (method, url, headers or {}, data)
data = [sorted(d.items()) for d in data]
return (method, url, headers or {}, data)
|
Python
| 0.000083
|
@@ -2176,89 +2176,8 @@
I):%0A
- def __init__(cls, *args):%0A super(FakeSchemaAPI, cls).__init__(*args)%0A%0A
|
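The behaviour promised in sort_url_by_query_keys' docstring checks out standalone; stdlib urllib.parse stands in here for the six.moves import used above:

import urllib.parse as urlparse

def sort_url_by_query_keys(url):
    parsed = urlparse.urlparse(url)
    queries = urlparse.parse_qsl(parsed.query, True)
    encoded = urlparse.urlencode(sorted(queries, key=lambda x: x[0]), True)
    return urlparse.urlunparse((parsed.scheme, parsed.netloc, parsed.path,
                                parsed.params, encoded, parsed.fragment))

assert (sort_url_by_query_keys('/v2/tasks?sort_key=id&sort_dir=asc&limit=10')
        == '/v2/tasks?limit=10&sort_dir=asc&sort_key=id')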
8dd9d4bf58e976ca40bcafa7249ed3140b77ea69
|
fix cfg parsing
|
tf2director.py
|
tf2director.py
|
#!/usr/bin/env python3
import os
import sys
from argparse import ArgumentParser
from configparser import ConfigParser
import actions
from tf2server import Tf2Server
def main():
"""
Parse command line options, read config and run desired action.
"""
description = 'tf2director is a script that helps manage multiple Team Fortress 2 server instances.'
parser = ArgumentParser(description=description)
parser.add_argument('server', help='server to be used or "all"', metavar='server')
parser.add_argument('action', choices=['start', 'stop', 'restart', 'console', 'update', 'status'],
help='action to do', metavar='action')
args = parser.parse_args()
home = os.path.expanduser('~')
config_file = os.path.join(home, 'tf2director.ini')
if not os.path.isfile(config_file):
print('Config file missing (' + config_file + ')')
sys.exit(1)
config = ConfigParser()
config.read(config_file)
if 'all' in config:
raise ValueError('A server cannot be named \'all\'!')
if args.server not in config and args.server != 'all':
raise ValueError('Server \'{0}\' is not configured'.format(args.server))
servers = []
if args.server == 'all':
for s in config.sections():
c = config[s]
server = Tf2Server(s, os.path.expanduser(c['path']))
server.ip = c['ip']
server.port = c['port']
server.initial_map = c['initial_map']
server.cfg_file = c['server_config']
server.max_players = c['max_players']
servers.append(server)
else:
c = config[args.server]
path = c['path']
server = Tf2Server(args.server, os.path.expanduser(path))
server.ip = c['ip']
server.port = c['port']
server.initial_map = c['initial_map']
server.cfg_file = c['server_config']
server.max_players = c['max_players']
server.tv_port = int(c['tv_port']) if 'tv_port' in c else server.port + 5
servers.append(server)
try:
if args.action == 'start':
actions.start(servers)
elif args.action == 'stop':
actions.stop(servers)
elif args.action == 'restart':
actions.restart(servers)
elif args.action == 'console':
if len(servers) == 1:
server = servers[0]
server.attach()
elif args.action == 'update':
actions.update(servers)
elif args.action == 'status':
actions.status(servers)
except ValueError as error:
print('{0}'.format(error))
if __name__ == '__main__':
main()
|
Python
| 0.000007
|
@@ -2025,16 +2025,20 @@
c else
+int(
server.p
@@ -2040,16 +2040,17 @@
ver.port
+)
+ 5%0A%0A
|
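ConfigParser hands every value back as a str, which is why the fix wraps the port in int() before adding 5. A quick demonstration:

from configparser import ConfigParser

config = ConfigParser()
config.read_string("[myserver]\nport = 27015\n")
c = config['myserver']
assert c['port'] == '27015'   # a string, not an int
tv_port = int(c['port']) + 5  # cast before doing arithmetic
assert tv_port == 27020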
cd3cdf787bcd878e3a0a39641cbf8ba35a7e62f2
|
Update logging
|
jobcrawl/selenium_scraper.py
|
jobcrawl/selenium_scraper.py
|
import time
import logging
from selenium import webdriver
from pyvirtualdisplay import Display
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.remote.remote_connection import LOGGER
from selenium.common.exceptions import WebDriverException
LOGGER.setLevel(logging.WARNING)
logging.getLogger("requests").setLevel(logging.WARNING)
logging.getLogger("urllib3").setLevel(logging.WARNING)
class DrushimScraper(object):
WAIT_TIME = 10
def __init__(self, url, log):
self.log = log
self.url = url
self.total_crash_count = 0
self.crash_count = 0
display = Display(visible=0, size=(800, 800))
display.start()
self.init_driver()
def init_driver(self):
self.close_driver()
chrome_options = Options()
chrome_options.page_load_strategy = 'eager'
chrome_options.add_argument("start-maximized")
chrome_options.add_argument("enable-automation")
chrome_options.add_argument("--headless")
chrome_options.add_argument("--no-sandbox")
chrome_options.add_argument("--disable-infobars")
chrome_options.add_argument("--disable-dev-shm-usage")
chrome_options.add_argument("--disable-browser-side-navigation")
chrome_options.add_argument("--disable-gpu")
chrome_options.add_argument("--enable-javascript")
chrome_driver = '/usr/local/bin/chromedriver'
self.driver = webdriver.Chrome(chrome_options=chrome_options, executable_path=chrome_driver)
def scrape(self, offset=None):
self.log.info("Scraping %s", self.url)
# self.driver.implicitly_wait(self.WAIT_TIME)
self.driver.get(self.url)
if offset is None:
yield self.driver.page_source
page_count = 1
while True:
try:
if not self.click_load_jobs_button(page_count):
break
except WebDriverException:
self.init_driver()
time.sleep(1)
self.scrape(offset=page_count)
break
if offset is not None and page_count < offset:
page_count += 1
time.sleep(1)
continue
time.sleep(10)
yield self.driver.page_source
page_count += 1
def click_load_jobs_button(self, page_count):
self.log.info("Clicking load jobs button: Pages scraped = %s", page_count)
if page_count == 1:
try:
close_btn_cls = 'v-icon notranslate font-weight-bold mdi mdi-close theme--dark'
close_btn = WebDriverWait(self.driver, self.WAIT_TIME).until(expected_conditions.visibility_of_element_located((
By.XPATH, "//i[@class='{}']".format(close_btn_cls))))
close_btn.click()
except:
self.log.exception("Failed to click close btn")
try:
load_more_jobs = WebDriverWait(self.driver, self.WAIT_TIME).until(
expected_conditions.visibility_of_element_located((
By.XPATH,
"//button[@class='v-btn v-btn--contained theme--light v-size--default load_jobs_btn ']")))
load_more_jobs.click()
except WebDriverException as e:
self.crash_count += 1
self.total_crash_count += 1
if "session deleted because of page crash" in str(e):
self.log.exception("CLicking load jobs button failed coz of page crash: crash_count={}, total_crash_count={}"
"".format(self.crash_count, self.total_crash_count))
raise e
self.log.exception("CLicking load jobs button failed: crash_count={}, total_crash_count={}".format(self.crash_count, self.total_crash_count))
return
except:
self.crash_count += 1
self.total_crash_count += 1
self.log.exception("CLicking load jobs button failed: crash_count={}, total_crash_count={}".format(self.crash_count, self.total_crash_count))
return
self.crash_count = 0
self.log.info("Load more jobs button clicked successfully")
return True
def close_driver(self):
try:
self.driver.close()
except:
pass
if __name__ == '__main__':
import logging
self = DrushimScraper('https://www.drushim.co.il/jobs/search/%22%22/?ssaen=1', logging.getLogger())
count = 0
for page_source in self.scrape():
count += 1
if count == 5:
break
print("All Done")
self.close_driver()
|
Python
| 0.000001
|
@@ -3537,28 +3537,16 @@
unt += 1
-
%0A
@@ -3728,16 +3728,17 @@
count=%7B%7D
+,
%22%0A
@@ -3767,16 +3767,24 @@
%22
+ page=%7B%7D
%22.format
@@ -3816,32 +3816,44 @@
otal_crash_count
+, page_count
))%0A
@@ -3957,32 +3957,75 @@
l_crash_count=%7B%7D
+, page=%7B%7D%22%0A %22
%22.format(self.cr
@@ -4049,32 +4049,44 @@
otal_crash_count
+, page_count
))%0A r
@@ -4283,16 +4283,59 @@
count=%7B%7D
+, page=%7B%7D%22%0A %22
%22.format
@@ -4371,24 +4371,36 @@
_crash_count
+, page_count
))%0A
@@ -4504,17 +4504,46 @@
essfully
-%22
+ (page=%7B%7D)%22.format(page_count)
)%0A
|
4123beacbd89128193c69c1b61f77acdd82419ad
|
version 0.1.9
|
jokekappa/__init__.py
|
jokekappa/__init__.py
|
# coding: utf-8
from jokekappa.core import get_joke, get_jokes, update_jokes # noqa: F401
__version__ = '0.1.8'
|
Python
| 0.000002
|
@@ -106,11 +106,11 @@
= '0.1.
-8
+9
'%0A
|
c802426e1c7e45ed456ad92a8b88ab18fba59aa3
|
Update the management command 'clone_metadata' in the ELOs module; add function documentation comments
|
commonrepo/elos/management/commands/clone_metadata.py
|
commonrepo/elos/management/commands/clone_metadata.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from commonrepo.elos.models import ELO, ELOMetadata
class Command(BaseCommand):
help = 'Clone Metadata of ELOs'
def add_arguments(self, parser):
parser.add_argument('--source',
type=int,
help='Build Metadata of specific ELOs')
parser.add_argument('--target',
nargs='+',
type=int,
help='Build Metadata of specific ELOs')
def handle(self, *args, **options):
try:
elo_source = ELO.objects.get(id=options['source'])
except ELO.DoesNotExist:
raise CommandError('Source ELO "%s" does not exist' % options['source'])
if not elo_source.metadata:
raise CommandError('Source Metadata of ELO "%s" does not exist' % elo_source.id)
for target in options['target']:
try:
elo_target = ELO.objects.get(id=target)
except ELO.DoesNotExist:
raise CommandError('ELO "%s" does not exist' % target)
# Delete original metadata
if elo_target.metadata:
elo_target.metadata.delete()
metadata = elo_source.metadata
metadata.pk = None
metadata.save()
elo_target.metadata = metadata
elo_target.save()
self.stdout.write('Successfully clone Metadata to target ELO "%s"' % elo_target.id)
|
Python
| 0
|
@@ -17,16 +17,883 @@
f-8 -*-%0A
+%0A#%0A# Copyright 2016 edX PDR Lab, National Central University, Taiwan.%0A#%0A# http://edxpdrlab.ncu.cc/%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22);%0A# you may not use this file except in compliance with the License.%0A# You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A#%0A# Created By: yrchen@ATCity.org%0A# Maintained By: yrchen@ATCity.org%0A#%0A%0A'''%0AManagement command %60%60clone_metadata%60%60 of ELOs in Common Repo projects.%0A%0AThis command will clone the metadata related with specefic ELOs.%0A'''%0A%0A
from __f
@@ -1095,16 +1095,17 @@
tadata%0A%0A
+%0A
class Co
@@ -1702,32 +1702,49 @@
se CommandError(
+%0A
'Source ELO %22%25s%22
@@ -1761,16 +1761,32 @@
exist' %25
+%0A
options
@@ -1865,16 +1865,33 @@
ndError(
+%0A
'Source
@@ -1928,16 +1928,32 @@
exist' %25
+%0A
elo_sou
@@ -2511,16 +2511,33 @@
t.write(
+%0A
'Success
@@ -2578,16 +2578,32 @@
%22%25s%22' %25
+%0A
elo_tar
|
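The command above duplicates a row with the pk = None idiom: clearing the primary key before save() makes Django INSERT a fresh row, and the instance then refers to the new one. A stand-in model illustrates the mechanics:

class FakeModel(object):
    _next_pk = [1]  # simulates the database assigning ids

    def save(self):
        if getattr(self, 'pk', None) is None:
            self.pk = self._next_pk[0]
            self._next_pk[0] += 1

metadata = FakeModel()
metadata.save()          # first save: gets pk 1
metadata.pk = None       # forget identity ...
metadata.save()          # ... so save() creates a new row
assert metadata.pk == 2  # the instance now points at the clone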
c01c97583e11bfe1c41dd41e7b39d19be22fbb7c
|
use the real paths
|
tools/build.py
|
tools/build.py
|
#!/usr/bin/env python
import os
import subprocess
import sys
# TODO: release/debug
root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
build_dir = os.path.join(root, 'out')
def build():
if sys.platform != "win32":
cmd = 'make -C %s' % build_dir
else:
cmd = 'tools\win_build.bat'
print cmd
sys.exit(subprocess.call(cmd, shell=True))
def test():
agent = os.path.join(root, 'monitoring-agent')
cmd = '%s --zip monitoring-test.zip -e tests -c docs/sample.state' % agent
print cmd
rc = subprocess.call(cmd, shell=True)
sys.exit(rc)
commands = {
'build': build,
'test': test,
}
def usage():
print('Usage: build.py [%s]' % ', '.join(commands.keys()))
sys.exit(1)
if len(sys.argv) != 2:
usage()
ins = sys.argv[1]
if not commands.has_key(ins):
print('Invalid command: %s' % ins)
sys.exit(1)
print('Running %s' % ins)
cmd = commands.get(ins)
cmd()
|
Python
| 0.000017
|
@@ -406,16 +406,32 @@
in(root,
+ 'out', 'Debug',
'monito
@@ -461,16 +461,26 @@
s --zip
+out/Debug/
monitori
|
d0bdd50a8ca6367b08434075ab9573f5e412a197
|
typeify dbschema
|
graphscale/kvetch/dbschema.py
|
graphscale/kvetch/dbschema.py
|
from warnings import filterwarnings, resetwarnings
import contextlib
import pymysql
import graphscale.check as check
from graphscale.utils import execute_gen
from .kvetch import IndexDefinition, IndexType
@contextlib.contextmanager
def disable_pymysql_warnings():
filterwarnings('ignore', category=pymysql.Warning)
yield
resetwarnings()
def execute_ddl(shard, ddl):
with disable_pymysql_warnings():
with shard.create_safe_conn() as conn:
with conn.cursor() as cursor:
cursor.execute(ddl)
return ddl
def create_kvetch_objects_table_sql():
return """CREATE TABLE IF NOT EXISTS kvetch_objects (
row_id INT NOT NULL AUTO_INCREMENT PRIMARY KEY,
obj_id BINARY(16) NOT NULL,
type_id INT NOT NULL,
created DATETIME NOT NULL,
updated DATETIME NOT NULL,
body MEDIUMBLOB,
UNIQUE KEY (obj_id),
UNIQUE KEY (type_id, obj_id),
KEY (updated)
) ENGINE=InnoDB;
"""
def create_kvetch_index_table_sql(index_column, index_sql_type, target_column, index_name):
check.param(index_column, str, 'index_column')
check.param(target_column, str, 'target_column')
check.param(index_name, str, 'index_name')
# something is up here. the two indexing keys (not updated) should be unique
return """CREATE TABLE IF NOT EXISTS %s (
row_id INT NOT NULL AUTO_INCREMENT PRIMARY KEY,
%s %s NOT NULL,
%s BINARY(16) NOT NULL,
created DATETIME NOT NULL,
KEY (%s, %s),
KEY (%s, %s),
KEY (created)
) ENGINE=InnoDB;
""" % (
index_name, index_column, index_sql_type, target_column, index_column, target_column,
target_column, index_column
)
def create_kvetch_edge_table_sql():
return """CREATE TABLE IF NOT EXISTS kvetch_edges (
row_id INT NOT NULL AUTO_INCREMENT PRIMARY KEY,
edge_id INT NOT NULL,
from_id BINARY(16) NOT NULL,
to_id BINARY(16) NOT NULL,
created DATETIME NOT NULL,
updated DATETIME NOT NULL,
body MEDIUMBLOB,
UNIQUE KEY(edge_id, from_id, to_id),
UNIQUE KEY(edge_id, from_id, row_id),
KEY(updated)
) ENGINE=InnoDB;
"""
def create_kvetch_objects_table(shard):
execute_ddl(shard, create_kvetch_objects_table_sql())
def create_kvetch_edges_table(shard):
execute_ddl(shard, create_kvetch_edge_table_sql())
def create_kvetch_index_table(shard, shard_index):
check.param(shard_index, IndexDefinition, 'shard_index')
mapping = {
IndexType.STRING: 'VARCHAR(512)',
IndexType.INT: 'INT',
}
sql_type = mapping[shard_index.index_type]
sql = create_kvetch_index_table_sql(
shard_index.indexed_attr, sql_type, 'target_id', shard_index.index_name
)
execute_ddl(shard, sql)
def init_shard_db_tables(shard, indexes):
check.param(indexes, list, 'indexes')
create_kvetch_objects_table(shard)
create_kvetch_edges_table(shard)
for shard_index in indexes:
create_kvetch_index_table(shard, shard_index)
def drop_shard_db_tables(shard, indexes):
check.param(indexes, list, 'indexes')
execute_ddl(shard, 'DROP TABLE IF EXISTS kvetch_objects')
execute_ddl(shard, 'DROP TABLE IF EXISTS kvetch_edges')
for shard_index in indexes:
execute_ddl(shard, 'DROP TABLE IF EXISTS %s' % shard_index.index_name)
def build_index(shard, index):
create_kvetch_index_table(shard, index)
objects = execute_gen(shard.gen_objects_of_type(index.indexed_type_id))
attr = index.indexed_attr
for obj_id, obj in objects.items():
if attr in obj:
execute_gen(shard.gen_insert_index_entry(index, obj[attr], obj_id))
|
Python
| 0.999999
|
@@ -66,56 +66,58 @@
lib%0A
-import pymysql%0A%0Aimport graphscale.check as check
+%0Afrom typing import Iterator, List%0Aimport pymysql%0A
%0Afro
@@ -201,16 +201,51 @@
dexType%0A
+from .dbshard import KvetchDbShard%0A
%0A%0A@conte
@@ -291,24 +291,36 @@
l_warnings()
+ -%3E Iterator
:%0A filter
@@ -363,16 +363,32 @@
Warning)
+ # type: ignore
%0A yie
@@ -437,14 +437,41 @@
hard
-, ddl)
+: KvetchDbShard, ddl: str) -%3E str
:%0A
@@ -676,32 +676,39 @@
ects_table_sql()
+ -%3E str
:%0A return %22%22%22
@@ -1078,16 +1078,21 @@
ble_sql(
+%0A
index_co
@@ -1087,32 +1087,37 @@
index_column
+: str
, index_sql_type
@@ -1108,32 +1108,37 @@
, index_sql_type
+: str
, target_column,
@@ -1140,173 +1140,40 @@
lumn
-, index_name):%0A check.param(index_column,
+:
str,
-'
index_
-column')%0A check.param(target_column, str, 'target_column')%0A check.param(index_name, str, 'index_name')
+name: str%0A) -%3E str:
%0A%0A
@@ -1679,16 +1679,23 @@
le_sql()
+ -%3E str
:%0A re
@@ -2121,33 +2121,56 @@
ects_table(shard
-)
+: KvetchDbShard) -%3E None
:%0A execute_dd
@@ -2250,17 +2250,40 @@
le(shard
-)
+: KvetchDbShard) -%3E None
:%0A ex
@@ -2372,40 +2372,25 @@
hard
-, shard_index):%0A check.param(
+: KvetchDbShard,
shar
@@ -2396,17 +2396,17 @@
rd_index
-,
+:
IndexDe
@@ -2417,25 +2417,10 @@
tion
-, 'shard_index')%0A
+):
%0A
@@ -2749,61 +2749,57 @@
hard
-, indexes):%0A check.param(indexes, list, 'indexes')
+: KvetchDbShard, indexes: List%5BIndexDefinition%5D):
%0A
@@ -2993,61 +2993,57 @@
hard
-, indexes):%0A check.param(indexes, list, 'indexes')
+: KvetchDbShard, indexes: List%5BIndexDefinition%5D):
%0A
@@ -3276,331 +3276,4 @@
me)%0A
-%0A%0Adef build_index(shard, index):%0A create_kvetch_index_table(shard, index)%0A objects = execute_gen(shard.gen_objects_of_type(index.indexed_type_id))%0A attr = index.indexed_attr%0A for obj_id, obj in objects.items():%0A if attr in obj:%0A execute_gen(shard.gen_insert_index_entry(index, obj%5Battr%5D, obj_id))%0A
|
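In the hunks above, %0A encodes a newline and %5B/%5D encode square brackets; decoded, the change swaps the runtime check.param guards for static annotations (KvetchDbShard, List[IndexDefinition], -> str return types) and deletes build_index. A minimal sketch of that guard-to-annotation pattern, reusing one function name from the record (the DDL body here is illustrative, not the repo's real statement):

def create_kvetch_index_table_sql(
    index_column: str, index_sql_type: str, target_column: str, index_name: str
) -> str:
    # The annotations now carry the contract that three check.param calls
    # used to enforce at runtime; a type checker verifies it statically.
    return (
        "CREATE TABLE IF NOT EXISTS %s (%s %s NOT NULL, %s BINARY(16) NOT NULL)"
        % (index_name, index_column, index_sql_type, target_column)
    )

print(create_kvetch_index_table_sql('updated', 'INT', 'target_id', 'idx_updated'))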
1edac6151b4a730039e0782a5cb9777fe7f4a21d
|
Implement basic tests
|
code/web/scisynergy_flask/tests/test_basic.py
|
code/web/scisynergy_flask/tests/test_basic.py
|
import os
import unittest
from scisynergy import app
class BasicTests(unittest.TestCase):
def test_main_page(self):
response = self.app.get('/', follow_redirects=True)
self.assertEqual(response.status_code, 200)
|
Python
| 0.02115
|
@@ -36,16 +36,22 @@
isynergy
+_flask
import
@@ -54,16 +54,16 @@
ort app%0A
-
%0Aclass
@@ -93,16 +93,107 @@
tCase):%0A
+ def setUp(self):%0A self.app = app.test_client()%0A self.app.testing = True%0A%0A
def
@@ -326,8 +326,190 @@
e, 200)%0A
+%0A def test_maintenance(self):%0A response = self.app.get('/maintenance')%0A%0A self.assertEqual(response.status_code, 200)%0A%0A%0Aif __name__ == %22__main__%22:%0A unittest.main()
|
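Decoded, the hunks fix the import to scisynergy_flask, add a setUp that builds a Flask test client, and append a /maintenance test plus the unittest entry point. A self-contained version of the patched test, with a stand-in app so the sketch runs without the project installed:

import unittest
from flask import Flask

# Stand-in for scisynergy_flask.app; the real test imports the project's app.
app = Flask(__name__)

@app.route('/')
def index():
    return 'ok'

@app.route('/maintenance')
def maintenance():
    return 'maintenance mode'

class BasicTests(unittest.TestCase):
    def setUp(self):
        # Added by the diff: a test client in testing mode for every test.
        self.app = app.test_client()
        self.app.testing = True

    def test_main_page(self):
        response = self.app.get('/', follow_redirects=True)
        self.assertEqual(response.status_code, 200)

    def test_maintenance(self):
        response = self.app.get('/maintenance')
        self.assertEqual(response.status_code, 200)

if __name__ == "__main__":
    unittest.main()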
0e03dd0b82b0c7030345475bb03424deb651e3bc
|
Update tests.py
|
helios_auth/tests.py
|
helios_auth/tests.py
|
"""
Unit Tests for Auth Systems
"""
import unittest
import models
from django.db import IntegrityError, transaction
from django.test.client import Client
from django.test import TestCase
from django.core import mail
from auth_systems import AUTH_SYSTEMS
class UserModelTests(unittest.TestCase):
def setUp(self):
pass
def test_unique_users(self):
"""
there should not be two users with the same user_type and user_id
"""
for auth_system, auth_system_module in AUTH_SYSTEMS.iteritems():
models.User.objects.create(user_type = auth_system, user_id = 'foobar', info={'name':'Foo Bar'})
def double_insert():
models.User.objects.create(user_type = auth_system, user_id = 'foobar', info={'name': 'Foo2 Bar'})
self.assertRaises(IntegrityError, double_insert)
transaction.rollback()
def test_create_or_update(self):
"""
shouldn't create two users, and should reset the password
"""
for auth_system, auth_system_module in AUTH_SYSTEMS.iteritems():
u = models.User.update_or_create(user_type = auth_system, user_id = 'foobar_cou', info={'name':'Foo Bar'})
def double_update_or_create():
new_name = 'Foo2 Bar'
u2 = models.User.update_or_create(user_type = auth_system, user_id = 'foobar_cou', info={'name': new_name})
self.assertEquals(u.id, u2.id)
self.assertEquals(u2.info['name'], new_name)
def test_can_create_election(self):
"""
check that auth systems have the can_create_election call and that it's true for the common ones
"""
for auth_system, auth_system_module in AUTH_SYSTEMS.iteritems():
assert(hasattr(auth_system_module, 'can_create_election'))
if auth_system != 'clever':
assert(auth_system_module.can_create_election('foobar', {}))
def test_status_update(self):
"""
check that a user set up with status update ability reports it as such,
and otherwise does not report it
"""
for auth_system, auth_system_module in AUTH_SYSTEMS.iteritems():
u = models.User.update_or_create(user_type = auth_system, user_id = 'foobar_status_update', info={'name':'Foo Bar Status Update'})
if hasattr(auth_system_module, 'send_message'):
self.assertNotEquals(u.update_status_template, None)
else:
self.assertEquals(u.update_status_template, None)
def test_eligibility(self):
"""
test that users are reported as eligible for something
FIXME: also test constraints on eligibility
"""
for auth_system, auth_system_module in AUTH_SYSTEMS.iteritems():
u = models.User.update_or_create(user_type = auth_system, user_id = 'foobar_status_update', info={'name':'Foo Bar Status Update'})
self.assertTrue(u.is_eligible_for({'auth_system': auth_system}))
def test_eq(self):
for auth_system, auth_system_module in AUTH_SYSTEMS.iteritems():
u = models.User.update_or_create(user_type = auth_system, user_id = 'foobar_eq', info={'name':'Foo Bar Status Update'})
u2 = models.User.update_or_create(user_type = auth_system, user_id = 'foobar_eq', info={'name':'Foo Bar Status Update'})
self.assertEquals(u, u2)
import views
import auth_systems.password as password_views
from django.core.urlresolvers import reverse
# FIXME: login CSRF should make these tests more complicated
# and should be tested for
class UserBlackboxTests(TestCase):
def setUp(self):
# create a bogus user
self.test_user = models.User.objects.create(user_type='password',user_id='foobar-test@adida.net',name="Foobar User", info={'password':'foobaz'})
def test_password_login(self):
## we can't test this anymore until it's election specific
pass
# get to the login page
# login_page_response = self.client.get(reverse(views.start, kwargs={'system_name':'password'}), follow=True)
# log in and follow all redirects
# response = self.client.post(reverse(password_views.password_login_view), {'username' : 'foobar_user', 'password': 'foobaz'}, follow=True)
# self.assertContains(response, "logged in as")
# self.assertContains(response, "Foobar User")
def test_logout(self):
response = self.client.post(reverse(views.logout), follow=True)
self.assertContains(response, u'Nejste přihlášen')
self.assertNotContains(response, "Foobar User")
def test_email(self):
"""using the test email backend"""
self.test_user.send_message("testing subject", "testing body")
self.assertEquals(len(mail.outbox), 1)
self.assertEquals(mail.outbox[0].subject, "testing subject")
self.assertEquals(mail.outbox[0].to[0], "\"Foobar User\" <foobar-test@adida.net>")
|
Python
| 0.000001
|
@@ -1,12 +1,36 @@
+# -*- coding: utf-8 -*-%0A
%22%22%22%0AUnit Tes
|
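The single hunk prepends a PEP 263 encoding declaration. Under Python 2, a module holding non-ASCII literals, such as the Czech u'Nejste přihlášen' assertion above, raises SyntaxError at import time without it; Python 3 assumes UTF-8 and does not need the line. A two-line demonstration:

# -*- coding: utf-8 -*-
message = u'Nejste přihlášen'  # legal on Python 2 only with the declaration
print(message)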
20d1ad27c85ecc7dcfbfb30abd7a68be10db2a33
|
Change showIt=False to pass test on Travis
|
simpegEM/Tests/test_forward_EMproblem.py
|
simpegEM/Tests/test_forward_EMproblem.py
|
import unittest
from SimPEG import *
import simpegEM as EM
from scipy.constants import mu_0
from simpegEM.Utils.Ana import hzAnalyticDipoleT
import matplotlib.pyplot as plt
class TDEM_bTests(unittest.TestCase):
def setUp(self):
cs = 10.
ncx = 15
ncy = 10
npad = 20
hx = Utils.meshTensors(((0,cs), (ncx,cs), (npad,cs)))
hy = Utils.meshTensors(((npad,cs), (ncy,cs), (npad,cs)))
mesh = Mesh.Cyl1DMesh([hx,hy], -hy.sum()/2)
active = mesh.vectorCCz<0.
model = Model.ActiveModel(mesh, active, -8, nC=mesh.nCz)
model = Model.ComboModel(mesh,
[Model.LogModel, Model.Vertical1DModel, model])
opts = {'txLoc':0.,
'txType':'VMD_MVP',
'rxLoc':np.r_[30., 0.],
'rxType':'bz',
'timeCh':np.logspace(-4,-2.5, 21),
}
self.dat = EM.TDEM.SurveyTDEM1D(**opts)
self.prb = EM.TDEM.ProblemTDEM_b(model)
self.prb.setTimes([1e-6, 5e-6, 1e-5, 5e-5, 1e-4, 5e-4], [40, 40, 40, 40, 40, 40])
self.sigma = np.ones(mesh.nCz)*1e-8
self.sigma[mesh.vectorCCz<0] = 1e-3
self.sigma = np.log(self.sigma[active])
self.showIt = True
self.prb.pair(self.dat)
def test_analitic_b(self):
bz_calc = self.dat.dpred(self.sigma)
bz_ana = mu_0*hzAnalyticDipoleT(self.dat.rxLoc[0], self.prb.times, np.exp(self.sigma[0]))
ind = self.prb.times > 1e-5
diff = np.linalg.norm(bz_calc[ind].flatten() - bz_ana[ind].flatten())/np.linalg.norm(bz_ana[ind].flatten())
if self.showIt == True:
plt.loglog(self.prb.times[bz_calc>0], bz_calc[bz_calc>0], 'b', self.prb.times[bz_calc<0], -bz_calc[bz_calc<0], 'b--')
plt.loglog(self.prb.times, abs(bz_ana), 'b*')
plt.xlim(1e-5, 1e-2)
plt.show()
print diff
self.assertTrue(diff < 0.10)
if __name__ == '__main__':
unittest.main()
|
Python
| 0.000004
|
@@ -1243,19 +1243,20 @@
howIt =
-Tru
+Fals
e%0A
|
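The one-character hunk flips self.showIt from True to False so the test never reaches plt.show(), which hangs or fails on a display-less Travis worker. A common alternative is to switch matplotlib to a non-interactive backend on CI instead of editing the test; a hedged sketch of that general pattern, not SimPEG code:

import os
import matplotlib
# With no DISPLAY (typical for CI), fall back to the file-only Agg backend
# before pyplot is imported, so savefig still works and show() is skipped.
if not os.environ.get("DISPLAY"):
    matplotlib.use("Agg")
import matplotlib.pyplot as plt

show_plots = bool(os.environ.get("DISPLAY"))

fig, ax = plt.subplots()
ax.loglog([1e-5, 1e-4, 1e-3], [1.0, 0.1, 0.01])
if show_plots:
    plt.show()
else:
    fig.savefig("diagnostic.png")  # keep the artifact without a display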
1c1232d523687a11a9ec66bd550b671d4243a99a
|
disable logging
|
file_has_rationale_yes.py
|
file_has_rationale_yes.py
|
#!/usr/bin/env python
from __future__ import unicode_literals
"""
Copyright (C) 2012 Legoktm
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
"""
import pywikibot
import mwparserfromhell
import robot
import awb_gen_fixes
class FileHasRationaleYesBot(robot.Robot):
def __init__(self):
self.count = 1
robot.Robot.__init__(self, task=22)
self.startLogging(pywikibot.Page(self.site, 'User:Legobot/Logs/22'))
self.cat = pywikibot.Category(self.site, "Category:Non-free images for NFUR review")
self.gen = self.cat.articles(namespaces=[6], content=True)
self.AWBGenFixes = awb_gen_fixes.AWBGenFixes(self.site)
self.AWBGenFixes.load()
self.AWBGenFixes.load_redirects(pywikibot.Page(self.site, 'User:Legoktm/AWB/TR'))
self.stop_page = pywikibot.Page(self.site, 'User:Legobot/Stop/22')
def run(self):
#fetch copyright licenses
cat = pywikibot.Category(self.site, 'Category:Wikipedia non-free file copyright tags')
templates = cat.articles(namespaces=[10])
self.licenses = [temp.title(withNamespace=False).lower() for temp in templates]
cat2 = pywikibot.Category(self.site, 'Category:Non-free use rationale templates')
nfur_temps = cat2.articles(namespaces=[10])
self.NFURs = [temp.title(withNamespace=False).lower() for temp in nfur_temps]
for page in self.gen:
self.do_page(page)
def check_page(self):
text = self.stop_page.get(force=True)
if text.lower() != 'run':
raise Exception("Stop page disabled")
def do_page(self, page):
print page.title(asLink=True)
if page.namespace() != 6:
return
text = page.get()
if '<nowiki>' in text:
print 'NOWIKI'
# return
text, gen_fix_summary = self.AWBGenFixes.do_page(text)
code = mwparserfromhell.parse(text)
tag = False
log = '* '
summary = 'Bot: Updating license tag(s) with image has rationale=yes'
for template in code.filter_templates(recursive=True):
name = template.name.lower().strip()
if name in self.NFURs:
print name
tag = True
if tag:
for template in code.filter_templates(recursive=True):
if template.name.lower().strip() in self.licenses:
template.add('image has rationale', 'yes')
log += '[[:%s]]: Adding <code>|image has rationale=yes</code>' % page.title()
else:
print 'Skipping '+page.title(asLink=True)
return
if gen_fix_summary:
summary += ', also dating ' + gen_fix_summary
if self.count > 50:
quit()
puttext = unicode(code).lstrip('\n')
pywikibot.showDiff(text, puttext)
self.output(log)
self.check_page()
page.put(puttext, summary)
self.count +=1
if __name__ == "__main__":
bot = FileHasRationaleYesBot()
try:
bot.run()
finally:
bot.pushLog()
pass
|
Python
| 0.000002
|
@@ -4045,24 +4045,25 @@
ly:%0A
+#
bot.pushLog(
|
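Decoded, the hunk comments out bot.pushLog() inside the finally block, so the run log is no longer written to the wiki. An explicit flag states the same switch more clearly; the helper below is hypothetical, with a stub standing in for the pywikibot-based bot:

class _StubBot:
    """Minimal stand-in so the sketch runs without pywikibot."""
    def run(self):
        print("processing pages")
    def pushLog(self):
        print("writing log page")

def run_bot(bot, push_logs=False):
    # push_logs=False reproduces the patched behaviour: the bot still runs
    # to completion, but the log upload in the finally block is skipped.
    try:
        bot.run()
    finally:
        if push_logs:
            bot.pushLog()

run_bot(_StubBot())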
1e5345a786b24d341cfd99c3334c3122e3e5a91b
|
Update simulate method in Framework
|
simulation/MappingSimulationFrameWork.py
|
simulation/MappingSimulationFrameWork.py
|
from ResourceGetter import ResouceGetter
from RequestGenerator import TestReqGen
from RequestGenerator import SimpleReqGen
from RequestGenerator import MultiReqGen
from AbstractOrchestrator
import sys
#sys.path.append(./RequestGenerator)
#from escape.mapping.simulation import ResourceGetter
#from escape.mapping.simulation import RequestGenerator
class MappingSolutionFramework:
#__request_generator = None
remaining_request_lifetimes = []
def __init__(self, resource_getter, request_generator): #, orchestrator_adaptor):
self.__resource_getter = resource_getter
self.__request_generator = request_generator
#self.__orchestrator_adaptor = orchestrator_adaptor
def simulate(self,topology_type,request_type,sim_end,discrete_sim):
#Get resource
resource_getter = ResouceGetter()
resource_graph = resource_getter.GetNFFG(topology_type)
#Simulation cycle
sim_running = True
sim_iter = 0
while sim_running:
#Get request
            #TODO: modify this according to the class diagram
if request_type == "test":
request_generator = TestReqGen()
elif request_type == "simple":
request_generator = SimpleReqGen()
elif request_type == "multi":
request_generator = MultiReqGen()
else:
#TODO: create exception
pass
service_graph = request_generator.get_request()
#Discrete working
if discrete_sim:
#Indiscrete working
else:
pass
#Increase simulation iteration
if (sim_iter < sim_end):
sim_iter += 1
else:
sim_running = False
if __name__ == "__main__":
#Start simulate:
resource_graph = ResouceGetter()
asd = resource_graph.GetNFFG('pico')
request = RequestGenerator()
# orch_adaptor = OrchestratorAdaptor()
test = MappingSolutionFramework(resource_graph,request) #,orch_adaptor)
|
Python
| 0
|
@@ -183,16 +183,24 @@
strator
+import *
%0A%0Aimport
@@ -394,40 +394,137 @@
-#__request_generator = None%0A
+__discrete_simulation = True%0A __resource_getter = None%0A __request_generator = None%0A __orchestrator_adaptor = None%0A __
rema
@@ -553,10 +553,14 @@
s =
-%5B%5D
+list()
%0A%0A
@@ -584,181 +584,436 @@
lf,
-resource_getter, request_generator): #, orchestrator_adaptor):%0A self.__resource_getter = resource_getter%0A self.__request_generator = request_generator%0A
+simulation_type):%0A self.__discreate_simulation = simulation_type%0A%0A%0A def __clean_expired_requests(self,time,service_graph):%0A%0A # Delete expired SCs%0A for sc in self.__remaining_request_lifetimes:%0A if sc.dead_time %3C time:%0A # Delete mapping%0A for nf in sc.SC.nfs:%0A service_graph.del_node(nf)%0A # refresh the active SCs list%0A
#sel
@@ -1012,59 +1012,59 @@
-#
+
self.__
-orchestrator_adaptor = orchestrator_adaptor
+remaining_request_lifetimes.remove(sc)%0A%0A
%0A%0A
@@ -1110,16 +1110,34 @@
st_type,
+orchestrator_type,
sim_end,
@@ -1148,24 +1148,68 @@
rete_sim):%0A%0A
+ time = 0%0A mapping_level = 1%0A%0A
#Get
@@ -1890,24 +1890,36 @@
ervice_graph
+, life_time
= request_g
@@ -1939,16 +1939,44 @@
request(
+resource_graph,mapping_level
)%0A%0A
@@ -2030,16 +2030,1046 @@
e_sim:%0A%0A
+ time += 1%0A%0A #Get Orchestrator%0A if orchestrator_type == %22online%22:%0A orchestrator_adaptor = OnlineOrchestrator()%0A elif orchestrator_type == %22offline%22:%0A orchestrator_adaptor = OfflineOrchestrator()%0A elif orchestrator_type == %22hybrid%22:%0A orchestrator_adaptor = HybridOrchestrator()%0A else:%0A # TODO: create exception%0A pass%0A%0A #Synchronous MAP call%0A orchestrator_adaptor.MAP(service_graph,resource_graph)%0A%0A #Adding successfully mapped request to the remaining_request_lifetimes%0A # TODO: ELLENORIZNI, HOGY MAPPING SIKERES-E%0A service_life_element = %7B%22dead_time%22:time+life_time,%22SG%22:service_graph%7D%0A self.__remaining_request_lifetimes.append(service_life_element)%0A%0A #Remove expired service graph requests%0A self.__clean_expired_requests()%0A
%0A%0A
@@ -3104,32 +3104,83 @@
else:%0A
+ #TODO: Create this simulation type%0A
@@ -3387,183 +3387,8 @@
%22:%0A%0A
- #Start simulate:%0A%0A resource_graph = ResouceGetter()%0A asd = resource_graph.GetNFFG('pico')%0A request = RequestGenerator()%0A # orch_adaptor = OrchestratorAdaptor()%0A
|
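The large diff restructures simulate(): it threads a time counter through the loop, picks an orchestrator by type string, records each mapped request as {"dead_time": time + life_time, "SG": ...}, and prunes expired entries via a new __clean_expired_requests. The lifetime bookkeeping in isolation, as a runnable sketch with plain dicts for service graphs:

def clean_expired_requests(remaining, time):
    # Keep only service chains whose dead_time has not passed; the real
    # method also deletes the chain's NFs from the resource graph first.
    return [sc for sc in remaining if sc["dead_time"] >= time]

remaining = []
for time in range(1, 6):
    life_time = 2
    remaining.append({"dead_time": time + life_time, "SG": "sg@%d" % time})
    remaining = clean_expired_requests(remaining, time)
    print(time, [sc["SG"] for sc in remaining])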
492ab05637b92f2decbd8fe60e25783ce63f9733
|
remove ignore from staging
|
server/settings/staging.py
|
server/settings/staging.py
|
""" Do not put secrets in this file. This file is public.
For staging environment (Using Dokku)
"""
import os
import sys
import binascii
from server.settings import RAVEN_IGNORE_EXCEPTIONS
default_secret = binascii.hexlify(os.urandom(24))
ENV = 'staging'
PREFERRED_URL_SCHEME = 'https'
SECRET_KEY = os.getenv('SECRET_KEY', default_secret)
CACHE_TYPE = 'simple'
DEBUG = False
ASSETS_DEBUG = False
TESTING_LOGIN = False
DEBUG_TB_INTERCEPT_REDIRECTS = False
SQLALCHEMY_TRACK_MODIFICATIONS = False
db_url = os.getenv('DATABASE_URL')
if db_url:
db_url = db_url.replace('mysql://', 'mysql+pymysql://')
db_url += "&sql_mode=STRICT_TRANS_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION"
else:
db_url = os.getenv('SQLALCHEMY_URL', 'sqlite:///../oksqlite.db')
SQLALCHEMY_DATABASE_URI = db_url
WTF_CSRF_CHECK_DEFAULT = True
WTF_CSRF_ENABLED = True
RAVEN_IGNORE_EXCEPTIONS =['werkzeug.exceptions.Forbidden', 'werkzeug.exceptions.NotFound',
'werkzeug.exceptions.Unauthorized']
try:
os.environ["GOOGLE_ID"]
os.environ["GOOGLE_SECRET"]
except KeyError:
print("Please set the google login variables. source secrets.sh")
sys.exit(1)
GOOGLE = {
'consumer_key': os.environ.get('GOOGLE_ID'),
'consumer_secret': os.environ.get('GOOGLE_SECRET')
}
SENDGRID_AUTH = {
'user': os.environ.get("SENDGRID_USER"),
'key': os.environ.get("SENDGRID_KEY")
}
|
Python
| 0.000001
|
@@ -918,162 +918,8 @@
ue%0A%0A
-RAVEN_IGNORE_EXCEPTIONS =%5B'werkzeug.exceptions.Forbidden', 'werkzeug.exceptions.NotFound',%0A 'werkzeug.exceptions.Unauthorized'%5D%0A%0A
try:
|
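The hunk deletes the module-level RAVEN_IGNORE_EXCEPTIONS list, leaving only the value imported from server.settings at the top of the file, so the ignore list is defined in one place. Also worth decoding from the old contents: the DATABASE_URL rewrite swaps the bare mysql:// scheme for the PyMySQL driver and pins a strict sql_mode. That normalisation on its own, with a sample URL:

import os

db_url = os.getenv("DATABASE_URL", "mysql://u:p@db/ok?charset=utf8")
if db_url.startswith("mysql://"):
    # SQLAlchemy needs an explicit driver; PyMySQL is pure Python.
    db_url = db_url.replace("mysql://", "mysql+pymysql://")
    # The settings file appends with '&', assuming a query string exists.
    db_url += "&sql_mode=STRICT_TRANS_TABLES"
print(db_url)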
f5d3bb1dd20e9d2c3c5bd66b30032bea6c3f45d8
|
fix CLIP conversion script (#13474)
|
src/transformers/models/clip/convert_clip_original_pytorch_to_hf.py
|
src/transformers/models/clip/convert_clip_original_pytorch_to_hf.py
|
# coding=utf-8
# Copyright 2021 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import torch
from clip import load
from transformers import CLIPConfig, CLIPModel
def copy_attn_layer(hf_attn_layer, pt_attn_layer):
q_proj, k_proj, v_proj = pt_attn_layer.in_proj_weight.chunk(3, dim=0)
q_proj_bias, k_proj_bias, v_proj_bias = pt_attn_layer.in_proj_bias.chunk(3, dim=0)
out_proj_weights = pt_attn_layer.out_proj.weight
out_proj_bias = pt_attn_layer.out_proj.bias
hf_attn_layer.q_proj.weight.data = q_proj
hf_attn_layer.q_proj.bias.data = q_proj_bias
hf_attn_layer.k_proj.weight.data = k_proj
hf_attn_layer.k_proj.bias.data = k_proj_bias
hf_attn_layer.v_proj.weight.data = v_proj
hf_attn_layer.v_proj.bias.data = v_proj_bias
hf_attn_layer.out_proj.weight = out_proj_weights
hf_attn_layer.out_proj.bias = out_proj_bias
def copy_mlp(hf_mlp, pt_mlp):
copy_linear(hf_mlp.fc1, pt_mlp.c_fc)
copy_linear(hf_mlp.fc2, pt_mlp.c_proj)
def copy_linear(hf_linear, pt_linear):
hf_linear.weight = pt_linear.weight
hf_linear.bias = pt_linear.bias
def copy_layer(hf_layer, pt_layer):
# copy layer norms
copy_linear(hf_layer.layer_norm1, pt_layer.ln_1)
copy_linear(hf_layer.layer_norm2, pt_layer.ln_2)
# copy MLP
copy_mlp(hf_layer.mlp, pt_layer.mlp)
# copy attn
copy_attn_layer(hf_layer.self_attn, pt_layer.attn)
def copy_layers(hf_layers, pt_layers):
for hf_layer, pt_layer in zip(hf_layers, pt_layers):
copy_layer(hf_layer, pt_layer)
def copy_encoder(hf_encoder, pt_model):
# copy embeds
hf_encoder.embeddings.token_embedding.weight = pt_model.token_embedding.weight
hf_encoder.embeddings.position_embedding.weight.data = pt_model.positional_embedding
# copy layer norm
copy_linear(hf_encoder.final_layer_norm, pt_model.ln_final)
# copy hidden layers
copy_layers(hf_encoder.encoder.layers, pt_model.transformer.resblocks)
def copy_text_model_and_projection(hf_model, pt_model):
# copy projection
hf_model.text_projection.weight.data = pt_model.text_projection.data.T
# copy text encoder
copy_encoder(hf_model.text_model, pt_model)
def copy_vison_model_and_projection(hf_model, pt_model):
# copy projection
hf_model.visual_projection.weight.data = pt_model.visual.proj.data.T
# copy layer norms
copy_linear(hf_model.vision_model.pre_layrnorm, pt_model.visual.ln_pre)
copy_linear(hf_model.vision_model.post_layernorm, pt_model.visual.ln_post)
# copy embeds
hf_model.vision_model.embeddings.patch_embedding.weight.data = pt_model.visual.conv1.weight.data
hf_model.vision_model.embeddings.class_embedding = pt_model.visual.class_embedding
hf_model.vision_model.embeddings.position_embedding.weight.data = pt_model.visual.positional_embedding.data
# copy encoder
copy_layers(hf_model.vision_model.encoder.layers, pt_model.visual.transformer.resblocks)
@torch.no_grad()
def convert_clip_checkpoint(checkpoint_path, pytorch_dump_folder_path, config_path=None):
"""
Copy/paste/tweak model's weights to transformers design.
"""
if config_path is not None:
config = CLIPConfig.from_pretrained(config_path)
else:
config = CLIPConfig(projection_dim=512, text_config={}, vision_config={})
hf_model = CLIPModel(config).eval()
pt_model, _ = load(checkpoint_path, jit=False)
pt_model = pt_model.eval()
copy_text_model_and_projection(hf_model, pt_model)
copy_vison_model_and_projection(hf_model, pt_model)
hf_model.logit_scale = pt_model.logit_scale
input_ids = torch.arange(0, 77).unsqueeze(0)
pixel_values = torch.randn(1, 3, 224, 224)
hf_logits_per_image, hf_logits_per_text = hf_model(
input_ids=input_ids, pixel_values=pixel_values, return_dict=True
)[1:3]
pt_logits_per_image, pt_logits_per_text = pt_model(pixel_values, input_ids)
assert torch.allclose(hf_logits_per_image, pt_logits_per_image, atol=1e-3)
assert torch.allclose(hf_logits_per_text, pt_logits_per_text, atol=1e-3)
hf_model.save_pretrained(pytorch_dump_folder_path)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--pytorch_dump_folder_path", default=None, type=str, help="Path to the output PyTorch model.")
parser.add_argument("--checkpoint_path", default=None, type=str, help="Path to fairseq checkpoint")
parser.add_argument("--config_path", default=None, type=str, help="Path to hf config.json of model to convert")
args = parser.parse_args()
convert_clip_checkpoint(args.checkpoint_path, args.pytorch_dump_folder_path, args.config_path)
|
Python
| 0
|
@@ -3954,16 +3954,30 @@
nt_path,
+ device=%22cpu%22,
jit=Fal
|
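The fix inserts device="cpu" into the clip.load call, so loading the original checkpoint no longer requires CUDA and the weights start on the same device as the freshly built HF model. Shown below as the diff applies it; running this needs the OpenAI clip package and network access to fetch the named checkpoint:

import torch
from clip import load

pt_model, _ = load("ViT-B/32", device="cpu", jit=False)  # the patched call
pt_model = pt_model.eval()

with torch.no_grad():
    image = torch.randn(1, 3, 224, 224)
    tokens = torch.arange(0, 77).unsqueeze(0)
    logits_per_image, logits_per_text = pt_model(image, tokens)
print(logits_per_image.shape)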
21561f33e14018b7081e91f5d84ff65c8797909e
|
refactor me later
|
conditional/blueprints/dashboard.py
|
conditional/blueprints/dashboard.py
|
import uuid
import structlog
from flask import Blueprint, request
from conditional.util.ldap import ldap_get_room_number
from conditional.util.ldap import ldap_is_active
from conditional.util.ldap import ldap_is_onfloor
from conditional.util.ldap import ldap_get_housing_points
from conditional.util.ldap import ldap_is_intromember
from conditional.util.ldap import ldap_get_name
from conditional.util.ldap import ldap_get_active_members
from conditional.util.ldap import ldap_get_intro_members
from conditional.models.models import FreshmanEvalData
from conditional.models.models import MemberCommitteeAttendance
from conditional.models.models import MemberSeminarAttendance
from conditional.models.models import TechnicalSeminar
from conditional.models.models import MemberHouseMeetingAttendance
from conditional.models.models import MajorProject
from conditional.models.models import Conditional
from conditional.models.models import HouseMeeting
from conditional.models.models import CommitteeMeeting
from conditional.util.housing import get_queue_length, get_queue_position
from conditional.util.flask import render_template
logger = structlog.get_logger()
dashboard_bp = Blueprint('dashboard_bp', __name__)
def get_freshman_data(user_name):
freshman = {}
freshman_data = FreshmanEvalData.query.filter(FreshmanEvalData.uid == user_name).first()
freshman['status'] = freshman_data.freshman_eval_result
# number of committee meetings attended
c_meetings = [m.meeting_id for m in
MemberCommitteeAttendance.query.filter(
MemberCommitteeAttendance.uid == user_name
)]
freshman['committee_meetings'] = len(c_meetings)
# technical seminar total
t_seminars = [s.seminar_id for s in
MemberSeminarAttendance.query.filter(
MemberSeminarAttendance.uid == user_name
)]
freshman['ts_total'] = len(t_seminars)
attendance = [m.name for m in TechnicalSeminar.query.filter(
TechnicalSeminar.id.in_(t_seminars)
)]
freshman['ts_list'] = attendance
h_meetings = [(m.meeting_id, m.attendance_status) for m in
MemberHouseMeetingAttendance.query.filter(
MemberHouseMeetingAttendance.uid == user_name)]
freshman['hm_missed'] = len([h for h in h_meetings if h[1] == "Absent"])
freshman['social_events'] = freshman_data.social_events
freshman['general_comments'] = freshman_data.other_notes
freshman['fresh_proj'] = freshman_data.freshman_project
freshman['sig_missed'] = freshman_data.signatures_missed
freshman['eval_date'] = freshman_data.eval_date
return freshman
def get_voting_members():
voting_list = []
active_members = [x['uid'][0].decode('utf-8') for x
in ldap_get_active_members()]
intro_members = [x['uid'][0].decode('utf-8') for x
in ldap_get_intro_members()]
passed_fall = FreshmanEvalData.query.filter(
FreshmanEvalData.freshman_eval_result == "Passed"
).distinct()
for intro_member in passed_fall:
voting_list.append(intro_member.uid)
for active_member in active_members:
if active_member not in intro_members:
voting_list.append(active_member)
return voting_list
@dashboard_bp.route('/dashboard/')
def display_dashboard():
log = logger.new(user_name=request.headers.get("x-webauth-user"),
request_id=str(uuid.uuid4()))
log.info('frontend', action='display dashboard')
# get user data
user_name = request.headers.get('x-webauth-user')
can_vote = get_voting_members()
logger.info('backend', action=can_vote)
data = dict()
data['username'] = user_name
data['name'] = ldap_get_name(user_name)
# Member Status
data['active'] = ldap_is_active(user_name)
# On-Floor Status
data['onfloor'] = ldap_is_onfloor(user_name)
# Voting Status
data['voting'] = bool(user_name in can_vote)
# freshman shit
if ldap_is_intromember(user_name):
data['freshman'] = get_freshman_data(user_name)
else:
data['freshman'] = False
spring = {}
c_meetings = [m.meeting_id for m in
MemberCommitteeAttendance.query.filter(
MemberCommitteeAttendance.uid == user_name
)]
spring['committee_meetings'] = len(c_meetings)
h_meetings = [(m.meeting_id, m.attendance_status) for m in
MemberHouseMeetingAttendance.query.filter(
MemberHouseMeetingAttendance.uid == user_name)]
spring['hm_missed'] = len([h for h in h_meetings if h[1] == "Absent"])
data['spring'] = spring
# only show housing if member has onfloor status
if ldap_is_onfloor(user_name):
housing = dict()
housing['points'] = ldap_get_housing_points(user_name)
housing['room'] = ldap_get_room_number(user_name)
if housing['room'] == "N/A":
housing['queue_pos'] = "%s / %s" % (get_queue_position(user_name), get_queue_length())
else:
housing['queue_pos'] = "N/A"
else:
housing = None
data['housing'] = housing
data['major_projects'] = [
{
'name': p.name,
'status': p.status,
'description': p.description
} for p in
MajorProject.query.filter(MajorProject.uid == user_name)]
data['major_projects_count'] = len(data['major_projects'])
spring['mp_status'] = "Failed"
for mp in data['major_projects']:
if mp['status'] == "Pending":
spring['mp_status'] = 'Pending'
continue
if mp['status'] == "Passed":
spring['mp_status'] = 'Passed'
break
conditionals = [
{
'date_created': c.date_created,
'date_due': c.date_due,
'description': c.description,
'status': c.status
} for c in
Conditional.query.filter(Conditional.uid == user_name)]
data['conditionals'] = conditionals
data['conditionals_len'] = len(conditionals)
cm_attendance = [
{
'type': m.committee,
'datetime': m.timestamp
} for m in CommitteeMeeting.query.filter(
CommitteeMeeting.id.in_(c_meetings)
)]
hm_attendance = [
{
'reason': m.excuse,
'datetime': HouseMeeting.query.filter(
HouseMeeting.id == m.meeting_id).first().date
} for m in
MemberHouseMeetingAttendance.query.filter(
MemberHouseMeetingAttendance.uid == user_name
).filter(MemberHouseMeetingAttendance.attendance_status == "Absent")]
data['cm_attendance'] = cm_attendance
data['cm_attendance_len'] = len(cm_attendance)
data['hm_attendance'] = hm_attendance
data['hm_attendance_len'] = len(hm_attendance)
return render_template(request, 'dashboard.html', **data)
|
Python
| 0.000002
|
@@ -3324,16 +3324,54 @@
_list%0A%0A%0A
+# pylint: disable=too-many-statements%0A
@dashboa
|
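The entire diff is one comment line: a pylint suppression inserted above the view, because display_dashboard exceeds the checker's default too-many-statements limit (hence the 'refactor me later' subject). The directive can also be scoped to a single function rather than the rest of the module:

def display_dashboard_stub():  # pylint: disable=too-many-statements
    # On the def line, the suppression covers only this function; as a
    # standalone comment (the commit's form) it applies from that point on.
    data = {}
    data["username"] = "user"
    return data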
1569633e1e73bbfb11a2cc34a1ed5239fdc58b1f
|
load extent data as json from form
|
casework/forms.py
|
casework/forms.py
|
# -*- coding: utf-8 -*-
from flask_wtf import Form
from wtforms import StringField, RadioField, DecimalField, HiddenField, TextAreaField, FieldList, DateField, FormField
from wtforms.validators import DataRequired, Optional
from casework.validators import validate_postcode, validate_price_paid, validate_extent, format_postcode
import simplejson
class ChargeForm(Form):
"""
Charge Form
"""
charge_date = DateField('Charge date', format='%d-%m-%Y', validators=[DataRequired()])
chargee_name = StringField('Company name', validators=[DataRequired()])
chargee_registration_number = StringField('Company registration number', validators=[DataRequired()])
chargee_address = TextAreaField('Address', validators=[DataRequired()])
class RegistrationForm(Form):
"""
The names of the variables here MUST match the name attribute of the fields
in the index.html for WTForms to work
Nope: you just have to use the form object you pass to the template and use
the form object to do the work for you
"""
title_number = HiddenField('Title Number')
first_name1 = StringField('First name 1', validators=[DataRequired()])
surname1 = StringField('Surname 1', validators=[DataRequired()])
first_name2 = StringField('First name 2')
surname2 = StringField('Surname 2')
house_number = StringField('House number', validators=[DataRequired()])
road = StringField('Road', validators=[DataRequired()])
town = StringField('Town', validators=[DataRequired()])
postcode = StringField('Postcode', validators=[DataRequired(), validate_postcode])
property_tenure = RadioField(
'Property tenure',
choices=[
('Freehold', 'Freehold'),
('Leasehold', 'Leasehold')
]
)
property_class = RadioField(
'Property class',
choices=[
('Absolute', 'Absolute'),
('Good', 'Good'),
('Qualified', 'Qualified'),
('Possessory', 'Possessory')
]
)
price_paid = DecimalField(
'Price paid (£)',
validators=[Optional(), validate_price_paid],
places=2,
rounding=None)
charges = FieldList(FormField(ChargeForm), min_entries=0)
charges_template = FieldList(FormField(ChargeForm), min_entries=1)
extent = TextAreaField('GeoJSON', validators=[DataRequired(), validate_extent])
def validate(self):
old_form_charges_template = self.charges_template
del self.charges_template
form_is_validated = super(RegistrationForm, self).validate()
self.charges_template = old_form_charges_template
return form_is_validated
def to_json(self):
arr = []
for charge in self['charges'].data:
dt = charge.pop('charge_date')
print "xXX", dt
charge['charge_date'] = str(dt)
arr.append(charge)
data = simplejson.dumps({
"title_number": self['title_number'].data,
"proprietors": [
{
"first_name": self['first_name1'].data,
"last_name": self['surname1'].data
},
{
"first_name": self['first_name2'].data,
"last_name": self['surname2'].data
}
],
"property": {
"address": {
"house_number": self['house_number'].data,
"road": self['road'].data,
"town": self['town'].data,
"postcode": format_postcode(self['postcode'].data)
},
"tenure": self['property_tenure'].data,
"class_of_title": self['property_class'].data
},
"payment": {
"price_paid": self['price_paid'].data,
"titles": [
self['title_number'].data
]
},
"charges": arr,
"extent": self['extent'].data
})
return data
|
Python
| 0
|
@@ -4019,16 +4019,33 @@
xtent%22:
+simplejson.loads(
self%5B'ex
@@ -4051,24 +4051,25 @@
xtent'%5D.data
+)
%0A %7D)%0A
@@ -4089,11 +4089,8 @@
rn data%0A
-%0A%0A%0A
|
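The hunk wraps self['extent'].data in simplejson.loads before dumping, which is the difference between embedding the GeoJSON as an object and double-encoding it as a quoted string. A stdlib demonstration (simplejson, which the form uses, behaves identically here):

import json

extent_field = '{"type": "Point", "coordinates": [0.0, 51.5]}'

double_encoded = json.dumps({"extent": extent_field})        # before the fix
embedded = json.dumps({"extent": json.loads(extent_field)})  # after the fix

print(double_encoded)  # the GeoJSON arrives as an escaped string
print(embedded)        # the GeoJSON arrives as a nested object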
661fa0d89d66fe012165ee7553c65e1e73356763
|
Fix pylint
|
batchflow/tests/filesindex_test.py
|
batchflow/tests/filesindex_test.py
|
""" Tests for FilesIndex class. """
# pylint: disable=missing-docstring
# pylint: disable=protected-access
# pylint: disable=redefined-outer-name
import os
import shutil
from contextlib import ExitStack as does_not_raise
import pytest
import numpy as np
from batchflow import FilesIndex, DatasetIndex
@pytest.fixture(scope='module')
def files_setup(request):
""" Fixture that creates files for tests """
path = 'fi_test_tmp'
folder1 = 'folder'
folder2 = 'other_folder'
folders = [path, os.path.join(path, folder1), os.path.join(path, folder2)]
for folder in folders:
os.mkdir(folder)
for i in range(3):
open(os.path.join(folder, 'file_{}.txt'.format(i)), 'w').close()
def fin():
shutil.rmtree(path)
request.addfinalizer(fin)
return path, folder1, folder2
@pytest.mark.parametrize('path, expectation', [['', does_not_raise()],
[[], pytest.raises(ValueError)],
[['', ''], does_not_raise()]
])
def test_build_index_empty(path, expectation):
with expectation:
findex = FilesIndex(path=path)
assert len(findex) == 0
assert isinstance(findex.index, np.ndarray)
@pytest.mark.parametrize('path, error', [(1, TypeError),
([2, 3], TypeError),
([None], TypeError)])
def test_build_index_non_path(path, error):
""" `path` should be string or list of strings """
with pytest.raises(error):
FilesIndex(path=path)
def test_build_no_ext(files_setup):
path, _, _ = files_setup
path = os.path.join(path, '*')
findex = FilesIndex(path=path, no_ext=True)
assert len(findex) == 3
assert os.path.splitext(findex.indices[0])[1] == ''
def test_build_dirs(files_setup):
path, folder1, _ = files_setup
path = os.path.join(path, '*')
findex = FilesIndex(path=path, dirs=True, sort=True)
assert len(findex) == 2
assert findex.indices[0] == os.path.split(folder1)[1]
def test_same_name_in_differen_folders(files_setup):
path, _, _ = files_setup
path = os.path.join(path, '*', '*')
with pytest.raises(ValueError):
FilesIndex(path=path)
def test_build_from_index(files_setup):
path, _, _ = files_setup
files = ['file_{}.txt'.format(i) for i in range(3)]
paths = dict(zip(files, [os.path.join(path, f) for f in files]))
dsindex = DatasetIndex(files)
findex = FilesIndex(index=dsindex, paths=paths, dirs=False)
assert len(dsindex) == len(findex)
def test_get_full_path(files_setup):
path, _, _ = files_setup
findex = FilesIndex(path=os.path.join(path, '*'))
file_name = 'file_1.txt'
full_path = findex.get_fullpath(file_name)
assert os.path.dirname(full_path) == path
assert os.path.basename(full_path) == file_name
@pytest.mark.parametrize('index', [DatasetIndex(['file_1.txt']), ['file_1.txt']])
def test_create_subset(files_setup, index):
path, _, _ = files_setup
findex = FilesIndex(path=os.path.join(path, '*'))
new_findex = findex.create_subset(index)
file_name = 'file_1.txt'
full_path = new_findex.get_fullpath(file_name)
assert len(new_findex) == 1
assert isinstance(new_findex.indices, np.ndarray)
assert os.path.dirname(full_path) == path
assert os.path.basename(full_path) == file_name
|
Python
| 0.000099
|
@@ -948,16 +948,17 @@
+
%5B%5B%5D, pyt
@@ -1028,16 +1028,17 @@
+
%5B%5B'', ''
@@ -1061,55 +1061,8 @@
e()%5D
-%0A
%5D)%0Ad
@@ -1350,16 +1350,17 @@
+
(%5B2, 3%5D,
@@ -1364,32 +1364,33 @@
3%5D, TypeError),%0A
+
|
352583af500746b431d46d7efc3a0d3f931b43a0
|
Fix context processors
|
skcodeonlinetester/context_processors.py
|
skcodeonlinetester/context_processors.py
|
"""
Extra context processors for the SkCodeOnlineTester app.
"""
from django.utils.translation import ugettext_lazy as _
from django.contrib.sites.shortcuts import get_current_site
def app_constants(request):
"""
Constants context processor.
:param request: the current request.
:return: All constants for the app.
"""
site = get_current_site(request)
return {
'APP': {
'TITLE': _('Test console for the PySkCode project'),
'TITLE_SHORT': _('PySkCode test console'),
'AUTHOR': 'Fabien Batteix',
'COPYRIGHT': 'TamiaLab 2016',
'DESCRIPTION': _('Test console for the PySkCode project.'),
'GOOGLE_SITE_VERIFICATION_CODE': '',
'TWITTER_USERNAME': 'skywodd',
'TWITTER_ACCOUNT_ID': '250273994',
'FACEBOOK_URL': 'https://www.facebook.com/fabien.batteix',
},
'SITE': {
'NAME': site.name,
'DOMAIN': site.domain,
'PROTO': 'https' if request.is_secure() else 'http'
}
}
|
Python
| 0.024871
|
@@ -426,79 +426,8 @@
LE':
- _('Test console for the PySkCode project'),%0A 'TITLE_SHORT':
_('
@@ -516,16 +516,18 @@
RIGHT':
+_(
'TamiaLa
@@ -533,16 +533,17 @@
ab 2016'
+)
,%0A
|
75926fe8be6f47287561200a0d6e47cad5c51082
|
Update tokenizers.py
|
cobe/tokenizers.py
|
cobe/tokenizers.py
|
# Copyright (C) 2010 Peter Teichman
import re
import Stemmer
import types
class MegaHALTokenizer:
"""A traditional MegaHAL style tokenizer. This considers any of these
to be a token:
* one or more consecutive alpha characters (plus apostrophe)
* one or more consecutive numeric characters
* one or more consecutive punctuation/space characters (not apostrophe)
This tokenizer ignores differences in capitalization."""
def split(self, phrase):
if type(phrase) != types.UnicodeType:
raise TypeError("Input must be Unicode")
if len(phrase) == 0:
return []
# add ending punctuation if it is missing
if phrase[-1] not in ".!?":
phrase = phrase + "."
words = re.findall("([A-Z']+|[0-9]+|[^A-Z'0-9]+)", phrase.upper(),
re.UNICODE)
return words
def join(self, words):
"""Capitalize the first alpha character in the reply and the
first alpha character that follows one of [.?!] and a
space."""
chars = list(u"".join(words))
start = True
for i in xrange(len(chars)):
char = chars[i]
if char.isalpha():
if start:
chars[i] = char.upper()
else:
chars[i] = char.lower()
start = False
else:
if i > 2 and chars[i - 1] in ".?!" and char.isspace():
start = True
return u"".join(chars)
class CobeTokenizer:
"""A tokenizer that is somewhat improved from MegaHAL. These are
considered tokens:
* one or more consecutive Unicode word characters (plus apostrophe and dash)
* one or more consecutive Unicode non-word characters, possibly with
internal whitespace
* the whitespace between word or non-word tokens
* an HTTP url, [word]: followed by any run of non-space characters.
This tokenizer collapses multiple spaces in a whitespace token into a
single space character.
It preserves differences in case. foo, Foo, and FOO are different
tokens."""
def __init__(self):
# Add hyphen to the list of possible word characters, so hyphenated
# words become one token (e.g. hy-phen). But don't remove it from
# the list of non-word characters, so if it's found entirely within
# punctuation it's a normal non-word (e.g. :-( )
self.regex = re.compile("(\w+:\S+" # urls
"|[\w'-]+" # words
"|[^\w\s][^\w]*[^\w\s]" # multiple punctuation
"|[^\w\s]" # a single punctuation character
"|\s+)", # whitespace
re.UNICODE)
def split(self, phrase):
if type(phrase) != types.UnicodeType:
raise TypeError("Input must be Unicode")
# Strip leading and trailing whitespace. This might not be the
# correct choice long-term, but in the brain it prevents edges
# from the root node that have has_space set.
phrase = phrase.strip()
if len(phrase) == 0:
return []
tokens = self.regex.findall(phrase)
# collapse runs of whitespace into a single space
space = u" "
for i, token in enumerate(tokens):
if token[0] == " " and len(token) > 1:
tokens[i] = space
return tokens
def join(self, words):
return u"".join(words)
class CobeStemmer:
def __init__(self, name):
# use the PyStemmer Snowball stemmer bindings
self.stemmer = Stemmer.Stemmer(name)
def stem(self, token):
if not re.search("\w", token, re.UNICODE):
return self.stem_nonword(token)
# Don't preserve case when stemming, i.e. create lowercase stems.
# This will allow us to create replies that switch the case of
# input words, but still generate the reply in context with the
# generated case.
stem = self.stemmer.stemWord(token.lower())
return stem
def stem_nonword(self, token):
# Stem common smile and frown emoticons down to :) and :(
if re.search(":-?[ \)]*\)", token):
return ":)"
if re.search(":-?[' \(]*\(", token):
return ":("
|
Python
| 0.000001
|
@@ -457,107 +457,8 @@
se):
-%0A if type(phrase) != types.UnicodeType:%0A raise TypeError(%22Input must be Unicode%22)
%0A%0A
@@ -2699,108 +2699,8 @@
e):%0A
- if type(phrase) != types.UnicodeType:%0A raise TypeError(%22Input must be Unicode%22)%0A%0A
|
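Both hunks delete the same guard: the explicit types.UnicodeType check at the top of each split() method, which only worked on Python 2 and duplicated what the regex layer tolerates anyway. The CobeTokenizer pattern itself runs fine standalone:

import re

regex = re.compile(r"(\w+:\S+"               # urls
                   r"|[\w'-]+"               # words, hyphens kept inside
                   r"|[^\w\s][^\w]*[^\w\s]"  # runs of punctuation
                   r"|[^\w\s]"               # single punctuation character
                   r"|\s+)",                 # whitespace
                   re.UNICODE)
print(regex.findall(u"hy-phen, see http://example.com :-("))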
447b0bb977f050b904d36cb44aabe34cb03b87af
|
fix notation
|
chainer/functions/array/reshape.py
|
chainer/functions/array/reshape.py
|
from chainer import function
from chainer.utils import type_check
def _count_unknown_dims(shape):
cnt = 0
for dim in shape:
cnt += dim < 0
return cnt
class Reshape(function.Function):
"""Reshapes an input array without copy."""
def __init__(self, shape):
cnt = _count_unknown_dims(shape)
assert cnt == 0 or cnt == 1
self.shape = shape
def check_type_forward(self, in_types):
type_check.expect(
in_types.size() == 1,
)
x_type, = in_types
cnt = _count_unknown_dims(self.shape)
if cnt == 0:
type_check.expect(
type_check.prod(x_type.shape) == type_check.prod(self.shape))
else:
known_size = 1
for s in self.shape:
if s > 0:
known_size *= s
size_var = type_check.Variable(known_size,
'known_size(=%d)' % known_size)
type_check.expect(
type_check.prod(x_type.shape) % size_var == 0)
def forward(self, x):
return x[0].reshape(self.shape),
def backward(self, x, gy):
return gy[0].reshape(x[0].shape),
def reshape(x, shape):
"""Reshapes an input variable without copy.
Args:
x (:class:`~chainer.Variable` or :class:`numpy.ndarray` or \
:class:`cupy.ndarray`): Input variable.
shape (:class:`tuple` of :class:`int` s):
The `size` of shape (`size` means the number of elements) must be
equal to that of original shape. One shape dimension can be -1. In
this case, the value is inferred from the length of the array and
remaining dimensions.
Returns:
~chainer.Variable:
Variable that holds a reshaped version of the input variable.
.. seealso:: :func:`numpy.reshape`, :func:`cupy.reshape`
.. admonition:: Example
>>> x = np.array([[1, 2, 3, 4], [5, 6, 7, 8]])
>>> y = F.reshape(x, (8,))
>>> y.shape
(8,)
>>> y.data
array([1, 2, 3, 4, 5, 6, 7, 8])
>>> y = F.reshape(x, (4,-1))
>>> y.shape
(4, 2)
>>> y.data
array([[1, 2],
[3, 4],
[5, 6],
[7, 8]])
"""
return Reshape(shape)(x)
|
Python
| 0.000026
|
@@ -1477,22 +1477,24 @@
The
-%60
+**
size
-%60
+**
of shap
@@ -1500,14 +1500,16 @@
pe (
-%60
+**
size
-%60
+**
mea
@@ -1539,19 +1539,16 @@
ts) must
- be
%0A
@@ -1552,16 +1552,19 @@
+be
equal to
@@ -1618,19 +1618,16 @@
n be -1.
- In
%0A
@@ -1631,16 +1631,19 @@
+In
this cas
@@ -1699,12 +1699,8 @@
rray
- and
%0A
@@ -1708,16 +1708,20 @@
+and
remainin
|
f2edfbbf3a5c4e18a26b8b9479456b91311bd4ea
|
check that the datum has a module_id
|
corehq/apps/app_manager/app_schemas/session_schema.py
|
corehq/apps/app_manager/app_schemas/session_schema.py
|
from django.utils.text import slugify
from corehq import toggles
from corehq.apps.app_manager.const import USERCASE_TYPE
from corehq.apps.app_manager.templatetags.xforms_extras import clean_trans
from corehq.apps.app_manager.util import is_usercase_in_use
def get_session_schema(form):
"""Get form session schema definition
"""
from corehq.apps.app_manager.suite_xml.sections.entries import EntriesHelper
app = form.get_app()
structure = {}
datums = EntriesHelper(app).get_datums_meta_for_form_generic(form)
datums = [
d for d in datums
if d.requires_selection and d.case_type and not d.is_new_case_id
]
def _get_structure(datum, data_registry, source=None):
id_source = f":{slugify(source)}" if source else ""
return {
"reference": {
"hashtag": f'#registry_case{id_source}' if data_registry else f"#case{id_source}",
"source": "registry" if data_registry else "casedb",
"subset": f"case{id_source}",
"key": "@case_id",
},
}
unrelated_parents = set()
for datum in datums:
if datum.module_id:
module = app.get_module_by_unique_id(datum.module_id)
parent_select_active = hasattr(module, 'parent_select') and module.parent_select.active
if parent_select_active and module.parent_select.relationship is None:
# for child modules that use parent select where the parent is not a 'related' case
# See toggles.NON_PARENT_MENU_SELECTION
unrelated_parents.add(module.parent_select.module_id)
data_structure = {}
for i, datum in enumerate(reversed(datums)):
module = app.get_module_by_unique_id(datum.module_id)
data_registry = module.search_config.data_registry
if i == 0:
# always add the datum for this module
data_structure[datum.datum.id] = _get_structure(datum, data_registry)
else:
if datum.module_id and datum.module_id in unrelated_parents:
source = clean_trans(module.name, app.langs) # ensure that this structure reference is unique
data_structure[datum.datum.id] = _get_structure(datum, data_registry, source)
if data_structure:
structure["data"] = {
"merge": True,
"structure": data_structure,
}
if is_usercase_in_use(app.domain):
structure["context"] = {
"merge": True,
"structure": {
"userid": {
"reference": {
"hashtag": "#user",
"source": "casedb",
"subset": USERCASE_TYPE,
"subset_key": "@case_type",
"subset_filter": True,
"key": "hq_user_id",
},
},
},
}
return {
"id": "commcaresession",
"uri": "jr://instance/session",
"name": "Session",
"path": "/session",
"structure": structure,
}
|
Python
| 0.000018
|
@@ -1719,16 +1719,69 @@
tums)):%0A
+ if not datum.module_id:%0A continue%0A
|
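The hunk inserts a two-line guard at the top of the loop: datums without a module_id are skipped before the code calls app.get_module_by_unique_id(datum.module_id), which would otherwise be handed None. The guard-clause shape, reduced to a runnable stand-in:

class Datum:
    def __init__(self, module_id, datum_id):
        self.module_id = module_id
        self.datum_id = datum_id

datums = [Datum(None, "case_id_orphan"), Datum("m1", "case_id")]

for i, datum in enumerate(reversed(datums)):
    if not datum.module_id:
        continue  # the fix: never look up a module for a module-less datum
    print(i, datum.module_id, datum.datum_id)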
e3548d62aa67472f291f6d3c0c8beca9813d6032
|
Make it possible to step() in a newly created env, rather than throwing AttributeError
|
gym/envs/toy_text/discrete.py
|
gym/envs/toy_text/discrete.py
|
from gym import Env
from gym import spaces
import numpy as np
def categorical_sample(prob_n):
"""
Sample from categorical distribution
Each row specifies class probabilities
"""
prob_n = np.asarray(prob_n)
csprob_n = np.cumsum(prob_n)
return (csprob_n > np.random.rand()).argmax()
class DiscreteEnv(Env):
"""
Has the following members
- nS: number of states
- nA: number of actions
- P: transitions (*)
- isd: initial state distribution (**)
(*) dictionary dict of dicts of lists, where
P[s][a] == [(probability, nextstate, reward, done), ...]
(**) list or array of length nS
"""
def __init__(self, nS, nA, P, isd):
self.action_space = spaces.Discrete(nA)
self.observation_space = spaces.Discrete(nS)
self.nA = nA
self.P = P
self.isd = isd
self.lastaction=None # for rendering
@property
def nS(self):
return self.observation_space.n
def _reset(self):
self.s = categorical_sample(self.isd)
return self.s
def _step(self, a):
transitions = self.P[self.s][a]
i = categorical_sample([t[0] for t in transitions])
p, s, r, d= transitions[i]
self.s = s
self.lastaction=a
return (s, r, d, {"prob" : p})
|
Python
| 0.000004
|
@@ -898,16 +898,38 @@
endering
+%0A self._reset()
%0A%0A @p
|
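The fix is a single call added at the end of __init__: self._reset() seeds self.s, so step() on a freshly constructed env no longer raises AttributeError. A gym-free miniature of the same class shape:

import numpy as np

class TinyDiscreteEnv:
    def __init__(self, isd):
        self.isd = np.asarray(isd)
        self._reset()  # the added line: give self.s a value immediately

    def _reset(self):
        csprob = np.cumsum(self.isd)
        self.s = int((csprob > np.random.rand()).argmax())
        return self.s

    def _step(self, a):
        self.s = (self.s + a) % len(self.isd)
        return self.s, 0.0, False, {"prob": 1.0}

env = TinyDiscreteEnv([0.5, 0.5])
print(env._step(1))  # works without calling _reset() first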
253a0f786339e90b1b5841b94a22d44e5db3b85c
|
Add small delay in TemporalInformationRetriever to avoid endless loop
|
server/src/weblab/user_processing/TemporalInformationRetriever.py
|
server/src/weblab/user_processing/TemporalInformationRetriever.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2005-2009 University of Deusto
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
# This software consists of contributions made by many individuals,
# listed below:
#
# Author: Pablo Orduña <pablo@ordunya.com>
#
import threading
import time
import voodoo.log as log
import weblab.data.experiments.Usage as Usage
import weblab.data.Command as Command
class TemporalInformationRetriever(threading.Thread):
"""
This class retrieves continuously the information of initial and finished experiments.
"""
def __init__(self, initial_store, finished_store, db_manager):
threading.Thread.__init__(self)
self.keep_running = True
self.initial_store = initial_store
self.finished_store = finished_store
self.iterations = 0
self.db_manager = db_manager
self.timeout = None
self.setDaemon(True)
def run(self):
while self.keep_running:
try:
self.iterations += 1
self.iterate()
except:
log.log( TemporalInformationRetriever, log.LogLevel.Critical, "Exception iterating in TemporalInformationRetriever!!!")
log.log_exc( TemporalInformationRetriever, log.LogLevel.Critical )
def stop(self):
self.keep_running = False
def iterate(self):
self.iterate_over_store(self.initial_store, 'initial')
if self.keep_running:
self.iterate_over_store(self.finished_store, 'finish')
def iterate_over_store(self, store, message):
information = store.get(timeout=self.timeout)
if information is not None:
reservation_id, obj, initial_time, end_time = information
initial_timestamp = time.mktime(initial_time.timetuple())
end_timestamp = time.mktime(end_time.timetuple())
command = Usage.CommandSent(
Command.Command("@@@%s@@@" % message), initial_timestamp,
Command.Command(str(obj)), end_timestamp
)
if not self.keep_running or not self.db_manager.append_command(reservation_id, command):
# If it could not be added because the experiment id
# did not exist, put it again in the queue
store.put(reservation_id, obj, initial_time, end_time)
|
Python
| 0
|
@@ -2501,10 +2501,43 @@
nd_time)
+%0A time.sleep(0.01)
%0A%0A
|
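The hunk adds time.sleep(0.01) right after the store.put that re-queues information whose experiment id is not yet in the database; without it, iterate_over_store can get and re-put the same item in a tight loop and pin a CPU core. The requeue-with-delay shape in miniature:

import time
import queue

store = queue.Queue()
store.put("reservation-1")

for _ in range(3):
    item = store.get()
    appended = False  # stand-in for db_manager.append_command failing
    if not appended:
        store.put(item)   # not processable yet: put it back for later
        time.sleep(0.01)  # the added delay keeps this from busy-looping
print("done, still queued:", store.qsize())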
92737e3f95ff94129e52e1fab1f40a0f70550d46
|
Update the ParticleFilterSetOperations
|
hoomd/filter/set_.py
|
hoomd/filter/set_.py
|
from hoomd.filter.filter_ import ParticleFilter
from hoomd import _hoomd
class ParticleFilterSetOperations(ParticleFilter):
def __init__(self, f, g):
if f == g:
raise ValueError("Cannot use same filter for {}"
"".format(self.__class__.__name__))
else:
self._f = f
self._g = g
getattr(_hoomd, self._cpp_cls_name).__init__(self, f, g)
def __hash__(self):
return hash(hash(self._f) + hash(self._g))
def __eq__(self, other):
return type(self) == type(other) and \
self._f == other._f and \
self._g == other._g
class SetDifference(ParticleFilterSetOperations,
_hoomd.ParticleFilterSetDifference):
_cpp_cls_name = 'ParticleFilterSetDifference'
class Union(ParticleFilterSetOperations, _hoomd.ParticleFilterUnion):
_cpp_cls_name = 'ParticleFilterUnion'
class Intersection(ParticleFilterSetOperations,
_hoomd.ParticleFilterIntersection):
_cpp_cls_name = 'ParticleFilterIntersection'
|
Python
| 0
|
@@ -356,16 +356,128 @@
._g = g%0A
+ # Grab the C++ class constructor for the set operation using the class%0A # variable _cpp_cls_name%0A
@@ -635,24 +635,56 @@
lf, other):%0A
+ if self._symmetric:%0A
retu
@@ -722,32 +722,37 @@
d %5C%0A
+ (
self._f == other
@@ -754,16 +754,40 @@
other._f
+ or self._f == other._g)
and %5C%0A
@@ -789,32 +789,186 @@
d %5C%0A
+
+ (self._g == other._g or self._g == other._f)%0A else:%0A return type(self) == type(other) and %5C%0A self._f == other._f and
self._g == other
@@ -993,16 +993,17 @@
ference(
+_
Particle
@@ -1130,16 +1130,39 @@
erence'%0A
+ _symmetric = False%0A
%0A%0Aclass
@@ -1163,24 +1163,25 @@
class Union(
+_
ParticleFilt
@@ -1268,18 +1268,40 @@
rUnion'%0A
+ _symmetric = True
%0A%0A
+%0A
class In
@@ -1311,16 +1311,17 @@
section(
+_
Particle
@@ -1441,12 +1441,34 @@
tersection'%0A
+ _symmetric = True%0A
|
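Decoded, the rewrite adds a _symmetric class flag (True on Union and Intersection, False on SetDifference) and makes __eq__ order-insensitive when the flag is set, since unions and intersections of two filters commute while set difference does not. A dependency-free sketch of that dispatch:

class SetOp:
    _symmetric = False
    def __init__(self, f, g):
        self._f, self._g = f, g
    def __eq__(self, other):
        if type(self) is not type(other):
            return False
        if self._symmetric:
            # Commutative operations compare as unordered pairs.
            return {self._f, self._g} == {other._f, other._g}
        return self._f == other._f and self._g == other._g

class Union(SetOp):
    _symmetric = True

class Difference(SetOp):
    _symmetric = False

print(Union("a", "b") == Union("b", "a"))            # True
print(Difference("a", "b") == Difference("b", "a"))  # False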
c6218505ee78077b682490ecd6cba64b0d12ba28
|
update docs for electric field
|
hoomd/md/external.py
|
hoomd/md/external.py
|
# Copyright (c) 2009-2021 The Regents of the University of Michigan
# This file is part of the HOOMD-blue project, released under the BSD 3-Clause License.
# Maintainer: joaander / All Developers are free to add commands for new features
R""" External forces.
Apply an external force to all particles in the simulation. This module
organizes all external forces. As an example, a force derived from a `Periodic`
potential can be used to induce a concentration modulation in the system.
"""
from hoomd import _hoomd
from hoomd.md import _md
from hoomd.md import force
import hoomd
import sys
import math
from hoomd.data.parameterdicts import TypeParameterDict
from hoomd.data.typeparam import TypeParameter
class External(force.Force):
"""
Common External potential documentation.
Users should not invoke `External` directly. Documentation common to all
external potentials is located here. External potentials represent forces
which are applied to all particle in the simulation by an external agent.
"""
def _attach(self):
if isinstance(self._simulation.device, hoomd.device.CPU):
cls = getattr(_md, self._cpp_class_name)
else:
cls = getattr(_md, self._cpp_class_name + "GPU")
self._cpp_obj = cls(self._simulation.state._cpp_sys_def)
super()._attach()
class Periodic(External):
""" One-dimension periodic potential.
`Periodic` specifies that an external force should be added to every
particle in the simulation to induce a periodic modulation in the particle
concentration. The modulation is one-dimensional and extends along the
lattice vector :math:`\\mathbf{a}_i` of the simulation cell. The force
parameters can be set on a per particle type basis. This potential can, for
example, be used to induce an ordered phase in a block-copolymer melt.
The external potential :math:`V(\\vec{r})` is implemented using the following
formula:
.. math::
V(\\vec{r}) = A * \\tanh\\left[\\frac{1}{2 \\pi p w} \\cos\\left(
p \\vec{b}_i\\cdot\\vec{r}\\right)\\right]
The coefficients above must be set per unique particle type.
.. py:attribute:: params
The `Periodic` external potential parameters. The dictionary has the
following keys:
* ``A`` (`float`, **required**) -
Ordering parameter :math:`A` (in energy units).
* ``i`` (`int`, **required**) -
:math:`\\vec{b}_i`, :math:`i=0, 1, 2`, is the simulation box's
reciprocal lattice vector in the :math:`i` direction (dimensionless).
* ``w`` (`float`, **required**) -
The interface width :math:`w` relative to the distance
:math:`2\\pi/|\\mathbf{b_i}|` between planes in the
:math:`i`-direction. (dimensionless).
* ``p`` (`int`, **required**) -
The periodicity :math:`p` of the modulation (dimensionless).
Type: `TypeParameter` [``particle_type``, `dict`]
Example::
# Apply a periodic composition modulation along the first lattice vector
periodic = external.Periodic()
periodic.params['A'] = dict(A=1.0, i=0, w=0.02, p=3)
periodic.params['B'] = dict(A=-1.0, i=0, w=0.02, p=3)
"""
_cpp_class_name = "PotentialExternalPeriodic"
def __init__(self):
params = TypeParameter(
'params', 'particle_types',
TypeParameterDict(i=int, A=float, w=float, p=int, len_keys=1))
self._add_typeparam(params)
class ElectricField(External):
R""" Electric field.
:py:class:`ElectricField` specifies that an external force should be
added to every particle in the simulation that results from an electric field.
The external potential :math:`V(\vec{r})` is implemented using the following formula:
.. math::
V(\vec{r}) = - q_i \vec{E} \cdot \vec{r}
where :math:`q_i` is the particle charge and :math:`\vec{E}` is the field vector
Example::
# Apply an electric field in the x-direction
e_field = external.ElectricField((1,0,0))
"""
_cpp_class_name = "PotentialExternalElectricField"
def __init__(self):
params = TypeParameter(
'params', 'particle_types',
TypeParameterDict(E=(float, float, float), len_keys=1))
self._add_typeparam(params)
|
Python
| 0
|
@@ -3559,17 +3559,16 @@
l):%0A
-R
%22%22%22 Elec
@@ -3588,18 +3588,8 @@
-:py:class:
%60Ele
@@ -3642,20 +3642,16 @@
hould be
-%0A
added t
@@ -3657,16 +3657,20 @@
to every
+%0A
particl
@@ -3764,16 +3764,17 @@
ath:%60V(%5C
+%5C
vec%7Br%7D)%60
@@ -3808,16 +3808,20 @@
ollowing
+%0A
formula
@@ -3848,16 +3848,17 @@
V(%5C
+%5C
vec%7Br%7D)
@@ -3869,16 +3869,17 @@
q_i
+%5C
%5Cvec%7BE%7D
%5Ccdo
@@ -3878,14 +3878,16 @@
%7BE%7D
+%5C
%5Ccdot
+%5C
%5Cvec
@@ -3949,16 +3949,17 @@
:math:%60%5C
+%5C
vec%7BE%7D%60
@@ -3970,23 +3970,419 @@
he field
+%0A
vector
+. The field vector :math:%60%5C%5Cvec%7BE%7D%60 must be set per unique particle%0A types.%0A%0A .. py:attribute:: params%0A%0A The %60ElectricField%60 potential parameters. The dictionary has the%0A following keys:%0A%0A * %60%60E%60%60 (%60tuple%60 %5B%60float%60, %60float%60, %60float%60%5D, **required**) -%0A The electric field vector :math:%60%5C%5Cvec%7BE%7D%60.%0A%0A Type: %60TypeParameter%60 %5B%60%60particle_type%60%60, %60dict%60%5D
%0A%0A Ex
@@ -4484,16 +4484,55 @@
icField(
+)%0A e_field.params%5B'A'%5D = dict(E=
(1,0,0))
|
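Besides documenting the params dictionary, the hunks drop the R prefix from the ElectricField docstring and double every LaTeX backslash to compensate; the two spellings yield byte-identical docstrings:

def field_plain():
    """Potential :math:`V(\\vec{r}) = - q_i \\vec{E} \\cdot \\vec{r}`"""

def field_raw():
    r"""Potential :math:`V(\vec{r}) = - q_i \vec{E} \cdot \vec{r}`"""

print(field_plain.__doc__ == field_raw.__doc__)  # True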
8d98fe5570ce37512128d46853000dc860f798b2
|
Update jupyterhub_config.py
|
jupyterhub/jupyterhub_config.py
|
jupyterhub/jupyterhub_config.py
|
# Configuration file for jupyterhub.
from jupyter_client.localinterfaces import public_ips
c = get_config() # noqa
c.JupyterHub.ssl_key = 'test.key'
c.JupyterHub.ssl_cert = 'test.crt'
c.JupyterHub.hub_ip = public_ips()[0]
# Choose between system-user mode and virtual-user mode
setting_mode = ('system_user', 'virtual_user')[1]
if setting_mode == 'virtual_user':
c.JupyterHub.spawner_class = 'remoteappmanager.spawners.VirtualUserSpawner'
# Parent directory in which temporary directory is created for
# each virtual user
# Set this to a drive with well defined capacity quota
# If unset, no workspace would be available
c.Spawner.workspace_dir = '/tmp/remoteapp'
# FIXME: replace me with other authenticator (e.g. GitHub OAuth...)
c.JupyterHub.authenticator_class = (
'remoteappmanager.auth.WorldAuthenticator')
elif setting_mode == 'system_user':
c.JupyterHub.spawner_class = 'remoteappmanager.spawners.Spawner'
|
Python
| 0.000001
|
@@ -951,16 +951,26 @@
pawners.
+SystemUser
Spawner'
|
6e10b24fe798c2c666c050eb834370e5745276b4
|
fix retry options
|
cumulusci/tasks/salesforce/install_package_version.py
|
cumulusci/tasks/salesforce/install_package_version.py
|
from cumulusci.core.exceptions import TaskOptionsError
from cumulusci.core.utils import process_bool_arg
from cumulusci.salesforce_api.exceptions import MetadataApiError
from cumulusci.salesforce_api.package_install import install_package_version
from cumulusci.salesforce_api.package_zip import InstallPackageZipBuilder
from cumulusci.tasks.salesforce import Deploy
class InstallPackageVersion(Deploy):
task_options = {
"name": {
"description": "The name of the package to install. Defaults to project__package__name_managed",
"required": False,
},
"namespace": {
"description": "The namespace of the package to install. Defaults to project__package__namespace",
"required": True,
},
"version": {
"description": 'The version of the package to install. "latest" and "latest_beta" can be used to trigger lookup via Github Releases on the repository.',
"required": True,
},
"activateRSS": {
"description": "If True, preserve the isActive state of "
"Remote Site Settings and Content Security Policy "
"in the package. Default: False."
},
"password": {"description": "The package password. Optional."},
"retries": {"description": "Number of retries (default=5)"},
"retry_interval": {
"description": "Number of seconds to wait before the next retry (default=5),"
},
"retry_interval_add": {
"description": "Number of seconds to add before each retry (default=30),"
},
"security_type": {
"description": "Which users to install package for (FULL = all users, NONE = admins only)"
},
}
def _init_options(self, kwargs):
super(InstallPackageVersion, self)._init_options(kwargs)
if "namespace" not in self.options:
self.options["namespace"] = self.project_config.project__package__namespace
if "name" not in self.options:
self.options["name"] = (
self.project_config.project__package__name_managed
or self.project_config.project__package__name
or self.options["namespace"]
)
if "retries" not in self.options:
self.options["retries"] = 5
if "retry_interval" not in self.options:
self.options["retry_interval"] = 5
if "retry_interval_add" not in self.options:
self.options["retry_interval_add"] = 30
version = self.options.get("version")
if version == "latest":
self.options["version"] = self.project_config.get_latest_version()
elif version == "latest_beta":
self.options["version"] = self.project_config.get_latest_version(beta=True)
elif version == "previous":
self.options["version"] = self.project_config.get_previous_version()
self.options["activateRSS"] = process_bool_arg(self.options.get("activateRSS"))
self.options["security_type"] = self.options.get("security_type", "FULL")
if self.options["security_type"] not in ("FULL", "NONE", "PUSH"):
raise TaskOptionsError(
f"Unsupported value for security_type: {self.options['security_type']}"
)
def _get_api(self, path=None):
package_zip = InstallPackageZipBuilder(
namespace=self.options["namespace"],
version=self.options["version"],
activateRSS=self.options["activateRSS"],
password=self.options.get("password"),
securityType=self.options.get("security_type", "FULL"),
)
return self.api_class(self, package_zip(), purge_on_delete=False)
def _run_task(self):
version = self.options["version"]
self.logger.info(f"Installing {self.options['name']} {version}")
if isinstance(version, str) and version.startswith("04t"):
install_options = {**self.options, "version_id": version}
retry_options = {**self.options}
install_package_version(
self.project_config, self.org_config, install_options, retry_options
)
else:
self._retry()
self.org_config.reset_installed_packages()
def _try(self):
api = self._get_api()
api()
def _is_retry_valid(self, e):
if isinstance(e, MetadataApiError) and (
"This package is not yet available" in str(e)
or "InstalledPackage version number" in str(e)
or "The requested package doesn't yet exist or has been deleted" in str(e)
):
return True
def freeze(self, step):
options = self.options.copy()
options["version"] = str(options["version"])
name = options.pop("name")
task_config = {"options": options, "checks": self.task_config.checks or []}
ui_step = {
"name": "Install {} {}".format(name, options["version"]),
"kind": "managed",
"is_required": True,
}
ui_step.update(step.task_config.get("ui_options", {}))
ui_step.update(
{
"path": step.path,
"step_num": str(step.step_num),
"task_class": self.task_config.class_path,
"task_config": task_config,
"source": step.project_config.source.frozenspec,
}
)
return [ui_step]
|
Python
| 0.000022
|
@@ -4062,30 +4062,221 @@
ions = %7B
-**self.options
+%0A %22retries%22: self.options%5B%22retries%22%5D,%0A %22retry_interval%22: self.options%5B%22retry_interval%22%5D,%0A %22retry_interval_add%22: self.options%5B%22retry_interval_add%22%5D,%0A
%7D%0A
|
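Note: the fix above stops forwarding every task option as retry options and passes only the three retry-related keys. A standalone sketch of the narrowing, with example values:

options = {
    "name": "MyPackage",            # example values, not from the record
    "version": "04t000000000000",
    "retries": 5,
    "retry_interval": 5,
    "retry_interval_add": 30,
}
retry_keys = ("retries", "retry_interval", "retry_interval_add")
retry_options = {k: options[k] for k in retry_keys}
print(retry_options)  # only the retry settings survive
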
1de78567614ace199e03aca5d93aee9d426ddb3e
|
Allow lastchange.py to be used for arbitrary repositories.
|
build/util/lastchange.py
|
build/util/lastchange.py
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
lastchange.py -- Chromium revision fetching utility.
"""
import re
import optparse
import os
import subprocess
import sys
_GIT_SVN_ID_REGEX = re.compile(r'.*git-svn-id:\s*([^@]*)@([0-9]+)', re.DOTALL)
class VersionInfo(object):
def __init__(self, url, revision):
self.url = url
self.revision = revision
def FetchSVNRevision(directory, svn_url_regex):
"""
Fetch the Subversion branch and revision for a given directory.
Errors are swallowed.
Returns:
A VersionInfo object or None on error.
"""
try:
proc = subprocess.Popen(['svn', 'info'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=directory,
shell=(sys.platform=='win32'))
except OSError:
# command is apparently either not installed or not executable.
return None
if not proc:
return None
attrs = {}
for line in proc.stdout:
line = line.strip()
if not line:
continue
key, val = line.split(': ', 1)
attrs[key] = val
try:
match = svn_url_regex.search(attrs['URL'])
if match:
url = match.group(2)
else:
url = ''
revision = attrs['Revision']
except KeyError:
return None
return VersionInfo(url, revision)
def RunGitCommand(directory, command):
"""
Launches git subcommand.
Errors are swallowed.
Returns:
A process object or None.
"""
command = ['git'] + command
# Force shell usage under cygwin. This is a workaround for
# mysterious loss of cwd while invoking cygwin's git.
# We can't just pass shell=True to Popen, as under win32 this will
# cause CMD to be used, while we explicitly want a cygwin shell.
if sys.platform == 'cygwin':
command = ['sh', '-c', ' '.join(command)]
try:
proc = subprocess.Popen(command,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=directory,
shell=(sys.platform=='win32'))
return proc
except OSError:
return None
def FetchGitRevision(directory):
"""
Fetch the Git hash for a given directory.
Errors are swallowed.
Returns:
A VersionInfo object or None on error.
"""
proc = RunGitCommand(directory, ['rev-parse', 'HEAD'])
if proc:
output = proc.communicate()[0].strip()
if proc.returncode == 0 and output:
return VersionInfo('git', output[:7])
return None
def FetchGitSVNURLAndRevision(directory, svn_url_regex):
"""
Fetch the Subversion URL and revision through Git.
Errors are swallowed.
Returns:
A tuple containing the Subversion URL and revision.
"""
proc = RunGitCommand(directory, ['log', '-1',
'--grep=git-svn-id', '--format=%b'])
if proc:
output = proc.communicate()[0].strip()
if proc.returncode == 0 and output:
# Extract the latest SVN revision and the SVN URL.
# The target line is the last "git-svn-id: ..." line like this:
# git-svn-id: svn://svn.chromium.org/chrome/trunk/src@85528 0039d316....
match = _GIT_SVN_ID_REGEX.search(output)
if match:
revision = match.group(2)
url_match = svn_url_regex.search(match.group(1))
if url_match:
url = url_match.group(2)
else:
url = ''
return url, revision
return None, None
def FetchGitSVNRevision(directory, svn_url_regex):
"""
Fetch the Git-SVN identifier for the local tree.
Errors are swallowed.
"""
url, revision = FetchGitSVNURLAndRevision(directory, svn_url_regex)
if url and revision:
return VersionInfo(url, revision)
return None
def FetchVersionInfo(default_lastchange, directory=None,
directory_regex_prior_to_src_url='chrome|svn'):
"""
Returns the last change (in the form of a branch, revision tuple),
from some appropriate revision control system.
"""
svn_url_regex = re.compile(
r'.*/(' + directory_regex_prior_to_src_url + r')(/.*)')
version_info = (FetchSVNRevision(directory, svn_url_regex) or
FetchGitSVNRevision(directory, svn_url_regex) or
FetchGitRevision(directory))
if not version_info:
if default_lastchange and os.path.exists(default_lastchange):
revision = open(default_lastchange, 'r').read().strip()
version_info = VersionInfo(None, revision)
else:
version_info = VersionInfo(None, None)
return version_info
def WriteIfChanged(file_name, contents):
"""
Writes the specified contents to the specified file_name
iff the contents are different than the current contents.
"""
try:
old_contents = open(file_name, 'r').read()
except EnvironmentError:
pass
else:
if contents == old_contents:
return
os.unlink(file_name)
open(file_name, 'w').write(contents)
def main(argv=None):
if argv is None:
argv = sys.argv
parser = optparse.OptionParser(usage="lastchange.py [options]")
parser.add_option("-d", "--default-lastchange", metavar="FILE",
help="default last change input FILE")
parser.add_option("-o", "--output", metavar="FILE",
help="write last change to FILE")
parser.add_option("--revision-only", action='store_true',
help="just print the SVN revision number")
opts, args = parser.parse_args(argv[1:])
out_file = opts.output
while len(args) and out_file is None:
if out_file is None:
out_file = args.pop(0)
if args:
sys.stderr.write('Unexpected arguments: %r\n\n' % args)
parser.print_help()
sys.exit(2)
version_info = FetchVersionInfo(opts.default_lastchange,
os.path.dirname(sys.argv[0]))
if version_info.revision == None:
version_info.revision = '0'
if opts.revision_only:
print version_info.revision
else:
contents = "LASTCHANGE=%s\n" % version_info.revision
if out_file:
WriteIfChanged(out_file, contents)
else:
sys.stdout.write(contents)
return 0
if __name__ == '__main__':
sys.exit(main())
|
Python
| 0.000009
|
@@ -5556,16 +5556,139 @@
umber%22)%0A
+ parser.add_option(%22-s%22, %22--source-dir%22, metavar=%22DIR%22,%0A help=%22use repository in the given directory%22)%0A
opts,
@@ -5722,16 +5722,16 @@
gv%5B1:%5D)%0A
-
%0A out_f
@@ -5955,16 +5955,134 @@
xit(2)%0A%0A
+ if opts.source_dir:%0A src_dir = opts.source_dir%0A else:%0A src_dir = os.path.dirname(os.path.abspath(__file__))%0A%0A
versio
@@ -6135,43 +6135,16 @@
nge,
-%0A os.path.dirname(sys.argv%5B0%5D)
+ src_dir
)%0A%0A
|
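Note: the diff above adds a --source-dir flag and falls back to the script's own directory when the flag is absent. A minimal optparse sketch of that fallback, runnable as a script:

import optparse
import os

parser = optparse.OptionParser(usage="lastchange.py [options]")
parser.add_option("-s", "--source-dir", metavar="DIR",
                  help="use repository in the given directory")
opts, _ = parser.parse_args([])  # no arguments: exercise the fallback

src_dir = opts.source_dir or os.path.dirname(os.path.abspath(__file__))
print(src_dir)
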
fc6716854bc876730f1f3684945061fcf1d48072
|
Fix default optional prefix
|
hashid_field/rest.py
|
hashid_field/rest.py
|
from django.apps import apps
from django.core import exceptions
from hashids import Hashids
from rest_framework import fields, serializers
from hashid_field.conf import settings
from hashid_field.hashid import Hashid
class UnconfiguredHashidSerialField(fields.Field):
def bind(self, field_name, parent):
super().bind(field_name, parent)
raise exceptions.ImproperlyConfigured(
"The field '{field_name}' on {parent} must be explicitly declared when used with a ModelSerializer".format(
field_name=field_name, parent=parent.__class__.__name__))
class HashidSerializerMixin(object):
usage_text = "Must pass a HashidField, HashidAutoField or 'app_label.model.field'"
def __init__(self, **kwargs):
self.hashid_salt = kwargs.pop('salt', settings.HASHID_FIELD_SALT)
self.hashid_min_length = kwargs.pop('min_length', 7)
self.hashid_alphabet = kwargs.pop('alphabet', Hashids.ALPHABET)
self.prefix = None
source_field = kwargs.pop('source_field', None)
if source_field:
from hashid_field import HashidField, HashidAutoField
if isinstance(source_field, str):
try:
app_label, model_name, field_name = source_field.split(".")
except ValueError:
raise ValueError(self.usage_text)
model = apps.get_model(app_label, model_name)
source_field = model._meta.get_field(field_name)
elif not isinstance(source_field, (HashidField, HashidAutoField)):
raise TypeError(self.usage_text)
self.hashid_salt, self.hashid_min_length, self.hashid_alphabet, self.prefix = \
source_field.salt, source_field.min_length, source_field.alphabet, source_field.prefix
self._hashids = Hashids(salt=self.hashid_salt, min_length=self.hashid_min_length, alphabet=self.hashid_alphabet)
super().__init__(**kwargs)
def to_internal_value(self, data):
try:
value = super().to_internal_value(data)
return Hashid(value, hashids=self._hashids, prefix=self.prefix)
except ValueError:
raise serializers.ValidationError("Invalid int or Hashid string")
class HashidSerializerCharField(HashidSerializerMixin, fields.CharField):
def to_representation(self, value):
return str(value)
class HashidSerializerIntegerField(HashidSerializerMixin, fields.IntegerField):
def to_representation(self, value):
return int(value)
|
Python
| 0.000002
|
@@ -977,20 +977,42 @@
refix =
-None
+kwargs.pop('alphabet', %22%22)
%0A%0A
|
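Note: the fix above replaces the hard-coded None with a kwargs.pop(...) call defaulting to an empty string, so an explicit value can still be passed. A standalone sketch of the pop-with-default pattern; the key name 'prefix' below is illustrative (the diff body itself pops 'alphabet'):

def make_field(**kwargs):
    prefix = kwargs.pop('prefix', "")  # empty-string default instead of None
    return prefix, kwargs

print(make_field(prefix="user_", min_length=7))  # ('user_', {'min_length': 7})
print(make_field())                              # ('', {})
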
713a3c19fa5ec4bc5aa865e1403022069b62bfd9
|
Remove obsolete line break. (#539)
|
buildifier/buildifier.py
|
buildifier/buildifier.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import fnmatch
import html
import locale
import os.path
import re
import subprocess
import sys
regex = re.compile(
r"^(?P<filename>[^:]*):(?P<line>\d*):(?:(?P<column>\d*):)? (?P<message_id>[^:]*): (?P<message>.*) \((?P<message_url>.*)\)$",
re.MULTILINE,
)
BUILDIFIER_VERSION_PATTERN = re.compile(r"^buildifier version: ([\.\w]+)$", re.MULTILINE)
BUILDIFIER_URL = "https://github.com/bazelbuild/buildtools/tree/master/buildifier"
# https://github.com/bazelbuild/buildtools/blob/master/buildifier/buildifier.go#L333
# Buildifier error code for "needs formatting". We should fail on all other error codes > 0
# since they indicate a problem in how Buildifier is used.
BUILDIFIER_FORMAT_ERROR_CODE = 4
def eprint(*args, **kwargs):
"""
Print to stderr and flush (just in case).
"""
print(*args, flush=True, file=sys.stderr, **kwargs)
def upload_output(output):
# Generate output usable by Buildkite's annotations.
eprint("--- :hammer_and_wrench: Printing raw output for debugging")
eprint(output)
eprint("+++ :buildkite: Uploading output via 'buildkite annotate'")
result = subprocess.run(
["buildkite-agent", "annotate", "--style", "warning", "--context", "buildifier"],
input=output.encode(locale.getpreferredencoding(False)),
)
if result.returncode != 0:
eprint(
":rotating_light: 'buildkite-agent annotate' failed with exit code {}".format(
result.returncode
)
)
def get_file_url(filename, line):
commit = os.environ.get("BUILDKITE_COMMIT")
repo = os.environ.get("BUILDKITE_PULL_REQUEST_REPO", os.environ.get("BUILDKITE_REPO", None))
if not commit or not repo:
return None
# Example 1: https://github.com/bazelbuild/bazel.git
# Example 2: git://github.com/philwo/bazel.git
# Example 3: git@github.com:bazelbuild/bazel.git
match = re.match(r"(?:(?:git|https?)://|git@)(github.com[:/].*)\.git", repo)
if match:
return "https://{}/blob/{}/{}#L{}".format(
match[1].replace(":", "/"), commit, filename, line
)
return None
def run_buildifier(flag, files=None, version=None, what=None):
label = "+++ :bazel: Running "
if version:
label += "Buildifier " + version
else:
label += "unreleased Buildifier"
if what:
label += ": " + what
eprint(label)
args = ["buildifier", flag]
if files:
args += files
return subprocess.run(args, capture_output=True, universal_newlines=True)
def create_heading(issue_type, issue_count):
return "##### :bazel: buildifier: found {} {} issue{} in your WORKSPACE, BUILD and *.bzl files\n".format(
issue_count, issue_type, "s" if issue_count > 1 else ""
)
def main(argv=None):
if argv is None:
argv = sys.argv[1:]
# Gather all files to process.
eprint("+++ :female-detective: Looking for WORKSPACE, BUILD, BUILD.bazel and *.bzl files")
files = []
build_bazel_found = False
for root, _, filenames in os.walk("."):
for filename in filenames:
if fnmatch.fnmatch(filename, "BUILD.bazel"):
build_bazel_found = True
for pattern in ("WORKSPACE", "BUILD", "BUILD.bazel", "*.bzl"):
if fnmatch.fnmatch(filename, pattern):
files.append(os.path.relpath(os.path.join(root, filename)))
if build_bazel_found:
eprint(
"Found BUILD.bazel files in the workspace, thus ignoring BUILD files without suffix."
)
files = [fname for fname in files if not fnmatch.fnmatch(os.path.basename(fname), "BUILD")]
if not files:
eprint("No files found, exiting.")
return 0
files = sorted(files)
eprint("+++ :female-detective: Detecting Buildifier version")
version_result = run_buildifier("--version", what="Version info")
match = BUILDIFIER_VERSION_PATTERN.search(version_result.stdout)
version = match.group(1) if match and match.group(1) != "redacted" else None
# Run formatter before linter since --lint=warn implies --mode=fix,
# thus fixing any format issues.
formatter_result = run_buildifier(
"--mode=check", files=files, version=version, what="Format check"
)
if formatter_result.returncode and formatter_result.returncode != BUILDIFIER_FORMAT_ERROR_CODE:
output = "##### :bazel: buildifier: error while checking format:\n"
output += "<pre><code>" + html.escape(formatter_result.stderr) + "</code></pre>"
if "BUILDKITE_JOB_ID" in os.environ:
output += "\n\nSee [job {job}](#{job})\n".format(job=os.environ["BUILDKITE_JOB_ID"])
upload_output(output)
return formatter_result.returncode
# Format: "<file name> # reformated"
unformatted_files = [l.partition(" ")[0] for l in formatter_result.stdout.splitlines()]
if unformatted_files:
eprint(
"+++ :construction: Found {} file(s) that must be formatted".format(
len(unformatted_files)
)
)
linter_result = run_buildifier("--lint=warn", files=files, version=version, what="Lint checks")
if linter_result.returncode == 0 and not unformatted_files:
# If buildifier was happy, there's nothing left to do for us.
eprint("+++ :tada: Buildifier found nothing to complain about")
return 0
output = ""
if unformatted_files:
output = create_heading("format", len(unformatted_files))
output += (
"Please download <a href=\"{}\">buildifier</a> and run the following "
"command in your workspace:<br/><pre><code>buildifier {}</code></pre>"
"<br/>\n".format(
BUILDIFIER_URL, " ".join(unformatted_files)
)
)
# Parse output.
eprint("+++ :gear: Parsing buildifier output")
findings = list(regex.finditer(linter_result.stderr))
output += create_heading("lint", len(findings))
output += "<pre><code>"
for finding in findings:
file_url = get_file_url(finding["filename"], finding["line"])
if file_url:
output += '<a href="{}">{}:{}</a>:'.format(
file_url, finding["filename"], finding["line"]
)
else:
output += "{}:{}:".format(finding["filename"], finding["line"])
if finding["column"]:
output += "{}:".format(finding["column"])
output += ' <a href="{}">{}</a>: {}\n'.format(
finding["message_url"], finding["message_id"], finding["message"]
)
output = output.strip() + "</pre></code>"
upload_output(output)
# Preserve buildifier's exit code.
return linter_result.returncode
if __name__ == "__main__":
sys.exit(main())
|
Python
| 0.000235
|
@@ -5752,21 +5752,16 @@
%22
-%3Cbr/%3E
%5Cn%22.form
|
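Note: the diff above drops a '<br/>' that preceded the trailing newline of the format-check message, presumably because the <pre> block already breaks the line in the Buildkite annotation. The message as built after the removal, with placeholder file names:

BUILDIFIER_URL = "https://github.com/bazelbuild/buildtools/tree/master/buildifier"
unformatted_files = ["BUILD", "pkg/BUILD.bazel"]  # placeholders

output = (
    "Please download <a href=\"{}\">buildifier</a> and run the following "
    "command in your workspace:<br/><pre><code>buildifier {}</code></pre>"
    "\n".format(BUILDIFIER_URL, " ".join(unformatted_files))
)
print(output)
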
bca7f7f6ae870a0a307566ee1735e899596d3f99
|
Simplify the brightness calculation, in preparation for multi-LED drips
|
hardware/mote/mote_icicles.py
|
hardware/mote/mote_icicles.py
|
import time
from random import randint
from mote import Mote
mote = Mote()
mote.configure_channel(1, 16, False)
mote.configure_channel(2, 16, False)
mote.configure_channel(3, 16, False)
mote.configure_channel(4, 16, False)
max_brightness = 40
class Icicle:
def __init__(self, channel):
self.channel = channel
self.current_pixel = 0
self.start_random_wait_for_next_drip()
def step(self):
# Turn off previous pixel
mote.set_pixel(self.channel, self.previous_pixel(), 0, 0, 0)
# Check if we are pausing between drips
if self.frames_to_wait > 0:
self.frames_to_wait -= 1
return
# Advance to next pixel
brightness = max_brightness -(2*self.current_pixel)
mote.set_pixel(self.channel, self.current_pixel, brightness, brightness, brightness)
# Advance pixel number, ready for next frame
self.current_pixel = self.next_pixel()
# If the next pixel will be zero, set up a random wait before starting the
# next cycle:
if self.current_pixel == 0:
self.start_random_wait_for_next_drip()
def next_pixel(self, delta = 1):
new_pixel = self.current_pixel + delta
if not self.valid_pixel(new_pixel):
new_pixel -= 16
return new_pixel
def previous_pixel(self, delta = 1):
new_pixel = self.current_pixel - delta
if not self.valid_pixel(new_pixel):
new_pixel += 16
return new_pixel
def valid_pixel(self, pixel):
return pixel >=0 and pixel <= 15
def start_random_wait_for_next_drip(self):
self.frames_to_wait = randint(15, 30)
if __name__ == "__main__":
mote.clear()
icicles = [
Icicle(1),
Icicle(2),
Icicle(3),
Icicle(4)
]
while True:
for icicle in icicles:
icicle.step()
mote.show()
time.sleep(0.2)
|
Python
| 0
|
@@ -219,19 +219,20 @@
False)%0A%0A
-max
+full
_brightn
@@ -736,11 +736,12 @@
s =
-max
+full
_bri
@@ -751,32 +751,8 @@
ness
- -(2*self.current_pixel)
%0A
|
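Note: the change above renames max_brightness to full_brightness and removes the per-pixel fade term, so every lit pixel uses the full value. A before/after sketch of the arithmetic:

full_brightness = 40

def brightness_before(pixel):
    return full_brightness - 2 * pixel  # old: dimmer further down the icicle

def brightness_after(pixel):
    return full_brightness              # new: constant, ready for multi-LED drips

print([brightness_before(p) for p in (0, 5, 15)])  # [40, 30, 10]
print([brightness_after(p) for p in (0, 5, 15)])   # [40, 40, 40]
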
9718e6c216b8d5205a19f095593ec099004785a6
|
add app
|
src/studio/launch/commands/app_commands.py
|
src/studio/launch/commands/app_commands.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import print_function
import os
import sys
import importlib
from sh import pip
from termcolor import colored
from studio.frame.config import common as common_config
from studio.launch.base import manager
app_manager = manager.subcommand('app')
VASSAL = common_config['UWSGI_EMPEROR']
def _get_app(appname):
try:
module = importlib.import_module(appname)
except ImportError:
print(colored('Can\'t import app %s.' % appname,
'yellow', attrs=['bold']),
file=sys.stderr)
return None
for name in dir(module):
app = getattr(module, name)
if hasattr(app, 'config'):
return app
else:
print(colored('Can\'t find app %s\'s entry' % appname,
'yellow', attrs=['bold']),
file=sys.stderr)
return None
def _iter_all():
for pkg in pip.freeze():
appname, _ = pkg.split('==')
if 'microsite' == appname:
yield appname
def _get_pkgs():
return [str(pkg.split('==')[0]) for pkg in pip.freeze()]
def _get_appnames():
pkgs = _get_pkgs()
return [pkg[6:] for pkg in pkgs if pkg.startswith('qsapp-')]
def _mk_uwsgi_config(config):
conifg_d = {}
for k, v in config.items():
if k.startswith('UWSGI_'):
k = k[6:].replace('_', '-')
conifg_d[k] = v
print(VASSAL)
@app_manager.command
def add(*appnames):
_names = _get_appnames()
for appname in appnames:
if appname in _names:
app = _get_app(appname)
_mk_uwsgi_config(app.config)
|
Python
| 0.000003
|
@@ -117,16 +117,28 @@
ort sys%0A
+import json%0A
import i
@@ -146,16 +146,16 @@
portlib%0A
-
from sh
@@ -337,16 +337,17 @@
)%0AVASSAL
+S
= commo
@@ -1309,18 +1309,18 @@
%0A con
-i
f
+i
g_d = %7B%7D
@@ -1442,18 +1442,18 @@
con
-i
f
+i
g_d%5Bk%5D =
@@ -1459,24 +1459,613 @@
= v%0A
+%0A
-print(VASSAL
+return config_d%0A %0A%0Adef _register(appname, **config_d):%0A vassals_dir = VASSALS%0A try:%0A os.makedirs(vassals_dir)%0A except OSError:%0A pass%0A uwsgi_cfg = %7B%7D%0A uwsgi_cfg.setdefault('env', %5B%5D).extend(%5B%0A# 'STUDIO_ENVIRON=%25s' %25 common_config%5B'ENVIRON'%5D,%0A 'STUDIO_APPNAME=%25s' %25 appname%5D)%0A uwsgi_cfg.update(config_d)%0A print('Registering app %25s:' %25 appname, end=' ')%0A with open(os.path.join(vassals_dir,%0A '%25s.json' %25 appname), 'wb') as fp:%0A json.dump(%7B'uwsgi': uwsgi_cfg%7D, fp)%0A print(colored('ok', 'green', attrs=%5B'bold'%5D) + '.'
)%0A%0A%0A
@@ -2240,16 +2240,27 @@
+ config_d =
_mk_uws
@@ -2281,8 +2281,51 @@
config)%0A
+ _register(appname, **config_d)%0A
|
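Note: the diff above adds a _register step that writes a {"uwsgi": {...}} vassal file per app for the uWSGI emperor. A self-contained Python 3 sketch of that step; the directory and config values are made up:

import json
import os
import tempfile

def register(appname, vassals_dir, **config_d):
    os.makedirs(vassals_dir, exist_ok=True)
    uwsgi_cfg = {'env': ['STUDIO_APPNAME=%s' % appname]}
    uwsgi_cfg.update(config_d)
    path = os.path.join(vassals_dir, '%s.json' % appname)
    with open(path, 'w') as fp:
        json.dump({'uwsgi': uwsgi_cfg}, fp)
    return path

print(register('demo', tempfile.mkdtemp(), **{'http-socket': ':8080'}))
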
3739c8c54d48d4b4cb82f6d5fa6197cf1ae374ee
|
add 'cv.' where needed to improve parsing
|
datasources/botgarden/gbif/parseAndInsertGBIFparts.py
|
datasources/botgarden/gbif/parseAndInsertGBIFparts.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""originally this was parseNamesGBIF.py
"Use the GBIF name parser API (http://tools.gbif.org/nameparser/api.do) to
disect [sic] name strings into their components. Input should be a simple list of
name strings separated by newline-characters. The names can be read either
from textfile(s) or from <STDIN>.
Output will be written as JSON to <STDOUT> by default.
Usage: parseNamesGBIF.py filename1 [filename2 [...]]
from:
https://www.snip2code.com/Snippet/162694/Parse-taxon-names-with-Python--using-the"
However, I have hacked it substantially for this UCBG application, and it now takes three command line arguments,
caches the results, etc. etc.
... jblowe@berkeley.edu 4/6/2015
"""
import fileinput
import pickle
import requests
import re
import sys
import json
import time
import os
# import csv
import codecs
# empty class for counts
class count:
pass
count.input = 0
count.output = 0
count.newnames = 0
count.source = 0
count.datasource = 0
count.cultivars = 0
count.cultivarsinoriginal = 0
parts = {}
nameparts = ["authorsParsed",
"authorship",
"bracketAuthorship",
"canonicalName",
"canonicalNameComplete",
"canonicalNameWithMarker",
"genusOrAbove",
"infraSpecificEpithet",
"rankMarker",
"scientificName",
"specificEpithet",
"type"]
# from http://stackoverflow.com/questions/1158076/implement-touch-using-python
def touch(fname, times=None):
with open(fname, 'a'):
os.utime(fname, times)
# look for cultivars, e.g. "Ceanothus 'Berkeley Skies'", make it Ceanothus cv. 'Berkeley Skies' for GBIF parsing"
cultivarpattern = re.compile("(.*)('.*')")
def check4cultivars(name):
if not 'cv.' in name:
name = cultivarpattern.sub(r'\1 cv. \2', name)
else:
count.cultivarsinoriginal += 1
if 'cv.' in name: count.cultivars += 1
return name
def main():
if len(sys.argv) < 4:
print 'usage: %s inputfileofnames.csv outputnameparts.csv picklefile column' % sys.argv[0]
sys.exit(1)
namecolumn = 0
try:
namecolumn = int(sys.argv[4])
except:
print "column is not an integer: %s " % sys.argv[4]
sys.exit(1)
try:
namepartsfile = sys.argv[2]
# namepartsfh = csv.writer(open(namepartsfile, "wb"), delimiter='\t')
namepartsfh = open(namepartsfile, "wb")
#namepartsfh.write('\t'.join(nameparts) + '\n')
except:
print "could not open output file"
sys.exit(1)
try:
picklefile = sys.argv[3]
picklefh = open(picklefile, "rb")
except:
print "could not open pickle file, will try to create"
picklefh = open(picklefile, "wb")
pickle.dump({}, picklefh)
picklefh.close()
picklefh = open(picklefile, "rb")
try:
parsednames = pickle.load(picklefh)
picklefh.close()
print "%s names in datasource." % len(parsednames.keys())
except:
raise
print "could not parse data in picklefile %s" % picklefile
sys.exit(1)
try:
inputfile = codecs.open(sys.argv[1], "rb", "utf-8")
except:
raise
print "could not open input file %s" % sys.argv[1]
sys.exit(1)
for line in inputfile:
count.input += 1
inputrow = line.rstrip('\n')
cells = inputrow.split('\t')
name = cells[namecolumn]
# handle cultivars without 'cv.'...
name = check4cultivars(name)
if name in parsednames:
count.source += 1
name2use = parsednames[name]
else:
time.sleep(1) # delays for 1 second
response = requests.get('http://api.gbif.org/v1/parser/name', params={'name': name})
response.encoding = 'utf-8'
name2use = response.json()[0]
count.newnames += 1
parsednames[name] = name2use
row = []
for part in name2use.keys():
parts[part] = parts.get(part, 0) + 1
for part in nameparts:
if part in name2use:
try:
row.append(name2use[part].encode('utf-8'))
except:
row.append(str(name2use[part]))
else:
row.append('')
if count.input == 1:
row = [h + '_s' for h in nameparts]
cells = [x.encode('utf-8') for x in cells]
cells = cells[:namecolumn] + row + cells[namecolumn:]
namepartsfh.write('\t'.join(cells) + '\n')
try:
pickle.dump(parsednames, open(picklefile, "wb"))
count.datasource = len(parsednames.keys())
except:
print "could not write names to picklefile %s" % picklefile
sys.exit(1)
print "%s names input." % count.input
print "%s parsenames output." % count.output
print "%s new names found." % count.newnames
print "%s names now in datasource." % count.datasource
print "%s cultivars indicated already (i.e 'cv.' in original)." % count.cultivarsinoriginal
print "%s total cultivars identified." % count.cultivars
print
print 'name parts:'
for p in parts.keys():
print "%s: %s" % (p, parts[p])
if __name__ == '__main__':
main()
|
Python
| 0.999993
|
@@ -1690,17 +1690,16 @@
hus cv.
-'
Berkeley
@@ -1704,17 +1704,16 @@
ey Skies
-'
for GBI
@@ -1761,14 +1761,14 @@
(.*)
-('.*')
+'(.*)'
%22)%0A%0A
|
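Note: the regex change above moves the quotes out of the captured group, so the rewritten name reads "... cv. Berkeley Skies" without quotes around the cultivar epithet. A runnable check of the post-fix behaviour:

import re

cultivarpattern = re.compile(r"(.*)'(.*)'")

def check4cultivars(name):
    if 'cv.' not in name:
        name = cultivarpattern.sub(r'\1 cv. \2', name)
    return name

print(check4cultivars("Ceanothus 'Berkeley Skies'"))
# -> "Ceanothus  cv. Berkeley Skies" (group 1 keeps its trailing space)
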
6380aabe25e38d198b6c4e10d126d6fd97860c85
|
remove Simple.validate function
|
flask_pam/token/simple.py
|
flask_pam/token/simple.py
|
# -*- coding: utf-8 -*-
from hashlib import sha256
from token import Token
class Simple(Token):
"""Simple token implementation. It's not safe. Only for testing purposes!"""
def generate(self):
return sha256(self.username).hexdigest()
def validate(self, token):
return sha256(self.username).hexdigest() == token
|
Python
| 0.000126
|
@@ -249,94 +249,4 @@
t()%0A
-%0A def validate(self, token):%0A return sha256(self.username).hexdigest() == token%0A
|
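Note: the validate() removed above only regenerated the token and compared, which any caller can do inline against generate(). A sketch of that equivalence:

from hashlib import sha256

def generate(username):
    return sha256(username.encode()).hexdigest()

token = generate('alice')
print(token == generate('alice'))  # True: validation is just regeneration
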
5a9c7222360051ea9b0b58426285d72d3726daf6
|
Add game to userinfo
|
cogs/moderation.py
|
cogs/moderation.py
|
from discord.ext import commands
from cogs.utils import checks
import discord
import datetime
class Moderation:
def __init__(self, liara):
self.liara = liara
@commands.command(pass_context=True, no_pm=True)
async def userinfo(self, ctx, user: discord.Member=None):
"""Shows you a user's info.
Defaults to message author if user is not specified.
"""
if user is None:
user = ctx.message.author
# user-friendly status
if user.status == discord.Status.online:
status = '<:online:212789758110334977>'
elif user.status == discord.Status.idle:
status = '<:away:212789859071426561>'
elif user.status == discord.Status.do_not_disturb:
status = '<:do_not_disturb:236744731088912384>'
else:
status = '<:offline:212790005943369728>'
embed = discord.Embed()
embed.title = '{} {}'.format(status, user)
avatar_url = user.avatar_url.replace('webp', 'png')
embed.description = '**Display name**: {0.display_name}\n**ID**: {0.id}\n[Avatar]({1})'\
.format(user, avatar_url)
join_delta = datetime.datetime.utcnow() - user.joined_at
created_delta = datetime.datetime.utcnow() - user.created_at
embed.add_field(name='Join Dates', value='**This server**: {} ago ({})\n**Discord**: {} ago ({})'
.format(join_delta, user.joined_at, created_delta, user.created_at))
roles = [x.mention for x in sorted(user.roles, key=lambda role: role.position) if not x.is_everyone]
roles.reverse() # just so it shows up like it does in the official Discord UI
if roles: # only show roles if the member has any
if len(str(roles)) < 1025: # deal with limits
embed.add_field(name='Roles', value=', '.join(roles))
embed.set_thumbnail(url=avatar_url.replace('size=1024', 'size=256'))
try:
await self.liara.say(embed=embed)
except discord.HTTPException:
await self.liara.say('Unable to post userinfo, please allow the Embed Links permission')
@commands.command(pass_context=True, no_pm=True)
async def serverinfo(self, ctx):
"""Shows you the server's info."""
server = ctx.message.server
if server.large:
await self.liara.request_offline_members(server)
embed = discord.Embed()
embed.title = str(server)
if server.icon_url is not None:
embed.description = '**ID**: {0.id}\n[Icon URL]({0.icon_url})'.format(server)
embed.set_thumbnail(url=server.icon_url)
else:
embed.description = '**ID**: {0.id}'.format(server)
embed.add_field(name='Members', value=str(len(server.members)))
roles = [x.mention for x in server.role_hierarchy if not x.is_everyone]
if roles: # only show roles if the server has any
if len(str(roles)) < 1025: # deal with limits
embed.add_field(name='Roles', value=', '.join(roles))
channels = [x[1] for x in sorted([(x.position, x.mention) for x in server.channels if x.type ==
discord.ChannelType.text])]
if len(str(channels)) < 1025:
embed.add_field(name='Text channels', value=', '.join(channels))
if server.verification_level == discord.VerificationLevel.none:
level = 'Off'
elif server.verification_level == discord.VerificationLevel.low:
level = 'Low'
elif server.verification_level == discord.VerificationLevel.medium:
level = 'Medium'
else:
level = '(╯°□°)╯︵ ┻━┻'
embed.add_field(name='Other miscellaneous info', value='**AFK Channel**: {0.afk_channel}\n'
'**AFK Timeout**: {0.afk_timeout} seconds\n'
'**Owner**: {0.owner.mention}\n'
'**Verification level**: {1}'.format(server, level))
embed.timestamp = server.created_at
embed.set_footer(text='Created on')
try:
await self.liara.say(embed=embed)
except discord.HTTPException:
await self.liara.say('Unable to post serverinfo, please allow the Embed Links permission')
@commands.command(no_pm=True)
@checks.mod_or_permissions(ban_members=True)
async def ban(self, member: discord.Member):
"""Bans a member."""
try:
await self.liara.ban(member)
await self.liara.say('Done. Good riddance.')
except discord.Forbidden:
await self.liara.say('Sorry, I don\'t have permission to ban that person here.')
@commands.command(no_pm=True)
@checks.mod_or_permissions(kick_members=True)
async def softban(self, member: discord.Member, days_to_clean: int=1):
"""Kicks a member, removing all their messages in the process."""
if not 0 <= days_to_clean <= 7:
await self.liara.say('Invalid clean value. Use a number from 0 to 7 (days).')
return
try:
await self.liara.ban(member, days_to_clean)
await self.liara.unban(member)
await self.liara.say('Done. Good riddance.')
except discord.Forbidden:
await self.liara.say('Sorry, I don\'t have permission to ban that person here.')
@commands.command(no_pm=True)
@checks.mod_or_permissions(kick_members=True)
async def kick(self, member: discord.Member):
"""Kicks a member."""
try:
await self.liara.ban(member)
await self.liara.unban(member)
await self.liara.say('Done. Good riddance.')
except discord.Forbidden:
await self.liara.say('Sorry, I don\'t have permission to kick that person here.')
def setup(liara):
liara.add_cog(Moderation(liara))
|
Python
| 0.000001
|
@@ -1169,16 +1169,124 @@
r_url)%0A%0A
+ if user.game is not None:%0A embed.description += '%5Cn**Game**: %7B%7D'.format(user.game.name)%0A%0A
|
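Note: the diff above appends the member's current game to the embed description when one is set. A sketch of the guard with stand-in objects instead of real discord.py types:

class FakeGame:
    name = 'Mass Effect'

class FakeUser:
    game = FakeGame()  # would be None when the member plays nothing

description = '**Display name**: Liara\n**ID**: 1234'
user = FakeUser()
if user.game is not None:
    description += '\n**Game**: {}'.format(user.game.name)
print(description)
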
d37d99dedfb7cc2c86a9f01a75213fcc430af13d
|
fix inheritance for `SmAttr`s
|
hashstore/utils/file_types.py
|
hashstore/utils/file_types.py
|
import mimetypes
from typing import List
from os.path import join, dirname
from hashstore.utils import load_json_file
from hashstore.utils.smattr import SmAttr
class FileType(SmAttr):
mime:str
ext:List[str]
def read_file_types(json_file):
load_json = load_json_file(json_file)
return {n: FileType(v) for n,v in load_json.items()}
file_types = read_file_types(join(dirname(__file__), 'file_types.json'))
my_mime_dict = dict(
(ext,ft.mime)
for ft in file_types.values()
for ext in ft.ext)
my_name_dict = dict(
(ext,k)
for k, ft in file_types.items()
for ext in ft.ext )
WDF = 'WDF'
HSB = 'HSB'
def guess_name(filename):
'''
>>> guess_name('abc.txt')
'TXT'
>>> guess_name('abc.log')
'LOG'
>>> guess_name('abc.wdf')
'WDF'
>>> guess_name('abc.hsb')
'HSB'
>>> guess_name('.wdf')
'BINARY'
>>> guess_name('abc.html')
'HTML'
>>> guess_name('abc.exe')
'BINARY'
:param filename: file path
:return: name from `file_types`
'''
try:
extension = extract_extension(filename)
if extension:
return my_name_dict[extension]
except:
pass
return 'BINARY'
def guess_type(filename):
'''
guess MIME type
>>> guess_type('abc.txt')
'text/plain'
>>> guess_type('abc.log')
'text/plain'
>>> guess_type('abc.wdf')
'text/wdf'
>>> guess_type('abc.hsb')
'text/hsb'
>>> guess_type('.wdf')
>>> guess_type('abc.html')
'text/html'
>>> guess_type('abc.exe')
'application/x-msdownload'
:param filename: file path
:return: mime type
'''
try:
extension = extract_extension(filename)
if extension:
return my_mime_dict[extension]
except:
pass
return mimetypes.guess_type(filename)[0]
def extract_extension(filename):
'''
>>> extract_extension('.txt')
>>> extract_extension(None)
>>> extract_extension('abc.txt')
'txt'
>>> extract_extension('a.html')
'html'
:param filename: file path
:return: extension
'''
try:
dot_p = filename.rindex('.')
if dot_p > 0:
return filename[dot_p+1:]
except:
pass
return None
|
Python
| 0.000021
|
@@ -1579,14 +1579,17 @@
msdo
-wnload
+s-program
'%0A%0A
|
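Note: the one-line change above updates a doctest expectation for the MIME type of .exe; what mimetypes returns depends on the platform's type tables, so either string can be correct locally:

import mimetypes
# e.g. 'application/x-msdos-program' on Debian-based systems,
# 'application/x-msdownload' elsewhere
print(mimetypes.guess_type('abc.exe')[0])
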
c78b6d46ef68d079a1c3b19427eee41902079dfb
|
Use control_data network ip for amqp_server_ip.
|
fabfile/utils/host.py
|
fabfile/utils/host.py
|
import paramiko
from netaddr import *
from fabfile.config import testbed
from fabric.context_managers import settings
from fabric.api import env, run
def hstr_to_ip(host_string):
return host_string.split('@')[1]
def get_control_host_string(mgmt_host):
ctrl_ip_info= getattr(testbed, 'control_data', None)
if ctrl_ip_info:
if mgmt_host in ctrl_ip_info.keys():
ip = str(IPNetwork(ctrl_ip_info[mgmt_host]['ip']).ip)
user= mgmt_host.split('@')[0]
host_details= user+'@'+ip
else :
host_details= mgmt_host
return host_details
def get_service_token():
service_token = getattr(testbed, 'service_token', '')
testbed.service_token = get_from_testbed_dict('openstack','service_token',
service_token)
return testbed.service_token
def get_service_token_opt():
service_token = get_service_token()
if service_token:
return '--service_token %s' % (service_token)
else:
return ''
def get_haproxy_opt():
testbed.haproxy = getattr(testbed, 'haproxy', False)
haproxy_opt = '--haproxy' if testbed.haproxy else ''
return haproxy_opt
def get_region_name():
region_name = getattr(testbed, 'region_name', 'RegionOne')
return get_from_testbed_dict('keystone', 'region_name', region_name)
def get_region_name_opt():
region_name = get_region_name()
return '--region_name %s' %(region_name)
def get_keystone_ip(ignore_vip=False):
openstack_host = get_control_host_string(testbed.env.roledefs['openstack'][0])
openstack_ip = hstr_to_ip(openstack_host)
keystone_ip1 = getattr(testbed, 'keystone_ip', None)
keystone_ip = get_from_testbed_dict('keystone', 'keystone_ip', keystone_ip1)
internal_vip = get_from_testbed_dict('ha', 'internal_vip', None)
if ignore_vip:
return keystone_ip or openstack_ip
else:
if internal_vip and keystone_ip:
print "Openstack HA setup, Keystone running in different node other than [%s]" % ','.join(testbed.env.roledefs['openstack'])
return keystone_ip
elif keystone_ip:
print "Keystone running in different node other than [%s]" % ','.join(testbed.env.roledefs['openstack'])
return keystone_ip
elif internal_vip:
print "Openstack HA setup, Keystone running in nodes [%s]" % ','.join(testbed.env.roledefs['openstack'])
return internal_vip
return openstack_ip
def get_keystone_ip_opt():
keystone_ip = get_keystone_ip()
return '--keystone_ip %s' % (keystone_ip)
def get_from_testbed_dict( dictionary, key,default_value):
try:
val = env[dictionary][key]
except KeyError:
val = default_value
return val
def get_keystone_auth_protocol():
return get_from_testbed_dict('keystone', 'auth_protocol','http')
def get_keystone_insecure_flag():
return get_from_testbed_dict('keystone', 'insecure', 'False')
def get_keystone_auth_port():
return get_from_testbed_dict('keystone', 'auth_port','35357')
def get_keystone_admin_token():
keystone_ip = get_keystone_ip(ignore_vip=True)
if keystone_ip == hstr_to_ip(get_control_host_string(testbed.env.roledefs['openstack'][0])):
# Use Management interface IP to ssh
keystone_ip = hstr_to_ip(testbed.env.roledefs['openstack'][0])
cmd = 'grep "^[ ]*admin_token" /etc/keystone/keystone.conf | tr -d \' \'| awk -F"=" {\'print $2\'}'
with settings(host_string='root@%s' %(keystone_ip)):
token = run(cmd)
return token
def get_keystone_admin_user():
ks_admin_user = getattr(testbed, 'keystone_admin_user','admin')
return get_from_testbed_dict('keystone', 'admin_user', ks_admin_user)
def get_keystone_admin_password():
os_admin_password = getattr(env,'openstack_admin_password', 'contrail123')
ks_admin_password = getattr(testbed,
'keystone_admin_password', os_admin_password)
return get_from_testbed_dict('keystone',
'admin_password', ks_admin_password)
def get_keystone_service_tenant_name():
return get_from_testbed_dict('keystone', 'service_tenant', 'service')
def get_keystone_admin_tenant_name():
admin_tenant_name = getattr(testbed, 'os_tenant_name', 'admin')
return get_from_testbed_dict('keystone', 'admin_tenant', 'admin')
def get_openstack_amqp_server():
internal_vip = get_from_testbed_dict('ha', 'internal_vip', None)
return get_from_testbed_dict('openstack','amqp_host',
(internal_vip or hstr_to_ip(env.roledefs['cfgm'][0])))
def get_quantum_service_protocol():
return get_from_testbed_dict('neutron', 'protocol', 'http')
def verify_sshd(host, user, password):
try:
client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
client.connect(host, username=user, password=password, timeout=5)
except Exception:
return False
client.close()
return True
|
Python
| 0.000006
|
@@ -4501,33 +4501,16 @@
_host',%0A
-
@@ -4537,16 +4537,40 @@
r_to_ip(
+get_control_host_string(
env.role
@@ -4587,16 +4587,17 @@
'%5D%5B0%5D)))
+)
%0A%0Adef ge
|
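Note: the fix above resolves the first cfgm host through get_control_host_string() before stripping the user part, so the AMQP server IP comes from the control_data network. A dependency-free sketch (the real code parses the CIDR with netaddr.IPNetwork; a plain split stands in here, and the testbed data is invented):

control_data = {'root@10.0.0.5': {'ip': '192.168.10.5/24'}}
cfgm_hosts = ['root@10.0.0.5']

def get_control_host_string(mgmt_host):
    info = control_data.get(mgmt_host)
    if info:
        user = mgmt_host.split('@')[0]
        return user + '@' + info['ip'].split('/')[0]
    return mgmt_host

def hstr_to_ip(host_string):
    return host_string.split('@')[1]

print(hstr_to_ip(get_control_host_string(cfgm_hosts[0])))  # 192.168.10.5
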
beccef4eccda11e32ba30022008de44450f69fa2
|
Check if block exists before DAG edits.
|
src/api.py
|
src/api.py
|
def execute_blocks(dag_fpathname, block_ids, all=False):
import dag
import dagexecutor
d = dag.DAG.from_file(dag_fpathname)
if all: block_ids = d.block_ids()
dex = dagexecutor.DAGExecutor(d, dag_fpathname)
dex.execute_blocks(block_ids)
def open_notebook(nbfile):
from utils import ConsoleExecutor
import subprocess
print('Running {}'.format(nbfile))
command = 'jupyter notebook ' + nbfile
subprocess.check_output(command.split())
def create_flow(dag_fpathname, block_ids, flow_name, run=False):
import dag
import flowmanager
import os
d = dag.DAG.from_file(dag_fpathname)
flow = flowmanager.FlowManager(d, os.path.dirname(dag_fpathname))
flow_fname = flow_name
if not flow_fname.endswith('.ipynb'): flow_fname += '.ipynb'
flow.flow_to_file(block_ids, flow_fname)
if run: open_notebook(flow_fname)
def update_from_flow(dag_fpathname, flow_fpathname):
import dag
import flowmanager
d = dag.DAG.from_file(dag_fpathname)
flow = flowmanager.FlowManager(d, dag_fpathname)
flow.apply_flow_changes(flow_fpathname)
d.to_file(dag_fpathname)
def new_project(project_name, run=False):
import dag
from utils import ConsoleExecutor
d = dag.DAG.empty(project_name)
dag_fpathname = project_name+'.dagpy'
d.to_file(dag_fpathname)
if run:
flowname = project_name + '_initialflow.ipynb'
create_flow(dag_fpathname, [], flowname, run)
def display_dag(dag_fpathname, flow = None):
import dag
import utils
d = dag.DAG.from_file(dag_fpathname)
to_color = []
if flow is not None:
to_color = utils.all_dependencies(d, flow)
utils.dag_draw(d, to_color=to_color)
def add_or_update_block(dag_fpathname, block_id, block):
import dag
import blockio
d = dag.DAG.from_file(dag_fpathname)
is_new = d.add_or_update_block(block_id, block)
d.to_file(dag_fpathname)
if is_new: blockio.save_block(block_id, [], d)
def add_block(dag_fpathname, block_id, block):
import dag
import blockio
d = dag.DAG.from_file(dag_fpathname)
d.add_block(block_id, block)
d.to_file(dag_fpathname)
blockio.save_block(block_id, [], d)
def update_block(dag_fpathname, block_id, block):
import dag
import blockio
block['block_id'] = block_id
d = dag.DAG.from_file(dag_fpathname)
d.update_block(block)
blockio.save_block(block_id, [], d)
def remove_block(dag_fpathname, block_id):
import dag
import blockio
d = dag.DAG.from_file(dag_fpathname)
d.remove_block(block_id)
d.to_file(dag_fpathname)
|
Python
| 0
|
@@ -1,12 +1,130 @@
+%0A%0Adef check_blocks(dag, block_ids):%0A if set(block_ids) - set(dag.block_ids()):%0A return False%0A return True
%0A%0Adef execut
@@ -288,16 +288,177 @@
k_ids()%0A
+ nonexistent = set(block_ids) - set(d.block_ids())%0A elif nonexistent:%0A print('Block(s) %7B%7D have not been found.'.format(nonexistent))%0A return%0A
dex
@@ -2665,24 +2665,135 @@
_fpathname)%0A
+ if block_id not in d.block_ids():%0A print('Block %7B%7D was not found.'.format(block_id))%0A return%0A
d.update
@@ -2963,24 +2963,135 @@
_fpathname)%0A
+ if block_id not in d.block_ids():%0A print('Block %7B%7D was not found.'.format(block_id))%0A return%0A
d.remove
|
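Note: the diff above guards DAG edits with an existence check, so a bad block id produces a message instead of a KeyError. A sketch with a plain dict standing in for the DAG:

def remove_block(dag, block_id):
    if block_id not in dag:
        print('Block {} was not found.'.format(block_id))
        return
    del dag[block_id]

dag = {'load': {}, 'train': {}}
remove_block(dag, 'evaluate')  # warns, leaves dag unchanged
remove_block(dag, 'train')
print(sorted(dag))             # ['load']
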
eb57a07277f86fc90b7845dc48fb5cde1778c8d4
|
Test cut_by_number with words and normal chunk numbers
|
test/unit_test/test_cut_number.py
|
test/unit_test/test_cut_number.py
|
from lexos.processors.prepare.cutter import split_keep_whitespace, \
count_words, cut_by_number
class TestCutByNumbers:
def test_split_keep_whitespace(self):
assert split_keep_whitespace("Test string") == ["Test", " ", "string"]
assert split_keep_whitespace("Test") == ["Test"]
assert split_keep_whitespace(" ") == ["", " ", ""] # intended?
assert split_keep_whitespace("") == [""]
def test_count_words(self):
assert count_words(["word", "word", " ", "not", "word"]) == 4
assert count_words(['\n', '\t', ' ', '', '\u3000', "word"]) == 1
assert count_words([""]) == 0
|
Python
| 0.000003
|
@@ -632,8 +632,304 @@
%5D) == 0%0A
+%0A def test_cut_by_number_normal(self):%0A assert cut_by_number(%22Text%22, 1) == %5B%22Text%22%5D%0A assert cut_by_number(%22This text has five words%22, 5) == %5C%0A %5B%22This %22, %22text %22, %22has %22, %22five %22, %22words%22%5D%0A assert cut_by_number(%22Hanging space %22, 2) == %5B%22Hanging %22, %22space %22%5D%0A%0A
|
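Note: the test added above pins down cut_by_number's contract: each word keeps its trailing whitespace, and asking for n chunks of an n-word text returns one word per chunk. A hedged toy implementation that satisfies the new asserts (not the Lexos code):

import re

def cut_by_number(text, n):
    pieces = re.findall(r'\S+\s*', text) or ['']
    per = max(1, -(-len(pieces) // n))  # ceiling division
    return [''.join(pieces[i:i + per]) for i in range(0, len(pieces), per)]

assert cut_by_number("Text", 1) == ["Text"]
assert cut_by_number("This text has five words", 5) == \
    ["This ", "text ", "has ", "five ", "words"]
assert cut_by_number("Hanging space ", 2) == ["Hanging ", "space "]
print('asserts pass')
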
4c109fdca6aa0bcd3d2c883ae193a025838573d5
|
Assign explicit names to MNIST op. Change: 134345909
|
tensorflow_serving/example/mnist_export.py
|
tensorflow_serving/example/mnist_export.py
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
#!/usr/bin/env python2.7
"""Train and export a simple Softmax Regression TensorFlow model.
The model is from the TensorFlow "MNIST For ML Beginner" tutorial. This program
simply follows all its training instructions, and uses TensorFlow Serving
exporter to export the trained model with proper signatures that can be
loaded by standard tensorflow_model_server.
Usage: mnist_export.py [--training_iteration=x] [--export_version=y] export_dir
"""
import sys
# This is a placeholder for a Google-internal import.
import tensorflow as tf
from tensorflow.contrib.session_bundle import exporter
from tensorflow_serving.example import mnist_input_data
tf.app.flags.DEFINE_integer('training_iteration', 1000,
'number of training iterations.')
tf.app.flags.DEFINE_integer('export_version', 1, 'version number of the model.')
tf.app.flags.DEFINE_string('work_dir', '/tmp', 'Working directory.')
FLAGS = tf.app.flags.FLAGS
def main(_):
if len(sys.argv) < 2 or sys.argv[-1].startswith('-'):
print('Usage: mnist_export.py [--training_iteration=x] '
'[--export_version=y] export_dir')
sys.exit(-1)
if FLAGS.training_iteration <= 0:
print 'Please specify a positive value for training iteration.'
sys.exit(-1)
if FLAGS.export_version <= 0:
print 'Please specify a positive value for version number.'
sys.exit(-1)
# Train model
print 'Training model...'
mnist = mnist_input_data.read_data_sets(FLAGS.work_dir, one_hot=True)
sess = tf.InteractiveSession()
serialized_tf_example = tf.placeholder(tf.string, name='tf_example')
feature_configs = {
'x': tf.FixedLenFeature(shape=[784], dtype=tf.float32),
}
tf_example = tf.parse_example(serialized_tf_example, feature_configs)
x = tf_example['x']
y_ = tf.placeholder('float', shape=[None, 10])
w = tf.Variable(tf.zeros([784, 10]))
b = tf.Variable(tf.zeros([10]))
sess.run(tf.initialize_all_variables())
y = tf.nn.softmax(tf.matmul(x, w) + b)
cross_entropy = -tf.reduce_sum(y_ * tf.log(y))
train_step = tf.train.GradientDescentOptimizer(0.01).minimize(cross_entropy)
values, indices = tf.nn.top_k(y, 10)
prediction_classes = tf.contrib.lookup.index_to_string(
tf.to_int64(indices),
mapping=tf.constant([str(i) for i in xrange(10)]))
for _ in range(FLAGS.training_iteration):
batch = mnist.train.next_batch(50)
train_step.run(feed_dict={x: batch[0], y_: batch[1]})
correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, 'float'))
print 'training accuracy %g' % sess.run(accuracy,
feed_dict={x: mnist.test.images,
y_: mnist.test.labels})
print 'Done training!'
# Export model
# WARNING(break-tutorial-inline-code): The following code snippet is
# in-lined in tutorials, please update tutorial documents accordingly
# whenever code changes.
export_path = sys.argv[-1]
print 'Exporting trained model to', export_path
init_op = tf.group(tf.initialize_all_tables(), name='init_op')
saver = tf.train.Saver(sharded=True)
model_exporter = exporter.Exporter(saver)
model_exporter.init(
sess.graph.as_graph_def(),
init_op=init_op,
default_graph_signature=exporter.classification_signature(
input_tensor=serialized_tf_example,
classes_tensor=prediction_classes,
scores_tensor=values),
named_graph_signatures={
'inputs': exporter.generic_signature({'images': x}),
'outputs': exporter.generic_signature({'scores': y})})
model_exporter.export(export_path, tf.constant(FLAGS.export_version), sess)
print 'Done exporting!'
if __name__ == '__main__':
tf.app.run()
|
Python
| 0.999999
|
@@ -2434,16 +2434,28 @@
)%0A x =
+tf.identity(
tf_examp
@@ -2461,16 +2461,63 @@
ple%5B'x'%5D
+, name='x') # use tf.identity() to assign name
%0A y_ =
@@ -2712,16 +2712,26 @@
, w) + b
+, name='y'
)%0A cros
|
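Note: the diff above wraps tensors in tf.identity()/name= so the serving signature refers to stable tensor names. A sketch of the pattern under TF 1.x graph semantics (assumes TensorFlow 2 with the v1 compat shim installed):

import tensorflow.compat.v1 as tf
tf.disable_eager_execution()

raw = tf.placeholder(tf.float32, shape=[None, 784])
x = tf.identity(raw, name='x')   # explicit name for the input tensor
y = tf.nn.softmax(tf.zeros([1, 10]), name='y')
print(x.name, y.name)            # x:0 y:0
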
c05d0f2dd77678133af1bbf49915aeaf24efbedc
|
simplify line counting method
|
httplang/httplang.py
|
httplang/httplang.py
|
import parse
import sys
import utils
import repl
def main():
if len(sys.argv) < 2:
repl.enterREPL()
sys.exit()
inputFile = sys.argv[1]
run(inputFile)
def run(file_):
with open(file_, 'rb') as file:
#pass enumerated file so we can get line numbers
parse.preParse(enumerate(file))
return utils.baseVariables
if __name__ == "__main__":
main()
|
Python
| 0.03329
|
@@ -281,16 +281,30 @@
numbers
+ starting at 1
%0A
@@ -333,16 +333,18 @@
ate(file
+,1
))%0A r
|
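Note: enumerate()'s start argument does the 1-based numbering directly, which is the whole simplification in the diff above. A two-line demonstration:

lines = ["let a = 1", "print a"]
for lineno, line in enumerate(lines, 1):
    print(lineno, line)  # starts at 1, no +1 arithmetic downstream
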
c806a3702c95812dd57aca4106a782a854268993
|
Comment out configuration of real systems
|
server/systems/__init__.py
|
server/systems/__init__.py
|
import logging
from base import BaseEnvironment
from producers import CogenerationUnit, PeakLoadBoiler
from storages import HeatStorage, PowerMeter
from consumers import ThermalConsumer, ElectricalConsumer
from server.models import Device, Configuration, DeviceConfiguration
from django.core.exceptions import ObjectDoesNotExist
logger = logging.getLogger('simulation')
def get_initialized_scenario():
devices = list(Device.objects.all())
system_list = []
env = BaseEnvironment()
for device in devices:
for device_type, class_name in Device.DEVICE_TYPES:
if device.device_type == device_type:
system_class = globals()[class_name]
system_list.append(system_class(device.id, env))
configurations = DeviceConfiguration.objects.all()
for device in system_list:
# configure systems
for configuration in configurations:
if configuration.device_id == device.id:
value = parse_value(configuration)
if configuration.key in device.config:
device.config[configuration.key] = value
return system_list
def get_user_function(systems, code=None):
local_names = ['device_%s' % system.id for system in systems]
if code is None:
with open('server/user_code.py', "r") as code_file:
code = code_file.read()
lines = []
lines.append("def user_function(%s):" %
(",".join(local_names)))
for line in code.split("\n"):
lines.append("\t" + line)
lines.append("\tpass") # make sure function is not empty
source = "\n".join(lines)
namespace = {}
exec source in namespace # execute code in namespace
return namespace['user_function']
def perform_configuration(data):
configurations = []
device_configurations = []
for config in data:
if all(x in config for x in ['device', 'key', 'value', 'type', 'unit']):
if config['device'] == '0':
try:
existing_config = Configuration.objects.get(
key=config['key'])
existing_config.value = config['value']
existing_config.value_type = int(
config['type'])
existing_config.unit = config['unit']
existing_config.save()
except Configuration.DoesNotExist:
configurations.append(
Configuration(key=config['key'], value=config['value'], value_type=int(config['type']), unit=config['unit']))
else:
try:
device = Device.objects.get(id=config['device'])
for device_type, class_name in Device.DEVICE_TYPES:
if device.device_type == device_type:
system_class = globals()[class_name]
# Make sure that key is present in corresponding system
# class
if config['key'] in system_class(0, BaseEnvironment()).config:
try:
existing_config = DeviceConfiguration.objects.get(
device=device, key=config['key'])
existing_config.device = device
existing_config.value = config['value']
existing_config.value_type = int(
config['type'])
existing_config.unit = config['unit']
existing_config.save()
except DeviceConfiguration.DoesNotExist:
device_configurations.append(
DeviceConfiguration(device=device, key=config['key'], value=config['value'], value_type=int(config['type']), unit=config['unit']))
except ObjectDoesNotExist:
logger.error("Unknown device %s" % config['device'])
except ValueError:
logger.error(
"ValueError value_type '%s' not an int" % config['type'])
else:
logger.error("Incomplete config data: %s" % config)
if len(configurations) > 0:
Configuration.objects.bulk_create(configurations)
if len(device_configurations) > 0:
DeviceConfiguration.objects.bulk_create(device_configurations)
|
Python
| 0
|
@@ -9,16 +9,71 @@
ogging%0A%0A
+from django.core.exceptions import ObjectDoesNotExist%0A%0A
from bas
@@ -97,16 +97,16 @@
ronment%0A
-
from pro
@@ -327,62 +327,8 @@
tion
-%0Afrom django.core.exceptions import ObjectDoesNotExist
%0A%0Alo
@@ -780,24 +780,26 @@
v))%0A%0A
+ #
configurati
@@ -837,32 +837,34 @@
ts.all()%0A
+ #
for device in s
@@ -878,24 +878,26 @@
ist:%0A
+ #
# confi
@@ -908,32 +908,34 @@
systems%0A
+ #
for configu
@@ -959,32 +959,34 @@
rations:%0A
+ #
if conf
@@ -1018,32 +1018,34 @@
vice.id:%0A
+ #
val
@@ -1075,32 +1075,34 @@
uration)%0A
+ #
if
@@ -1136,32 +1136,34 @@
.config:%0A
+ #
|
cf84dfda73032a276b2d6f63f2c70f69e61f89fe
|
Check validity of the config to avoid silent errors.
|
keras_retinanet/utils/config.py
|
keras_retinanet/utils/config.py
|
"""
Copyright 2017-2018 Fizyr (https://fizyr.com)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import configparser
import numpy as np
import keras
from ..utils.anchors import AnchorParameters
def read_config_file(config_path):
config = configparser.ConfigParser()
config.read(config_path)
return config
def parse_anchor_parameters(config):
ratios = np.array(list(map(float, config['anchor_parameters']['ratios'].split(' '))), keras.backend.floatx())
scales = np.array(list(map(float, config['anchor_parameters']['scales'].split(' '))), keras.backend.floatx())
sizes = list(map(int, config['anchor_parameters']['sizes'].split(' ')))
strides = list(map(int, config['anchor_parameters']['strides'].split(' ')))
return AnchorParameters(sizes, strides, ratios, scales)
|
Python
| 0
|
@@ -573,16 +573,26 @@
e.%0A%22%22%22%0A%0A
+import os%0A
import c
@@ -786,16 +786,439 @@
ig_path)
+%0A %0A assert os.path.isfile(config_path), %22Could not find %7B%7D.%22.format(config_path)%0A%0A assert 'anchor_parameters' in config, %5C%0A %22Malformed config file. Verify that it contains the anchor_parameters section.%22%0A%0A assert %7B'sizes', 'strides', 'ratios', 'scales'%7D %3C= set(config%5B'anchor_parameters'%5D), %5C%0A %22Malformed config file. Verify that it contains the following keys: sizes, strides, ratios and scales.%22
%0A%0A re
|
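Note: the change above asserts that the config file exists, has an anchor_parameters section, and contains all four keys before parsing. A runnable sketch of those checks against an in-memory config:

import configparser

config = configparser.ConfigParser()
config.read_string("""
[anchor_parameters]
sizes   = 32 64
strides = 8 16
ratios  = 0.5 1 2
scales  = 1 1.2 1.6
""")

assert 'anchor_parameters' in config, \
    "Malformed config file. Verify that it contains the anchor_parameters section."
assert {'sizes', 'strides', 'ratios', 'scales'} <= set(config['anchor_parameters']), \
    "Malformed config file. Verify that it contains the following keys: " \
    "sizes, strides, ratios and scales."
print('config ok')
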
91e916cb67867db9ce835be28b31904e6efda832
|
Add comment to new test
|
spacy/tests/regression/test_issue1727.py
|
spacy/tests/regression/test_issue1727.py
|
from __future__ import unicode_literals
import numpy
from ...pipeline import Tagger
from ...vectors import Vectors
from ...vocab import Vocab
from ..util import make_tempdir
def test_issue1727():
data = numpy.ones((3, 300), dtype='f')
keys = [u'I', u'am', u'Matt']
vectors = Vectors(data=data, keys=keys)
tagger = Tagger(Vocab())
tagger.add_label('PRP')
tagger.begin_training()
assert tagger.cfg.get('pretrained_dims', 0) == 0
tagger.vocab.vectors = vectors
with make_tempdir() as path:
tagger.to_disk(path)
tagger = Tagger(Vocab()).from_disk(path)
assert tagger.cfg.get('pretrained_dims', 0) == 0
|
Python
| 0
|
@@ -1,12 +1,117 @@
+'''Test that models with no pretrained vectors can be deserialized correctly%0Aafter vectors are added.'''%0A
from __futur
|
50d8bb92e397602d07223aff40359bcbde50baf7
|
Enable debug output on loadsched
|
cheddar/summit.py
|
cheddar/summit.py
|
# Copyright 2016 Thierry Carrez <thierry@openstack.org>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import json
import requests
from cheddar.models import Track
from cheddar.session import Session
from cheddar.tracklead import is_valid_track
from oauthlib.oauth2 import BackendApplicationClient, TokenExpiredError
from requests_oauthlib import OAuth2Session
class API:
def __init__(self, settings):
requests.packages.urllib3.disable_warnings()
basescope = settings.SUM_RESOURCESRV + '/summits/'
self.scopes = [ basescope + 'read',
basescope + 'write-event',
basescope + 'publish-event',
basescope + 'delete-event' ]
self.summit_type_id = settings.SUM_SUMMITTYPEID
self.endpoint = ( settings.SUM_RESOURCESRV +
'/api/v1/summits/' +
settings.SUM_SUMMITID + '/' )
self.clientid = settings.SUM_CLIENTID
self.tokenurl = settings.SUM_TOKENURL
self.secret = settings.SUM_SECRET
self.eventids = settings.SUM_EVENTIDS
# Using a custom offset since the one stored in Summit looks incorrect
self.timezone_offset = settings.SUM_TZOFFSET
self._refresh_token()
def _refresh_token(self):
self.oauth = OAuth2Session(
client=BackendApplicationClient(client_id=self.clientid))
self.token = self.oauth.fetch_token(
token_url=self.tokenurl,
client_id=self.clientid,
client_secret=self.secret,
verify=False,
scope=self.scopes)
def _call_summit(self, method, call, payload=None, debug=False):
def dumpjson(data):
print json.dumps(data, sort_keys=True, indent=4,
separators=(',', ': '))
if debug:
print method, self.endpoint, call
if payload:
dumpjson(payload)
print "--->"
try:
r = self.oauth.request(method, self.endpoint + call,
verify=False, json=payload)
except TokenExpiredError:
self._refresh_token()
r = self.oauth.request(method, self.endpoint + call,
verify=False, json=payload)
try:
if debug:
print str(r.status_code)
if r.text:
dumpjson(r.json())
print "==================================="
return r.json()
except ValueError:
return {}
def _summit_to_session(self, sjson):
session = Session(sjson['id'])
def _format_datetime(timestamp):
time = datetime.datetime.utcfromtimestamp(
timestamp + self.timezone_offset)
return time.strftime('%Y-%m-%d %H:%M:%S')
session.start = _format_datetime(sjson['start_date'])
session.end = _format_datetime(sjson['end_date'])
session.room = sjson['location']['name']
session.style = (k for k,v in self.eventids.items()
if v==sjson['type_id']).next()
elements = sjson['title'].split(":")
if len(elements) < 1:
session.maintrack = ""
else:
session.maintrack = elements[0]
session.extratracks = ""
for tag in sjson['tags']:
tagname = tag['tag']
if tagname.lower() != session.maintrack.lower():
session.extratracks = session.extratracks + tagname + ", "
session.extratracks = session.extratracks.strip(" ,")
session.set_title(sjson['title'])
session.set_desc(sjson['description'])
return session
def list_sessions(self, trackid):
t = Track.objects.get(id=trackid)
ret = self._call_summit('get','events/published', payload={
'page': 1,
'per_page': 100,
'filter': [ 'summit_type_id==%d' % self.summit_type_id,
'tags=@%s' % t.name ],
'expand': 'location'
})
sessions = []
for sessionjson in sorted(ret['data'],
key=lambda x: x['start_date']):
if sessionjson['title'].startswith(t.name + ": "):
sessions.append(self._summit_to_session(sessionjson))
return sessions
def get_session(self, sessionid):
ret = self._call_summit('get','events/'+str(sessionid), payload={
'expand': 'location'
})
return self._summit_to_session(ret)
def modify_session(self, sessionkey, session):
alltracks = [ session.maintrack ]
description = session.description
for track in session.extratracks.split(","):
track = track.strip()
if is_valid_track(track):
alltracks.append(track)
name = session.get_title()
description = session.get_desc()
self._call_summit('put', 'events/%s' % sessionkey, payload={
'title': name,
'tags': alltracks,
'description': description})
def swap_sessions(self, sessionkey, session, session2key, session2):
self.modify_session(sessionkey, session2)
self.modify_session(session2key, session)
def create_session(self, index, day, starttime, endtime, title,
desc, track, room, style):
def _dt_to_timestamp(ds):
dt = datetime.datetime.strptime(ds, "%Y-%m-%d %H:%M")
return int((dt - datetime.datetime(1970, 1, 1)).total_seconds())
r = self._call_summit('post', 'events', payload={
"title": title,
"start_date": _dt_to_timestamp(day + " " + starttime),
"end_date": _dt_to_timestamp(day + " " + endtime),
"description": desc,
"location_id": room,
"summit_types_id":[ self.summit_type_id ],
"tags": [ track ],
"type_id": self.eventids[style] })
self._call_summit('put', 'events/%d/publish' % r['id'],
payload={ 'location_id': room })
|
Python
| 0
|
@@ -6243,16 +6243,28 @@
events',
+ debug=True,
payload
@@ -6687,16 +6687,28 @@
r%5B'id'%5D,
+ debug=True,
%0A
|
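
The _call_summit/_refresh_token pair above demonstrates retry-on-expiry for the OAuth2 client-credentials flow. A self-contained sketch of that pattern, with illustrative names (make_session, call) and placeholder credentials:

from oauthlib.oauth2 import BackendApplicationClient, TokenExpiredError
from requests_oauthlib import OAuth2Session

def make_session(client_id, client_secret, token_url):
    # Client-credentials flow: the session fetches its own token.
    oauth = OAuth2Session(client=BackendApplicationClient(client_id=client_id))
    oauth.fetch_token(token_url=token_url, client_id=client_id,
                      client_secret=client_secret)
    return oauth

def call(oauth, remake_session, method, url, **kwargs):
    # A long-lived session's token can expire between calls; rebuild
    # the session once and retry the request.
    try:
        return oauth.request(method, url, **kwargs)
    except TokenExpiredError:
        oauth = remake_session()
        return oauth.request(method, url, **kwargs)
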
3826140004b0686f9f262756da20c5163fc5b80d
|
update icinga_simple format string handling
|
py3status/modules/icinga_simple.py
|
py3status/modules/icinga_simple.py
|
# -*- coding: utf-8 -*-
"""
Display Icinga2 service status information
Configuration Parameters:
- cache_timeout: how often the data should be updated
- base_url: the base url to the icinga-web2 services list
- disable_acknowledge: enable or disable counting of acknowledged service problems
- user: username to authenticate against the icinga-web2 interface
- password: password to authenticate against the icinga-web2 interface
- format: define a format string like "CRITICAL: %d"
- color: define a color for the output
- status: set the status you want to obtain (0=OK,1=WARNING,2=CRITICAL,3=UNKNOWN)
@author Ben Oswald <ben.oswald@root-space.de>
@license MIT License <https://opensource.org/licenses/MIT>
@source https://github.com/nazco/i3status-modules
"""
from time import time
import requests
class Py3status:
"""
"""
# available configuration parameters
cache_timeout = 60
base_url = ''
disable_acknowledge = False
url_parameters = "?service_state={service_state}&format=json"
user = ''
password = ''
ca = True
format = ''
color = '#ffffff'
status = 0
def get_status(self, i3s_output_list, i3s_config):
response = {
'color': self.color,
'cached_until': time() + self.cache_timeout,
'full_text': self.format % self._query_service_count(self.status)
}
return response
def _query_service_count(self, state):
if self.disable_acknowledge:
self.url_parameters = self.url_parameters + "&service_handled=0"
result = requests.get(
self.base_url + self.url_parameters.format(service_state=state),
auth=(self.user, self.password), verify=self.ca)
return len(result.json())
if __name__ == "__main__":
pass
|
Python
| 0
|
@@ -858,24 +858,158 @@
%22%22%22%0A %22%22%22%0A
+ STATUS_NAMES = %7B%0A 0: 'OK',%0A 1: 'WARNING',%0A 2: 'CRITICAL',%0A 3: 'UNKNOWN'%0A %7D%0A
# availa
@@ -1037,16 +1037,16 @@
ameters%0A
-
cach
@@ -1232,24 +1232,46 @@
format = '
+%7Bstatus_name%7D: %7Bcount%7D
'%0A color
@@ -1504,11 +1504,103 @@
rmat
- %25
+.format(%0A status_name=self.STATUS_NAMES.get(self.status),%0A count=
self
@@ -1634,16 +1634,34 @@
status)%0A
+ )%0A
|
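
The diff replaces positional %-formatting with named str.format placeholders backed by a status-name table. A minimal sketch of the resulting pattern (STATUS_NAMES mirrors the dict the diff adds; render is an illustrative name):

STATUS_NAMES = {0: 'OK', 1: 'WARNING', 2: 'CRITICAL', 3: 'UNKNOWN'}

def render(status, count, fmt='{status_name}: {count}'):
    # Named placeholders keep the template readable and let users
    # reorder or drop fields without breaking the call site.
    return fmt.format(status_name=STATUS_NAMES.get(status), count=count)

print(render(2, 5))  # -> CRITICAL: 5
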
30e984a0517e6443835f113c3a479aa8302ef14f
|
Update profile url on amazon tests
|
social_core/tests/backends/test_amazon.py
|
social_core/tests/backends/test_amazon.py
|
import json
from .oauth import OAuth2Test
class AmazonOAuth2Test(OAuth2Test):
backend_path = 'social_core.backends.amazon.AmazonOAuth2'
user_data_url = 'https://www.amazon.com/ap/user/profile'
expected_username = 'FooBar'
access_token_body = json.dumps({
'access_token': 'foobar',
'token_type': 'bearer'
})
user_data_body = json.dumps({
'user_id': 'amzn1.account.ABCDE1234',
'email': 'foo@bar.com',
'name': 'Foo Bar'
})
def test_login(self):
self.do_login()
def test_partial_pipeline(self):
self.do_partial_pipeline()
class AmazonOAuth2BrokenServerResponseTest(OAuth2Test):
backend_path = 'social_core.backends.amazon.AmazonOAuth2'
user_data_url = 'https://www.amazon.com/ap/user/profile'
expected_username = 'FooBar'
access_token_body = json.dumps({
'access_token': 'foobar',
'token_type': 'bearer'
})
user_data_body = json.dumps({
'Request-Id': '02GGTU7CWMNFTV3KH3J6',
'Profile': {
'Name': 'Foo Bar',
'CustomerId': 'amzn1.account.ABCDE1234',
'PrimaryEmail': 'foo@bar.com'
}
})
def test_login(self):
self.do_login()
def test_partial_pipeline(self):
self.do_partial_pipeline()
|
Python
| 0
|
@@ -157,35 +157,35 @@
_url = 'https://
-www
+api
.amazon.com/ap/u
@@ -172,35 +172,32 @@
/api.amazon.com/
-ap/
user/profile'%0A
|
08834335285b292fe0337525eb2052a38c35d881
|
Test a random element.
|
OWR/oh/tests.py
|
OWR/oh/tests.py
|
from __future__ import absolute_import
import json
from unittest import TestCase
from django.test.client import RequestFactory
from django.core.urlresolvers import reverse
from django.contrib.auth.models import AnonymousUser
from django.core.exceptions import SuspiciousOperation, PermissionDenied
from OWR.users.factory import UserFactory
from .models import OpenHardwareLike
from .views import like_set
from .factory import OpenHardwareFactory
class OpenHardwareViewTests(TestCase):
def setUp(self):
self.factory = RequestFactory()
self.user = UserFactory()
self.admin = UserFactory(is_superuser=True, is_staff=True)
self.ohs = OpenHardwareFactory.create_batch(10)
def test_like_add_not_logged(self):
count_start = OpenHardwareLike.objects.count()
exception = False
response = None
# this is not logged
request = self.factory.post(reverse('oh:like_set'), data=json.dumps({'id': 1}), content_type='text/javascript')
request.user = AnonymousUser()
try:
response = like_set(request)
except PermissionDenied as e:
exception = True
# TODO: test error parameters
#js = json.loads(response.body)
count_final = OpenHardwareLike.objects.count()
self.assertEqual(exception, True, 'An exception must be raised if the user is not logged in')
self.assertEqual(count_start, count_final)
def test_like_add_logged(self):
count_start = OpenHardwareLike.objects.count()
# this is logged
request = self.factory.post(reverse('oh:like_set'), data=json.dumps({'id': 1, 'action': 'set'}), content_type='text/javascript')
request.user = self.user
response = like_set(request)
count_final = OpenHardwareLike.objects.count()
self.assertEqual(response.status_code, 200)
self.assertEqual(count_start + 1, count_final)
def test_like_with_malformed_json(self):
count_start = OpenHardwareLike.objects.count()
exception = False
# this is logged
request = self.factory.post(reverse('oh:like_set'), data=json.dumps({'id': 1, 'action': 'foobar'}), content_type='text/javascript')
request.user = self.user
try:
response = like_set(request)
except SuspiciousOperation as e:
exception = True
# TODO: test error parameters
#js = json.loads(response.body)
count_final = OpenHardwareLike.objects.count()
self.assertEqual(exception, True)
self.assertEqual(count_start, count_final)
def test_remove_like(self):
deal_to_test = self.ohs[0]
OpenHardwareLike.objects.create(oh=deal_to_test, user=self.user)
count_start = OpenHardwareLike.objects.count()
# this is logged
request = self.factory.post(reverse('oh:like_set'), data=json.dumps({'id': deal_to_test.pk, 'action': 'unset'}), content_type='text/javascript')
request.user = self.user
response = like_set(request)
count_final = OpenHardwareLike.objects.count()
self.assertEqual(response.status_code, 200)
self.assertEqual(count_start - 1, count_final)
self.assertEqual(json.loads(response.content), {'action': 'unset', 'status': 'ok'})
|
Python
| 0
|
@@ -44,16 +44,31 @@
rt json%0A
+import random%0A%0A
from uni
@@ -497,16 +497,38 @@
tCase):%0A
+ BATCH_NUMBER = 10%0A
def
@@ -734,18 +734,33 @@
e_batch(
-10
+self.BATCH_NUMBER
)%0A%0A d
@@ -1528,32 +1528,85 @@
d_logged(self):%0A
+ idx = random.randrange(0, self.BATCH_NUMBER)%0A
count_st
@@ -1745,33 +1745,48 @@
on.dumps(%7B'id':
-1
+self.ohs%5Bidx%5D.pk
, 'action': 'set
|
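
The diff swaps a hard-coded id for a randomly chosen element of the factory batch. Picking the index (rather than the element) is what lets the test reuse self.ohs[idx].pk; both variants in miniature:

import random

batch = ['a', 'b', 'c', 'd']

idx = random.randrange(0, len(batch))  # random index, element via batch[idx]
item = random.choice(batch)            # random element directly
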
a85019e7c5e117467d0ce3bf30b9a7589cd17958
|
Update create_test_cutout
|
src/tasks/python/create_test_cutout.py
|
src/tasks/python/create_test_cutout.py
|
from cloudvolume import CloudVolume
image_in = 'gs://neuroglancer/pinky100_v0/image_single_slices'
image_out = 'gs://neuroglancer/pinky100_v0/test_image'
image_mip = 0
roi_in = 'gs://neuroglancer/pinky100_v0/image_single_slices/roicc'
roi_out = 'gs://neuroglancer/pinky100_v0/test_image/roicc'
roi_mip = 6
cfsplit_in = 'gs://neuroglancer/pinky100_v0/image_single_slices/cfsplit'
cfsplit_out = 'gs://neuroglancer/pinky100_v0/test_image/cfsplit'
cfsplit_mip = 2
match_in = 'gs://neuroglancer/pinky100_v0/image_single_slices/nccnet'
match_out = 'gs://neuroglancer/pinky100_v0/test_image/nccnet'
match_mip = 2
dst_in = 'gs://neuroglancer/pinky100_v0/aligned_test_v5'
dst_mip = 0
src_dst = [(cfsplit_in, cfsplit_out, cfsplit_mip),
(match_in, match_out, match_mip)]
z_slice = slice(199, 208)
src_mip = 0
def scale_slice(s, src_mip, dst_mip):
scale = 1/2**(dst_mip - src_mip)
return slice(int(s.start*scale), int(s.stop*scale))
def scale_slices(x_slice, y_slice, z_slice, src_mip, dst_mip):
return (scale_slice(x_slice, src_mip, dst_mip),
scale_slice(y_slice, src_mip, dst_mip),
scale_slice(z_slice, src_mip, dst_mip))
def get_cloudvolume(path, mip):
return CloudVolume(path, mip=mip)
def update_info_mips(cv, no_of_mips=6):
print("updating info mips")
for mip in range(1,no_of_mips+1):
factor = (2**mip, 2**mip, 1)
cv.add_scale(factor)
cv.commit_info()
def get_xy_slice(cv):
o = cv.voxel_offset
s = cv.shape
return slice(o[0], o[0]+s[0]), slice(o[1], o[1]+s[1])
for (src_path, dst_path, mip) in src_dst:
print(src_path)
print(dst_path)
print(mip)
cv = get_cloudvolume(dst_path, 0)
update_info_mips(cv, 6)
dst_cv = get_cloudvolume(dst_path, mip)
src_cv = get_cloudvolume(src_path, mip)
sl = get_xy_slice(dst_cv) + (z_slice,)
print(sl)
dst_cv[sl] = src_cv[sl]
|
Python
| 0.000001
|
@@ -454,16 +454,175 @@
mip = 2%0A
+cfmanual_in = 'gs://neuroglancer/pinky100_v0/image_single_slices/cfmanual'%0Acfmanual_out = 'gs://neuroglancer/pinky100_v0/test_image/cfmanual'%0Acfmanual_mip = 5%0A
match_in
@@ -847,33 +847,35 @@
%5B(cf
-split
+manual
_in, cf
-split
+manual
_out, cf
spli
@@ -874,50 +874,14 @@
, cf
-split_mip),%0A%09%09%09(match_in, match_out, match
+manual
_mip
|
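
For reference, scale_slice above divides slice bounds by 2 per mip level, since each mip halves x and y. A worked example under that definition (assuming Python 3 division, as the record's print() calls suggest):

def scale_slice(s, src_mip, dst_mip):
    scale = 1 / 2 ** (dst_mip - src_mip)
    return slice(int(s.start * scale), int(s.stop * scale))

# Going from mip 0 to mip 2 divides coordinates by 4:
assert scale_slice(slice(40, 80), 0, 2) == slice(10, 20)
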
20f14f6c86607d0f1d084ee35c8f2645fde2dacb
|
Replace 1s and 2s with Xs and Os
|
capstone/util/tic2pdf.py
|
capstone/util/tic2pdf.py
|
from __future__ import division
import subprocess
import tempfile
BG_COLOR = '1.0 1.0 1.0'
COLORS = {
'1': '0.85 0.12 0.15',
'2': '0.00 0.00 1.00',
' ': '0.90 0.90 0.90'
}
X_OFFSET = 17.0
ROWS = 3
COLS = 3
CELL_SIZE = 20
OFFSET = 10
class Tic2PDF(object):
'''
Generates a PDF of the given Tic-Tac-Toe board.
Example:
board = [[' ', ' ', '1'],
[' ', ' ', '2'],
[' ', ' ', '2']]
filename = '/Users/drobles/Desktop/c4.pdf'
Tic2PDF(board, filename).create()
'''
def __init__(self, board, filename):
self.board = board
self.filename = filename
def create(self):
self._tf_ps = tempfile.NamedTemporaryFile()
self._draw_lines()
self._draw_pieces()
self._create_pdf()
def _draw_lines(self):
f = self._tf_ps
f.write('newpath\n')
# horizontal
f.write('10 %f moveto\n' % (CELL_SIZE + 10))
f.write('60 0 rlineto\n')
f.write('10 50 moveto\n')
f.write('60 0 rlineto\n')
# vertical
f.write('30 10 moveto\n')
f.write('0 60 rlineto\n')
f.write('50 10 moveto\n')
f.write('0 60 rlineto\n')
f.write('closepath\n')
# stroke
f.write('0 setgray\n')
f.write('1 setlinewidth\n')
f.write('stroke\n')
def _draw_pieces(self):
f = self._tf_ps
offset = (CELL_SIZE // 2) + OFFSET
for ri, row in enumerate(reversed(self.board)):
for ci, col in enumerate(row):
f.write('2 setlinewidth\n')
if col == '1':
# /
f.write('newpath\n')
f.write('%f %f moveto\n' % ((ci * CELL_SIZE) + 10 + 4, (ri * CELL_SIZE) + 10 + 4))
f.write('12 12 rlineto\n')
f.write('closepath\n')
f.write('%s setrgbcolor\n' % COLORS[col])
f.write('stroke\n')
# \
f.write('newpath\n')
f.write('%f %f moveto\n' % ((ci * CELL_SIZE) + 10 + 16, (ri * CELL_SIZE) + 10 + 4))
f.write('-12 12 rlineto\n')
f.write('closepath\n')
f.write('%s setrgbcolor\n' % COLORS[col])
f.write('stroke\n')
elif col == '2':
f.write('%s setrgbcolor\n' % COLORS[col])
arc = (ci * CELL_SIZE + offset, ri * CELL_SIZE + offset, CELL_SIZE * 0.38)
f.write('%d %d %d 0 360 arc stroke\n' % arc)
def _create_pdf(self):
self._tf_ps.write('showpage')
self._tf_ps.flush()
self.tf_updf = tempfile.NamedTemporaryFile()
subprocess.call(['ps2pdf', self._tf_ps.name, self.tf_updf.name])
self._tf_ps.close()
subprocess.call(["pdfcrop", self.tf_updf.name, self.filename])
self.tf_updf.close()
def tic2pdf(board, filename):
return Tic2PDF(board, filename).create()
|
Python
| 0.010829
|
@@ -24,17 +24,35 @@
division
+, unicode_literals
%0A
-
import s
@@ -120,17 +120,17 @@
%7B%0A '
-1
+X
': '0.85
@@ -151,27 +151,27 @@
'
-2
+O
': '0.
-00 0.00 1.00
+21 0.60 0.83
',%0A
@@ -185,20 +185,20 @@
'0.
-90 0.9
+83 0.6
0 0.
-90
+32
'%0A%7D%0A
@@ -387,17 +387,17 @@
, ' ', '
-1
+X
'%5D,%0A
@@ -421,17 +421,17 @@
, ' ', '
-2
+O
'%5D,%0A
@@ -455,17 +455,17 @@
, ' ', '
-2
+X
'%5D%5D%0A
@@ -1641,17 +1641,17 @@
col == '
-1
+X
':%0A
@@ -2396,17 +2396,17 @@
col == '
-2
+O
':%0A
|
6ae4f3a71a80d7fe5bb1abe6925a05c4fe811f3c
|
bump version
|
forms_builder/__init__.py
|
forms_builder/__init__.py
|
__version__ = "0.12.2"
|
Python
| 0
|
@@ -12,12 +12,12 @@
= %22
-0.12.2
+9.7.16
%22%0A
|
66fc51e41b0b2db15c636ca366d5508c58840e0e
|
Initialize config with empty json.
|
src/app.py
|
src/app.py
|
import datetime
import glob
import os
import argparse
import setproctitle
import requests
import shutil
import json
from datetime import datetime, timezone
from operator import itemgetter
import numpy
from PIL import Image
from PIL.ExifTags import TAGS
from flask import Flask, Response, render_template, request, jsonify
from flask_bower import Bower
from functional import seq
from jsonmerge import merge
from shutil import copyfile
from astral import Astral, Location
import rpi_backlight as bl
from cachetools import cached, TTLCache
# Argument parsing
parser = argparse.ArgumentParser(description='Starts the photo frame server.')
parser.add_argument('-d', '--directory', default='/srv/photos/')
args = parser.parse_args()
# Cache configuration
cache = TTLCache(maxsize=100, ttl=600)
# Process name for os
setproctitle.setproctitle('rpi-photo-frame')
# Flask configuration
app = Flask(__name__, static_url_path='/static')
Bower(app)
# Make working folders and files
rpi_folder = '/home/pi/rpi-photo-frame'
open('%s/src/config.json' % rpi_folder, 'a+').close()
# Read / write / merge config file
with open('%s/src/config.json.template' % rpi_folder, 'r+') as base, open('%s/src/config.json' % rpi_folder, 'r+') as head:
config_template = json.load(base)
current_config = json.load(head)
config = merge(config_template, current_config)
head.write(json.dumps(config, indent=4, sort_keys=True))
@app.route('/')
def index():
try:
return render_template('index.html')
except Exception:
return "Missing frontend dependencies. Run bower install..."
@app.route('/backlight', methods=['GET'])
def get_backlight():
return jsonify({"status": bl.get_power()})
@app.route('/backlight', methods=['POST'])
def set_backlight():
bl.set_power(request.json['switch'])
return jsonify({"status": bl.get_power()})
@app.route('/weather')
def weather():
return get_weather_from_darksky()
@cached(cache)
def get_weather_from_darksky():
return requests.get('https://api.darksky.net/forecast/9559aa7862d3ef0cf894d3593fde1b11/'
+ str(config['location']['lat'])
+ ',' + str(config['location']['lon'])
+ '?lang=de&units=si').text
@app.route('/photo')
def photo():
files = glob.glob(args.directory + '*.jp*g')
# Sort images by date
sort = seq(files) \
.map(lambda x: (x, extract_exif_date(x))) \
.sorted(key=itemgetter(1), reverse=False) \
.map(lambda x: x[0]) \
.zip_with_index() \
.map(lambda x: (x[0], x[1] ** 6 + 1))
# Sum of all images
s = sort \
.map(lambda x: x[1]) \
.sum()
# All photos
photos = sort.map(lambda x: x[0]).to_list()
# Generate probabilities based on date
prob = sort.map(lambda x: float(x[1]) / float(s)).to_list()
# Get weighted random image, newer images are more likely to show up
abs_path = numpy.random.choice(photos, p=prob)
folder_name, file_name = os.path.split(abs_path)
l = Location()
l.name = config['location']['name']
l.region = config['location']['region']
l.latitude = config['location']['lat']
l.longitude = config['location']['lon']
l.timezone = config['location']['timezone']
l.elevation = config['location']['elevation']
sun = l.sun()
# Full brightness and all colors as fallback
brightness = 255
red = 0
green = 0
blue = 0
# Set brightness and redness based on the sun
now = datetime.now(timezone.utc)
if(now >= sun['dawn'] and now < sun['sunrise']):
red = config['brightness']['dawn']['red']
brightness = config['brightness']['dawn']['brightness']
blue = -red
if(now >= sun['sunrise'] and now < sun['noon']):
red = config['brightness']['sunrise']['red']
brightness = config['brightness']['sunrise']['brightness']
blue = -red
if(now >= sun['noon'] and now < sun['sunset']):
red = config['brightness']['noon']['red']
brightness = config['brightness']['noon']['brightness']
blue = -red
if(now >= sun['sunset'] and now < sun['dusk']):
red = config['brightness']['sunset']['red']
brightness = config['brightness']['sunset']['brightness']
blue = -red
if(now >= sun['dusk']):
red = config['brightness']['dusk']['red']
brightness = config['brightness']['dusk']['brightness']
blue = -red
bl.set_brightness(brightness, smooth=True, duration=3)
# Get processed image from thumbor
url = 'http://localhost:8888/unsafe/trim/800x450/smart/filters:rgb(%s,%s,%s)/Downloads/%s' % (
red, green, blue, file_name)
response = requests.get(url, stream=True)
# Show the processed image
Response(response.raw, mimetype='image/jpeg')
def extract_exif_date(photo):
ret = {}
im = Image.open(photo)
exif_data = im._getexif()
try:
for tag, value in exif_data.items():
decoded = TAGS.get(tag, tag)
ret[decoded] = value
datetime_object = datetime.datetime.strptime(
ret.get('DateTime'), '%Y:%m:%d %H:%M:%S'
)
unix_time = datetime_object.timestamp()
except Exception:
# File modification date as fallback
unix_time = os.path.getmtime(photo)
return unix_time
def update_config(cfg):
with open('%s/src/config.json' % rpi_folder, 'w') as outfile:
json.dump(cfg, outfile)
if __name__ == '__main__':
app.run(debug=True, host='0.0.0.0')
|
Python
| 0
|
@@ -1014,61 +1014,181 @@
me'%0A
-open('%25s/src/config.json' %25 rpi_folder, 'a+').close()
+if not os.path.isfile('%25s/src/config.json' %25 rpi_folder):%0A with open('%25s/src/config.json' %25 rpi_folder, 'w') as f:%0A f.write('%7B%7D')%0A f.close()%0Aelse:%0A pass%0A
%0A%0A#
|
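
The diff seeds the config file with '{}' when it is missing, so json.load never sees a zero-byte file, and then merges user values over the template. A compact sketch of that initialize-then-merge flow (load_config is an illustrative name; jsonmerge is the same library the module imports):

import json
import os
from jsonmerge import merge

def load_config(path, template_path):
    # First run: create an empty-but-valid JSON document.
    if not os.path.isfile(path):
        with open(path, 'w') as f:
            f.write('{}')
    with open(template_path) as base, open(path) as head:
        # Values from the user's file win over template defaults.
        return merge(json.load(base), json.load(head))
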
0f7f8d71dc1c8eda869c423a324064d4bc419879
|
Use ContextualZipFile and contextlib.closing for archiveutil
|
setuptools/archive_util.py
|
setuptools/archive_util.py
|
"""Utilities for extracting common archive formats"""
__all__ = [
"unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter",
"UnrecognizedFormat", "extraction_drivers", "unpack_directory",
]
import zipfile
import tarfile
import os
import shutil
import posixpath
from pkg_resources import ensure_directory
from distutils.errors import DistutilsError
class UnrecognizedFormat(DistutilsError):
"""Couldn't recognize the archive type"""
def default_filter(src,dst):
"""The default progress/filter callback; returns True for all files"""
return dst
def unpack_archive(filename, extract_dir, progress_filter=default_filter,
drivers=None):
"""Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat``
`progress_filter` is a function taking two arguments: a source path
internal to the archive ('/'-separated), and a filesystem path where it
will be extracted. The callback must return the desired extract path
(which may be the same as the one passed in), or else ``None`` to skip
that file or directory. The callback can thus be used to report on the
progress of the extraction, as well as to filter the items extracted or
alter their extraction paths.
`drivers`, if supplied, must be a non-empty sequence of functions with the
same signature as this function (minus the `drivers` argument), that raise
``UnrecognizedFormat`` if they do not support extracting the designated
archive type. The `drivers` are tried in sequence until one is found that
does not raise an error, or until all are exhausted (in which case
``UnrecognizedFormat`` is raised). If you do not supply a sequence of
drivers, the module's ``extraction_drivers`` constant will be used, which
means that ``unpack_zipfile`` and ``unpack_tarfile`` will be tried, in that
order.
"""
for driver in drivers or extraction_drivers:
try:
driver(filename, extract_dir, progress_filter)
except UnrecognizedFormat:
continue
else:
return
else:
raise UnrecognizedFormat(
"Not a recognized archive type: %s" % filename
)
def unpack_directory(filename, extract_dir, progress_filter=default_filter):
""""Unpack" a directory, using the same interface as for archives
Raises ``UnrecognizedFormat`` if `filename` is not a directory
"""
if not os.path.isdir(filename):
raise UnrecognizedFormat("%s is not a directory" % (filename,))
paths = {filename:('',extract_dir)}
for base, dirs, files in os.walk(filename):
src,dst = paths[base]
for d in dirs:
paths[os.path.join(base,d)] = src+d+'/', os.path.join(dst,d)
for f in files:
target = os.path.join(dst,f)
target = progress_filter(src+f, target)
if not target:
continue # skip non-files
ensure_directory(target)
f = os.path.join(base,f)
shutil.copyfile(f, target)
shutil.copystat(f, target)
def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
"""Unpack zip `filename` to `extract_dir`
Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as determined
by ``zipfile.is_zipfile()``). See ``unpack_archive()`` for an explanation
of the `progress_filter` argument.
"""
if not zipfile.is_zipfile(filename):
raise UnrecognizedFormat("%s is not a zip file" % (filename,))
z = zipfile.ZipFile(filename)
try:
for info in z.infolist():
name = info.filename
# don't extract absolute paths or ones with .. in them
if name.startswith('/') or '..' in name.split('/'):
continue
target = os.path.join(extract_dir, *name.split('/'))
target = progress_filter(name, target)
if not target:
continue
if name.endswith('/'):
# directory
ensure_directory(target)
else:
# file
ensure_directory(target)
data = z.read(info.filename)
f = open(target,'wb')
try:
f.write(data)
finally:
f.close()
del data
unix_attributes = info.external_attr >> 16
if unix_attributes:
os.chmod(target, unix_attributes)
finally:
z.close()
def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
"""Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`
Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined
by ``tarfile.open()``). See ``unpack_archive()`` for an explanation
of the `progress_filter` argument.
"""
try:
tarobj = tarfile.open(filename)
except tarfile.TarError:
raise UnrecognizedFormat(
"%s is not a compressed or uncompressed tar file" % (filename,)
)
try:
tarobj.chown = lambda *args: None # don't do any chowning!
for member in tarobj:
name = member.name
# don't extract absolute paths or ones with .. in them
if not name.startswith('/') and '..' not in name.split('/'):
prelim_dst = os.path.join(extract_dir, *name.split('/'))
# resolve any links and extract the link targets as normal files
while member is not None and (member.islnk() or member.issym()):
linkpath = member.linkname
if member.issym():
linkpath = posixpath.join(posixpath.dirname(member.name), linkpath)
linkpath = posixpath.normpath(linkpath)
member = tarobj._getmember(linkpath)
if member is not None and (member.isfile() or member.isdir()):
final_dst = progress_filter(name, prelim_dst)
if final_dst:
if final_dst.endswith(os.sep):
final_dst = final_dst[:-1]
try:
tarobj._extract_member(member, final_dst) # XXX Ugh
except tarfile.ExtractError:
pass # chown/chmod/mkfifo/mknode/makedev failed
return True
finally:
tarobj.close()
extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile
|
Python
| 0
|
@@ -279,16 +279,34 @@
sixpath%0A
+import contextlib%0A
from pkg
@@ -339,16 +339,35 @@
irectory
+, ContextualZipFile
%0Afrom di
@@ -3571,20 +3571,23 @@
-z = zipfile.
+with Contextual
ZipF
@@ -3599,24 +3599,21 @@
ilename)
-%0A try
+ as z
:%0A
@@ -4548,39 +4548,8 @@
es)%0A
- finally:%0A z.close()%0A
%0A%0Ade
@@ -5081,27 +5081,55 @@
)%0A
-try
+with contextlib.closing(tarobj)
:%0A ta
@@ -6458,16 +6458,16 @@
failed%0A
+
@@ -6481,44 +6481,8 @@
True
-%0A finally:%0A tarobj.close()
%0A%0Aex
|
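
contextlib.closing, which the diff uses for the tar branch, wraps any object that has a close() method in a context manager, replacing the try/finally boilerplate. A sketch, assuming some archive.tar.gz exists and a Python version where TarFile is not already usable as a context manager:

import contextlib
import tarfile

with contextlib.closing(tarfile.open('archive.tar.gz')) as tarobj:
    for member in tarobj:
        print(member.name)
# tarobj.close() has run here, even if the loop body raised.
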
a58646ee72fc894a2f2b885b242cc283a0addd7c
|
remove args
|
src/app.py
|
src/app.py
|
import argparse
import os
from actions import server, client
# the main entry point for the application
# for simplicity, the user who wants to receive files chooses "listen" at
# runtime, and the user who wants to share files chooses "serve"
# location from which files should be served
app_directory = '/home/chris/blaster'
def main():
# get the arguments
parser = argparse.ArgumentParser(description="Exchange files!")
parser.add_argument('action',
help="To be the server, type serve; to be the client, type listen",
)
# parser.add_argument('directory',
# help="The top level directory from which to serve files, e.g. '~/Downloads'",
# )
args = parser.parse_args()
app_runner(args.action) #, args.directory)
def app_runner(how):
if how == "serve":
if os.path.exists(app_directory) == False:
os.mkdir(app_directory)
server.main(app_directory)
elif how == "listen":
client.main()
else:
return u'Please specify either listen or serve'
if __name__ == '__main__':
main()
|
Python
| 0.999811
|
@@ -551,179 +551,8 @@
)%0A
- # parser.add_argument('directory',%0A # help=%22The top level directory from which to serve files, e.g. '~/Downloads'%22,%0A # )%0A
@@ -609,27 +609,8 @@
ion)
- #, args.directory)
%0A%0Ade
|
0f216b43f42ebabedda701fafefe271a223798cb
|
Fix mcscf example
|
examples/mcscf/41-mcscf_with_given_densityfit_ints.py
|
examples/mcscf/41-mcscf_with_given_densityfit_ints.py
|
#!/usr/bin/env python
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
import tempfile
import h5py
from pyscf import gto, df, scf, mcscf
'''
Input Cholesky decomposed integrals for CASSCF
'''
mol = gto.M(atom='H 0 0 0; F 0 0 1', basis='ccpvdz')
#
# Integrals in memory. The size of the integral array is (M,N*(N+1)/2), where
# the last two AO indices are compressed due to the symmetry
#
int3c = df.incore.cholesky_eri(mol, auxbasis='ccpvdz-fit')
mf = scf.density_fit(scf.RHF(mol))
mf._cderi = int3c
mf.kernel()
# 3-center DF or Cholesky decomposed integrals need to be initialized once in
# mf._cderi. DFCASSCF method automatically uses the approximate integrals
mc = mcscf.DFCASSCF(mf, 8, 8)
mc.kernel()
#
# Integrals on disk
#
ftmp = tempfile.NamedTemporaryFile()
df.outcore.cholesky_eri(mol, ftmp.name, auxbasis='ccpvdz-fit')
with h5py.File(ftmp.name, 'r') as file1:
mf = scf.density_fit(scf.RHF(mol))
# Note, here the integral object file1['eri_mo'] is not loaded in memory.
# It is still the HDF5 array object held on disk. The HDF5 array can be used
# the same way as the regular numpy ndarray stored in memory.
mf._cderi = file1['eri_mo']
mf.kernel()
# Note the mc object must be put inside the "with" statement block because it
# still needs to access the HDF5 integral array on disk
mc = mcscf.DFCASSCF(mf, 8, 8)
mc.kernel()
|
Python
| 0.000001
|
@@ -481,16 +481,24 @@
ol))%0Amf.
+with_df.
_cderi =
@@ -596,24 +596,32 @@
nce in%0A# mf.
+with_df.
_cderi. DFC
@@ -1148,16 +1148,24 @@
%0A mf.
+with_df.
_cderi =
|
e610eb413915e0d124fe7b1df37f69e128e8fd41
|
fix decode issue for local testing in email user mgmt cmd
|
sources/management/commands/email_user.py
|
sources/management/commands/email_user.py
|
from django.core.management.base import BaseCommand, CommandError
from django.core.mail import send_mail
from django.contrib.auth.models import User
from django.utils.encoding import force_bytes
from django.utils.http import urlsafe_base64_encode
from sesame import utils
from sources.models import Person
from sources.forms import FIELDS_PUBLIC
from sources.tokens import account_confirmation_token
from sourcelist.settings import PROJECT_NAME, EMAIL_SENDER, SITE_URL
def email_user(email_address, status):
person = Person.objects.get(email_address=email_address)
person_id = person.id
fields = FIELDS_PUBLIC ## abstracted to use same fields as the submission form
fields.append('status') ## add status field bc it's not included in the SubmitForm and it's necessary for sending email
person = Person.objects.filter(email_address=email_address).values(*fields).exclude()[0]
person_info = '<table>'
spaces = ' ' * 5
## loop thru and unpack values
for key, value in person.items():
if value:
new_key = key.title().replace('_', ' ')
person_info += '\
<tr> \
<td><b>{}</b>:</td> \
<td>{}</td> \
<td>{}</td>\
</tr>'.format(
new_key,
spaces,
value
)
person_info += '</table>'
admin_url = '{}/admin/sources/person/{}/change/'.format(SITE_URL, person_id)
## django-sesame bits for magic link
user = User.objects.get(email=email_address)
# login_token = utils.get_query_string(user) ## using their URL
login_token = utils.get_parameters(user) ## making your own URL
login_link = '{}?method=magic&url_auth_token={}'.format(admin_url, login_token['url_auth_token']) ## change from admin url to live url?
## confirmation url (for both user and admin?)
confirm_token = account_confirmation_token.make_token(user)
uid = urlsafe_base64_encode(force_bytes(user.pk))
confirm_url = '{}/confirmation/{}/{}'.format(
SITE_URL,
uid.decode(),
confirm_token
)
status = person['status']
status_type = status.split('_')[0]
message = ''
if status_type == 'added':
subject_title = 'Please confirm your profile'
# if status == 'added_by_self':
# subject_title += 'yourself'
# elif status == 'added_by_other':
# subject_title += 'someone else'
# elif status == 'added_by_admin':
# subject_title += 'an admin'
html_message = '\
<p>{project_name} is a searchable database of underrepresented experts in the areas of science, health and the environment. Anyone who considers themselves underrepresented and is willing to respond to journalists on deadline is encouraged to join (including but not limited to appearance, ethnicity, gender expression, gender identity, language, mental health experience, nationality, physical abilities, race, religion, sex, sexual orientation, etc.).</p> \
<p>This database aims to make it easy for journalists and others to include a wider range of backgrounds, experiences and perspectives in their work. By doing so, we can improve our coverage and better reflect the world we cover.</p> \
<p>To confirm you would like be included in the {project_name} public database and to confirm the following information is correct, please click here:</p> \
<p>{confirm_url}</p> \
<p>{person_info}</p> \
<p>If the information is incorrect, please edit your entry:</p> \
<p>{login_link}</p> \
<p>View the database:</p> \
<p>{site_url}</p>\
'.format(
project_name=PROJECT_NAME,
confirm_url=confirm_url,
person_info=person_info,
login_link=login_link,
site_url=SITE_URL
)
# elif status_type == 'approved':
# subject_title = 'You have been approved as a source!'
# html_message = 'Congratulations! Your entry has been approved and now be viewed or updated by you here: {}'.format(person_url)
subject = '[{}] {}'.format(PROJECT_NAME, subject_title)
sender = EMAIL_SENDER
recipients = [email_address]
# reply_email = sender
send_mail(
subject,
message,
sender,
recipients,
# reply_to=[reply_email],
html_message=html_message,
fail_silently=False,
)
class Command(BaseCommand):
help = 'Email new user when added.'
def add_arguments(self, parser):
## required
parser.add_argument('email',
help='Specify the user email.'
)
parser.add_argument('status',
help='Specify the status.'
)
## optional
# parser.add_argument('-t' '--test',
# action='store_true',
# # type=str,
# dest='test',
# default=False,
# help="Specific whether it's a test or not"
# )
def handle(self, *args, **options):
## unpack args
email_address = options['email']
status = options['status']
## call the function
email_user(email_address, status)
|
Python
| 0.000114
|
@@ -2033,16 +2033,159 @@
er.pk))%0A
+ # necessary for prod%0A try:%0A uid = uid.decode()%0A # this has only been observed locally%0A except AttributeError:%0A pass%0A
conf
@@ -2255,25 +2255,16 @@
uid
-.decode()
,%0A
|
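
The diff's try/decode/except-AttributeError dance normalizes a value that is bytes under one Django version and str under another. The same idea as a small helper (ensure_text is an illustrative name):

def ensure_text(value):
    # bytes has .decode(); str does not, so the fallback is a no-op.
    try:
        return value.decode()
    except AttributeError:
        return value

assert ensure_text(b'abc') == 'abc'
assert ensure_text('abc') == 'abc'
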
6035b13efda5180a42d00f1e1f7f75046f988e46
|
Raise more meaningful exception when trying to access methods on the Courier handler.
|
launchpad/nodes/courier/node.py
|
launchpad/nodes/courier/node.py
|
# Copyright 2020 DeepMind Technologies Limited. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A PyClassNode subclass that also exposes the instance as a Courier server."""
import datetime
from typing import Callable, Generic, TypeVar
from absl import logging
import courier
from launchpad import address as lp_address
from launchpad.launch import worker_manager
from launchpad.nodes import base
from launchpad.nodes.courier import courier_utils
from launchpad.nodes.python import node as python
WorkerType = TypeVar('WorkerType')
CourierClient = courier.Client
COURIER_PORT_NAME = 'courier'
class CourierHandle(base.Handle[CourierClient]):
"""Handle of a CourierNode."""
def __init__(self, address: lp_address.Address, **kwargs):
self._address = address
self._kwargs = kwargs
def set_client_kwargs(self, **kwargs):
self._kwargs = kwargs
def dereference(self) -> CourierClient:
return CourierClient(self._address.resolve(), **self._kwargs)
class CourierNode(python.PyClassNode[CourierHandle, WorkerType],
Generic[WorkerType]):
"""Exposes a Python instance as a Courier server.
This will initialize the object and expose all its public methods as Courier
RPC methods. Attributes and method names starting with underscore will not be
exposed. After that, run() will be called if it's provided.
When run() is provided, the server will terminate at the end of run().
Otherwise, it will serve indefinitely (until the job/experiment terminates).
Advanced usage: if the object has a set_courier_server() method, it will be
called with the courier server object passed in as the only argument. The
courier server will then be managed by the user (e.g., need to manually call
Start() of the courier server).
"""
def __init__(self,
constructor: Callable[..., WorkerType],
*args,
courier_kwargs=None,
**kwargs):
super().__init__(constructor, *args, **kwargs) # pytype:disable=wrong-arg-types
self._address = lp_address.Address(COURIER_PORT_NAME)
self.allocate_address(self._address)
if courier_kwargs is None:
courier_kwargs = dict()
self._courier_kwargs = courier_kwargs
# Set in `run()` method.
self._server = None # type: courier.Server
def configure(self, *args, **kwargs):
"""Sets the args and kwargs being passed to the constructor.
This is useful for achieving cyclic referencing. E.g.:
foo_node = CourierNode(_foo)
foo_handle = foo_node.create_handle()
bar_node = CourierNode(_bar)
bar_handle = bar_node.create_handle()
foo_node.configure(bar=bar_handle)
bar_node.configure(foo=foo_handle)
p.add_node(foo_node)
p.add_node(bar_node)
Args:
*args: non-keyword arguments to pass to the constructor.
**kwargs: keyword arguments to pass to the constructor.
"""
self._args = args
self._kwargs = kwargs
# Somehow pytype doesn't recognize CourierNode as the subclass.
self._collect_input_handles() # pytype:disable=wrong-arg-types
def create_handle(self) -> CourierHandle:
return self._track_handle(CourierHandle(self._address))
def run(self) -> None:
instance = self._construct_instance() # pytype:disable=wrong-arg-types
self._server = courier_utils.make_courier_server(
instance,
port=lp_address.get_port_from_address(self._address.resolve()),
**self._courier_kwargs)
if hasattr(instance, 'set_courier_server'):
# Transfer the ownership of the server to the instance, so that the user
# can decide when to start and stop the courier server.
instance.set_courier_server(self._server)
if hasattr(instance, 'run') and self._should_run:
instance.run()
else:
# Start the server after instantiation and serve forever
self._server.Start()
try:
if hasattr(instance, 'run') and self._should_run:
# If a run() method is provided, stop the server at the end of run().
instance.run()
else:
worker_manager.wait_for_stop()
finally:
self._server.Stop()
self._server.Join()
@property
def courier_address(self) -> lp_address.Address:
return self._address
|
Python
| 0.000039
|
@@ -1310,24 +1310,263 @@
s = kwargs%0A%0A
+ def __getattr__(self, method):%0A raise AttributeError(%0A f'%5C'CourierHandle%5C' object has no attribute %5C'%7Bmethod%7D%5C'. '%0A 'Most likely you need to dereference handle before use '%0A '(see launchpad.maybe_dereference).')%0A%0A
def set_cl
|
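
The added __getattr__ works because Python only calls it when normal attribute lookup fails, so existing attributes keep working while unknown names get a pointed message. A minimal standalone version (Handle and step are illustrative):

class Handle(object):
    def __getattr__(self, name):
        # Only reached when regular lookup fails.
        raise AttributeError(
            "'Handle' object has no attribute '{}'. Most likely you "
            "need to dereference the handle before use.".format(name))

try:
    Handle().step()
except AttributeError as exc:
    print(exc)
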
d7b260005a30cfd848eefe62f021cb4bf7a59087
|
Use tempfile for default upload directory
|
pyfarm/master/api/agent_updates.py
|
pyfarm/master/api/agent_updates.py
|
# No shebang line, this module is meant to be imported
#
# Copyright 2014 Ambient Entertainment Gmbh & Co. KG
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Agent Updates
-------------
The API allows access to agent update packages, possibly through redirects
"""
import re
from os import makedirs
from os.path import join, exists
try:
from httplib import BAD_REQUEST, CREATED
except ImportError: # pragma: no cover
from http.client import BAD_REQUEST, CREATED
from werkzeug.utils import secure_filename
from flask.views import MethodView
from flask import request, g
from pyfarm.core.config import read_env
from pyfarm.core.logger import getLogger
from pyfarm.master.utility import jsonify
logger = getLogger("api.agents")
class AgentUpdatesAPI(MethodView):
def put(self, version):
"""
A ``PUT`` to this endpoint will upload a new version of pyfarm-agent to
be used for agent auto-updates. The update must be a zip file.
.. http:put:: /api/v1/agents/updates/<string:version> HTTP/1.1
**Request**
.. sourcecode:: http
PUT /api/v1/agents/updates/1.2.3 HTTP/1.1
Content-Type: application/zip
<binary data>
**Response**
.. sourcecode:: http
HTTP/1.1 200 OK
Content-Type: application/json
:statuscode 200: The update was put in place
:statuscode 400: there was something wrong with the request (such as an
invalid version number specified or the mime type not
being application/zip)
"""
if request.mimetype != "application/zip":
return (jsonify(error="Data for agent updates must be "
"application/zip"), BAD_REQUEST)
if not re.match("\d+(\.\d+(\.\d+)?)?((-pre\d?)|(-dev\d?)|(-rc?\d?)|"
"(-alpha\d?)|(-beta\d?))?$", version):
return (jsonify(error="Version is not an acceptable version number"),
BAD_REQUEST)
updates_dir = read_env("PYFARM_AGENT_UPDATES_DIR", "/tmp/pyfarm-updates")
if not exists(updates_dir):
makedirs(updates_dir)
path = join(updates_dir, "pyfarm-agent-%s.zip" % version)
with open(path, "wb+") as file:
file.write(request.data)
return "", CREATED
|
Python
| 0
|
@@ -771,16 +771,32 @@
port re%0A
+import tempfile%0A
from os
@@ -2662,15 +2662,69 @@
IR%22,
- %22/tmp/
+%0A join(tempfile.gettempdir(), %22
pyfa
@@ -2735,16 +2735,17 @@
pdates%22)
+)
%0A
|
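
tempfile.gettempdir(), which the diff substitutes for the hard-coded /tmp path, resolves to the platform's temp directory (honouring TMPDIR and friends). A sketch of the portable default, using os.environ.get in place of pyfarm's read_env helper:

import os
import tempfile

updates_dir = os.environ.get(
    "PYFARM_AGENT_UPDATES_DIR",
    os.path.join(tempfile.gettempdir(), "pyfarm-updates"))
print(updates_dir)
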
cd828f76511d439af3baa0d209d6e23a19776142
|
Check if minValue/maxValue is not None before setting default uiMin/uiMax
|
Python/kraken/core/objects/attributes/number_attribute.py
|
Python/kraken/core/objects/attributes/number_attribute.py
|
"""Kraken - objects.Attributes.NumberAttribute module.
Classes:
NumberAttribute - Base Attribute.
"""
from attribute import Attribute
class NumberAttribute(Attribute):
"""Number Attributee. Base class for number attribute types"""
def __init__(self, name, value=0, minValue=None, maxValue=None):
super(NumberAttribute, self).__init__(name, value)
self._min = None
self._max = None
self._uiMin = None
self._uiMax = None
if minValue is not None:
self.setMin(minValue)
if maxValue is not None:
self.setMax(maxValue)
self.setUIMin(minValue)
self.setUIMax(maxValue)
# ==================
# Min / Max Methods
# ==================
def getMin(self):
"""Gets the minimum value for this attribute.
Return:
Float / Integer - minimum value.
"""
return self._min
def setMin(self, minimum):
"""Sets the minimum value for the attribute.
Note: Only works on float or integer attributes.
Arguments:
minimum -- float / integer, minimum value the attribute can have.
Return:
True if successful.
"""
assert type(minimum) in (int, float), "'minimum' is not of type 'int' or 'float'."
self._min = minimum
return True
def getMax(self):
"""Gets the maximum value for this attribute.
Return:
Float / Integer - maximum value.
"""
return self._max
def setMax(self, maximum):
"""Sets the maximum value for the attribute.
Note: Only works on float or integer attributes.
Arguments:
maximum -- float / integer, maximum value the attribute can have.
Return:
True if successful.
"""
assert type(maximum) in (int, float), "'maximum' is not of type 'int' or 'float'."
self._max = maximum
return True
def getUIMin(self):
"""Gets the default minimum ui slider value for this attribute.
Return:
Float / Integer - default minimum ui slider value.
"""
return self._uiMin
def setUIMin(self, minimum):
"""Sets the default minimum ui slider value for the attribute.
Note: Only works on float or integer attributes.
Arguments:
minimum -- float / integer, default minimum ui slider value.
Return:
True if successful.
"""
attrType = self.__class__.__name__
if attrType is 'IntegerAttribute':
if type(minimum) is not int:
raise TypeError("UiMin value is not of type 'int'.")
if attrType is 'FloatAttribute':
if type(minimum) not in (int, float):
raise TypeError("UiMin value is not of type 'int' or 'float'.")
if self._uiMax is not None:
if minimum > self._uiMax:
raise ValueError('UiMin value is greater than attribute uiMax')
if minimum > self._max:
raise ValueError('UiMin value is greater than attribute maximum')
if minimum < self._min:
raise ValueError('UiMin value is less than attribute minimum')
self._uiMin = minimum
return True
def getUIMax(self):
"""Gets the default maximum ui slider value for this attribute.
Return:
Float / Integer - default maximum ui slider value.
"""
return self._uiMax
def setUIMax(self, maximum):
"""Sets the default maximum ui slider value for the attribute.
Note: Only works on float or integer attributes.
Arguments:
maximum -- float / integer, default maximum ui slider value.
Return:
True if successful.
"""
attrType = self.__class__.__name__
if attrType is 'IntegerAttribute':
if type(maximum) is not int:
raise TypeError("UiMax value is not of type 'int'.")
if attrType is 'FloatAttribute':
if type(maximum) not in (int, float):
raise TypeError("UiMax value is not of type 'int' or 'float'.")
if self._uiMin is not None:
if maximum < self._uiMin:
raise ValueError('UiMax value is less than attribute uiMin')
if maximum < self._min:
raise ValueError('UiMax value is less than attribute minimum')
if maximum > self._max:
raise ValueError('UiMax value is greater than attribute maximum')
self._uiMax = maximum
return True
|
Python
| 0.000001
|
@@ -615,32 +615,107 @@
-self.setUIMin(minValue)%0A
+if minValue is not None:%0A self.setUIMin(minValue)%0A%0A if maxValue is not None:%0A
|
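
The fix simply guards the UI-default setters, because setUIMin/setUIMax type-check their argument and None fails that check. The failure mode in miniature (set_ui_min here is a stand-in for the real setter):

def set_ui_min(value):
    if not isinstance(value, (int, float)):
        raise TypeError("UiMin value is not numeric.")
    return value

min_value = None
# Calling set_ui_min(min_value) unconditionally would raise for None:
if min_value is not None:
    set_ui_min(min_value)
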
9a4f1da48e72627aa0ff358a3dafe8bb5639482a
|
refresh access token on each verification
|
componentsdb/ui.py
|
componentsdb/ui.py
|
"""
Traditional Web UI.
"""
from functools import wraps
from flask import (
Blueprint, redirect, url_for, render_template, request, session
)
from werkzeug.exceptions import BadRequest, Unauthorized
from componentsdb.app import set_current_user_with_token
from componentsdb.auth import user_for_google_id_token
ui = Blueprint(
'ui', __name__, template_folder='ui/templates', static_folder='ui/static',
static_url_path='/ui_static',
)
AUTH_TOKEN_SESSION_KEY = 'componentsdb_auth'
def try_verify_session():
"""Like verify_session but return a boolean indicating success rather than
raising an exception."""
try:
verify_session()
except Unauthorized:
return False
return True
def verify_session():
"""Verify the authorisation in the current session. Raises Unauthorized if
the session is not authorised. Sets current_user if the session is
authorised.
"""
t = session.get(AUTH_TOKEN_SESSION_KEY)
if t is None:
raise Unauthorized('no user token provided')
set_current_user_with_token(t)
def auth_or_signin(f):
"""Decorator for a view which re-directs to the sign in page if there is no
current user. The sign in page is given a query string which requests the
current URL as the redirect."""
@wraps(f)
def view(*args, **kwargs):
if not try_verify_session():
return redirect(url_for('ui.signin', target=request.url))
return f(*args, **kwargs)
return view
@ui.route('/')
@auth_or_signin
def index():
return render_template('index.html')
@ui.route('/auth/signin')
def signin():
redir_url = request.args.get('target', url_for('ui.index'))
# Already signed in?
if try_verify_session():
return redirect(redir_url)
# Have we been given a token?
token = request.args.get('token', None)
if token is not None:
set_current_user_with_token(token)
return redirect(redir_url)
# Show sign in
return render_template('signin.html')
@ui.route('/auth/google')
def signin_with_google_token():
redir_url = request.args.get('target', url_for('ui.index'))
token = request.args.get('token', None)
if token is None:
raise BadRequest('no token given')
# Get auth token and add to session
user = user_for_google_id_token(request.args['token'])
session[AUTH_TOKEN_SESSION_KEY] = user.token
return redirect(redir_url)
@ui.route('/auth/signout')
def signout():
redir_url = request.args.get('target', url_for('ui.index'))
# Clear token from user session
del session[AUTH_TOKEN_SESSION_KEY]
return redirect(redir_url)
|
Python
| 0
|
@@ -138,16 +138,19 @@
session
+, g
%0A)%0Afrom
@@ -1071,16 +1071,185 @@
ken(t)%0A%0A
+ # Update the token in the session to make sure that the user always has a%0A # good long expiry windows%0A session%5BAUTH_TOKEN_SESSION_KEY%5D = g.current_user.token%0A%0A
def auth
|
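
Re-writing the session token after every successful verification gives a sliding expiry window: as long as the user stays active, the token never runs out. A sketch of that step in isolation, assuming a Flask request context and that g.current_user exposes a fresh token, as in the view code above:

from flask import g, session

AUTH_TOKEN_SESSION_KEY = 'componentsdb_auth'

def refresh_session_token():
    # Called after set_current_user_with_token() succeeds.
    session[AUTH_TOKEN_SESSION_KEY] = g.current_user.token
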
b92ce9a245ad9582a833b4d0ce62bf1230e99a68
|
Allow sending emails via Server Script (#11162)
|
frappe/utils/safe_exec.py
|
frappe/utils/safe_exec.py
|
import os, json, inspect
import mimetypes
from html2text import html2text
from RestrictedPython import compile_restricted, safe_globals
import RestrictedPython.Guards
import frappe
import frappe.utils
import frappe.utils.data
from frappe.website.utils import (get_shade, get_toc, get_next_link)
from frappe.modules import scrub
from frappe.www.printview import get_visible_columns
import frappe.exceptions
import frappe.integrations.utils
class ServerScriptNotEnabled(frappe.PermissionError): pass
def safe_exec(script, _globals=None, _locals=None):
# script reports must be enabled via site_config.json
if not frappe.conf.server_script_enabled:
frappe.throw('Please Enable Server Scripts', ServerScriptNotEnabled)
# build globals
exec_globals = get_safe_globals()
if _globals:
exec_globals.update(_globals)
# execute script compiled by RestrictedPython
exec(compile_restricted(script), exec_globals, _locals) # pylint: disable=exec-used
def get_safe_globals():
datautils = frappe._dict()
if frappe.db:
date_format = frappe.db.get_default("date_format") or "yyyy-mm-dd"
time_format = frappe.db.get_default("time_format") or "HH:mm:ss"
else:
date_format = "yyyy-mm-dd"
time_format = "HH:mm:ss"
add_module_properties(frappe.utils.data, datautils, lambda obj: hasattr(obj, "__call__"))
if "_" in getattr(frappe.local, 'form_dict', {}):
del frappe.local.form_dict["_"]
user = getattr(frappe.local, "session", None) and frappe.local.session.user or "Guest"
out = frappe._dict(
# make available limited methods of frappe
json=json,
dict=dict,
_dict=frappe._dict,
frappe=frappe._dict(
flags=frappe.flags,
format=frappe.format_value,
format_value=frappe.format_value,
date_format=date_format,
time_format=time_format,
format_date=frappe.utils.data.global_date_format,
form_dict=getattr(frappe.local, 'form_dict', {}),
get_meta=frappe.get_meta,
get_doc=frappe.get_doc,
get_cached_doc=frappe.get_cached_doc,
get_list=frappe.get_list,
get_all=frappe.get_all,
get_system_settings=frappe.get_system_settings,
utils=datautils,
get_url=frappe.utils.get_url,
render_template=frappe.render_template,
msgprint=frappe.msgprint,
throw=frappe.throw,
user=user,
get_fullname=frappe.utils.get_fullname,
get_gravatar=frappe.utils.get_gravatar_url,
full_name=frappe.local.session.data.full_name if getattr(frappe.local, "session", None) else "Guest",
request=getattr(frappe.local, 'request', {}),
session=frappe._dict(
user=user,
csrf_token=frappe.local.session.data.csrf_token if getattr(frappe.local, "session", None) else ''
),
make_get_request = frappe.integrations.utils.make_get_request,
make_post_request = frappe.integrations.utils.make_post_request,
socketio_port=frappe.conf.socketio_port,
get_hooks=frappe.get_hooks,
sanitize_html=frappe.utils.sanitize_html
),
style=frappe._dict(
border_color='#d1d8dd'
),
get_toc=get_toc,
get_next_link=get_next_link,
_=frappe._,
get_shade=get_shade,
scrub=scrub,
guess_mimetype=mimetypes.guess_type,
html2text=html2text,
dev_server=1 if os.environ.get('DEV_SERVER', False) else 0
)
add_module_properties(frappe.exceptions, out.frappe, lambda obj: inspect.isclass(obj) and issubclass(obj, Exception))
if not frappe.flags.in_setup_help:
out.get_visible_columns = get_visible_columns
out.frappe.date_format = date_format
out.frappe.time_format = time_format
out.frappe.db = frappe._dict(
get_list = frappe.get_list,
get_all = frappe.get_all,
get_value = frappe.db.get_value,
set_value = frappe.db.set_value,
get_single_value = frappe.db.get_single_value,
get_default = frappe.db.get_default,
escape = frappe.db.escape,
)
if frappe.response:
out.frappe.response = frappe.response
out.update(safe_globals)
# default writer allows write access
out._write_ = _write
out._getitem_ = _getitem
# allow iterators and list comprehension
out._getiter_ = iter
out._iter_unpack_sequence_ = RestrictedPython.Guards.guarded_iter_unpack_sequence
out.sorted = sorted
return out
def _getitem(obj, key):
# guard function for RestrictedPython
# allow any key to be accessed as long as it does not start with underscore
if isinstance(key, str) and key.startswith('_'):
raise SyntaxError('Key starts with _')
return obj[key]
def _write(obj):
# guard function for RestrictedPython
# allow writing to any object
return obj
def add_module_properties(module, data, filter_method):
for key, obj in module.__dict__.items():
if key.startswith("_"):
# ignore
continue
if filter_method(obj):
# only allow functions
data[key] = obj
|
Python
| 0
|
@@ -2225,16 +2225,47 @@
e.throw,
+%0A%09%09%09sendmail = frappe.sendmail,
%0A%0A%09%09%09use
|
becef09e0680786343c581d984e7de5dcb961d16
|
Fix handling of failed html parse
|
frappe/utils/xlsxutils.py
|
frappe/utils/xlsxutils.py
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
import openpyxl
import re
from openpyxl.styles import Font
from openpyxl import load_workbook
from six import BytesIO, string_types
ILLEGAL_CHARACTERS_RE = re.compile(r'[\000-\010]|[\013-\014]|[\016-\037]')
# return xlsx file object
def make_xlsx(data, sheet_name, wb=None):
if wb is None:
wb = openpyxl.Workbook(write_only=True)
ws = wb.create_sheet(sheet_name, 0)
row1 = ws.row_dimensions[1]
row1.font = Font(name='Calibri',bold=True)
for row in data:
clean_row = []
for item in row:
if isinstance(item, string_types) and (sheet_name not in ['Data Import Template', 'Data Export']):
value = handle_html(item)
else:
value = item
if isinstance(item, string_types) and next(ILLEGAL_CHARACTERS_RE.finditer(value), None):
# Remove illegal characters from the string
value = re.sub(ILLEGAL_CHARACTERS_RE, '', value)
clean_row.append(value)
ws.append(clean_row)
xlsx_file = BytesIO()
wb.save(xlsx_file)
return xlsx_file
def handle_html(data):
# return if no html tags found
data = frappe.as_unicode(data)
if '<' not in data:
return data
if '>' not in data:
return data
from html2text import HTML2Text
h = HTML2Text()
h.unicode_snob = True
h = h.unescape(data or "")
obj = HTML2Text()
obj.ignore_links = True
obj.body_width = 0
try:
value = obj.handle(h)
except Exception:
# unable to parse html, send it raw
return value
value = ", ".join(value.split(' \n'))
value = " ".join(value.split('\n'))
value = ", ".join(value.split('# '))
return value
def read_xlsx_file_from_attached_file(file_id=None, fcontent=None, filepath=None):
if file_id:
from frappe.utils.file_manager import get_file_path
filename = get_file_path(file_id)
elif fcontent:
from io import BytesIO
filename = BytesIO(fcontent)
elif filepath:
filename = filepath
else:
return
rows = []
wb1 = load_workbook(filename=filename, read_only=True, data_only=True)
ws1 = wb1.active
for row in ws1.iter_rows():
tmp_list = []
for cell in row:
tmp_list.append(cell.value)
rows.append(tmp_list)
return rows
|
Python
| 0.000002
|
@@ -1542,21 +1542,20 @@
%09return
-value
+data
%0A%0A%09value
|
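The handle_html bug documented in this record is that the except branch returns value, a local that is unbound when obj.handle raised; the diff changes it to return data. A hedged sketch of the corrected fallback (the function name html_to_plain_text is hypothetical; the HTML2Text calls mirror the record's code):

# Sketch of the corrected fallback from the diff above: if html2text fails,
# return the original input instead of an unbound local.
from html2text import HTML2Text

def html_to_plain_text(data):
    parser = HTML2Text()
    parser.ignore_links = True
    parser.body_width = 0
    try:
        value = parser.handle(data)
    except Exception:
        # parsing failed; fall back to the raw input ("return data",
        # which is exactly what the commit's diff changes the bug to)
        return data
    return ", ".join(value.split(' \n'))

print(html_to_plain_text("<p>hello <b>world</b></p>"))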
49d8bd1dbec1fa5927a1e487e7f0799de2e2ee11
|
Remove unused import
|
tests/unit/states/archive_test.py
|
tests/unit/states/archive_test.py
|
# -*- coding: utf-8 -*-
'''
unit tests for the archive state
'''
# Import Python Libs
import os
import tempfile
try:
import pwd
HAS_PWD = True
except ImportError:
HAS_PWD = False
# Import Salt Libs
from salt.states import archive
# Import Salt Testing Libs
from salttesting import skipIf, TestCase
from salttesting.helpers import ensure_in_syspath
from salttesting.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch
)
ensure_in_syspath('../../')
archive.__opts__ = {}
archive.__salt__ = {}
archive.__env__ = 'test'
@skipIf(NO_MOCK, NO_MOCK_REASON)
class ArchiveTest(TestCase):
'''
Validate the archive state
'''
def test_extracted_tar(self):
'''
archive.extracted tar options
'''
source = 'file.tar.gz'
tmp_dir = os.path.join(tempfile.gettempdir(), 'test_archive', '')
test_tar_opts = [
'--no-anchored foo',
'v -p --opt',
'-v -p',
'--long-opt -z',
'z -v -weird-long-opt arg',
]
ret_tar_opts = [
['tar', 'x', '--no-anchored', 'foo', '-f'],
['tar', 'xv', '-p', '--opt', '-f'],
['tar', 'x', '-v', '-p', '-f'],
['tar', 'x', '--long-opt', '-z', '-f'],
['tar', 'xz', '-v', '-weird-long-opt', 'arg', '-f'],
]
mock_true = MagicMock(return_value=True)
mock_false = MagicMock(return_value=False)
ret = {'stdout': ['saltines', 'cheese'], 'stderr': 'biscuits', 'retcode': '31337', 'pid': '1337'}
mock_run = MagicMock(return_value=ret)
with patch('os.path.exists', mock_true):
with patch.dict(archive.__opts__, {'test': False,
'cachedir': tmp_dir}):
with patch.dict(archive.__salt__, {'file.directory_exists': mock_false,
'file.file_exists': mock_false,
'file.makedirs': mock_true,
'cmd.run_all': mock_run}):
filename = os.path.join(
tmp_dir,
'files/test/_tmp_test_archive_.tar'
)
for test_opts, ret_opts in zip(test_tar_opts, ret_tar_opts):
ret = archive.extracted(tmp_dir,
source,
'tar',
tar_options=test_opts)
ret_opts.append(filename)
mock_run.assert_called_with(ret_opts, cwd=tmp_dir, python_shell=False)
if __name__ == '__main__':
from integration import run_tests
run_tests(ArchiveTest)
|
Python
| 0.000001
|
@@ -109,87 +109,8 @@
file
-%0Atry:%0A import pwd%0A HAS_PWD = True%0Aexcept ImportError:%0A HAS_PWD = False
%0A%0A#
|
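Beyond the removed pwd import, the test above illustrates a reusable pattern: patching a module-level dict of injected functions (like Salt's __salt__) with patch.dict, then asserting on the resulting mock call. A self-contained sketch using stdlib unittest.mock rather than salttesting (module_funcs and run are stand-ins, not Salt APIs):

# Self-contained sketch of the mocking pattern used in the test above.
from unittest.mock import MagicMock, patch

module_funcs = {}  # stand-in for a module's __salt__-style injection dict

def run(cmd):
    return module_funcs['cmd.run_all'](cmd)

mock_run = MagicMock(return_value={'retcode': 0})
with patch.dict(module_funcs, {'cmd.run_all': mock_run}):
    run(['tar', 'x', '-v', '-p', '-f', 'file.tar'])
mock_run.assert_called_with(['tar', 'x', '-v', '-p', '-f', 'file.tar'])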
9c096a749610115569eec9c5f3946e9e55b95514
|
Add image in metablock for blog (#5292)
|
frappe/website/context.py
|
frappe/website/context.py
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe, os, json
from frappe.website.doctype.website_settings.website_settings import get_website_settings
from frappe.website.router import get_page_context
from frappe.model.document import Document
def get_context(path, args=None):
if args and args.source:
context = args
else:
context = get_page_context(path)
if args:
context.update(args)
if hasattr(frappe.local, 'request'):
# for <body data-path=""> (remove leading slash)
# path could be overridden in render.resolve_from_map
context["path"] = frappe.local.request.path.strip('/ ')
else:
context["path"] = path
context.route = context.path
context = build_context(context)
# set using frappe.respond_as_web_page
if hasattr(frappe.local, 'response') and frappe.local.response.get('context'):
context.update(frappe.local.response.context)
return context
def update_controller_context(context, controller):
module = frappe.get_module(controller)
if module:
# get config fields
for prop in ("base_template_path", "template", "no_cache", "no_sitemap",
"condition_field"):
if hasattr(module, prop):
context[prop] = getattr(module, prop)
if hasattr(module, "get_context"):
try:
ret = module.get_context(context)
if ret:
context.update(ret)
except frappe.Redirect:
raise
except (frappe.PermissionError, frappe.DoesNotExistError):
raise
except:
if not frappe.flags.in_migrate:
frappe.errprint(frappe.utils.get_traceback())
if hasattr(module, "get_children"):
context.children = module.get_children(context)
def build_context(context):
"""get_context method of doc or module is supposed to render
content templates and push it into context"""
context = frappe._dict(context)
if not "url_prefix" in context:
context.url_prefix = ""
if context.url_prefix and context.url_prefix[-1]!='/':
context.url_prefix += '/'
# for backward compatibility
context.docs_base_url = '/docs'
context.update(get_website_settings())
context.update(frappe.local.conf.get("website_context") or {})
# provide doc
if context.doc:
context.update(context.doc.as_dict())
context.update(context.doc.get_website_properties())
if not context.template:
context.template = context.doc.meta.get_web_template()
if hasattr(context.doc, "get_context"):
ret = context.doc.get_context(context)
if ret:
context.update(ret)
for prop in ("no_cache", "no_sitemap"):
if not prop in context:
context[prop] = getattr(context.doc, prop, False)
elif context.controller:
# controller based context
update_controller_context(context, context.controller)
# controller context extensions
context_controller_hooks = frappe.get_hooks("extend_website_page_controller_context") or {}
for controller, extension in context_controller_hooks.items():
if isinstance(extension, list):
for ext in extension:
if controller == context.controller:
update_controller_context(context, ext)
else:
update_controller_context(context, extension)
add_metatags(context)
add_sidebar_and_breadcrumbs(context)
# determine templates to be used
if not context.base_template_path:
app_base = frappe.get_hooks("base_template")
context.base_template_path = app_base[0] if app_base else "templates/base.html"
if context.title_prefix and context.title and not context.title.startswith(context.title_prefix):
context.title = '{0} - {1}'.format(context.title_prefix, context.title)
return context
def add_sidebar_and_breadcrumbs(context):
'''Add sidebar and breadcrumbs to context'''
from frappe.website.router import get_page_info_from_template
if context.show_sidebar:
context.no_cache = 1
add_sidebar_data(context)
else:
if context.basepath:
sidebar_json_path = os.path.join(context.basepath, '_sidebar.json')
if os.path.exists(sidebar_json_path):
with open(sidebar_json_path, 'r') as sidebarfile:
context.sidebar_items = json.loads(sidebarfile.read())
context.show_sidebar = 1
if context.add_breadcrumbs and not context.parents:
if context.basepath:
parent_path = os.path.dirname(context.path).rstrip('/')
page_info = get_page_info_from_template(parent_path)
if page_info:
context.parents = [dict(route=parent_path, title=page_info.title)]
def add_sidebar_data(context):
from frappe.utils.user import get_fullname_and_avatar
import frappe.www.list
if context.show_sidebar and context.website_sidebar:
context.sidebar_items = frappe.get_all('Website Sidebar Item',
filters=dict(parent=context.website_sidebar), fields=['title', 'route', '`group`'],
order_by='idx asc')
if not context.sidebar_items:
sidebar_items = frappe.cache().hget('portal_menu_items', frappe.session.user)
if sidebar_items is None:
sidebar_items = []
roles = frappe.get_roles()
portal_settings = frappe.get_doc('Portal Settings', 'Portal Settings')
def add_items(sidebar_items, items):
for d in items:
if d.get('enabled') and ((not d.get('role')) or d.get('role') in roles):
sidebar_items.append(d.as_dict() if isinstance(d, Document) else d)
if not portal_settings.hide_standard_menu:
add_items(sidebar_items, portal_settings.get('menu'))
if portal_settings.custom_menu:
add_items(sidebar_items, portal_settings.get('custom_menu'))
items_via_hooks = frappe.get_hooks('portal_menu_items')
if items_via_hooks:
for i in items_via_hooks: i['enabled'] = 1
add_items(sidebar_items, items_via_hooks)
frappe.cache().hset('portal_menu_items', frappe.session.user, sidebar_items)
context.sidebar_items = sidebar_items
info = get_fullname_and_avatar(frappe.session.user)
context["fullname"] = info.fullname
context["user_image"] = info.avatar
context["user"] = info.name
def add_metatags(context):
tags = context.get("metatags")
if tags:
if not "twitter:card" in tags:
tags["twitter:card"] = "summary_large_image"
if not "og:type" in tags:
tags["og:type"] = "article"
if tags.get("name"):
tags["og:title"] = tags["twitter:title"] = tags["name"]
if tags.get("description"):
tags["og:description"] = tags["twitter:description"] = tags["description"]
if tags.get("image"):
tags["og:image"] = tags["twitter:image:src"] = tags["image"] = frappe.utils.get_url("image")
|
Python
| 0
|
@@ -6406,22 +6406,23 @@
url(
-%22image%22)%0A
+tags.get(%22image%22))
%0A
|
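The diff above fixes the last line of add_metatags: frappe.utils.get_url was being passed the literal string "image" instead of the tag's value. A minimal sketch of the corrected logic with a stand-in get_url (the helper name add_og_image is hypothetical, for illustration only):

# Sketch of the corrected tail of add_metatags, per the diff above: pass
# the tag's image value to get_url instead of the literal string "image".
def add_og_image(tags, get_url):
    if tags.get("image"):
        absolute = get_url(tags.get("image"))
        tags["og:image"] = tags["twitter:image:src"] = tags["image"] = absolute
    return tags

# usage with a stand-in get_url
tags = add_og_image({"image": "/files/banner.png"},
                    lambda path: "https://example.com" + path)
print(tags["og:image"])  # https://example.com/files/banner.png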
f5a1e7f8e350a5f1b29c0e60caf178208946a2b1
|
Add more samples.
|
learning-python/ch02/Looping.py
|
learning-python/ch02/Looping.py
|
for i in [1, 2, 3, 4]:
print(i)
for i in range(5):
print(i)
colors = ["red", "green", "blue"]
for i in range(len(colors)):
print(i, colors[i])
for color in colors:
print(color)
for idx, color in enumerate(colors):
print(idx, color)
people = ["Scott", "John", "Mike"]
ages = [50, 30, 25]
for person, age in zip(people, ages):
print(person, age)
for data in zip(people, ages):
print(data)
|
Python
| 0
|
@@ -414,8 +414,845 @@
t(data)%0A
+%0Aarr = %5B1, 2, 3, 4, 5, 6%5D%0Aprint(arr)%0Aarr = arr%5B::-1%5D%0Aprint(arr)%0A%0Aempid = 2%0Aclass NoResourceFoundException(Exception):%0A pass%0Aemps = %7B1: %22Scott%22, 2: %22John%22, 3: %22Tiger%22%7D%0Afor emp in emps.items():%0A if emp.__contains__(empid):%0A print(%22Found%22)%0A break%0Aelse:%0A raise NoResourceFoundException(%22Not found%22)%0A%0Afrom itertools import count%0Afor n in count(5, 3):%0A if n %3E 20:%0A break%0A print(n, end=%22, %22)%0Aprint()%0Afrom itertools import compress%0Aret = compress(%22abcdefg%22, (1, 0, 1, 1))%0Afor x in ret:%0A print(x)%0A%0Adata = range(10)%0Aeven = %5B1, 0%5D * 10%0Aodd = %5B0, 1%5D * 10%0AevenNumbers = compress(data, even)%0AoddNumbers = compress(data, odd)%0Aprint(list(data))%0Aprint(list(evenNumbers))%0Aprint(list(oddNumbers))%0A%0Afrom itertools import permutations, combinations%0Aprint(list(permutations(%22ABC%22)))%0Aprint(list(combinations(%22ABC%22, 2)))%0A%0A
|
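Among the samples the encoded diff above appends is an itertools.compress demonstration that masks a range with boolean selectors. A decoded, runnable version of that fragment:

# Runnable version of the itertools.compress sample added by the diff:
# pick even- and odd-indexed values from a range with boolean masks.
from itertools import compress

data = range(10)
even_mask = [1, 0] * 5
odd_mask = [0, 1] * 5
print(list(compress(data, even_mask)))  # [0, 2, 4, 6, 8]
print(list(compress(data, odd_mask)))   # [1, 3, 5, 7, 9]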