| commit (string, length 40) | subject (string, length 1-3.25k) | old_file (string, length 4-311) | new_file (string, length 4-311) | old_contents (string, length 0-26.3k) | lang (string, 3 classes) | proba (float64, 0-1) | diff (string, length 0-7.82k) |
|---|---|---|---|---|---|---|---|
6565627e44566f1230ea8c3176678d604c020050
|
Generate network with bias properly
|
network_simulator.py
|
network_simulator.py
|
import numpy as np
from scipy import stats
def exp_cosh(H, beta=1.0):
return 0.5 * np.exp(beta * H) / np.cosh(beta * H)
def kinetic_ising_model(S, J):
""" Returns probabilities of S[t+1,:] being one.
:param S: numpy.ndarray (T,N)
Binary data where an entry is either 1 ('spike') or -1 ('silence').
:param J: numpy.ndarray (N, N)
Coupling matrix
:return: numpy.ndarray (T,N)
Probabilities that at time point t+1 neuron n fires
"""
# compute fields
H = np.dot(S, J)
# compute probabilities
p = exp_cosh(H)
# return
return p
def spike_and_slab(ro, N, bias=0, v_s=1.0):
''' This function generates spike and slab priors.
:param ro: sparsity
:param N: number of neurons
:param bias: 1 if bias is included in the model, 0 otherwise
:param v_s: standard deviation of the slab (normal) component
:return: numpy.ndarray (N + bias, N) coupling matrix
'''
gamma = stats.bernoulli.rvs(p=ro, size=(N + bias, N))
normal_dist = np.random.normal(0.0, v_s, (N + bias, N))
return gamma * normal_dist
def generate_spikes(N, T, S0, J, bias=0, no_spike=-1, save=False):
""" Generates spike data according to kinetic Ising model
with a spike and slab prior.
:param J: numpy.ndarray (N, N)
Coupling matrix.
:param T: int
Length of trajectory that is generated.
:param S0: numpy.ndarray (N)
Initial pattern that sampling started from.
:param bias: 1 if bias is included in the model, 0 otherwise.
:param no_spike: the number that represents 'no spike'. Default is -1.
:return: numpy.ndarray (T, N + bias)
Binary data where an entry is either 1 ('spike') or -1 ('silence'). When a bias is included, the last column is all ones (the external field).
"""
# Initialize array for data
S = np.empty([T, N + bias])
# Set initial spike pattern
S[0] = S0 if no_spike == -1 else np.zeros(N + bias)
# Last column in the activity matrix is of the bias and should be 1 at all times
if bias:
S[:, N] = 1
# Generate random numbers
X = np.random.rand(T - 1, N)
# Iterate through all time points
for i in range(1, T):
# Compute probabilities of neuron firing
p = kinetic_ising_model(np.array([S[i - 1]]), J)
# Check if spike or not
if no_spike == -1:
S[i, :N] = 2 * (X[i - 1] < p) - 1
else:
S[i, :N] = 2 * (X[i - 1] < p) / 2.0
S = S
return S
|
Python
| 0.999129
|
@@ -890,16 +890,53 @@
as, N))%0A
+ if bias:%0A gamma%5BN, :%5D = 1%0A
norm
|
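The diffs in this dump are URL-escaped (%0A is a newline, %5B/%5D are square brackets, %22 is a double quote, %7B/%7D are braces), and the original indentation was flattened by extraction. Decoded, this hunk pins the bias row of the Bernoulli mask so the external-field weights are never sparsified away. A sketch of the patched function, with indentation restored by hand:

import numpy as np
from scipy import stats

def spike_and_slab(ro, N, bias=0, v_s=1.0):
    # Bernoulli mask: which couplings survive (the "spike" part of the prior)
    gamma = stats.bernoulli.rvs(p=ro, size=(N + bias, N))
    if bias:
        gamma[N, :] = 1  # keep the whole bias row active, per the hunk
    # Gaussian "slab" values for the surviving couplings
    normal_dist = np.random.normal(0.0, v_s, (N + bias, N))
    return gamma * normal_dist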
70021d5df6beb0e8eb5b78a6484cbb650a7a1fb6
|
fix docs
|
cupyx/distributed/__init__.py
|
cupyx/distributed/__init__.py
|
from cupyx.distributed._init import init_process_group # NOQA
from cupyx.distributed._nccl_comm import NCCLBackend # NOQA
|
Python
| 0.000001
|
@@ -56,16 +56,68 @@
# NOQA%0A
+from cupyx.distributed._comm import Backend # NOQA%0A
from cup
|
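Decoded, the doc fix inserts a single re-export between the two existing lines:

from cupyx.distributed._init import init_process_group  # NOQA
from cupyx.distributed._comm import Backend  # NOQA
from cupyx.distributed._nccl_comm import NCCLBackend  # NOQA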
28dc19021a6ecf66c82b799fb7d0711c8cdd0589
|
Add parameter dynamic_url
|
flask_apidoc/apidoc.py
|
flask_apidoc/apidoc.py
|
# Copyright 2015 Vinicius Chiele. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import mimetypes
from flask import request
from os.path import join, getmtime, getsize
from werkzeug.datastructures import Headers
from .utils import cached
class ApiDoc(object):
"""
ApiDoc hosts the apidoc files in a specified url.
"""
def __init__(self, folder_path=None, url_path=None, app=None):
"""
Initializes a new instance of ApiDoc.
:param folder_path: the folder with apidoc files. Defaults to the 'docs' folder in the flask static folder.
:param url_path: the url path for the apidoc files. Defaults to the '/docs'.
:param app: the flask application.
"""
self.folder_path = folder_path
self.url_path = url_path
if self.folder_path is None:
self.folder_path = 'docs'
if self.url_path is None:
self.url_path = '/docs'
self.app = None
if app:
self.init_app(app)
def init_app(self, app):
"""
Adds the flask url routes for the apidoc files.
:param app: the flask application.
"""
self.app = app
url = self.url_path
if not self.url_path.endswith('/'):
url += '/'
app.add_url_rule(url, 'docs', self.__send_static_file, strict_slashes=True)
app.add_url_rule(url + '<path:path>', 'docs', self.__send_static_file, strict_slashes=True)
def __send_static_file(self, path=None):
"""
Sends apidoc files from the apidoc folder to the browser.
:param path: the apidoc file
"""
if not path:
path = 'index.html'
file_name = join(self.folder_path, path)
# the api_project.js has the absolute url
# hard coded so we replace them by the current url.
if path == 'api_project.js':
return self.__send_api_file(file_name)
# Any other apidoc file is treated as a normal static file
return self.app.send_static_file(file_name)
@cached
def __send_api_file(self, file_name):
"""
Sends apidoc files from the apidoc folder to the browser.
This method replaces all absolute urls in the file by
the current url.
:param file_name: the apidoc file
"""
file_name = join(self.app.static_folder, file_name)
with open(file_name, 'rt') as file:
data = file.read()
# replaces the hard coded url by the
# current url.
api_project = self.__read_api_project()
new_url = request.url_root.strip('/')
old_url = api_project.get('url')
data = data.replace(old_url, new_url)
# creates a flask response to send
# the file to the browser
headers = Headers()
headers['Content-Length'] = getsize(file_name)
response = self.app.response_class(data,
mimetype=mimetypes.guess_type(file_name)[0],
headers=headers,
direct_passthrough=True)
response.last_modified = int(getmtime(file_name))
return response
@cached
def __read_api_project(self):
"""
Reads the api_project.json file from apidoc folder as a json string.
:return: a json string
"""
file_name = join(self.app.static_folder, self.folder_path, 'api_project.json')
with open(file_name, 'rt') as file:
data = file.read()
return json.loads(data)
|
Python
| 0.000001
|
@@ -911,16 +911,34 @@
th=None,
+ dynamic_url=True,
app=Non
@@ -933,32 +933,32 @@
rue, app=None):%0A
-
%22%22%22%0A
@@ -1028,17 +1028,17 @@
r_path:
-t
+T
he folde
@@ -1141,17 +1141,17 @@
l_path:
-t
+T
he url p
@@ -1208,38 +1208,137 @@
%0A :param
-app: t
+dynamic_url: Set %60True%60 to replace all the urls in ApiDoc files by the current url.%0A :param app: T
he flask applica
@@ -1531,32 +1531,32 @@
l_path is None:%0A
-
self
@@ -1572,24 +1572,64 @@
= '/docs'%0A%0A
+ self.dynamic_url = dynamic_url%0A%0A
self
@@ -2546,16 +2546,37 @@
if
+self.dynamic_url and
path ==
|
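Applying the hunks to the constructor above gives roughly the following; the whitespace-only hunks capitalise "The" in the docstring and reindent, and the exact blank-line placement is reconstructed:

def __init__(self, folder_path=None, url_path=None, dynamic_url=True, app=None):
    """
    Initializes a new instance of ApiDoc.
    :param folder_path: The folder with apidoc files. Defaults to the 'docs' folder in the flask static folder.
    :param url_path: The url path for the apidoc files. Defaults to the '/docs'.
    :param dynamic_url: Set `True` to replace all the urls in ApiDoc files by the current url.
    :param app: The flask application.
    """
    self.folder_path = folder_path
    self.url_path = url_path

    if self.folder_path is None:
        self.folder_path = 'docs'

    if self.url_path is None:
        self.url_path = '/docs'

    self.dynamic_url = dynamic_url

    self.app = None
    if app:
        self.init_app(app)

The last hunk makes the URL rewrite opt-out, so __send_static_file only special-cases api_project.js when the flag is set:

if self.dynamic_url and path == 'api_project.js':
    return self.__send_api_file(file_name)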
3061affd313aff39f722e6e5846a3191d6592a7d
|
fix FaqQuestionSitemap URLs
|
fluent_faq/sitemaps.py
|
fluent_faq/sitemaps.py
|
from django.contrib.sitemaps import Sitemap
from django.core.urlresolvers import NoReverseMatch
from fluent_faq.models import FaqCategory, FaqQuestion
from fluent_faq.urlresolvers import faq_reverse
def _url_patterns_installed():
# This module can use normal Django urls.py URLs, or mount the "FaqPage" in the page tree.
# Check whether the URLs are installed, so the `sitemap.xml` can be generated nevertheless.
# This issue will pop up elsewhere too, so there is no need to raise an error here.
try:
faq_reverse('faqcategory_detail', kwargs={'slug': 'category'}, ignore_multiple=True)
except NoReverseMatch:
return False
else:
return True
class FaqQuestionSitemap(Sitemap):
"""
Sitemap for FAQ questions
"""
def items(self):
if not _url_patterns_installed():
return None
return FaqQuestion.objects.published()
def lastmod(self, category):
"""Return the last modification of the object."""
return category.modification_date
def location(self, category):
"""Return url of an question."""
return faq_reverse('faqcategory_detail', kwargs={'slug': category.slug}, ignore_multiple=True)
class FaqCategorySitemap(Sitemap):
"""
Sitemap for FAQ categories.
"""
def items(self):
if not _url_patterns_installed():
return None
return FaqCategory.objects.published()
def lastmod(self, category):
"""Return the last modification of the object."""
return category.modification_date
def location(self, category):
"""Return url of an category."""
return faq_reverse('faqcategory_detail', kwargs={'slug': category.slug}, ignore_multiple=True)
|
Python
| 0.000164
|
@@ -892,32 +892,59 @@
ects.published()
+.select_related('category')
%0A%0A def lastmo
@@ -947,32 +947,32 @@
stmod(self,
-category
+question
):%0A %22
@@ -1031,32 +1031,32 @@
return
-category
+question
.modificatio
@@ -1082,32 +1082,32 @@
ation(self,
-category
+question
):%0A %22
@@ -1165,32 +1165,32 @@
reverse('faq
-category
+question
_detail', kw
@@ -1192,39 +1192,75 @@
', kwargs=%7B'
+cat_
slug':
-category
+question.category.slug, 'slug': question
.slug%7D, igno
|
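The question sitemap was reversing category URLs with the question's own slug. Decoded, the fix renames the parameter, reverses 'faqquestion_detail' with both slugs, and adds select_related('category') so location() does not issue one extra query per question. The patched class would read:

class FaqQuestionSitemap(Sitemap):
    """
    Sitemap for FAQ questions
    """
    def items(self):
        if not _url_patterns_installed():
            return None
        return FaqQuestion.objects.published().select_related('category')

    def lastmod(self, question):
        """Return the last modification of the object."""
        return question.modification_date

    def location(self, question):
        """Return the URL of a question."""
        return faq_reverse('faqquestion_detail',
                           kwargs={'cat_slug': question.category.slug, 'slug': question.slug},
                           ignore_multiple=True)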
ac44d2336585147cad815d0f5dbdd46c44ebd788
|
Fix deprecation warning in "colour_hdri.recovery.highlights" module.
|
colour_hdri/recovery/highlights.py
|
colour_hdri/recovery/highlights.py
|
# -*- coding: utf-8 -*-
"""
Clipped Highlights Recovery
===========================
Defines the clipped highlights recovery objects:
- :func:`colour_hdri.highlights_recovery_blend`
- :func:`colour_hdri.highlights_recovery_LCHab`
See Also
--------
`Colour - HDRI - Examples: Merge from Raw Files Jupyter Notebook
<https://github.com/colour-science/colour-hdri/\
blob/master/colour_hdri/examples/examples_merge_from_raw_files.ipynb>`__
References
----------
- :cite:`Coffin2015a` : Coffin, D. (2015). dcraw.
https://www.cybercom.net/~dcoffin/dcraw/
"""
from __future__ import division, unicode_literals
import numpy as np
from colour.models import (LCHab_to_Lab, Lab_to_LCHab, Lab_to_XYZ, RGB_to_XYZ,
XYZ_to_Lab, XYZ_to_RGB, RGB_COLOURSPACE_sRGB)
from colour.utilities import vector_dot, tsplit, tstack
__author__ = 'Colour Developers'
__copyright__ = 'Copyright (C) 2015-2020 - Colour Developers'
__license__ = 'New BSD License - https://opensource.org/licenses/BSD-3-Clause'
__maintainer__ = 'Colour Developers'
__email__ = 'colour-developers@colour-science.org'
__status__ = 'Production'
__all__ = ['highlights_recovery_blend', 'highlights_recovery_LCHab']
def highlights_recovery_blend(RGB, multipliers, threshold=0.99):
"""
Performs highlights recovery using *Coffin (1997)* method from *dcraw*.
Parameters
----------
RGB : array_like
*RGB* colourspace array.
multipliers : array_like
Normalised camera white level or white balance multipliers.
threshold : numeric, optional
Threshold for highlights selection.
Returns
-------
ndarray
Highlights recovered *RGB* colourspace array.
References
----------
:cite:`Coffin2015a`
"""
M = np.array(
[[1.0000000, 1.0000000, 1.0000000],
[1.7320508, -1.7320508, 0.0000000],
[-1.0000000, -1.0000000, 2.0000000]]) # yapf: disable
clipping_level = np.min(multipliers) * threshold
Lab = vector_dot(M, RGB)
Lab_c = vector_dot(M, np.minimum(RGB, clipping_level))
s = np.sum((Lab * Lab)[..., 1:3], axis=2)
s_c = np.sum((Lab_c * Lab_c)[..., 1:3], axis=2)
ratio = np.sqrt(s_c / s)
ratio[np.logical_or(np.isnan(ratio), np.isinf(ratio))] = 1
Lab[:, :, 1:3] *= np.rollaxis(ratio[np.newaxis], 0, 3)
RGB_o = vector_dot(np.linalg.inv(M), Lab)
return RGB_o
def highlights_recovery_LCHab(RGB,
threshold=None,
RGB_colourspace=RGB_COLOURSPACE_sRGB):
"""
Performs highlights recovery in *CIE L\\*C\\*Hab* colourspace.
Parameters
----------
RGB : array_like
*RGB* colourspace array.
threshold : numeric, optional
Threshold for highlights selection, automatically computed
if not given.
RGB_colourspace : RGB_Colourspace, optional
Working *RGB* colourspace to perform the *CIE L\\*C\\*Hab* to and from.
Returns
-------
ndarray
Highlights recovered *RGB* colourspace array.
"""
L, _C, H = tsplit(
Lab_to_LCHab(
XYZ_to_Lab(
RGB_to_XYZ(RGB, RGB_colourspace.whitepoint,
RGB_colourspace.whitepoint,
RGB_colourspace.matrix_RGB_to_XYZ),
RGB_colourspace.whitepoint)))
_L_c, C_c, _H_c = tsplit(
Lab_to_LCHab(
XYZ_to_Lab(
RGB_to_XYZ(
np.clip(RGB, 0, threshold), RGB_colourspace.whitepoint,
RGB_colourspace.whitepoint,
RGB_colourspace.matrix_RGB_to_XYZ),
RGB_colourspace.whitepoint)))
return XYZ_to_RGB(
Lab_to_XYZ(
LCHab_to_Lab(tstack([L, C_c, H])),
RGB_colourspace.whitepoint), RGB_colourspace.whitepoint,
RGB_colourspace.whitepoint, RGB_colourspace.XYZ_to_RGB_matrix)
|
Python
| 0
|
@@ -3895,16 +3895,23 @@
urspace.
+matrix_
XYZ_to_R
@@ -3916,13 +3916,6 @@
_RGB
-_matrix
)%0A
|
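The two hunks together rename the deprecated RGB_Colourspace attribute XYZ_to_RGB_matrix to matrix_XYZ_to_RGB, matching the matrix_RGB_to_XYZ spelling already used earlier in the same function. The patched return statement:

return XYZ_to_RGB(
    Lab_to_XYZ(
        LCHab_to_Lab(tstack([L, C_c, H])),
        RGB_colourspace.whitepoint), RGB_colourspace.whitepoint,
    RGB_colourspace.whitepoint, RGB_colourspace.matrix_XYZ_to_RGB)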
79e7ef509e4757c29d6fa0bd9161410aadbd305a
|
fix os.path.expand [sic] typo, and refactor
|
salt/utils/xdg.py
|
salt/utils/xdg.py
|
# -*- coding: utf-8 -*-
'''
Create an XDG function to get the config dir
'''
import os
def xdg_config_dir(config_dir=None):
'''
Check xdg locations for config files
'''
xdg_config = os.getenv('XDG_CONFIG_HOME', os.path.expanduser('~/.config'))
xdg_config_dir = os.path.join(xdg_config, 'salt')
if os.path.isdir(xdg_config_dir):
return xdg_config_dir
else:
if config_dir is None:
return os.path.expanduser('~/.')
else:
return os.path.expand(os.path.join('~/.', config_dir))
|
Python
| 0.000004
|
@@ -434,40 +434,26 @@
-return os.path.expanduser(
+config_dir =
'~/.'
-)
%0A
@@ -479,30 +479,21 @@
-return os.path.expand(
+config_dir =
os.p
@@ -519,10 +519,55 @@
fig_dir)
+%0A return os.path.expanduser(config_dir
)%0A
|
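os.path.expand does not exist in the standard library (hence the "[sic]" in the subject); the refactor computes the candidate path first and calls the real os.path.expanduser exactly once on the way out. The patched tail of xdg_config_dir, indentation restored:

if config_dir is None:
    config_dir = '~/.'
else:
    config_dir = os.path.join('~/.', config_dir)
return os.path.expanduser(config_dir)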
cdd491d1b007931425194f29723fa74603fea272
|
Fix test: Make new file python3 syntax compatible.
|
command_line/stills_process_mpi.py
|
command_line/stills_process_mpi.py
|
#!/usr/bin/env python
#
# LIBTBX_SET_DISPATCHER_NAME dials.stills_process_mpi
from __future__ import absolute_import, division
import libtbx.load_env
import logging
logger = logging.getLogger(libtbx.env.dispatcher_name)
from libtbx.utils import Abort, Sorry
import os
from time import time
help_message = '''
MPI derivative of dials.stills_process. Only handles individual images, not HDF5
'''
from dials.command_line.stills_process import Script as base_script
from dials.command_line.stills_process import do_import,phil_scope
from dials.command_line.stills_process import Processor
class Script(base_script):
'''A class for running the script.'''
def __init__(self,comm):
'''MPI-aware constructor.'''
self.comm = comm
self.rank = comm.Get_rank() # each process in MPI has a unique id, 0-indexed
self.size = comm.Get_size() # size: number of processes running in this job
if True:
from dials.util.options import OptionParser
import libtbx.load_env
# The script usage
usage = "usage: %s [options] [param.phil] mp.blob=<filepattern>" % libtbx.env.dispatcher_name
self.tag = None
self.reference_detector = None
# Create the parser
self.parser = OptionParser(
usage=usage,
phil=phil_scope,
epilog=help_message
)
def assign_work(self):
'''Execute the script.'''
from dials.util import log
if self.rank==0:
# Parse the command line
params, options, all_paths = self.parser.parse_args(
show_diff_phil=False, return_unhandled=True,quick_parse=True)
# Check that all filenames have been entered as mp.blob
assert all_paths == []
assert params.mp.glob is not None
# Log the diff phil
diff_phil = self.parser.diff_phil.as_str()
if diff_phil is not '':
logger.info('The following parameters have been modified:\n')
logger.info(diff_phil)
print diff_phil
import glob
for item in params.mp.glob:
all_paths += glob.glob(item)
transmitted_info = dict(p=params,
o=options,
a=all_paths )
else:
transmitted_info = None
transmitted_info = self.comm.bcast(transmitted_info, root = 0)
# Save the options
self.options = transmitted_info["o"]
self.params = transmitted_info["p"]
all_paths = transmitted_info["a"]
# Configure logging
log.config(
self.params.verbosity,
info=None,
debug=None)
for abs_params in self.params.integration.absorption_correction:
if abs_params.apply:
if not (self.params.integration.debug.output and not self.params.integration.debug.separate_files):
raise Sorry('Shoeboxes must be saved to integration intermediates to apply an absorption correction. '\
+'Set integration.debug.output=True, integration.debug.separate_files=False and '\
+'integration.debug.delete_shoeboxes=True to temporarily store shoeboxes.')
# Process the data
assert self.params.mp.method == 'mpi'
basenames = [os.path.splitext(os.path.basename(filename))[0] for filename in all_paths]
tags = []
for i, basename in enumerate(basenames):
if basenames.count(basename) > 1:
tags.append("%s_%05d"%(basename, i))
else:
tags.append(basename)
iterable = zip(tags, all_paths)
self.subset = [item for i, item in enumerate(iterable) if (i+self.rank)%self.size == 0]
print "DELEGATE %d of %d: %s"%( self.rank, self.size, self.subset[0:10])
def run(self):
import copy
st = time()
self.load_reference_geometry()
from dials.command_line.dials_import import ManualGeometryUpdater
update_geometry = ManualGeometryUpdater(self.params)
# Import stuff
# no preimport for MPI multifile specialization
if True:
# Wrapper function
def do_work(i, item_list):
processor = Processor(copy.deepcopy(self.params), composite_tag = "%04d"%i)
for item in item_list:
tag, filename = item
datablock = do_import(filename)
imagesets = datablock.extract_imagesets()
if len(imagesets) == 0 or len(imagesets[0]) == 0:
logger.info("Zero length imageset in file: %s"%filename)
return
if len(imagesets) > 1:
raise Abort("Found more than one imageset in file: %s"%filename)
if len(imagesets[0]) > 1:
raise Abort("Found a multi-image file. Run again with pre_import=True")
if self.reference_detector is not None:
from dxtbx.model import Detector
imagesets[0].set_detector(Detector.from_dict(self.reference_detector.to_dict()))
update_geometry(imagesets[0])
processor.process_datablock(tag, datablock)
processor.finalize()
# Process the data
assert self.params.mp.method == 'mpi'
do_work(self.rank, self.subset)
# Total Time
logger.info("")
logger.info("Total Time Taken = %f seconds" % (time() - st))
if __name__ == '__main__':
from mpi4py import MPI
comm = MPI.COMM_WORLD
script = Script(comm)
script.assign_work()
comm.barrier()
script.run()
|
Python
| 0
|
@@ -120,16 +120,32 @@
division
+, print_function
%0Aimport
@@ -1945,17 +1945,17 @@
print
-
+(
diff_phi
@@ -1951,24 +1951,25 @@
nt(diff_phil
+)
%0A impor
@@ -3520,17 +3520,17 @@
print
-
+(
%22DELEGAT
@@ -3587,16 +3587,17 @@
t%5B0:10%5D)
+)
%0A%0A def
|
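Decoded, the Python 3 compatibility fix adds print_function to the __future__ import and parenthesises the two print statements:

from __future__ import absolute_import, division, print_function
# ...
print(diff_phil)
# ...
print("DELEGATE %d of %d: %s" % (self.rank, self.size, self.subset[0:10]))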
a88fa5c07a210a67126bb3cc7333c89c95d114cc
|
Remove mako_middleware_process_request, which doesn't cleanup after itself
|
common/djangoapps/edxmako/tests.py
|
common/djangoapps/edxmako/tests.py
|
from mock import patch, Mock
import unittest
import ddt
from request_cache.middleware import RequestCache
from django.conf import settings
from django.http import HttpResponse
from django.test import TestCase
from django.test.utils import override_settings
from django.test.client import RequestFactory
from django.core.urlresolvers import reverse
from edxmako.request_context import get_template_request_context
from edxmako import add_lookup, LOOKUP
from edxmako.shortcuts import (
marketing_link,
is_marketing_link_set,
is_any_marketing_link_set,
render_to_string,
)
from student.tests.factories import UserFactory
from util.testing import UrlResetMixin
@ddt.ddt
class ShortcutsTests(UrlResetMixin, TestCase):
"""
Test the edxmako shortcuts file
"""
@override_settings(MKTG_URLS={'ROOT': 'dummy-root', 'ABOUT': '/about-us'})
@override_settings(MKTG_URL_LINK_MAP={'ABOUT': 'login'})
def test_marketing_link(self):
# test marketing site on
with patch.dict('django.conf.settings.FEATURES', {'ENABLE_MKTG_SITE': True}):
expected_link = 'dummy-root/about-us'
link = marketing_link('ABOUT')
self.assertEquals(link, expected_link)
# test marketing site off
with patch.dict('django.conf.settings.FEATURES', {'ENABLE_MKTG_SITE': False}):
# we are using login because it is common across both cms and lms
expected_link = reverse('login')
link = marketing_link('ABOUT')
self.assertEquals(link, expected_link)
@override_settings(MKTG_URLS={'ROOT': 'dummy-root', 'ABOUT': '/about-us'})
@override_settings(MKTG_URL_LINK_MAP={'ABOUT': 'login'})
def test_is_marketing_link_set(self):
# test marketing site on
with patch.dict('django.conf.settings.FEATURES', {'ENABLE_MKTG_SITE': True}):
self.assertTrue(is_marketing_link_set('ABOUT'))
self.assertFalse(is_marketing_link_set('NOT_CONFIGURED'))
# test marketing site off
with patch.dict('django.conf.settings.FEATURES', {'ENABLE_MKTG_SITE': False}):
self.assertTrue(is_marketing_link_set('ABOUT'))
self.assertFalse(is_marketing_link_set('NOT_CONFIGURED'))
@override_settings(MKTG_URLS={'ROOT': 'dummy-root', 'ABOUT': '/about-us'})
@override_settings(MKTG_URL_LINK_MAP={'ABOUT': 'login'})
def test_is_any_marketing_link_set(self):
# test marketing site on
with patch.dict('django.conf.settings.FEATURES', {'ENABLE_MKTG_SITE': True}):
self.assertTrue(is_any_marketing_link_set(['ABOUT']))
self.assertTrue(is_any_marketing_link_set(['ABOUT', 'NOT_CONFIGURED']))
self.assertFalse(is_any_marketing_link_set(['NOT_CONFIGURED']))
# test marketing site off
with patch.dict('django.conf.settings.FEATURES', {'ENABLE_MKTG_SITE': False}):
self.assertTrue(is_any_marketing_link_set(['ABOUT']))
self.assertTrue(is_any_marketing_link_set(['ABOUT', 'NOT_CONFIGURED']))
self.assertFalse(is_any_marketing_link_set(['NOT_CONFIGURED']))
class AddLookupTests(TestCase):
"""
Test the `add_lookup` function.
"""
@patch('edxmako.LOOKUP', {})
def test_with_package(self):
add_lookup('test', 'management', __name__)
dirs = LOOKUP['test'].directories
self.assertEqual(len(dirs), 1)
self.assertTrue(dirs[0].endswith('management'))
class MakoRequestContextTest(TestCase):
"""
Test MakoMiddleware.
"""
def setUp(self):
super(MakoRequestContextTest, self).setUp()
self.user = UserFactory.create()
self.url = "/"
self.request = RequestFactory().get(self.url)
self.request.user = self.user
self.response = Mock(spec=HttpResponse)
self.addCleanup(RequestCache.clear_request_cache)
def test_with_current_request(self):
"""
Test that if get_current_request returns a request, then get_template_request_context
returns a RequestContext.
"""
with patch('edxmako.request_context.get_current_request', return_value=self.request):
# requestcontext should not be None.
self.assertIsNotNone(get_template_request_context())
def test_without_current_request(self):
"""
Test that if get_current_request returns None, then get_template_request_context
returns None.
"""
with patch('edxmako.request_context.get_current_request', return_value=None):
# requestcontext should be None.
self.assertIsNone(get_template_request_context())
def test_request_context_caching(self):
"""
Test that the RequestContext is cached in the RequestCache.
"""
with patch('edxmako.request_context.get_current_request', return_value=None):
# requestcontext should be None, because the cache isn't filled
self.assertIsNone(get_template_request_context())
with patch('edxmako.request_context.get_current_request', return_value=self.request):
# requestcontext should not be None, and should fill the cache
self.assertIsNotNone(get_template_request_context())
mock_get_current_request = Mock()
with patch('edxmako.request_context.get_current_request', mock_get_current_request):
# requestcontext should not be None, because the cache is filled
self.assertIsNotNone(get_template_request_context())
mock_get_current_request.assert_not_called()
RequestCache.clear_request_cache()
with patch('edxmako.request_context.get_current_request', return_value=None):
# requestcontext should be None, because the cache isn't filled
self.assertIsNone(get_template_request_context())
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
def test_render_to_string_when_no_global_context_lms(self):
"""
Test render_to_string() when makomiddleware has not initialized
the threadlocal REQUEST_CONTEXT.context. This is meant to run in LMS.
"""
self.assertIn("this module is temporarily unavailable", render_to_string("courseware/error-message.html", None))
@unittest.skipUnless(settings.ROOT_URLCONF == 'cms.urls', 'Test only valid in cms')
def test_render_to_string_when_no_global_context_cms(self):
"""
Test render_to_string() when makomiddleware has not initialized
the threadlocal REQUEST_CONTEXT.context. This is meant to run in CMS.
"""
self.assertIn("We're having trouble rendering your component", render_to_string("html_error.html", None))
def mako_middleware_process_request(request):
"""
Initialize the global RequestContext variable
edxmako.middleware.requestcontext using the request object.
"""
mako_middleware = edxmako.middleware.MakoMiddleware()
mako_middleware.process_request(request)
|
Python
| 0
|
@@ -6741,285 +6741,4 @@
e))%0A
-%0A%0Adef mako_middleware_process_request(request):%0A %22%22%22%0A Initialize the global RequestContext variable%0A edxmako.middleware.requestcontext using the request object.%0A %22%22%22%0A mako_middleware = edxmako.middleware.MakoMiddleware()%0A mako_middleware.process_request(request)%0A
|
c90c66cf735e4ec279859b6db593ed8d2316088d
|
add SysfsAttributes.get() with optional default
|
sasutils/sysfs.py
|
sasutils/sysfs.py
|
#
# Copyright (C) 2016
# The Board of Trustees of the Leland Stanford Junior University
# Written by Stephane Thiell <sthiell@stanford.edu>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import json
import glob
from os import access, listdir, readlink, R_OK
from os.path import basename, isdir, isfile, join, realpath
import string
SYSFS_ROOT = '/sys'
class SysfsNode(object):
def __init__(self, path=None):
if path is None:
self.path = SYSFS_ROOT
else:
self.path = path
def __repr__(self):
return '<sysfs.SysfsNode "%s">' % self.path
def __str__(self):
return basename(self.path)
def __eq__(self, other):
return realpath(self.path) == realpath(other.path)
def __hash__(self):
return hash(realpath(self.path))
def __iter__(self):
return iter(self.__class__(join(self.path, name))
for name in listdir(self.path))
def iterglob(self, pathname, is_dir=True):
for path in glob.glob(join(self.path, pathname)):
if isfile(path):
yield basename(path)
elif is_dir and isdir(path):
yield self.__class__(path)
def glob(self, pathname, is_dir=True):
return list(self.iterglob(pathname, is_dir))
def node(self, pathname, default=None):
glob_res = list(self.iterglob(pathname))
try:
return glob_res[0]
except IndexError:
if default is not None:
return default
# raise a meaningful error
raise KeyError(join(self.path, pathname))
def iterget(self, pathname, ignore_errors, absolute=False):
if absolute:
path = pathname
else:
path = join(self.path, pathname)
for path in glob.glob(path):
if isfile(path) and access(path, R_OK):
try:
with open(path, 'r') as fp:
yield fp.read().strip()
except IOError, exc:
if not ignore_errors:
yield str(exc)
def get(self, pathname, default=None, ignore_errors=False, printable=True,
absolute=False):
if absolute:
path = pathname
else:
path = join(self.path, pathname)
glob_res = list(self.iterget(path, ignore_errors, absolute=True))
try:
result = glob_res[0]
except IndexError:
if not ignore_errors:
raise KeyError('Not found: %s' % path)
result = default
if not result:
return result
#elif printable and all(c in string.printable for c in result):
else:
return result
#else:
# return default
def readlink(self, pathname, default=None, absolute=False):
if absolute:
path = pathname
else:
path = join(self.path, pathname)
try:
return readlink(path)
except OSError:
if default is not None:
return default
raise
# For testing
SYSFSNODE_CLASS = SysfsNode
sysfs = SYSFSNODE_CLASS()
class SysfsAttributes(collections.MutableMapping):
"""SysfsObject attributes with dot.notation access"""
def __init__(self):
self.values = {}
self.paths = {}
def add_path(self, attr, path):
self.paths[attr] = path
def load(self):
for path in self.paths:
loaded = self[path]
# The next five methods are requirements of the ABC.
def __setitem__(self, key, value):
self.values[key] = value
def __getitem__(self, key):
if not self.values.__contains__(key):
try:
self.values[key] = SYSFSNODE_CLASS().get(self.paths[key],
absolute=True)
except KeyError:
raise AttributeError("%r object has no attribute %r" %
(self.__class__.__name__, key))
return self.values[key]
def __delitem__(self, key):
if key in self.values:
del self.values[key]
del self.paths[key]
def __iter__(self):
return iter(self.paths)
def __len__(self):
return len(self.paths)
__getattr__ = __getitem__
class SysfsObject(object):
def __init__(self, sysfsnode):
self.sysfsnode = sysfsnode
self.name = str(sysfsnode)
self.attrs = SysfsAttributes()
self.classname = self.__class__.__name__
if type(sysfsnode) is str:
assert len(sysfsnode) > 0
attrs = self.sysfsnode.glob('*', is_dir=False)
for attr in attrs:
self.attrs.add_path(attr, join(self.sysfsnode.path, attr))
def json_serialize(self):
"""May be overridden to change json serialization, eg. to avoid
circular reference issues."""
return self.__dict__
def to_json(self):
def json_default(o):
if hasattr(o, 'json_serialize'):
return o.json_serialize()
return o.__dict__
return json.dumps(self, default=json_default, sort_keys=True, indent=4)
def __eq__(self, other):
return self.sysfsnode == other.sysfsnode
def __hash__(self):
return hash(self.sysfsnode)
def __repr__(self):
return '<%s.%s %r>' % (self.__module__, self.__class__.__name__,
self.sysfsnode.path)
__str__ = __repr__
class SysfsDevice(SysfsObject):
def __init__(self, device, subsys, sysfsdev_pattern='*[0-9]'):
# only consider end_device-20:2:57, 20:0:119:0, host19
SysfsObject.__init__(self, device.node('%s/%s' % (subsys,
sysfsdev_pattern)))
self.device = device
|
Python
| 0.000001
|
@@ -3133,200 +3133,18 @@
-if not result:%0A return result%0A #elif printable and all(c in string.printable for c in result):%0A else:%0A return result%0A #else:%0A # return defa
+return res
ult%0A
@@ -4022,27 +4022,19 @@
def
-__
get
-item__
(self, k
@@ -4027,32 +4027,46 @@
ef get(self, key
+, default=None
):%0A if no
@@ -4150,33 +4150,21 @@
%5Bkey%5D =
-SYSFSNODE_CLASS()
+sysfs
.get(sel
@@ -4180,108 +4180,148 @@
ey%5D,
-%0A absolute=True)%0A except KeyError
+ absolute=True)%0A except KeyError:%0A if default is not None:%0A return default%0A else
:%0A
@@ -4330,24 +4330,28 @@
+
+
raise Attrib
@@ -4393,16 +4393,20 @@
e %25r%22 %25%0A
+
@@ -4491,32 +4491,94 @@
lf.values%5Bkey%5D%0A%0A
+ def __getitem__(self, key):%0A return self.get(key)%0A%0A
def __delite
|
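Two things happen in these hunks: the dead printable-check branches in SysfsNode.get collapse to a plain return result, and SysfsAttributes grows a get() with an optional default, with __getitem__ delegating to it (note it now goes through the module-level sysfs singleton instead of constructing a fresh SYSFSNODE_CLASS). A sketch of the patched mapping methods, indentation reconstructed:

def get(self, key, default=None):
    if not self.values.__contains__(key):
        try:
            # lazily read the sysfs attribute and cache the value
            self.values[key] = sysfs.get(self.paths[key], absolute=True)
        except KeyError:
            if default is not None:
                return default
            else:
                raise AttributeError("%r object has no attribute %r" %
                                     (self.__class__.__name__, key))
    return self.values[key]

def __getitem__(self, key):
    return self.get(key)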
237fcac11a12a1fb0eba32a4fc516cb449f15577
|
Fix bounce test
|
froide/bounce/tests.py
|
froide/bounce/tests.py
|
import os
import unittest
from datetime import datetime, timedelta
from django.test import TestCase
from django.db import connection
from froide.helper.email_utils import EmailParser
from froide.foirequest.tests.factories import UserFactory
from .models import Bounce
from .utils import (
make_bounce_address, add_bounce_mail, check_user_deactivation,
get_recipient_address_from_bounce
)
TEST_DATA_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), 'testdata'))
def p(path):
return os.path.join(TEST_DATA_ROOT, path)
@unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific tests")
class BounceTest(TestCase):
def setUp(self):
self.email = 'nonexistant@example.org'
def test_bounce_address(self):
email = 'Upper_Case@example.org'
bounce_address = make_bounce_address(email)
self.assertEqual(bounce_address, bounce_address.lower())
recovered_email, status = get_recipient_address_from_bounce(bounce_address)
self.assertEqual(recovered_email, email.lower())
self.assertTrue(status)
def test_bounce_parsing(self):
parser = EmailParser()
with open(p("bounce_001.txt"), 'rb') as f:
email = parser.parse(f)
bounce_address = make_bounce_address(self.email)
email.to = [('', bounce_address)]
bounce_info = email.bounce_info
self.assertTrue(bounce_info.is_bounce)
self.assertEqual(bounce_info.bounce_type, 'hard')
self.assertEqual(bounce_info.status, (5, 0, 0))
add_bounce_mail(email)
bounce = Bounce.objects.get(email=self.email)
self.assertEqual(bounce.email, self.email)
self.assertIsNone(bounce.user)
self.assertEqual(len(bounce.bounces), 1)
def test_bounce_parsing_2(self):
parser = EmailParser()
with open(p("bounce_002.txt"), 'rb') as f:
email = parser.parse(f)
bounce_address = make_bounce_address(self.email)
email.to = [('', bounce_address)]
bounce_info = email.bounce_info
self.assertTrue(bounce_info.is_bounce)
self.assertEqual(bounce_info.bounce_type, 'hard')
self.assertEqual(bounce_info.status, (5, 1, 1))
def test_bounce_handling(self):
def days_ago(days):
return (datetime.now() - timedelta(days=days)).isoformat()
def bounce_factory(days, bounce_type='hard'):
return [{
'is_bounce': True, 'bounce_type': bounce_type,
'timestamp': days_ago(day)}
for day in days
]
bounce = Bounce(user=None, email='a@example.org',
bounces=bounce_factory([1, 5]))
result = check_user_deactivation(bounce)
self.assertIsNone(result)
user = UserFactory()
bounce = Bounce(user=user, email=user.email,
bounces=bounce_factory([1, 5]))
result = check_user_deactivation(bounce)
self.assertFalse(result)
user = UserFactory()
bounce = Bounce(user=user, email=user.email,
bounces=bounce_factory([1, 5, 12]))
result = check_user_deactivation(bounce)
self.assertTrue(result)
user = UserFactory()
bounce = Bounce(
user=user, email=user.email,
bounces=bounce_factory([1, 5, 12], bounce_type='soft'))
result = check_user_deactivation(bounce)
self.assertFalse(result)
|
Python
| 0.000002
|
@@ -1543,12 +1543,12 @@
(5,
-0, 0
+5, 3
))%0A
|
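Decoded, the test fix only swaps the expected enhanced-status tuple; judging by the byte offset, the hunk lands in test_bounce_parsing, so that assertion becomes:

self.assertEqual(bounce_info.status, (5, 5, 3))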
e6db95cce0239d9e8ce33aec5cf21aa1bd19df03
|
Add __str__ method
|
imagersite/imager_profile/models.py
|
imagersite/imager_profile/models.py
|
from django.db import models
from django.contrib.auth.models import User
import six
@six.python_2_unicode_compatible
class ImagerProfile(models.Model):
user = models.OneToOneField(User)
fav_camera = models.CharField(max_length=30)
address = models.CharField(max_length=100)
web_url = models.URLField()
type_photography = models.CharField(max_length=30)
|
Python
| 0.999825
|
@@ -1,20 +1,31 @@
+import six%0A
%0Afrom django.db impo
@@ -78,26 +78,16 @@
rt User%0A
-import six
%0A%0A@six.p
@@ -370,8 +370,89 @@
th=30)%0A%0A
+ def __str__(self):%0A return %22%7B%7D's profile%22.format(self.user.username)%0A%0A
|
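Decoded, the change hoists import six above the Django imports and appends the __str__ method that the @six.python_2_unicode_compatible decorator expects; the patched module would look like:

import six

from django.db import models
from django.contrib.auth.models import User


@six.python_2_unicode_compatible
class ImagerProfile(models.Model):
    user = models.OneToOneField(User)
    fav_camera = models.CharField(max_length=30)
    address = models.CharField(max_length=100)
    web_url = models.URLField()
    type_photography = models.CharField(max_length=30)

    def __str__(self):
        return "{}'s profile".format(self.user.username)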
e9f986a0ade7d08a56157641efb49366f3c54bcc
|
Add a create column migration for canonical_bug_link
|
mysite/search/migrations/0016_add_looks_closed_column.py
|
mysite/search/migrations/0016_add_looks_closed_column.py
|
from south.db import db
from django.db import models
from mysite.search.models import *
class Migration:
def forwards(self, orm):
# Adding field 'Bug.looks_closed'
db.add_column('search_bug', 'looks_closed', orm['search.bug:looks_closed'])
def backwards(self, orm):
# Deleting field 'Bug.looks_closed'
db.delete_column('search_bug', 'looks_closed')
models = {
'search.bug': {
'canonical_bug_link': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'date_reported': ('django.db.models.fields.DateTimeField', [], {}),
'description': ('django.db.models.fields.TextField', [], {}),
'good_for_newcomers': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'importance': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'last_polled': ('django.db.models.fields.DateTimeField', [], {}),
'last_touched': ('django.db.models.fields.DateTimeField', [], {}),
'looks_closed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'people_involved': ('django.db.models.fields.IntegerField', [], {}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['search.Project']"}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'submitter_realname': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'submitter_username': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'search.project': {
'date_icon_was_fetched_from_ohloh': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
'icon': ('django.db.models.fields.files.ImageField', [], {'default': 'None', 'max_length': '100', 'null': 'True'}),
'icon_for_search_result': ('django.db.models.fields.files.ImageField', [], {'default': 'None', 'max_length': '100', 'null': 'True'}),
'icon_smaller_for_badge': ('django.db.models.fields.files.ImageField', [], {'default': 'None', 'max_length': '100', 'null': 'True'}),
'icon_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'})
}
}
complete_apps = ['search']
|
Python
| 0.000001
|
@@ -269,16 +269,112 @@
osed'%5D)%0A
+ db.add_column('search_bug', 'canonical_bug_link', orm%5B'search.bug:canonical_bug_link'%5D)%0A
@@ -522,16 +522,77 @@
losed')%0A
+ db.delete_column('search_bug', 'canonical_bug_link')%0A
|
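Despite the migration's name, the fix piggybacks a canonical_bug_link column onto the same South migration; decoded, both methods gain a mirrored call:

def forwards(self, orm):
    # Adding field 'Bug.looks_closed'
    db.add_column('search_bug', 'looks_closed', orm['search.bug:looks_closed'])
    db.add_column('search_bug', 'canonical_bug_link', orm['search.bug:canonical_bug_link'])

def backwards(self, orm):
    # Deleting field 'Bug.looks_closed'
    db.delete_column('search_bug', 'looks_closed')
    db.delete_column('search_bug', 'canonical_bug_link')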
18bb441017c26b850eeb84fda576c613a08238b1
|
Fix the case when Paths.CWD is None
|
sc2/sc2process.py
|
sc2/sc2process.py
|
import sys
import signal
import time
import asyncio
import os.path
import shutil
import tempfile
import subprocess
import portpicker
import websockets
import logging
logger = logging.getLogger(__name__)
from .paths import Paths
from .protocol import Protocol
from .controller import Controller
class kill_switch(object):
_to_kill = []
@classmethod
def add(cls, value):
cls._to_kill.append(value)
@classmethod
def kill_all(cls):
logger.info("kill_switch: Process cleanup")
for p in cls._to_kill:
p._clean()
class SC2Process(object):
def __init__(self, host="127.0.0.1", port=None, fullscreen=False):
assert isinstance(host, str)
assert isinstance(port, int) or port is None
self._fullscreen = fullscreen
self._host = host
if port is None:
self._port = portpicker.pick_unused_port()
else:
self._port = port
self._tmp_dir = tempfile.mkdtemp(prefix="SC2_")
self._process = None
self._ws = None
async def __aenter__(self):
kill_switch.add(self)
def signal_handler(signal, frame):
kill_switch.kill_all()
signal.signal(signal.SIGINT, signal_handler)
try:
self._process = self._launch()
self._ws = await self._connect()
except:
self._clean()
raise
return Controller(self._ws)
async def __aexit__(self, *args):
kill_switch.kill_all()
signal.signal(signal.SIGINT, signal.SIG_DFL)
@property
def ws_url(self):
return f"ws://{self._host}:{self._port}/sc2api"
def _launch(self):
return subprocess.Popen([
str(Paths.EXECUTABLE),
"-listen", self._host,
"-port", str(self._port),
"-displayMode", "1" if self._fullscreen else "0",
"-dataDir", str(Paths.BASE),
"-tempDir", self._tmp_dir
],
cwd=str(Paths.CWD),
#, env=run_config.env
)
async def _connect(self):
for _ in range(30):
await asyncio.sleep(1)
try:
ws = await websockets.connect(self.ws_url, timeout=120)
return ws
except ConnectionRefusedError:
pass
raise TimeoutError("Websocket")
def _clean(self):
logger.info("Cleaning up...")
if self._ws is not None:
self._ws.close()
if self._process is not None:
if self._process.poll() is None:
for _ in range(3):
self._process.terminate()
time.sleep(2)
if self._process.poll() is not None:
break
else:
self._process.kill()
self._process.wait()
logger.error("KILLED")
if os.path.exists(self._tmp_dir):
shutil.rmtree(self._tmp_dir)
self._process = None
self._ws = None
logger.info("Cleanup complete")
|
Python
| 1
|
@@ -2019,16 +2019,17 @@
cwd=
+(
str(Path
@@ -2034,16 +2034,40 @@
ths.CWD)
+ if Paths.CWD else None)
,%0A
|
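str(Paths.CWD) would stringify None into the literal path 'None' and make Popen fail; the guard forwards a real None so the child process inherits the current working directory. Decoded, the patched keyword argument in _launch:

cwd=(str(Paths.CWD) if Paths.CWD else None),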
93d75e45a277cbdbc551831d6e0462e3e5c430fb
|
Set keys public again on S3 caching.
|
funsize/cache/cache.py
|
funsize/cache/cache.py
|
"""
funsize.database.cache
~~~~~~~~~~~~~~~~~~~~~~~~~~~
This is currently a stub file that contains function prototypes for the
caching layer core
"""
import os
from boto.s3.connection import S3Connection
import funsize.utils.oddity as oddity
class Cache(object):
""" Class that provides access to cache
Assumes all keys are hex-encoded SHA512s
Internally converts hex to base64 encoding
"""
def __init__(self, _bucket=os.environ.get('FUNSIZE_S3_UPLOAD_BUCKET')):
""" _bucket : bucket name to use for S3 resources """
if not _bucket:
raise oddity.CacheError("Amazon S3 bucket not set")
# open a connection and get the bucket
self.conn = S3Connection()
self.bucket = self.conn.get_bucket(_bucket)
def _get_cache_internals(self, identifier, category):
""" Method to return cache bucket key based on identifier """
if not identifier:
raise oddity.CacheError('Save object failed without identifier')
if category not in ('diff', 'partial', 'patch'):
raise oddity.CacheError("Category failed for S3 uploading")
bucket_key = "files/%s/%s" % (category, identifier)
return bucket_key
def _create_new_bucket_key(self, identifier, category):
""" Based on identifier and category create a new key in the bucket"""
_key = self._get_cache_internals(identifier, category)
return self.bucket.new_key(_key)
def _get_bucket_key(self, identifier, category):
""" Based on identifier and category retrieve key from bucket """
_key = self._get_cache_internals(identifier, category)
return self.bucket.get_key(_key)
def save(self, string, identifier, category, isfile=False):
""" Saves given file to cache, treats string as a local filepath if
isfile is true. returns hash of file.
"""
# FIXME: What should the behaviour be when we try to save to a
# pre-existing key?
key = self._create_new_bucket_key(identifier, category)
if isfile:
key.set_contents_from_filename(string)
else:
key.set_contents_from_string(string)
def save_blank_file(self, identifier, category):
""" Method to save a blank file to show a partial has been triggered and
it is being in progress
"""
key = self._create_new_bucket_key(identifier, category)
key.set_contents_from_string('')
def is_blank_file(self, identifier, category):
""" Function to check if the file is empty or not. To be used to ensure
no second triggering is done for the same partial
Returns True is file exists and is blank, False otherwise
"""
key = self._get_bucket_key(identifier, category)
if not key:
return False
return key.size == 0
def find(self, identifier, category):
""" Checks if file with specified key is in cache
returns True or False depending on whether the file exists
"""
key = self._get_bucket_key(identifier, category)
return bool(key)
def retrieve(self, identifier, category, output_file=None):
""" Retrieve file with the given key
writes the file to the path specified by output_file if present
otherwise returns the file as a binary string/file object
"""
key = self._get_bucket_key(identifier, category)
if output_file:
key.get_contents_to_filename(output_file)
else:
return key.get_contents_as_string()
def delete_from_cache(self, identifier, category):
""" Method to remove a file from cache """
key = self._get_bucket_key(identifier, category)
key.delete()
|
Python
| 0
|
@@ -2058,32 +2058,77 @@
fier, category)%0A
+ key.change_storage_class(%22STANDARD%22)%0A
if isfil
@@ -2126,24 +2126,24 @@
if isfile:%0A
-
@@ -2243,16 +2243,51 @@
(string)
+%0A key.set_acl('public-read')
%0A%0A de
@@ -2518,32 +2518,77 @@
fier, category)%0A
+ key.change_storage_class(%22STANDARD%22)%0A
key.set_
@@ -2611,16 +2611,51 @@
ring('')
+%0A key.set_acl('public-read')
%0A%0A de
|
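Decoded, both save paths now force the STANDARD storage class before writing and mark the key world-readable afterwards. The collapsed whitespace in the hunks leaves the exact indentation of set_acl ambiguous; this sketch applies it after the write in both branches:

def save(self, string, identifier, category, isfile=False):
    key = self._create_new_bucket_key(identifier, category)
    key.change_storage_class("STANDARD")
    if isfile:
        key.set_contents_from_filename(string)
    else:
        key.set_contents_from_string(string)
    key.set_acl('public-read')

def save_blank_file(self, identifier, category):
    key = self._create_new_bucket_key(identifier, category)
    key.change_storage_class("STANDARD")
    key.set_contents_from_string('')
    key.set_acl('public-read')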
37b994f96a7760ae962092e59d603455202f0985
|
Document what the interface is for...
|
usingnamespace/traversal/Archive.py
|
usingnamespace/traversal/Archive.py
|
import logging
log = logging.getLogger(__name__)
from zope.interface import Interface
from zope.interface import implementer
from Entry import Entry
class IArchive(Interface):
pass
@implementer(IArchive)
class ArchiveYear(object):
"""ArchiveYear is the context for this years archives"""
def __init__(self, year):
"""Initialises the context
:year: The year we are trying to get archives for
"""
log.debug("Creating new ArchiveYear: {}".format(year))
if isinstance(year, int):
self.__name__ = '{}'.format(year)
self.year = year
if isinstance(year, basestring):
self.__name__ = year
try:
self.year = int(year)
except ValueError:
raise ValueError('Year is not valid.')
def __getitem__(self, key):
"""Return the next item in the traversal tree
:key: The next item to look for
:returns: The next traversal item
"""
next_ctx = None
if key == 'page':
pass
# Last resort, see if it is a valid month
try:
next_ctx = ArchiveYearMonth(key)
except ValueError, e:
next_ctx = None
if next_ctx is None:
raise KeyError
else:
next_ctx.__parent__ = self
next_ctx._request = self._request
return next_ctx
@implementer(IArchive)
class ArchiveYearMonth(object):
"""ArchiveYearMonth is the context for the year/month archives"""
def __init__(self, month):
"""Initialises the context
:month: The month we are getting archives for
"""
log.debug("Creating new ArchiveYearMonth: {}".format(month))
if isinstance(month, int):
self.__name__ = '{}'.format(month)
self.month = month
if isinstance(month, basestring):
self.__name__ = month
try:
self.month = int(month)
if self.month > 12 or self.month < 1:
raise ValueError
except ValueError:
raise ValueError('Month is not valid.')
def __getitem__(self, key):
"""Return the next item in the traversal tree
:key: The next item to look for
:returns: The next traversal item
"""
next_ctx = None
if key == 'page':
pass
# Last resort, see if it is a valid day
try:
next_ctx = ArchiveYearMonthDay(key)
except ValueError:
next_ctx = None
if next_ctx is None:
raise KeyError
else:
next_ctx.__parent__ = self
next_ctx._request = self._request
return next_ctx
@implementer(IArchive)
class ArchiveYearMonthDay(object):
"""ArchiveYearMonthDay is the context for the year/month/day archives"""
def __init__(self, day):
"""Initialises the context
:day: The day we are getting archives for
"""
log.debug("Creating new ArchiveYearMonthDay: {}".format(day))
if isinstance(day, int):
self.__name__ = '{}'.format(day)
self.day = day
if isinstance(day, basestring):
self.__name__ = day
try:
self.day = int(day)
if self.day > 31 or self.day < 1:
raise ValueError
except ValueError:
raise ValueError('Day is not valid.')
def __getitem__(self, key):
"""Return the next item in the traversal tree
:key: The next item to look for
:returns: The next traversal item
"""
next_ctx = None
if key == 'page':
pass
# Last resort, see if it is a valid slug
try:
next_ctx = Entry(key)
except ValueError:
next_ctx = None
if next_ctx is None:
raise KeyError
else:
next_ctx.__parent__ = self
next_ctx._request = self._request
return next_ctx
|
Python
| 0
|
@@ -176,20 +176,59 @@
e):%0A
-pass
+%22%22%22Marker interface for archive contexts%22%22%22
%0A%0A@imple
|
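Decoded, the whole change replaces the placeholder body with the promised documentation:

class IArchive(Interface):
    """Marker interface for archive contexts"""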
f79a998c707f4ae7c6b5b3b1bf3aa730262f7538
|
fix csrf
|
gamechat/chat/views.py
|
gamechat/chat/views.py
|
from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import render, redirect
from django.contrib.auth.decorators import login_required
from django.http import JsonResponse
from django.db import IntegrityError
from chat.models import ChatRoom
from gevent import queue
QUEUES = {'Chat Room': {'User': queue.Queue(), }, }
ssb = {'Chat Room': {'User': queue.Queue(), }, }
wow = {'Chat Room': {'User': queue.Queue(), }, }
lol = {'Chat Room': {'User': queue.Queue(), }, }
cs = {'Chat Room': {'User': queue.Queue(), }, }
destiny = {'Chat Room': {'User': queue.Queue(), }, }
mine = {'Chat Room': {'User': queue.Queue(), }, }
hearth = {'Chat Room': {'User': queue.Queue(), }, }
dota = {'Chat Room': {'User': queue.Queue(), }, }
diablo = {'Chat Room': {'User': queue.Queue(), }, }
local = {'Chat Room': {'User': queue.Queue(), }, }
dict_of_queus = {'ssb': ssb, 'wow': wow, 'lol': lol, 'cs': cs, 'destiny': destiny,
'mine': mine, 'hearth': hearth, 'dota': dota, 'diablo': diablo,
'local': local}
chatrooms = ChatRoom.objects.all()
for chatroom in chatrooms:
if chatroom.main in dict_of_queus:
dict_of_queus[chatroom.main][chatroom.name] = {}
def index(request):
name = request.path.rsplit('/', 1)[1]
chat_room = []
for room in ChatRoom.objects.filter(main=name).all():
chat_room.append(room)
context = {
'chat_list': chat_room,
'channel': name,
}
return render(request, 'chat/index.html', context)
@csrf_exempt
@login_required
def create_room(request):
try:
main = request.path.rsplit('/', 2)[-1]
name = request.POST.get('Enter a New Room Name')
new_room = ChatRoom()
new_room.name = name
if main in dict_of_queus:
new_room.main = main
main = dict_of_queus[main]
else:
main = QUEUES
new_room.owner = request.user.profile
new_room.save()
main[name] = {}
return chat_room(request, new_room.pk)
except IntegrityError:
return redirect('/')
@login_required
def chat_room(request, chat_room_id):
room = ChatRoom.objects.get(pk=chat_room_id)
sel_queue = dict_of_queus[room.main]
context = {
'chatroom': room,
'subs': room.subscribers.all(),
'rooms': room.name,
# 'queues': QUEUES,
}
if request.user.profile:
room.add_subscriber(request.user.profile)
sel_queue[room.name][request.user.username] = queue.Queue()
return render(request, 'chat/chat_room.html', context)
@csrf_exempt
@login_required
def chat_add(request, chat_room_id):
message = request.POST.get('message')
chat_room = ChatRoom.objects.get(pk=chat_room_id)
chat_room_name = chat_room.name
sel_queue = dict_of_queus[chat_room.main]
for prof in sel_queue[chat_room_name]:
msg = "{}: {}".format(request.user.username, message)
sel_queue[chat_room_name][prof].put_nowait(msg)
return JsonResponse({'message': message})
@csrf_exempt
@login_required
def chat_messages(request, chat_room_id):
chat_room = ChatRoom.objects.get(pk=chat_room_id)
chat_room_main = chat_room.main
chat_room_name = chat_room.name
sel_queue = dict_of_queus[chat_room_main]
try:
q = sel_queue[chat_room_name][request.user.username]
print request.user.username
msg = q.get(timeout=1)
except queue.Empty:
msg = []
data = {
'messages': msg,
}
return JsonResponse(data)
@csrf_exempt
@login_required
def delete_chatroom(request, chat_room_id):
if request.user.profile == ChatRoom.objects.get(pk=chat_room_id).owner:
channel = ChatRoom.objects.get(pk=chat_room_id).main
ChatRoom.objects.get(pk=chat_room_id).delete()
url = '/chat/' + channel
return redirect(url)
|
Python
| 0.000001
|
@@ -1199,16 +1199,29 @@
= %7B%7D%0A%0A%0A
+@csrf_exempt%0A
def inde
@@ -2090,16 +2090,29 @@
('/')%0A%0A%0A
+@csrf_exempt%0A
@login_r
|
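Decoded, the fix decorates two more views with @csrf_exempt, matching the other chat endpoints in the file:

@csrf_exempt
def index(request):
    ...  # body unchanged

@csrf_exempt
@login_required
def chat_room(request, chat_room_id):
    ...  # body unchanged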
f930a0d8a52ef493098390c38d27622e882a8203
|
fix published_by typo
|
openedx/core/djangoapps/content/course_overviews/connector.py
|
openedx/core/djangoapps/content/course_overviews/connector.py
|
import MySQLdb
import logging
from django.conf import settings
logger = logging.getLogger(__name__)
class EdevateDbConnector:
host = settings.EDEVATE_MYSQL_HOST
port = settings.EDEVATE_MYSQL_PORT
user = settings.EDEVATE_MYSQL_USER
passwd = settings.EDEVATE_MYSQL_PASSWD
db = settings.EDEVATE_MYSQL_DB_NAME
def __init__(self):
self.connection = None
self.cursor = None
self.connect()
def connect(self):
self.connection = MySQLdb.connect(host=self.host,
port=self.port,
user=self.user,
passwd=self.passwd,
db=self.db)
self.cursor = self.connection.cursor()
def close(self):
if self.connection:
self.cursor.close()
self.connection.close()
def get_edevate_user_id(self, user_email):
self.cursor.execute("""SELECT *
FROM users_customuser
WHERE email='{}';""".format(user_email)
)
edevate_user = self.cursor.fetchone()
logger.debug("Get edevate user: {!r}".format(edevate_user))
return edevate_user[0]
def get_verification_course(self, openedx_course_id, published_by):
self.cursor.execute("""SELECT *
FROM openedx_edxcourseverification
WHERE openedx_course_id = '{}'
AND published_by='{}';
""".format(openedx_course_id,
published_by)
)
verification_course = self.cursor.fetchone()
logger.debug("Get verification course: {!r}".format(
verification_course)
)
return verification_course
def create_verification_course(self, openedx_course_id, published_by_id):
self.cursor.execute("""INSERT INTO openedx_edxcourseverification
(openedx_course_id, status, published_by_id)
VALUES ('{}', 'reviewable', '{}');
""".format(openedx_course_id, published_by_id)
)
self.connection.commit()
return self.cursor.lastrowid
def update_verification_course(self, openedx_course_id, published_by_id):
self.cursor.execute("""UPDATE openedx_edxcourseverification
SET status='reviewable'
WHERE openedx_course_id='{}'
AND published_by_id='{}';
""".format(openedx_course_id, published_by_id)
)
affected_rows = self.connection.affected_rows()
self.connection.commit()
return affected_rows
def update_or_create_verification_course(self,
openedx_course_id,
course_author):
published_by_id = self.get_edevate_user_id(course_author)
verification_course = self.get_verification_course(openedx_course_id,
published_by_id)
if verification_course:
affected_rows = self.update_verification_course(openedx_course_id,
published_by_id)
logger.debug("Update verification course: {}".format(
affected_rows)
)
else:
affected_rows = self.create_verification_course(openedx_course_id,
published_by_id)
logger.debug("Create verification course: {}".format(
affected_rows)
)
return affected_rows
|
Python
| 0
|
@@ -1583,16 +1583,19 @@
ished_by
+_id
='%7B%7D';%0A
|
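The column is named published_by_id everywhere else in the file (both the INSERT and the UPDATE use it); decoded, the fix makes the SELECT in get_verification_course agree:

self.cursor.execute("""SELECT *
                    FROM openedx_edxcourseverification
                    WHERE openedx_course_id = '{}'
                    AND published_by_id='{}';
                    """.format(openedx_course_id, published_by))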
d01d12a0cbe286d808c9870b71374b36c0585230
|
simplify newline handling
|
weka_weca/__init__.py
|
weka_weca/__init__.py
|
class Node:
"""Data structure of a single node."""
def __init__(self, start, end, depth=0, indent=' '):
"""
Initialise a node that wraps its scope between the given start and end strings.
Parameters
----------
:param start : string
The start of the if-condition.
:param end : string
The end of the if-condition.
:param depth : integer
The indentation depth.
:param indent : string
The indentation style.
"""
self.start = start
self.scope = []
self.end = end
self.depth = depth
self.indent = indent
def __str__(self):
indent = self.depth * self.indent
scope = '\n'.join([str(node) for node in self.scope])
result = '\n'.join([indent + self.start, scope, indent + self.end])
return result
def port(path, method_name='classify'):
"""
Convert a single decision tree into a function.
Parameters
----------
:param path : string
The path of the exported text file.
:param method_name : string (default='classify')
The method name.
:return:
"""
# Load data:
with open(path, 'r') as file:
content = file.readlines()
# Create root node:
root = Node('', '')
atts = []
# Construct tree:
for line in content:
line = line.strip()
depth = line.count('| ')
# Get current node:
node = None
d = depth
if d > 0:
while d > 0:
node = root.scope[-1] if node is None else node.scope[-1]
d -= 1
else:
node = root.scope
# Get always the scope list:
if type(node) is not list:
node = node.scope
# Build the condition:
cond = line[(depth * len('| ')):]
has_return = line.count(':') == 1
if has_return:
cond = cond.split(':')[0]
atts.append(cond.split(' ')[0])
cond = Node('if (%s) {' % cond, '}', depth=depth+1)
# Set condition logic:
if has_return:
indent = cond.indent * (depth + 2)
return_value = line[line.find(':') + 1 : line.find('(')].strip()
return_value = indent + 'return %s;' % str(return_value)
cond.scope.append(return_value)
node.append(cond)
# Merge the relevant attributes:
atts = list(set(atts))
atts.sort()
atts = ', '.join(['float ' + a for a in atts])
# Wrap function scope around built tree:
result = ''.join(['int %s function(%s) {'%
(method_name, atts), str(root), '}'])
return result
|
Python
| 0.000147
|
@@ -689,16 +689,34 @@
.indent%0A
+ nl = '%5Cn'%0A
@@ -724,20 +724,18 @@
cope =
-'%5Cn'
+nl
.join(%5Bs
@@ -785,20 +785,18 @@
esult =
-'%5Cn'
+nl
.join(%5Bi
|
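Decoded, the simplification binds the newline once and reuses it for both joins; the patched __str__, indentation restored:

def __str__(self):
    indent = self.depth * self.indent
    nl = '\n'
    scope = nl.join([str(node) for node in self.scope])
    result = nl.join([indent + self.start, scope, indent + self.end])
    return result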
9a21c446f1236e1b89663c991ea354d8e473b3b9
|
Fix a copyright and pep8 issues in lanzano_luzi_2019_test.py
|
openquake/hazardlib/tests/gsim/lanzano_luzi_2019_test.py
|
openquake/hazardlib/tests/gsim/lanzano_luzi_2019_test.py
|
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2015-2019 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
"""
Implements the tests for the set of GMPE classes included within the GMPE
of Lanzano and Luzi (2019). Test tables were created by an excel spreadsheet
that calculates expected values provided by the original authors.
"""
from openquake.hazardlib.gsim.lanzano_luzi_2019 import (LanzanoLuzi2019shallow,
LanzanoLuzi2019deep)
from openquake.hazardlib.tests.gsim.utils import BaseGSIMTestCase
# Discrepancy percentages to be applied to all tests
class LanzanoLuzi2019shallowTestCase(BaseGSIMTestCase):
"""
Tests the Lanzano and Luzi (2019) GMPE for the case of shallow events.
"""
GSIM_CLASS = LanzanoLuzi2019shallow
# File containing the results for the Mean
MEAN_FILE = "LL19/LanzanoLuzi2019shallow_MEAN.csv"
    # File containing the results for the Total Standard Deviation
STD_FILE = "LL19/LanzanoLuzi2019shallow_STD_TOTAL.csv"
def test_mean(self):
self.check(self.MEAN_FILE,
max_discrep_percentage=0.1)
def test_std_total(self):
self.check(self.STD_FILE,
max_discrep_percentage=0.1)
class LanzanoLuzi2019deepTestCase(BaseGSIMTestCase):
"""
Tests the Lanzano and Luzi (2019) GMPE for the case of deep events.
"""
GSIM_CLASS = LanzanoLuzi2019deep
MEAN_FILE = "LL19/LanzanoLuzi2019deep_MEAN.csv"
STD_FILE = "LL19/LanzanoLuzi2019deep_STD_TOTAL.csv"
def test_mean(self):
self.check(self.MEAN_FILE,
max_discrep_percentage=0.1)
def test_std_total(self):
self.check(self.STD_FILE,
max_discrep_percentage=0.1)
|
Python
| 0.001416
|
@@ -83,13 +83,8 @@
(C)
-2015-
2019
@@ -99,16 +99,16 @@
ndation%0A
+
#%0A# Open
@@ -1052,16 +1052,17 @@
hallow,%0A
+
@@ -1129,16 +1129,16 @@
19deep)%0A
-
%0Afrom op
@@ -1251,16 +1251,17 @@
tests%0A%0A
+%0A
class La
@@ -2174,20 +2174,16 @@
AL.csv%22%0A
-
%0A def
|
03ed43d7d8867ba066d9eea3b3fc7cfe557a31d9
|
Use C++
|
test/setup.py
|
test/setup.py
|
from distutils.core import setup, Extension
test_py3c_module = Extension(
'test_py3c',
sources=['test_py3c.c'],
include_dirs=['../include'],
)
setup_args = dict(
name='test_py3c',
version='0.0',
description = '',
ext_modules = [test_py3c_module]
)
if __name__ == '__main__':
setup(**setup_args)
|
Python
| 0.000211
|
@@ -147,16 +147,70 @@
lude'%5D,%0A
+ extra_compile_args = %5B'--std=c++0x', '-l mylib'%5D,%0A
)%0A%0Asetup
|
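Per the diff, the Extension gains an extra_compile_args list so the sources build as C++11. A sketch of the module definition after the change; note that '-l mylib' is copied verbatim from the diff even though -l is a linker flag, which distutils normally takes via extra_link_args:

    from distutils.core import setup, Extension

    test_py3c_module = Extension(
        'test_py3c',
        sources=['test_py3c.c'],
        include_dirs=['../include'],
        extra_compile_args=['--std=c++0x', '-l mylib'],
    )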
2543709c204f1dd6aca5d012e7c28193631bb74c
|
Use postgres standard env vars
|
gargbot_3000/config.py
|
gargbot_3000/config.py
|
#! /usr/bin/env python3.6
# coding: utf-8
import os
import datetime as dt
from pathlib import Path
import pytz
from dotenv import load_dotenv
env_path = Path(".") / ".env"
load_dotenv(dotenv_path=env_path)
slack_verification_token = os.environ["slack_verification_token"]
slack_bot_user_token = os.environ["slack_bot_user_token"]
bot_id = os.environ["bot_id"]
bot_name = os.environ["bot_name"]
home = Path(os.getenv("home_folder", os.getcwd()))
db_name = os.environ["db_name"]
db_user = os.environ["db_user"]
db_password = os.environ["db_password"]
db_host = os.environ["db_host"]
dropbox_token = os.environ["dropbox_token"]
dbx_pic_folder = os.environ["dbx_pic_folder"]
tz = pytz.timezone(os.environ["tz"])
test_channel = os.environ["test_channel"]
main_channel = os.environ["main_channel"]
countdown_message = os.environ["countdown_message"]
ongoing_message = os.environ["ongoing_message"]
finished_message = os.environ["finished_message"]
forum_url = os.environ["forum_url"]
countdown_date = dt.datetime.fromtimestamp(int(os.environ["countdown_date"]), tz=tz)
countdown_args = os.environ["countdown_args"].split(", ")
|
Python
| 0
|
@@ -466,23 +466,27 @@
nviron%5B%22
-db_name
+POSTGRES_DB
%22%5D%0Adb_us
@@ -502,23 +502,29 @@
nviron%5B%22
-db_user
+POSTGRES_USER
%22%5D%0Adb_pa
@@ -544,27 +544,33 @@
nviron%5B%22
-db_password
+POSTGRES_PASSWORD
%22%5D%0Adb_ho
@@ -586,23 +586,29 @@
nviron%5B%22
-db_host
+POSTGRES_HOST
%22%5D%0A%0Adrop
|
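After the diff, the four database settings read the conventional POSTGRES_* variable names (the ones the Postgres Docker ecosystem commonly exports) instead of project-specific keys; the Python-side names stay the same:

    db_name = os.environ["POSTGRES_DB"]
    db_user = os.environ["POSTGRES_USER"]
    db_password = os.environ["POSTGRES_PASSWORD"]
    db_host = os.environ["POSTGRES_HOST"]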
ac088bae6a09c8d2ee7f9deac3e701126ca5dfa4
|
update boost
|
var/spack/packages/boost/package.py
|
var/spack/packages/boost/package.py
|
from spack import *
class Boost(Package):
"""Boost provides free peer-reviewed portable C++ source
libraries, emphasizing libraries that work well with the C++
Standard Library.
Boost libraries are intended to be widely useful, and usable
across a broad spectrum of applications. The Boost license
encourages both commercial and non-commercial use.
"""
homepage = "http://www.boost.org"
url = "http://downloads.sourceforge.net/project/boost/boost/1.55.0/boost_1_55_0.tar.bz2"
list_url = "http://sourceforge.net/projects/boost/files/boost/"
list_depth = 2
version('1.55.0', 'd6eef4b4cacb2183f2bf265a5a03a354')
version('1.54.0', '15cb8c0803064faef0c4ddf5bc5ca279')
version('1.53.0', 'a00d22605d5dbcfb4c9936a9b35bc4c2')
version('1.52.0', '3a855e0f919107e0ca4de4d84ad3f750')
version('1.51.0', '4b6bd483b692fd138aef84ed2c8eb679')
version('1.50.0', '52dd00be775e689f55a987baebccc462')
version('1.49.0', '0d202cb811f934282dea64856a175698')
version('1.48.0', 'd1e9a7a7f532bb031a3c175d86688d95')
version('1.47.0', 'a2dc343f7bc7f83f8941e47ed4a18200')
version('1.46.1', '7375679575f4c8db605d426fc721d506')
version('1.46.0', '37b12f1702319b73876b0097982087e0')
version('1.45.0', 'd405c606354789d0426bc07bea617e58')
version('1.44.0', 'f02578f5218f217a9f20e9c30e119c6a')
version('1.43.0', 'dd49767bfb726b0c774f7db0cef91ed1')
version('1.42.0', '7bf3b4eb841b62ffb0ade2b82218ebe6')
version('1.41.0', '8bb65e133907db727a2a825c5400d0a6')
version('1.40.0', 'ec3875caeac8c52c7c129802a8483bd7')
version('1.39.0', 'a17281fd88c48e0d866e1a12deecbcc0')
version('1.38.0', '5eca2116d39d61382b8f8235915cb267')
version('1.37.0', '8d9f990bfb7e83769fa5f1d6f065bc92')
version('1.36.0', '328bfec66c312150e4c2a78dcecb504b')
version('1.35.0', 'dce952a7214e72d6597516bcac84048b')
version('1.34.1', '2d938467e8a448a2c9763e0a9f8ca7e5')
version('1.34.0', 'ed5b9291ffad776f8757a916e1726ad0')
def url_for_version(self, version):
"""Handle Boost's weird URLs, which write the version two different ways."""
parts = [str(p) for p in Version(version)]
dots = ".".join(parts)
underscores = "_".join(parts)
return "http://downloads.sourceforge.net/project/boost/boost/%s/boost_%s.tar.bz2" % (
dots, underscores)
def install(self, spec, prefix):
bootstrap = Executable('./bootstrap.sh')
bootstrap()
# b2 used to be called bjam, before 1.47 (sigh)
b2name = './b2' if spec.satisfies('@1.47:') else './bjam'
b2 = Executable(b2name)
b2('install',
'-j %s' % make_jobs,
'--prefix=%s' % prefix)
|
Python
| 0.000001
|
@@ -615,16 +615,248 @@
th = 2%0A%0A
+ version('1.59.0', '6aa9a5c6a4ca1016edd0ed1178e3cb87')%0A version('1.58.0', 'b8839650e61e9c1c0a89f371dd475546')%0A version('1.57.0', '1be49befbdd9a5ce9def2983ba3e7b76')%0A version('1.56.0', 'a744cf167b05d72335f27c88115f211d')%0A
vers
|
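The diff only prepends four newer version/checksum pairs (1.56.0 through 1.59.0); each still resolves through url_for_version, which renders the version with dots in the directory and underscores in the archive name. A worked example of that formatting:

    parts = ['1', '59', '0']
    dots = ".".join(parts)           # '1.59.0'
    underscores = "_".join(parts)    # '1_59_0'
    url = ("http://downloads.sourceforge.net/project/boost/boost/%s/boost_%s.tar.bz2"
           % (dots, underscores))
    # -> .../boost/1.59.0/boost_1_59_0.tar.bz2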
58e234ad7961e955726152436bcb3c8f270564c4
|
fix py33 error in setup.py
|
vendor.py
|
vendor.py
|
import subprocess
import os
from os import path
import re
import traceback
import sys
error_msg = """
This library depends on sources fetched when packaging that failed to be
retrieved.
This means that it will *not* work as expected. Errors encountered:
"""
def run(cmd):
sys.stdout.write('[vendoring] Running command: %s\n' % ' '.join(cmd))
try:
result = subprocess.Popen(
cmd,
stderr=subprocess.PIPE,
stdout=subprocess.PIPE
)
except Exception:
# if building with python2.5 this makes it compatible
_, error, _ = sys.exc_info()
print_error([], traceback.format_exc(error).split('\n'))
raise SystemExit(1)
if result.wait():
print_error(result.stdout.readlines(), result.stderr.readlines())
def print_error(stdout, stderr):
sys.stderr.write('*\n'*80)
sys.stderr.write(error_msg+'\n')
for line in stdout:
sys.stderr.write(line+'\n')
for line in stderr:
sys.stderr.write(line+'\n')
sys.stderr.write('*'*80+'\n')
def vendor_library(name, version, git_repo):
this_dir = path.dirname(path.abspath(__file__))
vendor_dest = path.join(this_dir, 'remoto/lib/vendor/%s' % name)
vendor_init = path.join(vendor_dest, '__init__.py')
vendor_src = path.join(this_dir, name)
vendor_module = path.join(vendor_src, name)
current_dir = os.getcwd()
if path.exists(vendor_src):
run(['rm', '-rf', vendor_src])
if path.exists(vendor_init):
module_file = open(vendor_init).read()
metadata = dict(re.findall(r"__([a-z]+)__\s*=\s*['\"]([^'\"]*)['\"]", module_file))
if metadata.get('version') != version:
run(['rm', '-rf', vendor_dest])
if not path.exists(vendor_dest):
run(['git', 'clone', git_repo])
os.chdir(vendor_src)
run(['git', 'checkout', version])
run(['mv', vendor_module, vendor_dest])
os.chdir(current_dir)
def clean_vendor(name):
"""
Ensure that vendored code/dirs are removed, possibly when packaging when
the environment flag is set to avoid vendoring.
"""
this_dir = path.dirname(path.abspath(__file__))
vendor_dest = path.join(this_dir, 'remoto/lib/vendor/%s' % name)
run(['rm', '-rf', vendor_dest])
def vendorize(vendor_requirements):
"""
This is the main entry point for vendorizing requirements. It expects
a list of tuples that should contain the name of the library and the
version.
For example, a library ``foo`` with version ``0.0.1`` would look like::
vendor_requirements = [
('foo', '0.0.1', 'https://example.com/git_repo'),
]
"""
for library in vendor_requirements:
name, version, repo = library
vendor_library(name, version, repo)
if __name__ == '__main__':
# XXX define this in one place, so that we avoid making updates
# in two places
vendor_requirements = [
('execnet', '1.2.post1', 'https://github.com/alfredodeza/execnet'),
]
vendorize(vendor_requirements)
|
Python
| 0
|
@@ -884,16 +884,20 @@
r.write(
+str(
error_ms
@@ -897,16 +897,17 @@
rror_msg
+)
+'%5Cn')%0A
@@ -946,36 +946,41 @@
ys.stderr.write(
+str(
line
+)
+'%5Cn')%0A for l
@@ -1019,20 +1019,25 @@
r.write(
+str(
line
+)
+'%5Cn')%0A
|
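The fix wraps error_msg and each captured line in str() before the '\n' concatenation. On Python 3, Popen pipes yield bytes, and bytes + str raises TypeError; str() sidesteps the crash, at the cost of rendering undecoded lines with a b'...' prefix. print_error after the diff:

    def print_error(stdout, stderr):
        sys.stderr.write('*\n'*80)
        sys.stderr.write(str(error_msg)+'\n')
        for line in stdout:
            sys.stderr.write(str(line)+'\n')
        for line in stderr:
            sys.stderr.write(str(line)+'\n')
        sys.stderr.write('*'*80+'\n')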
281fd926786186e8f0b1ebc7d8aeb1c362310fc1
|
Remove unused variable
|
viewer.py
|
viewer.py
|
import sys
import pygame
import pygame.locals
pygame.init()
size = width, height = 575, 575
screen = pygame.display.set_mode(size)
label_lights = False
def up_row(x_offset):
for i in range(20):
x = x_offset + (i % 2) * 0.5
y = i * 0.5
yield x, y
def down_row(x_offset):
for i in range(20):
x = x_offset + ((i+ 1) % 2) * 0.5
y = 9.5 - (i * 0.5)
yield x, y
pos_list = []
for strip_pair in range(5):
pos_list += list(up_row(2 * strip_pair))
pos_list += list(down_row(2 * strip_pair + 1))
positions = {i: v for i, v in enumerate(pos_list)}
red = (255, 0, 0)
def get_color(i):
red = 255 * (i / 199.0)
green = 0
blue = 255 * ((199-i) / 199.0)
c = (int(red), int(green), int(blue))
return c
def get_screen_pos(x, y):
# upper_left is 0,0
# bottom left is 0, width
scaled_x = (int)(50*x+50)
scaled_y = (int)(width - 50 - (50*y))
return (scaled_x, scaled_y)
myfont = pygame.font.SysFont("monospace", 15)
import struct
data = open('Resources/video.bin', 'rb')
import time
while True:
for event in pygame.event.get():
if event.type == pygame.locals.QUIT:
sys.exit(0)
for k, v in positions.items():
x, y = v
pos = get_screen_pos(x, y)
r = ord(data.read(1))
g = ord(data.read(1))
b = ord(data.read(1))
color = (r,g,b)
pygame.draw.circle(screen, color, pos, 10)
if label_lights:
label = myfont.render(str(k), 1, (255, 255, 255))
screen.blit(label, pos)
pygame.display.update()
time.sleep(0.05)
|
Python
| 0.000015
|
@@ -607,27 +607,8 @@
)%7D%0A%0A
-red = (255, 0, 0)%0A%0A
def
|
30088e034e32209e32e65218974ee717c718b1ab
|
Add start and stop entry points to sims.
|
wmt/flask/api/sims.py
|
wmt/flask/api/sims.py
|
import os
from flask import Blueprint
from flask import json, url_for, current_app
from flask import g, request, abort, send_file
from flask.ext.login import current_user, login_required
from ..utils import as_resource, as_collection
from ..errors import InvalidFieldError, AuthorizationError
from ..services import sims, users
from ..core import deserialize_request
sims_page = Blueprint('sims', __name__)
def assert_owner_or_raise(sim):
user = users.first(username=current_user.get_id())
if user.id != sim.owner:
raise AuthorizationError()
@sims_page.route('/')
def show():
sort = request.args.get('sort', 'id')
order = request.args.get('order', 'asc')
return sims.jsonify_collection(sims.all(sort=sort, order=order))
@sims_page.route('/', methods=['POST'])
@login_required
def new():
data = deserialize_request(request, fields=['name', 'model'])
user = users.first(username=current_user.get_id())
sim = sims.create(data['name'], data['model'], owner=user.id)
sim.create_stage_dir()
return sim.jsonify()
@sims_page.route('/<int:id>')
def sim(id):
return sims.get_or_404(id).jsonify()
@sims_page.route('/<int:id>', methods=['PATCH', 'PUT'])
@login_required
def update(id):
sim = sims.get_or_404(id)
assert_owner_or_raise(sim)
kwds = dict(fields=['status', 'message'])
if request.method == 'PATCH':
kwds['require'] = 'some'
data = deserialize_request(request, **kwds)
sims.update_status(id, **data) or abort(401)
return sim.jsonify()
@sims_page.route('/<int:id>', methods=['DELETE'])
@login_required
def delete(id):
sim = sims.get_or_404(id)
user = users.first(username=current_user.get_id())
if user.id != sim.owner:
raise AuthorizationError()
sims.delete(sim)
return "", 204
@sims_page.route('/<int:id>/files')
def files(id):
import tempfile, tarfile, shutil
format = request.args.get('format', 'gztar')
sim = sims.get_or_404(id)
try:
tmpdir = tempfile.mkdtemp(prefix='wmt', suffix='.d')
except:
raise
else:
archive = os.path.join(tmpdir, str(sim.uuid))
name = shutil.make_archive(archive, format,
current_app.config['STAGE_DIR'], sim.uuid)
return send_file(name, attachment_filename=os.path.basename(name),
as_attachment=True)
finally:
shutil.rmtree(tmpdir)
@sims_page.route('/<int:id>/actions', methods=['POST'])
def actions(id):
if request.method == 'POST':
data = deserialize_request(request, fields=['action'])
if data['action'] == 'start':
sims.start(id)
elif data['action'] == 'stop':
sims.stop(id)
else:
raise InvalidFieldError('sim', 'action')
|
Python
| 0
|
@@ -362,16 +362,52 @@
request%0A
+from ..tasks import exec_remote_wmt%0A
%0A%0Asims_p
@@ -2498,23 +2498,21 @@
int:id%3E/
-actions
+start
', metho
@@ -2532,15 +2532,13 @@
def
-actions
+start
(id)
@@ -2547,41 +2547,35 @@
-if request.method == 'POST':%0A
+sim = sims.get_or_404(id)%0A%0A
@@ -2623,147 +2623,466 @@
s=%5B'
-action
+host',%0A 'username',%0A 'password
'%5D)%0A
+%0A
- if data%5B'action'%5D == 'start':%0A sims.start(id)%0A elif data%5B'action'%5D == 'stop':%0A sims.stop(id)
+hosts = current_app.config%5B'WMT_EXEC_HOSTS'%5D%0A if data%5B'host'%5D not in hosts:%0A raise InvalidFieldError('start', 'host')%0A else:%0A host_config = hosts%5Bdata%5B'host'%5D%5D%0A%0A return exec_remote_wmt(data%5B'host'%5D, sim.uuid,%0A username=data%5B'username'%5D,%0A password=data%5B'password'%5D,
%0A
@@ -3082,30 +3082,24 @@
'%5D,%0A
-else:%0A
@@ -3102,45 +3102,198 @@
-raise InvalidFieldError('sim', 'action')
+which_wmt_exe=host_config%5B'which_wmt_exe'%5D)%0A%0A%0A@sims_page.route('/%3Cint:id%3E/stop', methods=%5B'POST'%5D)%0Adef stop(id):%0A sim = sims.get_or_404(id)%0A%0A stop_simulation(sim.uuid)%0A%0A return '', 204
%0A
|
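Summarising the diff: the catch-all /actions route is replaced by dedicated /start and /stop endpoints, with start validating the requested host against WMT_EXEC_HOSTS and delegating to the newly imported exec_remote_wmt task. A condensed sketch (stop_simulation appears in the diff without a matching import):

    @sims_page.route('/<int:id>/start', methods=['POST'])
    def start(id):
        sim = sims.get_or_404(id)
        data = deserialize_request(request,
                                   fields=['host', 'username', 'password'])
        hosts = current_app.config['WMT_EXEC_HOSTS']
        if data['host'] not in hosts:
            raise InvalidFieldError('start', 'host')
        host_config = hosts[data['host']]
        return exec_remote_wmt(data['host'], sim.uuid,
                               username=data['username'],
                               password=data['password'],
                               which_wmt_exe=host_config['which_wmt_exe'])

    @sims_page.route('/<int:id>/stop', methods=['POST'])
    def stop(id):
        sim = sims.get_or_404(id)
        stop_simulation(sim.uuid)
        return '', 204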
8c026f79c223091c06deb0589dbf0ee342f4f0e5
|
Change skeleton #2.
|
wood_site/settings.py
|
wood_site/settings.py
|
"""
Django settings for wood_site project.
Generated by 'django-admin startproject' using Django 1.10.6.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '8c$$$u7^8%7uwjwqj4ob+8$@ao*)i^m@hwt^cx1c3ht)n@c(a0'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'wood',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'wood_site.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'wood_site.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'wood',
'USER': 'postgres',
'PASSWORD': '123',
'HOST': '',
'PORT': '',
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
|
Python
| 0
|
@@ -3231,8 +3231,74 @@
tatic/'%0A
+%0ASTATICFILES_DIRS = (%0A os.path.join(BASE_DIR, 'Wood/static'),%0A)
|
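The single addition points Django's static file finders at the app's own asset folder; the trailing comma keeps the one-element value a tuple:

    STATICFILES_DIRS = (
        os.path.join(BASE_DIR, 'Wood/static'),
    )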
04595ae3ceaac9affabb3963e45857d534dfe95f
|
remove print statement (#1907)
|
insights/client/collection_rules.py
|
insights/client/collection_rules.py
|
"""
Rules for data collection
"""
from __future__ import absolute_import
import hashlib
import json
import logging
import six
import shlex
import os
import requests
from six.moves import configparser as ConfigParser
from subprocess import Popen, PIPE, STDOUT
from tempfile import NamedTemporaryFile
from .constants import InsightsConstants as constants
APP_NAME = constants.app_name
logger = logging.getLogger(__name__)
net_logger = logging.getLogger('network')
class InsightsUploadConf(object):
"""
Insights spec configuration from uploader.json
"""
def __init__(self, config, conn=None):
"""
Load config from parent
"""
self.config = config
self.fallback_file = constants.collection_fallback_file
self.remove_file = config.remove_file
self.collection_rules_file = constants.collection_rules_file
self.collection_rules_url = self.config.collection_rules_url
self.gpg = self.config.gpg
if conn:
if self.collection_rules_url is None:
if config.legacy_upload:
self.collection_rules_url = conn.base_url + '/v1/static/uploader.v2.json'
else:
self.collection_rules_url = conn.base_url.split('/platform')[0] + '/v1/static/uploader.v2.json'
print(self.collection_rules_url)
# self.collection_rules_url = conn.base_url + '/static/uploader.v2.json'
self.conn = conn
def validate_gpg_sig(self, path, sig=None):
"""
Validate the collection rules
"""
logger.debug("Verifying GPG signature of Insights configuration")
if sig is None:
sig = path + ".asc"
command = ("/usr/bin/gpg --no-default-keyring "
"--keyring " + constants.pub_gpg_path +
" --verify " + sig + " " + path)
if not six.PY3:
command = command.encode('utf-8', 'ignore')
args = shlex.split(command)
logger.debug("Executing: %s", args)
proc = Popen(
args, shell=False, stdout=PIPE, stderr=STDOUT, close_fds=True)
stdout, stderr = proc.communicate()
logger.debug("STDOUT: %s", stdout)
logger.debug("STDERR: %s", stderr)
logger.debug("Status: %s", proc.returncode)
if proc.returncode:
logger.error("ERROR: Unable to validate GPG signature: %s", path)
return False
else:
logger.debug("GPG signature verified")
return True
def try_disk(self, path, gpg=True):
"""
Try to load json off disk
"""
if not os.path.isfile(path):
return
if not gpg or self.validate_gpg_sig(path):
stream = open(path, 'r')
json_stream = stream.read()
if len(json_stream):
try:
json_config = json.loads(json_stream)
return json_config
except ValueError:
logger.error("ERROR: Invalid JSON in %s", path)
return False
else:
logger.warn("WARNING: %s was an empty file", path)
return
def get_collection_rules(self, raw=False):
"""
Download the collection rules
"""
logger.debug("Attemping to download collection rules from %s",
self.collection_rules_url)
net_logger.info("GET %s", self.collection_rules_url)
try:
req = self.conn.session.get(
self.collection_rules_url, headers=({'accept': 'text/plain'}))
if req.status_code == 200:
logger.debug("Successfully downloaded collection rules")
json_response = NamedTemporaryFile()
json_response.write(req.text.encode('utf-8'))
json_response.file.flush()
else:
logger.error("ERROR: Could not download dynamic configuration")
logger.error("Debug Info: \nConf status: %s", req.status_code)
logger.error("Debug Info: \nConf message: %s", req.text)
return None
except requests.ConnectionError as e:
logger.error(
"ERROR: Could not download dynamic configuration: %s", e)
return None
if self.gpg:
self.get_collection_rules_gpg(json_response)
self.write_collection_data(self.collection_rules_file, req.text)
if raw:
return req.text
else:
return json.loads(req.text)
def fetch_gpg(self):
logger.debug("Attemping to download collection "
"rules GPG signature from %s",
self.collection_rules_url + ".asc")
headers = ({'accept': 'text/plain'})
net_logger.info("GET %s", self.collection_rules_url + '.asc')
config_sig = self.conn.session.get(self.collection_rules_url + '.asc',
headers=headers)
if config_sig.status_code == 200:
logger.debug("Successfully downloaded GPG signature")
return config_sig.text
else:
logger.error("ERROR: Download of GPG Signature failed!")
logger.error("Sig status: %s", config_sig.status_code)
return False
def get_collection_rules_gpg(self, collection_rules):
"""
Download the collection rules gpg signature
"""
sig_text = self.fetch_gpg()
sig_response = NamedTemporaryFile(suffix=".asc")
sig_response.write(sig_text.encode('utf-8'))
sig_response.file.flush()
self.validate_gpg_sig(collection_rules.name, sig_response.name)
self.write_collection_data(self.collection_rules_file + ".asc", sig_text)
def write_collection_data(self, path, data):
"""
Write collections rules to disk
"""
flags = os.O_WRONLY | os.O_CREAT | os.O_TRUNC
fd = os.open(path, flags, 0o600)
with os.fdopen(fd, 'w') as dyn_conf_file:
dyn_conf_file.write(data)
def get_conf_file(self):
"""
Get config from local config file, first try cache, then fallback.
"""
for conf_file in [self.collection_rules_file, self.fallback_file]:
logger.debug("trying to read conf from: " + conf_file)
conf = self.try_disk(conf_file, self.gpg)
if not conf:
continue
version = conf.get('version', None)
if version is None:
raise ValueError("ERROR: Could not find version in json")
conf['file'] = conf_file
logger.debug("Success reading config")
logger.debug(json.dumps(conf))
return conf
raise ValueError("ERROR: Unable to download conf or read it from disk!")
def get_conf_update(self):
"""
Get updated config from URL, fallback to local file if download fails.
"""
dyn_conf = self.get_collection_rules()
if not dyn_conf:
return self.get_conf_file()
version = dyn_conf.get('version', None)
if version is None:
raise ValueError("ERROR: Could not find version in json")
dyn_conf['file'] = self.collection_rules_file
logger.debug("Success reading config")
config_hash = hashlib.sha1(json.dumps(dyn_conf).encode('utf-8')).hexdigest()
logger.debug('sha1 of config: %s', config_hash)
return dyn_conf
def get_rm_conf(self):
"""
Get excluded files config from remove_file.
"""
if not os.path.isfile(self.remove_file):
return None
# Convert config object into dict
parsedconfig = ConfigParser.RawConfigParser()
parsedconfig.read(self.remove_file)
rm_conf = {}
for item, value in parsedconfig.items('remove'):
if six.PY3:
rm_conf[item] = value.strip().encode('utf-8').decode('unicode-escape').split(',')
else:
rm_conf[item] = value.strip().decode('string-escape').split(',')
return rm_conf
if __name__ == '__main__':
from .config import InsightsConfig
print(InsightsUploadConf(InsightsConfig().load_all()))
|
Python
| 0.000857
|
@@ -1316,61 +1316,8 @@
on'%0A
- print(self.collection_rules_url)%0A
|
4571b8e6dee3d96073e88534620f6467a901c241
|
Update Language_Processing.py
|
Week3-Case-Studies-Part1/Language-Processing/Language_Processing.py
|
Week3-Case-Studies-Part1/Language-Processing/Language_Processing.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Feb 28 22:41:26 2017
@author: lamahamadeh
"""
'''
Case Study about Language Processing
'''
#counting words
#---------------
text = "This is a test text. We're keping this text short to keep things manageable." #test text
#Using loops
#-----------
def count_words(text):
"""count the number of times each word occurs in text (str).
Return dictionary where keys are unique words and values are
word counts. skip punctuations"""
    text = text.lower() #lowercase the text so the function counts the same words whether capitalised or not
    skips = [".", ",", ";", ":", "'", '"'] #skip punctuation so it is not counted as part of the words that come before it
for ch in skips:
text = text.replace(ch,"")
word_counts = {}
for word in text.split(" "):
if word in word_counts: #known word case
word_counts[word] += 1
else:
word_counts[word] = 1 #unknown word case
return word_counts
print(count_words(text))
print(len(count_words("This comprehension check is to check for comprehension.")))#first quiz question
#------------------------------------------------------------------------------
#using collections module
#-------------------------
from collections import Counter
def count_words_fast(text):
"""count the number of times each word occurs in text (str).
Return dictionary where keys are unique words and values are
word counts. skip punctuations"""
    text = text.lower() #lowercase the text so the function counts the same words whether capitalised or not
    skips = [".", ",", ";", ":", "'", '"'] #skip punctuation so it is not counted as part of the words that come before it
for ch in skips:
text = text.replace(ch,"")
word_counts = Counter(text.split(" "))
return word_counts
print(count_words_fast(text))
print(count_words(text)==count_words_fast(text))
print(count_words(text) is count_words_fast(text))#second quiz question
#------------------------------------------------------------------------------
#read a book
#-------------
def read_book(title_path):
"""Read a book and return it as a string"""
with open(title_path, "r", encoding = "utf8") as current_file:
text = current_file.read()
text = text.replace("\n","").replace("\r","")
return text
text = read_book('/Users/ADB3HAMADL/Desktop/Movies/English/Nora Ephron/You Have Got Mail.txt')#read a book from its path
print(len(text))#number of charatcers in the book
#if there is a famous/wanted line in the book we can use the 'find' method to find it
ind = text.find("go to the mattresses")
print(ind)#print the index number of the famous/wanted sentence
sample_text = text[ind : ind + 953]#slice the paragraph that contains the famous line
print(sample_text)#print the whole chosen paragraph
#------------------------------------------------------------------------------
#Counting the number of unique words
#------------------------------------
def word_stats(word_counts):
"""return the number of unique words and word frequencies"""
num_unique = len(word_counts) #calculate the number of unique words in the text
counts = word_counts.values() #calculate the frequency of each word in the text
return(num_unique,counts)
text = read_book('/Users/ADB3HAMADL/Desktop/Movies/English/Nora Ephron/You Have Got Mail.txt')
word_counts = count_words(text)
(num_unique, counts) = word_stats(word_counts)
print(num_unique) #print the number of unique number of words in the text
print(sum(counts)) #print the total number of words in the text
#------------------------------------------------------------------------------
#Reading multiple files
#-----------------------
import os #to read directories
movie_dir = "/Users/ADB3HAMADL/Desktop/movies" #tells us how many directories in the book directory
import pandas as pd
'''
Pandas example of how to create a dataframe:
--------------------------------------------
import pandas as pd
table = pd.DataFrame(columns=("name", "age"))
table.loc[1] = "James", 22
table.loc[2] = "Jess", 32
print(table)
'''
stats = pd.DataFrame(columns = ("language" , "director" , "title" , "lenght" , "unique")) #this creates an empty dataframe
#with empty table elements with 5 columns
#To put data in the table
title_num =1
for language in os.listdir(movie_dir):
for director in os.listdir(movie_dir + "/" + language):
for title in os.listdir(movie_dir + "/" + language + "/" + director):
inputfile = movie_dir + "/" + language + "/" + director + "/" + title
print(inputfile)
text = read_book(inputfile)
(num_unique, counts) = word_stats(count_words(text))
stats.loc[title_num ] = language , director.capitalize(), title.replace(".txt", " ") , sum(counts) , num_unique
title_num += 1
print(stats) #print the created dataframe
print(stats.head()) #print the top 5 lines
print(stats.tail()) #print the last 5 lines
print(stats[stats.language == "English"]) #print the number of entries for language English (a subset from the whole dataframe)
#------------------------------------------------------------------------------
#Plotting Book Statistics
#-------------------------
import matplotlib.pyplot as plt
plt.plot(stats.length, stats.unique, "bo")
plt.loglog(stats.length, stats.unique, "bo") #it is a straight line which suggest data modelling strategies that we might use
plt.figure(figsize = (10,10))
subset = stats[stats.language == "English"] #extract a subset that has only the rows with English Language
plt.loglog(subset.length, subset.unique, "o", label = "English", color = "blue")
subset = stats[stats.language == "French"] #extract a subset that has only the rows with French Language
plt.loglog(subset.length, subset.unique, "o", label = "English", color = "red")
plt.legend()
plt.xlabel("Movie Length")
plt.ylabel("Number of unique words")
plt.savefig("lang_plot.pdf")
#------------------------------------------------------------------------------
#
|
Python
| 0.000001
|
@@ -4319,18 +4319,18 @@
, %22leng
-h
t
+h
%22 , %22uni
@@ -5479,16 +5479,76 @@
, %22bo%22)%0A
+#OR we can write plt.plot(stats%5B'length'%5D, stats%5B'unique'%5D)%0A
plt.logl
|
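Besides renaming the misspelled 'lenght' column to 'length' (which is what makes the later stats.length attribute access work), the diff notes the equivalent bracket form plt.plot(stats['length'], stats['unique']). The script also relies on the loop-based and Counter-based counters agreeing; a self-contained check of that equivalence on a made-up sentence:

    from collections import Counter

    sample = "one fish two fish red fish blue fish"
    by_loop = {}
    for word in sample.split(" "):
        by_loop[word] = by_loop.get(word, 0) + 1
    assert by_loop == dict(Counter(sample.split(" ")))  # {'fish': 4, ...}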
2321dd5b0afedb9bb4a6e894149dd636174adf2c
|
Bump version to 4.0.1
|
stix2elevator/version.py
|
stix2elevator/version.py
|
__version__ = "4.0.0"
|
Python
| 0
|
@@ -12,11 +12,11 @@
= %224.0.
-0
+1
%22%0A
|
9fe2c266b72d6815c6270cfcc720cd5b1054eff8
|
add record_convert_unit due to DRY problem
|
vnstat.py
|
vnstat.py
|
""" python frontend for vnstat cmd/tool """
from subprocess import getoutput
from datetime import datetime
import json
if __name__ == 'vnstat.vnstat':
from . import jalali
else:
import jalali
def read():
""" it return a vnstat as json object """
cmd = 'vnstat --json'
vnstat_out = getoutput(cmd)
data_json = json.loads(vnstat_out)
return data_json
def format_data(data, unit='K'):
""" reformat data
add persian (jalali) date
add date as object
sort data by date
add total of rx + tx
args:
data: dictionary create from json output
"""
for interface in data['interfaces']:
for traffic_type in ['days', 'months', 'hours']:
for record in interface['traffic'][traffic_type]:
# if day not specified in date it replaced with 1
# usually it happens in month data
if 'day' not in record['date']:
record['date']['day'] = 1
date_string = '%d/%d/%d' % (record['date']['year'],
record['date']['month'], record['date']['day'])
record['date'] = datetime(record['date']['year'],
record['date']['month'], record['date']['day'])
record['jdate'] = jalali.Gregorian(date_string)
record['total'] = record['rx'] + record['tx']
record['unit'] = unit
        # calculate total for tops
for record in interface['traffic']['tops']:
record['total'] = record['rx'] + record['tx']
record['unit'] = unit
interface['traffic'][traffic_type].sort(key=lambda x: x.get('date'))
def convert_unit(data, destination='M'):
""" convert traffic unit of each record
units are in Xib but we just save X in record
"""
units = {'K':2**10, 'M':2**20, 'G':2**30}
for interface in data['interfaces']:
for traffic_type in ['days', 'months', 'hours']:
for record in interface['traffic'][traffic_type]:
# calulating divisor . divisor = source_unit / destination_unit
source = record['unit']
divisor = units[source.upper()] / units[destination.upper()]
record['rx'] = round(record['rx'] * divisor, 2)
record['tx'] = round(record['tx'] * divisor, 2)
record['total'] = round(record['total'] * divisor, 2)
record['unit'] = destination.upper()
for record in interface['traffic']['tops']:
# calulating divisor . divisor = source_unit / destination_unit
source = record['unit']
divisor = units[source.upper()] / units[destination.upper()]
record['rx'] = round(record['rx'] * divisor, 2)
record['tx'] = round(record['tx'] * divisor, 2)
record['total'] = round(record['total'] * divisor, 2)
record['unit'] = destination.upper()
def rx_sum(data):
""" return sum of rx traffic's """
rx_traffic = 0
for i in data['interfaces'][0]['traffic']['days']:
rx_traffic = rx_traffic + i['rx']
return rx_traffic
def get(data, traffic_set='days', interface=None):
"""get set of specific data like days , months
if no interface specified it returns all interfaces data in format:
{nick, [ tops ]}
"""
if interface is None:
return {item['nick']:item['traffic'][traffic_set] for item in data['interfaces']}
else:
iface_data = [item for item in data['interfaces'] if item['nick'] == interface][0]
return iface_data['traffic'][traffic_set]
def get_days(data, interface=None):
""" get daily traffic's
if no interface specified it returns all interfaces data in format:
{nick: [ days ]}
"""
return get(data, 'days', interface)
def get_months(data, interface=None):
""" get monthly traffic's
if no interface specified it returns all interfaces data in format:
{nick: [ months ]}
"""
return get(data, 'months', interface)
def get_hours(data, interface=None):
""" get hourly traffic's
if no interface specified it returns all interfaces data in format:
{nick: [ hours ]}
"""
return get(data, 'hours', interface)
def get_tops(data, interface=None):
""" get tops traffic's set
if no interface specified it returns all interfaces data in format:
{nick: [ tops ]}
"""
return get(data, 'tops', interface)
|
Python
| 0.000001
|
@@ -1726,16 +1726,582 @@
ate'))%0A%0A
+def record_convert_unit(record, destination='K'):%0A %22%22%22 convert traffic unit of a record%0A units are in Xib but we just save X in record%0A %22%22%22%0A%0A units = %7B'K':2**10, 'M':2**20, 'G':2**30%7D%0A source = record%5B'unit'%5D%0A # calulating divisor . divisor = source_unit / destination_unit%0A divisor = units%5Bsource.upper()%5D / units%5Bdestination.upper()%5D%0A%0A record%5B'rx'%5D = round(record%5B'rx'%5D * divisor, 2)%0A record%5B'tx'%5D = round(record%5B'tx'%5D * divisor, 2)%0A record%5B'total'%5D = round(record%5B'total'%5D * divisor, 2)%0A record%5B'unit'%5D = destination.upper()%0A%0A
def conv
@@ -2439,58 +2439,11 @@
-
%22%22%22
-%0A units = %7B'K':2**10, 'M':2**20, 'G':2**30%7D
%0A%0A
@@ -2620,420 +2620,35 @@
-# calulating divisor . divisor = source_unit / destination_unit%0A source = record%5B'unit'%5D%0A divisor = units%5Bsource.upper()%5D / units%5Bdestination.upper()%5D%0A%0A record%5B'rx'%5D = round(record%5B'rx'%5D * divisor, 2)%0A record%5B'tx'%5D = round(record%5B'tx'%5D * divisor, 2)%0A record%5B'total'%5D = round(record%5B'total'%5D * divisor, 2)%0A record%5B'unit'%5D =
+record_convert_unit(record,
des
@@ -2655,24 +2655,18 @@
tination
-.upper(
)
+%0A
%0A
@@ -2734,420 +2734,35 @@
-# calulating divisor . divisor = source_unit / destination_unit%0A source = record%5B'unit'%5D%0A divisor = units%5Bsource.upper()%5D / units%5Bdestination.upper()%5D%0A%0A record%5B'rx'%5D = round(record%5B'rx'%5D * divisor, 2)%0A record%5B'tx'%5D = round(record%5B'tx'%5D * divisor, 2)%0A record%5B'total'%5D = round(record%5B'total'%5D * divisor, 2)%0A record%5B'unit'%5D =
+record_convert_unit(record,
des
@@ -2765,31 +2765,24 @@
destination
-.upper(
)%0A%0Adef rx_su
|
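With the duplicated divisor arithmetic pulled into record_convert_unit, convert_unit shrinks to one helper call per record. A quick check of the extracted helper with an illustrative record (2048 KiB is 2 MiB):

    record = {'rx': 2048.0, 'tx': 1024.0, 'total': 3072.0, 'unit': 'K'}
    record_convert_unit(record, destination='M')
    assert record == {'rx': 2.0, 'tx': 1.0, 'total': 3.0, 'unit': 'M'}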
282ac04e49c6adef237ea30fa4dcae64e6f959d8
|
Support for non-blank server roots
|
stronghold/middleware.py
|
stronghold/middleware.py
|
from django.contrib.auth.decorators import login_required
from stronghold import conf
class LoginRequiredMiddleware(object):
"""
Force all views to use login required
View is deemed to be public if the @public decorator is applied to the view
View is also deemed to be Public if listed in in django settings in the
STRONGHOLD_PUBLIC_URLS dictionary
each url in STRONGHOLD_PUBLIC_URLS must be a valid regex
"""
def __init__(self, *args, **kwargs):
self.public_view_urls = getattr(conf, 'STRONGHOLD_PUBLIC_URLS', ())
def process_view(self, request, view_func, view_args, view_kwargs):
# if request is authenticated, dont process it
if request.user.is_authenticated():
return None
# if its a public view, don't process it
is_public = getattr(view_func, 'STRONGHOLD_IS_PUBLIC', None)
if is_public:
return None
# if this view matches a whitelisted regex, don't process it
for view_url in self.public_view_urls:
if view_url.match(request.path):
return None
return login_required(view_func)(request, *view_args, **view_kwargs)
|
Python
| 0
|
@@ -1072,16 +1072,21 @@
est.path
+_info
):%0A
|
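The one-line change matches the whitelist regexes against request.path_info instead of request.path. When the site is served under a non-empty script prefix (say, mounted at /app), path includes the prefix while path_info is relative to it, so patterns written against project URLs keep matching; hence the commit title. The loop after the diff:

    for view_url in self.public_view_urls:
        if view_url.match(request.path_info):
            return None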
79893a76c0b438ab3885a9c09027842ff92a26d2
|
Update multiplier constant / tweak brightness
|
wakeup.py
|
wakeup.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# The Pin. Use Broadcom numbers.
BRIGHT_PIN = 17
# User config
START_BRIGHT = 15 # Minutes before alarm to start lighting up
END_BRIGHT = -45 # Minutes after alarm to turn off
MAX_BRIGHT = 255 # Max brightness 1 - 255
# Other constants
BRIGHT_MULTI = 255 / START_BRIGHT
SLEEP_TIME = 10
import os
import sys
import pigpio
import time
import datetime
import signal
from thread import start_new_thread
try:
alarmTime = datetime.datetime.strptime(sys.argv[1], '%H:%M')
alarmTime = alarmTime.replace(year=2000, month=1, day=1)
print(alarmTime)
except:
sys.stdout.write('Usage: %s HH:MM' % os.path.basename(sys.argv[0]))
print("")
quit()
bright = 0
oldbright = 0
abort = False
pi = pigpio.pi()
def setLights(pin, brightness):
realBrightness = int(int(brightness) * (float(bright) / 255.0))
pi.set_PWM_dutycycle(pin, realBrightness)
def fadeLights(pin, brightness):
# print("FADE IN")
newBrightness = brightness
currentBrightness = pi.get_PWM_dutycycle(pin)
if newBrightness < currentBrightness:
setLights(BRIGHT_PIN, brightness)
return
while currentBrightness < newBrightness:
currentBrightness = currentBrightness + 1
pi.set_PWM_dutycycle(pin, currentBrightness)
time.sleep(0.1)
# print("FADE OUT")
def sigterm_handler(_signo, _stack_frame):
setLights(BRIGHT_PIN, 0)
abort = True
sys.exit(0)
signal.signal(signal.SIGTERM, sigterm_handler)
signal.signal(signal.SIGINT, sigterm_handler)
def checkTime():
global bright
global oldbright
global abort
while True:
now = datetime.datetime.now()
now = now.replace(year=2000, month=1, day=1) # , hour=6, minute=25, second=0
d1_ts = time.mktime(now.timetuple())
d2_ts = time.mktime(alarmTime.timetuple())
minuteDiff = (d2_ts - d1_ts) / 60
percDiff = 0
if minuteDiff < START_BRIGHT and minuteDiff > END_BRIGHT:
if minuteDiff < 0:
bright = MAX_BRIGHT
else:
bright = (START_BRIGHT - minuteDiff) * BRIGHT_MULTI
percDiff = (minuteDiff / START_BRIGHT) * 100
if percDiff > 50:
bright = bright * 0.2
elif percDiff > 5:
bright = bright * 0.5
else:
bright = 0
bright = round(bright)
if bright > MAX_BRIGHT:
bright = MAX_BRIGHT
print("MINUTE: " + str(minuteDiff))
print("DIFF: " + str(percDiff))
print("BRIGHT: " + str(bright))
time.sleep(SLEEP_TIME)
start_new_thread(checkTime, ())
while abort == False:
if bright != oldbright:
oldbright = bright
if bright > 0:
fadeLights(BRIGHT_PIN, bright)
else:
setLights(BRIGHT_PIN, bright)
time.sleep(0.1)
setLights(BRIGHT_PIN, 0)
time.sleep(0.5)
pi.stop()
|
Python
| 0
|
@@ -299,19 +299,26 @@
MULTI =
-255
+MAX_BRIGHT
/ START
@@ -2060,17 +2060,16 @@
Diff %3E 5
-0
:%0A%09%09%09%09%09b
@@ -2101,33 +2101,33 @@
elif percDiff %3E
-5
+1
:%0A%09%09%09%09%09bright =
|
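Two tweaks here: the multiplier is derived from MAX_BRIGHT rather than a literal 255, so lowering the configured maximum flattens the ramp automatically, and the dimming thresholds drop from 50/5 percent to 5/1 percent. Worked numbers with the record's defaults, where the result is unchanged:

    MAX_BRIGHT = 255
    START_BRIGHT = 15
    BRIGHT_MULTI = MAX_BRIGHT / START_BRIGHT    # 17, same value the old literal gave

    minuteDiff = 5                              # five minutes before the alarm
    bright = (START_BRIGHT - minuteDiff) * BRIGHT_MULTI   # 10 * 17 = 170 of 255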
d425cf99e85367a47eb719bf1a94c9b344f61dec
|
add log trace when local node is not in address_list
|
supvisors/initializer.py
|
supvisors/initializer.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# ======================================================================
# Copyright 2016 Julien LE CLEACH
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ======================================================================
import sys
from supervisor import loggers, supervisord
from supervisor.supervisord import Supervisor
from supervisor.xmlrpc import Faults, RPCError
from .addressmapper import AddressMapper
from .commander import Starter, Stopper
from .context import Context
from .infosource import SupervisordSource
from .listener import SupervisorListener
from .options import *
from .sparser import Parser
from .statemachine import FiniteStateMachine
from .statscompiler import StatisticsCompiler
from .strategy import RunningFailureHandler
class Supvisors(object):
""" The Supvisors class used as a global structure passed to most Supvisors objects. """
# logger output (use ';' as separator as easier to cut)
LOGGER_FORMAT = '%(asctime)s;%(levelname)s;%(message)s\n'
def __init__(self, supervisor: Supervisor, **config) -> None:
""" Instantiation of all the Supvisors objects.
:param supervisor: the Supervisor global structure
"""
# declare zmq context (will be created in listener)
self.zmq = None
# get options from config
self.options = SupvisorsOptions(supervisor, **config)
# create logger
self.logger = self.create_logger(supervisor)
        # re-realize configuration to get the server options
self.server_options = SupvisorsServerOptions(self.logger)
self.server_options.realize(sys.argv[1:], doc=supervisord.__doc__)
# configure supervisor info source
self.info_source = SupervisordSource(supervisor, self.logger)
# set addresses and check local node
self.address_mapper = AddressMapper(self.logger)
self.address_mapper.node_names = self.options.node_list
if not self.address_mapper.local_node_name:
raise RPCError(Faults.SUPVISORS_CONF_ERROR,
'local node is expected in node list: {}'.format(self.options.node_list))
# create context data
self.context = Context(self)
# create application starter and stopper
self.starter = Starter(self)
self.stopper = Stopper(self)
# create statistics handler
self.statistician = StatisticsCompiler(self)
# create the failure handler of crashing processes
self.failure_handler = RunningFailureHandler(self)
# create state machine
self.fsm = FiniteStateMachine(self)
# check parsing
try:
self.parser = Parser(self)
except Exception as exc:
self.logger.warn('Supvisors: cannot parse rules files: {} - {}'.format(self.options.rules_files, exc))
self.parser = None
# create event subscriber
self.listener = SupervisorListener(self)
def create_logger(self, supervisor):
""" Create the logger that will be used in Supvisors.
If logfile is not set or set to AUTO, Supvisors will use Supervisor logger.
Else Supvisors will log in the file defined in option.
"""
if self.options.logfile is Automatic:
# use Supervisord logger but patch format anyway
logger = supervisor.options.logger
for handler in logger.handlers:
handler.setFormat(Supvisors.LOGGER_FORMAT)
return logger
# else create own Logger using Supervisor functions
nodaemon = supervisor.options.nodaemon
silent = supervisor.options.silent
logger = loggers.getLogger(self.options.loglevel)
# tag the logger so that it is properly closed when exiting
logger.SUPVISORS = True
if nodaemon and not silent:
loggers.handle_stdout(logger, Supvisors.LOGGER_FORMAT)
loggers.handle_file(logger,
self.options.logfile,
Supvisors.LOGGER_FORMAT,
rotating=not not self.options.logfile_maxbytes,
maxbytes=self.options.logfile_maxbytes,
backups=self.options.logfile_backups)
return logger
|
Python
| 0
|
@@ -2517,79 +2517,19 @@
-raise RPCError(Faults.SUPVISORS_CONF_ERROR,%0A
+message = f
'loc
@@ -2563,26 +2563,16 @@
list: %7B
-%7D'.format(
self.opt
@@ -2585,17 +2585,140 @@
ode_list
-)
+%7D'%0A self.logger.critical(f'Supvisors: %7Bmessage%7D')%0A raise RPCError(Faults.SUPVISORS_CONF_ERROR, message
)%0A
@@ -3321,16 +3321,17 @@
er.warn(
+f
'Supviso
@@ -3365,23 +3365,8 @@
s: %7B
-%7D - %7B%7D'.format(
self
@@ -3389,14 +3389,18 @@
iles
-, exc)
+%7D - %7Bexc%7D'
)%0A
@@ -3550,17 +3550,39 @@
pervisor
-)
+: Supervisor) -%3E Logger
:%0A
|
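The change logs the misconfiguration at critical level before raising, moving the message into an f-string so it can be reused in both places. The check as it reads after the diff:

    message = f'local node is expected in node list: {self.options.node_list}'
    self.logger.critical(f'Supvisors: {message}')
    raise RPCError(Faults.SUPVISORS_CONF_ERROR, message)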
35de4045bc30a1ee0e9aaa17f0b3f370ad95d6c8
|
Bump (#16)
|
swag_client/__about__.py
|
swag_client/__about__.py
|
from __future__ import absolute_import, division, print_function
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
]
__title__ = "swag-client"
__summary__ = ("Cloud multi-account metadata management tool.")
__uri__ = "https://github.com/Netflix-Skunkworks/swag-client"
__version__ = "0.2.3"
__author__ = "The swag developers"
__email__ = "oss@netflix.com"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2017 {0}".format(__author__)
|
Python
| 0
|
@@ -371,9 +371,9 @@
0.2.
-3
+5
%22%0A%0A_
|
2c2e5bc83bbca3cd3c7ce7649be657293978ddd8
|
append _tbl
|
webapp.py
|
webapp.py
|
import cherrypy
import os
import json
import getpass
from collections import OrderedDict
from configparser import ConfigParser
from builtins import input
import mysql.connector
#
# import sys
# import os.path
class DBConfig:
settings = {}
def __init__(self, config_file='..\db.cfg'):
if os.path.isfile(config_file):
config = ConfigParser()
config.readfp(open(config_file))
for section in config.sections():
self.settings[section] = {}
for option in config.options(section):
self.settings[section][option] = config.get(section, option)
def get_list(list_name, params):
print('get_list list:{} params:{}'.format(str(list_name), str(params)))
print(db_conf.settings['DB']['db_user'])
print(db_conf.settings['DB']['db_user'] + '$' + db_conf.settings['DB']['db_name'])
return_vals = OrderedDict()
cnx = mysql.connector.connect(user=db_conf.settings['DB']['db_user'],
password=db_conf.settings['DB']['db_pass'],
host=db_conf.settings['DB']['db_user']+'.mysql.pythonanywhere-services.com',
database=db_conf.settings['DB']['db_user'] + '$' + db_conf.settings['DB']['db_name'])
cursor = cnx.cursor()
query = ("SELECT * FROM " + list_name )
cursor.execute(query)
for (participant_name) in cursor:
print("{}".format(participant_name))
cursor.close()
cnx.close()
return_vals['test1'] = 'Hello World'
return return_vals
class HoppersWebService(object):
exposed = True
def GET(self,*args):
print('GET:/hoppers/'+str(args))
if not args:
args = [None]
return json.dumps(get_list(args[0],args[1:]))
def POST(self, **kwargs):
return 'POST:/hoppers/' + str(kwargs)
def PUT(self, **kwargs):
return 'PUT:/hoppers/' + str(kwargs)
def DELETE(self, **kwargs):
return 'DELETE:/hoppers/' + str(kwargs)
class ws():
def __init__(self):
db_conf = DBConfig()
if not 'DB' in db_conf.settings.keys():
db_conf.settings['DB'] = {}
if not 'db_name' in db_conf.settings['DB'].keys():
db_conf.settings['DB']['db_name'] = input('db_name:')
if not 'db_user' in db_conf.settings['DB'].keys():
db_conf.settings['DB']['db_user'] = input('db_user:')
if not 'db_pass' in db_conf.settings['DB'].keys():
db_conf.settings['DB']['db_pass'] = getpass.getpass('Password:')
print("name {}".format(db_conf.settings['DB']['db_name']))
print("user {}".format(db_conf.settings['DB']['db_user']))
cherrypy.tree.mount(
HoppersWebService(),
'/hoppers',
{
'/': {
'request.dispatch': cherrypy.dispatch.MethodDispatcher()
},
}, )
# cherrypy.tree.mount(
# Root(),
# '/',
# {
# '/': {
# 'tools.sessions.on': True,
# 'tools.staticdir.root': os.path.abspath(os.getcwd())
# }
# }
# )
cherrypy.config.update({'server.socket_port': 80})
cherrypy.engine.start()
cherrypy.engine.block()
if __name__ == '__main__':
foo = ws()
|
Python
| 0.999691
|
@@ -1365,16 +1365,25 @@
ist_name
+ + %22_tbl%22
)%0A%0A%0A
|
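Appending '_tbl' maps a URL segment such as /hoppers/participant onto a participant_tbl table. Since list_name arrives straight from the request path, interpolating it into SQL invites injection; a whitelist along these lines (the ALLOWED set is hypothetical, not part of the record) would be the usual guard:

    ALLOWED = {'participant', 'event'}   # hypothetical table whitelist
    if list_name not in ALLOWED:
        raise ValueError('unknown list: %s' % list_name)
    query = "SELECT * FROM " + list_name + "_tbl"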
67614fb784dca6166b112ddc60254ef5e493541d
|
Change 9/5 to 1.8
|
wfinal.py
|
wfinal.py
|
import RPi.GPIO as GPIO
import pywapi
import string
import time
channels = [4, 7, 8, 9, 10, 14, 15, 17, 18, 22, 23, 24, 25]
GPIO.setwarnings(True)
GPIO.setmode(GPIO.BCM)
GPIO.setup(channels, GPIO.OUT)
GPIO.output(channels, 0)
weather = pywapi.get_weather_from_weather_com('33020')
temperature = int(weather['current_conditions']['temperature'])
temp_f = temperature * (9/5)+32
humidity = int(weather['current_conditions']['humidity'])
cc = (weather['current_conditions']['text'].lower())
if humidity >= 80:
GPIO.output(7, 1)
if humidity <= 79 and humidity >= 60:
GPIO.output(18, 1)
if humidity <= 59:
GPIO.output(25, 1)
if temp_f >= 90:
GPIO.output(14, 1)
if temp_f <= 89 and temp_f >= 80:
GPIO.output(14, 1)
if temp_f <= 79 and temp_f >= 70:
GPIO.output(18, 1)
if temp_f <= 69 and temp_f >= 40:
GPIO.output(23, 1)
if temp_f <= 39:
GPIO.output(24, 1)
# map the current-conditions text onto indicator pins
if cc in ('drizzle', 'light drizzle', 'heavy drizzle'):
    GPIO.output(4, 1)
if cc in ('rain', 'light rain'):
    GPIO.output(4, 1)
if cc == 'heavy rain':
    GPIO.output(17, 1)
if cc in ('light rain mist', 'rain mist', 'heavy rain mist'):
    GPIO.output(4, 1)
if cc in ('rain shower', 'light rain showers'):
    GPIO.output(4, 1)
if cc == 'heavy rain shower':
    GPIO.output(17, 1)
if cc in ('light thunderstorm', 'heavy thunderstorm', 'thunderstorm'):
    GPIO.output(17, 1)
    GPIO.output(10, 1)
    GPIO.output(9, 1)
if cc in ('light freezing drizzle', 'heavy freezing drizzle', 'freezing drizzle'):
    GPIO.output(4, 1)
if cc in ('light freezing rain', 'heavy freezing rain', 'freezing rain'):
    GPIO.output(17, 1)
if cc in ('patches of fog', 'shallow fog', 'partial fog', 'light fog'):
    GPIO.output(22, 1)
if cc in ('fog', 'heavy fog', 'heavy fog patches', 'light fog patches'):
    GPIO.output(10, 1)
if cc == 'overcast':
    GPIO.output(10, 1)
if cc in ('partly cloudy', 'scattered clouds'):
    GPIO.output(22, 1)
GPIO.output(22, 1)
if cc == 'mostly cloudy':
GPIO.output(10, 1)
|
Python
| 0.999999
|
@@ -366,15 +366,13 @@
ture
- * (9/5
+*(1.8
)+32
|
5b02f334519964ffae6812df5413fcdae84db6ba
|
Undo changes to logger config, ie. remove the access_log_file option: decision is to support this through log_config rather tan adding an option.
|
synapse/config/logger.py
|
synapse/config/logger.py
|
# -*- coding: utf-8 -*-
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ._base import Config
from synapse.util.logcontext import LoggingContextFilter
from twisted.python.log import PythonLoggingObserver
import logging
import logging.config
import yaml
class LoggingConfig(Config):
def __init__(self, args):
super(LoggingConfig, self).__init__(args)
self.verbosity = int(args.verbose) if args.verbose else None
self.log_config = self.abspath(args.log_config)
self.log_file = self.abspath(args.log_file)
self.access_log_file = self.abspath(args.access_log_file)
@classmethod
def add_arguments(cls, parser):
super(LoggingConfig, cls).add_arguments(parser)
logging_group = parser.add_argument_group("logging")
logging_group.add_argument(
'-v', '--verbose', dest="verbose", action='count',
help="The verbosity level."
)
logging_group.add_argument(
'-f', '--log-file', dest="log_file", default="homeserver.log",
help="File to log to."
)
logging_group.add_argument(
'--log-config', dest="log_config", default=None,
help="Python logging config file"
)
logging_group.add_argument(
'--access-log-file', dest="access_log_file", default="access.log",
help="File to log server access to"
)
def setup_logging(self):
log_format = (
"%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s"
" - %(message)s"
)
if self.log_config is None:
level = logging.INFO
level_for_storage = logging.INFO
if self.verbosity:
level = logging.DEBUG
if self.verbosity > 1:
level_for_storage = logging.DEBUG
# FIXME: we need a logging.WARN for a -q quiet option
logger = logging.getLogger('')
logger.setLevel(level)
logging.getLogger('synapse.storage').setLevel(level_for_storage)
formatter = logging.Formatter(log_format)
if self.log_file:
# TODO: Customisable file size / backup count
handler = logging.handlers.RotatingFileHandler(
self.log_file, maxBytes=(1000 * 1000 * 100), backupCount=3
)
else:
handler = logging.StreamHandler()
handler.setFormatter(formatter)
handler.addFilter(LoggingContextFilter(request=""))
logger.addHandler(handler)
if self.access_log_file:
access_logger = logging.getLogger('synapse.access')
# we log to both files by default
access_logger.propagate = 1
access_log_handler = logging.handlers.RotatingFileHandler(
self.access_log_file, maxBytes=(1000 * 1000 * 100), backupCount=3
)
access_log_formatter = logging.Formatter('%(message)s')
access_log_handler.setFormatter(access_log_formatter)
access_logger.addHandler(access_log_handler)
else:
with open(self.log_config, 'r') as f:
logging.config.dictConfig(yaml.load(f))
observer = PythonLoggingObserver()
observer.start()
|
Python
| 0
|
@@ -1078,74 +1078,8 @@
ile)
-%0A self.access_log_file = self.abspath(args.access_log_file)
%0A%0A
@@ -1707,181 +1707,8 @@
)
-%0A logging_group.add_argument(%0A '--access-log-file', dest=%22access_log_file%22, default=%22access.log%22,%0A help=%22File to log server access to%22%0A )
%0A%0A
@@ -2858,24 +2858,24 @@
quest=%22%22))%0A%0A
+
@@ -2904,590 +2904,8 @@
ler)
-%0A%0A if self.access_log_file:%0A access_logger = logging.getLogger('synapse.access')%0A # we log to both files by default%0A access_logger.propagate = 1%0A access_log_handler = logging.handlers.RotatingFileHandler(%0A self.access_log_file, maxBytes=(1000 * 1000 * 100), backupCount=3%0A )%0A access_log_formatter = logging.Formatter('%25(message)s')%0A access_log_handler.setFormatter(access_log_formatter)%0A access_logger.addHandler(access_log_handler)
%0A
|
20198ee76393dba0c5bf3ee327b5ed902f4c7d61
|
new handlers for 'spiflash' and 'identifier_mem'
|
generate-zephyr-dts.py
|
generate-zephyr-dts.py
|
#!/usr/bin/env python3
# Copyright (c) 2019-2020 Antmicro <www.antmicro.com>
# Copyright (c) 2021 Henk Vergonet <henk.vergonet@gmail.com>
#
# Zephyr DTS & config overlay generator for LiteX SoC.
#
# This script parses LiteX 'csr.json' file and generates DTS and config
# files overlay for Zephyr.
# Changelog:
# - 2021-07-05 Henk Vergonet <henk.vergonet@gmail.com>
# removed dependency on intermediate interpretation layers
# switch to JSON csr
# fix uart size parameter
#
import argparse
import json
# DTS formatting
def dts_open(name, parm): return "&{} {{\n".format(parm.get('alias',name))
def dts_close(): return "};\n"
def dts_intr(name, csr): return " interrupts = <{} 0>;\n".format(
hex(csr['constants'][name+'_interrupt']))
def dts_reg(regs): return " reg = <{}>;\n".format(regs)
# DTS handlers
def disabled_handler(name, parm, csr):
return " status = \"disabled\";\n"
def ram_handler(name, parm, csr):
return dts_reg(" ".join([
hex(csr['memories'][name]['base']),
hex(csr['memories'][name]['size'])]))
def ethmac_handler(name, parm, csr):
dtsi = dts_reg(" ".join([
hex(csr['csr_bases'][name]),
hex(parm['size']),
hex(csr['memories'][name]['base']),
hex(csr['memories'][name]['size'])]))
dtsi += dts_intr(name, csr)
return dtsi
def i2c_handler(name, parm, csr):
dtsi = dts_reg(" ".join([
hex(csr['csr_bases'][name]),
hex(parm['size']),
hex(csr['csr_bases'][name] + parm['size']),
hex(parm['size'])]))
dtsi += dts_intr(name, csr)
return dtsi
def peripheral_handler(name, parm, csr):
dtsi = dts_reg(" ".join([
hex(csr['csr_bases'][name]),
hex(parm['size'])]))
try:
dtsi += dts_intr(name, csr)
except KeyError as e:
print(' dtsi key', e, 'not found, no interrupt override')
return dtsi
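# Map LiteX CSR/peripheral names to their DTS overlay handlers, optional aliases,
# register sizes and Kconfig entries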
overlay_handlers = {
'uart': {
'handler': peripheral_handler,
'alias': 'uart0',
'size': 0x20,
'config_entry': 'UART_LITEUART'
},
'timer0': {
'handler': peripheral_handler,
'size': 0x40,
'config_entry': 'LITEX_TIMER'
},
'ethmac': {
'handler': ethmac_handler,
'alias': 'eth0',
'size': 0x80,
'config_entry': 'ETH_LITEETH'
},
'i2c0' : {
'handler': i2c_handler,
'size': 0x4,
'config_entry': 'I2C_LITEX'
},
'main_ram': {
'handler': ram_handler,
'alias': 'ram0',
},
}
def generate_dts_config(csr):
dts = cnf = ''
for name, parm in overlay_handlers.items():
print('Generating overlay for:',name)
enable = 'y'
dtsi = dts_open(name, parm)
try:
dtsi += parm['handler'](name, parm, csr)
except KeyError as e:
print(' dtsi key', e, 'not found, disable', name)
enable = 'n'
dtsi += disabled_handler(name, parm, csr)
dtsi += dts_close()
dts += dtsi
if 'config_entry' in parm:
cnf += ' -DCONFIG_'+parm['config_entry']+'='+enable
for name, value in csr['csr_bases'].items():
if name not in overlay_handlers.keys():
print('No overlay handler for:', name, 'at', hex(value))
return dts, cnf
# helpers
def print_or_save(filepath, lines):
""" Prints given string on standard output or to the file.
Args:
filepath (string): path to the file lines should be written to
or '-' to write to a standard output
lines (string): content to be printed/written
"""
if filepath == '-':
print(lines)
else:
with open(filepath, 'w') as f:
f.write(lines)
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('conf_file',
help='JSON configuration generated by LiteX')
parser.add_argument('--dts', action='store', required=True,
help='Output DTS overlay file')
parser.add_argument('--config', action='store', required=True,
help='Output config overlay file')
return parser.parse_args()
def main():
args = parse_args()
with open(args.conf_file) as f:
csr = json.load(f)
dts, config = generate_dts_config(csr)
print_or_save(args.dts, dts)
print_or_save(args.config, config)
if __name__ == '__main__':
main()
|
Python
| 0.999999
|
@@ -452,34 +452,130 @@
r%0A#
- fix uart size parameter
+- 2021-07-15 Henk Vergonet %3Chenk.vergonet@gmail.com%3E%0A# added identifier_mem handler as dna0%0A# added spiflash as spi0
%0A#%0A%0A
@@ -2558,24 +2558,171 @@
ETH'%0A %7D,%0A
+ 'spiflash': %7B%0A 'handler': peripheral_handler,%0A 'alias': 'spi0',%0A 'size': 12,%0A 'config_entry': 'SPI_LITESPI'%0A %7D,%0A
'i2c0' :
@@ -2902,16 +2902,134 @@
%0A %7D,%0A
+ 'identifier_mem': %7B%0A 'handler': peripheral_handler,%0A 'alias': 'dna0',%0A 'size': 0x100,%0A %7D,%0A
%7D%0A%0Adef g
|
07f8f44fc5f69c71922bb3b85d621867d0df49fa
|
Support core logger as a property on the main scraper.
|
scrapekit/core.py
|
scrapekit/core.py
|
from scrapekit.config import Config
from scrapekit.tasks import TaskManager, Task
from scrapekit.http import make_session
class Scraper(object):
""" Scraper application object which handles resource management
for a variety of related functions. """
def __init__(self, name, config=None):
self.name = name
self.config = Config(self, config)
self._task_manager = None
@property
def task_manager(self):
if self._task_manager is None:
self._task_manager = \
TaskManager(threads=self.config.threads)
return self._task_manager
def task(self, fn):
""" Decorate a function as a task in the scraper framework.
This will enable the function to be queued and executed in
a separate thread, allowing for the execution of the scraper
to be asynchronous.
"""
return Task(self, fn)
def Session(self):
""" Create a pre-configured ``requests`` session instance
that can be used to run HTTP requests. This instance will
potentially be cached, or a stub, depending on the
configuration of the scraper. """
return make_session(self)
def head(self, url, **kwargs):
""" HTTP HEAD via ``requests``.
See: http://docs.python-requests.org/en/latest/api/#requests.head
"""
return self.Session().get(url, **kwargs)
def get(self, url, **kwargs):
""" HTTP GET via ``requests``.
See: http://docs.python-requests.org/en/latest/api/#requests.get
"""
return self.Session().get(url, **kwargs)
def post(self, url, **kwargs):
""" HTTP POST via ``requests``.
See: http://docs.python-requests.org/en/latest/api/#requests.post
"""
return self.Session().post(url, **kwargs)
def put(self, url, **kwargs):
""" HTTP PUT via ``requests``.
See: http://docs.python-requests.org/en/latest/api/#requests.put
"""
return self.Session().put(url, **kwargs)
def __repr__(self):
return '<Scraper(%s)>' % self.name
|
Python
| 0
|
@@ -1,20 +1,124 @@
+from uuid import uuid4%0Afrom time import time%0Afrom datetime import datetime%0Afrom threading import local%0A%0A
from scrapekit.confi
@@ -219,16 +219,55 @@
session%0A
+from scrapekit.logs import make_logger%0A
%0A%0Aclass
@@ -482,71 +482,314 @@
elf.
-config = Config(self, config)%0A self._task_manager = None
+id = uuid4()%0A self.start_time = datetime.utcnow()%0A self.config = Config(self, config)%0A self._task_manager = None%0A self.task_ctx = local()%0A self.log = make_logger(self)%0A self.log.info(%22Starting %25s, %25d threads.%22, self.name,%0A self.config.threads)
%0A%0A
|
6edd4114c4e715a3a0c440af455fff089a099620
|
Clarify comment about Python versions
|
scrapy/squeues.py
|
scrapy/squeues.py
|
"""
Scheduler queues
"""
import marshal
from six.moves import cPickle as pickle
from queuelib import queue
def _serializable_queue(queue_class, serialize, deserialize):
class SerializableQueue(queue_class):
def push(self, obj):
s = serialize(obj)
super(SerializableQueue, self).push(s)
def pop(self):
s = super(SerializableQueue, self).pop()
if s:
return deserialize(s)
return SerializableQueue
def _pickle_serialize(obj):
try:
return pickle.dumps(obj, protocol=2)
# Python<=3.4 raises pickle.PicklingError here while
# Python>=3.5 raises AttributeError and
# Python>=3.6 raises TypeError
except (pickle.PicklingError, AttributeError, TypeError) as e:
raise ValueError(str(e))
PickleFifoDiskQueue = _serializable_queue(queue.FifoDiskQueue, \
_pickle_serialize, pickle.loads)
PickleLifoDiskQueue = _serializable_queue(queue.LifoDiskQueue, \
_pickle_serialize, pickle.loads)
MarshalFifoDiskQueue = _serializable_queue(queue.FifoDiskQueue, \
marshal.dumps, marshal.loads)
MarshalLifoDiskQueue = _serializable_queue(queue.LifoDiskQueue, \
marshal.dumps, marshal.loads)
FifoMemoryQueue = queue.FifoMemoryQueue
LifoMemoryQueue = queue.LifoMemoryQueue
|
Python
| 0.000001
|
@@ -582,10 +582,12 @@
thon
+
%3C=
+
3.4
@@ -634,20 +634,28 @@
#
+ 3.5 %3C=
Python
-%3E=3.5
+ %3C 3.6
rai
@@ -693,10 +693,12 @@
thon
+
%3E=
+
3.6
|
d6cdf99d87b23cd6bfd8fd7079919d89d6496501
|
Complete incomplete sentence
|
partner_identification/models/res_partner_id_category.py
|
partner_identification/models/res_partner_id_category.py
|
# -*- coding: utf-8 -*-
#
# © 2004-2010 Tiny SPRL http://tiny.be
# © 2010-2012 ChriCar Beteiligungs- und Beratungs- GmbH
# http://www.camptocamp.at
# © 2015 Antiun Ingenieria, SL (Madrid, Spain)
# http://www.antiun.com
# Antonio Espinosa <antonioea@antiun.com>
# © 2016 ACSONE SA/NV (<http://acsone.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import api, models, fields
from openerp.exceptions import ValidationError, UserError
from openerp.tools.safe_eval import safe_eval
from openerp.tools.translate import _
class ResPartnerIdCategory(models.Model):
_name = "res.partner.id_category"
_order = "name"
code = fields.Char(string="Code", size=16, required=True)
name = fields.Char(string="ID name", required=True, translate=True)
active = fields.Boolean(string="Active", default=True)
validation_code = fields.Text(
'Python validation code',
help="Python code called to validate an id number.",
default="""
# Python code. Use failed = True to .
# You can use the following variables :
# - self: browse_record of the current ID Category browse_record
# - id_number: browse_record of ID number to validate
"""
)
@api.multi
def _validation_eval_context(self, id_number):
self.ensure_one()
return {'self': self,
'id_number': id_number,
}
@api.multi
def validate_id_number(self, id_number):
"""Validate the given ID number
The method raises an openerp.exceptions.ValidationError if the eval of
python validation code fails
"""
self.ensure_one()
eval_context = self._validation_eval_context(id_number)
try:
safe_eval(self.validation_code,
eval_context,
mode='exec',
nocopy=True)
except Exception as e:
raise UserError(
_('Error when evaluating the id_category validation code:'
':\n %s \n(%s)') % (self.name, e))
if eval_context.get('failed', False):
raise ValidationError(
_("%s is not a valid %s identifier") % (
id_number.name, self.name))
|
Python
| 0.998694
|
@@ -1059,16 +1059,55 @@
True to
+specify that the id number is not valid
.%0A# You
|
573d3a7411a1653f64b901077264ecb98c1f9673
|
Use subprocess.check_call to replace os.system
|
script/version.py
|
script/version.py
|
import importlib
import os
import sys
here = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
def get_version() -> str:
"""
Return version.
"""
sys.path.insert(0, here)
return importlib.import_module("a2wsgi").__version__
os.chdir(here)
os.system(f"poetry version {get_version()}")
os.system("git add a2wsgi/* pyproject.toml")
os.system(f'git commit -m "v{get_version()}"')
os.system("git push")
os.system("git tag v{0}".format(get_version()))
os.system("git push --tags")
|
Python
| 0.000003
|
@@ -30,16 +30,34 @@
port sys
+%0Aimport subprocess
%0A%0Ahere =
@@ -284,25 +284,37 @@
r(here)%0A
-os.system
+subprocess.check_call
(f%22poetr
@@ -339,27 +339,51 @@
sion()%7D%22
-)%0Aos.system
+, shell=True)%0Asubprocess.check_call
(%22git ad
@@ -395,9 +395,19 @@
sgi/
-*
+__init__.py
pyp
@@ -418,27 +418,51 @@
ct.toml%22
-)%0Aos.system
+, shell=True)%0Asubprocess.check_call
(f'git c
@@ -493,41 +493,89 @@
)%7D%22'
-)%0Aos.system(%22git push%22)%0Aos.system
+, shell=True)%0Asubprocess.check_call(%22git push%22, shell=True)%0Asubprocess.check_call
(%22gi
@@ -611,19 +611,43 @@
n())
-)%0Aos.system
+, shell=True)%0Asubprocess.check_call
(%22gi
@@ -660,10 +660,22 @@
--tags%22
+, shell=True
)%0A
|
cbdee53bc2239277d314b93d09368ab2462ab8d6
|
Allow variety of input types
|
geojsonio/geojsonio.py
|
geojsonio/geojsonio.py
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-
import argparse
import sys
import urllib
import webbrowser
import github3
MAX_URL_LEN = 150e3 # Size threshold above which a gist is created
def display(contents, domain='http://geojson.io/'):
url = geojsonio_url(contents, domain)
webbrowser.open(url)
return url
# display() used to be called to_geojsonio. Keep it around for now...
to_geojsonio = display
def geojsonio_url(contents, domain='http://geojson.io/'):
"""
Returns the URL to open given the domain and contents
If the contents are large, then a gist will be created.
"""
if len(contents) <= MAX_URL_LEN:
url = _data_url(domain, contents)
else:
gist = _create_gist(contents)
url = _gist_url(domain, gist.id)
return url
def _create_gist(contents, description='', filename='data.geojson'):
"""
Create and return an anonymous gist with a single file and specified
contents
"""
ghapi = github3.GitHub()
files = {filename: {'content': contents}}
gist = ghapi.create_gist(description, files)
return gist
def _data_url(domain, contents):
url = (domain + '#data=data:application/json,' +
urllib.quote(contents))
return url
def _gist_url(domain, gist_id):
url = (domain + '#id=gist:/{}'.format(gist_id))
return url
def main():
parser = argparse.ArgumentParser(
description='Quickly visualize GeoJSON data on geojson.io')
parser.add_argument('-p', '--print',
dest='do_print',
action='store_true',
help='print the URL')
parser.add_argument('-d', '--domain',
dest='domain',
default='http://geojson.io',
help='Alternate URL instead of http://geojson.io/')
parser.add_argument('filename',
nargs='?',
type=argparse.FileType('r'),
default=sys.stdin,
help="The file to send to geojson.io")
args = parser.parse_args()
contents = args.filename.read()
url = geojsonio_url(contents, args.domain)
if args.do_print:
print(url)
else:
webbrowser.open(url)
if __name__ == '__main__':
main()
|
Python
| 0.000016
|
@@ -55,16 +55,28 @@
rgparse%0A
+import json%0A
import s
@@ -78,16 +78,16 @@
ort sys%0A
-
import u
@@ -545,24 +545,484 @@
d contents%0A%0A
+ The input contents may be:%0A * string - assumed to be GeoJSON%0A * an object that implements __geo_interface__%0A A FeatureCollection will be constructed with one feature,%0A the object.%0A * a sequence of objects that each implement __geo_interface__%0A A FeatureCollection will be constructed with the objects%0A as the features%0A%0A For more information about __geo_interface__ see:%0A https://gist.github.com/sgillies/2217756%0A%0A
If the c
@@ -1074,24 +1074,65 @@
d.%0A%0A %22%22%22%0A
+ contents = _parse_contents(contents)%0A
if len(c
@@ -1288,16 +1288,16 @@
st.id)%0A%0A
-
retu
@@ -1300,24 +1300,1382 @@
return url%0A%0A
+def _parse_contents(contents):%0A %22%22%22%0A Return a GeoJSON string from a variety of inputs.%0A See the documentation for geojsonio_url for the possible contents%0A input.%0A%0A Returns%0A -------%0A GeoJSON string%0A%0A %22%22%22%0A if isinstance(contents, basestring):%0A return contents%0A%0A if hasattr(contents, '__geo_interface__'):%0A features = %5B_geo_to_feature(contents)%5D%0A else:%0A try:%0A feature_iter = iter(contents)%0A except TypeError, e:%0A raise ValueError('Unknown type for input')%0A%0A features = %5B%5D%0A for i, f in enumerate(feature_iter):%0A if not hasattr(f, '__geo_interface__'):%0A raise ValueError('Unknown type at index %7B%7D'.format(i))%0A features.append(_geo_to_feature(f))%0A%0A data= %7B'type': 'FeatureCollection', 'features': features%7D%0A return json.dumps(data)%0A%0Adef _geo_to_feature(ob):%0A %22%22%22%0A Return a GeoJSON Feature from an object that implements%0A __geo_interface__%0A%0A If the object's type is a geometry, return a Feature with empty%0A properties and the object's mapping as the feature geometry. If the%0A object's type is a Feature, then return it.%0A%0A %22%22%22%0A mapping = ob.__geo_interface__%0A if mapping%5B'type'%5D == 'Feature':%0A return mapping%0A else:%0A return %7B'type': 'Feature',%0A 'geometry': mapping%7D%0A%0A
def _create_
|
61d07e1ef8b01f80111f24efbbdf9fa02010daff
|
Handle missing package "imbox" with error message
|
intelmq/bots/collectors/mail/collector_mail_url.py
|
intelmq/bots/collectors/mail/collector_mail_url.py
|
# -*- coding: utf-8 -*-
import re
import sys
import imbox
import requests
from intelmq.lib.bot import Bot
from intelmq.lib.message import Report
class MailURLCollectorBot(Bot):
def process(self):
mailbox = imbox.Imbox(self.parameters.mail_host,
self.parameters.mail_user,
self.parameters.mail_password,
self.parameters.mail_ssl)
emails = mailbox.messages(folder=self.parameters.folder, unread=True)
if emails:
for uid, message in emails:
if (self.parameters.subject_regex and
not re.search(self.parameters.subject_regex,
message.subject)):
continue
self.logger.info("Reading email report")
for body in message.body['plain']:
match = re.search(self.parameters.url_regex, body)
if match:
url = match.group()
# Build request
self.http_header = getattr(self.parameters,
'http_header', {})
self.http_verify_cert = getattr(self.parameters,
'http_verify_cert', True)
if hasattr(self.parameters, 'http_user') and hasattr(
self.parameters, 'http_password'):
self.auth = (self.parameters.http_user,
self.parameters.http_password)
else:
self.auth = None
http_proxy = getattr(self.parameters, 'http_proxy', None)
https_proxy = getattr(self.parameters,
'http_ssl_proxy', None)
if http_proxy and https_proxy:
self.proxy = {'http': http_proxy, 'https': https_proxy}
else:
self.proxy = None
self.http_header['User-agent'] = self.parameters.http_user_agent
self.logger.info("Downloading report from %s" % url)
resp = requests.get(url=url,
auth=self.auth, proxies=self.proxy,
headers=self.http_header,
verify=self.http_verify_cert)
if resp.status_code // 100 != 2:
raise ValueError('HTTP response status code was {}.'
''.format(resp.status_code))
self.logger.info("Report downloaded.")
report = Report()
report.add("raw", resp.content)
report.add("feed.name",
self.parameters.feed)
report.add("feed.accuracy", self.parameters.accuracy)
self.send_message(report)
mailbox.mark_seen(uid)
self.logger.info("Email report read")
if __name__ == "__main__":
bot = MailURLCollectorBot(sys.argv[1])
bot.start()
|
Python
| 0.000003
|
@@ -40,16 +40,25 @@
rt sys%0A%0A
+try:%0A
import i
@@ -62,16 +62,53 @@
t imbox%0A
+except ImportError:%0A imbox = None%0A
import r
@@ -189,16 +189,16 @@
eport%0A%0A%0A
-
class Ma
@@ -222,16 +222,163 @@
(Bot):%0A%0A
+ def init(self):%0A if imbox is None:%0A self.logger.error('Could not import imbox. Please install it.')%0A self.stop()%0A%0A
def
|
a4feb3abb75e9fd686546e877290c191961601e1
|
Update trns_validate_Genbank_Genome.py
|
plugins/scripts/validate/trns_validate_Genbank_Genome.py
|
plugins/scripts/validate/trns_validate_Genbank_Genome.py
|
#!/usr/bin/env python
# standard library imports
import sys
import os
import argparse
import logging
import subprocess
# 3rd party imports
# None
# KBase imports
import biokbase.Transform.script_utils as script_utils
def transform(input_file=None,
level=logging.INFO, logger=None):
"""
Validate Genbank file.
Args:
input_directory: An genbank input file
Returns:
Any validation errors or success.
Authors:
Shinjae Yoo, Matt Henderson, Marcin Joachimiak
"""
if logger is None:
logger = script_utils.stderrlogger(__file__)
logger.info("Starting Genbank validation")
token = os.environ.get("KB_AUTH_TOKEN")
classpath = "/kb/dev_container/modules/transform/lib/jars/kbase/transform/GenBankTransform.jar:$KB_TOP/lib/jars/kbase/genomes/kbase-genomes-20140411.jar:$KB_TOP/lib/jars/kbase/common/kbase-common-0.0.6.jar:$KB_TOP/lib/jars/jackson/jackson-annotations-2.2.3.jar:$KB_TOP/lib/jars/jackson/jackson-core-2.2.3.jar:$KB_TOP/lib/jars/jackson/jackson-databind-2.2.3.jar:$KB_TOP/lib/jars/kbase/transform/GenBankTransform.jar:$KB_TOP/lib/jars/kbase/auth/kbase-auth-1398468950-3552bb2.jar:$KB_TOP/lib/jars/kbase/workspace/WorkspaceClient-0.2.0.jar"
mc = 'us.kbase.genbank.ValidateGBK'
java_classpath = os.path.join(os.environ.get("KB_TOP"), classpath.replace('$KB_TOP', os.environ.get("KB_TOP")))
argslist = "{0}".format("--input_file {0}".format(input_file))
arguments = ["java", "-classpath", java_classpath, "us.kbase.genbank.ConvertGBK", argslist]
print arguments
tool_process = subprocess.Popen(arguments, stderr=subprocess.PIPE)
stdout, stderr = tool_process.communicate()
if len(stderr) > 0:
logger.error("Validation of Genbank.Genome failed on {0}".format(input_file))
sys.exit(1)
else:
logger.info("Validation of Genbank.Genome completed.")
sys.exit(0)
if __name__ == "__main__":
script_details = script_utils.parse_docs(transform.__doc__)
parser = argparse.ArgumentParser(prog=__file__,
description=script_details["Description"],
epilog=script_details["Authors"])
parser.add_argument("--input_file",
help=script_details["Args"]["input_file"],
action="store",
type=str,
nargs='?',
required=True)
args, unknown = parser.parse_known_args()
logger = script_utils.stderrlogger(__file__)
try:
transform(input_file=args.input_file,
logger = logger)
except Exception, e:
logger.exception(e)
sys.exit(1)
sys.exit(0)
|
Python
| 0.000001
|
@@ -789,32 +789,31 @@
ansform/
-GenBankT
+kbase_t
ransform
.jar:$KB
@@ -796,36 +796,37 @@
/kbase_transform
-.jar
+_deps
:$KB_TOP/lib/jar
@@ -1113,24 +1113,23 @@
orm/
-GenBankT
+kbase_t
ransform
.jar
@@ -1124,16 +1124,21 @@
ransform
+_deps
.jar:$KB
|
26355e29197659e6ca080eefa3871015167afa16
|
Update deployment script.
|
scripts/deploy.py
|
scripts/deploy.py
|
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Deployment script for Oppia.
USE THIS SCRIPT AT YOUR OWN RISK! A safe option is to modify app.yaml manually
and run the 'appcfg.py update' command.
This script performs a deployment of Oppia to a Google App Engine appspot
instance. It creates a build with unnecessary files removed, which is saved
in ../deployment_history. It then pushes this build to the production server.
IMPORTANT NOTES:
1. You will need to first create a folder called ../deploy_data/[APP_NAME],
where [APP_NAME] is the name of your app as defined in app.yaml. This
folder should contain a folder called /images, which in turn should contain:
- two files: favicon.ico and logo.png
   - two folders: /splash and /sidebar, containing images used for the gallery
carousel and sidebar, respectively.
2. Before running this script, you must install third-party dependencies by
running
bash scripts/start.sh
at least once.
3. This script should be run from the oppia root folder:
python scripts/deploy.py --app_name=[APP_NAME]
where [APP_NAME] is the name of your app. Note that the root folder MUST be
named 'oppia'.
"""
import argparse
import datetime
import os
import shutil
import subprocess
import common
_PARSER = argparse.ArgumentParser()
_PARSER.add_argument(
'--app_name', help='name of the app to deploy to', type=str)
parsed_args = _PARSER.parse_args()
if parsed_args.app_name:
APP_NAME = parsed_args.app_name
else:
raise Exception('No app name specified.')
CURRENT_DATETIME = datetime.datetime.utcnow()
RELEASE_DIR_NAME = '%s-deploy-%s' % (
APP_NAME, CURRENT_DATETIME.strftime('%Y%m%d-%H%M%S'))
RELEASE_DIR_PATH = os.path.join(os.getcwd(), '..', RELEASE_DIR_NAME)
APPCFG_PATH = os.path.join(
'..', 'oppia_tools', 'google_appengine_1.9.11', 'google_appengine',
'appcfg.py')
LOG_FILE_PATH = os.path.join('..', 'deploy.log')
THIRD_PARTY_DIR = os.path.join('.', 'third_party')
def preprocess_release():
"""Pre-processes release files.
This function should be called from within RELEASE_DIR_NAME. Currently it
does the following:
(1) Changes the app name in app.yaml to APP_NAME.
(2) Substitutes image files for the splash page.
"""
# Change the app name in app.yaml.
f = open('app.yaml', 'r')
content = f.read()
os.remove('app.yaml')
content = content.replace('oppiaserver', APP_NAME)
d = open('app.yaml', 'w+')
d.write(content)
# Substitute image files for the splash page.
SPLASH_PAGE_FILES = ['favicon.ico', 'logo.png']
DEPLOY_DATA_PATH = os.path.join(
os.getcwd(), '..', 'deploy_data', APP_NAME)
if not os.path.exists(DEPLOY_DATA_PATH):
raise Exception(
'Could not find deploy_data directory at %s' % DEPLOY_DATA_PATH)
for filename in SPLASH_PAGE_FILES:
src = os.path.join(DEPLOY_DATA_PATH, 'images', filename)
dst = os.path.join(os.getcwd(), 'static', 'images', filename)
if not os.path.exists(src):
raise Exception(
'Could not find source path %s. Please check your deploy_data '
'folder.' % src)
if not os.path.exists(dst):
raise Exception(
'Could not find destination path %s. Has the code been '
'updated in the meantime?' % dst)
shutil.copyfile(src, dst)
IMAGE_DIRS = ['splash', 'sidebar']
for dir_name in IMAGE_DIRS:
src_dir = os.path.join(DEPLOY_DATA_PATH, 'images', dir_name)
dst_dir = os.path.join(os.getcwd(), 'static', 'images', dir_name)
if not os.path.exists(src_dir):
raise Exception(
'Could not find source dir %s. Please check your deploy_data '
'folder.' % src_dir)
shutil.copytree(src_dir, dst_dir)
# Check that the current directory is correct.
common.require_cwd_to_be_oppia()
CURRENT_GIT_VERSION = subprocess.check_output(
['git', 'rev-parse', 'HEAD']).strip()
print ''
print 'Starting deployment process.'
if not os.path.exists(THIRD_PARTY_DIR):
raise Exception(
'Could not find third_party directory at %s. Please run start.sh '
'prior to running this script.' % THIRD_PARTY_DIR)
# Create a folder in which to save the release candidate.
print 'Ensuring that the release directory parent exists'
common.ensure_directory_exists(os.path.dirname(RELEASE_DIR_PATH))
# Copy files to the release directory. Omits the .git subfolder.
print 'Copying files to the release directory'
shutil.copytree(
os.getcwd(), RELEASE_DIR_PATH, ignore=shutil.ignore_patterns('.git'))
# Change the current directory to the release candidate folder.
with common.CD(RELEASE_DIR_PATH):
if not os.getcwd().endswith(RELEASE_DIR_NAME):
raise Exception(
'Invalid directory accessed during deployment: %s' % os.getcwd())
print 'Changing directory to %s' % os.getcwd()
print 'Preprocessing release...'
preprocess_release()
# Do a build; ensure there are no errors.
print 'Building and minifying scripts...'
subprocess.check_output(['python', 'scripts/build.py'])
# Run the tests; ensure there are no errors.
print 'Running tests...'
test_output = subprocess.check_output([
'python', 'scripts/backend_tests.py'])
if 'All tests passed.' not in test_output:
raise Exception('Tests failed. Halting deployment.\n%s' % test_output)
# Deploy to GAE.
subprocess.check_output([APPCFG_PATH, 'update', '.', '--oauth2'])
# Writing log entry.
common.ensure_directory_exists(os.path.dirname(LOG_FILE_PATH))
with open(LOG_FILE_PATH, 'a') as log_file:
log_file.write('Successfully deployed to %s at %s (version %s)\n' % (
APP_NAME, CURRENT_DATETIME.strftime('%Y-%m-%d %H:%M:%S'),
CURRENT_GIT_VERSION))
print 'Returning to oppia/ root directory.'
print 'Done!'
|
Python
| 0
|
@@ -4181,32 +4181,33 @@
ges', dir_name)%0A
+%0A
if not o
@@ -4387,40 +4387,230 @@
-shutil.copytree(src_dir, dst_dir
+common.ensure_directory_exists(dst_dir)%0A%0A for filename in os.listdir(src_dir):%0A src = os.path.join(src_dir, filename)%0A dst = os.path.join(dst_dir, filename)%0A shutil.copyfile(src, dst
)%0A%0A%0A
|
714815fd943207089c1805e01c4f476ddf7c6917
|
Add support for HTML reports
|
jasperserver/core/ExportExecutionRequestBuilder.py
|
jasperserver/core/ExportExecutionRequestBuilder.py
|
from time import sleep
from resources_mime_type import ResourceFilesMimeType as rmt
import json
class ExportExecutionRequestBuilder(object):
def __init__(self, ReportExecutionRequestBuilder,js_connect, requestId, exportId):
self.rerb = ReportExecutionRequestBuilder
self.requestId = requestId
self.exportId = exportId
self._connect = js_connect
self.result = {}
#self.opresult = opresult
path = "/reportExecutions"
self.url = self._connect._rest_url + '_v2' + path
#for each id. Get one file each time
def outputResource(self):
#exports = self.opresult.get('exports', [])
path = "/%s/exports/%s/outputResource" % (self.requestId, self.exportId)
content = None
while True:
response = self._connect.get(self.url + path).response
print "output-final {}".format(response.headers['output-final'])
if response.headers['output-final'] == "true":
break
#result = response.content
self.result["content"] = response.content
return self
def attachment(self, attachmentId):
if not attachmentId or attachmentId == "/":
raise Exception("attachmentId mustn't be an empty string!")
path = "/%s/exports/%s/attachments/%s" % (self.requestId, self.exportId, attachmentId)
while True:
result = self.rerb.status().content
status = json.loads(result)
if status.get('value') == "ready":
break
sleep(1)
print "before pedido attachment {} path {} conn {}".format(attachmentId, self.url + path, self._connect)
response = self._connect.get(self.url + path)
#print "path attach 5 {} content {}".format(self.url + path, response.content)
return response
def status(self):
path = "/%s/exports/%s/status" % (self.requestId, self.exportId)
#setHeader('Content-type',rmt.JSON,self._connect)
setHeader('accept','application/json',self._connect)
return self._connect.get(self.url + path)
def getResult(self, rtype=None):
return self.result.get(rtype.lower()) if rtype else self.result
'''def outputResource(self):
result = []
exports = self.opresult.get('exports', [])
for file in exports:
path = "/%s/exports/%s/outputResource" % (self.requestId, file.get("id"))
content = None
while True:
response = self._connect.get(self.url + path).response
print "output-final {}".format(response.headers['output-final'])
if response.headers['output-final'] == "true":
content = response.content
break
result.append(content)
return result'''
|
Python
| 0
|
@@ -616,20 +616,16 @@
-
#exports
@@ -660,24 +660,39 @@
s', %5B%5D)%0A
+ limit = 10%0A
path
@@ -756,28 +756,24 @@
f.exportId)%0A
-
cont
@@ -791,36 +791,33 @@
-
while
-True:%0A
+limit %3E 0:%0A
@@ -879,36 +879,32 @@
nse%0A
-
-
print %22output-fi
@@ -956,36 +956,32 @@
'%5D)%0A
-
-
if response.head
@@ -1019,36 +1019,32 @@
-
break%0A %0A
@@ -1033,37 +1033,59 @@
break%0A
+
+ limit = limit - 1
%0A
+%0A
#result
@@ -1095,36 +1095,32 @@
esponse.content%0A
-
self.res
@@ -1153,33 +1153,25 @@
content%0A
- %0A
+%0A
retu
@@ -1351,36 +1351,42 @@
ring!%22)%0A
-
+limit = 10
%0A path =
@@ -1471,36 +1471,41 @@
)%0A while
-True
+limit %3E 0
:%0A re
@@ -1639,32 +1639,32 @@
break%0A
-
slee
@@ -1668,16 +1668,46 @@
leep(1)%0A
+ limit = limit - 1%0A
|
487f7a2235e8541670fc0e9949dd3c0fb80eb932
|
fix formatting
|
projects/dendrites/permutedMNIST/experiments/__init__.py
|
projects/dendrites/permutedMNIST/experiments/__init__.py
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2021, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
from .base import CONFIGS as BASE
from .batch import CONFIGS as BATCH
from .batch_mnist import CONFIGS as BATCH_MNIST
from .centroid import CONFIGS as CENTROID
from .hyperparameter_search import CONFIGS as HYPERPARAMETERSEARCH
from .no_dendrites import CONFIGS as NO_DENDRITES
from .si_centroid import CONFIGS as SI_CENTROID
from .sp_context import CONFIGS as SP_CONTEXT
from .sp_context_search import CONFIGS as SP_PROTO
"""
Import and collect all experiment configurations into one CONFIGS dict
"""
__all__ = ["CONFIGS"]
# Collect all configurations
CONFIGS = dict()
CONFIGS.update(BASE)
CONFIGS.update(CENTROID)
CONFIGS.update(HYPERPARAMETERSEARCH)
CONFIGS.update(BATCH)
CONFIGS.update(BATCH_MNIST)
CONFIGS.update(CENTROID)
CONFIGS.update(NO_DENDRITES)
CONFIGS.update(SI_CENTROID)
|
Python
| 0.001459
|
@@ -1567,32 +1567,29 @@
FIGS.update(
-CENTROID
+BATCH
)%0ACONFIGS.up
@@ -1597,50 +1597,19 @@
ate(
-HYPERPARAMETERSEARCH)%0ACONFIGS.update(BATCH
+BATCH_MNIST
)%0ACO
@@ -1617,35 +1617,32 @@
FIGS.update(
-BATCH_MNIST
+CENTROID
)%0ACONFIGS.up
@@ -1642,32 +1642,44 @@
FIGS.update(
-CENTROID
+HYPERPARAMETERSEARCH
)%0ACONFIGS.up
|
8bdc316a6864e364a4d7a54d2d2da69c6855ca7b
|
Switch cql_major_version from 2 to 3
|
cql/connection.py
|
cql/connection.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from cql.apivalues import ProgrammingError, NotSupportedError
class Connection(object):
cql_major_version = 2
def __init__(self, host, port, keyspace, user=None, password=None, cql_version=None,
compression=None, consistency_level="ONE", transport=None):
"""
Params:
* host ...............: hostname of Cassandra node.
* port ...............: port number to connect to.
* keyspace ...........: keyspace to connect to.
* user ...............: username used in authentication (optional).
* password ...........: password used in authentication (optional).
* cql_version.........: CQL version to use (optional).
* compression.........: whether to use compression. For Thrift connections,
* this can be None or the name of some supported
* compression type (like "GZIP"). For native
* connections, this is treated as a boolean, and if
* true, the connection will try to find a type of
* compression supported by both sides.
* consistency_level ..: consistency level to use for CQL3 queries (optional);
* "ONE" is the default CL, other supported values are:
* "ANY", "TWO", "THREE", "QUORUM", "LOCAL_QUORUM",
* "EACH_QUORUM" and "ALL";
* overridable on per-query basis.
* transport...........: Thrift transport to use (optional);
* not applicable to NativeConnection.
"""
self.host = host
self.port = port
self.keyspace = keyspace
self.cql_version = cql_version
self.compression = compression
self.consistency_level = consistency_level
self.transport = transport
self.open_socket = False
self.credentials = None
if user or password:
self.credentials = {"username": user, "password": password}
self.establish_connection()
self.open_socket = True
if self.keyspace:
self.set_initial_keyspace(self.keyspace)
def __str__(self):
return ("%s(host=%r, port=%r, keyspace=%r, %s)"
% (self.__class__.__name__, self.host, self.port, self.keyspace,
self.open_socket and 'conn open' or 'conn closed'))
def keyspace_changed(self, keyspace):
self.keyspace = keyspace
###
# Connection API
###
def close(self):
if not self.open_socket:
return
self.terminate_connection()
self.open_socket = False
def commit(self):
"""
'Database modules that do not support transactions should
implement this method with void functionality.'
"""
return
def rollback(self):
raise NotSupportedError("Rollback functionality not present in Cassandra.")
def cursor(self):
if not self.open_socket:
raise ProgrammingError("Connection has been closed.")
curs = self.cursorclass(self)
curs.compression = self.compression
curs.consistency_level = self.consistency_level
return curs
# TODO: Pull connections out of a pool instead.
def connect(host, port=None, keyspace=None, user=None, password=None,
cql_version=None, native=False, compression=None,
consistency_level="ONE", transport=None):
"""
Create a connection to a Cassandra node.
@param host Hostname of Cassandra node.
@param port Port number to connect to (default 9160 for thrift, 8000
for native)
@param keyspace If set, authenticate to this keyspace on connection.
@param user If set, use this username in authentication.
@param password If set, use this password in authentication.
@param cql_version If set, try to use the given CQL version. If unset,
uses the default for the connection.
@param compression Whether to use compression. For Thrift connections,
this can be None or the name of some supported compression
type (like "GZIP"). For native connections, this is treated
as a boolean, and if true, the connection will try to find
a type of compression supported by both sides.
@param consistency_level Consistency level to use for CQL3 queries (optional);
"ONE" is the default CL, other supported values are:
"ANY", "TWO", "THREE", "QUORUM", "LOCAL_QUORUM",
"EACH_QUORUM" and "ALL"; overridable on per-query basis.
@param transport If set, use this Thrift transport instead of creating one;
doesn't apply to native connections.
@returns a Connection instance of the appropriate subclass.
"""
if native:
from native import NativeConnection
connclass = NativeConnection
if port is None:
port = 8000
else:
from thrifteries import ThriftConnection
connclass = ThriftConnection
if port is None:
port = 9160
return connclass(host, port, keyspace, user, password,
cql_version=cql_version, compression=compression,
consistency_level=consistency_level, transport=transport)
|
Python
| 0
|
@@ -891,17 +891,17 @@
rsion =
-2
+3
%0A%0A de
|
57ac1c43181d3bd1f5a18a1ed3137c1b997e2533
|
Fix lint error
|
keras_cv/layers/regularization/stochastic_depth.py
|
keras_cv/layers/regularization/stochastic_depth.py
|
# Copyright 2022 The KerasCV Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tensorflow as tf
@tf.keras.utils.register_keras_serializable(package="keras_cv")
class StochasticDepth(tf.keras.layers.Layer):
"""
Implements the Stochastic Depth layer. It randomly drops residual branches
in residual architectures. It is used as a drop-in replacement for addition
operation. Note that this layer DOES NOT drop a residual block across
individual samples but across the entire batch.
Reference:
- [Deep Networks with Stochastic Depth](https://arxiv.org/abs/1603.09382).
- Docstring taken from [tensorflow_addons/layers/stochastic_depth.py](tinyurl.com/mr3y2af6).
Args:
survival_probability: float, the probability of the residual branch
being kept.
Usage:
`StochasticDepth` can be used in a residual network as follows:
```python
# (...)
input = tf.ones((1, 3, 3, 1), dtype=tf.float32)
residual = tf.keras.layers.Conv2D(1, 1)(input)
output = keras_cv.layers.StochasticDepth()([input, residual])
# (...)
```
At train time, StochasticDepth returns:
$$
x[0] + b_l * x[1],
$$
where $b_l$ is a random Bernoulli variable with probability $P(b_l = 1) = p_l$
At test time, StochasticDepth rescales the activations of the residual
branch based on the survival probability ($p_l$):
$$
x[0] + p_l * x[1]
$$
"""
def __init__(self, survival_probability=0.5, **kwargs):
super().__init__(**kwargs)
self.survival_probability = survival_probability
def call(self, x, training=None):
if len(x) != 2:
raise ValueError(
f"""Input must be a list of length 2. """
f"""Got input with length={len(x)}."""
)
shortcut, residual = x
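        # Sample one Bernoulli gate for the whole batch: 1 keeps the residual
        # branch, 0 drops it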
b_l = tf.keras.backend.random_bernoulli([], p=self.survival_probability)
if training:
return shortcut + b_l * residual
else:
return shortcut + self.survival_probability * residual
def get_config(self):
config = {"survival_probability": self.survival_probability}
base_config = super().get_config()
return dict(list(base_config.items()) + list(config.items()))
|
Python
| 0.000035
|
@@ -1141,33 +1141,8 @@
om %5B
-tensorflow_addons/layers/
stoc
@@ -1158,16 +1158,24 @@
pth.py%5D(
+https://
tinyurl.
@@ -1187,17 +1187,16 @@
r3y2af6)
-.
%0A%0A Ar
|
6fbfa11a6f13f8271687a83fc4de68f62d4a4501
|
Fix encryption with custom salt
|
crypto/encrypt.py
|
crypto/encrypt.py
|
#!/bin/env python3
"""
Encrypt password with salt for unix
Usage:
encrypt.py [options] [--sha512 | --sha256 | --md5 | --crypt] [<salt>]
Options:
--sha512
--sha256
--md5
--crypt
"""
import sys
import crypt
from getpass import getpass
from docopt import docopt
# docopt(doc, argv=None, help=True, version=None, options_first=False))
default_flag = {"--sha512":True}
methods = {
"--sha512" : crypt.METHOD_SHA512,
"--sha256" : crypt.METHOD_SHA256,
"--md5" : crypt.METHOD_MD5,
"--crypt" : crypt.METHOD_CRYPT,
}
def get_method(opt):
for key in methods.keys():
if opt.get(key, False):
return methods.get(key)
def main():
opt = docopt(__doc__, sys.argv[1:])
method = get_method(opt)
salt = opt.get("<salt>")
if not salt:
salt = crypt.mksalt(method)
password = ""
if not sys.stdin.isatty():
password = sys.stdin.readline()
else:
password = getpass()
if not password:
sys.exit(1)
shadow = crypt.crypt(password, salt)
print(shadow)
def usage():
pass
if __name__ == "__main__":
main()
|
Python
| 0
|
@@ -83,16 +83,35 @@
options%5D
+ %5B--rounds %3Ccount%3E%5D
%5B--sha5
@@ -210,16 +210,64 @@
--crypt%0A
+ -r, --rounds %3Ccount%3E rounds%5Bdefault: 1000%5D%0A
%22%22%22%0A%0Aimp
@@ -431,17 +431,16 @@
_flag =
-%7B
%22--sha51
@@ -445,14 +445,8 @@
512%22
-:True%7D
%0A%0Ame
@@ -480,176 +480,445 @@
%22 :
-crypt.METHOD_SHA512,%0A %22--sha256%22 : crypt.METHOD_SHA256,%0A %22--md5%22 : crypt.METHOD_MD5,%0A %22--crypt%22 : crypt.METHOD_CRYPT,%0A %7D%0A%0Adef get_method(opt
+%7B%0A %22method%22: crypt.METHOD_SHA512,%0A %22id%22: %226%22,%0A %7D,%0A %22--sha256%22 : %7B%0A %22method%22: crypt.METHOD_SHA256,%0A %22id%22: %225%22,%0A %7D,%0A %22--md5%22 : %7B%0A %22method%22: crypt.METHOD_MD5,%0A %22id%22: %221%22,%0A %7D,%0A %22--crypt%22 : %7B%0A %22method%22: crypt.METHOD_CRYPT,%0A %22id%22: %22%22,%0A %7D,%0A %7D%0A%0Adef get_method(opt, default=default_flag
):%0A
@@ -1015,16 +1015,53 @@
et(key)%0A
+ return methods.get(default_flag)%0A
%0A%0Adef ma
@@ -1107,16 +1107,49 @@
v%5B1:%5D)%0A%0A
+ rounds = opt.get(%22--rounds%22)%0A
meth
@@ -1150,16 +1150,17 @@
method
+s
= get_m
@@ -1169,16 +1169,90 @@
hod(opt)
+%0A method = methods.get(%22method%22)%0A id_prefix = methods.get(%22id%22)%0A
%0A%0A sa
@@ -1326,16 +1326,93 @@
(method)
+%0A else:%0A salt = %22$%7B%7D$rounds=%7B%7D$%7B%7D$%22.format(id_prefix, rounds, salt)
%0A%0A pa
|
4c2f6372bb5c1db18998626049aa8e53e9889452
|
Fix an invalid build dependency.
|
syzygy/trace/rpc/rpc.gyp
|
syzygy/trace/rpc/rpc.gyp
|
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
{
'variables': {
'chromium_code': 1,
'midl_out_dir': '<(SHARED_INTERMEDIATE_DIR)/syzygy/trace/rpc',
},
'target_defaults': {
'all_dependent_settings': {
'include_dirs': ['<(SHARED_INTERMEDIATE_DIR)'],
},
},
'targets': [
{
'target_name': 'call_trace_rpc_lib',
'type': 'static_library',
# Build our IDL file to the shared intermediate directory using the
# midl_rpc.gypi include (because the default rules for .idl files are
# specific to COM interfaces). This include expects the prefix and
# midl_out_dir variables to be defined.
'variables': {
'prefix': 'CallTrace',
},
'includes': ['../../build/midl_rpc.gypi'],
'sources': ['call_trace_rpc.idl'],
'dependencies': [
'<(src)/syzygy/common/rpc/rpc.gyp:common_rpc_lib',
'<(src)/syzygy/trace/protocol/protocol.gyp:protocol_lib',
],
},
{
'target_name': 'logger_rpc_lib',
'type': 'static_library',
# Build our IDL file to the shared intermediate directory using the
# midl_rpc.gypi include (because the default rules for .idl files are
# specific to COM interfaces). This include expects the prefix and
# midl_out_dir variables to be defined.
'variables': {
'prefix': 'Logger',
},
'includes': ['../../build/midl_rpc.gypi'],
'sources': ['logger_rpc.idl'],
'dependencies': [
'<(src)/syzygy/common/rpc/rpc.gyp:common_rpc_lib',
'<(src)/syzygy/trace/protocol/protocol.gyp:protocol_lib',
],
},
],
}
|
Python
| 0.999952
|
@@ -1492,32 +1492,321 @@
_lib',%0A %5D,%0A
+ 'outputs': %5B%0A '%3C(SHARED_INTERMEDIATE_DIR)/syzygy/trace/rpc/call_trace_rpc.h',%0A '%3C(SHARED_INTERMEDIATE_DIR)/syzygy/trace/rpc/call_trace_rpc_c.cc',%0A '%3C(SHARED_INTERMEDIATE_DIR)/syzygy/trace/rpc/call_trace_rpc_s.cc',%0A %5D,%0A 'process_outputs_as_sources': 1,%0A
%7D,%0A %7B%0A
|
6d174ff58dfef28cfa2ddfe65553bcefe9ae3e6f
|
Fix exc_info leak
|
gevent_fastcgi/wsgi.py
|
gevent_fastcgi/wsgi.py
|
# Copyright (c) 2011-2013, Alexander Kulakov
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
import sys
import logging
from traceback import format_exception
import re
from wsgiref.handlers import BaseCGIHandler
from zope.interface import implements
from .interfaces import IRequestHandler
from .server import Request, FastCGIServer
__all__ = ('WSGIRequestHandler', 'WSGIRefRequestHandler', 'WSGIServer')
logger = logging.getLogger(__name__)
mandatory_environ = (
'REQUEST_METHOD',
'SCRIPT_NAME',
'PATH_INFO',
'QUERY_STRING',
'CONTENT_TYPE',
'CONTENT_LENGTH',
'SERVER_NAME',
'SERVER_PORT',
'SERVER_PROTOCOL',
)
class WSGIRefRequestHandler(object):
implements(IRequestHandler)
def __init__(self, app):
self.app = app
def __call__(self, request):
handler = self.CGIHandler(request)
handler.run(self.app)
class CGIHandler(BaseCGIHandler):
def __init__(self, request):
BaseCGIHandler.__init__(self, request.stdin, request.stdout,
request.stderr, request.environ)
def log_exception(self, exc_info):
logger.exception('WSGI application failed')
def error_output(self, environ, start_response):
start_response('500 Internal Server Error', [
('Content-type', 'text/plain'),
])
yield ''
class WSGIRequest(object):
status_pattern = re.compile(r'^[1-5]\d\d .+$')
def __init__(self, fastcgi_request):
self._environ = self.make_environ(fastcgi_request)
self._stdout = fastcgi_request.stdout
self._stderr = fastcgi_request.stderr
self._status = None
self._headers = []
self._headers_sent = False
def make_environ(self, fastcgi_request):
env = fastcgi_request.environ
for name in mandatory_environ:
env.setdefault(name, '')
env['wsgi.version'] = (1, 0)
env['wsgi.input'] = fastcgi_request.stdin
env['wsgi.errors'] = fastcgi_request.stderr
env['wsgi.multithread'] = True
env['wsgi.multiprocess'] = False
env['wsgi.run_once'] = False
https = env.get('HTTPS', '').lower()
if https in ('yes', 'on', '1'):
env['wsgi.url_scheme'] = 'https'
else:
env['wsgi.url_scheme'] = 'http'
return env
def start_response(self, status, headers, exc_info=None):
if exc_info is not None:
try:
if self._headers_sent:
raise exc_info[0], exc_info[1], exc_info[2]
finally:
exc_info = None
self._status = status
self._headers = headers
return self._app_write
def finish(self, app_iter):
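        # Send headers lazily: delay until the first non-empty chunk so an
        # empty body can be given a Content-length: 0 header instead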
if self._headers_sent:
# _app_write has been already called
self._stdout.writelines(app_iter)
else:
app_iter = iter(app_iter)
for chunk in app_iter:
# do nothing until first non-empty chunk
if chunk:
self._send_headers()
self._stdout.write(chunk)
self._stdout.writelines(app_iter)
break
else:
# app_iter had no data
self._headers.append(('Content-length', '0'))
self._send_headers()
self._stdout.close()
self._stderr.close()
def _app_write(self, chunk):
if not self._headers_sent:
self._send_headers()
self._stdout.write(chunk)
def _send_headers(self):
headers = ['Status: {0}\r\n'.format(self._status)]
headers.extend(('{0}: {1}\r\n'.format(name, value)
for name, value in self._headers))
headers.append('\r\n')
self._stdout.writelines(headers)
self._headers_sent = True
class WSGIRequestHandler(object):
implements(IRequestHandler)
def __init__(self, app):
self.app = app
def __call__(self, fastcgi_request):
request = WSGIRequest(fastcgi_request)
try:
app_iter = self.app(request._environ, request.start_response)
request.finish(app_iter)
if hasattr(app_iter, 'close'):
app_iter.close()
except Exception:
exc_info = sys.exc_info()
try:
logger.exception('Application raised exception')
request.start_response('500 Internal Server Error', [
('Content-type', 'text/plain'),
])
request.finish(map(str, format_exception(*exc_info)))
finally:
exc_info = None
class WSGIServer(FastCGIServer):
def __init__(self, address, app, **kwargs):
handler = WSGIRequestHandler(app)
super(WSGIServer, self).__init__(address, handler, **kwargs)
|
Python
| 0.999551
|
@@ -2192,16 +2192,37 @@
_info):%0A
+ try:%0A
@@ -2269,25 +2269,24 @@
ailed')%0A
-%0A
def erro
@@ -2281,198 +2281,52 @@
-def error_output(self, environ, start_response):%0A start_response('500 Internal Server Error', %5B%0A ('Content-type', 'text/plain'),%0A %5D)%0A yield ''
+ finally:%0A exc_info = None
%0A%0A%0Ac
|
84df8646ba396088e70ca8469b301d11d13d2da7
|
Fix wrong query on running tis (#17631)
|
airflow/api/common/experimental/delete_dag.py
|
airflow/api/common/experimental/delete_dag.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Delete DAGs APIs."""
import logging
from sqlalchemy import or_
from airflow import models
from airflow.exceptions import AirflowException, DagNotFound
from airflow.models import DagModel, TaskFail
from airflow.models.serialized_dag import SerializedDagModel
from airflow.utils.session import provide_session
from airflow.utils.state import State
log = logging.getLogger(__name__)
@provide_session
def delete_dag(dag_id: str, keep_records_in_log: bool = True, session=None) -> int:
"""
:param dag_id: the dag_id of the DAG to delete
    :param keep_records_in_log: whether to keep records of the given dag_id
in the Log table in the backend database (for reasons like auditing).
The default value is True.
:param session: session used
:return count of deleted dags
"""
log.info("Deleting DAG: %s", dag_id)
running_tis = (
session.query(models.TaskInstance.state).filter(models.TaskInstance.state.in_(State.unfinished)).all()
)
if running_tis:
raise AirflowException("TaskInstances still running")
dag = session.query(DagModel).filter(DagModel.dag_id == dag_id).first()
if dag is None:
raise DagNotFound(f"Dag id {dag_id} not found")
# Scheduler removes DAGs without files from serialized_dag table every dag_dir_list_interval.
# There may be a lag, so explicitly removes serialized DAG here.
if SerializedDagModel.has_dag(dag_id=dag_id, session=session):
SerializedDagModel.remove_dag(dag_id=dag_id, session=session)
count = 0
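    # Delete rows from every mapped model that has a dag_id column, including
    # subdag rows (dag_id LIKE 'dag_id.%')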
for model in models.base.Base._decl_class_registry.values():
if hasattr(model, "dag_id"):
if keep_records_in_log and model.__name__ == 'Log':
continue
cond = or_(model.dag_id == dag_id, model.dag_id.like(dag_id + ".%"))
count += session.query(model).filter(cond).delete(synchronize_session='fetch')
if dag.is_subdag:
parent_dag_id, task_id = dag_id.rsplit(".", 1)
for model in TaskFail, models.TaskInstance:
count += (
session.query(model).filter(model.dag_id == parent_dag_id, model.task_id == task_id).delete()
)
# Delete entries in Import Errors table for a deleted DAG
# This handles the case when the dag_id is changed in the file
session.query(models.ImportError).filter(models.ImportError.filename == dag.fileloc).delete(
synchronize_session='fetch'
)
return count
|
Python
| 0.000017
|
@@ -1698,16 +1698,25 @@
e.state)
+%0A
.filter(
@@ -1739,40 +1739,100 @@
nce.
-state.in_(State.unfinished)).all
+dag_id == dag_id)%0A .filter(models.TaskInstance.state == State.RUNNING)%0A .first
()%0A
|
bf87d7a60f20d9811fe2ff2c579f52b3e77a1ed3
|
Remove unneeded print statement.
|
ctree/c/dotgen.py
|
ctree/c/dotgen.py
|
"""
DOT generator for C constructs.
"""
from ctree.dotgen import DotGenLabeller
from ctree.types import codegen_type
class CDotGenLabeller(DotGenLabeller):
"""
Manages generation of DOT.
"""
def visit_SymbolRef(self, node):
s = r""
if node._global:
s += r"__global "
if node._local:
s += r"__local "
if node._const:
s += r"__const "
if node.type is not None:
s += r"%s " % codegen_type(node.type)
print(node.type)
s += r"%s" % node.name
return s
def visit_FunctionDecl(self, node):
s = r""
if node.static:
s += r"static "
if node.inline:
s += r"inline "
if node.kernel:
s += r"__kernel "
s += r"%s %s(...)" % (codegen_type(node.return_type), node.name)
return s
def visit_Constant(self, node):
return str(node.value)
def visit_String(self, node):
return r'\" \"'.join(node.values)
def visit_CFile(self, node):
return node.get_filename()
def visit_NdPointer(self, node):
s = "dtype: %s\n" % node.ptr.dtype
s += "ndim, shape: %s, %s\n" % (node.ptr.ndim, node.ptr.shape)
s += "flags: %s" % node.ptr.flags
return s
def visit_BinaryOp(self, node):
return type(node.op).__name__
def visit_UnaryOp(self, node):
return type(node.op).__name__
|
Python
| 0.000005
|
@@ -502,33 +502,8 @@
pe)%0A
- print(node.type)%0A
|
742cdc4419449a8190bddd8439c3559a1bf19180
|
fix a bug, adding an import
|
alphatwirl/concurrently/TaskPackageDropbox.py
|
alphatwirl/concurrently/TaskPackageDropbox.py
|
# Tai Sakuma <tai.sakuma@cern.ch>
import logging
from operator import itemgetter
from .WorkingArea import WorkingArea
##__________________________________________________________________||
class TaskPackageDropbox(object):
"""A drop box for task packages.
It puts task packages in a working area and dispatches runners
that execute the tasks.
"""
def __init__(self, workingArea, dispatcher):
self.workingArea = workingArea
self.dispatcher = dispatcher
def __repr__(self):
return '{}(workingArea = {!r}, dispatcher = {!r})'.format(
self.__class__.__name__,
self.workingArea,
self.dispatcher
)
def open(self):
self.workingArea.open()
self.runid_package_index_map = { }
def put(self, package):
package_index = self.workingArea.put_package(package)
runid = self.dispatcher.run(self.workingArea, package_index)
self.runid_package_index_map[runid] = package_index
def receive(self):
package_index_result_pairs = [ ] # a list of (package_index, _result)
try:
sleep = 5
while self.runid_package_index_map:
finished_runid = self.dispatcher.poll()
# e.g., [1001, 1003]
package_indices = [self.runid_package_index_map.pop(i) for i in finished_runid]
# e.g., [0, 2]
pairs = [(i, self.workingArea.collect_result(i)) for i in package_indices]
# e.g., [(0, result0), (2, None)] # None indicates the job failed
failed_package_indices = [i for i, r in pairs if r is None]
# e.g., [2]
pairs = [(i, r) for i, r in pairs if i not in failed_package_indices]
# e.g., [(0, result0)] # only successful ones
# rerun failed jobs
for package_index in failed_package_indices:
logger = logging.getLogger(__name__)
logger.warning('resubmitting {}'.format(self.workingArea.package_path(package_index)))
runid = self.dispatcher.run(self.workingArea, package_index)
self.runid_package_index_map[runid] = package_index
package_index_result_pairs.extend(pairs)
time.sleep(sleep)
except KeyboardInterrupt:
logger = logging.getLogger(__name__)
logger.warning('received KeyboardInterrupt')
self.dispatcher.terminate()
# sort in the order of package_index
package_index_result_pairs = sorted(package_index_result_pairs, key = itemgetter(0))
results = [result for i, result in package_index_result_pairs]
return results
def close(self):
self.dispatcher.terminate()
self.workingArea.close()
##__________________________________________________________________||
|
Python
| 0.000001
|
@@ -42,16 +42,28 @@
logging%0A
+import time%0A
from ope
|
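The one-line hunk above exists because receive() calls time.sleep(sleep) inside its polling loop, which raises NameError at runtime when the time module was never imported. A minimal sketch of the loop's shape after the fix; the dispatcher object and the run-id bookkeeping are stand-ins for the real WorkingArea machinery:

import time

def poll_until_done(dispatcher, runid_package_index_map, sleep=5):
    # Keep polling until every tracked run id has been reported finished.
    finished = []
    while runid_package_index_map:
        for runid in dispatcher.poll():        # run ids completed since the last poll
            finished.append(runid_package_index_map.pop(runid))
        time.sleep(sleep)                      # NameError before this commit: 'time' was never imported
    return finished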
50861c6d256438afd880aebbb3a19ea360367fac
|
upgrade IdentityDetailSerializer to DRF3
|
api/serializers/identity_detail_serializer.py
|
api/serializers/identity_detail_serializer.py
|
from core.models.identity import Identity
from rest_framework import serializers
class IdentityDetailSerializer(serializers.ModelSerializer):
created_by = serializers.CharField(source='creator_name')
quota = serializers.Field(source='get_quota_dict')
provider_id = serializers.Field(source='provider.uuid')
id = serializers.Field(source="uuid")
class Meta:
model = Identity
exclude = ('credentials', 'created_by', 'provider', 'uuid')
|
Python
| 0
|
@@ -140,16 +140,18 @@
er):%0A
+ #
created
@@ -217,32 +217,40 @@
a = serializers.
+ReadOnly
Field(source='ge
@@ -286,32 +286,40 @@
d = serializers.
+ReadOnly
Field(source='pr
@@ -353,16 +353,24 @@
alizers.
+ReadOnly
Field(so
@@ -436,45 +436,21 @@
-exclude = ('credentials', 'created_by
+fields = ('id
', '
@@ -461,14 +461,18 @@
ider
+_id
', '
-uuid
+quota
')
|
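Context for the hunks above: in Django REST Framework 3 the untyped read-only serializers.Field of DRF 2 was replaced by serializers.ReadOnlyField, and ModelSerializer expects an explicit fields whitelist instead of exclude over implicit fields. The post-diff serializer, reconstructed from the hunks (a best-effort reading, not verified output):

from core.models.identity import Identity
from rest_framework import serializers

class IdentityDetailSerializer(serializers.ModelSerializer):
    # created_by = serializers.CharField(source='creator_name')
    quota = serializers.ReadOnlyField(source='get_quota_dict')
    provider_id = serializers.ReadOnlyField(source='provider.uuid')
    id = serializers.ReadOnlyField(source="uuid")

    class Meta:
        model = Identity
        fields = ('id', 'provider_id', 'quota')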
310dfafe22677fa24524a50dd873c7c871b526e1
|
make models and questions required args
|
goethe/eval/analogy.py
|
goethe/eval/analogy.py
|
import os
import argparse
from collections import defaultdict
import gensim.models.keyedvectors
import pandas as pd
# Names used in output files
CORRECT = 'correct' # correctly answered questions
INCORRECT = 'incorrect' # incorrectly answered questions
ANSWERED = 'answered' # answered questions in total (correct + incorrect)
SECTION = 'section' # name of sections in questions file (start with ':')
ACCURACY = 'accuracy' # name accuracy
TOTAL = '(total)' # identifier for summary sections
QUESTIONS = 'questions' # number of questions in a section
MODEL = 'model' # name of models
OUT_EXT = '.analogy' # extension used for output files
def name(path):
"""Strip path and extension from name.
"""
base = os.path.basename(path)
return os.path.splitext(base)[0]
def end_with_totals(df):
"""Move rows with TOTAL in the section name to the end.
"""
non_totals = [i for i in df.index if TOTAL not in i]
totals = [i for i in df.index if TOTAL in i]
return df.reindex(non_totals + totals)
def count_sections(questions):
"""Count questions per section."""
counts = defaultdict(int)
current = None
with open(questions) as q:
for line in q:
if line.startswith(':'):
current = line.split(':')[1].strip()
else:
counts[current] += 1
counts['total'] = sum(counts.values())
counts = pd.Series(counts).astype(int)
counts.name = name(questions)
return counts
def accuracy_df(questions, model):
"""Create a dataframe with every section name as row and evaluation results
(accuracy, correct/incorrect/total answers) as columns.
"""
sections = (gensim.models.KeyedVectors
.load_word2vec_format(model)
.accuracy(questions))
def collapse_section(s):
"""Take section and return results as dictionary."""
section_name = s['section']
correct = len(s['correct'])
incorrect = len(s['incorrect'])
answered = correct + incorrect
accuracy = correct / answered if answered else 0
return {SECTION: section_name, ACCURACY: accuracy, CORRECT: correct,
INCORRECT: incorrect, ANSWERED: answered}
collapsed_sections = [collapse_section(s) for s in sections]
return (pd.DataFrame.from_records(collapsed_sections, index=SECTION)
.assign(questions=count_sections(questions))
.rename({'total': f'{name(questions)} {TOTAL}'}))
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description='Evaluate model and generate CSV with results')
parser.add_argument('-q', '--questions', nargs='+',
help='questions file in word2vec format')
parser.add_argument('-m', '--models', nargs='+',
help='one or more models to be evaluated')
parser.add_argument('-o', '--output', default='.',
help='folder to write output files')
parser.add_argument('-c', '--counts', default=False, action='store_true',
help='create an output file for each model containing the counts of correct/incorrect/total answers')
parser.add_argument('-t', '--totals', default=False, action='store_true',
help=f"move all summary sections (containing '{TOTAL}') to the end")
parser.add_argument('-p', '--print', default=False, action='store_true',
help='print output instead of writing to file')
args = parser.parse_args()
model_dfs = []
for m in args.models:
df = pd.concat(accuracy_df(q, m) for q in args.questions)
# Divide sum by two because of the per file total columns
total = df.sum().div(2).rename(TOTAL).astype(int)
df = df.append(total)
df.loc[TOTAL, ACCURACY] = (df.loc[TOTAL, CORRECT]
/ df.loc[TOTAL, ANSWERED])
if args.totals:
df = end_with_totals(df)
df.name = name(m)
model_dfs.append(df)
os.makedirs(args.output, exist_ok=True)
if args.counts:
for df in model_dfs:
if args.print:
print(df)
else:
df.to_csv(os.path.join(args.output, f'{df.name}{OUT_EXT}'),
index_label=SECTION)
else:
accuracy_df = pd.concat([df[ACCURACY] for df in model_dfs], axis=1)
accuracy_df.columns = [df.name for df in model_dfs]
if args.print:
print(accuracy_df)
else:
path = os.path.join(args.output, f'{ACCURACY}{OUT_EXT}')
accuracy_df.to_csv(index_label=MODEL, path=path)
|
Python
| 0.000002
|
@@ -2668,32 +2668,47 @@
ons', nargs='+',
+ required=True,
%0A
@@ -2810,16 +2810,31 @@
rgs='+',
+ required=True,
%0A
|
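Why the two required=True flags matter: without them argparse defaults -m/--models and -q/--questions to None, so the script later crashes with TypeError when iterating args.models instead of printing a usable error. A standalone illustration of the enforced behaviour:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-m', '--models', nargs='+', required=True)
parser.add_argument('-q', '--questions', nargs='+', required=True)

# Missing flags now exit early with
# "error: the following arguments are required: -m/--models, -q/--questions"
args = parser.parse_args(['-m', 'model.bin', '-q', 'questions.txt'])
print(args.models, args.questions)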
ef18eb5ce3ed8c65a1cf57c139cd5380f76ef707
|
Improve `is_node` error message
|
graphene/relay/node.py
|
graphene/relay/node.py
|
from functools import partial
import six
from graphql_relay import from_global_id, to_global_id
from ..types import ID, Field, Interface, ObjectType
from ..types.interface import InterfaceMeta
def is_node(objecttype):
'''
Check if the given objecttype has Node as an interface
'''
assert issubclass(objecttype, ObjectType), (
'Only ObjectTypes can have a Node interface.'
)
for i in objecttype._meta.interfaces:
if issubclass(i, Node):
return True
return False
def get_default_connection(cls):
from .connection import Connection
assert issubclass(cls, ObjectType), (
'Can only get connection type on implemented Nodes.'
)
class Meta:
node = cls
return type('{}Connection'.format(cls.__name__), (Connection,), {'Meta': Meta})
class GlobalID(Field):
def __init__(self, node, *args, **kwargs):
super(GlobalID, self).__init__(ID, *args, **kwargs)
self.node = node
@staticmethod
def id_resolver(parent_resolver, node, root, args, context, info):
id = parent_resolver(root, args, context, info)
return node.to_global_id(info.parent_type.name, id) # root._meta.name
def get_resolver(self, parent_resolver):
return partial(self.id_resolver, parent_resolver, self.node)
class NodeMeta(InterfaceMeta):
def __new__(cls, name, bases, attrs):
cls = InterfaceMeta.__new__(cls, name, bases, attrs)
cls._meta.fields['id'] = GlobalID(cls, required=True, description='The ID of the object.')
return cls
class NodeField(Field):
def __init__(self, node, type=False, deprecation_reason=None,
name=None, **kwargs):
assert issubclass(node, Node), 'NodeField can only operate in Nodes'
type = type or node
super(NodeField, self).__init__(
type,
description='The ID of the object',
id=ID(required=True),
resolver=node.node_resolver
)
class Node(six.with_metaclass(NodeMeta, Interface)):
'''An object with an ID'''
@classmethod
def Field(cls, *args, **kwargs): # noqa: N802
return NodeField(cls, *args, **kwargs)
@classmethod
def node_resolver(cls, root, args, context, info):
return cls.get_node_from_global_id(args.get('id'), context, info)
@classmethod
def get_node_from_global_id(cls, global_id, context, info):
try:
_type, _id = cls.from_global_id(global_id)
graphene_type = info.schema.get_type(_type).graphene_type
# We make sure the ObjectType implements the "Node" interface
assert cls in graphene_type._meta.interfaces
except:
return None
get_node = getattr(graphene_type, 'get_node', None)
if get_node:
return get_node(_id, context, info)
@classmethod
def from_global_id(cls, global_id):
return from_global_id(global_id)
@classmethod
def to_global_id(cls, type, id):
return to_global_id(type, id)
@classmethod
def implements(cls, objecttype):
get_connection = getattr(objecttype, 'get_connection', None)
if not get_connection:
get_connection = partial(get_default_connection, objecttype)
objecttype.Connection = get_connection()
|
Python
| 0.000002
|
@@ -392,23 +392,48 @@
terface.
-'%0A )
+ Received %25s'%0A ) %25 objecttype
%0A for
|
af4440512a220a6e91f37b68250aaf5bb111ab15
|
Handle tzlocal failures
|
graphite_api/config.py
|
graphite_api/config.py
|
import logging
import os
import structlog
import warnings
import yaml
from tzlocal import get_localzone
from importlib import import_module
from structlog.processors import (format_exc_info, JSONRenderer,
KeyValueRenderer)
from .middleware import CORS, TrailingSlash
from .search import IndexSearcher
from .storage import Store
from . import DEBUG
try:
from logging.config import dictConfig
except ImportError:
from logutils.dictconfig import dictConfig
if DEBUG:
processors = (format_exc_info, KeyValueRenderer())
else:
processors = (format_exc_info, JSONRenderer())
logger = structlog.get_logger()
default_conf = {
'search_index': '/srv/graphite/index',
'finders': [
'graphite_api.finders.whisper.WhisperFinder',
],
'functions': [
'graphite_api.functions.SeriesFunctions',
'graphite_api.functions.PieFunctions',
],
'whisper': {
'directories': [
'/srv/graphite/whisper',
],
},
'time_zone': get_localzone().zone,
}
# attributes of a classical log record
NON_EXTRA = set(['module', 'filename', 'levelno', 'exc_text', 'pathname',
'lineno', 'msg', 'funcName', 'relativeCreated',
'levelname', 'msecs', 'threadName', 'name', 'created',
'process', 'processName', 'thread'])
class StructlogFormatter(logging.Formatter):
def __init__(self, *args, **kwargs):
self._bound = structlog.BoundLoggerBase(None, processors, {})
def format(self, record):
if not record.name.startswith('graphite_api'):
kw = dict(((k, v) for k, v in record.__dict__.items()
if k not in NON_EXTRA))
kw['logger'] = record.name
return self._bound._process_event(
record.levelname.lower(), record.getMessage(), kw)[0]
return record.getMessage()
def load_by_path(path):
module, klass = path.rsplit('.', 1)
finder = import_module(module)
return getattr(finder, klass)
def configure(app):
config_file = os.environ.get('GRAPHITE_API_CONFIG',
'/etc/graphite-api.yaml')
if os.path.exists(config_file):
with open(config_file) as f:
config = yaml.safe_load(f)
config['path'] = config_file
else:
warnings.warn("Unable to find configuration file at {0}, using "
"default config.".format(config_file))
config = {}
configure_logging(config)
for key, value in list(default_conf.items()):
config.setdefault(key, value)
loaded_config = {'functions': {}, 'finders': []}
for functions in config['functions']:
loaded_config['functions'].update(load_by_path(functions))
finders = []
for finder in config['finders']:
finders.append(load_by_path(finder)(config))
loaded_config['store'] = Store(finders)
loaded_config['searcher'] = IndexSearcher(config['search_index'])
app.config['GRAPHITE'] = loaded_config
app.config['TIME_ZONE'] = config['time_zone']
logger.info("configured timezone", timezone=app.config['TIME_ZONE'])
if 'sentry_dsn' in config:
try:
from raven.contrib.flask import Sentry
except ImportError:
warnings.warn("'sentry_dsn' is provided in the configuration but "
"the sentry client is not installed. Please `pip "
"install raven[flask]`.")
else:
Sentry(app, dsn=config['sentry_dsn'])
app.wsgi_app = TrailingSlash(CORS(app.wsgi_app,
config.get('allowed_origins')))
def configure_logging(config):
structlog.configure(processors=processors,
logger_factory=structlog.stdlib.LoggerFactory(),
wrapper_class=structlog.stdlib.BoundLogger,
cache_logger_on_first_use=True)
config.setdefault('logging', {})
config['logging'].setdefault('version', 1)
config['logging'].setdefault('handlers', {})
config['logging'].setdefault('formatters', {})
config['logging'].setdefault('loggers', {})
config['logging']['handlers'].setdefault('raw', {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'raw',
})
config['logging']['loggers'].setdefault('root', {
'handlers': ['raw'],
'level': 'DEBUG',
'propagate': False,
})
config['logging']['loggers'].setdefault('graphite_api', {
'handlers': ['raw'],
'level': 'DEBUG',
})
config['logging']['formatters']['raw'] = {'()': StructlogFormatter}
dictConfig(config['logging'])
if 'path' in config:
logger.info("loading configuration", path=config['path'])
else:
logger.info("loading default configuration")
|
Python
| 0.000001
|
@@ -1048,16 +1048,127 @@
zone,%0A%7D%0A
+if default_conf%5B'time_zone'%5D == 'local': # tzlocal didn't find anything%0A default_conf%5B'time_zone'%5D = 'UTC'%0A
%0A%0A# attr
|
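Background for the hunk: on hosts where tzlocal cannot determine the system timezone, get_localzone() can come back with a zone literally named 'local', which downstream pytz lookups reject; falling back to UTC keeps the default config usable. The guard in isolation, a sketch assuming only tzlocal is installed:

from tzlocal import get_localzone

default_conf = {'time_zone': get_localzone().zone}
if default_conf['time_zone'] == 'local':  # tzlocal didn't find anything usable
    default_conf['time_zone'] = 'UTC'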
f21180292db82abfc69272c5c1b9e50c68645eca
|
fix gdpr form scrub tests in python 3
|
corehq/apps/users/management/commands/gdpr_scrub_user_from_forms.py
|
corehq/apps/users/management/commands/gdpr_scrub_user_from_forms.py
|
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
from django.core.management.base import BaseCommand
from corehq.apps.users.models import CouchUser
from corehq.form_processor.interfaces.dbaccessors import FormAccessors
from io import StringIO
from lxml import etree
import sys
import six
import logging
logger = logging.getLogger(__name__)
NEW_USERNAME = "Redacted User (GDPR)"
class Command(BaseCommand):
help = "Scrubs the username from all forms associated with the given user"
def add_arguments(self, parser):
parser.add_argument('username')
parser.add_argument('domain')
def handle(self, username, domain, **options):
this_form_accessor = FormAccessors(domain=domain)
user = CouchUser.get_by_username(username)
if not user:
logger.info("User {} not found.".format(username))
sys.exit(1)
user_id = user._id
form_ids = this_form_accessor.get_form_ids_for_user(user_id)
input_response = six.moves.input(
"Update {} form(s) for user {} in domain {}? (y/n): ".format(len(form_ids), username, domain))
if input_response == "y":
for form_data in this_form_accessor.iter_forms(form_ids):
form_attachment_xml_new = self.update_form_data(form_data, NEW_USERNAME)
this_form_accessor.modify_attachment_xml_and_metadata(form_data,
form_attachment_xml_new,
NEW_USERNAME)
logging.info("Updated {} form(s) for user {} in domain {}".format(len(form_ids), username, domain))
elif input_response == "n":
logging.info("No forms updated, exiting.")
else:
logging.info("Command not recognized. Exiting.")
@staticmethod
def update_form_data(form_data, new_username):
form_attachment_xml = form_data.get_attachment("form.xml")
xml_elem = etree.parse(StringIO(six.text_type(form_attachment_xml)))
id_elem = xml_elem.find("{http://openrosa.org/jr/xforms}meta").find(
"{http://openrosa.org/jr/xforms}username")
id_elem.text = new_username
new_form_attachment_xml = etree.tostring(xml_elem)
return new_form_attachment_xml
|
Python
| 0
|
@@ -2047,16 +2047,32 @@
rm.xml%22)
+.decode('utf-8')
%0A
@@ -2108,22 +2108,8 @@
gIO(
-six.text_type(
form
@@ -2125,17 +2125,16 @@
nt_xml))
-)
%0A
|
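The failing piece here: under Python 3, get_attachment("form.xml") returns bytes, and six.text_type(b'...') produces the repr ("b'...'") rather than the XML text, so etree.parse chokes. Decoding explicitly fixes both problems. A sketch of the corrected parse path, assuming the attachment is UTF-8 bytes:

from io import StringIO
from lxml import etree

def parse_form_xml(form_data):
    xml_bytes = form_data.get_attachment("form.xml")   # bytes on Python 3
    return etree.parse(StringIO(xml_bytes.decode('utf-8')))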
1e4d80c50aaf253fd2bad9a2139737d8bf8dc927
|
fix escape sequence DeprecationWarning (#1595)
|
gym/spaces/discrete.py
|
gym/spaces/discrete.py
|
import numpy as np
from .space import Space
class Discrete(Space):
"""A discrete space in :math:`\{ 0, 1, \dots, n-1 \}`.
Example::
>>> Discrete(2)
"""
def __init__(self, n):
assert n >= 0
self.n = n
super(Discrete, self).__init__((), np.int64)
def sample(self):
return self.np_random.randint(self.n)
def contains(self, x):
if isinstance(x, int):
as_int = x
elif isinstance(x, (np.generic, np.ndarray)) and (x.dtype.kind in np.typecodes['AllInteger'] and x.shape == ()):
as_int = int(x)
else:
return False
return as_int >= 0 and as_int < self.n
def __repr__(self):
return "Discrete(%d)" % self.n
def __eq__(self, other):
return isinstance(other, Discrete) and self.n == other.n
|
Python
| 0.000004
|
@@ -66,16 +66,17 @@
e):%0A
+r
%22%22%22A dis
@@ -106,16 +106,17 @@
%7B 0, 1,
+%5C
%5Cdots, n
@@ -124,20 +124,16 @@
1 %5C%7D%60. %0A
-
%0A Exa
@@ -139,20 +139,16 @@
ample::%0A
-
%0A
@@ -164,24 +164,16 @@
rete(2)%0A
-
%0A %22%22%22
|
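The warning being fixed: \{ and \d inside an ordinary string literal are invalid escape sequences, and CPython 3.6+ emits DeprecationWarning for them; an r-prefixed docstring keeps the set-notation backslashes verbatim. In miniature:

ok = r"\{ 0, 1, \dots, n-1 \}"     # raw string: no warning, backslashes preserved
# bad = "\{ 0, 1, \dots, n-1 \}"   # would warn: invalid escape sequence '\{'
print(ok)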
31b842cd0a443eb792d2e5a1c08880523f0239a2
|
Improve Session Support
|
handler/BaseHandler.py
|
handler/BaseHandler.py
|
#!/usr/bin/python
# -*- coding:utf-8 -*-
# Powered By KK Studio
# 2017-04-13
import tornado
import os
import time
import json
import hashlib
class BaseHandler(tornado.web.RequestHandler):
# Initialization method
def initialize(self):
# Time of the current request
self.time = int(time.time())
self.time_str = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self.time))
# Session
self.session = None # marker for a user who is not yet logged in
self.init_session()
# Version
self.app_version = self.application.__version__
# Current Route
self.url = self.get_current_route()
# Methods further down that override on_finish must call _on_finish
def _on_finish(self):
# Update the session
if self.session:
self.save_session() # write the session back to Redis
# Close the database connection when request handling finishes; otherwise it may cause a "MySQL Server has gone away" (2006) error
#self.db.close()
# Override on_finish
def on_finish(self):
self._on_finish()
# Override the write_error method
def write_error(self, status_code, **kwargs):
title = "%s - %s" % (status_code, self._reason)
if status_code == 404: # catch 404
self.render('page/error.html',title=title)
elif status_code == 500: # 500 can be caught normally; 404 apparently cannot
#print self.settings.get("serve_traceback")
msg = ''
if 'exc_info' in kwargs:
for i in kwargs['exc_info']:
#print type(i)
msg += "<p>%s</p>" % str(i)
self.render('page/error.html', title=title, code=status_code, msg=msg)
else:
self.render('page/error.html', title=title, code=status_code, msg=status_code)
# Log Instance
@property
def log(self):
return self.application.log
# Get the current route
def get_current_route(self):
uri = self.request.uri.split('?')
return uri[0]
# Database
@property
def db(self):
return self.application.db
# Redis
@property
def redis(self):
return self.application.redis
# Return JSON
def jsonReturn(self,data):
self.set_header('Content-Type', 'application/json')
self.write(data)
# Format a timestamp
def format_time(self,timstamp=None):
return time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(timstamp))
# Get the currently logged-in user
def get_current_user(self):
return self.session
# Initialize the session
def init_session(self):
self.session_key = self.settings.get('session_key')
self.session_expires = self.settings.get('session_expires')
self.cookie_name = self.settings.get('cookie_name')
self.cookie_value = self.get_secure_cookie(self.cookie_name)
if self.cookie_value:
self.session_id = self.session_key + self.cookie_value
self.session = self.get_session()
#if self.session:
# # Refresh the session expiry time; this step is performed in the _on_finish method
# self.redis.expire(self.session_id, self.session_expires)
else:
#self.cookie_value = self.gen_session_id()
#self.set_secure_cookie(self.cookie_name,self.cookie_value)
#self.session_id = self.session_key + self.cookie_value
self.session_id = None
def get_session(self):
session = self.redis.get(self.session_id)
if not session:
return None
session = json.loads(session) # string to dict
return session
def set_session(self):
self.redis.set(self.session_id,json.dumps(self.session),self.session_expires) # server-side session
def create_session(self,session,expires_days=None):
self.cookie_value = self.gen_session_id()
self.session_id = self.session_key + self.cookie_value
self.session = session
self.set_session()
self.set_secure_cookie(self.cookie_name, self.cookie_value, expires_days) # client-side cookie
# Destroy the session
def remove_session(self):
if self.session: # session exists
self.redis.delete(self.session_id)
self.clear_cookie(self.cookie_name)
self.session = None
# Generate a session ID
def gen_session_id(self):
return hashlib.sha1('%s%s' % (os.urandom(16), time.time())).hexdigest()
# Compute MD5
def md5(self,text):
s = hashlib.md5()
s.update(text)
return s.hexdigest()
|
Python
| 0
|
@@ -134,16 +134,48 @@
hashlib
+%0Afrom app.Session import Session
%0A%0Aclass
@@ -416,46 +416,8 @@
ion%0A
- self.session = None # %E7%94%A8%E6%88%B7%E6%9C%AA%E7%99%BB%E5%BD%95%E6%A0%87%E8%AF%86%0A
@@ -2376,33 +2376,22 @@
-self.session_key
+prefix
= self.
@@ -2412,19 +2412,22 @@
session_
-key
+prefix
')%0A
@@ -2433,29 +2433,16 @@
-self.session_
expires
= s
@@ -2437,17 +2437,16 @@
expires
-
= self.s
@@ -2545,36 +2545,27 @@
self.
-cookie_value
+sid
= self.get_
@@ -2608,1532 +2608,68 @@
-if self.cookie_value:%0A self.session_id = self.session_key + self.cookie_value%0A self.session = self.get_session()%0A #if self.session:%0A # # %E5%88%B7%E6%96%B0Seesion%E8%BF%87%E6%9C%9F%E6%97%B6%E9%97%B4%EF%BC%8C%E8%BF%99%E4%B8%80%E6%AD%A5%E6%94%BE%E5%88%B0_on_finish%E6%96%B9%E6%B3%95%E4%B8%AD%E6%89%A7%E8%A1%8C%0A # self.redis.expire(self.session_id, self.session_expires)%0A else:%0A #self.cookie_value = self.gen_session_id()%0A #self.set_secure_cookie(self.cookie_name,self.cookie_value)%0A #self.session_id = self.session_key + self.cookie_value%0A self.session_id = None%0A%0A%0A def get_session(self):%0A session = self.redis.get(self.session_id)%0A if not session:%0A return None%0A session = json.loads(session) # %E5%AD%97%E7%AC%A6%E4%B8%B2%E8%BD%AC%E5%AD%97%E5%85%B8%0A return session%0A%0A%0A def set_session(self):%0A self.redis.set(self.session_id,json.dumps(self.session),self.session_expires) # %E5%90%8E%E7%AB%AFSession%0A%0A%0A def create_session(self,session,expires_days=None):%0A self.cookie_value = self.gen_session_id()%0A self.session_id = self.session_key + self.cookie_value%0A self.session = session%0A self.set_session()%0A self.set_secure_cookie(self.cookie_name, self.cookie_value, expires_days) # %E5%89%8D%E7%AB%AFCookie%0A%0A%0A # %E9%94%80%E6%AF%81Session%0A def remove_session(self):%0A if self.session: # Session%E5%AD%98%E5%9C%A8%0A self.redis.delete(self.session_id)%0A self.clear_cookie(self.cookie_name)%0A self.session = None%0A%0A%0A # %E7%94%9F%E6%88%90SessionID%0A def gen_session_id(self):%0A return hashlib.sha1('%25s%25s' %25 (os.urandom(16), time.time())).hexdigest(
+self.session = Session(prefix, self.sid, expires, self.redis
)%0A%0A%0A
|
50248c3989624f935a4ff2a80229b997ca77f5c2
|
fix generator issue
|
hazm/SequenceTagger.py
|
hazm/SequenceTagger.py
|
# coding: utf8
from __future__ import unicode_literals
from nltk.tag.api import TaggerI
from wapiti import Model
class SequenceTagger(TaggerI):
""" wrapper for [Wapiti](http://wapiti.limsi.fr) sequence tagger
>>> tagger = SequenceTagger(patterns=['*', 'U:word-%x[0,0]'])
>>> tagger.train([[('من', 'PRO'), ('به', 'P'), ('مدرسه', 'N'), ('رفته_بودم', 'V'), ('.', 'PUNC')]])
>>> tagger.tag_sents([['من', 'به', 'مدرسه', 'رفته_بودم', '.']])
[[('من', 'PRO'), ('به', 'P'), ('مدرسه', 'N'), ('رفته_بودم', 'V'), ('.', 'PUNC')]]
>>> tagger.save_model('test.tagger')
>>> SequenceTagger(model='test.tagger').tag_sents([['من', 'به', 'مدرسه', 'رفته_بودم', '.']])
[[('من', 'PRO'), ('به', 'P'), ('مدرسه', 'N'), ('رفته_بودم', 'V'), ('.', 'PUNC')]]
"""
def __init__(self, patterns=[], **options):
self.model = Model(patterns='\n'.join(patterns), **options)
def train(self, sentences):
self.model.train(['\n'.join([' '.join(word) for word in sentence]) for sentence in sentences])
def save_model(self, filename):
self.model.save(filename)
def tag_sents(self, sentences):
lines = '\n\n'.join(['\n'.join(sentence) for sentence in sentences])
results = self.model.label_sequence(lines).decode('utf8')
tags = iter(results.strip().split('\n'))
return [[(word, next(tags)) for word in sentence] for sentence in sentences]
|
Python
| 0
|
@@ -1071,16 +1071,46 @@
ences):%0A
+%09%09sentences = list(sentences)%0A
%09%09lines
|
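What the one-liner fixes: tag_sents consumes sentences twice, once to build the label input and once to zip words with predicted tags. A generator is exhausted after the first pass, so the second pass silently produces empty output; list() materialises it up front. The failure mode in isolation:

def consume_twice(sentences):
    sentences = list(sentences)              # the one-line fix from the diff
    first = sum(len(s) for s in sentences)   # first pass over the data
    second = sum(len(s) for s in sentences)  # second pass; 0 on a bare generator
    return first, second

print(consume_twice(iter([['a', 'b'], ['c']])))  # (3, 3); without list(): (3, 0)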
7cbc591a0a1c1bee5bdf573c55e12a270591e520
|
handle no position in sample
|
xgds_sample/labels.py
|
xgds_sample/labels.py
|
# __BEGIN_LICENSE__
# Copyright (c) 2015, United States Government, as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All rights reserved.
#
# The xGDS platform is licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0.
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# __END_LICENSE__
from datetime import datetime
import qrcode
import textwrap
import os
from fpdf import Template
from django.conf import settings
from xgds_sample import labelTemplates
from geocamUtil.loader import LazyGetModelByName
SAMPLE_MODEL = LazyGetModelByName(settings.XGDS_SAMPLE_SAMPLE_MODEL)
def generateMultiPDF(sampleLabels, size, index):
"""
Actually create the PDF for multiple sample labels.
"""
now = datetime.utcnow()
printableFile = "multi_" + size.name + "_" + now.strftime("%Y%m%d_%H%M%S") + "_temp" + str(index) + ".pdf"
# clear out any old version
outputFilename = os.path.join(settings.MEDIA_ROOT, settings.XGDS_SAMPLE_PDF_DIR, printableFile)
# get the template
elements = getattr(labelTemplates, 'multi' + size.name)
template = Template(format=[215.9, 279.4], orientation="P", elements=elements)
template.add_page()
qrCodeImages = []
i = 0
paragraph = size.paragraphWidth
for sampleLabel in sampleLabels:
if i == 10:
break
# make the qr code image
qrCodeImages.append(generateQRCode(sampleLabel.url, sampleLabel.number))
# populate the template
template[str(i) + "_qrcode"] = qrCodeImages[i]
if sampleLabel.number:
template[str(i) + "_id"] = sampleLabel.number
try:
sample = sampleLabel.sample
except:
sample = None
# display description next to the qr code
if sample:
rows = [sample.name]
if sample.description:
rows.append(sample.description)
samplePosition = sample.getPositionDict()
if samplePosition:
if samplePosition['lat'] and samplePosition['lon']:
rows.append('lat, lon: ' + str(samplePosition['lat']) + ' ' + str(samplePosition['lon']))
if sample.collection_time:
rows.append('collected at ' + sample.collection_time.strftime('%Y-%m-%d %H:%i %e'))
finalrows = []
for row in rows:
if row is not None:
if len(row) > paragraph:
wrapped = textwrap.wrap(row, paragraph)
for l in wrapped:
finalrows.append(l)
else:
finalrows.append(row)
for j, row in enumerate(finalrows):
key = "%d_row%d" % (i, j + 1)
if (key) in template.keys:
template[key] = row
# update the record. This is also a lie, we don't know if you actually printed it, but whatever.
sampleLabel.printTime = now
sampleLabel.printableFile = printableFile
sampleLabel.save()
i = i + 1
while i < 10:
template[str(i) + "_qrcode"] = os.path.join(settings.STATIC_ROOT, "xgds_sample/images/ipx.gif")
i = i + 1
# make the PDF
template.render(outputFilename)
# remove the qr code image
for qrCodeImage in qrCodeImages:
os.remove(qrCodeImage)
return outputFilename
def generateQRCode(data, label_id):
"""
Create a qr code image for this sample.
Right now these are a fixed size.
"""
qr = qrcode.QRCode(
version=1,
error_correction=qrcode.constants.ERROR_CORRECT_L,
box_size=6, # how many pixels each box is
border=4, # 4 is the minimum # of boxes for the border
)
qr.add_data(data)
qr.make(fit=True)
img = qr.make_image()
filename = "qrcode_%d.png" % label_id
imgFilename = os.path.join(settings.MEDIA_ROOT, settings.XGDS_SAMPLE_PDF_DIR, filename)
img.save(imgFilename)
return imgFilename
|
Python
| 0
|
@@ -2411,16 +2411,37 @@
nDict()%0A
+ try:%0A
@@ -2459,24 +2459,28 @@
lePosition:%0A
+
@@ -2535,16 +2535,20 @@
'lon'%5D:%0A
+
@@ -2649,16 +2649,57 @@
lon'%5D))%0A
+ except:%0A pass%0A
|
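The hunks wrap the lat/lon block in try/except so a sample without position data no longer aborts label generation; the commit uses a bare except, which also swallows unrelated errors, so a narrower clause would be safer. (Also visible above but untouched by the diff: the collection_time format '%H:%i %e' mixes MySQL-style directives into strftime.) The guarded lookup as a sketch:

def append_position_row(sample, rows):
    # 'sample' and 'rows' stand in for the objects used in generateMultiPDF above.
    try:
        pos = sample.getPositionDict()
        if pos and pos['lat'] and pos['lon']:
            rows.append('lat, lon: %s %s' % (pos['lat'], pos['lon']))
    except Exception:
        pass  # sample has no usable position; leave the label without coordinates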
649a70d825d2182e3d5a4f42a83f377b66043e09
|
bump version
|
yandextank/version.py
|
yandextank/version.py
|
VERSION = '1.17.1'
|
Python
| 0.000001
|
@@ -9,11 +9,11 @@
= '1.17.
-1
+2
'%0A
|
60d93c3ade6f465e627c6c47c17d9c86e2b52f2a
|
Handle None challenge
|
app/grandchallenge/core/context_processors.py
|
app/grandchallenge/core/context_processors.py
|
import logging
from django.conf import settings
from guardian.shortcuts import get_perms
from guardian.utils import get_anonymous_user
logger = logging.getLogger(__name__)
def challenge(request):
try:
challenge = request.challenge
except AttributeError:
logger.warning(f"Could not get challenge for request: {request}")
return {}
try:
user = request.user
except AttributeError:
user = get_anonymous_user()
return {
"challenge": challenge,
"challenge_perms": get_perms(user, challenge),
"user_is_participant": challenge.is_participant(user),
"pages": challenge.page_set.all(),
}
def google_keys(*_, **__):
return {
"google_analytics_id": settings.GOOGLE_ANALYTICS_ID,
"geochart_api_key": settings.GOOGLE_MAPS_API_KEY,
}
def debug(*_, **__):
return {"DEBUG": settings.DEBUG}
|
Python
| 0.000001
|
@@ -240,16 +240,70 @@
allenge%0A
+%0A if challenge is None:%0A return %7B%7D%0A%0A
exce
|
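The guard added above covers the case where request.challenge exists but is None (for example, outside any challenge subdomain); without it, get_perms(user, None) and challenge.is_participant(user) would fail further down. The two-step pattern in isolation; note the real code also logs a warning on the missing-attribute branch:

def challenge_context(request):
    # Distinguish "attribute set to None" from "attribute missing entirely".
    challenge = getattr(request, 'challenge', None)
    if challenge is None:
        return {}
    return {'challenge': challenge}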
1518347c2c1ceb482031ca091d54dcae25eed083
|
Refactor flip
|
zl/indicators/flip.py
|
zl/indicators/flip.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Jason Koelker
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import numbers
from zipline.transforms import utils as transforms
BULL = 'Bull'
BEAR = 'Bear'
class Flip(object):
__metaclass__ = transforms.TransformMeta
def __init__(self, period=4, setup_price='close_price'):
self.period = period
self.setup_price = setup_price
self.sid_windows = collections.defaultdict(self.create_window)
def create_window(self):
return FlipWindow(self.period, self.setup_price)
def update(self, event):
window = self.sid_windows[event.sid]
window.update(event)
return window()
class FlipWindow(transforms.EventWindow):
def __init__(self, period, setup_price):
transforms.EventWindow.__init__(self, window_length=period + 2)
self.period = period
self.setup_price = setup_price
def handle_add(self, event):
assert self.setup_price in event
assert isinstance(event[self.setup_price], numbers.Number)
def handle_remove(self, event):
pass
def __call__(self):
if len(self.ticks) < self.window_length:
return
Yp = self.ticks[-1][self.setup_price]
Xp = self.ticks[-2][self.setup_price]
X = self.ticks[0][self.setup_price]
Y = self.ticks[1][self.setup_price]
if (Xp > X) and (Yp < Y):
return BEAR
if (Xp < X) and (Yp > Y):
return BULL
|
Python
| 0
|
@@ -743,16 +743,248 @@
Bear'%0A%0A%0A
+def flip(events, field):%0A Yp = events%5B-1%5D%5Bfield%5D%0A Xp = events%5B-2%5D%5Bfield%5D%0A X = events%5B0%5D%5Bfield%5D%0A Y = events%5B1%5D%5Bfield%5D%0A%0A if (Xp %3E X) and (Yp %3C Y):%0A return BEAR%0A if (Xp %3C X) and (Yp %3E Y):%0A return BULL%0A%0A%0A
class Fl
@@ -1074,27 +1074,21 @@
riod=4,
-setup_price
+field
='close_
@@ -1142,33 +1142,21 @@
elf.
-setup_price = setup_price
+field = field
%0A
@@ -1297,27 +1297,21 @@
d, self.
-setup_price
+field
)%0A%0A d
@@ -1507,27 +1507,21 @@
period,
-setup_price
+field
):%0A
@@ -1634,33 +1634,21 @@
elf.
-setup_price = setup_price
+field = field
%0A%0A
@@ -1698,27 +1698,21 @@
rt self.
-setup_price
+field
in even
@@ -1750,27 +1750,21 @@
nt%5Bself.
-setup_price
+field
%5D, numbe
@@ -1930,104 +1930,20 @@
-Yp = self.ticks%5B-1%5D%5Bself.setup_price%5D%0A Xp = self.ticks%5B-2%5D%5Bself.setup_price%5D%0A X =
+return flip(
self
@@ -1952,187 +1952,18 @@
icks
-%5B0%5D%5B
+,
self.
-setup_price%5D%0A Y = self.ticks%5B1%5D%5Bself.setup_price%5D%0A%0A if (Xp %3E X) and (Yp %3C Y):%0A return BEAR%0A if (Xp %3C X) and (Yp %3E Y):%0A return BULL
+field)
%0A
|
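The refactor hoists the four-tick comparison out of FlipWindow.__call__ into a module-level flip(events, field) and renames setup_price to field, so the signal can be unit-tested on plain dicts without building an EventWindow. Usage of the extracted helper (constants and logic taken from the diff):

BULL, BEAR = 'Bull', 'Bear'

def flip(events, field):
    Yp, Xp = events[-1][field], events[-2][field]
    X, Y = events[0][field], events[1][field]
    if Xp > X and Yp < Y:
        return BEAR
    if Xp < X and Yp > Y:
        return BULL

ticks = [{'close': c} for c in (10, 11, 12, 13, 14, 9)]
print(flip(ticks, 'close'))  # 'Bear': newest close fell below its reference bar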
0be54cb28387c535bea17e6c3a1a277151b9648a
|
Add the url name for students_info view to gci.views.helper.url_names.
|
app/soc/modules/gci/views/helper/url_names.py
|
app/soc/modules/gci/views/helper/url_names.py
|
#!/usr/bin/env python2.5
#
# Copyright 2011 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for storing GCI related URL names.
"""
GCI_LIST_ORG_INVITES = 'gci_list_org_invites'
GCI_LIST_INVITES = 'gci_list_invites'
GCI_MANAGE_INVITE = 'gci_manage_invite'
GCI_RESPOND_INVITE = 'gci_respond_invite'
GCI_SEND_INVITE = 'gci_send_invite'
GCI_MANAGE_REQUEST = 'gci_manage_request'
GCI_RESPOND_REQUEST = 'gci_respond_request'
GCI_SEND_REQUEST = 'gci_send_request'
GCI_LEADERBOARD = 'gci_leaderboard'
GCI_STUDENT_TASKS = 'gci_student_tasks'
GCI_STUDENT_FORM_DOWNLOAD = 'gci_student_form_download'
CREATE_GCI_ORG_PROFILE = 'create_gci_org_profile'
EDIT_GCI_ORG_PROFILE = 'edit_gci_org_profile'
GCI_ORG_HOME = 'gci_org_home'
GCI_VIEW_TASK = 'gci_view_task'
# GET PARAMETERS WHICH ARE USED THROUGHOUT THE MODULE
#TODO(dhans): consider creation of a separate module for that
"""GET parameter which should be set in order to download Consent Form.
"""
CONSENT_FORM_GET_PARAM = 'consent_form'
"""GET parameter which should be set in order to download Student ID Form.
"""
STUDENT_ID_FORM_GET_PARAM = 'student_id_form'
|
Python
| 0
|
@@ -1629,12 +1629,53 @@
ent_id_form'
+%0A%0AGCI_STUDENTS_INFO = 'gci_students_info'
|
65c6c0b5ac47caac71c6c1284d84c1004d348c01
|
Fix imports at top of file.
|
partner_relations/model/__init__.py
|
partner_relations/model/__init__.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# This module copyright (C) 2013 Therp BV (<http://therp.nl>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
PADDING = 10
def get_partner_type(partner):
"""Get partner type for relation.
:param partner: a res.partner either a company or not
:return: 'c' for company or 'p' for person
:rtype: str
"""
return 'c' if partner.is_company else 'p'
from . import res_partner
from . import res_partner_relation
from . import res_partner_relation_type
from . import res_partner_relation_all
from . import res_partner_relation_type_selection
|
Python
| 0
|
@@ -979,16 +979,207 @@
######%0A%0A
+from . import res_partner%0Afrom . import res_partner_relation%0Afrom . import res_partner_relation_type%0Afrom . import res_partner_relation_all%0Afrom . import res_partner_relation_type_selection%0A%0A
PADDING
@@ -1434,196 +1434,4 @@
'p'%0A
-%0A%0Afrom . import res_partner%0Afrom . import res_partner_relation%0Afrom . import res_partner_relation_type%0Afrom . import res_partner_relation_all%0Afrom . import res_partner_relation_type_selection%0A
|
8a926a1894a20d5e9134f3996a31b82b3dcc37e1
|
use gz file extension
|
pipeline/templatetags/compressed.py
|
pipeline/templatetags/compressed.py
|
from __future__ import unicode_literals
from django.contrib.staticfiles.storage import staticfiles_storage
from django import template
from django.template.loader import render_to_string
from django.utils.safestring import mark_safe
from pipeline.conf import settings
from pipeline.packager import Packager, PackageNotFound
from pipeline.utils import guess_type
register = template.Library()
class CompressedMixin(object):
def package_for(self, package_name, package_type):
package = {
'js': getattr(settings, 'PIPELINE_JS', {}).get(package_name, {}),
'css': getattr(settings, 'PIPELINE_CSS', {}).get(package_name, {}),
}[package_type]
if package:
package = {package_name: package}
packager = {
'js': Packager(css_packages={}, js_packages=package),
'css': Packager(css_packages=package, js_packages={}),
}[package_type]
return packager.package_for(package_type, package_name)
def render_compressed(self, package, package_type):
if settings.PIPELINE_ENABLED:
method = getattr(self, "render_{0}".format(package_type))
return method(package, package.output_filename)
else:
packager = Packager()
method = getattr(self, "render_individual_{0}".format(package_type))
paths = packager.compile(package.paths)
templates = packager.pack_templates(package)
return method(package, paths, templates=templates)
class CompressedCSSNode(CompressedMixin, template.Node):
def __init__(self, name):
self.name = name
def render(self, context):
package_name = template.Variable(self.name).resolve(context)
try:
package = self.package_for(package_name, 'css')
except PackageNotFound:
return '' # fail silently, do not return anything if an invalid group is specified
return self.render_compressed(package, 'css')
def render_css(self, package, path):
template_name = package.template_name or "pipeline/css.html"
context = package.extra_context
context.update({
'type': guess_type(path, 'text/css'),
'url': mark_safe(staticfiles_storage.url(path))
})
return render_to_string(template_name, context)
def render_individual_css(self, package, paths, **kwargs):
tags = [self.render_css(package, path) for path in paths]
return '\n'.join(tags)
class CompressedJSNode(CompressedMixin, template.Node):
def __init__(self, name):
self.name = name
def render(self, context):
package_name = template.Variable(self.name).resolve(context)
try:
package = self.package_for(package_name, 'js')
except PackageNotFound:
return '' # fail silently, do not return anything if an invalid group is specified
return self.render_compressed(package, 'js')
def render_js(self, package, path):
template_name = package.template_name or "pipeline/js.html"
context = package.extra_context
context.update({
'type': guess_type(path, 'text/javascript'),
'url': mark_safe(staticfiles_storage.url(path))
})
return render_to_string(template_name, context)
def render_inline(self, package, js):
context = package.extra_context
context.update({
'source': js
})
return render_to_string("pipeline/inline_js.html", context)
def render_individual_js(self, package, paths, templates=None):
tags = [self.render_js(package, js) for js in paths]
if templates:
tags.append(self.render_inline(package, templates))
return '\n'.join(tags)
@register.tag
def compressed_css(parser, token):
try:
tag_name, name = token.split_contents()
except ValueError:
raise template.TemplateSyntaxError('%r requires exactly one argument: the name of a group in the PIPELINE_CSS setting' % token.split_contents()[0])
return CompressedCSSNode(name)
@register.tag
def compressed_js(parser, token):
try:
tag_name, name = token.split_contents()
except ValueError:
raise template.TemplateSyntaxError('%r requires exactly one argument: the name of a group in the PIPELINE_JS setting' % token.split_contents()[0])
return CompressedJSNode(name)
|
Python
| 0
|
@@ -1513,24 +1513,222 @@
templates)%0A%0A
+ def gzip_allowed(self, http_accepts):%0A print http_accepts%0A return 'gzip' in http_accepts and%5C%0A settings.PIPELINE_ENABLED and getattr(settings, 'AWS_IS_GZIPPED', False)%0A%0A
%0Aclass Compr
@@ -1998,24 +1998,178 @@
ame, 'css')%0A
+ if self.gzip_allowed(getattr(context%5B'request'%5D.META,'HTTP_ACCEPT_ENCODING', '')):%0A package.config%5B'output_filename'%5D += '.gz'%0A
exce
@@ -3134,24 +3134,178 @@
name, 'js')%0A
+ if self.gzip_allowed(getattr(context%5B'request'%5D.META,'HTTP_ACCEPT_ENCODING', '')):%0A package.config%5B'output_filename'%5D += '.gz'%0A
exce
|
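The change teaches both template tags to serve precompressed bundles: when the request advertises gzip support, the pipeline is enabled and the storage is gzipped (AWS_IS_GZIPPED), '.gz' is appended to output_filename. Note the added gzip_allowed still carries a Python 2 'print http_accepts' debug statement. The gating logic on its own, with that print dropped:

def gzip_allowed(http_accepts, pipeline_enabled, aws_is_gzipped):
    # Serve name.css.gz only when the browser accepts gzip and storage ships gzipped files.
    return 'gzip' in http_accepts and pipeline_enabled and aws_is_gzipped

if gzip_allowed('gzip, deflate', True, True):
    output_filename = 'styles.css' + '.gz'
    print(output_filename)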
59ce82a3a98be7eb68c6d41117bced6802a84ee1
|
fix for latest jwt
|
pivportal/lib/pivportal/security.py
|
pivportal/lib/pivportal/security.py
|
""" Command Line Interface Module """
from flask import Response, request
import json
import re
import jwt
import datetime
from functools import wraps
# Redis "requests" hash
# {"12345678": { "username": X, "client_ip": X, "authorized": False, "time": time.time()}}
# {"dn1": "user1", "dn2": "user2"}
dn_to_username = {}
register_ticket_timeout = 60
def dn_is_valid(dn):
if re.match(r'^[a-zA-Z0-9_\-\,\(\)\+\=\:\s\. ]+$', dn):
return True
return False
def username_is_valid(username):
if re.match(r'^[a-zA-Z0-9_\-]+$', username) and username in dn_to_username.values():
return True
return False
def requestid_is_valid(requestid):
if re.match(r'^[a-zA-Z0-9]+$', requestid) and len(requestid) == 16:
return True
return False
def ip_is_valid(ip):
if re.match(r'^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$', ip):
return True
return False
def is_duplicate_register(username, requestid, auth_requests):
if requestid in auth_requests:
this_request = json.loads(auth_requests[requestid])
if this_request["username"] == username:
# Request Is Already Registered
return True
return False
def create_token(user, secret_key):
payload = {
# subject
'sub': user,
#issued at
'iat': datetime.datetime.utcnow(),
#expiry
'exp': datetime.datetime.utcnow() + datetime.timedelta(days=1)
}
token = jwt.encode(payload, secret_key, algorithm='HS256')
return token.encode().decode('unicode_escape')
def parse_token(token, secret_key):
return jwt.encode().decode(token, secret_key, algorithms='HS256')
def token_required(secret_key):
def token_required_decorator(f):
@wraps(f)
def decorated_function(*args, **kwargs):
g = f.__globals__
if not request.headers.get('Authorization'):
return Response(response="Missing authorization header", status=401)
try:
payload = parse_token(request.headers.get('Authorization').split()[1], secret_key)
except jwt.DecodeError:
return Response(response="Token is invalid", status=401)
except jwt.ExpiredSignatureError:
return Response(response="Token has expired", status=401)
# Set username for decorated func
g["username"] = payload['sub']
return f(*args, **kwargs)
return decorated_function
return token_required_decorator
def valid_client_cert_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
g = f.__globals__
if not request.headers.get('SSL_CLIENT_S_DN'):
return Response(response="Missing Client DN Header", status=401)
# Get Client DN
user_dn = request.headers.get('SSL_CLIENT_S_DN')
# Valid DN
if not dn_is_valid(user_dn):
return Response(response=json.dumps({"response": " Invalid Request DN %s" % user_dn}), status=400, mimetype="application/json")
# Authorize User
if user_dn not in dn_to_username:
return Response(response=json.dumps({"response": "Authentication Failure for DN %s" % user_dn}), status=401, mimetype="application/json")
username = dn_to_username[user_dn]
# Verify Request
if not username_is_valid(username):
return Response(response=json.dumps({"response": " Invalid Request Username"}), status=400, mimetype="application/json")
# Set username for decorated func
g["username"] = username
return f(*args, **kwargs)
return decorated_function
|
Python
| 0
|
@@ -2105,16 +2105,27 @@
ept jwt.
+exceptions.
DecodeEr
@@ -2225,16 +2225,27 @@
ept jwt.
+exceptions.
ExpiredS
|
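The hunks qualify the except clauses for recent PyJWT, where DecodeError and ExpiredSignatureError live under jwt.exceptions. Worth flagging: parse_token above calls jwt.encode().decode(token, ...), which reads like a typo for jwt.decode(token, ...) that this commit does not address. A sketch of verification against current PyJWT, assuming HS256 tokens:

import jwt

def verify(token, secret_key):
    try:
        return jwt.decode(token, secret_key, algorithms=['HS256'])
    except jwt.exceptions.ExpiredSignatureError:
        return None  # token has expired
    except jwt.exceptions.DecodeError:
        return None  # token is malformed or its signature is invalid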
b1a9ce85686e8ff4a75864bd9a3ae1dc61a1c5ab
|
change make_result()
|
plenum/server/ledger_req_handler.py
|
plenum/server/ledger_req_handler.py
|
from abc import ABCMeta, abstractmethod
from typing import List
from plenum.common.constants import STATE_PROOF, TXN_TIME, DATA, MULTI_SIGNATURE, PROOF_NODES, ROOT_HASH
from common.exceptions import PlenumValueError, LogicError
from common.serializers.serialization import state_roots_serializer, proof_nodes_serializer
from plenum.common.plenum_protocol_version import PlenumProtocolVersion
from plenum.common.types import f
from stp_core.common.log import getlogger
from plenum.common.ledger import Ledger
from plenum.common.request import Request
from plenum.server.req_handler import RequestHandler
from plenum.common.txn_util import reqToTxn, append_txn_metadata
from state.state import State
logger = getlogger()
class LedgerRequestHandler(RequestHandler, metaclass=ABCMeta):
"""
Base class for request handlers
Declares methods for validation, application of requests and
state control
"""
query_types = set()
write_types = set()
def __init__(self, ledger: Ledger, state: State, ts_store=None):
self.state = state
self.ledger = ledger
self.ts_store = ts_store
def updateState(self, txns, isCommitted=False):
"""
Updates current state with a number of committed or
not committed transactions
"""
def gen_txn_path(self, txn):
return None
def _reqToTxn(self, req: Request):
return reqToTxn(req)
def apply(self, req: Request, cons_time: int):
txn = self._reqToTxn(req)
txn = append_txn_metadata(txn, txn_id=self.gen_txn_path(txn))
self.ledger.append_txns_metadata([txn], cons_time)
(start, end), _ = self.ledger.appendTxns(
[self.transform_txn_for_ledger(txn)])
self.updateState([txn])
return start, txn
def commit(self, txnCount, stateRoot, txnRoot, ppTime) -> List:
"""
:param txnCount: The number of requests to commit (The actual requests
are picked up from the uncommitted list from the ledger)
:param stateRoot: The state trie root after the txns are committed
:param txnRoot: The txn merkle root after the txns are committed
:return: list of committed transactions
"""
return self._commit(self.ledger, self.state, txnCount, stateRoot,
txnRoot, ppTime, ts_store=self.ts_store)
def applyForced(self, req: Request):
if not req.isForced():
raise LogicError('requestHandler.applyForce method is called '
'for not forced request: {}'.format(req))
def onBatchCreated(self, state_root, txn_time):
pass
def onBatchRejected(self):
pass
@abstractmethod
def doStaticValidation(self, request: Request):
pass
def is_query(self, txn_type):
return txn_type in self.query_types
def get_query_response(self, request):
raise NotImplementedError
@staticmethod
def transform_txn_for_ledger(txn):
return txn
@staticmethod
def _commit(ledger, state, txnCount, stateRoot, txnRoot, ppTime, ts_store=None):
_, committedTxns = ledger.commitTxns(txnCount)
stateRoot = state_roots_serializer.deserialize(stateRoot.encode()) if isinstance(
stateRoot, str) else stateRoot
# TODO test for that
if ledger.root_hash != txnRoot:
# Probably the following fail should trigger catchup
# TODO add repr / str for Ledger class and dump it here as well
raise PlenumValueError(
'txnRoot', txnRoot,
("equal to current ledger root hash {}"
.format(ledger.root_hash))
)
state.commit(rootHash=stateRoot)
if ts_store:
ts_store.set(ppTime, stateRoot)
return committedTxns
@property
def operation_types(self) -> set:
return self.write_types.union(self.query_types)
@property
def valid_txn_types(self) -> set:
return self.write_types.union(self.query_types)
def get_value_from_state(self, path, head_hash=None, with_proof=False, multi_sig=None):
'''
Get a value (and proof optionally)for the given path in state trie.
Does not return the proof is there is no aggregate signature for it.
:param path: the path generate a state proof for
:param head_hash: the root to create the proof against
:param get_value: whether to return the value
:return: a state proof or None
'''
root_hash = head_hash if head_hash else self.state.committedHeadHash
encoded_root_hash = state_roots_serializer.serialize(bytes(root_hash))
if not with_proof:
return self.state.get_for_root_hash(root_hash, path), None
if not multi_sig:
# Just return the value and not proof
try:
return self.state.get_for_root_hash(root_hash, path), None
except KeyError:
return None, None
else:
try:
proof, value = self.state.generate_state_proof(key=path,
root=self.state.get_head_by_hash(root_hash),
serialize=True,
get_value=True)
value = self.state.get_decoded(value) if value else value
encoded_proof = proof_nodes_serializer.serialize(proof)
proof = {
ROOT_HASH: encoded_root_hash,
MULTI_SIGNATURE: multi_sig.as_dict(),
PROOF_NODES: encoded_proof
}
return value, proof
except KeyError:
return None, None
@staticmethod
def make_result(request, data, last_seq_no=None, update_time=None, proof=None):
result = {**request.operation, **{
DATA: data,
f.IDENTIFIER.nm: request.identifier,
f.REQ_ID.nm: request.reqId
}}
if proof and request.protocolVersion and \
request.protocolVersion >= PlenumProtocolVersion.STATE_PROOF_SUPPORT.value:
result[STATE_PROOF] = proof
# Do not inline please, it makes debugging easier
return result
|
Python
| 0
|
@@ -5905,44 +5905,8 @@
ata,
- last_seq_no=None, update_time=None,
pro
|
a056ddc885d7eb333ab323f7552bfffd35635a8a
|
Add period at end of plug-in description
|
plugins/ChangeLogPlugin/__init__.py
|
plugins/ChangeLogPlugin/__init__.py
|
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from . import ChangeLog
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
return {
"plugin": {
"name": catalog.i18nc("@label", "Changelog"),
"author": "Ultimaker",
"version": "1.0",
"description": catalog.i18nc("@info:whatsthis", "Shows changes since latest checked version"),
"api": 2
}
}
def register(app):
return {"extension": ChangeLog.ChangeLog()}
|
Python
| 0
|
@@ -457,16 +457,17 @@
version
+.
%22),%0A
|
9d92862f903b4683f1365e7ae82dd48d60e86d34
|
Add new urls, login and register
|
aeSupernova/urls.py
|
aeSupernova/urls.py
|
from django.conf.urls import patterns, include, url
from django.views.generic import TemplateView
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'aeSupernova.views.home', name='home'),
# url(r'^aeSupernova/', include('aeSupernova.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
#url(r'^$', 'aeSupernova.view.index'),
url(r'^index/$', TemplateView.as_view(template_name='index.html')),
url(r'^opticalSheet/', include('aeSupernova.opticalSheet.urls')),
url(r'^datafile/', include('aeSupernova.datafile.urls')),
url(r'^header/', include('aeSupernova.header.urls')),
url(r'^generator/', include('aeSupernova.generator.urls')),
url(r'^control/', include('aeSupernova.control.urls')),
url(r'^presentation/', include('aeSupernova.presentation.urls')),
url(r'^encoder/', include('aeSupernova.encoder.urls')),
url(r'^lerJupiter/', include('aeSupernova.lerJupiter.urls')),
url(r'^algeLin/', include('aeSupernova.algeLin.urls')),
)
|
Python
| 0
|
@@ -90,16 +90,86 @@
lateView
+%0Afrom django.contrib import admin%0Afrom login import views%0Aimport login
%0A%0A# Unco
@@ -1359,18 +1359,120 @@
in.urls')),%0A
+ url(r'%5Elogin/', include('login.urls')),%0A url(r'%5Eregister/$', views.register, name='register'),%0A
)%0A
|
1fb2a774765bc46e1bc2474136f135c59006c787
|
Return ConversationType in serializer
|
yunity/conversations/serializers.py
|
yunity/conversations/serializers.py
|
from rest_framework import serializers
from rest_framework.fields import CharField, DateTimeField
from rest_framework.relations import PrimaryKeyRelatedField
from yunity.api.serializers import UserSerializer
from yunity.conversations.models import ConversationMessage as MessageModel, ConversationType
from yunity.conversations.models import Conversation as ConversationModel
from yunity.users.models import User as UserModel
class MessageSerializer(serializers.Serializer):
content = CharField(max_length=100000)
author = PrimaryKeyRelatedField(read_only=True)
time = DateTimeField(read_only=True, source='created_at')
def create(self, validated_data):
message = MessageModel.objects.create(
sent_by_id=self.context['request'].user.id,
in_conversation_id=self.context['request'].data['in_conversation_id'],
**validated_data)
return message
class ConversationSerializer(serializers.Serializer):
topic = CharField(max_length=150, required=False)
# Writing
with_participants = PrimaryKeyRelatedField(many=True, write_only=True, queryset=UserModel.objects.all())
message = CharField(max_length=100000, write_only=True)
# Reading
id = PrimaryKeyRelatedField(read_only=True)
participants = UserSerializer(many=True, read_only=True)
messages = MessageSerializer(many=True, read_only=True)
def create(self, validated_data):
"""
Create new conversation with other users and a message
"""
participant_ids = [_.id for _ in validated_data['with_participants']] + \
[self.context['request'].user.id, ]
if len(participant_ids) > 2:
chat_type = ConversationType.MULTICHAT
else:
chat_type = ConversationType.ONE_ON_ONE
chat = ConversationModel.objects.create(type=chat_type)
chat.participants = participant_ids
chat.save()
MessageModel.objects.create(
sent_by_id=self.context['request'].user.id,
in_conversation_id=chat.id,
content=validated_data['message']['content'],
)
return chat
def update(self, conversation, validated_data):
conversation.name = validated_data.get('name', conversation.name)
conversation.save()
return conversation
def validate_with_participants(self, value):
if len(value) < 1:
raise serializers.ValidationError("No chat participants given")
if len(value) == 1 and self.context['request'].user.id in value:
raise serializers.ValidationError("Requesting user is only participant")
return value
|
Python
| 0.000001
|
@@ -86,24 +86,47 @@
ateTimeField
+, SerializerMethodField
%0Afrom rest_f
@@ -1282,24 +1282,73 @@
_only=True)%0A
+ type = SerializerMethodField(read_only=True)%0A
particip
@@ -1453,24 +1453,101 @@
only=True)%0A%0A
+ def get_type(self, obj):%0A return ConversationType.name(obj.type)%0A%0A
def crea
|
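The added field relies on DRF's SerializerMethodField convention: a read-only field whose value comes from get_<field_name>(self, obj) on the serializer, here mapping the stored conversation type back to its name via ConversationType.name. The pattern standalone, with a toy mapping rather than the project's models:

from rest_framework import serializers

class ThingSerializer(serializers.Serializer):
    type = serializers.SerializerMethodField()  # read-only by definition

    def get_type(self, obj):
        # DRF resolves 'type' by calling this method with each instance.
        return {1: 'ONE_ON_ONE', 2: 'MULTICHAT'}.get(obj['type'], 'UNKNOWN')

print(ThingSerializer({'type': 2}).data)  # {'type': 'MULTICHAT'}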
255ddb1a6910e590cb454a0d4e03f51b8d7b2092
|
Update setup.py console script to use cli instead of main
|
{{cookiecutter.repo_name}}/setup.py
|
{{cookiecutter.repo_name}}/setup.py
|
import sys
import os
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
class PyTest(TestCommand):
user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")]
def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = []
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
errno = pytest.main(self.pytest_args)
sys.exit(errno)
setup(
name='{{cookiecutter.repo_name}}',
version='{{cookiecutter.version}}',
author='{{cookiecutter.full_name}}',
author_email='{{cookiecutter.email}}',
description='{{cookiecutter.short_description}}',
long_description=read('README.rst'),
license='MIT',
keywords=(
"Python, cookiecutter, kivy, buildozer, pytest, projects, project "
"templates, example, documentation, tutorial, setup.py, package, "
"android, touch, mobile, NUI"
),
url='https://github.com/{{cookiecutter.github_username}}/{{cookiecutter.repo_name}}',
install_requires=['kivy>={{cookiecutter.kivy_version}}'],
zip_safe=False,
packages=find_packages(),
include_package_data=True,
entry_points={
'console_scripts': [
'{{cookiecutter.repo_name}}={{cookiecutter.repo_name}}.main:main'
]
},
tests_require=['pytest'],
cmdclass={'test': PyTest},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: X11 Applications',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Topic :: Artistic Software',
'Topic :: Multimedia :: Graphics :: Presentation',
'Topic :: Software Development :: User Interfaces',
],
)
|
Python
| 0
|
@@ -1517,20 +1517,19 @@
_name%7D%7D.
-main
+cli
:main'%0A
|
2a4e5ad6ac5e5400564d0dc9306c2ab30b9dba98
|
bump version
|
pinax_theme_bootstrap/__init__.py
|
pinax_theme_bootstrap/__init__.py
|
__version__ = "0.1.3"
|
Python
| 0
|
@@ -16,6 +16,6 @@
0.1.
-3
+4
%22
|
2eef6357cad8510163ef769a76e1d8680f16c639
|
Fix compat tests
|
tests/private/compatpatch.py
|
tests/private/compatpatch.py
|
import copy
from ..common import ApiTestBase, ClientCompatPatch
class CompatPatchTests(ApiTestBase):
@classmethod
def init_all(cls, api):
return [
{
'name': 'test_compat_media',
'test': CompatPatchTests('test_compat_media', api, media_id='1206573574980690068_1497851591')
},
{
'name': 'test_compat_comment',
'test': CompatPatchTests('test_compat_comment', api, media_id='1206573574980690068_1497851591')
},
{
'name': 'test_compat_user',
'test': CompatPatchTests('test_compat_user', api, user_id='124317')
},
{
'name': 'test_compat_user_list',
'test': CompatPatchTests('test_compat_user_list', api, user_id='124317')
},
]
def test_compat_media(self):
self.api.auto_patch = False
results = self.api.media_info(self.test_media_id)
self.api.auto_patch = True
media = results.get('items', [])[0]
media_patched = copy.deepcopy(media)
ClientCompatPatch.media(media_patched)
self.assertIsNone(media.get('link'))
self.assertIsNotNone(media_patched.get('link'))
self.assertIsNone(media.get('created_time'))
self.assertIsNotNone(media_patched.get('created_time'))
self.assertIsNone(media.get('images'))
self.assertIsNotNone(media_patched.get('images'))
self.assertIsNone(media.get('type'))
self.assertIsNotNone(media_patched.get('type'))
self.assertIsNone(media.get('filter'))
self.assertIsNotNone(media_patched.get('filter'))
self.assertIsNone(media.get('user', {}).get('id'))
self.assertIsNotNone(media_patched.get('user', {}).get('id'))
self.assertIsNone(media.get('user', {}).get('profile_picture'))
self.assertIsNotNone(media_patched.get('user', {}).get('profile_picture'))
if media['caption']:
self.assertIsNone(media.get('caption', {}).get('id'))
self.assertIsNotNone(media_patched['caption']['id'])
self.assertIsNone(media.get('caption', {}).get('from'))
self.assertIsNotNone(media_patched['caption']['from'])
media_dropped = copy.deepcopy(media)
ClientCompatPatch.media(media_dropped, drop_incompat_keys=True)
self.assertIsNone(media_dropped.get('pk'))
def test_compat_comment(self):
self.api.auto_patch = False
results = self.api.media_comments(self.test_media_id)
self.api.auto_patch = True
self.assertGreater(len(results.get('comments', [])), 0, 'No items returned.')
comment = results.get('comments', [{}])[0]
comment_patched = copy.deepcopy(comment)
ClientCompatPatch.comment(comment_patched)
self.assertIsNone(comment.get('id'))
self.assertIsNotNone(comment_patched.get('id'))
self.assertIsNone(comment.get('created_time'))
self.assertIsNotNone(comment_patched.get('created_time'))
self.assertIsNone(comment.get('from'))
self.assertIsNotNone(comment_patched.get('from'))
comment_patched = copy.deepcopy(comment)
ClientCompatPatch.comment(comment_patched, drop_incompat_keys=True)
self.assertIsNone(comment_patched.get('pk'))
def test_compat_user(self):
self.api.auto_patch = False
results = self.api.user_info(self.test_user_id)
self.api.auto_patch = True
user = results.get('user', {})
user_patched = copy.deepcopy(user)
ClientCompatPatch.user(user_patched)
self.assertIsNone(user.get('id'))
self.assertIsNotNone(user_patched.get('id'))
self.assertIsNone(user.get('bio'))
self.assertIsNotNone(user_patched.get('bio'))
self.assertIsNone(user.get('profile_picture'))
self.assertIsNotNone(user_patched.get('profile_picture'))
self.assertIsNone(user.get('website'))
self.assertIsNotNone(user_patched.get('website'))
user_patched = copy.deepcopy(user)
ClientCompatPatch.user(user_patched, drop_incompat_keys=True)
self.assertIsNone(user_patched.get('pk'))
def test_compat_user_list(self):
self.api.auto_patch = False
results = self.api.user_following(self.test_user_id)
self.api.auto_patch = True
user = results.get('users', [{}])[0]
user_patched = copy.deepcopy(user)
ClientCompatPatch.list_user(user_patched)
self.assertIsNone(user.get('id'))
self.assertIsNotNone(user_patched.get('id'))
self.assertIsNone(user.get('profile_picture'))
self.assertIsNotNone(user_patched.get('profile_picture'))
user_patched = copy.deepcopy(user)
ClientCompatPatch.user(user_patched, drop_incompat_keys=True)
self.assertIsNone(user_patched.get('pk'))
|
Python
| 0.000002
|
@@ -3184,37 +3184,37 @@
comment_
-patch
+dropp
ed = copy.deepco
@@ -3267,21 +3267,21 @@
comment_
-patch
+dropp
ed, drop
@@ -3327,37 +3327,37 @@
tIsNone(comment_
-patch
+dropp
ed.get('pk'))%0A%0A
@@ -4065,37 +4065,37 @@
)%0A%0A user_
-patch
+dropp
ed = copy.deepco
@@ -4131,37 +4131,37 @@
Patch.user(user_
-patch
+dropp
ed, drop_incompa
@@ -4196,37 +4196,37 @@
sertIsNone(user_
-patch
+dropp
ed.get('pk'))%0A%0A
@@ -4753,37 +4753,37 @@
)%0A%0A user_
-patch
+dropp
ed = copy.deepco
@@ -4809,32 +4809,37 @@
ientCompatPatch.
+list_
user(user_patche
@@ -4832,21 +4832,21 @@
er(user_
-patch
+dropp
ed, drop
@@ -4889,35 +4889,35 @@
sertIsNone(user_
-patch
+dropp
ed.get('pk'))%0A
|
17a53960f069b7908bc1ac9d0815643d360e5d39
|
fix a test in test_charts_json.py
|
tests/py/test_charts_json.py
|
tests/py/test_charts_json.py
|
from __future__ import absolute_import, division, print_function, unicode_literals
import datetime
import json
from mock import patch
from aspen.utils import utcnow
from gratipay.billing.payday import Payday
from gratipay.testing import Harness
def today():
return datetime.datetime.utcnow().date().strftime('%Y-%m-%d')
class TestChartsJson(Harness):
def setUp(self):
Harness.setUp(self)
self.alice = self.make_participant('alice', claimed_time='now')
self.bob = self.make_participant('bob', claimed_time='now')
self.carl = self.make_participant('carl', claimed_time='now')
self.make_exchange('bill', 10, 0, self.alice)
self.make_exchange('bill', 10, 0, self.bob)
self.make_participant('notactive', claimed_time='now')
self.alice.set_tip_to(self.carl, '1.00')
self.bob.set_tip_to(self.carl, '2.00')
def run_payday(self):
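        # Stub out card-hold fetching so payday can run without a payment processor.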
with patch.object(Payday, 'fetch_card_holds') as fch:
fch.return_value = {}
Payday.start().run()
def test_no_payday_returns_empty_list(self):
assert json.loads(self.client.GET('/carl/charts.json').body) == []
def test_zeroth_payday_is_ignored(self):
        self.run_payday() # zeroth
assert json.loads(self.client.GET('/carl/charts.json').body) == []
def test_first_payday_comes_through(self):
        self.run_payday() # zeroth, ignored
self.run_payday() # first
expected = [ { "date": today()
, "npatrons": 2
, "receipts": 3.00
}
]
actual = json.loads(self.client.GET('/carl/charts.json').body)
assert actual == expected
def test_second_payday_comes_through(self):
self.run_payday() # zeroth, ignored
self.run_payday() # first
self.alice.set_tip_to(self.carl, '5.00')
self.bob.set_tip_to(self.carl, '0.00')
self.run_payday() # second
expected = [ { "date": today()
, "npatrons": 1 # most recent first
, "receipts": 5.00
}
, { "date": today()
, "npatrons": 2
, "receipts": 3.00
}
]
actual = json.loads(self.client.GET('/carl/charts.json').body)
assert actual == expected
def test_sandwiched_tipless_payday_comes_through(self):
self.run_payday() # zeroth, ignored
self.run_payday() # first
# Oops! Sorry, Carl. :-(
self.alice.set_tip_to(self.carl, '0.00')
self.bob.set_tip_to(self.carl, '0.00')
self.run_payday() # second
# Bouncing back ...
self.alice.set_tip_to(self.carl, '5.00')
self.run_payday() # third
expected = [ { "date": today()
, "npatrons": 1 # most recent first
, "receipts": 5.00
}
, { "date": today()
, "npatrons": 0
, "receipts": 0.00
}
, { "date": today()
, "npatrons": 2
, "receipts": 3.00
}
]
actual = json.loads(self.client.GET('/carl/charts.json').body)
assert actual == expected
def test_out_of_band_transfer_gets_included_with_prior_payday(self):
self.run_payday() # zeroth, ignored
self.run_payday() # first
self.run_payday() # second
# Do an out-of-band transfer.
self.db.run("UPDATE participants SET balance=balance - 4 WHERE username='alice'")
self.db.run("UPDATE participants SET balance=balance + 4 WHERE username='carl'")
self.db.run("INSERT INTO transfers (tipper, tippee, amount, context) "
"VALUES ('alice', 'carl', 4, 'tip')")
self.run_payday() # third
expected = [ { "date": today()
, "npatrons": 2 # most recent first
, "receipts": 3.00
}
, { "date": today()
, "npatrons": 3 # Since this is rare, don't worry that we double-count alice.
, "receipts": 7.00
}
, { "date": today()
, "npatrons": 2
, "receipts": 3.00
}
]
actual = json.loads(self.client.GET('/carl/charts.json').body)
assert actual == expected
def test_never_received_gives_empty_array(self):
        self.run_payday() # zeroth, ignored
self.run_payday() # first
self.run_payday() # second
self.run_payday() # third
expected = []
actual = json.loads(self.client.GET('/alice/charts.json').body)
assert actual == expected
def test_transfer_volume(self):
self.run_payday()
self.run_payday()
expected = { "date": today()
, "weekly_gifts": '3.00'
, "charges": '0.00'
, "withdrawals": '0.00'
, "active_users": '3'
, "total_users": '4'
, "total_gifts": '6.00'
, "xTitle": utcnow().strftime('%Y-%m-%d')
}
actual = json.loads(self.client.GET('/about/charts.json').body)[0]
assert actual == expected
def test_anonymous_receiver(self):
self.run_payday()
self.run_payday()
self.client.POST('/carl/anonymous.json',
{'toggle': 'receiving'},
auth_as='carl')
r = self.client.GxT('/carl/charts.json')
assert r.code == 401
r = self.client.GxT('/carl/charts.json', auth_as='alice')
assert r.code == 403
|
Python
| 0.999832
|
@@ -5660,25 +5660,23 @@
('/carl/
-anonymous
+privacy
.json',%0A
@@ -5712,16 +5712,26 @@
ggle': '
+anonymous_
receivin
|
483397df89119bc1967d0e6774375b495347b357
|
fix viz routine
|
ott/utils/otp_utils.py
|
ott/utils/otp_utils.py
|
import os
import socket
import urllib2
import logging
log = logging.getLogger(__file__)
from ott.utils import file_utils
from ott.utils import exe_utils
# constants
DEF_NAME = "prod"
DEF_PORT = "55555"
OTP_DOWNLOAD_URL="http://maven.conveyal.com.s3.amazonaws.com/org/opentripplanner/otp/0.19.0/otp-0.19.0-shaded.jar"
OTP_NAME="otp.jar"
GRAPH_NAME = "Graph.obj"
def call_planner_svc(url, accept='application/xml'):
ret_val = None
try:
socket.setdefaulttimeout(2000)
log.debug("call_otp: OTP output for " + url)
req = urllib2.Request(url, None, {'Accept':accept})
res = urllib2.urlopen(req)
log.debug("call_otp: OTP output for " + url)
ret_val = res.read()
res.close()
except:
log.warn('ERROR: could not get data from url (timeout?): {0}'.format(url))
return ret_val
def run_otp_server(graph_dir, port="8080", otp_name=OTP_NAME, java_mem=None):
''' launch the server in a separate process
'''
file_utils.cd(graph_dir)
otp_path = os.path.join(graph_dir, otp_name)
cmd='-server -jar {} --port {} --router "" --graphs {}'.format(otp_path, port, graph_dir)
exe_utils.run_java(cmd, fork=True, big_xmx=java_mem)
def run_graph_builder(graph_dir, graph_name=GRAPH_NAME, otp_name=OTP_NAME, java_mem=None):
''' run OTP graph builder
'''
log.info("building the graph")
graph_path = os.path.join(graph_dir, graph_name)
otp_path = os.path.join(graph_dir, otp_name)
file_utils.rm(graph_path)
file_utils.cd(graph_dir)
cmd='-jar {} --build {} --cache {}'.format(otp_path, graph_dir, graph_dir)
exe_utils.run_java(cmd, big_xmx=java_mem)
def vizualize_graph(graph_dir, java_mem=None):
otp_path = os.path.join(graph_dir, otp_name)
file_utils.cd(graph_dir)
cmd='-jar {} --visualize --router "" --graphs {}'.format(otp_path, graph_dir)
exe_utils.run_java(cmd, fork=True, big_xmx=java_mem)
def config_graph_dir(graph_config, base_dir, force_update=False):
''' utility to make the graph dir, copy OTP config files into the graph directory, etc...
'''
name = graph_config.get('name', DEF_NAME)
dir = graph_config.get('dir', name) # optional 'dir' name overrides graph name
# step 1: mkdir (makes the dir if it doesn't exist)
graph_dir = os.path.join(base_dir, dir)
file_utils.mkdir(graph_dir)
graph_config['dir'] = graph_dir # save off the full graph dir back struct
# step 2: copy OTP config files
config_dir = os.path.join(base_dir, "config")
file_utils.copy_contents(config_dir, graph_dir, overwrite=force_update)
# step 3: check OTP jar exists in config dir
check_otp_jar(graph_dir, force_update=force_update)
return graph_dir
def get_graph_details(graphs, index=0):
''' utility function to find a graph config (e.g., graph folder name, web port, etc...) from self.graphs
@see [otp] section in config/app.ini
'''
ret_val = None
if graphs is None or len(graphs) < 1:
ret_val = {"name":DEF_NAME, "port":DEF_PORT}
log.warn("graphs config was NIL, using default 'prod' graph info")
else:
if index >= len(graphs):
index = 0
log.warn("graph index of {} exceeds list length, so defaulting to index 0".format(index))
ret_val = graphs[index]
return ret_val
def check_otp_jar(graph_dir, jar=OTP_NAME, expected_size=50000000, download_url=OTP_DOWNLOAD_URL, force_update=False):
""" utility to make sure otp.jar exists in the particular graph dir...
if not, download it
:return full-path to otp.jar
"""
jar_path = os.path.join(graph_dir, jar)
exists = os.path.exists(jar_path)
if not exists or file_utils.file_size(jar_path) < expected_size or force_update:
log.info("we don't see OTP {} in {}, so will download {} now".format(jar, graph_dir, download_url))
exe_utils.wget(download_url, jar_path)
return jar_path
|
Python
| 0
|
@@ -1703,24 +1703,43 @@
ava_mem=None
+, otp_name=OTP_NAME
):%0A otp_p
|
22fb6e8facd9bb7b22a175dd9ccac98609b2cfaa
|
fix sorting profiles in clustermanager
|
IPython/frontend/html/notebook/clustermanager.py
|
IPython/frontend/html/notebook/clustermanager.py
|
"""Manage IPython.parallel clusters in the notebook.
Authors:
* Brian Granger
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import os
from tornado import web
from zmq.eventloop import ioloop
from IPython.config.configurable import LoggingConfigurable
from IPython.config.loader import load_pyconfig_files
from IPython.utils.traitlets import Dict, Instance, CFloat
from IPython.parallel.apps.ipclusterapp import IPClusterStart
from IPython.core.profileapp import list_profiles_in
from IPython.core.profiledir import ProfileDir
from IPython.utils.path import get_ipython_dir
from IPython.utils.sysinfo import num_cpus
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class DummyIPClusterStart(IPClusterStart):
"""Dummy subclass to skip init steps that conflict with global app.
Instantiating and initializing this class should result in fully configured
launchers, but no other side effects or state.
"""
def init_signal(self):
pass
def init_logging(self):
pass
def reinit_logging(self):
pass
class ClusterManager(LoggingConfigurable):
profiles = Dict()
delay = CFloat(1., config=True,
help="delay (in s) between starting the controller and the engines")
loop = Instance('zmq.eventloop.ioloop.IOLoop')
def _loop_default(self):
from zmq.eventloop.ioloop import IOLoop
return IOLoop.instance()
def build_launchers(self, profile_dir):
starter = DummyIPClusterStart(log=self.log)
starter.initialize(['--profile-dir', profile_dir])
cl = starter.controller_launcher
esl = starter.engine_launcher
n = starter.n
return cl, esl, n
def get_profile_dir(self, name, path):
p = ProfileDir.find_profile_dir_by_name(path,name=name)
return p.location
def update_profiles(self):
"""List all profiles in the ipython_dir and cwd.
"""
for path in [get_ipython_dir(), os.getcwdu()]:
for profile in list_profiles_in(path):
pd = self.get_profile_dir(profile, path)
if profile not in self.profiles:
self.log.debug("Overwriting profile %s" % profile)
self.profiles[profile] = {
'profile': profile,
'profile_dir': pd,
'status': 'stopped'
}
def list_profiles(self):
self.update_profiles()
result = [self.profile_info(p) for p in self.profiles.keys()]
result.sort()
return result
def check_profile(self, profile):
if profile not in self.profiles:
raise web.HTTPError(404, u'profile not found')
def profile_info(self, profile):
self.check_profile(profile)
result = {}
data = self.profiles.get(profile)
result['profile'] = profile
result['profile_dir'] = data['profile_dir']
result['status'] = data['status']
if 'n' in data:
result['n'] = data['n']
return result
def start_cluster(self, profile, n=None):
"""Start a cluster for a given profile."""
self.check_profile(profile)
data = self.profiles[profile]
if data['status'] == 'running':
raise web.HTTPError(409, u'cluster already running')
cl, esl, default_n = self.build_launchers(data['profile_dir'])
n = n if n is not None else default_n
def clean_data():
data.pop('controller_launcher',None)
data.pop('engine_set_launcher',None)
data.pop('n',None)
data['status'] = 'stopped'
def engines_stopped(r):
self.log.debug('Engines stopped')
if cl.running:
cl.stop()
clean_data()
esl.on_stop(engines_stopped)
def controller_stopped(r):
self.log.debug('Controller stopped')
if esl.running:
esl.stop()
clean_data()
cl.on_stop(controller_stopped)
dc = ioloop.DelayedCallback(lambda: cl.start(), 0, self.loop)
dc.start()
dc = ioloop.DelayedCallback(lambda: esl.start(n), 1000*self.delay, self.loop)
dc.start()
self.log.debug('Cluster started')
data['controller_launcher'] = cl
data['engine_set_launcher'] = esl
data['n'] = n
data['status'] = 'running'
return self.profile_info(profile)
def stop_cluster(self, profile):
"""Stop a cluster for a given profile."""
self.check_profile(profile)
data = self.profiles[profile]
if data['status'] == 'stopped':
raise web.HTTPError(409, u'cluster not running')
data = self.profiles[profile]
cl = data['controller_launcher']
esl = data['engine_set_launcher']
if cl.running:
cl.stop()
if esl.running:
esl.stop()
# Return a temp info dict, the real one is updated in the on_stop
# logic above.
result = {
'profile': data['profile'],
'profile_dir': data['profile_dir'],
'status': 'stopped'
}
return result
def stop_all_clusters(self):
for p in self.profiles.keys():
            self.stop_cluster(p)
|
Python
| 0.000001
|
@@ -3095,32 +3095,39 @@
nfo(p) for p in
+sorted(
self.profiles.ke
@@ -3134,31 +3134,10 @@
ys()
-%5D%0A result.sort(
)
+%5D
%0A
|
e428bd3776257030c538e85fe94154686b3e4ff0
|
Make gen_fh.py script always use the next available FH number (#151)
|
scripts/gen_fh.py
|
scripts/gen_fh.py
|
# Generate this week's friday hack
# To generate some other FH pass in a number as argument
# e.g python gen_fh.py 1 generates next week's
# e.g python gen_fh.py 3 generates next next next week's
# Please first update data/friday_hacks.yml before running this
import yaml
from datetime import datetime, timedelta
from os import listdir
from os.path import isfile, join
from sys import argv
import re
with open('../data/friday_hacks.yml', 'r') as fin:
doc = yaml.load(fin)
start_date = datetime.strptime(doc['start_date'],
'%Y-%m-%d %H:%M:%S +0800')
# Time delta fixes weird bug
now = datetime.today() - timedelta(hours=3)
# Sick undocumented feature
if len(argv) > 1:
now += timedelta(days=7 * int(argv[1]))
hacks = doc['hacks']
cur = start_date
next_hack = None
next_date = None
for hack in hacks:
if cur > now:
next_hack = hack
next_date = cur
break
cur += timedelta(days=7)
if not next_hack:
print "Dude semester's over"
quit()
if not next_hack.get('topics'):
print "Dude no hackz"
quit()
date = cur
print "Creating FH post for " + str(cur)
name = raw_input("Your name? ")
# so future-proof it's sick
fhre = re.compile(
r'^20[0-9][0-9]-[01][0-9]-[0-3][0-9]-friday-hacks-([1-9][0-9]*)\.md$')
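# extracts the FH number from post filenames like 2016-01-15-friday-hacks-42.md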
num = 0
# so.. tempted... to... use lazy evaluation
for f in listdir('../content/post/'):
result = fhre.search(f)
if result:
cur = int(result.group(1))
if cur > num:
num = cur
num += 1
# What is abstraction?
if len(argv) > 1:
num += int(argv[1])
print "Creating FH post for #" + str(num) + ", at " + str(date)
# In case you want a different name, BUT WHYYY!?!?
# name = raw_input("Your name? ")
# now witness templating in raw string
content = '''\
---
title: "Friday Hacks #{num}, {month} {day}"
date: {now}
author: {author}
url: /{year}/{no_of_month}/friday-hacks-{num}
---
--- say something as introduction ---
{{{{% friday_hack_header venue="{venue}" date="{month} {day}" %}}}}
'''.format(
num=num,
now=datetime.today(),
year=next_date.strftime("%Y"),
month=next_date.strftime("%B"),
no_of_month=next_date.strftime('%m'),
day=next_date.day,
author=name,
venue=next_hack['venue']) + '\n'.join([
'''
### {talk_name}
#### Talk Description:
--- describe ----
#### Speaker Profile
--- describe ----
'''.format(talk_name=topic['title']) for topic in next_hack['topics']
])
filename = '../content/post/{now}-friday-hacks-{num}.md'.format(
now=next_date.strftime("%Y-%m-%d"),
num=num,
month=next_date.strftime('%b'),
day=next_date.day,
)
with open(filename, 'a') as fout:
fout.write(content)
|
Python
| 0
|
@@ -189,16 +189,143 @@
week's%0A
+# As for numbering, it will take the next number%0A# (e.g. if the previous post is FH #1000, the generated one will be FH #1001)%0A
# Please
@@ -1787,16 +1787,72 @@
m += 1%0A%0A
+ # In case you want to skip FH numbers BUT WHYYY!?!?%0A
# Wh
@@ -1869,24 +1869,26 @@
raction?%0A
+ #
if len(argv
@@ -1893,24 +1893,26 @@
gv) %3E 1:%0A
+ #
num +=
|
7faa33c1eff79223252d6a7c4fe5ad033383df6c
|
Bump version
|
l10n_ch_payment_slip/__openerp__.py
|
l10n_ch_payment_slip/__openerp__.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Nicolas Bessi. Copyright Camptocamp SA
# Financial contributors: Hasa SA, Open Net SA,
# Prisme Solutions Informatique SA, Quod SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{'name': 'Switzerland - Payment Slip (BVR/ESR)',
'summary': 'Print ESR/BVR payment slip with your invoices',
'description': """
Swiss Payment slip known as ESR/BVR
===================================
This addon allows you to print the ESR/BVR report Using Qweb report.
The ESR/BVR is grenerated as an image and is availabe in a fields
of the `l10n_ch.payment_slip` Model.
The ESR/BVR is created each time an invoice is validated.
To modify it you have to cancel it and reconfirm the invoice.
You can adjust the print out of ESR/BVR, which depend on each printer,
for every company in the "BVR Data" tab.
This is especialy useful when using pre-printed paper.
An option also allow you to print the ESR/BVR in background when using
white paper.
This module will also allows you to import v11 files provided
by financial institute into a bank statement
To do so, use the wizard provided in bank statement.
This module also adds transaction_ref field on entries in order to manage
reconciliation in multi payment context (unique reference needed on
account.move.line). Many BVR can now be printed from on invoice for each
payment terms.
""",
'version': '8.0.2.1.0',
'author': "Camptocamp,Odoo Community Association (OCA)",
'category': 'Localization',
'website': 'http://www.camptocamp.com',
'license': 'AGPL-3',
'depends': ['base',
'account',
'account_payment',
'report',
'l10n_ch_base_bank',
'base_transaction_id'],
'data': ["company_view.xml",
"bank_view.xml",
"account_invoice_view.xml",
"wizard/bvr_import_view.xml",
"report/report_declaration.xml",
"security/ir.model.access.csv"],
'demo': [],
'test': [],
'auto_install': False,
'installable': True,
'images': []
}
|
Python
| 0
|
@@ -2212,17 +2212,17 @@
8.0.2.1.
-0
+1
',%0A 'aut
|
79ab477593813641063491f1064e6bc6c2d0c7fb
|
Add review-dashboard parameter
|
managesf/model/yamlbkd/resources/project.py
|
managesf/model/yamlbkd/resources/project.py
|
#
# Copyright (c) 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from managesf.model.yamlbkd.resource import BaseResource
from managesf.model.yamlbkd.resources.storyboard import StoryboardOps
logger = logging.getLogger(__name__)
class ProjectOps(object):
def __init__(self, conf, new):
self.conf = conf
self.new = new
self.client = None
self.stb_ops = StoryboardOps(conf, new)
def create(self, **kwargs):
logs = []
if self.stb_ops.is_activated(**kwargs):
try:
self.stb_ops.update_project_groups(**kwargs)
except Exception, e:
msg = "Create Storyboard project group : err: %s" % e
logger.exception(msg)
logs.append(msg)
return logs
def update(self, **kwargs):
logs = []
if self.stb_ops.is_activated(**kwargs):
try:
self.stb_ops.update_project_groups(**kwargs)
except Exception, e:
msg = "Update Storyboard project group: err: %s" % e
logger.exception(msg)
logs.append(msg)
return logs
def delete(self, **kwargs):
logs = []
if self.stb_ops.is_activated(**kwargs):
try:
self.stb_ops.delete_project_groups(**kwargs)
except Exception, e:
msg = "Delete Storyboard project group: err: %s" % e
logger.exception(msg)
logs.append(msg)
return logs
def extra_validations(self, **kwargs):
logs = []
if self.stb_ops.is_activated(**kwargs):
logs.extend(self.stb_ops.extra_validations(**kwargs))
return logs
class Project(BaseResource):
DESCRIPTION = ("The project resource can be is used to describe a "
"project. It can be seen as the top level resource type in "
"in this model. You can use it reference multiple Git "
"repositories and multiple link to external resources like "
"a project website and the issues tracker website.")
MODEL_TYPE = 'project'
MODEL = {
'name': (
str,
'^([a-zA-Z0-9\-_\./])+$',
False,
"",
False,
"The project name",
),
'description': (
str,
'.*',
True,
None,
True,
"The project description",
),
'website': (
str,
'.*',
False,
"",
True,
"The project web page link",
),
'documentation': (
str,
'.*',
False,
"",
True,
"The project documentation link",
),
'issue-tracker-url': (
str,
'.*',
False,
"",
True,
"The project issue tracker link",
),
'issue-tracker': (
str,
'^(SFRedmine|SFStoryboard|)$',
False,
"",
True,
"The local issue tracker activated for this project",
),
'mailing-lists': (
list,
'.+@.+',
False,
[],
True,
"Email addresses of project mailing lists",
),
'contacts': (
list,
'.+@.+',
False,
[],
True,
"Email addresses of project main contacts",
),
'source-repositories': (
list,
'.+',
True,
None,
True,
"Code source repositories related to the project",
),
}
PRIORITY = 10
PRIMARY_KEY = 'name'
CALLBACKS = {
'update': lambda conf, new, kwargs:
ProjectOps(conf, new).update(**kwargs),
'create': lambda conf, new, kwargs:
ProjectOps(conf, new).create(**kwargs),
'delete': lambda conf, new, kwargs:
ProjectOps(conf, new).delete(**kwargs),
'extra_validations': lambda conf, new, kwargs:
ProjectOps(conf, new).extra_validations(**kwargs),
'get_all': lambda conf, new: ([], {}),
}
def get_deps(self, keyname=False):
if keyname:
return 'source-repositories'
return {'repos': set(self.resource['source-repositories'])}
|
Python
| 0.000001
|
@@ -3758,32 +3758,227 @@
ct%22,%0A ),%0A
+ 'review-dashboard': (%0A str,%0A '%5E(%5Ba-zA-Z0-9%5C-_%5D)*$',%0A False,%0A %22%22,%0A True,%0A %22A gerrit dashboard name reference%22,%0A ),%0A
'mailing
|
2e608036c8611026f9fb47a762901700891e284e
|
use BufferedWriter for gzip files -- 30% faster writing
|
cutadapt/xopen.py
|
cutadapt/xopen.py
|
"""
Open compressed files transparently.
"""
import gzip
import sys
import io
__author__ = 'Marcel Martin'
if sys.version_info[0] >= 3:
basestring = str
from codecs import getreader, getwriter
if sys.version_info < (2, 7):
buffered_reader = lambda x: x
else:
buffered_reader = io.BufferedReader
def xopen(filename, mode='r'):
"""
Replacement for the "open" function that can also open
files that have been compressed with gzip. If the filename ends with .gz,
the file is opened with gzip.open(). If it doesn't, the regular open()
is used. If the filename is '-', standard output (mode 'w') or input
(mode 'r') is returned.
"""
assert isinstance(filename, basestring)
if filename == '-':
return sys.stdin if 'r' in mode else sys.stdout
if filename.endswith('.gz'):
if sys.version_info[0] < 3:
if 'r' in mode:
return buffered_reader(gzip.open(filename, mode))
else:
return gzip.open(filename, mode)
else:
if 'r' in mode:
return getreader('ascii')(gzip.open(filename, mode))
else:
return getwriter('ascii')(gzip.open(filename, mode))
else:
return open(filename, mode)
|
Python
| 0
|
@@ -264,16 +264,47 @@
da x: x%0A
+%09buffered_writer = lambda x: x%0A
else:%0A%09b
@@ -338,16 +338,53 @@
dReader%0A
+%09buffered_writer = io.BufferedWriter%0A
%0A%0Adef xo
@@ -981,16 +981,32 @@
%09return
+buffered_writer(
gzip.ope
@@ -1022,16 +1022,17 @@
e, mode)
+)
%0A%09%09else:
|
3f20f16ac4e5ba04317239a629f319064d331c85
|
Change sanity check strategy again
|
cyder/base/vcs.py
|
cyder/base/vcs.py
|
import os
import re
from os.path import dirname, basename
from cyder.base.utils import dict_merge, Logger, run_command
class SanityCheckFailure(Exception):
pass
class ChdirHandler(object):
def __init__(self, path):
self.path = path
def __enter__(self):
self.old_dir = os.getcwd()
os.chdir(self.path)
return self
def __exit__(self, type, value, traceback):
os.chdir(self.old_dir)
def repo_chdir_wrapper(func):
"""A decorator that handles changing to and from repo_dir"""
def wrapped(self, *args, **kwargs):
with ChdirHandler(self.repo_dir):
return func(self, *args, **kwargs)
wrapped.__name__ = func.__name__
return wrapped
class VCSRepo(object):
def _run_command(self, command, ignore_failure=False):
return run_command(command, logger=self.logger,
ignore_failure=ignore_failure)
def __init__(self, repo_dir, line_decrease_limit=None,
line_increase_limit=None, logger=Logger()):
self.repo_dir = repo_dir
self.line_decrease_limit = line_decrease_limit
self.line_increase_limit = line_increase_limit
self.logger = logger
@repo_chdir_wrapper
def reset_to_head(self):
self._reset_to_head()
@repo_chdir_wrapper
def reset_and_pull(self):
"""Make the working tree match what's currently upstream."""
self._reset_to_head()
self._pull()
@repo_chdir_wrapper
def commit_and_push(self, message, sanity_check=True):
self._commit_and_push(message, sanity_check=sanity_check)
@repo_chdir_wrapper
def get_revision(self):
return self._get_revision()
def _sanity_check(self):
difference = self._get_line_count_difference()
if (self.line_decrease_limit is not None and
-difference > self.line_decrease_limit):
raise SanityCheckFailure(
'Line count decrease ({0}) exceeded limit ({1}).\n'
'Aborting commit.\n'.format(-difference,
self.line_decrease_limit))
if (self.line_increase_limit is not None and
difference > self.line_increase_limit):
raise SanityCheckFailure(
'Line count increase ({0}) exceeded limit ({1}).\n'
'Aborting commit.\n'.format(difference,
self.line_increase_limit))
class GitRepo(VCSRepo):
@repo_chdir_wrapper
def commit_and_push(self, message, sanity_check=True,
empty=False):
self._commit_and_push(message, sanity_check=sanity_check,
empty=empty)
def _get_revision(self):
revision, _, _ = self._run_command('git rev-parse HEAD')
return revision.strip()
def _is_index_dirty(self):
_, _, returncode = self._run_command('git diff --cached --quiet',
ignore_failure=True)
return returncode != 0
def _commit_and_push(self, message, sanity_check=True, empty=False):
if not empty:
self._add_all()
if not self._is_index_dirty() and not empty:
self.logger.log_notice('There were no changes. Nothing to commit.')
return
if sanity_check:
self._sanity_check()
else:
self.logger.log_debug(
'Skipping sanity check because sanity_check=False was passed.')
self._commit(message, allow_empty=empty)
self._push()
def _reset_to_head(self):
self._run_command('git reset --hard')
self._run_command('git clean -dxf')
def _remove_all(self):
self._run_command('git rm -rf .', ignore_failure=True)
def _pull(self):
self._run_command('git pull --ff-only')
def _add_all(self):
self._run_command('git add -A .')
def _get_line_count_difference(self):
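        # Compare total newline counts between files tracked at HEAD and files in the index.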
old_line_count = new_line_count = 0
old_filenames, _, _ = self._run_command(
'git ls-tree -r --name-only HEAD', ignore_failure=True)
for n in old_filenames.splitlines():
with open(n) as f:
while True:
chunk = f.read(4096)
if chunk == '':
break
old_line_count += chunk.count('\n')
del old_filenames
new_filenames, _, _ = self._run_command('git ls-files',
ignore_failure=True)
for n in new_filenames.splitlines():
with open(n) as f:
while True:
chunk = f.read(4096)
if chunk == '':
break
new_line_count += chunk.count('\n')
return new_line_count - old_line_count
def _commit(self, message, allow_empty=False):
cmd = ('git commit' + (' --allow-empty' if allow_empty else '') +
' -m "{0}"'.format(message))
self._run_command(cmd)
def _push(self):
self._run_command('git push origin master')
class VCSRepoManager(object):
def __init__(self, logger=Logger()):
self.logger = logger
def _run_command(self, command, ignore_failure=False):
return run_command(command, logger=Logger(),
ignore_failure=ignore_failure)
def open(self, *args, **kwargs):
return VCSRepo(*args, **kwargs)
class GitRepoManager(VCSRepoManager):
def __init__(self, config):
self.config = config
def open(self, *args, **kwargs):
return GitRepo(*args, **kwargs)
def _update_git_config(self, config):
for name, value in config.iteritems():
self._run_command("git config '{0}' '{1}'".format(name, value))
def clone(self, source, dest):
run_command('git clone {0} {1}'.format(source, dest))
with ChdirHandler(dest):
self._update_git_config(self.config)
def init(self, repo_dir, **kwargs):
bare = kwargs.pop('bare', False)
with ChdirHandler(repo_dir):
self._run_command('git init' + (' --bare' if bare else ''))
self._update_git_config(self.config)
|
Python
| 0
|
@@ -4006,161 +4006,165 @@
-old_line_count = new_line_count = 0%0A%0A old_filenames, _, _ = self._run_command(%0A 'git ls-tree -r --name-only HEAD', ignore_failure=True)
+diff_ignore = (re.compile(r'--- %5CS'), re.compile(r'%5C+%5C+%5C+ %5CS'))%0A%0A output, _, _ = self._run_command('git diff --cached')%0A%0A added, removed = 0, 0
%0A
@@ -4176,461 +4176,160 @@
for
-n
+line
in o
-ld_filenames.splitlines():%0A with open(n) as f:%0A while True:%0A chunk = f.read(4096)%0A if chunk == '':%0A break%0A old_line_count += chunk.count('%5Cn')%0A del old_filenames%0A%0A new_filenames, _, _ = self._run_command('git ls-files',%0A ignore_failure=True)%0A for n in new_filenames.splitlines():%0A with open(n) as f
+utput.split('%5Cn'):%0A if any(regex.match(line) for regex in diff_ignore):%0A continue%0A if line.startswith('+')
:%0A
@@ -4346,19 +4346,18 @@
-while True:
+added += 1
%0A
@@ -4369,206 +4369,95 @@
- chunk = f.read(4096)%0A if chunk == '':%0A break%0A new_line_count += chunk.count('%5Cn')%0A%0A return new_line_count - old_line_count
+elif line.startswith('-'):%0A removed += 1%0A%0A return added - removed
%0A%0A
|
fc683685d7df05ee0acc63a216c5b8fd99462219
|
use f strings
|
metaci/plan/templatetags/templatehelpers.py
|
metaci/plan/templatetags/templatehelpers.py
|
"""
https://simpleisbetterthancomplex.com/snippet/2016/08/22/dealing-with-querystring-parameters.html
"""
from django import template
register = template.Library()
@register.simple_tag
def relative_url(value, field_name, urlencode=None):
url = "?{}={}".format(field_name, value)
if urlencode:
querystring = urlencode.split("&")
filtered_querystring = [p for p in querystring if p.split("=")[0] != field_name]
encoded_querystring = "&".join(filtered_querystring)
url = "{}&{}".format(url, encoded_querystring)
return url
|
Python
| 0.020803
|
@@ -248,24 +248,12 @@
l =
+f
%22?%7B
-%7D=%7B%7D%22.format(
fiel
@@ -262,16 +262,18 @@
name
-,
+%7D=%7B
value
-)
+%7D%22
%0A
@@ -498,28 +498,17 @@
l =
+f
%22%7B
+url
%7D&%7B
-%7D%22.format(url,
enco
@@ -518,25 +518,26 @@
_querystring
-)
+%7D%22
%0A return
|
01e2be42f93f4e68b79ecee21818881158ecb759
|
fix the whitelist + blacklist when neither are specified
|
environment_kernels/core.py
|
environment_kernels/core.py
|
# -*- coding: utf-8 -*-
import os
import glob
import platform
from jupyter_client.kernelspec import KernelSpecManager, KernelSpec, NoSuchKernel
from traitlets import List
__all__ = ['EnvironmentKernelSpecManager']
try:
import conda.config
HAVE_CONDA = True
except ImportError:
HAVE_CONDA = False
class EnvironmentKernelSpecManager(KernelSpecManager):
"""
    A Jupyter Kernel manager which dynamically checks for Environments
Given a list of base directories, this class searches for the pattern::
BASE_DIR/NAME/bin/ipython
where NAME is taken to be the name of the environment.
"""
# Take the default home DIR for conda and virtualenv as the default
_default_dirs = ['~/.conda/envs/', '~/.virtualenvs']
# Check for the windows specific CONDA_ENVS_PATH variable and add it to the
# list if set.
if os.environ.get('CONDA_ENVS_PATH', False):
_default_dirs.append(os.environ['CONDA_ENVS_PATH'])
# If we are running inside conda we can get all the env dirs:
if HAVE_CONDA:
_default_dirs += conda.config.envs_dirs
# Remove any duplicates
_default_dirs = list(set(map(os.path.expanduser, _default_dirs)))
env_dirs = List(_default_dirs, config=True)
extra_env_dirs = List([], config=True)
blacklist_envs = List([], config=True)
whitelist_envs = List([], config=True)
def validate_env(self, envname):
"""
Check the name of the environment against the black list and the
whitelist. If a whitelist is specified only it is checked.
"""
if self.whitelist_envs and envname in self.whitelist_envs:
return True
elif self.whitelist_envs:
return False
if self.blacklist_envs and envname not in self.blacklist_envs:
return True
else:
return False
def _get_env_paths(self):
if platform.system() == 'Windows':
search = '*/Scripts/ipython'
else:
search = '*/bin/ipython'
return [os.path.join(os.path.expanduser(base_dir), search)
for base_dir in self.env_dirs + self.extra_env_dirs]
def find_python_paths(self):
        # find a python executable
python_dirs = {}
for env_path in self._get_env_paths():
for python_exe in glob.glob(env_path):
venv_dir = os.path.split(os.path.split(python_exe)[0])[0]
venv_name = os.path.split(venv_dir)[1]
if self.validate_env(venv_name):
python_dirs.update({venv_name: venv_dir})
return python_dirs
def venv_kernel_specs(self):
python_dirs = self.find_python_paths()
kspecs = {}
for venv_name, venv_dir in python_dirs.items():
exe_name = os.path.join(venv_dir, 'bin/python')
kspec_dict = {"argv": [exe_name,
"-m",
"IPython.kernel",
"-f",
"{connection_file}"],
"display_name": "Environment ({})".format(venv_name),
"env": {}}
kspecs.update({venv_name: KernelSpec(**kspec_dict)})
return kspecs
def find_kernel_specs(self):
"""Returns a dict mapping kernel names to resource directories."""
d = super(EnvironmentKernelSpecManager, self).find_kernel_specs()
d.update(self.find_python_paths())
return d
def get_kernel_spec(self, kernel_name):
"""Returns a :class:`KernelSpec` instance for the given kernel_name.
Raises :exc:`NoSuchKernel` if the given kernel name is not found.
"""
try:
return super(EnvironmentKernelSpecManager, self).get_kernel_spec(kernel_name)
except (NoSuchKernel, FileNotFoundError):
if kernel_name.lower() in self.venv_kernel_specs():
return self.venv_kernel_specs()[kernel_name.lower()]
else:
raise NoSuchKernel(kernel_name)
|
Python
| 0.000018
|
@@ -1568,25 +1568,24 @@
%22%22%22%0A
-%0A
if s
@@ -1820,34 +1820,54 @@
True%0A el
+if self.blacklist_env
s
-e
:%0A re
@@ -1872,24 +1872,62 @@
return False
+%0A else:%0A return True
%0A%0A def _g
|
0430957f2b65ee0e14821027a15cfb956e976c62
|
make method static
|
RatS/tmdb/tmdb_ratings_inserter.py
|
RatS/tmdb/tmdb_ratings_inserter.py
|
import time
from RatS.base.base_ratings_uploader import RatingsUploader
from RatS.tmdb.tmdb_site import TMDB
class TMDBRatingsInserter(RatingsUploader):
def __init__(self, args):
super(TMDBRatingsInserter, self).__init__(TMDB(args), args)
self.url_for_csv_file_upload = self._get_url_for_csv_upload()
self.css_id_of_file_input_element = 'csv_file'
self.xpath_selector_for_submit_button = "//form[@name='import_csv']//input[@type='submit']"
def _get_url_for_csv_upload(self):
return 'https://www.themoviedb.org/settings/import-list'
def pre_upload_action(self):
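        # Dismiss the cookie consent banner so it cannot block the upload form.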
cookie_accept_button = self.site.browser.find_element_by_id('cookie_notice')\
.find_elements_by_class_name('accept')
if cookie_accept_button is not None and len(cookie_accept_button) > 0:
cookie_accept_button[0].click()
time.sleep(1)
|
Python
| 0.000023
|
@@ -473,16 +473,34 @@
mit'%5D%22%0A%0A
+ @staticmethod%0A
def
@@ -523,20 +523,16 @@
_upload(
-self
):%0A
|
35a0904506ff51946c9c26a4ca9f61ae2f08b63e
|
bump to 1.6
|
eventdispatcher/__init__.py
|
eventdispatcher/__init__.py
|
__author__ = 'Calvin Lobo'
__version__ = '1.5'
from .property import Property
from .dictproperty import DictProperty
from .listproperty import ListProperty
from .unitproperty import UnitProperty
class BindError(Exception):
pass
class EventDispatcher(object):
def __init__(self, **kwargs):
self.event_dispatcher_event_callbacks = {}
bindings = {}
# Walk through the MRO looking for Property attributes in the classes. Then register and bind them to
# 'on_<prop_name>' if it exists.
for cls in self.__class__.__mro__:
for prop_name, prop in cls.__dict__.iteritems():
if isinstance(prop, Property):
prop.name = prop_name
prop.register(self, prop_name, prop.default_value)
if hasattr(self, 'on_{}'.format(prop_name)):
func = getattr(self, 'on_{}'.format(prop_name))
bindings.update({prop_name: func})
self.bind(**bindings)
def dispatch(self, key, *args):
for callback in self.event_dispatcher_properties[key]['callbacks']:
if callback(*args):
break
def dispatch_event(self, event, *args):
for callback in self.event_dispatcher_event_callbacks[event]:
if callback(*args):
break
def register_event(self, name):
if hasattr(self, 'on_{}'.format(name)):
self.event_dispatcher_event_callbacks[name] = [getattr(self, 'on_{}'.format(name))]
else:
self.event_dispatcher_event_callbacks[name] = []
def unbind(self, **kwargs):
all_properties = self.event_dispatcher_properties
for prop_name, callback in kwargs.iteritems():
if prop_name in all_properties:
try:
all_properties[prop_name]['callbacks'].remove(callback)
except ValueError:
raise BindError("No binding for {} in property '{}'".format(callback.__name__, prop_name))
elif prop_name in self.event_dispatcher_event_callbacks:
try:
self.event_dispatcher_event_callbacks[prop_name].remove(callback)
except ValueError:
raise BindError("No binding for {} in event '{}'".format(callback.__name__, prop_name))
else:
raise BindError('No property or event by the name of %s' % prop_name)
def unbind_all(self, *args):
all_properties = self.event_dispatcher_properties
for prop_name in args:
if prop_name in all_properties:
del all_properties[prop_name]['callbacks'][:]
elif prop_name in self.event_dispatcher_event_callbacks:
del self.event_dispatcher_event_callbacks[prop_name][:]
else:
raise BindError("No such property or event '%s'" % prop_name)
def bind(self, **kwargs):
for prop_name, callback in kwargs.iteritems():
if prop_name in self.event_dispatcher_properties:
# Queue the callback into the property
self.event_dispatcher_properties[prop_name]['callbacks'].append(callback)
elif prop_name in self.event_dispatcher_event_callbacks:
# If a property was not found, search in events
self.event_dispatcher_event_callbacks[prop_name].append(callback)
else:
raise BindError("No property or event by the name of '%s'" % prop_name)
def setter(self, prop_name):
return lambda inst, value: setattr(self, prop_name, value)
def get_dispatcher_property(self, prop_name):
return self.event_dispatcher_properties[prop_name]['property']
|
Python
| 0.000003
|
@@ -41,9 +41,9 @@
'1.
-5
+6
'%0A%0Af
|
fe0f2b85af896b91001b39098a1a234399247293
|
add `field_pk` for `parameter_name` for custom primary key in model
|
dal_admin_filters/__init__.py
|
dal_admin_filters/__init__.py
|
# -*- encoding: utf-8 -*-
from dal import autocomplete
from django import forms
from django.contrib.admin.filters import SimpleListFilter
from django.core.exceptions import ImproperlyConfigured
from django.forms.widgets import Media, MEDIA_TYPES
class AutocompleteFilter(SimpleListFilter):
template = "dal_admin_filters/autocomplete-filter.html"
title = ''
field_name = ''
autocomplete_url = ''
is_placeholder_title = False
widget_attrs = {}
class Media:
css = {
'all': (
'autocomplete_light/vendor/select2/dist/css/select2.css',
'autocomplete_light/select2.css',
'dal_admin_filters/css/autocomplete-fix.css'
)
}
js = (
'autocomplete_light/jquery.init.js',
'autocomplete_light/autocomplete.init.js',
'autocomplete_light/vendor/select2/dist/js/select2.full.js',
'autocomplete_light/select2.js',
'dal_admin_filters/js/querystring.js',
)
def __init__(self, request, params, model, model_admin):
if self.parameter_name:
raise AttributeError(
'Rename attribute `parameter_name` to '
'`field_name` for {}'.format(self.__class__)
)
self.parameter_name = '{}__id__exact'.format(self.field_name)
super(AutocompleteFilter, self).__init__(request, params, model, model_admin)
self._add_media(model_admin)
field = forms.ModelChoiceField(
queryset=getattr(model, self.field_name).get_queryset(),
widget=autocomplete.ModelSelect2(
url=self.autocomplete_url,
)
)
attrs = self.widget_attrs.copy()
attrs['id'] = 'id-%s-dal-filter' % self.field_name
if self.is_placeholder_title:
attrs['data-placeholder'] = "By " + self.title
self.rendered_widget = field.widget.render(
name=self.parameter_name,
value=self.used_parameters.get(self.parameter_name, ''),
attrs=attrs
)
def _add_media(self, model_admin):
if not hasattr(model_admin, 'Media'):
raise ImproperlyConfigured('Add empty Media class to %s. Sorry about this bug.' % model_admin)
def _get_media(obj):
return Media(media=getattr(obj, 'Media', None))
media = _get_media(model_admin) + _get_media(AutocompleteFilter) + _get_media(self)
for name in MEDIA_TYPES:
setattr(model_admin.Media, name, getattr(media, "_" + name))
def has_output(self):
return True
def lookups(self, request, model_admin):
return ()
def queryset(self, request, queryset):
if self.value():
return queryset.filter(**{self.parameter_name: self.value()})
else:
return queryset
|
Python
| 0
|
@@ -376,24 +376,44 @@
d_name = ''%0A
+ field_pk = 'id'%0A
autocomp
@@ -1337,18 +1337,18 @@
= '%7B%7D__
-id
+%7B%7D
__exact'
@@ -1370,16 +1370,31 @@
eld_name
+, self.field_pk
)%0A
|
76f5e98aec0024fb6d015004e1f3f26434a01fc2
|
Update _version.py
|
core/_version.py
|
core/_version.py
|
"""Version information."""
# The following line *must* be the last in the module, exactly as formatted:
__version__ = "0.5.2"
|
Python
| 0.000002
|
@@ -121,7 +121,7 @@
0.5.
-2
+3
%22%0A
|
8ff56b13178e2c7fa87e4b1bb0c9ecc7359e8775
|
Use a func name instead of endpoints (#30204)
|
dashboard/http_server_head.py
|
dashboard/http_server_head.py
|
import asyncio
import errno
import ipaddress
import logging
from math import floor
import os
import sys
import time
try:
from packaging.version import Version
except ImportError:
from distutils.version import LooseVersion as Version
import ray.dashboard.optional_utils as dashboard_optional_utils
import ray.dashboard.utils as dashboard_utils
from ray.dashboard.dashboard_metrics import DashboardPrometheusMetrics
# All third-party dependencies that are not included in the minimal Ray
# installation must be included in this file. This allows us to determine if
# the agent has the necessary dependencies to be started.
from ray.dashboard.optional_deps import aiohttp, hdrs
from ray._private.gcs_utils import GcsClient
# Logger for this module. It should be configured at the entry point
# into the program using Ray. Ray provides a default configuration at
# entry/init points.
logger = logging.getLogger(__name__)
routes = dashboard_optional_utils.ClassMethodRouteTable
def setup_static_dir():
build_dir = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "client", "build"
)
module_name = os.path.basename(os.path.dirname(__file__))
if not os.path.isdir(build_dir):
raise dashboard_utils.FrontendNotFoundError(
errno.ENOENT,
"Dashboard build directory not found. If installing "
"from source, please follow the additional steps "
"required to build the dashboard"
f"(cd python/ray/{module_name}/client "
"&& npm ci "
"&& npm run build)",
build_dir,
)
static_dir = os.path.join(build_dir, "static")
routes.static("/static", static_dir, follow_symlinks=True)
return build_dir
class HttpServerDashboardHead:
def __init__(
self,
ip: str,
http_host: str,
http_port: int,
http_port_retries: int,
gcs_address: str,
gcs_client: GcsClient,
session_name: str,
metrics: DashboardPrometheusMetrics,
):
self.ip = ip
self.http_host = http_host
self.http_port = http_port
self.http_port_retries = http_port_retries
self.gcs_client = gcs_client
self.head_node_ip = gcs_address.split(":")[0]
self.metrics = metrics
self._session_name = session_name
        # Below attributes are filled after the `run` API is invoked.
self.runner = None
# Setup Dashboard Routes
try:
build_dir = setup_static_dir()
logger.info("Setup static dir for dashboard: %s", build_dir)
except dashboard_utils.FrontendNotFoundError as ex:
            # Don't raise FrontendNotFoundError due to NPM incompatibilities
# with Windows.
# Please refer to ci.sh::build_dashboard_front_end()
if sys.platform in ["win32", "cygwin"]:
logger.warning(ex)
else:
raise ex
dashboard_optional_utils.ClassMethodRouteTable.bind(self)
# Create a http session for all modules.
# aiohttp<4.0.0 uses a 'loop' variable, aiohttp>=4.0.0 doesn't anymore
if Version(aiohttp.__version__) < Version("4.0.0"):
self.http_session = aiohttp.ClientSession(loop=asyncio.get_event_loop())
else:
self.http_session = aiohttp.ClientSession()
@routes.get("/")
async def get_index(self, req) -> aiohttp.web.FileResponse:
return aiohttp.web.FileResponse(
os.path.join(
os.path.dirname(os.path.abspath(__file__)), "client/build/index.html"
)
)
@routes.get("/favicon.ico")
async def get_favicon(self, req) -> aiohttp.web.FileResponse:
return aiohttp.web.FileResponse(
os.path.join(
os.path.dirname(os.path.abspath(__file__)), "client/build/favicon.ico"
)
)
def get_address(self):
assert self.http_host and self.http_port
return self.http_host, self.http_port
@aiohttp.web.middleware
async def metrics_middleware(self, request, handler):
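        # Time the wrapped handler and record request duration/count metrics by status class.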
start_time = time.monotonic()
try:
response = await handler(request)
status_tag = f"{floor(response.status / 100)}xx"
return response
except (Exception, asyncio.CancelledError):
status_tag = "5xx"
raise
finally:
resp_time = time.monotonic() - start_time
try:
self.metrics.metrics_request_duration.labels(
endpoint=request.path,
http_status=status_tag,
SessionName=self._session_name,
Component="dashboard",
).observe(resp_time)
self.metrics.metrics_request_count.labels(
method=request.method,
endpoint=request.path,
http_status=status_tag,
SessionName=self._session_name,
Component="dashboard",
).inc()
except Exception as e:
logger.exception(f"Error emitting api metrics: {e}")
async def run(self, modules):
# Bind http routes of each module.
for c in modules:
dashboard_optional_utils.ClassMethodRouteTable.bind(c)
# Http server should be initialized after all modules loaded.
# working_dir uploads for job submission can be up to 100MiB.
app = aiohttp.web.Application(
client_max_size=100 * 1024**2, middlewares=[self.metrics_middleware]
)
app.add_routes(routes=routes.bound_routes())
self.runner = aiohttp.web.AppRunner(
app,
access_log_format=(
"%a %t '%r' %s %b bytes %D us " "'%{Referer}i' '%{User-Agent}i'"
),
)
await self.runner.setup()
last_ex = None
for i in range(1 + self.http_port_retries):
try:
site = aiohttp.web.TCPSite(self.runner, self.http_host, self.http_port)
await site.start()
break
except OSError as e:
last_ex = e
self.http_port += 1
logger.warning("Try to use port %s: %s", self.http_port, e)
else:
raise Exception(
f"Failed to find a valid port for dashboard after "
f"{self.http_port_retries} retries: {last_ex}"
)
self.http_host, self.http_port, *_ = site._server.sockets[0].getsockname()
self.http_host = (
self.ip
if ipaddress.ip_address(self.http_host).is_unspecified
else self.http_host
)
logger.info(
"Dashboard head http address: %s:%s", self.http_host, self.http_port
)
# Dump registered http routes.
dump_routes = [r for r in app.router.routes() if r.method != hdrs.METH_HEAD]
for r in dump_routes:
logger.info(r)
logger.info("Registered %s routes.", len(dump_routes))
async def cleanup(self):
# Wait for finish signal.
await self.runner.cleanup()
|
Python
| 0.000567
|
@@ -4160,24 +4160,25 @@
monotonic()%0A
+%0A
try:
@@ -4589,36 +4589,40 @@
endpoint=
-request.path
+handler.__name__
,%0A
@@ -4922,20 +4922,24 @@
int=
-request.path
+handler.__name__
,%0A
@@ -5303,24 +5303,24 @@
in modules:%0A
-
@@ -5374,16 +5374,17 @@
bind(c)%0A
+%0A
|
92febbffb91943f13cfac8c00e55103b20645b70
|
Update [MediaContainer] children with the correct `section` object
|
plex/objects/library/container.py
|
plex/objects/library/container.py
|
from plex.objects.core.base import Property
from plex.objects.container import Container
from plex.objects.library.section import Section
class MediaContainer(Container):
section = Property(resolver=lambda: MediaContainer.construct_section)
title1 = Property
title2 = Property
identifier = Property
art = Property
thumb = Property
view_group = Property('viewGroup')
view_mode = Property('viewMode', int)
media_tag_prefix = Property('mediaTagPrefix')
media_tag_version = Property('mediaTagVersion')
no_cache = Property('nocache', bool)
allow_sync = Property('allowSync', bool)
mixed_parents = Property('mixedParents', bool)
@staticmethod
def construct_section(client, node):
attribute_map = {
'key': 'librarySectionID',
'uuid': 'librarySectionUUID',
'title': 'librarySectionTitle'
}
return Section.construct(client, node, attribute_map, child=True)
|
Python
| 0
|
@@ -971,8 +971,157 @@
d=True)%0A
+%0A def __iter__(self):%0A for item in super(MediaContainer, self).__iter__():%0A item.section = self.section%0A%0A yield item%0A
|
d9ddec5e0b6ceabbfc29e662fa8f54f0317ed3bb
|
set response.from_cache before calling hooks
|
requests_cache/core.py
|
requests_cache/core.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
requests_cache.core
~~~~~~~~~~~~~~~~~~~
Core functions for configuring cache and monkey patching ``requests``
"""
from contextlib import contextmanager
from datetime import datetime, timedelta
import requests
from requests import Session as OriginalSession
from requests.hooks import dispatch_hook
from requests_cache import backends
from requests_cache.compat import str, basestring
try:
ver = tuple(map(int, requests.__version__.split(".")))
except ValueError:
pass
else:
# We don't need to dispatch hook in Requests <= 1.1.0
if ver < (1, 2, 0):
dispatch_hook = lambda key, hooks, hook_data, *a, **kw: hook_data
del ver
class CachedSession(OriginalSession):
""" Requests ``Sessions`` with caching support.
"""
def __init__(self, cache_name='cache', backend='sqlite', expire_after=None,
allowable_codes=(200,), allowable_methods=('GET',),
**backend_options):
"""
:param cache_name: for ``sqlite`` backend: cache file will start with this prefix,
e.g ``cache.sqlite``
for ``mongodb``: it's used as database name
for ``redis``: it's used as the namespace. This means all keys
are prefixed with ``'cache_name:'``
:param backend: cache backend name e.g ``'sqlite'``, ``'mongodb'``, ``'redis'``, ``'memory'``.
(see :ref:`persistence`). Or instance of backend implementation.
        :param expire_after: number of seconds after which the cache expires,
or `None` (default) to ignore expiration
:type expire_after: float
        :param allowable_codes: limit caching only to responses with these codes (default: 200)
:type allowable_codes: tuple
        :param allowable_methods: cache only requests of these methods (default: 'GET')
:type allowable_methods: tuple
:kwarg backend_options: options for chosen backend. See corresponding
:ref:`sqlite <backends_sqlite>`, :ref:`mongo <backends_mongo>`
and :ref:`redis <backends_redis>` backends API documentation
"""
if isinstance(backend, basestring):
try:
self.cache = backends.registry[backend](cache_name, **backend_options)
except KeyError:
raise ValueError('Unsupported backend "%s" try one of: %s' %
(backend, ', '.join(backends.registry.keys())))
else:
self.cache = backend
self._cache_expire_after = expire_after
self._cache_allowable_codes = allowable_codes
self._cache_allowable_methods = allowable_methods
self._is_cache_disabled = False
super(CachedSession, self).__init__()
def send(self, request, **kwargs):
if (self._is_cache_disabled
or request.method not in self._cache_allowable_methods):
response = super(CachedSession, self).send(request, **kwargs)
response.from_cache = False
return response
cache_key = self.cache.create_key(request)
def send_request_and_cache_response():
response = super(CachedSession, self).send(request, **kwargs)
if response.status_code in self._cache_allowable_codes:
self.cache.save_response(cache_key, response)
response.from_cache = False
return response
response, timestamp = self.cache.get_response_and_time(cache_key)
if response is None:
return send_request_and_cache_response()
if self._cache_expire_after is not None:
difference = datetime.utcnow() - timestamp
if difference > timedelta(seconds=self._cache_expire_after):
self.cache.delete(cache_key)
return send_request_and_cache_response()
# dispatch hook here, because we've removed it before pickling
response = dispatch_hook('response', request.hooks, response, **kwargs)
response.from_cache = True
return response
def request(self, method, url, params=None, data=None, headers=None,
cookies=None, files=None, auth=None, timeout=None,
allow_redirects=True, proxies=None, hooks=None, stream=None,
verify=None, cert=None):
response = super(CachedSession, self).request(method, url, params, data,
headers, cookies, files,
auth, timeout,
allow_redirects, proxies,
hooks, stream, verify, cert)
if self._is_cache_disabled:
return response
main_key = self.cache.create_key(response.request)
for r in response.history:
self.cache.add_key_mapping(
self.cache.create_key(r.request), main_key
)
return response
@contextmanager
def cache_disabled(self):
"""
Context manager for temporarily disabling the cache
::
>>> s = CachedSession()
>>> with s.cache_disabled():
... s.get('http://httpbin.org/ip')
"""
self._is_cache_disabled = True
try:
yield
finally:
self._is_cache_disabled = False
def install_cache(cache_name='cache', backend='sqlite', expire_after=None,
allowable_codes=(200,), allowable_methods=('GET',),
session_factory=CachedSession, **backend_options):
"""
Installs cache for all ``Requests`` requests by monkey-patching ``Session``
Parameters are the same as in :class:`CachedSession`. Additional parameters:
:param session_factory: Session factory. It should inherit :class:`CachedSession` (default)
"""
_patch_session_factory(
lambda: session_factory(cache_name=cache_name,
backend=backend,
expire_after=expire_after,
allowable_codes=allowable_codes,
allowable_methods=allowable_methods,
**backend_options)
)
# backward compatibility
configure = install_cache
def uninstall_cache():
""" Restores ``requests.Session`` and disables cache
"""
_patch_session_factory(OriginalSession)
@contextmanager
def disabled():
"""
Context manager for temporarily disabling the globally installed cache
.. warning:: not thread-safe
::
>>> with requests_cache.disabled():
... requests.get('http://httpbin.org/ip')
... requests.get('http://httpbin.org/get')
"""
previous = requests.Session
uninstall_cache()
try:
yield
finally:
_patch_session_factory(previous)
@contextmanager
def enabled(*args, **kwargs):
"""
Context manager for temporarily installing a global cache.
Accepts same arguments as :func:`install_cache`
.. warning:: not thread-safe
::
>>> with requests_cache.enabled('cache_db'):
... requests.get('http://httpbin.org/get')
"""
install_cache(*args, **kwargs)
try:
yield
finally:
uninstall_cache()
def get_cache():
""" Returns internal cache object from globally installed ``CachedSession``
"""
return requests.Session().cache
def clear():
""" Clears globally installed cache
"""
get_cache().clear()
def _patch_session_factory(session_factory=CachedSession):
requests.Session = requests.sessions.Session = session_factory
|
Python
| 0.000001
|
@@ -4100,24 +4100,59 @@
re pickling%0A
+ response.from_cache = True%0A
resp
@@ -4223,43 +4223,8 @@
gs)%0A
- response.from_cache = True%0A
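Why the reordering matters: dispatch_hook runs user-supplied response hooks, and assigning from_cache first lets a hook observe it on cache hits. A hedged sketch, assuming standard requests hook wiring (names are illustrative):
# Illustrative only: a response hook that reads the cache flag.
def log_cache_status(response, *args, **kwargs):
    # On cache hits the patch guarantees from_cache is set before hooks
    # run; on misses the attribute may not exist yet, hence getattr.
    print('served from cache:', getattr(response, 'from_cache', False))
    return response

session = CachedSession('demo_cache')
session.hooks['response'].append(log_cache_status)
session.get('http://httpbin.org/get')  # first call misses; repeats hit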
|
d4aa45b39eab5ce4b06d6343344afb05a0bf8582
|
Fix pep8.
|
tryfer/tests/test_formatters.py
|
tryfer/tests/test_formatters.py
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import struct
from twisted.trial.unittest import TestCase
from tryfer import formatters
class TestFormatters(TestCase):
def test_ipv4_to_int(self):
""" Thrift expects ipv4 address to be a signed 32-bit integer.
Previously this function converted ip addresses to an unsigned 32-bit
int. struct.pack is strict about integer overflows for signed 32-bit
integers, so this function very much needs to produce a signed integer
to allow IP addresses in the upper half to work
"""
# ip that doesn't overflow in signed 32-bit
low_ip = '127.0.0.1'
# ip that does overflow in signed 32-bit
high_ip = '172.17.1.1'
low_ip_as_int = formatters.ipv4_to_int(low_ip)
high_ip_as_int = formatters.ipv4_to_int(high_ip)
# both parsed ips should be packable as signed 32-bit int
struct.pack('!i', low_ip_as_int)
struct.pack('!i', high_ip_as_int)
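For reference, a minimal sketch of the signed conversion the test exercises -- an assumption about the implementation, not tryfer's actual code:
import socket
import struct

def ipv4_to_signed_int(ip):
    # Pack the dotted quad as 4 network-order bytes, then reinterpret
    # them as a signed 32-bit integer so struct.pack('!i', ...) accepts it.
    return struct.unpack('!i', socket.inet_aton(ip))[0]

# ipv4_to_signed_int('127.0.0.1') is positive; '172.17.1.1' is negative.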
|
Python
| 0
|
@@ -629,16 +629,17 @@
atters%0A%0A
+%0A
class Te
|
410f65020c3b1ce13563a5019f4b0c49b599cbe0
|
Add test for login with incorrect credentials
|
tests/test_authentication.py
|
tests/test_authentication.py
|
import unittest
from flask import json
from api import db
from api.BucketListAPI import app
from instance.config import application_config
class AuthenticationTestCase(unittest.TestCase):
def setUp(self):
app.config.from_object(application_config['TestingEnv'])
self.client = app.test_client()
# Binds the app to current context
with app.app_context():
# Create all tables
db.create_all()
def test_index_route(self):
response = self.client.get('/')
self.assertEqual(response.status_code, 201)
self.assertIn('Welcome Message', response.data.decode())
def test_registration_with_missing_credentials(self):
"""Should throw error for missing credentials"""
user = json.dumps({
'name': '',
'email': '',
'password': ''
})
response = self.client.post('/auth/register', data=user)
self.assertEqual(response.status_code, 400)
self.assertIn('Missing', response.data.decode())
def test_registration_with_invalid_email(self):
"""Should return invalid email"""
user = json.dumps({
'name': 'Patrick',
'email': 'pato',
'password': 'patrick'
})
response = self.client.post('/auth/register', data=user)
self.assertEqual(response.status_code, 400)
self.assertIn('Invalid Email', response.data.decode())
def test_registration_with_short_password(self):
"""Should return invalid email"""
user = json.dumps({
'name': 'Patrick',
'email': 'pato@gmail.com',
'password': 'pato'
})
response = self.client.post('/auth/register', data=user)
self.assertEqual(response.status_code, 400)
self.assertIn('Password is short', response.data.decode())
def test_for_existing_email(self):
"""Should check if email exists"""
user = json.dumps({
'name': 'Patrick',
'email': 'pato@gmail.com',
'password': 'patrickluboobi'
})
self.client.post('/auth/register', data=user)
response = self.client.post('/auth/register', data=user)
self.assertEqual(response.status_code, 400)
self.assertIn('Email Already exists', response.data.decode())
def test_successfull_registration(self):
"""Should register user successfully"""
user = json.dumps({
'name': 'Patrick',
'email': 'patrick@gmail.com',
'password': 'patrickluboobi'
})
response = self.client.post('/auth/register', data=user)
self.assertEqual(response.status_code, 201)
self.assertIn('Successfully registered', response.data.decode())
def test_login_without_credentials(self):
"""Should check for valid email"""
user = json.dumps({
'email': '',
'password': ''
})
response = self.client.post('/auth/login', data=user)
self.assertEqual(response.status_code, 400)
self.assertIn('Missing login credentials', response.data.decode())
def test_login_with_invalid_email(self):
"""Should check for valid email"""
user = json.dumps({
'email': 'patrick',
'password': 'patrickluboobi'
})
response = self.client.post('/auth/login', data=user)
self.assertEqual(response.status_code, 400)
self.assertIn('Enter valid email', response.data.decode())
def test_successful_login(self):
"""Should check for valid email"""
# First of all register
self.test_successfull_registration()
user = json.dumps({
'email': 'patrick@gmail.com',
'password': 'patrickluboobi'
})
response = self.client.post('/auth/login', data=user)
self.assertEqual(response.status_code, 201)
self.assertIn('Login Successful', response.data.decode())
def tearDown(self):
with app.app_context():
# Drop all tables
db.session.remove()
db.drop_all()
if __name__ == '__main__':
unittest.main()
|
Python
| 0
|
@@ -3524,32 +3524,512 @@
data.decode())%0A%0A
+ def test_incorrect_login_credentials(self):%0A %22%22%22Should check for valid email%22%22%22%0A%0A # First of all register%0A self.test_successfull_registration()%0A user = json.dumps(%7B%0A 'email': 'incorrect@gmail.com',%0A 'password': 'incorrect'%0A %7D)%0A response = self.client.post('/auth/login', data=user)%0A self.assertEqual(response.status_code, 400)%0A self.assertIn('Incorrect email or password', response.data.decode())%0A%0A
def test_suc
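The added test pins down a handler contract: bad credentials yield 400 with 'Incorrect email or password', success yields 201. A hedged sketch of that server-side check (the model and helper names are hypothetical):
# Hypothetical handler logic consistent with the assertions above.
def check_login(email, password, find_user_by_email):
    user = find_user_by_email(email)
    if user is None or not user.verify_password(password):
        return 'Incorrect email or password', 400
    return 'Login Successful', 201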
|
35293cecc99a629b3a185e69cf9ed3a339d9d1cf
|
Remove indentation level for easier review
|
automat/_introspection.py
|
automat/_introspection.py
|
"""
Python introspection helpers.
"""
from types import CodeType as code, FunctionType as function
def copycode(template, changes):
if hasattr(code, "replace"):
return template.replace(**{"co_" + k : v for k, v in changes.items()})
else:
names = [
"argcount", "nlocals", "stacksize", "flags", "code", "consts",
"names", "varnames", "filename", "name", "firstlineno", "lnotab",
"freevars", "cellvars"
]
if hasattr(code, "co_kwonlyargcount"):
names.insert(1, "kwonlyargcount")
if hasattr(code, "co_posonlyargcount"):
# PEP 570 added "positional only arguments"
names.insert(1, "posonlyargcount")
values = [
changes.get(name, getattr(template, "co_" + name))
for name in names
]
return code(*values)
def copyfunction(template, funcchanges, codechanges):
names = [
"globals", "name", "defaults", "closure",
]
values = [
funcchanges.get(name, getattr(template, "__" + name + "__"))
for name in names
]
return function(copycode(template.__code__, codechanges), *values)
def preserveName(f):
"""
Preserve the name of the given function on the decorated function.
"""
def decorator(decorated):
return copyfunction(decorated,
dict(name=f.__name__), dict(name=f.__name__))
return decorator
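A hedged usage sketch of preserveName (the wrapped function here is hypothetical):
def original():
    """Hypothetical function whose name we want to keep."""

@preserveName(original)
def wrapper(*args, **kwargs):
    # Delegate to the original; the decorator rewrites __name__
    # on both the function and its code object.
    return original(*args, **kwargs)

assert wrapper.__name__ == 'original'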
|
Python
| 0.000004
|
@@ -244,22 +244,8 @@
)%7D)%0A
- else:%0A
@@ -250,28 +250,24 @@
names = %5B%0A
-
%22arg
@@ -333,20 +333,16 @@
-
%22names%22,
@@ -407,20 +407,16 @@
-
%22freevar
@@ -430,34 +430,26 @@
llvars%22%0A
-
- %5D%0A
+%5D%0A
if hasat
@@ -479,36 +479,32 @@
ount%22):%0A
-
-
names.insert(1,
@@ -517,28 +517,24 @@
yargcount%22)%0A
-
if hasat
@@ -573,20 +573,16 @@
-
-
# PEP 57
@@ -621,28 +621,24 @@
ts%22%0A
-
names.insert
@@ -664,20 +664,16 @@
t%22)%0A
-
-
values =
@@ -671,28 +671,24 @@
values = %5B%0A
-
chan
@@ -738,28 +738,24 @@
e))%0A
-
for name in
@@ -764,26 +764,18 @@
mes%0A
- %5D%0A
+%5D%0A
retu
|
2ff8489dce184d3c9762cc41e0c88b7b178b7555
|
fix summary view button
|
conjureup/controllers/steps/gui.py
|
conjureup/controllers/steps/gui.py
|
import os
import os.path as path
from collections import OrderedDict, deque
from functools import partial
import yaml
from conjureup import async, controllers, utils
from conjureup.app_config import app
from conjureup.controllers.steps import common
from conjureup.models.step import StepModel
from conjureup.ui.views.steps import StepsView
from conjureup.ui.widgets.step import StepWidget
from ubuntui.ev import EventLoop
class StepsController:
def __init__(self):
self.view = None
self.bundle_scripts = path.join(
app.config['spell-dir'], 'steps'
)
self.step_metas = common.get_step_metadata_filenames(
self.bundle_scripts)
self.summary_button_exposed = False
self.results = OrderedDict()
def __handle_exception(self, tag, exc):
utils.pollinate(app.session_id, tag)
EventLoop.remove_alarms()
app.ui.show_exception_message(exc)
def get_result(self, future):
if future.exception():
self.__handle_exception('E002', future.exception())
step_model, step_widget = future.result()
step_widget.set_icon_state('active')
step_widget.set_description(
"{}\n\nResult: {}".format(
step_model.description,
step_model.result),
'info_context')
step_widget.show_output = False
step_widget.clear_output()
app.log.debug("Storing step result for: {}={}".format(
step_model.title, step_model.result))
self.results[step_model.title] = step_model.result
if len(self.view.steps) == 0 and not self.summary_button_exposed:
self.summary_button_exposed = True
app.log.debug(
"End of step list waiting for last step to complete "
"then rendering summary.")
self.view.step_pile.contents.append(
(self.view.buttons(),
self.view.step_pile.options()))
index = self.view.current_summary_button_index
self.view.step_pile.focus_position = index
def finish(self, step_model, step_widget, done=False):
""" handles processing step with input data
Arguments:
step_model: step_model returned from widget
done: if True continues on to the summary view
"""
if done:
EventLoop.remove_alarms()
return controllers.use('summary').render(self.results)
# Set next button focus here now that the step is complete.
self.view.steps.popleft()
if len(self.view.steps) > 0:
next_step = self.view.steps[0]
next_step.generate_additional_input()
self.view.step_pile.focus_position = self.view.step_pile.focus_position + 1 # noqa
future = async.submit(partial(common.do_step,
step_model,
step_widget,
app.ui.set_footer,
gui=True),
partial(self.__handle_exception, 'E002'))
future.add_done_callback(self.get_result)
def update(self, *args):
for w in self.all_step_widgets:
w.update()
EventLoop.set_alarm_in(1, self.update)
def render(self):
if len(self.step_metas) == 0:
self.finish(None, None, done=True)
return
step_widgets = deque()
for step_meta_path in self.step_metas:
step_ex_path, ext = path.splitext(step_meta_path)
if not path.isfile(step_ex_path) or \
not os.access(step_ex_path, os.X_OK):
app.log.error(
'Unable to process step, missing {}'.format(step_ex_path))
continue
step_metadata = {}
with open(step_meta_path) as fp:
step_metadata = yaml.load(fp.read())
try:
# Store step model and its widget
model = StepModel(step_metadata, step_meta_path)
step_widget = StepWidget(
app,
model,
self.finish)
if not step_widget.model.viewable:
app.log.debug("Skipping step: {}".format(step_widget))
continue
model.path = step_ex_path
step_widgets.append(step_widget)
app.log.debug("Queueing step: {}".format(step_widget))
except Exception as e:
self.__handle_exception('E002', e)
return
try:
self.all_step_widgets = list(step_widgets)
self.view = StepsView(app, step_widgets, self.finish)
# Set initial step as active and viewable
step_widgets[0].description.set_text((
'body', step_widgets[0].model.description))
step_widgets[0].icon.set_text((
'pending_icon', step_widgets[0].icon.get_text()[0]
))
step_widgets[0].generate_additional_input()
self.view.step_pile.focus_position = 2
except Exception as e:
self.__handle_exception('E002', e)
return
app.ui.set_header(
title="Additional Application Configuration",
excerpt="Please finish the installation by configuring your "
"application with these steps.")
app.ui.set_body(self.view)
app.ui.set_footer('')
self.update()
_controller_class = StepsController
|
Python
| 0
|
@@ -689,52 +689,8 @@
s)%0A%0A
- self.summary_button_exposed = False%0A
@@ -1556,120 +1556,100 @@
-if len(self.view.steps) == 0 and not self.summary_button_exposed:%0A self.summary_button_exposed = True
+self.n_completed_steps += 1%0A if self.n_completed_steps == len(self.all_step_widgets):
%0A
@@ -3423,16 +3423,50 @@
deque()%0A
+ self.n_completed_steps = 0
%0A
|