commit
stringlengths 40
40
| subject
stringlengths 1
3.25k
| old_file
stringlengths 4
311
| new_file
stringlengths 4
311
| old_contents
stringlengths 0
26.3k
| lang
stringclasses 3
values | proba
float64 0
1
| diff
stringlengths 0
7.82k
|
|---|---|---|---|---|---|---|---|
1df18d3c1753aadd1342f870905e60953b4aeba9
|
Remove overwritten DEBUG mapping
|
cea/interfaces/dashboard/dashboard.py
|
cea/interfaces/dashboard/dashboard.py
|
from flask import Flask
import cea.config
import cea.plots
import cea.plots.cache
import webbrowser
import threading
import os
import sys
def list_tools():
"""List the tools known to the CEA. The result is grouped by category.
"""
import cea.scripts
from itertools import groupby
tools = sorted(cea.scripts.for_interface('dashboard'), key=lambda t: t.category)
result = {}
for category, group in groupby(tools, lambda t: t.category):
result[category] = [t for t in group]
return result
# modified from here: https://stackoverflow.com/a/827398/2260
def get_drives():
"""Get a list of valid drive letters on windows:
In [12]: get_drives()
Out[12]: ['C:', 'I:', 'K:', 'S:', 'Y:', 'Z:']
On on-windows systems, returns None
"""
if sys.platform == 'win32':
import string
from ctypes import windll
drives = []
bitmask = windll.kernel32.GetLogicalDrives()
for letter in string.uppercase:
if bitmask & 1:
drives.append(letter + ':')
bitmask >>= 1
return drives
else:
return None
def main(config):
config.restricted_to = None # allow access to the whole config file
plot_cache = cea.plots.cache.PlotCache(config.project)
app = Flask(__name__, static_folder='base/static')
app.config.from_mapping({'DEBUG': True,
'SECRET_KEY': 'secret'})
# provide the list of tools
@app.context_processor
def tools_processor():
return dict(tools=list_tools(), drives=get_drives())
# @app.context_processor
# def dashboards_processor():
# dashboards = cea.plots.read_dashboards(config, plot_cache)
# return dict(dashboards=dashboards)
@app.context_processor
def project_processor():
return dict(project_name=os.path.basename(config.project))
@app.context_processor
def scenario_processor():
return dict(scenario_name=os.path.basename(config.scenario_name))
@app.template_filter('escapejs')
def escapejs(text):
"""Escape text for a javascript string (without surrounding quotes)"""
escapes = {
'\\': '\\u005C',
'\'': '\\u0027',
'"': '\\u0022',
'>': '\\u003E',
'<': '\\u003C',
'&': '\\u0026',
'=': '\\u003D',
'-': '\\u002D',
';': '\\u003B',
u'\u2028': '\\u2028',
u'\u2029': '\\u2029'
}
# Escape every ASCII character with a value less than 32.
escapes.update(('%c' % z, '\\u%04X' % z) for z in range(32))
retval = []
for char in text:
if escapes.has_key(char):
retval.append(escapes[char])
else:
retval.append(char)
return "".join(retval)
@app.template_filter('join_path')
def join_path(path1, path2):
return os.path.join(path1, path2)
@app.template_filter('join_paths')
def join_paths(path_list, loop_index):
if path_list and path_list[0].endswith(':'):
# os.path.join will not add a separator to "C:", see here:
path_list[0] = path_list[0] + os.path.sep
result = os.path.join(*path_list[:loop_index])
print('join_paths(%(path_list)s, %(loop_index)s) --> %(result)s' % locals())
return result
import base.routes
import tools.routes
import plots.routes
import inputs.routes
import project.routes
import landing.routes
app.register_blueprint(base.routes.blueprint)
app.register_blueprint(tools.routes.blueprint)
app.register_blueprint(plots.routes.blueprint)
app.register_blueprint(inputs.routes.blueprint)
app.register_blueprint(project.routes.blueprint)
app.register_blueprint(landing.routes.blueprint)
# keep a copy of the configuration we're using
app.cea_config = config
app.plot_cache = plot_cache
# keep a list of running scripts - (Process, Connection)
# the protocol for the Connection messages is tuples ('stdout'|'stderr', str)
app.workers = {} # script-name -> (Process, Connection)
# FIXME: this needs to be replaced with a better solution
threading.Timer(0.5, lambda: webbrowser.open('http://localhost:5050')).start()
app.run(host='localhost', port=5050, threaded=False, debug=config.debug)
if __name__ == '__main__':
main(cea.config.Configuration())
|
Python
| 0.000001
|
@@ -1383,52 +1383,8 @@
ng(%7B
-'DEBUG': True,%0A
'SEC
|
4aa8f6ad999f87151f16681c2b968b680bb7eb66
|
autoflush (#9385)
|
src/transformers/utils/logging.py
|
src/transformers/utils/logging.py
|
# coding=utf-8
# Copyright 2020 Optuna, Hugging Face
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Logging utilities. """
import logging
import os
import threading
from logging import CRITICAL # NOQA
from logging import DEBUG # NOQA
from logging import ERROR # NOQA
from logging import FATAL # NOQA
from logging import INFO # NOQA
from logging import NOTSET # NOQA
from logging import WARN # NOQA
from logging import WARNING # NOQA
from typing import Optional
_lock = threading.Lock()
_default_handler: Optional[logging.Handler] = None
log_levels = {
"debug": logging.DEBUG,
"info": logging.INFO,
"warning": logging.WARNING,
"error": logging.ERROR,
"critical": logging.CRITICAL,
}
_default_log_level = logging.WARNING
def _get_default_logging_level():
"""
If TRANSFORMERS_VERBOSITY env var is set to one of the valid choices return that as the new default level. If it is
not - fall back to ``_default_log_level``
"""
env_level_str = os.getenv("TRANSFORMERS_VERBOSITY", None)
if env_level_str:
if env_level_str in log_levels:
return log_levels[env_level_str]
else:
logging.getLogger().warning(
f"Unknown option TRANSFORMERS_VERBOSITY={env_level_str}, "
f"has to be one of: { ', '.join(log_levels.keys()) }"
)
return _default_log_level
def _get_library_name() -> str:
return __name__.split(".")[0]
def _get_library_root_logger() -> logging.Logger:
return logging.getLogger(_get_library_name())
def _configure_library_root_logger() -> None:
global _default_handler
with _lock:
if _default_handler:
# This library has already configured the library root logger.
return
_default_handler = logging.StreamHandler() # Set sys.stderr as stream.
# Apply our default configuration to the library root logger.
library_root_logger = _get_library_root_logger()
library_root_logger.addHandler(_default_handler)
library_root_logger.setLevel(_get_default_logging_level())
library_root_logger.propagate = False
def _reset_library_root_logger() -> None:
global _default_handler
with _lock:
if not _default_handler:
return
library_root_logger = _get_library_root_logger()
library_root_logger.removeHandler(_default_handler)
library_root_logger.setLevel(logging.NOTSET)
_default_handler = None
def get_logger(name: Optional[str] = None) -> logging.Logger:
"""
Return a logger with the specified name.
This function is not supposed to be directly accessed unless you are writing a custom transformers module.
"""
if name is None:
name = _get_library_name()
_configure_library_root_logger()
return logging.getLogger(name)
def get_verbosity() -> int:
"""
Return the current level for the 🤗 Transformers's root logger as an int.
Returns:
:obj:`int`: The logging level.
.. note::
🤗 Transformers has following logging levels:
- 50: ``transformers.logging.CRITICAL`` or ``transformers.logging.FATAL``
- 40: ``transformers.logging.ERROR``
- 30: ``transformers.logging.WARNING`` or ``transformers.logging.WARN``
- 20: ``transformers.logging.INFO``
- 10: ``transformers.logging.DEBUG``
"""
_configure_library_root_logger()
return _get_library_root_logger().getEffectiveLevel()
def set_verbosity(verbosity: int) -> None:
"""
Set the vebosity level for the 🤗 Transformers's root logger.
Args:
verbosity (:obj:`int`):
Logging level, e.g., one of:
- ``transformers.logging.CRITICAL`` or ``transformers.logging.FATAL``
- ``transformers.logging.ERROR``
- ``transformers.logging.WARNING`` or ``transformers.logging.WARN``
- ``transformers.logging.INFO``
- ``transformers.logging.DEBUG``
"""
_configure_library_root_logger()
_get_library_root_logger().setLevel(verbosity)
def set_verbosity_info():
"""Set the verbosity to the :obj:`INFO` level."""
return set_verbosity(INFO)
def set_verbosity_warning():
"""Set the verbosity to the :obj:`WARNING` level."""
return set_verbosity(WARNING)
def set_verbosity_debug():
"""Set the verbosity to the :obj:`DEBUG` level."""
return set_verbosity(DEBUG)
def set_verbosity_error():
"""Set the verbosity to the :obj:`ERROR` level."""
return set_verbosity(ERROR)
def disable_default_handler() -> None:
"""Disable the default handler of the HuggingFace Transformers's root logger."""
_configure_library_root_logger()
assert _default_handler is not None
_get_library_root_logger().removeHandler(_default_handler)
def enable_default_handler() -> None:
"""Enable the default handler of the HuggingFace Transformers's root logger."""
_configure_library_root_logger()
assert _default_handler is not None
_get_library_root_logger().addHandler(_default_handler)
def disable_propagation() -> None:
"""
Disable propagation of the library log outputs. Note that log propagation is disabled by default.
"""
_configure_library_root_logger()
_get_library_root_logger().propagate = False
def enable_propagation() -> None:
"""
Enable propagation of the library log outputs. Please disable the HuggingFace Transformers's default handler to
prevent double logging if the root logger has been configured.
"""
_configure_library_root_logger()
_get_library_root_logger().propagate = True
def enable_explicit_format() -> None:
"""
Enable explicit formatting for every HuggingFace Transformers's logger. The explicit formatter is as follows:
::
[LEVELNAME|FILENAME|LINE NUMBER] TIME >> MESSAGE
All handlers currently bound to the root logger are affected by this method.
"""
handlers = _get_library_root_logger().handlers
for handler in handlers:
formatter = logging.Formatter("[%(levelname)s|%(filename)s:%(lineno)s] %(asctime)s >> %(message)s")
handler.setFormatter(formatter)
def reset_format() -> None:
"""
Resets the formatting for HuggingFace Transformers's loggers.
All handlers currently bound to the root logger are affected by this method.
"""
handlers = _get_library_root_logger().handlers
for handler in handlers:
handler.setFormatter(None)
|
Python
| 0.999826
|
@@ -645,16 +645,27 @@
port os%0A
+import sys%0A
import t
@@ -2358,16 +2358,66 @@
stream.
+%0A _default_handler.flush = sys.stderr.flush
%0A%0A
|
bd3a075dd5c08546883b55b868f4e90f4e2e25fa
|
Fixing typo in target name.
|
src/untrusted/pthread/pthread.gyp
|
src/untrusted/pthread/pthread.gyp
|
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'includes': [
'../../../build/common.gypi',
],
'variables': {
'common_sources': [
'nc_thread.c',
'nc_mutex.c',
'nc_condvar.c',
'nc_semaphore.c',
'nc_token.c',
'nc_init_irt.c',
'../valgrind/dynamic_annotations.c',
],
},
'targets' : [
{
'target_name': 'pthread_lib',
'type': 'none',
'variables': {
'nlib_target': 'libpthread.a',
'build_glibc': 0,
'build_newlib': 1,
},
'sources': ['<@(common_sources)'],
'dependencies': [
'<(DEPTH)/native_client/tools.gyp:prep_toolchain',
],
},
{
'target_name': 'pthreadb_private_lib',
'type': 'none',
'variables': {
'nlib_target': 'libpthread_private.a',
'build_glibc': 0,
'build_newlib': 1,
},
'sources': [
'<@(common_sources)',
'nc_init_private.c',
'../irt/irt_blockhook.c',
'../irt/irt_cond.c',
'../irt/irt_mutex.c',
'../irt/irt_sem.c',
],
'dependencies': [
'<(DEPTH)/native_client/tools.gyp:prep_toolchain',
'pthread_lib'
],
},
],
}
|
Python
| 0.999999
|
@@ -820,17 +820,16 @@
'pthread
-b
_private
|
1ba20c9822887a6cd830af25fae76f11409f98a5
|
Update validation.py
|
cea/optimization/master/validation.py
|
cea/optimization/master/validation.py
|
"""
Validation
"""
from __future__ import division
import random
from cea.optimization.constants import DH_CONVERSION_TECHNOLOGIES_WITH_SPACE_RESTRICTIONS, \
DH_CONVERSION_TECHNOLOGIES_SHARE, DC_CONVERSION_TECHNOLOGIES_SHARE, DC_CONVERSION_TECHNOLOGIES_WITH_SPACE_RESTRICTIONS
def validation_main(individual_with_name_dict,
column_names_buildings_heating,
column_names_buildings_cooling,
district_heating_network,
district_cooling_network
):
if district_heating_network:
# FOR BUILDINGS CONNECTIONS - they should be inside the range
for building_name in column_names_buildings_heating:
lim_inf = 0
lim_sup = 1
while individual_with_name_dict[building_name] > lim_sup:
individual_with_name_dict[building_name] = random.randint(lim_inf, lim_sup)
# FOR BUILDINGS CONNECTIONS - constrains that at least 2 buildings should be connected to the network
lim_inf = 0
lim_sup = 1
candidate = ''
while candidate.count('1') < 2:
candidate = ''.join(str(individual_with_name_dict[building_name]) for building_name in column_names_buildings_heating)
if candidate.count('1') < 2: #there are at least two buildings connected
for building_name in column_names_buildings_heating:
individual_with_name_dict[building_name] = random.randint(lim_inf, lim_sup)
# FOR SUPPLY SYSTEMS SHARE - turn off if they are below the minimum (trick to avoid strings with on - off behavior
for technology_name, limits in DH_CONVERSION_TECHNOLOGIES_SHARE.iteritems():
minimum = limits["minimum"]
if individual_with_name_dict[technology_name] < minimum:
individual_with_name_dict[technology_name] = 0.0 #0.0 denotes off
else:
individual_with_name_dict[technology_name] = round(individual_with_name_dict[technology_name],2)
# FOR SUPPLY SYSTEMS SHARE - The share of solar technologies should be 1 (because they share the same area)
unit_name, unit_share = [], []
for technology_name, limits in DH_CONVERSION_TECHNOLOGIES_SHARE.iteritems():
minimum = limits["minimum"]
if individual_with_name_dict[technology_name] >= minimum and technology_name in DH_CONVERSION_TECHNOLOGIES_WITH_SPACE_RESTRICTIONS: # only if the unit is activated
unit_name.append(technology_name)
unit_share.append(individual_with_name_dict[technology_name])
sum_shares = sum(unit_share)
if sum_shares > 1.0: #only i the case that the sum of shares is more than the maximum of 1.0
normalized_shares = [round(i / sum_shares,2) for i in unit_share]
for column, share in zip(unit_name, normalized_shares):
individual_with_name_dict[column] = share
if district_cooling_network:
# FOR BUILDINGS CONNECTIONS
for building_name in column_names_buildings_cooling:
lim_inf = 0
lim_sup = 1
while individual_with_name_dict[building_name] > lim_sup:
individual_with_name_dict[building_name] = random.randint(lim_inf, lim_sup)
#FOR BUILDINGS CONNECTIONS - constrains that at least 2 buildings should be connected to the network
lim_inf = 0
lim_sup = 1
candidate = ''
while candidate.count('1') < 2:
candidate = ''.join(str(individual_with_name_dict[building_name]) for building_name in column_names_buildings_cooling)
if candidate.count('1') < 2: #there are at least two buildings connected
for building_name in column_names_buildings_cooling:
individual_with_name_dict[building_name] = random.randint(lim_inf, lim_sup)
# FOR SUPPLY SYSTEMS SHARE - turn off if they are below the minimum (trick to avoid strings with on - off behavior
for technology_name, limits in DC_CONVERSION_TECHNOLOGIES_SHARE.iteritems():
minimum = limits["minimum"]
if individual_with_name_dict[technology_name] < minimum:
individual_with_name_dict[technology_name] = 0.0 #0.0 denotes off
# FOR SUPPLY SYSTEMS SHARE - The share of solar technologies should be 1 (because they share the same area)
unit_name, unit_share = [], []
for technology_name, limits in DC_CONVERSION_TECHNOLOGIES_SHARE.iteritems():
minimum = limits["minimum"]
if individual_with_name_dict[technology_name] >= minimum and technology_name in DC_CONVERSION_TECHNOLOGIES_WITH_SPACE_RESTRICTIONS: # only if the unit is activated
unit_name.append(technology_name)
unit_share.append(individual_with_name_dict[technology_name])
sum_shares = sum(unit_share)
normalized_shares = [round(i / sum_shares,2) for i in unit_share]
for column, share in zip(unit_name, normalized_shares):
individual_with_name_dict[column] = share
return individual_with_name_dict
|
Python
| 0.000001
|
@@ -1093,75 +1093,8 @@
te =
- ''%0A while candidate.count('1') %3C 2:%0A candidate =
''.
@@ -1156,32 +1156,60 @@
building_name in
+%0A
column_names_bu
@@ -1225,38 +1225,37 @@
eating)%0A
- if
+while
candidate.count
@@ -1268,58 +1268,9 @@
%3C 2:
- #there are at least two buildings connected%0A
+%0A
@@ -1322,36 +1322,32 @@
ldings_heating:%0A
-
@@ -1426,16 +1426,178 @@
im_sup)%0A
+ candidate = ''.join(str(individual_with_name_dict%5Bbuilding_name%5D) for building_name in%0A column_names_buildings_heating)
%0A%0A
@@ -3563,75 +3563,8 @@
te =
- ''%0A while candidate.count('1') %3C 2:%0A candidate =
''.
@@ -3626,32 +3626,60 @@
building_name in
+%0A
column_names_bu
@@ -3703,22 +3703,21 @@
- if
+while
candida
@@ -3738,58 +3738,9 @@
%3C 2:
- #there are at least two buildings connected%0A
+%0A
@@ -3792,36 +3792,32 @@
ldings_cooling:%0A
-
@@ -3887,32 +3887,195 @@
im_inf, lim_sup)
+%0A candidate = ''.join(str(individual_with_name_dict%5Bbuilding_name%5D) for building_name in%0A column_names_buildings_cooling)
%0A%0A # FOR
|
de5b159360686839cffe82347d0db90301843e22
|
Update NASM.py
|
executors/NASM.py
|
executors/NASM.py
|
import os
import subprocess
from cptbox import CHROOTSecurity, SecurePopen
from error import CompileError
from .utils import test_executor
from .resource_proxy import ResourceProxy
from judgeenv import env
ASM_FS = ['.*\.so']
class Executor(ResourceProxy):
def __init__(self, problem_id, source_code):
super(Executor, self).__init__()
source_code_file = self._file('%s.asm' % problem_id)
with open(source_code_file, 'wb') as fo:
fo.write(source_code)
obj_file = self._file('%s.o' % problem_id)
self._executable = output_file = self._file(str(problem_id))
nasm_process = subprocess.Popen([env['runtime']['nasm'], '-f', 'elf32', source_code_file, '-o', obj_file],
stderr=subprocess.PIPE, cwd=self._dir)
_, compile_error = nasm_process.communicate()
if nasm_process.returncode != 0:
raise CompileError(compile_error)
if 'gcc' in env['runtime'] and source_code.startswith('; libc'):
ld_process = subprocess.Popen([env['runtime']['gcc'], '-m32', obj_file, '-o', output_file],
stderr=subprocess.PIPE, cwd=self._dir)
else:
ld_process = subprocess.Popen([env['runtime']['ld'], '-melf_i386', '-s', obj_file, '-o', output_file],
stderr=subprocess.PIPE, cwd=self._dir)
_, link_error = ld_process.communicate()
if ld_process.returncode != 0:
raise CompileError(link_error)
self.name = problem_id
self.warning = ('%s\n%s' % (compile_error, link_error)).strip('\n')
def launch(self, *args, **kwargs):
return SecurePopen([self.name] + list(args),
executable=self._executable,
security=CHROOTSecurity(ASM_FS),
time=kwargs.get('time'),
memory=kwargs.get('memory'),
env={}, cwd=self._dir)
def initialize():
if 'nasm' not in env['runtime'] or 'ld' not in env['runtime']:
return False
if not os.path.isfile(env['runtime']['nasm']) or not os.path.isfile(env['runtime']['ld']):
return False
return test_executor('NASM', Executor, '''\
section .text
global _start
_start:
mov eax, 4
xor ebx, ebx
inc ebx
mov ecx, msg
mov edx, len
int 80h
xor eax, eax
inc eax
int 80h
section .data
msg db 'Hello, World!', 0xA
len equ $ - msg
''')
|
Python
| 0
|
@@ -2034,16 +2034,28 @@
tialize(
+sandbox=True
):%0A i
@@ -2639,8 +2639,9 @@
msg%0A''')
+%0A
|
3192634b5468c3aff64367366f7419ad0aabf49b
|
Fix Objective-C executor
|
executors/OBJC.py
|
executors/OBJC.py
|
from subprocess import check_output, CalledProcessError
from GCCExecutor import GCCExecutor
from judgeenv import env
class Executor(GCCExecutor):
ext = '.m'
objc_flags = []
objc_ldflags = []
command = env['runtime'].get('gobjc')
name = 'OBJC'
address_grace = 131072
test_program = r'''
#import <Foundation/Foundation.h>
int main (int argc, const char * argv[]) {
NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
int ch;
while ((ch = getchar()) != EOF)
putchar(ch);
[pool drain];
return 0;
}
'''
def get_flags(self):
return self.objc_flags + super(Executor, self).get_flags()
def get_ldflags(self):
return self.objc_ldflags + super(Executor, self).get_ldflags()
def get_fs(self):
return super(Executor, self).get_fs() + ['/proc/\d+/cmdline', '/usr/lib', '/dev/urandom$']
@classmethod
def initialize(cls):
if 'gnustep-config' not in env['runtime']:
return False
try:
cls.objc_flags = check_output([env['runtime']['gnustep-config'], '--objc-flags']).split()
cls.objc_ldflags = check_output([env['runtime']['gnustep-config'], '--base-libs']).split()
except CalledProcessError as e:
return False
return super(Executor, cls).initialize()
initialize = Executor.initialize
|
Python
| 0.998125
|
@@ -914,16 +914,30 @@
lize(cls
+, sandbox=True
):%0A
@@ -1337,16 +1337,31 @@
tialize(
+sandbox=sandbox
)%0A%0Ainiti
|
47a7321682f3f47c638c133ccbcb3f9daea32e77
|
add help text for language
|
molo/core/models.py
|
molo/core/models.py
|
from django.db import models
from wagtail.wagtailcore.models import Page
from wagtail.wagtailsearch import index
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from wagtail.wagtailimages.edit_handlers import ImageChooserPanel
class HomePage(Page):
pass
class Main(Page):
parent_page_types = []
subpage_types = ['core.LanguagePage']
class LanguagePage(Page):
code = models.CharField(max_length=255)
parent_page_types = ['core.Main']
subpage_types = ['core.HomePage', 'core.SectionPage']
class Meta:
verbose_name = 'Language'
LanguagePage.content_panels = [
FieldPanel('title'),
FieldPanel('code'),
]
class SectionPage(Page):
description = models.TextField(null=True, blank=True)
subpage_types = ['core.ArticlePage']
search_fields = Page.search_fields + (
index.SearchField('description'),
)
class Meta:
verbose_name = 'Section'
SectionPage.content_panels = [
FieldPanel('title'),
FieldPanel('description'),
]
class ArticlePage(Page):
subtitle = models.TextField(null=True, blank=True)
image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
subpage_types = []
search_fields = Page.search_fields + (
index.SearchField('subtitle'),
)
class Meta:
verbose_name = 'Article'
ArticlePage.content_panels = [
FieldPanel('title'),
FieldPanel('subtitle'),
ImageChooserPanel('image'),
]
|
Python
| 0.00002
|
@@ -21,16 +21,72 @@
t models
+%0Afrom django.utils.translation import ugettext_lazy as _
%0A%0Afrom w
@@ -465,16 +465,25 @@
arField(
+%0A
max_leng
@@ -488,16 +488,83 @@
ngth=255
+,%0A help_text=_('The language code as specified in iso639-2')
)%0A%0A p
@@ -694,16 +694,18 @@
_name =
+_(
'Languag
@@ -706,16 +706,17 @@
anguage'
+)
%0A%0ALangua
@@ -749,32 +749,56 @@
FieldPanel('
+title', classname='full
title'),%0A Fie
@@ -1074,16 +1074,18 @@
_name =
+_(
'Section
@@ -1085,16 +1085,17 @@
Section'
+)
%0A%0ASectio
@@ -1127,32 +1127,56 @@
FieldPanel('
+title', classname='full
title'),%0A Fie
@@ -1602,16 +1602,18 @@
_name =
+_(
'Article
@@ -1613,16 +1613,17 @@
Article'
+)
%0A%0AArticl
@@ -1659,24 +1659,48 @@
FieldPanel('
+title', classname='full
title'),%0A
|
9ffcb95f0475c9d3a090b8de62073049f999fcf0
|
update version
|
mongoctl/version.py
|
mongoctl/version.py
|
__author__ = 'abdul'
MONGOCTL_VERSION = '0.9.0'
|
Python
| 0
|
@@ -39,11 +39,11 @@
= '0.9.
-0
+1
'%0A
|
4084e79d56e93b70d986dbaccbd1510c9fecd1d4
|
Split tests.
|
chaco/tests/linearmapper_test_case.py
|
chaco/tests/linearmapper_test_case.py
|
import unittest
from numpy import array
from numpy.testing import assert_array_almost_equal, assert_equal
from chaco.api import ArrayDataSource, DataRange1D, LinearMapper
class LinearMapperTestCase(unittest.TestCase):
def test_basic(self):
ary = array([5.0, 6.0, 7.0, 8.0, 9.0, 10.0])
ds = ArrayDataSource(ary)
r = DataRange1D(ds)
mapper = LinearMapper(range=r, low_pos=50, high_pos=100)
result = mapper.map_screen(ary)
assert_equal(result , array([50, 60, 70, 80, 90, 100]))
return
def test_reversed(self):
ary = array([5.0, 6.0, 7.0, 8.0, 9.0, 10.0])
ds = ArrayDataSource(ary)
r = DataRange1D(ds)
mapper = LinearMapper(range=r, low_pos=100, high_pos=0)
result = mapper.map_screen(ary)
assert_equal(result , array([100, 80, 60, 40, 20, 0]))
return
def test_update_screen_bounds(self):
ary = array([5.0, 6.0, 7.0, 8.0, 9.0, 10.0])
ds = ArrayDataSource(ary)
r = DataRange1D(ds)
mapper = LinearMapper(range=r, stretch_data=True)
# Initialize the bounds, then modify them.
mapper.screen_bounds = (50.0, 100.0)
mapper.screen_bounds = (40.0, 120.0)
result = mapper.map_screen(ary)
assert_array_almost_equal(
result, array([40.0, 56.0, 72.0, 88.0, 104.0, 120.0]))
mapper = LinearMapper(range=r, stretch_data=False)
# Initialize the bounds, then modify them.
mapper.screen_bounds = (50.0, 100.0)
mapper.screen_bounds = (40.0, 120.0)
result = mapper.map_screen(ary)
assert_array_almost_equal(
result, array([40.0, 50.0, 60.0, 70.0, 80.0, 90.0]))
def test_reversed_update_screen_bounds(self):
ary = array([5.0, 6.0, 7.0, 8.0, 9.0, 10.0])
ds = ArrayDataSource(ary)
r = DataRange1D(ds)
mapper = LinearMapper(range=r, stretch_data=True)
# Initialize the bounds, then modify them.
mapper.screen_bounds = (100.0, 0.0)
mapper.screen_bounds = (120.0, -10.0)
result = mapper.map_screen(ary)
assert_array_almost_equal(
result, array([120.0, 94.0, 68.0, 42.0, 16.0, -10.0]))
mapper = LinearMapper(range=r, stretch_data=False)
# Initialize the bounds, then modify them.
mapper.screen_bounds = (100.0, 0.0)
mapper.screen_bounds = (120.0, -10.0)
result = mapper.map_screen(ary)
assert_array_almost_equal(
result, array([120.0, 100.0, 80.0, 60.0, 40.0, 20.0]))
if __name__ == '__main__':
import nose
nose.run()
|
Python
| 0
|
@@ -898,32 +898,45 @@
creen_bounds
+_stretch_data
(self):%0A
ary
@@ -915,32 +915,32 @@
tch_data(self):%0A
-
ary = ar
@@ -1372,32 +1372,206 @@
04.0, 120.0%5D))%0A%0A
+ def test_update_screen_bounds_dont_stretch_data(self):%0A ary = array(%5B5.0, 6.0, 7.0, 8.0, 9.0, 10.0%5D)%0A ds = ArrayDataSource(ary)%0A r = DataRange1D(ds)%0A
mapper =
@@ -1937,16 +1937,29 @@
n_bounds
+_stretch_data
(self):%0A
@@ -2411,24 +2411,207 @@
, -10.0%5D))%0A%0A
+ def test_reversed_update_screen_bounds_dont_stretch_data(self):%0A ary = array(%5B5.0, 6.0, 7.0, 8.0, 9.0, 10.0%5D)%0A ds = ArrayDataSource(ary)%0A r = DataRange1D(ds)%0A
mapp
|
b6b8ae74eabd69defcbf46a2e0bd46512872bd40
|
Add tools to write bidsignore and dataset_description.json
|
mriqc/utils/bids.py
|
mriqc/utils/bids.py
|
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""PyBIDS tooling"""
from collections import defaultdict
DEFAULT_TYPES = ["bold", "T1w", "T2w"]
def collect_bids_data(
layout, participant_label=None, session=None, run=None, task=None, bids_type=None
):
"""Get files in dataset"""
bids_type = bids_type or DEFAULT_TYPES
if not isinstance(bids_type, (list, tuple)):
bids_type = [bids_type]
basequery = {
"subject": participant_label,
"session": session,
"task": task,
"run": run,
}
# Filter empty lists, strings, zero runs, and Nones
basequery = {k: v for k, v in basequery.items() if v}
# Start querying
imaging_data = defaultdict(list, {})
for btype in bids_type:
imaging_data[btype] = layout.get(
suffix=btype, return_type="file", extension=["nii", "nii.gz"], **basequery
)
return imaging_data
|
Python
| 0
|
@@ -128,16 +128,63 @@
ling%22%22%22%0A
+import os%0Aimport json%0Afrom pathlib import Path%0A
from col
@@ -1023,8 +1023,1968 @@
ng_data%0A
+%0A%0Adef write_bidsignore(deriv_dir):%0A bids_ignore = (%0A %22*.html%22, %22logs/%22, # Reports%0A %22*_T1w.json%22, %22*_T2w.json%22, %22*_bold.json%22, # Outputs are not yet standardized%0A )%0A ignore_file = Path(deriv_dir) / %22.bidsignore%22%0A%0A ignore_file.write_text(%22%5Cn%22.join(bids_ignore) + %22%5Cn%22)%0A%0A%0Adef write_derivative_description(bids_dir, deriv_dir):%0A from ..__about__ import __version__, __download__%0A%0A bids_dir = Path(bids_dir)%0A deriv_dir = Path(deriv_dir)%0A desc = %7B%0A 'Name': 'MRIQC - MRI Quality Control',%0A 'BIDSVersion': '1.4.0',%0A 'DatasetType': 'derivative',%0A 'GeneratedBy': %5B%7B%0A 'Name': 'MRIQC',%0A 'Version': __version__,%0A 'CodeURL': __download__,%0A %7D%5D,%0A 'HowToAcknowledge':%0A 'Please cite our paper (https://doi.org/10.1371/journal.pone.0184661).',%0A %7D%0A%0A # Keys that can only be set by environment%0A # XXX: This currently has no effect, but is a stand-in to remind us to figure out%0A # how to detect the container%0A if 'MRIQC_DOCKER_TAG' in os.environ:%0A desc%5B'GeneratedBy'%5D%5B0%5D%5B'Container'%5D = %7B%0A %22Type%22: %22docker%22,%0A %22Tag%22: f%22poldracklab/mriqc:%7Bos.environ%5B'MRIQC_DOCKER_TAG'%5D%7D%22%0A %7D%0A if 'MRIQC_SINGULARITY_URL' in os.environ:%0A desc%5B'GeneratedBy'%5D%5B0%5D%5B'Container'%5D = %7B%0A %22Type%22: %22singularity%22,%0A %22URI%22: os.getenv('MRIQC_SINGULARITY_URL')%0A %7D%0A%0A # Keys deriving from source dataset%0A orig_desc = %7B%7D%0A fname = bids_dir / 'dataset_description.json'%0A if fname.exists():%0A orig_desc = json.loads(fname.read_text())%0A%0A if 'DatasetDOI' in orig_desc:%0A desc%5B'SourceDatasets'%5D = %5B%7B%0A 'URL': f'https://doi.org/%7Borig_desc%5B%22DatasetDOI%22%5D%7D',%0A 'DOI': orig_desc%5B'DatasetDOI'%5D%0A %7D%5D%0A if 'License' in orig_desc:%0A desc%5B'License'%5D = orig_desc%5B'License'%5D%0A%0A Path.write_text(deriv_dir / 'dataset_description.json', json.dumps(desc, indent=4))%0A
|
18644a1fde3d20600692cfa156d2a1b6be059ebf
|
Change build type to RelWithDebInfo
|
ci_scripts/test_linux-daemon-cmake.py
|
ci_scripts/test_linux-daemon-cmake.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import multiprocessing as mp
import neblio_ci_libs as nci
nci.setup_travis_or_gh_actions_env_vars()
working_dir = os.getcwd()
build_dir = "wallet"
deploy_dir = os.path.join(os.environ['BUILD_DIR'],'deploy', '')
packages_to_install = \
[
"ccache",
"qt5-default",
"qt5-qmake",
"qtbase5-dev-tools",
"qttools5-dev-tools",
"qttools5-dev",
"build-essential",
"libssl-dev",
"libdb++-dev",
"libminiupnpc-dev",
"libqrencode-dev",
"libcurl4-openssl-dev",
"libldap2-dev",
"libidn11-dev",
"librtmp-dev",
"lib32z1",
"libx32z1",
"libx32z1-dev",
"zlib1g",
"zlib1g-dev",
"lib32z1-dev",
"libsodium-dev",
"libboost-all-dev",
"libdbus-glib-1-dev",
"gdb",
"python3",
"python3-pip"
]
nci.install_packages_debian(packages_to_install)
nci.mkdir_p(deploy_dir)
os.chdir(build_dir)
nci.call_with_err_code('ccache -s')
# prepend ccache to the path, necessary since prior steps prepend things to the path
os.environ['PATH'] = '/usr/lib/ccache:' + os.environ['PATH']
nci.call_with_err_code('cmake -DNEBLIO_CMAKE=1 -DCMAKE_BUILD_TYPE=Debug -DNEBLIO_DOWNLOAD_AND_TEST_ALL_TXS=OFF -DNEBLIO_RUN_NTP_PARSE_TESTS=ON ..')
nci.call_with_err_code("make -j" + str(mp.cpu_count()))
nci.call_with_err_code('ccache -s')
# download test data
nci.call_with_err_code('wget --progress=dot:giga https://files.nebl.io/test_data_mainnet.tar.xz -O ../wallet/test/data/test_data_mainnet.tar.xz')
nci.call_with_err_code('wget --progress=dot:giga https://files.nebl.io/test_data_testnet.tar.xz -O ../wallet/test/data/test_data_testnet.tar.xz')
nci.call_with_err_code('tar -xJvf ../wallet/test/data/test_data_mainnet.tar.xz -C ../wallet/test/data')
nci.call_with_err_code('tar -xJvf ../wallet/test/data/test_data_testnet.tar.xz -C ../wallet/test/data')
nci.call_with_err_code('rm ../wallet/test/data/*.tar.xz')
# run tests
os.chdir('./wallet/test')
nci.call_with_err_code('./neblio-tests')
os.chdir(build_dir)
nci.call_with_err_code('pip3 install litecoin_scrypt')
nci.call_with_err_code('python3 ../test/functional/test_runner.py feature_block.py')
nci.call_with_err_code('python3 ../test/functional/test_runner.py wallet_accounts.py')
nci.call_with_err_code('python3 ../test/functional/test_runner.py rpc_listtransactions.py')
nci.call_with_err_code('python3 ../test/functional/test_runner.py rpc_rawtransaction.py')
nci.call_with_err_code('python3 ../test/functional/test_runner.py rpc_blockchain.py')
nci.call_with_err_code('python3 ../test/functional/test_runner.py p2p_invalid_block.py')
nci.call_with_err_code('python3 ../test/functional/test_runner.py p2p_invalid_tx.py')
print("")
print("")
print("Building finished successfully.")
print("")
|
Python
| 0
|
@@ -1067,13 +1067,22 @@
YPE=
-Debug
+RelWithDebInfo
-DN
|
363bb6f409b4c3184a5b9e72c095b6aca207a11f
|
Cut 0.9.1 final
|
fabric/version.py
|
fabric/version.py
|
"""
Current Fabric version constant plus version pretty-print method.
This functionality is contained in its own module to prevent circular import
problems with ``__init__.py`` (which is loaded by setup.py during installation,
which in turn needs access to this version information.)
"""
VERSION = (0, 9, 0, "final", 0)
def get_version(form='short'):
"""
Return a version string for this package, based on `VERSION`.
Takes a single argument, ``form``, which should be one of the following
strings:
* ``branch``: just the major + minor, e.g. "0.9", "1.0".
* ``short`` (default): compact, e.g. "0.9rc1", "0.9.0". For package
filenames or SCM tag identifiers.
* ``normal``: human readable, e.g. "0.9", "0.9.1", "0.9 beta 1". For e.g.
documentation site headers.
* ``verbose``: like ``normal`` but fully explicit, e.g. "0.9 final". For
tag commit messages, or anywhere that it's important to remove ambiguity
between a branch and the first final release within that branch.
"""
# Setup
versions = {}
branch = "%s.%s" % (VERSION[0], VERSION[1])
tertiary = VERSION[2]
type_ = VERSION[3]
final = (type_ == "final")
type_num = VERSION[4]
firsts = "".join([x[0] for x in type_.split()])
# Branch
versions['branch'] = branch
# Short
v = branch
if (tertiary or final):
v += "." + str(tertiary)
if not final:
v += firsts
if type_num:
v += str(type_num)
versions['short'] = v
# Normal
v = branch
if tertiary:
v += "." + str(tertiary)
if not final:
v += " " + type_
if type_num:
v += " " + str(type_num)
versions['normal'] = v
# Verbose
v = branch
if tertiary:
v += "." + str(tertiary)
if not final:
v += " " + type_
if type_num:
v += " " + str(type_num)
else:
v += " final"
versions['verbose'] = v
try:
return versions[form]
except KeyError:
raise TypeError, '"%s" is not a valid form specifier.' % form
__version__ = get_version('short')
|
Python
| 0
|
@@ -300,17 +300,17 @@
(0, 9,
-0
+1
, %22final
|
9a3cade010fac5831db3aac2b6af020c3f816b73
|
Make use of CMS content in image group test helper
|
src/zeit/content/image/testing.py
|
src/zeit/content/image/testing.py
|
from __future__ import with_statement
import gocept.httpserverlayer.wsgi
import gocept.selenium
import mimetypes
import os.path
import pkg_resources
import zeit.cms.repository.interfaces
import zeit.cms.testing
import zeit.content.image.image
import zeit.content.image.imagegroup
import zeit.workflow.testing
import zope.component
product_config = """
<product-config zeit.content.image>
variant-source file://{here}/tests/fixtures/variants.xml
legacy-variant-source file://{here}/tests/fixtures/legacy-variants.xml
</product-config>
""".format(here=pkg_resources.resource_filename(__name__, '.'))
ZCML_LAYER = zeit.cms.testing.ZCMLLayer(
'ftesting.zcml',
product_config=product_config
+ zeit.cms.testing.cms_product_config
+ zeit.workflow.testing.product_config)
WSGI_LAYER = zeit.cms.testing.WSGILayer(
name='WSGILayer', bases=(ZCML_LAYER,))
HTTP_LAYER = gocept.httpserverlayer.wsgi.Layer(
name='HTTPLayer', bases=(WSGI_LAYER,))
WD_LAYER = gocept.selenium.WebdriverLayer(
name='WebdriverLayer', bases=(HTTP_LAYER,))
WEBDRIVER_LAYER = gocept.selenium.WebdriverSeleneseLayer(
name='WebdriverSeleneseLayer', bases=(WD_LAYER,))
def create_local_image(filename):
image = zeit.content.image.image.LocalImage(mimeType='image/jpeg')
fh = image.open('w')
file_name = pkg_resources.resource_filename(
__name__, 'browser/testdata/%s' % filename)
fh.write(open(file_name, 'rb').read())
fh.close()
return image
def create_image_group():
repository = zope.component.getUtility(
zeit.cms.repository.interfaces.IRepository)
repository['image-group'] = zeit.content.image.imagegroup.ImageGroup()
group = repository['image-group']
for filename in ('new-hampshire-450x200.jpg',
'new-hampshire-artikel.jpg',
'obama-clinton-120x120.jpg'):
group[filename] = create_local_image(filename)
return group
def create_image_group_with_master_image(file_name=None):
repository = zope.component.getUtility(
zeit.cms.repository.interfaces.IRepository)
if file_name is None:
file_name = 'DSC00109_2.JPG'
fh = repository['2006'][file_name].open()
else:
fh = open(file_name)
extension = os.path.splitext(file_name)[-1].lower()
group = zeit.content.image.imagegroup.ImageGroup()
group.master_image = u'master-image' + extension
repository['group'] = group
image = zeit.content.image.image.LocalImage()
image.mimeType = mimetypes.types_map[extension]
image.open('w').write(fh.read())
repository['group'][group.master_image] = image
return repository['group']
|
Python
| 0
|
@@ -2210,16 +2210,126 @@
else:%0A
+ try:%0A fh = zeit.cms.interfaces.ICMSContent(file_name).open()%0A except TypeError:%0A
|
5dcff1c4b43465c39963d8f16302d8a66fda96ff
|
refactor sla methods
|
cla_backend/apps/complaints/models.py
|
cla_backend/apps/complaints/models.py
|
# -*- coding: utf-8 -*-
import datetime
from django.conf import settings
from django.contrib.contenttypes.fields import GenericRelation
from django.db import models
from django.utils import timezone
from model_utils.models import TimeStampedModel
from cla_eventlog.constants import LOG_LEVELS
from complaints.constants import COMPLAINT_SOURCE, SLA_DAYS, \
HOLDING_LETTER_SLA_DAYS
from legalaid.utils.sla import get_day_sla_time
class ComplaintManager(models.Manager):
def get_queryset(self):
return super(ComplaintManager, self).get_queryset().select_related(
'eod',
'eod__case',
'eod__case__personal_details',
'eod__case__eligibility_check',
'eod__case__eligibility_check__category',
'category',
).prefetch_related(
'eod__categories',
)
class Complaint(TimeStampedModel):
eod = models.ForeignKey('legalaid.EODDetails')
description = models.TextField(null=True, blank=True)
source = models.CharField(max_length=15, choices=COMPLAINT_SOURCE,
blank=True)
level = models.PositiveSmallIntegerField(
choices=((LOG_LEVELS.HIGH, 'Major'), (LOG_LEVELS.MINOR, 'Minor')),
default=LOG_LEVELS.MINOR
)
justified = models.NullBooleanField()
resolved = models.NullBooleanField()
category = models.ForeignKey('Category', blank=True, null=True)
owner = models.ForeignKey(
settings.AUTH_USER_MODEL,
related_name='%(app_label)s_%(class)s_owner',
limit_choices_to={'operator__is_manager': True},
blank=True,
null=True)
created_by = models.ForeignKey(
settings.AUTH_USER_MODEL,
related_name='%(app_label)s_%(class)s_created_by',
limit_choices_to={'operator__isnull': False},
blank=True,
null=True)
logs = GenericRelation('cla_eventlog.ComplaintLog',
related_query_name='complaint')
objects = ComplaintManager()
class Meta(object):
ordering = ('-created',)
def __init__(self, *args, **kwargs):
self._closed = NotImplemented
self._holding_letter = NotImplemented
self._full_letter = NotImplemented
super(Complaint, self).__init__(*args, **kwargs)
def __unicode__(self):
return u'Complaint on case %s' % self.eod.case
@property
def case(self):
return self.eod.case
@property
def status_label(self):
if self.resolved is not None:
return 'resolved' if self.resolved else 'unresolved'
if self.owner_id:
return 'pending'
return 'received'
@property
def closed(self):
"""
The date the complaint was closed if it has a closed event log
NB: Not loaded here if this model is being serialised in a complaint
view set
"""
if self._closed is NotImplemented:
last_closed = self.logs.filter(code='COMPLAINT_CLOSED').order_by('-created').first()
self._closed = last_closed.created if last_closed else None
return self._closed
@closed.setter
def closed(self, value):
self._closed = value
@property
def holding_letter(self):
"""
The date the latest holding letter was sent
NB: Not loaded here if this model is being serialised in a complaint
view set
"""
if self._holding_letter is NotImplemented:
last_closed = self.logs.filter(code='HOLDING_LETTER_SENT').order_by('-created').first()
self._holding_letter = last_closed.created if last_closed else None
return self._holding_letter
@holding_letter.setter
def holding_letter(self, value):
self._holding_letter = value
@property
def full_letter(self):
"""
The date the latest full response was sent
NB: Not loaded here if this model is being serialised in a complaint
view set
"""
if self._full_letter is NotImplemented:
last_closed = self.logs.filter(code='FULL_RESPONSE_SENT').order_by('-created').first()
self._full_letter = last_closed.created if last_closed else None
return self._full_letter
@full_letter.setter
def full_letter(self, value):
self._full_letter = value
@property
def out_of_sla(self):
"""
True if complaint is unresolved for over 15 working days.
"""
sla = get_day_sla_time(self.created, SLA_DAYS)
return self.closed is None and timezone.now() > sla
@property
def holding_letter_out_of_sla(self):
"""
True if holding letter is not sent within 1 working day.
"""
holding_sla = get_day_sla_time(self.created, HOLDING_LETTER_SLA_DAYS)
return self.holding_letter is None and timezone.now() > holding_sla
def requires_action_at(self):
if self.holding_letter is None:
return get_day_sla_time(self.created, HOLDING_LETTER_SLA_DAYS)
elif self.closed is None:
return get_day_sla_time(self.created, SLA_DAYS)
class Category(TimeStampedModel):
name = models.CharField(max_length=255)
class Meta:
ordering = ('name',)
verbose_name_plural = 'categories'
def __unicode__(self):
return self.name
|
Python
| 0.000282
|
@@ -4567,24 +4567,25 @@
return
+(
self.closed
@@ -4584,27 +4584,18 @@
.closed
-is None and
+or
timezon
@@ -4601,16 +4601,17 @@
ne.now()
+)
%3E sla%0A%0A
@@ -4843,24 +4843,25 @@
return
+(
self.holding
@@ -4872,19 +4872,10 @@
ter
-is None and
+or
tim
@@ -4885,16 +4885,17 @@
ne.now()
+)
%3E holdi
|
634ece667007468a6555e6ce0f496151c0cbf15b
|
Use a value with more entry so it's less likely it occurs in real world string payload.
|
st2common/st2common/util/jinja.py
|
st2common/st2common/util/jinja.py
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import jinja2
import six
import re
import semver
__all__ = [
'get_jinja_environment',
'render_values'
]
# Magic string to which None type is serialized when using use_none filter
NONE_MAGIC_VALUE = '%__%NONE%__%'
class CustomFilters(object):
'''
Collection of CustomFilters for jinja2
'''
###############
# regex filters
@staticmethod
def _get_regex_flags(ignorecase=False):
return re.I if ignorecase else 0
@staticmethod
def _regex_match(value, pattern='', ignorecase=False):
if not isinstance(value, six.string_types):
value = str(value)
flags = CustomFilters._get_regex_flags(ignorecase)
return bool(re.match(pattern, value, flags))
@staticmethod
def _regex_replace(value='', pattern='', replacement='', ignorecase=False):
if not isinstance(value, six.string_types):
value = str(value)
flags = CustomFilters._get_regex_flags(ignorecase)
regex = re.compile(pattern, flags)
return regex.sub(replacement, value)
@staticmethod
def _regex_search(value, pattern='', ignorecase=False):
if not isinstance(value, six.string_types):
value = str(value)
flags = CustomFilters._get_regex_flags(ignorecase)
return bool(re.search(pattern, value, flags))
#################
# version filters
@staticmethod
def _version_compare(value, pattern):
return semver.compare(value, pattern)
@staticmethod
def _version_more_than(value, pattern):
return semver.compare(value, pattern) == 1
@staticmethod
def _version_less_than(value, pattern):
return semver.compare(value, pattern) == -1
@staticmethod
def _version_equal(value, pattern):
return semver.compare(value, pattern) == 0
@staticmethod
def _version_match(value, pattern):
return semver.match(value, pattern)
@staticmethod
def _version_bump_major(value):
return semver.bump_major(value)
@staticmethod
def _version_bump_minor(value):
return semver.bump_minor(value)
@staticmethod
def _version_bump_patch(value):
return semver.bump_patch(value)
@staticmethod
def _version_strip_patch(value):
return "{major}.{minor}".format(**semver.parse(value))
@staticmethod
def _use_none(value):
if value is None:
return NONE_MAGIC_VALUE
return value
@staticmethod
def get_filters():
return {
'regex_match': CustomFilters._regex_match,
'regex_replace': CustomFilters._regex_replace,
'regex_search': CustomFilters._regex_search,
'version_compare': CustomFilters._version_compare,
'version_more_than': CustomFilters._version_more_than,
'version_less_than': CustomFilters._version_less_than,
'version_equal': CustomFilters._version_equal,
'version_match': CustomFilters._version_match,
'version_bump_major': CustomFilters._version_bump_major,
'version_bump_minor': CustomFilters._version_bump_minor,
'version_bump_patch': CustomFilters._version_bump_patch,
'version_strip_patch': CustomFilters._version_strip_patch,
'use_none': CustomFilters._use_none
}
def get_jinja_environment(allow_undefined=False):
'''
jinja2.Environment object that is setup with right behaviors and custom filters.
:param strict_undefined: If should allow undefined variables in templates
:type strict_undefined: ``bool``
'''
undefined = jinja2.Undefined if allow_undefined else jinja2.StrictUndefined
env = jinja2.Environment(undefined=undefined,
trim_blocks=True,
lstrip_blocks=True)
env.filters.update(CustomFilters.get_filters())
env.tests['in'] = lambda item, list: item in list
return env
def render_values(mapping=None, context=None, allow_undefined=False):
"""
Render an incoming mapping using context provided in context using Jinja2. Returns a dict
containing rendered mapping.
:param mapping: Input as a dictionary of key value pairs.
:type mapping: ``dict``
:param context: Context to be used for dictionary.
:type context: ``dict``
:rtype: ``dict``
"""
if not context or not mapping:
return mapping
# Add in special __context variable that provides an easy way to get access to entire context.
# This mean __context is a reserve key word although backwards compat is preserved by making
# sure that real context is updated later and therefore will override the __context value.
super_context = {}
super_context['__context'] = context
super_context.update(context)
env = get_jinja_environment(allow_undefined=allow_undefined)
rendered_mapping = {}
for k, v in six.iteritems(mapping):
# jinja2 works with string so transform list and dict to strings.
reverse_json_dumps = False
if isinstance(v, dict) or isinstance(v, list):
v = json.dumps(v)
reverse_json_dumps = True
else:
v = str(v)
try:
rendered_v = env.from_string(v).render(super_context)
except Exception as e:
# Attach key and value which failed the rendering
e.key = k
e.value = v
raise e
# no change therefore no templatization so pick params from original to retain
# original type
if rendered_v == v:
rendered_mapping[k] = mapping[k]
continue
if reverse_json_dumps:
rendered_v = json.loads(rendered_v)
rendered_mapping[k] = rendered_v
return rendered_mapping
|
Python
| 0
|
@@ -996,16 +996,21 @@
LUE = '%25
+*****
__%25NONE%25
@@ -1011,16 +1011,21 @@
%25NONE%25__
+*****
%25'%0A%0A%0Acla
|
cbf5f26d4a1860082d9e29524146298ad0b4e0db
|
Support for pkgdb teams.
|
fedmsg_genacls.py
|
fedmsg_genacls.py
|
# -*- coding: utf-8 -*-
""" A fedmsg consumer that listens to pkgdb messages to update gitosis acls
Authors: Janez Nemanič <janez.nemanic@gmail.com>
Ralph Bean <rbean@redhat.com>
"""
import pprint
import subprocess
import os
import fedmsg.consumers
import moksha.hub.reactor
class GenACLsConsumer(fedmsg.consumers.FedmsgConsumer):
# Really, we want to use this specific topic to listen to.
topic = 'org.fedoraproject.prod.pkgdb.acl.update'
# But for testing, we'll just listen to all topics with this:
#topic = '*'
config_key = 'genacls.consumer.enabled'
def __init__(self, hub):
super(GenACLsConsumer, self).__init__(hub)
# This is required. It is the number of seconds that we should wait
# until we ultimately act on a pkgdb message.
self.delay = self.hub.config['genacls.consumer.delay']
# We use this to manage our state
self.queued_messages = []
def consume(self, msg):
msg = msg['body']
self.log.info("Got a message %r" % msg['topic'])
def delayed_consume():
if self.queued_messages:
try:
self.action(self.queued_messages)
finally:
# Empty our list at the end of the day.
self.queued_messages = []
else:
self.log.debug("Woke up, but there were no messages.")
self.queued_messages.append(msg)
moksha.hub.reactor.reactor.callLater(self.delay, delayed_consume)
def action(self, messages):
self.log.debug("Acting on %s" % pprint.pformat(messages))
command = '/usr/bin/sudo -u gen-acls /usr/local/bin/genacls.sh'.split()
self.log.info("Running %r" % command)
process = subprocess.Popen(args=command)
stdout, stderr = process.communicate()
if process.returncode == 0:
self.log.info("%r was successful" % command)
else:
self.log.error("%r exited with %r, stdout: %s, stderr: %s" % (
command, process.returncode, stdout, stderr))
|
Python
| 0
|
@@ -355,76 +355,280 @@
#
-Really, we want to use this specific topic to listen to.%0A topic =
+Because we are interested in a variety of topics, we tell moksha that%0A # we're interested in all of them (it doesn't know how to do complicated%0A # distinctions). But then we'll filter later in our consume() method.%0A topic = '*'%0A interesting_topics = %5B%0A
'or
@@ -669,91 +669,132 @@
ate'
+,
%0A
-# But for testing, we'll just listen to all topics with this:%0A #topic = '*'
+ 'org.fedoraproject.prod.fas.group.member.sponsor',%0A 'org.fedoraproject.prod.fas.group.member.remove',%0A %5D
%0A%0A
@@ -1189,16 +1189,16 @@
s = %5B%5D%0A%0A
-
def
@@ -1217,16 +1217,92 @@
, msg):%0A
+ if msg%5B'topic'%5D not in self.interesting_topics:%0A return%0A%0A
|
5022dada0153bca2c99f2faf302827307078e91a
|
Remove timestamp from forms
|
fellowms/forms.py
|
fellowms/forms.py
|
from django.forms import ModelForm, widgets
from .models import Fellow, Event, Expense, Blog
class FellowForm(ModelForm):
class Meta:
model = Fellow
exclude = [
"user",
"home_lon",
"home_lat",
"funding_notes",
"inauguration_year",
"fellowship_grant",
"mentor",
]
class EventForm(ModelForm):
class Meta:
model = Event
exclude = [
"status",
"ad_status",
"budget_approve",
"report_url",
]
# We don't want to expose fellows' data
# so we will request the email
# and match on the database.
labels = {
'fellow': 'Fellow',
'url': "Event's homepage url",
'name': "Event's name",
}
class ExpenseForm(ModelForm):
class Meta:
model = Expense
exclude = [
'id',
'status',
]
class BlogForm(ModelForm):
class Meta:
model = Blog
exclude = [
'status',
]
|
Python
| 0.000054
|
@@ -384,24 +384,76 @@
%22mentor%22,%0A
+ %22added%22,%0A %22updated%22,%0A
@@ -450,32 +450,32 @@
%5D%0A
-
%0A%0Aclass EventFor
@@ -661,24 +661,76 @@
eport_url%22,%0A
+ %22added%22,%0A %22updated%22,%0A
@@ -1152,32 +1152,84 @@
'status',%0A
+ %22added%22,%0A %22updated%22,%0A
@@ -1308,32 +1308,32 @@
exclude = %5B%0A
-
@@ -1334,32 +1334,84 @@
'status',%0A
+ %22added%22,%0A %22updated%22,%0A
|
49dc6eec1a6d408723159dec85fb00770edb7fc5
|
raise error if chunk out of range
|
lacli/source/chunked.py
|
lacli/source/chunked.py
|
from __future__ import division
import os
import math
from sys import maxint
from itertools import imap
from tempfile import mkstemp
from boto.utils import compute_md5
from filechunkio import FileChunkIO
class FileHash(object):
def __get__(self, instance, owner):
try:
return (instance.md5, instance.b64md5)
except AttributeError:
instance.seek(0)
(instance.md5, instance.b64md5, size) = compute_md5(
instance, instance.bytes)
instance.seek(0)
return (instance.md5, instance.b64md5)
class SavedPart(file):
def __init__(self, source, num, *args, **kwargs):
super(SavedPart, self).__init__(*args, **kwargs)
self.bytes = source.chunksize(num)
hash = FileHash()
class FilePart(FileChunkIO):
def __init__(self, source, num, *args, **kwargs):
super(FilePart, self).__init__(
source.path, 'r',
offset=source.chunkstart(num),
bytes=source.chunksize(num), *args, **kwargs)
hash = FileHash()
class ChunkedFile(object):
minchunk = 5242880
maxchunk = 104857600
def __init__(self, path, skip=0, chunk=None):
self.path = path
self.isfile = os.path.isfile(path)
size = maxint
if self.isfile:
size = os.path.getsize(path)
assert size >= skip
self.skip = skip
self.size = size-skip
self.chunk = chunk
if self.chunk is None:
self.chunk = min(max(int(self.size/100), self.minchunk),
self.maxchunk)
self.chunks = int(math.ceil(self.size/self.chunk))
if self.chunks == 0:
self.chunks = 1
def _savedchunks(self, tempdir):
# split file and save parts to disk
f = open(self.path, "rb")
def _save(seq):
b = f.read(self.chunk)
if b:
prefix = "part-{:>4}".format(seq)
fh, fn = mkstemp(dir=tempdir, prefix=prefix)
os.write(fh, b)
os.close(fh)
return fn
f.close()
return imap(_save, xrange(self.chunks))
def chunkstart(self, num):
return self.skip + num * self.chunk
def chunksize(self, num):
start = num * self.chunk
if (start + self.chunk > self.size):
# chunk end is EOF
return self.size - start
else:
return self.chunk
def chunkfile(self, seq, fname):
if self.isfile:
return FilePart(self, seq)
else:
return SavedPart(self, seq, fname)
|
Python
| 0.000002
|
@@ -2198,32 +2198,121 @@
art(self, num):%0A
+ if num %3E= self.chunks:%0A raise ValueError(%22chunk number out of range%22)%0A
return s
@@ -2385,22 +2385,16 @@
start =
- num *
self.ch
@@ -2392,24 +2392,34 @@
= self.chunk
+start(num)
%0A if
|
d866424562c2e392d04d03cffa25667211e1d56b
|
cleaned up query
|
lagesonum/bottle_app.py
|
lagesonum/bottle_app.py
|
# coding: utf-8
# Der WSGI-Server auf PythonAnywhere verwendet diese Datei
import sqlite3
import os
import time
import bottle
from bottle import default_app, route, view
from bottle import request
from bottle_utils.i18n import I18NPlugin
from bottle_utils.i18n import lazy_gettext as _
import input_number as ip
from dbhelper import initialize_database
import hashlib
MOD_PATH = os.path.dirname(os.path.abspath(__file__))
DB_PATH = os.path.abspath(os.path.join(MOD_PATH, '..', '..', "lagesonr.db"))
if not os.path.exists(DB_PATH):
initialize_database(DB_PATH)
lagesonrdb = sqlite3.connect(DB_PATH)
LANGS = [
('de_DE', 'Deutsch'),
('en_US', 'English'),
]
# ('ar_AR', 'Arab'),
DEFAULT_LOCALE = 'en_US'
@route('/user-agent')
def user_agent():
"""
returns an identification hash based on information from the user's browser
:return: string
"""
usr_agent = str(request.environ.get('HTTP_USER_AGENT'))
usr_lang = str(request.environ.get('HTTP_ACCEPT_LANGUAGE'))
usr_ip = str(request.remote_addr)
usr_fingerprint = usr_agent + usr_lang + usr_ip
usr_hash = hashlib.md5(usr_fingerprint.encode("utf-8"))
# no return
return ()
@route('/')
@view('start_page')
def index():
"""1.Seite: Helfer steht am LaGeSo und gibt Nummern ein [_____] """
return {'entered': []}
@route('/', method='POST')
@view('start_page')
def do_enter():
import pdb
#pdb.set_trace()
numbers = request.forms.get('numbers')
timestamp = time.asctime()
numbers = [num.strip() for num in numbers.split('\n')]
result_num = []
with lagesonrdb as con:
cur = con.cursor()
for num in set(numbers):
if ip.is_valid_number(num) and ip.is_ok_with_db(
num) and ip.is_valid_user():
insert = 'INSERT INTO NUMBERS(NUMBER, TIME, PLACE, USER) VALUES ("%s", "%s", "-", "-")' % (
num, timestamp)
cur.execute(insert)
result_num.append(num)
else:
result_num.append("INVALID INPUT: %s" % num)
return {'entered': result_num, 'timestamp': timestamp}
@route('/query')
@view('query_page')
def query_number():
"""
2. Seite: Flüchtling fragt ab: Wurde meine Nummer gezogen? [_____]
=> Antwort: X mal am LaGeSo eingetragen von (Erste Eintragung)
DD.MM.YY hh bis DD.MM.YY hh (LetzteEintragung)
application = default_app()
"""
return {'result': '-', 'timestamp_first': '-', 'timestamp_last': '-',
'n': '0'}
@route('/query', method='POST')
@view('query_page')
def do_query():
number = request.forms.get('number')
if ip.is_valid_number(number) and ip.is_ok_with_db(
number) and ip.is_valid_user():
with lagesonrdb as con:
cur = con.cursor()
query = 'SELECT TIME FROM NUMBERS WHERE NUMBER="%s" ORDER BY TIME' % number
result = list(cur.execute(query))
n = len(result)
if n > 0:
timestamp_first, timestamp_last = result[0][0], result[-1][0]
return {'result': number, 'timestamp_first': timestamp_first,
'timestamp_last': timestamp_last, 'n': n}
return {'result': 'number', 'timestamp_first': 'NOT FOUND',
'timestamp_last': '-', 'n': '0'}
else:
return {"INVALID INPUT": number}
# findet templates im gleichen Verzeichnis
bottle.TEMPLATE_PATH.append(MOD_PATH)
app = default_app()
application = I18NPlugin(app, langs=LANGS, default_locale=DEFAULT_LOCALE,
domain='messages',
locale_dir=os.path.join(MOD_PATH, 'locales'))
|
Python
| 0.998419
|
@@ -2633,16 +2633,79 @@
number')
+%0A timestamp_first = '-'%0A timestamp_last = '-'%0A n = '0'
%0A%0A if
@@ -3140,98 +3140,78 @@
- return %7B'result': number, 'timestamp_first': timestamp_first,%0A
+else:%0A timestamp_first = 'NOT FOUND'%0A else:%0A
'tim
@@ -3198,33 +3198,32 @@
else:%0A
-'
timestamp_last':
@@ -3220,47 +3220,37 @@
amp_
-last': timestamp_last, 'n': n%7D%0A
+first = 'INVALID INPUT'%0A%0A
-
retu
@@ -3263,24 +3263,22 @@
esult':
-'
number
-'
, 'times
@@ -3290,27 +3290,31 @@
first':
-'NOT FOUND'
+timestamp_first
,%0A
@@ -3345,73 +3345,30 @@
t':
-'-', 'n': '0'%7D%0A%0A else:%0A return %7B%22INVALID INPUT%22: number
+timestamp_last, 'n': n
%7D%0A%0A%0A
|
7163894c9c93ff31757de22dab01d1a2f00c67bf
|
Remove deprecated contrib package access.
|
law/contrib/__init__.py
|
law/contrib/__init__.py
|
# coding: utf-8
__all__ = ["available_packages", "loaded_packages", "load", "load_all"]
import os
import logging
import types
import uuid
import glob
import law
from law.util import law_src_path, flatten
thisdir = os.path.dirname(os.path.abspath(__file__))
logger = logging.getLogger(__name__)
#: List of names of available contrib packages.
available_packages = [
os.path.basename(os.path.dirname(contrib_init))
for contrib_init in glob.glob(os.path.join(thisdir, "*", "__init__.py"))
]
#: List of names of already loaded contrib packages.
loaded_packages = []
def load(*packages):
"""
Loads contrib *packages* and adds them to the law namespace. Example:
.. code-block:: python
import law
law.contrib.load("docker")
law.docker.DockerSandbox(...)
It is ensured that packages are loaded only once.
"""
for pkg in flatten(packages):
if pkg in loaded_packages:
logger.debug("skip contrib package '{}', already loaded".format(pkg))
continue
elif not os.path.exists(law_src_path("contrib", pkg, "__init__.py")):
raise Exception("contrib package '{}' does not exist".format(pkg))
elif getattr(law, pkg, None):
raise Exception("cannot load contrib package '{}', attribute with that name already "
"exists in the law module".format(pkg))
mod = __import__("law.contrib.{}".format(pkg), globals(), locals(), [pkg])
setattr(law, pkg, mod)
law.__all__.append(pkg)
loaded_packages.append(pkg)
logger.debug("loaded contrib package '{}'".format(pkg))
# the contrib mechanism used to add all members of the module to the main law namespace
# but given the growing number of contrib packages, the chance of collisions is not
# negligible any longer, so for the moment add dummy objects only for callables to the law
# module that, when used, raise verbose exceptions
# (to be removed for v0.1)
def dummy_factory(pkg, attr, member):
def _raise():
raise AttributeError("due to a change in 'law.contrib.load()', the attribute '{0}' "
"is no longer accessible on the global 'law' namespace, please use "
"'law.{1}.{0}' instead".format(attr, pkg))
if isinstance(member, types.FunctionType):
def dummy(*args, **kwargs):
"""
Dummy function throwing an *AttributeError* when called.
"""
_raise()
else:
class dummy(member):
"""
Dummy class throwing an *AttributeError* when instantiated.
"""
exclude_index = True
name = str(uuid.uuid4())
def __new__(cls, *args, **kwargs):
_raise()
return dummy
for attr in mod.__all__:
member = getattr(mod, attr)
if callable(member):
setattr(law, attr, dummy_factory(pkg, attr, member))
else:
logger.debug("skip creating dummy object for attribute {} of package {}".format(
attr, pkg))
def load_all():
"""
Loads all available contrib packages via :py:func:`load`. A package is skipped when an
ImportError was raised. The list of names of loaded packages is returned.
"""
loaded_packages = []
for name in available_packages:
try:
load(name)
except ImportError:
continue
loaded_packages.append(name)
return loaded_packages
|
Python
| 0
|
@@ -113,33 +113,8 @@
ing%0A
-import types%0Aimport uuid%0A
impo
@@ -1611,1675 +1611,8 @@
))%0A%0A
- # the contrib mechanism used to add all members of the module to the main law namespace%0A # but given the growing number of contrib packages, the chance of collisions is not%0A # negligible any longer, so for the moment add dummy objects only for callables to the law%0A # module that, when used, raise verbose exceptions%0A # (to be removed for v0.1)%0A def dummy_factory(pkg, attr, member):%0A def _raise():%0A raise AttributeError(%22due to a change in 'law.contrib.load()', the attribute '%7B0%7D' %22%0A %22is no longer accessible on the global 'law' namespace, please use %22%0A %22'law.%7B1%7D.%7B0%7D' instead%22.format(attr, pkg))%0A%0A if isinstance(member, types.FunctionType):%0A def dummy(*args, **kwargs):%0A %22%22%22%0A Dummy function throwing an *AttributeError* when called.%0A %22%22%22%0A _raise()%0A else:%0A class dummy(member):%0A %22%22%22%0A Dummy class throwing an *AttributeError* when instantiated.%0A %22%22%22%0A exclude_index = True%0A name = str(uuid.uuid4())%0A def __new__(cls, *args, **kwargs):%0A _raise()%0A%0A return dummy%0A%0A for attr in mod.__all__:%0A member = getattr(mod, attr)%0A if callable(member):%0A setattr(law, attr, dummy_factory(pkg, attr, member))%0A else:%0A logger.debug(%22skip creating dummy object for attribute %7B%7D%C2%A0of package %7B%7D%22.format(%0A attr, pkg))%0A%0A
%0Adef
|
2a1a5b738bd59880ea223606a321584145d6ba2b
|
Add archlinux chromium location
|
leapcast/environment.py
|
leapcast/environment.py
|
from __future__ import unicode_literals
import argparse
import glob
import logging
import os
import sys
import uuid
logger = logging.getLogger('Environment')
def _get_chrome_path():
if sys.platform == 'win32':
# First path includes fallback for Windows XP, because it doesn't have
# LOCALAPPDATA variable.
globs = [os.path.join(
os.getenv(
'LOCALAPPDATA', os.path.join(os.getenv('USERPROFILE'), 'Local Settings\\Application Data')), 'Google\\Chrome\\Application\\chrome.exe'),
os.path.join(os.getenv('ProgramW6432', 'C:\\Program Files'),
'Google\\Chrome\\Application\\chrome.exe'),
os.path.join(os.getenv('ProgramFiles(x86)', 'C:\\Program Files (x86)'), 'Google\\Chrome\\Application\\chrome.exe')]
elif sys.platform == 'darwin':
globs = [
'/Applications/Google Chrome.app/Contents/MacOS/Google Chrome']
else:
globs = ['/usr/bin/google-chrome',
'/opt/google/chrome/google-chrome',
'/opt/google/chrome-*/google-chrome',
'/usr/bin/chromium-browser']
for g in globs:
for path in glob.glob(g):
if os.path.exists(path):
return path
class Environment(object):
channels = dict()
global_status = dict()
friendlyName = 'leapcast'
user_agent = 'Mozilla/5.0 (CrKey - 0.9.3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1573.2 Safari/537.36'
chrome = _get_chrome_path()
fullscreen = False
window_size = False
interfaces = None
uuid = None
ips = []
apps = None
verbosity = logging.INFO
def parse_cmd():
    """Parse command-line options and store them on the Environment class.

    Mutates the module-global Environment configuration in place and
    exits via parser.error() when no Chrome binary can be located.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-d', '--debug', action='store_true',
                        default=False, dest='debug', help='Debug')
    parser.add_argument('-i', '--interface', action='append',
                        dest='interfaces',
                        help='Interface to bind to (can be specified multiple times)',
                        metavar='IPADDRESS')
    parser.add_argument('--name', help='Friendly name for this device')
    parser.add_argument('--user_agent', help='Custom user agent')
    parser.add_argument('--chrome', help='Path to Google Chrome executable')
    parser.add_argument('--fullscreen', action='store_true',
                        default=False, help='Start in full-screen mode')
    parser.add_argument('--window_size',
                        default=False,
                        help='Set the initial chrome window size. eg 1920,1080')
    parser.add_argument(
        '--ips', help='Allowed ips from which clients can connect')
    parser.add_argument('--apps', help='Add apps from JSON file')
    args = parser.parse_args()
    # configure logging before anything else logs
    if args.debug:
        Environment.verbosity = logging.DEBUG
    logging.basicConfig(level=Environment.verbosity)
    # copy each supplied option onto the shared Environment class
    if args.interfaces:
        Environment.interfaces = args.interfaces
        logger.debug('Interfaces is %s' % Environment.interfaces)
    if args.name:
        Environment.friendlyName = args.name
        logger.debug('Service name is %s' % Environment.friendlyName)
    if args.user_agent:
        Environment.user_agent = args.user_agent
        logger.debug('User agent is %s' % args.user_agent)
    if args.chrome:
        Environment.chrome = args.chrome
        logger.debug('Chrome path is %s' % args.chrome)
    if args.fullscreen:
        Environment.fullscreen = True
    if args.window_size:
        Environment.window_size = args.window_size
    if args.ips:
        Environment.ips = args.ips
    if args.apps:
        Environment.apps = args.apps
    # a browser is mandatory: neither auto-detection nor --chrome found one
    if Environment.chrome is None:
        parser.error('could not locate chrome; use --chrome to specify one')
    generate_uuid()
def generate_uuid():
    """Derive a stable device UUID from the configured friendly name."""
    seed = 'device.leapcast.%s' % Environment.friendlyName
    device_id = uuid.uuid5(uuid.NAMESPACE_DNS, seed.encode('utf8'))
    Environment.uuid = str(device_id)
|
Python
| 0.000001
|
@@ -1133,16 +1133,54 @@
browser'
+,%0A '/usr/bin/chromium'
%5D%0A fo
|
40da4a49ae389c8f3557d787197bbd44b5c8e53e
|
Put DateTimeShortcuts.js at the end and don't merge it
|
lfc/utils/initialize.py
|
lfc/utils/initialize.py
|
# portlets imports
from portlets.utils import register_portlet
# lfc imports
from lfc.utils.registration import register_template
from lfc.utils.registration import register_content_type
from lfc.models import NavigationPortlet
from lfc.models import Page
from lfc.models import PagesPortlet
from lfc.models import RandomPortlet
from lfc.models import TextPortlet
# resources imports
import resources.utils
from resources.utils import register_resource
from resources.config import CSS, JS
def initialize(create_resources=False):
    """Registers default portlets, templates and content types.

    When ``create_resources`` is True, the registered CSS/JS resources are
    also materialized on disk via resources.utils.create_resources().

    NOTE: registration order of the resources below is significant -- it
    determines the order in which they are merged/served.
    """
    # Portlets
    register_portlet(NavigationPortlet, "Navigation")
    register_portlet(PagesPortlet, "Pages")
    register_portlet(RandomPortlet, "Random")
    register_portlet(TextPortlet, "Text")

    # Register Templates
    register_template(name = "Plain", path="lfc/templates/plain.html")
    register_template(name = "Article", path="lfc/templates/article.html")
    register_template(name = "Gallery", path="lfc/templates/gallery.html", images_columns=3)
    register_template(name = "Overview", path="lfc/templates/overview.html")

    # Register Resources -- "lfc" group: public site CSS/JS
    register_resource(type=CSS, group="lfc", path="/lfc/yui/reset-fonts.css")
    register_resource(type=CSS, group="lfc", path="/lfc/lightbox/css/jquery.lightbox-0.5.css")
    register_resource(type=CSS, group="lfc", path="/lfc/blueprint/src/grid.css")
    register_resource(type=CSS, group="lfc", path="/lfc_theme/css/tiny.css")
    register_resource(type=CSS, group="lfc", path="/lfc_theme/css/lfc.css")

    register_resource(type=JS, group="lfc", path="/lfc/jquery/jquery.min.js")
    register_resource(type=JS, group="lfc", path="/lfc/jquery/jquery.tools.min.js")
    register_resource(type=JS, group="lfc", path="/lfc/lightbox/js/jquery.lightbox-0.5.js")
    register_resource(type=JS, group="lfc", path="/lfc_theme/js/lfctheme.js")

    # "manage" group: management/admin UI CSS/JS
    register_resource(type=CSS, group="manage", path="/lfc/yui/reset-min.css")
    register_resource(type=CSS, group="manage", path="/lfc/lightbox/css/jquery.lightbox-0.5.css")
    register_resource(type=CSS, group="manage", path="/lfc/jquery/jquery-ui-themeroller/jquery-ui-themeroller.css")
    register_resource(type=CSS, group="manage", path="/lfc/jquery/jquery.jgrowl.css")
    register_resource(type=CSS, group="manage", path="/lfc/jquery/superfish/superfish.css")
    register_resource(type=CSS, group="manage", path="/lfc/jquery/autocomplete/jquery.autocomplete.css")
    register_resource(type=CSS, group="manage", path="/lfc/css/lfc_manage.css")
    register_resource(type=CSS, group="manage", path="/lfc/swfupload/default.css")

    # register_resource(type=JS, group="manage", path="/admin/jsi18n")
    # tiny_mce must not be merged with the other scripts (merge=0)
    register_resource(type=JS, group="manage", merge=0, path="/lfc/tiny_mce/jscripts/tiny_mce/tiny_mce.js")
    register_resource(type=JS, group="manage", path="/admin/js/core.js")
    register_resource(type=JS, group="manage", path="/admin/js/calendar.js")
    register_resource(type=JS, group="manage", path="/admin/js/admin/DateTimeShortcuts.js")
    register_resource(type=JS, group="manage", path="/admin/js/urlify.js")
    register_resource(type=JS, group="manage", path="/lfc/jquery/jquery.min.js")
    register_resource(type=JS, group="manage", path="/lfc/jquery/jquery.tools.min.js")
    register_resource(type=JS, group="manage", path="/lfc/jquery/jquery.livequery.pack.js")
    register_resource(type=JS, group="manage", path="/lfc/jquery/jquery.form.pack.js")
    register_resource(type=JS, group="manage", path="/lfc/jquery/jquery.jgrowl_minimized.js")
    register_resource(type=JS, group="manage", path="/lfc/jquery/jquery.cookie.pack.js")
    register_resource(type=JS, group="manage", path="/lfc/jquery/superfish/superfish.js")
    register_resource(type=JS, group="manage", path="/lfc/jquery/jquery-ui-personalized-1.5.3.packed.js")
    register_resource(type=JS, group="manage", path="/lfc/jquery/autocomplete/jquery.autocomplete.pack.js")
    register_resource(type=JS, group="manage", path="/lfc/js/json2.js")
    register_resource(type=JS, group="manage", path="/lfc/js/tinymce.js")
    # swfupload breaks when minified (minify=0)
    register_resource(type=JS, group="manage", minify=0, path="/lfc/swfupload/swfupload.js")
    register_resource(type=JS, group="manage", path="/lfc/swfupload/swfupload.queue.js")
    register_resource(type=JS, group="manage", path="/lfc/swfupload/fileprogress.js")
    register_resource(type=JS, group="manage", path="/lfc/swfupload/handlers.js")
    register_resource(type=JS, group="manage", path="/lfc/swfupload/swfupload.cookies.js")
    register_resource(type=JS, group="manage", path="/lfc/js/lfc_manage.js")

    if create_resources:
        resources.utils.create_resources()

    # Content Types
    register_content_type(
        Page,
        name="Page",
        sub_types=["Page"],
        templates=["Article", "Plain", "Gallery", "Overview"],
        default_template="Article")
|
Python
| 0
|
@@ -2986,100 +2986,8 @@
s%22)%0A
- register_resource(type=JS, group=%22manage%22, path=%22/admin/js/admin/DateTimeShortcuts.js%22)%0A
@@ -4550,24 +4550,125 @@
_manage.js%22)
+%0A register_resource(type=JS, group=%22manage%22, merge=0, path=%22/admin/js/admin/DateTimeShortcuts.js%22)
%0A%0A if cre
|
6917d2b7f868714f256266d3529edb5da1211311
|
add ramen
|
ircbot/plugin/redditeu.py
|
ircbot/plugin/redditeu.py
|
import datetime
import random
import re
import socket
import urllib.error
import urllib.request
from ircbot import log
import ircbot.plugin
class YouPornComment():
    """Fetches a random comment from a random YouPorn video page."""

    # class-wide timestamp of the last fetch attempt
    _last_fetch = None
    # minimum number of seconds between fetches
    THROTTLE_SECS = 30

    @classmethod
    def get_random(cls, include_url=False):
        """Return a comment string, False while throttled, None on failure."""
        now = datetime.datetime.now()
        previous = cls._last_fetch
        if previous is not None:
            if (now - previous).seconds < (cls.THROTTLE_SECS):
                log.debug('YouPorn comment less than {secs} seconds old, blocking'.format(
                    secs=cls.THROTTLE_SECS))
                return False
        cls._last_fetch = now
        comment = cls._get_random(include_url)
        return comment if comment else None

    @staticmethod
    def _get_random(include_url=False):
        """Do the actual HTTP fetch and pick one comment from the page."""
        try:
            response = urllib.request.urlopen('http://www.youporn.com/random/video/',
                timeout=2)
            body = response.read().decode()
            response.close()

            matches = re.findall('<p class="message">((?:.|\\n)*?)</p>', body)
            if matches is None:
                log.debug('No comments found in '+response.url)
                return None

            comment = random.choice(matches).strip().replace('\r', '').replace('\n', ' ')
            # heuristic: space-less comments with '+' are urlencoded-ish
            if ' ' not in comment and '+' in comment:
                comment = comment.replace('+', ' ')

            if include_url:
                # remove the long slug at the end of the URL
                short_url = '/'.join(response.url.split('/')[:-2])
                comment = comment + ' (' + short_url + ')'

            return comment
        except (socket.timeout, urllib.error.URLError, UnicodeDecodeError):
            log.debug('HTTP request failed')
            return None
return None
class Bitcoin:
currencies = (
'USD', 'EUR', 'hamburgers', 'farts', 'Razielcoins', 'BTC', 'salmons',
'marble eggs in a shitty condom', 'typematrix keyboards', 'clean teeth',
'dead Palestinian children', 'cmd.exe resizes', 'warp-in staplers',
'mutalisks on creep', 'mutalisks off creep', 'floating cars',
'burned rice', 'wordpress conference tickets', 'ice creams',
'base64 encoded o\'reilly books', 'rolls of vitamin E toilet paper',
'WISPY BEARDED POT SMOKING FAT FAGCUNT BITCOIN WORSHIPPERS WHO OBSESS OVER ME AND MAKE A SPORT OUT OF DRIVING ME INSANE AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
)
@classmethod
def get_worth(cls):
num = random.randint(100,100000) / 100
currency = random.choice(cls.currencies)
return '%.2f %s' % (num, currency)
class Raziel:
nicks = ('radio', 'brazier', 'easel', 'raIel', 'easiek')
@classmethod
def get_random_nick(cls):
return random.choice(cls.nicks)
class RedditeuPlugin(ircbot.plugin.Plugin):
    """#redditeu plugin."""

    @ircbot.plugin.command('btc')
    def get_btc_worth(self, cmd):
        """Report the current (made-up) bitcoin price."""
        worth = Bitcoin.get_worth()
        return '1 bitcoin is currently worth ' + worth

    @ircbot.plugin.command('random')
    def get_yp_comment(self, cmd):
        """Fetch a random YouPorn comment (rate-limited)."""
        comment = YouPornComment.get_random(True)
        if comment is False:
            return 'Error, try again!'
        if comment:
            return comment
        return 'No comment found, try again later!'

    @ircbot.plugin.command('michael')
    def who_is_michael(self, cmd):
        """Find Michael's current nick by his known hosts."""
        channel = self.bot.conn.channels.get(cmd.message.target)
        if not channel:
            return
        known_hosts = ('nevzetz', 'ip51cc146b.speed.planet.nl')
        for host, nick in channel.host_map.items():
            if any(part in host for part in known_hosts):
                return 'Michael is ' + nick
        return 'Michael not found!'

    @ircbot.plugin.reply
    def nay_here(self, msg):
        """Answer nay's greetings in kind."""
        if 'nay' not in msg.user.nick.lower():
            return
        # strip all non-standard characters
        printable = [c for c in msg.message if 32 <= ord(c) <= 122]
        text = ''.join(printable).lower().strip()
        if 'sup' in text and 'nay here' in text:
            return 'sup gay here'
        if text in ('sup', 'yo'):
            return 'gay here'

    @ircbot.plugin.join
    def welcome(self, user, channel):
        """Channel-join greetings for regulars."""
        nick = user.nick.lower()
        if 'happy0' in nick:
            return 'ypyotootp hippy 0'
        if nick.startswith('raziel'):
            return 'hello ' + Raziel.get_random_nick()
|
Python
| 0.998975
|
@@ -2361,16 +2361,25 @@
'easiek'
+, 'ramen'
)%0A%0A%09@cla
|
b2d26411f80f2ccd685bbbe19af1eb6e9999c329
|
rename method and add docstring.
|
ism_spectra/sfd_lookup.py
|
ism_spectra/sfd_lookup.py
|
import astropy.io.fits as fits
import numpy as np
# Pixel-center offsets for the Lambert projection (SFD98, Appendix C).
x_offset = 0.5
y_offset = 0.5
class SFDLookUp(object):
    """
    Get extinction values from the SFD maps[1].

    [1]: https://arxiv.org/abs/astro-ph/9809230
    """

    def __init__(self, ngp_filename, sgp_filename):
        """
        :param ngp_filename: NGP filename and path (e.g. 'SFD_dust_4096_ngp.fits')
        :type ngp_filename: str
        :param sgp_filename: SGP filename and path (e.g. 'SFD_dust_4096_sgp.fits')
        :type sgp_filename: str
        """
        ngp_list = fits.open(ngp_filename)
        sgp_list = fits.open(sgp_filename)

        ngp_data = ngp_list[0].data
        sgp_data = sgp_list[0].data

        self.data_res_x, self.data_res_y = ngp_data.shape

        if ngp_data.shape != sgp_data.shape:
            raise Exception("NGP and SGP maps must have the same resolution")

        self.half_data_res_x = self.data_res_x // 2
        self.half_data_res_y = self.data_res_y // 2

        # stack hemispheres so the third index selects NGP (0) or SGP (1)
        self.sfd_map = np.dstack((ngp_data, sgp_data))

    def get_xy(self, l, b):
        """
        get Lambert projection x and y values for the specified galactic coordinates

        :param l: l-parameter (1D array)
        :type l: np.ndarray
        :param b: b-parameter (1D array)
        :type b: np.ndarray
        :return: 1D arrays containing x, y values and the NGP/SGP mask.
        :rtype: np.ndarray
        """
        # SFD 98, equations C1 and C2:
        n_mask = b < 0
        n = np.ones_like(b)
        n[n_mask] = -1

        x = self.half_data_res_x * np.sqrt(1 - n * np.sin(b)) * np.cos(l) + (
            self.half_data_res_x - x_offset)  # type: np.ndarray
        # bugfix: the y equation previously used x_offset; use y_offset as
        # intended (numerically identical today since both offsets are 0.5)
        y = -self.half_data_res_y * n * np.sqrt(1 - n * np.sin(b)) * np.sin(l) + (
            self.half_data_res_y - y_offset)  # type: np.ndarray

        return x, y, n_mask

    def lookup(self, l, b):
        """
        get extinction values in the specified galactic coordinates

        :param l: l-parameter (1D array)
        :type l: np.ndarray
        :param b: b-parameter (1D array)
        :type b: np.ndarray
        :return: 1D array containing extinction values
        :rtype: np.ndarray
        """
        x, y, n_mask = self.get_xy(l, b)
        # nearest-neighbour sampling: truncate to the containing pixel
        return self.sfd_map[y.astype(np.int), x.astype(np.int), n_mask.astype(np.int)]

    def lookup_bilinear(self, l, b):
        """
        get extinction values in the specified galactic coordinates, using
        bilinear interpolation between the four surrounding map pixels

        :param l: l-parameter (1D array)
        :type l: np.ndarray
        :param b: b-parameter (1D array)
        :type b: np.ndarray
        :return: 1D array containing extinction values
        :rtype: np.ndarray
        """
        x, y, n_mask = self.get_xy(l, b)

        x_fraction, x_floor = np.modf(x)
        y_fraction, y_floor = np.modf(y)

        # bilinear weights for the four neighbouring pixels
        weights = np.empty(shape=(2, 2,) + x.shape)
        weights[0, 0] = (1 - x_fraction) * (1 - y_fraction)
        weights[0, 1] = (1 - x_fraction) * y_fraction
        weights[1, 0] = x_fraction * (1 - y_fraction)
        weights[1, 1] = x_fraction * y_fraction

        values = np.empty(shape=(2, 2,) + x.shape)

        x_floor_int = x_floor.astype(np.int)
        y_floor_int = y_floor.astype(np.int)
        # clamp the +1 neighbour so edge pixels stay inside the map
        x_ceil_int = np.clip(x_floor_int + 1, 0, self.data_res_x - 1)
        y_ceil_int = np.clip(y_floor_int + 1, 0, self.data_res_y - 1)
        n_mask_int = n_mask.astype(np.int)

        values[0, 0] = self.sfd_map[y_floor_int, x_floor_int, n_mask_int]
        values[0, 1] = self.sfd_map[y_ceil_int, x_floor_int, n_mask_int]
        values[1, 0] = self.sfd_map[y_floor_int, x_ceil_int, n_mask_int]
        values[1, 1] = self.sfd_map[y_ceil_int, x_ceil_int, n_mask_int]

        return np.sum(values * weights, axis=(0, 1))
|
Python
| 0
|
@@ -1846,16 +1846,24 @@
f lookup
+_nearest
(self, l
@@ -2338,32 +2338,374 @@
ar(self, l, b):%0A
+ %22%22%22%0A get extinction values in the specified galactic coordinates, using bilinear interpolation%0A :param l: l-parameter (1D array)%0A :type l: np.ndarray%0A :param b: b-parameter (1D array)%0A :type b: np.ndarray%0A :return: 1D array containing extinction values%0A :rtype: np.ndarray%0A %22%22%22%0A
x, y, n_
|
e97a43f4558b19311eea9a5ef508502c7151256f
|
fix periodic task schedule_updates()
|
mygpo/data/tasks.py
|
mygpo/data/tasks.py
|
from operator import itemgetter
from datetime import datetime, timedelta
from celery.decorators import periodic_task
from mygpo.data.podcast import calc_similar_podcasts
from mygpo.celery import celery
from mygpo.podcasts.models import Podcast
@celery.task
def update_podcasts(podcast_urls):
    """ Task to update a podcast """
    # local import -- presumably deferred to avoid an import cycle at
    # module load time; TODO confirm
    from mygpo.data.feeddownloader import PodcastUpdater

    updater = PodcastUpdater()
    podcasts = updater.update_queue(podcast_urls)

    # materialize so the task result is a serializable list
    return list(podcasts)
@celery.task
def update_related_podcasts(podcast, max_related=20):
    """Link up to ``max_related`` similar podcasts to ``podcast``."""
    similar = calc_similar_podcasts(podcast)[:max_related]

    for entry in similar:
        # each entry is a tuple whose first element is the related podcast
        podcast.related_podcasts.add(entry[0])
# interval in which podcast updates are scheduled
UPDATE_INTERVAL = timedelta(hours=1)


@periodic_task(run_every=UPDATE_INTERVAL)
def schedule_updates(interval=UPDATE_INTERVAL):
    """ Schedules podcast updates that are due within ``interval``

    Fix: the function previously took a stray ``self`` first parameter,
    but a plain @periodic_task function is not bound -- celery beat calls
    it with no positional args, so every scheduled run failed. """
    now = datetime.utcnow()

    # fetch podcasts for which an update is due within the next hour
    podcasts = Podcast.objects.next_update_between(now, now+interval)\
                              .prefetch_related('urls')\
                              .only('pk')

    # queue all those podcast updates
    for podcast in podcasts:
        update_podcasts.delay([podcast.url])
|
Python
| 0.000001
|
@@ -240,16 +240,97 @@
odcast%0A%0A
+from celery.utils.log import get_task_logger%0Alogger = get_task_logger(__name__)%0A%0A
%0A@celery
@@ -993,14 +993,8 @@
tes(
-self,
inte
@@ -1016,16 +1016,16 @@
ERVAL):%0A
+
%22%22%22
@@ -1215,16 +1215,54 @@
.objects
+.all()%5C%0A
.next_up
@@ -1394,16 +1394,88 @@
('pk')%0A%0A
+ logger.error('Scheduling %25d podcasts for update', podcasts.count())%0A
# qu
|
6289cb892d0d9d683ba57e369941d48f1a982c4e
|
Add missing timeouts to client.py
|
netuitive/client.py
|
netuitive/client.py
|
import logging
import json
import time
from netuitive import __version__
try:
import urllib.request as urllib2
except ImportError: # pragma: no cover
import urllib2
try:
from urllib.parse import urlparse
except ImportError: # pragma: no cover
from urlparse import urlparse
class Client(object):

    """
    Netuitive Rest Api Client for agent data ingest.
    Posts Element data to Netuitive Cloud

    :param url: Base data source URL
    :type url: string
    :param api_key: API Key for data source
    :type api_key: string

    """

    def __init__(self, url='https://api.app.netuitive.com/ingest',
                 api_key='apikey',
                 agent='Netuitive-Python/' + __version__,
                 connection_timeout=5):

        if url.endswith('/'):
            url = url[:-1]

        self.url = url
        self.api_key = api_key
        self.dataurl = self.url + '/' + self.api_key
        # time endpoint lives at the host root, not under /ingest
        self.timeurl = '{uri.scheme}://{uri.netloc}/time'.format(
            uri=urlparse(url))
        self.eventurl = self.dataurl.replace('/ingest/', '/ingest/events/', 1)
        self.checkurl = self.dataurl.replace('/ingest/', '/check/', 1) \
            .replace('/infrastructure', '', 1)
        self.agent = agent
        # set True when the server returns a kill code; blocks all posting
        self.disabled = False
        self.kill_codes = [410, 418]
        self.post_error_count = 0
        self.max_post_errors = 10
        self.connection_timeout = connection_timeout
        self.max_check_retry_count = 3

    def post(self, element):
        """
        :param element: Element to post to Netuitive
        :type element: object
        """

        try:
            if self.disabled is True:
                element.clear_samples()
                logging.error('Posting has been disabled. '
                              'See previous errors for details.')
                return(False)

            if element.id is None:
                raise Exception('element id is not set')

            element.merge_metrics()
            payload = json.dumps(
                [element], default=lambda o: o.__dict__, sort_keys=True)
            logging.debug(payload)

            headers = {'Content-Type': 'application/json',
                       'User-Agent': self.agent}
            request = urllib2.Request(
                self.dataurl, data=payload, headers=headers)
            # fix: pass the configured timeout so a stalled server
            # cannot hang the agent indefinitely
            resp = urllib2.urlopen(request, timeout=self.connection_timeout)
            logging.debug("Response code: %d", resp.getcode())
            resp.close()
            self.post_error_count = 0

            return(True)

        except urllib2.HTTPError as e:
            logging.debug("Response code: %d", e.code)

            if e.code in self.kill_codes:
                self.disabled = True
                logging.exception('Posting has been disabled.'
                                  'See previous errors for details.')
            else:
                self.post_error_count += 1
                if self.post_error_count > self.max_post_errors:
                    element.clear_samples()

                logging.exception(
                    'error posting payload to api ingest endpoint (%s): %s',
                    self.dataurl, e)

        except Exception as e:
            self.post_error_count += 1
            if self.post_error_count > self.max_post_errors:
                element.clear_samples()  # pragma: no cover

            logging.exception(
                'error posting payload to api ingest endpoint (%s): %s',
                self.dataurl, e)

    def post_event(self, event):
        """
        :param event: Event to post to Netuitive
        :type event: object
        """

        if self.disabled is True:
            logging.error('Posting has been disabled. '
                          'See previous errors for details.')
            return(False)

        payload = json.dumps(
            [event], default=lambda o: o.__dict__, sort_keys=True)
        logging.debug(payload)

        try:
            headers = {'Content-Type': 'application/json',
                       'User-Agent': self.agent}
            request = urllib2.Request(
                self.eventurl, data=payload, headers=headers)
            # fix: timeout was missing here as well
            resp = urllib2.urlopen(request, timeout=self.connection_timeout)
            logging.debug("Response code: %d", resp.getcode())
            resp.close()

            return(True)

        except urllib2.HTTPError as e:
            logging.debug("Response code: %d", e.code)

            if e.code in self.kill_codes:
                self.disabled = True
                logging.exception('Posting has been disabled.'
                                  'See previous errors for details.')
            else:
                logging.exception(
                    'error posting payload to api ingest endpoint (%s): %s',
                    self.eventurl, e)

        except Exception as e:
            logging.exception(
                'error posting payload to api ingest endpoint (%s): %s',
                self.eventurl, e)

    def post_check(self, check):
        """
        :param check: Check to post to Metricly
        :type check: object
        """

        if self.disabled is True:
            logging.error('Posting has been disabled. '
                          'See previous errors for details.')
            return(False)

        url = self.checkurl + '/' \
            + check.name + '/' \
            + check.elementId + '/' \
            + str(check.ttl)

        headers = {'User-Agent': self.agent}

        try:
            request = urllib2.Request(
                url, data='', headers=headers)
            # retries transient 5xx errors; already timeout-aware
            resp = self._repeat_request(request, self.connection_timeout)
            logging.debug("Response code: %d", resp.getcode())
            resp.close()

            return(True)

        except urllib2.HTTPError as e:
            logging.debug("Response code: %d", e.code)

            if e.code in self.kill_codes:
                self.disabled = True
                logging.exception('Posting has been disabled.'
                                  'See previous errors for details.')
            else:
                logging.exception(
                    'HTTPError posting payload to api ingest endpoint'
                    + ' (%s): %s',
                    url, e)

    def check_time_offset(self, epoch=None):
        """Return local-time minus server-time in whole seconds."""
        req = urllib2.Request(self.timeurl,
                              headers={'User-Agent': self.agent})
        req.get_method = lambda: 'HEAD'
        # fix: timeout was missing here as well
        resp = urllib2.urlopen(req, timeout=self.connection_timeout)
        rdate = resp.info()['Date']

        if epoch is None:
            ltime = int(time.mktime(time.gmtime()))
        else:
            ltime = epoch

        rtime = int(time.mktime(
            time.strptime(rdate, "%a, %d %b %Y %H:%M:%S %Z")))

        ret = ltime - rtime

        return(ret)

    def time_insync(self):
        """True when local clock is within +/-5 minutes of the server."""
        if self.check_time_offset() in range(-300, 300):
            return(True)
        else:
            return(False)

    def _repeat_request(self, request, timeout):
        """Issue ``request``, retrying transient 5xx errors with backoff."""
        for i in range(self.max_check_retry_count + 1):
            try:
                return urllib2.urlopen(request, timeout=timeout)
            except urllib2.HTTPError as e:
                if 500 <= e.code < 600 and i < self.max_check_retry_count:
                    logging.debug("Response code: %d, retry count: %d from %d",
                                  e.code, i + 1, self.max_check_retry_count)
                    time.sleep(0.25 * (i + 1))
                else:
                    raise
|
Python
| 0.000002
|
@@ -2405,32 +2405,65 @@
.urlopen(request
+, timeout=self.connection_timeout
)%0A lo
@@ -4276,16 +4276,49 @@
(request
+, timeout=self.connection_timeout
)%0A
@@ -6586,16 +6586,49 @@
open(req
+, timeout=self.connection_timeout
)%0A
|
140b8b829f566a75a09a1ff7bc2d7d4ba4ea5272
|
fix version
|
newfies/__init__.py
|
newfies/__init__.py
|
# -*- coding: utf-8 -*-
#
# Newfies-Dialer License
# http://www.newfies-dialer.org
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (C) 2011-2012 Star2Billing S.L.
#
# The Initial Developer of the Original Code is
# Arezqui Belaid <info@star2billing.com>
#
# :copyright: (c) 2011 - 2012 by Arezqui Belaid.
# :license: MPL 2.0, see COPYING for more details.
# (major, minor, patch, suffix) -- suffix is appended without a separator
VERSION = (1, 2, 2, "")
__version__ = ".".join(map(str, VERSION[0:3])) + "".join(VERSION[3:])
__author__ = "Arezqui Belaid"
__contact__ = "info@star2billing.com"
__homepage__ = "http://www.newfies-dialer.org"
__docformat__ = "restructuredtext"
|
Python
| 0.000001
|
@@ -538,17 +538,17 @@
(1, 2,
-2
+4
, %22%22)%0A__
|
df5bcec8f6d05a27ba6be3ea0af401ab6045d636
|
Bump version to 0.5.2
|
nio_cli/__init__.py
|
nio_cli/__init__.py
|
# Package version (PEP 396); keep in sync with the package metadata.
__version__ = '0.5.1'
|
Python
| 0.000001
|
@@ -16,7 +16,7 @@
0.5.
-1
+2
'%0A
|
3e79c769e49f6677fe8922d37d6b63bcf71bbf7f
|
add autoreloading of SMS apps
|
lib/rapidsms/manager.py
|
lib/rapidsms/manager.py
|
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
from config import Config
from router import Router
import os, sys, shutil
import i18n
# the Manager class is a bin for various RapidSMS specific management methods
class Manager (object):
def route (self, conf, *args):
router = Router()
router.set_logger(conf["log"]["level"], conf["log"]["file"])
router.info("RapidSMS Server started up")
import_i18n_sms_settings(conf)
# add each application from conf
for app_conf in conf["rapidsms"]["apps"]:
router.add_app(app_conf)
# add each backend from conf
for backend_conf in conf["rapidsms"]["backends"]:
router.add_backend(backend_conf)
# wait for incoming messages
router.start()
# TODO: Had to explicitly do this to end the script. Will need a fix.
sys.exit(0)
def _skeleton (self, tree):
return os.path.join(os.path.dirname(__file__), "skeleton", tree)
def startproject (self, conf, *args):
try:
name = args[0]
shutil.copytree(self._skeleton("project"), name)
except IndexError:
print "Oops. Please specify a name for your project."
def startapp (self, conf, *args):
try:
name = args[0]
target = os.path.join("apps",name)
shutil.copytree(self._skeleton("app"), target)
print "Don't forget to add '%s' to your rapidsms.ini apps." % name
except IndexError:
print "Oops. Please specify a name for your app."
def import_i18n_sms_settings(conf):
    """Initialize SMS i18n from the [i18n] section of rapidsms.ini."""
    # nothing to do when the config has no i18n section
    if "i18n" not in conf:
        return

    default = None
    supported = None
    # sms_languages takes precedence over the generic languages list
    if "sms_languages" in conf["i18n"]:
        supported = conf["i18n"]["sms_languages"]
    elif "languages" in conf["i18n"]:
        supported = conf["i18n"]["languages"]
    if "default_language" in conf["i18n"]:
        default = conf["i18n"]["default_language"]

    i18n.init(default,supported)
def import_local_settings (settings, ini, localfile="settings.py"):
    """Allow a settings.py file in the same directory as rapidsms.ini
    to modify the imported rapidsms.webui.settings."""
    local_settings = os.path.join(os.path.dirname(ini), localfile)
    try:
        # Python 2 only: execute the file with the settings module's
        # namespace as locals, so assignments land on the module
        execfile(local_settings, globals(), settings.__dict__)
    except IOError:
        # local_settings doesn't exist
        pass
def start (args):
    """Entry point: locate rapidsms.ini, configure Django when possible,
    and dispatch either to a Manager command or to Django's management."""
    # if a specific conf has been provided (which it
    # will be), if we're inside the django reloaded
    if "RAPIDSMS_INI" in os.environ:
        ini = os.environ["RAPIDSMS_INI"]

    # use a local ini (for development)
    # if one exists, to avoid everyone
    # having their own rapidsms.ini
    elif os.path.isfile("local.ini"):
        ini = "local.ini"

    # otherwise, fall back
    else: ini = "rapidsms.ini"

    # add the ini path to the environment, so we can
    # access it globally, including any subprocesses
    # spawned by django
    os.environ["RAPIDSMS_INI"] = ini

    # read the config, which is shared
    # between the back and frontend
    conf = Config(ini)

    # if we found a config ini, try to configure Django
    if conf.sources:
        # import the webui settings, which builds the django
        # config from rapidsms.config, in a round-about way.
        # can't do it until env[RAPIDSMS_INI] is defined
        from rapidsms.webui import settings
        import_local_settings(settings, ini)

        # whatever we're doing, we'll need to call
        # django's setup_environ, to configure the ORM
        os.environ["DJANGO_SETTINGS_MODULE"] = "rapidsms.webui.settings"
        from django.core.management import setup_environ, execute_manager
        setup_environ(settings)
    else:
        settings = None

    # if one or more arguments were passed, we're
    # starting up django -- copied from manage.py
    if len(args) < 2:
        print "Commands: route, startproject <name>, startapp <name>"
        sys.exit(1)

    # Manager methods take priority over Django commands
    if hasattr(Manager, args[1]):
        handler = getattr(Manager(), args[1])
        handler(conf, *args[2:])
    elif settings:
        # none of the commands were recognized,
        # so hand off to Django
        from django.core.management import ManagementUtility
        # The following is equivalent to django's "execute_manager(settings)"
        # only without overriding RapidSMS webui settings
        utility = ManagementUtility()
        utility.execute()
|
Python
| 0
|
@@ -235,16 +235,374 @@
bject):%0A
+%0A def testroute(self, conf, *args):%0A %22%22%22 Uses Django's autoreload functionality to automatically restart the %0A routing server when code is changed. %22%22%22%0A from django.utils import autoreload%0A%0A def run():%0A self.route(conf, args)%0A%0A # run our route command using Django's autoreload%0A autoreload.main(run)%0A%0A
def
@@ -4347,16 +4347,38 @@
: route,
+ testroute, runserver,
startpr
|
37f6e4f362586d6d966aad52b1df7d5e00baf148
|
Update regsvr32.py
|
lib/stagers/regsvr32.py
|
lib/stagers/regsvr32.py
|
from lib.common import helpers
class Stager:
def __init__(self, mainMenu, params=[]):
self.info = {
'Name': 'regsvr32',
'Author': ['@subTee', '@enigma0x3'],
'Description': ('Generates an sct file (COM Scriptlet) Host this anywhere'),
'Comments': [
'On the endpoint simple lauch regsvr32 /u /n /s /i:http://127.0.0.1/file.sct scrobj.dll '
]
}
# any options needed by the stager, settable during runtime
self.options = {
# format:
# value_name : {description, required, default_value}
'Listener': {
'Description': 'Listener to generate stager for.',
'Required': True,
'Value': ''
},
'StagerRetries': {
'Description': 'Times for the stager to retry connecting.',
'Required': False,
'Value': '0'
},
'OutFile': {
'Description': 'File to output SCT to, otherwise displayed on the screen.',
'Required': False,
'Value': '/tmp/launcher.sct'
},
'UserAgent': {
'Description': 'User-agent string to use for the staging request (default, none, or other).',
'Required': False,
'Value': 'default'
},
'Proxy': {
'Description': 'Proxy to use for request (default, none, or other).',
'Required': False,
'Value': 'default'
},
'ProxyCreds': {
'Description': 'Proxy credentials ([domain\]username:password) to use for request (default, none, or other).',
'Required': False,
'Value': 'default'
}
}
# save off a copy of the mainMenu object to access external functionality
# like listeners/agent handlers/etc.
self.mainMenu = mainMenu
for param in params:
# parameter format is [Name, Value]
option, value = param
if option in self.options:
self.options[option]['Value'] = value
def generate(self):
# extract all of our options
listenerName = self.options['Listener']['Value']
base64 = self.options['Base64']['Value']
userAgent = self.options['UserAgent']['Value']
proxy = self.options['Proxy']['Value']
proxyCreds = self.options['ProxyCreds']['Value']
stagerRetries = self.options['StagerRetries']['Value']
encode = False
if base64.lower() == "true":
encode = True
# generate the launcher code
launcher = self.mainMenu.stagers.generate_launcher(
listenerName, encode=encode, userAgent=userAgent, proxy=proxy, proxyCreds=proxyCreds, stagerRetries=stagerRetries)
if launcher == "":
print helpers.color("[!] Error in launcher command generation.")
return ""
else:
code = "<?XML version=\"1.0\"?>\n"
code += "<scriptlet>\n"
code += "<registration\n"
code += "description=\"Win32COMDebug\"\n"
code += "progid=\"Win32COMDebug\"\n"
code += "version=\"1.00\"\n"
code += "classid=\"{AAAA1111-0000-0000-0000-0000FEEDACDC}\"\n"
code += " >\n"
code += " <script language=\"JScript\">\n"
code += " <![CDATA[\n"
code += " var r = new ActiveXObject(\"WScript.Shell\").Run(\"" + launcher + "\");\n"
code += " ]]>\n"
code += " </script>\n"
code += "</registration>\n"
code += "<public>\n"
code += " <method name=\"Exec\"></method>\n"
code += "</public>\n"
code += "</scriptlet>\n"
return code
|
Python
| 0
|
@@ -345,21 +345,22 @@
nt simpl
-e
+y
lau
+n
ch regsv
|
a3ee5470379e58e67cf5a8884ad4790b7b5c6b03
|
Remove redundant check message
|
libnamebench/base_ui.py
|
libnamebench/base_ui.py
|
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A base user-interface workflow, to be inherited by UI modules."""
import tempfile
import benchmark
import better_webbrowser
import config
import data_sources
import geoip
import nameserver_list
import reporter
import site_connector
import util
__author__ = 'tstromberg@google.com (Thomas Stromberg)'
class BaseUI(object):
  """Common methods for all UI implementations."""

  def __init__(self):
    self.SetupDataStructures()

  def SetupDataStructures(self):
    """Instead of requiring users to inherit __init__(), this sets up structures."""
    # benchmarking/reporting state, filled in by later Prepare* calls
    self.reporter = None
    self.nameservers = None
    self.bmark = None
    self.report_path = None
    self.csv_path = None
    # geolocation data (populated elsewhere; None until then)
    self.geodata = None
    self.country = None
    # test data sources and result-sharing state
    self.sources = {}
    self.url = None
    self.share_state = None
    self.test_records = []

  def UpdateStatus(self, msg, **kwargs):
    """Update the little status message on the bottom of the window."""
    # fall back to stdout when no UI status callback is registered
    if hasattr(self, 'status_callback') and self.status_callback:
      self.status_callback(msg, **kwargs)
    else:
      print msg

  def DebugMsg(self, message):
    # debug messages go through the same channel, flagged as debug
    self.UpdateStatus(message, debug=True)

  def LoadDataSources(self):
    # data source loading reports progress via UpdateStatus
    self.data_src = data_sources.DataSources(status_callback=self.UpdateStatus)
def PrepareTestRecords(self):
"""Figure out what data source a user wants, and create test_records."""
if self.options.input_source:
src_type = self.options.input_source
else:
src_type = self.data_src.GetBestSourceDetails()[0]
self.options.input_source = src_type
self.test_records = self.data_src.GetTestsFromSource(
src_type,
self.options.query_count,
select_mode=self.options.select_mode
)
def PrepareNameServers(self):
"""Setup self.nameservers to have a list of healthy fast servers."""
self.nameservers = nameserver_list.NameServers(
self.supplied_ns,
global_servers=self.global_ns,
regional_servers=self.regional_ns,
include_internal=self.include_internal,
num_servers=self.options.num_servers,
timeout=self.options.timeout,
ping_timeout=self.options.ping_timeout,
health_timeout=self.options.health_timeout,
ipv6_only=self.options.ipv6_only,
status_callback=self.UpdateStatus
)
if self.options.invalidate_cache:
self.nameservers.InvalidateSecondaryCache()
self.nameservers.cache_dir = tempfile.gettempdir()
# Don't waste time checking the health of the only nameserver in the list.
if len(self.nameservers) > 1:
self.nameservers.thread_count = int(self.options.health_thread_count)
self.nameservers.cache_dir = tempfile.gettempdir()
self.UpdateStatus('Checking latest sanity reference')
(primary_checks, secondary_checks, censor_tests) = config.GetLatestSanityChecks()
if not self.options.enable_censorship_checks:
censor_tests = []
else:
self.UpdateStatus('Censorship checks enabled: %s found.' % len(censor_tests))
self.UpdateStatus('Checking nameserver health (%s servers)' % len(self.nameservers))
self.nameservers.CheckHealth(primary_checks, secondary_checks, censor_tests=censor_tests)
def PrepareBenchmark(self):
"""Setup the benchmark object with the appropriate dataset."""
if len(self.nameservers) == 1:
thread_count = 1
else:
thread_count = self.options.benchmark_thread_count
self.bmark = benchmark.Benchmark(self.nameservers,
query_count=self.options.query_count,
run_count=self.options.run_count,
thread_count=thread_count,
status_callback=self.UpdateStatus)
def RunBenchmark(self):
"""Run the benchmark."""
results = self.bmark.Run(self.test_records)
index = []
if self.options.upload_results:
connector = site_connector.SiteConnector(self.options, status_callback=self.UpdateStatus)
index_hosts = connector.GetIndexHosts()
if index_hosts:
index = self.bmark.RunIndex(index_hosts)
else:
index = []
self.DiscoverLocation()
if len(self.nameservers) > 1:
self.nameservers.RunPortBehaviorThreads()
self.reporter = reporter.ReportGenerator(self.options, self.nameservers,
results, index=index, geodata=self.geodata)
def DiscoverLocation(self):
if not getattr(self, 'geodata', None):
self.geodata = geoip.GetGeoData()
self.country = self.geodata.get('country_name', None)
return self.geodata
def RunAndOpenReports(self):
"""Run the benchmark and open up the report on completion."""
self.RunBenchmark()
best = self.reporter.BestOverallNameServer()
self.CreateReports()
if self.options.template == 'html':
self.DisplayHtmlReport()
if self.url:
self.UpdateStatus('Complete! Your results: %s' % self.url)
else:
self.UpdateStatus('Complete! %s [%s] is the best.' % (best.name, best.ip))
def CreateReports(self):
"""Create CSV & HTML reports for the latest run."""
if self.options.output_file:
self.report_path = self.options.output_file
else:
self.report_path = util.GenerateOutputFilename(self.options.template)
if self.options.csv_file:
self.csv_path = self.options_csv_file
else:
self.csv_path = util.GenerateOutputFilename('csv')
if self.options.upload_results:
# This is for debugging and transparency only.
self.json_path = util.GenerateOutputFilename('js')
self.UpdateStatus('Saving anonymized JSON to %s' % self.json_path)
json_data = self.reporter.CreateJsonData()
f = open(self.json_path, 'w')
f.write(json_data)
f.close()
self.UpdateStatus('Uploading results to %s' % self.options.site_url)
connector = site_connector.SiteConnector(self.options, status_callback=self.UpdateStatus)
self.url, self.share_state = connector.UploadJsonResults(
json_data,
hide_results=self.options.hide_results
)
if self.url:
self.UpdateStatus('Your sharing URL: %s (%s)' % (self.url, self.share_state))
self.UpdateStatus('Saving report to %s' % self.report_path)
f = open(self.report_path, 'w')
self.reporter.CreateReport(format=self.options.template,
output_fp=f,
csv_path=self.csv_path,
sharing_url=self.url,
sharing_state=self.share_state)
f.close()
self.UpdateStatus('Saving detailed results to %s' % self.csv_path)
self.reporter.SaveResultsToCsv(self.csv_path)
def DisplayHtmlReport(self):
self.UpdateStatus('Opening %s' % self.report_path)
better_webbrowser.output = self.DebugMsg
better_webbrowser.open(self.report_path)
|
Python
| 0.000004
|
@@ -3608,97 +3608,8 @@
))%0A%0A
- self.UpdateStatus('Checking nameserver health (%25s servers)' %25 len(self.nameservers))%0A
|
7addd05a33358c21e3d7979cb2a68fb3703c92fb
|
Version bump: 1.3-RC1
|
libnamebench/version.py
|
libnamebench/version.py
|
VERSION = '1.3-BETA1'
|
Python
| 0
|
@@ -12,12 +12,10 @@
1.3-
-BETA
+RC
1'%0A%0A
|
30b2247a9a7117a0662a111ae7161edc52634b17
|
Make sure users without a first name aren't returned
|
lily/users/api/views.py
|
lily/users/api/views.py
|
import django_filters
from rest_framework import viewsets, mixins, status
from rest_framework.authtoken.models import Token
from rest_framework.decorators import list_route
from rest_framework.exceptions import PermissionDenied
from rest_framework.response import Response
from .serializers import LilyGroupSerializer, LilyUserSerializer, LilyUserTokenSerializer
from ..models import LilyGroup, LilyUser
class TeamFilter(django_filters.FilterSet):
"""
Class to filter case queryset.
"""
class Meta:
model = LilyGroup
fields = ['name', ]
class TeamViewSet(viewsets.ReadOnlyModelViewSet):
"""
List all teams assigned to the current user.
"""
model = LilyGroup
serializer_class = LilyGroupSerializer
filter_class = TeamFilter
queryset = LilyGroup.objects
def get_queryset(self):
queryset = self.model.objects.filter(tenant_id=self.request.user.tenant_id)
return queryset
def get(self, request, format=None):
filtered_queryset = self.filter_class(request.GET, queryset=self.get_queryset())
serializer = self.serializer_class(filtered_queryset, context={'request': request}, many=True)
return Response(serializer.data)
@list_route(methods=['GET'])
def mine(self, request, *args, **kwargs):
queryset = self.get_queryset().filter(user=self.request.user)
filtered_queryset = self.filter_class(request.GET, queryset=queryset)
serializer = self.serializer_class(filtered_queryset, context={'request': request}, many=True)
return Response(serializer.data)
class LilyUserViewSet(mixins.UpdateModelMixin,
viewsets.ReadOnlyModelViewSet):
model = LilyUser
serializer_class = LilyUserSerializer
queryset = LilyUser.objects
def get_queryset(self):
queryset = self.model.objects.filter(tenant_id=self.request.user.tenant_id)
return queryset
def get_object(self):
"""
Get a user by pk
If the pk is set to 'me', the currently logged in user will be returned
Returns:
serialized user instance
"""
if self.kwargs['pk'] == 'me':
return self.request.user
else:
return super(LilyUserViewSet, self).get_object()
@list_route(methods=['GET', 'DELETE', 'POST'])
def token(self, request, *args, **kwargs):
"""
This view only returns the token of the currently logged in user
GET returns the current token
POST generates a new token
DELETE removes the current token
"""
if request.method in ('DELETE', 'POST'):
Token.objects.filter(user=request.user).delete()
if request.method == 'DELETE':
return Response(status=status.HTTP_204_NO_CONTENT)
else:
Token.objects.create(user=request.user)
serializer = LilyUserTokenSerializer(request.user)
return Response(serializer.data)
def update(self, request, *args, **kwargs):
if self.request.user != self.get_object():
raise PermissionDenied
return super(LilyUserViewSet, self).update(request, args, kwargs)
|
Python
| 0
|
@@ -1898,32 +1898,55 @@
.user.tenant_id)
+.exclude(first_name='')
%0A return
|
8c5de29849ca061ed528c93c36fb3479aef34e41
|
Update ipc_lista1.15.py
|
lista1/ipc_lista1.15.py
|
lista1/ipc_lista1.15.py
|
#ipc_lista1.15
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#
qHora = input("Quanto você ganha por hora: ")
hT = input("Quantas horas você trabalhou: ")
SalBruto = qHora
ir = (11/100.0 * salBruto)
inss = (8/100.0m* SalBruto)
sindicato = (5/100.0 * SalBruto)
vT = ir + sindicato
SalLiq = SalBruto - vT
print "Seu salário bruto e: ", SalBruto
|
Python
| 0
|
@@ -352,17 +352,16 @@
to e: %22,
-
SalBruto
|
6653e7d9289290b97f06e2b46314a12e194bfc6a
|
Update ipc_lista1.18.py
|
lista1/ipc_lista1.18.py
|
lista1/ipc_lista1.18.py
|
#ipc_lista1.18
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um programa que peça o tamanho de um arquivo para download(em MB) e a velocidade de um link de internet (em Mbps), calcule e informe o tempo aproximado de downloado do arquivo usando este link (em minutos).
#
|
Python
| 0
|
@@ -290,10 +290,81 @@
nutos).%0A
-#
+%0Aarquivo = input(%22Informe o tamanho do arquivo para download (em MB): %22)
%0A
|
6c4d6587939b6e186fdc7c76448803559a00d5ea
|
Update ipc_lista2.02.py
|
lista2/ipc_lista2.02.py
|
lista2/ipc_lista2.02.py
|
#ipc_lista2.02
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um programa que peça um valor e mostre
|
Python
| 0
|
@@ -121,9 +121,17 @@
e mostre
+ na tela
%0A
|
b029ecf823cb97c1d70a7599787bd798b9a95ac6
|
Update ipc_lista2.06.py
|
lista2/ipc_lista2.06.py
|
lista2/ipc_lista2.06.py
|
#ipc_lista2.06
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um programa que leia três números e mostre o maior deles.
|
Python
| 0
|
@@ -141,8 +141,27 @@
deles.%0A
+%0Anumber1 =%0Anumber2%0A
|
2fbb692bc9937e72bc5840c867ec72041124d462
|
Add swift support to toolchain.py
|
lit/helper/toolchain.py
|
lit/helper/toolchain.py
|
import os
import itertools
import platform
import subprocess
import sys
import lit.util
from lit.llvm import llvm_config
from lit.llvm.subst import FindTool
from lit.llvm.subst import ToolSubst
def use_lldb_substitutions(config):
# Set up substitutions for primary tools. These tools must come from config.lldb_tools_dir
# which is basically the build output directory. We do not want to find these in path or
# anywhere else, since they are specifically the programs which are actually being tested.
dsname = 'debugserver' if platform.system() in ['Darwin'] else 'lldb-server'
dsargs = [] if platform.system() in ['Darwin'] else ['gdbserver']
lldbmi = ToolSubst('%lldbmi',
command=FindTool('lldb-mi'),
extra_args=['--synchronous'],
unresolved='ignore')
build_script = os.path.dirname(__file__)
build_script = os.path.join(build_script, 'build.py')
build_script_args = [build_script,
'--compiler=any', # Default to best compiler
'--arch=' + str(config.lldb_bitness)]
if config.lldb_lit_tools_dir:
build_script_args.append('--tools-dir={0}'.format(config.lldb_lit_tools_dir))
if config.lldb_tools_dir:
build_script_args.append('--tools-dir={0}'.format(config.lldb_tools_dir))
primary_tools = [
ToolSubst('%lldb',
command=FindTool('lldb'),
extra_args=['-S',
os.path.join(config.test_source_root,
'lit-lldb-init')]),
lldbmi,
ToolSubst('%debugserver',
command=FindTool(dsname),
extra_args=dsargs,
unresolved='ignore'),
'lldb-test',
ToolSubst('%build',
command="'" + sys.executable + "'",
extra_args=build_script_args)
]
llvm_config.add_tool_substitutions(primary_tools,
[config.lldb_tools_dir])
if lldbmi.was_resolved:
config.available_features.add('lldb-mi')
def _use_msvc_substitutions(config):
# If running from a Visual Studio Command prompt (e.g. vcvars), this will
# detect the include and lib paths, and find cl.exe and link.exe and create
# substitutions for each of them that explicitly specify /I and /L paths
cl = lit.util.which('cl')
link = lit.util.which('link')
if not cl or not link:
return
cl = '"' + cl + '"'
link = '"' + link + '"'
includes = os.getenv('INCLUDE', '').split(';')
libs = os.getenv('LIB', '').split(';')
config.available_features.add('msvc')
compiler_flags = ['"/I{}"'.format(x) for x in includes if os.path.exists(x)]
linker_flags = ['"/LIBPATH:{}"'.format(x) for x in libs if os.path.exists(x)]
tools = [
ToolSubst('%msvc_cl', command=cl, extra_args=compiler_flags),
ToolSubst('%msvc_link', command=link, extra_args=linker_flags)]
llvm_config.add_tool_substitutions(tools)
return
def use_support_substitutions(config):
# Set up substitutions for support tools. These tools can be overridden at the CMake
# level (by specifying -DLLDB_LIT_TOOLS_DIR), installed, or as a last resort, we can use
# the just-built version.
flags = []
if platform.system() in ['Darwin']:
try:
out = subprocess.check_output(['xcrun', '--show-sdk-path']).strip()
res = 0
except OSError:
res = -1
if res == 0 and out:
sdk_path = lit.util.to_string(out)
llvm_config.lit_config.note('using SDKROOT: %r' % sdk_path)
flags = ['-isysroot', sdk_path]
elif platform.system() in ['OpenBSD', 'Linux']:
flags = ['-pthread']
additional_tool_dirs=[]
if config.lldb_lit_tools_dir:
additional_tool_dirs.append(config.lldb_lit_tools_dir)
llvm_config.use_clang(additional_flags=flags,
additional_tool_dirs=additional_tool_dirs,
required=True)
if sys.platform == 'win32':
_use_msvc_substitutions(config)
have_lld = llvm_config.use_lld(additional_tool_dirs=additional_tool_dirs,
required=False)
if have_lld:
config.available_features.add('lld')
support_tools = ['yaml2obj', 'obj2yaml', 'llvm-pdbutil',
'llvm-mc', 'llvm-readobj', 'llvm-objdump',
'llvm-objcopy']
additional_tool_dirs += [config.lldb_tools_dir, config.llvm_tools_dir]
llvm_config.add_tool_substitutions(support_tools, additional_tool_dirs)
|
Python
| 0
|
@@ -3829,16 +3829,408 @@
read'%5D%0A%0A
+ # Swift support%0A swift_sdk = %5B' -sdk ', sdk_path%5D if platform.system() in %5B'Darwin'%5D else %5B%5D%0A tools = %5B%0A ToolSubst(%0A '%25target-swiftc', command=config.swiftc, extra_args=swift_sdk),%0A ToolSubst(%0A '%25target-swift-frontend',%0A command=config.swiftc%5B:-1%5D,%0A extra_args=swift_sdk)%0A %5D%0A llvm_config.add_tool_substitutions(tools)%0A
%0A add
|
40bd026ec34c17bd9d512c25310efcdcbfc3425c
|
Version Bump
|
littlepython/version.py
|
littlepython/version.py
|
version = '0.4.7'
|
Python
| 0.000001
|
@@ -12,7 +12,7 @@
0.4.
-7
+8
'%0A
|
37a55d4d0d4dd5827f1cd3e8cc62f3ac59f645fb
|
Remove old 'MIDDLEWARE_CLASSES'.
|
xadmin/plugins/language.py
|
xadmin/plugins/language.py
|
from django.conf import settings
from django.template import loader
from django.views.i18n import set_language
from xadmin.plugins.utils import get_context_dict
from xadmin.sites import site
from xadmin.views import BaseAdminPlugin, CommAdminView, BaseAdminView
class SetLangNavPlugin(BaseAdminPlugin):
def block_top_navmenu(self, context, nodes):
context = get_context_dict(context)
context['redirect_to'] = self.request.get_full_path()
nodes.append(loader.render_to_string('xadmin/blocks/comm.top.setlang.html', context=context))
def get_media(self, media):
media = media + self.vendor('xadmin.plugin.language.css')
return media
class SetLangView(BaseAdminView):
def post(self, request, *args, **kwargs):
if 'nav_menu' in request.session:
del request.session['nav_menu']
return set_language(request)
middleware = getattr(settings, "MIDDLEWARE", None) or \
settings.MIDDLEWARE_CLASSES
if settings.LANGUAGES and 'django.middleware.locale.LocaleMiddleware' in middleware:
site.register_plugin(SetLangNavPlugin, CommAdminView)
site.register_view(r'^i18n/setlang/$', SetLangView, 'set_language')
|
Python
| 0.000042
|
@@ -889,114 +889,8 @@
)%0A%0A%0A
-middleware = getattr(settings, %22MIDDLEWARE%22, None) or %5C%0A settings.MIDDLEWARE_CLASSES%0A%0A
if s
@@ -958,26 +958,35 @@
are' in
-middleware
+settings.MIDDLEWARE
:%0A si
|
5662921713bbd216d768cd563fb3764ea6b8fb59
|
update studio_api
|
xarm/wrapper/studio_api.py
|
xarm/wrapper/studio_api.py
|
#!/usr/bin/env python3
# Software License Agreement (BSD License)
#
# Copyright (c) 2020, UFACTORY, Inc.
# All rights reserved.
#
# Author: Vinman <vinman.wen@ufactory.cc> <vinman.cub@gmail.com>
import json
import functools
from .xarm_api import XArmAPI
try:
from requests import Session
except:
import urllib.request
class Session(object):
class Request:
def __init__(self, url, data, **kwargs):
req = urllib.request.Request(url, data.encode('utf-8'))
self.r = urllib.request.urlopen(req)
self._data = self.r.read()
@property
def status_code(self):
return self.r.code
def json(self):
return json.loads(self._data.decode('utf-8'))
def post(self, url, data=None, **kwargs):
return self.Request(url, data)
def close(self):
pass
class XArmStudioAPI(object):
def __init__(self, ip, ignore_warnning=False):
if not ignore_warnning:
import warnings
warnings.warn("don't use it for now, just for debugging")
self.__ip = ip
self.__session = Session()
self.arm = self.__RemoteXArmAPI(self._call_sdk_api)
def __del__(self):
self.__session.close()
def run_blockly_app(self, name, **kwargs):
try:
self.call_studio_api(None, 0, {}, api_name='xarm_set_blockly_init', show_fail_log=False)
except:
pass
kwargs['appName'] = name
return self.call_studio_api(None, 0, kwargs, api_name='run_blockly')
def delete_blockly_app(self, name):
return self.call_studio_api(None, 0, {
'parentPath': name,
'selectNode': {
'type': 'file'
}
}, api_name='app_delete_item')
def call_studio_api(self, *args, **kwargs):
kwargs['path'] = 'cmd'
return self.__call_remote_api(*args, **kwargs)
def _call_sdk_api(self, *args, **kwargs):
kwargs['path'] = 'api'
return self.__call_remote_api(*args, **kwargs)
def __call_remote_api(self, *args, **kwargs):
api_name = kwargs.pop('api_name', None)
show_fail_log = kwargs.pop('show_fail_log', True)
path = kwargs.pop('path')
if self.__ip and api_name:
r = self.__session.post('http://{}:18333/{}'.format(self.__ip, path), data=json.dumps({
'cmd': api_name, 'args': args, 'kwargs': kwargs
}), timeout=(5, None))
if r.status_code == 200:
res = r.json()
if 'info' in res:
if show_fail_log:
print(res['info'])
else:
return res['res']
else:
if show_fail_log:
print('request failed, http_status_code={}'.format(r.status_code))
else:
if show_fail_log:
print('ip or api_name is empty, ip={}, api_name={}'.format(self.__ip, api_name))
class __RemoteXArmAPI(XArmAPI):
def __init__(self, call_sdk_func, **kwargs):
XArmAPI.__init__(self, do_not_open=True)
self._arm = self.__RemoteXArm(call_sdk_func, self._arm)
class __RemoteXArm:
def __init__(self, call_sdk_func, _arm):
self.__call_sdk_func = call_sdk_func
self.__arm = _arm
def __getattr__(self, item):
if item.startswith(('register', 'release')):
raise Exception('Cannot call an interface with callback parameters remotely')
attr = getattr(self.__arm, item)
remote_api = functools.partial(self.__call_sdk_func, api_name=item)
return remote_api if callable(attr) else remote_api()
|
Python
| 0.000001
|
@@ -1832,24 +1832,473 @@
ete_item')%0A%0A
+ def playback_trajectory(self, filename, times=1, double_speed=1):%0A return self.call_studio_api(None, 0, %7B%0A 'filename': filename,%0A 'times': times,%0A 'speed': double_speed,%0A 'wait': False,%0A %7D, api_name='xarm_playback_traj')%0A%0A def delete_trajectory(self, filename):%0A return self.call_studio_api(None, 0, %7B%0A 'filename': filename,%0A %7D, api_name='xarm_delete_traj')%0A%0A
def call
|
03276ebebfd71e701583789888769df46f465c12
|
fix dh bug
|
xos/services/vpn/models.py
|
xos/services/vpn/models.py
|
from core.models import Service, TenantWithContainer
from django.db import transaction
VPN_KIND = "vpn"
class VPNService(Service):
"""Defines the Service for creating VPN servers."""
KIND = VPN_KIND
class Meta:
proxy = True
# The name used to find this service, all directories are named this
app_label = "vpn"
verbose_name = "VPN Service"
class VPNTenant(TenantWithContainer):
"""Defines the Tenant for creating VPN servers."""
class Meta:
proxy = True
verbose_name = "VPN Tenant"
KIND = VPN_KIND
sync_attributes = ("nat_ip", "nat_mac",)
default_attributes = {'server_key': None,
'vpn_subnet': None,
'server_network': None,
'clients_can_see_each_other': True,
'is_persistent': True,
'script': None,
'ca_crt': None,
'server_crt': None,
'server_key': None,
'dh': None}
def __init__(self, *args, **kwargs):
vpn_services = VPNService.get_service_objects().all()
if vpn_services:
self._meta.get_field(
"provider_service").default = vpn_services[0].id
super(VPNTenant, self).__init__(*args, **kwargs)
def save(self, *args, **kwargs):
super(VPNTenant, self).save(*args, **kwargs)
model_policy_vpn_tenant(self.pk)
def delete(self, *args, **kwargs):
self.cleanup_container()
super(VPNTenant, self).delete(*args, **kwargs)
@property
def server_key(self):
"""str: The server_key used to connect to the VPN server."""
return self.get_attribute(
"server_key",
self.default_attributes['server_key'])
@server_key.setter
def server_key(self, value):
self.set_attribute("server_key", value)
@property
def addresses(self):
"""Mapping[str, str]: The ip, mac address, and subnet of the NAT network of this Tenant."""
if (not self.id) or (not self.instance):
return {}
addresses = {}
for ns in self.instance.ports.all():
if "nat" in ns.network.name.lower():
addresses["ip"] = ns.ip
addresses["mac"] = ns.mac
addresses["subnet"] = ns.network.subnet
break
return addresses
# This getter is necessary because nat_ip is a sync_attribute
@property
def nat_ip(self):
"""str: The IP of this Tenant on the NAT network."""
return self.addresses.get("ip", None)
# This getter is necessary because nat_mac is a sync_attribute
@property
def nat_mac(self):
"""str: The MAC address of this Tenant on the NAT network."""
return self.addresses.get("mac", None)
@property
def subnet(self):
"""str: The subnet of this Tenant on the NAT network."""
return self.addresses.get("subnet", None)
@property
def server_network(self):
"""str: The IP address of the server on the VPN."""
return self.get_attribute(
'server_network',
self.default_attributes['server_network'])
@server_network.setter
def server_network(self, value):
self.set_attribute("server_network", value)
@property
def vpn_subnet(self):
"""str: The IP address of the client on the VPN."""
return self.get_attribute(
'vpn_subnet',
self.default_attributes['vpn_subnet'])
@vpn_subnet.setter
def vpn_subnet(self, value):
self.set_attribute("vpn_subnet", value)
@property
def is_persistent(self):
"""bool: True if the VPN connection is persistence, false otherwise."""
return self.get_attribute(
"is_persistent",
self.default_attributes['is_persistent'])
@is_persistent.setter
def is_persistent(self, value):
self.set_attribute("is_persistent", value)
@property
def clients_can_see_each_other(self):
"""bool: True if the client can see the subnet of the server, false otherwise."""
return self.get_attribute(
"clients_can_see_each_other",
self.default_attributes['clients_can_see_each_other'])
@clients_can_see_each_other.setter
def clients_can_see_each_other(self, value):
self.set_attribute("clients_can_see_each_other", value)
@property
def script(self):
"""string: The file name of the client script"""
return self.get_attribute("script", self.default_attributes['script'])
@script.setter
def script(self, value):
self.set_attribute("script", value)
@property
def ca_crt(self):
"""str: the string for the ca certificate"""
return self.get_attribute("ca_crt", self.default_attributes['ca_crt'])
@ca_crt.setter
def ca_crt(self, value):
self.set_attribute("ca_crt", value)
@property
def server_crt(self):
"""str: the string for the server certificate"""
return self.get_attribute("server_crt", self.default_attributes['server_crt'])
@server_crt.setter
def server_crt(self, value):
self.set_attribute("server_crt", value)
@property
def server_key(self):
"""str: the string for the server certificate"""
return self.get_attribute("server_key", self.default_attributes['server_key'])
@server_key.setter
def server_key(self, value):
self.set_attribute("server_key", value)
@property
def dh(self):
"""str: the string for the server certificate"""
return self.get_attribute("dh", self.default_attributes['dh'])
@dh.setter
def server_key(self, value):
self.set_attribute("dh", value)
def model_policy_vpn_tenant(pk):
"""Manages the contain for the VPN Tenant."""
# This section of code is atomic to prevent race conditions
with transaction.atomic():
# We find all of the tenants that are waiting to update
tenant = VPNTenant.objects.select_for_update().filter(pk=pk)
if not tenant:
return
# Since this code is atomic it is safe to always use the first tenant
tenant = tenant[0]
tenant.manage_container()
|
Python
| 0.000001
|
@@ -5805,34 +5805,26 @@
ter%0A def
-server_key
+dh
(self, value
|
5b0386d0872d4106902655ada78389503c62a95a
|
Add some default feedback types for item requests
|
yunity/models/relations.py
|
yunity/models/relations.py
|
from django.db.models import ForeignKey, DateTimeField, ManyToManyField
from yunity.models.entities import User, Location, Mappable, Message
from yunity.models.utils import BaseModel, MaxLengthCharField
from yunity.utils.decorators import classproperty
class Chat(BaseModel):
participants = ManyToManyField(User)
messages = ManyToManyField(Message)
class MappableLocation(BaseModel):
mappable = ForeignKey(Mappable)
location = ForeignKey(Location)
startTime = DateTimeField(null=True)
endTime = DateTimeField(null=True)
class MappableResponsibility(BaseModel):
@classproperty
def TYPE(cls):
return cls.create_constants('type', 'OWNER')
@classproperty
def STATUS(cls):
return cls.create_constants('status', 'GRANTED', 'PENDING', 'REQUESTED')
responsible = ForeignKey(User, null=True)
mappable = ForeignKey(Mappable)
status = MaxLengthCharField()
date = DateTimeField(null=True, auto_now=True)
type = MaxLengthCharField()
class UserLocation(BaseModel):
user = ForeignKey(User)
location = ForeignKey(Location)
type = MaxLengthCharField()
class ItemRequest(BaseModel):
requester = ForeignKey(User)
requested = ForeignKey(Mappable)
feedback = MaxLengthCharField(null=True, default=None)
|
Python
| 0
|
@@ -1157,24 +1157,147 @@
BaseModel):%0A
+ @classproperty%0A def FEEDBACK(cls):%0A return cls.create_constants('feedback', 'OK', 'NO_SHOW', 'NOT_GRANTED')%0A%0A
requeste
|
f97ff7b1775fbe37fb081c514fa068ee44e1379f
|
Update remove-imageless-roms.py
|
remove-imageless-roms.py
|
remove-imageless-roms.py
|
import os
import os.path
import sys
import string
# config vars, if you are using non-standard stuff change these
rom_dir = '/home/pi/RetroPie/roms'
bak_dir = '/home/pi/RetroPie/cleaned_up'
img_dir = '/opt/retropie/configs/all/emulationstation/downloaded_images'
allowed_systems = [
'amiga','amstradcpc','apple2',
'arcade','atari800','atari2600',
'atari5200', 'atari7800','atarilynx',
'atarist','c64','coco',
'dragon32','dreamcast','fba',
'fds','gamegear','gb',
'gba','gbc','intellivision',
'macintosh','mame-advmame', 'mame-libretro',
'mame-mame4all','mastersystem','megadrive',
'msx','n64', 'neogeo',
'nes','ngp','ngpc',
'pc','pcengine','psp',
'psx','sega32x','segacd',
'sg-1000','snes','vectrex',
'videopac','wonderswan','wonderswancolor',
'zmachine','zxspectrum'
]
delete_count = 0
total_count = 0
question = """
*****************************************************************
WARNING! THIS SCRIPT WILL PERMANENTLY DELETE FILES!
AND IS WRITTEN BY AN AMATEUR PYTHON DEVELOPER
===== THIS COULD GO VERY VERY WRONG! =====
===== PLEASE BACKUP BEFORE RUNNING THIS! =====
ARE YOU SURE YOU WANT TO PROCEED? (YES/TEST/NO)
TYPE "TEST" TO DO A TEST RUN THAT (HOPEFULLY) WON'T
ACTUALLY DELETE ANY FILES (MAYBE)
TYPE "CLEAN" TO MOVE UNWANTED FILES TO A BACKUP DIRECTORY
(/home/pi/RetroPie/cleaned_up)
TYPE "DELETE" IF YOU ARE SOUND MIND, UNDERSTAND THE RISKS
AND WISH TO PROCEED WITH PERMANENTLY DELETING STUFF!
*****************************************************************
: """
user_input = raw_input(question).upper()
if user_input in ['DELETE', 'TEST','CLEAN']:
# for the cleanup process, we need somewhere to put our backups
if user_input == 'CLEAN':
if not os.path.isdir(bak_dir):
os.makedirs(bak_dir)
for root, subdirs, files in os.walk(rom_dir):
list_file_path = os.path.join(root, 'foo.txt')
with open(list_file_path, 'wb') as list_file:
os.remove(list_file_path)
for filename in files:
file_path = os.path.join(root, filename)
rom_name = filename.split('.')[0]
system_name = file_path.split('/')[-2]
if system_name in allowed_systems:
total_count += 1
image_path = img_dir + '/' + system_name + '/' + rom_name + '-image.jpg'
if not os.path.isfile(image_path):
## no image found, we should delete the rom!
if user_input == 'DELETE':
print "DELETING: " + system_name + "/" + rom_name + " (" + filename + ")"
os.remove(file_path)
delete_count += 1
elif user_input == 'CLEAN'
print "CLEANING: " + system_name + "/" + rom_name + " (" + filename + ")"
system_bak_dir = bak_dir + '/' + system_name
bak_file_path = system_bak_dir + '/' + filename
if not os.path.isdir(system_bak_dir):
os.makedirs(system_bak_dir)
os.rename(file_path, bak_file_path)
delete_count += 1
else:
print "TESTING: " + system_name + "/" + rom_name + " (" + filename + ")"
## do nothing
delete_count += 1
remaining_roms = total_count - delete_count
if user_input == 'DELETE':
print "\n--------------------------------------------------------------------"
print "CLEANUP COMPLETE: " + str(delete_count) + " of " + str(total_count) + " files have been deleted! (" + str(remaining_roms) + " remain)"
print "--------------------------------------------------------------------"
elif user_input == 'CLEAN':
print "\n--------------------------------------------------------------------"
print "CLEAN COMPLETE: " + str(delete_count) + " of " + str(total_count) + " files have been moved! (" + str(remaining_roms) + " remain)"
print "--------------------------------------------------------------------"
else:
print "\n--------------------------------------------------------------------"
print "TEST COMPLETE: " + str(delete_count) + " of " + str(total_count) + " are ripe to be tidied! (" + str(remaining_roms) + " remain)"
print "--------------------------------------------------------------------"
else :
print "SCRIPT ABORTED (GOOD CHOICE!)"
|
Python
| 0.000012
|
@@ -3309,16 +3309,17 @@
'CLEAN'
+:
%0A%0A
|
eea375060727111a3590fad694f8a40823a33f43
|
Add default sort to groups
|
representatives/views.py
|
representatives/views.py
|
import datetime
from django.db import models
from django.views import generic
from django.utils.text import slugify
from .models import Mandate, Group
class RepresentativeViewMixin(object):
"""
A view mixin to add pre-fetched main_mandate and country to Representative
If a Representative was fetched from a QuerySet that have been through
prefetch_for_representative_country_and_main_mandate(), then
add_representative_country_and_main_mandate(representative) adds the
``.country`` and ``.main_mandate`` properties "for free" - the prefetch
methods adds an extra query, but gets all.
"""
def prefetch_for_representative_country_and_main_mandate(self, queryset):
"""
Prefetch Mandates with their Group and Constituency with Country.
"""
mandates = Mandate.objects.order_by(
'-end_date').select_related('constituency__country', 'group')
return queryset.prefetch_related(
models.Prefetch('mandates', queryset=mandates))
def add_representative_country_and_main_mandate(self, representative):
"""
Set representative country and main_mandate.
Note that this will butcher your database if you don't use
self.prefetch_related.
"""
today = datetime.date.today()
representative.country = None
representative.main_mandate = None
for m in representative.mandates.all():
if m.constituency.country_id and not representative.country:
representative.country = m.constituency.country
if ((m.end_date is None or m.end_date > today) and
m.group.kind == 'group' and
not representative.main_mandate):
representative.main_mandate = m
if representative.country and representative.main_mandate:
break
return representative
class RepresentativeList(RepresentativeViewMixin, generic.ListView):
def get_context_data(self, **kwargs):
c = super(RepresentativeList, self).get_context_data(**kwargs)
c['object_list'] = [
self.add_representative_country_and_main_mandate(r)
for r in c['object_list']
]
return c
def search_filter(self, qs):
search = self.request.GET.get('search', None)
if search:
qs = qs.filter(slug__icontains=slugify(search))
return qs
def group_filter(self, qs):
group_kind = self.kwargs.get('group_kind', None)
chamber = self.kwargs.get('chamber', None)
group = self.kwargs.get('group', None)
today = datetime.date.today()
if group_kind and group:
if group.isnumeric():
group_qs = Group.objects.filter(
id=int(group)
)
else:
group_qs = Group.objects.filter(
name=group,
kind=group_kind
)
if chamber:
group_qs = group_qs.filter(chamber__name=chamber)
qs = qs.filter(
models.Q(mandates__end_date__gte=today) |
models.Q(mandates__end_date__isnull=True),
mandates__group__in=group_qs
)
return qs
def get_queryset(self):
qs = super(RepresentativeList, self).get_queryset()
qs = self.group_filter(qs)
qs = self.search_filter(qs)
qs = self.prefetch_for_representative_country_and_main_mandate(qs)
return qs
class RepresentativeDetail(RepresentativeViewMixin, generic.DetailView):
def get_queryset(self):
qs = super(RepresentativeDetail, self).get_queryset()
qs = self.prefetch_for_representative_country_and_main_mandate(qs)
return qs
def get_context_data(self, **kwargs):
c = super(RepresentativeDetail, self).get_context_data(**kwargs)
self.add_representative_country_and_main_mandate(c['object'])
c['votes'] = c['object'].votes.all()
c['mandates'] = c['object'].mandates.all()
c['positions'] = c['object'].positions.filter(
published=True).prefetch_related('tags')
return c
class GroupList(generic.ListView):
def get_queryset(self):
qs = Group.objects.filter(
models.Q(mandates__end_date__gte=datetime.date.today()) |
models.Q(mandates__end_date__isnull=True)
)
kind = self.kwargs.get('kind', None)
if kind:
qs = qs.filter(kind=kind).distinct()
return qs.select_related('chamber')
|
Python
| 0.000001
|
@@ -4618,9 +4618,43 @@
hamber')
+.order_by('chamber__name', 'name')
%0A
|
16d84c04b4f8d7861a9c75d2966eca8675d5cf58
|
fix line break problem
|
stacktester/tests/test_servers.py
|
stacktester/tests/test_servers.py
|
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import unittest2 as unittest
from stacktester import openstack
class ServersTest(unittest.TestCase):
@classmethod
def setUpClass(self):
self.os = openstack.Manager()
self.image_ref = self.os.config.env.image_ref
self.flavor_ref = self.os.config.env.flavor_ref
def test_create_delete_server(self):
"""
Verify that a server instance can be created and deleted
"""
post_body = json.dumps({
'server' : {
'name' : 'testserver',
'imageRef' : self.image_ref,
'flavorRef' : self.flavor_ref,
}
})
resp, body = self.os.nova.request(
'POST', '/servers', body=post_body)
data = json.loads(body)
server_id = data['server']['id']
# KNOWN-ISSUE lp796742
#self.assertEqual(202, resp.status) self.os.nova.wait_for_server_status(server_id, 'ACTIVE')
self.assertEqual('testserver', data['server']['name'])
response, body = self.os.nova.request(
'DELETE',
'/servers/%s' % server_id,
body=body)
# Raises TimeOutException on failure
self.os.nova.poll_request_status('GET', '/servers/%s' % server_id, 404)
def test_update_server_name(self):
"""
Verify the name of an instance can be changed
"""
post_body = json.dumps({
'server' : {
'name' : 'testserver',
'imageRef' : self.image_ref,
'flavorRef' : self.flavor_ref,
}
})
# Create Server
resp, body = self.os.nova.request(
'POST', '/servers', body=post_body)
# KNOWN-ISSUE lp796742
#self.assertEqual(202, resp.status)
data = json.loads(body)
self.assertTrue('testserver', data['server']['name'])
server_id = data['server']['id']
# Wait for it to be created
self.os.nova.wait_for_server_status(server_id, 'ACTIVE')
# Update name
put_body = json.dumps({
'server' : {
'name' : 'updatedtestserver'
}
})
resp, body = self.os.nova.request(
'PUT', '/servers/%s' % server_id, body=put_body)
self.assertEqual(204, resp.status)
# Get Server information
resp, body = self.os.nova.request('GET', '/servers/%s' % server_id)
self.assertEqual(200, resp.status)
data = json.loads(body)
self.assertEqual('updatedtestserver', data['server']['name'])
def test_create_server_invalid_image(self):
"""
Verify that creating a server with an unknown image ref will fail
"""
post_body = json.dumps({
'server' : {
'name' : 'testserver',
'imageRef' : -1,
'flavorRef' : self.flavor_ref,
}
})
resp, body = self.os.nova.request(
'POST', '/servers', body=post_body)
self.assertTrue(400, resp.status)
def test_create_server_invalid_flavor(self):
"""
Verify that creating a server with an unknown image ref will fail
"""
post_body = json.dumps({
'server' : {
'name' : 'testserver',
'imageRef' : self.image_ref,
'flavorRef' : -1,
}
})
resp, body = self.os.nova.request(
'POST', '/servers', body=post_body)
self.assertTrue(400, resp.status)
|
Python
| 0.000005
|
@@ -1518,16 +1518,24 @@
.status)
+%0A
self.os
|
7eef48e81bd36f95550399f052cae37e83657288
|
fix ids
|
databroker/tests/test_humantime_munging.py
|
databroker/tests/test_humantime_munging.py
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import time as ttime
import datetime
import pytz
import pytest
from databroker.core import _normalize_human_friendly_time
# ### Test metadatastore time formatting ######################################
def _make_time_params():
# should get tz from conf? but no other tests get conf stuff...
zone = pytz.timezone('US/Eastern')
good_test_values = [('2014', 1388552400.0),
('2014 ', 1388552400.0),
('2014-02', 1391230800.0),
('2014-02 ', 1391230800.0),
('2014-2', 1391230800.0),
('2014-2 ', 1391230800.0),
('2014-2-10', 1392008400.0),
('2014-2-10 ', 1392008400.0),
('2014-02-10', 1392008400.0),
('2014-02-10 ', 1392008400.0),
(' 2014-02-10 10 ', 1392044400.0),
('2014-02-10 10:1', 1392044460.0),
('2014-02-10 10:1 ', 1392044460.0),
('2014-02-10 10:1:00', 1392044460.0),
('2014-02-10 10:01:00', 1392044460.0),
# dst transistion tests
('2015-03-08 01:59:59', 1425797999.0), # is_dst==False
# at 2am, spring forward to 3am.
# [02:00:00 - 02:59:59] does not exist
('2015-03-08 03:00:00', 1425798000.0), # is_dst==True
('2015-11-01 00:59:59', 1446353999.0), # is_dst==True
# at 2am, fall back to 1am
# [01:00:00-01:59:59] is ambiguous without is_dst
('2015-11-01 02:00:00', 1446361200.0), # is_dst==False
# other
ttime.time(),
datetime.datetime.now(),
zone.localize(datetime.datetime.now()),
]
ids = [None] * (len(good_test_values) - 3) + ['time', 'now', 'tznow']
rets = []
for val in good_test_values:
rets.append([val, True, None])
bad_test_values = ['2015-03-08 02:00:00',
'2015-03-08 02:59:59']
for val in bad_test_values:
rets.append([val, False, pytz.NonExistentTimeError])
bad_test_values = ['2015-11-01 01:00:00',
'2015-11-01 01:59:59']
for val in bad_test_values:
rets.append([val, False, pytz.AmbiguousTimeError])
bad_test_values = ['2015-04-15 03:',
str(ttime.time()),
'aardvark',
]
for val in bad_test_values:
rets.append([val, False, ValueError])
return pytest.mark.parametrize('val,should_succeed,etype', rets, ids=ids)
@_make_time_params()
def test_normalize_human_friendly_time(val, should_succeed, etype):
if isinstance(val, tuple):
(val, check_output) = val
if should_succeed:
output = _normalize_human_friendly_time(val, 'US/Eastern')
assert(isinstance(output, float))
try:
assert output == check_output
except NameError:
pass
else:
with pytest.raises(etype):
_normalize_human_friendly_time(val, 'US/Eastern')
|
Python
| 0.000033
|
@@ -2424,32 +2424,73 @@
stentTimeError%5D)
+%0A ids += %5BNone%5D * len(bad_test_values)
%0A%0A bad_test_v
@@ -2653,24 +2653,65 @@
sTimeError%5D)
+%0A ids += %5BNone%5D * len(bad_test_values)
%0A%0A bad_te
@@ -2867,32 +2867,32 @@
ad_test_values:%0A
-
rets.app
@@ -2920,16 +2920,51 @@
eError%5D)
+%0A ids += %5BNone, 'curtime', None%5D
%0A%0A re
|
38b93a1c71aae112ec5a18234b1814b49d41066c
|
Add a test for operator / with an integer.
|
jpmesh/test/test_angle.py
|
jpmesh/test/test_angle.py
|
"""
Tests for jpmesh.angle.
"""
import unittest
from jpmesh.angle import Angle
class TestAngle(unittest.TestCase):
"""
Tests for jpmesh.angle.Angle.
"""
def test_properties(self):
"""
Test for properties.
"""
millisecond = 3600000
angle = Angle.from_millisecond(millisecond)
self.assertEqual(angle.degree, float(millisecond) / 60 / 60 / 1000)
self.assertEqual(angle.minute, float(millisecond) / 60 / 1000)
self.assertEqual(angle.second, float(millisecond) / 1000)
self.assertEqual(angle.millisecond, float(millisecond))
def test_operators(self):
"""
Test for operators.
"""
angle1 = Angle.from_degree(1.0)
angle2 = Angle.from_degree(2.0)
self.assertEqual(
(angle1 + angle2).degree, angle1.degree + angle2.degree)
self.assertEqual(
(angle1 - angle2).degree, angle1.degree - angle2.degree)
self.assertEqual((angle1 * 2).degree, angle1.degree * 2)
self.assertEqual((angle1 / 2.0).degree, angle1.degree / 2.0)
self.assertEqual((angle2 // 2).degree, angle2.degree // 2)
self.assertEqual((+angle1).degree, +angle1.degree)
self.assertEqual((-angle1).degree, -angle1.degree)
self.assertEqual(abs(angle1).degree, abs(angle1.degree))
|
Python
| 0.000002
|
@@ -1020,24 +1020,89 @@
degree * 2)%0A
+ self.assertEqual((angle1 / 2).degree, angle1.degree / 2)%0A
self
|
150a8a66211d9f914216cf5a69908a4090d92cfb
|
Argument should be str
|
lrrbot/commands/live.py
|
lrrbot/commands/live.py
|
import common.http
import lrrbot.decorators
from lrrbot.main import bot
from lrrbot import googlecalendar
from common import space
from common import twitch
from common import utils
from common.config import config
import asyncio
import json
import urllib.parse
import urllib.error
import irc.client
import sqlalchemy
@utils.cache(24 * 60 * 60)
@asyncio.coroutine
def extract_new_channels(loop, token):
data = yield from common.http.request_coro(googlecalendar.EVENTS_URL % urllib.parse.quote(googlecalendar.CALENDAR_FAN), {
"key": config["google_key"],
"maxResults": 25000,
})
data = json.loads(data)
channels = set()
for event in data["items"]:
if "location" in event:
for token in event["location"].split():
url = urllib.parse.urlparse(token)
if url.scheme == "":
url = urllib.parse.urlparse("https://" + token)
if url.netloc in {"www.twitch.tv", "twitch.tv"}:
try:
channel = url.path.split("/")[1].lower()
except IndexError:
continue
channels.add(channel)
follows = yield from twitch.get_follows_channels()
old_channels = {channel["channel"]["name"] for channel in follows}
old_channels.add(config["channel"])
futures = [twitch.follow_channel(channel, token) for channel in channels.difference(old_channels)]
yield from asyncio.gather(*futures, loop=loop, return_exceptions=True)
@bot.command("live")
@lrrbot.decorators.throttle()
@lrrbot.decorators.private_reply_when_live
@asyncio.coroutine
def live(lrrbot, conn, event, respond_to):
"""
Command: !live
Post the currenly live fanstreamers.
"""
users = lrrbot.metadata.tables["users"]
with lrrbot.engine.begin() as pg_conn:
token, = pg_conn.execute(sqlalchemy.select([users.c.twitch_oauth])
.where(users.c.name == config["username"])).first()
try:
yield from extract_new_channels(lrrbot.loop, token)
except urllib.error.HTTPError:
pass
streams = yield from twitch.get_streams_followed(token)
if streams == []:
return conn.privmsg(respond_to, "No fanstreamers currently live.")
streams.sort(key=lambda e: e["channel"]["display_name"])
tag = "Currently live fanstreamers: "
# Full message
message = tag + ", ".join([
"%s (%s%s)%s%s" % (
data["channel"]["display_name"],
data["channel"]["url"],
space.SPACE,
" is playing %s" % data["game"] if data.get("game") is not None else "",
" (%s)" % data["channel"]["status"] if data["channel"].get("status") not in [None, ""] else ""
) for data in streams
])
if len(message) <= 450:
return conn.privmsg(respond_to, message)
# Shorter message
message = tag + ", ".join([
"%s (%s%s)%s" % (
data["channel"]["display_name"],
data["channel"]["url"],
space.SPACE,
" is playing %s" % data["game"] if data.get("game") is not None else "",
) for data in streams
])
if len(message) <= 450:
return conn.privmsg(respond_to, message)
# Shortest message
message = tag + ", ".join([
"%s (%s%s)" % (
data["channel"]["display_name"],
data["channel"]["url"],
space.SPACE
) for data in streams
])
return conn.privmsg(respond_to, utils.shorten(message, 450))
@bot.command("live register")
@asyncio.coroutine
def register_self(lrrbot, conn, event, respond_to):
"""
Command: !live register
Register your channel as a fanstreamer channel.
"""
channel = irc.client.NickMask(event.source).nick.lower()
users = lrrbot.metadata.tables["users"]
with lrrbot.engine.begin() as pg_conn:
token, = pg_conn.execute(sqlalchemy.select([users.c.twitch_oauth])
.where(users.c.name == config["username"])).first()
yield from twitch.follow_channel(channel, token)
conn.privmsg(respond_to, "Channel '%s' added to the fanstreamer list." % channel)
@bot.command("live unregister")
@asyncio.coroutine
def unregister_self(lrrbot, conn, event, respond_to):
"""
Command: !live unregister
Unregister your channel as a fanstreamer channel.
"""
channel = irc.client.NickMask(event.source).nick.lower()
users = lrrbot.metadata.tables["users"]
with lrrbot.engine.begin() as pg_conn:
token, = pg_conn.execute(sqlalchemy.select([users.c.twitch_oauth])
.where(users.c.name == config["username"])).first()
yield from twitch.unfollow_channel(channel, token)
conn.privmsg(respond_to, "Channel '%s' removed from the fanstreamer list." % channel)
@bot.command("live register (.*)")
@lrrbot.decorators.mod_only
@asyncio.coroutine
def register(lrrbot, conn, event, respond_to, channel):
"""
Command: !live register CHANNEL
Register CHANNEL as a fanstreamer channel.
"""
users = lrrbot.metadata.tables["users"]
with lrrbot.engine.begin() as pg_conn:
token, = pg_conn.execute(sqlalchemy.select([users.c.twitch_oauth])
.where(users.c.name == config["username"])).first()
try:
yield from twitch.follow_channel(channel, token)
conn.privmsg(respond_to, "Channel '%s' added to the fanstreamer list." % channel)
except urllib.error.HTTPError:
conn.privmsg(respond_to, "'%s' isn't a Twitch channel." % channel)
@bot.command("live unregister (.*)")
@lrrbot.decorators.mod_only
@asyncio.coroutine
def unregister(lrrbot, conn, event, respond_to, channel):
"""
Command: !live unregister CHANNEL
Unregister CHANNEL as a fanstreamer channel.
"""
users = lrrbot.metadata.tables["users"]
with lrrbot.engine.begin() as pg_conn:
token, = pg_conn.execute(sqlalchemy.select([users.c.twitch_oauth])
.where(users.c.name == config["username"])).first()
try:
yield from twitch.unfollow_channel(channel, token)
conn.privmsg(respond_to, "Channel '%s' removed from the fanstreamer list." % channel)
except urllib.error.HTTPError:
conn.privmsg(respond_to, "'%s' isn't a Twitch channel." % channel)
|
Python
| 0.999984
|
@@ -571,13 +571,15 @@
s%22:
+%22
25000
+%22
,%0A%09%7D
|
af4c5a72afb80ff59662cc6992ce3084fed75dfe
|
Fix dedupe not preserving order
|
node/deduplicate.py
|
node/deduplicate.py
|
#!/usr/bin/env python
from nodes import Node
class Deduplicate(Node):
char = "}"
args = 1
results = 2
@Node.test_func([2], [4])
@Node.test_func([1.5], [3])
def double(self, inp: Node.number):
"""inp*2"""
self.results = 1
return inp*2
def func(self, seq:Node.indexable):
"""remove duplicates from seq"""
if isinstance(seq, str):
return "".join(set(seq))
return [type(seq)(set(seq))]
|
Python
| 0.000144
|
@@ -107,17 +107,17 @@
sults =
-2
+1
%0A %0A
@@ -240,33 +240,8 @@
%22%22%22%0A
- self.results = 1%0A
@@ -262,24 +262,110 @@
*2%0A %0A
+ @Node.test_func(%5B%5B1,2,3,1,1%5D%5D, %5B%5B1,2,3%5D%5D)%0A @Node.test_func(%5B%22hi!!!%22%5D, %5B%22hi!%22%5D)%0A
def func
@@ -429,24 +429,73 @@
from seq%22%22%22%0A
+ seen = set()%0A seen_add = seen.add%0A
if i
@@ -542,23 +542,63 @@
%22%22.join(
-set(seq
+x for x in seq if not (x in seen or seen_add(x)
))%0A
@@ -606,17 +606,16 @@
return
-
%5Btype(se
@@ -621,14 +621,56 @@
eq)(
-set(seq
+%5Bx for x in seq if not (x in seen or seen_add(x
))%5D
+)%5D
|
2c41bcc21f01be159be384fe5fa30c824dfb345c
|
Bump version to 14.0.0a7
|
resolwe_bio/__about__.py
|
resolwe_bio/__about__.py
|
"""Central place for package metadata."""
# NOTE: We use __title__ instead of simply __name__ since the latter would
# interfere with a global variable __name__ denoting object's name.
__title__ = 'resolwe-bio'
__summary__ = 'Bioinformatics pipelines for the Resolwe platform'
__url__ = 'https://github.com/genialis/resolwe-bio'
# Semantic versioning is used. For more information see:
# https://packaging.python.org/en/latest/distributing/#semantic-versioning-preferred
__version__ = '14.0.0a6'
__author__ = 'Genialis, Inc.'
__email__ = 'dev-team@genialis.com'
__license__ = 'Apache License (2.0)'
__copyright__ = '2015-2018, ' + __author__
__all__ = (
"__title__", "__summary__", "__url__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
)
|
Python
| 0
|
@@ -498,9 +498,9 @@
0.0a
-6
+7
'%0A%0A_
|
4d2b384b7f91fac3b9a6aea7a5ef00de517314f9
|
Support for external test id (key = 'id')
|
nose_json/plugin.py
|
nose_json/plugin.py
|
"""
nose_json.plugin
~~~~~~~~~~~~~~~~
:copyright: 2012 DISQUS.
:license: BSD
"""
import codecs
import os
import simplejson
import traceback
from time import time
from nose.exc import SkipTest
from nose.plugins import Plugin
from nose.plugins.xunit import id_split, nice_classname, exc_message
class JsonReportPlugin(Plugin):
name = 'json'
score = 2000
encoding = 'UTF-8'
def _get_time_taken(self):
if hasattr(self, '_timer'):
taken = time() - self._timer
else:
# test died before it ran (probably error in setup())
# or success/failure added before test started probably
# due to custom TestResult munging
taken = 0.0
return taken
def options(self, parser, env):
Plugin.options(self, parser, env)
parser.add_option(
'--json-file', action='store',
dest='json_file', metavar="FILE",
default=env.get('NOSE_JSON_FILE', 'nosetests.json'),
help=("Path to json file to store the report in. "
"Default is nosetests.json in the working directory "
"[NOSE_JSON_FILE]"))
def configure(self, options, config):
Plugin.configure(self, options, config)
self.config = config
if not self.enabled:
return
self.stats = {
'errors': 0,
'failures': 0,
'passes': 0,
'skipped': 0,
}
self.results = []
report_output = options.json_file
path = os.path.realpath(os.path.dirname(report_output))
if not os.path.exists(path):
os.makedirs(path)
self.report_output = report_output
def report(self, stream):
self.stats['encoding'] = self.encoding
self.stats['total'] = (self.stats['errors'] + self.stats['failures']
+ self.stats['passes'] + self.stats['skipped'])
with codecs.open(
self.report_output, 'w', self.encoding, 'replace'
) as fp:
fp.write(simplejson.dumps({
'stats': self.stats,
'results': self.results,
}))
def startTest(self, test):
self._timer = time()
def addError(self, test, err, capt=None):
taken = self._get_time_taken()
if issubclass(err[0], SkipTest):
type = 'skipped'
self.stats['skipped'] += 1
else:
type = 'error'
self.stats['errors'] += 1
tb = ''.join(traceback.format_exception(*err))
id = test.id()
self.results.append({
'classname': ':'.join(id_split(id)[0].rsplit('.', 1)),
'name': id_split(id)[-1],
'time': taken,
'type': type,
'errtype': nice_classname(err[0]),
'message': exc_message(err),
'tb': tb,
})
def addFailure(self, test, err, capt=None, tb_info=None):
taken = self._get_time_taken()
tb = ''.join(traceback.format_exception(*err))
self.stats['failures'] += 1
id = test.id()
self.results.append({
'classname': ':'.join(id_split(id)[0].rsplit('.', 1)),
'name': id_split(id)[-1],
'time': taken,
'type': 'failure',
'errtype': nice_classname(err[0]),
'message': exc_message(err),
'tb': tb,
})
def addSuccess(self, test, capt=None):
taken = self._get_time_taken()
self.stats['passes'] += 1
id = test.id()
test_result = {
'classname': ':'.join(id_split(id)[0].rsplit('.', 1)),
'name': id_split(id)[-1],
'id': self.external_id(test),
'time': taken,
'type': 'success',
}
if hasattr(test.test.test, 'details'):
test_result.update({'details': test.test.test.details})
self.results.append(test_result)
|
Python
| 0.000002
|
@@ -2244,24 +2244,114 @@
r = time()%0A%0A
+ def external_id(self, test):%0A return getattr(test.test.test, 'test_id', None)%0A%0A
def addE
@@ -2376,32 +2376,32 @@
rr, capt=None):%0A
-
taken =
@@ -2817,32 +2817,74 @@
_split(id)%5B-1%5D,%0A
+ 'id': self.external_id(test),%0A
'tim
@@ -3346,32 +3346,32 @@
split('.', 1)),%0A
-
'nam
@@ -3384,32 +3384,74 @@
_split(id)%5B-1%5D,%0A
+ 'id': self.external_id(test),%0A
'tim
|
5b7f080c1e7616c7f9e63b285147aa9b58ee0bc7
|
Add arguments to __init__ - super to work on Python 2.7.X (#1796)
|
src/streamlink/plugins/btsports.py
|
src/streamlink/plugins/btsports.py
|
import re
import time
from uuid import uuid4
from streamlink.compat import quote
from streamlink.plugin import Plugin, PluginArguments, PluginArgument
from streamlink.plugin.api import http, useragents
from streamlink.stream import HLSStream
from streamlink.utils import url_equal
class BTSports(Plugin):
url_re = re.compile(r"https?://sport.bt.com")
arguments = PluginArguments(
PluginArgument(
"email",
requires=["password"],
metavar="EMAIL",
required=True,
help="The email associated with your BT Sport account, required to access any BT Sport stream."
),
PluginArgument(
"password",
sensitive=True,
metavar="PASSWORD",
help="Your BT Sport account password."
)
)
content_re = re.compile(r"CONTENT_(\w+)\s*=\s*'(\w+)'")
saml_re = re.compile(r'''name="SAMLResponse" value="(.*?)"''', re.M | re.DOTALL)
api_url = "https://be.avs.bt.com/AVS/besc"
saml_url = "https://samlfed.bt.com/sportgetfedwebhls"
login_url = "https://signin1.bt.com/siteminderagent/forms/login.fcc"
def __init__(self, url):
super().__init__(url)
http.headers = {"User-Agent": useragents.FIREFOX}
@classmethod
def can_handle_url(cls, url):
return cls.url_re.match(url) is not None
def login(self, username, password):
self.logger.debug("Logging in as {0}".format(username))
redirect_to = "https://home.bt.com/ss/Satellite/secure/loginforward?redirectURL={0}".format(quote(self.url))
data = {
"cookieExpp": "30",
"Switch": "yes",
"SMPostLoginUrl": "/appsyouraccount/secure/postlogin",
"loginforward": "https://home.bt.com/ss/Satellite/secure/loginforward",
"smauthreason": "0",
"TARGET": redirect_to,
"USER": username,
"PASSWORD": password}
res = http.post(self.login_url, data=data)
self.logger.debug("Redirected to: {0}".format(res.url))
if url_equal(res.url, self.url, ignore_scheme=True):
self.logger.debug("Login successful, getting SAML token")
res = http.get("https://samlfed.bt.com/sportgetfedwebhls?bt.cid={0}".format(self.acid()))
d = self.saml_re.search(res.text)
if d:
saml_data = d.group(1)
self.logger.debug("BT Sports federated login...")
res = http.post(self.api_url,
params={"action": "LoginBT", "channel": "WEBHLS", "bt.cid": self.acid},
data={"SAMLResponse": saml_data})
fed_json = http.json(res)
success = fed_json['resultCode'] == "OK"
if not success:
self.logger.error("Failed to login: {0} - {1}".format(fed_json['errorDescription'],
fed_json['message']))
return success
return False
def device_id(self):
device_id = self.cache.get("device_id") or str(uuid4())
self.cache.set("device_id", device_id)
return device_id
def acid(self):
acid = self.cache.get("acid") or "{cid}-B-{timestamp}".format(cid=self.device_id(), timestamp=int(time.time()))
self.cache.set("acid", acid)
return acid
def _get_cdn(self, channel_id, channel_type="LIVE"):
d = {"action": "GetCDN",
"type": channel_type,
"id": channel_id,
"channel": "WEBHLS",
"asJson": "Y",
"bt.cid": self.acid(),
"_": int(time.time())}
res = http.get(self.api_url, params=d, headers={"Accept": "application/json"})
return http.json(res)
def _get_streams(self):
if self.options.get("email") and self.options.get("password"):
if self.login(self.options.get("email"), self.options.get("password")):
self.logger.debug("Logged in and authenticated with BT Sports.")
res = http.get(self.url)
m = self.content_re.findall(res.text)
if m:
info = dict(m)
data = self._get_cdn(info.get("ID"), info.get("TYPE"))
if data['resultCode'] == 'OK':
return HLSStream.parse_variant_playlist(self.session, data['resultObj']['src'])
else:
self.logger.error("Failed to get stream with error: {0} - {1}".format(data['errorDescription'],
data['message']))
else:
self.logger.error("A username and password is required to use BT Sports")
__plugin__ = BTSports
|
Python
| 0
|
@@ -1183,16 +1183,30 @@
super(
+BTSports, self
).__init
|
4bd52a868deecc9d4a65066d8879a3a0b89a5a4d
|
query quant_estagio utilizando sql_where
|
src/lotes/queries/analise/quant_estagio.py
|
src/lotes/queries/analise/quant_estagio.py
|
from pprint import pprint
from utils.functions.models import rows_to_dict_list
def quant_estagio(
cursor, estagio=None, ref=None, tipo=None, cor=None, tam=None,
only=None, less=None, group=None, deposito=None):
def monta_filtro(in_, estagios):
filtro = ''
if estagios is not None:
lista_estagios = ''
sep = ''
for estagio in estagios:
lista_estagios += f'{sep}{str(estagio)}'
sep = ', '
filtro = (
f'AND l.CODIGO_ESTAGIO {in_} ({lista_estagios})')
return filtro
filtra_estagios = ' '.join([
monta_filtro('IN', only),
monta_filtro('NOT IN', less),
])
filtra_estagio = ''
if estagio is not None and estagio != '':
filtra_estagio = """--
AND l.CODIGO_ESTAGIO = {} """.format(estagio)
filtra_ref = ''
if ref is not None and ref != '':
if '%' in ref:
filtra_ref = """--
AND l.PROCONF_GRUPO LIKE '{}' """.format(ref)
else:
filtra_ref = """--
AND l.PROCONF_GRUPO = '{}' """.format(ref)
filtro_tam = ''
if tam is not None and tam != '':
filtro_tam = "AND l.PROCONF_SUBGRUPO = '{tam}'".format(tam=tam)
filtro_cor = ''
if cor is not None and cor != '':
filtro_cor = "AND l.PROCONF_ITEM = '{cor}'".format(cor=cor)
filtro_deposito = ''
if deposito is not None:
filtro_deposito = f"AND o.DEPOSITO_ENTRADA = {deposito}"
filtro_group = ''
if group is not None:
if group == 'o':
filtro_group = ", o.ORDEM_PRODUCAO"
elif group == 'op':
filtro_group = """--
, o.ORDEM_PRODUCAO
, o.PEDIDO_VENDA"""
filtro_tipo = ''
if tipo is not None:
if tipo == 'a':
filtro_tipo = "AND l.PROCONF_GRUPO < 'A0000'"
elif tipo == 'g':
filtro_tipo = "AND l.PROCONF_GRUPO like 'A%'"
elif tipo == 'b':
filtro_tipo = "AND l.PROCONF_GRUPO like 'B%'"
elif tipo == 'p':
filtro_tipo = \
"AND (l.PROCONF_GRUPO like 'A%' OR l.PROCONF_GRUPO like 'B%')"
elif tipo == 'v':
filtro_tipo = "AND l.PROCONF_GRUPO < 'C0000'"
elif tipo == 'm':
filtro_tipo = "AND l.PROCONF_GRUPO >= 'C0000'"
sql = f"""
SELECT
sum(
CASE WHEN (l.QTDE_DISPONIVEL_BAIXA + l.QTDE_CONSERTO) > 0
THEN 1
ELSE 0
END
) LOTES
, sum((l.QTDE_DISPONIVEL_BAIXA + l.QTDE_CONSERTO)) QUANT
{filtro_group} -- filtro_group
, l.PROCONF_NIVEL99 NIVEL
, l.PROCONF_GRUPO REF
, l.PROCONF_SUBGRUPO TAM
, l.PROCONF_ITEM COR
FROM PCPC_040 l
JOIN PCPC_020 o
ON o.ORDEM_PRODUCAO = l.ORDEM_PRODUCAO
LEFT JOIN BASI_220 t
ON t.TAMANHO_REF = l.PROCONF_SUBGRUPO
WHERE 1=1
AND o.SITUACAO in (4, 2) -- Ordens em produção, Ordem confec. gerada
{filtro_deposito} -- filtro_deposito
-- AND l.PERIODO_PRODUCAO = 1921
-- AND l.ORDEM_CONFECCAO = 01866
{filtra_estagio} -- filtra_estagio
{filtra_estagios} -- filtra_estagios
{filtra_ref} -- filtra_ref
{filtro_tipo} -- filtro_tipo
{filtro_tam} -- filtro_tam
{filtro_cor} -- filtro_cor
GROUP BY
l.PROCONF_NIVEL99
{filtro_group} -- filtro_group
, l.PROCONF_GRUPO
, t.ORDEM_TAMANHO
, l.PROCONF_SUBGRUPO
, l.PROCONF_ITEM
HAVING
sum((l.QTDE_DISPONIVEL_BAIXA + l.QTDE_CONSERTO)) > 0
ORDER BY
l.PROCONF_NIVEL99
{filtro_group} -- filtro_group
, l.PROCONF_GRUPO
, t.ORDEM_TAMANHO
, l.PROCONF_ITEM
"""
cursor.execute(sql)
return rows_to_dict_list(cursor)
|
Python
| 0.999053
|
@@ -73,16 +73,91 @@
ct_list%0A
+from utils.functions.queries import coalesce, sql_where, sql_where_none_if%0A
%0A%0Adef qu
@@ -821,770 +821,373 @@
o =
-''%0A if estagio is not None and estagio != '':%0A filtra_estagio = %22%22%22--%0A AND l.CODIGO_ESTAGIO = %7B%7D %22%22%22.format(
+sql_where_none_if('l.CODIGO_ESTAGIO',
estagio
+, ''
)%0A%0A
-filtra_ref = ''%0A if ref is not None and ref != '':
+ref = coalesce(ref, '')
%0A
- if '%25' in ref:%0A filtra_ref = %22%22%22--%0A AND l.PROCONF_GRUPO LIKE '%7B%7D' %22%22%22.format(ref)%0A else:%0A filtra_ref = %22%22%22--%0A AND l.PROCONF_GRUPO = '%7B%7D' %22%22%22.format(ref)%0A%0A filtro_tam = ''%0A if tam is not None and tam != '':%0A filtro_tam = %22AND l.PROCONF_SUBGRUPO = '%7Btam%7D'%22.format(tam=tam)%0A%0A filtro_cor = ''%0A if cor is not None and cor != '':%0A filtro_cor = %22AND l.PROCONF_ITEM = '%7Bcor%7D'%22.format(cor=cor)%0A%0A filtro_deposito = ''%0A if deposito is not None:%0A filtro_deposito = f%22AND
+filtra_ref = sql_where_none_if(%0A 'l.PROCONF_GRUPO', ref, '',%0A operation=%22LIKE%22 if '%25' in ref else %22=%22)%0A%0A filtro_tam = sql_where_none_if('l.PROCONF_SUBGRUPO', tam, '')%0A%0A filtro_cor = sql_where_none_if('l.PROCONF_ITEM', cor, '')%0A%0A filtro_deposito = sql_where('
o.DE
@@ -1204,20 +1204,19 @@
RADA
- = %7B
+',
deposito
%7D%22%0A%0A
@@ -1215,10 +1215,19 @@
sito
-%7D%22
+, quote='')
%0A%0A
|
05c319f868215f832e97577f5e158edf82fab074
|
Change version for next release
|
markdown/__version__.py
|
markdown/__version__.py
|
#
# markdown/__version__.py
#
# version_info should conform to PEP 386
# (major, minor, micro, alpha/beta/rc/final, #)
# (1, 1, 2, 'alpha', 0) => "1.1.2.dev"
# (1, 2, 0, 'beta', 2) => "1.2b2"
version_info = (2, 6, 0, 'zds', 7)
def _get_version():
" Returns a PEP 386-compliant version number from version_info. "
assert len(version_info) == 5
assert version_info[3] in ('alpha', 'beta', 'rc', 'final', 'zds')
parts = 2 if version_info[2] == 0 else 3
main = '.'.join(map(str, version_info[:parts]))
sub = ''
if version_info[3] == 'alpha' and version_info[4] == 0:
# TODO: maybe append some sort of git info here??
sub = '.dev'
elif version_info[3] != 'final':
mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c', 'zds': 'post'}
sub = mapping[version_info[3]] + str(version_info[4])
return str(main + sub)
version = _get_version()
|
Python
| 0
|
@@ -221,9 +221,9 @@
s',
-7
+8
)%0A%0A%0A
|
2905d3ede8a34db95fc6a9321d77f60c59f90cc1
|
Suppress multiple long reports
|
stores/weathprov/providerutils.py
|
stores/weathprov/providerutils.py
|
import json
import config
import logsupport
from logsupport import ConsoleWarning, ConsoleDetail
WeathProvs = {}
TermShortener = {}
StillLong = {}
GenericShortener = {
'moderate': 'mdrt',
'thunder': 'thndr',
'patchy': 'pchy',
'chance': 'chc',
'freezing': 'frzing',
'light': 'lt',
'heavy': 'hvy',
'shower': 'shwr',
'showers': 'shwrs',
'drizzle': 'drzl',
'rain': 'rn',
'snow': 'snw',
'or': '/',
'with': 'w/',
'until': 'til',
'evening': 'evng',
'possible': 'psbl',
'morning': 'mrng',
'the': ''
}
def TryShorten(term):
global TermShortener, StillLong
maxlength = 12
newterm = term.replace(' throughout the day', '') # todo def a noise list also del trailing, leading spaces etc
newterm = newterm.replace('.', '')
if newterm in TermShortener:
return TermShortener[newterm]
elif len(newterm) > maxlength and newterm[0:4] != 'http':
phrase = newterm.split(' ')
chg = False
for i, word in enumerate(list(phrase)):
if word.lower() in GenericShortener:
chg = True
phrase[i] = GenericShortener[word.lower()]
if word[0].isupper(): phrase[i] = phrase[i].capitalize()
if chg:
newterm = ' '.join(phrase).replace(' /', '/').replace('/ ', '/').replace('.', '')
if len(newterm) > maxlength:
logsupport.Logs.Log("Long term: ", term, ' generically shortened to: ', newterm,
severity=ConsoleWarning)
StillLong[term] = newterm
else:
logsupport.Logs.Log("Long term: ", term, ' generically shortened to: ', newterm,
severity=ConsoleDetail)
else:
logsupport.Logs.Log("Long term: " + term, severity=ConsoleWarning)
TermShortener[term] = newterm # only report once
with open('{}/Console/termshortenlist.new'.format(config.sysStore.HomeDir), 'w') as f: # todo move to async?
json.dump(TermShortener, f, indent=4, separators=(',', ": "))
with open('{}/Console/problemterms.new'.format(config.sysStore.HomeDir), 'w') as f: # todo move to async?
json.dump(TermShortener, f, indent=4, separators=(',', ": "))
json.dump(StillLong, f, indent=4, separators=(',', ": "))
return newterm
# noinspection PyBroadException
def SetUpTermShortener():
global TermShortener
try:
with open('{}/Console/termshortenlist'.format(config.sysStore.HomeDir), 'r') as f:
# noinspection PyBroadException
TermShortener = json.load(f)
except:
TermShortener = {}
|
Python
| 0.999489
|
@@ -1229,16 +1229,42 @@
axlength
+ and term not in StillLong
:%0A%09%09%09%09lo
|
e333ac12bed75a1ee8c974107aa3ec8f2453c27d
|
Version bump to v0.5.0
|
kafka_influxdb/version.py
|
kafka_influxdb/version.py
|
__version__ = '0.4.1'
|
Python
| 0
|
@@ -14,9 +14,9 @@
'0.
-4.1
+5.0
'%0A
|
ab10f3d134065047a7260662d3c39295904795b8
|
Add fkey constraints at the same time
|
migration/versions/001_initial_migration.py
|
migration/versions/001_initial_migration.py
|
from __future__ import print_function
from getpass import getpass
import readline
import sys
from sqlalchemy import *
from migrate import *
from migrate.changeset.constraint import ForeignKeyConstraint
import annotateit
from annotateit import db
from annotateit.model import Consumer, User
meta = MetaData()
consumer = Table('consumer', meta,
Column('id', Integer, primary_key=True, nullable=False),
Column('key', String),
Column('secret', String),
Column('ttl', Integer),
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('user_id', Integer),
)
user = Table('user', meta,
Column('id', Integer, primary_key=True, nullable=False),
Column('username', String),
Column('email', String),
Column('password_hash', String),
Column('created_at', DateTime),
Column('updated_at', DateTime),
)
consumer_user_id_fkey = ForeignKeyConstraint([consumer.c.user_id], [user.c.id])
def upgrade(migrate_engine):
meta.bind = migrate_engine
consumer.create()
user.create()
consumer_user_id_fkey.create()
def downgrade(migrate_engine):
meta.bind = migrate_engine
consumer_user_id_fkey.create()
user.drop()
consumer.drop()
|
Python
| 0.000001
|
@@ -2,205 +2,49 @@
rom
-__future__ import print_function%0Afrom getpass import getpass%0Aimport readline%0Aimport sys%0A%0Afrom sqlalchemy import *%0Afrom migrate import *%0Afrom migrate.changeset.constraint import ForeignKeyConstraint
+sqlalchemy import *%0Afrom migrate import *
%0A%0Aim
@@ -431,16 +431,39 @@
Integer
+, ForeignKey('user.id')
),%0A)%0A%0Aus
@@ -725,89 +725,8 @@
%0A)%0A%0A
-consumer_user_id_fkey = ForeignKeyConstraint(%5Bconsumer.c.user_id%5D, %5Buser.c.id%5D)%0A%0A
def
@@ -773,32 +773,50 @@
migrate_engine%0A
+ user.create()%0A
consumer.cre
@@ -824,61 +824,8 @@
te()
-%0A user.create()%0A consumer_user_id_fkey.create()
%0A%0Ade
@@ -900,39 +900,8 @@
umer
-_user_id_fkey.create()%0A user
.dro
@@ -904,28 +904,24 @@
.drop()%0A
-consum
+us
er.drop()%0A
|
6ea40899c81a563f4e9a51b8c087f11f8557374b
|
Add continuous matching toggle check box
|
microphone_match_gui.py
|
microphone_match_gui.py
|
#!/usr/bin/python2
import signal
signal.signal(signal.SIGINT, signal.SIG_DFL)
import sys
import os
from PyQt5.QtWidgets import (QApplication, QWidget, QPushButton, QVBoxLayout, QHBoxLayout, QLabel, QSizePolicy, QProgressBar)
from PyQt5.QtCore import (QCoreApplication, QThread, QBasicTimer)
import microphone_match
def main(argv):
app = QApplication(argv)
w = MainWindow()
sys.exit(app.exec_())
class RecorderMatcherThread(QThread):
def __init__(self, matcher):
super(self.__class__, self).__init__()
self.matcher = matcher
def __del__(self):
self.wait()
def run(self):
# database_file_path = QApplication.instance().arguments()[1] if len(QApplication.instance().arguments())>1 else os.path.join(os.path.dirname(os.path.abspath(__file__)),'fpdbase.pklz')
# microphone_match.recordAndMatch(database_file_path)
# self.recordButton.setText('Record')
self.result = self.matcher.recordAndMatch2()
class MainWindow(QWidget):
def __init__(self):
super(MainWindow,self).__init__()
self.initUI()
def initUI(self):
self.resize(400,50)
self.move(400,600)
self.setWindowTitle('Swing.azm')
self.continuousMatching = True
self.threadInterrupter = {'interrupted':False}
self.continuousMatcher = microphone_match.ContinuousMatcher(self.threadInterrupter)
self.matcherThread = RecorderMatcherThread(self.continuousMatcher)
self.matcherThread.finished.connect(self.recordingFinished)
self.recordButton = QPushButton('Record')
self.recordButton.resize(self.recordButton.sizeHint())
self.recordButton.setSizePolicy(QSizePolicy.Fixed, QSizePolicy.Preferred)
self.recordButton.clicked.connect(self.recordAndMatch)
self.resultLabel = QLabel('Ready')
self.progress = 0.0
self.progressBar = QProgressBar()
self.progressTimer = QBasicTimer()
self.recResHBox = QHBoxLayout()
self.recResHBox.addWidget(self.recordButton)
self.recResHBox.addWidget(self.resultLabel)
self.mainVBox = QVBoxLayout()
self.mainVBox.addLayout(self.recResHBox)
self.mainVBox.addWidget(self.progressBar)
self.mainVBox.addStretch(1)
self.setLayout(self.mainVBox)
self.show()
def interruptRecording(self):
self.threadInterrupter['interrupted'] = True
def recordAndMatch(self):
self.threadInterrupter['interrupted'] = False
self.recordButton.setText('Recording')
self.progress = 0.0
self.progressBar.setValue(0)
self.progressTimer.start(100,self)
self.matcherThread.start()
self.recordButton.clicked.disconnect()
self.recordButton.clicked.connect(self.interruptRecording)
def recordingFinished(self):
self.resultLabel.setText(self.matcherThread.result)
self.progressBar.setValue(100)
self.progress = 100.0
self.progressTimer.stop()
if(self.continuousMatching and not self.threadInterrupter['interrupted']):
self.recordAndMatch()
else:
self.recordButton.setText('Record')
self.recordButton.clicked.disconnect()
self.recordButton.clicked.connect(self.recordAndMatch)
def timerEvent(self, e):
if self.progress >= 100:
self.progressTimer.stop()
return
self.progress = self.progress + 1/3.0
self.progressBar.setValue(self.progress)
if __name__ == '__main__':
main(sys.argv)
|
Python
| 0
|
@@ -135,16 +135,24 @@
ication,
+%0A
QWidget
@@ -152,16 +152,24 @@
QWidget,
+%0A
QPushBu
@@ -173,16 +173,24 @@
hButton,
+%0A
QVBoxLa
@@ -194,16 +194,24 @@
xLayout,
+%0A
QHBoxLa
@@ -219,29 +219,72 @@
out,
- QLabel, QSizePolicy,
+%0A QLabel,%0A QSizePolicy,%0A QCheckBox,%0A
QPr
@@ -1910,24 +1910,269 @@
l('Ready')%0A%0A
+ self.continuousCheckBox = QCheckBox()%0A self.continuousCheckBox.setText('Continuous')%0A self.continuousCheckBox.setChecked(self.continuousMatching)%0A self.continuousCheckBox.stateChanged.connect(self.toggleContinuous)%0A%0A
self
@@ -2268,24 +2268,25 @@
asicTimer()%0A
+%0A
self
@@ -2502,24 +2502,81 @@
recResHBox)%0A
+ self.mainVBox.addWidget(self.continuousCheckBox)%0A
self
@@ -3844,16 +3844,16 @@
+ 1/3.0%0A
-
@@ -3894,16 +3894,187 @@
gress)%0A%0A
+ def toggleContinuous(self):%0A self.continuousMatching = self.continuousCheckBox.isChecked()%0A self.continuousCheckBox.setChecked(self.continuousMatching)%0A%0A
if __nam
|
728c06e2a4df11b03c9cd5c132fee003e0b2895b
|
Disable emailing
|
14B-088/HI/job_generator.py
|
14B-088/HI/job_generator.py
|
'''
Generate a pbs script for job submission, submit the job, be happy
'''
import glob
import os
import shutil
import time
def return_template(output_direc, ms_name, model_name, mask_name):
template = \
'''
#!/bin/bash
#PBS -S /bin/bash
#PBS -l pmem=2000m
#PBS -l feature=X5675
#PBS -l nodes=1:ppn=12
#PBS -l walltime=72:00:00
#PBS -m bea
#PBS -M koch.eric.w@gmail.com
#PBS -l epilogue=/home/ekoch/code_repos/simscript/epilogue.sh
source /home/ekoch/.bashrc
cd X1
echo "Starting at: `date`"
casa -c /home/ekoch/code_repos/VLA_Lband/14B-088/HI/HI_single_channel_clean.py X2 X3 X4
echo "Exited with code $? at: `date`"
'''
template = template.strip()
template = template.replace("X1", output_direc)
template = template.replace("X2", ms_name)
template = template.replace("X3", model_name)
template = template.replace("X4", mask_name)
return template
# Set the directory to look in.
ms_channel = "/home/ekoch/m33/14B-088/channel_ms/"
model_channels = "/home/ekoch/m33/14B-088/model_channels/M33_14B-088_HI_model_channel_"
mask_channels = "/home/ekoch/m33/14B-088/mask_channels/M33_14B-088_HI_mask_channel_"
output_direc = "/home/ekoch/m33/14B-088/single_channels/"
# Use mask and model? Disable when continuing to clean.
use_mask_model = True
while True:
channel_ms = glob.glob(os.path.join(ms_channel, "*channel*.ms"))
# If there aren't any more split ms in the path, break and exit
if len(channel_ms) == 0:
break
# Now loop through the existing channel ms
for chan in channel_ms:
chan_num = int(chan.split("_")[-1][:-3])
# adjust for numbering offset
mod_mask_num = chan_num - 670
channel_direc = os.path.join(output_direc, "channel_"+str(chan_num))
# Check if that channel has been imaged already
if os.path.isdir(channel_direc):
print("Already imaged "+str(chan_num)+". Skipping")
continue
os.mkdir(channel_direc)
shutil.move(chan, channel_direc)
shutil.move(model_channels+str(mod_mask_num)+".image", channel_direc)
shutil.move(mask_channels+str(mod_mask_num)+".image", channel_direc)
chan_ms = os.path.join(channel_direc, chan.split("/")[-1])
model_name = os.path.join(channel_direc,
"M33_14B-088_HI_model_channel_"
+ str(mod_mask_num) + ".image")
mask_name = os.path.join(channel_direc,
"M33_14B-088_HI_mask_channel_"
+ str(mod_mask_num) + ".image")
chan_template = return_template(channel_direc, chan_ms,
model_name, mask_name)
# Write to file
sub_file = os.path.join(channel_direc, "channel_"+str(chan_num)+".sub")
with open(sub_file, 'w') as f:
f.write(chan_template)
# Now submit!
old_direc = os.getcwd()
os.chdir(channel_direc) # Switch to directory so log files are there
os.system("qsub " + sub_file)
os.chdir(old_direc)
# Wait an hour, then check again for new channel ms
time.sleep(3600)
|
Python
| 0.000005
|
@@ -188,16 +188,82 @@
name):%0A%0A
+ # Emailing%0A #PBS -m bea%0A #PBS -M koch.eric.w@gmail.com%0A%0A
temp
@@ -408,50 +408,8 @@
:00%0A
-#PBS -m bea%0A#PBS -M koch.eric.w@gmail.com%0A
#PBS
|
4d101f6c17cbf63a3229ff90eb92c1e6b8ad9dd1
|
add missing versionadded
|
kivy/modules/touchring.py
|
kivy/modules/touchring.py
|
'''
Touchring
=========
Shows rings around every touch on the surface / screen. You can use this module
to check that you don't have any calibration issues with touches.
Configuration
-------------
:Parameters:
`image`: str, defaults to '<kivy>/data/images/ring.png'
Filename of the image to use.
`scale`: float, defaults to 1.
Scale of the image.
`alpha`: float, defaults to 1.
Opacity of the image.
`show_cursor`: boolean, default to False
`cursor_image`: str, defaults to 'atlas://data/images/defaulttheme/slider_cursor'
Image used to represent the cursor if displayed
.. versionadded:: 1.8.0
`cursor_size`: tuple, defaults to (None, None)
Apparent size of the mouse cursor, if displayed, default value
will keep its real size.
.. versionadded:: 1.8.0
`cursor_offset`: tuple, defaults to (None, None)
Offset of the texture image, default value , will align the
top-left corner of the image to the mouse pos.
.. versionadded:: 1.8.0
Example
-------
In your configuration (`~/.kivy/config.ini`), you can add something like
this::
[modules]
touchring = image=mypointer.png,scale=.3,alpha=.7
'''
__all__ = ('start', 'stop')
from kivy.core.image import Image
from kivy.graphics import Color, Rectangle
pointer_image = None
pointer_scale = 1.0
pointer_alpha = 0.7
cursor_image = ''
cursor_offset = (0, 0)
cursor_size = (None, None)
def _touch_down(win, touch):
ud = touch.ud
touch.scale_for_screen(win.width, win.height)
with win.canvas.after:
ud['tr.color'] = Color(1, 1, 1, pointer_alpha)
iw, ih = pointer_image.size
ud['tr.rect'] = Rectangle(
pos=(
touch.x - (pointer_image.width / 2. * pointer_scale),
touch.y - (pointer_image.height / 2. * pointer_scale)),
size=(iw * pointer_scale, ih * pointer_scale),
texture=pointer_image.texture)
if not ud.get('tr.grab', False):
ud['tr.grab'] = True
touch.grab(win)
def _touch_move(win, touch):
ud = touch.ud
ud['tr.rect'].pos = (
touch.x - (pointer_image.width / 2. * pointer_scale),
touch.y - (pointer_image.height / 2. * pointer_scale))
def _touch_up(win, touch):
if touch.grab_current is win:
ud = touch.ud
win.canvas.after.remove(ud['tr.color'])
win.canvas.after.remove(ud['tr.rect'])
if ud.get('tr.grab') is True:
touch.ungrab(win)
ud['tr.grab'] = False
def _mouse_move(win, pos, *args):
global cursor_size
if hasattr(win, '_cursor'):
c = win._cursor
else:
with win.canvas.after:
img = Image(cursor_image)
Color(1, 1, 1, 1, mode='rgba')
size = (
cursor_size[0] or img.texture.size[0],
cursor_size[1] or img.texture.size[1]
)
print(size)
win._cursor = c = Rectangle(texture=img.texture,
size=size)
c.pos = pos[0] + cursor_offset[0], pos[1] - c.size[1] + cursor_offset[1]
def start(win, ctx):
# XXX use ctx !
global pointer_image, pointer_scale, pointer_alpha, cursor_size,\
cursor_image, cursor_offset
pointer_fn = ctx.config.get('image',
'atlas://data/images/defaulttheme/ring')
pointer_scale = float(ctx.config.get('scale', 1.0))
pointer_alpha = float(ctx.config.get('alpha', 1.0))
pointer_image = Image(pointer_fn)
cursor_image = ctx.config.get(
'cursor_image',
'atlas://data/images/defaulttheme/slider_cursor')
cursor_size = ctx.config.get('cursor_size', (None, None))
if isinstance(cursor_size, str):
cursor_size = [int(x) for x in cursor_size.split('x')]
cursor_offset = ctx.config.get('cursor_offset', (0, 0))
if isinstance(cursor_offset, str):
cursor_offset = [int(x) for x in cursor_offset.split('x')]
win.bind(on_touch_down=_touch_down,
on_touch_move=_touch_move,
on_touch_up=_touch_up)
if ctx.config.get('show_cursor', False):
print('adding binding for mouse move')
win.bind(mouse_pos=_mouse_move)
def stop(win, ctx):
win.unbind(on_touch_down=_touch_down,
on_touch_move=_touch_move,
on_touch_up=_touch_up,
on_mouse_pos=_mouse_move)
|
Python
| 0.000003
|
@@ -478,16 +478,48 @@
o False%0A
+ .. versionadded:: 1.8.0%0A
%60cur
|
7ae0ae587ff38d8168a6e18515265bcc7170192c
|
Fix typo
|
laniakea/core/userdata.py
|
laniakea/core/userdata.py
|
# coding: utf-8
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""Utility class for UserData scripts."""
import re
import os
import logging
from laniakea.core.common import String
logger = logging.getLogger("laniakea")
class UserDataException(Exception):
"""Exception class for Packet Manager."""
def __init(self, message):
super().__init__(message)
class UserData:
"""Utility functions for dealing with UserData scripts.
"""
@staticmethod
def convert_pair_to_dict(arg):
"""Utility function which transform k=v strings from the command-line into a dict.
"""
return dict(kv.split('=', 1) for kv in arg)
@staticmethod
def parse_only_criterias(conditions):
result = {}
for kv in conditions: # pylint: disable=invalid-name
k, v = kv.split('=', 1) # pylint: disable=invalid-name
if "," in v:
result[k] = v.split(',', 1)
else:
result[k] = [v]
return result
@staticmethod
def convert_str_to_int(arg):
"""
"""
for k, v in list(arg.items()): # pylint: disable=invalid-name
try:
arg[String(k)] = int(v)
except ValueError:
pass
return arg
@staticmethod
def list_tags(userdata):
"""List all used macros within a UserData script.
:param userdata: The UserData script.
:type userdata: str
"""
macros = re.findall('@(.*?)@', userdata)
logging.info('List of available macros:')
for macro in macros:
logging.info('\t%r', macro)
@staticmethod
def handle_tags(userdata, macros):
"""Insert macro values or auto export variables in UserData scripts.
:param userdata: The UserData script.
:type userdata: str
:param macros: UserData macros as key value pair.
:type macros: dict
:return: UserData script with the macros replaced with their values.
:rrtpe: str
"""
macro_vars = re.findall('@(.*?)@', userdata)
for macro_var in macro_vars:
if macro_var == '!all_macros_export':
macro_var_export_list = []
for defined_macro in macros:
macro_var_export_list.append('export %s="%s"' % (defined_macro, macros[defined_macro]))
macro_var_exports = "\n".join(macro_var_export_list)
userdata = userdata.replace('@%s@' % macro_var, macro_var_exports)
elif macro_var == "!all_macros_docker":
macro_var_export_list = []
for defined_macro in macros:
macro_var_export_list.append("-e '%s=%s'" % (defined_macro, macros[defined_macro]))
macro_var_exports = " ".join(macro_var_export_list)
userdata = userdata.replace('@%s@' % macro_var, macro_var_exports)
else:
if "|" in macro_var:
macro_var, default_value = macro_var.split('|')
if macro_var not in macros:
logging.warning('Using default variable value %s for @%s@ ', default_value, macro_var)
value = default_value
else:
value = macros[macro_var]
userdata = userdata.replace('@%s|%s@' % (macro_var, default_value), value)
else:
if macro_var not in macros:
logging.error('Undefined variable @%s@ in UserData script', macro_var)
return None
userdata = userdata.replace('@%s@' % macro_var, macros[macro_var])
return userdata
@staticmethod
def handle_import_tags(userdata, import_root):
"""Handle @import(filepath)@ tags in a UserData script.
:param import_root: Location for imports.
:type import_root: str
:param userdata: UserData script content.
:type userdata: str
:return: UserData script with the contents of the imported files.
:rtype: str
"""
imports = re.findall('@import\((.*?)\)@', userdata) # pylint: disable=anomalous-backslash-in-string
if not imports:
return userdata
for filepath in imports:
logger.info('Processing "import" of %s', filepath)
import_path = os.path.join(import_root, filepath)
try:
with open(import_path) as fo:
content = fo.read()
userdata = userdata.replace('@import(%s)@' % filepath, content)
except FileNotFoundError:
raise UserDataException('Import path {} not found.'.format(import_path))
return userdata
|
Python
| 0.999999
|
@@ -2177,18 +2177,18 @@
:r
-r
t
+y
pe: str%0A
|
d8e7bc454a3dea91f48905d32adeb9b1e8f11c5e
|
Add check_download_path to configuration and changes the get method
|
migmig/configuration.py
|
migmig/configuration.py
|
# Configuration module
#
## tmp note: you should add logger module and modify this module
#
from ConfigParser import SafeConfigParser
import os
program_name = "migmig"
# config file short path (hard coded !)
cfg_short_path = "~/." + program_name + ".ini"
server_address = "localhost"
server_port = "50001"
class Configuration():
def __init__(self, logger):
# constant variables
self.OK = 99
self.BAD_URL = 98
self.BLAHBLAH = 97
self.logger = logger
self.parser = SafeConfigParser()
self.cfg_path = self.validate_path(cfg_short_path)
if not self.cfg_path:
# create config file and initate configurations
self.cfg_path = os.path.expanduser(cfg_short_path)
self.initate()
self.parser.read(self.cfg_path)
def initate(self):
# Sections
self.parser.add_section("Setting")
self.parser.add_section("Client")
# Options
self.parser.set("Setting", "default_download_path", os.path.expanduser("~/Downloads/" + program_name))
self.parser.set("Setting", "default_merge_path", os.path.expanduser("~/Downloads/" + program_name + "/merged"))
self.parser.set("Setting", "max_connections", "6")
self.parser.set("Setting", "number_of_tries", "3")
self.parser.set("Setting", "verbose_level", "1")
self.parser.set('Setting', 'server_address', server_address)
self.parser.set('Setting', 'server_port', server_port)
self.parser.set('Client', 'identifier', 'None')
self.parser.set('Client', 'URL', 'None')
self.parser.set('Client', 'client_id', 'None')
# write settings to ini file
self.write()
def get(self, value):
'''
This method only returns values from "Client" section.
'''
try:
return self.parser.get('Client', value)
except:
return None
def set(self, **kwargs):
'''
Like get method, this only works for "Client" section.
'''
try:
for key, value in kwargs.items():
self.parser.set('Client', key, value)
self.write()
return True
except:
# log the python error. how?
return False
def write(self):
'''
Write (synchronize) the parser object to .ini file !
'''
# TO-DO : ...
with open(self.cfg_path, "wb") as tmp:
self.parser.write(tmp)
def get_server(self):
addr = self.parser.get('Setting','server_address')
port = self.parser.get('Setting','server_port')
return "http://" + addr + ':' + port
def default_download_path(self, new_path):
new_path = self.validate_path(new_path)
if not new_path:
self.parser.set("Setting", "default_download_path", new_path)
def validate_path(self, path):
'''
Check whether path exists or not
'''
path = os.path.expanduser(path)
if os.path.exists(path):
return path
return None
|
Python
| 0
|
@@ -734,16 +734,46 @@
_path)%0A%0A
+%09%09self.check_download_path()%0A%0A
%0A%09def in
@@ -910,32 +910,24 @@
%22Setting%22, %22
-default_
download_pat
@@ -1581,20 +1581,19 @@
t(self,
-valu
+nam
e):%0A%09%09''
@@ -1613,66 +1613,251 @@
hod
-only returns values from %22Client%22 section.%0A%09%09'''
+looks for given name in all secion,%0A%09%09%09it returns the first value that is matched.%0A%09%09'''%0A%09%09for section in self.parser.sections():%0A%09%09%09for item, val in self.parser.items(section):%0A%09%09%09%09if name == item:%0A%09%09%09%09%09return val%0A%09%09return None%0A
%0A%09%09
+#
try:%0A%09%09
+#
%09ret
@@ -1895,16 +1895,18 @@
alue)%0A%09%09
+#
except:%0A
@@ -1903,24 +1903,26 @@
# except:%0A%09%09
+#
%09return None
@@ -2542,23 +2542,21 @@
%0A%0A%0A%09def
-default
+check
_downloa
@@ -2570,61 +2570,121 @@
self
-, new_path):%0A%09%09new_path = self.validate_path(new
+):%0A%09%09'''%0A%09%09%09If download_path does not exist, create !%0A%09%09'''%0A%09%09d_path = self.parser.get('Setting', 'download
_path
+'
)%0A%09%09
@@ -2694,82 +2694,189 @@
not
-new
+os.path.exists(d
_path
+)
:%0A%09%09%09
-self.parser.set(%22Setting%22, %22default_download_path%22, new_path)
+try:%0A%09%09%09%09os.makedirs(d_path)%0A%09%09%09except:%0A%09%09%09%09# TO-D0: log%0A%09%09%09%09# maybe permission denied ?%0A%09%09%09%09print 'path cant be created !'%0A%09%09%09%09return False%0A%09%09return True
%0A%0A%09%0A
|
3b8365f8716add070490e7aca46b728eb8cbc43b
|
Remove square braces from IPv6 addresses (#561)
|
telethon/extensions/tcp_client.py
|
telethon/extensions/tcp_client.py
|
"""
This module holds a rough implementation of the C# TCP client.
"""
import errno
import socket
import time
from datetime import timedelta
from io import BytesIO, BufferedWriter
from threading import Lock
MAX_TIMEOUT = 15 # in seconds
CONN_RESET_ERRNOS = {
errno.EBADF, errno.ENOTSOCK, errno.ENETUNREACH,
errno.EINVAL, errno.ENOTCONN
}
class TcpClient:
"""A simple TCP client to ease the work with sockets and proxies."""
def __init__(self, proxy=None, timeout=timedelta(seconds=5)):
"""
Initializes the TCP client.
:param proxy: the proxy to be used, if any.
:param timeout: the timeout for connect, read and write operations.
"""
self.proxy = proxy
self._socket = None
self._closing_lock = Lock()
if isinstance(timeout, timedelta):
self.timeout = timeout.seconds
elif isinstance(timeout, (int, float)):
self.timeout = float(timeout)
else:
raise TypeError('Invalid timeout type: {}'.format(type(timeout)))
def _recreate_socket(self, mode):
if self.proxy is None:
self._socket = socket.socket(mode, socket.SOCK_STREAM)
else:
import socks
self._socket = socks.socksocket(mode, socket.SOCK_STREAM)
if type(self.proxy) is dict:
self._socket.set_proxy(**self.proxy)
else: # tuple, list, etc.
self._socket.set_proxy(*self.proxy)
self._socket.settimeout(self.timeout)
def connect(self, ip, port):
"""
Tries connecting forever to IP:port unless an OSError is raised.
:param ip: the IP to connect to.
:param port: the port to connect to.
"""
if ':' in ip: # IPv6
# The address needs to be surrounded by [] as discussed on PR#425
if not ip.startswith('['):
ip = '[' + ip
if not ip.endswith(']'):
ip = ip + ']'
mode, address = socket.AF_INET6, (ip, port, 0, 0)
else:
mode, address = socket.AF_INET, (ip, port)
timeout = 1
while True:
try:
while not self._socket:
self._recreate_socket(mode)
self._socket.connect(address)
break # Successful connection, stop retrying to connect
except OSError as e:
# There are some errors that we know how to handle, and
# the loop will allow us to retry
if e.errno in (errno.EBADF, errno.ENOTSOCK, errno.EINVAL,
errno.ECONNREFUSED):
# Bad file descriptor, i.e. socket was closed, set it
# to none to recreate it on the next iteration
self._socket = None
time.sleep(timeout)
timeout = min(timeout * 2, MAX_TIMEOUT)
else:
raise
def _get_connected(self):
"""Determines whether the client is connected or not."""
return self._socket is not None and self._socket.fileno() >= 0
connected = property(fget=_get_connected)
def close(self):
"""Closes the connection."""
if self._closing_lock.locked():
# Already closing, no need to close again (avoid None.close())
return
with self._closing_lock:
try:
if self._socket is not None:
self._socket.shutdown(socket.SHUT_RDWR)
self._socket.close()
except OSError:
pass # Ignore ENOTCONN, EBADF, and any other error when closing
finally:
self._socket = None
def write(self, data):
"""
Writes (sends) the specified bytes to the connected peer.
:param data: the data to send.
"""
if self._socket is None:
self._raise_connection_reset()
# TODO Timeout may be an issue when sending the data, Changed in v3.5:
# The socket timeout is now the maximum total duration to send all data.
try:
self._socket.sendall(data)
except socket.timeout as e:
raise TimeoutError() from e
except ConnectionError:
self._raise_connection_reset()
except OSError as e:
if e.errno in CONN_RESET_ERRNOS:
self._raise_connection_reset()
else:
raise
def read(self, size):
"""
Reads (receives) a whole block of size bytes from the connected peer.
:param size: the size of the block to be read.
:return: the read data with len(data) == size.
"""
if self._socket is None:
self._raise_connection_reset()
# TODO Remove the timeout from this method, always use previous one
with BufferedWriter(BytesIO(), buffer_size=size) as buffer:
bytes_left = size
while bytes_left != 0:
try:
partial = self._socket.recv(bytes_left)
except socket.timeout as e:
raise TimeoutError() from e
except ConnectionError:
self._raise_connection_reset()
except OSError as e:
if e.errno in CONN_RESET_ERRNOS:
self._raise_connection_reset()
else:
raise
if len(partial) == 0:
self._raise_connection_reset()
buffer.write(partial)
bytes_left -= len(partial)
# If everything went fine, return the read bytes
buffer.flush()
return buffer.raw.getvalue()
def _raise_connection_reset(self):
"""Disconnects the client and raises ConnectionResetError."""
self.close() # Connection reset -> flag as socket closed
raise ConnectionResetError('The server has closed the connection.')
|
Python
| 0.000001
|
@@ -1791,210 +1791,49 @@
-# The address needs to be surrounded by %5B%5D as discussed on PR#425%0A if not ip.startswith('%5B'):%0A ip = '%5B' + ip%0A if not ip.endswith('%5D'):%0A ip = ip + '%5D'%0A
+ip = ip.replace('%5B', '').replace('%5D', '')
%0A
|
03aec1decc66fcb259edb8abac114a5fac17d7ab
|
Fix "NotFound" error in _clear_stacks()
|
tempest/api/orchestration/base.py
|
tempest/api/orchestration/base.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os.path
import yaml
from tempest import clients
from tempest.common.utils import data_utils
from tempest import config
from tempest import exceptions
from tempest.openstack.common import log as logging
import tempest.test
CONF = config.CONF
LOG = logging.getLogger(__name__)
class BaseOrchestrationTest(tempest.test.BaseTestCase):
"""Base test case class for all Orchestration API tests."""
@classmethod
def setUpClass(cls):
super(BaseOrchestrationTest, cls).setUpClass()
cls.os = clients.Manager()
if not CONF.service_available.heat:
raise cls.skipException("Heat support is required")
cls.build_timeout = CONF.orchestration.build_timeout
cls.build_interval = CONF.orchestration.build_interval
cls.orchestration_client = cls.os.orchestration_client
cls.client = cls.orchestration_client
cls.servers_client = cls.os.servers_client
cls.keypairs_client = cls.os.keypairs_client
cls.network_client = cls.os.network_client
cls.volumes_client = cls.os.volumes_client
cls.images_v2_client = cls.os.image_client_v2
cls.stacks = []
cls.keypairs = []
cls.images = []
@classmethod
def _get_default_network(cls):
__, networks = cls.network_client.list_networks()
for net in networks['networks']:
if net['name'] == CONF.compute.fixed_network_name:
return net
@classmethod
def _get_identity_admin_client(cls):
"""Returns an instance of the Identity Admin API client."""
manager = clients.AdminManager(interface=cls._interface)
admin_client = manager.identity_client
return admin_client
@classmethod
def create_stack(cls, stack_name, template_data, parameters={},
environment=None, files=None):
resp, body = cls.client.create_stack(
stack_name,
template=template_data,
parameters=parameters,
environment=environment,
files=files)
stack_id = resp['location'].split('/')[-1]
stack_identifier = '%s/%s' % (stack_name, stack_id)
cls.stacks.append(stack_identifier)
return stack_identifier
@classmethod
def _clear_stacks(cls):
for stack_identifier in cls.stacks:
try:
cls.client.delete_stack(stack_identifier)
except exceptions.NotFound:
pass
for stack_identifier in cls.stacks:
cls.client.wait_for_stack_status(
stack_identifier, 'DELETE_COMPLETE')
@classmethod
def _create_keypair(cls, name_start='keypair-heat-'):
kp_name = data_utils.rand_name(name_start)
__, body = cls.keypairs_client.create_keypair(kp_name)
cls.keypairs.append(kp_name)
return body
@classmethod
def _clear_keypairs(cls):
for kp_name in cls.keypairs:
try:
cls.keypairs_client.delete_keypair(kp_name)
except Exception:
pass
@classmethod
def _create_image(cls, name_start='image-heat-', container_format='bare',
disk_format='iso'):
image_name = data_utils.rand_name(name_start)
__, body = cls.images_v2_client.create_image(image_name,
container_format,
disk_format)
image_id = body['id']
cls.images.append(image_id)
return body
@classmethod
def _clear_images(cls):
for image_id in cls.images:
try:
cls.images_v2_client.delete_image(image_id)
except exceptions.NotFound:
pass
@classmethod
def read_template(cls, name, ext='yaml'):
loc = ["stacks", "templates", "%s.%s" % (name, ext)]
fullpath = os.path.join(os.path.dirname(__file__), *loc)
with open(fullpath, "r") as f:
content = f.read()
return content
@classmethod
def load_template(cls, name, ext='yaml'):
loc = ["stacks", "templates", "%s.%s" % (name, ext)]
fullpath = os.path.join(os.path.dirname(__file__), *loc)
with open(fullpath, "r") as f:
return yaml.safe_load(f)
@classmethod
def tearDownClass(cls):
cls._clear_stacks()
cls._clear_keypairs()
cls._clear_images()
super(BaseOrchestrationTest, cls).tearDownClass()
@staticmethod
def stack_output(stack, output_key):
"""Return a stack output value for a given key."""
return next((o['output_value'] for o in stack['outputs']
if o['output_key'] == output_key), None)
def assert_fields_in_dict(self, obj, *fields):
for field in fields:
self.assertIn(field, obj)
def list_resources(self, stack_identifier):
"""Get a dict mapping of resource names to types."""
resp, resources = self.client.list_resources(stack_identifier)
self.assertEqual('200', resp['status'])
self.assertIsInstance(resources, list)
for res in resources:
self.assert_fields_in_dict(res, 'logical_resource_id',
'resource_type', 'resource_status',
'updated_time')
return dict((r['resource_name'], r['resource_type'])
for r in resources)
def get_stack_output(self, stack_identifier, output_key):
resp, body = self.client.get_stack(stack_identifier)
self.assertEqual('200', resp['status'])
return self.stack_output(body, output_key)
|
Python
| 0.000001
|
@@ -3082,32 +3082,53 @@
in cls.stacks:%0A
+ try:%0A
cls.
@@ -3165,32 +3165,36 @@
+
+
stack_identifier
@@ -3213,16 +3213,77 @@
MPLETE')
+%0A except exceptions.NotFound:%0A pass
%0A%0A @c
|
a3eea95b2f7bbd870651657c1a8b41b19304f546
|
version 4.0.10
|
misura/canon/version.py
|
misura/canon/version.py
|
__version__ = '4.0.9'
|
Python
| 0.000001
|
@@ -16,7 +16,8 @@
4.0.
-9
+10
'%0A
|
218e7f2686eea1b55a28d041fc4df0118c2ca911
|
Update ping_extension.py
|
third-party-synthetic/active-gate-extensions/extension-third-party-ping/src/ping_extension.py
|
third-party-synthetic/active-gate-extensions/extension-third-party-ping/src/ping_extension.py
|
from datetime import datetime
import logging
from ruxit.api.base_plugin import RemoteBasePlugin
from dynatrace import Dynatrace
from dynatrace.synthetic_third_party import SYNTHETIC_EVENT_TYPE_OUTAGE
import pingparsing
log = logging.getLogger(__name__)
class PingExtension(RemoteBasePlugin):
def initialize(self, **kwargs):
# The Dynatrace API client
self.dt_client = Dynatrace(
self.config.get("api_url"), self.config.get("api_token"), log=log, proxies=self.build_proxy_url()
)
self.executions = 0
def build_proxy_url(self):
proxy_address = self.config.get("proxy_address")
proxy_username = self.config.get("proxy_username")
proxy_password = self.config.get("proxy_password")
if proxy_address:
protocol, address = proxy_address.split("://")
proxy_url = f"{protocol}://"
if proxy_username:
proxy_url += proxy_username
if proxy_password:
proxy_url += f":{proxy_password}"
proxy_url += f"@{address}"
return {"https": proxy_url}
return {}
def query(self, **kwargs) -> None:
log.setLevel(self.config.get("log_level"))
target = self.config.get("test_target")
step_title = f"{target}"
test_title = self.config.get("test_name") if self.config.get("test_name") else step_title
location = self.config.get("test_location", "") if self.config.get("test_location") else "ActiveGate"
location_id = location.replace(" ", "_").lower()
frequency = int(self.config.get("frequency")) if self.config.get("frequency") else 15
if self.executions % frequency == 0:
ping_result = ping(target)
log.info(ping_result.as_dict())
success = ping_result.packet_loss_rate is not None and ping_result.packet_loss_rate == 0
response_time = ping_result.rtt_avg or 0
self.dt_client.report_simple_thirdparty_synthetic_test(
engine_name="Ping",
timestamp=datetime.now(),
location_id=location_id,
location_name=location,
test_id=self.activation.entity_id,
test_title=test_title,
step_title=step_title,
schedule_interval=frequency * 60,
success=success,
response_time=response_time,
edit_link=f"#settings/customextension;id={self.plugin_info.name}",
icon_url="https://raw.githubusercontent.com/Dynatrace/dynatrace-api/master/third-party-synthetic/active-gate-extensions/extension-third-party-ping/ping.png",
)
self.dt_client.report_simple_thirdparty_synthetic_test_event(
test_id=self.activation.entity_id,
name=f"Ping failed for {step_title}",
location_id=location_id,
timestamp=datetime.now(),
state="open" if not success else "resolved",
event_type=SYNTHETIC_EVENT_TYPE_OUTAGE,
reason=f"Ping failed for {step_title}. Result: {str(ping_result.as_dict())}",
engine_name="Ping",
)
self.executions += 1
def ping(host: str) -> pingparsing.PingStats:
ping_parser = pingparsing.PingParsing()
transmitter = pingparsing.PingTransmitter()
transmitter.destination = host
transmitter.count = 2
transmitter.timeout = 2000
return ping_parser.parse(transmitter.ping())
|
Python
| 0
|
@@ -546,16 +546,50 @@
ons = 0%0A
+ self.failureDetected = 0%0A%0A
%0A def
@@ -1306,24 +1306,76 @@
t_target%22)%0A%0A
+ failureCount = self.config%5B%22failureCount%22%5D%0A%0A
step
@@ -1984,16 +1984,355 @@
ate == 0
+%0A%0A if not success:%0A self.failureDetected += 1%0A if self.failureDetected %3C failureCount and self.failureDetected %3C self.executions:%0A log.info(%22Overriding state%22)%0A success = True%0A else:%0A self.failureDetected = 0%0A
%0A
@@ -2397,32 +2397,59 @@
self.dt_client.
+third_part_synthetic_tests.
report_simple_th
@@ -3160,16 +3160,28 @@
)%0A
+
%0A
@@ -3200,16 +3200,43 @@
_client.
+third_part_synthetic_tests.
report_s
@@ -3715,32 +3715,49 @@
,%0A )%0A
+ %0A
self.exe
@@ -3771,16 +3771,17 @@
+= 1%0A%0A%0A
+%0A
def ping
|
1421866ac3c4e4f1f09d17019d058aa903597df5
|
Add new feature: find out is current week menu created already
|
modules/menus_reader.py
|
modules/menus_reader.py
|
# -*- coding: utf-8 -*-
from json_reader import *
from config import *
def get_menus_data():
old_data = read_json_from_file(filenames["menus"])
if old_data == None or type(old_data) is not dict: # rewrite old_data and create new recipe dictionary
# initialize new dict
old_data = {}
old_data["menus"] = {}
elif "menus" not in old_data and type(old_data) is dict: # save other data (maybe worthless)
# add new row: recipes
old_data["menus"] = {}
return old_data
def get_menus():
data = get_menus_data()
return data["menus"]
def get_menu(index): #get recipe with spesific index
pass
#return get_menus()[index]
|
Python
| 0.000001
|
@@ -593,15 +593,8 @@
ex%0A%09
-pass%0A%09#
retu
@@ -614,8 +614,82 @@
)%5Bindex%5D
+%0A%0Adef is_week_menu_created(week):%0A%09return week in get_menus()%09# True/False
|
0aae53e23a1a0f63499b8ec8dde5c3a1ca93f187
|
Stop continually warning about a missing API key.
|
modules/sfp_emailrep.py
|
modules/sfp_emailrep.py
|
# -*- coding: utf-8 -*-
# -------------------------------------------------------------------------------
# Name: sfp_emailrep
# Purpose: Searches EmailRep.io for email address reputation.
#
# Author: <bcoles[at]gmail[.]com>
#
# Created: 2019-08-07
# Copyright: (c) bcoles 2019
# Licence: GPL
# -------------------------------------------------------------------------------
import json
import time
from spiderfoot import SpiderFootEvent, SpiderFootPlugin
class sfp_emailrep(SpiderFootPlugin):
meta = {
'name': "EmailRep",
'summary': "Search EmailRep.io for email address reputation.",
'flags': ["apikey"],
'useCases': ["Footprint", "Investigate", "Passive"],
'categories': ["Search Engines"],
'dataSource': {
'website': "https://emailrep.io/",
'model': "FREE_AUTH_LIMITED",
'references': [
"https://docs.emailrep.io/"
],
'apiKeyInstructions': [
"Visit https://emailrep.io/free",
"Request a free API Key",
"The API key will be sent to registered email account on approval"
],
'favIcon': "https://emailrep.io/assets/img/favicon.png",
'logo': "https://emailrep.io/assets/img/logo-light.png",
'description': "Illuminate the \"reputation\" behind an email address.\n"
"EmailRep uses hundreds of factors like domain age, traffic rankings, "
"presence on social media sites, professional networking sites, personal connections, "
"public records, deliverability, data breaches, dark web credential leaks, "
"phishing emails, threat actor emails, and more to answer these types of questions.",
}
}
opts = {
'api_key': '',
}
optdescs = {
'api_key': 'EmailRep API key.',
}
results = None
errorState = False
def setup(self, sfc, userOpts=dict()):
self.sf = sfc
self.results = self.tempStorage()
self.errorState = False
for opt in list(userOpts.keys()):
self.opts[opt] = userOpts[opt]
def watchedEvents(self):
return ['EMAILADDR']
def producedEvents(self):
return ['RAW_RIR_DATA', 'EMAILADDR_COMPROMISED', 'MALICIOUS_EMAILADDR']
# https://emailrep.io/docs/
def query(self, qry):
headers = {
'Accept': "application/json"
}
if self.opts['api_key'] != '':
headers['Key'] = self.opts['api_key']
res = self.sf.fetchUrl(
'https://emailrep.io/' + qry,
headers=headers,
useragent='SpiderFoot',
timeout=self.opts['_fetchtimeout']
)
# Documentation does not indicate rate limit threshold (50 queries/day)
time.sleep(1)
if res['content'] is None:
return None
if res['code'] == '400':
self.error('API error: Bad request')
self.errorState = True
return None
if res['code'] == '401':
self.error('API error: Invalid API key')
self.errorState = True
return None
if res['code'] == '429':
self.error('API error: Too Many Requests')
self.errorState = True
return None
if res['code'] != '200':
self.error('Unexpected reply from EmailRep.io: ' + res['code'])
self.errorState = True
return None
try:
return json.loads(res['content'])
except Exception as e:
self.debug(f"Error processing JSON response: {e}")
return None
def handleEvent(self, event):
eventName = event.eventType
srcModuleName = event.module
eventData = event.data
if self.errorState:
return
if eventData in self.results:
return
self.results[eventData] = True
self.debug(f"Received event, {eventName}, from {srcModuleName}")
if self.opts['api_key'] == '':
self.error("Warning: You enabled sfp_emailrep but did not set an API key! Queries will be rate limited.")
res = self.query(eventData)
if res is None:
return
details = res.get('details')
if not details:
return
credentials_leaked = details.get('credentials_leaked')
if credentials_leaked:
evt = SpiderFootEvent('EMAILADDR_COMPROMISED', eventData + " [Unknown]", self.__name__, event)
self.notifyListeners(evt)
malicious_activity = details.get('malicious_activity')
if malicious_activity:
evt = SpiderFootEvent('MALICIOUS_EMAILADDR', 'EmailRep [' + eventData + ']', self.__name__, event)
self.notifyListeners(evt)
if malicious_activity or credentials_leaked:
evt = SpiderFootEvent('RAW_RIR_DATA', str(res), self.__name__, event)
self.notifyListeners(evt)
# End of sfp_emailrep class
|
Python
| 0
|
@@ -1946,24 +1946,48 @@
tate = False
+%0A errorWarned = False
%0A%0A def se
@@ -4124,24 +4124,49 @@
_key'%5D == ''
+ and not self.errorWarned
:%0A
@@ -4272,16 +4272,52 @@
mited.%22)
+%0A self.errorWarned = True
%0A%0A
|
8fe2a8735b11a834895d8a71fffa11349f890847
|
Fix broken test.
|
test/cypher/cypher_record_test.py
|
test/cypher/cypher_record_test.py
|
#/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2011-2014, Nigel Small
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from py2neo.cypher.core import RecordProducer
def test_record_field_access(graph):
statement = "CREATE (a {name:'Alice',age:33}) RETURN a,a.name as name,a.age as age"
for record in graph.cypher.stream(statement):
alice = record[0]
assert record[1] == alice.properties["name"]
assert record[2] == alice.properties["age"]
assert record["name"] == alice.properties["name"]
assert record["age"] == alice.properties["age"]
assert record.name == alice.properties["name"]
assert record.age == alice.properties["age"]
def test_record_representation(graph):
statement = "CREATE (a {name:'Alice',age:33}) RETURN a,a.name,a.age"
for record in graph.cypher.stream(statement):
alice_id = record[0]._id
assert repr(record) == ("Record(columns=('a', 'a.name', 'a.age'), "
"values=((n%s {age:33,name:\"Alice\"}), 'Alice', 33))" % alice_id)
def test_producer_representation():
producer = RecordProducer(["apple", "banana", "carrot"])
assert repr(producer) == "RecordProducer(columns=('apple', 'banana', 'carrot'))"
def test_producer_length():
producer = RecordProducer(["apple", "banana", "carrot"])
assert len(producer) == 3
def test_producer_column_indexes():
producer = RecordProducer(["apple", "banana", "carrot"])
assert producer.column_indexes == {"apple": 0, "banana": 1, "carrot": 2}
|
Python
| 0.000007
|
@@ -622,16 +622,28 @@
ense.%0A%0A%0A
+import sys%0A%0A
from py2
@@ -1407,16 +1407,57 @@
%5B0%5D._id%0A
+ if sys.version_info %3E= (3,):%0A
@@ -1556,16 +1556,20 @@
+
%22values=
@@ -1600,16 +1600,295 @@
ce%5C%22%7D),
+%22%0A %22'Alice', 33))%22 %25 alice_id)%0A else:%0A assert repr(record) == (%22Record(columns=(u'a', u'a.name', u'a.age'), %22%0A %22values=((n%25s %7Bage:33,name:%5C%22Alice%5C%22%7D), %22%0A %22u
'Alice',
|
fe1d7b70302f56a43bef3c0d400db829510d93da
|
Replace the old tokenizer tests with the refactored attributes.
|
test/test_parser/test_tokenize.py
|
test/test_parser/test_tokenize.py
|
from io import StringIO
from token import NEWLINE, STRING
from jedi._compatibility import u
from jedi import parser
from ..helpers import unittest
class TokenTest(unittest.TestCase):
def test_end_pos_one_line(self):
parsed = parser.Parser(parser.load_grammar(), u('''
def testit():
a = "huhu"
'''))
tok = parsed.module.subscopes[0].statements[0].children[2]
self.assertEqual(tok.end_pos, (3, 14))
def test_end_pos_multi_line(self):
parsed = parser.Parser(parser.load_grammar(), u('''
def testit():
a = """huhu
asdfasdf""" + "h"
'''))
tok = parsed.module.subscopes[0].statements[0].children[2].children[0]
self.assertEqual(tok.end_pos, (4, 11))
def test_simple_no_whitespace(self):
# Test a simple one line string, no preceding whitespace
simple_docstring = '"""simple one line docstring"""'
simple_docstring_io = StringIO(simple_docstring)
tokens = parser.tokenize.generate_tokens(simple_docstring_io.readline)
token_list = list(tokens)
string_token = token_list[0]
self.assertEqual(string_token._preceding_whitespace, '')
self.assertEqual(string_token.string, '"""simple one line docstring"""')
def test_simple_with_whitespace(self):
# Test a simple one line string with preceding whitespace and newline
simple_docstring = ' """simple one line docstring""" \r\n'
simple_docstring_io = StringIO(simple_docstring)
tokens = parser.tokenize.generate_tokens(simple_docstring_io.readline)
token_list = list(tokens)
string_token = token_list[0]
self.assertEqual(string_token._preceding_whitespace, ' ')
self.assertEqual(string_token.string, '"""simple one line docstring"""')
self.assertEqual(string_token.type, STRING)
newline_token = token_list[1]
self.assertEqual(newline_token._preceding_whitespace, ' ')
self.assertEqual(newline_token.type, NEWLINE)
def test_function_whitespace(self):
# Test function definition whitespace identification
fundef = '''def test_whitespace(*args, **kwargs):
x = 1
if x > 0:
print(True)
'''
fundef_io = StringIO(fundef)
tokens = parser.tokenize.generate_tokens(fundef_io.readline)
token_list = list(tokens)
print(token_list)
for t in token_list:
if t.string == 'test_whitespace':
self.assertEqual(t._preceding_whitespace, ' ')
if t.string == '(':
self.assertEqual(t._preceding_whitespace, '')
if t.string == '*':
self.assertEqual(t._preceding_whitespace, '')
if t.string == '**':
self.assertEqual(t._preceding_whitespace, ' ')
if t.string == 'print':
self.assertEqual(t._preceding_whitespace, ' ')
if t.string == 'if':
self.assertEqual(t._preceding_whitespace, ' ')
def test_tokenizer_with_string_literal_backslash():
import jedi
c = jedi.Script("statement = u'foo\\\n'; statement").goto_definitions()
assert c[0]._name.parent.obj == 'foo'
|
Python
| 0
|
@@ -1119,37 +1119,22 @@
g_token.
-_
pre
-ceding_whitespace
+fix
, '')%0A
@@ -1161,38 +1161,37 @@
al(string_token.
-string
+value
, '%22%22%22simple one
@@ -1646,37 +1646,22 @@
g_token.
-_
pre
-ceding_whitespace
+fix
, ' ')%0A
@@ -1698,22 +1698,21 @@
g_token.
-string
+value
, '%22%22%22si
@@ -1869,37 +1869,22 @@
e_token.
-_
pre
-ceding_whitespace
+fix
, ' ')%0A
@@ -2352,30 +2352,29 @@
if t.
-string
+value
== 'test_wh
@@ -2419,37 +2419,22 @@
Equal(t.
-_
pre
-ceding_whitespace
+fix
, ' ')%0A
@@ -2445,30 +2445,29 @@
if t.
-string
+value
== '(':%0A
@@ -2498,37 +2498,22 @@
Equal(t.
-_
pre
-ceding_whitespace
+fix
, '')%0A
@@ -2523,30 +2523,29 @@
if t.
-string
+value
== '*':%0A
@@ -2576,37 +2576,22 @@
Equal(t.
-_
pre
-ceding_whitespace
+fix
, '')%0A
@@ -2601,30 +2601,29 @@
if t.
-string
+value
== '**':%0A
@@ -2655,37 +2655,22 @@
Equal(t.
-_
pre
-ceding_whitespace
+fix
, ' ')%0A
@@ -2681,30 +2681,29 @@
if t.
-string
+value
== 'print':
@@ -2738,37 +2738,22 @@
Equal(t.
-_
pre
-ceding_whitespace
+fix
, '
@@ -2775,22 +2775,21 @@
if t.
-string
+value
== 'if'
@@ -2829,29 +2829,14 @@
l(t.
-_
pre
-ceding_whitespace
+fix
, '
|
c4c2b7936a89ffbf97d63e4b5d33ea59201c94b0
|
Add tests for mine.send
|
tests/integration/modules/mine.py
|
tests/integration/modules/mine.py
|
'''
Test the salt mine system
'''
import integration
class MineTest(integration.ModuleCase):
'''
Test the mine system
'''
def test_get(self):
'''
test mine.get
'''
self.assertTrue(self.run_function('mine.update'))
self.assertTrue(self.run_function('mine.get', ['minion', 'test.ping']))
|
Python
| 0
|
@@ -185,16 +185,32 @@
mine.get
+ and mine.update
%0A
@@ -352,8 +352,322 @@
ing'%5D))%0A
+%0A def test_send(self):%0A '''%0A test mine.send%0A '''%0A self.assertFalse(self.run_function('mine.send', %5B'foo.__spam_and_cheese'%5D))%0A self.assertTrue(self.run_function('mine.send', %5B'test.retcode'%5D))%0A self.assertTrue(self.run_function('mine.get', %5B'minion', 'test.retcode'%5D))%0A
|
e7697caf374b60d69cdd0d089900ac7b300b02fa
|
boolean evals of expressions return strings
|
tests/python_tests/filter_test.py
|
tests/python_tests/filter_test.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from nose.tools import *
from utilities import Todo
import mapnik2
if hasattr(mapnik2,'Expression'):
mapnik2.Filter = mapnik2.Expression
map_ = '''<Map>
<Style name="s">
<Rule>
<Filter><![CDATA[(([region]>=0) and ([region]<=50))]]></Filter>
</Rule>
<Rule>
<Filter><![CDATA[([region]>=0) and ([region]<=50)]]></Filter>
</Rule>
<Rule>
<Filter>
<![CDATA[
([region] >= 0)
and
([region] <= 50)
]]>
</Filter>
</Rule>
<Rule>
<Filter>([region]>=0) and ([region]<=50)</Filter>
</Rule>
<Rule>
<Filter>
([region] >= 0)
and
([region] <= 50)
</Filter>
</Rule>
</Style>
<Style name="s2" filter-mode="first">
<Rule>
</Rule>
<Rule>
</Rule>
</Style>
</Map>'''
def test_filter_init():
m = mapnik2.Map(1,1)
mapnik2.load_map_from_string(m,map_)
filters = []
filters.append(mapnik2.Filter("([region]>=0) and ([region]<=50)"))
filters.append(mapnik2.Filter("(([region]>=0) and ([region]<=50))"))
filters.append(mapnik2.Filter("((([region]>=0) and ([region]<=50)))"))
filters.append(mapnik2.Filter('((([region]>=0) and ([region]<=50)))'))
filters.append(mapnik2.Filter('''((([region]>=0) and ([region]<=50)))'''))
filters.append(mapnik2.Filter('''
((([region]>=0)
and
([region]<=50)))
'''))
filters.append(mapnik2.Filter('''
([region]>=0)
and
([region]<=50)
'''))
filters.append(mapnik2.Filter('''
([region]
>=
0)
and
([region]
<=
50)
'''))
s = m.find_style('s')
for r in s.rules:
filters.append(r.filter)
first = filters[0]
for f in filters:
eq_(str(first),str(f))
s = m.find_style('s2')
eq_(s.filter_mode,mapnik2.filter_mode.FIRST)
def test_regex_match():
f = mapnik2.Feature(0)
f["name"] = 'test'
expr = mapnik2.Expression("[name].match('test')")
eq_(expr.evaluate(f),1) # 1 == True
def test_unicode_regex_match():
f = mapnik2.Feature(0)
f["name"] = 'Québec'
expr = mapnik2.Expression("[name].match('Québec')")
eq_(expr.evaluate(f),1) # 1 == True
def test_regex_replace():
f = mapnik2.Feature(0)
f["name"] = 'test'
expr = mapnik2.Expression("[name].replace('(\B)|( )','$1 ')")
eq_(expr.evaluate(f),'t e s t')
def test_unicode_regex_replace():
f = mapnik2.Feature(0)
f["name"] = 'Québec'
expr = mapnik2.Expression("[name].replace('(\B)|( )','$1 ')")
eq_(expr.evaluate(f),'Q u é b e c')
|
Python
| 0.999997
|
@@ -2237,33 +2237,35 @@
xpr.evaluate(f),
-1
+'1'
) # 1 == True%0A%0Ad
@@ -2428,17 +2428,19 @@
uate(f),
-1
+'1'
) # 1 ==
|
08d1873e39531a8cb453fab91e0bb1c95a236cd2
|
Update Copyright
|
tests/test_cli_write_hierarchy.py
|
tests/test_cli_write_hierarchy.py
|
#!/usr/bin/env python
"""Test that hierarchy below specified GO terms is printed."""
from __future__ import print_function
__copyright__ = "Copyright (c) 2017-2018, DV Klopfenstein. Haiboa Tang. All rights reserved."
from goatools.cli.wr_hierarchy import WrHierCli
# --o Output file in ASCII text format
# --no_indent Do not indent GO terms
# --max_indent max depth for printing relative to GO Term
# --num_child Print count of total number of children for each GO
# --concise If a branch has already been printed, do not re-print.
# Print '===' instead of dashes to note the point of compression
def test_cli():
"""Add and remove markers for a file."""
# pylint: disable=bad-whitespace
args_exp = [
# args exp_set expected_dict
# -------- ------- ---------------------
([], {'dag':'go-basic.obo', 'dash_len':6}),
(['--dag=go-basic.obo'], {'dag':'go-basic.obo', 'dash_len':6}),
(['-o rpt.txt'], {'dag':'go-basic.obo', 'dash_len':6, 'o':'rpt.txt'}),
(['--max_indent=7'], {'dag':'go-basic.obo', 'dash_len':6, 'max_indent':7}),
(['--concise'], {'dag':'go-basic.obo', 'dash_len':6, 'concise':True}),
(['--no_indent'], {'dag':'go-basic.obo', 'dash_len':6, 'no_indent':True}),
(['--concise', '--no_indent'], {'dag':'go-basic.obo', 'dash_len':6,
'concise':True, 'no_indent':True}),
]
for args, exp_dict in args_exp:
print("ARGS={ARGS}".format(ARGS=args))
print("EXP={EXP}".format(EXP=exp_dict))
obj = WrHierCli(args)
print("DCT: {DCT}".format(DCT=obj.kws))
print("WWWWWWWWWWWWWWWWWWW WrHierCli", obj.kws)
assert obj.kws == exp_dict, "DCT: ACT({}) != EXP({})".format(obj.kws, exp_dict)
print("")
if __name__ == '__main__':
test_cli()
# Copyright (c) 2017-2018, DV Klopfenstein, Haibao Tang. All rights reserved.
|
Python
| 0
|
@@ -149,33 +149,33 @@
ght (c) 2017-201
-8
+9
, DV Klopfenstei
@@ -1990,9 +1990,9 @@
-201
-8
+9
, DV
|
43392fd2dd7d8775ef65a0d0767d7e9a3e170f6d
|
Fix test in Python 2.6
|
tests/transport/threaded/tests.py
|
tests/transport/threaded/tests.py
|
import mock
import os
import time
from tempfile import mkstemp
from raven.utils.testutils import TestCase
from raven.base import Client
from raven.transport.threaded import ThreadedHTTPTransport
from raven.utils.urlparse import urlparse
class DummyThreadedScheme(ThreadedHTTPTransport):
def __init__(self, *args, **kwargs):
super(ThreadedHTTPTransport, self).__init__(*args, **kwargs)
self.events = []
self.send_delay = 0
def send_sync(self, data, headers, success_cb, failure_cb):
# delay sending the message, to allow us to test that the shutdown
# hook waits correctly
time.sleep(self.send_delay)
self.events.append((data, headers, success_cb, failure_cb))
class LoggingThreadedScheme(ThreadedHTTPTransport):
def __init__(self, filename, *args, **kwargs):
super(LoggingThreadedScheme, self).__init__(*args, **kwargs)
self.filename = filename
def send_sync(self, data, headers, success_cb, failure_cb):
with open(self.filename, 'a') as log:
log.write("{} {}\n".format(os.getpid(), data['message']))
class ThreadedTransportTest(TestCase):
def setUp(self):
self.url = "threaded+http://some_username:some_password@localhost:8143/1"
self.client = Client(dsn=self.url)
@mock.patch('raven.transport.http.HTTPTransport.send')
def test_does_send(self, send):
self.client.captureMessage(message='foo')
time.sleep(0.1)
# TODO: This test could be more precise by ensuring it's sending the same params that are sent
# to the ThreadedHTTPTransport.send() method
self.assertEqual(send.call_count, 1)
def test_shutdown_waits_for_send(self):
url = urlparse(self.url)
transport = DummyThreadedScheme(url)
transport.send_delay = 0.5
data = self.client.build_msg('raven.events.Message', message='foo')
transport.async_send(data, None, None, None)
time.sleep(0.1)
# this should wait for the message to get sent
transport.get_worker().main_thread_terminated()
self.assertEqual(len(transport.events), 1)
def test_fork_with_active_worker(self):
# Test threaded transport when forking with an active worker.
# Forking a process doesn't clone the worker thread - make sure
# logging from both processes still works.
event1 = self.client.build_msg('raven.events.Message', message='parent')
event2 = self.client.build_msg('raven.events.Message', message='child')
url = urlparse(self.url)
_, filename = mkstemp()
try:
transport = LoggingThreadedScheme(filename, url)
# Log from the parent process - starts the worker thread
transport.async_send(event1, None, None, None)
childpid = os.fork()
if childpid == 0:
# Log from the child process
transport.async_send(event2, None, None, None)
time.sleep(0.1)
os._exit(0)
# Wait for the child process to finish
os.waitpid(childpid, 0)
self.assertTrue(os.path.isfile(filename))
with open(filename, 'r') as logfile:
events = dict(x.strip().split() for x in logfile.readlines())
# Check parent and child both logged successfully
self.assertEqual(events, {str(os.getpid()): 'parent',
str(childpid): 'child'})
finally:
os.remove(filename)
|
Python
| 0.002774
|
@@ -1069,11 +1069,13 @@
e(%22%7B
+0
%7D %7B
+1
%7D%5Cn%22
@@ -2590,17 +2590,18 @@
-_
+fd
, filena
@@ -2628,16 +2628,41 @@
try:%0A
+ os.close(fd)%0A
|
4d38d318f4d4fc909c3ca0ea33530cf3f1d66991
|
Implement error handling test for listing upgrades
|
tests/unit/modules/zypper_test.py
|
tests/unit/modules/zypper_test.py
|
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Bo Maryniuk <bo@suse.de>`
'''
# Import Python Libs
from __future__ import absolute_import
# Import Salt Testing Libs
from salttesting import TestCase, skipIf
from salttesting.mock import (
MagicMock,
patch,
NO_MOCK,
NO_MOCK_REASON
)
import os
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
def get_test_data(filename):
'''
Return static test data
'''
return open(os.path.join(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'zypp'), filename)).read()
# Import Salt Libs
from salt.modules import zypper
# Globals
zypper.__salt__ = {}
@skipIf(NO_MOCK, NO_MOCK_REASON)
class ZypperTestCase(TestCase):
'''
Test cases for salt.modules.zypper
'''
def test_list_products(self):
'''
List products test.
'''
ref_out = get_test_data('zypper-products.xml')
with patch.dict(zypper.__salt__, {'cmd.run': MagicMock(return_value=ref_out)}):
products = zypper.list_products()
assert(len(products) == 5)
assert([u'SLES', u'SLES', u'SUSE-Manager-Proxy',
u'SUSE-Manager-Server',
u'sle-manager-tools-beta'] == sorted([prod['name'] for prod in products]))
assert('SUSE LLC <https://www.suse.com/>' in [product['vendor'] for product in products])
assert([False, False, False, False, True] ==
sorted([product['isbase'] for product in products]))
assert([False, False, False, False, True] ==
sorted([product['installed'] for product in products]))
assert([u'0', u'0', u'0', u'0', u'0'] ==
sorted([product['release'] for product in products]))
assert([False, False, False, False, u'sles'] ==
sorted([product['productline'] for product in products]))
assert([1509408000, 1522454400, 1522454400, 1730332800, 1730332800] ==
sorted([product['eol_t'] for product in products]))
def test_refresh_db(self):
'''
Test if refresh DB handled correctly
'''
ref_out = [
"Repository 'openSUSE-Leap-42.1-LATEST' is up to date.",
"Repository 'openSUSE-Leap-42.1-Update' is up to date.",
"Retrieving repository 'openSUSE-Leap-42.1-Update-Non-Oss' metadata",
"Forcing building of repository cache",
"Building repository 'openSUSE-Leap-42.1-Update-Non-Oss' cache ..........[done]",
"Building repository 'salt-dev' cache",
"All repositories have been refreshed."
]
run_out = {
'stderr': '', 'stdout': '\n'.join(ref_out), 'retcode': 0
}
with patch.dict(zypper.__salt__, {'cmd.run_all': MagicMock(return_value=run_out)}):
result = zypper.refresh_db()
self.assertEqual(result.get("openSUSE-Leap-42.1-LATEST"), False)
self.assertEqual(result.get("openSUSE-Leap-42.1-Update"), False)
self.assertEqual(result.get("openSUSE-Leap-42.1-Update-Non-Oss"), True)
if __name__ == '__main__':
from integration import run_tests
run_tests(ZypperTestCase, needs_daemon=False)
|
Python
| 0
|
@@ -297,16 +297,67 @@
EASON%0A)%0A
+from salt.exceptions import CommandExecutionError%0A%0A
import o
@@ -833,16 +833,998 @@
'''%0A%0A
+ def test_list_upgrades_error_handling(self):%0A '''%0A Test error handling in the list package upgrades.%0A :return:%0A '''%0A # Test handled errors%0A ref_out = %7B%0A 'stderr': 'Some handled zypper internal error',%0A 'retcode': 1%0A %7D%0A with patch.dict(zypper.__salt__, %7B'cmd.run_all': MagicMock(return_value=ref_out)%7D):%0A try:%0A zypper.list_upgrades(refresh=False)%0A except CommandExecutionError as error:%0A assert (error.message == ref_out%5B'stderr'%5D)%0A%0A # Test unhandled error%0A ref_out = %7B%0A 'retcode': 1%0A %7D%0A with patch.dict(zypper.__salt__, %7B'cmd.run_all': MagicMock(return_value=ref_out)%7D):%0A try:%0A zypper.list_upgrades(refresh=False)%0A except CommandExecutionError as error:%0A assert (error.message == 'Zypper returned non-zero system exit. See Zypper logs for more details.')%0A%0A%0A
def
|
de93238fb16173f9d5cca3ef67106758e8039a97
|
Fix test
|
thinc/tests/backends/test_lstm.py
|
thinc/tests/backends/test_lstm.py
|
from thinc.backends.jax_ops import lstm_weights_forward, backprop_lstm_weights
from thinc.backends.jax_ops import lstm_gates_forward, backprop_lstm_gates
import numpy.testing
from hypothesis import given, settings
from ..strategies import ndarrays_of_shape
try:
import jax
except ImportError:
has_jax = False
MAX_EXAMPLES = 20
nL = 6
nB = 3
nO = 4
nI = 2
t = 3
def assert_arrays_equal(arrays1, arrays2):
assert len(arrays1) == len(arrays2)
shapes1 = [tuple(a.shape) for a in arrays1]
shapes2 = [tuple(a.shape) for a in arrays2]
assert shapes1 == shapes2
for arr1, arr2 in zip(arrays1, arrays2):
assert arr1.shape == arr2.shape
numpy.testing.assert_allclose(arr1, arr2, rtol=0.001, atol=0.001)
# See thinc/backends/jax_ops for notation
@pytest.mark.skipif(not has_jax, reason="needs Jax")
@settings(max_examples=MAX_EXAMPLES, deadline=None)
@given(
Xt3=ndarrays_of_shape((nB, nI), dtype="f"),
Yt2=ndarrays_of_shape((nB, nO), dtype="f"),
dAt3=ndarrays_of_shape((nB, nO * 4), dtype="f"),
W=ndarrays_of_shape((nO * 4, nO + nI), dtype="f"),
b=ndarrays_of_shape((nO * 4,), dtype="f"),
)
def test_lstm_weights_gradients(Xt3, Yt2, W, b, dAt3):
At3, jax_backprop = jax.vjp(lstm_weights_forward, Xt3, Yt2, W, b)
jax_grads = jax_backprop(dAt3)
St3 = jax.numpy.hstack((Xt3, Yt2))
our_grads = backprop_lstm_weights(dAt3, (St3, W, b))
assert_arrays_equal(our_grads, jax_grads)
@pytest.mark.skipif(not has_jax, reason="needs Jax")
@settings(max_examples=MAX_EXAMPLES, deadline=None)
@given(
At3=ndarrays_of_shape((nB, nO * 4), dtype="f"),
Ct2=ndarrays_of_shape((nB, nO), dtype="f"),
dYt3=ndarrays_of_shape((nB, nO), dtype="f"),
dCt3=ndarrays_of_shape((nB, nO), dtype="f"),
)
def test_lstm_gates_gradients(At3, Ct2, dYt3, dCt3):
# At3 = (At3 * 0) + 1
# Ct2 = (Ct2 * 0) + 1
# dYt3 = (dYt3 * 0) + 1
# dCt3 = (dCt3 * 0) + 1
(Yt3, Ct3, Gt3), get_jax_grads = jax.vjp(lstm_gates_forward, At3, Ct2)
jax_grads = get_jax_grads((dYt3, dCt3, Gt3 * 0))
Yt3, Ct3, Gt3 = lstm_gates_forward(At3, Ct2)
our_grads = backprop_lstm_gates(dYt3, dCt3, Gt3, Ct3, Ct2)
assert_arrays_equal(our_grads, jax_grads)
|
Python
| 0.000004
|
@@ -167,16 +167,30 @@
.testing
+%0Aimport pytest
%0A%0Afrom h
@@ -286,16 +286,35 @@
ort jax%0A
+ has_jax = True%0A
except I
|
9d798167ca1a8909fd8fb0845a8f55e761e25a1e
|
Update server
|
Server.py
|
Server.py
|
import sublime_plugin, sublime, sys, os, json, socketserver, socket
from sublime import Region
from functools import partial
from threading import Thread
from GulpServer.Utils import ignore, parse_commands
from GulpServer.Settings import Settings
from GulpServer.Logging import Console
END_OF_MESSAGE = b'\n'[0]
IS_FAILURE = 0
IS_SUCCESS = 1
ACTION_UPDATE = 2
ACTION_REMOVE = 4
ACTION_RESET = 8
ON_STATUS_BAR = 2
HOST = '127.0.0.1'
PORT = 30048
on_received_callbacks = []
# Add a callback when data is received
def on_received(callback):
""" Add a callback to the server's on_receive event """
global on_received_callbacks
on_received_callbacks = [callback]
# on_received_callbacks.append(callback)
class ThreadedTCPServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
    """TCP server that handles each connection in its own thread.

    Keeps a registry of connected request handlers (``clients``) so messages
    can be sent to one client by id or broadcast to all of them.
    """

    def __init__(self, server_address, RequestHandlerClass):
        socketserver.TCPServer.__init__(self, server_address, RequestHandlerClass)
        # Connected ThreadedTCPRequestHandler instances.
        self.clients = []

    def add_client(self, client):
        """Register a connected client handler."""
        # Bug fix: previously appended to the module-level ``server`` global,
        # which raises NameError / targets the wrong object for any instance
        # that is not the global singleton.
        self.clients.append(client)

    def remove_client(self, client):
        """Deregister a client handler; unknown clients are ignored."""
        if client in self.clients:
            self.clients.remove(client)

    def send_all(self, data):
        """ Send data to all clients """
        for client in self.clients:
            client.send(data)

    def send(self, data, id_name):
        """ Send data to a specific client """
        for client in self.clients:
            if client.id == id_name:
                client.send(data)

    def close_requests(self):
        """ Close all requests of the server """
        for client in self.clients:
            client.finish()
        self.clients = []
class ThreadedTCPRequestHandler(socketserver.BaseRequestHandler):
    """ Server request handler """
    # Text encoding used for all wire traffic.
    encoding = 'UTF-8'
    def handle(self):
        # Per-connection loop: expect a handshake first, then JSON commands.
        self.should_receieve = True
        self.closed = False
        with ignore(Exception, origin="ThreadedTCPRequestHandler.handle"):
            # The first message must be a handshake identifying the client.
            data_bytes = self.recvall()
            handshake = json.loads(data_bytes.decode('UTF-8'))
            if handshake.get('id'):
                self.id = handshake['id']
                self.server.add_client(self)
                console.log('"{0}"'.format(self.id), 'connected', '- Total number connections:', len(self.server.clients))
            else:
                # No id supplied: reject the connection.
                return self.finish()
            while self.should_receieve:
                data_bytes = self.recvall()
                if not data_bytes:
                    break
                # Sockets may queue messages and send them as a single message
                # In order to get each JSON object separately, data_bytes must be
                # converted to a string and split by END_OF_MESSAGE. The parse_commands
                # function will do that and will also run json.loads on each string
                commands = parse_commands(data_bytes)
                for command in commands:
                    for callback in on_received_callbacks:
                        # A failing subscriber must not kill the connection.
                        with ignore(Exception, origin="ThreadedTCPRequestHandler.handle"):
                            callback(command)
        self.finish()
    def finish(self):
        """ Tie up any loose ends with the request """
        # If the client has not been closed for some reason, close it
        if not self.closed:
            self.request.close()
        # Remove self from list of server clients
        self.server.remove_client(self)
        self.closed = True
        if not hasattr(self, 'id'):
            return console.log('Disconnected', '- Total number of connections', len(self.server.clients))
        console.log('"{0}"'.format(self.id), 'disconnected', '- Total number of connections', len(self.server.clients))
    def send(self, data):
        # Send data to the client
        with ignore(Exception, origin='ThreadedTCPRequestHandler.send'):
            data = sublime.encode_value(data)
            self.request.sendall((data).encode(self.encoding))
            # Success: skip the cleanup below.
            return
        # Sending failed (exception was suppressed): drop the connection.
        self.finish()
    def recvall(self, buffer_size=4096):
        # Receive until a terminating END_OF_MESSAGE byte; b'' on EOF/error.
        try:
            data_bytes = self.request.recv(buffer_size)
            if not data_bytes:
                return b''
            # Keep receiving until the end of message is hit
            while data_bytes[-1] != END_OF_MESSAGE:
                data_bytes += self.request.recv(buffer_size)
        except Exception as ex:
            console.log('Receiving error', ex)
            return b''
        return data_bytes
# Module-level singleton server instance and its serving thread.
server = None
server_thread = None
def start_server():
    """ Start the server """
    global server, server_thread
    if server != None:
        return console.log('Server is already running')
    server = ThreadedTCPServer((HOST, PORT), ThreadedTCPRequestHandler)
    # serve_forever runs on a daemon thread so it cannot block editor exit.
    server_thread = Thread(target=server.serve_forever, daemon=True)
    server_thread.start()
    console.log('Server started')
def stop_server():
    """ Stop the server """
    global server
    if server == None:
        return console.log('Server is already shutdown')
    # Close every live client request before shutting the listener down.
    server.close_requests()
    server.shutdown()
    server = None
    server_thread = None
    console.log('Server stopped')
class StartServerCommand(sublime_plugin.ApplicationCommand):
    """Sublime command that boots the TCP server asynchronously."""

    def run(self):
        # Defer startup so a previously-bound socket has time to be released.
        sublime.set_timeout_async(start_server, 2000)

    def is_enabled(self):
        # Enabled only when no server is live but a previous serving thread
        # exists and has finished running.
        if server != None:
            return False
        if server_thread == None:
            return False
        return not server_thread.is_alive()
class StopServerCommand(sublime_plugin.ApplicationCommand):
    """Sublime command that shuts the TCP server down."""

    def run(self):
        stop_server()

    def is_enabled(self):
        # Enabled only while a server exists and its thread is still running.
        return (server != None
                and server_thread != None
                and server_thread.is_alive())
# Initialised in plugin_loaded(); console is the plugin's logger.
user_settings = None
console = None
def plugin_loaded():
    # Setting a timeout will ensure the socket is clear for reuse
    sublime.set_timeout_async(start_server, 2000)
    global PORT, user_settings, console
    console = Console()
    user_settings = Settings()
    # NOTE(review): PORT is read from settings AFTER start_server has been
    # scheduled; the 2 s delay means the new value is normally picked up
    # first, but the ordering looks fragile — confirm.
    PORT = user_settings.get('port')
def plugin_unloaded():
    # Shut the server down when Sublime reloads/unloads the plugin.
    stop_server()
|
Python
| 0.000001
|
@@ -2180,24 +2180,28 @@
f.recvall()%0A
+%09%09%09%09
%0A%09%09%09%09if not
@@ -3521,16 +3521,69 @@
ish()%0A%09%0A
+%09# Keep receiving until an END_OF_MESSAGE is hit. %0A%09%0A
%09def rec
@@ -3704,19 +3704,26 @@
%09return
-b''
+data_bytes
%0A%09%09%09%0A%09%09%09
|
b7eefbea83768bd006c677aa5d69cf2a24e6e909
|
Fix shebang line.
|
aeneas.py
|
aeneas.py
|
#!/usr/bin/env python2
import argparse
from os import environ
def bool_from_str(s):
    """Coerce *s* to a bool.

    Strings are matched case-insensitively against common truthy/falsy
    spellings ('true'/'t'/'1'/'y' and 'false'/'f'/'0'/'n'); any other value
    falls back to ordinary truthiness via ``bool()``.
    """
    # ``str`` instead of the Python-2-only ``basestring`` so the module also
    # runs under Python 3 (the shebang no longer pins python2).
    if isinstance(s, str):
        s = s.lower()
        if s in ['true', 't', '1', 'y']:
            return True
        if s in ['false', 'f', '0', 'n']:
            return False
    return bool(s)


# Runtime configuration, overridable through the environment.
AENEAS_DEBUG = bool_from_str(environ.get('AENEAS_DEBUG', False))

DEFAULT_AENEAS_PORT = 4935
AENEAS_PORT = environ.get('AENEAS_PORT', DEFAULT_AENEAS_PORT)
try:
    AENEAS_PORT = int(AENEAS_PORT)
except (TypeError, ValueError):
    # Narrowed from a bare ``except``: only conversion failures fall back to
    # the default port; anything else is a real bug and should propagate.
    AENEAS_PORT = DEFAULT_AENEAS_PORT


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--debug', help='Run Flask in debug mode, with '
                                        'auto-reload and debugger page on '
                                        'errors.',
                        action='store_true', default=AENEAS_DEBUG)
    parser.add_argument('--port', help='The port on which to accept incoming '
                                       'HTTP requests. Default is {}.'.format(
                                           AENEAS_PORT),
                        action='store', default=AENEAS_PORT, type=int)

    args = parser.parse_args()

    print('aeneas.py')
|
Python
| 0.000003
|
@@ -18,9 +18,8 @@
thon
-2
%0A%0Aim
|
b55a4d1711fd9993e22ba6c5e3fe57380857d160
|
Include firstbyte and terminator in wire logging.
|
aredis.py
|
aredis.py
|
import asyncore
import logging
import optparse
import socket
import sys
# logging.NullHandler appeared in Python 2.7; provide a no-op fallback for
# older interpreters.
try:
    NullHandler = logging.NullHandler
except AttributeError:
    class NullHandler(logging.Handler):
        def emit(self, record): pass

# Library logger with a null handler so the importing app controls output.
log = logging.getLogger(__name__)
log.addHandler(NullHandler())

getLogger = logging.getLogger
name = log.name

# Child loggers: decoded commands ("protocol") and raw bytes ("wire").
protolog = logging.getLogger("%s.protocol" % __name__)
wirelog = logging.getLogger("%s.wire" % __name__)

# Base error for this module.
class Error(Exception): pass
# Raised when no reply handler matches a reply's first byte.
class HandlerError(Error): pass
# Raised for Redis error ("-...") replies.
class ReplyError(Error): pass
class Redis(asyncore.dispatcher):
    """Minimal asynchronous Redis client built on asyncore.

    Outgoing commands accumulate in ``outbuf`` and are flushed when the
    socket is writable; incoming bytes accumulate in ``inbuf`` and are
    dispatched to a per-reply-type handler chosen by the reply's first byte.
    """
    terminator = "\r\n"
    replyhandlers = {}
    def __init__(self, sock=None, map=None):
        asyncore.dispatcher.__init__(self, sock=sock, map=map)
        self.outbuf = ''
        self.inbuf = ''
        # Handler currently consuming an in-progress reply, if any.
        self.replyhandler = None
    def connect(self, host="localhost", port=6379, db=0):
        # NOTE(review): ``db`` is only logged here, never SELECTed — callers
        # must issue SELECT themselves (see main()); confirm intended.
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        asyncore.dispatcher.connect(self, (host, port))
        self.socket.setblocking(0)
        self.set_socket(self.socket, self._map)
        log.debug("connected to %s:%d/%d", host, port, db)
    def do(self, command, *args):
        # Queue *command* with *args* using the Redis multi-bulk encoding
        # ("*<n>" then "$<len>"/value pairs, CRLF-joined).
        arglen = 1 + len(args)
        request = ["*%d" % arglen]
        for arg in (command,) + args:
            arg = str(arg)
            request.extend(["$%d" % len(arg), arg])
        request.append('')
        msg = ["%s"]
        msg.extend("%r" for arg in args)
        getLogger("%s.protocol.send" % name).debug(
            ' '.join(msg), command, *args)
        request = self.terminator.join(request)
        getLogger("%s.wire.send" % name).debug("%r", request)
        self.outbuf += request
    def log(self, message):
        log.debug(message)
    def log_info(self, message, type=None):
        # asyncore hook; route its messages through our logger.
        log.debug(message)
    def writable(self):
        # Only ask for write events while output is pending.
        return self.outbuf and True
    def handle_connect(self): pass
    def handle_close(self):
        self.close()
    def handle_error(self):
        # HandlerError is expected (unimplemented reply type): log and move
        # on; anything else gets asyncore's default handling.
        t, v, tb = sys.exc_info()
        if isinstance(v, HandlerError):
            log.info(v.args[0])
        else:
            asyncore.dispatcher.handle_error(self)
    def handle_write(self):
        sent = self.send(self.outbuf)
        self.outbuf = self.outbuf[sent:]
    def handle_read(self):
        chunk = self.recv(8192)
        self.inbuf += chunk
        if self.inbuf:
            self.dispatch()
    def dispatch(self):
        # Pick a handler from the reply's first byte, then let it consume
        # input; a non-None return means the reply is complete and a further
        # reply may already be buffered.
        if self.replyhandler is None:
            firstbyte = self.inbuf[0]
            try:
                self.replyhandler = self.replyhandlers[firstbyte]
            except KeyError:
                pass
            if self.replyhandler is None:
                raise HandlerError(
                    "unrecognized handler for reply type %r" % firstbyte)
            self.inbuf = self.inbuf[1:]
        if self.replyhandler(self) is not None:
            self.replyhandler = None
            if self.inbuf:
                self.dispatch()
    def handle_singleline_reply(self):
        # Wait for a full terminated line; returning None means "need more
        # data" to dispatch().
        idx = self.inbuf.find(self.terminator)
        if idx < 0:
            return
        reply = self.inbuf[:idx]
        self.inbuf = self.inbuf[idx + len(self.terminator):]
        name = log.name
        getLogger("%s.wire.receive" % name).debug("%r", reply)
        getLogger("%s.protocol.receive" % name).debug("%r", reply.strip())
        return reply
    def handle_error_reply(self):
        # NOTE(review): the [4:] slice presumably strips an "ERR " prefix,
        # and this raises TypeError if the line is still incomplete (None
        # return above) — confirm.
        reply = self.handle_singleline_reply()[4:]
        raise ReplyError(reply)
    # Reply types not implemented yet.
    handle_integer_reply = None
    handle_bulk_reply = None
    handle_multibulk_reply = None
    # First reply byte -> handler (Redis protocol type markers).
    replyhandlers = {
        '+': handle_singleline_reply,
        '-': handle_error_reply,
        ':': handle_integer_reply,
        '$': handle_bulk_reply,
        '*': handle_multibulk_reply,
    }
def parseargs(argv):
    """Parse command line arguments.

    Returns a tuple (*opts*, *args*), where *opts* is an
    :class:`optparse.Values` instance and *args* is the list of arguments left
    over after processing.

    :param argv: a list of command line arguments, usually :data:`sys.argv`.
    """
    parser = optparse.OptionParser(prog=argv[0], usage="[options] <address>")
    parser.allow_interspersed_args = False

    # Global options, declared table-style: (short, dest, action, default, help).
    option_specs = [
        ("-q", "quiet", "count", 0, "decrease the logging verbosity"),
        ("-s", "silent", "store_true", False, "silence the logger"),
        ("-v", "verbose", "count", 0, "increase the logging verbosity"),
    ]
    for short_flag, dest, action, default, help_text in option_specs:
        parser.add_option(short_flag, "--" + dest, dest=dest,
                          default=default, action=action, help=help_text)

    return parser.parse_args(args=argv[1:])
def main(argv, stdin=None, stdout=None, stderr=None):
    """Main entry point.

    Returns a value that can be understood by :func:`sys.exit`.

    :param argv: a list of command line arguments, usually :data:`sys.argv`.
    :param out: stream to write messages; :data:`sys.stdout` if None.
    :param err: stream to write error messages; :data:`sys.stderr` if None.
    """
    if stdin is None: # pragma: nocover
        stdin = sys.stdin
    if stdout is None: # pragma: nocover
        stdout = sys.stdout
    if stderr is None: # pragma: nocover
        stderr = sys.stderr
    (opts, args) = parseargs(argv)
    # Each -v lowers the logging threshold by 10, each -q raises it.
    level = logging.WARNING - ((opts.verbose - opts.quiet) * 10)
    if opts.silent:
        level = logging.CRITICAL + 1
    level = max(1, level)
    format = "%(name)s %(message)s"
    handler = logging.StreamHandler(stderr)
    handler.setFormatter(logging.Formatter(format))
    log.addHandler(handler)
    log.setLevel(level)
    # Demo traffic: connect, queue a few commands, then pump asyncore until
    # the connection closes.
    db = Redis()
    db.connect()
    db.do("SELECT", 0)
    db.do("SET", "foo", "bar")
    db.do("SADD", "foo", "baz")
    asyncore.loop()
if __name__ == "__main__": # pragma: nocover
    # Translate Ctrl-C into a clean exit status.
    try:
        ret = main(sys.argv, sys.stdin, sys.stdout, sys.stderr)
    except KeyboardInterrupt:
        ret = None
    sys.exit(ret)
|
Python
| 0
|
@@ -787,24 +787,54 @@
ndler = None
+%0A self.firstbyte = None
%0A%0A def co
@@ -2497,16 +2497,21 @@
+self.
firstbyt
@@ -2600,16 +2600,21 @@
andlers%5B
+self.
firstbyt
@@ -2811,16 +2811,21 @@
e %25r%22 %25
+self.
firstbyt
@@ -3312,22 +3312,84 @@
ug(%22%25r%22,
- reply
+%0A %22%25s%25s%25s%22 %25 (self.firstbyte, reply, self.terminator)
)%0A
|
79b2ad46334a35a68f334e8df621b208f3c617e2
|
Remove `build-setup-requires-pex` options scope. (#9732)
|
src/python/pants/python/setup_py_runner.py
|
src/python/pants/python/setup_py_runner.py
|
# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import os
from pathlib import Path
from typing import Callable, Iterable, List, Optional
from pex.interpreter import PythonInterpreter
from pex.pex import PEX
from pex.pex_info import PexInfo
from pants.option.optionable import Optionable
from pants.python.executable_pex_tool import ExecutablePexTool
from pants.python.python_requirement import PythonRequirement
from pants.util.contextutil import pushd
from pants.util.dirutil import safe_mkdtemp
from pants.util.memo import memoized_method
from pants.util.strutil import safe_shlex_join
class SetupPyRunner:
    """A utility capable of executing setup.py commands in a hermetic environment.

    Supports `setuptools` and `wheel` distutils commands by default.
    """

    class Factory(ExecutablePexTool):
        options_scope = "setup-py-runner"
        # The deprecated "build-setup-requires-pex" options scope was
        # scheduled for removal in 1.28.0.dev2 and has been dropped.

        @classmethod
        def register_options(cls, register: Callable[..., None]) -> None:
            super().register_options(register)
            register(
                "--setuptools-version",
                advanced=True,
                fingerprint=True,
                default="44.0.0",
                help="The setuptools version to use when executing `setup.py` scripts.",
            )
            register(
                "--wheel-version",
                advanced=True,
                fingerprint=True,
                default="0.34.2",
                help="The wheel version to use when executing `setup.py` scripts.",
            )

        @classmethod
        def create(
            cls,
            *,
            pex_file_path: Optional[Path] = None,
            extra_reqs: Optional[List[PythonRequirement]] = None,
            interpreter: Optional[PythonInterpreter] = None,
            scope: Optional[Optionable] = None,
        ) -> "SetupPyRunner":
            """Bootstrap a requirements PEX and wrap it in a SetupPyRunner."""
            factory = cls.scoped_instance(scope) if scope is not None else cls.global_instance()
            requirements_pex = factory.bootstrap(
                interpreter=interpreter,
                pex_file_path=pex_file_path or os.path.join(safe_mkdtemp(), "setup-py-runner.pex"),
                extra_reqs=extra_reqs,
            )
            return SetupPyRunner(requirements_pex=requirements_pex)

        @property
        def base_requirements(self):
            # Pinned build-time requirements baked into the runner PEX.
            return [
                PythonRequirement(f"setuptools=={self.get_options().setuptools_version}"),
                PythonRequirement(f"wheel=={self.get_options().wheel_version}"),
            ]

    class CommandFailure(Exception):
        """Indicates an error executing setup.py commands."""

    def __init__(self, requirements_pex: PEX) -> None:
        self._requirements_pex = requirements_pex

    @memoized_method
    def __str__(self) -> str:
        pex_path = self._requirements_pex.path()
        pex_info = PexInfo.from_pex(pex_path)
        requirements = "\n ".join(map(str, pex_info.requirements))
        return f"{type(self).__name__} at {pex_path} with requirements:\n {requirements} "

    def _create_python_args(self, setup_command: Iterable[str]) -> Iterable[str]:
        # --no-user-cfg keeps the run hermetic (ignores ~/.pydistutils.cfg).
        args = ["setup.py", "--no-user-cfg"]
        args.extend(setup_command)
        return args

    def cmdline(self, setup_command: Iterable[str]) -> Iterable[str]:
        """Returns the command line that would be used to execute the given setup.py command."""
        args = self._create_python_args(setup_command)
        cmdline: List[str] = self._requirements_pex.cmdline(args)
        return cmdline

    def run_setup_command(
        self, *, source_dir: Path, setup_command: Iterable[str], **kwargs
    ) -> None:
        """Runs the given setup.py command against the setup.py project in `source_dir`.

        :raises: :class:`SetupPyRunner.CommandFailure` if there was a problem executing the command.
        """
        with pushd(str(source_dir)):
            result = self._requirements_pex.run(
                args=self._create_python_args(setup_command), **kwargs
            )
            if result != 0:
                pex_command = safe_shlex_join(self.cmdline(setup_command))
                raise self.CommandFailure(f"Failed to execute {pex_command} using {self}")

    def _collect_distribution(
        self, source_dir: Path, setup_command: Iterable[str], dist_dir: Path
    ) -> Path:
        assert source_dir.is_dir()

        self._source_dir = source_dir
        self.run_setup_command(source_dir=source_dir, setup_command=setup_command)

        # Exactly one artifact is expected in dist_dir.
        dists = os.listdir(dist_dir)
        if len(dists) == 0:
            raise self.CommandFailure("No distribution was produced!")
        if len(dists) > 1:
            ambiguous_dists = "\n ".join(dists)
            raise self.CommandFailure(f"Ambiguous distributions found:\n {ambiguous_dists}")

        return dist_dir.joinpath(dists[0])

    @memoized_method
    def sdist(self, source_dir: Path) -> Path:
        """Generates an sdist from the setup.py project at `source_dir` and returns the sdist
        path."""
        dist_dir = safe_mkdtemp()
        return self._collect_distribution(
            source_dir=source_dir,
            setup_command=["sdist", "--dist-dir", dist_dir],
            dist_dir=Path(dist_dir),
        )

    @memoized_method
    def bdist(self, source_dir: Path) -> Path:
        """Generates a wheel from the setup.py project at `source_dir` and returns the wheel
        path."""
        dist_dir = safe_mkdtemp()
        return self._collect_distribution(
            source_dir=source_dir,
            setup_command=["bdist_wheel", "--dist-dir", dist_dir],
            dist_dir=Path(dist_dir),
        )
|
Python
| 0
|
@@ -935,135 +935,8 @@
ner%22
-%0A deprecated_options_scope = %22build-setup-requires-pex%22%0A deprecated_options_scope_removal_version = %221.28.0.dev2%22
%0A%0A
|
600b054b950b26db8609d71a75350aaa995bb26e
|
Add import export to Geography admin
|
scorecard/admin.py
|
scorecard/admin.py
|
from django.contrib import admin
from django.conf import settings
from django_q.tasks import async_task
from constance import config
from .models import (
Geography,
MunicipalityProfilesCompilation,
)
@admin.register(Geography)
class GeographyAdmin(admin.ModelAdmin):
    # Read-only listing of geography code, level and name.
    list_display = ("geo_code", "geo_level", "name",)
@admin.register(MunicipalityProfilesCompilation)
class MunicipalityProfilesCompilationAdmin(admin.ModelAdmin):
    """Admin for compilation runs; saving queues an async compile task."""
    list_display = (
        "datetime",
        "user",
        "last_audit_year",
        "last_opinion_year",
        "last_uifw_year",
        "last_audit_quarter",
    )
    readonly_fields = (
        "user",
    )

    def get_form(self, request, obj=None, **kwargs):
        # The year/quarter fields are fixed to the site-wide constance
        # values: shown for transparency but not editable.
        form = super(MunicipalityProfilesCompilationAdmin,
                     self).get_form(request, obj, **kwargs)
        form.base_fields["last_audit_year"].disabled = True
        form.base_fields["last_opinion_year"].disabled = True
        form.base_fields["last_uifw_year"].disabled = True
        form.base_fields["last_audit_quarter"].disabled = True
        form.base_fields["last_audit_year"].initial = config.LAST_AUDIT_YEAR
        form.base_fields["last_opinion_year"].initial = config.LAST_OPINION_YEAR
        form.base_fields["last_uifw_year"].initial = config.LAST_UIFW_YEAR
        form.base_fields["last_audit_quarter"].initial = config.LAST_AUDIT_QUARTER
        return form

    def get_exclude(self, request, obj=None):
        # Hide the user field on the "add" form; it is set automatically in
        # save_model below.
        if obj is None:
            return ("user",)
        else:
            return super(MunicipalityProfilesCompilationAdmin, self).get_exclude(request, obj)

    def save_model(self, request, obj, form, change):
        # Set the user to the current user
        obj.user = request.user
        # Process default save behavior
        super(MunicipalityProfilesCompilationAdmin, self).save_model(
            request, obj, form, change)
        # Queue task
        async_task(
            "scorecard.compile_profiles.compile_data",
            settings.API_URL,
            obj.last_audit_year,
            obj.last_opinion_year,
            obj.last_uifw_year,
            obj.last_audit_quarter,
            task_name="Compile municipal profiles"
        )
|
Python
| 0
|
@@ -126,16 +126,107 @@
t config
+%0Afrom import_export import resources%0Afrom import_export.admin import ImportExportModelAdmin
%0A%0Afrom .
@@ -341,30 +341,36 @@
graphyAdmin(
-admin.
+ImportExport
ModelAdmin):
|
6fb4bf39fd460b5f2d69d665d9e4d09e4279d88b
|
Add maximum page constraint for Pagination results
|
common/utils.py
|
common/utils.py
|
# common/utils.py
from flask import url_for, current_app
class PaginateData():
    """"Pagination class.

    Paginate query results and build a REST-style envelope containing the
    serialized page (under *key_name*), previous/next page URLs and the
    total result count.
    """

    # Hard ceiling on client-requested page sizes, protecting the API from
    # abusive ``?limit=`` values.
    MAX_RESULTS_PER_PAGE = 100

    def __init__(self, request, query, resource_for_url, key_name, schema):
        self.request = request
        self.query = query
        self.resource_for_url = resource_for_url
        self.key_name = key_name
        self.schema = schema
        self.page_argument_name = current_app.config['PAGINATION_PAGE_ARGUMENT_NAME']
        limit = request.args.get('limit')
        if limit:
            try:
                # Clamp to the maximum allowed page size.
                self.results_per_page = min(int(limit), self.MAX_RESULTS_PER_PAGE)
            except ValueError:
                # A malformed limit previously raised and produced a 500;
                # fall back to the configured default instead.
                self.results_per_page = current_app.config['DEFAULT_PAGINATION_PAGE_SIZE']
        else:
            self.results_per_page = current_app.config['DEFAULT_PAGINATION_PAGE_SIZE']

    def paginate_query(self):
        """"Handle query pagination."""
        page_number = self.request.args.get(self.page_argument_name, 1, type=int)
        # error_out=False: an out-of-range page yields an empty item list
        # rather than a 404.
        paginated_objects = self.query.paginate(page_number, per_page=self.results_per_page, error_out=False)
        objects = paginated_objects.items
        if paginated_objects.has_prev:
            previous_page_url = url_for(
                self.resource_for_url, limit=self.results_per_page, page=page_number - 1, _external=True)
        else:
            previous_page_url = None
        if paginated_objects.has_next:
            next_page_url = url_for(
                self.resource_for_url, limit=self.results_per_page, page=page_number + 1, _external=True)
        else:
            next_page_url = None
        dumped_objects = self.schema.dump(objects, many=True).data
        return ({
            self.key_name: dumped_objects,
            'previous': previous_page_url,
            'next': next_page_url,
            'count': paginated_objects.total
        })
|
Python
| 0
|
@@ -509,16 +509,135 @@
imit'):%0A
+ if int(request.args.get('limit')) %3E 100:%0A self.results_per_page = 100%0A else:%0A
|
3df6175723f1c9f22fc98ae1cc750046f265398e
|
Access ruler through naali
|
bin/pymodules/core/componentpropertyadder.py
|
bin/pymodules/core/componentpropertyadder.py
|
import circuits
import naali
import rexviewer as r
#from PythonQt.QtCore import QVariant
import PythonQt
#the ones not listed here are added using the c++ name, e.g. ent.EC_NetworkPosition
# Maps Naali component type names to the short Qt dynamic-property names
# exposed to script code (ent.prim, ent.mesh, ...).
compshorthand = {
    'EC_OpenSimPrim': 'prim',
    'EC_OgrePlaceable': 'placeable',
    'EC_NetworkPosition': 'network',
    'EC_OgreMesh':'mesh',
    'EC_OgreCamera': 'camera',
    'EC_OgreAnimationController': 'animationcontroller',
    'EC_Highlight': 'highlight',
    'EC_Touchable': 'touchable',
    'EC_AttachedSound': 'sound',
    "EC_OpenSimPresence": 'opensimpresence',
    'EC_SoundRuler': 'soundruler'
    }
class ComponentPropertyAdder(circuits.BaseComponent):
    """Adds Naali Entity-Components as Qt Dynamic Properties
    for convenient access from PythonQt and QtScript (js) code."""

    @circuits.handler("on_sceneadded")
    def on_sceneadded(self, name):
        # Hook component add/remove signals on every newly created scene.
        #print "Scene added:", name#,
        s = naali.getScene(name)

        #s.connect("ComponentInitialized(Foundation::ComponentInterface*)", self.onComponentInitialized)
        s.connect("ComponentAdded(Scene::Entity*, IComponent*, AttributeChange::Type)", self.onComponentAdded)
        s.connect("ComponentRemoved(Scene::Entity*, IComponent*, AttributeChange::Type)", self.onComponentRemoved)

    def onComponentAdded(self, ent, comp, changetype):
        #print "Comp added:", ent, comp, comp.TypeName, comp.Name, changetype
        # Prefer the short alias when one exists, else the raw C++ type name.
        if comp.TypeName in compshorthand:
            propname = compshorthand[comp.TypeName]
        else:
            propname = comp.TypeName

        if propname not in ent.dynamicPropertyNames():
            #first come, first (actually: the only one) served
            #consistent with how inside the c++ side single GetComponent works
            ent.setProperty(propname, comp)

    def onComponentRemoved(self, ent, comp, changetype):
        #r.logInfo("XXX onComponentRemoved called")
        if comp.TypeName in compshorthand:
            propname = compshorthand[comp.TypeName]
        else:
            propname = comp.TypeName
        #r.logInfo("XXX propname " +str(propname))
        #r.logInfo("XXX dynamicpropertynames " + str(ent.dynamicPropertyNames()))

        if propname in ent.dynamicPropertyNames():
            # qt docs: "A property can be removed from an instance by
            # passing the property name and an invalid QVariant value
            # to QObject::setProperty(). The default constructor for
            # QVariant constructs an invalid QVariant."
            #this is probably impossible on py side 'cause we don't see QVariants here, so there's a helper on the c++ side instead.
            #ent.setProperty(propname, invalid_qvariant())
            naali._pythonscriptmodule.RemoveQtDynamicProperty(ent, propname)
|
Python
| 0
|
@@ -522,17 +522,17 @@
d',%0A
-%22
+'
EC_OpenS
@@ -541,17 +541,17 @@
Presence
-%22
+'
: 'opens
@@ -596,16 +596,41 @@
ndruler'
+,%0A 'EC_Ruler': 'ruler'
%0A %7D
|
21f1cd585830e9d1b85f4201fb3c4914a4c1bd21
|
Update test_code_style.py
|
_unittests/ut_module/test_code_style.py
|
_unittests/ut_module/test_code_style.py
|
# pylint: disable=R1721
"""
@brief test log(time=0s)
"""
import os
import unittest
from pyquickhelper.loghelper import fLOG
from pyquickhelper.pycode import check_pep8
from pyquickhelper.pycode.utils_tests_helper import _extended_refactoring
class TestCodeStyle(unittest.TestCase):
    """Runs pep8/pylint style checks over the source and test trees."""

    def test_code_style_src(self):
        # Check the src/ tree; pylint_ignore lists rules this project
        # deliberately does not enforce, skip silences known findings.
        fLOG(
            __file__,
            self._testMethodName,
            OutputPrint=__name__ == "__main__")

        thi = os.path.abspath(os.path.dirname(__file__))
        src_ = os.path.normpath(os.path.join(thi, "..", "..", "src"))
        check_pep8(src_, fLOG=fLOG, extended=[("fLOG", _extended_refactoring)],
                   pylint_ignore=('C0103', 'C1801', 'R0201', 'R1705', 'W0108', 'W0613',
                                  'W0231', 'W0212', 'C0111', 'W0107', 'R1728',
                                  'C0209'),
                   skip=["Redefining built-in 'iter'",
                         "iter_rows.py:340",
                         "translation_class.py",
                         "translation_to_python.py:118",
                         "translation_to_python.py:185",
                         "translation_to_python.py:244",
                         "node_visitor_translator.py:74: E1111",
                         "R1720",
                         ]
                   )

    def test_code_style_test(self):
        # Check the unit-test tree, excluding temp_* scratch directories.
        fLOG(
            __file__,
            self._testMethodName,
            OutputPrint=__name__ == "__main__")

        thi = os.path.abspath(os.path.dirname(__file__))
        test = os.path.normpath(os.path.join(thi, "..", ))
        check_pep8(test, fLOG=fLOG, neg_pattern="temp_.*",
                   pylint_ignore=('C0111', 'C0103', 'W0622', 'C1801', 'C0412',
                                  'R0201', 'W0122', 'W0123', 'E1101', 'R1705',
                                  'W0107', 'R1720', 'C0209'),
                   skip=[],
                   extended=[("fLOG", _extended_refactoring)])
if __name__ == "__main__":
    # Allow running this test module directly.
    unittest.main()
|
Python
| 0.000004
|
@@ -1863,32 +1863,41 @@
'R1720', 'C0209'
+, 'R1721'
),%0A
|
9c38604fa8e85efeed723b47e3486f9d09ea9858
|
Fix login cookie
|
adhocracy/lib/auth/instance_auth_tkt.py
|
adhocracy/lib/auth/instance_auth_tkt.py
|
import datetime
from repoze.who.plugins.auth_tkt import AuthTktCookiePlugin, _now
class InstanceAuthTktCookiePlugin(AuthTktCookiePlugin):
    """auth_tkt cookie plugin that scopes the login cookie to the wildcard
    domain derived from ``adhocracy.domain``, so subdomains share the
    session."""

    def _get_cookies(self, environ, value, max_age=None):
        if max_age is not None:
            later = _now() + datetime.timedelta(seconds=int(max_age))
            # Wdy, DD-Mon-YY HH:MM:SS GMT
            expires = later.strftime('%a, %d %b %Y %H:%M:%S')
            # the Expires header is *required* at least for IE7 (IE7 does
            # not respect Max-Age)
            max_age = "; Max-Age=%s; Expires=%s" % (max_age, expires)
        else:
            max_age = ''

        # Strip any port from the configured domain; cookies are host-scoped.
        cur_domain = environ.get('adhocracy.domain').split(':')[0]
        wild_domain = '.' + cur_domain
        cookies = [
            #('Set-Cookie', '%s="%s"; Path=/%s' % (
            #self.cookie_name, value, max_age)),
            #('Set-Cookie', '%s="%s"; Path=/; Domain=%s%s' % (
            #self.cookie_name, value, cur_domain, max_age)),
            # Only the wildcard-domain variant is emitted.
            ('Set-Cookie', '%s="%s"; Path=/; Domain=%s%s' % (
                self.cookie_name, value, wild_domain, max_age))
            ]
        return cookies
|
Python
| 0.000017
|
@@ -75,17 +75,85 @@
n, _now%0A
+from pylons import config%0Afrom paste.deploy.converters import asbool
%0A
-
%0Aclass I
@@ -697,106 +697,181 @@
-cur_domain = environ.get('adhocracy.domain').split(':')%5B0%5D%0A wild_domain = '.' + cur_domain%0A
+if asbool(config.get('adhocracy.relative_urls', 'false')):%0A # Serve the cookie for the current host, which may be%0A # %22localhost%22 or an IP address.%0A
@@ -890,33 +890,36 @@
= %5B%0A
+
-#
+
('Set-Cookie', '
@@ -933,16 +933,18 @@
; Path=/
+;
%25s' %25 (%0A
@@ -947,33 +947,36 @@
%25 (%0A
+
-#
+
self.cookie_name
@@ -985,34 +985,34 @@
value, max_age))
-,
%0A
+
#('S
@@ -1011,120 +1011,170 @@
-#('Set-Cookie', '%25s=%22%25s%22; Path=/; Domain=%25s%25s' %25 (%0A #self.cookie_name, value, cur_domain, max_age)),%0A
+ %5D%0A else:%0A cur_domain = environ.get('adhocracy.domain').split(':')%5B0%5D%0A wild_domain = '.' + cur_domain%0A%0A cookies = %5B%0A
@@ -1235,32 +1235,36 @@
%25 (%0A
+
self.cookie_name
@@ -1287,32 +1287,36 @@
main, max_age))%0A
+
%5D%0A
|
b886a7db3c6669ccf1e69747cca2fdddc54c15ee
|
Set encoding when opening languages files, fix error on windows.
|
lektor/i18n.py
|
lektor/i18n.py
|
import io
import json
import os

from lektor._compat import iteritems
from lektor.uilink import UI_LANG
# Directory holding the bundled <lang>.json translation catalogs.
translations_path = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                                 'translations')

# Language codes derived from the catalog file names ("de.json" -> "de").
KNOWN_LANGUAGES = list(x[:-5] for x in os.listdir(translations_path)
                       if x.endswith('.json'))

# Preload every catalog once at import time.
translations = {}
for _lang in KNOWN_LANGUAGES:
    # io.open with an explicit encoding: the catalogs are UTF-8 JSON, and
    # relying on the platform default encoding breaks on Windows.
    with io.open(os.path.join(translations_path, _lang + '.json'),
                 encoding='utf-8') as f:
        translations[_lang] = json.load(f)
def get_translations(language):
    """Looks up the translations for a given language."""
    # Returns None for unknown languages rather than raising.
    return translations.get(language)
def is_valid_language(lang):
    """Verifies a language is known and valid."""
    # Known languages are those with a bundled translation catalog.
    return lang in KNOWN_LANGUAGES
def get_default_lang():
    """Returns the default language the system should use."""
    # A UI language baked into the build wins over the environment.
    if UI_LANG is not None:
        return UI_LANG
    # POSIX locale variables, in decreasing order of precedence.
    for key in 'LANGUAGE', 'LC_ALL', 'LC_CTYPE', 'LANG':
        value = os.environ.get(key)
        if not value:
            continue
        # "de_DE.UTF-8" -> "de"
        lang = value.split('_')[0].lower()
        if is_valid_language(lang):
            return lang
    return 'en'
def load_i18n_block(key):
    """Looks up an entire i18n block from a known translation."""
    rv = {}
    for lang in KNOWN_LANGUAGES:
        val = translations.get(lang, {}).get(key)
        # Languages without a translation for this key are omitted.
        if val is not None:
            rv[lang] = val
    return rv
def get_i18n_block(inifile_or_dict, key, pop=False):
    """Extracts an i18n block from an ini file or dictionary for a given
    key.  If "pop", delete keys from "inifile_or_dict".

    The plain *key* maps to 'en' (the internal default language); entries of
    the form ``key[<lang>]`` map to their language code.
    """
    prefix = key + '['
    result = {}
    # Snapshot the keys so popping while iterating is safe.
    for entry in list(inifile_or_dict):
        if entry == key:
            lang = 'en'
        elif entry.startswith(prefix):
            # "title[de]" -> "de"
            lang = entry[len(prefix):-1]
        else:
            continue
        result[lang] = (inifile_or_dict.pop(entry) if pop
                        else inifile_or_dict[entry])
    return result
def generate_i18n_kvs(**opts):
    """Generates key-value pairs based on the kwargs passed into this function.
    For every key ending in "_i18n", its corresponding value will be translated
    and returned once for every language that has a known translation.
    """
    for key, value in opts.items():
        if key.endswith('_i18n'):
            # "label_i18n" -> emit "label[<lang>]" for each translation of
            # the value (treated as a translation key).
            base_key = key[:-5]
            for lang, trans in iteritems(load_i18n_block(value)):
                lang_key = '%s[%s]' % (base_key, lang)
                yield lang_key, trans
        else:
            # Non-i18n kwargs pass through unchanged.
            yield key, value
|
Python
| 0
|
@@ -14,16 +14,36 @@
ort json
+%0Afrom io import open
%0A%0Afrom l
@@ -458,16 +458,47 @@
'.json')
+,%0A encoding=%22utf8%22
) as f:%0A
|
1d4c5a38c1690be94b02ad3f8b34e6b62ae8473f
|
remove pdb
|
script/managers.py
|
script/managers.py
|
# This Python file uses the following encoding: utf-8
import os, sys
import datetime
from django.db.models import Manager
from django.db.models.query import QuerySet
from .models import *
from rapidsms_httprouter.models import Message
from poll.models import gettext_db
from django.db.models import Q
class ScriptProgressQuerySet(QuerySet):
    def need_to_start(self, script):
        """
        Filter to script progress objects that need to be started.  This applies when
        a ScriptProgress object has None for step (user hasn't even progressed to step
        0), and the start_offset for the first step has elapsed.

        Returns all ScriptProgress objects for which the above rules apply,
        for a given script.

        Parameters:
        script : The particular script that we're currently concerned with

        Example:
        script = Script.objects.all()[0] # get the first script in the db
        # returns all ScriptProgress objects
        # that need to complete start this script
        ScriptProgress.objects.all().need_to_start(script)
        """
        curtime = datetime.datetime.now()
        # Delay (seconds) configured on the script's first step.
        start_offset = script.steps.get(order=0).start_offset
        return self.filter(step=None, time__lte=(curtime - datetime.timedelta(seconds=start_offset)))
    def need_to_resend(self, script, step):
        """
        Filter the ScriptProgress objects whose time to resend a message/poll has elapsed, based on the
        step, rules, status, num_tries, and retry_offset.

        Parameters:
        script: The script the progress object belongs to
        step: The script step the progress is at
        """
        # Steps without a retry offset never resend.
        if not step.retry_offset:
            return self.none()
        curtime = datetime.datetime.now()
        # Only the resend/strict rules retry, and only while tries remain.
        return self.filter(step=step, script=script,
                           step__rule__in=[step.RESEND_MOVEON, step.RESEND_GIVEUP, step.STRICT_GIVEUP,
                                           step.STRICT_MOVEON], num_tries__lt=step.num_tries,
                           time__lte=(curtime - datetime.timedelta(seconds=step.retry_offset)))
    def need_to_transition(self, script, step):
        """
        Filters ScriptProgress whose script steps that are ready to move to the next step and the start_offset
        of the next step has elapsed

        Parameters:
        script: The script the progress object belongs to
        """
        curtime = datetime.datetime.now()
        if step:
            steps = script.steps.filter(order__gt=step.order)
        else:
            steps = script.steps.all()
        if steps.count():
            next_step = steps.order_by('order')[0]
        # NOTE(review): if ``steps`` is empty, ``next_step`` is never bound
        # yet is referenced below — those branches would raise NameError.
        if step and next_step:
            return self.filter(step=step, script=script, status='C',
                               time__lte=(curtime - datetime.timedelta(seconds=next_step.start_offset)))
        elif step and not next_step:
            last_step = script.steps.order_by('-order')[0].order
            # NOTE(review): ``self.step`` does not exist on a QuerySet; this
            # comparison looks broken — confirm the intended object.
            if last_step == self.step.order:
                return self.filter(status="C", step=step, script=script)
        elif not step:
            return self.filter(time__lte=(curtime - datetime.timedelta(seconds=next_step.start_offset)))
        return self.none()
    def expired(self, script, step):
        # Steps without a start_offset can never expire.
        if step.start_offset is None:
            return self.none()
        else:
            curtime = datetime.datetime.now()
            give_up_rules = [step.RESEND_MOVEON, step.RESEND_GIVEUP, step.STRICT_GIVEUP,
                             step.STRICT_MOVEON, step.WAIT_MOVEON, step.WAIT_GIVEUP]
            num_tries = step.num_tries
            if num_tries:
                # With a retry budget: pending progress expires once the
                # budget is spent and the offset has elapsed.
                return self.filter(step=step, script=script, status=self.model.PENDING).filter(\
                    step__rule__in=give_up_rules,\
                    num_tries__gte=num_tries,\
                    time__lte=(curtime - datetime.timedelta(seconds=step.start_offset)))
            else:
                # Without a budget only wait/strict-moveon rules time out.
                return self.filter(step=step, script=script, status=self.model.PENDING).filter(
                    step__rule__in=[step.STRICT_MOVEON, step.WAIT_MOVEON, step.WAIT_GIVEUP],
                    time__lte=(curtime - datetime.timedelta(seconds=step.start_offset)))
    def expire(self, script, step):
        """
        Handle expiry for `step`: records under a *_GIVEUP rule are given up on
        (sessions closed, records deleted via giveup()); all other records are
        marked COMPLETE so moveon() can pick them up.

        NOTE(review): the non-giveup branch updates every matching record in
        this queryset, not only the expired ones — confirm that callers filter
        appropriately before invoking expire().
        """
        give_up_rules = [step.WAIT_GIVEUP, step.RESEND_GIVEUP, step.STRICT_GIVEUP]
        self.filter(step__rule__in=give_up_rules).giveup(script, step)
        self.exclude(step__rule__in=give_up_rules).update(status=self.model.COMPLETE)
    def giveup(self, script, step):
        """
        Removes ScriptProgress objects from the table, update ScriptSession, and
        fires the appropriate signal.

        Only expired records under one of the *_GIVEUP rules are affected: the
        most recent open session for each connection is closed, the
        script_progress_was_completed signal is sent, then the records are
        deleted.
        """
        from script.models import ScriptSession
        expired = self.expired(script, step)
        spses = expired.filter(step__rule__in=[step.WAIT_GIVEUP, step.RESEND_GIVEUP, step.STRICT_GIVEUP])
        for sp in spses:
            # Close the latest still-open session for this connection.
            session = ScriptSession.objects.filter(script=script, connection=sp.connection, end_time=None).latest(
                'start_time')
            session.end_time = datetime.datetime.now()
            session.save()
            # NOTE(review): script_progress_was_completed is assumed to be
            # imported at module level — confirm.
            script_progress_was_completed.send(sender=sp, connection=sp.connection)
        spses.delete()
def moveon(self, script, step):
"""
Move the step to the next in order (if one exists, otherwise end the script),
sending the appropriate signals.
"""
import pdb;pdb.set_trace()
if step:
steps = script.steps.filter(order__gt=step.order)
else:
steps = script.steps.all()
script_progres_objects = self.need_to_transition(script, step)
script_progres_list = list(script_progres_objects.values_list('connection', flat=True))
if steps.count():
next_step = steps.order_by('order')[0]
else:
next_step = None
if next_step:
for sp in script_progres_objects:
script_progress_pre_change.send(sender=sp, connection=sp.connection, step=step)
self.update(step=next_step, status=self.model.PENDING)
for sp in self.model._default_manager.filter(pk__in=script_progres_list):
script_progress.send(sender=sp, connection=sp.connection, step=next_step)
return True
else:
return self.giveup(script, step)
    def mass_text(self):
        """
        Sends the current step's prompt (poll question, email text, or plain
        message) to every connection in this queryset, batched per configured
        language.

        Returns:
            True when prompts were queued, False for an empty queryset.
        """
        #get one scriptprogress since they are all supposed to be on the same step
        if self.exists():
            prog = self[0]
        else:
            return False
        # Pick the prompt text in priority order: poll question, email, message.
        if prog.step.poll:
            text = prog.step.poll.question
        elif prog.step.email:
            text = prog.step.email
        else:
            text = prog.step.message
        for language in dict(settings.LANGUAGES).keys():
            if language == "en":
                """default to English for contacts with no language preference"""
                localized_progs = self.filter(Q(language__in=["en", '']) | Q(language=None))
            else:
                localized_progs = self.filter(language=language)
            if localized_progs.exists():
                localized_conns = localized_progs.values_list('connection', flat=True)
                # NOTE(review): `messages` is unused; Message.mass_text is
                # called for its side effect of queueing messages (status 'L').
                messages = Message.mass_text(gettext_db(field=text, language=language),
                                             Connection.objects.filter(pk__in=localized_conns).distinct(), status='L')
        return True
class ProgressManager(Manager):
    """
    Manager that proxies unknown attribute access to its queryset so custom
    queryset methods (need_to_transition, expire, moveon, ...) can be called
    directly on the manager.
    """
    def __init__(self, qs_class=ScriptProgressQuerySet):
        super(ProgressManager, self).__init__()
        # Fix: remember AND use the requested queryset class.  The original
        # stored qs_class but get_query_set() ignored it and hard-coded
        # ScriptProgressQuerySet; defaulting to that class preserves the old
        # effective behavior for callers that pass nothing.
        self.queryset_class = qs_class

    def get_query_set(self):
        return self.queryset_class(self.model, using=self._db)

    def __getattr__(self, attr, *args):
        # Fall back to the queryset for attributes the manager class lacks,
        # making custom queryset methods reachable as manager methods.
        try:
            return getattr(self.__class__, attr, *args)
        except AttributeError:
            return getattr(self.get_query_set(), attr, *args)
|
Python
| 0.000024
|
@@ -5490,43 +5490,8 @@
%22%22%22%0A
- import pdb;pdb.set_trace()%0A
|
10dc027ee15428d7ca210e0b74e5ae9274de0fa8
|
Use raw_input instead of the unmodified words
|
lianXiangCi.py
|
lianXiangCi.py
|
#coding:utf-8
import urllib
import urllib2
import re
from random import choice
ipList=['120.76.115.134:80','222.83.14.145:3128','119.188.94.145:80']
thisIp=choice(ipList)
keyWord=urllib.quote('科学')
url='http://search.sina.com.cn/iframe/suggest/index.php?q='+keyWord
headers={
'Get':url,
'Host':'search.sina.com.cn',
'Referer':'http://search.sina.com.cn/',
'User-Agent':'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/48.0.2564.97 Safari/537.36'
}
proxy_support = urllib2.ProxyHandler({'http': 'http://'+thisIp})
opener=urllib2.build_opener(proxy_support)
urllib2.install_opener(opener)
req=urllib2.Request(url)
for key in headers:
req.add_header(key,headers[key])
html=urllib2.urlopen(req).read()
file=open('C:\Users\Ryan\Desktop\lianXC.txt','w')
file.write(html)
|
Python
| 0.00008
|
@@ -177,16 +177,67 @@
ist)%0D%0A%0D%0A
+input = raw_input(%22Please input your key words:%22)%0D%0A
keyWord=
@@ -253,12 +253,13 @@
ote(
-'%E7%A7%91%E5%AD%A6'
+input
)%0D%0A%0D
|
8664723edb16f5787aa35d3bd5d6aea21e205eb9
|
Remove debug code & rename function
|
test-read.py
|
test-read.py
|
#!/usr/bin/python
import unittest
import random
import rarfile
import os, sys
class PyarrCheck(unittest.TestCase):
    """
    Integration tests for PyarrFS: mount rar archives through FUSE and verify
    that reads through the mount match reads of the original files.

    Requires the external `rar` tool and a working FUSE setup.
    """

    def setUp(self):
        # Locate this script; all test data lives in directories beneath it.
        self.scriptdir = os.path.realpath(os.path.dirname(sys.argv[0]))
        _, self.scriptname = os.path.split(sys.argv[0])
        self.scriptpath = os.path.normpath(os.path.join(self.scriptdir, self.scriptname))

        self.testdir = os.path.join(self.scriptdir, 'rartest')
        self.rarmntdir = os.path.join(self.testdir, 'rarmnt')
        self.testfiledir = os.path.join(self.testdir, 'testfiles')
        self.testarchivedir = os.path.join(self.testdir, 'testarchives')
        # NOTE(review): hard-coded path to the pyarrfs executable — confirm it
        # matches the local checkout before running.
        self.pyarrpath = '/home/kll/kod/pyarrfs/pyarrfs'

        self.mkdir(self.testdir)
        self.mkdir(self.rarmntdir)
        self.mkdir(self.testfiledir)
        self.mkdir(self.testarchivedir)

        # Mount the rar filesystem; failures are ignored (it may already be
        # mounted).  Fix: narrowed the original bare `except:`.
        try:
            os.system(self.pyarrpath + ' ' + self.rarmntdir)
        except Exception:
            pass

    def mkdir(self, path):
        """Create `path` if missing and assert that it exists afterwards."""
        if not os.path.exists(path):
            os.mkdir(path)
        self.assertTrue(os.path.exists(path))

    def create_test_files(self, filedata):
        """Write each (filename, contents) pair into the test-file directory.

        Fix: the original rebound the loop variable `filedata`, shadowing the
        parameter, and never closed the file handles.
        """
        for filename, contents in filedata:
            with open(os.path.join(self.testfiledir, filename), 'w') as f:
                f.write(contents)

    def create_uncompressed_rar_archive(self, rarfile, files):
        """Pack `files` (from the test-file dir) into an uncompressed (-m0) rar."""
        os.chdir(self.testarchivedir)
        for file in files:
            filepath = os.path.join(self.testfiledir, file)
            cmd = 'rar a -inul -ep -m0 ' + os.path.join(self.testarchivedir, rarfile) + ' ' + filepath
            os.system(cmd)

    def tearDown(self):
        # Lazy-unmount so a busy mount cannot wedge the test run.
        os.system('fusermount -z -u ' + self.rarmntdir)
        # NOTE(review): cleanup of self.testdir is intentionally left disabled
        # so artifacts can be inspected after a failed run.

    def generate_content(self, size=0, population='ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'):
        """Return `size` random characters drawn from `population`."""
        # Fix: join instead of the original quadratic `+=` accumulation.
        return ''.join(str(random.choice(population)) for i in xrange(0, size))

    def generate_content_code(self, size=0):
        """Return a deterministic pattern of 10-char "$%09d" records.

        NOTE(review): each record is 10 chars, so the result is 10*size chars
        long, unlike generate_content — confirm intended.
        """
        # Fix: dropped the unused `ns` rounding computation and the quadratic
        # `+=` accumulation from the original.
        return ''.join("$%09d" % (i) for i in xrange(0, size))

    def test_sequential_read(self):
        """Build an archive of generated files and verify reads through the mount."""
        filedata = []
        filedata.append(['test3', self.generate_content_code(200000)])
        files = [entry[0] for entry in filedata]
        self.create_test_files(filedata)

        rar_archive = 'testarchive1.rar'
        self.create_uncompressed_rar_archive('testarchive1.rar', files)
        for file in files:
            rar_file = os.path.normpath(os.path.join(self.rarmntdir, '.' + self.testarchivedir, rar_archive, file))
            raw_file = os.path.normpath(os.path.join(self.testfiledir, file))
            self.verify_read_sequential(rar_file, raw_file)
            self.verify_read_from_offset(rar_file, raw_file, 3)
            self.verify_read_random(rar_file, raw_file)

    def verify_read_sequential(self, rar_file, raw_file):
        """Compare the first chunk of both files read from offset 0.

        Fix: the old debug prints consumed the first `read_bytes` of each
        stream, so the assertion actually compared bytes 1000-2000 instead of
        the first chunk.
        """
        rawf = open(raw_file, 'r')
        rarf = open(rar_file, 'r')
        rawf.seek(0)
        rarf.seek(0)
        read_bytes = 1000
        self.assertEqual(rarf.read(read_bytes), rawf.read(read_bytes), 'mismatch in sequential read')
        rarf.close()
        rawf.close()

    def verify_read_from_offset(self, rar_file, raw_file, offset=0):
        """Read both files to EOF starting at `offset` and compare."""
        rarf = open(rar_file, 'r')
        rawf = open(raw_file, 'r')
        rarf.seek(offset)
        rawf.seek(offset)
        self.assertEqual(rarf.read(), rawf.read(), 'mismatch in offset read from ' + str(offset))
        rarf.close()
        rawf.close()

    def verify_read_random(self, rar_file, raw_file):
        """Compare many random 10-byte reads between the mounted and raw file.

        Fix: removed debug prints and a duplicated seek pair from the loop.
        """
        file_size = os.path.getsize(raw_file)
        rarf = open(rar_file, 'r')
        rawf = open(raw_file, 'r')
        read_bytes = 10
        for i in xrange(0, 10000):
            # Pick a random offset aligned to a 10-char record boundary.
            rb = random.randrange(0, file_size - 10)
            byte = rb - ((rb + 10) % 10)
            # Make an exception if the test file is really small.
            if file_size <= 10:
                byte = 0
                read_bytes = file_size
            rawf.seek(byte)
            rarf.seek(byte)
            self.assertEqual(rarf.read(read_bytes), rawf.read(read_bytes), 'mismatch in random read')
        rarf.close()
        rawf.close()

if __name__ == '__main__':
    unittest.main()
|
Python
| 0
|
@@ -3178,16 +3178,27 @@
d_random
+_from_start
(rar_fil
@@ -3971,16 +3971,27 @@
d_random
+_from_start
(self, r
@@ -4464,150 +4464,8 @@
ze%0A%0A
-%09%09%09rawf.seek(byte)%0A%09%09%09rarf.seek(byte)%0A#%09%09%09print %22Offset %2510d: RAW - RAR: %25s %25s%22 %25 ( byte, rawf.read(read_bytes), str(rarf.read(read_bytes)) )%0A
%09%09%09r
|
5e13528a3c2912b4238f8f531a8f2c2652292b0a
|
Add init_db
|
scripts/init_db.py
|
scripts/init_db.py
|
import os
from pymongo import MongoClient
def main():
    """Connect to MongoDB, ensure the unique user-id index, and seed the admin.

    Any failure is reported on stdout instead of being propagated.
    """
    try:
        mongo_client = MongoClient(host=os.environ.get('MONGODB_URI'))
        database = mongo_client.get_default_database()
        # Guarantee that user documents have unique 'id' values.
        database.users.create_index(keys='id', name='index_id', unique=True)
        create_admin(database)
    except Exception as error:
        print(error)
def create_admin(db):
    """Insert the admin account into the ``companies`` collection.

    Credentials come from the ADMIN_ID / ADMIN_PASSWORD environment variables.
    A failed insert (e.g. the admin already exists) is reported and ignored.

    Args:
        db: Database handle exposing a ``companies`` collection.
    """
    admin_data = dict(id=os.environ.get('ADMIN_ID'), password=os.environ.get('ADMIN_PASSWORD'))
    try:
        db.companies.insert_one(admin_data)
    except Exception:
        # Fix: was a bare ``except:``, which would also swallow SystemExit and
        # KeyboardInterrupt; catch Exception so those still propagate.
        print('admin already exists')

if __name__ == '__main__':
    main()
|
Python
| 0.000042
|
@@ -186,24 +186,26 @@
asks%0A
+ #
db.users.cr
@@ -263,16 +263,18 @@
%0A
+ #
create_
@@ -283,16 +283,196 @@
min(db)%0A
+ db.users.update_many(%7B'$or': %5B%7B'events.joi.conference.registered': True%7D, %7B'events.joi.table_ronde.registered': True%7D%5D%7D,%0A %7B'$set': %7B'events.joi.registered': True%7D%7D)%0A
exce
|
4871d8135a754374bb1b31b0e2e51bacc8075c4f
|
Reorder arguments for resource() and default the 'prefix' argument to an empty string
|
respite/urls/__init__.py
|
respite/urls/__init__.py
|
from copy import deepcopy
from django.conf.urls.defaults import *
from django.http import HttpResponse
from respite.inflector import pluralize, cc2us
from respite.urls import routes
def resource(prefix, views, routes):
    """
    Route a collection of views.

    :param prefix: A string to prefix the routes by (e.g. 'posts/').
    :param views: A reference to the class that defines the views.
    :param routes: A list of routes.

    Returns a list of Django urlpatterns dispatching to `views`.
    """
    # Copy so that expanding lambda regexes/names does not mutate the caller's
    # shared route definitions.
    routes = deepcopy(routes)

    def dispatch(request, GET=False, POST=False, PUT=False, DELETE=False, PATCH=False, **kwargs):
        """
        Dispatch the request according to the request method and the string contained in
        the corresponding keyword argument.

        For example, if the request method is HTTP GET and the 'GET' argument to this function is
        set to 'index', the 'index' function of the views class will be invoked and returned.

        Arguments:
        :param request: A django.http.HttpRequest object.
        :param GET: A string describing the function to delegate the request to on HTTP GET.
        :param POST: A string describing the function to delegate the request to on HTTP POST.
        :param PUT: A string describing the function to delegate the request to on HTTP PUT.
        :param DELETE: A string describing the function to delegate the request to on HTTP DELETE.
        :param PATCH: A string describing the function to delegate the request to on HTTP PATCH.
        """
        # Return HTTP 405 Method Not Allowed if the request method isn't routed
        if request.method == 'GET' and not GET \
        or request.method == 'POST' and not POST \
        or request.method == 'PUT' and not PUT \
        or request.method == 'DELETE' and not DELETE \
        or request.method == 'PATCH' and not PATCH \
        or request.method not in ['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'OPTIONS', 'HEAD']:
            allowed_methods = []

            if GET:
                allowed_methods.append('GET')
            if POST:
                allowed_methods.append('POST')
            if PUT:
                allowed_methods.append('PUT')
            if DELETE:
                allowed_methods.append('DELETE')
            # Fix: PATCH was missing from the Allow header even when routed.
            if PATCH:
                allowed_methods.append('PATCH')

            response = HttpResponse()
            response.status_code = 405
            response['Allow'] = ', '.join(allowed_methods)
            return response

        # Dispatch the request
        if request.method in ['GET', 'HEAD']:
            return getattr(views(), GET)(request, **kwargs)
        if request.method == 'POST':
            return getattr(views(), POST)(request, **kwargs)
        if request.method == 'PUT':
            return getattr(views(), PUT)(request, **kwargs)
        if request.method == 'DELETE':
            return getattr(views(), DELETE)(request, **kwargs)
        if request.method == 'PATCH':
            return getattr(views(), PATCH)(request, **kwargs)
        if request.method == 'OPTIONS':
            # Renamed from `map` to avoid shadowing the builtin.
            method_map = {}

            if GET:
                method_map['GET'] = getattr(views(), GET)
            if POST:
                method_map['POST'] = getattr(views(), POST)
            if PUT:
                method_map['PUT'] = getattr(views(), PUT)
            if DELETE:
                method_map['DELETE'] = getattr(views(), DELETE)
            # Fix: include PATCH in the OPTIONS map for parity with dispatch.
            if PATCH:
                method_map['PATCH'] = getattr(views(), PATCH)

            return views().options(request, method_map, **kwargs)

    def urlify(routes):
        """
        Transform routes into urlpatterns.

        Arguments:
        routes -- A list of routes.
        """
        urls = []

        # Route regular expressions and names may be lambdas; expand them.
        for i, route in enumerate(routes):
            if callable(route.regex):
                routes[i].regex = route.regex(prefix)
            else:
                routes[i].regex = '^%s' % prefix + (route.regex[1:] if route.regex[0] == '^' else route.regex)

            if callable(route.name):
                routes[i].name = route.name(views)

        for route in list(routes):

            # Collect this route and its siblings (i.e. routes that share
            # same regular expression) in a dictionary of keys that describe
            # HTTP methods and values that describe the corresponding
            # view function.
            #
            # Example:
            #
            # {
            #     'GET': 'index',
            #     'POST': 'create'
            # }
            kwargs = {}
            for sibling in list(routes):
                if sibling.regex == route.regex:
                    kwargs[sibling.method] = sibling.view
                    routes.remove(sibling)

            urls.append(
                url(
                    regex = route.regex,
                    view = dispatch,
                    kwargs = kwargs,
                    name = route.name
                )
            )

        return urls

    return urlify(routes)
|
Python
| 0.000006
|
@@ -195,16 +195,8 @@
rce(
-prefix,
view
@@ -200,24 +200,35 @@
iews, routes
+, prefix=''
):%0A %22%22%22%0A
|
aaac39fbf5a38bc95ada280738649ef188425528
|
add imports for instances and sets to pcda
|
disaggregator/PecanStreetDatasetAdapter.py
|
disaggregator/PecanStreetDatasetAdapter.py
|
from ApplianceTrace import ApplianceTrace
import sqlalchemy
import pandas as pd
class PecanStreetDatasetAdapter():
    """
    Adapter that reads Pecan Street energy data from a Postgres database (via
    SQLAlchemy) and converts query results into ApplianceTrace objects.
    """

    # Quoted Postgres schema name for each short schema label.
    schema_names = {'curated': '\"PecanStreet_CuratedSets\"',
                    'raw': '\"PecanStreet_RawData\"',
                    'shared': '\"PecanStreet_SharedData\"'}
    # Timestamp column used by each schema.
    time_columns = {'curated': 'utc_15min',
                    'raw': 'localminute15minute',
                    'shared': 'localminute'}
    # Columns that never hold appliance readings and must be skipped.
    invalid_columns = {'curated': ['id', 'utc_15min'],
                       'raw': ['localminute15minute'],
                       'shared': ['localminute']}

    def __init__(self, db_url):
        '''
        Initialize an adapter using a database url_string.
        Consider the following example:
        db_url="postgresql://user_name:password@host.url:port/db"
        '''
        self.eng = sqlalchemy.create_engine(db_url)
        self.source = "PecanStreet"

    def get_table_names(self, schema):
        '''
        Returns a list of tables in the schema.
        '''
        df = self.get_dataframe('select * from information_schema.tables')
        df = df.groupby(['table_schema', 'table_name'])
        groups = [group for group in df.groups]
        # Fix: schema_names is a class attribute, so it must be accessed via
        # self (the bare name raised NameError).
        table_names = [t for (s, t) in groups if s == self.schema_names[schema]]
        return table_names

    def verify_same_range(self):
        '''
        Check that all data points have the same range (not implemented).
        '''
        pass

    def get_table_metadata(self, schema, table):
        '''
        Returns a tuple where the first element is a list of data ids for this
        schema.table and the second element is a list of the appliances
        included in this schema.table
        '''
        q = 'select distinct dataid from {}.{}'.format(self.schema_names[schema], table)
        result = self.eng.execute(q)
        ids = result.fetchall()
        # Sample one dataid's row to discover the column (appliance) names.
        q = 'select * from {}.{} where dataid={}'.format(self.schema_names[schema], table, ids[0][0])
        result = self.eng.execute(q)
        apps = result.keys()
        ids = [a[0] for a in ids]
        apps = [str(a) for a in apps]
        return ids, apps

    def get_unique_dataids(self, schema, month, year, group=None):
        '''
        Returns a list of dataids for a specific schema ("curated","shared", or
        "raw"), month (int), year (int), and group (int).
        '''
        if schema == "curated":
            schema_name = self.schema_names[schema]
            query = 'select distinct dataid from {0}.group{1}_disaggregated_{2}_{3:02d}'.format(schema_name, group, year, month)
            df = self.get_dataframe(query)
            return list(df["dataid"])
        elif schema == "shared":
            raise NotImplementedError
        elif schema == "raw":
            raise NotImplementedError
        else:
            raise SchemaError(schema)

    def get_month_traces(self, schema, year, month, dataid, group=None, sampling_rate="15T"):
        '''
        Returns month-long traces for the specified month and sampling rate.
        Specify sampling rate using pd offset aliases (Ex. 15 mins -> "15T")
        '''
        if schema == "curated":  # Lowest possible sampling rate is 15T
            # load dataframe and fill with zeros
            schema_name = self.schema_names[schema]
            query = 'select * from {0}.group{1}_disaggregated_{2}_{3:02d} where dataid={4}'.format(schema_name, group, year, month, dataid)
            df = self.get_dataframe(query).fillna(0)
            # column name for a trace series DatetimeIndex should be "time"
            df.rename(columns={'utc_15min': 'time'}, inplace=True)
            df.index = df['time'].apply(pd.to_datetime)
            # drop unneeded columns
            df = df.drop(['id', 'dataid', 'time'], axis=1)
            # resample if necessary
            if not (sampling_rate == '15T' or sampling_rate == '15Min'):
                # Fix: the original referenced an undefined name `dataframe`.
                how = {col: 'sum' for col in df.columns}
                df = df.resample(sampling_rate, how=how)
        elif schema == "shared":
            # NOTE(review): this branch leaves `df` undefined, so the loop
            # below would raise NameError — confirm "shared" support status.
            pass
        elif schema == "raw":
            raise NotImplementedError
        else:
            raise SchemaError(schema)
        # make traces for each column
        traces = []
        for column, series in df.iteritems():
            traces.append(ApplianceTrace(series, self.source))
        return traces

    def time_align(self):
        '''Checks that for all traces in a home the total time lengths are the same'''
        # Fix: added the missing `self` parameter (this is an instance method).
        pass

    def clean_dataframe(self, df, schema, drop_cols):
        '''
        Cleans a dataframe queried directly from the database.

        Returns (df, (start_time, end_time, step_size)) where df is indexed by
        timestamp with bookkeeping columns (and drop_cols) removed.
        '''
        # change the time column name.
        # Fix: class attribute via self, and corrected the `time_colums` typo.
        df = df.rename(columns={self.time_columns[schema]: 'time'})
        # use a DatetimeIndex
        df['time'] = pd.to_datetime(df['time'], format='%d/%m/%Y %H:%M:%S')
        df.set_index('time', inplace=True)
        # get some info about times.  Fix: after set_index the 'time' column no
        # longer exists, so read the timestamps from the index instead.
        start_time = df.index[0]
        end_time = df.index[-1]
        step_size = df.index[1] - start_time  # will error out if we only have one time point
        times = (start_time, end_time, step_size)
        # drop unnecessary columns
        df = df.drop(['dataid'], axis=1)
        if schema == 'curated':
            df = df.drop(['id'], axis=1)
        if len(drop_cols) != 0:
            df = df.drop(drop_cols, axis=1)
        return df, times

    def check_sample_rate(self, schema, sampling_rate):
        # TODO: read the true rate from the data directly, not from this table.
        accepted_rates = {'curated': '15T', 'raw': '15', 'shared': '1T'}

    def get_month_traces_per_dataid(self, schema, table, dataid):
        # TODO change this name
        if schema not in ['curated', 'raw', 'shared']:
            raise SchemaError(schema)
        schema_name = self.schema_names[schema]
        query = 'select * from {0}.{1} where dataid={2}'.format(schema_name, table, dataid)
        # TODO NEED TO CHANGE IDS
        # TODO error checking that query worked
        df = self.get_dataframe(query).fillna(0)
        df, times = self.clean_dataframe(df, schema, [])
        traces = []
        for col in df.columns:
            # Fix: invalid_columns is a class attribute (bare name was NameError).
            if not col in self.invalid_columns[schema]:
                meta = {'source': self.source, 'schema': schema, 'table': table, 'dataid': dataid, 'start_time': times[0], 'end_time': times[1], 'step_size': times[2]}
                traces.append(ApplianceTrace(df[col], meta))
        return traces

    def get_single_app_trace_need_house_id(self, house_df, app):
        '''by house is fastest also have get all apps below'''
        pass

    def get_app_traces_all(self, schema, table, app):
        schema_name = self.schema_names[schema]
        query = 'select {2} from {0}.{1}'.format(schema_name, table, app)
        df = self.get_dataframe(query)
        # TODO - does this need to be cleaned differently?

    def get_dataframe(self, query):
        '''
        Returns a Pandas dataframe with the query results
        '''
        eng_object = self.eng.execute(query)
        df = pd.DataFrame.from_records(eng_object.fetchall())
        df.columns = eng_object.keys()
        return df
class SchemaError(Exception):
    """Raised when a requested schema is unsupported or does not exist.

    Attributes:
        schema -- the offending schema name
    """

    def __init__(self, schema):
        # Remember the bad schema so __str__ can report it.
        self.schema = schema

    def __str__(self):
        return "Schema %s not supported or nonexistent." % self.schema
|
Python
| 0.000005
|
@@ -34,16 +34,102 @@
nceTrace
+%0Afrom ApplianceInstance import ApplianceInstance%0Afrom ApplianceSet import ApplianceSet
%0A%0Aimport
|
ce51d84dc2b3e78e8c1e78e8d00528dcb241a480
|
update get_meta_table to get_table_metadata
|
disaggregator/PecanStreetDatasetAdapter.py
|
disaggregator/PecanStreetDatasetAdapter.py
|
from ApplianceTrace import ApplianceTrace
import sqlalchemy
import pandas as pd
class PecanStreetDatasetAdapter():
    """
    Adapter that reads Pecan Street energy data from a Postgres database (via
    SQLAlchemy) and converts query results into ApplianceTrace objects.
    """

    # Quoted Postgres schema name for each short schema label.
    schema_names = {'curated': '\"PecanStreet_CuratedSets\"',
                    'raw': '\"PecanStreet_RawData\"',
                    'shared': '\"PecanStreet_SharedData\"'}
    # Timestamp column used by each schema.
    time_columns = {'curated': 'utc_15min',
                    'raw': 'localminute15minute',
                    'shared': 'localminute'}
    # Columns that never hold appliance readings.
    invalid_columns = {'curated': ['id', 'utc_15min'],
                       'raw': ['localminute15minute'],
                       'shared': ['localminute']}

    def __init__(self,db_url):
        '''
        Initialize an adapter using a database url_string.
        Consider the following example:
        db_url="postgresql://user_name:password@host.url:port/db"
        '''
        self.eng = sqlalchemy.create_engine(db_url)
        self.source = "PecanStreet"

    def get_table_names(self,schema):
        '''
        Returns a list of tables in the schema.
        '''
        df = self.get_dataframe('select * from information_schema.tables')
        df = df.groupby(['table_schema','table_name'])
        groups = [group for group in df.groups]
        # NOTE(review): schema_names is a class attribute; the bare name here
        # raises NameError — presumably self.schema_names was intended.
        table_names = [t for (s,t) in groups if s == schema_names[schema]]
        return table_names

    def verify_same_range(self):
        '''
        Check that all data points have the same range (not implemented).
        '''
        pass

    def get_meta_table(self,schema,table): # TODO change this func name to "get_table_metadata"
        '''
        Returns a tuple where the first element is a list of data ids for this
        schema.table and the second element is a list of the appliances
        included in this schema.table
        '''
        # NOTE(review): bare schema_names references below — see get_table_names.
        q = 'select distinct dataid from {}.{}'.format(schema_names[schema],table)
        result = self.eng.execute(q)
        ids = result.fetchall()
        # Sample one dataid's row to discover the column (appliance) names.
        q = 'select * from {}.{} where dataid={}'.format(schema_names[schema],table,ids[0][0])
        result = self.eng.execute(q)
        apps = result.keys()
        ids= [a[0] for a in ids]
        apps = [str(a) for a in apps ]
        return [ids,apps]

    def get_unique_dataids(self,schema,month,year,group=None):
        '''
        Returns a list of dataids for a specifc schema ("curated","shared", or
        "raw"), month (int), year (int), and group (int).
        '''
        if schema == "curated":
            schema_name = schema_names[schema]
            query = 'select distinct dataid from {0}.group{1}_disaggregated_{2}_{3:02d}'.format(schema_name,group,year,month)
            df = self.get_dataframe(query)
            return list(df["dataid"])
        elif schema == "shared":
            raise NotImplementedError
        elif schema == "raw":
            raise NotImplementedError
        else:
            raise SchemaError(schema)

    def get_month_traces(self,schema,year,month,dataid,group=None,sampling_rate="15T"):
        '''
        Returns a month-long traces for the specified month and sampling rate. Specify
        sampling rate using pd offset aliases (Ex. 15 mins -> "15T")
        '''
        if schema == "curated": # Lowest possible sampling rate is 15T
            # load dataframe and fill with zeros
            schema_name = schema_names[schema]
            query = 'select * from {0}.group{1}_disaggregated_{2}_{3:02d} where dataid={4}'.format(schema_name,group,year,month,dataid)
            df = self.get_dataframe(query).fillna(0)
            # column name for a trace series DatetimeIndex should be "time"
            df.rename(columns={'utc_15min': 'time'}, inplace=True)
            df.index = df['time'].apply(pd.to_datetime)
            # drop unneded columns
            df = df.drop(['id','dataid','time'], axis=1)
            # resample if necessary
            if not (sampling_rate == '15T' or sampling_rate == '15Min'):
                # NOTE(review): `dataframe` is undefined here — likely `df`.
                how = {col:'sum' for col in dataframe.columns}
                df = df.resample(sampling_rate, how=how)
        elif schema == "shared":
            # NOTE(review): leaves `df` undefined; the loop below would raise.
            pass
        elif schema == "raw":
            raise NotImplementedError
        else:
            raise SchemaError(schema)
        # make traces for each column
        traces = []
        for column, series in df.iteritems():
            traces.append(ApplianceTrace(series,self.source))
        return traces

    def time_align():
        '''Checks that for all traces in a home the total time lengths are the same'''
        # NOTE(review): missing `self` parameter for an instance method.
        pass

    def clean_dataframe(self,df,schema,drop_cols): # TODO update this to use "curated" "shared" or "raw" instead of full frame name
        '''
        Cleans a dataframe queried directly from the database.
        '''
        # change the time column name
        # NOTE(review): `time_colums` is both a typo for the class attribute
        # `time_columns` and an unqualified (bare) reference — NameError.
        df = df.rename(columns={time_colums[schema]: 'time'})
        # use a DatetimeIndex
        df['time'] = pd.to_datetime(df['time'], format='%d/%m/%Y %H:%M:%S')
        df.set_index('time', inplace=True)
        # get some info about times
        # NOTE(review): after set_index the 'time' column no longer exists, so
        # these lookups raise KeyError — the timestamps live on df.index now.
        start_time = df['time'][0]
        end_time = df['time'][-1]
        step_size = df['time'][1]-start_time # will error out if we only have one time point
        times = (start_time, end_time, step_size)
        # drop unnecessary columns
        df = df.drop(['dataid'], axis=1)
        if schema == 'curated':
            df = df.drop(['id'], axis=1)
        if len(drop_cols)!=0:
            df= df.drop(drop_cols,axis=1)
        return df, times

    def check_sample_rate(self,schema,sampling_rate):
        ##get from the data directly not like this
        accepted_rates = {'curated':'15T' ,'raw':'15' ,'shared':'1T' }

    def get_month_traces_per_dataid(self,schema,table,dataid):
        # TODO change this name
        if schema not in ['curated','raw','shared']:
            raise SchemaError(schema)
        # NOTE(review): bare schema_names reference — see get_table_names.
        schema_name = schema_names[schema]
        query = 'select * from {0}.{1} where dataid={2}'.format(schema_name, table, dataid)
        # TODO NEED TO CHANGE IDS
        # TODO error checking that query worked
        df = self.get_dataframe(query).fillna(0)
        df,times = self.clean_dataframe(df, schema,[])
        traces = []
        for col in df.columns:
            # NOTE(review): bare invalid_columns reference — class attribute.
            if not col in invalid_columns[schema]:
                meta={'source':self.source,'schema':schema,'table':table ,'dataid':dataid, 'start_time': times[0],'end_time':times[1], 'step_size':times[2] }
                traces.append(ApplianceTrace(df[col],meta))
        return traces

    def get_single_app_trace_need_house_id(self,house_df, app):
        '''by house is fastest also have get all apps below'''
        pass

    def get_app_traces_all(self,schema,table,app):
        # NOTE(review): bare schema_names reference; also returns nothing yet.
        schema_name = schema_names[schema]
        query= 'select {2} from {0}.{1}'.format(schema_name,table,app)
        df=self.get_dataframe(query)
        # TODO - does this need to be cleaned differently?

    def get_dataframe(self,query):
        '''
        Returns a Pandas dataframe with the query results
        '''
        eng_object = self.eng.execute(query)
        df = pd.DataFrame.from_records(eng_object.fetchall())
        df.columns = eng_object.keys()
        return df
class SchemaError(Exception):
    """Raised when a requested schema is unsupported or does not exist.

    Attributes:
        schema -- the offending schema name
    """

    def __init__(self, schema):
        # Keep the bad schema around for error reporting.
        self.schema = schema

    def __str__(self):
        return "Schema %s not supported or nonexistent." % self.schema
|
Python
| 0.000005
|
@@ -1464,18 +1464,22 @@
get_
-meta_
table
+_metadata
(sel
@@ -1498,61 +1498,8 @@
le):
- # TODO change this func name to %22get_table_metadata%22
%0A
@@ -2112,18 +2112,17 @@
urn
-%5B
ids,
+
apps
-%5D
%0A%0A
|
de91561ab7e16a125dd403e395f081c78323a47c
|
Fix typobug
|
spectrocrunch/common/listtools.py
|
spectrocrunch/common/listtools.py
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017 European Synchrotron Radiation Facility, Grenoble, France
#
# Principal author: Wout De Nolf (wout.de_nolf@esrf.eu)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import collections
import operator
import itertools
from . import instance
def flatten(l):
"""Flatten list
Args:
l(list):
Returns:
list
"""
for el in l:
if instance.isiterable(el) and not instance.isstring(el):
for sub in flatten(el):
yield sub
else:
yield el
def listadvanced_bool(lst,barr,bnot=False):
"""Advanced list indexing: boolean array
Args:
lst(list):
barr(array or bool): array of booleans
Returns:
list
"""
if bnot:
barr = itertools.imap(operate.not_,barr)
return list(itertools.compress(lst,barr))
def listadvanced_int(lst,ind):
"""Advanced list indexing: integer array
Args:
lst(list):
ind(array):
Returns:
list
"""
return [lst[i] for i in ind]
def listadvanced(lst,ind):
"""Advanced list indexing: integer or bool array
Args:
lst(list):
ind(array):
Returns:
list
"""
if instance.isboollist(ind):
return listadvanced_bool(lst,ind)
else:
return listadvanced_int(lst,ind)
def where(lst,func):
"""Indices are particular elements
Args:
lst(list):
func(callable): one argument
Returns:
list
"""
return [i for i,l in enumerate(lst) if func(l)]
def sort2lists(list1, list2):
"""Sort list1 and list2 based on list1
Args:
list1(list):
list2(list):
Returns:
list,list
"""
return tuple(list(t) for t in itertools.izip( *sorted(itertools.izip(list1, list2),key=operator.itemgetter(0)) ))
def weightedsum(labels, counts):
"""
Args:
list1(list):
list2(list):
Returns:
list,list
"""
c = collections.Counter()
for l,cnt in itertools.izip(labels,counts):
c.update({l:cnt})
return c.keys(),c.values()
def swap(lst,i,j):
if i!=j:
lst[i],lst[j] = lst[j],lst[i]
return lst
def roll(lst,n):
if n!=0:
n = abs(n)
lst = list(itertools.islice(itertools.cycle(lst),n,n+len(lst)))
return lst
def move(lst,i,j):
if i!=j:
lst.insert(j, lst.pop(i))
return lst
def length(x):
try:
return len(x)
except TypeError:
return 1
|
Python
| 0.99804
|
@@ -1825,17 +1825,18 @@
p(operat
-e
+or
.not_,ba
|
9ac47c86103d9a6db52f8feb8081c0b391f85e96
|
Change Aeon config from string to dict
|
speech/data/ingest_librispeech.py
|
speech/data/ingest_librispeech.py
|
#!/usr/bin/env python
# ----------------------------------------------------------------------------
# Copyright 2015-2016 Nervana Systems Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
import numpy as np
import os
import logging
import glob
import fnmatch
import json
from neon.data.aeon_shim import AeonDataLoader
from neon.data.dataloaderadapter import DataLoaderAdapter
from neon.data.dataloader_transformers import TypeCast, Retuple
logging.basicConfig()
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
def write_manifest(output_file, *filenames):
    """Write a tab-separated manifest pairing up the given filename lists.

    The first line is the "@FILE\tFILE" header; each following line joins the
    corresponding entries of the supplied lists with tabs.

    Returns:
        True once the manifest has been written.
    """
    rows = ["@FILE\tFILE\n"]
    rows.extend("\t".join(fields) + "\n" for fields in zip(*filenames))
    with open(output_file, "w") as fid:
        fid.writelines(rows)
    return True
def main(input_directory, transcript_directory, manifest_file):
    """ Finds all .flac files recursively in input_directory, then extracts the
    transcript from the nearby .trans.txt file and stores it in
    transcript_directory. Writes a manifest file referring to each .flac file
    and its paired transcript.

    Arguments:
        input_directory (string): Path to librispeech directory
        transcript_directory (string): Path to directory in which to write
            individual transcript files.
        manifest_file (string): Path to manifest file to output.
    """
    def librispeech_flac_filename(filestr):
        # Map an utterance id to <input>/<field0>/<field1>/<filestr>.flac,
        # where field0/field1 are the first two dash-separated components.
        parts = filestr.split("-")
        return os.path.join(input_directory, parts[0], parts[1],
                            "{}.flac".format(filestr))

    if not os.path.isdir(input_directory):
        raise IOError("Data directory does not exist! {}".format(input_directory))

    if not os.path.exists(transcript_directory):
        os.makedirs(transcript_directory)

    # Transcript files live two directory levels below the input root.
    transcript_files = glob.glob(os.path.join(input_directory, '*/*/*.txt'))
    if len(transcript_files) == 0:
        logger.error("No .txt files were found in {}".format(input_directory))
        return

    logger.info("Beginning audio conversions")
    audio_files = list()
    txt_files = list()
    for ii, tfile in enumerate(transcript_files):
        # transcript file specifies transcript and flac filename for all librispeech files
        logger.info("Converting audio corresponding to transcript "
                    "{} of {}".format(ii, len(transcript_files)))
        with open(tfile, "r") as fid:
            lines = fid.readlines()
        for line in lines:
            # Each line is "<utterance-id> <transcript text>".
            filestr, transcript = line.split(" ", 1)
            try:
                flac_file = librispeech_flac_filename(filestr)
            except IndexError:  # filestr is not the format we are expecting
                print("filestr of unexpected formatting: {}".format(filestr))
                print("error in {}".format(tfile))
                continue
            txt_file = os.path.join(transcript_directory,
                                    "{}.txt".format(filestr))

            # Write out short transcript file
            with open(txt_file, "w") as fid:
                fid.write(transcript.strip())

            # Add to output lists to be written to manifest
            audio_files.append(flac_file)
            txt_files.append(txt_file)

    logger.info("Writing manifest file to {}".format(manifest_file))
    return write_manifest(manifest_file, audio_files, txt_files)
def common_config(manifest_file, batch_size, alphabet, nbands, max_tscrpt_len):
audio_config = {"type": "audio",
"sample_freq_hz": 16000,
"max_duration": "30 seconds",
"frame_length": "25 milliseconds",
"frame_stride": "10 milliseconds",
"feature_type": "mfsc",
"emit_length": True,
"num_filters": nbands}
transcription_config = {"type": "char_map",
"alphabet": alphabet,
"emit_length": True,
"max_length": max_tscrpt_len}
return {'manifest_filename': manifest_file,
'manifest_root': os.path.dirname(manifest_file),
'batch_size': batch_size,
'block_size': batch_size,
'etl': [audio_config, transcription_config]}
def wrap_dataloader(dl):
""" Data is loaded from Aeon as a 4-tuple. We need to cast the audio
(index 0) from int8 to float32 and repack the data into (audio, 3-tuple).
"""
dl = DataLoaderAdapter(dl)
dl = TypeCast(dl, index=0, dtype=np.float32)
dl = Retuple(dl, data=(0,), target=(2, 3, 1))
return dl
def make_loader(manifest_file, alphabet, nbands, max_tscrpt_len, backend_obj):
aeon_config = common_config(manifest_file, backend_obj.bsz, alphabet, nbands, max_tscrpt_len)
return wrap_dataloader(AeonDataLoader(json.dumps(aeon_config)))
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("input_directory",
help="Directory containing librispeech flac files")
parser.add_argument("transcript_directory",
help="Directory to write transcript .txt files")
parser.add_argument("manifest_file",
help="Output file that specifies the filename for each"
" output audio and transcript")
args = parser.parse_args()
main(args.input_directory,
args.transcript_directory,
args.manifest_file)
|
Python
| 0.000003
|
@@ -5543,19 +5543,8 @@
der(
-json.dumps(
aeon
@@ -5552,17 +5552,16 @@
config))
-)
%0A%0Aif __n
|
d85a27db790bb4bc1926fc78bcf0f2d54c876e48
|
bump version
|
pvmismatch/__init__.py
|
pvmismatch/__init__.py
|
# -*- coding: utf-8 -*-
"""
This is the PVMismatch Package. It contains :mod:`~pvmismatch.pvmismatch_lib`
and :mod:`~pvmismatch.pvmismatch_tk`.
:mod:`~pvmismatch.pvmismatch_lib`
=================================
This package contains the basic library modules, methods, classes and
attributes to model PV system mismatch.
.. note::
The main library classes and modules are exposed through this package for
convenience.
For example::
>>> from pvmismatch import PVcell # imports the PVcell class
>>> # import pvconstants, pvcell, pvmodule, pvstring and pvsystem
>>> from pvmismatch import *
:mod:`~pvmismatch.pvmismatch_tk`
================================
This package contains an application that can be run using
:mod:`pvmismatch.pv_tk`.
"""
# import pvmismatch_lib modules so to match old API
import pvmismatch.pvmismatch_lib.pvconstants as pvconstants
import pvmismatch.pvmismatch_lib.pvcell as pvcell
import pvmismatch.pvmismatch_lib.pvmodule as pvmodule
import pvmismatch.pvmismatch_lib.pvstring as pvstring
import pvmismatch.pvmismatch_lib.pvsystem as pvsystem
import pvmismatch.pvmismatch_lib.pvexceptions as pvexceptions
# expose constructors to package's top level
PVconstants = pvconstants.PVconstants
PVcell = pvcell.PVcell
PVmodule = pvmodule.PVmodule
PVstring = pvstring.PVstring
PVsystem = pvsystem.PVsystem
__author__ = 'mmikofski'
__version__ = '2.0'
__release__ = 'Himalayan Honey'
__all__ = ['pvconstants', 'pvcell', 'pvmodule', 'pvstring', 'pvsystem']
|
Python
| 0
|
@@ -1400,9 +1400,9 @@
'2.
-0
+1
'%0A__
@@ -1429,12 +1429,10 @@
an H
-oney
+am
'%0A__
|
45a624e18b9b870163a5852bd4d06c59b3bb5ac2
|
Fix base for got on pie binaries (#618)
|
pwndbg/commands/got.py
|
pwndbg/commands/got.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import pwndbg.chain
import pwndbg.commands
import pwndbg.enhance
import pwndbg.file
import pwndbg.which
import pwndbg.wrappers.checksec
import pwndbg.wrappers.readelf
from pwndbg.color import message
parser = argparse.ArgumentParser(description='Show the state of the Global Offset Table')
parser.add_argument('name_filter', help='Filter results by passed name.',
type=str, nargs='?', default='')
@pwndbg.commands.ArgparsedCommand(parser)
@pwndbg.commands.OnlyWhenRunning
def got(name_filter=''):
relro_status = pwndbg.wrappers.checksec.relro_status()
pie_status = pwndbg.wrappers.checksec.pie_status()
jmpslots = list(pwndbg.wrappers.readelf.get_jmpslots())
if not len(jmpslots):
print(message.error("NO JUMP_SLOT entries available in the GOT"))
return
if "PIE enabled" in pie_status:
bin_text_base = pwndbg.vmmap.find(pwndbg.elf.entry()).start
relro_color = message.off
if 'Partial' in relro_status:
relro_color = message.warn
elif 'Full' in relro_status:
relro_color = message.on
print("\nGOT protection: %s | GOT functions: %d\n " % (relro_color(relro_status), len(jmpslots)))
for line in jmpslots:
address, info, rtype, value, name = line.split()[:5]
if name_filter not in name:
continue
address_val = int(address, 16)
if "PIE enabled" in pie_status: # if PIE, address is only the offset from the binary base address
address_val = bin_text_base + address_val
got_address = pwndbg.memory.pvoid(address_val)
print("[0x%x] %s -> %s" % (address_val, message.hint(name), pwndbg.chain.format(got_address)))
|
Python
| 0
|
@@ -1061,29 +1061,24 @@
bin_
-text_
base = pwndb
@@ -1083,44 +1083,25 @@
dbg.
-vmmap.find(pwndbg.elf.entry()).start
+elf.exe().address
%0A%0A
@@ -1693,13 +1693,8 @@
bin_
-text_
base
|
a735f36102b370415b9f49dae4ee3c5ceda30136
|
Update cycling_light.py
|
apps/hue/cycling_light.py
|
apps/hue/cycling_light.py
|
# -*- coding: utf-8 -*-
# Author : jeonghoonkang, https://github.com/jeonghoonkang
import httplib
import time
conn = httplib.HTTPConnection("10.xxx.xxx.xxxx")
hue_uid = "c274b3c285d19cfxxxxxxxxxx"
restcmd = "/api"+hue_uid+"/lights"
str = " "
xhue = [10000,25000,46000,56280]
def shifthue() :
global str
global xhue
xhue.insert(0,xhue[-1])
xhue = xhue[0:4]
print xhue
callurl = restcmd + "/4/state"
try:
conn.request("PUT",callurl ,'{"on":false}')
response = conn.getresponse()
data = response.read()
except:
print "keep goging...."
time.sleep(2)
time.sleep(1)
for num in [3,2,1,4] :
callurl = restcmd + "/%s/state"%(num)
print callurl
huenumber = (xhue[4-num])
try :
conn.request("PUT",callurl ,'{"on":false}')
response = conn.getresponse()
data = response.read()
time.sleep(1)
conn.request("PUT",callurl ,'{"on":true, "sat":254, "bri":254, "hue":%s}'%huenumber)
response = conn.getresponse()
data = response.read()
print data
time.sleep(1)
except:
print "exception conn.getresponse from Hue GW"
time.sleep(2)
if __name__ == "__main__":
# print web()
while True :
shifthue()
time.sleep(5
|
Python
| 0.000001
|
@@ -1,8 +1,9 @@
+%0A
# -*- co
|
9979ef7149739e6523eddc439287807434090cbd
|
Add EDF readers, using fabio.
|
pyXPCS/pims_readers.py
|
pyXPCS/pims_readers.py
|
from pims import FramesSequence
from ..broker import DataBroker
from filestore.api import retrieve
from skimage import img_as_float
class Images(FramesSequence):
def __init__(self, headers, name, flat_field, dead_pixels,
process_func=None, dtype=None, as_grey=False):
"""
Load images from a detector for given Header(s).
Parameters
----------
headers : Header or list of Headers
name : str
alias (data key) of a detector
flat_field : array
dead_pixels : array
process_func: callable, optional
function to be applied to each image
dtype : numpy.dtype or str, optional
data type to cast each image as
as_grey : boolean, optional
False by default
quick-and-dirty way to ensure images are reduced to greyscale
To take more control over how conversion is performed,
use process_func above.
TODO: Add beamstop mask!
Example
-------
>>> header = DataBroker[-1]
>>> images = Images(header, 'my_detector_lightfield')
>>> for image in images:
# do something
"""
self._dtype = dtype
events = DataBroker.fetch_events(headers, fill=False)
self._datum_uids = [event.data[name] for event in events]
self._validate_process_func(process_func)
self._as_grey(as_grey, process_func)
def get_frame(self, i):
img = retrieve(self._datum_uids[i])
if self._dtype is not None and img.dtype != self._dtype:
img = img.astype(self._dtype)
return Frame(self.process_func(img), frame_no=i)
class SubtractedImages(FramesSequence):
def __init__(self, headers, lightfield_name, darkfield_name,
flat_field, dead_pixels,
process_func=None, dtype=None, as_grey=False):
"""
Load images from a detector for given Header(s). Subtract
dark images from each corresponding light image automatically.
Parameters
----------
headers : Header or list of Headers
lightfield_name : str
alias (data key) of lightfield images
darkfield_name : str
alias (data key) of darkfield images
flat_field : array
dead_pixels : array
process_func: callable, optional
function to be applied to each image
dtype : numpy.dtype or str, optional
data type to cast each image as
as_grey : boolean, optional
False by default
quick-and-dirty way to ensure images are reduced to greyscale
To take more control over how conversion is performed,
use process_func above.
Example
-------
>>> header = DataBroker[-1]
>>> images = SubtractedImages(header, 'my_lightfield', 'my_darkfield')
>>> for image in images:
# do something
"""
self.light = Images(
headers, lightfield_name, process_func, dtype, as_grey)
self.dark = Images(
headers, darkfield_name, process_func, dtype, as_grey)
def get_frame(self, i):
# Convert to float to avoid out-of-bounds wrap-around errors,
# as in 10-11 = 255.
return img_as_float(self.light[i]) - img_as_float(self.dark[i])
class XrayImageSequence(ImageSequence):
def __init__(self, filepath, flat_field, dead_pixels,
process_func=None, dtype=None, as_grey=False):
super(XrayImageSequence, self).__init__(
filepath, process_func=process_func, as_grey=as_grey)
def get_frame(self, i):
frame = super(XrayImageSequence, self).get_frame(i)
result = img_as_float(frame) - img_as_float(flat_field)
# TODO implement dead_pixels and possible beam_stop
return result
class XraySubtractedImageSeuqnce(FramesSequence):
def __init__(self, light_filepath, dark_filepath, flat_field, dead_pixels,
process_func=None, dtype=None, as_grey=False):
self.light = XrayImageSequence(light_filepath, flat_field, dead_pixels,
process_func, dtype, as_grey)
self.dark = XrayImageSequence(dark_filepath, flat_field, dead_pixels,
process_func, dtype, as_grey)
def get_frame(self, i):
# Convert to float to avoid out-of-bounds wrap-around errors,
# as in 10-11 = 255.
return img_as_float(self.light[i]) - img_as_float(self.dark[i])
|
Python
| 0
|
@@ -125,16 +125,29 @@
s_float%0A
+import fabio%0A
%0A%0Aclass
@@ -3497,32 +3497,37 @@
path, flat_field
+=None
, dead_pixels,%0A
@@ -3515,32 +3515,37 @@
one, dead_pixels
+=None
,%0A
@@ -3838,18 +3838,73 @@
t(frame)
+%0A if flat_field is not None:%0A result
-
+=
img_as_
@@ -3969,17 +3969,17 @@
possibl
-e
+y
beam_st
@@ -4036,10 +4036,11 @@
geSe
-u
q
+ue
nce(
@@ -4112,32 +4112,37 @@
path, flat_field
+=None
, dead_pixels,%0A
@@ -4130,32 +4130,37 @@
one, dead_pixels
+=None
,%0A
@@ -4684,28 +4684,302 @@
img_as_float(self.dark%5Bi%5D)%0A
+%0Aclass EDFSequence(XrayImageSequence):%0A%0A def imread(self, filename, **kwargs):%0A return fabio.edfimage(filename).data%0A%0Aclass SubtractedEDFSequence(SubtractedXrayImageSequence):%0A%0A def imread(self, filename, **kwargs):%0A return fabio.edfimage(filename).data%0A
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.