| commit (string, 40 chars) | subject (string, 1–3.25k chars) | old_file (string, 4–311 chars) | new_file (string, 4–311 chars) | old_contents (string, 0–26.3k chars) | lang (string, 3 classes) | proba (float64, 0–1) | diff (string, 0–7.82k chars) |
|---|---|---|---|---|---|---|---|
29cebab613c483147fa2c10cf024c05269f64fe4
|
Fix bug in setup_relative_calculation script where setup_dict was erroneously used instead of setup_options
|
scripts/setup_relative_calculation.py
|
scripts/setup_relative_calculation.py
|
import yaml
from perses.dispersed import relative_setup
import numpy as np
import pickle
import progressbar
import os
import sys
import logging
from simtk import unit
logging.basicConfig(level=logging.DEBUG)
if __name__ == "__main__":
try:
yaml_filename = sys.argv[1]
except IndexError as e:
yaml_filename = "/Users/grinawap/perses/examples/cdk2-example/cdk2_setup.yaml"
print("You need to specify the setup yaml file as an argument to the script.")
#raise e
yaml_file = open(yaml_filename, 'r')
setup_options = yaml.load(yaml_file)
yaml_file.close()
trajectory_directory = setup_options['trajectory_directory']
if 'phases' in setup_options:
phases = setup_options['phases']
else:
phases = ['complex', 'solvent']
if not os.path.exists(trajectory_directory):
os.makedirs(trajectory_directory)
setup_dict = relative_setup.run_setup(setup_options)
print("setup complete")
n_equilibration_iterations = setup_dict['n_equilibration_iterations']
trajectory_prefix = setup_options['trajectory_prefix']
#write out topology proposals
np.save(os.path.join(trajectory_directory, trajectory_prefix+"topology_proposals.npy"),
setup_dict['topology_proposals'])
if setup_options['fe_type'] == 'nonequilibrium':
n_cycles = setup_options['n_cycles']
n_iterations_per_cycle = setup_options['n_iterations_per_cycle']
total_iterations = n_cycles*n_iterations_per_cycle
ne_fep = setup_dict['ne_fep']
for phase in phases:
ne_fep_run = ne_fep[phase]
hybrid_factory = ne_fep_run._factory
np.save(os.path.join(trajectory_directory, "%s_%s_hybrid_factory.npy" % (trajectory_prefix, phase)),
hybrid_factory)
print("equilibrating")
ne_fep_run.equilibrate(n_iterations=n_equilibration_iterations)
print("equilibration complete")
bar = progressbar.ProgressBar(redirect_stdout=True, max_value=total_iterations)
bar.update(0)
for i in range(n_cycles):
ne_fep_run.run(n_iterations=n_iterations_per_cycle)
print(i)
# bar.update((i+1)*n_iterations_per_cycle)
print("calculation complete")
df, ddf = ne_fep_run.current_free_energy_estimate
print("The free energy estimate is %f +/- %f" % (df, ddf))
endpoint_file_prefix = os.path.join(trajectory_directory, "%s_%s_endpoint{endpoint_idx}.npy" %
(trajectory_prefix, phase))
endpoint_work_paths = [endpoint_file_prefix.format(endpoint_idx=lambda_state) for lambda_state in [0, 1]]
# try to write out the ne_fep object as a pickle
try:
pickle_outfile = open(os.path.join(trajectory_directory, "%s_%s_ne_fep.pkl" %
(trajectory_prefix, phase)), 'wb')
except Exception as e:
pass
try:
pickle.dump(ne_fep, pickle_outfile)
except Exception as e:
print(e)
print("Unable to save run object as a pickle")
finally:
pickle_outfile.close()
# save the endpoint perturbations
for lambda_state, reduced_potential_difference in ne_fep._reduced_potential_differences.items():
np.save(endpoint_work_paths[lambda_state], np.array(reduced_potential_difference))
else:
np.save(os.path.join(trajectory_directory, trajectory_prefix + "hybrid_factory.npy"),
setup_dict['hybrid_topology_factories'])
hss = setup_dict['hybrid_sams_samplers']
free_energies = dict()
for phase in phases:
hss_run = hss[phase]
hss_run.minimize()
hss_run.equilibrate(n_equilibration_iterations)
hss_run.extend(1000)
free_energies[phase] = hss_run._logZ[-1] - hss_run._logZ[0]
print("Finished phase %s with dG estimated as %.4f kT" % (phase, free_energies[phase]))
print("Total ddG is estimated as %.4f kT" % (free_energies['complex'] - free_energies['solvent']))
|
Python
| 0
|
@@ -965,16 +965,75 @@
mplete%22)
+%0A print('setup_dict keys: %7B%7D'.format(setup_dict.keys()))
%0A%0A n_
@@ -1065,20 +1065,23 @@
= setup_
-dict
+options
%5B'n_equi
@@ -4347,8 +4347,9 @@
vent'%5D))
+%0A
|
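Decoded from the URL-escaped hunks above, the fix adds a debug print of the returned keys and reads the equilibration count from setup_options (the parsed YAML) rather than from setup_dict. A readable reconstruction of the patched region (indentation approximate):

    setup_dict = relative_setup.run_setup(setup_options)
    print("setup complete")
    print('setup_dict keys: {}'.format(setup_dict.keys()))

    n_equilibration_iterations = setup_options['n_equilibration_iterations']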
ed00bc79d18ccaf40b6c5f3e325e8202ed0f2e77
|
Add necessary setting to support feature
|
isort/settings.py
|
isort/settings.py
|
"""isort/settings.py.
Defines how the default settings for isort should be loaded
(First from the default setting dictionary at the top of the file, then overridden by any settings
in ~/.isort.conf if there are any)
Copyright (C) 2013 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import os
from collections import namedtuple
from pies.functools import lru_cache
from pies.overrides import *
try:
import configparser
except ImportError:
import ConfigParser as configparser
MAX_CONFIG_SEARCH_DEPTH = 25 # The number of parent directories isort will look for a config file within
WrapModes = ('GRID', 'VERTICAL', 'HANGING_INDENT', 'VERTICAL_HANGING_INDENT', 'VERTICAL_GRID', 'VERTICAL_GRID_GROUPED')
WrapModes = namedtuple('WrapModes', WrapModes)(*range(len(WrapModes)))
# Note that none of these lists must be complete as they are simply fallbacks for when included auto-detection fails.
default = {'force_to_top': [],
'skip': ['__init__.py', ],
'line_length': 80,
'known_standard_library': ["abc", "anydbm", "argparse", "array", "asynchat", "asyncore", "atexit", "base64",
"BaseHTTPServer", "bisect", "bz2", "calendar", "cgitb", "cmd", "codecs",
"collections", "commands", "compileall", "ConfigParser", "contextlib", "Cookie",
"copy", "cPickle", "cProfile", "cStringIO", "csv", "datetime", "dbhash", "dbm",
"decimal", "difflib", "dircache", "dis", "doctest", "dumbdbm", "EasyDialogs",
"errno", "exceptions", "filecmp", "fileinput", "fnmatch", "fractions",
"functools", "gc", "gdbm", "getopt", "getpass", "gettext", "glob", "grp", "gzip",
"hashlib", "heapq", "hmac", "imaplib", "imp", "inspect", "itertools", "json",
"linecache", "locale", "logging", "mailbox", "math", "mhlib", "mmap",
"multiprocessing", "operator", "optparse", "os", "pdb", "pickle", "pipes",
"pkgutil", "platform", "plistlib", "pprint", "profile", "pstats", "pwd", "pyclbr",
"pydoc", "Queue", "random", "re", "readline", "resource", "rlcompleter",
"robotparser", "sched", "select", "shelve", "shlex", "shutil", "signal",
"SimpleXMLRPCServer", "site", "sitecustomize", "smtpd", "smtplib", "socket",
"SocketServer", "sqlite3", "string", "StringIO", "struct", "subprocess", "sys",
"sysconfig", "tabnanny", "tarfile", "tempfile", "textwrap", "threading", "time",
"timeit", "trace", "traceback", "unittest", "urllib", "urllib2", "urlparse",
"usercustomize", "uuid", "warnings", "weakref", "webbrowser", "whichdb", "xml",
"xmlrpclib", "zipfile", "zipimport", "zlib", 'builtins', '__builtin__'],
'known_third_party': ['google.appengine.api'],
'known_first_party': [],
'multi_line_output': WrapModes.GRID,
'forced_separate': [],
'indent': ' ' * 4,
'length_sort': False,
'add_imports': [],
'remove_imports': [],
'force_single_line': False,
'default_section': 'FIRSTPARTY',
'import_heading_future': '',
'import_heading_stdlib': '',
'import_heading_thirdparty': '',
'import_heading_firstparty': '',
'import_heading_localfolder': '',
'balanced_wrapping': False,
'order_by_type': False,
'atomic': False}
@lru_cache()
def from_path(path):
computed_settings = default.copy()
_update_settings_with_config(path, '.editorconfig', '~/.editorconfig', ('*', '*.py', '**.py'), computed_settings)
_update_settings_with_config(path, '.isort.cfg', '~/.isort.cfg', ('settings', ), computed_settings)
_update_settings_with_config(path, 'setup.cfg', None, ('isort', ), computed_settings)
return computed_settings
def _update_settings_with_config(path, name, default, sections, computed_settings):
editor_config_file = default and os.path.expanduser(default)
tries = 0
current_directory = path
while current_directory and tries < MAX_CONFIG_SEARCH_DEPTH:
potential_path = os.path.join(current_directory, native_str(name))
if os.path.exists(potential_path):
editor_config_file = potential_path
break
current_directory = os.path.split(current_directory)[0]
tries += 1
if editor_config_file and os.path.exists(editor_config_file):
computed_settings.update(_read_config_file(editor_config_file, sections).copy())
@lru_cache()
def _read_config_file(file_path, sections):
computed_settings = {}
with open(file_path) as config_file:
if file_path.endswith(".editorconfig"):
line = "\n"
last_position = config_file.tell()
while line:
line = config_file.readline()
if "[" in line:
config_file.seek(last_position)
break
last_position = config_file.tell()
config = configparser.SafeConfigParser()
config.readfp(config_file)
settings = dict()
for section in sections:
if config.has_section(section):
settings.update(dict(config.items(section)))
if file_path.endswith(".editorconfig"):
indent_style = settings.pop('indent_style', "").strip()
indent_size = settings.pop('indent_size', "").strip()
if indent_style == "space":
computed_settings['indent'] = " " * (indent_size and int(indent_size) or 4)
elif indent_style == "tab":
computed_settings['indent'] = "\t" * (indent_size and int(indent_size) or 1)
max_line_length = settings.pop('max_line_length', "").strip()
if max_line_length:
computed_settings['line_length'] = int(max_line_length)
for key, value in settings.items():
existing_value_type = type(default.get(key, ''))
if existing_value_type in (list, tuple):
computed_settings[key.lower()] = value.split(",")
else:
computed_settings[key.lower()] = existing_value_type(value)
return computed_settings
|
Python
| 0
|
@@ -4974,16 +4974,54 @@
': False
+,%0A 'lines_after_imports': -1
%7D%0A%0A%0A@lru
|
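Decoded, the hunk appends a single key to the default settings dictionary (reconstruction, alignment approximate):

               'order_by_type': False,
               'atomic': False,
               'lines_after_imports': -1}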
56f24e52f961da414f0610e6bd815812b4a14ef6
|
Update utils.py
|
classifier/utils.py
|
classifier/utils.py
|
# coding=utf-8
# Copyright 2019 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Utility functions for LaserTagger."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
from typing import Iterator, Mapping, Sequence, Text, Tuple
import tensorflow as tf
def get_token_list(text):
"""Returns a list of tokens.
This function expects that the tokens in the text are separated by space
character(s). Example: "ca n't , touch". This is the case at least for the
public DiscoFuse and WikiSplit datasets.
Args:
text: String to be split into tokens.
"""
return text.split()
def yield_sources_and_targets_meaning(input_file):
"""Reads and yields source lists and targets from the input file.
Args:
input_file: Path to the input file.
Yields:
Tuple with (list of source texts, target text).
"""
with tf.io.gfile.GFile(input_file) as f:
for line in f:
source, summary, score = line.rstrip('\n').split('\t')
yield [source], summary, score
def yield_sources_and_targets_grammar(input_file):
"""Reads and yields source lists and targets from the input file.
Args:
input_file: Path to the input file.
Yields:
Tuple with (list of source texts, target text).
"""
with tf.io.gfile.GFile(input_file) as f:
for line in f:
source, score = line.rstrip('\n').split('\t')
yield [source], None, score
|
Python
| 0.000001
|
@@ -1497,32 +1497,86 @@
for line in f:%0A
+ if len(line.rstrip('%5Cn').split('%5Ct')) == 3:%0A
source, su
@@ -1612,32 +1612,36 @@
n').split('%5Ct')%0A
+
yield %5Bsou
|
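Decoded, the hunks guard the three-way unpack in yield_sources_and_targets_meaning so lines without exactly three tab-separated fields are skipped (reconstruction):

    with tf.io.gfile.GFile(input_file) as f:
      for line in f:
        if len(line.rstrip('\n').split('\t')) == 3:
          source, summary, score = line.rstrip('\n').split('\t')
          yield [source], summary, score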
a0e1d0c557e37d5a1a76e349c3a75a3ba32e102c
|
Add fixed width for textbox
|
src/aerial_autonomy/aerial_autonomy_gui.py
|
src/aerial_autonomy/aerial_autonomy_gui.py
|
#!/usr/bin/env python
"""
@package aerial_autonomy GUI source code
to provide user the ability to
trigger events from GUI to state machine
@package aerial_autonomy.aerial_autonomy_gui
Generate a GUI to trigger events for state machine
@author: gowtham
"""
import argparse
import sip
from qt_gui.plugin import Plugin
from python_qt_binding.QtWidgets import (QLabel, QVBoxLayout,
QGridLayout, QWidget,
QTextEdit, QPushButton,
QSlider)
from python_qt_binding.QtCore import *
from ros_event_trigger import RosEventTrigger
from argparse import ArgumentParser
from functools import partial
# %%
class EventTransmissionGUI(Plugin):
"""
GUI to send events from User to logic state machine
"""
def __init__(self, context):
"""
Create Qt GUI using the event file
"""
super(EventTransmissionGUI, self).__init__(context)
self.setObjectName('ManualEventTriggerGUI')
parser = ArgumentParser()
# Add argument(s) to the parser.
args = self._parse_args(context.argv())
# Create Event trigger
self.event_trigger = RosEventTrigger(args.event_file)
# Parent container to store buttons, textboxes
self._container = QWidget()
# Set title of the parent container window
self._container.setWindowTitle(self.event_trigger.event_manager_name)
# layout for the parent container
self._layout = QVBoxLayout()
self._container.setLayout(self._layout)
# Create Textboxes and add to Layout
self._layout.addWidget(QLabel('State Machine State'))
# Textbox to show system status
self.system_status_textbox = QTextEdit()
self.system_status_textbox.setReadOnly(True)
self._layout.addWidget(self.system_status_textbox)
# Create height slider
self._layout.addWidget(QLabel('Pose Command Height (m)'))
# Height slider to adjust z coordinate for pose command
# \todo Matt: Load slider settings from param file
self.height_slider = QSlider(Qt.Horizontal)
self.height_slider.setMinimum(1.)
self.height_slider.setMaximum(20)
self.height_slider.setValue(2)
self.height_slider.setTickPosition(QSlider.TicksBelow)
self.height_slider.setTickInterval(1)
self._layout.addWidget(self.height_slider)
# Add button for triggering pose command
# Container for pose event related objects: slider etc
# \todo Matt: Reset slider value based on current quad height
self.pose_command_container = QWidget()
# Pose command layout
self.pose_command_layout = QGridLayout()
self.pose_command_container.setLayout(self.pose_command_layout)
# x pose label to display position command from rviz to user
self.pose_x = QLabel('x: -')
# y pose label to display position command from rviz to user
self.pose_y = QLabel('y: -')
# z pose label to display position command from rviz to user
self.pose_z = QLabel("z: {0:.2f}".format(self.height_slider.value()))
self.height_slider.valueChanged.connect(self.updateHeight)
self.pose_command_layout.addWidget(self.pose_x, 0, 0)
self.pose_command_layout.addWidget(self.pose_y, 0, 1)
self.pose_command_layout.addWidget(self.pose_z, 0, 2)
# Button to send the pose command to state machine as poseyaw event
self.send_pose_command_button = QPushButton("Send Pose Command")
self.send_pose_command_button.clicked.connect(
self.poseCommandButtonCallback)
self.pose_command_layout.addWidget(self.send_pose_command_button, 0, 3)
self._layout.addWidget(self.pose_command_container)
# Pose command container to store pose from Rviz and send to state
# machine
self.pose_command = None
# Define and connect buttons
self._layout.addWidget(QLabel('Event Triggers'))
# Container to store event triggering buttons
self.button_container = QWidget()
# List of push buttons to trigger events
self.push_buttons = list()
# Layout for the push buttons
self.button_layout = QGridLayout()
self.button_container.setLayout(self.button_layout)
button_index = 0
for event_name in self.event_trigger.event_names_list:
self.push_buttons.append(QPushButton(event_name))
partial_fun = partial(self.event_trigger.triggerEvent,
event_name=event_name)
self.push_buttons[-1].clicked.connect(partial_fun)
row, col = self.get_row_col(button_index, args.grid_cols)
self.button_layout.addWidget(self.push_buttons[-1], row, col)
button_index += 1
self._layout.addWidget(self.button_container)
context.add_widget(self._container)
# Add textboxes to update hooks from eventTrigger class
# Define Partial callbacks
systemStatusCallback = partial(
self.updateStatus, text_box=self.system_status_textbox)
# Connect Event Triggers
self.event_trigger.status_signal.connect(
systemStatusCallback)
self.event_trigger.pose_command_signal.connect(
self.poseCommandCallback)
def _parse_args(self, argv):
"""
Parse extra arguments when plugin is deployed in standalone mode
"""
parser = argparse.ArgumentParser(
prog='aerial_autonomy', add_help=False)
EventTransmissionGUI.add_arguments(parser)
return parser.parse_args(argv)
@staticmethod
def add_arguments(parser):
"""
Notify rqt_gui that this plugin can parse these extra arguments
"""
group = parser.add_argument_group(
'Options for aerial autonomy gui plugin')
group.add_argument("-e", "--event_file", type=str,
default='', help="Event file")
group.add_argument("-c", "--grid_cols", type=int,
default=3, help="Number of columns in grid")
def get_row_col(self, button_index, ncols):
"""
Automatically find the row and col to add the button
to in a grid based on index of the button
"""
col_index = button_index % ncols
row_index = int((button_index - col_index) / ncols)
return(row_index, col_index)
def poseCommandCallback(self, pose):
"""
Saves pose command and updates command display
"""
self.pose_command = pose
self.pose_x.setText("x: {0:.2f}".format(
self.pose_command.pose.position.x))
self.pose_y.setText("y: {0:.2f}".format(
self.pose_command.pose.position.y))
def poseCommandButtonCallback(self):
"""
Publishes stored pose command after setting height from slider
"""
if self.pose_command:
self.pose_command.pose.position.z = self.height_slider.value()
self.event_trigger.triggerPoseCommand(self.pose_command)
# Reset pose command to avoid accidental triggering
self.pose_command = None
self.pose_x.setText('x: -')
self.pose_y.setText('y: -')
else:
print "No pose command to trigger"
def updateHeight(self):
"""
Updates height label based on slider value
"""
self.pose_z.setText("z: {0:.2f}".format(self.height_slider.value()))
def updateStatus(self, status, text_box):
"""
Generic placeholder function to update text box
"""
if not sip.isdeleted(text_box):
text_box.setHtml(status)
doc_size = text_box.document().size()
text_box.setFixedHeight(doc_size.height() + 10)
|
Python
| 0.000001
|
@@ -7934,8 +7934,63 @@
) + 10)%0A
+ text_box.setFixedWidth(doc_size.width()+6)%0A
|
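Decoded, the hunk appends one line to updateStatus so the textbox width is fixed alongside its height (reconstruction):

        if not sip.isdeleted(text_box):
            text_box.setHtml(status)
            doc_size = text_box.document().size()
            text_box.setFixedHeight(doc_size.height() + 10)
            text_box.setFixedWidth(doc_size.width() + 6)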
c28112624b3c735c610f756a6bff1497e9516c64
|
Revise naive alg to more intuitive one: separate checking index and value
|
alg_find_peak_1D.py
|
alg_find_peak_1D.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def find_peak_naive(arr):
"""Find peak by naive iteration.
Time complexity: O(n).
"""
for i in range(len(arr)):
if i == 0 and arr[i] > arr[i + 1]:
return arr[i]
elif i == (len(arr) - 1) and arr[i] > arr[i - 1]:
return arr[i]
elif (0 < i < (len(arr) - 1) and
arr[i] > arr[i - 1] and arr[i] > arr[i + 1]):
return arr[i]
else:
pass
def find_peak(arr):
"""Find peak by divide-end-conquer algorithm.
Time complexity: O(logn).
"""
if len(arr) == 1:
return arr[0]
else:
mid = len(arr) // 2
if arr[mid] < arr[mid - 1]:
return find_peak(arr[:mid-1])
elif arr[mid] < arr[mid + 1]:
return find_peak(arr[mid+1:])
else:
return arr[mid]
def main():
import time
# Array with peak 4.
arr = [0, 1, 4, 3, 2]
# Find peak by naive version.
time_start = time.time()
peak = find_peak_naive(arr)
time_run = time.time() - time_start
print('Peak: {}'.format(peak))
print('Time for find_peak_naive(): {}'.format(time_run))
# Find peak by divide-and-conquer algorithm.
time_start = time.time()
peak = find_peak(arr)
time_run = time.time() - time_start
print('Peak: {}'.format(peak))
print('Time for find_peak(): {}'.format(time_run))
if __name__ == '__main__':
main()
|
Python
| 0.000016
|
@@ -250,20 +250,32 @@
f i == 0
- and
+:%0A if
arr%5Bi%5D
@@ -284,24 +284,28 @@
arr%5Bi + 1%5D:%0A
+
@@ -350,20 +350,32 @@
rr) - 1)
- and
+:%0A if
arr%5Bi%5D
@@ -384,24 +384,28 @@
arr%5Bi - 1%5D:%0A
+
@@ -432,39 +432,11 @@
el
-if (0 %3C i %3C (len(arr) - 1) and
+se:
%0A
@@ -444,17 +444,18 @@
-
+if
arr%5Bi%5D
@@ -490,17 +490,16 @@
r%5Bi + 1%5D
-)
:%0A
@@ -496,32 +496,36 @@
1%5D:%0A
+
return arr%5Bi%5D%0A
@@ -526,39 +526,8 @@
%5Bi%5D%0A
- else:%0A pass%0A
%0A%0Ade
|
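Decoded, the hunks rewrite find_peak_naive so each branch first checks the index, then the value, instead of combining both in one condition (reconstruction):

    for i in range(len(arr)):
        if i == 0:
            if arr[i] > arr[i + 1]:
                return arr[i]
        elif i == (len(arr) - 1):
            if arr[i] > arr[i - 1]:
                return arr[i]
        else:
            if arr[i] > arr[i - 1] and arr[i] > arr[i + 1]:
                return arr[i]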
8a44ec213272536d59d16118e2a13c533299242e
|
fix missing = in SystemCSerializer
|
hwt/serializer/systemC/serializer.py
|
hwt/serializer/systemC/serializer.py
|
from jinja2.environment import Environment
from jinja2.loaders import PackageLoader
from hwt.hdlObjects.types.enum import Enum
from hwt.hdlObjects.types.enumVal import EnumVal
from hwt.serializer.generic.serializer import GenericSerializer
from hwt.serializer.serializerClases.nameScope import LangueKeyword
from hwt.serializer.systemC.keywords import SYSTEMC_KEYWORDS
from hwt.serializer.systemC.statements import SystemCSerializer_statements
from hwt.serializer.systemC.type import SystemCSerializer_type
from hwt.serializer.systemC.value import SystemCSerializer_value
from hwt.serializer.utils import maxStmId
from hwt.synthesizer.param import evalParam
class SystemCSerializer(GenericSerializer, SystemCSerializer_value, SystemCSerializer_type, SystemCSerializer_statements):
"""
Serializer used to convert HWT design to SystemC code
"""
fileExtension = '.cpp'
_keywords_dict = {kw: LangueKeyword() for kw in SYSTEMC_KEYWORDS}
env = Environment(loader=PackageLoader('hwt', 'serializer/systemC/templates'))
moduleTmpl = env.get_template('module.cpp')
mehtodTmpl = env.get_template("method.cpp")
ifTmpl = env.get_template("if.cpp")
switchStm env.get_template("switch.cpp")
@classmethod
def comment(cls, comentStr):
return "/* %s */" % comentStr
@classmethod
def PortItem(cls, pi, ctx):
d = cls.DIRECTION(pi.direction)
return "sc_%s<%s> %s;" % (d,
cls.HdlType(pi._dtype, ctx),
pi.name)
@classmethod
def DIRECTION(cls, d):
return d.name.lower()
@classmethod
def Architecture(cls, arch, ctx):
variables = []
procs = []
extraTypes = set()
extraTypes_serialized = []
scope = ctx.scope
childCtx = ctx.withIndent()
arch.variables.sort(key=lambda x: x.name)
arch.processes.sort(key=lambda x: (x.name, maxStmId(x)))
arch.componentInstances.sort(key=lambda x: x._name)
ports = list(map(lambda pi: cls.PortItem(pi, childCtx), arch.entity.ports))
for v in arch.variables:
t = v._dtype
# if type requires extra definition
if isinstance(t, Enum) and t not in extraTypes:
extraTypes.add(v._dtype)
extraTypes_serialized.append(cls.HdlType(t, scope, declaration=True))
v.name = scope.checkedName(v.name, v)
variables.append(v)
def serializeVar(v):
dv = evalParam(v.defaultVal)
if isinstance(dv, EnumVal):
dv = "%s.%s" % (dv._dtype.name, dv.val)
else:
dv = cls.Value(dv, None)
return v.name, cls.HdlType(v._dtype), dv
for p in arch.processes:
procs.append(cls.HWProcess(p, childCtx))
constants = []
return cls.moduleTmpl.render(
name=arch.getEntityName(),
constants=constants,
ports=ports,
signals=list(map(serializeVar, variables)),
extraTypes=extraTypes_serialized,
processes=procs,
processObjects=arch.processes,
componentInstances=arch.componentInstances,
)
|
Python
| 0.001621
|
@@ -1180,16 +1180,18 @@
witchStm
+ =
env.get
|
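Decoded, the hunk inserts the missing assignment operator (reconstruction):

    switchStm = env.get_template("switch.cpp")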
d767b5990a088fa3fe45639873f2034fd25cb044
|
update precision of accuracy displayed in weight file name
|
all_cnn_96/train.py
|
all_cnn_96/train.py
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-
from argparse import ArgumentParser
from keras.optimizers import Adam
from keras.callbacks import ModelCheckpoint, CSVLogger, LearningRateScheduler
from net import IMG_SIZE_DEFAULT, BETA_DEFAULT, KERNEL_SIZE_DEFAULT, NB_FILTERS_DEFAULT
from utils import prepare_dirs, load_data
NB_EPOCH_DEFAULT = 80
LR_DEFAULT = 0.01
BATCH_SIZE_DEFAULT = 128
p = ArgumentParser(description='Training All-CNN-96 network.')
p.add_argument('datapath', type=str,
help='/path/to/hdf/data/file')
## model parameters
p.add_argument('--img-size', type=int, nargs=2, default=list(IMG_SIZE_DEFAULT),
help='pixel size of input image (default: {})'.format(IMG_SIZE_DEFAULT))
p.add_argument('--beta', type=float, default=BETA_DEFAULT,
help='hyper parameter for KL penalty (default: {})'.format(BETA_DEFAULT))
p.add_argument('--kernel-size', type=int, nargs=2, default=list(KERNEL_SIZE_DEFAULT),
help='kernel size of Conv2D layer (default: {})'.format(KERNEL_SIZE_DEFAULT))
p.add_argument('--nb-filters', type=int, nargs=4, default=list(NB_FILTERS_DEFAULT),
help='number of filters for Conv2D blocks (default: {})'.format(NB_FILTERS_DEFAULT))
## optional arguments for reuse model definition and weights
p.add_argument('--model-path', type=str, default=None,
help='json file defining model architecture (default: None)')
p.add_argument('--weights-path', type=str, default=None,
help='hdf5 file containing network parameters (default: None)')
## learning parameters
p.add_argument('--lr', type=float, default=LR_DEFAULT,
help='initial learning rate (default: {})'.format(LR_DEFAULT))
p.add_argument('--nb-epoch', type=int, default=NB_EPOCH_DEFAULT,
help='number of epochs (default: {})'.format(NB_EPOCH_DEFAULT))
p.add_argument('--batch-size', type=int, default=BATCH_SIZE_DEFAULT,
help='batch size (default: {})'.format(BATCH_SIZE_DEFAULT))
## data management
p.add_argument('--name', type=str, default=None,
help='experiment name (default: None)')
p.add_argument('--rootdir', type=str, default='./',
help='root directory for saving results (default: ./)')
p.add_argument('--pasta-ip', type=str, default=None,
help='ip address for pastalog (default: None)')
if __name__ == '__main__':
args = p.parse_args()
print('... Prepare directories')
dirs = prepare_dirs(args.rootdir, args.name)
if args.model_path:
from keras.models import model_from_json
from net import load_model
print('... Load network from: {}'.format(args.model_path))
model = load_model(args.model_path)
else:
from net import get_model
model = get_model(args.img_size, args.beta, args.kernel_size, args.nb_filters)
model_definition_file = dirs.modelsdir / 'model.json'
print('... Save model architecture to: {}'.format(model_definition_file))
import json
with model_definition_file.open('w') as f:
json.dump(json.loads(model.to_json()), f)
if args.weights_path:
print('... Load parameters from: {}'.format(args.weights_path))
model.load_weights(args.weights_path)
print('... Compile model')
model.compile(optimizer=Adam(lr=args.lr), loss='categorical_crossentropy',
metrics=['accuracy'])
print('... Prepare callbacks')
callbacks = []
model_checkpoint = ModelCheckpoint((dirs.checkpointsdir / 'weights_{epoch:03d}_{val_acc:.2f}.h5').as_posix(),
monitor='val_acc', verbose=0,
save_best_only=False, save_weights_only=True,
mode='auto')
callbacks.append(model_checkpoint)
csv_logger = CSVLogger((dirs.historydir / 'log.tsv').as_posix(), separator='\t', append=False)
callbacks.append(csv_logger)
def schedule(epoch):
if epoch == 30:
new_lr = args.lr * 0.1
print('... Learning rate: {}'.format(new_lr))
return new_lr
elif epoch == 60:
new_lr = args.lr * 0.1 * 0.1
print('... Learning rate: {}'.format(new_lr))
return new_lr
else:
return args.lr
lr_scheduler = LearningRateScheduler(schedule)
callbacks.append(lr_scheduler)
if args.pasta_ip:
from pastalog_monitor import PastalogMonitor
name = args.name if args.name else 'all-cnn-96'
pastalog_monitor = PastalogMonitor(name=name, root=args.pasta_ip)
callbacks.append(pastalog_monitor)
print('... Load data')
(X_train, Y_train), (X_test, Y_test) = load_data(datapath=args.datapath, img_size=args.img_size)
print('... Start training')
model.fit(X_train, Y_train,
shuffle=True, batch_size=args.batch_size, nb_epoch=args.nb_epoch,
validation_data=(X_test, Y_test), callbacks=callbacks)
|
Python
| 0
|
@@ -3498,17 +3498,17 @@
al_acc:.
-2
+4
f%7D.h5').
|
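Decoded, the hunk changes the accuracy placeholder in the checkpoint filename from two to four decimal places (reconstruction of the affected expression):

    (dirs.checkpointsdir / 'weights_{epoch:03d}_{val_acc:.4f}.h5').as_posix()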
60d2c4142aae168c0b079e94d45529c14d387e1d
|
Fix in the case of no parent node
|
hyperstream/channels/base_channel.py
|
hyperstream/channels/base_channel.py
|
# The MIT License (MIT)
# Copyright (c) 2014-2017 University of Bristol
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
from ..stream import StreamDict
from ..time_interval import TimeIntervals
from ..utils import Printable, MAX_DATE, StreamNotFoundError, MultipleStreamsFoundError
import logging
class BaseChannel(Printable):
"""
Abstract base class for channels
"""
def __init__(self, channel_id, can_calc=False, can_create=False, calc_agent=None):
self.channel_id = channel_id
self.streams = StreamDict()
self.can_calc = can_calc
self.can_create = can_create
self.calc_agent = calc_agent
self.up_to_timestamp = MAX_DATE
def update_streams(self, up_to_timestamp):
"""
Deriving classes must override this function
"""
raise NotImplementedError
def execute_tool(self, stream, interval):
"""
Executes the stream's tool over the given time interval
:param stream: the stream reference
:param interval: the time interval
:return: None
"""
if interval.end > self.up_to_timestamp:
raise ValueError(
'The stream is not available after ' + str(self.up_to_timestamp) + ' and cannot be calculated')
required_intervals = TimeIntervals([interval]) - stream.calculated_intervals
if not required_intervals.is_empty:
for interval in required_intervals:
stream.tool.execute(stream.input_streams, stream, interval)
stream.calculated_intervals += TimeIntervals([interval])
if not stream.required_intervals.is_empty:
raise RuntimeError('Tool execution did not cover the specified time interval.')
def get_results(self, stream, time_interval):
"""
Must be overridden by deriving classes.
1. Calculates/receives the documents in the stream for the time interval given
2. Returns success or failure and the results (for some channels the values of kwargs can override the
return process, e.g. introduce callbacks)
"""
raise NotImplementedError
def get_or_create_stream(self, stream_id, try_create=True):
"""
Helper function to get a stream or create one if it's not already defined
:param stream_id: The stream id
:param try_create: Whether to try to create the stream if not found
:return: The stream object
"""
if stream_id in self.streams:
logging.debug("found {}".format(stream_id))
return self.streams[stream_id]
elif try_create:
# Try to create the stream
logging.debug("creating {}".format(stream_id))
return self.create_stream(stream_id=stream_id)
def create_stream(self, stream_id, sandbox=None):
"""
Must be overridden by deriving classes, must create the stream according to the tool and return its unique
identifier stream_id
"""
raise NotImplementedError
def find_streams(self, **kwargs):
"""
Finds streams with the given meta data values. Useful for debugging purposes.
:param kwargs: The meta data as keyword arguments
:return: The streams found
"""
found = {}
for stream_id, stream in self.streams.items():
d = stream_id.as_dict()
if all(d['name'] == str(v) if k == 'name'
else k in d['meta_data'] and d['meta_data'][k] == str(v)
for k, v in kwargs.items()):
found[stream_id] = stream
return found
def find_stream(self, **kwargs):
"""
Finds a single stream with the given meta data values. Useful for debugging purposes.
:param kwargs: The meta data as keyword arguments
:return: The stream found
"""
found = list(self.find_streams(**kwargs).values())
if not found:
raise StreamNotFoundError(kwargs)
if len(found) > 1:
raise MultipleStreamsFoundError(kwargs)
return found[0]
def purge_node(self, node_id, sandbox=None):
"""
Purges a node (collection of streams)
:param node_id: The node identifier
:param sandbox: The sandbox
:return: None
"""
for stream_id in self.streams:
if self.streams[stream_id].parent_node.node_id == node_id:
self.purge_stream(stream_id, sandbox=sandbox)
def purge_stream(self, stream_id, sandbox=None):
"""
Must be overridden by deriving classes, purges the stream and removes the calculated intervals
"""
raise NotImplementedError
def get_stream_writer(self, stream):
"""
Must be overridden by deriving classes, must return a function(document_collection) which writes all the
given documents of the form (timestamp,data) from document_collection to the stream
Example:
if stream_id==1:
def f(document_collection):
for (timestamp,data) in document_collection:
database[timestamp] = data
return(f)
else:
raise Exception('No stream with id '+str(stream_id))
"""
raise NotImplementedError
def __str__(self):
s = self.__class__.__name__ + ' with ID: ' + str(self.channel_id)
s += ' and containing {} streams:'.format(len(self.streams))
for stream in self.streams:
calculated_ranges = repr(stream.calculated_intervals)
s += '\nSTREAM ID: ' + str(stream.stream_id)
s += "\n CALCULATED RANGES: " + calculated_ranges
s += "\n STREAM DEFINITION: "
s += str(stream)
return s
def __getitem__(self, item):
return self.streams[item]
def __setitem__(self, key, value):
self.streams[key] = value
def __contains__(self, item):
return item in self.streams
|
Python
| 0.020588
|
@@ -5427,18 +5427,24 @@
-if
+stream =
self.st
@@ -5459,16 +5459,102 @@
ream_id%5D
+%0A if not stream.parent_node:%0A continue%0A if stream
.parent_
|
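Decoded, purge_node now looks the stream up once and skips any stream without a parent node before dereferencing parent_node.node_id (reconstruction):

        for stream_id in self.streams:
            stream = self.streams[stream_id]
            if not stream.parent_node:
                continue
            if stream.parent_node.node_id == node_id:
                self.purge_stream(stream_id, sandbox=sandbox)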
dbec204b242ab643de162046ba73dca32043c6c2
|
Implement __getattr__ to reduce code
|
space-age/space_age.py
|
space-age/space_age.py
|
class SpaceAge(object):
def __init__(self, seconds):
self.seconds = seconds
@property
def years(self):
return self.seconds/31557600
def on_earth(self):
return round(self.years, 2)
def on_mercury(self):
return round(self.years/0.2408467, 2)
def on_venus(self):
return round(self.years/0.6151976, 2)
def on_mars(self):
return round(self.years/1.8808158, 2)
def on_jupiter(self):
return round(self.years/11.862615, 2)
def on_saturn(self):
return round(self.years/29.447498, 2)
def on_uranus(self):
return round(self.years/84.016846, 2)
def on_neptune(self):
return round(self.years/164.79132, 2)
|
Python
| 0.99731
|
@@ -25,587 +25,513 @@
-def __init__(self, seconds):%0A self.seconds = seconds%0A%0A @property%0A def years(self):%0A return self.seconds/31557600%0A%0A def on_earth(self):%0A return round(self.years, 2)%0A%0A def on_mercury(self):%0A return round(self.years/0.2408467, 2)%0A%0A def on_venus(self):%0A return round(self.years/0.6151976, 2)%0A%0A def on_mars(self):%0A return round(self.years/1.8808158, 2)%0A%0A def on_jupiter(self):%0A return round(self.years/11.862615, 2)%0A%0A def on_saturn(self):%0A return round(self.years/29.447498, 2)%0A%0A def on_uranus(self):%0A
+YEARS = %7B%22on_earth%22: 1,%0A %22on_mercury%22: 0.2408467,%0A %22on_venus%22: 0.61519726,%0A %22on_mars%22: 1.8808158,%0A %22on_jupiter%22: 11.862615,%0A %22on_saturn%22: 29.447498,%0A %22on_uranus%22: 84.016846,%0A %22on_neptune%22: 164.79132%7D%0A%0A def __init__(self, seconds):%0A self.seconds = seconds%0A%0A @property%0A def years(self):%0A return self.seconds/31557600%0A%0A def __getattr__(self, on_planet):%0A if on_planet in SpaceAge.YEARS:%0A
@@ -532,32 +532,40 @@
return
+ lambda:
round(self.year
@@ -570,91 +570,82 @@
ars/
-84.016846
+SpaceAge.YEARS%5Bon_planet%5D
, 2)%0A
-%0A
+
-def on_neptune(self):%0A return round(self.years/164.79132, 2)
+ else:%0A raise AttributeError
%0A
|
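Decoded, the eight on_* methods collapse into a class-level lookup table plus __getattr__, which returns a callable so the existing on_earth() call style still works (reconstruction):

    class SpaceAge(object):
        YEARS = {"on_earth": 1,
                 "on_mercury": 0.2408467,
                 "on_venus": 0.61519726,
                 "on_mars": 1.8808158,
                 "on_jupiter": 11.862615,
                 "on_saturn": 29.447498,
                 "on_uranus": 84.016846,
                 "on_neptune": 164.79132}

        def __init__(self, seconds):
            self.seconds = seconds

        @property
        def years(self):
            return self.seconds/31557600

        def __getattr__(self, on_planet):
            if on_planet in SpaceAge.YEARS:
                return lambda: round(self.years/SpaceAge.YEARS[on_planet], 2)
            else:
                raise AttributeError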
a28bb36aeb887d11b9cf8391e03264a81b40b84a
|
add base entity construct
|
sparc/entity/entity.py
|
sparc/entity/entity.py
|
from BTrees.OOBTree import OOBTree
from zope.annotation.interfaces import IAnnotations
from zope.annotation.interfaces import IAnnotatable
from zope.annotation.interfaces import IAttributeAnnotatable
from zope.component import adapts
from zope.component.factory import Factory
from zope.interface import implements
from zope import schema
from zope.schema import getFields
from zope.schema.fieldproperty import FieldProperty
from interfaces import IIdentified
from interfaces import IEntity
from interfaces import IOwner
from interfaces import IUrlReference
from interfaces import IKeyphraseTags
class SparcEntity(object):
"""A basic Sparc entity"""
implements(IEntity, IAttributeAnnotatable)
def __init__(self, **kwargs):
self.id = kwargs['id'] # required
if 'name' in kwargs: self.name = kwargs['name']
if 'description' in kwargs: self.description = kwargs['description']
if 'details' in kwargs: self.details = kwargs['details']
#IEntity
id = FieldProperty(IIdentified['id'])
def getId(self):
return self.id
name = FieldProperty(IEntity['name'])
description = FieldProperty(IEntity['description'])
details = FieldProperty(IEntity['details'])
sparcEntityFactory = Factory(SparcEntity)
class SparcEntityOwnerForAnnotableObjects(object):
implements(IOwner)
adapts(IAnnotatable)
def __init__(self, context):
self.context = context
self.annotations = IAnnotations(context).\
setdefault('IOwner', OOBTree())
if 'owner' not in self.annotations:
self.annotations['owner'] = None
@property
def owner(self):
return self.annotations['owner']
@owner.setter
def owner(self, value):
getFields(IOwner)['owner'].validate(value)
self.annotations['owner'] = value
class SparcEntityUrlForAnnotableObjects(object):
implements(IUrlReference)
adapts(IAnnotatable)
def __init__(self, context):
self.context = context
self.annotations = IAnnotations(context).\
setdefault('IUrlReference', OOBTree())
if 'url' not in self.annotations:
self.annotations['url'] = None
@property
def url(self):
return self.annotations['url']
@url.setter
def url(self, value):
getFields(IUrlReference)['url'].validate(value)
self.annotations['url'] = value
class SparcEntityKeyphraseTagsForAnnotableObjects(object):
implements(IKeyphraseTags)
adapts(IAnnotatable)
def __init__(self, context):
self.context = context
self.annotations = IAnnotations(context).\
setdefault('IKeyphraseTags', OOBTree())
if 'tags' not in self.annotations:
self.annotations['tags'] = set()
@property
def tags(self):
return self.annotations['tags']
@tags.setter
def tags(self, value):
getFields(IKeyphraseTags)['tags'].validate(value)
self.annotations['tags'] = value
|
Python
| 0.000001
|
@@ -312,32 +312,8 @@
nts%0A
-from zope import schema%0A
from
@@ -566,16 +566,155 @@
seTags%0A%0A
+class BaseSchemaObject(object):%0A def __init__(self, **kwargs):%0A for k, v in kwargs.iteritems():%0A setattr(self, k, v)%0A%0A
class Sp
|
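Decoded, the hunks drop the now-unused `from zope import schema` import and add the new base construct ahead of SparcEntity (reconstruction; note the Python 2 iteritems):

    class BaseSchemaObject(object):
        def __init__(self, **kwargs):
            for k, v in kwargs.iteritems():
                setattr(self, k, v)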
2732ac448d6a31678629324dc47f89a33ecd261b
|
Manage service choicefield display in inline form
|
billjobs/admin.py
|
billjobs/admin.py
|
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from .models import Bill, BillLine, Service, UserProfile
class BillLineInline(admin.TabularInline):
model = BillLine
extra = 1
class BillAdmin(admin.ModelAdmin):
readonly_fields = ('number', 'billing_date', 'amount')
inlines = [BillLineInline]
list_display = ('__str__', 'coworker_name', 'amount', 'billing_date',
'isPaid', 'pdf_file_url')
list_editable = ('isPaid',)
list_filter = ('isPaid', )
search_fields = ('user__first_name', 'user__last_name', 'number')
def formfield_for_foreignkey(self, db_field, request=None, **kwargs):
field = super(BillAdmin, self).formfield_for_foreignkey(
db_field, request, **kwargs)
if db_field.rel.to == User:
field.initial = request.user.id
field.label_from_instance = self.get_user_label
return field
def get_user_label(self, user):
name = user.get_full_name()
username = user.username
return (name and name != username and '%s (%s)' % (name, username)
or username)
def pdf_file_url(self, obj):
return '<a href="%s">%s.pdf</a>' % (reverse('generate-pdf',
kwargs={'bill_id': obj.id}), obj.number)
pdf_file_url.allow_tags = True
class UserProfileAdmin(admin.StackedInline):
model = UserProfile
class UserAdmin(UserAdmin):
inlines = (UserProfileAdmin, )
fieldsets = (
(None, {
'fields': ('username', 'password')
}),
(_('Personal info'), {
'fields': ('first_name', 'last_name', 'email')
}),
(_('Permissions'), {
'classes': ('collapse',),
'fields': ('is_active', 'is_staff', 'is_superuser',
'groups', 'user_permissions')
}),
(_('Important dates'), {
'classes': ('collapse',),
'fields': ('last_login', 'date_joined')
})
)
list_display = ('username', 'get_full_name', 'email')
class ServiceAdmin(admin.ModelAdmin):
model = Service
list_display = ('__str__', 'price', 'is_available')
list_editable = ('is_available',)
list_filter = ('is_available',)
admin.site.register(Bill, BillAdmin)
admin.site.register(Service, ServiceAdmin)
# User have to be unregistered
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
|
Python
| 0
|
@@ -1,24 +1,80 @@
+from django import forms%0Afrom django.db.models import Q%0A
from django.contrib impo
@@ -78,24 +78,24 @@
mport admin%0A
-
from django.
@@ -329,24 +329,576 @@
serProfile%0A%0A
+class BillLineInlineForm(forms.ModelForm):%0A def __init__(self, *args, **kwargs):%0A super(BillLineInlineForm, self).__init__(*args, **kwargs)%0A if self.instance.id:%0A self.fields%5B'service'%5D.queryset = Service.objects.filter(Q(is_available=True) %7C Q(name=self.instance.service.name))%0A print(self.fields%5B'service'%5D.choices)%0A else:%0A self.fields%5B'service'%5D.queryset = Service.objects.filter(is_available=True)%0A%0A class Meta:%0A model = BillLine%0A fields = ('service', 'quantity', 'total')%0A%0A
class BillLi
@@ -945,24 +945,24 @@
= BillLine%0A
-
extra =
@@ -963,16 +963,47 @@
tra = 1%0A
+ form = BillLineInlineForm%0A%0A
%0Aclass B
|
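Decoded, the hunks import forms and Q, define a ModelForm that limits the service choices to available services (plus the line's current service when editing an existing instance), and attach it to the inline (reconstruction; the commit also leaves a debug print in place):

    from django import forms
    from django.db.models import Q

    class BillLineInlineForm(forms.ModelForm):
        def __init__(self, *args, **kwargs):
            super(BillLineInlineForm, self).__init__(*args, **kwargs)
            if self.instance.id:
                self.fields['service'].queryset = Service.objects.filter(
                    Q(is_available=True) | Q(name=self.instance.service.name))
                print(self.fields['service'].choices)  # debug output left in by the commit
            else:
                self.fields['service'].queryset = Service.objects.filter(
                    is_available=True)

        class Meta:
            model = BillLine
            fields = ('service', 'quantity', 'total')

    class BillLineInline(admin.TabularInline):
        model = BillLine
        extra = 1
        form = BillLineInlineForm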
8b275ccb96b8fe3c2c3919e11f08e988219a1e14
|
Add the process PID in the logs
|
src/diamond/utils/log.py
|
src/diamond/utils/log.py
|
# coding=utf-8
import logging
import logging.config
import sys
import os
class DebugFormatter(logging.Formatter):
def __init__(self, fmt=None):
if fmt is None:
fmt = '%(created)s\t[%(processName)s:%(levelname)s]\t%(message)s'
self.fmt_default = fmt
self.fmt_prefix = fmt.replace('%(message)s', '')
logging.Formatter.__init__(self, fmt)
def format(self, record):
self._fmt = self.fmt_default
if record.levelno in [logging.ERROR, logging.CRITICAL]:
self._fmt = ''
self._fmt += self.fmt_prefix
self._fmt += '%(message)s'
self._fmt += '\n'
self._fmt += self.fmt_prefix
self._fmt += '%(pathname)s:%(lineno)d'
return logging.Formatter.format(self, record)
def setup_logging(configfile, stdout=False):
log = logging.getLogger('diamond')
if stdout:
log.setLevel(logging.DEBUG)
streamHandler = logging.StreamHandler(sys.stdout)
streamHandler.setFormatter(DebugFormatter())
streamHandler.setLevel(logging.DEBUG)
log.addHandler(streamHandler)
else:
try:
if sys.version_info >= (2, 6):
logging.config.fileConfig(configfile,
disable_existing_loggers=False)
else:
# python <= 2.5 does not have disable_existing_loggers
# default was to always disable them, in our case we want to
# keep any logger created by handlers
logging.config.fileConfig(configfile)
for logger in logging.root.manager.loggerDict.values():
logger.disabled = 0
except Exception, e:
sys.stderr.write("Error occurs when initialize logging: ")
sys.stderr.write(str(e))
sys.stderr.write(os.linesep)
return log
|
Python
| 0.000001
|
@@ -187,16 +187,17 @@
fmt =
+(
'%25(creat
@@ -202,16 +202,40 @@
ated)s%5Ct
+' +%0A '
%5B%25(proce
@@ -245,16 +245,28 @@
ame)s:%25(
+process)d:%25(
levelnam
@@ -271,16 +271,40 @@
ame)s%5D%5Ct
+' +%0A '
%25(messag
@@ -299,32 +299,33 @@
'%25(message)s'
+)
%0A self.fm
|
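Decoded, the default format string gains the %(process)d field so every record carries the PID (reconstruction):

            fmt = ('%(created)s\t' +
                   '[%(processName)s:%(process)d:%(levelname)s]\t' +
                   '%(message)s')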
d2917cb2131b9b08fa6457b195606fcc0220eef1
|
Fix X509 constructor
|
ctypescrypto/x509.py
|
ctypescrypto/x509.py
|
from ctypes import c_void_p
from ctypescrypto.bio import Membio
from ctypescrypto.pkey import PKey
from ctypescrypto.oid import Oid
from ctypescrypto.exception import LibCryptoError
from ctypescrypto import libcrypto
class X509Error(LibCryptoError):
pass
class X509Name:
def __init__(self,ptr):
self.ptr=ptr
def __del__(self):
libcrypto.X509_NAME_free(self.ptr)
def __str__(self):
b=Membio()
libcrypto.X509_NAME_print_ex(b.bio,self.ptr,0,PRING_FLAG)
return str(b).decode("utf-8")
def __len__(self):
return libcrypto.X509_NAME_entry_count(self.ptr)
def __getattr__(self,key):
if isinstance(key,Oid):
# Return list of strings
raise NotImplemented
elif isinstance(key,int):
# Return OID, sting tuple
raise NotImplemented
else:
raise TypeError("X509 name can be indexed with oids and numbers only")
def __setattr__(self,key,val):
pass
class X509_extlist:
def __init__(self,ptr):
self.ptr=ptr
def __del__(self):
libcrypto.X509_NAME_free(self.ptr)
def __str__(self):
raise NotImplemented
def __len__(self):
return libcrypto.X509_NAME_entry_count(self.ptr)
def __getattr__(self,key):
raise NotImplemented
def __setattr__(self,key,val):
raise NotImplemented
class X509:
def __init__(self,data=None,ptr=None,format="PEM"):
if ptr is not None:
if data is not None:
raise TypeError("Cannot use data and ptr simultaneously")
self.cert = ptr
elif data is None:
raise TypeError("data argument is required")
b=Membio(data)
if format == "PEM":
self.cert=libcrypto.PEM_read_bio_X509(b.bio,None,None,None)
else:
self.cert=libcrypto.d2i_X509_bio(b.bio,None)
if self.cert is None:
raise X509Error("error reading certificate")
def __del__(self):
libcrypto.X509_free(self.cert)
def __str__(self):
""" Returns der string of the certificate """
b=Membio()
if libcrypto.i2d_X509_bio(b.bio,self.cert)==0:
raise X509Error("error serializing certificate")
@property
def pubkey(self):
"""EVP PKEy object of certificate public key"""
return PKey(ptr=libcrypto.X509_get_pubkey(self.cert,False))
def verify(self,key):
""" Verify self on given issuer key """
@property
def subject(self):
""" X509Name for certificate subject name """
return X509Name(libcrypto.X509_get_subject_name(self.cert))
@property
def issuer(self):
""" X509Name for certificate issuer name """
return X509Name(libcrypto.X509_get_issuer_name(self.cert))
@property
def serial(self):
""" Serial number of certificate as integer """
return
@property
def startDate(self):
""" Certificate validity period start date """
raise NotImplemented
@property
def endDate(self):
""" Certificate validity period end date """
raise NotImplemented
def extensions(self):
raise NotImplemented
|
Python
| 0.000018
|
@@ -1480,16 +1480,24 @@
uired%22)%0A
+%09%09else:%0A
%09%09%09b=Mem
|
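Decoded, the hunk adds the missing else: so the Membio parsing path runs only when no ptr was supplied; the lines below it were already indented for that branch (reconstruction, spaces standing in for the file's tabs):

    if ptr is not None:
        if data is not None:
            raise TypeError("Cannot use data and ptr simultaneously")
        self.cert = ptr
    elif data is None:
        raise TypeError("data argument is required")
    else:
        b = Membio(data)
        if format == "PEM":
            self.cert = libcrypto.PEM_read_bio_X509(b.bio, None, None, None)
        else:
            self.cert = libcrypto.d2i_X509_bio(b.bio, None)
        if self.cert is None:
            raise X509Error("error reading certificate")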
97dde187c6e660a517639475f95565907db3eec4
|
Fix test.
|
test_check_blockers.py
|
test_check_blockers.py
|
from unittest import TestCase
from mock import Mock, patch
import check_blockers
JUJUBOT_USER = {'login': 'jujubot', 'id': 7779494}
OTHER_USER = {'login': 'user', 'id': 1}
class CheckBlockers(TestCase):
def test_parse_args(self):
args = check_blockers.parse_args(['master', '17'])
self.assertEqual('master', args.branch)
self.assertEqual('17', args.pull_request)
def test_get_lp_bugs_with_master(self):
args = check_blockers.parse_args(['master', '17'])
with patch('check_blockers.get_json') as gj:
data = {'entries': []}
gj.return_value = data
check_blockers.get_lp_bugs(args)
gj.assert_called_with((check_blockers.LP_BUGS.format('juju-core')))
def test_get_lp_bugs_with_devel(self):
args = check_blockers.parse_args(['1.20', '17'])
with patch('check_blockers.DEVEL') as devel:
devel.return_value = '1.20'
with patch('check_blockers.get_json') as gj:
data = {'entries': []}
gj.return_value = data
check_blockers.get_lp_bugs(args)
gj.assert_called_with(
(check_blockers.LP_BUGS.format('juju-core/1.20')))
def test_get_lp_bugs_without_blocking_bugs(self):
args = check_blockers.parse_args(['master', '17'])
with patch('check_blockers.get_json') as gj:
empty_bug_list = {'entries': []}
gj.return_value = empty_bug_list
bugs = check_blockers.get_lp_bugs(args)
self.assertEqual({}, bugs)
def test_get_lp_bugs_with_blocking_bugs(self):
args = check_blockers.parse_args(['master', '17'])
with patch('check_blockers.get_json') as gj:
bug_list = {
'entries': [
{'self_link': 'https://lp/j/98765'},
{'self_link': 'https://lp/j/54321'},
]}
gj.return_value = bug_list
bugs = check_blockers.get_lp_bugs(args)
self.assertEqual(['54321', '98765'], sorted(bugs.keys()))
def test_get_reason_without_blocking_bugs(self):
args = check_blockers.parse_args(['master', '17'])
with patch('check_blockers.get_json') as gj:
code, reason = check_blockers.get_reason({}, args)
self.assertEqual(0, code)
self.assertEqual('No blocking bugs', reason)
self.assertEqual(0, gj.call_count)
def test_get_reason_without_comments(self):
args = check_blockers.parse_args(['master', '17'])
with patch('check_blockers.get_json') as gj:
gj.return_value = []
bugs = {'98765': {'self_link': 'https://lp/j/98765'}}
code, reason = check_blockers.get_reason(bugs, args)
self.assertEqual(1, code)
self.assertEqual('Could not get 17 comments from github', reason)
gj.assert_called_with((check_blockers.GH_COMMENTS.format('17')))
def test_get_reason_with_blockers_no_match(self):
args = check_blockers.parse_args(['master', '17'])
with patch('check_blockers.get_json') as gj:
gj.return_value = [{'body': '$$merge$$', 'user': OTHER_USER}]
bugs = {'98765': {'self_link': 'https://lp/j/98765'}}
code, reason = check_blockers.get_reason(bugs, args)
self.assertEqual(1, code)
self.assertEqual("Does not match ['fixes-98765']", reason)
def test_get_reason_with_blockers_with_match(self):
args = check_blockers.parse_args(['master', '17'])
with patch('check_blockers.get_json') as gj:
gj.return_value = [
{'body': '$$merge$$', 'user': OTHER_USER},
{'body': 'la la __fixes-98765__ ha ha', 'user': OTHER_USER}]
bugs = {'98765': {'self_link': 'https://lp/j/98765'}}
code, reason = check_blockers.get_reason(bugs, args)
self.assertEqual(0, code)
self.assertEqual("Matches fixes-98765", reason)
def test_get_reason_with_blockers_with_jujubot_comment(self):
args = check_blockers.parse_args(['master', '17'])
with patch('check_blockers.get_json') as gj:
gj.return_value = [
{'body': '$$merge$$', 'user': OTHER_USER},
{'body': 'la la $$fixes-98765$$ ha ha', 'user': JUJUBOT_USER}]
bugs = {'98765': {'self_link': 'https://lp/j/98765'}}
code, reason = check_blockers.get_reason(bugs, args)
self.assertEqual(1, code)
self.assertEqual("Does not match ['fixes-98765']", reason)
def test_get_reason_with_blockers_with_reply_jujubot_comment(self):
args = check_blockers.parse_args(['master', '17'])
with patch('check_blockers.get_json') as gj:
gj.return_value = [
{'body': '$$merge$$', 'user': OTHER_USER},
{'body': 'Juju bot wrote $$fixes-98765$$', 'user': OTHER_USER}]
bugs = {'98765': {'self_link': 'https://lp/j/98765'}}
code, reason = check_blockers.get_reason(bugs, args)
self.assertEqual(1, code)
self.assertEqual("Does not match ['fixes-98765']", reason)
def test_get_reason_with_blockers_with_jfdi(self):
args = check_blockers.parse_args(['master', '17'])
with patch('check_blockers.get_json') as gj:
gj.return_value = [
{'body': '$$merge$$', 'user': OTHER_USER},
{'body': 'la la __JFDI__ ha ha', 'user': OTHER_USER}]
bugs = {'98765': {'self_link': 'https://lp/j/98765'}}
code, reason = check_blockers.get_reason(bugs, args)
self.assertEqual(0, code)
self.assertEqual("Engineer says JFDI", reason)
def test_get_json(self):
response = Mock()
response.read.side_effect = ["{\"result\"": []}]
with patch('check_blockers.urllib2.urlopen') as urlopen:
urlopen.return_value = response
json = check_blockers.get_json("http://api.testing/")
request = urlopen.call_args[0][0]
self.assertEqual(request.get_full_url(), "http://api.testing/")
self.assertEqual(request.get_header("Cache-control"),
"max-age=0, must-revalidate")
self.assertEqual(json, {"result": []})
|
Python
| 0.000001
|
@@ -5869,26 +5869,24 @@
= %5B
-%22%7B%5C
+'%7B
%22result
-%5C%22
%22: %5B%5D%7D
+'
%5D%0A
|
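Decoded, the fix turns side_effect into a list holding one JSON string, replacing the syntactically invalid dict literal (reconstruction):

        response.read.side_effect = ['{"result": []}']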
419db3c559d836d6ba77c758212a55051e6c8fab
|
Add FIXME for module as function arg
|
test_obj_dict_tools.py
|
test_obj_dict_tools.py
|
from obj_dict_tools import *
from nose.tools import raises
@dict_fields(['name', 'size'])
class Simple:
def __init__(self, name=None, size=None):
self.name = name
self.size = size
@dict_fields(['first', 'second'])
class Pair:
def __init__(self, first=None, second=None):
self.first = first
self.second = second
def test_simple_class_to_dict():
s = Simple('foo', 100)
d = to_dict(s)
assert d['__class__'] == 'Simple'
assert d['name'] == 'foo'
assert d['size'] == 100
def test_simple_class_from_dict():
d = {'__class__': 'Simple', 'name': 'foo', 'size': 100}
s = from_dict(d, globals())
assert isinstance(s, Simple)
assert s.name == 'foo'
assert s.size == 100
def test_null_fields_to_dict():
p = Pair()
d = to_dict(p)
assert d['__class__'] == 'Pair'
assert not 'first' in d
assert not 'second' in d
def test_list_to_dict():
ss = [Simple('foo', 100), Simple('bar', 200)]
d = to_dict(ss)
assert len(d) == 2
assert d[0]['__class__'] == 'Simple'
assert d[0]['name'] == 'foo'
assert d[0]['size'] == 100
assert d[1]['__class__'] == 'Simple'
assert d[1]['name'] == 'bar'
assert d[1]['size'] == 200
def test_list_field_to_dict():
p = Pair([1, 2, 3, 4, 5], Simple('b', 200))
d = to_dict(p)
assert d['__class__'] == 'Pair'
assert len(d['first']) == 5
assert d['second']['__class__'] == 'Simple'
assert d['second']['name'] == 'b'
assert d['second']['size'] == 200
@raises(Exception)
def test_decorator_rejects_underscore_prefixes():
@dict_fields(['_p'])
class bad_attribute_defined:
pass
|
Python
| 0
|
@@ -625,16 +625,77 @@
': 100%7D%0A
+ # FIXME: Explicitly passing in a module is undesirable. %0A
s =
|
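Decoded, the hunk only inserts a comment ahead of the from_dict call in test_simple_class_from_dict (reconstruction):

    # FIXME: Explicitly passing in a module is undesirable.
    s = from_dict(d, globals())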
b6f57d8aaeff8f85c89c381b972113810a468412
|
use lat, lon
|
models.py
|
models.py
|
from random import choice, randint
import urllib2, json
from pushbullet import PushBullet
from os import environ
'''
Checks if the current login attempt is a security threat or not.
Performs the required action in each case
'''
def is_safe(form, ip, geocoded_ip, mandrill):
ip = ip
latitude = form.get('latitude', None)
longitude = form.get('longitude', None)
os = form.get('os', None)
mobile = form.get('isMobile', None)
browser = form.get('browser', None)
if latitude == None and longitude == None:
latitude = geocoded_ip['latitude']
longitude = geocoded_ip['longitude']
safety_status = choice(range(-1, 2))
auth_code = '%06d' % randint(0,999999)
if safety_status < 1:
send_push("Confirm your access", "Suspicious access detected from IP %s, confirm with code %s" % (ip, auth_code))
send_mail(mandrill, 'zen@itram.es', latitude, longitude, ip, auth_code)
return {
'safety_code': safety_status,
'token': auth_code,
'debug': [ip, latitude, longitude, os, mobile, browser]
}# send SMS, mail...
def send_push(message, body, lat=40.4086, lon=-3.6922, pushbullet_token=environ.get('PUSHBULLET_TOKEN')):
""" Sends a foo location to Pushbullet """
pb = PushBullet(pushbullet_token)
success, push = pb.push_link("Login from suspicious location detected now!", "http://maps.google.com/maps?&z=10&q=%f,+%f&ll=%f+%f" % (lat, lon, lat, lon), "A suspicious login has appeared, try to guess who is it")
return success
def send_mail(mandrill, to, latitude, longitude, ip, safety_code):
gmaps_uri = "http://maps.googleapis.com/maps/api/staticmap?center=%s,%s&zoom=15&size=400x400&markers=color:red%%7Clabel:S%%7C%s,%s&sensor=true" % (latitude, longitude, latitude, longitude)
mandrill.send_email(
from_email='someone@yourdomain.com',
subject='[LogLock] Suspicious login attempt detected',
to=[{'email': to}],
html='''
An access attempt has been logged from a suspicious location:
<p><img src="%s" /></p>
<p>IP address: %s</p>
Please confirm it is you with the following code: <b>%s</b>
''' %(gmaps_uri, ip, safety_code)
)
def geocode_ip(ip_addr):
""" Geocodes a given IP Address """
data = json.load(urllib2.urlopen("http://ip-api.com/json/%s" % ip_addr))
print "Geocoded data: %s" % data
return data
|
Python
| 0.000137
|
@@ -581,21 +581,16 @@
_ip%5B'lat
-itude
'%5D%0A
@@ -620,22 +620,16 @@
_ip%5B'lon
-gitude
'%5D%0A%0A
|
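The "use lat, lon" change above reflects the key names in ip-api.com's JSON payload, which geocode_ip passes through unchanged. A quick illustration (sample response abbreviated and assumed, not captured from the live service):

geocoded_ip = {'status': 'success', 'lat': 40.4086, 'lon': -3.6922}
latitude = geocoded_ip['lat']    # the long 'latitude' key would raise KeyError
longitude = geocoded_ip['lon']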
a0f96b2b25d309c8934ffe9a197f3d66c9097b52
|
replace phone to email for privacy
|
models.py
|
models.py
|
from app import db
from datetime import datetime
class Profile(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(80), unique=True)
gender = db.Column(db.String(1))
age = db.Column(db.Integer())
description = db.Column(db.String(300))
date = db.Column(db.DateTime, default = datetime.utcnow)
# get profile by id : Profile.query.get(id)
# get profile by param : Profile.query.filter_by(name = "").all()
|
Python
| 0.000189
|
@@ -238,16 +238,66 @@
eger())%0A
+ email = db.Column(db.String(50), unique=True)%0A
desc
|
ab331ab886a024f73fb7de4a8448d99c096a638c
|
Add .dev suffix and bump to next version.
|
haiku/__init__.py
|
haiku/__init__.py
|
# Copyright 2019 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Haiku is a neural network library for JAX."""
from haiku import data_structures
from haiku import experimental
from haiku import initializers
from haiku import nets
from haiku import pad
from haiku import testing
from haiku._src.attention import MultiHeadAttention
from haiku._src.base import get_parameter
from haiku._src.base import get_state
from haiku._src.base import maybe_next_rng_key
from haiku._src.base import next_rng_key
from haiku._src.base import next_rng_keys
from haiku._src.base import PRNGSequence
from haiku._src.base import reserve_rng_keys
from haiku._src.base import set_state
from haiku._src.base import with_rng
from haiku._src.basic import BatchApply
from haiku._src.basic import dropout
from haiku._src.basic import expand_apply
from haiku._src.basic import Linear
from haiku._src.basic import multinomial
from haiku._src.basic import one_hot
from haiku._src.basic import Sequential
from haiku._src.basic import to_module
from haiku._src.batch_norm import BatchNorm
from haiku._src.bias import Bias
from haiku._src.conv import Conv1D
from haiku._src.conv import Conv1DTranspose
from haiku._src.conv import Conv2D
from haiku._src.conv import Conv2DTranspose
from haiku._src.conv import Conv3D
from haiku._src.conv import Conv3DTranspose
from haiku._src.conv import ConvND
from haiku._src.conv import ConvNDTranspose
from haiku._src.depthwise_conv import DepthwiseConv2D
from haiku._src.depthwise_conv import SeparableDepthwiseConv2D
from haiku._src.embed import Embed
from haiku._src.embed import EmbedLookupStyle
from haiku._src.group_norm import GroupNorm
from haiku._src.layer_norm import InstanceNorm
from haiku._src.layer_norm import LayerNorm
from haiku._src.module import Module
from haiku._src.module import transparent
from haiku._src.moving_averages import EMAParamsTree
from haiku._src.moving_averages import ExponentialMovingAverage
from haiku._src.pool import avg_pool
from haiku._src.pool import AvgPool
from haiku._src.pool import max_pool
from haiku._src.pool import MaxPool
from haiku._src.recurrent import Conv1DLSTM
from haiku._src.recurrent import Conv2DLSTM
from haiku._src.recurrent import Conv3DLSTM
from haiku._src.recurrent import deep_rnn_with_skip_connections
from haiku._src.recurrent import DeepRNN
from haiku._src.recurrent import dynamic_unroll
from haiku._src.recurrent import GRU
from haiku._src.recurrent import IdentityCore
from haiku._src.recurrent import LSTM
from haiku._src.recurrent import LSTMState
from haiku._src.recurrent import ResetCore
from haiku._src.recurrent import RNNCore
from haiku._src.recurrent import static_unroll
from haiku._src.recurrent import VanillaRNN
from haiku._src.reshape import Flatten
from haiku._src.reshape import Reshape
from haiku._src.spectral_norm import SNParamsTree
from haiku._src.spectral_norm import SpectralNorm
from haiku._src.stateful import cond
from haiku._src.stateful import fori_loop
from haiku._src.stateful import grad
from haiku._src.stateful import jit
from haiku._src.stateful import remat
from haiku._src.stateful import scan
from haiku._src.stateful import switch
from haiku._src.stateful import value_and_grad
from haiku._src.stateful import vmap
from haiku._src.stateful import while_loop
from haiku._src.transform import running_init
from haiku._src.transform import transform
from haiku._src.transform import transform_with_state
from haiku._src.transform import Transformed
from haiku._src.transform import TransformedWithState
from haiku._src.transform import without_apply_rng
from haiku._src.transform import without_state
from haiku._src.typing import Params
from haiku._src.typing import State
__version__ = "0.0.3"
__all__ = (
"AvgPool",
"BatchApply",
"BatchNorm",
"Bias",
"Conv1D",
"Conv1DLSTM",
"Conv1DTranspose",
"Conv2D",
"Conv2DLSTM",
"Conv2DTranspose",
"Conv3D",
"Conv3DLSTM",
"Conv3DTranspose",
"ConvND",
"ConvNDTranspose",
"DeepRNN",
"DepthwiseConv2D",
"EMAParamsTree",
"Embed",
"EmbedLookupStyle",
"ExponentialMovingAverage",
"Flatten",
"GRU",
"GroupNorm",
"IdentityCore",
"InstanceNorm",
"LSTM",
"LSTMState",
"LayerNorm",
"Linear",
"MaxPool",
"Module",
"MultiHeadAttention",
"PRNGSequence",
"Params",
"RNNCore",
"ResetCore",
"Reshape",
"SNParamsTree",
"Sequential",
"SpectralNorm",
"State",
"Transformed",
"TransformedWithState",
"VanillaRNN",
"avg_pool",
"cond",
"data_structures",
"deep_rnn_with_skip_connections",
"dropout",
"dynamic_unroll",
"expand_apply",
"fori_loop",
"get_parameter",
"get_state",
"grad",
"initializers",
"jit",
"max_pool",
"maybe_next_rng_key",
"multinomial",
"nets",
"next_rng_key",
"next_rng_keys",
"one_hot",
"pad",
"remat",
"reserve_rng_keys",
"running_init",
"scan",
"set_state",
"static_unroll",
"switch",
"testing",
"to_module",
"transform",
"transform_with_state",
"transparent",
"value_and_grad",
"vmap",
"while_loop",
"with_rng",
"without_apply_rng",
"without_state",
)
# _________________________________________
# / Please don't use symbols in `_src` they \
# \ are not part of the Haiku public API. /
# -----------------------------------------
# \ ^__^
# \ (oo)\_______
# (__)\ )\/\
# ||----w |
# || ||
#
try:
del _src # pylint: disable=undefined-variable
except NameError:
pass
|
Python
| 0.000007
|
@@ -4337,17 +4337,21 @@
= %220.0.
-3
+4.dev
%22%0A%0A__all
|
d0c4e48268af96bf809118529132254e050eeabd
|
Fix dataclass_json docstring (#201)
|
dataclasses_json/api.py
|
dataclasses_json/api.py
|
import abc
import json
from enum import Enum
from typing import (Any, Callable, Dict, List, Optional, Tuple, Type, TypeVar,
Union)
from stringcase import (camelcase, pascalcase, snakecase,
spinalcase) # type: ignore
from dataclasses_json.cfg import config
from dataclasses_json.core import (Json, _ExtendedEncoder, _asdict,
_decode_dataclass)
from dataclasses_json.mm import (JsonData, SchemaType, build_schema)
from dataclasses_json.undefined import Undefined
from dataclasses_json.utils import (_handle_undefined_parameters_safe,
_undefined_parameter_action_safe)
A = TypeVar('A', bound="DataClassJsonMixin")
B = TypeVar('B')
C = TypeVar('C')
Fields = List[Tuple[str, Any]]
class LetterCase(Enum):
CAMEL = camelcase
KEBAB = spinalcase
SNAKE = snakecase
PASCAL = pascalcase
class DataClassJsonMixin(abc.ABC):
"""
DataClassJsonMixin is an ABC that functions as a Mixin.
As with other ABCs, it should not be instantiated directly.
"""
dataclass_json_config = None
def to_json(self,
*,
skipkeys: bool = False,
ensure_ascii: bool = True,
check_circular: bool = True,
allow_nan: bool = True,
indent: Optional[Union[int, str]] = None,
separators: Tuple[str, str] = None,
default: Callable = None,
sort_keys: bool = False,
**kw) -> str:
return json.dumps(self.to_dict(encode_json=False),
cls=_ExtendedEncoder,
skipkeys=skipkeys,
ensure_ascii=ensure_ascii,
check_circular=check_circular,
allow_nan=allow_nan,
indent=indent,
separators=separators,
default=default,
sort_keys=sort_keys,
**kw)
@classmethod
def from_json(cls: Type[A],
s: JsonData,
*,
encoding=None,
parse_float=None,
parse_int=None,
parse_constant=None,
infer_missing=False,
**kw) -> A:
kvs = json.loads(s,
encoding=encoding,
parse_float=parse_float,
parse_int=parse_int,
parse_constant=parse_constant,
**kw)
return cls.from_dict(kvs, infer_missing=infer_missing)
@classmethod
def from_dict(cls: Type[A],
kvs: Json,
*,
infer_missing=False) -> A:
return _decode_dataclass(cls, kvs, infer_missing)
def to_dict(self, encode_json=False) -> Dict[str, Json]:
return _asdict(self, encode_json=encode_json)
@classmethod
def schema(cls: Type[A],
*,
infer_missing: bool = False,
only=None,
exclude=(),
many: bool = False,
context=None,
load_only=(),
dump_only=(),
partial: bool = False,
unknown=None) -> SchemaType:
Schema = build_schema(cls, DataClassJsonMixin, infer_missing, partial)
if unknown is None:
undefined_parameter_action = _undefined_parameter_action_safe(cls)
if undefined_parameter_action is not None:
# We can just make use of the same-named mm keywords
unknown = undefined_parameter_action.name.lower()
return Schema(only=only,
exclude=exclude,
many=many,
context=context,
load_only=load_only,
dump_only=dump_only,
partial=partial,
unknown=unknown)
def dataclass_json(_cls=None, *, letter_case=None,
undefined: Optional[Union[str, Undefined]] = None):
"""
Based on the code in the `dataclasses` module to handle optional-parens
decorators. See example below:
@dataclass_json
@dataclass_json(letter_case=Lettercase.CAMEL)
class Example:
...
"""
def wrap(cls):
return _process_class(cls, letter_case, undefined)
if _cls is None:
return wrap
return wrap(_cls)
def _process_class(cls, letter_case, undefined):
if letter_case is not None or undefined is not None:
cls.dataclass_json_config = config(letter_case=letter_case,
undefined=undefined)[
'dataclasses_json']
cls.to_json = DataClassJsonMixin.to_json
# unwrap and rewrap classmethod to tag it to cls rather than the literal
# DataClassJsonMixin ABC
cls.from_json = classmethod(DataClassJsonMixin.from_json.__func__)
cls.to_dict = DataClassJsonMixin.to_dict
cls.from_dict = classmethod(DataClassJsonMixin.from_dict.__func__)
cls.schema = classmethod(DataClassJsonMixin.schema.__func__)
cls.__init__ = _handle_undefined_parameters_safe(cls, kvs=(), usage="init")
# register cls as a virtual subclass of DataClassJsonMixin
DataClassJsonMixin.register(cls)
return cls
|
Python
| 0
|
@@ -4374,17 +4374,17 @@
e=Letter
-c
+C
ase.CAME
|
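For context, a short usage sketch of the decorator whose docstring this commit fixes (Person is an illustrative class; the dataclasses_json package must be installed):

from dataclasses import dataclass
from dataclasses_json import LetterCase, dataclass_json

@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class Person:
    first_name: str

p = Person('Ada')
print(p.to_json())                               # {"firstName": "Ada"}
print(Person.from_json('{"firstName": "Ada"}'))  # Person(first_name='Ada')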
7a1be98a80104c01c6262502307d1cdee3d21a5e
|
Fix event change streaming when start/end times change
|
models.py
|
models.py
|
import secrets
from os import environ
import dateutil.parser
import pytz
import sqlalchemy
from sqlalchemy import create_engine, Column, Integer, String, TIMESTAMP
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, scoped_session
from sqlalchemy.inspection import inspect
import zulip_util
import rc
engine = create_engine(environ['DATABASE_URL'], echo=True)
Base = declarative_base()
session_factory = sessionmaker(bind=engine)
Session = scoped_session(session_factory)
class Event(Base):
__tablename__ = 'events'
id = Column(Integer, primary_key=True)
recurse_id = Column(Integer, unique=True)
_created_at = Column("created_at", TIMESTAMP(timezone=True))
created_by = Column(String)
url = Column(String)
timezone = Column(String)
_start_time = Column("start_time", TIMESTAMP(timezone=True))
_end_time = Column("end_time", TIMESTAMP(timezone=True))
title = Column(String)
stream = Column(String)
subject = Column(String)
@property
def created_at(self):
return self._created_at.astimezone(pytz.utc)
@created_at.setter
def created_at(self, value):
self._created_at = value
@property
def start_time(self):
return self._start_time.astimezone(pytz.timezone(self.timezone))
@start_time.setter
def start_time(self, value):
self._start_time = value
@property
def end_time(self):
return self._end_time.astimezone(pytz.timezone(self.timezone))
@end_time.setter
def end_time(self, value):
self._end_time = value
def update(self, **updates):
assign_attributes(self, updates)
Session.add(self)
Session.commit()
def refresh_from_api(self, include_participants=False):
data = rc.get_event(self.recurse_id, include_participants=include_participants)
event_data = event_dict(data)
self.update(**event_data)
return data
def already_initialized(self):
return bool(self.stream or self.subject)
# 5–7pm EDT, Wednesday, May 17, 2017
def timestamp(self):
start = self.start_time.strftime("%-I:%M%p").lower()
end = self.end_time.strftime("%-I:%M%p").lower()
zone = self.start_time.tzinfo.tzname(self.start_time)
date = self.start_time.strftime("%A, %b %-d, %Y")
return "{}–{} {}, {}".format(start, end, zone, date)
def zulip_link(self):
# This format doesn't autolink yet. Should create an issue for it.
# return "#**{} > {}**".format(self.stream, self.subject)
url = zulip_util.stream_topic_to_narrow_url(self.stream, self.subject)
return "**[#{} > {}]({})**".format(self.stream, self.subject, url)
@sqlalchemy.event.listens_for(Event, 'after_insert')
def announce_on_zulip(mapper, conn, event):
zulip_util.announce_event(event)
@sqlalchemy.event.listens_for(Event, 'after_update')
def post_changes_to_zulip(mapper, conn, event):
messages = []
if event.already_initialized():
changes = get_changes(event)
if 'title' in changes:
messages.append("The title has changed: " + event.title)
if 'start_time' in changes or 'end_time' in changes:
messages.append("The time has changed: " + event.timestamp())
if messages:
zulip_util.send_message({
"type": "stream",
"display_recipient": event.stream,
"subject": event.subject,
"body": "**This event has changed!**\n" + "\n".join(messages)
})
def assign_attributes(model, attributes):
for k, v in attributes.items():
setattr(model, k, v)
return model
def event_dict(e):
return {
"recurse_id": e['id'],
"created_at": parse_time(e, 'created_at', utc=True),
"timezone": e['timezone'],
"start_time": parse_time(e, 'start_time'),
"end_time": parse_time(e, 'end_time'),
"created_by": e['created_by']['name'],
"url": e['url'],
"title": e['title']
}
def make_event(e):
return Event(**event_dict(e))
def insert_event(e):
event = make_event(e)
Session.add(event)
Session.commit()
return event
def parse_time(event, attr, utc=False):
if utc:
tz = pytz.utc
else:
tz = pytz.timezone(event['timezone'])
return dateutil.parser.parse(event[attr]).astimezone(tz)
def get_changes(obj):
state = inspect(obj)
changes = {}
for attr in state.attrs:
history = state.get_history(attr.key, True)
if history.has_changes():
changes[attr.key] = history.added
return changes
|
Python
| 0.000001
|
@@ -3183,16 +3183,17 @@
if '
+_
start_ti
@@ -3211,16 +3211,17 @@
ges or '
+_
end_time
|
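The fix works because SQLAlchemy's attribute history is tracked on the mapped columns (_start_time, _end_time), not on the Python properties wrapping them. A minimal sketch of the same inspect/get_history pattern, using a throwaway model and an in-memory SQLite engine (illustrative only, mirroring the get_changes call above):

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.inspection import inspect
from sqlalchemy.orm import sessionmaker

Base = declarative_base()

class Thing(Base):
    __tablename__ = 'things'
    id = Column(Integer, primary_key=True)
    title = Column(String)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
thing = Thing(title='old')
session.add(thing)
session.commit()

thing.title = 'new'
history = inspect(thing).get_history('title', True)  # same call as get_changes
print(history.has_changes(), history.added)          # True ['new']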
ebfc34686ab893b4b84b4cd810e18213123dc1ee
|
remove comment
|
datajoint/connection.py
|
datajoint/connection.py
|
"""
This module hosts the Connection class that manages the connection to the mysql database,
and the `conn` function that provides access to a persistent connection in datajoint.
"""
import warnings
from contextlib import contextmanager
import pymysql as client
import logging
from . import config
from . import DataJointError
from .dependencies import Dependencies
from .jobs import JobManager
from pymysql import err
logger = logging.getLogger(__name__)
def conn(host=None, user=None, passwd=None, init_fun=None, reset=False):
"""
Returns a persistent connection object to be shared by multiple modules.
If the connection is not yet established or reset=True, a new connection is set up.
If connection information is not provided, it is taken from config which takes the
information from dj_local_conf.json. If the password is not specified in that file
datajoint prompts for the password.
:param host: hostname
:param user: mysql user
:param passwd: mysql password
:param init_fun: initialization function
:param reset: whether the connection should be reseted or not
"""
if not hasattr(conn, 'connection') or reset:
host = host if host is not None else config['database.host']
user = user if user is not None else config['database.user']
passwd = passwd if passwd is not None else config['database.password']
if passwd is None:
passwd = input("Please enter database password: ")
init_fun = init_fun if init_fun is not None else config['connection.init_function']
conn.connection = Connection(host, user, passwd, init_fun)
return conn.connection
class Connection:
"""
A dj.Connection object manages a connection to a database server.
It also catalogues modules, schemas, tables, and their dependencies (foreign keys).
Most of the parameters below should be set in the local configuration file.
:param host: host name
:param user: user name
:param passwd: password
:param init_fun: initialization function
"""
def __init__(self, host, user, passwd, init_fun=None):
if ':' in host:
host, port = host.split(':')
port = int(port)
else:
port = config['database.port']
self.conn_info = dict(host=host, port=port, user=user, passwd=passwd)
self.init_fun = init_fun
self.connect()
if self.is_connected:
logger.info("Connected {user}@{host}:{port}".format(**self.conn_info))
else:
raise DataJointError('Connection failed.')
self._conn.autocommit(True)
self._in_transaction = False
self.jobs = JobManager(self)
self.schemas = dict()
self.dependencies = Dependencies(self)
def __del__(self):
logger.info('Disconnecting {user}@{host}:{port}'.format(**self.conn_info))
self._conn.close()
def __eq__(self, other):
return self.conn_info == other.conn_info
def __repr__(self):
connected = "connected" if self.is_connected else "disconnected"
return "DataJoint connection ({connected}) {user}@{host}:{port}".format(
connected=connected, **self.conn_info)
def connect(self, init_fun=None):
"""
Connects to the database server.
:param init_fun: initialization function passed to pymysql
"""
self._conn = client.connect(init_command=self.init_fun, **self.conn_info)
def register(self, schema):
self.schemas[schema.database] = schema
@property
def is_connected(self):
"""
Returns true if the object is connected to the database server.
"""
return self._conn.ping()
def query(self, query, args=(), as_dict=False):
"""
Execute the specified query and return the tuple generator (cursor).
:param query: mysql query
:param args: additional arguments for the client.cursor
:param as_dict: If as_dict is set to True, the returned cursor objects returns
query results as dictionary.
"""
cursor = client.cursors.DictCursor if as_dict else client.cursors.Cursor
cur = self._conn.cursor(cursor=cursor)
# Log the query
try:
logger.debug("Executing SQL:" + query[0:300])
cur.execute(query, args) # TODO insert reconnect
except err.OperationalError as e:
if 'MySQL server has gone away' in str(e) and config['database.reconnect']:
warnings.warn('''Mysql server has gone away.
Reconnected to the server. Data from transactions might be lost and referential constraints may
be violated. You can switch off this behavior by setting the 'database.reconnect' to False.
''')
self.connect()
else:
raise
return cur
# ---------- transaction processing
@property
def in_transaction(self):
"""
:return: True if there is an open transaction.
"""
self._in_transaction = self._in_transaction and self.is_connected
return self._in_transaction
def start_transaction(self):
"""
        Starts a transaction.
:raise DataJointError: if there is an ongoing transaction.
"""
if self.in_transaction:
raise DataJointError("Nested connections are not supported.")
self.query('START TRANSACTION WITH CONSISTENT SNAPSHOT')
self._in_transaction = True
logger.info("Transaction started")
def cancel_transaction(self):
"""
Cancels the current transaction and rolls back all changes made during the transaction.
"""
self.query('ROLLBACK')
self._in_transaction = False
logger.info("Transaction cancelled. Rolling back ...")
def commit_transaction(self):
"""
Commit all changes made during the transaction and close it.
"""
self.query('COMMIT')
self._in_transaction = False
logger.info("Transaction committed and closed.")
# -------- context manager for transactions
@property
@contextmanager
def transaction(self):
"""
        Context manager for transactions. Opens a transaction and closes it after the with statement.
If an error is caught during the transaction, the commits are automatically rolled back.
All errors are raised again.
Example:
>>> import datajoint as dj
>>> with dj.conn().transaction as conn:
>>> # transaction is open here
"""
try:
self.start_transaction()
yield self
except:
self.cancel_transaction()
raise
else:
self.commit_transaction()
|
Python
| 0
|
@@ -4389,31 +4389,8 @@
gs)
-# TODO insert reconnect
%0A
|
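A stripped-down sketch of the stacked @property/@contextmanager pattern that Connection.transaction relies on (Conn is a stand-in class; prints replace the real queries):

from contextlib import contextmanager

class Conn:
    @property
    @contextmanager
    def transaction(self):
        print('START TRANSACTION')
        try:
            yield self
        except Exception:
            print('ROLLBACK')
            raise
        else:
            print('COMMIT')

with Conn().transaction as conn:
    pass  # work inside the transaction; errors roll back automatically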
9c8bde1e57ad2e70c7b3b9188bff9b90e85434e6
|
Send email and push
|
models.py
|
models.py
|
from random import choice
import urllib2, json
from pushbullet import PushBullet
'''
Checks if the current login attempt is a security threat or not.
Performs the required action in each case
'''
def is_safe(form, ip, mandrill):
ip = ip
geo = form.get('geo', None)
os = form.get('os', None)
browser = form.get('browser', None)
# check against our database
safety_status = choice(range(-1, 2))
return {
'safety_code': safety_status,
'token': 'fake_token',
'debug': [ip, geo, os, browser]} # send SMS, mail...
def send_push(pushbullet_token, message, lat=40.4086, lon=-3.6922):
""" Sends a foo location to Pushbullet """
pb = PushBullet(pushbullet_token)
success, push = pb.push_link("Login from suspicious location detected now!", "http://maps.google.com/maps?&z=10&q=%f,+%f&ll=%f+%f" % (lat, lon, lat, lon), "A suspicious login has appeared, try to guess who is it")
return success
def send_mail(mandrill, to):
mandrill.send_email(
from_email='someone@yourdomain.com',
subject='Blocked suspicious login attempt @twitter',
to=[{'email': to}],
text='''An attack has been detected and blocked (LND=>NY login with 5h difference).
Authorize this access by [...]'''
)
def geocode_ip(ip_addr):
""" Geocodes a given IP Address """
data = json.load(urllib2.urlopen("http://ip-api.com/json/%s" % ip_addr))
print "Geocoded data: %s" % data
return data
|
Python
| 0
|
@@ -18,16 +18,25 @@
t choice
+, randint
%0A%0Aimport
@@ -85,16 +85,38 @@
hBullet%0A
+from os import environ
%0A''' %0ACh
@@ -269,16 +269,18 @@
ip
+
= ip%0A
@@ -284,15 +284,60 @@
-geo
+latitude = form.get('latitude', None)%0A longitude
= f
@@ -345,19 +345,25 @@
rm.get('
-geo
+longitude
', None)
@@ -375,16 +375,18 @@
os
+
= form.g
@@ -396,24 +396,67 @@
'os', None)%0A
+ mobile = form.get('isMobile', None)%0A
browser
@@ -455,16 +455,18 @@
browser
+
= form.g
@@ -559,16 +559,291 @@
-1, 2))%0A
+ if safety_status %3C 1:%0A auth_code = '%2506d' %25 randint(0,999999)%0A send_push(%22Confirm your access%22, %22Suspicious access detected from IP %25s, confirm with code %25s%22 %25 (ip, auth_code))%0A send_mail(mandrill, 'zen@itram.es', latitude, longitude, ip, auth_code)%0A
retu
@@ -907,20 +907,17 @@
n':
-'fake_token'
+auth_code
,%0A
@@ -940,27 +940,59 @@
ip,
-geo, os, browser%5D%7D
+latitude, longitude, os, mobile, browser%5D%0A %7D
# se
@@ -1026,34 +1026,16 @@
ush(
-pushbullet_token,
message,
lat
@@ -1030,16 +1030,22 @@
message,
+ body,
lat=40.
@@ -1061,16 +1061,66 @@
=-3.6922
+, pushbullet_token=environ.get('PUSHBULLET_TOKEN')
):%0A %22
@@ -1463,18 +1463,249 @@
rill, to
+, latitude, longitude, ip, safety_code):%0A gmaps_uri = %22http://maps.googleapis.com/maps/api/staticmap?center=%25s,%25s&zoom=15&size=400x400&markers=color:red%25%257Clabel:S%25%257C%25s,%25s&sensor=true%22 %25 (latitude, longitude, latitude, longitude
)
-:
%0A man
@@ -1788,17 +1788,19 @@
ct='
-Blocked s
+%5BLogLock%5D S
uspi
@@ -1823,16 +1823,16 @@
mpt
-@twitter
+detected
',%0A
@@ -1870,137 +1870,260 @@
-text='''An attack has been detected and blocked (LND=%3ENY login with 5h difference).%0A Authorize this access by %5B...%5D'''
+html='''%0A An access attempt has been logged from a suspicious location:%0A %3Cp%3E%3Cimg src=%22%25s%22 /%3E%3C/p%3E%0A %3Cp%3EIP address: %25s%3C/p%3E%0A%0A Please confirm it is you with the following code: %3Cb%3E%25s%3C/b%3E%0A%0A ''' %25(gmaps_uri, ip, safety_code)
%0A
|
2f4aa70b86ad3a6d7672b7fc14d054a3c87bf1a3
|
Add result param to registration callback for easier client-side parsing
|
models.py
|
models.py
|
# Copyright (c) 2010, Scott Ferguson
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the software nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY SCOTT FERGUSON ''AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL SCOTT FERGUSON BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from django.db import models
from django.conf import settings
from django.db.models.signals import post_save
import urllib, urllib2
from urllib2 import URLError
import datetime
C2DM_URL = 'https://android.apis.google.com/c2dm/send'
class AndroidDevice(models.Model):
'''
Profile of a c2dm-enabled Android device
device_id - Unique ID for the device. Simply used as a default method to specify a device.
registration_id - Result of calling registration intent on the device. Subject to change.
collapse_key - Required arbitrary collapse_key string.
last_messaged - When did we last send a push to the device
failed_push - Have we had a failure when pushing to this device? Flag it here.
'''
device_id = models.CharField(max_length = 64, unique = True)
registration_id = models.CharField(max_length = 140)
collapse_key = models.CharField(max_length = 50)
last_messaged = models.DateTimeField(blank = True, default = datetime.datetime.now)
failed_push = models.BooleanField(default = False)
def send_message(self, delay_while_idle = False, **kwargs):
'''
Sends a message to the device.
delay_while_idle - If included, indicates that the message should not be sent immediately if the device is idle. The server will wait for the device to become active, and then only the last message for each collapse_key value will be sent.
data.keyX fields are populated via kwargs.
'''
if self.failed_push:
return
values = {
'registration_id': self.registration_id,
'collapse_key': self.collapse_key,
}
if delay_while_idle:
values['delay_while_idle'] = ''
for key,value in kwargs.items():
values['data.%s' % key] = value
headers = {
'Authorization': 'GoogleLogin auth=%s' % settings.AUTH_TOKEN,
}
try:
params = urllib.urlencode(values)
request = urllib2.Request(C2DM_URL, params, headers)
# Make the request
response = urllib2.urlopen(request)
result = response.read().split('=')
if 'Error' in result:
if result[1] == 'InvalidRegistration' or result[1] == 'NotRegistered':
self.failed_push = True
self.save()
raise Exception(result[1])
except URLError:
            return False
        except Exception, error:
            return False
def __unicode__(self):
return '%s' % self.device_id
def send_multiple_messages(device_list, **kwargs):
'''
Same as send_message but sends to a list of devices.
data.keyX fields are populated via kwargs.
'''
for device in device_list:
        device.send_message(**kwargs)
def filter_failed_devices():
'''
Removes any devices with failed registration_id's from the database
'''
for device in AndroidDevice.objects.filter(failed_push = True):
device.delete()
def registration_completed_callback(sender, **kwargs):
'''
Returns a push response when the device has successfully registered.
'''
profile = kwargs['instance']
profile.send_message(message = 'Registration successful')
post_save.connect(registration_completed_callback, sender = AndroidDevice)
|
Python
| 0
|
@@ -4795,16 +4795,30 @@
cessful'
+, result = '1'
)%0Apost_s
|
0980b517300cd9838497de8ae7ccf7276be2e365
|
support atshit.com
|
attracker_project/settings.py
|
attracker_project/settings.py
|
"""
Django settings for attracker_project project.
Generated by 'django-admin startproject' using Django 1.8.3.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '_k6sactgdq*w4qp1&-^lnjgih8&lie-%y-%kcii*owbs)dl&=d'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['whispering-mesa-73799.herokuapp.com','localhost','0.0.0.0']
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'attracker_app',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'attracker_project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'attracker_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
#'default': {
# 'ENGINE': 'django.db.backends.sqlite3',
# 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
#}
'default': {
#'ENGINE': 'django.db.backends.postgresql',
'ENGINE': 'django.db.backends.postgresql_psycopg2',
#'NAME': 'attracker',
'USER': 'cpadmin',
'PASSWORD': 'Reve4444',
'HOST': '127.0.0.1',
'PORT': '5432',
}
#2010 recommendation: 'default': dj_database_url.config(default='postgres://localhost:5432/attracker'),
}
import dj_database_url
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'America/New_York' # 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Default date format
DATE_FORMAT = 'Y-m-d'
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
STATIC_ROOT = os.path.join(PROJECT_DIR, 'static')
STATIC_URL = '/static/'
# Constants
AT_TRAIL_MILES = 2189.2
|
Python
| 0
|
@@ -893,16 +893,29 @@
0.0.0.0'
+,'atshit.com'
%5D%0A%0A%0A# Ap
|
ea8cbcaf41f01a46390882fbc99e6e14d70a49d1
|
Create an API auth token for every newly created user
|
src/mmw/apps/user/models.py
|
src/mmw/apps/user/models.py
|
# -*- coding: utf-8 -*-
from django.contrib.auth.models import User
from django.db import models
class ItsiUserManager(models.Manager):
def create_itsi_user(self, user, itsi_id):
itsi_user = self.create(user=user, itsi_id=itsi_id)
return itsi_user
class ItsiUser(models.Model):
user = models.OneToOneField(User, primary_key=True)
itsi_id = models.IntegerField()
objects = ItsiUserManager()
def __unicode__(self):
return unicode(self.user.username)
|
Python
| 0
|
@@ -16,16 +16,50 @@
tf-8 -*-
+%0A%0Afrom django.conf import settings
%0Afrom dj
@@ -124,16 +124,409 @@
models%0A
+from django.db.models.signals import post_save%0Afrom django.dispatch import receiver%0A%0Afrom rest_framework.authtoken.models import Token%0A%0A%0A@receiver(post_save, sender=settings.AUTH_USER_MODEL)%0Adef create_auth_token(sender, instance=None, created=False, **kwargs):%0A %22%22%22%0A Create an auth token for every newly created user.%0A %22%22%22%0A if created:%0A Token.objects.create(user=instance)%0A
%0A%0Aclass
|
8a304e7c09a2e6a01454d686a882a8e139be7a3d
|
make async dbus call, return result in deferred method
|
dbus-tools/dbus-send.py
|
dbus-tools/dbus-send.py
|
###############################################################################
# Copyright 2012 Intel Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import sys, dbus, json
from twisted.internet import glib2reactor
# Configure the twisted mainloop to be run inside the glib mainloop.
# This must be done before importing the other twisted modules
glib2reactor.install()
from twisted.internet import reactor, defer
from autobahn.websocket import listenWS
from autobahn.wamp import exportRpc, WampServerFactory, WampServerProtocol
from dbus.mainloop.glib import DBusGMainLoop
import gobject
gobject.threads_init()
from dbus import glib
glib.init_threads()
# enable debug log
from twisted.python import log
log.startLogging(sys.stdout)
###############################################################################
class DbusSendService:
@exportRpc
def dbusSend(self, list):
if len(list) < 5:
raise Exception("Error: expected arguments: bus, destination, object, interface, message, [args])")
if list[0] == "session":
bus = dbus.SessionBus()
elif list[0] == "system":
bus = dbus.SystemBus()
else:
raise Exception("Error: invalid bus: %s" % list[0])
# parse JSON arg list
args = []
if len(list) == 6:
args = json.loads(list[5])
# get dbus proxy
object = bus.get_object(list[1], list[2])
method = object.get_dbus_method(list[4], list[3])
# defer dbus call
request = defer.Deferred()
request.addCallback(self.dbusCallback)
reactor.callLater(0, request.callback, (method, args))
return request
def dbusCallback(self, list):
# call dbus method
result = list[0](*list[1])
# return JSON string result
return json.dumps(result)
###############################################################################
class DbusSendServerProtocol(WampServerProtocol):
def onSessionOpen(self):
# create dbus-send service instance and register it for RPC.
self.dbusSendService = DbusSendService()
self.registerForRpc(self.dbusSendService)
###############################################################################
if __name__ == '__main__':
port = "9000"
if len(sys.argv) == 2:
port = sys.argv[1]
uri = "ws://localhost:" + port
factory = WampServerFactory(uri, debugWamp = True)
factory.protocol = DbusSendServerProtocol
factory.setProtocolOptions(allowHixie76 = True)
listenWS(factory)
DBusGMainLoop(set_as_default=True)
reactor.run()
|
Python
| 0.000001
|
@@ -2107,18 +2107,40 @@
efer
- dbus call
+red reply to return dbus results
%0A
@@ -2140,24 +2140,29 @@
lts%0A
+self.
request = de
@@ -2188,108 +2188,115 @@
-request.addCallback(self.dbusCallback)%0A reactor.callLater(0, request.callback, (method, args)
+# dbus method async call%0A method(*args, reply_handler=self.dbusSuccess, error_handler=self.dbusError
)%0A
@@ -2317,16 +2317,21 @@
return
+self.
request%0A
@@ -2348,34 +2348,36 @@
dbus
-Callback
+Success
(self,
-lis
+*resul
t):%0A
%09# c
@@ -2376,134 +2376,347 @@
+
-%09# call dbus method%0A result = list%5B0%5D(*list%5B1%5D)%0A # return JSON string result%0A return json.dumps(result
+ # return JSON string result array%0A self.request.callback(json.dumps(result))%0A %0A %0A def dbusError(self, error):%0A %09# raise exception in the deferred reply context%0A %09self.request.addCallback(self.raiseError)%0A self.request.callback(error)%0A%0A %0A def raiseError(self, error):%0A raise Exception(error
)%0A%0A
+%09
%0A###
|
1bbfce6c64debb9f9377d1a912604f32ace9dca5
|
Update runsegment.py
|
bin/runsegment.py
|
bin/runsegment.py
|
#!/usr/bin/python
import os
import numpy as np
import shutil
import common
from segment import normalizefile, segmentfile
def runAll(args):
print('\n\n\nYou have requested to normalize and segment bincounts files')
print('\tWARNING:')
print('\t\tIF USING ANY REFERENCES OTHER THAN THOSE I PROVIDE I CANNOT GUARANTEE RESULT ACCURACY')
print('\n')
#Set up environment#
args.CountDirectory = common.fixDirName(args.CountDirectory)
lowessDir = os.path.dirname(args.CountDirectory[:-1]) + '/LowessBinCounts/'
segmentDir = os.path.dirname(args.CountDirectory[:-1]) + '/Segments/'
tempDir = os.path.dirname(args.CountDirectory[:-1]) + '/Temp/'
if args.output:
lowessDir = common.fixDirName(args.output) + 'LowessBinCounts/'
segmentDir = common.fixDirName(args.output) + 'Segments/'
common.makeDir(lowessDir)
if not args.normalizeonly:
common.makeDir(segmentDir)
common.makeDir(tempDir)
sampleFiles = common.getSampleList(args.CountDirectory, args.samples, 'bincounts')
info = common.importInfoFile(args.infofile, args.columns, 'normalize')
if args.infofile:
refArray = info
else:
thisDtype = info
refArray = np.array(
[ (os.path.basename(x)[:-14], 'unk', 1,) for x in sampleFiles],
dtype=thisDtype)
sampleDict = {x: [y for y in sampleFiles if x == os.path.basename(y)[:len(x)]][0] for x in refArray['name']}
#Run normalization for all samples#
methodDict = {x: False for x in np.unique(refArray['method'])}
methodDict['NA'] = False
print methodDict
sampleNormMethodDict = {x: 'NA' for x in methodDict}
print sampleNormMethodDict
if not args.gconly:
for i in methodDict:
refSlice = refArray[(refArray['method'] == i) & (refArray['cells'] == 1)]
methodSamples = [sampleDict[x] for x in refSlice['name']]
methodDict[i] = normalizefile.runMakeMethodRef(args.species, methodSamples, i, lowessDir)
if methodDict[i] != False:
for j in refSlice['name']:
sampleNormMethodDict[j] = i
print methodDict
print sampleNormMethodDict
raise SystemExit
#run multiprocessing for gc (+ method) correction
normArgs = [(args.species, sampleDict[x], methodDict[sampleNormMethodDict[x]], lowessDir + x + '.lowess.txt') for x in sampleDict.keys()]
common.daemon(normalizefile.runNormalizeOne, normArgs, 'normalize bincount files')
print('\nNormalization complete\n\n\n')
# if args.normalizeonly:
# shutil.rmtree(tempDir[:-1])
# return 0
#Run CBS for all samples#
if not args.normalizeonly:
segArgs = [(x, args.species, tempDir, lowessDir, segmentDir) for x in refArray['name']]
common.daemon(segmentfile.segmentOne, segArgs, 'segment bincount data')
shutil.rmtree(tempDir[:-1])
print('\nSegmentation complete\n\n\n')
|
Python
| 0.000001
|
@@ -1480,16 +1480,17 @@
od'%5D)%7D%09%0A
+#
%09methodD
@@ -1507,34 +1507,16 @@
= False%0A
-%09print methodDict%0A
%09sampleN
@@ -1565,36 +1565,8 @@
ct%7D%0A
-%09print sampleNormMethodDict%0A
%09%0A%09i
@@ -1684,16 +1684,34 @@
== 1)%5D%0A
+%09%09%09print refSlice%0A
%09%09%09metho
@@ -1763,16 +1763,39 @@
name'%5D%5D%0A
+%09%09%09print methodSamples%0A
%09%09%09%0A%09%09%09m
@@ -1885,16 +1885,37 @@
sDir)%0A%09%09
+%09print methodDict%0A%09%09%09
%0A%09%09%09if m
|
4f27b87b9ee600c7c1c05ae9fece549b9c18e2a4
|
Create default instance in middleware if not found.
|
speeches/middleware.py
|
speeches/middleware.py
|
from instances.models import Instance
class InstanceMiddleware:
"""This middleware sets request.instance to the default Instance for all
requests. This can be changed/overridden if you use SayIt in a way that
uses multiple instances."""
def process_request(self, request):
request.instance = Instance.objects.get(label='default')
request.is_user_instance = (
request.user.is_authenticated() and
( request.instance in request.user.instances.all() or request.user.is_superuser )
)
|
Python
| 0
|
@@ -307,16 +307,19 @@
instance
+, _
= Insta
@@ -333,16 +333,26 @@
ects.get
+_or_create
(label='
|
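The unpacking in the diff is needed because Django's get_or_create returns an (object, created) pair rather than the bare instance:

instance, created = Instance.objects.get_or_create(label='default')
# created is True only when the default Instance had to be made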
cb54c04049050d853f874e92c83061aad911a19a
|
Update qotd-parser.py
|
intelmq/bots/parsers/shadowserver/qotd-parser.py
|
intelmq/bots/parsers/shadowserver/qotd-parser.py
|
import csv
import StringIO
from intelmq.lib.bot import Bot, sys
from intelmq.lib.event import Event
from intelmq.bots import utils
class ShadowServerQotdParserBot(Bot):
def process(self):
report = self.receive_message()
if report:
report = report.strip()
columns = {
"timestamp": "source_time",
"ip": "source_ip",
"protocol" : "transport_protocol",
"port" : "source_port",
"hostname": "source_reverse_dns",
"tag" : "__IGNORE__",
"quote" : "__IGNORE__",
"asn": "source_asn",
"geo": "source_cc",
"region" : "source_region",
"city" : "source_city"
}
rows = csv.DictReader(StringIO.StringIO(report))
for row in rows:
event = Event()
for key, value in row.items():
key = columns[key]
if not value:
continue
value = value.strip()
                    if key == "__IGNORE__" or key == "__TDB__":
continue
event.add(key, value)
event.add('feed', 'shadowserver-qotd')
event.add('type', 'vulnerable service')
event.add('protocol', 'qotd')
event = utils.parse_source_time(event, "source_time")
event = utils.generate_observation_time(event, "observation_time")
event = utils.generate_reported_fields(event)
self.send_message(event)
self.acknowledge_message()
if __name__ == "__main__":
bot = ShadowServerQotdParserBot(sys.argv[1])
bot.start()
|
Python
| 0.000001
|
@@ -1489,16 +1489,28 @@
nt.add('
+application_
protocol
|
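A self-contained illustration of the column-remapping loop this parser applies to each CSV row (Python 3 shown; the sample report and key mapping are made up):

import csv
import io

columns = {'timestamp': 'source_time', 'ip': 'source_ip'}
report = 'timestamp,ip\n2014-01-01 00:00:00,198.51.100.7\n'

for row in csv.DictReader(io.StringIO(report)):
    event = {columns[key]: value.strip() for key, value in row.items() if value}
    print(event)  # {'source_time': '2014-01-01 00:00:00', 'source_ip': '198.51.100.7'}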
08ee872020b9365d7b79a305328b62a18a9a4fa1
|
fix the problem output format is wrong
|
mondai.py
|
mondai.py
|
import random
import sys
field_size_max = 32
field_size_min = 20
field_threshold_min = 0.00 #threshold of obstacle
field_threshold_max = 0.30
block_size_max = 8
block_cells_min = 4
blocks_ratio_min = 0.7 #blank cells in field / cells of blocks
blocks_ratio_max = 1.5
blocks_threshold_min = 0.05 #threshold of making random block
blocks_threshold_max = 0.50
blocks_max = 200
sys.setrecursionlimit(100000)
def rand(threshold=0.5):
return random.random() < threshold
def draw(problem, true="1", false="0"):
draw_block(problem[0], true, false)
print()
print(len(problem[1]))
for block in problem[1]:
draw_block(block, true, false)
print()
def draw_block(lines, true="1", false="0"):
for line in lines:
print(" ".join([(true if x else false) for x in line]))
def create():
field_width = (int)(random.random() * (field_size_max - field_size_min) + field_size_min)
field_height = (int)(random.random() * (field_size_max - field_size_min) + field_size_min)
field_threshold = random.random() * (field_threshold_max - field_threshold_min) + field_threshold_min
field, blank_blocks = create_panel(field_width, field_height, field_threshold)
cells_min = (int) (blank_blocks * (random.random() * (blocks_ratio_max - blocks_ratio_min) + blocks_ratio_min))
cells_sum = 0
blocks = []
while True:
block, cells = create_block(random.random() * (blocks_threshold_max - blocks_threshold_min) + blocks_threshold_min)
blocks.append(block)
cells_sum = cells_sum + cells
if cells_sum >= cells_min or len(blocks) == blocks_max:
break
return field, blocks
def create_panel(field_width, field_height, field_threshold):
field = [[rand(field_threshold) if (i < field_height and j < field_width) else True for j in range(0, field_size_max)] for i in range(0, field_size_max)]
if has_hole(get_groups(field, False)[0]):
return create_panel(field_width, field_height, field_threshold)
n = 0
for line in field:
for item in line:
if item:
n = n + 1
return field, n
def create_block(block_threshold):
block = [[rand(block_threshold) for j in range(0, block_size_max)] for i in range(0, block_size_max)]
blocks, count = get_groups(block, True)
if count < block_cells_min:
return create_block(block_threshold)
max = None
result_number = 0
for item in blocks:
item_length = len(item)
if 4 <= item_length <= 16:
if max == None or max[1] < item_length:
max = [[item], item_length]
elif max[1] == item_length:
max[0].append(item);
if max == None:
return create_block(block_threshold)
result = cells_to_block(max[0][random.randint(0, len(max[0]) - 1)])
negative_blocks, count = get_groups(result, False)
if count > 0:
if has_hole(negative_blocks):
return create_block(block_threshold)
return result, max[1]
def has_hole(blocks):
for block in blocks:
contact_with_edge = False
for cell in block:
if cell[0] == 0 or cell[0] == block_size_max - 1 or cell[1] == 0 or cell[1] == block_size_max - 1:
contact_with_edge = True
break
if not contact_with_edge:
return True
return False
def cells_to_block(cells):
result = [[False for j in range(0, block_size_max)] for i in range(0, block_size_max)]
for cell in cells:
result[cell[0]][cell[1]] = True
return result
def get_groups(block, target=True):
used_block = [[False for j in range(0, block_size_max)] for i in range(0, block_size_max)]
blocks = []
count = 0
def check(i, j, target=True):
if 0 <= i < block_size_max and 0 <= j < block_size_max and block[i][j] == target and not used_block[i][j]:
return True
else:
return False
def find(i, j, target=True):
nonlocal count
result = []
if not used_block[i][j]:
count = count + 1
used_block[i][j] = True
result.append((i, j))
if check(i - 1, j, target):
result.extend(find(i - 1, j, target))
if check(i, j - 1, target):
result.extend(find(i, j - 1, target))
if check(i + 1, j, target):
result.extend(find(i + 1, j, target))
if check(i, j + 1, target):
result.extend(find(i, j + 1, target))
return result
for i in range(0, block_size_max):
for j in range(0, block_size_max):
if check(i, j, target):
blocks.append(find(i, j, target))
return blocks, count
if __name__ == "__main__":
draw(create(), "▪︎", " ")
|
Python
| 1
|
@@ -546,28 +546,16 @@
false)%0A
- print()%0A
prin
|
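get_groups above is a recursive flood fill that shares a counter through nonlocal; the same idea in a tiny standalone form (grid and start cell are made up):

def count_region(grid, si, sj):
    seen = set()
    count = 0
    def fill(i, j):
        nonlocal count
        if not (0 <= i < len(grid) and 0 <= j < len(grid[0])):
            return
        if (i, j) in seen or not grid[i][j]:
            return
        seen.add((i, j))
        count += 1
        for di, dj in ((1, 0), (-1, 0), (0, 1), (0, -1)):
            fill(i + di, j + dj)
    fill(si, sj)
    return count

print(count_region([[1, 1, 0], [0, 1, 0]], 0, 0))  # 3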
17b57ac411d1be0794dec1e3dd5a62206aad389e
|
version number format
|
build.py
|
build.py
|
import os
import sys
from pybuilder.core import init, use_plugin
from pybuilder.vcs import VCSRevision
use_plugin("python.core")
use_plugin("python.unittest")
use_plugin("python.install_dependencies")
use_plugin("python.flake8")
use_plugin("python.coverage")
use_plugin("python.distutils")
use_plugin('python.cram')
name = "gaius"
summary = 'The deployment client that triggers Crassus to deploy artefacts'
description = """
Deployment client which pushs an AWS SNS message with CloudFormation-Stack
parameters as Payload to trigger
Crassus <https://github.com/ImmobilienScout24/crassus> as deployment Lambda
function"""
license = 'Apache License 2.0'
url = 'https://github.com/ImmobilienScout24/gaius'
version = VCSRevision().get_git_revision_count()
default_task = "publish"
@init
def set_properties(project):
project.depends_on("boto3")
project.depends_on("docopt")
if sys.version_info[0:2] < (2, 7):
project.depends_on("ordereddict")
project.build_depends_on("unittest2")
project.build_depends_on("mock")
project.build_depends_on("moto")
project.build_depends_on("mockito-without-hardcoded-distribute-version")
project.set_property("coverage_threshold_warn", 70)
project.set_property("coverage_branch_threshold_warn", 80)
project.set_property("coverage_branch_partial_threshold_warn", 80)
project.set_property('coverage_break_build', True)
project.set_property('distutils_classifiers', [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Topic :: System :: Systems Administration'
])
project.set_property(
'distutils_console_scripts', ['gaius=gaius.cli:send_message'])
project.version = '%s-%s' % (project.version,
os.environ.get('BUILD_NUMBER', 0))
|
Python
| 0.000078
|
@@ -1908,17 +1908,17 @@
on = '%25s
--
+.
%25s' %25 (p
|
2980c30a8de6cbdf5d22bb269c16f8c5ad499ba8
|
Fix build output (dots on one line)
|
build.py
|
build.py
|
import argparse
from utils import build_docker_image
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--nocache', action='store_true', default=False)
args = parser.parse_args()
for item in build_docker_image(nocache=args.nocache):
print item.values()[0]
if __name__ == '__main__':
main()
|
Python
| 0
|
@@ -1,12 +1,22 @@
+import re%0A
import argpa
@@ -19,16 +19,16 @@
rgparse%0A
-
from uti
@@ -221,19 +221,33 @@
-for item in
+content = ''%0A stream =
bui
@@ -287,40 +287,277 @@
che)
-:%0A print item.values()%5B0%5D
+%0A for item in stream:%0A buff = item.get('stream', item.get('status'))%0A%0A if not content or re.search('.+%5C%5B%5B. %5D*$', content):%0A content += buff%0A%0A if not re.search('.+%5C%5B%5B. %5D*$', content):%0A print(content)%0A content = ''
%0A%0A%0Ai
|
442de67baae52e6d7382c7c9618a9c1c7c831713
|
version bump after fixing debian tests again
|
curtsies/__init__.py
|
curtsies/__init__.py
|
"""Terminal-formatted strings"""
__version__='0.2.5'
from .window import FullscreenWindow, CursorAwareWindow
from .input import Input
from .termhelpers import Nonblocking, Cbreak, Termmode
from .formatstring import FmtStr, fmtstr
from .formatstringarray import FSArray, fsarray
|
Python
| 0
|
@@ -47,9 +47,9 @@
0.2.
-5
+6
'%0A%0Af
|
c16fb96de6154dec7bf0fc934dd9f7e1ac4b69f4
|
bump version
|
curtsies/__init__.py
|
curtsies/__init__.py
|
"""Terminal-formatted strings"""
__version__='0.1.14'
from .window import FullscreenWindow, CursorAwareWindow
from .input import Input
from .termhelpers import Nonblocking, Cbreak, Termmode
from .formatstring import FmtStr, fmtstr
from .formatstringarray import FSArray, fsarray
|
Python
| 0
|
@@ -48,9 +48,9 @@
.1.1
-4
+5
'%0A%0Af
|
0601e5214a75921696f50691285166dcda06288b
|
switch VR separator to --
|
tcpbridge/tcpbridge.py
|
tcpbridge/tcpbridge.py
|
#!/usr/bin/env python3
import select
import socket
import sys
class TcpBridge:
def __init__(self):
self.sockets = []
self.socket2remote = {}
def routerintf2addr(self, hostintf):
hostname, interface = hostintf.split("/")
try:
res = socket.getaddrinfo(hostname, "100%02d" % int(interface))
except socket.gaierror:
raise NoVR("Unable to resolve %s" % hostname)
sockaddr = res[0][4]
return sockaddr
def add_p2p(self, p2p):
source, destination = p2p.split("-")
src_router, src_interface = source.split("/")
dst_router, dst_interface = destination.split("/")
src = self.routerintf2addr(source)
dst = self.routerintf2addr(destination)
self.add_bridge(src, dst)
def add_bridge(self, left_addr, right_addr):
left = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
right = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
left.connect(left_addr)
right.connect(right_addr)
# add to list of sockets
self.sockets.append(left)
self.sockets.append(right)
# dict for looking up remote in pair
self.socket2remote[left] = right
self.socket2remote[right] = left
def work(self):
while True:
try:
ir,_,_ = select.select(self.sockets, [], [])
except select.error as exc:
break
for i in ir:
remote = self.socket2remote[i]
buf = i.recv(2048)
if len(buf) == 0:
return
remote.send(buf)
class NoVR(Exception):
""" No virtual router
"""
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser(description='')
parser.add_argument('--p2p', nargs='+', help='point-to-point link')
args = parser.parse_args()
tt = TcpBridge()
for p2p in args.p2p:
try:
tt.add_p2p(p2p)
except NoVR as exc:
print(exc, " Is it started and did you link it?")
sys.exit(1)
tt.work()
|
Python
| 0
|
@@ -554,16 +554,17 @@
split(%22-
+-
%22)%0A
|
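A loopback sketch of the socket-pairing idea behind TcpBridge.work: whatever select reports readable is copied to its partner. socketpair stands in for the two router connections (Unix-style demo, not the bridge's actual TCP setup):

import select
import socket

client, bridge_left = socket.socketpair()
bridge_right, server = socket.socketpair()
peer = {bridge_left: bridge_right, bridge_right: bridge_left}

client.send(b'hello')
readable, _, _ = select.select(list(peer), [], [])
for sock in readable:
    peer[sock].send(sock.recv(2048))  # forward to the remote in the pair
print(server.recv(2048))              # b'hello'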
e468abbc033a48d0222f50cf85319802f05fc57a
|
Check doctest
|
custom/onse/tests.py
|
custom/onse/tests.py
|
from datetime import date
from nose.tools import assert_equal
from custom.onse.tasks import get_last_quarter
def test_get_last_quarter():
test_dates = [
(date(2020, 1, 1), '2019Q4'),
(date(2020, 3, 31), '2019Q4'),
(date(2020, 4, 1), '2020Q1'),
(date(2020, 6, 30), '2020Q1'),
(date(2020, 7, 1), '2020Q2'),
(date(2020, 9, 30), '2020Q2'),
(date(2020, 10, 1), '2020Q3'),
(date(2020, 12, 31), '2020Q3'),
]
for test_date, expected_value in test_dates:
last_quarter = get_last_quarter(test_date)
assert_equal(last_quarter, expected_value)
|
Python
| 0
|
@@ -1,12 +1,27 @@
+import doctest%0A
from datetim
@@ -92,22 +92,16 @@
onse
-.tasks
import
get_
@@ -96,32 +96,21 @@
import
-get_last_quarter
+tasks
%0A%0A%0Adef t
@@ -540,16 +540,22 @@
arter =
+tasks.
get_last
@@ -625,8 +625,99 @@
_value)%0A
+%0A%0Adef test_doctests():%0A results = doctest.testmod(tasks)%0A assert results.failed == 0%0A
|
fe97597fa2cce2a8e6671c0b8975ecab3a78918f
|
update should be centralized
|
depl/deploy/__init__.py
|
depl/deploy/__init__.py
|
"""
Deploys have the same name as in ``grammar.yml``, are stored as python modules
in the ``deploy`` package. These need to contain a ``load()`` function that
returns a list of commands - either python functions (remember to use fabric!)
- or strings that need to be executed, as well as a list of dependencies.
Dependencies are stored in a ``dependencies.yml`` file. You easily change the
packages for your package manager.
"""
import os
from datetime import datetime
import yaml
from fabric.api import settings, run, sudo, warn_only
from fabric.context_managers import quiet
from depl import helpers
def load(name, settings):
"""Returns an iterable of commands to execute - basically callbacks."""
module = __import__('depl.deploy.' + name, globals(), locals(), [name], -1)
module_dependencies, commands = module.load(settings)
return [package_manager.run_update] + list(module_dependencies) + commands
def _apt_add_repo(repo, pgp=None, no_deb_src=False):
APT_PATH = '/etc/apt/sources.list'
package_manager.install('software-properties-common')
sudo('add-apt-repository -y "%s"' % repo)
if no_deb_src:
# annoying case of mongodb, doesn't work with deb-src, so remove it.
src_repo = 'deb-src' + repo[3:]
txt = helpers.read_file(APT_PATH)
lines = txt.splitlines()
try:
del lines[lines.index(src_repo)]
except ValueError:
pass
else:
txt = '\n'.join(lines)
helpers.write_file(txt, APT_PATH, True)
sudo('apt-key adv --keyserver keyserver.ubuntu.com --recv %s' % pgp)
sudo('apt-get -q update')
class Package(object):
"""
Represents a depl package - see ``deploy/dependencies.yml``.
Possible to install the package with ``self.__call__``.
"""
def __init__(self, name):
self.name = name
def __eq__(self, other):
return self.name == other.name
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return hash(self.name)
def __call__(self):
"""installation call"""
dep_string = dependencies[self.name][package_manager.system()]
if isinstance(dep_string, dict):
repo = dep_string['repo']
pgp = dep_string.get('pgp')
if package_manager.system() == 'apt':
_apt_add_repo(repo, pgp, dep_string.get('no-deb-src', False))
else:
raise NotImplementedError()
dep_string = dep_string['name']
package_manager.install(dep_string)
class _PackageManager(object):
"""Lookup the package manager lazily"""
MANAGERS = ['apt-get', 'pacman', 'yum']
def __init__(self):
self.__manager = None
def install(self, package_str):
man = self._manager()
if man == 'pacman':
sudo(man + ' -S {0}'.format(package_str))
elif man == 'yum':
sudo(man + ' install {0}'.format(package_str))
elif man == 'apt-get':
install = False
with quiet():
# Improve the speed by asking dpkg first if package exists
# already.
output = sudo('dpkg -s {0}'.format(package_str))
# sometimes a project is deinstalled - also check that.
if output.failed or 'Status: deinstall' in output:
install = True
if install:
# -q -> quiet, always say yes (-y) - no prompts!
sudo(man + ' -q install -y {0}'.format(package_str))
def system(self):
return 'apt' if self._manager() == 'apt-get' else self._manager()
def _manager(self):
if self.__manager:
return self.__manager
for name in self.MANAGERS:
with settings(warn_only=True):
# Everything must be run with fabric - otherwise detection is
# not possible.
result = run('which ' + name)
if result.return_code == 0:
break
else:
raise NotImplementedError("Didn't find a package manager for your OS.")
self.__manager = name
return name
def run_update(self):
if self.system() == 'apt':
with warn_only():
timestamp = run('stat -c %Y /var/lib/apt/periodic/update-success-stamp')
if timestamp.succeeded:
date = datetime.fromtimestamp(int(timestamp))
if timestamp.failed or (datetime.now() - date).days > 1:
# update unless the package info is older
sudo('apt-get -q update')
else:
raise NotImplementedError()
with open(os.path.join(os.path.dirname(__file__), 'dependencies.yml')) as f:
dependencies = yaml.load(f)
package_manager = _PackageManager()
|
Python
| 0.000002
|
@@ -1614,32 +1614,45 @@
-sudo('apt-get -q update'
+package_manager.run_update(force=True
)%0A%0A%0A
@@ -4239,27 +4239,40 @@
_update(self
+, force=False
):%0A
-
if s
@@ -4286,32 +4286,62 @@
tem() == 'apt':%0A
+ if not force:%0A
with
@@ -4350,24 +4350,28 @@
arn_only():%0A
+
@@ -4455,32 +4455,36 @@
+
if timestamp.suc
@@ -4511,16 +4511,20 @@
+
+
date = d
@@ -4568,32 +4568,41 @@
%0A if
+force or
timestamp.failed
@@ -4697,16 +4697,16 @@
s older%0A
-
@@ -4736,16 +4736,28 @@
q update
+ %3E /dev/null
')%0A
|
e6357827a670c71e2489b5468b89a65153719ba4
|
Fix syntax (backward compatible)
|
social/strategies/tornado_strategy.py
|
social/strategies/tornado_strategy.py
|
import json
from tornado.template import Loader, Template
from social.utils import build_absolute_uri
from social.strategies.base import BaseStrategy, BaseTemplateStrategy
class TornadoTemplateStrategy(BaseTemplateStrategy):
def render_template(self, tpl, context):
path, tpl = tpl.rsplit('/', 1)
return Loader(path).load(tpl).generate(**context)
def render_string(self, html, context):
return Template(html).generate(**context)
class TornadoStrategy(BaseStrategy):
DEFAULT_TEMPLATE_STRATEGY = TornadoTemplateStrategy
def __init__(self, storage, request_handler, tpl=None):
self.request_handler = request_handler
self.request = self.request_handler.request
super(TornadoStrategy, self).__init__(storage, tpl)
def get_setting(self, name):
return self.request_handler.settings[name]
def request_data(self, merge=True):
# Multiple valued arguments not supported yet
return {key: val[0] for key, val in self.request.arguments.iteritems()}
def request_host(self):
return self.request.host
def redirect(self, url):
return self.request_handler.redirect(url)
def html(self, content):
self.request_handler.write(content)
def session_get(self, name, default=None):
return self.request_handler.get_secure_cookie(name) or default
def session_set(self, name, value):
self.request_handler.set_secure_cookie(name, str(value))
def session_pop(self, name):
value = self.request_handler.get_secure_cookie(name)
self.request_handler.set_secure_cookie(name, '')
return value
def session_setdefault(self, name, value):
pass
def build_absolute_uri(self, path=None):
return build_absolute_uri('{0}://{1}'.format(self.request.protocol,
self.request.host),
path)
def partial_to_session(self, next, backend, request=None, *args, **kwargs):
return json.dumps(super(TornadoStrategy, self).partial_to_session(
next, backend, request=request, *args, **kwargs
))
def partial_from_session(self, session):
if session:
return super(TornadoStrategy, self).partial_to_session(
json.loads(session)
)
|
Python
| 0.000001
|
@@ -973,20 +973,42 @@
urn
-%7Bkey:
+dict((key,
val%5B0%5D
+)%0A
for
@@ -1054,17 +1054,17 @@
ritems()
-%7D
+)
%0A%0A de
|
e6c994b87fed7c12fae4b52c6311d105fe45ddbf
|
Make logging even better
|
giftwrap_plugins/builders/package_meta.py
|
giftwrap_plugins/builders/package_meta.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2015, IBM
# Copyright 2015, Craig Tracey <craigtracey@gmail.com>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import csv
import logging
import requests
from collections import OrderedDict
from giftwrap.builders.package_builder import PackageBuilder
from six import StringIO
BASE_PYPI_URL = 'http://pypi.python.org/pypi/%(package)s/%(version)s/json'
ordered_fieldnames = OrderedDict([
('project_name', None),
('package', None),
('version', None),
('homepage', None),
('license_info', None),
])
LOG = logging.getLogger(__name__)
class PackageMetaBuilder(PackageBuilder):
def __init__(self, build_spec):
super(PackageMetaBuilder, self).__init__(build_spec)
self._project_deps = {}
logging.getLogger("requests").setLevel(logging.WARNING)
def _finalize_project_build(self, project):
super(PackageMetaBuilder, self)._finalize_project_build(project)
self._log_metadata(project)
def _finalize_build(self):
super(PackageMetaBuilder, self)._finalize_build()
LOG.info("Python Dependency metadata:\n\n")
for (project_name, deps_info) in self._project_deps.iteritems():
LOG.info(deps_info)
def _log_metadata(self, project):
dependencies = self._extract_dependencies(project)
output = StringIO()
writer = csv.DictWriter(output, delimiter=',',
quoting=csv.QUOTE_MINIMAL,
fieldnames=ordered_fieldnames)
for dep in dependencies:
license, homepage = self._get_package_license_homepage(**dep)
info = dep
info['license_info'] = license
info['homepage'] = homepage
info['project_name'] = project.name
writer.writerow(info)
self._project_deps[project.name] = output.getvalue()
output.close()
def _get_package_license_homepage(self, package, version):
url = BASE_PYPI_URL % locals()
resp = requests.get(url)
license = None
homepage = None
if resp.status_code == 200:
data = resp.json()
license = data['info'].get('license', None)
homepage = data['info'].get('home_page', None)
return license, homepage
def _extract_dependencies(self, project):
pip_path = self._get_venv_pip_path(project.install_path)
cmd = "%s freeze" % pip_path
freeze = self._execute(cmd)
dependencies = []
for dep in freeze.split('\n'):
parts = dep.split('==')
if len(parts) == 2:
data = {'package': parts[0],
'version': parts[1]}
dependencies.append(data)
return dependencies
|
Python
| 0
|
@@ -1639,24 +1639,25 @@
build()%0A
+%0A
LOG.info
@@ -1652,51 +1652,24 @@
-LOG.info(%22Python Dependency metadata:%5Cn%5Cn%22)
+logged_deps = %22%22
%0A
@@ -1754,26 +1754,98 @@
-LOG.info(deps_info
+logged_deps += deps_info%0A LOG.info(%22Python Dependency metadata:%5Cn%5Cn%25s%22, logged_deps
)%0A%0A
|
07cc567d4270d46981ce28f682186546033fc500
|
Implement recursion for runcommand
|
myfuse.py
|
myfuse.py
|
#!/usr/bin/env python
from __future__ import with_statement
import subprocess
import inspect
import os
import sys
import errno
import fuse
#from fuse import Fuse, FuseOSError
class Passthrough(fuse.Operations):
def __init__(self, root):
self.root = root
# Helpers
# =======
def runcommand(self, path):
start = path[6:]
cmd = "gcc -P -E -xc++-header " + os.path.join(self.root, start) + " -o - > /tmp/.output"
subprocess.call(cmd, shell=True)
return "/tmp/.output"
def getrealpath(self, path):
return self._full_path(path[5:])
def _full_path(self, partial):
if partial.startswith("/"):
partial = partial[1:]
path = os.path.join(self.root, partial)
return path
# Filesystem methods
# ==================
def access(self, path, mode):
full_path = self._full_path(path)
if not os.access(full_path, mode):
raise fuse.FuseOSError(errno.EACCES)
def chmod(self, path, mode):
full_path = self._full_path(path)
return os.chmod(full_path, mode)
def chown(self, path, uid, gid):
full_path = self._full_path(path)
return os.chown(full_path, uid, gid)
def getattr(self, path, fh=None):
if path.startswith("/@@@@"):
full_path = self.getrealpath(path)
else:
full_path = self._full_path(path)
st = os.lstat(full_path)
return dict((key, getattr(st, key)) for key in ('st_atime', 'st_ctime',
'st_gid', 'st_mode', 'st_mtime', 'st_nlink', 'st_size', 'st_uid'))
def readdir(self, path, fh):
if path.startswith("/@@@@"):
path = path[6:]
full_path = self._full_path(path)
dirents = ['.', '..']
if path == '/':
dirents.append('@@@@')
if os.path.isdir(full_path):
dirents.extend(os.listdir(full_path))
for r in dirents:
yield r
def readlink(self, path):
pathname = os.readlink(self._full_path(path))
if pathname.startswith("/"):
# Path name is absolute, sanitize it.
return os.path.relpath(pathname, self.root)
else:
return pathname
def mknod(self, path, mode, dev):
return os.mknod(self._full_path(path), mode, dev)
def rmdir(self, path):
full_path = self._full_path(path)
return os.rmdir(full_path)
def mkdir(self, path, mode):
return os.mkdir(self._full_path(path), mode)
def statfs(self, path):
full_path = self._full_path(path)
stv = os.statvfs(full_path)
return dict((key, getattr(stv, key)) for key in ('f_bavail', 'f_bfree',
'f_blocks', 'f_bsize', 'f_favail', 'f_ffree', 'f_files', 'f_flag',
'f_frsize', 'f_namemax'))
def unlink(self, path):
return os.unlink(self._full_path(path))
def symlink(self, target, name):
return os.symlink(self._full_path(target), self._full_path(name))
def rename(self, old, new):
return os.rename(self._full_path(old), self._full_path(new))
def link(self, target, name):
return os.link(self._full_path(target), self._full_path(name))
def utimens(self, path, times=None):
return os.utime(self._full_path(path), times)
# File methods
# ============
def open(self, path, flags):
if path.startswith("/@@@@"):
full_path = self.getrealpath(path)
return os.open(full_path, flags)
else:
full_path = self._full_path(path)
return os.open(full_path, flags)
def create(self, path, mode, fi=None):
full_path = self._full_path(path)
return os.open(full_path, os.O_WRONLY | os.O_CREAT, mode)
def read(self, path, length, offset, fh):
os.lseek(fh, offset, os.SEEK_SET)
return os.read(fh, length)
def write(self, path, buf, offset, fh):
os.lseek(fh, offset, os.SEEK_SET)
return os.write(fh, buf)
def truncate(self, path, length, fh=None):
full_path = self._full_path(path)
with open(full_path, 'r+') as f:
f.truncate(length)
def flush(self, path, fh):
return os.fsync(fh)
def release(self, path, fh):
return os.close(fh)
def fsync(self, path, fdatasync, fh):
return self.flush(path, fh)
def main(mountpoint, root):
fuse.FUSE(Passthrough(root), mountpoint, foreground=True)
if __name__ == '__main__':
main(sys.argv[2], sys.argv[1])
|
Python
| 0.000019
|
@@ -121,16 +121,30 @@
rt errno
+%0Aimport string
%0A%0Aimport
@@ -353,109 +353,630 @@
-start = path%5B6:%5D%0A cmd = %22gcc -P -E -xc++-header %22 + os.path.join(self.root, start) + %22 -o - %3E
+cmd_template= string.Template(%22gcc -P -E -xc++-header $input -o - %3E /tmp/.output%22)%0A if path.find(%22@@@@%22) == -1:%0A return os.path.join(self.root, path)%0A%0A partial = path.rfind(%22@@@@%22)%0A base = os.path.join(self.root, path%5Bpartial+5:%5D)%0A%0A count = path.count(%22/@@@@%22)%0A subprocess.call(cmd_template.substitute(input=os.path.join(self.root, base)), shell=True)%0A%0A for iteration in range(count-1):%0A subprocess.call(%22cat /tmp/.output%22, shell=True)%0A subprocess.call(%22mv /tmp/.output /tmp/.output2%22, shell=True)%0A cmd = cmd_template.substitute(input=%22
/tmp
@@ -983,18 +983,24 @@
/.output
-%22%0A
+2%22)%0A
@@ -1024,32 +1024,33 @@
md, shell=True)%0A
+%0A
return %22
@@ -4014,35 +4014,34 @@
path = self.
-getrealpath
+runcommand
(path)%0A
|
6bfab23170c108c50c9b2dc4988e8670ed677d65
|
Allow including html files. Build script made executable.
|
build.py
|
build.py
|
import distutils.core
import distutils.dir_util  # used below; not reliably re-exported by distutils.core
from os import path
import re
include_folder = 'slides'
include_template = '{}.md'
include_regex = re.compile('@@([a-zA-Z0-9-_]+)')
in_file = 'index.html'
out_folder = '../dist'
out_file_name = 'index.html'
dirs_to_copy = ['css', 'js', 'lib', 'plugin']
def main():
print('Copying static directories...')
for directory in dirs_to_copy:
target = path.join(out_folder, directory)
if path.exists(target):
distutils.dir_util.remove_tree(target) #WARNING: THIS ACTUALLY REPLACES THE OLD ONE, SO BE CAREFUL
distutils.dir_util.copy_tree(directory, target)
print('{} copied'.format(directory))
print('All copied.')
print('Processing {} file...'.format(in_file))
with open(path.join(out_folder, out_file_name), 'w+') as fout:
with open(in_file, 'r') as fin:
text = fin.read()
matches = include_regex.findall(text) #save matches to print them
text = include_regex.sub(processIncludeMatch, text)
fout.write(text)
if matches is not None:
for match in matches:
print('>> File {} included'.format(include_template.format(match)))
print('{} file processed.'.format(in_file))
print('All done!')
def processIncludeMatch(match):
return includeFile(match.group(1))
def includeFile(name):
filename = path.join(include_folder, include_template.format(name))
with open(filename, 'r') as f:
return f.read()
main()
|
Python
| 0
|
@@ -1,12 +1,30 @@
+#!/usr/bin/python%0A
import distu
@@ -110,18 +110,31 @@
late
+s
=
+ %5B'%7B%7D.html',
'%7B%7D.md'
%0Ainc
@@ -129,16 +129,17 @@
'%7B%7D.md'
+%5D
%0Ainclude
@@ -906,86 +906,8 @@
()%0A%0A
- matches = include_regex.findall(text) #save matches to print them%0A
@@ -1001,170 +1001,8 @@
t)%0A%0A
- if matches is not None:%0A for match in matches:%0A print('%3E%3E File %7B%7D included'.format(include_template.format(match)))%0A
@@ -1183,58 +1183,275 @@
e =
-path.join(include_folder, include_template
+''%0A exists = False%0A%0A for template in include_templates:%0A filename = path.join(include_folder, template.format(name))%0A if path.isfile(filename):%0A exists = True%0A break%0A%0A if exists:%0A print('%3E%3E File %7B%7D included'
.format(
name
@@ -1450,16 +1450,20 @@
mat(
+file
name))%0A%0A
@@ -1458,16 +1458,20 @@
name))%0A%0A
+
with
@@ -1497,16 +1497,20 @@
) as f:%0A
+
|
9179907357c6e8aad33a8a5e5cd39b164b2f9cc0
|
Update BUILD_OSS to 4680.
|
src/data/version/mozc_version_template.bzl
|
src/data/version/mozc_version_template.bzl
|
# Copyright 2010-2021, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
MAJOR = 2
MINOR = 26
# BUILD number used for the OSS version.
BUILD_OSS = 4666
# Number to be increased. This value may be replaced by other tools.
BUILD = BUILD_OSS
# Represent the platform and release channel.
REVISION = 100
REVISION_MACOS = REVISION + 1
# This version represents the version of Mozc IME engine (converter, predictor,
# etc.). This version info is included both in the Mozc server and in the Mozc
# data set file so that the Mozc server can accept only the compatible version
# of data set file. The engine version must be incremented when:
# * POS matcher definition and/or conversion models were changed,
# * New data are added to the data set file, and/or
# * Any changes that lose data compatibility are made.
ENGINE_VERSION = 24
# This version is used to manage the data version and is included only in the
# data set file. DATA_VERSION can be incremented without updating
# ENGINE_VERSION as long as it's compatible with the engine.
# This version should be reset to 0 when ENGINE_VERSION is incremented.
DATA_VERSION = 10
|
Python
| 0
|
@@ -1605,18 +1605,18 @@
OSS = 46
-66
+80
%0A%0A# Numb
|
0f81fd138b1d3a83cc6ba5beab97ac34924da23f
|
use newer API to start activities
|
harmony/client.py
|
harmony/client.py
|
"""Client class for connecting to the Logitech Harmony."""
import json
import logging
import time
import sleekxmpp
from sleekxmpp.xmlstream import ET
LOGGER = logging.getLogger(__name__)
class HarmonyClient(sleekxmpp.ClientXMPP):
"""An XMPP client for connecting to the Logitech Harmony."""
def __init__(self, auth_token):
user = '%s@connect.logitech.com/gatorade.' % auth_token
password = auth_token
plugin_config = {
# Enables PLAIN authentication which is off by default.
'feature_mechanisms': {'unencrypted_plain': True},
}
super(HarmonyClient, self).__init__(
user, password, plugin_config=plugin_config)
def get_config(self):
"""Retrieves the Harmony device configuration.
Returns:
A nested dictionary containing activities, devices, etc.
"""
iq_cmd = self.Iq()
iq_cmd['type'] = 'get'
action_cmd = ET.Element('oa')
action_cmd.attrib['xmlns'] = 'connect.logitech.com'
action_cmd.attrib['mime'] = (
'vnd.logitech.harmony/vnd.logitech.harmony.engine?config')
iq_cmd.set_payload(action_cmd)
result = iq_cmd.send(block=True)
payload = result.get_payload()
assert len(payload) == 1
action_cmd = payload[0]
assert action_cmd.attrib['errorcode'] == '200'
device_list = action_cmd.text
return json.loads(device_list)
def get_current_activity(self):
"""Retrieves the current activity.
Returns:
An int with the activity ID.
"""
iq_cmd = self.Iq()
iq_cmd['type'] = 'get'
action_cmd = ET.Element('oa')
action_cmd.attrib['xmlns'] = 'connect.logitech.com'
action_cmd.attrib['mime'] = (
'vnd.logitech.harmony/vnd.logitech.harmony.engine?getCurrentActivity')
iq_cmd.set_payload(action_cmd)
result = iq_cmd.send(block=True)
payload = result.get_payload()
assert len(payload) == 1
action_cmd = payload[0]
assert action_cmd.attrib['errorcode'] == '200'
activity = action_cmd.text.split("=")
return int(activity[1])
def start_activity(self, activity_id):
"""Starts an activity.
Args:
activity_id: An int or string identifying the activity to start
Returns:
The text of the command response.
"""
iq_cmd = self.Iq()
iq_cmd['type'] = 'get'
action_cmd = ET.Element('oa')
action_cmd.attrib['xmlns'] = 'connect.logitech.com'
action_cmd.attrib['mime'] = ('harmony.engine?startactivity')
cmd = 'activityId=' + str(activity_id) + ':timestamp=0'
action_cmd.text = cmd
iq_cmd.set_payload(action_cmd)
result = iq_cmd.send(block=True)
payload = result.get_payload()
assert len(payload) == 1
action_cmd = payload[0]
return action_cmd.text
def sync(self):
"""Syncs the harmony hub with the web service.
"""
iq_cmd = self.Iq()
iq_cmd['type'] = 'get'
action_cmd = ET.Element('oa')
action_cmd.attrib['xmlns'] = 'connect.logitech.com'
action_cmd.attrib['mime'] = ('setup.sync')
iq_cmd.set_payload(action_cmd)
result = iq_cmd.send(block=True)
payload = result.get_payload()
assert len(payload) == 1
def send_command(self, device_id, command):
"""Send a simple command to the Harmony Hub.
"""
iq_cmd = self.Iq()
iq_cmd['type'] = 'get'
iq_cmd['id'] = '5e518d07-bcc2-4634-ba3d-c20f338d8927-2'
action_cmd = ET.Element('oa')
action_cmd.attrib['xmlns'] = 'connect.logitech.com'
action_cmd.attrib['mime'] = (
'vnd.logitech.harmony/vnd.logitech.harmony.engine?holdAction')
action_cmd.text = 'action={"type"::"IRCommand","deviceId"::"'+str(device_id)+'","command"::"'+command+'"}:status=press'
iq_cmd.set_payload(action_cmd)
result = iq_cmd.send(block=False)
# FIXME: This is an ugly hack, we need to follow the actual
# protocol for sending a command, since block=True does not
# work.
time.sleep(0.5)
return True
def turn_off(self):
"""Turns the system off if it's on, otherwise it does nothing.
Returns:
True.
"""
activity = self.get_current_activity()
print activity
if activity != -1:
print "OFF"
self.start_activity(-1)
return True
def create_and_connect_client(ip_address, port, token):
"""Creates a Harmony client and initializes session.
Args:
ip_address: IP Address of the Harmony device.
port: Port that the Harmony device is listening on.
token: A string containing a session token.
Returns:
An instance of HarmonyClient that is connected.
"""
client = HarmonyClient(token)
client.connect(address=(ip_address, port),
use_tls=False, use_ssl=False)
client.process(block=False)
while not client.sessionstarted:
time.sleep(0.1)
return client
|
Python
| 0
|
@@ -2197,16 +2197,94 @@
ty%5B1%5D)%0A%0A
+ def _timestamp(self):%0A return str(int(round(time.time() * 1000)))%0A%0A
def
@@ -2743,20 +2743,26 @@
ony.
+activity
engine?
-start
+run
acti
@@ -2829,17 +2829,49 @@
mestamp=
-0
+' + self._timestamp() + ':async=1
'%0A
|
db2bb0356cfdf486a9e628726cd4e5879311fe8b
|
update version
|
src/BJRobot/version.py
|
src/BJRobot/version.py
|
VERSION = '0.4.0'
|
Python
| 0
|
@@ -10,9 +10,9 @@
'0.
-4
+5
.0'%0A
|
137271045313a12bbe9388ab1ac6c8cb786b32b7
|
Reset mock before running test.
|
guardian/testapp/tests/test_management.py
|
guardian/testapp/tests/test_management.py
|
from __future__ import absolute_import
from __future__ import unicode_literals
from guardian.compat import get_user_model
from guardian.compat import mock
from guardian.compat import unittest
from guardian.management import create_anonymous_user
import django
mocked_get_init_anon = mock.Mock()
class TestGetAnonymousUser(unittest.TestCase):
@unittest.skipUnless(django.VERSION >= (1, 5), "Django >= 1.5 only")
@mock.patch('guardian.management.guardian_settings')
def test_uses_custom_function(self, guardian_settings):
path = 'guardian.testapp.tests.test_management.mocked_get_init_anon'
guardian_settings.GET_INIT_ANONYMOUS_USER = path
guardian_settings.ANONYMOUS_USER_ID = 219
User = get_user_model()
anon = mocked_get_init_anon.return_value = mock.Mock()
create_anonymous_user('sender')
mocked_get_init_anon.assert_called_once_with(User)
self.assertEqual(anon.pk, 219)
anon.save.assert_called_once_with()
|
Python
| 0
|
@@ -531,16 +531,59 @@
tings):%0A
+ mocked_get_init_anon.reset_mock()%0A%0A
|
30020d3826a2460288b6a57963753787020a945a
|
Implement support for the 'D' type in packb()
|
temporenc/temporenc.py
|
temporenc/temporenc.py
|
def packb(type=None, year=None, month=None, day=None):
raise NotImplementedError()
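# Sketch of the intended 'D'-type layout (taken from the accompanying diff,
# not yet implemented here): year/month/day pack into 21 bits,
#
#     d = (year << 9) | ((month - 1) << 5) | (day - 1)
#
# which is then prefixed with the 3-bit type tag 0b100.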
|
Python
| 0.00022
|
@@ -1,59 +1,1081 @@
%0A
-def packb(type=None, year=None, month=None, day=None):
+import struct%0A%0ASUPPORTED_TYPES = set(%5B%0A 'D',%0A 'T',%0A 'DT',%0A 'DTZ',%0A 'DTS',%0A 'DTSZ',%0A%5D)%0A%0ASTRUCT_32 = struct.Struct('%3EL')%0A%0A%0Adef packb(type=None, year=None, month=None, day=None):%0A %22%22%22%0A Pack date and time information into a byte string.%0A%0A :return: encoded temporenc value%0A :rtype: bytes%0A %22%22%22%0A%0A # Input validation%0A if type not in SUPPORTED_TYPES:%0A raise ValueError(%22invalid temporenc type: %7B0!r%7D%22.format(type))%0A%0A if year is None:%0A year = 4095%0A elif not 0 %3C= year %3C= 4094:%0A raise ValueError(%22'year' not in supported range%22)%0A%0A if month is None:%0A month = 15%0A elif not 1 %3C= month %3C= 12:%0A raise ValueError(%22'month' not in supported range%22)%0A%0A if day is None:%0A day = 31%0A elif not 1 %3C= day %3C= 31:%0A raise ValueError(%22'day' not in supported range%22)%0A%0A # Component packing%0A if 'D' in type:%0A d = (year %3C%3C 9) %7C (month - 1 %3C%3C 5) %7C (day - 1)%0A%0A # Byte packing%0A if type == 'D':%0A # Format: 100DDDDD DDDDDDDD DDDDDDDD%0A return STRUCT_32.pack(0b100 %3C%3C 21 %7C d)%5B1:%5D%0A
%0A
|
7c63030bd70b32ec4c13ff4273d103ddbb0ffa0f
|
include TumblrProfile in Django admin
|
hackathon_starter/hackathon/admin.py
|
hackathon_starter/hackathon/admin.py
|
from django.contrib import admin
from hackathon.models import UserProfile, Profile, InstagramProfile, TwitterProfile, MeetupToken, GithubProfile, LinkedinProfile
# Register your models here.
class TwitterProfileAdmin(admin.ModelAdmin):
list_display = ('user','twitter_user')
admin.site.register(UserProfile)
admin.site.register(Profile)
admin.site.register(InstagramProfile)
admin.site.register(TwitterProfile, TwitterProfileAdmin)
admin.site.register(GithubProfile)
admin.site.register(MeetupToken)
admin.site.register(LinkedinProfile)
|
Python
| 0.000002
|
@@ -154,16 +154,31 @@
nProfile
+, TumblrProfile
%0A%0A# Regi
@@ -548,9 +548,43 @@
rofile)%0A
+admin.site.register(TumblrProfile)
%0A
|
4f842014ee96506c3198a2da373b91067b8eec9d
|
Add ability to forget hive device when removing integration (#74144)
|
homeassistant/components/hive/__init__.py
|
homeassistant/components/hive/__init__.py
|
"""Support for the Hive devices and services."""
from __future__ import annotations
from collections.abc import Awaitable, Callable, Coroutine
from functools import wraps
import logging
from typing import Any, TypeVar
from aiohttp.web_exceptions import HTTPException
from apyhiveapi import Hive
from apyhiveapi.helper.hive_exceptions import HiveReauthRequired
from typing_extensions import Concatenate, ParamSpec
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_PASSWORD, CONF_SCAN_INTERVAL, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import aiohttp_client, config_validation as cv
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from homeassistant.helpers.entity import DeviceInfo, Entity
from homeassistant.helpers.typing import ConfigType
from .const import DOMAIN, PLATFORM_LOOKUP, PLATFORMS
_HiveEntityT = TypeVar("_HiveEntityT", bound="HiveEntity")
_P = ParamSpec("_P")
_LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = vol.Schema(
vol.All(
cv.deprecated(DOMAIN),
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_USERNAME): cv.string,
vol.Optional(CONF_SCAN_INTERVAL, default=2): cv.positive_int,
},
)
},
),
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Hive configuration setup."""
hass.data[DOMAIN] = {}
if DOMAIN not in config:
return True
conf = config[DOMAIN]
if not hass.config_entries.async_entries(DOMAIN):
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={
CONF_USERNAME: conf[CONF_USERNAME],
CONF_PASSWORD: conf[CONF_PASSWORD],
},
)
)
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Hive from a config entry."""
web_session = aiohttp_client.async_get_clientsession(hass)
hive_config = dict(entry.data)
hive = Hive(web_session)
hive_config["options"] = {}
hive_config["options"].update(
{CONF_SCAN_INTERVAL: dict(entry.options).get(CONF_SCAN_INTERVAL, 120)}
)
hass.data[DOMAIN][entry.entry_id] = hive
try:
devices = await hive.session.startSession(hive_config)
except HTTPException as error:
_LOGGER.error("Could not connect to the internet: %s", error)
raise ConfigEntryNotReady() from error
except HiveReauthRequired as err:
raise ConfigEntryAuthFailed from err
for ha_type, hive_type in PLATFORM_LOOKUP.items():
device_list = devices.get(hive_type)
if device_list:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, ha_type)
)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
def refresh_system(
func: Callable[Concatenate[_HiveEntityT, _P], Awaitable[Any]]
) -> Callable[Concatenate[_HiveEntityT, _P], Coroutine[Any, Any, None]]:
"""Force update all entities after state change."""
@wraps(func)
async def wrapper(self: _HiveEntityT, *args: _P.args, **kwargs: _P.kwargs) -> None:
await func(self, *args, **kwargs)
async_dispatcher_send(self.hass, DOMAIN)
return wrapper
class HiveEntity(Entity):
"""Initiate Hive Base Class."""
def __init__(self, hive, hive_device):
"""Initialize the instance."""
self.hive = hive
self.device = hive_device
self._attr_name = self.device["haName"]
self._attr_unique_id = f'{self.device["hiveID"]}-{self.device["hiveType"]}'
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, self.device["device_id"])},
model=self.device["deviceData"]["model"],
manufacturer=self.device["deviceData"]["manufacturer"],
name=self.device["device_name"],
sw_version=self.device["deviceData"]["version"],
via_device=(DOMAIN, self.device["parentDevice"]),
)
self.attributes = {}
async def async_added_to_hass(self):
"""When entity is added to Home Assistant."""
self.async_on_remove(
async_dispatcher_connect(self.hass, DOMAIN, self.async_write_ha_state)
)
|
Python
| 0
|
@@ -280,24 +280,30 @@
veapi import
+ Auth,
Hive%0Afrom a
@@ -3568,16 +3568,337 @@
ad_ok%0A%0A%0A
+async def async_remove_entry(hass: HomeAssistant, entry: ConfigEntry) -%3E None:%0A %22%22%22Remove a config entry.%22%22%22%0A hive = Auth(entry.data%5B%22username%22%5D, entry.data%5B%22password%22%5D)%0A await hive.forget_device(%0A entry.data%5B%22tokens%22%5D%5B%22AuthenticationResult%22%5D%5B%22AccessToken%22%5D,%0A entry.data%5B%22device_data%22%5D%5B1%5D,%0A )%0A%0A%0A
def refr
|
c3df7d5adf551213c94f2d0e0598552ce6ee9aaf
|
move collection list filtering logic to db query
|
hs_collection_resource/page_processors.py
|
hs_collection_resource/page_processors.py
|
from django.http import HttpResponseRedirect
from mezzanine.pages.page_processors import processor_for
from hs_core import page_processors
from hs_core.models import BaseResource
from hs_core.views import add_generic_context
from hs_core.views.utils import get_my_resources_list
from .models import CollectionResource
@processor_for(CollectionResource)
def landing_page(request, page):
content_model = page.get_content_model()
edit_resource = page_processors.check_resource_mode(request)
user = request.user
if user.is_authenticated():
user_all_accessible_resource_list = get_my_resources_list(user)
else: # anonymous user
user_all_accessible_resource_list = list(BaseResource.discoverable_resources.all())
# resource is collectable if
# 1) Shareable=True
# 2) OR, current user is an owner of it
user_all_collectable_resource_list = []
for res in user_all_accessible_resource_list:
if res.raccess.shareable or res.raccess.owners.filter(pk=user.pk).exists():
user_all_collectable_resource_list.append(res)
# current contained resources list
collection_items_list = list(content_model.resources.all())
# get the context from hs_core
context = page_processors.get_page_context(page, request.user,
resource_edit=edit_resource,
extended_metadata_layout=None,
request=request)
if edit_resource:
candidate_resources_list = []
for res in user_all_collectable_resource_list:
if content_model.short_id == res.short_id:
continue # skip current collection resource object
elif res in content_model.resources.all():
continue # skip resources that are already in current collection
candidate_resources_list.append(res)
context['collection_candidate'] = candidate_resources_list
context['collection_res_id'] = content_model.short_id
elif isinstance(context, HttpResponseRedirect):
# resource view mode
# sending user to login page
return context
context['deleted_resources'] = content_model.deleted_resources.all()
context['collection'] = collection_items_list
context['edit_mode'] = edit_resource
hs_core_dublin_context = add_generic_context(request, page)
context.update(hs_core_dublin_context)
return context
|
Python
| 0
|
@@ -37,16 +37,70 @@
Redirect
+, HttpResponseForbidden%0Afrom django.db.models import Q
%0Afrom me
@@ -551,595 +551,8 @@
t)%0A%0A
- user = request.user%0A if user.is_authenticated():%0A user_all_accessible_resource_list = get_my_resources_list(user)%0A else: # anonymous user%0A user_all_accessible_resource_list = list(BaseResource.discoverable_resources.all())%0A%0A # resource is collectable if%0A # 1) Shareable=True%0A # 2) OR, current user is a owner of it%0A user_all_collectable_resource_list = %5B%5D%0A for res in user_all_accessible_resource_list:%0A if res.raccess.shareable or res.raccess.owners.filter(pk=user.pk).exists():%0A user_all_collectable_resource_list.append(res)%0A%0A
@@ -1005,56 +1005,119 @@
-candidate_resources_list = %5B%5D%0A for res in
+user = request.user%0A if not user.is_authenticated():%0A return HttpResponseForbidden();%0A
use
@@ -1126,16 +1126,15 @@
all_
-collecta
+accessi
ble_
@@ -1150,157 +1150,346 @@
list
-:%0A if content_model.short_id == res.short_id:%0A continue # skip current collection resource object%0A elif res in
+ = get_my_resources_list(user)%0A%0A # resource is collectable if%0A # 1) Shareable=True%0A # 2) OR, current user is a owner of it%0A # 3) exclude this resource as well as resources already in the collection%0A user_all_accessible_resource_list.exclude(short_id=content_model.short_id)%5C%0A .exclude(id__in=
cont
@@ -1513,17 +1513,18 @@
es.all()
-:
+)%5C
%0A
@@ -1532,126 +1532,83 @@
- continue # skip resources that are already in current collection%0A%0A candidate_resources_list.append(res
+.exclude(Q(raccess__shareable=False) %7C Q(raccess__owners__contains=user.pk)
)%0A%0A
@@ -1648,24 +1648,34 @@
ate'%5D =
-candidat
+user_all_accessibl
e_resour
@@ -1676,22 +1676,27 @@
resource
-s
_list
+.all()
%0A
|
75a0dec32210432374b45dbed2845dfe171b9b36
|
Set version number to 0.4.1
|
climlab/__init__.py
|
climlab/__init__.py
|
__version__ = '0.4.1dev'
# This list defines all the modules that will be loaded if a user invokes
# from climLab import *
# totally out of date!
#__all__ = ["constants", "thermo", "orbital_table",
# "long_orbital_table", "insolation", "ebm",
# "column", "convadj"]
#from climlab import radiation
# this should ensure that we can still import constants.py as climlab.constants
from climlab.utils import constants
from climlab.utils import thermo, legendre
# some more useful shortcuts
#from climlab.model import ebm, column
from climlab.model.column import GreyRadiationModel, RadiativeConvectiveModel, BandRCModel
from climlab.model.ebm import EBM, EBM_annual, EBM_seasonal
from climlab.domain import domain
from climlab.domain.field import Field, global_mean
from climlab.domain.axis import Axis
from climlab.domain.initial import column_state, surface_state
from climlab.process.process import Process, process_like, get_axes
from climlab.process.time_dependent_process import TimeDependentProcess
from climlab.process.implicit import ImplicitProcess
from climlab.process.diagnostic import DiagnosticProcess
from climlab.process.energy_budget import EnergyBudget
|
Python
| 0.999999
|
@@ -17,11 +17,8 @@
.4.1
-dev
'%0A%0A#
|
cb70fbc9d104cae2433b3a6eccc719718d7c2b63
|
Fix `changed` status that always returns False
|
lib/ansible/modules/extras/packaging/composer.py
|
lib/ansible/modules/extras/packaging/composer.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Dimitrios Tydeas Mengidis <tydeas.dr@gmail.com>
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
---
module: composer
author: Dimitrios Tydeas Mengidis
short_description: Dependency Manager for PHP
version_added: "1.6"
description:
- Composer is a tool for dependency management in PHP. It allows you to declare the dependent libraries your project needs and it will install them in your project for you.
options:
command:
version_added: "1.8"
description:
- Composer command like "install", "update" and so on
required: false
default: install
working_dir:
description:
- Directory of your project ( see --working-dir )
required: true
default: null
aliases: [ "working-dir" ]
prefer_source:
description:
- Forces installation from package sources when possible ( see --prefer-source )
required: false
default: "no"
choices: [ "yes", "no" ]
aliases: [ "prefer-source" ]
prefer_dist:
description:
- Forces installation from package dist even for dev versions ( see --prefer-dist )
required: false
default: "no"
choices: [ "yes", "no" ]
aliases: [ "prefer-dist" ]
no_dev:
description:
- Disables installation of require-dev packages ( see --no-dev )
required: false
default: "yes"
choices: [ "yes", "no" ]
aliases: [ "no-dev" ]
no_scripts:
description:
- Skips the execution of all scripts defined in composer.json ( see --no-scripts )
required: false
default: "no"
choices: [ "yes", "no" ]
aliases: [ "no-scripts" ]
no_plugins:
description:
- Disables all plugins ( see --no-plugins )
required: false
default: "no"
choices: [ "yes", "no" ]
aliases: [ "no-plugins" ]
optimize_autoloader:
description:
- Optimize autoloader during autoloader dump ( see --optimize-autoloader ). Convert PSR-0/4 autoloading to classmap to get a faster autoloader. This is recommended especially for production, but can take a bit of time to run so it is currently not done by default.
required: false
default: "yes"
choices: [ "yes", "no" ]
aliases: [ "optimize-autoloader" ]
requirements:
- php
- composer installed in bin path (recommended /usr/local/bin)
notes:
- Default options that are always appended in each execution are --no-ansi, --no-progress, and --no-interaction
'''
EXAMPLES = '''
# Downloads and installs all the libs and dependencies outlined in the /path/to/project/composer.lock
- composer: command=install working_dir=/path/to/project
'''
import os
import re
def parse_out(string):
return re.sub("\s+", " ", string).strip()
def has_changed(string):
return (re.match("Nothing to install or update", string) != None)
def composer_install(module, command, options):
php_path = module.get_bin_path("php", True, ["/usr/local/bin"])
composer_path = module.get_bin_path("composer", True, ["/usr/local/bin"])
cmd = "%s %s %s %s" % (php_path, composer_path, command, " ".join(options))
return module.run_command(cmd)
def main():
module = AnsibleModule(
argument_spec = dict(
command = dict(default="install", type="str", required=False),
working_dir = dict(aliases=["working-dir"], required=True),
prefer_source = dict(default="no", type="bool", aliases=["prefer-source"]),
prefer_dist = dict(default="no", type="bool", aliases=["prefer-dist"]),
no_dev = dict(default="yes", type="bool", aliases=["no-dev"]),
no_scripts = dict(default="no", type="bool", aliases=["no-scripts"]),
no_plugins = dict(default="no", type="bool", aliases=["no-plugins"]),
optimize_autoloader = dict(default="yes", type="bool", aliases=["optimize-autoloader"]),
),
supports_check_mode=True
)
module.params["working_dir"] = os.path.abspath(module.params["working_dir"])
options = set([])
# Default options
options.add("--no-ansi")
options.add("--no-progress")
options.add("--no-interaction")
if module.check_mode:
options.add("--dry-run")
# Get composer command with fallback to default
command = module.params['command']
del module.params['command']
# Prepare options
for i in module.params:
opt = "--%s" % i.replace("_","-")
p = module.params[i]
if isinstance(p, (bool)) and p:
options.add(opt)
elif isinstance(p, (str)):
options.add("%s=%s" % (opt, p))
rc, out, err = composer_install(module, command, options)
if rc != 0:
output = parse_out(err)
module.fail_json(msg=output)
else:
output = parse_out(out)
module.exit_json(changed=has_changed(output), msg=output)
# import module snippets
from ansible.module_utils.basic import *
main()
|
Python
| 0
|
@@ -3584,25 +3584,11 @@
-return (re.match(
+if
%22Not
@@ -3617,26 +3617,70 @@
ate%22
-,
+ in
string
-) != None)
+:%0A return False%0A else:%0A return True
%0A%0Ade
|
85af2e031479c78aaef433e2294648125916251a
|
Improve color palette for cycling Curves
|
src/rnaseq_lib/plot/opts.py
|
src/rnaseq_lib/plot/opts.py
|
gene_curves_opts = {
'Curve': {'plot': dict(height=120, width=600, tools=['hover'], invert_xaxis=True, yrotation=45, yaxis='left'),
'style': dict(line_width=1.5)},
'Curve.Percentage_of_Normal_Samples': {'plot': dict(xaxis=None, invert_yaxis=True),
'style': dict(color='Blue')},
'Curve.Gene_Expression': {'plot': dict(xaxis=None),
'style': dict(color='Green')},
'Curve.Log2_Fold_Change': {'plot': dict(height=150),
'style': dict(color='Purple')},
'Scatter': {'style': dict(color='red', size=3)}}
gene_kde_opts = {'Overlay': {'plot': dict(width=500, legend_position='left')}}
gene_distribution_opts = {'BoxWhisker': {'plot': dict(width=875, xrotation=70)}}
gene_de_opts = {
'Scatter': {'plot': dict(color_index='Tissue', legend_position='left', width=700, height=500, tools=['hover']),
'style': dict(cmap='tab20', size=10, alpha=0.5)}}
sample_count_opts = {
'Bars': {'plot': dict(width=875, xrotation=70, tools=['hover'], show_legend=False)}
}
l2fc_by_perc_samples_opts = {
'Curve': {'plot': dict(tools=['hover'])},
'Overlay': {'plot': dict(legend_position='left', width=500)},
'Spikes': {'plot': dict(spike_length=100),
'style': dict(line_alpha=0.4, line_width=5)}
}
|
Python
| 0.000001
|
@@ -1,12 +1,329 @@
+import holoviews as hv%0A%0Acolor_sequence = %5B'#1f77b4', '#aec7e8', '#ff7f0e', '#ffbb78', '#2ca02c',%0A '#98df8a', '#d62728', '#ff9896', '#9467bd', '#c5b0d5',%0A '#8c564b', '#c49c94', '#e377c2', '#f7b6d2', '#7f7f7f',%0A '#c7c7c7', '#bcbd22', '#dbdb8d', '#17becf', '#9edae5'%5D%0A%0A
gene_curves_
@@ -1492,16 +1492,84 @@
hover'%5D)
+,%0A 'style': dict(color=hv.Cycle(values=color_sequence))
%7D,%0A '
|
72c5168ff71223db32ef37a12fd8781f28bfc433
|
change CTCP VERSION reply
|
circa.py
|
circa.py
|
#!/usr/bin/env python3
import sdirc
import yaml
import threading
import importlib
import modules
VERSION = "1.0"
class Circa(sdirc.Client):
def __init__(self, **conf):
conf["autoconn"] = False
conf["prefix"] = conf["prefix"] if "prefix" in conf else "!"
sdirc.Client.__init__(self, **conf)
self.modules = {}
self.add_listener("registered",
lambda m: (self.send("UMODE2", "+B"), self.say("groupserv", "join !bots")))
for module in "cmd module leave".split() + self.conf["modules"]:
self.load_module(module)
self.add_listener("invite", lambda to, by, m: self.join(to))
self.add_listener("ctcp-version", self.version)
self.connect()
def version(self, fr, to, msg):
self.say(fr, "\x01VERSION circa {0}\x01".format(VERSION))
@staticmethod
def wrap(line):
words = []
width = 80
for word in line.split():
if len(word) + 1 > width:
words.append("\xFF")
width = 80 - len(word)
else:
width = width - len(word) - 1
words.append(word)
line2 = " ".join(words)
sublines = line2.split(" \xFF ")
return sublines
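# Behaviour sketch of wrap() (illustrative values, not from the source): words
# are packed greedily into an ~80-column budget and a "\xFF" marker is emitted
# where the next word would overflow, so a 70-char word followed by a 20-char
# word comes back as two sublines.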
def say(self, to, msg):
msg = [line.rstrip() for line in msg.split("\n")]
for line in msg:
for subline in Circa.wrap(line):
sdirc.Client.say(self, to, subline)
def load_module(self, name):
if name in self.modules:
return 2
try:
m = importlib.import_module("modules." + name).module
if hasattr(m, "require"):
for mod in m.require.split():
self.load_module(mod)
self.modules[name] = module = m(self)
for event, listeners in module.listeners.items():
for listener in listeners:
self.add_listener(event, listener)
return 0
except ImportError:
return 1
except AttributeError:
return 1
except TypeError:
return 1
def unload_module(self, name):
if name not in self.modules:
return 1
module = self.modules[name]
for event, listeners in module.listeners.items():
for listener in listeners:
self.remove_listener(event, listener)
del self.modules[name]
return 0
if __name__ == "__main__":
try:
file = open("config.yaml")
config = yaml.load(file)
file.close()
for c in config:
threading.Thread(target=lambda c=c: Circa(**c)).start()  # bind c per iteration; a bare lambda would capture the loop variable late
except KeyboardInterrupt:
print("Bye")
|
Python
| 0
|
@@ -696,19 +696,22 @@
%0A%09%09self.
-say
+notice
(fr, %22%5Cx
|
d29f88ae898a64896bc63bbdf79daba0a496c19f
|
Add lm task.
|
cli/r.py
|
cli/r.py
|
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
R CLI Fabric File
"""
__author__ = 'Dongjoon Hyun (dongjoon@apache.org)'
__copyright__ = 'Copyright (c) 2015'
__license__ = 'Apache License'
__version__ = '0.1'
from fabric.api import *
@task
def word_cloud(inpath, topk, outpath, sep='\01'):
"""
fab r.word_cloud:/user/hadoop/tf_result/part-00000,100,/user/hadoop/wordcloud.png
"""
run('mkdir %s' % env.dir)
with cd(env.dir):
run('''cat <<'EOF' > r.word_cloud.R
library(wordcloud)
df <- read.table("/hdfs%(inpath)s", header=F, sep="\\001", quote="\\002", stringsAsFactors=F, col.names=c('word','freq'),nrows=%(topk)s)
png("/hdfs%(outpath)s", width=400,height=400)
wordcloud(df$word,df$freq, scale=c(8,.2),min.freq=3,max.words=Inf, random.order=FALSE, rot.per=.15, colors=brewer.pal(8,"Dark2"))
dev.off()
EOF''' % locals())
cmd = '/usr/bin/Rscript --vanilla r.word_cloud.R 2> /dev/null'
run(cmd)
@task
def sql(inpath, sql):
"""
fab r.sql:/sample/people.json,'SELECT name FROM people WHERE age <\= 19'
"""
import os
table = os.path.splitext(os.path.basename(inpath))[0]
run('mkdir %s' % env.dir)
with cd(env.dir):
run('''cat <<'EOF' > r.sql.R
suppressMessages(library(SparkR))
sc <- sparkR.init(appName="SparkR SQL")
sqlContext <- sparkRSQL.init(sc)
jsondf <- jsonFile(sqlContext, "%(inpath)s")
registerTempTable(jsondf, "%(table)s")
result <- sql(sqlContext, "%(sql)s")
resultDF <- collect(result)
print(resultDF)
sparkR.stop()
EOF''' % locals())
cmd = '/opt/spark/bin/spark-submit r.sql.R 2> /dev/null | tail -n +4'
run(cmd)
@task
def summary(inpath):
"""
fab r.summary:/sample/people.json
"""
import os
table = os.path.splitext(os.path.basename(inpath))[0]
run('mkdir %s' % env.dir)
with cd(env.dir):
run('''cat <<'EOF' > r.summary.R
suppressMessages(library(SparkR))
sc <- sparkR.init(appName="SparkR Summary")
sqlContext <- sparkRSQL.init(sc)
jsondf <- jsonFile(sqlContext, "%(inpath)s")
summary(collect(jsondf))
sparkR.stop()
EOF''' % locals())
cmd = '/opt/spark/bin/spark-submit r.summary.R 2> /dev/null | tail -n +4'
run(cmd)
@task
def nn_visualize(inpath, formula, hidden, outpath):
"""
fab r.nn_visualize:/model/r/nn.train,y1~x1+x2+x3,6:12:8,/user/hadoop/nn.png
"""
hidden = hidden.replace(':', ',')
run('mkdir %s' % env.dir)
with cd(env.dir):
run('''cat <<'EOF' > r.nn_visualize.R
library(NeuralNetTools)
library(neuralnet)
AND <- c(rep(0, 7), 1)
OR <- c(0, rep(1, 7))
data <- read.table("/hdfs%(inpath)s")
model <- neuralnet(%(formula)s, data, hidden = c(%(hidden)s), rep = 10, err.fct = 'ce', linear.output = FALSE)
png("/hdfs%(outpath)s", width=400,height=400)
par(mar = numeric(4), family = 'serif')
plotnet(model, alpha = 0.6)
dev.off()
EOF''' % locals())
cmd = '/usr/bin/Rscript --vanilla r.nn_visualize.R &> /dev/null'
run(cmd)
|
Python
| 0.015995
|
@@ -1023,16 +1023,380 @@
ort *%0A%0A%0A
+@task%0Adef lm(inpath):%0A %22%22%22%0A fab r.lm:/sample/sample_regression%0A %22%22%22%0A run('mkdir %25s' %25 env.dir)%0A with cd(env.dir):%0A run('''cat %3C%3C'EOF' %3E r.lm.R%0Adf %3C- read.table(%22%25(inpath)s%22, header=F, sep=%22 %22, stringsAsFactors=F, col.names=c('x','y'))%0Alm(y ~ x, data=df)%0AEOF''' %25 locals())%0A cmd = '/usr/bin/Rscript --vanilla r.lm.R'%0A run(cmd)%0A%0A%0A
@task%0Ade
|
dc6100fea3097d97e7065bd653093798eac84909
|
Allow passing in of timezone
|
kairios/templatetags/kairios_tags.py
|
kairios/templatetags/kairios_tags.py
|
import calendar as cal
import datetime
from django import template
register = template.Library()
def delta(year, month, d):
mm = month + d
yy = year
if mm > 12:
mm, yy = mm % 12, year + mm / 12
elif mm < 1:
mm, yy = 12 + mm, year - 1
return yy, mm
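# e.g. (illustrative, relying on Python 2 integer division):
# delta(2020, 12, 1) -> (2021, 1) and delta(2020, 1, -1) -> (2019, 12)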
@register.inclusion_tag("kairios/calendar.html", takes_context=True)
def calendar(context, events, date=None, **kwargs):
cal.setfirstweekday(cal.SUNDAY)
today = datetime.date.today()
if date is None:
date = today
plus_year, plus_month = delta(date.year, date.month, 1)
minus_year, minus_month = delta(date.year, date.month, -1)
next = events.month_url(plus_year, plus_month)
prev = events.month_url(minus_year, minus_month)
events_by_day = events.events_by_day(date.year, date.month)
title = "%s %s" % (cal.month_name[date.month], date.year)
matrix = cal.monthcalendar(date.year, date.month)
grid = []
for week in matrix:
row = []
for day in week:
is_today = date.year == today.year and date.month == today.month and today.day == day
if day:
day_events = events_by_day.get(day, [])
link = events.day_url(date.year, date.month, day, bool(day_events))
row.append((day, day_events, link, is_today))
else:
row.append(None)
grid.append(row)
context.update({
"title": title,
"calendar_date": date,
"prev": prev,
"next": next,
"grid": grid,
})
return context
|
Python
| 0.000001
|
@@ -61,16 +61,61 @@
emplate%0A
+from django.util import timezone%0A%0Aimport pytz
%0A%0Aregist
@@ -438,16 +438,25 @@
te=None,
+ tz=None,
**kwarg
@@ -496,16 +496,118 @@
UNDAY)%0A%0A
+ if tz:%0A today = timezone.localtime(timezone.now(), pytz.timezone(tz)).date()%0A else:%0A
toda
|
fd7577d34ef206869517f3717070880d098d4d8b
|
change URL dispatch rules
|
cms_content/urls.py
|
cms_content/urls.py
|
# -*- coding: utf-8 -*-
from django.conf.urls.defaults import *
from cms_content.views import *
from cms_content.models import *
from cms_content.utils.queryset import queryset_iterator
urlpatterns = patterns ('',
url(r'^$', section_list, {'sections': CMSSection.objects.all()}, name='section'),
url(r'^(?P<slug>\w*)/$', category_list, name='category_list'),
url(r'^(?P<slug>\w*)/(?P<path>\w*)/$', article_list),
url(r'^(?P<slug>\w*)/(?P<path>\w*)/(?P<name>[\w-]*)/$', article_view),
)
|
Python
| 0
|
@@ -433,35 +433,38 @@
url(r'%5E(?P%3Cslug%3E
-%5Cw*
+%5B-%5Cw%5D+
)/(?P%3Cpath%3E%5Cw*)/
@@ -458,19 +458,22 @@
?P%3Cpath%3E
-%5Cw*
+%5B-%5Cw%5D+
)/(?P%3Cna
@@ -480,13 +480,13 @@
me%3E%5B
+-
%5Cw
--%5D*
+%5D+
)/$'
|
8686bf54eab7358dbd096aac277155b86db94d42
|
add new keyword
|
src/IpmiLibrary/bmc.py
|
src/IpmiLibrary/bmc.py
|
#
# Kontron IpmiLibrary
#
# author: Michael Walle <michael.walle@kontron.com>
# author: Heiko Thiery <heiko.thiery@kontron.com>
#
from mapping import *
from robot import utils
from robot.utils import asserts
from utils import int_any_base
import pyipmi.bmc  # pyipmi.bmc.Watchdog is referenced below
class Bmc:
def issue_bmc_cold_reset(self):
"""Sends a _bmc cold reset_ to the given controler.
"""
self._ipmi.cold_reset()
def get_bmc_device_id(self):
"""Sends a _bmc get device id_ command to the given controller.
"""
device_id = self._ipmi.get_device_id()
def product_id_should_be(self, product_id):
"""Fails if the GetDeviceID command response does not contain
the given `product_id`.
"""
product_id = int_any_base(product_id)
device_id = self._ipmi.get_device_id()
asserts.fail_unless_equal(device_id.product_id, product_id)
def manufacturer_id_should_be(self, manufacturer_id):
"""Fails if the GetDeviceID command response does not contain
the given `manufacturer_id`.
"""
manufacturer_id = int_any_base(manufacturer_id)
device_id = self._ipmi.get_device_id()
asserts.fail_unless_equal(device_id.manufacturer_id, manufacturer_id)
def start_watchdog_timer(self, value, action="Hard Reset",
timer_use="SMS OS"):
"""Sets and starts IPMI watchdog timer.
The watchdog is set to `value` and after that it is started.
The maximum value is 6553 seconds. `value` is given in Robot
Framework's time format (e.g. 1 minute 20 seconds) that is explained in
the User Guide.
`action` can be:
No Action, Hard Reset, Power Down, Power Cycle
`timer_use` can be:
OEM, SMS OS, OS Load, BIOS Post, BIOS Frb2
"""
timer_use = find_watchdog_timer_use(timer_use)
config = pyipmi.bmc.Watchdog()
config.timer_use = timer_use
config.dont_stop = 1
config.dont_log = 0
config.pre_timeout_interval = 0
config.pre_timeout_interrupt = 0
config.timer_use_expiration_flags = 0xff
# convert to 100ms
config.initial_countdown = int(utils.timestr_to_secs(value) * 10)
if (config.initial_countdown > 0xffff):
raise RuntimeError('Watchdog value out of range')
config.timeout_action = find_watchdog_action(action)
# set watchdog
self._ipmi.set_watchdog_timer(config)
# start watchdog
self._ipmi.reset_watchdog_timer()
def reset_watchdog_timer(self):
"""Send the Reset Watchdog Timer Command
"""
self._ipmi.reset_watchdog_timer()
def stop_watchdog_timer(self, msg=None):
"""Stops the IPMI wachtdog timer.
"""
config = pyipmi.bmc.Watchdog()
config.timer_use = pyipmi.bmc.Watchdog.TIMER_USE_OEM
config.dont_stop = 0
config.dont_log = 0
config.pre_timeout_interval = 0
config.pre_timeout_interrupt = 0
# 0xff means clear all expiration flags
config.timer_use_expiration_flags = 0xff
config.initial_countdown = 0
config.timeout_action = pyipmi.bmc.Watchdog.TIMEOUT_ACTION_NO_ACTION
self._ipmi.set_watchdog_timer(config)
def get_watchdog_timer_countdown_value(self):
"""Returns the present watchdog countdown value."""
config = self._ipmi.get_watchdog_timer()
return config.present_countdown
def watchdog_timeout_action_should_be(self, action, msg=None):
"""Fails if the IPMI Watchdog timeout action is not `action`
`action` can be:
No Action, Hard Reset, Power Down, Power Cycle
"""
action = find_watchdog_action(action)
config = self._ipmi.get_watchdog_timer()
asserts.fail_unless_equal(action, config.timeout_action, msg)
def watchdog_timer_use_should_be(self, timer_use, msg=None):
"""Fails if the IPMI Watchdog timer use is not `timer_use`
`timer_use` can be:
OEM, SMS OS, OS Load, BIOS POST, BIOS FRB2
"""
timer_use = find_watchdog_timer_use(timer_use)
config = self._ipmi.get_watchdog_timer()
asserts.fail_unless_equal(timer_use, config.timer_use, msg)
def watchdog_initial_timeout_value_should_be(self, value, msg=None):
"""
"""
value = int_any_base(value)
config = self._ipmi.get_watchdog_timer()
asserts.fail_unless_equal(value, config.initial_countdown, msg)
def watchdog_should_be_started(self, msg=None):
config = self._ipmi.get_watchdog_timer()
asserts.fail_unless(config.is_running, msg)
def watchdog_should_be_stopped(self, msg=None):
config = self._ipmi.get_watchdog_timer()
asserts.fail_if(config.is_running, msg)
|
Python
| 0
|
@@ -1228,24 +1228,453 @@
cturer_id)%0A%0A
+ def device_should_support(self, supported_function):%0A %22%22%22The device can support the following functions:%0A 'SENSOR', 'SDR_REPOSITORY', 'SEL', 'FRU_INVENTORY',%0A 'IPMB_EVENT_RECEIVER', 'IPMB_EVENT_GENERATOR', 'BRIDGE', 'CHASSIS'.%0A %22%22%22%0A device_id = self._ipmi.get_device_id()%0A supports = device_id.supports_function(supported_function)%0A asserts.fail_unless_equal(supports, True)%0A%0A
def star
|
56b98c3f8a091132cd2dc9c1a717df9cdd96439c
|
Improve titles
|
src/sentry/constants.py
|
src/sentry/constants.py
|
"""
sentry.constants
~~~~~~~~~~~~~~~~
These settings act as the default (base) settings for the Sentry-provided web-server
:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from django.utils.datastructures import SortedDict
from django.utils.translation import ugettext_lazy as _
SORT_OPTIONS = SortedDict((
('priority', _('Priority')),
('date', _('Last Seen')),
('new', _('First Seen')),
('freq', _('Frequency')),
('tottime', _('Total Time Spent')),
('avgtime', _('Average Time Spent')),
('accel_15', _('Trending: %(minutes)d minutes' % {'minutes': 15})),
('accel_60', _('Trending: %(minutes)d minutes' % {'minutes': 60})),
))
SORT_CLAUSES = {
'priority': 'sentry_groupedmessage.score',
'date': 'EXTRACT(EPOCH FROM sentry_groupedmessage.last_seen)',
'new': 'EXTRACT(EPOCH FROM sentry_groupedmessage.first_seen)',
'freq': 'sentry_groupedmessage.times_seen',
'tottime': 'sentry_groupedmessage.time_spent_total',
'avgtime': '(sentry_groupedmessage.time_spent_total / sentry_groupedmessage.time_spent_count)',
}
SCORE_CLAUSES = SORT_CLAUSES.copy()
SQLITE_SORT_CLAUSES = SORT_CLAUSES.copy()
SQLITE_SORT_CLAUSES.update({
'date': 'sentry_groupedmessage.last_seen',
'new': 'sentry_groupedmessage.first_seen',
})
SQLITE_SCORE_CLAUSES = SQLITE_SORT_CLAUSES.copy()
MYSQL_SORT_CLAUSES = SORT_CLAUSES.copy()
MYSQL_SORT_CLAUSES.update({
'date': 'sentry_groupedmessage.last_seen',
'new': 'sentry_groupedmessage.first_seen',
})
MYSQL_SCORE_CLAUSES = SCORE_CLAUSES.copy()
MYSQL_SCORE_CLAUSES.update({
'date': 'UNIX_TIMESTAMP(sentry_groupedmessage.last_seen)',
'new': 'UNIX_TIMESTAMP(sentry_groupedmessage.first_seen)',
})
SEARCH_SORT_OPTIONS = SortedDict((
('score', _('Score')),
('date', _('Last Seen')),
('new', _('First Seen')),
))
STATUS_VISIBLE = 0
STATUS_HIDDEN = 1
STATUS_UNRESOLVED = 0
STATUS_RESOLVED = 1
STATUS_MUTED = 2
STATUS_LEVELS = (
(STATUS_UNRESOLVED, _('Unresolved')),
(STATUS_RESOLVED, _('Resolved')),
(STATUS_MUTED, _('Muted')),
)
MEMBER_OWNER = 0
MEMBER_USER = 50
MEMBER_SYSTEM = 100
MEMBER_TYPES = (
(MEMBER_OWNER, _('Admin')),
(MEMBER_USER, _('User')),
(MEMBER_SYSTEM, _('System Agent')),
)
# A list of values which represent an unset or empty password on
# a User instance.
EMPTY_PASSWORD_VALUES = ('!', '', '$')
PLATFORM_LIST = (
'python',
'django',
'flask',
'php',
'java',
'node.js',
'ios',
'express',
'connect',
'r',
'ruby',
'rails3',
'javascript',
)
PLATFORM_ROOTS = {
'rails3': 'ruby',
'django': 'python',
'flask': 'python',
'express': 'node.js',
'connect': 'node.js',
}
PLATFORM_TITLES = {
'rails3': 'Rails 3',
'php': 'PHP',
'ios': 'iOS',
}
|
Python
| 0.001441
|
@@ -2797,16 +2797,23 @@
'Rails 3
+ (Ruby)
',%0A '
@@ -2843,10 +2843,146 @@
'iOS',%0A
+ 'express': 'Express (Node.js)',%0A 'connect': 'Express (Node.js)',%0A 'django': 'Django (Python)',%0A 'flask': 'Flask (Python)',%0A
%7D%0A
|
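Applied, the two hunks above expand PLATFORM_TITLES so that framework names carry their host language; the resulting dict reads:

PLATFORM_TITLES = {
    'rails3': 'Rails 3 (Ruby)',
    'php': 'PHP',
    'ios': 'iOS',
    'express': 'Express (Node.js)',
    'connect': 'Express (Node.js)',
    'django': 'Django (Python)',
    'flask': 'Flask (Python)',
}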
2f24f483dbd8ed860556dd934c8923c89e378fce
|
whoops - null text, return 0 length
|
library/pyjamas/ui/platform/TextBoxBasemshtml.py
|
library/pyjamas/ui/platform/TextBoxBasemshtml.py
|
class TextBoxBase:
def getCursorPos(self):
try :
elem = self.getElement()
tr = elem.document.selection.createRange()
if tr.parentElement().uniqueID != elem.uniqueID:
return -1
return -tr.move("character", -65535)
except:
print traceback.print_exc()
return 0
def getSelectionLength(self):
try :
elem = self.getElement()
tr = elem.document.selection.createRange()
if tr.parentElement().uniqueID != elem.uniqueID:
return 0
return len(tr.text)
except:
print traceback.print_exc()
return 0
def setSelectionRange(self, pos, length):
try :
elem = self.getElement()
tr = elem.createTextRange()
tr.collapse(True)
tr.moveStart('character', pos)
tr.moveEnd('character', length)
tr.select()
except :
print traceback.print_exc()
pass
|
Python
| 0.999352
|
@@ -605,16 +605,28 @@
return
+tr.text and
len(tr.t
@@ -629,16 +629,21 @@
tr.text)
+ or 0
%0A
|
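Decoded, the patch guards against a null `tr.text` with the Python 2 and/or idiom; the fixed method becomes:

    def getSelectionLength(self):
        try :
            elem = self.getElement()
            tr = elem.document.selection.createRange()
            if tr.parentElement().uniqueID != elem.uniqueID:
                return 0
            # tr.text may be null when nothing is selected; fall back to 0
            return tr.text and len(tr.text) or 0
        except:
            print traceback.print_exc()
            return 0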
a549c863deeb15952aa710273ae7a448892d227e
|
Add support for debug messages
|
cocoa/controller.py
|
cocoa/controller.py
|
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cocoa frontend implementation for namebench."""
__author__ = 'tstromberg@google.com (Thomas Stromberg)'
from objc import YES, NO, IBAction, IBOutlet
from Foundation import *
from AppKit import *
import datetime
import operator
import urllib
import time
import webbrowser
import tempfile
import traceback
import sys
import os
import re
# TODO(tstromberg): Research best practices for bundling cocoa frontends.
pwd = os.getcwd()
if 'namebench/cocoa' in pwd:
NSLog("Enabling development mode resource hack")
RSRC_DIR = pwd[0:pwd.index('/cocoa')]
sys.path.append(RSRC_DIR)
else:
RSRC_DIR = os.path.dirname(__file__)
NSLog("Resource directory is %s" % RSRC_DIR)
import third_party
from libnamebench import base_ui
from libnamebench import config
from libnamebench import nameserver_list
from libnamebench import benchmark
from libnamebench import util
from libnamebench import history_parser
class controller(NSWindowController, base_ui.BaseUI):
"""Controller class associated with the main window."""
nameserver_form = IBOutlet()
include_global = IBOutlet()
include_regional = IBOutlet()
data_source = IBOutlet()
selection_mode = IBOutlet()
num_tests = IBOutlet()
num_runs = IBOutlet()
status = IBOutlet()
spinner = IBOutlet()
def awakeFromNib(self):
"""Initializes our class, called automatically by Cocoa"""
self.resource_dir = RSRC_DIR
conf_file = os.path.join(self.resource_dir, 'namebench.cfg')
NSLog("Using configuration: %s" % conf_file)
(self.options, self.supplied_ns, self.global_ns, self.regional_ns) = config.GetConfiguration(filename=conf_file)
# TODO(tstromberg): Consider moving this into a thread for faster loading.
self.UpdateStatus('Discovering sources')
self.DiscoverSources()
self.UpdateStatus('Populating Form...')
self.setFormDefaults()
self.UpdateStatus('Ready')
@IBAction
def startJob_(self, sender):
"""Trigger for the 'Start Benchmark' button, starts benchmark thread."""
self.ProcessForm()
self.UpdateStatus('Starting benchmark thread')
t = NSThread.alloc().initWithTarget_selector_object_(self, self.benchmarkThread, None)
t.start()
# TODO(tstromberg): Hook this method in
def applicationShouldTerminateAfterLastWindowClosed_(self, sender):
return True
def UpdateStatus(self, message, count=None, total=None, error=False):
"""Update the status message at the bottom of the window."""
if error:
return self.displayError(message, error)
if total:
state = '%s [%s/%s]' % (message, count, total)
elif count:
state = '%s%s' % (message, '.' * count)
else:
state = message
NSLog(state)
self.status.setStringValue_(state)
def ProcessForm(self):
"""Parse the form fields and populate class variables."""
self.UpdateStatus('Processing form inputs')
self.primary = self.supplied_ns
if not int(self.include_global.stringValue()):
self.UpdateStatus('Not using primary')
else:
self.primary.extend(self.global_ns)
if not int(self.include_regional.stringValue()):
self.UpdateStatus('Not using secondary')
self.secondary = []
else:
self.secondary = self.regional_ns
self.options.select_mode = self.selection_mode.titleOfSelectedItem().lower()
self.options.data_source = self.ParseSourceSelection(self.data_source.titleOfSelectedItem())
self.UpdateStatus('Supplied servers: %s' % self.nameserver_form.stringValue())
self.primary.extend(util.ExtractIPTuplesFromString(self.nameserver_form.stringValue()))
self.options.test_count = int(self.num_tests.stringValue())
self.options.run_count = int(self.num_runs.stringValue())
self.UpdateStatus("Source %s, mode %s, %s tests, %s runs" % (self.options.data_source, self.options.select_mode, self.options.test_count, self.options.run_count))
def benchmarkThread(self):
"""Run the benchmarks, designed to be run in a thread."""
pool = NSAutoreleasePool.alloc().init()
self.spinner.startAnimation_(self)
self.UpdateStatus('Preparing benchmark')
try:
self.PrepareBenchmark()
self.RunBenchmark()
except nameserver_list.OutgoingUdpInterception:
(exc_type, exception, tb) = sys.exc_info()
self.UpdateStatus('Outgoing requests were intercepted!',
error=str(exception))
except nameserver_list.TooFewNameservers:
(exc_type, exception, tb) = sys.exc_info()
self.UpdateStatus('Too few nameservers to test', error=str(exception))
except:
(exc_type, exception, tb) = sys.exc_info()
traceback.print_exc(tb)
error_msg = '\n'.join(traceback.format_tb(tb)[-4:])
self.UpdateStatus('FAIL: %s' % exception, error=error_msg)
self.spinner.stopAnimation_(self)
pool.release()
def displayError(self, msg, details):
"""Display an alert drop-down message"""
NSLog("ERROR: %s - %s" % (msg, details))
alert = NSAlert.alloc().init()
alert.setMessageText_(msg)
alert.setInformativeText_(details)
buttonPressed = alert.runModal()
def setFormDefaults(self):
"""Set up the form with sane initial values."""
nameservers_string = ', '.join(util.InternalNameServers())
self.nameserver_form.setStringValue_(nameservers_string)
self.num_tests.setStringValue_(self.options.test_count)
self.num_runs.setStringValue_(self.options.run_count)
self.selection_mode.removeAllItems()
self.selection_mode.addItemsWithTitles_(['Weighted', 'Random', 'Chunk'])
self.data_source.removeAllItems()
for source in self.sources:
self.data_source.addItemWithTitle_(history_parser.sourceToTitle(source))
|
Python
| 0
|
@@ -2959,16 +2959,29 @@
or=False
+, debug=False
):%0A %22
|
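The single hunk only widens the UpdateStatus signature; the body is outside the diff, so a minimal reconstruction is:

    def UpdateStatus(self, message, count=None, total=None, error=False, debug=False):
        """Update the status message at the bottom of the window."""
        # body unchanged by this hunk; how debug is consumed is not shown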
5c8f7ba13ae33c0f87b89e9721a66b0e3f001f5e
|
Change the number of urls caught at once
|
crawl.py
|
crawl.py
|
#coding=utf-8
import os
import time
import urllib2
from bs4 import BeautifulSoup
import socket
from Indexbuild import IndexBuilder
class crawl:
baseurl=''
req_header = {'User-Agent':'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6'}
req_timeout = 5
urlqueue=[]
urls=[]
indegree=[]
outdegree=[]
length = []
head=[]
totalcount=0
count=0
read_web=set()
    def __init__(self,baseurl):# add the main URL to the visited set
self.baseurl=baseurl
self.indexbuilder = IndexBuilder()
    def user_agent(self,url): # breadth-first traversal of web pages
try:
if(url in self.read_web):
try:
self.indegree[self.urls.index(url)]+=1
except:
pass
if(len(self.urlqueue)>0):
self.user_agent(self.urlqueue.pop(0))
else:
self.read_web.add(url)
tmpoutdegree=0
print("it's the %d time"%(self.count))
self.count=self.count+1
                if(self.count <= 10):# number of pages to search
req = urllib2.Request(url,None,self.req_header)
page = urllib2.urlopen(req,None,self.req_timeout)
html = page.read()
page.close()
soup = BeautifulSoup(html)
self.urls.append(url)
self.indegree.append(1)
self.length.append(self.indexbuilder.process(soup,len(self.urls)-1))
a = soup.find_all(['a'])
for i in a:
tmpurl=i.get('href')
if(tmpurl is not None and tmpurl.find('javascript')==-1):
if(tmpurl.find('http')==-1):
tmpurl=self.baseurl+'/'+tmpurl
if(tmpurl.find('www.cc98.org')!=-1):
# print(tmpurl)
self.urlqueue.append(tmpurl)
tmpurl=''
tmpoutdegree=tmpoutdegree+1
#c=raw_input()
self.outdegree.append(tmpoutdegree)
time.sleep(0.1)
nexturl=self.urlqueue.pop(0)
self.user_agent(nexturl)
                else: # finished
self.indexbuilder.save()
with open('queue','w') as qq:
print('Writing queue back into file...')
for item in self.urlqueue:
try:
if(item is not None):
qq.write(item+'\n')
except:
print ('queue wrong but things well')
pass
with open('urllist','w') as uu:
uu.write('%d\n'%(len(self.urls)))
i=0
print('Writing urllist back into file...')
for item in self.urls:
try:
uu.write('%d %s %d %d %d\n'%(i, item, self.indegree[i], self.outdegree[i], self.length[i]))
i+=1
except:
print('%d %s %d %d %d\n'%(i, item, self.indegree[i], self.outdegree[i], self.length[i]))
print ('urls output wrong')
pass
#return html
except urllib2.URLError as e:
print e.message
self.user_agent(self.urlqueue.pop(0))
except socket.timeout as e:
self.user_agent(self.urlqueue.pop(0))
except:
pass
    def fillset(self,urllist,queue):# add previously visited sites to the set and re-fetch the queue
with open(urllist,'r') as FILE:
totalcount=FILE.readline()
for item in FILE.readlines():
try:
(tmpid,tmpurl,tmpind,tmpoud,tmplen)=(item.strip('\n').split(' '))
self.urls.append(tmpurl)
self.read_web.add(tmpurl)
self.indegree.append(int(tmpind))
self.outdegree.append(int(tmpoud))
self.length.append(int(tmplen))
except:
print('read in data error')
with open(queue,'r') as FILE1:
for item in FILE1.readlines():
try:
self.urlqueue.append(item.strip('\n'))
except:
print('read queue in error but well')
        self.baseurl=self.urlqueue.pop(0)# reset the main URL
#main
if __name__ == '__main__':
baseurl="http://www.cc98.org"
cc=crawl(baseurl)
if(os.path.exists('urllist') and os.path.exists('queue')):
        cc.fillset('urllist','queue') # check whether to resume the previous crawl
cc.user_agent(cc.baseurl)
else:
cc.user_agent(baseurl)
|
Python
| 0.000016
|
@@ -867,17 +867,18 @@
ount %3C=
-1
+50
0):#%E6%90%9C%E7%B4%A2%E7%BD%91%E9%A1%B5
@@ -1647,17 +1647,17 @@
sleep(0.
-1
+2
)%0A%09%09%09%09%09n
@@ -2669,17 +2669,36 @@
cept
-:%0A%09%09%09pass
+ Exception as e:%0A%09%09%09print(e)
%0A%09%0A%0A
|
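Decoded, the three hunks raise the per-run page cap, throttle the crawl a little harder, and surface the previously swallowed errors; condensed (surrounding crawl logic omitted):

try:
    if self.count <= 500:   # page cap raised from 10 to 500
        time.sleep(0.2)     # politeness delay raised from 0.1 s
except Exception as e:      # the final bare "except: pass" now reports
    print(e)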
ce83a4fb2f650380b7683ea688791e078b6fe7ec
|
Fix wrong redirect on logout
|
src/sleepy/web/views.py
|
src/sleepy/web/views.py
|
from django.contrib import messages
from django.contrib.auth import REDIRECT_FIELD_NAME, logout
from django.core.urlresolvers import reverse_lazy
from django.views.generic import RedirectView, TemplateView
from django.utils.http import is_safe_url
from django.utils.translation import ugettext
class IndexView(TemplateView):
"""View for the index page"""
template_name = 'sleepy/web/index.html'
class LogoutView(RedirectView):
url = reverse_lazy('home')
permanent = False
def dispatch(self, request, *args, **kwargs):
if request.user.is_authenticated():
logout(self.request)
messages.success(request, ugettext('You have successfully logged out.'))
return super(LogoutView, self).get(request, *args, **kwargs)
def get_redirect_url(self, *args, **kwargs):
url = super(LogoutView, self).get_redirect_url(*args, **kwargs)
next_url = self.request.REQUEST.get(REDIRECT_FIELD_NAME, None)
if next_url and is_safe_url(url=next_url, host=self.request.get_host()):
url = next_url
return url
|
Python
| 0.000003
|
@@ -456,16 +456,23 @@
e_lazy('
+sleepy-
home')%0A
|
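The fix is a one-token change: the logout redirect resolved the wrong URL name. After the patch the view reads:

class LogoutView(RedirectView):
    url = reverse_lazy('sleepy-home')  # was 'home', which pointed to the wrong route
    permanent = False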
e8389c211ef56869cd9c6c1177aa6a610a915aa2
|
Fix manifest and add format to properties
|
combine/manifest.py
|
combine/manifest.py
|
# Copyright (c) 2010 John Reese
# Licensed under the MIT license
import yaml
from combine import Change, CombineError
MANIFEST_FORMAT = 1
class Manifest:
def __init__(self):
self.properties = {}
self.actions = []
def add_property(self, name, value):
self.properties[name] = value
def add_action(self, action):
self.actions.append(action)
def to_dict(self):
"""
Generate a dictionary representation of the Manifest object.
"""
return dict(self.properties, actions=self.actions)
@classmethod
def from_dict(cls, data):
"""
Given a dictionary object, generate a new Manifest object.
"""
format = data["manifest-format"]
if (format > MANIFEST_FORMAT or format < 0):
raise CombineError("Unsupported manifest format")
mft = Manifest()
for key, value in data.items():
if key == "actions":
for action in value:
mft.add_action(action)
else:
mft.add_property(key, value)
for action in data["actions"]:
mft.add_action(action)
return mft
def to_yaml(self):
"""
Generate a YAML data string representing the Manifest object.
"""
str = yaml.safe_dump(self.to_dict(), default_flow_style=False)
return str
@classmethod
def from_yaml(cls, str):
"""
Given a string of YAML data, generate a new Manifest object.
"""
data = yaml.safe_load(str)
return cls.from_dict(data)
|
Python
| 0
|
@@ -95,16 +95,8 @@
port
- Change,
Com
@@ -195,16 +195,50 @@
ties = %7B
+%22manifest-format%22: MANIFEST_FORMAT
%7D%0A
|
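Decoded, the patch drops the unused Change import and seeds properties with the manifest format, so to_dict() emits the 'manifest-format' key that from_dict() validates:

from combine import CombineError

MANIFEST_FORMAT = 1

class Manifest:
    def __init__(self):
        # recorded up front so round-trips through to_dict()/from_dict() keep it
        self.properties = {"manifest-format": MANIFEST_FORMAT}
        self.actions = []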
0c5f8c97e98499478fe3261303af106c5c701de7
|
fix handouts, oops
|
commands/handout.py
|
commands/handout.py
|
import config
import random
import datetime, time
import math
def execute(parser, bot, user, args):
argslow = args.lower().strip()
# saidPlease = argslow.startswith("please") or argslow.startswith("pls") or argslow.startswith("plz")
saidPlease = False # lol
userData = bot.getUserDetails(user)
timeNow = int(time.time())
if userData["handout_ban"] > timeNow:
return
canPlay = bot.canPlayGame(userData)
if canPlay == 0:
# let's look at sdevs
listOfHandouts = bot.execQuerySelectMultiple("SELECT * FROM handouts WHERE twitchname = ? ORDER BY rowid DESC LIMIT 10", (user,))
if len(listOfHandouts) == 10:
meantotal = 0.0
timestamps = []
timestamps.append(float(timeNow - 1430000000))
for handoutRow in listOfHandouts:
thistime = time.mktime(time.strptime(handoutRow["whenHappened"].encode("utf-8").split(".")[0], "%Y-%m-%d %H:%M:%S"))
timestamps.append(float(thistime - 1430000000))
diffs = []
for i in xrange(0, 10):
thisdiff = timestamps[i] - timestamps[i+1]
diffs.append(thisdiff)
meantotal += (thisdiff)
meantotal = meantotal/10.0
vartotal = 0.0
for diffval in diffs:
vartotal += (diffval - meantotal)*(diffval - meantotal)
sdevtotal = math.sqrt(vartotal/10.0)
print "sdev=", sdevtotal
if sdevtotal < 1.3:
bannedUntil = timeNow + config.handoutScriptBan
bot.channelMsg("%s -> stop scripting! (banned from handouts for 1 hour)" % user)
bot.execQueryModify("UPDATE users SET handout_ban = ? WHERE twitchname = ?", (bannedUntil, user))
return
# success
handout = 0
while True:
if saidPlease:
randRoll = random.randint(2, 10)
else:
randRoll = random.randint(1, 10)
handout = handout + randRoll
if randRoll != 10:
break
newHighest = max(userData["highest_handout"], handout)
queryArgs = (userData["balance"]+handout, timeNow, datetime.datetime.now(), newHighest, user, userData["balance"])
bot.execQueryModify("UPDATE users SET balance = ?, last_game = ?, last_activity = ?, handouts = handouts + 1, highest_handout = ? WHERE twitchname = ? AND balance = ?", queryArgs)
bot.updateHighestBalance(userData, userData["balance"]+handout)
logArgs = (user, handout, datetime.datetime.now())
bot.execQueryModify("INSERT INTO handouts (twitchname, amount, whenHappened) VALUES(?, ?, ?)", logArgs)
currencyNow = config.currencyName if (handout == 1) else config.currencyPlural
if userData["balance"] > 0:
handoutMessages = ["Here, take %d %s." % (handout, currencyNow), "If I give you %d %s will you leave me alone?" % (handout, currencyNow), "Fine. %d %s for you. Now shoo!" % (handout, currencyNow), "I'm actually feeling pretty generous today, so have %d %s." % (handout, currencyNow), "I-It's not like I wanted to give you %d %s or anything! B-Baka!" % (handout, currencyNow), "I present %d %s to Mr. Beggar Extraordinaire over here." % (handout, currencyNow), "I present %d %s to Ms. Beggar Extraordinaire over here." % (handout, currencyNow), "The Goldenrod Gods have spoken. Thou shalt receive %d %s." % (handout, currencyNow)]
else:
handoutMessages = ["You irresponsible gambler, how dare you waste my generosity. But I feel obligated to get you back on your feet again, so here's %d %s." % (handout, currencyNow)]
bot.channelMsg("%s -> %s" % (user, random.choice(handoutChoices)))
else:
if config.showCooldowns:
bot.channelMsg("%s -> On cooldown. (%d secs)" % (user, canPlay))
def requiredPerm():
return "anyone"
|
Python
| 0
|
@@ -3874,13 +3874,14 @@
dout
-Choic
+Messag
es))
|
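The oops is a NameError from a simple rename: the list is built as handoutMessages but was read back as handoutChoices. The corrected call:

        bot.channelMsg("%s -> %s" % (user, random.choice(handoutMessages)))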
bdba5fb157862880af786090bb02f236207e2f37
|
fix precedence bug
|
src/apipkg/__init__.py
|
src/apipkg/__init__.py
|
"""
apipkg: control the exported namespace of a Python package.
see https://pypi.python.org/pypi/apipkg
(c) holger krekel, 2009 - MIT license
"""
import os
import sys
from types import ModuleType
from .version import version as __version__ # NOQA:F401
def _py_abspath(path):
"""
special version of abspath
that will leave paths from jython jars alone
"""
if path.startswith("__pyclasspath__"):
return path
else:
return os.path.abspath(path)
def distribution_version(name):
"""try to get the version of the named distribution,
    returns None on failure"""
from pkg_resources import get_distribution, DistributionNotFound
try:
dist = get_distribution(name)
except DistributionNotFound:
pass
else:
return dist.version
def initpkg(pkgname, exportdefs, attr=None, eager=False):
""" initialize given package from the export definitions. """
attr = attr or {}
oldmod = sys.modules.get(pkgname)
d = {}
f = getattr(oldmod, "__file__", None)
if f:
f = _py_abspath(f)
d["__file__"] = f
if hasattr(oldmod, "__version__"):
d["__version__"] = oldmod.__version__
if hasattr(oldmod, "__loader__"):
d["__loader__"] = oldmod.__loader__
if hasattr(oldmod, "__path__"):
d["__path__"] = [_py_abspath(p) for p in oldmod.__path__]
if hasattr(oldmod, "__package__"):
d["__package__"] = oldmod.__package__
if "__doc__" not in exportdefs and getattr(oldmod, "__doc__", None):
d["__doc__"] = oldmod.__doc__
d["__spec__"] = getattr(oldmod, "__spec__", None)
d.update(attr)
if hasattr(oldmod, "__dict__"):
oldmod.__dict__.update(d)
mod = ApiModule(pkgname, exportdefs, implprefix=pkgname, attr=d)
sys.modules[pkgname] = mod
    # eagerly load in bpython to avoid its monkeypatching breaking packages
if "bpython" in sys.modules or eager:
for module in list(sys.modules.values()):
if isinstance(module, ApiModule):
module.__dict__
return mod
def importobj(modpath, attrname):
"""imports a module, then resolves the attrname on it"""
module = __import__(modpath, None, None, ["__doc__"])
if not attrname:
return module
retval = module
names = attrname.split(".")
for x in names:
retval = getattr(retval, x)
return retval
class ApiModule(ModuleType):
"""the magical lazy-loading module standing"""
def __docget(self):
try:
return self.__doc
except AttributeError:
if "__doc__" in self.__map__:
return self.__makeattr("__doc__")
def __docset(self, value):
self.__doc = value
__doc__ = property(__docget, __docset)
def __init__(self, name, importspec, implprefix=None, attr=None):
self.__name__ = name
self.__all__ = [x for x in importspec if x != "__onfirstaccess__"]
self.__map__ = {}
self.__implprefix__ = implprefix or name
if attr:
for name, val in attr.items():
# print "setting", self.__name__, name, val
setattr(self, name, val)
for name, importspec in importspec.items():
if isinstance(importspec, dict):
subname = "{}.{}".format(self.__name__, name)
apimod = ApiModule(subname, importspec, implprefix)
sys.modules[subname] = apimod
setattr(self, name, apimod)
else:
parts = importspec.split(":")
modpath = parts.pop(0)
attrname = parts and parts[0] or ""
if modpath[0] == ".":
modpath = implprefix + modpath
if not attrname:
subname = "{}.{}".format(self.__name__, name)
apimod = AliasModule(subname, modpath)
sys.modules[subname] = apimod
if "." not in name:
setattr(self, name, apimod)
else:
self.__map__[name] = (modpath, attrname)
def __repr__(self):
repr_list = []
if hasattr(self, "__version__"):
repr_list.append("version=" + repr(self.__version__))
if hasattr(self, "__file__"):
repr_list.append("from " + repr(self.__file__))
if repr_list:
return "<ApiModule {!r} {}>".format(self.__name__, " ".join(repr_list))
return "<ApiModule {!r}>".format(self.__name__)
def __makeattr(self, name):
"""lazily compute value for name or raise AttributeError if unknown."""
# print "makeattr", self.__name__, name
target = None
if "__onfirstaccess__" in self.__map__:
target = self.__map__.pop("__onfirstaccess__")
importobj(*target)()
try:
modpath, attrname = self.__map__[name]
except KeyError:
if target is not None and name != "__onfirstaccess__":
# retry, onfirstaccess might have set attrs
return getattr(self, name)
raise AttributeError(name)
else:
result = importobj(modpath, attrname)
setattr(self, name, result)
try:
del self.__map__[name]
except KeyError:
pass # in a recursive-import situation a double-del can happen
return result
__getattr__ = __makeattr
@property
def __dict__(self):
# force all the content of the module
# to be loaded when __dict__ is read
dictdescr = ModuleType.__dict__["__dict__"]
dict = dictdescr.__get__(self)
if dict is not None:
hasattr(self, "some")
for name in self.__all__:
try:
self.__makeattr(name)
except AttributeError:
pass
return dict
def AliasModule(modname, modpath, attrname=None):
mod = []
def getmod():
if not mod:
x = importobj(modpath, None)
if attrname is not None:
x = getattr(x, attrname)
mod.append(x)
return mod[0]
x = modpath + ("." + attrname) if attrname else ""
repr_result = "<AliasModule {!r} for {!r}>".format(modname, x)
class AliasModule(ModuleType):
def __repr__(self):
return repr_result
def __getattribute__(self, name):
try:
return getattr(getmod(), name)
except ImportError:
if modpath == "pytest" and attrname is None:
# hack for pylibs py.test
return None
else:
raise
def __setattr__(self, name, value):
setattr(getmod(), name, value)
def __delattr__(self, name):
delattr(getmod(), name)
return AliasModule(str(modname))
|
Python
| 0.000002
|
@@ -6261,17 +6261,16 @@
attrname
-)
if attr
@@ -6281,16 +6281,17 @@
else %22%22
+)
%0A rep
|
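Decoded, the fix adds one pair of parentheses. A conditional expression binds looser than +, so the original line returned '' (dropping modpath entirely) whenever attrname was falsy. A small demonstration, with illustrative values:

modpath, attrname = "pkg.mod", None

# before: parsed as (modpath + "." + attrname) if attrname else ""
x = modpath + ("." + attrname) if attrname else ""    # x == ""

# after: the conditional only governs the suffix
x = modpath + (("." + attrname) if attrname else "")  # x == "pkg.mod"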
799f0bbd87fe5d455d4859201e604490f493ae0c
|
Fix form stuff so that we get all the form details
|
browser.py
|
browser.py
|
# Copyright (C) Adam Piper, 2012
# See COPYING for licence details (GNU AGPLv3)
import pycurl
import StringIO
from lxml.html import fromstring
from urllib import urlencode
from datetime import datetime
class Browser(object):
@classmethod
def check_curl(cls, item):
return item in pycurl.version_info()[8]
def __init__(self):
self._curl = pycurl.Curl() # note: this is an "easy" connection
self._curl.setopt(pycurl.FOLLOWLOCATION, 1) # follow location headers
self._curl.setopt(pycurl.AUTOREFERER, 1)
self._curl.setopt(pycurl.MAXREDIRS, 20)
self._curl.setopt(pycurl.ENCODING, "gzip")
self._buf = StringIO.StringIO()
self._curl.setopt(pycurl.WRITEFUNCTION, self._buf.write) # callback for content buffer
self._curl.setopt(pycurl.USERAGENT, "Mozilla/5.0 (X11; Linux i686) AppleWebKit/534.24 (KHTML, like Gecko) Ubuntu/10.10 Chromium/11.0.696.65 Chrome/11.0.696.65 Safari/534.24")
self._curl.setopt(pycurl.COOKIEFILE, "") # use cookies
self._curl.setopt(pycurl.CONNECTTIMEOUT, 2)
self._curl.setopt(pycurl.TIMEOUT, 4);
self.reset()
def reset(self):
self._tree = None
self._form = None
self._curl.setopt(pycurl.HTTPGET, 1)
self._form_data = {}
self._roundtrip = None
roundtrip = property(lambda self: self._roundtrip)
def go(self, url):
self._buf.truncate(0)
self._curl.setopt(pycurl.URL, url)
# execute
try:
before = datetime.now()
self._curl.perform()
except pycurl.error, e:
code, message = e
if code == 60:
# SSL cert error; retry
before = datetime.now()
self._curl.perform()
else:
raise e
self.reset()
self._roundtrip = datetime.now() - before
return self._curl.getinfo(pycurl.RESPONSE_CODE)
def save(self, filename):
with open(filename, 'w') as fp:
fp.write(self.src)
def parse(self):
if self._tree is not None:
return
self._tree = fromstring(self.src)
self._tree.make_links_absolute(self._curl.getinfo(pycurl.EFFECTIVE_URL))
# form selection/submission
def select_form(self, idx):
self.parse()
try:
self._form = self._tree.forms[idx]
except TypeError:
# perhaps we've been given a name/id
if idx is None:
raise
self._form = self._tree.forms[filter(lambda f: idx in (f.get('name'), f.get('id')),
self.forms)[0]['__number']]
def __setitem__(self, *args, **kwargs):
self._form_data.__setitem__(*args, **kwargs)
def set_form_data(self, **kwargs):
self._form_data.update(kwargs)
def get_form_fields(self):
return dict(self._form.form_values())
def submit(self, submit_button = None):
data = self.get_form_fields()
submits = self.submits
assert len(submits) <= 1 or submit_button is not None, "Implicit submit is not possible; an explicit choice must be passed: %s" % submits
if len(submits) > 0:
try:
submit = submits[0 if submit_button is None else submit_button]
except TypeError:
# perhaps we've been given a name/id
submit = submits[filter(lambda b: submit_button in b.values(),
submits)[0]['__number']]
data[submit['name']] = submit['value'] if 'value' in submit else ''
if self._form_data:
data.update(self._form_data)
data = urlencode(data)
if self._form.method.upper() == 'POST':
self._curl.setopt(pycurl.POST, 1)
self._curl.setopt(pycurl.POSTFIELDS, data)
return self.go(self._form.action)
sep = '?' if self._form.action.find('?') == -1 else '&'
return self.go("%(current)s%(sep)s%(data)s" % {'current': self._form.action,
'sep' : sep,
'data' : data})
def post(self, url, data):
data = urlencode(data)
self._curl.setopt(pycurl.POST, 1)
self._curl.setopt(pycurl.POSTFIELDS, data)
return self.go(url)
# helpers
@property
def src(self):
return self._buf.getvalue()
@property
def url(self):
return self._curl.getinfo(pycurl.EFFECTIVE_URL)
@property
def title(self):
self.parse()
try:
return self._tree.xpath("/html/head/title/text()")[0].strip()
except IndexError:
return None
@property
def forms(self):
self.parse()
forms = []
for i, form in enumerate(self._tree.forms):
items = {'__number': i}
for name, value in form.items():
if name in ('name', 'id', 'class'):
items[name] = value
forms.append(items)
return forms
@property
def submits(self):
assert self._form is not None, "A form must be selected: %s" % self.forms
submit_lst = self._form.xpath("//input[@type='submit']")
assert len(submit_lst) > 0, "The selected form must contain a submit button"
submits = []
for i, submit in enumerate(submit_lst):
items = {'__number': i}
for name, value in submit.items():
if name in ('name', 'value'):
items[name] = value
submits.append(items)
return submits
def xpath(self, *argv, **kwargs):
self.parse()
return self._tree.xpath(*argv, **kwargs)
def set_follow(self, switch):
self._curl.setopt(pycurl.FOLLOWLOCATION, 1 if switch else 0)
def set_debug(self, switch):
def debug(typ, msg):
indicators = {pycurl.INFOTYPE_TEXT: '%',
pycurl.INFOTYPE_HEADER_IN: '<',
pycurl.INFOTYPE_HEADER_OUT: '>',
pycurl.INFOTYPE_DATA_OUT: '>>'}
if typ in indicators.keys():
print "%(ind)s %(msg)s" % {'ind': indicators[typ], 'msg': msg.strip()}
self._curl.setopt(pycurl.VERBOSE, 1 if switch else 0)
self._curl.setopt(pycurl.DEBUGFUNCTION, debug)
|
Python
| 0
|
@@ -2924,16 +2924,51 @@
rn dict(
+filter(lambda pair: pair%5B0%5D != '',
self._fo
@@ -2975,21 +2975,23 @@
rm.f
-orm_value
+ields.item
s())
+)
%0A%0A
|
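Decoded, get_form_fields() switches from lxml's form_values() (only the "successful", submittable controls) to the full fields mapping, filtering out inputs without a name:

    def get_form_fields(self):
        # .fields exposes every named control, not just submittable values
        return dict(filter(lambda pair: pair[0] != '', self._form.fields.items()))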
291f11c6325a1ae082845be81692bc64521eab7e
|
refactor create-kdtree script
|
py/legacypipe/create-kdtrees.py
|
py/legacypipe/create-kdtrees.py
|
import os
from astrometry.libkd.spherematch import *
from astrometry.util.fits import fits_table
import numpy as np
# This script creates the survey-ccd-*.kd.fits kd-trees from
# survey-ccds-*.fits.gz (zeropoints) files
#
indir = '/global/projecta/projectdirs/cosmo/work/legacysurvey/dr8/DECaLS/'
outdir = '/global/cscratch1/sd/dstn/dr8new'
bands = 'grizY'
for band in bands:
infn = indir + 'survey-ccds-decam-%s.fits.gz' % band
print('Input:', infn)
# gunzip
tfn = '/tmp/survey-ccd-%s.fits' % band
cmd = 'gunzip -cd %s > %s' % (infn, tfn)
print(cmd)
os.system(cmd)
# startree
sfn = '/tmp/startree-%s.fits' % band
cmd = 'startree -i %s -o %s -P -T -k -n ccds' % (tfn, sfn)
print(cmd)
os.system(cmd)
# add expnum-tree
T = fits_table(sfn, columns=['expnum'])
ekd = tree_build(np.atleast_2d(T.expnum.copy()).T.astype(float),
nleaf=60, bbox=False, split=True)
ekd.set_name('expnum')
efn = '/tmp/ekd-%s.fits' % band
ekd.write(efn)
# merge
cmd = 'fitsgetext -i %s -o /tmp/ekd-%s-%%02i -a -M' % (efn, band)
print(cmd)
os.system(cmd)
outfn = outdir + '/survey-ccds-decam-%s.kd.fits' % band
cmd = 'cat %s /tmp/ekd-%s-0[123456] > %s' % (sfn, band, outfn)
os.system(cmd)
|
Python
| 0.000004
|
@@ -222,261 +222,104 @@
%0A#%0A%0A
-indir = '/global/projecta/projectdirs/cosmo/work/legacysurvey/dr8/DECaLS/'%0Aoutdir = '/global/cscratch1/sd/dstn/dr8new'%0A%0Abands = 'grizY'%0A%0Afor band in bands:%0A infn = indir + 'survey-ccds-decam-%25s.fits.gz' %25 band%0A print('Input:', infn)%0A%0A # gunzip%0A
+def create_kdtree(infn, outfn):%0A readfn = infn%0A # gunzip%0A if infn.endswith('.gz'):%0A
@@ -334,35 +334,23 @@
tmp/
-survey-
ccd
--%25
s.fits'
- %25 band%0A
+%0A
@@ -386,32 +386,36 @@
(infn, tfn)%0A
+
+
print(cmd)%0A o
@@ -404,32 +404,42 @@
print(cmd)%0A
+ rtn =
os.system(cmd)%0A
@@ -429,32 +429,78 @@
= os.system(cmd)
+%0A assert(rtn == 0)%0A readfn = tfn
%0A%0A # startree
@@ -524,32 +524,22 @@
startree
--%25s
.fits'
- %25 band
%0A cmd
@@ -584,17 +584,20 @@
cds' %25 (
-t
+read
fn, sfn)
@@ -607,32 +607,38 @@
print(cmd)%0A
+ rtn =
os.system(cmd)%0A
@@ -632,24 +632,45 @@
.system(cmd)
+%0A assert(rtn == 0)
%0A%0A # add
@@ -899,24 +899,14 @@
/ekd
--%25s
.fits'
- %25 band
%0A
@@ -979,11 +979,8 @@
kd-%25
-s-%25
%2502i
@@ -997,51 +997,463 @@
(efn
-, band)%0A print(cmd)%0A os.system(cmd)%0A%0A
+)%0A print(cmd)%0A rtn = os.system(cmd)%0A assert(rtn == 0)%0A%0A cmd = 'cat %25s /tmp/ekd-0%5B123456%5D %3E %25s' %25 (sfn, outfn)%0A rtn = os.system(cmd)%0A assert(rtn == 0)%0A%0A%0Adef pre_depthcut():%0A indir = '/global/projecta/projectdirs/cosmo/work/legacysurvey/dr8/DECaLS/'%0A outdir = '/global/cscratch1/sd/dstn/dr8new'%0A bands = 'grizY'%0A for band in bands:%0A infn = indir + 'survey-ccds-decam-%25s.fits.gz' %25 band%0A print('Input:', infn)%0A
@@ -1516,91 +1516,344 @@
-%0A
c
-md = 'cat %25s /tmp/ekd-%25s-0%5B123456%5D %3E %25s' %25 (sfn, band, outfn
+reate_kdtree(infn, outfn)%0A%0Aif __name__ == '__main__':%0A import argparse%0A parser = argparse.ArgumentParser()%0A parser.add_argument('infn', help='Input filename (CCDs file)')%0A parser.add_argument('outfn', help='Output filename (survey-ccds-X.kd.fits file'
)%0A
+%0A
o
-s.system(cmd
+pt = parser.parse_args()%0A create_kdtree(opt.infn, opt.outfn
)%0A
|
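The refactor wraps the band loop in pre_depthcut(), factors the gunzip/startree/merge steps into create_kdtree(infn, outfn) with checked return codes, and adds a CLI entry point; the decoded tail of the patch:

if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('infn', help='Input filename (CCDs file)')
    parser.add_argument('outfn', help='Output filename (survey-ccds-X.kd.fits file')
    opt = parser.parse_args()
    create_kdtree(opt.infn, opt.outfn)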
e01875e5be3d41ba0631f26254d22666c851bb10
|
Handle all exceptions on webapp2. Pretty ghettoish.
|
src/auth_controller.py
|
src/auth_controller.py
|
import credentials
import logging
import webapp2
from webapp2_extras import (auth, sessions, jinja2)
from simpleauth import SimpleAuthHandler
class BaseRequestHandler(webapp2.RequestHandler):
"""This class is mostly copy-paste from
https://github.com/crhym3/simpleauth/blob/master/example/handlers.py
"""
def dispatch(self):
# Get a session store for this request.
self.session_store = sessions.get_store(request=self.request)
try:
# Dispatch the request.
webapp2.RequestHandler.dispatch(self)
finally:
# Save all sessions.
self.session_store.save_sessions(self.response)
@webapp2.cached_property
def jinja2(self):
"""Returns a Jinja2 renderer cached in the app registry."""
return jinja2.get_jinja2(app=self.app)
@webapp2.cached_property
def session(self):
"""Returns a session using the default cookie key."""
return self.session_store.get_session()
@webapp2.cached_property
def auth(self):
return auth.get_auth()
@webapp2.cached_property
def current_user(self):
"""Returns currently logged in user."""
user_dict = self.auth.get_user_by_session()
if user_dict:
return self.auth.store.user_model.get_by_id(user_dict['user_id'])
@webapp2.cached_property
def user_session(self):
"""Returns the user session dictionary."""
return self.auth.get_user_by_session()
@webapp2.cached_property
def auth_info(self):
"""Return's oauth info."""
return self.session.get('auth_info')
@webapp2.cached_property
def logged_in(self):
"""Returns true if a user is currently logged in, false otherwise."""
return self.auth.get_user_by_session() is not None
def render(self, template_name, template_vars={}):
# Preset values for the template.
v = {
'user': self.current_user if self.logged_in else None
}
# Add manually supplied template values.
v.update(template_vars)
self.response.write(self.jinja2.render_template(template_name, **v))
class AuthHandler(BaseRequestHandler, SimpleAuthHandler):
"""Authentication handler for OAuth 2.0, 1.0(a) and OpenID."""
USER_ATTRS = {
'twitter': {
'profile_image_url': 'avatar_url',
'screen_name': 'name',
'link': 'link'
},
}
def _on_signin(self, data, auth_info, provider, extra=None):
"""Callback whenever a new or existing user is logging in.
data is a user info dictionary.
auth_info contains access token or oauth token and secret.
extra is a dict with additional params passed to the auth init handler.
"""
auth_id = '%s:%s' % (provider, data['id'])
user = self.auth.store.user_model.get_by_auth_id(auth_id)
_attrs = self._to_user_model_attrs(data, self.USER_ATTRS[provider])
if user:
user.populate(**_attrs)
user.put()
self.auth.set_session(self.auth.store.user_to_dict(user))
else:
if self.logged_in:
u = self.current_user
u.populate(**_attrs)
u.add_auth_id(auth_id)
else:
ok, user = self.auth.store.user_model.create_user(auth_id, **_attrs)
if ok:
self.auth.set_session(self.auth.store.user_to_dict(user))
else:
return self.redirect('/')
# Store OAuth tokens in session.
self.session['auth_info'] = auth_info
return self.redirect('/')
def logout(self):
self.session.pop('auth_info', None)
self.auth.unset_session()
self.redirect('/')
def _callback_uri_for(self, provider):
return self.uri_for('auth_callback', provider=provider, _full=True)
def _get_consumer_info_for(self, provider):
"""Returns a tuple (key, secret) for auth init requests."""
return credentials.auth_config[provider]
def _get_optional_params_for(self, provider):
"""Returns optional parameters for auth init requests."""
return credentials.auth_optional_params.get(provider)
def _to_user_model_attrs(self, data, attrs_map):
"""Get the needed information from the provider dataset."""
user_attrs = {}
for k, v in attrs_map.iteritems():
attr = (v, data.get(k)) if isinstance(v, str) else v(data.get(k))
user_attrs.setdefault(*attr)
return user_attrs
class PageHandler(BaseRequestHandler):
def index(self):
"""Index page."""
self.render('index.html')
|
Python
| 0
|
@@ -4625,16 +4625,137 @@
_attrs%0A%0A
+ def handle_exception(self, exception, debug):%0A logging.exception(exception)%0A self.redirect_to('home')%0A%0A
%0Aclass P
|
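Decoded, the patch bolts a catch-all onto AuthHandler, matching the commit's "pretty ghettoish" framing: log the exception and bounce to the home route:

    def handle_exception(self, exception, debug):
        logging.exception(exception)
        self.redirect_to('home')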
d397d38ebbc4124180af4c8be36f014936681e6e
|
update default product plan when DefaultProductPlan already exists
|
corehq/apps/accounting/bootstrap/utils.py
|
corehq/apps/accounting/bootstrap/utils.py
|
# Use modern Python
from __future__ import absolute_import, print_function, unicode_literals
from corehq.apps.accounting.models import (
FeatureType,
SoftwarePlanEdition,
SoftwarePlanVisibility,
)
from corehq.apps.accounting.utils import log_accounting_error, log_accounting_info
import six
FEATURE_TYPES = [
FeatureType.USER,
FeatureType.SMS,
]
def ensure_plans(config, verbose, apps):
DefaultProductPlan = apps.get_model('accounting', 'DefaultProductPlan')
SoftwarePlan = apps.get_model('accounting', 'SoftwarePlan')
SoftwarePlanVersion = apps.get_model('accounting', 'SoftwarePlanVersion')
Role = apps.get_model('django_prbac', 'Role')
for plan_key, plan_deets in six.iteritems(config):
edition, is_trial, is_report_builder_enabled = plan_key
features = _ensure_features(edition, verbose, apps)
try:
role = _ensure_role(plan_deets['role'], apps)
except Role.DoesNotExist:
return
product, product_rate = _ensure_product_rate(
plan_deets['product_rate_monthly_fee'], edition,
verbose=verbose, apps=apps,
)
feature_rates = _ensure_feature_rates(
plan_deets['feature_rates'], features, edition,
verbose=verbose, apps=apps,
)
software_plan = SoftwarePlan(
name=(
(('%s Trial' % product_rate.name) if is_trial else ('%s Edition' % product_rate.name))
if product is None else product.name # TODO - remove after squashing migrations
),
edition=edition,
visibility=SoftwarePlanVisibility.PUBLIC
)
if is_report_builder_enabled:
software_plan.name = '%s - Report Builder (5 Reports)' % software_plan.name
try:
software_plan = SoftwarePlan.objects.get(name=software_plan.name)
if verbose:
log_accounting_info(
"Plan '%s' already exists. Using existing plan to add version." % software_plan.name
)
except SoftwarePlan.DoesNotExist:
software_plan.save()
if verbose:
log_accounting_info("Creating Software Plan: %s" % software_plan.name)
product_rate.save()
software_plan_version = SoftwarePlanVersion(role=role, plan=software_plan, product_rate=product_rate)
software_plan_version.save()
for feature_rate in feature_rates:
feature_rate.save()
software_plan_version.feature_rates.add(feature_rate)
software_plan_version.save()
default_product_plan = DefaultProductPlan(
edition=edition, is_trial=is_trial
)
default_product_plan.is_report_builder_enabled = is_report_builder_enabled
try:
default_product_plan = DefaultProductPlan.objects.get(
edition=edition,
is_trial=is_trial,
is_report_builder_enabled=is_report_builder_enabled,
)
if verbose:
log_accounting_info(
"Default for edition '%s' with is_trial='%s' already exists."
% (default_product_plan.edition, is_trial)
)
except DefaultProductPlan.DoesNotExist:
default_product_plan.plan = software_plan
default_product_plan.save()
if verbose:
log_accounting_info(
"Setting plan as default for edition '%s' with is_trial='%s'."
% (default_product_plan.edition, is_trial)
)
def _ensure_role(role_slug, apps):
Role = apps.get_model('django_prbac', 'Role')
try:
role = Role.objects.get(slug=role_slug)
except Role.DoesNotExist:
log_accounting_error(
"Could not find the role '%s'. Did you forget to run cchq_prbac_bootstrap?"
% role_slug
)
log_accounting_error("Aborting. You should figure this out.")
raise
return role
def _ensure_product_rate(monthly_fee, edition, verbose, apps):
"""
Ensures that all the necessary SoftwareProductRates are created for the plan.
"""
if verbose:
log_accounting_info('Ensuring Product Rates')
SoftwareProductRate = apps.get_model('accounting', 'SoftwareProductRate')
product_name = 'CommCare %s' % edition
if edition == SoftwarePlanEdition.ENTERPRISE:
product_name = "Dimagi Only %s" % product_name
product_rate = SoftwareProductRate(monthly_fee=monthly_fee)
try:
# TODO - remove after squashing migrations
SoftwareProduct = apps.get_model('accounting', 'SoftwareProduct')
product = SoftwareProduct(name=product_name, product_type='CommCare')
try:
product = SoftwareProduct.objects.get(name=product.name)
if verbose:
log_accounting_info(
"Product '%s' already exists. Using existing product to add rate."
% product.name
)
except SoftwareProduct.DoesNotExist:
if verbose:
log_accounting_info("Creating Product: %s" % product)
product.save()
product_rate.product = product
if verbose:
log_accounting_info("Corresponding product rate of $%d created." % product_rate.monthly_fee)
return product, product_rate
except LookupError:
product_rate.name = product_name
if verbose:
log_accounting_info("Corresponding product rate of $%d created." % product_rate.monthly_fee)
return None, product_rate # TODO - don't return tuple after squashing migrations
def _ensure_features(edition, verbose, apps):
"""
Ensures that all the Features necessary for the plans are created.
"""
Feature = apps.get_model('accounting', 'Feature')
if verbose:
log_accounting_info('Ensuring Features for plan: %s' % edition)
features = []
for feature_type in FEATURE_TYPES:
feature = Feature(name='%s %s' % (feature_type, edition), feature_type=feature_type)
if edition == SoftwarePlanEdition.ENTERPRISE:
feature.name = "Dimagi Only %s" % feature.name
try:
feature = Feature.objects.get(name=feature.name)
if verbose:
log_accounting_info(
"Feature '%s' already exists. Using existing feature to add rate."
% feature.name
)
except Feature.DoesNotExist:
feature.save()
if verbose:
log_accounting_info("Creating Feature: %s" % feature)
features.append(feature)
return features
def _ensure_feature_rates(feature_rates, features, edition, verbose, apps):
"""
Ensures that all the FeatureRates necessary for the plans are created.
"""
FeatureRate = apps.get_model('accounting', 'FeatureRate')
if verbose:
log_accounting_info('Ensuring Feature Rates')
db_feature_rates = []
for feature in features:
feature_rate = FeatureRate(**feature_rates[feature.feature_type])
feature_rate.feature = feature
if verbose:
log_accounting_info("Creating rate for feature '%s': %s" % (feature.name, feature_rate))
db_feature_rates.append(feature_rate)
return db_feature_rates
|
Python
| 0
|
@@ -2612,200 +2612,8 @@
()%0A%0A
- default_product_plan = DefaultProductPlan(%0A edition=edition, is_trial=is_trial%0A )%0A default_product_plan.is_report_builder_enabled = is_report_builder_enabled%0A%0A
@@ -3103,32 +3103,255 @@
n.DoesNotExist:%0A
+ default_product_plan = DefaultProductPlan(%0A edition=edition,%0A is_trial=is_trial,%0A is_report_builder_enabled=is_report_builder_enabled,%0A )%0A finally:%0A
defa
|
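Decoded, the lookup-or-create now builds the DefaultProductPlan only in the DoesNotExist branch, and the plan assignment moves into a finally: block so an existing default plan gets updated too. Condensed (the verbose logging in the try branch is unchanged):

        try:
            default_product_plan = DefaultProductPlan.objects.get(
                edition=edition,
                is_trial=is_trial,
                is_report_builder_enabled=is_report_builder_enabled,
            )
        except DefaultProductPlan.DoesNotExist:
            default_product_plan = DefaultProductPlan(
                edition=edition,
                is_trial=is_trial,
                is_report_builder_enabled=is_report_builder_enabled,
            )
        finally:
            default_product_plan.plan = software_plan
            default_product_plan.save()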
bab795306cbba98a4f64e478af3c19f814b962cb
|
reset structlog context after each test
|
relengapi/blueprints/base/__init__.py
|
relengapi/blueprints/base/__init__.py
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import
import logging
import os
import sys
from alembic import command
from alembic.config import Config
from flask import Blueprint
from flask import Flask
from flask import current_app
import relengapi
from relengapi.blueprints.base.alembic_wrapper import AlembicSubcommand
from relengapi.lib import subcommands
bp = Blueprint('base', __name__)
logger = logging.getLogger(__name__)
__all__ = ['AlembicSubcommand', ]
class ServeSubcommand(subcommands.Subcommand):
def make_parser(self, subparsers):
parser = subparsers.add_parser('serve', help='run the server')
parser.add_argument("-a", "--all-interfaces", action='store_true',
help='Run on all interfaces, not just localhost')
parser.add_argument("-p", "--port", type=int, default=5000,
help='Port on which to serve')
parser.add_argument("--no-debug", action='store_true',
help="Don't run in debug mode")
return parser
def run(self, parser, args):
kwargs = {}
if args.all_interfaces:
kwargs['host'] = '0.0.0.0'
kwargs['debug'] = not args.no_debug
kwargs['port'] = args.port
current_app.run(**kwargs)
class CreateDBSubcommand(subcommands.Subcommand):
def make_parser(self, subparsers):
parser = subparsers.add_parser(
'createdb', help='create configured databases')
return parser
def run(self, parser, args):
# alembic.ini uses relative paths, so set the working directory
os.chdir(os.path.dirname(os.path.dirname(relengapi.__file__)))
for dbname in current_app.db.database_names:
logger.info("creating tables for database %s", dbname)
meta = current_app.db.metadata[dbname]
engine = current_app.db.engine(dbname)
meta.create_all(bind=engine)
# load the Alembic config and stamp it with the most recent rev
config_path = os.path.join(os.path.dirname(relengapi.__file__),
'alembic', dbname, 'alembic.ini')
if os.path.isfile(config_path):
logger.info("stamping database %s with head", dbname)
alembic_cfg = Config(config_path)
command.stamp(alembic_cfg, "head")
class RunTestsSubcommand(subcommands.Subcommand):
want_logging = False
def make_parser(self, subparsers):
parser = subparsers.add_parser(
'run-tests', help='run RelengAPI tests')
parser.add_argument("nose_args", metavar='NOSE-ARGS', nargs='*',
help="Arguments to nosetests")
return parser
def run(self, parser, args):
import nose
sys.argv = [sys.argv[0]] + args.nose_args
if 'RELENGAPI_SETTINGS' in os.environ:
del os.environ['RELENGAPI_SETTINGS']
# enable sqlalchemy logging
logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
# push a fake app context to avoid tests accidentally using the
# runtime app context (for example, the development DB)
with Flask(__name__).app_context():
nose.main()
class SQSListenSubcommand(subcommands.Subcommand):
def make_parser(self, subparsers):
parser = subparsers.add_parser(
'sqs-listen', help='Listen to SQS queues registered with @app.sqs_listen')
return parser
def run(self, parser, args):
current_app.aws._spawn_sqs_listeners()
class ReplSubcommand(subcommands.Subcommand):
def make_parser(self, subparsers):
parser = subparsers.add_parser(
'repl', help='Open a Python REPL in the RelengAPI application context; '
'`app` is the current app.')
parser.add_argument("-c", "--command", metavar='COMMAND',
help="Python program passed in as string")
return parser
def run(self, parser, args):
if args.command:
exec args.command in {'app': current_app}
else: # pragma: no-cover
import code
# try to get readline for the interactive interpreter (it
# only uses it if it's already loaded)
try:
import readline
assert readline
except ImportError:
readline = None
print "'app' is the current application."
code.InteractiveConsole(locals={'app': current_app}).interact()
|
Python
| 0
|
@@ -413,16 +413,53 @@
rent_app
+%0Afrom nose.plugins.base import Plugin
%0A%0Aimport
@@ -537,24 +537,79 @@
cSubcommand%0A
+from relengapi.lib import logging as relengapi_logging%0A
from relenga
@@ -2655,24 +2655,341 @@
, %22head%22)%0A%0A%0A
+class ResetLogging(Plugin):%0A%0A %22%22%22Reset the logging context after each test.%22%22%22%0A%0A def configure(self, options, conf):%0A super(ResetLogging, self).configure(options, conf)%0A # enable automatically%0A self.enabled = True%0A%0A def afterTest(self, test):%0A relengapi_logging.reset_context()%0A%0A%0A
class RunTes
@@ -3847,16 +3847,43 @@
se.main(
+addplugins=%5BResetLogging()%5D
)%0A%0A%0Aclas
|
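Decoded, the patch imports Plugin from nose.plugins.base and relengapi.lib.logging (aliased relengapi_logging), defines a self-enabling nose plugin whose afterTest hook clears the structlog context, and runs nose.main(addplugins=[ResetLogging()]):

class ResetLogging(Plugin):

    """Reset the logging context after each test."""

    def configure(self, options, conf):
        super(ResetLogging, self).configure(options, conf)
        # enable automatically
        self.enabled = True

    def afterTest(self, test):
        relengapi_logging.reset_context()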
7b746d2d4ae732ee1eae326254f3a6df676a7973
|
Add __str__ function for SgTable
|
components/table.py
|
components/table.py
|
"""A class to store tables."""
class SgTable:
"""A class to store tables."""
def __init__(self):
self._fields = []
self._table = []
def __len__(self):
return len(self._table)
def __iter__(self):
for row in self._table:
yield row
def __getitem__(self, key):
if not ((type(key) == int or type(key) == long) and key >= 0 and key < len(self._table)):
raise ValueError("Index illegal")
else:
return self._table[key]
def __setitem__(self, key, value):
if not ((type(key) == int or type(key) == long) and key >= 0 and key < len(self._table)):
raise ValueError("Index illegal")
else:
self._table[key] = value
def Append(self, row):
self._table.append(row)
def GetTable(self):
return self._table
def SetTable(self, table):
self._table = table
def GetFields(self):
return self._fields
def SetFields(self, fields):
self._fields = fields
|
Python
| 0.999052
|
@@ -748,16 +748,158 @@
= value
+%0A%0A def __str__(self):%0A ret = str(self._fields)%0A for row in self._table:%0A ret += %22%5Cn%22 + str(row)%0A return ret
%0A %0A
|
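The decoded addition renders the header row plus each data row on its own line:

    def __str__(self):
        ret = str(self._fields)
        for row in self._table:
            ret += "\n" + str(row)
        return ret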
8a45ca4dff9957a6fce07dfa067633fcd842bc51
|
Update cpp.py
|
conda/libdev/cpp.py
|
conda/libdev/cpp.py
|
import os
from SCons.Defaults import Move
def generate(env):
"""Add Builders and construction variables to the Environment."""
if not 'cpp' in env['TOOLS'][:-1]:
env.Tool('system')
env.Tool('prefix')
def BuildCpp(env, target, sources):
# Code to build "target" from "source"
SYSTEM = env['SYSTEM']
targets = env.Install(os.path.join(env['PREFIX'], "include", *target.split('_')),
[source for source in sources if source.suffix in ['.h', '.hpp', '.hxx', '.h++']])
if SYSTEM == 'osx':
kwargs = dict(FRAMEWORKSFLAGS = '-flat_namespace -undefined suppress')
else:
kwargs = dict()
targets += env.SharedLibrary(os.path.join(env['PREFIX'], "lib", target),
[source for source in sources if source.suffix in ['.c', '.cpp', '.cxx', '.c++']],
**kwargs)
if SYSTEM == 'win':
dll = [target for target in targets if target.suffix == '.dll'].pop()
exp = [target for target in targets if target.suffix == '.exp'].pop()
lib = [target for target in targets if target.suffix == '.lib'].pop()
targets = [target for target in targets if not target.suffix in ['.dll', '.exp', '.lib']]
targets += env.Install(os.path.join(env['PREFIX'], "bin"), dll)
targets += env.Command(lib, [exp, dll], [Delete("$SOURCE")])
return targets
env.AddMethod(BuildCpp)
def exists(env):
return 1
|
Python
| 0.000001
|
@@ -34,11 +34,13 @@
ort
-Mov
+Delet
e%0A%0Ad
|
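The one-line fix: the Windows cleanup step calls Delete("$SOURCE") but the module imported Move. After the patch the import reads:

from SCons.Defaults import Delete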
e58b94f29888ac1c48bec77cb08fc90919c7720b
|
add filename attribute
|
src/twelve_tone/midi.py
|
src/twelve_tone/midi.py
|
from miditime.miditime import MIDITime
class MIDIFile(object):
def __init__(self, BPM=120, filename='example.mid'):
self.pattern = MIDITime(BPM, filename)
self.step_counter = 0
def create(self, notes):
midinotes = []
offset = 60
attack = 200
beats = 1
for note in notes:
pitch = (note - 1) + offset
midinote = [self.step_counter, pitch, attack, beats]
midinotes.append(midinote)
self.step_counter = self.step_counter + 1
# Add a track with those notes
self.pattern.add_track(midinotes)
# Output the .mid file
self.pattern.save_midi()
|
Python
| 0.000002
|
@@ -192,16 +192,49 @@
nter = 0
+%0A self.filename = filename
%0A%0A de
|
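Decoded, __init__ simply keeps the filename around as an attribute for later use:

    def __init__(self, BPM=120, filename='example.mid'):
        self.pattern = MIDITime(BPM, filename)
        self.step_counter = 0
        self.filename = filename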
70f0d321325f3a7d9966c11c39dfb2ef6ecea97e
|
add testcase for SNMPv3
|
scripts/cli/test_service_snmp.py
|
scripts/cli/test_service_snmp.py
|
#!/usr/bin/env python3
#
# Copyright (C) 2019-2020 VyOS maintainers and contributors
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 or later as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import re
import unittest
from vyos.validate import is_ipv4
from psutil import process_iter
import vyos.config
import vyos.configsession
import vyos.util as util
SNMPD_CONF = '/etc/snmp/snmpd.conf'
base_path = ['service', 'snmp']
def get_config_value(key):
tmp = util.read_file(SNMPD_CONF)
return re.findall(r'\n?{}\s+(.*)'.format(key), tmp)
class TestSystemNameServer(unittest.TestCase):
def setUp(self):
self.session = vyos.configsession.ConfigSession(os.getpid())
env = self.session.get_session_env()
self.config = vyos.config.Config(session_env=env)
def tearDown(self):
        # Delete SNMP configuration
self.session.delete(base_path)
self.session.commit()
def test_snmp(self):
""" Check if SNMP can be configured and service runs """
clients = ['192.0.2.1', '2001:db8::1']
networks = ['192.0.2.128/25', '2001:db8:babe::/48']
listen = ['127.0.0.1', '::1']
for auth in ['ro', 'rw']:
community = 'VyOS' + auth
self.session.set(base_path + ['community', community, 'authorization', auth])
for client in clients:
self.session.set(base_path + ['community', community, 'client', client])
for network in networks:
self.session.set(base_path + ['community', community, 'network', network])
for addr in listen:
self.session.set(base_path + ['listen-address', addr])
self.session.set(base_path + ['contact', 'maintainers@vyos.io'])
self.session.set(base_path + ['location', 'qemu'])
self.session.commit()
# verify listen address, it will be returned as
# ['unix:/run/snmpd.socket,udp:127.0.0.1:161,udp6:[::1]:161']
        # thus we need to transform this into a proper list
config = get_config_value('agentaddress')[0]
expected = 'unix:/run/snmpd.socket'
for addr in listen:
if is_ipv4(addr):
expected += ',udp:{}:161'.format(addr)
else:
expected += ',udp6:[{}]:161'.format(addr)
self.assertTrue(expected in config)
# Check for running process
self.assertTrue("snmpd" in (p.name() for p in process_iter()))
if __name__ == '__main__':
unittest.main()
|
Python
| 0.000004
|
@@ -2932,16 +2932,1056 @@
er()))%0A%0A
+ def test_snmpv3(self):%0A %22%22%22 Check if SNMPv3 can be configured and service runs%22%22%22%0A%0A self.session.set(base_path + %5B'v3', 'engineid', '0xaffedeadbeef'%5D)%0A self.session.set(base_path + %5B'v3', 'group', 'default', 'mode', 'ro'%5D)%0A # check validate() - a view must be created before this can be comitted%0A with self.assertRaises(vyos.configsession.ConfigSessionError):%0A self.session.commit()%0A%0A self.session.set(base_path + %5B'v3', 'view', 'default', 'oid', '1'%5D)%0A self.session.set(base_path + %5B'v3', 'group', 'default', 'view', 'default'%5D)%0A self.session.commit()%0A%0A # create user%0A for authpriv in %5B'auth', 'privacy'%5D:%0A self.session.set(base_path + %5B'v3', 'user', 'vyos', authpriv, 'plaintext-key', 'vyos1234'%5D)%0A self.session.set(base_path + %5B'v3', 'user', 'vyos', 'group', 'default'%5D)%0A%0A # TODO: read in config file and check values%0A%0A # Check for running process%0A self.assertTrue(%22snmpd%22 in (p.name() for p in process_iter()))%0A%0A
if __nam
|
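Decoded, the new test case drives the SNMPv3 config tree end to end: engine ID, a group that must fail to commit until a view exists, then users with auth and privacy keys:

    def test_snmpv3(self):
        """ Check if SNMPv3 can be configured and service runs"""

        self.session.set(base_path + ['v3', 'engineid', '0xaffedeadbeef'])
        self.session.set(base_path + ['v3', 'group', 'default', 'mode', 'ro'])
        # check validate() - a view must be created before this can be committed
        with self.assertRaises(vyos.configsession.ConfigSessionError):
            self.session.commit()

        self.session.set(base_path + ['v3', 'view', 'default', 'oid', '1'])
        self.session.set(base_path + ['v3', 'group', 'default', 'view', 'default'])
        self.session.commit()

        # create user
        for authpriv in ['auth', 'privacy']:
            self.session.set(base_path + ['v3', 'user', 'vyos', authpriv, 'plaintext-key', 'vyos1234'])
        self.session.set(base_path + ['v3', 'user', 'vyos', 'group', 'default'])

        # TODO: read in config file and check values

        # Check for running process
        self.assertTrue("snmpd" in (p.name() for p in process_iter()))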
b470de3eafbb8ac8526874cf4e3f112fdbdd27c7
|
Add /e and /h to swarm xcopy to ensure subfolders and permissions are correctly copied over.
|
scripts/tools/swarm_bot_setup.py
|
scripts/tools/swarm_bot_setup.py
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Setup a given bot to become a swarm bot by installing the
required files and setting up any required scripts. The bot's OS must be
specified. We assume the bot already has python installed and a ssh server
enabled."""
import optparse
import os
import subprocess
import sys
SWARM_DIRECTORY_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'swarm_bootstrap')
# The swarm server links.
SWARM_SERVER_PROD = 'https://chromium-swarm.appspot.com'
SWARM_SERVER_DEV = 'https://chromium-swarm-dev.appspot.com'
# The directories containing the swarm code initially.
SWARM_STARTING_DIRECTORY = {
'linux': '/b/build/scripts/tools/swarm_bootstrap',
'mac': '/b/build/scripts/tools/swarm_bootstrap',
'win': 'e:\\b\\build\\scripts\\tools\\swarm_bootstrap',
}
# The directories to store the swarm code.
SWARM_DIRECTORY = {
'linux': '/b/swarm_slave',
'mac': '/b/swarm_slave',
'win': 'e:\\b\\swarm_slave\\',
}
class Options(object):
def __init__(self, swarm_server):
self.swarm_server = swarm_server
def OpenSSHCommand(user, host):
return ['ssh', '-o ConnectTimeout=5', '-t', user + '@' + host]
def BuildSetupCommand(user, host, platform, options):
assert platform in ('linux', 'mac', 'win')
bot_setup_commands = []
# Update the swarm files on the machines
if platform == 'win':
bot_setup_commands.extend(['e:', '&&'])
bot_setup_commands.extend([
'cd %s' % SWARM_STARTING_DIRECTORY[platform],
'&&',
'svn update',
'&&'])
# Copy the swarm files to the new swarm directory
if platform == 'win':
copy_func = 'xcopy /i'
else:
copy_func = 'cp -r'
bot_setup_commands.extend([
'%s %s %s' % (copy_func,
SWARM_STARTING_DIRECTORY[platform],
SWARM_DIRECTORY[platform]),
'&&'])
# Run the final swarm setup script.
bot_setup_commands.extend(['cd %s' % SWARM_DIRECTORY[platform], '&&'])
if platform == 'win':
bot_setup_commands.extend([
'call swarm_bot_setup.bat %s %s' %
(options.swarm_server, SWARM_DIRECTORY[platform])])
else:
bot_setup_commands.append('./swarm_bot_setup.sh %s %s' %
(options.swarm_server, SWARM_DIRECTORY[platform]))
# On windows the command must be executed by cmd.exe
if platform == 'win':
bot_setup_commands = ['cmd.exe /c',
'"' + ' '.join(bot_setup_commands) + '"']
return OpenSSHCommand(user, host) + bot_setup_commands
def BuildCleanCommand(user, host, platform):
assert platform in ('linux', 'mac', 'win')
command = OpenSSHCommand(user, host)
if platform == 'win':
command.append('del /q /s %s' % SWARM_DIRECTORY[platform])
else:
command.append('rm -r %s' % SWARM_DIRECTORY[platform])
return command
def main():
parser = optparse.OptionParser(usage='%prog [options]',
description=sys.modules[__name__].__doc__)
parser.add_option('-b', '--bot', action='append', default=[],
help='The bot to setup as a swarm bot')
parser.add_option('-r', '--raw',
help='The name of a file containing line separated slaves '
'to setup. The slaves must all be the same os.')
parser.add_option('-c', '--clean', action='store_true',
help='Removes any old swarm files before setting '
'up the bot.')
parser.add_option('-d', '--use_dev', action='store_true',
help='Set when the swarm bots being setup should use the '
'development swarm server instead of the production one.')
parser.add_option('-u', '--user', default='chrome-bot',
help='The user to use when setting up the machine. '
'Defaults to %default')
parser.add_option('-p', '--print_only', action='store_true',
help='Print what command would be executed to setup the '
'swarm bot.')
parser.add_option('-w', '--win', action='store_true')
parser.add_option('-l', '--linux', action='store_true')
parser.add_option('-m', '--mac', action='store_true')
options, args = parser.parse_args()
if len(args) > 0:
parser.error('Unknown arguments, ' + str(args))
if not options.bot and not options.raw:
parser.error('Must specify a bot or bot file.')
if len([x for x in [options.win, options.linux, options.mac] if x]) != 1:
parser.error('Must specify the bot\'s OS.')
if options.win:
platform = 'win'
elif options.linux:
platform = 'linux'
elif options.mac:
platform = 'mac'
bots = options.bot
if options.raw:
# Remove extra spaces and empty lines.
bots.extend(filter(None, (s.strip() for s in open(options.raw, 'r'))))
for bot in bots:
commands = []
if options.clean:
commands.append(BuildCleanCommand(options.user, bot, platform))
command_options = Options(
swarm_server=SWARM_SERVER_DEV if options.use_dev else SWARM_SERVER_PROD)
commands.append(BuildSetupCommand(options.user, bot, platform,
command_options))
if options.print_only:
print commands
else:
for command in commands:
subprocess.check_call(command)
if __name__ == '__main__':
sys.exit(main())
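For illustration, the full argv list BuildSetupCommand() above assembles for a Linux bot — the host name "bot1" is a placeholder, the user and server are the defaults:
# Result of BuildSetupCommand('chrome-bot', 'bot1', 'linux',
#                             Options(SWARM_SERVER_PROD)):
expected_linux_command = [
    'ssh', '-o ConnectTimeout=5', '-t', 'chrome-bot@bot1',
    'cd /b/build/scripts/tools/swarm_bootstrap', '&&',
    'svn update', '&&',
    'cp -r /b/build/scripts/tools/swarm_bootstrap /b/swarm_slave', '&&',
    'cd /b/swarm_slave', '&&',
    './swarm_bot_setup.sh https://chromium-swarm.appspot.com /b/swarm_slave',
]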
|
Python
| 0.000002
|
@@ -1808,16 +1808,22 @@
xcopy /i
+ /e /h
'%0A else
|
aa46499c43bd7e4162dc657fa898b1df5e2dcee9
|
Exclude windows from extended ascii mode because travis is unhappy
|
src/compas/__main__.py
|
src/compas/__main__.py
|
# -*- coding: utf-8 -*-
from __future__ import print_function
import sys
import pkg_resources
import compas
if __name__ == '__main__':
c = 'DCDHDCACDHDCAEDEACDHDCAEDEACDHDCAEDCDEACDHDCADCACDEADHDCAEDADEACDHDADADADHDCACDCAEDEACDCACDHDCAEDEACDCAEDEACDCAEDBACDHDAEDEACDADADCAEDBADHDAGDEACDADEADCAEDEADHDBADEDCAEDEACDEDAGDHDADCAEDACDCADADADHDAGDADEACAEDADBADHDAGDCADEAEDEACDBADHDAGDCAEDADEACDBADHDBADADADADAGDHDAGDCADEDADBADHDBADADAGDHDEADEAEDEAEDADHDEADEDADEDADHDEACDADCAEDHDACDADCADHDEACDADCAEDHDEACDADCAEDHDEACDADCAEDHDEAFCDADCAEDHDEAEDHDEDH' # noqa: E501
r = 'fGfB]DSD]BYBHEIEHCXBUCFBYBFCUBSBEBOEOBEBSBQBEPBGBPBEQBOBDBRIRBDBOBNEUGUENBLBECRBCBCBCBRCEBLBKBDBBBDBNBCBEBCBNBDBBBDBKBKDBFCDBIDIDIBDCFBDKBJDBKCCCDDKBCDCCCKBDJBIBDPCBBCBMBCBBCPDBIBIERBCBBBCGCBCDREIBIDBQDEBDCDBEDQBDIBIDBOBDIBCBIBCBOBDIBIDBNBCBKCKBCBNBDIBIBDMDMCMDMDBIBJDBHBFNCNGHBDJBJBDGkGDBJBKBDFBGB[BGBFEKBLBDHCPCPCHELBMBDBWCWBDBMBOEBUCUBEOBPBEBSCSBEBPBRBEBQCQBEBRBUBECMCMCECTBXBFBDGCGDGCWB[DXC[BbObB' # noqa: E501
maps = ' !-X_`|\n' if compas.IPY else ' ▌▀█▄`▐\n'
for n, o in zip(r, c):
print((ord(n) - 65) * maps[ord(o) - 65], end='')
print()
print('Yay! COMPAS is installed correctly!')
print()
print('COMPAS: {}'.format(compas.__version__))
print('Python: {}'.format(str(sys.version)))
working_set = pkg_resources.working_set
packages = set([p.project_name for p in working_set]) - set(['COMPAS'])
compas_pkgs = [p for p in packages if p.lower().startswith('compas')]
if compas_pkgs:
print('Installed COMPAS extensions: {}'.format([p for p in compas_pkgs]))
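The banner loop above is a small run-length decoder: one encoded string carries repeat counts, the other indexes into a short character palette. A toy version of the same scheme, with made-up inputs:
counts, chars, palette = 'CBD', 'ABC', ' *#'
decoded = ''.join((ord(n) - 65) * palette[ord(o) - 65] for n, o in zip(counts, chars))
print(repr(decoded))  # '  *###' -- two spaces, one '*', three '#'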
|
Python
| 0
|
@@ -1019,16 +1019,34 @@
pas.IPY
+or compas.WINDOWS
else ' %E2%96%8C
|
ee9646c5e71dcbaf776d9f9f929dead5e5c1fa82
|
Revert "cookie.value() didn't really need to be a string, since QSettings will take a QVariant anyways."
|
python/pyphantomjs/cookiejar.py
|
python/pyphantomjs/cookiejar.py
|
'''
This file is part of the PyPhantomJS project.
Copyright (C) 2011 James Roe <roejames12@hotmail.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
from PyQt4.QtCore import QSettings
from PyQt4.QtNetwork import QNetworkCookie, QNetworkCookieJar
class CookieJar(QNetworkCookieJar):
def __init__(self, parent, cookiesFile):
super(CookieJar, self).__init__(parent)
self.m_cookiesFile = cookiesFile
def setCookiesFromUrl(self, cookieList, url):
settings = QSettings(self.m_cookiesFile, QSettings.IniFormat)
settings.beginGroup(url.host())
for cookie in cookieList:
settings.setValue(str(cookie.name()), cookie.value())
settings.sync()
return True
def cookiesForUrl(self, url):
settings = QSettings(self.m_cookiesFile, QSettings.IniFormat)
cookieList = []
settings.beginGroup(url.host())
for cname in settings.childKeys():
cookieList.append(QNetworkCookie(cname, settings.value(cname)))
return cookieList
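A hypothetical usage sketch of the class above, assuming PyQt4 is installed and the cookies path is writable — the URL, cookie, and path are made up:
from PyQt4.QtCore import QUrl
from PyQt4.QtNetwork import QNetworkCookie

jar = CookieJar(parent=None, cookiesFile='/tmp/cookies.ini')
url = QUrl('http://example.com')
jar.setCookiesFromUrl([QNetworkCookie('session', 'abc123')], url)
print([str(c.name()) for c in jar.cookiesForUrl(url)])  # ['session']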
|
Python
| 0
|
@@ -1255,16 +1255,20 @@
ame()),
+str(
cookie.v
@@ -1274,16 +1274,17 @@
value())
+)
%0A%0A
|
1c8bd21fe895260254684d3e2b2f9f5b70fdb91f
|
Fix error msg
|
python/smurff/smurff/prepare.py
|
python/smurff/smurff/prepare.py
|
import numpy as np
import scipy as sp
import pandas as pd
import scipy.sparse
import numbers
from .helper import SparseTensor
def make_train_test(Y, ntest):
"""Splits a sparse matrix Y into a train and a test matrix.
Y scipy sparse matrix (coo_matrix, csr_matrix or csc_matrix)
ntest either a float below 1.0 or integer.
if float, then indicates the ratio of test cells
if integer, then indicates the number of test cells
returns Ytrain, Ytest (type coo_matrix)
"""
if type(Y) not in [sp.sparse.coo.coo_matrix, sp.sparse.csr.csr_matrix, sp.sparse.csc.csc_matrix]:
raise TypeError("Unsupported Y type: %s" + type(Y))
if not isinstance(ntest, numbers.Real) or ntest < 0:
raise TypeError("ntest has to be a non-negative number (number or ratio of test samples).")
Y = Y.tocoo(copy = False)
if ntest < 1:
ntest = Y.nnz * ntest
ntest = int(round(ntest))
rperm = np.random.permutation(Y.nnz)
train = rperm[ntest:]
test = rperm[0:ntest]
Ytrain = sp.sparse.coo_matrix( (Y.data[train], (Y.row[train], Y.col[train])), shape=Y.shape )
Ytest = sp.sparse.coo_matrix( (Y.data[test], (Y.row[test], Y.col[test])), shape=Y.shape )
return Ytrain, Ytest
def make_train_test_df(Y, ntest, shape = None):
"""Splits rows of dataframe Y into a train and a test dataframe.
Y pandas dataframe
ntest either a float below 1.0 or integer.
if float, then indicates the ratio of test cells
if integer, then indicates the number of test cells
returns Ytrain, Ytest (type coo_matrix)
"""
if type(Y) != pd.core.frame.DataFrame:
raise TypeError("Y should be DataFrame.")
if not isinstance(ntest, numbers.Real) or ntest < 0:
raise TypeError("ntest has to be a non-negative number (number or ratio of test samples).")
# randomly splitting into train and test rows
if ntest < 1:
ntest = Y.shape[0] * ntest
ntest = int(round(ntest))
rperm = np.random.permutation(Y.shape[0])
train = rperm[ntest:]
test = rperm[0:ntest]
Ytrain = SparseTensor(Y.iloc[train], shape)
Ytest = SparseTensor(Y.iloc[test], Ytrain.shape)
return Ytrain, Ytest
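Example usage of make_train_test() with a small made-up matrix, holding out 20% of the non-zero cells:
import numpy as np
import scipy.sparse

Y = scipy.sparse.coo_matrix(np.array([[1, 0, 2], [0, 3, 0], [4, 0, 5]]))
Ytrain, Ytest = make_train_test(Y, 0.2)  # 5 non-zeros -> 1 held out for test
print(Ytrain.nnz, Ytest.nnz)             # 4 1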
|
Python
| 0.000023
|
@@ -677,22 +677,24 @@
pe:
-%25s
%22 +
+str(
type(Y))
%0A
@@ -689,16 +689,17 @@
type(Y))
+)
%0A if
|
4f92ec41493ab115d11b4068c175b98b7afac22f
|
Use the DESCRIPTOR.full_name instead of __class__.__name__ for ProtobufProcess
|
src/compactor/process.py
|
src/compactor/process.py
|
import threading
from .context import Context
from .pid import PID
class Process(object):
ROUTE_ATTRIBUTE = '__route__'
INSTALL_ATTRIBUTE = '__mailbox__'
class Error(Exception): pass
class UnboundProcess(Error): pass
@classmethod
def route(cls, path):
if not path.startswith('/'):
raise ValueError('Routes must start with "/"')
def wrap(fn):
setattr(fn, cls.ROUTE_ATTRIBUTE, path)
return fn
return wrap
# We'll probably need to make route and install opaque, and just have them delegate to
# some argument container that can then be introspected by the process implementations.
@classmethod
def install(cls, mbox):
def wrap(fn):
setattr(fn, cls.INSTALL_ATTRIBUTE, mbox)
return fn
return wrap
def __init__(self, name):
self.name = name
self._delegates = {}
self._http_handlers = {}
self._message_handlers = {}
self._context = None
def initialize(self):
self._http_handlers.update(self.iter_routes())
self._message_handlers.update(self.iter_handlers())
def _assert_bound(self):
if not self._context:
raise self.UnboundProcess('Cannot get pid of unbound process.')
def bind(self, context):
if not isinstance(context, Context):
raise TypeError('Can only bind to a Context, got %s' % type(context))
self._context = context
@property
def pid(self):
self._assert_bound()
return PID(self._context.ip, self._context.port, self.name)
@property
def route_paths(self):
return self._http_handlers.keys()
@property
def message_names(self):
return self._message_handlers.keys()
def iter_callables(self):
for attribute_name in dir(self):
attribute = getattr(self, attribute_name)
if not callable(attribute):
continue
yield attribute
def iter_routes(self):
for function in self.iter_callables():
if hasattr(function, self.ROUTE_ATTRIBUTE):
yield getattr(function, self.ROUTE_ATTRIBUTE), function
def iter_handlers(self):
for function in self.iter_callables():
if hasattr(function, self.INSTALL_ATTRIBUTE):
yield getattr(function, self.INSTALL_ATTRIBUTE), function
def delegate(self, name, pid):
self._delegates[name] = pid
def handle_message(self, name, from_pid, body):
if name in self._message_handlers:
self._message_handlers[name](from_pid, body)
elif name in self._delegates:
to = self._delegates[name]
self._context.transport(to, name, body, from_pid)
def handle_http(self, route, handler, *args, **kw):
return self._http_handlers[route](handler, *args, **kw)
def exited(self, pid):
pass
def lost(self, pid):
pass
def send(self, to, method, body=None):
self._assert_bound()
self._context.send(self.pid, to, method, body)
def link(self, to):
self._assert_bound()
self._context.link(self.pid, to)
def terminate(self):
self._assert_bound()
self._context.terminate(self.pid)
class ProtobufProcess(Process):
MESSAGE_TYPE_ATTRIBUTE = '__pb_msgtype__'
@classmethod
def install(cls, message_type, endpoint=None):
endpoint = endpoint or message_type.__class__.__name__
def wrap(fn):
setattr(fn, cls.MESSAGE_TYPE_ATTRIBUTE, message_type)
return Process.install(endpoint)(fn)
return wrap
def send(self, to, message, method_name=None):
super(ProtobufProcess, self).send(
to, method_name or message.__class__.__name__, message.SerializeToString())
def handle_message(self, name, from_pid, body):
handler = self._message_handlers[name]
message_type = getattr(handler, self.MESSAGE_TYPE_ATTRIBUTE)
message = message_type()
message.MergeFromString(body)
super(ProtobufProcess, self).handle_message(name, from_pid, message)
"""
class QueueProcess(Process):
def __init__(self, **kw):
...
super(QueueProcess, self).__init__('queue', **kw)
@install('enqueue')
def enqueue(self, body):
pass
@install('dequeue')
def dequeue(self):
pass
class ExecutorProcess(ProtobufProcess):
def __init__(self, slave_pid, driver, executor):
self.slave_pid = slave_pid
self.driver = driver
self.executor = executor
super(ExecutorProcess, self).__init__('slave')
def initialize(self):
register_executor_message = RegisterExecutorMessage(framework_id, executor_id)
self.send(
self.slave_pid,
register_executor_message,
method_name='mesos.internal.RegisterExecutorMessage')
@install(ExecutorRegisteredMessage, endpoint='mesos.internal.ExecutorRegisteredMessage')
def registered(self, message):
executor_info, framework_id, framework_info, slave_id, slave_info = (
message.executor_info, message.framework_id, message.framework_info, message.slave_id,
message.slave_info)
# stuff
@route('/vars.json')
def vars(self, handler):
handler.write(json.dumps(self._vars))
@route('/expensive')
@asynchronous
def vars(self, handler):
result1 = yield self.some_expensive_op1()
result2 = yield self.some_expensive_op2(result1)
handler.write(result2.serialize())
@route('/redirect_me')
def redirector(self, handler):
handler.redirect('some_other_url')
slave = PID.from_string(sys.argv[1])
executor_process = ExecutorProcess(slave, driver, executor)
context.spawn(executor_process)
"""
|
Python
| 0.000068
|
@@ -3172,34 +3172,36 @@
ge_type.
-__class__._
+DESCRIPTOR.full
_name
-__
%0A def
@@ -3458,26 +3458,28 @@
age.
-__class__._
+DESCRIPTOR.full
_name
-__
, me
|
b85787a27eac97ed6c9617203aab0e0a62a4960a
|
Add file check
|
camgrab.py
|
camgrab.py
|
#!/usr/bin/python
# The MIT License (MIT)
#
# Copyright (c) 2014 Corrado Ubezio
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""Capturing a single image from webcam
In Linux there are the following methods:
METHOD 1: RTSP protocol
avconv -i rtsp://<user>:<pass>@<local_ip>:<port>/video.mjpg -vframes 1 -r 1 -s 640x480 image.jpg
METHOD 2: HTTP protocol
avconv -i http://<user>:<pass>@<local_ip>:<port>/video.mjpg -vframes 1 -r 1 -s 640x480 image.jpg
METHOD 3: If the camera is smart enough, it is possible to send an http request to take a snapshot
wget --tries=2 --timeout=10 http://<user>:<pass>@<local_ip>:<port>/cgi-bin/jpg/image -O snapshot.jpg
See also: Link: http://stackoverflow.com/a/11094891
"""
from cv2 import *
from os import remove, stat
from os.path import isfile
import requests
def imageCaptureFromIP(cameraUrl, username, password, imageFileName):
# See: http://stackoverflow.com/a/13137873
try:
r = requests.get(cameraUrl, auth=(username, password), timeout=10, stream=True)
except Exception:
# TODO: better to handle exceptions as in:
# http://docs.python-requests.org/en/latest/user/quickstart/#errors-and-exceptions
return False
if r.status_code != 200:
return False
with open(imageFileName, 'wb') as f:
for chunk in r.iter_content(1024):
f.write(chunk)
if not isfile(imageFileName):
return False
statinfo = stat(imageFileName)
if statinfo.st_size == 0:
remove(imageFileName)
return False
return True
def imageCaptureFromUSB(cameraNumber, imageFileName):
# initialize the camera
cam = VideoCapture(cameraNumber)
s, img = cam.read()
if not s:
# frame captured returns errors
return False
imwrite(imageFileName, img) #save JPG image
return True
def imageCapture(cameraDesc, imageFileName):
camProtAndAddr = cameraDesc['source'].split('://')
if camProtAndAddr[0] == 'usb':
s = imageCaptureFromUSB(eval(camProtAndAddr[1]), imageFileName)
elif camProtAndAddr[0] == 'http':
s = imageCaptureFromIP(cameraDesc['source'],
cameraDesc['optional-auth']['user-name'],
cameraDesc['optional-auth']['password'],
imageFileName)
else:
s = False
return s
if __name__ == "__main__":
from camshotcfg import ConfigDataLoad
from datetime import datetime
cfg = ConfigDataLoad('camshotcfg.json')
# Make the grabbed picture file path
now = datetime.now()
picturesDirName = '{0:s}/CAMSHOT_{1:%Y%m%d}'.format(cfg.data['camshot-datastore'], now)
cameraIndex = 0
for camera in cfg.data['cameras-list']:
print 'Get image from', camera['source']
pictureFileFullName = '{0:s}/CS{1:%Y%m%d%H%M}_{2:02d}.jpg'.format(picturesDirName, now, cameraIndex)
imageCapture(camera, pictureFileFullName)
cameraIndex = cameraIndex + 1
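For reference, hypothetical camera descriptors matching the structure imageCapture() above expects — addresses and credentials are placeholders:
usb_cam = {'source': 'usb://0'}  # the device number is eval'd into an int
ip_cam = {
    'source': 'http://192.0.2.10:8080/video.mjpg',
    'optional-auth': {'user-name': 'admin', 'password': 'secret'},
}
print imageCapture(usb_cam, '/tmp/usb.jpg')  # True on success, False otherwise
print imageCapture(ip_cam, '/tmp/ip.jpg')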
|
Python
| 0.000001
|
@@ -3461,16 +3461,51 @@
atetime%0A
+ from os import makedirs, path%0A%0A
cfg
@@ -3540,16 +3540,16 @@
.json')%0A
-
%0A # M
@@ -3698,16 +3698,343 @@
'%5D, now)
+%0A%0A try:%0A makedirs(picturesDirName)%0A except OSError, e:%0A if not path.isdir(picturesDirName):%0A # If the directory doesn't already exist, there was an error on creation%0A print %22%7B0%7D: create directory %7B1%7D %5BOS errno %7B2%7D%5D: %7B3%7D%22.format(MAIN_SCRIPT_NAME, picturesDirName, e.errno, e.strerror)%0A%0A
%0A %0A c
@@ -4263,49 +4263,144 @@
-imageCapture(camera, pictureFileFullName)
+print 'Save in',pictureFileFullName %0A s = imageCapture(camera, pictureFileFullName)%0A if not s:%0A print '...Fail'
%0A
|
bca7388a1186d2557def93ff016f1a6ab82b1d7c
|
Fix another moved module
|
carebot.py
|
carebot.py
|
# from analytics import Analytics
import datetime
from models import Story
import re
from slackbot.bot import Bot
from slackbot.bot import respond_to
from slackbot.bot import listen_to
# Set up analytics to handle inquiries
# a = Analytics()
LINGER_RATE_REGEX = '[Ww]hat is the linger rate on ((\w*-*)+)?'
START_TRACKING_REGEX = '[Tt]rack ((\w*-*)+)'
"""
Given a message, figure out what is being requested.
"""
def parse_message(text):
m = re.search('[Hh]ow many people donated on (\w*)?', text)
if m:
return 'donation'
m = re.search(LINGER_RATE_REGEX, text)
if m:
return 'linger'
m = re.search(START_TRACKING_REGEX, text)
if m:
return 'track'
return False
"""
Get information on how many people clicked the donate button on a story
"""
def get_donation_data(message, category):
data = a.donation_data(category)
if data.get('rows', []):
row = data.get('rows')[0]
print(row[0])
return row[0]
else:
return False
def handle_donation_question(message):
m = re.search('[Hh]ow many people donated on (\w*)?', message.body['text'])
if not m:
return False
category = m.group(1)
if category:
count = get_donation_data(message, category)
if count:
message.reply(u"I don't know exactly how many people donated yet, but there were %s donation events on %s." % (count, category))
return True
else:
message.reply("I wasn't able to figure out how many people donated on %s" % category)
return False
else:
return False
def handle_linger_question(message):
m = re.search(LINGER_RATE_REGEX, message.body['text'])
if not m:
return False
slug = m.group(1)
if slug:
rate = a.get_linger_rate(slug)
if rate:
message.reply(u"%s people spent an average of %s minutes and %s seconds on %s." % (rate[0], rate[1], rate[2], slug))
return True
else:
message.reply("I wasn't able to figure out the linger rate of %s" % slug)
return False
else:
return False
def start_tracking(message):
m = re.search(START_TRACKING_REGEX, message.body['text'])
if not m:
return False
slug = m.group(1)
if slug:
# Check if the slug is in the database.
try:
story = Story.select().where(Story.slug == slug).get()
message.reply("Thanks! I'm already tracking %s, so you should start seeing results within a couple hours." % slug)
except Story.DoesNotExist:
# If it's not in the database, start tracking it.
story = Story.create(slug=slug, tracking_started=datetime.datetime.now())
story.save()
message.reply("Ok, I've started tracking %s. The first stats will come in about 4 hours." % slug)
else:
message.reply("Sorry, I wasn't able to start tracking %s right now." % slug)
return True
# We start out responding to everything -- there doesn't seem to be a way for a
# more specific regex to take precedence over the generic case.
@respond_to('.*', re.IGNORECASE)
def response_dispatcher(message, text=None):
if not text:
text = message.body['text']
message_type = parse_message(text)
print("Got message", text, message_type)
if message_type == 'track':
start_tracking(message)
elif message_type == 'donation':
handle_donation_question(message)
elif message_type == 'help':
pass
elif message_type == 'linger':
print("handling linger")
handle_linger_question(message)
else:
message.reply("Hi! I got your message, but I don't know enough yet to respond to it.")
# Listen passively for any mention of Carebot: at the beginning of the line
@listen_to('^@*[Cc]arebot[:|,]\s*(.*)', re.IGNORECASE)
def reply(message, text):
response_dispatcher(message, text)
def main():
bot = Bot()
# bot._client.rtm_send_message('secret-carebot-test', 'hi there')
bot.run()
if __name__ == "__main__":
main()
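A quick illustration of the parse_message() dispatch above, with made-up messages:
for msg in ['How many people donated on homepage?',
            'what is the linger rate on best-songs-2016?',
            'track best-songs-2016',
            'hello there']:
    print(parse_message(msg))  # donation, linger, track, False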
|
Python
| 0
|
@@ -48,16 +48,21 @@
me%0Afrom
+util.
models i
|
980b3eded1e06c8f152b873531273c1b0154a755
|
Update Visualization-commandCenter.py
|
dataCenter/Visualization-commandCenter.py
|
dataCenter/Visualization-commandCenter.py
|
import matplotlib.pyplot as plt; plt.rcdefaults()
import numpy as np
import matplotlib.pyplot as plt
import pickle
with open('firefox-bot/config/iframe.txt', 'r') as loginInfo:
newName = loginInfo.readline()
newName = newName.rstrip()
def load_obj(name):
with open(name + '.pkl', 'rb') as f:
return pickle.load(f)
stats = load_obj('firefox-bot/statistics/' + newName')
print(stats)
d = stats['draws']
comItems = ('skill', 'super')
y_pos = np.arange(len(comItems))
width=(1/5)
for index, item in enumerate(comItems):
plt.bar(index, stats[item], width, label=item + ' ' + str(round((stats[item]/d)*100, 3)) + '%')
#' frequency: 1 / ' + str(round(spins/stats[item])))
if(stats[item]):
print(item, '1 out of ', round(d/stats[item]), ' draws')
plt.legend(loc='best')
plt.xticks(y_pos, comItems)
plt.ylabel('total collected')
plt.xlabel('items')
plt.title('totalDraws: ' + str(int(d)))
plt.show()
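For reference, the rough shape of the stats dict the script above unpickles — the values here are made up:
stats = {'draws': 200, 'skill': 8, 'super': 2}
for item in ('skill', 'super'):
    # Same percentage math as the bar labels above.
    print(item, round((stats[item] / stats['draws']) * 100, 3), '%')
# skill 4.0 %
# super 1.0 %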
|
Python
| 0
|
@@ -243,21 +243,16 @@
rstrip()
-%0D%0A
%0D%0Adef l
@@ -392,17 +392,16 @@
newName
-'
)%0D%0Aprint
|
18be6e0d3ee656f150e54bc0abe3959d92e2b35c
|
add message for script completion to dashboard
|
cea/api.py
|
cea/api.py
|
"""
Provide access to the scripts exported by the City Energy Analyst.
"""
from __future__ import print_function
def register_scripts():
import cea.config
import cea.scripts
import importlib
config = cea.config.Configuration()
def script_wrapper(cea_script):
module_path = cea_script.module
script_module = importlib.import_module(module_path)
def script_runner(config=config, **kwargs):
option_list = cea_script.parameters
config.restrict_to(option_list)
for section, parameter in config.matching_parameters(option_list):
parameter_py_name = parameter.name.replace('-', '_')
if parameter_py_name in kwargs:
parameter.set(kwargs[parameter_py_name])
# run the script
cea_script.print_script_configuration(config)
script_module.main(config)
if script_module.__doc__:
script_runner.__doc__ = script_module.__doc__.strip()
else:
script_runner.__doc__ = 'FIXME: Add API documentation to {}'.format(module_path)
return script_runner
for cea_script in sorted(cea.scripts.list_scripts()):
script_py_name = cea_script.name.replace('-', '_')
globals()[script_py_name] = script_wrapper(cea_script)
register_scripts()
if __name__ == '__main__':
print(demand.__doc__)
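A self-contained sketch of the closure-injection pattern register_scripts() uses — the script names below are made up:
def _make_runner(script_name):
    def runner(**kwargs):
        print('running', script_name, 'with', kwargs)
    runner.__doc__ = 'Runs the %s script.' % script_name
    return runner

for script_name in ['demand-forecast', 'data-helper']:
    # Publish each runner under a Python-friendly name, as register_scripts() does.
    globals()[script_name.replace('-', '_')] = _make_runner(script_name)

demand_forecast(scenario='baseline')  # running demand-forecast with {'scenario': 'baseline'}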
|
Python
| 0
|
@@ -107,16 +107,31 @@
unction%0A
+import datetime
%0A%0Adef re
@@ -898,32 +898,298 @@
-script_module.main(confi
+t0 = datetime.datetime.now()%0A script_module.main(config)%0A%0A # print success message%0A msg = %22Script completed. Execution time: %25.2fs%22 %25 (datetime.datetime.now() - t0).total_seconds()%0A print(%22%22)%0A print(%22-%22 * len(msg))%0A print(ms
g)%0A
|
f4c6a4f9ca1ce27e122e9c277706a7bd41ade6c5
|
Fix OSS build problems after cl/483572970
|
testing/build_defs.bzl
|
testing/build_defs.bzl
|
# Copyright 2022 The Centipede Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains rules that build a fuzz target with sanitizer coverage.
https://clang.llvm.org/docs/SanitizerCoverage.html
To instrument a target with sancov, we apply a bazel transition
https://bazel.build/rules/lib/transition
to change its configuration (i.e., add the necessary compilation flags). The configuration will
affect all its transitive dependencies as well.
"""
# Change the flags from the default ones to sancov:
# https://clang.llvm.org/docs/SanitizerCoverage.html.
def _sancov_transition_impl(settings, attr):
features_to_strip = ["asan", "tsan", "msan"]
filtered_features = [
x
for x in settings["//command_line_option:features"]
if x not in features_to_strip
]
# some of the valid sancov flag combinations:
# trace-pc-guard,pc-table
# trace-pc-guard,pc-table,trace-cmp
# trace-pc-guard,pc-table,trace-loads
sancov = "-fsanitize-coverage=" + attr.sancov
return {
"//command_line_option:copt": settings["//command_line_option:copt"] + [
"-O1",
"-fno-builtin", # prevent memcmp & co from inlining.
sancov,
"-gline-tables-only", # debug info, for coverage reporting tools.
# https://llvm.org/docs/LibFuzzer.html#fuzzer-friendly-build-mode
"-DFUZZING_BUILD_MODE_UNSAFE_FOR_PRODUCTION",
],
"//command_line_option:compilation_mode": "opt",
"//command_line_option:strip": "never", # preserve debug info.
"//command_line_option:features": filtered_features,
"//command_line_option:compiler": None,
"//command_line_option:dynamic_mode": "off",
}
sancov_transition = transition(
implementation = _sancov_transition_impl,
inputs = [
"//command_line_option:copt",
"//command_line_option:features",
],
outputs = [
"//command_line_option:copt",
"//command_line_option:compilation_mode",
"//command_line_option:strip",
"//command_line_option:features",
"//command_line_option:compiler",
"//command_line_option:dynamic_mode",
],
)
def __sancov_fuzz_target_impl(ctx):
# We need to copy the executable because starlark doesn't allow
# providing an executable not created by the rule
executable_src = ctx.executable.fuzz_target
executable_dst = ctx.actions.declare_file(ctx.label.name)
ctx.actions.run_shell(
inputs = [executable_src],
outputs = [executable_dst],
command = "cp %s %s" % (executable_src.path, executable_dst.path),
)
# We need to explicitly collect the runfiles from all relevant attributes.
# See https://docs.bazel.build/versions/main/skylark/rules.html#runfiles
runfiles = ctx.runfiles()
# The transition transforms scalar attributes into lists,
# so we need to index into the list first.
fuzz_target = ctx.attr.fuzz_target[0]
runfiles = runfiles.merge(fuzz_target[DefaultInfo].default_runfiles)
return [DefaultInfo(runfiles = runfiles, executable = executable_dst)]
# Wrapper to build a fuzz target with sanitizer coverage.
# By default it uses some pre-defined set of sancov instrumentations.
# It can be overridden with more advanced ones, see _sancov_transition_impl.
__sancov_fuzz_target = rule(
implementation = __sancov_fuzz_target_impl,
attrs = {
"fuzz_target": attr.label(
cfg = sancov_transition,
executable = True,
mandatory = True,
),
"_allowlist_function_transition": attr.label(
default = "@bazel_tools//tools/allowlists/function_transition_allowlist",
),
"sancov": attr.string(),
},
executable = True,
)
def centipede_fuzz_target(
name,
fuzz_target = None,
srcs = None,
# TODO(navidem): edit --config=centipede too.
sancov = "trace-pc-guard,pc-table,trace-loads,trace-cmp,control-flow",
deps = []):
"""Generates a fuzz target target instrumented with sancov.
Args:
name: A unique name for this target
srcs: Test source(s); the default is [`name` + ".cc"]; mutually exclusive
with `fuzz_target`
fuzz_target: A fuzz target to wrap into sancov; by default, a new target
named "_" + `name`, compiled from provided or default `srcs`, will be
created
sancov: The sancov instrumentations to use, e.g. "trace-pc-guard,pc-table";
see https://clang.llvm.org/docs/SanitizerCoverage.html
deps: Dependency for srcs
"""
if not fuzz_target:
# Our own intermediate fuzz target rule.
fuzz_target = "_" + name
# A dummy binary that is going to be wrapped by sancov.
# __sancov_fuzz_target() below uses the dependencies here
# to rebuild an instrumented binary using transition.
native.cc_binary(
name = fuzz_target,
srcs = srcs or [name + ".cc"],
deps = deps + ["@centipede//:centipede_runner"],
linkopts = [
"-ldl",
"-lrt",
"-lpthread"
],
testonly = True,
)
elif srcs:
fail("`srcs` are mutually exclusive with `fuzz_target`")
# Bazel transition to build with the right sancov flags.
__sancov_fuzz_target(
name = name,
fuzz_target = fuzz_target,
sancov = sancov,
testonly = True,
)
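A hypothetical BUILD file usage of the macro above — the load path and target name are placeholders:
load("//testing:build_defs.bzl", "centipede_fuzz_target")

centipede_fuzz_target(
    name = "my_fuzz_target",             # compiles my_fuzz_target.cc by default
    sancov = "trace-pc-guard,pc-table",  # override the default instrumentation
)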
|
Python
| 0.000002
|
@@ -4510,21 +4510,8 @@
-cmp
-,control-flow
%22,%0A
|
ef5c049a4c32e69c9ce88c958ae8272bdfddeba4
|
Add area info in check price result
|
check_price.py
|
check_price.py
|
# -*- coding:utf-8 -*-
import pymysql
import pymysql.cursors
from prettytable import PrettyTable
from colorama import init, Fore
database_name = "house_price_04"
# Open the database connection
db = pymysql.connect("localhost", "root", "aB123456", database_name, charset='utf8mb4')
# Use the cursor() method to obtain an operation cursor
cursor = db.cursor()
# Prompt for the name of the residential community to look up
check_name = input("请输入小区名称:")
# Holds the rows whose community name contains the keyword
data=[]
def main():
header = 'id 小区名称 价格 在售'.split()
pt = PrettyTable()
pt._set_field_names(header)
# Get all tables
tables=show_tables()
for table in tables:
select_info(table)
for row in data:
row_list=list(row)
new_row=[
row[0],
Fore.GREEN + row[1] + Fore.RESET,
Fore.RED + str(row[2]) + Fore.RESET,
row[3],
]
pt.add_row(new_row)
print(pt)
def show_tables():
sql="show tables;"
try:
cursor.execute(sql)
tables=cursor.fetchall()
except:
print ("Error: unable to fetch data")
return tables
def select_info(table):
sql = "SELECT * FROM %s;" % table
try:
# Execute the SQL statement
cursor.execute(sql)
# Fetch all result rows
results = cursor.fetchall()
for row in results:
name=row[1]
if(check_name in name):
data.append(row)
except:
print ("Error: unable to fetch data")
if __name__ == '__main__':
main()
|
Python
| 0
|
@@ -121,16 +121,27 @@
it, Fore
+%0Aimport pdb
%0A%0Adataba
@@ -312,16 +312,60 @@
%E6%9F%A5%E8%AF%A2%E7%9A%84%E5%B0%8F%E5%8C%BA%E5%90%8D%E7%A7%B0%0A
+%0Adata=%5B%5D%0A%0Adef main():%0A%0A%09global check_name %0A%09
check_na
@@ -387,16 +387,17 @@
%E5%8C%BA%E5%90%8D%E7%A7%B0%EF%BC%9A%22);%0A
+%09
#%E7%94%A8%E4%BA%8E%E5%AD%98%E5%82%A8%E6%9F%A5%E8%AF%A2%E5%88%B0
@@ -406,37 +406,16 @@
%E5%85%B3%E9%94%AE%E5%AD%97%E7%9A%84%E5%B0%8F%E5%8C%BA%E4%BF%A1%E6%81%AF
-%0Adata=%5B%5D%0A%0Adef main():
%0A%0A%09heade
@@ -419,16 +419,19 @@
ader = '
+%E5%9C%B0%E5%8C%BA
id %E5%B0%8F%E5%8C%BA%E5%90%8D%E7%A7%B0
@@ -573,16 +573,18 @@
table)%0A%0A
+%0A%0A
%09for row
@@ -595,16 +595,18 @@
data:%0A%09%09
+#
row_list
@@ -636,13 +636,51 @@
%09%09%09%09
-row%5B0
+Fore.GREEN + row%5B0%5D + Fore.RESET,%0A%09%09%09%09row%5B1
%5D,%0A%09
@@ -699,17 +699,17 @@
N + row%5B
-1
+2
%5D + Fore
@@ -739,17 +739,17 @@
str(row%5B
-2
+3
%5D) + For
@@ -765,17 +765,17 @@
%09%09%09%09row%5B
-3
+4
%5D,%0A%09%09%5D%0A%09
@@ -942,24 +942,30 @@
le to fetch
+table
data%22)%0A%09retu
@@ -1199,16 +1199,102 @@
name):%0A
+%09 %09%09%09area= table%5B0%5D%0A%09 %09%09%09rowList= list(row)%0A%09 %09%09%09rowList.insert(0,area)%0A%09 %09%09%09%0A
%09 %09%09%09d
@@ -1311,13 +1311,14 @@
(row
-)%09
+List)%0A
%0A%0A%09e
@@ -1353,21 +1353,18 @@
able to
-fetch
+%E5%B0%8F%E5%8C%BA
data%22)%0A
|
864669eb606f0831c6503894c87c62ea3841654e
|
fix for HUnion
|
hwt/hdl/types/utils.py
|
hwt/hdl/types/utils.py
|
from typing import Union, List
from hwt.hdl.types.array import HArray
from hwt.hdl.types.bits import Bits
from hwt.hdl.types.hdlType import HdlType
from hwt.hdl.types.stream import HStream
from hwt.hdl.types.struct import HStruct
from hwt.hdl.types.typeCast import toHVal
from hwt.hdl.types.union import HUnion
from hwt.hdl.value import Value
from hwt.synthesizer.rtlLevel.mainBases import RtlSignalBase
def walkFlattenFields(sigOrVal: Union[RtlSignalBase, Value], skipPadding=True):
"""
Walk all simple values in HStruct or HArray
"""
t = sigOrVal._dtype
if isinstance(t, Bits):
yield sigOrVal
elif isinstance(t, HUnion):
yield from walkFlattenFields(sigOrVal._val, skipPadding=skipPadding)
elif isinstance(t, HStruct):
for f in t.fields:
isPadding = f.name is None
if not isPadding or not skipPadding:
if isPadding:
v = f.dtype.from_py(None)
else:
v = getattr(sigOrVal, f.name)
yield from walkFlattenFields(v)
elif isinstance(t, HArray):
for item in sigOrVal:
yield from walkFlattenFields(item)
elif isinstance(t, HStream):
assert isinstance(sigOrVal, Value), sigOrVal
for v in sigOrVal:
yield from walkFlattenFields(v)
else:
raise NotImplementedError(t)
def HdlValue_unpack(t: HdlType,
data: List[Union[Value, RtlSignalBase, int]],
getDataFn=None, dataWidth=None):
"""
Parse raw Bits array to a value of specified HdlType
"""
if getDataFn is None:
assert dataWidth is not None
def _getDataFn(x):
return toHVal(x)._auto_cast(Bits(dataWidth))
getDataFn = _getDataFn
val = t.from_py(None)
fData = iter(data)
# "actual" is the storage variable for items taken from fData
actualOffset = 0
actual = None
for v in walkFlattenFields(val, skipPadding=False):
# walk flatten fields and take values from fData and parse them to
# field
required = v._dtype.bit_length()
if actual is None:
actualOffset = 0
try:
actual = getDataFn(next(fData))
except StopIteration:
raise ValueError("Input data too short")
if dataWidth is None:
dataWidth = actual._dtype.bit_length()
actuallyHave = dataWidth
else:
actuallyHave = actual._dtype.bit_length() - actualOffset
while actuallyHave < required:
# collect data for this field
try:
d = getDataFn(next(fData))
except StopIteration:
raise ValueError("Input data too short")
actual = d._concat(actual)
actuallyHave += dataWidth
if actuallyHave >= required:
# parse value of actual to field
# skip padding
_v = actual[(required + actualOffset):actualOffset]
_v = _v._auto_cast(v._dtype)
v.val = _v.val
v.vld_mask = _v.vld_mask
# update slice out what was taken
actuallyHave -= required
actualOffset += required
if actuallyHave == 0:
actual = None
if actual is not None:
assert actual._dtype.bit_length(
) - actualOffset < dataWidth, (
"It should be just a padding at the end of frame"
)
return val
def is_only_padding(t: HdlType):
if isinstance(t, (HStruct, HUnion)):
for f in t.fields:
if f.name is not None and not is_only_padding(f.dtype):
return False
return True
elif isinstance(t, (HArray, HStream)):
return is_only_padding(t.element_t)
return False
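The accumulate-and-slice loop in HdlValue_unpack() above can be followed with a dependency-free sketch that uses plain integers as the data words — the field widths below are made up:
def unpack_fields(words, word_width, field_widths):
    # Later words land in higher bits (mirrors d._concat(actual) above),
    # and each field is sliced off the low end of the accumulator.
    acc, have, out = 0, 0, []
    it = iter(words)
    for width in field_widths:
        while have < width:
            acc |= next(it) << have
            have += word_width
        out.append(acc & ((1 << width) - 1))
        acc >>= width
        have -= width
    return out

print(unpack_fields([0xABCD], 16, [4, 8, 4]))  # [13, 188, 10] == [0xD, 0xBC, 0xA]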
|
Python
| 0.000005
|
@@ -3578,25 +3578,15 @@
(t,
-(
HStruct
-, HUnion)
):%0A
|
94dfdbeae55d4c47c7b1161c68795429ebc0687a
|
fix pprintInterface for unit with array intf
|
hwt/simulator/utils.py
|
hwt/simulator/utils.py
|
from random import Random
import sys
from hwt.serializer.serializerClases.indent import getIndent
from hwt.synthesizer.interfaceLevel.interfaceUtils.proxy import InterfaceProxy
def valueHasChanged(valA, valB):
return valA.val is not valB.val or valA.vldMask != valB.vldMask
def agent_randomize(agent, timeQuantum, seed):
random = Random(seed)
def randomEnProc(simulator):
# small space at start to modify agents when they are inactive
yield simulator.wait(timeQuantum / 4)
while True:
agent.enable = random.random() < 0.5
delay = int(random.random() * timeQuantum)
yield simulator.wait(delay)
return randomEnProc
def pprintInterface(intf, prefix="", indent=0, file=sys.stdout):
"""
Pretty print interface
"""
try:
s = intf._sig
except AttributeError:
s = ""
if s is not "":
s = repr(s)
file.write("".join([getIndent(indent), prefix, repr(intf._getFullName()), " ", s]))
file.write("\n")
for i in intf._interfaces:
if isinstance(intf, InterfaceProxy):
assert isinstance(i, InterfaceProxy), (intf, i)
pprintInterface(i, indent=indent + 1, file=file)
if intf._arrayElemCache:
assert len(intf) == len(intf._arrayElemCache)
for i, p in enumerate(intf):
pprintInterface(p, prefix="p%d:" % i, indent=indent + 1, file=file)
def _pprintAgents(intf, indent, prefix="", file=sys.stdout):
if intf._ag is not None:
file.write("%s%s%r\n" % (getIndent(indent), prefix, intf._ag))
for i in intf._interfaces:
_pprintAgents(i, indent + 1, file=file)
if intf._arrayElemCache:
assert len(intf) == len(intf._arrayElemCache)
for i, p in enumerate(intf):
_pprintAgents(p, indent + 1, prefix="p%d:" % i, file=file)
def pprintAgents(unitOrIntf, indent=0, file=sys.stdout):
"""
Pretty print agents
"""
prefix = unitOrIntf._name + ":"
for intf in unitOrIntf._interfaces:
_pprintAgents(intf, indent, prefix, file=file)
|
Python
| 0
|
@@ -171,16 +171,83 @@
ceProxy%0A
+from hwt.synthesizer.interfaceLevel.mainBases import InterfaceBase%0A
%0A%0Adef va
@@ -970,16 +970,22 @@
s =
+ %22 %22 +
repr(s)
@@ -1067,13 +1067,8 @@
()),
- %22 %22,
s%5D)
@@ -1491,17 +1491,16 @@
)%0A%0A%0Adef
-_
pprintAg
@@ -1496,33 +1496,39 @@
ef pprintAgents(
-i
+unitOrI
ntf, indent, pre
@@ -1514,32 +1514,34 @@
itOrIntf, indent
+=0
, prefix=%22%22, fil
@@ -1567,100 +1567,383 @@
if i
-ntf._ag is not None:%0A file.write(%22%25s%25s%25r%5Cn%22 %25 (getIndent(indent), prefix, intf._ag
+sinstance(unitOrIntf, InterfaceBase):%0A ag = unitOrIntf._ag%0A arrayElemCache = unitOrIntf._arrayElemCache%0A else:%0A ag = None%0A arrayElemCache = None%0A%0A if ag is not None:%0A file.write(%22%25s%25s%25r%5Cn%22 %25 (getIndent(indent), prefix, ag))%0A elif arrayElemCache:%0A file.write(%22%25s%25s%5Cn%22 %25 (getIndent(indent), prefix + unitOrIntf._name + %22:%22
))%0A
+%0A
@@ -1943,33 +1943,39 @@
)%0A%0A for i in
-i
+unitOrI
ntf._interfaces:
@@ -1975,33 +1975,32 @@
rfaces:%0A
-_
pprintAgents(i,
@@ -2022,38 +2022,32 @@
e=file)%0A%0A if
-intf._
arrayElemCache:%0A
@@ -2057,33 +2057,39 @@
assert len(
-i
+unitOrI
ntf) == len(intf
@@ -2076,38 +2076,32 @@
tOrIntf) == len(
-intf._
arrayElemCache)%0A
@@ -2122,33 +2122,39 @@
p in enumerate(
-i
+unitOrI
ntf):%0A
@@ -2147,33 +2147,32 @@
f):%0A
-_
pprintAgents(p,
@@ -2217,234 +2217,4 @@
le)%0A
-%0A%0Adef pprintAgents(unitOrIntf, indent=0, file=sys.stdout):%0A %22%22%22%0A Pretty print agents%0A %22%22%22%0A prefix = unitOrIntf._name + %22:%22%0A for intf in unitOrIntf._interfaces:%0A _pprintAgents(intf, indent, prefix, file=file)%0A
|
210c5e14084224f936aeb12652dd2772efd75a08
|
Remove a test logging statement.
|
heat-translator/tosca/elements/nodetype.py
|
heat-translator/tosca/elements/nodetype.py
|
from capabilitytype import CapabilityTypeDef
from interfacestype import InterfacesTypeDef
from tosca.log.toscalog import logger
import os
from properties import PropertyDef
import relationshiptype
from relationshiptype import RelationshipType
from statefulentitytype import StatefulEntityType
from yaml_parser import Parser
nodetype_def_file = (os.path.dirname(os.path.abspath(__file__))
+ os.sep + 'defs' + os.sep + "nodetypesdef.yaml")
nodetype_def = Parser(nodetype_def_file).load()
SECTIONS = (DERIVED_FROM, PROPERTIES, REQUIREMENTS,
INTERFACES, CAPABILITIES) = \
('derived_from', 'properties', 'requirements', 'interfaces',
'capabilities')
class NodeTypes(object):
'''Tosca built-in node types'''
def __init__(self):
logger.info('adsf')
self.defs = nodetype_def
def __contains__(self, key):
return key in self.defs
def __iter__(self):
return iter(self.defs)
def __len__(self):
return len(self.defs)
def __getitem__(self, key):
'''Get a section.'''
return self.defs[key]
class NodeType(StatefulEntityType):
'''Tosca built-in node type'''
def __init__(self, type):
super(NodeType, self).__init__()
self.type = type
self.defs = NodeTypes()[type]
self.related = {}
def _derivedfrom(self):
return self._get_value(DERIVED_FROM)
def derivedfrom(self):
if self._derivedfrom():
return NodeType(self._get_value(DERIVED_FROM))
def properties(self):
'''returns a list of property objects '''
properties = []
props = self._get_value(PROPERTIES)
if props:
for prop in props:
properties.append(PropertyDef(prop, self.type))
return properties
def relationship(self):
'''returns a dictionary containing relationship to a particular
node type '''
relationship = {}
requirs = self.requirements()
if requirs is None:
requirs = self._get_value(REQUIREMENTS, True)
if requirs:
for req in requirs:
for x, y in req.iteritems():
relation = self.get_relation(x, y)
rtype = RelationshipType(relation)
relatednode = self.ntype(x, y)
relationship[rtype] = relatednode
return relationship
@classmethod
def ntype(cls, key, ndtype):
return cls(ndtype)
def capabilities(self):
'''returns a list of capability objects '''
capabilities = []
self.prop_val = None
caps = self._get_value(CAPABILITIES)
if caps is None:
caps = self._get_value(CAPABILITIES, True)
if caps:
for name, value in caps.iteritems():
for x, y in value.iteritems():
if x == 'type':
self.__set_cap_type(y)
if x == 'properties':
self.__set_prop_type(y)
cap = CapabilityTypeDef(name, self.type_val,
self.type, self.prop_val)
capabilities.append(cap)
else:
logger.info('%s does not provide capabilities. ' % self.type)
return capabilities
def requirements(self):
return self._get_value(REQUIREMENTS)
def has_relationship(self):
return self.relationship()
@classmethod
def get_relation(cls, key, ndtype):
relation = None
ntype = cls(ndtype)
cap = ntype.capabilities()
for c in cap:
if c.name == key:
rtypedef = relationshiptype.relationship_def
for relationship, properties in rtypedef.iteritems():
for y in properties.itervalues():
if c.type in y:
relation = relationship
break
if relation:
break
return relation
def interfaces(self):
return self._get_value(INTERFACES)
def lifecycle_inputs(self):
inputs = []
interfaces = self.interfaces()
if interfaces:
for name, value in interfaces.iteritems():
if name == 'tosca.interfaces.node.Lifecycle':
for x, y in value.iteritems():
if x == 'inputs':
for i in y.iterkeys():
inputs.append(i)
else:
logger.info('%s does not have life cycle input. ' % self.type)
return inputs
def lifecycle_operations(self):
'''return available life cycle operations if found, None otherwise.'''
ops = None
interfaces = self.interfaces()
if interfaces:
i = InterfacesTypeDef(self.type, 'tosca.interfaces.node.Lifecycle')
ops = i.lifecycle_ops()
else:
logger.info('%s does not have life cycle operation. ' % self.type)
return ops
def __set_cap_type(self, value):
self.type_val = value
def __set_prop_type(self, value):
self.prop_val = value
def get_capability(self, name):
for key, value in self.capabilities():
if key == name:
return value
def get_capability_type(self, name):
for key, value in self.get_capability(name):
if key == type:
return value
def _get_value(self, ndtype, parent=None):
value = None
if ndtype in self.defs:
value = self.defs[ndtype]
if parent and not value:
p = self.derivedfrom()
while value is None:
#check parent node
if not p:
break
if p and p.type == 'tosca.nodes.Root':
break
value = p._get_value(ndtype)
p = p.derivedfrom()
return value
def add_next(self, nodetpl, relationship):
self.related[nodetpl] = relationship
def get_relatednodes(self):
return self.related.keys()
def get_type(self):
return self.type
def get_relationship(self, nodetpl):
if nodetpl in self.related:
return self.related[nodetpl]
|
Python
| 0.000001
|
@@ -787,36 +787,8 @@
f):%0A
- logger.info('adsf')%0A
|
222e2bf4728440fdff2675756b4aa08aba4585fb
|
Update __init__.py
|
app/__init__.py
|
app/__init__.py
|
from flask import Flask, render_template
from flask.ext.mail import Mail
from flask.ext.login import LoginManager
from flask.ext.moment import Moment
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.pagedown import PageDown
from flask.ext.flatpages import FlatPages
from config import config
mail = Mail()
moment = Moment()
pagedown = PageDown()
pages = FlatPages()
db = SQLAlchemy()
login_manager = LoginManager()
login_manager.session_protection = 'strong'
login_manager.login_view = 'auth.login'
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
config[config_name].init_app(app)
if not app.debug and not app.testing and not app.config['SSL_DISABLE']:
from flask.ext.sslify import SSLify
sslify = SSLify(app)
mail.init_app(app)
moment.init_app(app)
pagedown.init_app(app)
pages.init_app(app)
db.init_app(app)
login_manager.init_app(app)
from main import main as main_blueprint
from .auth import auth as auth_blueprint
from .api_1_0 import api as api_1_0_blueprint
app.register_blueprint(auth_blueprint, url_prefix='/auth')
app.register_blueprint(main_blueprint)
app.register_blueprint(api_1_0_blueprint, url_prefix='/api/v1.0')
return app
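Typical usage of the factory above from a separate entry point — the 'development' config name is an assumption about what the config module defines:
from app import create_app

app = create_app('development')

if __name__ == '__main__':
    app.run()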
|
Python
| 0.000072
|
@@ -296,16 +296,42 @@
config%0A%0A
+from .util import assets%0A%0A
mail = M
|
690696493f110899282ad22f9b02d3d0fd91fe31
|
Rewrite wirecloud.catalogue.admin module
|
src/wirecloud/catalogue/admin.py
|
src/wirecloud/catalogue/admin.py
|
# -*- coding: utf-8 -*-
#...............................licence...........................................
#
# (C) Copyright 2008 Telefonica Investigacion y Desarrollo
# S.A.Unipersonal (Telefonica I+D)
#
# This file is part of Morfeo EzWeb Platform.
#
# Morfeo EzWeb Platform is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Morfeo EzWeb Platform is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Morfeo EzWeb Platform. If not, see <http://www.gnu.org/licenses/>.
#
# Info about members and contributors of the MORFEO project
# is available at
#
# http://morfeo-project.org
#
#...............................licence...........................................#
#
from django.contrib import admin
from wirecloud.catalogue.models import CatalogueResource, WidgetWiring
from wirecloud.catalogue.models import UserTag, UserVote, Tag, Category
class CategoyAdminView(admin.ModelAdmin):
filter_horizontal = ('tags',)
verbose_name_plural = 'Categories'
class CatalogueResourceAdmin(admin.ModelAdmin):
search_fields = ['short_name', 'vendor', 'author']
list_display = ['short_name', 'vendor', 'author', 'resource_type']
verbose_name_plural = 'Resources'
admin.site.register(CatalogueResource, CatalogueResourceAdmin)
admin.site.register(WidgetWiring)
admin.site.register(UserTag)
admin.site.register(UserVote)
admin.site.register(Tag)
admin.site.register(Category, CategoyAdminView)
|
Python
| 0.000002
|
@@ -23,201 +23,77 @@
-%0A%0A#
-...............................licence...........................................%0A#%0A# (C) Copyright 2008 Telefonica Investigacion y Desarrollo%0A# S.A.Unipersonal (Telefonica I+D)%0A#%0A#
+ Copyright (c) 2013 CoNWeT Lab., Universidad Polit%C3%A9cnica de Madrid%0A%0A#
Thi
@@ -114,60 +114,31 @@
of
-Morfeo EzWeb Platform
+Wirecloud
.%0A
-#
%0A#
- Morfeo EzWeb Platform
+Wirecloud
is
@@ -191,20 +191,16 @@
modify%0A#
-
it unde
@@ -269,20 +269,16 @@
hed by%0A#
-
the Fre
@@ -337,20 +337,16 @@
se, or%0A#
-
(at you
@@ -378,37 +378,20 @@
on.%0A
-#
%0A#
- Morfeo EzWeb Platform
+Wirecloud
is
@@ -439,20 +439,16 @@
seful,%0A#
-
but WIT
@@ -504,20 +504,16 @@
nty of%0A#
-
MERCHAN
@@ -568,20 +568,16 @@
ee the%0A#
-
GNU Aff
@@ -621,23 +621,18 @@
etails.%0A
-#
%0A#
-
You sho
@@ -698,20 +698,16 @@
icense%0A#
-
along w
@@ -714,29 +714,17 @@
ith
-Morfeo EzWeb Platform
+Wirecloud
. I
@@ -770,514 +770,99 @@
/%3E.%0A
-#%0A# Info about members and contributors of the MORFEO project%0A# is available at%0A#%0A# http://morfeo-project.org%0A#%0A#...............................licence...........................................#%0A%0A%0A#%0A%0Afrom django.contrib import admin%0A%0Afrom wirecloud.catalogue.models import CatalogueResource, WidgetWiring%0Afrom wirecloud.catalogue.models import UserTag, UserVote, Tag, Category%0A%0A%0Aclass CategoyAdminView(admin.ModelAdmin):%0A filter_horizontal = ('tags',)%0A verbose_name_plural = 'Categories'
+%0Afrom django.contrib import admin%0A%0Afrom wirecloud.catalogue.models import CatalogueResource
%0A%0A%0Ac
@@ -929,17 +929,27 @@
ields =
-%5B
+('vendor',
'short_n
@@ -949,36 +949,37 @@
short_name', 've
-ndor
+rsion
', 'author'%5D%0A
@@ -973,17 +973,17 @@
'author'
-%5D
+)
%0A lis
@@ -998,9 +998,19 @@
y =
-%5B
+('vendor',
'sho
@@ -1026,22 +1026,13 @@
've
-ndor', 'author
+rsion
', '
@@ -1049,9 +1049,9 @@
ype'
-%5D
+)
%0A
@@ -1153,170 +1153,4 @@
in)%0A
-admin.site.register(WidgetWiring)%0Aadmin.site.register(UserTag)%0Aadmin.site.register(UserVote)%0Aadmin.site.register(Tag)%0Aadmin.site.register(Category, CategoyAdminView)%0A
|
bc467365ebd287d96109ea0771403a10d3f56580
|
set upload limit
|
app/__init__.py
|
app/__init__.py
|
from flask import Flask
from flask_bootstrap import Bootstrap
from flask_mail import Mail
from flask_moment import Moment
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_pagedown import PageDown
from config import config
import os
import flask_sijax
bootstrap = Bootstrap()
mail = Mail()
moment = Moment()
db = SQLAlchemy()
pagedown = PageDown()
login_manager = LoginManager()
login_manager.session_protection = 'strong'
login_manager.login_view = 'auth.login'
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
config[config_name].init_app(app)
bootstrap.init_app(app)
mail.init_app(app)
moment.init_app(app)
db.init_app(app)
login_manager.init_app(app)
pagedown.init_app(app)
if not app.debug and not app.testing and not app.config['SSL_DISABLE']:
from flask_sslify import SSLify
sslify = SSLify(app)
from .main import main as main_blueprint
app.register_blueprint(main_blueprint)
from .auth import auth as auth_blueprint
app.register_blueprint(auth_blueprint, url_prefix='/auth')
from .api_1_0 import api as api_1_0_blueprint
app.register_blueprint(api_1_0_blueprint, url_prefix='/api/v1.0')
app.config['SIJAX_STATIC_PATH'] = os.path.join('.', os.path.dirname(__file__), 'static/js/sijax/')
app.config['SIJAX_JSON_URI'] = '/static/js/sijax/json2.js'
flask_sijax.Sijax(app)
return app
|
Python
| 0.000001
|
@@ -553,16 +553,72 @@
name__)%0A
+ app.config%5B'MAX_CONTENT_LENGTH'%5D = 16 * 1024 * 1024%0A
app.
|
a2e5e2d5b75acafe5b1de0b92a9206a6a2ec4d25
|
Fix py36 unit tests
|
blazar/tests/api/test_root.py
|
blazar/tests/api/test_root.py
|
# Copyright (c) 2014 Bull.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_serialization import jsonutils
from blazar.tests import api
class TestRoot(api.APITest):
def setUp(self):
super(TestRoot, self).setUp()
self.versions = jsonutils.dump_as_bytes(
{"versions":
[{"status": "CURRENT",
"id": "v2.0",
"links": [{"href": "http://localhost/v2", "rel": "self"}]}]})
def test_version_discovery_root(self):
response = self.get_json('/',
expect_errors=True,
path_prefix='')
self.assertEqual(300, response.status_int)
self.assertEqual("application/json", response.content_type)
self.assertEqual(self.versions, response.body)
def test_version_discovery_versions(self):
response = self.get_json('/versions',
expect_errors=True,
path_prefix='')
self.assertEqual(300, response.status_int)
self.assertEqual("application/json", response.content_type)
self.assertEqual(self.versions, response.body)
def test_bad_uri(self):
response = self.get_json('/bad/path',
expect_errors=True,
path_prefix='')
self.assertEqual(response.status_int, 404)
self.assertEqual(response.content_type, "text/plain")
|
Python
| 0.000008
|
@@ -572,50 +572,8 @@
e.%0A%0A
-from oslo_serialization import jsonutils%0A%0A
from
@@ -597,16 +597,16 @@
ort api%0A
+
%0A%0Aclass
@@ -715,32 +715,9 @@
s =
-jsonutils.dump_as_bytes(
+%7B
%0A
@@ -725,17 +725,16 @@
-%7B
%22version
@@ -737,17 +737,16 @@
sions%22:%0A
-
@@ -786,17 +786,16 @@
-
%22id%22: %22v
@@ -800,17 +800,16 @@
%22v2.0%22,%0A
-
@@ -874,17 +874,16 @@
lf%22%7D%5D%7D%5D%7D
-)
%0A%0A de
@@ -1219,36 +1219,36 @@
sions, response.
-body
+json
)%0A%0A def test_
@@ -1601,12 +1601,12 @@
nse.
-body
+json
)%0A%0A
|
1bde8a92f47d49c6bea286a66fe89a3ccaca80a0
|
Fix for .env being loaded for manage.py commands
|
app/__init__.py
|
app/__init__.py
|
from flask import Flask
from flask_bootstrap import Bootstrap
from flask_moment import Moment
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_pagedown import PageDown
from config import config
bootstrap = Bootstrap()
moment = Moment()
db = SQLAlchemy()
pagedown = PageDown()
login_manager = LoginManager()
login_manager.session_protection = 'strong'
login_manager.login_view = 'auth.login'
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
config[config_name].init_app(app)
bootstrap.init_app(app)
# Set jquery version
from flask_bootstrap import WebCDN
app.extensions['bootstrap']['cdns']['jquery'] = WebCDN(
'//cdnjs.cloudflare.com/ajax/libs/jquery/3.2.1/'
)
moment.init_app(app)
db.init_app(app)
login_manager.init_app(app)
pagedown.init_app(app)
if not app.debug and not app.testing and not app.config['SSL_DISABLE']:
from flask_sslify import SSLify
sslify = SSLify(app)
from .main import main as main_blueprint
app.register_blueprint(main_blueprint)
from .auth import auth as auth_blueprint
app.register_blueprint(auth_blueprint, url_prefix='/auth')
# Tell browser not to cache any HTML responses, as most pages have
# sensitive information in them. (But CSS should be cached as normal.)
@app.after_request
def apply_caching(response):
if response.headers.get('Content-Type', '').startswith('text/html'):
response.headers['Cache-control'] = 'no-store'
response.headers['Pragma'] = 'no-cache'
return response
return app
|
Python
| 0
|
@@ -203,34 +203,8 @@
Down
-%0Afrom config import config
%0A%0Abo
@@ -456,16 +456,193 @@
name__)%0A
+ # import config here rather than at module level to ensure that .env values%0A # are loaded into the environment first when running manage.py%0A from config import config%0A
app.
|
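Decoded from the diff above: the module-level from config import config is removed and the import moves inside create_app, so manage.py can load .env values into the environment before the config module is evaluated. A sketch of the start of the patched factory (the remainder of create_app is unchanged):

def create_app(config_name):
    app = Flask(__name__)
    # import config here rather than at module level to ensure that .env values
    # are loaded into the environment first when running manage.py
    from config import config
    app.config.from_object(config[config_name])
    config[config_name].init_app(app)
    # ... rest of create_app unchanged ...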
cea50cbe5e0b16758c5eada3a16d121d2880c6ce
|
Fix PEP8 issue
|
i3pystatus/pomodoro.py
|
i3pystatus/pomodoro.py
|
import subprocess
from datetime import datetime, timedelta
from i3pystatus import IntervalModule
STOPPED = 0
RUNNING = 1
BREAK = 2
class Pomodoro(IntervalModule):
"""
    This plugin shows a Pomodoro timer.
Left click starts/restarts timer.
Right click stops it.
"""
settings = (
('sound',
'Path to sound file to play as alarm. Played by "aplay" utility'),
('pomodoro_duration',
'Working (pomodoro) interval duration in seconds'),
('break_duration', 'Short break duration in seconds'),
('long_break_duration', 'Long break duration in seconds'),
('short_break_count', 'Short break count before first long break'),
('format', 'format string, available formatters: current_pomodoro, '
'total_pomodoro, time')
)
required = ('sound',)
color_stopped = '#2ECCFA'
color_running = '#FFFF00'
color_break = '#37FF00'
interval = 1
short_break_count = 3
format = '☯ {current_pomodoro}/{total_pomodoro} {time}'
pomodoro_duration = 25 * 60
break_duration = 5 * 60
long_break_duration = 15 * 60
on_rightclick = "stop"
on_leftclick = "start"
def init(self):
# state could be either running/break or stopped
self.state = STOPPED
self.current_pomodoro = 0
self.total_pomodoro = self.short_break_count + 1 # and 1 long break
self.time = None
def run(self):
if self.time and datetime.utcnow() >= self.time:
if self.state == RUNNING:
self.state = BREAK
if self.breaks == self.short_break_count:
self.time = datetime.utcnow() + \
timedelta(seconds=self.long_break_duration)
else:
self.time = datetime.utcnow() + \
timedelta(seconds=self.break_duration)
text = 'Go for a break!'
else:
self.state = RUNNING
self.time = datetime.utcnow() + \
timedelta(seconds=self.pomodoro_duration)
text = 'Back to work!'
self.current_pomodoro = (self.current_pomodoro + 1) % self.total_pomodoro
self._alarm(text)
if self.state == RUNNING or self.state == BREAK:
min, sec = divmod((self.time - datetime.utcnow()).total_seconds(), 60)
text = '{:02}:{:02}'.format(int(min), int(sec))
sdict = {
'time': text,
'current_pomodoro': self.current_pomodoro + 1,
'total_pomodoro': self.total_pomodoro,
}
color = self.color_running if self.state == RUNNING else self.color_break
text = self.format.format(**sdict)
else:
text = 'Start pomodoro',
color = self.color_stopped
self.output = {
'full_text': text,
'color': color
}
def start(self):
self.state = RUNNING
self.time = datetime.utcnow() + timedelta(seconds=self.pomodoro_duration)
self.current_pomodoro = 0
def stop(self):
self.state = STOPPED
self.time = None
def _alarm(self, text):
subprocess.call(['notify-send',
'Alarm!',
text])
subprocess.Popen(['aplay',
self.sound,
'-q'],
stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
|
Python
| 0
|
@@ -2500,16 +2500,17 @@
+
'time':
@@ -2507,32 +2507,33 @@
'time': text,%0A
+
'
@@ -2585,32 +2585,33 @@
%0A
+
'total_pomodoro'
@@ -2631,17 +2631,16 @@
pomodoro
-,
%0A
|
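Decoded, the diff is a pure whitespace-and-comma fix: the three entries of the sdict literal gain one space of indentation so the continuation lines align, and the stray trailing comma after 'Start pomodoro' (which made text a one-element tuple) is removed. The affected lines after the patch (indentation approximate, as the dump collapses whitespace):

            sdict = {
                'time': text,
                'current_pomodoro': self.current_pomodoro + 1,
                'total_pomodoro': self.total_pomodoro,
            }
            color = self.color_running if self.state == RUNNING else self.color_break
            text = self.format.format(**sdict)
        else:
            text = 'Start pomodoro'  # trailing comma removed; text is a str again
            color = self.color_stopped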
fb236951e1658beb32bd6dc45cf8d49a4636162a
|
Add tests for repr on tables
|
blaze/api/tests/test_table.py
|
blaze/api/tests/test_table.py
|
from blaze.api.table import Table, compute, table_repr
from blaze.data.python import Python
from blaze.compute.core import compute
from blaze.compute.python import compute
from datashape import dshape
data = (('Alice', 100),
('Bob', 200))
t = Table(data, columns=['name', 'amount'])
def test_resources():
assert t.resources() == {t: t.data}
def test_compute():
assert compute(t) == data
def test_compute():
assert list(compute(t['amount'] + 1)) == [101, 201]
def test_create_with_schema():
t = Table(data, schema='{name: string, amount: float32}')
assert t.schema == dshape('{name: string, amount: float32}')
def test_create_with_raw_data():
t = Table(data, columns=['name', 'amount'])
assert t.schema == dshape('{name: string, amount: int64}')
assert t.name
assert t.data == data
def test_create_with_data_descriptor():
schema='{name: string, amount: int64}'
ddesc = Python(data, schema=schema)
t = Table(ddesc)
assert t.schema == dshape(schema)
assert t.name
assert t.data == ddesc
def test_repr():
result = table_repr(t['name'])
print(result)
assert isinstance(result, str)
assert 'Alice' in result
assert 'Bob' in result
assert '...' not in result
result = table_repr(t['amount'] + 1)
print(result)
assert '101' in result
t2 = Table(tuple((i, i**2) for i in range(100)), columns=['x', 'y'])
result = table_repr(t2)
print(result)
assert len(result.split('\n')) < 20
assert '...' in result
|
Python
| 0.000005
|
@@ -195,16 +195,36 @@
dshape%0A%0A
+import pandas as pd%0A
%0Adata =
@@ -1536,20 +1536,382 @@
ert '...' in result%0A
+%0A%0Adef test_mutable_backed_repr():%0A mutable_data = %5Brange(2)%5D%0A mutable_backed_table = Table(mutable_data, columns=%5B%22mutable%22%5D)%0A repr(mutable_backed_table)%0A%0A%0Adef test_dataframe_backed_repr():%0A mutable_data = range(2)%0A df = pd.DataFrame(data=mutable_data, columns=%5B%22mutable%22%5D)%0A dataframe_backed_table = Table(df)%0A repr(dataframe_backed_table)%0A%0A
|
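Decoded from the diff, the patch adds a pandas import after the datashape import and appends two regression tests that exercise repr on mutable-list-backed and DataFrame-backed tables:

import pandas as pd


def test_mutable_backed_repr():
    mutable_data = [range(2)]
    mutable_backed_table = Table(mutable_data, columns=["mutable"])
    repr(mutable_backed_table)


def test_dataframe_backed_repr():
    mutable_data = range(2)
    df = pd.DataFrame(data=mutable_data, columns=["mutable"])
    dataframe_backed_table = Table(df)
    repr(dataframe_backed_table)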
a085573261c0ed69b6bcabc40c4914a1623dc757
|
Add link to FB
|
bot/app/buffer.py
|
bot/app/buffer.py
|
from buffpy import API
from buffpy.managers.profiles import Profiles
from spacelaunchnow import config
hashtags = '''\n
.
.
.⠀⠀
.⠀⠀
.⠀⠀
#SpaceLaunchNow #space #spacex #nasa #rocket #mars #aerospace #earth #solarsystem #iss #elonmusk
#moonlanding #spaceshuttle #spacewalk #esa #science #picoftheday #blueorigin #Florida #Falcon9
#falconheavy #starship #ULA'''
class BufferAPI:
def __init__(self, debug=None):
if debug is None:
self.DEBUG = config.DEBUG
else:
self.DEBUG = debug
self.api = API(client_id=config.BUFFER_CLIENT_ID,
client_secret=config.BUFFER_SECRET_ID,
access_token=config.BUFFER_ACCESS_TOKEN)
def send_to_all(self, message: str = None, image: str = None, link: str = None, now: bool = False):
profiles = Profiles(api=self.api).all()
for profile in profiles:
_message = message
if profile['service'] == 'instagram' and image is None:
continue
if profile['service'] == 'twitter':
if len(_message) > 280:
_message = (_message[:277] + '...')
profile.updates.new(text=_message, photo=image, link=link, now=now)
def send_to_instagram(self, message: str = None, image: str = None, now: bool = False):
profile = Profiles(api=self.api).filter(service='instagram')[0]
return profile.updates.new(text=message, photo=image, now=now)
def send_to_facebook(self, message: str = None, image: str = None, link: str = None, now: bool = False):
profile = Profiles(api=self.api).filter(service='facebook')[0]
return profile.updates.new(text=message, photo=image, now=now)
def send_to_twitter(self, message: str = None, image: str = None, link: str = None, now: bool = False):
if len(message) > 280:
message = (message[:277] + '...')
profile = Profiles(api=self.api).filter(service='twitter')[0]
return profile.updates.new(text=message, photo=image, link=link, now=now)
|
Python
| 0
|
@@ -1650,24 +1650,85 @@
cebook')%5B0%5D%0A
+ if link:%0A message = message + %22%5Cn%22 + link%0A
retu
|
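Decoded, the patch appends the link to the message body in send_to_facebook, since the Facebook update is created without a link argument. The patched method:

    def send_to_facebook(self, message: str = None, image: str = None, link: str = None, now: bool = False):
        profile = Profiles(api=self.api).filter(service='facebook')[0]
        if link:
            message = message + "\n" + link
        return profile.updates.new(text=message, photo=image, now=now)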
d89252a2bbbe0677d2ad184f4c519e2b4d6ee9bd
|
Add JSON to data.
|
bot/serializer.py
|
bot/serializer.py
|
from bot.models import Launch, Notification, DailyDigestRecord
from rest_framework import serializers
class NotificationSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Notification
fields = (
'launch', 'url', 'wasNotifiedTwentyFourHour', 'wasNotifiedOneHour', 'wasNotifiedTenMinutes',
'wasNotifiedDailyDigest', 'last_twitter_post', 'last_net_stamp',
'last_net_stamp_timestamp'
)
extra_kwargs = {
'id': {'read_only': False},
'slug': {'validators': []},
}
class DailyDigestRecordSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = DailyDigestRecord
fields = (
'url', 'timestamp', 'messages', 'count', 'data'
)
class LaunchSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Launch
fields = (
'id', 'name', 'url', 'status', 'netstamp', 'wsstamp', 'westamp', 'location_name', 'rocket_name',
'mission_name'
)
def create(self, validated_data):
launch = Launch.objects.get_or_create(**validated_data)
try:
if Notification.objects.get(launch=launch[0]) is None:
Notification.objects.get_or_create(launch=launch[0])
except:
Notification.objects.get_or_create(launch=launch[0])
return launch
def update(self, instance, validated_data):
instance.id = validated_data.get('id', instance.id)
instance.name = validated_data.get('name', instance.name)
instance.status = validated_data.get('status', instance.status)
instance.netstamp = validated_data.get('netstamp', instance.netstamp)
instance.wsstamp = validated_data.get('wsstamp', instance.wsstamp)
instance.westamp = validated_data.get('westamp', instance.westamp)
instance.location_name = validated_data.get('location_name', instance.location_name)
instance.rocket_name = validated_data.get('rocket_name', instance.rocket_name)
instance.mission_name = validated_data.get('mission_name', instance.mission_name)
instance.save()
return instance
def get_object(self):
return self.model(self.validated_data)
|
Python
| 0.000001
|
@@ -727,79 +727,17 @@
s =
-(%0A 'url', 'timestamp', 'messages', 'count', 'data'%0A )
+'__all__'
%0A%0A%0Ac
|
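Decoded, the patch replaces the explicit field tuple of DailyDigestRecordSerializer with Django REST Framework's '__all__' shorthand:

class DailyDigestRecordSerializer(serializers.HyperlinkedModelSerializer):
    class Meta:
        model = DailyDigestRecord
        fields = '__all__'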
43a53981c3da2db8a4d06c883cd72442b72eb4be
|
Update spec_driven_model/tests/fake_mixin.py
|
spec_driven_model/tests/fake_mixin.py
|
spec_driven_model/tests/fake_mixin.py
|
# Copyright 2021 Akretion - Raphael Valyi <raphael.valyi@akretion.com>
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl-3.0.en.html).
from odoo import fields, models
class PoXsdMixin(models.AbstractModel):
_description = "Abstract Model for PO XSD"
_name = "spec.mixin.poxsd"
_field_prefix = "poxsd10_"
_schema_name = "poxsd"
_schema_version = "1.0"
_odoo_module = "poxsd"
_spec_module = "odoo.addons.spec_driven_model.tests.spec_poxsd"
_binding_module = "odoo.addons.spec_driven_model.tests.purchase_order_lib"
# TODO rename
brl_currency_id = fields.Many2one(
comodel_name="res.currency",
string="Moeda",
compute="_compute_brl_currency_id",
default=lambda self: self.env.ref("base.EUR").id,
)
def _compute_brl_currency_id(self):
for item in self:
item.brl_currency_id = self.env.ref("base.EUR").id
|
Python
| 0
|
@@ -911,8 +911,220 @@
UR%22).id%0A
+%0A def _valid_field_parameter(self, field, name):%0A if name in (%22xsd_type%22, %22xsd_required%22, %22choice%22):%0A return True%0A else:%0A return super()._valid_field_parameter(field, name)%0A
|
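Decoded, the patch appends a _valid_field_parameter override to the mixin so Odoo accepts the spec-driven extra field attributes (indentation approximate):

    def _valid_field_parameter(self, field, name):
        if name in ("xsd_type", "xsd_required", "choice"):
            return True
        else:
            return super()._valid_field_parameter(field, name)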
462d394a755675347bbc3c9751ddc0def72dfd08
|
Refactor add_host function: save host only if confirmed
|
botnet/fabfile.py
|
botnet/fabfile.py
|
#!/usr/bin/env python
import os
from fabric.api import env, run, sudo, execute, local, settings, hide, open_shell, parallel
from fabric.contrib.console import confirm
import paramiko
import getpass
from tabulate import tabulate
file_hosts = "hosts.txt"
paramiko.util.log_to_file("paramiko.log")
env.colorize_errors = True
# The selected hosts are the hosts in env (at the beginning)
selected_hosts = env.hosts
running_hosts = {}
def load_hosts():
"""
Load hosts from hosts.txt.
A host can either be in form
username@host[:port] password
or
username@host[:port]
If no port is specified, port 22 is selected.
"""
with open(file_hosts, "r") as f:
data = f.readlines()
for line in data:
try:
host, password = line.strip().split()
except Exception:
host = line.strip()
password = None
if len(host.split(':')) == 1:
host = host + ":22"
env.hosts.append(host)
if password is not None:
env.passwords[host] = password.strip()
def add_host():
"""
Add a new host to the running hosts.
Add the new host also to the external host file.
"""
name = raw_input("Username: ")
host = raw_input("Host: ")
port = input("Port: ")
new_host = name + "@" + host + ":" + str(port)
selected_hosts.append(new_host)
password = None
if confirm("Authenticate using a password "):
password = getpass.getpass("Password: ").strip()
env.passwords[new_host] = password
# Append the new host to the hosts file
if password is not None:
line = new_host + " " + password + "\n"
else:
line = new_host + "\n"
with open(file_hosts, 'a') as f:
f.write(line)
def print_hosts():
"""
Print selected hosts.
    If you haven't hand-selected hosts yet, all hosts are selected.
"""
global selected_hosts
hosts = map(lambda x: [x, env.passwords.get(x, None)], selected_hosts)
print tabulate(hosts, ["Host", "Password"])
def check_hosts():
"""
Check if hosts are running or not.
"""
global running_hosts
running_hosts = dict()
for host in selected_hosts:
print "\nPing host " + str(selected_hosts.index(host) + 1) + " of " + str(len(selected_hosts))
response = os.system("ping -c 1 " + host.split("@")[1].split(":")[0])
if response == 0:
running_hosts[host] = True
else:
running_hosts[host] = False
# Convert running_hosts in order to print it as table
mylist = map(lambda index: [index[0], str(index[1])], running_hosts.items())
print tabulate(mylist, ["Host", "Running"])
def select_running_hosts():
"""
Select all running hosts.
"""
global selected_hosts
with hide('stdout'):
check_hosts()
host_up = filter(lambda x: running_hosts.get(x, False), running_hosts.keys())
selected_hosts = host_up
def choose_hosts():
"""
Select hosts you want to use.
"""
global selected_hosts
selected_hosts = []
mylist = map(lambda (num, h): [num, h], enumerate(env.hosts))
print "Select Hosts:"
print tabulate(mylist, ["Number", "Host"])
choices = raw_input("> ").split()
# Avoid letters in string index
choices = filter(lambda x: x.isalnum(), choices)
# Convert to int list
choices = map(lambda x: int(x), choices)
# Avoid IndexError
choices = filter(lambda x: x < len(env.hosts), choices)
# Get only selected hosts
selected_hosts = map(lambda i: env.hosts[i], choices)
@parallel
def execute_command(command):
"""
Execute a command on a host.
"""
with settings(warn_only=True):
if command.strip()[:5] == "sudo":
results = sudo(command, shell=False)
else:
results = run(command)
return results
def run_locally(cmd=None):
"""
Execute a command locally.
"""
if cmd is None:
cmd = raw_input("Insert command: ")
with settings(warn_only=True):
local(cmd)
@parallel
def run_command(cmd=None):
"""
Execute a command on a host.
"""
if cmd is None:
cmd = raw_input("Insert command: ")
execute(execute_command, cmd, hosts=selected_hosts)
def execute_script():
script_file = raw_input("Name of the script: ")
host_path = "/tmp"
# Copy the script on bots
for h in selected_hosts:
with hide('stdout', 'running'):
run_locally('scp %s %s:%s' % (script_file, h.split(':')[0], host_path))
# Execute script
run_command(host_path + "/" + script_file)
# Delete script
with hide('running'):
run_command("rm " + host_path + "/" + script_file)
def open_sh():
mylist = map(lambda (num, h): [num, h], enumerate(selected_hosts))
print tabulate(mylist, ["Number", "Host"])
n = input("Open shell in host number: ")
try:
h = selected_hosts[n]
execute(open_shell, host=h)
except Exception:
print "Error. Shell not opened."
|
Python
| 0
|
@@ -823,33 +823,34 @@
except
-Exception
+ValueError
:%0A
@@ -1586,24 +1586,25 @@
= password%0A
+%0A
# Append
@@ -1635,16 +1635,76 @@
ts file%0A
+ if confirm(%22Add the new host to the hosts file? %22):%0A
if p
@@ -1724,32 +1724,36 @@
t None:%0A
+
line = new_host
@@ -1772,24 +1772,28 @@
word + %22%5Cn%22%0A
+
else:%0A
@@ -1790,32 +1790,36 @@
else:%0A
+
+
line = new_host
@@ -1821,24 +1821,28 @@
host + %22%5Cn%22%0A
+
with ope
@@ -1858,32 +1858,36 @@
sts, 'a') as f:%0A
+
f.write(
|
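Decoded, the patch narrows the bare except Exception in load_hosts to except ValueError and makes add_host write the new host to hosts.txt only after an extra confirmation. The patched tail of add_host:

    # Append the new host to the hosts file
    if confirm("Add the new host to the hosts file? "):
        if password is not None:
            line = new_host + " " + password + "\n"
        else:
            line = new_host + "\n"
        with open(file_hosts, 'a') as f:
            f.write(line)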
2934120b3743fac2b388eba19d8c0a22b44d8f0a
|
Update error message
|
tests/core/test_evaluation_parameters.py
|
tests/core/test_evaluation_parameters.py
|
from timeit import timeit
import pytest
from great_expectations.data_asset.evaluation_parameters import parse_evaluation_parameter
from great_expectations.exceptions import EvaluationParameterError
def test_parse_evaluation_parameter():
# Substitution alone is ok
assert parse_evaluation_parameter("a", {"a": 1}) == 1
assert parse_evaluation_parameter("urn:great_expectations:validations:blarg",
{"urn:great_expectations:validations:blarg": 1}) == 1
# Very basic arithmetic is allowed as-is:
assert parse_evaluation_parameter("1 + 1", {}) == 2
# So is simple variable substitution:
assert parse_evaluation_parameter("a + 1", {"a": 2}) == 3
# URN syntax works
assert parse_evaluation_parameter("urn:great_expectations:validations:source_patient_data.default"
":expect_table_row_count_to_equal.result.observed_value * 0.9",
{"urn:great_expectations:validations:source_patient_data.default"
":expect_table_row_count_to_equal.result.observed_value": 10}) == 9
# We have basic operations (trunc)
assert parse_evaluation_parameter("urn:great_expectations:validations:source_patient_data.default"
":expect_table_row_count_to_equal.result.observed_value * 0.9",
{"urn:great_expectations:validations:source_patient_data.default"
":expect_table_row_count_to_equal.result.observed_value": 11}) != 9
assert parse_evaluation_parameter("trunc(urn:great_expectations:validations:source_patient_data.default"
":expect_table_row_count_to_equal.result.observed_value * 0.9)",
{"urn:great_expectations:validations:source_patient_data.default"
":expect_table_row_count_to_equal.result.observed_value": 11}) == 9
# Non GE URN syntax fails
with pytest.raises(EvaluationParameterError) as err:
parse_evaluation_parameter("urn:ieee:not_ge * 10", {"urn:ieee:not_ge": 1})
assert "Parse Failure" in str(err.value)
# Valid variables but invalid expression is no good
with pytest.raises(EvaluationParameterError) as err:
parse_evaluation_parameter("1 / a", {"a": 0})
assert "Error while evaluating evaluation parameter expression: division by zero" in str(err.value)
# It is okay to *substitute* strings in the expression...
assert parse_evaluation_parameter("foo", {"foo": "bar"}) == "bar"
# ...and to have whitespace in substituted values...
assert parse_evaluation_parameter("foo", {"foo": "bar "}) == "bar "
# ...but whitespace is *not* preserved from the parameter name if we evaluate it
assert parse_evaluation_parameter("foo ", {"foo": "bar"}) == "bar" # NOT "bar "
# We can use multiple parameters...
assert parse_evaluation_parameter("foo * bar", {"foo": 2, "bar": 3}) == 6
# ...but we cannot leave *partially* evaluated expressions (phew!)
with pytest.raises(EvaluationParameterError) as e:
parse_evaluation_parameter("foo + bar", {"foo": 2})
assert "unsupported operand type(s) for +" in str(e.value)
def test_parser_timing():
"""We currently reuse the parser, clearing the stack between calls, which is about 10 times faster than not
doing so. But these operations are really quick, so this may not be necessary."""
assert timeit("parse_evaluation_parameter('x', {'x': 1})",
setup="from great_expectations.data_asset.evaluation_parameters import parse_evaluation_parameter",
number=100) < 1
|
Python
| 0
|
@@ -3269,41 +3269,97 @@
rt %22
-unsupported operand type(s) for +
+Error while evaluating evaluation parameter expression: could not convert string to float
%22 in
|
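Decoded, the patch only swaps the expected error text in the partial-evaluation assertion near the end of test_parse_evaluation_parameter (the string is wrapped here for readability):

    with pytest.raises(EvaluationParameterError) as e:
        parse_evaluation_parameter("foo + bar", {"foo": 2})
    assert ("Error while evaluating evaluation parameter expression: "
            "could not convert string to float") in str(e.value)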