commit
stringlengths 40
40
| subject
stringlengths 1
3.25k
| old_file
stringlengths 4
311
| new_file
stringlengths 4
311
| old_contents
stringlengths 0
26.3k
| lang
stringclasses 3
values | proba
float64 0
1
| diff
stringlengths 0
7.82k
|
|---|---|---|---|---|---|---|---|
20ef0133953a5b72475286a3f0c4353089d5cd78
|
Remove duplicates
|
python-practice/questions-and-answers.py
|
python-practice/questions-and-answers.py
|
##########################################
###### Compare use of 'is' and '==' ######
##########################################
'''
# Create two lists
a = [1, 2, 3]
c = [1, 2, 3]
# Assign one list to a variable
b = a
# Verify eqivalence of contents
print("## Verify eqivalence of contents.")
print(f"a == b: {a == b}")
print(f"a == c: {a == c}")
print("")
# Display identity of objects
print("## Display identity of objects.")
print(f"a is b: {a is b}")
print(f"a is c: {a is c}")
print("")
print(f"ID a: {id(a)}")
print(f"ID b: {id(b)}")
print(f"ID c: {id(c)}")
print("")
# Add to the list, show variable updated too
a.append(4)
print("## Add to the list, show variable updated too.")
print(f"a: {a}")
print(f"b: {b}")
print("")
# Add to the variable, show list updated too
b.append(5)
print("## Add to the variable, show list updated too.")
print(f"a: {a}")
print(f"b: {b}")
print("")
# Make a copy of list a, verify differnt objects
d = a[:]
print("## Make a copy of list a, verify differnt objects.")
print(f"ID a: {id(a)}")
print(f"ID d: {id(d)}")
print("")
# Update the copied list, show original unchanged.
d.append(6)
print("## Update the copied list, show original unchanged.")
print(f"a: {a}")
print(f"d: {d}")
print("")
'''
########################
###### Decorators ######
########################
'''
# A function that takes another function as an argument,
# does something, then executes the passed function
# Sample decorator
def decorator_function(input_function):
def do_something():
# Do something
print("Inside the decorator")
print(f"Called: {input_fuction}")
# Execute the original function
input_function()
return do_something()
# Sample functions
def func_one():
print("ONE")
def func_two(input_string):
print(input_string)
# Use the decorator
#@decorator_function
func_one()
#@decorator_function
func_two("TWO")
'''
############################
###### Range Operator ######
############################
'''
# Count backwards from 100 to 0 by 10s
# NOTE: stop value is not included
counts = list(range(100, -10, -10))
print(counts)
'''
###############################
###### Class defintition ######
###############################
'''
# Define a class, instantiate it, return an instance value and a class value
class Sample:
class_var = 100
def __init__(self, a, b=10):
self.a = a
self.b = b
def get_b(self):
return self.b
sample_1 = Sample(1)
sample_2 = Sample(2, 20)
print(f"sample_1.a {sample_1.a}")
print(f"sample_2.a {sample_2.a}")
print("")
print(f"sample_1.b {sample_1.b}")
print(f"sample_1.b {sample_2.b}")
print("")
print(f"sample_1.class_var {sample_1.class_var}")
print(f"sample_1.class_var {sample_2.class_var}")
# Update class variable
Sample.class_var = 200
print("\n## Update class variable")
print(f"sample_1.class_var {sample_1.class_var}")
print(f"sample_1.class_var {sample_2.class_var}")
'''
##########################
###### Map function ######
##########################
# map will apply a function to each member of an iterable
def cuadrado(x):
return x * x
lista = [1, 2, 3, 4, 5]
print(list(map(cuadrado, lista)))
|
Python
| 0.999996
|
@@ -3229,16 +3229,21 @@
######%0D%0A
+'''%0D%0A
# map wi
@@ -3391,12 +3391,546 @@
, lista)))%0D%0A
+'''%0D%0A%0D%0A#####################################%0D%0A###### Remove List Duplicates ######%0D%0A#####################################%0D%0A# If order is not important%0D%0Aa = %5B3, 2, 1, 5, 2, 2%5D%0D%0Anormalized_a = list(set(a))%0D%0Aprint(a)%0D%0Aprint(normalized_a)%0D%0Aprint(%22%22)%0D%0A%0D%0A# If order matters%0D%0Afrom collections import defaultdict%0D%0A%0D%0Aseen = defaultdict(lambda:0) %0D%0Ab = %5B3, 2, 1, 5, 2, 2%5D%0D%0Anormalized_b = list()%0D%0Afor val in b:%0D%0A if seen%5Bval%5D == 0:%0D%0A normalized_b.append(val)%0D%0A seen%5Bval%5D += 1%0D%0A%0D%0Aprint(b)%0D%0Aprint(normalized_b)%0D%0A %0D%0A %0D%0A
|
fc79afb66dc625bd00f560f2e56eec945169192c
|
Move to {:f}.format everywhere
|
python/ccxt/base/decimal_to_precision.py
|
python/ccxt/base/decimal_to_precision.py
|
import re
import decimal
import numbers
import itertools
__all__ = [
'TRUNCATE',
'ROUND',
'DECIMAL_PLACES',
'SIGNIFICANT_DIGITS',
'NO_PADDING',
'PAD_WITH_ZERO',
'decimal_to_precision',
]
# rounding mode
TRUNCATE = 0
ROUND = 1
# digits counting mode
DECIMAL_PLACES = 2
SIGNIFICANT_DIGITS = 3
# padding mode
NO_PADDING = 4
PAD_WITH_ZERO = 5
def decimal_to_precision(n, rounding_mode=ROUND, precision=None, counting_mode=DECIMAL_PLACES, padding_mode=NO_PADDING):
assert precision is not None and isinstance(precision, numbers.Integral)
assert rounding_mode in [TRUNCATE, ROUND]
assert counting_mode in [DECIMAL_PLACES, SIGNIFICANT_DIGITS]
assert padding_mode in [NO_PADDING, PAD_WITH_ZERO]
context = decimal.getcontext()
precision = min(context.prec - 2, precision)
# all default except decimal.Underflow (raised when a number is rounded to zero)
context.traps[decimal.Underflow] = True
context.rounding = decimal.ROUND_HALF_UP # rounds 0.5 away from zero
dec = decimal.Decimal(str(n))
precise = None
string = '{:f}'.format(dec) # convert to string using .format to avoid engineering notation
if not re.match(r'[\d.-]+', string):
raise ValueError('{} is an invalid decimal number.'.format(string))
def power_of_10(x):
return decimal.Decimal('10') ** (-x)
if precision < 0:
to_nearest = power_of_10(precision)
if rounding_mode == ROUND:
return str(to_nearest * decimal.Decimal(decimal_to_precision("{:f}".format(dec / to_nearest), rounding_mode, 0, DECIMAL_PLACES, padding_mode)))
elif rounding_mode == TRUNCATE:
return decimal_to_precision("{:f}".format(dec - dec % to_nearest), rounding_mode, 0, DECIMAL_PLACES, padding_mode)
if rounding_mode == ROUND:
if counting_mode == DECIMAL_PLACES:
precise = '{:f}'.format(dec.quantize(power_of_10(precision))) # ROUND_HALF_EVEN is default context
elif counting_mode == SIGNIFICANT_DIGITS:
q = precision - dec.adjusted() - 1
sigfig = power_of_10(q)
if q < 0:
string_to_precision = string[:precision]
# string_to_precision is '' when we have zero precision
below = sigfig * decimal.Decimal(string_to_precision if string_to_precision else '0')
above = below + sigfig
precise = '{:f}'.format(min((below, above), key=lambda x: abs(x - dec)))
else:
precise = '{:f}'.format(dec.quantize(sigfig))
elif rounding_mode == TRUNCATE:
# Slice a string
if counting_mode == DECIMAL_PLACES:
before, after = string.split('.') if '.' in string else (string, '')
precise = before + '.' + after[:precision]
elif counting_mode == SIGNIFICANT_DIGITS:
if precision == 0:
return '0'
dot = string.index('.') if '.' in string else 0
start = dot - dec.adjusted()
end = start + precision
# need to clarify these conditionals
if dot >= end:
end -= 1
if precision >= len(string.replace('.', '')):
precise = string
else:
precise = string[:end].ljust(dot, '0')
precise = precise.rstrip('.')
if padding_mode == NO_PADDING:
return precise.rstrip('0').rstrip('.') if '.' in precise else precise
elif padding_mode == PAD_WITH_ZERO:
if '.' in precise:
if counting_mode == DECIMAL_PLACES:
before, after = precise.split('.')
return before + '.' + after.ljust(precision, '0')
elif counting_mode == SIGNIFICANT_DIGITS:
fsfg = len(list(itertools.takewhile(lambda x: x == '.' or x == '0', precise)))
if '.' in precise[fsfg:]:
precision += 1
return precise[:fsfg] + precise[fsfg:].rstrip('0').ljust(precision, '0')
else:
if counting_mode == SIGNIFICANT_DIGITS:
if precision > len(precise):
return precise + '.' + (precision - len(precise)) * '0'
elif counting_mode == DECIMAL_PLACES:
if precision > 0:
return precise + '.' + precision * '0'
return precise
|
Python
| 0.000787
|
@@ -1481,19 +1481,29 @@
return
-str
+%22%7B:f%7D%22.format
(to_near
|
7d2e3682f2e162c2ca6ddb945f7c1c0301bcf8c3
|
Address review comment: Remove reference to non-existent attribute.
|
flocker/node/gear.py
|
flocker/node/gear.py
|
# Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""Client implementation for talking to the geard daemon."""
import json
from zope.interface import Interface, implementer
from twisted.web.http import OK, NO_CONTENT, NOT_FOUND
from twisted.internet.defer import succeed, fail
from twisted.internet import reactor
from twisted.internet.task import deferLater
from treq import request, content
def workaround_geard_187():
"""Slight delay as workaround to
https://github.com/openshift/geard/issues/187.
:param value: Value to set as value of result ``Deferred``.
:return: ``Deferred`` that fires after short delay.
"""
return deferLater(reactor, 1, lambda: None)
class AlreadyExists(Exception):
"""A unit with the given name already exists."""
class GearError(Exception):
"""Unexpected error received from gear daemon."""
class IGearClient(Interface):
"""A client for the geard HTTP API."""
def add(unit_name, image_name):
"""Install and start a new unit.
:param unicode unit_name: The name of the unit to create.
:param unicode image_name: The Docker image to use for the unit.
:return: ``Deferred`` that fires on success, or errbacks with
:class:`AlreadyExists` if a unit by that name already exists.
"""
def exists(unit_name):
"""Check whether the unit exists.
:param unicode unit_name: The name of the unit to create.
:return: ``Deferred`` that fires with ``True`` if unit exists,
otherwise ``False``.
"""
def remove(unit_name):
"""Stop and delete the given unit.
This can be done multiple times in the row for the same unit.
:param unicode unit_name: The name of the unit to stop.
:return: ``Deferred`` that fires on success.
"""
@implementer(IGearClient)
class GearClient(object):
"""Talk to the gear daemon over HTTP.
:ivar Agent _agent: HTTP client used to talk to gear.
:ivar bytes _base_url: Base URL for gear.
"""
def __init__(self, hostname):
"""
:param reactor: Reactor to use for HTTP connections.
:param bytes hostname: Gear host to connect to.
"""
self._base_url = b"http://%s:43273" % (hostname,)
def _request(self, method, unit_name, operation=None, data=None):
"""Send HTTP request to gear.
:param bytes method: The HTTP method to send, e.g. ``b"GET"``.
:param unicode unit_name: The name of the unit.
:param operation: ``None``, or extra ``unicode`` path element to add to
the request URL path.
:param data: ``None``, or object with a body for the request that
can be serialized to JSON.
:return: A ``Defered`` that fires with a response object.
"""
url = self._base_url + b"/container/" + unit_name.encode("ascii")
if operation is not None:
url += b"/" + operation
if data is not None:
data = json.dumps(data)
d = workaround_geard_187()
d.addCallback(lambda _: request(method, url, data=data,
persistent=False))
return d
def _ensure_ok(self, response):
"""Make sure response indicates success.
Also reads the body to ensure connection is closed.
:param response: Response from treq request.
:return: ``Deferred`` that errbacks with ``GearError`` if response
is not OK.
"""
d = content(response)
# geard uses a variaty of 2xx response codes. Filed treq issue
# about having "is this a success?" API:
# https://github.com/dreid/treq/issues/62
if response.code // 100 != 2:
d.addCallback(lambda data: fail(GearError(response.code, data)))
return d
def add(self, unit_name, image_name):
checked = self.exists(unit_name)
checked.addCallback(
lambda exists: fail(AlreadyExists(unit_name)) if exists else None)
checked.addCallback(
lambda _: self._request(b"PUT", unit_name,
data={u"Image": image_name,
u"Started": True}))
checked.addCallback(self._ensure_ok)
return checked
def exists(self, unit_name):
# status isn't really intended for this usage; better to use
# listing (with option to list all) as part of
# https://github.com/openshift/geard/issues/187
d = self._request(b"GET", unit_name, operation=b"status")
def got_response(response):
result = content(response)
if response.code in (OK, NO_CONTENT):
result.addCallback(lambda _: True)
elif response.code == NOT_FOUND:
result.addCallback(lambda _: False)
else:
result.addCallback(
lambda data: fail(GearError(response.code, data)))
return result
d.addCallback(got_response)
return d
def remove(self, unit_name):
d = self._request(b"PUT", unit_name, operation=b"stopped")
d.addCallback(self._ensure_ok)
d.addCallback(lambda _: self._request(b"DELETE", unit_name))
d.addCallback(self._ensure_ok)
return d
@implementer(IGearClient)
class FakeGearClient(object):
"""In-memory fake that simulates talking to a gear daemon.
:ivar dict _units: Map ``unicode`` names of added units to dictionary
containing information about them.
"""
def __init__(self):
self._units = {}
def add(self, unit_name, image_name):
if unit_name in self._units:
return fail(AlreadyExists(unit_name))
self._units[unit_name] = {}
return succeed(None)
def exists(self, unit_name):
return succeed(unit_name in self._units)
def remove(self, unit_name):
if unit_name in self._units:
del self._units[unit_name]
return succeed(None)
|
Python
| 0
|
@@ -1948,66 +1948,8 @@
P.%0A%0A
- :ivar Agent _agent: HTTP client used to talk to gear.%0A
|
41d6c18aee851c9b2430d74c51ef51b49948b0f4
|
raise version
|
brilws/_version.py
|
brilws/_version.py
|
__version__ = "3.4.1"
|
Python
| 0
|
@@ -14,9 +14,9 @@
%223.
-4.1
+5.0
%22%0A
|
fadab627469d008a2bf39a9544a77a3bd6518b20
|
use the local path in the gui to run stuff.
|
rp-mt-scripts-graphical.py
|
rp-mt-scripts-graphical.py
|
#! /usr/bin/env python
"""Main module to create GTK interface to the MT scripts."""
import gtk
import gobject
import subprocess
class ScriptsWindow:
"""Class to manage the demo window for the pile manager."""
def __init__(self):
self.builder = gtk.Builder()
self.builder.add_from_file("rp-mt-scripts-interface.glade")
self.builder.connect_signals(self)
self.window = self.builder.get_object("winMain")
self.window.show()
self.filename = self.builder.get_object("filechooserbutton1")
## GTK+ Signal Handlers
def on_winMain_destroy(self, _widget, _callback_data=None):
"""Callback to exit mainloop on window destruction."""
print "destroy signal occurred"
gtk.main_quit()
## Automatic tab
def on_btnConfigInstall_clicked(self, _widget, _callback_data=None):
run("./configure")
## Manual tab
def on_btnConfigureNoInstall_clicked(self, _widget, _callback_data=None):
run("scripts/1_mt_scripts_configuration.sh")
def on_btnInstallTbeta_clicked(self, _widget, _callback_data=None):
run("scripts/install_tbeta_1.1.sh")
def on_btnInstallPocoLocal_clicked(self, _widget, _callback_data=None):
run("scripts/install_libpoco_workaround_local.sh")
def on_btnInstallPocoGlobal_clicked(self, _widget, _callback_data=None):
run("scripts/install_libpoco_workaround_local.sh")
def on_btnInstallFlash_clicked(self, _widget, _callback_data=None):
run("scripts/install_flashplayer_standalone.sh")
def on_btnInstallPyMT_clicked(self, _widget, _callback_data=None):
run("scripts/install_pymt_hg.sh")
## Update tab
def on_btnUpdatePyMT_clicked(self, _widget, _callback_data=None):
run("scripts/update_pymt_hg.sh")
## Run tab
def on_btnRunTbeta_clicked(self, _widget, _callback_data=None):
self.tbeta = run("scripts/install_flashplayer_standalone.sh",
wait=False)
def on_btnRunFlashPlayer_clicked(self, _widget, _callback_data=None):
run("scripts/run_flashplayer_standalone.sh")
def on_btnRunPyMT_clicked(self, _widget, _callback_data=None):
run("scripts/run_pymt_hg_examples.sh")
def on_btnRunOtherPy_clicked(self, _widget, _callback_data=None):
run("python "+self.filename.get_filename())
def main(self):
"""Start the GTK mainloop"""
gtk.main()
def run(cmdline, show_terminal=True, wait=True):
if show_terminal:
cmdline="gnome-terminal -x " + cmdline
process = subprocess.Popen(cmdline)
if wait:
return process.wait()
else:
return process
if __name__ == "__main__":
print __file__
app = ScriptsWindow()
app.main()
|
Python
| 0
|
@@ -78,16 +78,32 @@
ts.%22%22%22%0A%0A
+import os.path%0A%0A
import g
@@ -105,16 +105,16 @@
ort gtk%0A
-
import g
@@ -809,16 +809,23 @@
:%0A%09%09run(
+MTROOT+
%22./confi
@@ -923,32 +923,39 @@
ta=None):%0A%09%09run(
+MTROOT+
%22scripts/1_mt_sc
@@ -1048,32 +1048,39 @@
ta=None):%0A%09%09run(
+MTROOT+
%22scripts/install
@@ -1168,32 +1168,39 @@
ta=None):%0A%09%09run(
+MTROOT+
%22scripts/install
@@ -1304,32 +1304,39 @@
ta=None):%0A%09%09run(
+MTROOT+
%22scripts/install
@@ -1435,32 +1435,39 @@
ta=None):%0A%09%09run(
+MTROOT+
%22scripts/install
@@ -1563,32 +1563,39 @@
ta=None):%0A%09%09run(
+MTROOT+
%22scripts/install
@@ -1690,32 +1690,39 @@
ta=None):%0A%09%09run(
+MTROOT+
%22scripts/update_
@@ -1828,24 +1828,31 @@
tbeta = run(
+MTROOT+
%22scripts/ins
@@ -1970,32 +1970,39 @@
ta=None):%0A%09%09run(
+MTROOT+
%22scripts/run_fla
@@ -2098,16 +2098,23 @@
:%0A%09%09run(
+MTROOT+
%22scripts
@@ -2215,16 +2215,23 @@
:%0A%09%09run(
+MTROOT+
%22python
@@ -2572,16 +2572,16 @@
ain__%22:%0A
-
%09print _
@@ -2588,16 +2588,108 @@
_file__%0A
+%09%0A%09# Assume we're located in MTROOT, and go from there.%0A%09MTROOT=os.path.dirname(__file__)%0A%09%0A
%09app = S
|
6e2362351d9ccaa46a5a2bc69c4360e4faff166d
|
Add encoding spec to comply Python 2
|
iclib/qibla.py
|
iclib/qibla.py
|
from . import formula
def direction(lat, lng):
return formula.qibla(lat, lng)
def direction_dms(lat, lng):
return _dms(formula.qibla(lat, lng))
def direction_str(lat, lng, prec=0):
d, m, s = direction_dms(lat, lng)
# negative input might returns wrong result
return '{}° {}\' {:.{}f}"'.format(d, m, s, prec)
def _dms(deg):
seconds = deg * 3600
m, s = divmod(seconds, 60)
d, m = divmod(m, 60)
return (int(d), int(m), s)
|
Python
| 0.000002
|
@@ -1,8 +1,32 @@
+# -*- coding: utf-8 -*-%0A
from . i
|
9f1913ca658228c2c6551b2c8de1d48ddd73c8aa
|
raise version to 2
|
brilws/_version.py
|
brilws/_version.py
|
__version__ = "1.0.3"
|
Python
| 0.000001
|
@@ -12,11 +12,11 @@
= %22
-1.0.3
+2.0.0
%22%0A
|
3c2172419aaec3c0031e4a355b20785a3888ae6f
|
fix label dict
|
python/federatedml/util/sample_weight.py
|
python/federatedml/util/sample_weight.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import numpy as np
from fate_flow.entity.metric import Metric, MetricMeta
from federatedml.model_base import ModelBase
from federatedml.param.sample_weight_param import SampleWeightParam
from federatedml.statistic.data_overview import get_label_count
from federatedml.util import consts, LOGGER
def compute_weight_array(data_instances):
weight_inst = data_instances.mapValues(lambda v: v.weight)
return np.array([v[1] for v in list(weight_inst.collect())])
class SampleWeight(ModelBase):
def __init__(self):
super().__init__()
self.model_param = SampleWeightParam()
self.metric_name = "sample_weight"
self.metric_namespace = "train"
self.metric_type = "SAMPLE_WEIGHT"
self.weight_mode = None
def _init_model(self, params):
self.model_param = params
self.class_weight = params.class_weight
self.sample_weight_name = params.sample_weight_name
self.normalize = params.normalize
self.need_run = params.need_run
@staticmethod
def get_class_weight(data_instances):
class_weight = get_label_count(data_instances)
n_samples = data_instances.count()
n_classes = len(class_weight.keys())
class_weight.update((k, n_samples / (n_classes * v)) for k, v in class_weight.items())
return class_weight
@staticmethod
def replace_weight(data_instance, class_weight, weight_loc=None, weight_base=None):
weighted_data_instance = copy.copy(data_instance)
original_features = weighted_data_instance.features
if weight_loc is not None:
weighted_data_instance.set_weight(original_features[weight_loc] / weight_base)
weighted_data_instance.features = original_features[np.arange(original_features.shape[0]) != weight_loc]
else:
weighted_data_instance.set_weight(class_weight.get(data_instance.label, 1))
return weighted_data_instance
@staticmethod
def assign_sample_weight(data_instances, class_weight, weight_loc, normalize):
weight_base = 1
if weight_loc is not None and normalize:
def sum_sample_weight(kv_iterator):
sample_weight = 0
for _, inst in kv_iterator:
sample_weight += inst.features[weight_loc]
return sample_weight
weight_sum = data_instances.mapPartitions(sum_sample_weight).reduce(lambda x, y: x + y)
weight_base = weight_sum / data_instances.count()
return data_instances.mapValues(lambda v: SampleWeight.replace_weight(v, class_weight, weight_loc, weight_base))
@staticmethod
def get_weight_loc(data_instances, sample_weight_name):
weight_loc = None
if sample_weight_name:
try:
weight_loc = data_instances.schema["header"].index(sample_weight_name)
except ValueError:
return
return weight_loc
def transform_weighted_instance(self, data_instances, weight_loc):
if self.class_weight and self.class_weight == 'balanced':
self.class_weight = SampleWeight.get_class_weight(data_instances)
return SampleWeight.assign_sample_weight(data_instances, self.class_weight, weight_loc, self.normalize)
def callback_info(self):
class_weight = None
classes = None
if self.class_weight:
class_weight = {str(k): v for k, v in self.class_weight.items()}
classes = sorted([str(k) for k in self.class_weight.keys()])
LOGGER.debug(f"final class weight is: {class_weight}")
metric_meta = MetricMeta(name='train',
metric_type=self.metric_type,
extra_metas={
"weight_mode": self.weight_mode,
"class_weight": class_weight,
"classes": classes,
"sample_weight_name": self.sample_weight_name
})
self.callback_metric(metric_name=self.metric_name,
metric_namespace=self.metric_namespace,
metric_data=[Metric(self.metric_name, 0)])
self.tracker.set_metric_meta(metric_namespace=self.metric_namespace,
metric_name=self.metric_name,
metric_meta=metric_meta)
def fit(self, data_instances):
if self.sample_weight_name is None and self.class_weight is None:
return data_instances
if self.class_weight and isinstance(self.class_weight, dict):
self.class_weight = {int(k): v for k, v in self.class_weight.items()}
if self.class_weight:
self.weight_mode = "class weight"
if self.sample_weight_name and self.class_weight:
LOGGER.warning(f"Both 'sample_weight_name' and 'class_weight' provided."
f"Only weight from 'sample_weight_name' is used.")
new_schema = copy.deepcopy(data_instances.schema)
new_schema["sample_weight"] = "weight"
weight_loc = None
if self.sample_weight_name:
self.weight_mode = "sample weight name"
weight_loc = SampleWeight.get_weight_loc(data_instances, self.sample_weight_name)
if weight_loc is not None:
new_schema["header"].pop(weight_loc)
else:
raise ValueError(f"Cannot find weight column of given sample_weight_name '{self.sample_weight_name}'.")
result_instances = self.transform_weighted_instance(data_instances, weight_loc)
result_instances.schema = new_schema
self.callback_info()
return result_instances
|
Python
| 0
|
@@ -1888,32 +1888,36 @@
keys())%0A
+res_
class_weight.upd
@@ -1916,19 +1916,19 @@
ight
-.update((k,
+ = %7Bstr(k):
n_s
@@ -1951,17 +1951,16 @@
ses * v)
-)
for k,
@@ -1984,17 +1984,17 @@
.items()
-)
+%7D
%0A%0A
@@ -2002,16 +2002,20 @@
return
+res_
class_we
@@ -2564,16 +2564,20 @@
ght.get(
+str(
data_ins
@@ -2587,16 +2587,17 @@
ce.label
+)
, 1))%0A
@@ -5311,32 +5311,34 @@
stances%0A%0A
+ #
if self.class_w
@@ -5384,32 +5384,33 @@
dict):%0A
+#
self.class_w
|
97831652f0d06236d83d0731813ffcdc44a4e190
|
Update pypi version
|
fontdump/__init__.py
|
fontdump/__init__.py
|
__version__ = '0.1.0'
|
Python
| 0
|
@@ -8,14 +8,14 @@
on__ = '
-0
+1
.1.0'
|
22461c6ddc1a6bff0ee8637139146b8531b3e0b4
|
improve python error message when tp fails to start
|
python/perfetto/trace_processor/shell.py
|
python/perfetto/trace_processor/shell.py
|
#!/usr/bin/env python3
# Copyright (C) 2020 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess
import sys
import time
from urllib import request, error
from perfetto.trace_processor.platform import PlatformDelegate
# Default port that trace_processor_shell runs on
TP_PORT = 9001
def load_shell(bin_path: str, unique_port: bool, verbose: bool,
ingest_ftrace_in_raw: bool, platform_delegate: PlatformDelegate):
addr, port = platform_delegate.get_bind_addr(
port=0 if unique_port else TP_PORT)
url = f'{addr}:{str(port)}'
shell_path = platform_delegate.get_shell_path(bin_path=bin_path)
if os.name == 'nt' and not shell_path.endswith('.exe'):
tp_exec = [sys.executable, shell_path]
else:
tp_exec = [shell_path]
args = ['-D', '--http-port', str(port)]
if not ingest_ftrace_in_raw:
args.append('--no-ftrace-raw')
p = subprocess.Popen(
tp_exec + args,
stdout=subprocess.DEVNULL,
stderr=None if verbose else subprocess.DEVNULL)
while True:
try:
if p.poll() != None:
if unique_port:
raise Exception(
"Random port allocation failed, please file a bug at https://goto.google.com/perfetto-bug"
)
raise Exception(
"Trace processor failed to start, please file a bug at https://goto.google.com/perfetto-bug"
)
_ = request.urlretrieve(f'http://{url}/status')
time.sleep(1)
break
except error.URLError:
pass
return url, p
|
Python
| 0.000001
|
@@ -1553,18 +1553,43 @@
%0A%0A
-while True
+success = False%0A for i in range(3)
:%0A
@@ -1617,10 +1617,10 @@
l()
-!=
+is
Non
@@ -1634,172 +1634,157 @@
-if unique_port:%0A raise Exception(%0A %22Random port allocation failed, please file a bug at https://goto.google.com/perfetto-bug%22%0A
+_ = request.urlretrieve(f'http://%7Burl%7D/status')%0A success = True%0A break%0A except error.URLError:%0A time.sleep(1
)%0A
+%0A
-
+if not success:%0A
@@ -1800,20 +1800,16 @@
eption(%0A
-
@@ -1844,16 +1844,121 @@
tart
-, please
+. Try rerunning with %22%0A %22verbose=True in TraceProcessorConfig for more detailed %22%0A %22information and
fil
@@ -2008,143 +2008,80 @@
-bug
-%22%0A )%0A _ = request.urlretrieve(f'
+ %22%0A %22or
http
+s
://
-%7Burl%7D/status')%0A time.sleep(1)%0A break%0A except error.URLError:%0A pass
+github.com/google/perfetto/issues if necessary.%22)
%0A%0A
|
eb7daa8f4ac932782f7016e8fbdfc010999e4c21
|
support attribute deletion from a row
|
src/main/python/pyspark_cassandra/types.py
|
src/main/python/pyspark_cassandra/types.py
|
from collections import Set, Iterable, Mapping
from datetime import datetime
from operator import itemgetter
from time import mktime
def _create_row(fields, values):
return _create_struct(Row, fields, values)
def _create_udt(fields, values):
return _create_struct(UDT, fields, values)
def _create_struct(cls, fields, values):
d = {k: v for k, v in zip(fields, values)}
return cls(**d)
class Struct(tuple):
"""Adaptation from the pyspark.sql.Row which better supports adding fields"""
def __new__(cls, **kwargs):
if not kwargs:
raise ValueError("Cannot construct empty %s" % cls)
struct = tuple.__new__(cls)
struct.__FIELDS__ = kwargs
return struct
def asDict(self):
return self.__dict__()
def __dict__(self):
return self.__FIELDS__
def __iter__(self):
return iter(self.__FIELDS__.values())
def keys(self):
return self.__FIELDS__.keys()
def values(self):
return self.__FIELDS__.values()
def __len__(self):
return len(self.__FIELDS__)
def __eq__(self, other):
return self.__FIELDS__.__eq__(other.__FIELDS__)
def __ne__(self, other):
return not self == other
def __getattr__(self, name):
try:
return self.__FIELDS__[name]
except KeyError:
raise AttributeError(name)
def __setattr__(self, name, value):
if name == "__FIELDS__":
tuple.__setattr__(self, name, value)
else:
self.__FIELDS__[name] = value
def __getstate__(self):
return self.__dict__()
def __reduce__(self):
keys = self.__FIELDS__.keys()
values = [self.__FIELDS__[k] for k in keys]
return (self._creator(), (keys, values,))
def __repr__(self, *args, **kwargs):
fields = sorted(self.__FIELDS__.items(), key=itemgetter(0))
values = ", ".join("%s=%r" % (k, v) for k, v in fields if k != '__FIELDS__')
return "%s(%s)" % (self.__class__.__name__, values)
class Row(Struct):
def _creator(self):
return _create_row
class UDT(Struct):
def _creator(self):
return _create_udt
def as_java_array(gateway, java_type, iterable):
"""Creates a Java array from a Python iterable, using the given p4yj gateway"""
java_type = gateway.jvm.__getattr__(java_type)
lst = list(iterable)
arr = gateway.new_array(java_type, len(lst))
for i, e in enumerate(lst):
jobj = as_java_object(gateway, e)
arr[i] = jobj
return arr
def as_java_object(gateway, obj):
"""Converts a limited set of types to their corresponding types in java. Supported are 'primitives' (which aren't
converted), datetime.datetime and the set-, dict- and iterable-like types.
"""
t = type(obj)
if issubclass(t, (bool, int, float, str)):
return obj
elif issubclass(t, UDT):
field_names = as_java_array(gateway, "String", obj.keys())
field_values = as_java_array(gateway, "Object", obj.values())
udt = gateway.jvm.UDTValueConverter(field_names, field_values)
return udt.toConnectorType()
elif issubclass(t, datetime):
timestamp = int(mktime(obj.timetuple()) * 1000)
return gateway.jvm.java.util.Date(timestamp)
elif issubclass(t, (list, Iterable)):
array_list = gateway.jvm.java.util.ArrayList()
for e in obj: array_list.append(e)
return array_list
elif issubclass(t, (dict, Mapping)):
hash_map = gateway.jvm.java.util.HashMap()
for (k, v) in obj.items(): hash_map[k] = v
return hash_map
elif issubclass(t, (set, Set)):
hash_set = gateway.jvm.java.util.HashSet()
for e in obj: hash_set.add(e)
return hash_set
else:
return obj
|
Python
| 0
|
@@ -1387,16 +1387,132 @@
= value
+%0A%0A%09def __delattr__(self, name):%0A%09%09try:%0A%09%09%09del self.__FIELDS__%5Bname%5D%0A%09%09except KeyError:%0A%09%09%09raise AttributeError(name)
%0A%09%09%0A%09%0A%09d
|
141eafa531c6c09a06efe6a694251a1eea84908d
|
bump skip version on attrs test
|
h5py/tests/old/test_attrs.py
|
h5py/tests/old/test_attrs.py
|
# This file is part of h5py, a Python interface to the HDF5 library.
#
# http://www.h5py.org
#
# Copyright 2008-2013 Andrew Collette and contributors
#
# License: Standard 3-clause BSD; see "license.txt" for full license terms
# and contributor agreement.
"""
Attributes testing module
Covers all operations which access the .attrs property, with the
exception of data read/write and type conversion. Those operations
are tested by module test_attrs_data.
"""
from __future__ import absolute_import
import six
import numpy as np
try:
from collections.abc import MutableMapping
except ImportError: # Python < 3.3
from collections import MutableMapping
from ..common import TestCase, ut
import h5py
from h5py import File
from h5py import h5a, h5t
from h5py import AttributeManager
class BaseAttrs(TestCase):
def setUp(self):
self.f = File(self.mktemp(), 'w')
def tearDown(self):
if self.f:
self.f.close()
class TestAccess(BaseAttrs):
"""
Feature: Attribute creation/retrieval via special methods
"""
def test_create(self):
""" Attribute creation by direct assignment """
self.f.attrs['a'] = 4.0
self.assertEqual(list(self.f.attrs.keys()), ['a'])
self.assertEqual(self.f.attrs['a'], 4.0)
def test_overwrite(self):
""" Attributes are silently overwritten """
self.f.attrs['a'] = 4.0
self.f.attrs['a'] = 5.0
self.assertEqual(self.f.attrs['a'], 5.0)
def test_rank(self):
""" Attribute rank is preserved """
self.f.attrs['a'] = (4.0, 5.0)
self.assertEqual(self.f.attrs['a'].shape, (2,))
self.assertArrayEqual(self.f.attrs['a'], np.array((4.0,5.0)))
def test_single(self):
""" Attributes of shape (1,) don't become scalars """
self.f.attrs['a'] = np.ones((1,))
out = self.f.attrs['a']
self.assertEqual(out.shape, (1,))
self.assertEqual(out[()], 1)
def test_access_exc(self):
""" Attempt to access missing item raises KeyError """
with self.assertRaises(KeyError):
self.f.attrs['a']
class TestDelete(BaseAttrs):
"""
Feature: Deletion of attributes using __delitem__
"""
def test_delete(self):
""" Deletion via "del" """
self.f.attrs['a'] = 4.0
self.assertIn('a', self.f.attrs)
del self.f.attrs['a']
self.assertNotIn('a', self.f.attrs)
def test_delete_exc(self):
""" Attempt to delete missing item raises KeyError """
with self.assertRaises(KeyError):
del self.f.attrs['a']
class TestUnicode(BaseAttrs):
"""
Feature: Attributes can be accessed via Unicode or byte strings
"""
def test_ascii(self):
""" Access via pure-ASCII byte string """
self.f.attrs[b"ascii"] = 42
out = self.f.attrs[b"ascii"]
self.assertEqual(out, 42)
def test_raw(self):
""" Access via non-ASCII byte string """
name = b"non-ascii\xfe"
self.f.attrs[name] = 42
out = self.f.attrs[name]
self.assertEqual(out, 42)
def test_unicode(self):
""" Access via Unicode string with non-ascii characters """
name = u"Omega" + six.unichr(0x03A9)
self.f.attrs[name] = 42
out = self.f.attrs[name]
self.assertEqual(out, 42)
class TestCreate(BaseAttrs):
"""
Options for explicit attribute creation
"""
def test_named(self):
""" Attributes created from named types link to the source type object
"""
self.f['type'] = np.dtype('u8')
self.f.attrs.create('x', 42, dtype=self.f['type'])
self.assertEqual(self.f.attrs['x'], 42)
aid = h5a.open(self.f.id, b'x')
htype = aid.get_type()
htype2 = self.f['type'].id
self.assertEqual(htype, htype2)
self.assertTrue(htype.committed())
class TestMutableMapping(BaseAttrs):
'''Tests if the registration of AttributeManager as a MutableMapping
behaves as expected
'''
def test_resolution(self):
assert issubclass(AttributeManager, MutableMapping)
assert isinstance(self.f.attrs, MutableMapping)
def test_validity(self):
'''
Test that the required functions are implemented.
'''
AttributeManager.__getitem__
AttributeManager.__setitem__
AttributeManager.__delitem__
AttributeManager.__iter__
AttributeManager.__len__
class TestVlen(BaseAttrs):
def test_vlen(self):
a = np.array([np.arange(3), np.arange(4)],
dtype=h5t.vlen_dtype(int))
self.f.attrs['a'] = a
self.assertArrayEqual(self.f.attrs['a'][0], a[0])
class TestTrackOrder(BaseAttrs):
def fill_attrs(self, track_order):
attrs = self.f.create_group('test', track_order=track_order).attrs
for i in range(100):
attrs[str(i)] = i
return attrs
@ut.skipUnless(h5py.version.hdf5_version_tuple >= (1, 10, 5), 'HDF5 1.10.5 required')
# https://forum.hdfgroup.org/t/bug-h5arename-fails-unexpectedly/4881
def test_track_order(self):
attrs = self.fill_attrs(track_order=True) # creation order
self.assertEqual(list(attrs),
[u'' + str(i) for i in range(100)])
def test_no_track_order(self):
attrs = self.fill_attrs(track_order=False) # name alphanumeric
self.assertEqual(list(attrs),
sorted([u'' + str(i) for i in range(100)]))
|
Python
| 0
|
@@ -5052,17 +5052,17 @@
(1, 10,
-5
+6
), 'HDF5
@@ -5067,17 +5067,17 @@
F5 1.10.
-5
+6
require
|
a713bbb1226863b4417362019431de0266faa2d9
|
Update automateprojectscript.py
|
automateprojectscript.py
|
automateprojectscript.py
|
#!/usr/bin/python
"""
This python file just runs all of the terminal commands needed to run the project. It just saves time not having to manually type in these commands every time you want to run the project.
At the moment it only works for the example project, as the project later develops this script might be updated if the other people in the team decide to use this.
This is a first version, next I might work on getting a seperate terminal open to run each robot in order for it to be easy to see the positions of each robot. At the moment, since only 1 terminal is used, all of the output is put in it (which of course makes it messy)
To run the script, simply open up a terminal and type: python automateprojectscript.py
Author: ttho618
"""
import os
from subprocess import Popen, PIPE, signal
from os.path import join
findRoscorePro = Popen("pgrep roscore", stdout=PIPE, shell=True)
killroscorePro = Popen("kill "+findRoscorePro.communicate()[0], shell=True)
# The world file to look for
lookfor = "myworld.world"
# I assume that the project on your computer is located within the home directory
for root, dirs, files in os.walk('/home', topdown=True):
#print "searching", root
if '.local' in dirs:
dirs.remove('.local')
if 'catkin_ws' in dirs: # If the project is within this directory, then you need to change this to rosbuild_ws
dirs.remove('catkin_ws')
if lookfor in files:
print "found: %s" % join(root, lookfor)
worldfile = join(root, lookfor)
print worldfile
core = Popen('roscore',shell=True)
stagePro = Popen('rosrun stage stageros %s' %worldfile,shell=True)
# These below lines would need to be changed to fit what you are wanting to run.
runNode= Popen('rosrun se306Project R0',shell=True)
runNode= Popen('rosrun se306Project R1',shell=True)
|
Python
| 0
|
@@ -1527,16 +1527,207 @@
dfile%0A%0A%0A
+# This would need to be changed if your project is named something different%0ArosmakePro= Popen('rosmake se306Project',shell=True)%0ArosmakePro.communicate() # Waits until rosmake has finished%0A%0A
core = P
|
0be59cee807ef6167883fe32680f2c177202f2d5
|
add missing document_types
|
frappe/model/sync.py
|
frappe/model/sync.py
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: MIT. See LICENSE
"""
Sync's doctype and docfields from txt files to database
perms will get synced only if none exist
"""
import frappe
import os
from frappe.modules.import_file import import_file_by_path
from frappe.modules.patch_handler import block_user
from frappe.utils import update_progress_bar
def sync_all(force=0, reset_permissions=False):
block_user(True)
for app in frappe.get_installed_apps():
sync_for(app, force, reset_permissions=reset_permissions)
block_user(False)
frappe.clear_cache()
def sync_for(app_name, force=0, reset_permissions=False):
files = []
if app_name == "frappe":
# these need to go first at time of install
for d in (("core", "docfield"),
("core", "docperm"),
("core", "doctype_action"),
("core", "doctype_link"),
("core", "role"),
("core", "has_role"),
("core", "doctype"),
("core", "user"),
("custom", "custom_field"),
("custom", "property_setter"),
("website", "web_form"),
("website", "web_template"),
("website", "web_form_field"),
("website", "portal_menu_item"),
("data_migration", "data_migration_mapping_detail"),
("data_migration", "data_migration_mapping"),
("data_migration", "data_migration_plan_mapping"),
("data_migration", "data_migration_plan"),
("desk", "number_card"),
("desk", "dashboard_chart"),
("desk", "dashboard"),
("desk", "onboarding_permission"),
("desk", "onboarding_step"),
("desk", "onboarding_step_map"),
("desk", "module_onboarding"),
("desk", "workspace_link"),
("desk", "workspace_chart"),
("desk", "workspace_shortcut"),
("desk", "workspace")):
files.append(os.path.join(frappe.get_app_path("frappe"), d[0],
"doctype", d[1], d[1] + ".json"))
for module_name in frappe.local.app_modules.get(app_name) or []:
folder = os.path.dirname(frappe.get_module(app_name + "." + module_name).__file__)
files = get_doc_files(files=files, start_path=folder)
l = len(files)
if l:
for i, doc_path in enumerate(files):
import_file_by_path(doc_path, force=force, ignore_version=True, reset_permissions=reset_permissions, for_sync=True)
frappe.db.commit()
# show progress bar
update_progress_bar("Updating DocTypes for {0}".format(app_name), i, l)
# print each progress bar on new line
print()
def get_doc_files(files, start_path):
"""walk and sync all doctypes and pages"""
files = files or []
# load in sequence - warning for devs
document_types = ['doctype', 'page', 'report', 'dashboard_chart_source', 'print_format',
'website_theme', 'web_form', 'web_template', 'notification', 'print_style',
'data_migration_mapping', 'data_migration_plan', 'workspace',
'onboarding_step', 'module_onboarding']
for doctype in document_types:
doctype_path = os.path.join(start_path, doctype)
if os.path.exists(doctype_path):
for docname in os.listdir(doctype_path):
if os.path.isdir(os.path.join(doctype_path, docname)):
doc_path = os.path.join(doctype_path, docname, docname) + ".json"
if os.path.exists(doc_path):
if not doc_path in files:
files.append(doc_path)
return files
|
Python
| 0.00013
|
@@ -2529,38 +2529,47 @@
= %5B
-'
+%0A%09%09%22
doctype
-', 'page', 'report', '
+%22,%0A%09%09%22page%22,%0A%09%09%22report%22,%0A%09%09%22
dash
@@ -2586,20 +2586,22 @@
t_source
-', '
+%22,%0A%09%09%22
print_fo
@@ -2604,22 +2604,36 @@
t_format
-'
+%22
,%0A%09%09
-'
+%22web_page%22,%0A%09%09%22
website_
@@ -2641,20 +2641,22 @@
heme
-', '
+%22,%0A%09%09%22
web_form
', '
@@ -2651,20 +2651,22 @@
web_form
-', '
+%22,%0A%09%09%22
web_temp
@@ -2669,20 +2669,22 @@
template
-', '
+%22,%0A%09%09%22
notifica
@@ -2687,20 +2687,22 @@
fication
-', '
+%22,%0A%09%09%22
print_st
@@ -2704,22 +2704,22 @@
nt_style
-'
+%22
,%0A%09%09
-'
+%22
data_mig
@@ -2732,20 +2732,22 @@
_mapping
-', '
+%22,%0A%09%09%22
data_mig
@@ -2757,20 +2757,22 @@
ion_plan
-', '
+%22,%0A%09%09%22
workspac
@@ -2776,14 +2776,14 @@
pace
-'
+%22
,%0A%09%09
-'
+%22
onbo
@@ -2797,12 +2797,14 @@
step
-', '
+%22,%0A%09%09%22
modu
@@ -2816,17 +2816,112 @@
boarding
-'
+%22,%0A%09%09%22form_tour%22,%0A%09%09%22client_script%22,%0A%09%09%22server_script%22,%0A%09%09%22custom_field%22,%0A%09%09%22property_setter%22,%0A%09
%5D%0A%09for d
@@ -3312,12 +3312,13 @@
return files
+%0A
|
252d4212e7952db3d36e0324ba237cc109d62279
|
Replace . by _ in signal and entity names.
|
src/dynamic_graph/sot/core/feature_position.py
|
src/dynamic_graph/sot/core/feature_position.py
|
# -*- coding: utf-8 -*-
# Copyright 2011, Florent Lamiraux, Thomas Moulard, JRL, CNRS/AIST
#
# This file is part of dynamic-graph.
# dynamic-graph is free software: you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation, either version 3 of
# the License, or (at your option) any later version.
#
# dynamic-graph is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Lesser Public License for more details. You should have
# received a copy of the GNU Lesser General Public License along with
# dynamic-graph. If not, see <http://www.gnu.org/licenses/>.
from dynamic_graph.sot.core import FeaturePoint6d
from dynamic_graph import plug
from dynamic_graph.entity import Entity
class FeaturePosition (Entity):
"""
Position of a rigid-body in space as a feature
Input:
a string: name of the feature,
a signal<MatrixHomo>: defining the value of the feature,
a signal<Matrix>: defining the jacobian of the feature with respect
to the robot configuration,
an homogeneous matrix: defining the reference value of the feature.
Members containing a signal:
position: position input signal (MatrixHomo),
reference: reference position input signal (MatrixHomo),
Jq: Jacobian input signal (Matrix),
select: selection flag "RzRyRxTzTyTx" (string).
"""
signalMap = dict()
def __init__(self, name, signalPosition=None, signalJacobian = None,
referencePosition = None):
self._feature = FeaturePoint6d(name)
self.obj = self._feature.obj
self._reference = FeaturePoint6d(name + '.ref')
if referencePosition:
self._reference.signal('position').value = tuple(referencePosition)
if signalPosition:
plug(signalPosition, self._feature.signal('position'))
if signalJacobian:
plug(signalJacobian, self._feature.signal('Jq'))
self._feature.signal('sdes').value = self._reference
self._feature.signal('selec').value = '111111'
self._feature.frame('current')
# Signals stored in members
self.position = self._feature.signal('position')
self.reference = self._reference.signal('position')
self.Jq = self._feature.signal('Jq')
self.error = self._feature.signal('error')
self.select = self._feature.signal('selec')
self.signalMap = {'position':self.position,
'reference':self.reference,
'Jq':self.Jq,
'error':self.error,
'selec':self.select}
@property
def name(self) :
return self._feature.name
def signal (self, name):
"""
Get a signal of the entity from signal name
"""
if name in self.signalMap.keys():
return self.signalMap[name]
else:
raise RunTimeError('No signal with this name')
def signals(self) :
"""
Return the list of signals
"""
return self.signalMap.values()
def commands(self):
"""
Return the list of commands.
"""
return self._feature.commands()
def frame(self, f):
return self._feature.frame(f)
|
Python
| 0.000027
|
@@ -1869,17 +1869,17 @@
name + '
-.
+_
ref')%0A
|
ae9b94f28b3677be2867bfffb9e1dcec8851aaa0
|
Fix typo in example usage for extract_variable.py script.
|
prompt_tuning/scripts/extract_variable.py
|
prompt_tuning/scripts/extract_variable.py
|
# Copyright 2022 Google.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""Extract a variable from a t5x checkpoint and save it as a numpy file.
Example usage:
python -m prompt_tuning.scripts.extract_variable \
--checkpoint_dir=/path/to/t5x/checkpoint_step \
--variable_path=target/encoder/prompt/prompt/prompt \
--restore_type=float32 \
--output_path=/path/to/save/prompt.npy
"""
import os
import re
from typing import Mapping, Any, Sequence
from absl import app
from absl import flags
from absl import logging
import jax.numpy as jnp
import numpy as np
from t5x import checkpoints
from tensorflow.io import gfile
FLAGS = flags.FLAGS
flags.DEFINE_string(
"checkpoint_dir", None, "The path to the t5x checkpoint directory")
flags.DEFINE_string(
"variable_path",
None,
"The path to the variable in the checkpoint tree, using `/` for scoping. "
"Leading `/` or `/target` is optional.")
flags.DEFINE_enum(
"restore_dtype",
"float32",
["float32", "bfloat16"],
"The data type to use when restoring the variable.")
flags.DEFINE_string(
"output_path",
None,
"The path to where the numpy variable should be saved.")
flags.mark_flag_as_required("checkpoint_dir")
flags.mark_flag_as_required("variable_path")
flags.mark_flag_as_required("output_path")
def normalize_variable_path(path: str, sep: str = "/") -> str:
"""Make sure path starts with `target/`."""
# TODO: enable saving all variables within a scope if the path
# ends in the separator.
path = path.strip(sep)
path = re.sub(r"^target/", "", path)
return f"target/{path}"
def extract_nested_key(
nested_key: str, blob: Mapping[str, Any], sep: str = "/") -> Any:
"""Extract a key nested dicts using a scoping separator."""
# TODO: Add nicer error handling that shows where in the nested
# dicts your key lookup fails.
for key in nested_key.split(sep):
blob = blob[key]
return blob
def save_variable(output_path: str, variable: np.ndarray):
"""Save variable at output path using numpy."""
dir_name = os.path.dirname(output_path)
if not gfile.exists(dir_name):
gfile.makedirs(dir_name)
with gfile.GFile(output_path, "wb") as wf:
np.save(wf, variable)
def main(argv: Sequence[str]):
"""Extract a numpy value from a t5x checkpoint."""
if len(argv) > 1:
raise app.UsageError("Too many command-line-arguments.")
restore_dtype = jnp.dtype(FLAGS.restore_dtype)
checkpoint = checkpoints.load_t5x_checkpoint(
FLAGS.checkpoint_dir,
restore_dtype=restore_dtype,
lazy_parameters=True)
logging.info("Reading variables from %s as dtype=%s",
FLAGS.checkpoint_dir,
restore_dtype)
variable_path = normalize_variable_path(FLAGS.variable_path)
logging.info("Extracting variable found at %s", variable_path)
variable = extract_nested_key(variable_path, checkpoint)
variable = variable.get()
logging.info("Read variable with shape %s", variable.shape)
logging.info("Saving variable to %s", FLAGS.output_path)
save_variable(FLAGS.output_path, variable)
if __name__ == "__main__":
app.run(main)
|
Python
| 0.99978
|
@@ -824,16 +824,17 @@
restore_
+d
type=flo
|
3f1d30c2aeff73bb4863f2d0fd0660a264715739
|
Tidy up
|
src/planner.py
|
src/planner.py
|
from collections import deque
class GamePlan(object):
"""
initialise the tournament object with an overall list of players' IDs
input:
a list of players
output:
a list (len = number of rounds) of lists of tuples
with players' names (maybe change to IDs from db) in white, black order
GamePlans with odd number of players have each person sitting out
Created as a tuple with ('_BYE', 'real player')
Template needs to check for '_BYE' in each tuple and
"""
def __init__(self, players):
self.players = list(players)
def berger_robin(self, players):
"""
Input:
array of player names/ids
Returns:
tournament - an array of hashmaps,
each containing matches and bye for the round
taken from
https://en.wikipedia.org/wiki/Round-robin_tournament#Scheduling_algorithm
"""
number_of_players = len(players)
shift = number_of_players / 2
last = players.pop()
pl_deque = deque(players)
tournament = []
for x in xrange(number_of_players - 1):
round_dict = {'matches': [], 'bye': "__NONE"}
if last == '_BYE':
round_dict['bye'] = pl_deque[0]
else:
if x % 2 == 0:
round_dict['matches'].append((last, pl_deque[0]))
else:
round_dict['matches'].append((pl_deque[0], last))
other_games = [(pl_deque[idx], pl_deque[idx + 1])
for idx in xrange(1, (len(pl_deque) - 1), 2)]
round_dict['matches'] += other_games
tournament.append(round_dict)
pl_deque.rotate(shift) # for the next for-loop iteration
return tournament
def generate(self):
if len(self.players) % 2 == 0:
players = self.players
else:
players = self.players
players.append('_BYE')
return self.berger_robin(players)
|
Python
| 0.000001
|
@@ -1106,17 +1106,21 @@
for
-x
+stage
in xran
@@ -1320,17 +1320,21 @@
if
-x
+stage
%25 2 ==
@@ -1858,122 +1858,64 @@
-if len(self.players) %25 2 == 0:%0A players = self.players%0A else:%0A players = self.players
+players = self.players%0A if len(players) %25 2 == 1:
%0A
@@ -1989,10 +1989,8 @@
layers)%0A
-%0A%0A
|
856171e4933b872b1537945d3e6033da4313a1cb
|
enable gzip in django
|
ses_maker/settings.py
|
ses_maker/settings.py
|
"""
Django settings for ses_maker project.
Generated by 'django-admin startproject' using Django 1.10.3.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = ')h0rgw)fug4%m70t73cqyxc80v1fv8lc4%20e^vm-m3qua=k@r'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'canteen',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'ses_maker.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'ses_maker.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'zh-hans'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'class': 'logging.StreamHandler',
},
},
'loggers': {
'django': {
'handlers': ['console'],
'level': 'DEBUG',
},
},
}
# For easier local development
if 'POSTGRESQL_PASSWORD' in os.environ:
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
DATABASES['default'] = {
'ENGINE': 'django.db.backends.postgresql',
'NAME': os.environ['POSTGRESQL_INSTANCE_NAME'],
'USER': os.environ['POSTGRESQL_USERNAME'],
'PASSWORD': os.environ['POSTGRESQL_PASSWORD'],
'HOST': os.environ['POSTGRESQL_PORT_5432_TCP_ADDR'],
'PORT': os.environ['POSTGRESQL_PORT_5432_TCP_PORT']
}
DEBUG = False
|
Python
| 0.000001
|
@@ -1042,24 +1042,69 @@
DLEWARE = %5B%0A
+ 'django.middleware.gzip.GZipMiddleware',%0A
'django.
|
d9ee9620e5c018e9869bd9f05a8b4c40659ab068
|
version 0.1.10
|
docxtpl/__init__.py
|
docxtpl/__init__.py
|
# -*- coding: utf-8 -*-
'''
Created : 2015-03-12
@author: Eric Lapouyade
'''
__version__ = '0.1.9'
from lxml import etree
from docx import Document
from jinja2 import Template
from cgi import escape
import re
import six
class DocxTemplate(object):
""" Class for managing docx files as they were jinja2 templates """
def __init__(self, docx):
self.docx = Document(docx)
def __getattr__(self, name):
return getattr(self.docx, name)
def get_docx(self):
return self.docx
def get_xml(self):
# Be careful : pretty_print MUST be set to False, otherwise patch_xml() won't work properly
return etree.tostring(self.docx._element.body, encoding='unicode', pretty_print=False)
def write_xml(self,filename):
with open(filename,'w') as fh:
fh.write(self.get_xml())
def patch_xml(self,src_xml):
# strip all xml tags inside {% %} and {{ }} that MS word can insert into xml source
src_xml = re.sub(r'(?<={)(<[^>]*>)+(?=[\{%])|(?<=[%\}])(<[^>]*>)+(?=\})','',src_xml,flags=re.DOTALL)
def striptags(m):
return re.sub('</w:t>.*?(<w:t>|<w:t [^>]*>)','',m.group(0),flags=re.DOTALL)
src_xml = re.sub(r'{%(?:(?!%}).)*|{{(?:(?!}}).)*',striptags,src_xml,flags=re.DOTALL)
# manage table cell background color
def cellbg(m):
cell_xml = m.group(1) + m.group(3)
cell_xml = re.sub(r'<w:r[ >](?:(?!<w:r[ >]).)*<w:t></w:t>.*?</w:r>','',cell_xml,flags=re.DOTALL)
cell_xml = re.sub(r'<w:shd[^/]*/>','', cell_xml, count=1)
return re.sub(r'(<w:tcPr[^>]*>)',r'\1<w:shd w:val="clear" w:color="auto" w:fill="{{%s}}"/>' % m.group(2), cell_xml)
src_xml = re.sub(r'(<w:tc[ >](?:(?!<w:tc[ >]).)*){%\s*cellbg\s+([^%]*)\s*%}(.*?</w:tc>)',cellbg,src_xml,flags=re.DOTALL)
for y in ['tr', 'p', 'r']:
# replace into xml code the row/paragraph/run containing {%y xxx %} or {{y xxx}} template tag
# by {% xxx %} or {{ xx }} without any surronding xml tags :
# This is mandatory to have jinja2 generating correct xml code
pat = r'<w:%(y)s[ >](?:(?!<w:%(y)s[ >]).)*({%%|{{)%(y)s ([^}%%]*(?:%%}|}})).*?</w:%(y)s>' % {'y':y}
src_xml = re.sub(pat, r'\1 \2',src_xml,flags=re.DOTALL)
src_xml = src_xml.replace(r"‘","'")
return src_xml
def render_xml(self,src_xml,context,jinja_env=None):
if jinja_env:
template = jinja_env.from_string(src_xml)
else:
template = Template(src_xml)
dst_xml = template.render(context)
dst_xml = dst_xml.replace('{_{','{{').replace('}_}','}}').replace('{_%','{%').replace('%_}','%}')
return dst_xml
def build_xml(self,context,jinja_env=None):
xml = self.get_xml()
xml = self.patch_xml(xml)
xml = self.render_xml(xml, context, jinja_env)
return xml
def map_xml(self,xml):
root = self.docx._element
body = root.body
root.replace(body,etree.fromstring(xml))
def render(self,context,jinja_env=None):
xml = self.build_xml(context,jinja_env)
self.map_xml(xml)
def new_subdoc(self):
return Subdoc(self)
class Subdoc(object):
""" Class for subdocument to insert into master document """
def __init__(self, tpl):
self.tpl = tpl
self.docx = tpl.get_docx()
self.subdocx = Document()
self.subdocx._part = self.docx._part
def __getattr__(self, name):
return getattr(self.subdocx, name)
def _get_xml(self):
xml = re.sub(r'</?w:body[^>]*>','',etree.tostring(self.subdocx._element.body, encoding='unicode', pretty_print=False))
return xml
def __unicode__(self):
return self._get_xml()
def __str__(self):
return self._get_xml()
class RichText(object):
""" class to generate Rich Text when using templates variables
This is much faster than using Subdoc class, but this only for texts INSIDE an existing paragraph.
"""
def __init__(self, text=None, **text_prop):
self.xml = ''
if text:
self.add(text, **text_prop)
def add(self, text, style=None,
color=None,
highlight=None,
size=None,
bold=False,
italic=False,
underline=False,
strike=False):
if not isinstance(text, six.text_type):
text = text.decode('utf-8',errors='ignore')
text = escape(text).replace('\n','<w:br/>')
prop = u''
if style:
prop += u'<w:rStyle w:val="%s"/>' % style
if color:
if color[0] == '#':
color = color[1:]
prop += u'<w:color w:val="%s"/>' % color
if highlight:
if highlight[0] == '#':
highlight = highlight[1:]
prop += u'<w:highlight w:val="%s"/>' % highlight
if size:
prop += u'<w:sz w:val="%s"/>' % size
prop += u'<w:szCs w:val="%s"/>' % size
if bold:
prop += u'<w:b/>'
if italic:
prop += u'<w:i/>'
if underline:
if underline not in ['single','double']:
underline = 'single'
prop += u'<w:u w:val="%s"/>' % underline
if strike:
prop += u'<w:strike/>'
self.xml += u'<w:r>'
if prop:
self.xml += u'<w:rPr>%s</w:rPr>' % prop
self.xml += u'<w:t xml:space="preserve">%s</w:t></w:r>\n' % text
def __unicode__(self):
return self.xml
def __str__(self):
return self.xml
|
Python
| 0.000002
|
@@ -95,9 +95,10 @@
0.1.
-9
+10
'%0A%0Af
|
65783ec0baac5886232a5334905a748750b3c0c2
|
fix NameError
|
sfa/methods/Update.py
|
sfa/methods/Update.py
|
### $Id: update.py 16477 2010-01-05 16:31:37Z thierry $
### $URL: https://svn.planet-lab.org/svn/sfa/trunk/sfa/methods/update.py $
import time
from sfa.util.faults import *
from sfa.util.method import Method
from sfa.util.parameter import Parameter, Mixed
from sfa.trust.credential import Credential
class Update(Method):
"""
Update an object in the registry. Currently, this only updates the
PLC information associated with the record. The SFA fields (name, type,
GID) are fixed.
@param cred credential string specifying rights of the caller
@param record a record dictionary to be updated
@return 1 if successful, faults otherwise
"""
interfaces = ['registry']
accepts = [
Parameter(dict, "Record dictionary to be updated"),
Parameter(str, "Credential string"),
]
returns = Parameter(int, "1 if successful")
def call(self, record_dict, creds):
# validate the cred
valid_creds = self.api.auth.checkCredentials(creds, "update")
# verify permissions
api.auth.verify_object_permission(record.get('hrn', ''))
# log
origin_hrn = Credential(string=valid_creds[0]).get_gid_caller().get_hrn()
self.api.logger.info("interface: %s\tcaller-hrn: %s\ttarget-hrn: %s\tmethod-name: %s"%(self.api.interface, origin_hrn, None, self.name))
manager = self.api.get_interface_manager()
return manager.update(self.api, record_dict)
|
Python
| 0.000003
|
@@ -1117,16 +1117,21 @@
n(record
+_dict
.get('hr
|
5d8629f2f0e024b08411a1a710ac927f0f02bcfd
|
remove the 'name' field from the slice record before updateing. Cant allow user to attempt to change the slice name
|
sfa/methods/update.py
|
sfa/methods/update.py
|
### $Id$
### $URL$
from sfa.util.faults import *
from sfa.util.method import Method
from sfa.util.parameter import Parameter, Mixed
from sfa.trust.auth import Auth
from sfa.util.record import GeniRecord
from sfa.trust.certificate import Keypair, convert_public_key
from sfa.trust.gid import *
from sfa.util.debug import log
class update(Method):
"""
Update an object in the registry. Currently, this only updates the
PLC information associated with the record. The Geni fields (name, type,
GID) are fixed.
@param cred credential string specifying rights of the caller
@param record a record dictionary to be updated
@return 1 if successful, faults otherwise
"""
interfaces = ['registry']
accepts = [
Parameter(str, "Credential string"),
Parameter(dict, "Record dictionary to be updated")
]
returns = Parameter(int, "1 if successful")
def call(self, cred, record_dict):
self.api.auth.check(cred, "update")
record = GeniRecord(dict = record_dict)
type = record.get_type()
self.api.auth.verify_object_permission(record.get_name())
auth_name = self.api.auth.get_authority(record.get_name())
if not auth_name:
auth_name = record.get_name()
table = self.api.auth.get_auth_table(auth_name)
# make sure the record exists
existing_record_list = table.resolve(type, record.get_name())
if not existing_record_list:
raise RecordNotFound(record.get_name())
existing_record = existing_record_list[0]
# Update_membership needs the membership lists in the existing record
# filled in, so it can see if members were added or removed
self.api.fill_record_info(existing_record)
# Use the pointer from the existing record, not the one that the user
# gave us. This prevents the user from inserting a forged pointer
pointer = existing_record.get_pointer()
# update the PLC information that was specified with the record
if (type == "authority"):
self.api.plshell.UpdateSite(self.api.plauth, pointer, record)
elif type == "slice":
hrn=record.get_name()
pl_record=self.api.geni_fields_to_pl_fields(type, hrn, record)
self.api.plshell.UpdateSlice(self.api.plauth, pointer, pl_record)
elif type == "user":
# SMBAKER: UpdatePerson only allows a limited set of fields to be
# updated. Ideally we should have a more generic way of doing
# this. I copied the field names from UpdatePerson.py...
update_fields = {}
all_fields = record
for key in all_fields.keys():
if key in ['first_name', 'last_name', 'title', 'email',
'password', 'phone', 'url', 'bio', 'accepted_aup',
'enabled']:
update_fields[key] = all_fields[key]
self.api.plshell.UpdatePerson(self.api.plauth, pointer, update_fields)
if 'key' in record and record['key']:
# must check this key against the previous one if it exists
persons = self.api.plshell.GetPersons(self.api.plauth, [pointer], ['key_ids'])
person = persons[0]
keys = person['key_ids']
keys = self.api.plshell.GetKeys(self.api.plauth, person['key_ids'])
key_exists = False
if isinstance(record['key'], list):
new_key = record['key'][0]
else:
new_key = record['key']
# Delete all stale keys
for key in keys:
if record['key'] != key['key']:
self.api.plshell.DeleteKey(self.api.plauth, key['key_id'])
else:
key_exists = True
if not key_exists:
self.api.plshell.AddPersonKey(self.api.plauth, pointer, {'key_type': 'ssh', 'key': new_key})
# find the existing geni record
hrn = record['hrn']
auth_name = self.api.auth.get_authority(hrn)
auth_info = self.api.auth.get_auth_info(auth_name)
table = self.api.auth.get_auth_table(auth_name)
person_records = table.resolve('user', hrn)
person_record = person_records[0]
# update the openssl key and gid
pkey = convert_public_key(new_key)
uuid = create_uuid()
gid_object = self.api.auth.hierarchy.create_gid(hrn, uuid, pkey)
gid = gid_object.save_to_string(save_parents=True)
record['gid'] = gid
record.set_gid(gid)
table.update(record)
elif type == "node":
self.api.plshell.UpdateNode(self.api.plauth, pointer, record)
else:
raise UnknownGeniType(type)
# update membership for researchers, pis, owners, operators
self.api.update_membership(existing_record, record)
return 1
|
Python
| 0
|
@@ -2299,32 +2299,106 @@
e, hrn, record)%0A
+ if 'name' in pl_reocrd:%0A pl_record.pop('name')%0A
self
|
6a582b6e2fa852d6a80268c7ddd305d45416c8ef
|
Fix YUM and DNF usage.
|
hotness/repository.py
|
hotness/repository.py
|
import logging
import subprocess
import os
import ConfigParser
from six import StringIO
from hotness.cache import cache
log = logging.getLogger('fedmsg')
thn_section = 'thn'
class ThnConfigParser(ConfigParser.ConfigParser):
def read(self, filename):
try:
text = open(filename).read()
except IOError:
pass
else:
section = "[%s]\n" % thn_section
file = StringIO.StringIO(section + text)
self.readfp(file, filename)
def get_pkg_manager():
release_file = '/etc/os-release'
config = ThnConfigParser()
config.read(release_file)
name = config.get(thn_section, 'ID')
if name == 'fedora':
return 'dnf'
else:
return 'yum'
def get_version(package_name, yumconfig):
nvr_dict = build_nvr_dict(yumconfig)
try:
version = nvr_dict[package_name]
except KeyError:
log.warn("Did not find %r in nvr_dict, forcing refresh" % package_name)
force_cache_refresh(yumconfig)
nvr_dict = build_nvr_dict(yumconfig)
# This might still fail.. but we did the best we could.
version = nvr_dict[package_name]
return version
def force_cache_refresh(yumconfig):
# First, invalidate our in-memory cache of the results
cache.invalidate(hard=True)
# But also ask yum/dnf to kill its on-disk cache
pkg_manager = get_pkg_manager()
cmdline = [os.path.join("/usr/bin", pkg_manager),
"--config", yumconfig,
"clean",
"all"]
log.info("Running %r" % ' '.join(cmdline))
cleanall = subprocess.Popen(cmdline, stdout=subprocess.PIPE)
(stdout, stderr) = cleanall.communicate()
if stderr:
log.warn(stderr)
log.debug("Done with cache cleaning.")
@cache.cache_on_arguments()
def build_nvr_dict(yumconfig):
pkg_manager = get_pkg_manager()
cmdline = []
if pkg_manager == 'yum':
cmdline.append("/usr/bin/repoquery")
else:
cmdline.append("/usr/bin/dnf",
"repoquery")
cmdline.append("--config", yumconfig,
"--quiet",
#"--archlist=src",
"--all",
"--qf",
"%{name}\t%{version}\t%{release}")
log.info("Running %r" % ' '.join(cmdline))
repoquery = subprocess.Popen(cmdline, stdout=subprocess.PIPE)
(stdout, stderr) = repoquery.communicate()
log.debug("Done with repoquery.")
if stderr:
log.warn(stderr)
new_nvr_dict = {}
for line in stdout.split("\n"):
line = line.strip()
if line:
name, version, release = line.split("\t")
new_nvr_dict[name] = (version, release)
log.info("Rebuilt nvr_dict with %r entries" % len(new_nvr_dict))
return new_nvr_dict
|
Python
| 0
|
@@ -429,17 +429,8 @@
e =
-StringIO.
Stri
@@ -1362,44 +1362,8 @@
che%0A
- pkg_manager = get_pkg_manager()%0A
@@ -1398,16 +1398,20 @@
r/bin%22,
+get_
pkg_mana
@@ -1413,16 +1413,18 @@
_manager
+()
),%0A
@@ -1897,32 +1897,28 @@
cmdline
-.append(
+ = %5B
%22/usr/bin/re
@@ -1921,25 +1921,25 @@
n/repoquery%22
-)
+%5D
%0A else:%0A
@@ -1956,36 +1956,51 @@
line
-.append(%22/usr/bin/dnf%22,%0A
+ = %5Bos.path.join(%22/usr/bin%22, pkg_manager),%0A
@@ -2025,17 +2025,17 @@
poquery%22
-)
+%5D
%0A cmd
@@ -2043,15 +2043,16 @@
ine.
-app
+ext
end(
+%5B
%22--c
@@ -2085,24 +2085,25 @@
+
%22--quiet%22,%0A
@@ -2101,16 +2101,17 @@
quiet%22,%0A
+
@@ -2159,16 +2159,17 @@
+
%22--all%22,
@@ -2192,16 +2192,17 @@
+
%22--qf%22,%0A
@@ -2197,16 +2197,17 @@
%22--qf%22,%0A
+
@@ -2250,16 +2250,17 @@
elease%7D%22
+%5D
)%0A%0A l
|
559b2fc17d4a28f2d3faa7bd1bf5316fbf11019e
|
Add magnitude attribute to siem events
|
siemstress/trigger.py
|
siemstress/trigger.py
|
#!/usr/bin/env python
#_MIT License
#_
#_Copyright (c) 2017 Dan Persons (dpersonsdev@gmail.com)
#_
#_Permission is hereby granted, free of charge, to any person obtaining a copy
#_of this software and associated documentation files (the "Software"), to deal
#_in the Software without restriction, including without limitation the rights
#_to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#_copies of the Software, and to permit persons to whom the Software is
#_furnished to do so, subject to the following conditions:
#_
#_The above copyright notice and this permission notice shall be included in all
#_copies or substantial portions of the Software.
#_
#_THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#_IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#_FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#_AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#_LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#_OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
#_SOFTWARE.
import time
from time import strftime
from time import sleep
from time import daylight
from time import timezone
from time import altzone
from random import randrange
from datetime import datetime
import MySQLdb as mdb
import json
import threading
import os
from sys import exit
#import signal
class SiemTrigger:
def __init__(self, server, user, password, database, rule):
"""Initialize trigger object"""
self.server = server
self.user = user
self.password = password
self.database = database
self.rule = rule
self.tzone = None
#signal.signal(signal.SIGTERM, self.sigterm_handler)
#def sigterm_handler(self, signal, frame):
# """Exit cleanly on sigterm"""
# exit(0)
def watch_rule(self):
"""Watch a trigger rule"""
# Set up thread stopping:
#me = threading.currentThread()
# Set time zone:
if daylight:
self.tzone = \
str(int(float(altzone) / 60 // 60)).rjust(2,
'0') + \
str(int(float(altzone) / 60 % 60)).ljust(2, '0')
else:
self.tzone = \
str(int(float(timezone) / 60 // 60)).rjust(2,
'0') + \
str(int(float(timezone) / 60 % 60)).ljust(2, '0')
if not '-' in self.tzone:
self.tzone = '+' + self.tzone
while True:
# Check for stopped thread:
#if me.stopped():
# with open('dead.letter', 'a') as f:
# f.write('Thread stopped.')
# exit(0)
# Check the rule:
self.check_rule()
# Wait until the next interval
sleep(int(self.rule['TimeInt']) * 60)
def check_rule(self):
"""Check a trigger rule"""
if not self.tzone:
# Set time zone:
if daylight:
self.tzone = \
str(int(float(altzone) / 60 // 60)).rjust(2,
'0') + \
str(int(float(altzone) / 60 % 60)).ljust(2, '0')
else:
self.tzone = \
str(int(float(timezone) / 60 // 60)).rjust(2,
'0') + \
str(int(float(timezone) / 60 % 60)).ljust(2, '0')
if not '-' in self.tzone:
self.tzone = '+' + self.tzone
# Query the database:
con = mdb.connect(self.server, self.user, self.password,
self.database)
with con:
cur = con.cursor()
cur.execute(self.rule['SQLQuery'])
rows = cur.fetchall()
cur.close()
con.close()
# Evaluate the results:
if len(rows) > int(self.rule['EventLimit']):
idtags = json.dumps([int(row[0]) for row in rows])
datestamp = datetime.now().strftime('%Y%m%d%H%M%S')
outstatement = 'INSERT INTO ' + \
self.rule['OutTable'] + \
'(DateStamp, TZone, ' + \
'SourceRule, Severity, SourceTable, EventLimit, ' + \
'EventCount, TimeInt, Message, SourceIDs) ' + \
'VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)'
# Send an event to the database:
con = mdb.connect(self.server, self.user,
self.password, self.database)
with con:
cur = con.cursor()
cur.execute(outstatement, (datestamp, self.tzone,
self.rule['RuleName'], self.rule['Severity'],
self.rule['SourceTable'],
self.rule['EventLimit'], len(rows),
self.rule['TimeInt'], self.rule['Message'],
idtags))
cur.close()
con.close()
def start_rule(server, user, password, database, rule, oneshot):
"""Initialize trigger object and start watching"""
# Create table if it doesn't exist:
con = mdb.connect(server, user, password, database)
with con:
cur = con.cursor()
cur.execute('CREATE TABLE IF NOT EXISTS ' + rule['OutTable'] + \
'(Id INT PRIMARY KEY AUTO_INCREMENT, ' + \
'DateStamp TIMESTAMP, ' + \
'TZone NVARCHAR(5), ' + \
'SourceRule NVARCHAR(25), ' + \
'Severity TINYINT UNSIGNED, ' + \
'SourceTable NVARCHAR(25), ' + \
'EventLimit INT, EventCount INT, ' + \
'TimeInt INT, ' + \
'Message NVARCHAR(1000), ' + \
'SourceIDs NVARCHAR(2000))')
cur.close()
con.close()
sentry = SiemTrigger(server, user, password, database, rule)
if oneshot:
sentry.check_rule()
else:
# Before starting, sleep randomly up to rule interval to stagger
# database use:
sleep(randrange(0, int(rule['TimeInt']) * 60))
sentry.watch_rule()
|
Python
| 0.000001
|
@@ -4177,16 +4177,75 @@
%25H%25M%25S')
+%0A magnitute = len(rows) // self.rule%5B'Severity'%5D
%0A%0A
@@ -4482,16 +4482,27 @@
ntCount,
+ Magnitude,
TimeInt
@@ -4593,16 +4593,20 @@
%25s, %25s,
+ %25s,
%25s)'%0A%0A
@@ -5043,16 +5043,27 @@
n(rows),
+ magnitude,
%0A
@@ -5859,24 +5859,112 @@
INT
-, EventCount INT
+ UNSIGNED, ' + %5C%0A 'EventCount INT UNSIGNED, ' + %5C%0A 'Magnitute INT UNSIGNED
, '
@@ -5995,16 +5995,25 @@
eInt INT
+ UNSIGNED
, ' + %5C%0A
|
d307b65f8bf5f9ae8eaaefa071fd2055304a6725
|
Remove custom form from admin.
|
saskatoon/harvest/admin.py
|
saskatoon/harvest/admin.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
from forms import RFPForm, PropertyForm, HarvestForm, HarvestYieldForm, EquipmentForm
from member.models import *
from harvest.models import *
from harvest.forms import *
class PropertyInline(admin.TabularInline):
model = Property
extra = 0
class PersonInline(admin.TabularInline):
model = RequestForParticipation
verbose_name = "Cueilleurs pour cette récolte"
verbose_name_plural = "Cueilleurs pour cette récolte"
form = RFPForm
exclude = ['creation_date', 'confirmation_date']
extra = 3
class OrganizationAdmin(admin.ModelAdmin):
inlines = [
PropertyInline,
]
search_fields = ['name', 'description']
class HarvestYieldInline(admin.TabularInline):
model = HarvestYield
form = HarvestYieldForm
class HarvestAdmin(admin.ModelAdmin):
form = HarvestForm
inlines = (PersonInline, HarvestYieldInline)
class RequestForParticipationAdmin(admin.ModelAdmin):
form = RFPForm
class EquipmentAdmin(admin.ModelAdmin):
form = EquipmentForm
class PropertyImageInline(admin.TabularInline):
model = PropertyImage
extra = 3
class PropertyAdmin(admin.ModelAdmin):
model = Property
inlines = [ PropertyImageInline, ]
form = PropertyForm
admin.site.register(Property, PropertyAdmin)
admin.site.register(Harvest, HarvestAdmin)
admin.site.register(RequestForParticipation, RequestForParticipationAdmin)
admin.site.register(TreeType)
admin.site.register(Equipment, EquipmentAdmin)
admin.site.register(EquipmentType)
admin.site.register(HarvestYield)
admin.site.register(Comment)
admin.site.register(Actor)
admin.site.register(Language)
admin.site.register(Person)
admin.site.register(Organization)
admin.site.register(Neighborhood)
admin.site.register(City)
admin.site.register(State)
admin.site.register(Country)
admin.site.register(PropertyImage)
|
Python
| 0
|
@@ -874,24 +874,25 @@
Admin):%0A
+#
form = Harve
|
e28a41e5996651aefdf7966ead73310a5a761040
|
fix flake8 violation
|
simphony/cuds/bond.py
|
simphony/cuds/bond.py
|
class Bond(object):
"""
Bond entity
"""
def __init__(self, id, particles, data=None):
self.id = id
self.particles = particles
if data is None:
self.data = {}
else:
self.data = data
def __eq__(self, other):
if isinstance(other, self.__class__):
return (self.id == other.id and
self.particles == other.particles and
self.data == self.data)
else:
return False
def __ne__(self, other):
return not self == other
|
Python
| 0
|
@@ -247,17 +247,16 @@
= data%0A%0A
-%0A
def
|
7f34b774d47019a4cd062622396576f4410defed
|
Fix tempest config
|
fuel_test/helpers.py
|
fuel_test/helpers.py
|
import logging
import re
from root import root
from settings import controllers
from keystoneclient.v2_0 import client
#from glanceclient import Client
def execute(remote, command):
chan, stdin, stderr, stdout = execute_async(remote, command)
result = {
'stdout': [],
'stderr': [],
'exit_code': 0
}
for line in stdout:
result['stdout'].append(line)
print line
for line in stderr:
result['stderr'].append(line)
print line
result['exit_code'] = chan.recv_exit_status()
chan.close()
return result
def udp_ping(remote, host, port):
result = execute(remote,'nmap -PU -sU -p%s %s' % (port, host))
for line in result['stdout']:
if line.find('udp open') != -1:
return True
return False
def tcp_ping(remote, host, port):
result = execute(remote,'nmap -PU -p%s %s' % (port, host))
for line in result['stdout']:
if line.find('tcp open') != -1:
return True
return False
def load(path):
with open(path) as f:
return f.read()
def execute_async(remote, command):
logging.debug("Executing command: '%s'" % command.rstrip())
chan = remote._ssh.get_transport().open_session()
stdin = chan.makefile('wb')
stdout = chan.makefile('rb')
stderr = chan.makefile_stderr('rb')
cmd = "%s\n" % command
if remote.sudo_mode:
cmd = 'sudo -S bash -c "%s"' % cmd.replace('"', '\\"')
chan.exec_command(cmd)
if stdout.channel.closed is False:
stdin.write('%s\n' % remote.password)
stdin.flush()
return chan, stdin, stderr, stdout
def extract_virtual_ips(ipaout):
pattern = '(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}).*(eth\d{1,}):keepalived'
return dict((v,k) for k, v in re.findall(pattern, ipaout))
def tempest_build_config(host, image_ref, image_ref_alt):
sample = load(root('fuel', 'fuel_test', 'config', 'tempest.conf.sample'))
config = sample % {
'HOST': host,
'USERNAME': 'tempest1',
'PASSWORD': 'secret',
'TENANT_NAME': 'openstack',
'ALT_USERNAME': 'tempest2',
'ALT_PASSWORD': 'secret',
'ALT_TENANT_NAME': 'openstack',
'IMAGE_ID': image_ref,
'IMAGE_ID_ALT': image_ref_alt,
'ADMIN_USERNAME': 'tempest1',
'ADMIN_PASSWORD': 'secret',
'ADMIN_TENANT_NAME': 'openstack',
}
return config
def tempest_write_config(host, image_ref, image_ref_alt):
with open(root('tempest.conf'), 'w') as f:
f.write(tempest_build_config(host, image_ref, image_ref_alt))
def get_auth_url(auth_host):
auth_url = 'http://%s:5000/v2.0/' % auth_host
print auth_url
return auth_url
def credentials(auth_host, tenant_id):
credentials = '--os-username admin --os-password nova --os-auth-url "%s" --os-tenant-id %s' % (get_auth_url(auth_host), tenant_id)
print credentials
return credentials
def glance_command(auth_host, tenant_id):
return 'glance ' + credentials(auth_host, tenant_id) + ' '
def tempest_add_images(remote, auth_host, tenant_id):
execute(remote, 'wget https://launchpad.net/cirros/trunk/0.3.0/+download/cirros-0.3.0-x86_64-disk.img')
result = execute(remote, glance_command(auth_host, tenant_id) +' add name=cirros_0.3.0 is_public=true container_format=bare disk_format=qcow2 < cirros-0.3.0-x86_64-disk.img')
pattern = 'Added new image with ID: (\S*)'
image_ref = re.findall(pattern, string='\n'.join(result['stdout']))
print image_ref
execute(remote, glance_command(auth_host, tenant_id) + ' add name=cirros_0.3.0 is_public=true container_format=bare disk_format=qcow2 < cirros-0.3.0-x86_64-disk.img')
image_ref_any = re.findall(pattern, string='\n'.join(result['stdout']))
print image_ref_any
return image_ref, image_ref_any
def tempest_share_glance_images(remote, network):
execute(remote, 'echo "/var/lib/glance/images %s(rw,no_root_squash)" >> /etc/exports' % network)
execute(remote, '/etc/init.d/nfs restart')
def tempest_mount_glance_images(remote):
execute(remote, '/etc/init.d/nfslock restart')
execute(remote, 'mount %s:/var/lib/glance/images /var/lib/glance/images -o vers=3' % controllers[0])
|
Python
| 0.002982
|
@@ -2062,33 +2062,31 @@
ANT_NAME': '
-openstack
+tenant1
',%0A '
@@ -2170,33 +2170,31 @@
ANT_NAME': '
-openstack
+tenant2
',%0A '
@@ -3484,32 +3484,24 @@
'%5D))
+%5B0%5D
%0A
-print image_ref%0A
+result =
exe
@@ -3743,32 +3743,11 @@
'%5D))
-%0A print image_ref_any
+%5B0%5D
%0A
|
2e2f6d2a6480a4ca43c76e6559cfe6aadc434a8b
|
change to dumps
|
functions/webhook.py
|
functions/webhook.py
|
#!/usr/bin/python
# Written by: Andrew Jackson
# This is used to send a JSON payload to a webhook.
import json
import logging
import os
import time
import uuid
import boto3
import requests
import decimal
#def default(obj):
# if isinstance(obj, decimal.Decimal):
# return int(obj)
# return o.__dict__
def handler(event, context):
print "event.dump = " + json.dumps(event)
data = json.loads(event)
url = data['webhookurl']
payload = data['payload']
headers = {'content-type': 'application/json'}
r = requests.post(url, data=json.dumps(payload))
print(r.text)
|
Python
| 0.000004
|
@@ -399,20 +399,20 @@
= json.
-load
+dump
s(event)
@@ -576,24 +576,25 @@
yload))%0A
+#
print(r.text
|
f83369a263fb606a6f92b62a45d72e8faf0f1770
|
Add RunGM and RunBench steps for Android Review URL: https://codereview.appspot.com/5987049
|
master/skia_master_scripts/android_factory.py
|
master/skia_master_scripts/android_factory.py
|
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utility class to build the Skia master BuildFactory's for Android buildbots.
Overrides SkiaFactory with any Android-specific steps."""
from skia_master_scripts import factory as skia_factory
class AndroidFactory(skia_factory.SkiaFactory):
"""Overrides for Android builds."""
def Build(self, clobber=None):
"""Build and return the complete BuildFactory.
clobber: boolean indicating whether we should clean before building
"""
if clobber is None:
clobber = self._default_clobber
if clobber:
self._skia_cmd_obj.AddClean()
self._skia_cmd_obj.AddRunCommand(
command='../android/bin/android_make all -d nexus_s %s' % (
self._make_flags),
description='BuildAll')
self.PushBinaryToDeviceAndRun(binary_name='tests', description='RunTests')
return self._factory
def PushBinaryToDeviceAndRun(self, binary_name, description, timeout=None):
"""Adds a build step: push a binary file to the USB-connected Android
device and run it.
binary_name: which binary to run on the device
description: text description (e.g., 'RunTests')
timeout: timeout in seconds, or None to use the default timeout
The shell command (running on the buildbot slave) will exit with a nonzero
return code if and only if the command running on the Android device
exits with a nonzero return code... so a nonzero return code from the
command running on the Android device will turn the buildbot red.
"""
path_to_adb = self.TargetPathJoin('..', 'android', 'bin', 'linux', 'adb')
command_list = [
'%s root' % path_to_adb,
'%s remount' % path_to_adb,
'%s push out/%s/%s /system/bin/skia_%s' % (
path_to_adb, self._configuration, binary_name, binary_name),
'%s logcat -c' % path_to_adb,
'STDOUT=$(%s shell "skia_%s && echo ADB_SHELL_SUCCESS")' % (
path_to_adb, binary_name),
'echo $STDOUT',
'%s logcat -d' % path_to_adb,
'echo $STDOUT | grep ADB_SHELL_SUCCESS',
]
self._skia_cmd_obj.AddRunCommandList(
command_list=command_list, description=description)
|
Python
| 0
|
@@ -972,16 +972,268 @@
nTests')
+%0A self.PushBinaryToDeviceAndRun(binary_name='gm',%0A arguments='--nopdf --noreplay',%0A description='RunGM')%0A self.PushBinaryToDeviceAndRun(binary_name='bench', description='RunBench')
%0A%0A re
@@ -1301,16 +1301,61 @@
ry_name,
+ arguments='',%0A
descrip
@@ -1358,16 +1358,21 @@
cription
+=None
, timeou
@@ -1525,24 +1525,98 @@
the device%0A
+ arguments: additional arguments to pass to the binary when running it%0A
descript
@@ -2025,24 +2025,91 @@
ed.%0A %22%22%22%0A
+ if not description:%0A description = 'Run %25s' %25 binary_name%0A
path_to_
@@ -2463,16 +2463,19 @@
skia_%25s
+%25s
&& echo
@@ -2527,32 +2527,43 @@
adb, binary_name
+, arguments
),%0A 'echo
|
984422fe3fb0b34a17e42910a9c1b98afa572452
|
Revert r9607 -- it caused a BuildbotSelfTest failure
|
master/skia_master_scripts/android_factory.py
|
master/skia_master_scripts/android_factory.py
|
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utility class to build the Skia master BuildFactory's for Android buildbots.
Overrides SkiaFactory with any Android-specific steps."""
from buildbot.process.properties import WithProperties
from skia_master_scripts import factory as skia_factory
class AndroidFactory(skia_factory.SkiaFactory):
"""Overrides for Android builds."""
def __init__(self, device, **kwargs):
""" Instantiates an AndroidFactory with properties and build steps specific
to Android devices.
device: string indicating which Android device type we are targeting
"""
skia_factory.SkiaFactory.__init__(self, bench_pictures_cfg=device,
deps_target_os='android',
flavor='android',
build_targets=['all'],
**kwargs)
self._device = device
self._common_args += ['--device', self._device,
'--serial', WithProperties('%(serial:-None)s'),
'--has_root', WithProperties('%(has_root:-True)s'),
'--android_sdk_root',
WithProperties('%(android_sdk_root)s')]
self._default_clobber = True
def PreRender(self):
""" Before chaining to SkiaFactory.PreRender(), build tools (skdiff,
skimage) that we might need on the buildslave host machine. """
# We bypass the Android-flavored compile in order to build tools for
# the host.
self.AddSlaveScript(script='compile.py',
description='BuildHostTools',
is_rebaseline_step=True,
args=['--target', 'tools',
'--gyp_defines',
' '.join('%s=%s' % (k, v)
for k, v in self._gyp_defines.items())])
skia_factory.SkiaFactory.PreRender(self)
|
Python
| 0
|
@@ -1417,25 +1417,26 @@
%0A%0A def
-PreRender
+CompareGMs
(self):%0A
@@ -1447,135 +1447,122 @@
%22%22%22
-Before chaining to SkiaFactory.PreRender(), build tools (skdiff,%0A skimage) that we might ne
+Run the %22skdiff%22 tool to compare the %22actual%22 GM images we just%0A generat
ed
+t
o
-n
the b
-uildslave host machine
+aselines in _gm_image_subdir
. %22%22
@@ -1572,16 +1572,21 @@
# We
+have
bypass t
@@ -1631,21 +1631,22 @@
o build
-tools
+SkDiff
for%0A
@@ -1749,17 +1749,14 @@
uild
-HostTools
+SkDiff
',%0A
@@ -2070,17 +2070,18 @@
ory.
-PreRender
+CompareGMs
(sel
|
45ceb7d945208c7c25f21fe6e869155e60c66edf
|
Remove unused import.
|
payload/usr/local/sal/checkin_modules/munki_checkin.py
|
payload/usr/local/sal/checkin_modules/munki_checkin.py
|
#!/usr/local/sal/Python.framework/Versions/3.8/bin/python3
import datetime
import os
import pathlib
import plistlib
import sys
import sal
sys.path.insert(0, '/usr/local/munki')
from munkilib import munkicommon
__version__ = '1.1.0'
def main():
# If we haven't successfully submitted to Sal, pull the existing
# munki section rather than start from scratch, as we want to
# keep any install/removal history that may be there.
munki_submission = sal.get_checkin_results().get('munki', {})
munki_report = get_managed_install_report()
extras = {}
extras['munki_version'] = munki_report['MachineInfo'].get('munki_version')
extras['manifest'] = munki_report.get('ManifestName')
extras['runtype'] = munki_report.get('RunType', 'custom')
munki_submission['extra_data'] = extras
munki_submission['facts'] = {
'checkin_module_version': __version__,
'RunType': munki_report['RunType'],
'StartTime': munki_report['StartTime'],
'EndTime': munki_report['EndTime'],
}
if munki_report.get('Conditions'):
for condition, value in munki_report['Conditions'].items():
# Join lists of strings into a comma-delimited string, as
# the server wants just text.
if hasattr(value, 'append'):
value = ', '.join(value)
munki_submission['facts'][condition] = value
munki_submission['messages'] = []
for key in ('Errors', 'Warnings'):
for msg in munki_report[key]:
# We need to drop the final 'S' to match Sal's message types.
munki_submission['messages'].append({'message_type': key.upper()[:-1], 'text': msg})
now = datetime.datetime.now().astimezone(datetime.timezone.utc).isoformat()
# Process managed items and update histories.
munki_submission['managed_items'] = {}
optional_manifest = get_optional_manifest()
for item in munki_report.get('ManagedInstalls', []):
submission_item = {'date_managed': now}
submission_item['status'] = 'PRESENT' if item['installed'] else 'PENDING'
version_key = 'version_to_install' if not item['installed'] else 'installed_version'
version = item[version_key]
name = f'{item["name"]} {version}'
submission_item['name'] = name
# Pop off these two since we already used them.
item.pop('name')
item.pop('installed')
item['type'] = 'ManagedInstalls'
self_serve = 'True' if name in optional_manifest.get('managed_installs', []) else 'False'
item['self_serve'] = self_serve
submission_item['data'] = item
munki_submission['managed_items'][name] = submission_item
for item in munki_report.get('managed_uninstalls_list', []):
submission_item = {'date_managed': now, 'status': 'ABSENT'}
self_serve = 'True' if name in optional_manifest.get('managed_uninstalls', []) else 'False'
submission_item['data'] = {'self_serve': self_serve, 'type': 'ManagedUninstalls'}
munki_submission['managed_items'][item] = submission_item
# Process InstallResults and RemovalResults into update history
for report_key, result_type in (('InstallResults', 'PRESENT'), ('RemovalResults', 'ABSENT')):
for item in munki_report.get(report_key, []):
# Skip Apple software update items.
if item.get('applesus'):
continue
history = {}
# history = {'update_type': 'apple' if item.get('applesus') else 'third_party'}
history['status'] = 'ERROR' if item.get('status') != 0 else result_type
# This UTC datetime gets converted to a naive datetime by
# plistlib. Fortunately, we can just tell it that it's UTC.
history['date_managed'] = item['time'].replace(
tzinfo=datetime.timezone.utc).isoformat()
history['data'] = {'version': item.get('version', '0')}
# Add over top of any pending items we may have already built.
if item['name'] in munki_submission['managed_items']:
munki_submission['managed_items'][item['name']].update(history)
else:
munki_submission['managed_items'][item['name']] = history
sal.set_checkin_results('Munki', munki_submission)
def get_managed_install_report():
"""Return Munki ManagedInstallsReport.plist as a plist dict.
Returns:
ManagedInstalls report for last Munki run as a plist
dict, or an empty dict.
"""
# Checks munki preferences to see where the install directory is set to.
managed_install_dir = munkicommon.pref('ManagedInstallDir')
# set the paths based on munki's configuration.
managed_install_report = pathlib.Path(managed_install_dir) / 'ManagedInstallReport.plist'
try:
munki_report = plistlib.loads(managed_install_report.read_bytes())
except (IOError, plistlib.InvalidFileException):
munki_report = {}
if 'MachineInfo' not in munki_report:
munki_report['MachineInfo'] = {}
return sal.unobjctify(munki_report)
def get_optional_manifest():
"""Return Munki SelfServeManifest as a plist dict.
Returns:
SelfServeManifest for last Munki run as a plist
dict, or an empty dict.
"""
# Checks munki preferences to see where the install directory is set to.
managed_install_dir = munkicommon.pref('ManagedInstallDir')
# set the paths based on munki's configuration.
optional_manifest_path = pathlib.Path(managed_install_dir) / 'manifests/SelfServeManifest'
try:
optional_manifest = plistlib.loads(optional_manifest_path.read_bytes())
except (IOError, plistlib.InvalidFileException):
optional_manifest = {}
return optional_manifest
if __name__ == "__main__":
main()
|
Python
| 0
|
@@ -74,18 +74,8 @@
ime%0A
-import os%0A
impo
|
4b740ddb11fb5c4b2b29bc6eef0a5569349272f8
|
make random_metadata compliant
|
datalake_common/tests/conftest.py
|
datalake_common/tests/conftest.py
|
import pytest
import random
import string
from datetime import datetime, timedelta
@pytest.fixture
def basic_metadata():
return {
'version': 0,
'start': 1426809600000,
'end': 1426895999999,
'where': 'nebraska',
'what': 'apache',
'hash': '12345'
}
def random_word(length):
return ''.join(random.choice(string.lowercase) for i in xrange(length))
def random_interval():
now = datetime.now()
start = now - timedelta(days=random.randint(0, 365*3))
end = start - timedelta(days=random.randint(1, 10))
return start.isoformat(), end.isoformat()
@pytest.fixture
def random_metadata():
start, end = random_interval()
return {
'version': 0,
'start': start,
'end': end,
'where': random_word(10),
'what': random_word(10),
}
|
Python
| 0.000004
|
@@ -402,16 +402,110 @@
ngth))%0A%0A
+def random_hex(length):%0A return ('%250' + str(length) + 'x') %25 random.randrange(16**length)%0A%0A
def rand
@@ -706,16 +706,155 @@
rmat()%0A%0A
+def random_work_id():%0A if random.randint(0, 1):%0A return None%0A return '%7B%7D-%7B%7D'.format(random_word(5), random.randint(0,2**15))%0A%0A
@pytest.
@@ -998,16 +998,53 @@
': end,%0A
+ 'work_id': random_work_id(),%0A
@@ -1098,18 +1098,80 @@
m_word(10),%0A
+ 'id': random_hex(40),%0A 'hash': random_hex(40),%0A
%7D%0A
|
4507c0cb56ed72253d52c92f621ec33600e5e36b
|
Add version number for future use
|
sla_bot.py
|
sla_bot.py
|
import asyncio
import datetime as dt
import math
import os
import traceback
import discord
from discord.ext import commands
from SLA_bot.config import Config as cf
from SLA_bot.schedule import Schedule
curr_dir = os.path.dirname(__file__)
configs = [
os.path.join(curr_dir, 'docs', 'default_config.ini'),
os.path.join(curr_dir, 'config.ini')
]
cf.cal_path = os.path.join(curr_dir, cf.cal_path)
cf.chan_path = os.path.join(curr_dir, cf.chan_path)
cf.load_config(configs)
bot = commands.Bot(command_prefix='!', description='test')
event_schedule = Schedule(bot)
bot.add_cog(event_schedule)
async def update_schedule():
while not bot.is_closed:
await event_schedule.update()
await asyncio.sleep(cf.refresh_time)
bot.loop.create_task(update_schedule())
@bot.event
async def on_ready():
print('Logged in as: {}'.format(bot.user.name))
print('------')
#bot.loop.create_task(make_alert())
@bot.command(pass_context=True, no_pm=True)
async def announce(ctx, filters='1,2,3,4,5,6,7,8,9,10'):
perm = ctx.message.channel.permissions_for(ctx.message.author)
id = ctx.message.channel.id
if perm.manage_channels:
cf.set_chan(id, filters)
def alert_text(event, ref_time):
time_left = math.ceil((event.start - ref_time).total_seconds() / 60)
return '[{}min] - {}'.format(time_left, event.duration(cf.tz))
async def alert(id, event, first_resend, resend_time):
channel = bot.get_channel(id)
now = dt.datetime.now(dt.timezone.utc)
resend = first_resend
message = None
while now < event.start:
now = dt.datetime.now(dt.timezone.utc)
alert_msg = alert_text(event, now)
if now >= resend:
try:
await bot.delete_message(message)
resend = resend + resend_time
message = None
except discord.errors.HTTPException:
continue
if message == None:
try:
message = await bot.send_message(channel, alert_msg)
except (discord.errors.HTTPException, discord.errors.Forbidden):
continue
except (discord.errors.NotFound, discord.errors.InvalidArgument):
break
else:
try:
message = await bot.edit_message(message, alert_msg)
#not found should break
except discord.errors.HTTPException:
continue
await asyncio.sleep(60)
if message != None:
try:
alert_msg = '[Started] - {}'.format(event.duration(cf.tz))
message = await bot.edit_message(message, alert_msg)
except discord.errors.HTTPException:
pass
async def make_alert():
await bot.wait_until_ready()
last_alert = dt.datetime.now(dt.timezone.utc)
while not bot.is_closed:
now = dt.datetime.now(dt.timezone.utc)
alert_time = now + cf.alert_before
alertable = event_schedule.from_range(last_alert, alert_time)
for event in alertable:
first_resend = event.start
while first_resend > now:
first_resend -= cf.alert_every
first_resend += cf.alert_every
for chan in cf.channels:
bot.loop.create_task(alert(chan[0], event, first_resend,
cf.alert_every))
if len(alertable) > 0:
last_alert = alert_time
await asyncio.sleep(60)
bot.run(cf.token)
|
Python
| 0
|
@@ -121,17 +121,16 @@
mmands%0A%0A
-%0A
from SLA
@@ -200,16 +200,32 @@
hedule%0A%0A
+VERSION = 0.10%0A%0A
curr_dir
|
5d82c2d9f6d2874ae4621edb4dc1e6455652666b
|
Remove Dropout and unnecessary imports
|
examples/imdb_fasttext.py
|
examples/imdb_fasttext.py
|
'''This example demonstrates the use of fasttext for text classification
Based on Joulin et al's paper:
Bags of Tricks for Efficient Text Classification
https://arxiv.org/abs/1607.01759
Can achieve accuracy around 88% after 5 epochs in 70s.
'''
from __future__ import print_function
import numpy as np
np.random.seed(1337) # for reproducibility
from keras.preprocessing import sequence
from keras.models import Sequential
from keras.layers import Dense, Activation, Flatten
from keras.layers import Embedding
from keras.layers import AveragePooling1D
from keras.datasets import imdb
from keras import backend as K
# set parameters:
max_features = 20000
maxlen = 400
batch_size = 32
embedding_dims = 20
nb_epoch = 5
print('Loading data...')
(X_train, y_train), (X_test, y_test) = imdb.load_data(nb_words=max_features)
print(len(X_train), 'train sequences')
print(len(X_test), 'test sequences')
print('Pad sequences (samples x time)')
X_train = sequence.pad_sequences(X_train, maxlen=maxlen)
X_test = sequence.pad_sequences(X_test, maxlen=maxlen)
print('X_train shape:', X_train.shape)
print('X_test shape:', X_test.shape)
print('Build model...')
model = Sequential()
# we start off with an efficient embedding layer which maps
# our vocab indices into embedding_dims dimensions
model.add(Embedding(max_features,
embedding_dims,
input_length=maxlen))
# we add a AveragePooling1D, which will average the embeddings
# of all words in the document
model.add(AveragePooling1D(pool_length=model.output_shape[1]))
# We flatten the output of the conv layer,
# so that we can add a dense layer:
model.add(Flatten())
# We project onto a single unit output layer, and squash it with a sigmoid:
model.add(Dense(1, activation = 'sigmoid'))
model.compile(loss='binary_crossentropy',
optimizer='adam',
metrics=['accuracy'])
model.fit(X_train, y_train,
batch_size=batch_size,
nb_epoch=nb_epoch,
validation_data=(X_test, y_test))
|
Python
| 0
|
@@ -1599,45 +1599,8 @@
ayer
-,%0A# so that we can add a dense layer:
%0Amod
|
20ef3aed661d5b77bedf48df9ed6917e24319c01
|
Fix typo
|
factory/glideFactoryLogParser.py
|
factory/glideFactoryLogParser.py
|
#
# Description:
# This module implements classes to track
# changes in glidein status logs
#
# Author:
# Igor Sfiligoi (Feb 2nd 2007)
#
import os, os.path
import condorLogParser
# for now it is just a constructor wrapper
# Further on it will need to implement glidein exit code checks
class dirSummaryTimings(condorLogParser.dirSummary):
def __init__(self,dirname,client_name,inactive_files=None):
condorLogParser.dirSummaryTimings.__init__(self,dirname,log_prefix="condor_activity_",log_suffix="_"+client_name+".log",inactive_files=inactive_files)
|
Python
| 0.999999
|
@@ -338,16 +338,23 @@
rSummary
+Timings
):%0A d
|
12f9217fb1dfd76f57c33340c79769f4bc0f20ee
|
version update + adding changelog
|
packages/python-packages/doc-warden/warden/version.py
|
packages/python-packages/doc-warden/warden/version.py
|
VERSION = '0.2.5'
|
Python
| 0
|
@@ -12,7 +12,7 @@
0.2.
-5
+6
'%0A
|
ae38884444be3b3e0f98ca406352fe92037423f1
|
making the products model abstract
|
scofield/product/models.py
|
scofield/product/models.py
|
from django.db import models
from scofield.category.models import *
from scofield.manufacturer.models import Manufacturer
class Product(models.Model):
"""
Base class for products
"""
name = models.CharField(max_length=200, null=False, blank=False, help_text='Product Name')
slug = models.SlugField(max_length=210, null=False, blank=False, help_text='Used for URLs, auto-generated from name if blank')
sku = models.CharField(max_length=100, null=True, blank=True)
category = models.ManyToManyField(Category, blank=False, null=False)
manufacturer = models.ForeignKey(Manufacturer, blank=True, null=True)
msrp = models.DecimalField(max_digits=14, decimal_places=2)
class Meta:
abstract = True
|
Python
| 0.999999
|
@@ -21,16 +21,46 @@
t models
+%0Afrom datetime import datetime
%0A%0Afrom s
@@ -161,16 +161,21 @@
Product
+Model
(models.
@@ -228,16 +228,260 @@
%22%22%22%0A%0A
+ #timestamps%0A date_added = models.DateTimeField(default=datetime.now)%0A date_updated = models.DateTimeField(default=datetime.now)%0A%0A class Meta:%0A abstract = True%0A%0Aclass Product(ProductModel):%0A %22%22%22%0A Product Model%0A %22%22%22%0A%0A
name
@@ -980,45 +980,192 @@
2)%0A%0A
-
+%0A
class
-Meta
+Price(models.Model)
:%0A
+%0A
- abstract = True
+%22%22%22%0A Base class for product pricing%0A %22%22%22%0A%0A product = models.ForeignKey(Product)%0A price = models.DecimalField(max_digits=14, decimal_places=2)
%0A%0A
|
5f940dc91e2da529ab13b1931db5f52188755a5d
|
return correct dimension
|
ichwrapper/cluster.py
|
ichwrapper/cluster.py
|
import os
import yaml
from bcbio import utils
from bcbio.install import _get_data_dir
from bcbio.distributed import clargs
from bcbio.provenance import system
import bcbio.distributed.resources as res
from bcbio.distributed.ipython import create
# from bcbio import log
import log
from cluster_helper import cluster as ipc
config_default = {'name': 'std', 'mem': 8, 'cores': 1}
def get_cluster_view(args):
if not os.path.exists("ipython"):
utils.safe_makedir("ipython")
utils.safe_makedir("checkpoint")
return ipc.cluster_view(args['scheduler'], args['queue'],
args['num_jobs'], args['cores_per_job'],
start_wait=args['timeout'],
profile="ipython",
extra_params={"resources": args['resources'],
"mem": args['mem'],
"tag": "ichwrapper",
"run_local": args['run_local']})
def wait_until_complete(jobs):
return [j.get() for j in jobs]
def is_done(step):
if os.path.exists(os.path.join("checkpoint", step)):
return True
return False
def flag_done(step):
with open(os.path.join("checkpoint", step), "w") as handle:
handle.write("done")
def _calculate_resources(data, args, resources):
parallel = clargs.to_parallel(args)
config = data[0][0]['config']
config['resources'].update({resources['name']: {'memory': "%sg" % resources['mem'], 'cores': resources['cores']}})
parallel.update({'progs': [resources['name']]})
# parallel = log.create_base_logger(config, parallel)
# log.setup_local_logging(config, parallel)
log.setup_log(config, parallel)
dirs = {'work': os.path.abspath(os.getcwd())}
system.write_info(dirs, parallel, config)
sysinfo = system.machine_info()[0]
log.logger.info("Number of items %s" % len(data))
parallel = res.calculate(parallel, data, sysinfo, config)
log.logger.info(parallel)
# print parallel
# raise
return parallel
def _check_items(data):
"""
First check items are as expected
"""
msg = ("\nYou can use ichwrapper.cluster.update_samples to add the config structure."
"\nExample of list of samples to parallelize:"
"\n[sample1, sample2, sample3]"
"\nsample1=[{..., 'config':{'algorithm', ...}}]")
assert isinstance(data, list), "data needs to be a list"
assert isinstance(data[0], list), "each item inside data needs to be like this [{}]"
assert data[0][0]['config'], "each item inside data needs to have a config key with the info from galaxy/bcbio_system.yaml." + msg
assert data[0][0]['config']['algorithm'], "config key inside item dict needs to have algorithm key." + msg
def send_job(fn, data, args, resources=None):
"""decide if send jobs with ipython or run locally"""
utils.safe_makedir("checkpoint")
_check_items(data)
res = []
dirs = {'work': os.path.abspath(os.getcwd())}
config = data[0][0]['config']
if not resources:
resources = config_default
step = resources['name']
if 'mem' not in resources or 'cores' not in resources:
raise ValueError("resources without mem or cores keys: %s" % resources)
par = _calculate_resources(data, args, resources)
# args.memory_per_job = resources['mem']
# args.cores_per_job = resources['cores']
# log.setup_log(args)
log.logger.debug("doing %s" % step)
if par['type'] == "ipython":
if not is_done(step):
with create(par, dirs, config) as view:
for sample in data:
res.append(view.apply_async(fn, sample[0], args))
res = wait_until_complete(res)
flag_done(step)
return res
for sample in data:
res.append(fn(sample[0], args))
return res
def update_samples(data, resources, args):
"""
Update algorithm dict with new cores set
"""
if args.galaxy:
system_config = args.galaxy
else:
system_config = os.path.join(_get_data_dir(), "galaxy", "bcbio_system.yaml")
config = yaml.load(open(system_config))
config['algorithm'] = {}
new_data = []
for sample in data:
sample['config'] = config
sample['config']['algorithm'] = resources
new_data.append([sample])
return new_data
|
Python
| 0.999999
|
@@ -1076,16 +1076,17 @@
urn
+%5B
%5Bj.get()
for
@@ -1081,16 +1081,17 @@
%5Bj.get()
+%5D
for j i
@@ -1097,16 +1097,17 @@
in jobs%5D
+
%0A%0A%0Adef i
@@ -3565,20 +3565,12 @@
hon%22
-:%0A if
+ and
not
@@ -3585,20 +3585,16 @@
(step):%0A
-
@@ -3645,20 +3645,16 @@
-
-
for samp
@@ -3661,28 +3661,24 @@
le in data:%0A
-
@@ -3731,20 +3731,16 @@
args))%0A
-
@@ -3782,20 +3782,16 @@
-
flag_don
@@ -3798,20 +3798,16 @@
e(step)%0A
-
@@ -3860,16 +3860,17 @@
.append(
+%5B
fn(sampl
@@ -3876,24 +3876,25 @@
le%5B0%5D, args)
+%5D
)%0A return
|
c47a51db4f7ccc514aa687a1859ed592574d1a58
|
Change API Endpoint to BzAPI Compatibility Layer
|
bugzilla/agents.py
|
bugzilla/agents.py
|
from bugzilla.models import *
from bugzilla.utils import *
class InvalidAPI_ROOT(Exception):
def __str__(self):
return "Invalid API url specified. " + \
"Please set BZ_API_ROOT in your environment " + \
"or pass it to the agent constructor"
class BugzillaAgent(object):
def __init__(self, api_root=None, username=None, password=None):
if not api_root:
api_root = os.environ.get('BZ_API_ROOT')
if not api_root:
raise InvalidAPI_ROOT
self.API_ROOT = api_root
self.username, self.password = username, password
def get_bug(self, bug, include_fields='_default,token,cc,keywords,whiteboard,comments', exclude_fields=None, params={}):
params['include_fields'] = [include_fields]
params['exclude_fields'] = [exclude_fields]
url = urljoin(self.API_ROOT, 'bug/%s?%s' % (bug, self.qs(**params)))
return Bug.get(url)
def get_bug_list(self, params={}):
url = url = urljoin(self.API_ROOT, 'bug/?%s' % (self.qs(**params)))
return BugSearch.get(url).bugs
def qs(self, **params):
if self.username and self.password:
params['username'] = [self.username]
params['password'] = [self.password]
return qs(**params)
class BMOAgent(BugzillaAgent):
def __init__(self, username=None, password=None):
super(BMOAgent, self).__init__('https://api-dev.bugzilla.mozilla.org/latest/', username, password)
|
Python
| 0
|
@@ -1451,16 +1451,8 @@
s://
-api-dev.
bugz
@@ -1472,14 +1472,13 @@
org/
-latest
+bzapi
/',
|
22b91d3f58eb9a6c021645a4aea56c864d151bba
|
Fix get_favorite_for typo in templatetags
|
favit/templatetags/favit_tags.py
|
favit/templatetags/favit_tags.py
|
# -*- coding: utf-8 -*-
from django import template
from django.db.models import get_model
from django.template.loader import render_to_string
from ..models import Favorite
register = template.Library()
@register.simple_tag(takes_context=True)
def favorite_button(context, target):
user = context['request'].user
# do nothing when user isn't authenticated
if not user.is_authenticated():
return ''
target_model = '.'.join((target._meta.app_label, target._meta.object_name))
undo = False
# prepare button to unfave if the user
# already faved this object
if Favorite.objects.get_favorite(user, target):
undo = True
return render_to_string(
'favit/button.html', {
'target_model': target_model,
'target_object_id': target.id,
'undo': undo,
'fav_count': Favorite.objects.for_object(target).count()
}
)
@register.simple_tag(takes_context=True)
def unfave_button(context, target):
user = context['request'].user
# do nothing when user isn't authenticated
if not user.is_authenticated():
return ''
if Favorite.objects.get_favorite(user, target) is None:
return ''
target_model = '.'.join((target._meta.app_label, target._meta.object_name))
return render_to_string(
'favit/unfave-button.html', {
'target_model': target_model,
'target_object_id': target.id,
}
)
@register.filter
def get_favorite_for(obj, user):
"""
Get Favorite instance for an object (obj) and a user (user)
Usage:
{% with obj|get_favorite_for:user as fav_object %}
...
{% endwith %}
"""
return Favorites.objects.get_favorite(user, obj)
@register.filter
def favorites_count(obj):
"""
Usage:
Given an object `obj` you may show it fav count like this:
<p>Favorite Count {{ obj|favorites_count }}</p>
"""
return Favorite.objects.for_object(obj).count()
@register.assignment_tag
def user_favorites(user, app_model=None):
"""
Usage:
Get all user favorited objects:
{% with user_favorites <user> as favorite_list %}
{% for fav_obj in favorite_list %}
{# do something with fav_obj #}
{% endfor %}
{% endwith %}
or, just favorites from one model:
{% with user_favorites <user> "app_label.model" as favorite_list %}
{% for fav_obj in favorite_list %}
{# do something with fav_obj #}
{%
{% endwith %}
"""
return Favorite.objects.for_user(user, app_model)
@register.assignment_tag
def model_favorites(app_model):
"""
Gets all favorited objects that are instances of a model
given in module notation.
Usage:
{% with model_favorites "app_label.model" as favorite_list %}
{% for fav_obj in favorite_list %}
{# do something with fav_obj #}
{% endfor %}
{% endwith %}
"""
return Favorite.objects.for_model(app_model)
|
Python
| 0
|
@@ -1714,17 +1714,16 @@
Favorite
-s
.objects
|
35b5900c156b4c6fca401ab2097879d98761befe
|
Make all timestamps comparable.
|
bugzilla2fedmsg.py
|
bugzilla2fedmsg.py
|
# -*- coding: utf-8 -*-
""" Moksha consumer that listens to BZ over STOMP and reproduces to fedmsg.
Authors: Ralph Bean <rbean@redhat.com>
"""
import datetime
import socket
import time
import bugzilla
import dateutil.parser
import fedmsg
import moksha.hub.api
import moksha.hub.reactor
# These are bug fields we're going to try and pass on to fedmsg.
bug_fields = [
'alias',
'assigned_to',
'attachments',
'blocks',
'cc',
'classification',
'comments',
'component',
'components',
'creation_time',
'creator',
'depends_on',
'description',
'docs_contact',
'estimated_time',
'external_bugs',
'fixed_in',
'flags',
'groups',
'id',
'is_cc_accessible',
'is_confirmed',
'is_creator_accessible',
'is_open',
'keywords',
'last_change_time',
'op_sys',
'platform',
'priority',
'product',
'qa_contact',
'actual_time',
'remaining_time',
'resolution',
'see_also',
'severity',
'status',
'summary',
'target_milestone',
'target_release',
'url',
'version',
'versions',
'weburl',
'whiteboard',
]
def convert_datetimes(obj):
""" Recursively convert bugzilla DateTimes to stdlib datetimes. """
if isinstance(obj, list):
return [convert_datetimes(item) for item in obj]
elif isinstance(obj, dict):
return dict([
(k, convert_datetimes(v))
for k, v in obj.items()
])
elif hasattr(obj, 'timetuple'):
timestamp = time.mktime(obj.timetuple())
return datetime.datetime.fromtimestamp(timestamp)
else:
return obj
class BugzillaConsumer(moksha.hub.api.Consumer):
# This is the fedora_from_esb A-MQ queue.
topic = '/queue/fedora_from_esb'
def __init__(self, hub):
super(BugzillaConsumer, self).__init__(hub)
self.config = config = hub.config
# Backwards compat. We used to have a self.debug...
self.debug = self.log.info
products = config.get('bugzilla.products', 'Fedora, Fedora EPEL')
self.products = [product.strip() for product in products.split(',')]
# First, initialize fedmsg and bugzilla in this thread's context.
hostname = socket.gethostname().split('.', 1)[0]
fedmsg.init(name='bugzilla2fedmsg.%s' % hostname)
url = self.config.get('bugzilla.url', 'https://bugzilla.redhat.com')
username = self.config.get('bugzilla.username', None)
password = self.config.get('bugzilla.password', None)
self.bugzilla = bugzilla.Bugzilla(url=url)
if username and password:
self.debug("Logging in to %s" % url)
self.bugzilla.login(username, password)
else:
self.debug("No credentials found. Not logging in to %s" % url)
self.debug("Initialized bz2fm STOMP consumer.")
def consume(self, msg):
topic, msg = msg['topic'], msg['body']
# As of https://bugzilla.redhat.com/show_bug.cgi?id=1248259, bugzilla
# will send the product along with the initial message, so let's check
# it.
if not 'product' in msg:
self.debug("DROP: message does not bear a 'product' field.")
return
if msg['product'] not in self.products:
self.debug("DROP: %r not in %r" % (msg['product'], self.products))
return
# Now, look up our bug in bugzilla to get more details.
self.debug("Gathering metadata for #%s" % msg['bug_id'])
bug = self.bugzilla.getbug(msg['bug_id'])
# Parse the timestamp in msg. It looks like 2013-05-17T02:33:00+00:00
# Format changed https://bugzilla.redhat.com/show_bug.cgi?id=1139955
msg['timestamp'] = dateutil.parser.parse(msg['timestamp'])
# Find the event from the bz history that most likely corresponds here.
self.debug("Gathering history for #%s" % msg['bug_id'])
history = bug.get_history()['bugs'][0]['history']
history = convert_datetimes(history)
self.debug("Organizing metadata for #%s" % msg['bug_id'])
bug = dict([(attr, getattr(bug, attr, None)) for attr in bug_fields])
bug = convert_datetimes(bug)
comment = self.find_relevant_item(msg, bug['comments'], 'time')
event = self.find_relevant_item(msg, history, 'when')
# If there are no events in the history, then this is a new bug.
topic = 'bug.update'
if not event and len(bug['comments']) == 1:
topic = 'bug.new'
self.debug("Republishing #%s" % msg['bug_id'])
fedmsg.publish(
modname='bugzilla',
topic=topic,
msg=dict(
bug=bug,
event=event,
comment=comment,
),
)
@staticmethod
def find_relevant_item(msg, history, key):
""" Find the change from the BZ history with the closest timestamp to a
given message. Unfortunately, we can't rely on matching the timestamps
exactly so instead we say that if the best match is within 60s of the
message, then return it. Otherwise return None.
"""
if not history:
return None
best = history[0]
best_delta = abs(best[key] - msg['timestamp'])
for event in history[1:]:
if abs(event[key] - msg['timestamp']) < best_delta:
best = event
best_delta = abs(best[key] - msg['timestamp'])
if best_delta < datetime.timedelta(seconds=60):
return best
else:
return None
|
Python
| 0.000663
|
@@ -3746,16 +3746,83 @@
1139955%0A
+ timezone_naive_timestamp = msg%5B'timestamp'%5D.rsplit('+')%5B0%5D%0A
@@ -3862,21 +3862,31 @@
r.parse(
-msg%5B'
+timezone_naive_
timestam
@@ -3878,34 +3878,32 @@
_naive_timestamp
-'%5D
)%0A%0A # Fin
|
66ad5e449b1f28dbde2bc30a37ad3c568ae9166f
|
Fix bins
|
examples/plot_dom_hits.py
|
examples/plot_dom_hits.py
|
# -*- coding: utf-8 -*-
"""
==================
DOM hits.
==================
Estimate track/DOM distances using the number of hits per DOM.
"""
from __future__ import absolute_import, print_function, division
# Author: Tamas Gal <tgal@km3net.de>
# License: BSD-3
from collections import defaultdict, Counter
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from matplotlib.colors import LogNorm
import km3pipe as kp
from km3pipe.dataclasses import Table
from km3pipe.math import pld3
from km3modules.common import StatusBar
import km3pipe.style
km3pipe.style.use("km3pipe")
filename = "data/atmospheric_muons_sample.h5"
cal = kp.calib.Calibration(filename="data/KM3NeT_-00000001_20171212.detx")
def filter_muons(blob):
"""Write all muons from McTracks to Muons."""
tracks = blob['McTracks']
muons = tracks[tracks.type == -13] # PDG particle code
blob["Muons"] = Table(muons)
return blob
class DOMHits(kp.Module):
"""Create histogram with n_hits and distance of hit to track."""
def configure(self):
self.hit_statistics = defaultdict(list)
def process(self, blob):
hits = blob['Hits']
muons = blob['Muons']
highest_energetic_muon = Table(muons[np.argmax(muons.energy)])
muon = highest_energetic_muon
triggered_hits = hits.triggered_rows
dom_hits = Counter(triggered_hits.dom_id)
for dom_id, n_hits in dom_hits.items():
try:
distance = pld3(
cal.detector.dom_positions[dom_id], muon.pos, muon.dir
)
except KeyError:
self.log.warning("DOM ID %s not found!" % dom_id)
continue
self.hit_statistics['n_hits'].append(n_hits)
self.hit_statistics['distance'].append(distance)
return blob
def finish(self):
df = pd.DataFrame(self.hit_statistics)
print(df)
sdf = df[(df['distance'] < 200) & (df['n_hits'] < 50)]
bins = (max(sdf['distance']) - 1, max(sdf['n_hits']) - 1)
plt.hist2d(
sdf['distance'],
sdf['n_hits'],
cmap='plasma',
bins=bins,
norm=LogNorm()
)
plt.xlabel('Distance between hit and muon track [m]')
plt.ylabel('Number of hits on DOM')
plt.show()
pipe = kp.Pipeline()
pipe.attach(kp.io.HDF5Pump, filename=filename)
pipe.attach(StatusBar, every=100)
pipe.attach(filter_muons)
pipe.attach(DOMHits)
pipe.drain()
|
Python
| 0.000001
|
@@ -2013,16 +2013,20 @@
bins =
+(int
(max(sdf
@@ -2042,14 +2042,19 @@
e'%5D)
+)
- 1,
+int(
max(
@@ -2072,16 +2072,17 @@
'%5D) - 1)
+)
%0A
|
f79342cc30c434033d218d7a3f260afcbdec9e31
|
Version bump (3.0.0 alpha 2)
|
euca2ools/__init__.py
|
euca2ools/__init__.py
|
# Software License Agreement (BSD License)
#
# Copyright (c) 2009-2013, Eucalyptus Systems, Inc.
# All rights reserved.
#
# Redistribution and use of this software in source and binary forms, with or
# without modification, are permitted provided that the following conditions
# are met:
#
# Redistributions of source code must retain the above
# copyright notice, this list of conditions and the
# following disclaimer.
#
# Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other
# materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import os.path
import subprocess
__version__ = '3.0.0-alpha1'
if '__file__' in globals():
# Check if this is a git repo; maybe we can get more precise version info
try:
repo_path = os.path.join(os.path.dirname(__file__), '..')
git = subprocess.Popen(['git', 'describe'], stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env={'GIT_DIR': os.path.join(repo_path, '.git')})
git.wait()
git.stderr.read()
if git.returncode == 0:
__version__ = git.stdout.read().strip()
if type(__version__).__name__ == 'bytes':
__version__ = __version__.decode()
except:
# Not really a bad thing; we'll just use what we had
pass
|
Python
| 0
|
@@ -1481,17 +1481,17 @@
.0-alpha
-1
+2
'%0A%0Aif '_
|
e17062502af60fecb15bd14315836ae415a84654
|
use argparse instead of optparse in script/sync_module_docs.py
|
script/sync_module_docs.py
|
script/sync_module_docs.py
|
#!/usr/bin/env python
"""
Synchronize the documentation files in a given directory ``doc_dir`` with the
actual state of the SfePy sources in ``top_dir``. Missing files are created,
files with no corresponding source file are removed, other files are left
untouched.
Notes
-----
The developer guide needs to be edited manually to reflect the changes.
"""
from __future__ import absolute_import
import sys
sys.path.append('.')
import os
import fnmatch
from optparse import OptionParser
from sfepy.base.base import output
from sfepy.base.ioutils import locate_files, edit_filename, ensure_path
usage = '%prog [options] doc_dir top_dir\n' + __doc__.rstrip()
omits = [
'__init__.py',
'__config__.py',
'debug.py',
'setup.py',
'site_cfg.py',
'site_cfg_template.py',
]
omits_pyx = [
'lobatto_template.pyx',
]
doc_template = """%s
%s
.. automodule:: %s
:members:
:undoc-members:
"""
help = {
'dry_run' :
'only show what changes would be made',
}
def main():
parser = OptionParser(usage=usage, version='%prog')
parser.add_option('-n', '--dry-run',
action='store_true', dest='dry_run',
default=False, help=help['dry_run'])
options, args = parser.parse_args()
if len(args) == 2:
doc_dir, top_dir = [os.path.realpath(ii) for ii in args]
else:
parser.print_help(),
return
docs = set(ii for ii in locate_files('*.rst', root_dir=doc_dir))
sources = set(ii for ii in
locate_files('*.py',
root_dir=os.path.join(top_dir, 'sfepy'))
if os.path.basename(ii) not in omits)
sources.update(ii for ii in
locate_files('*.pyx',
root_dir=os.path.join(top_dir, 'sfepy'))
if os.path.basename(ii) not in omits_pyx)
scripts = set(ii for ii in
locate_files('*.py',
root_dir=os.path.join(top_dir, 'script'))
if os.path.basename(ii) not in omits)
top_scripts = set(os.path.realpath(ii)
for ii in fnmatch.filter(os.listdir(top_dir), '*.py')
if os.path.basename(ii) not in omits)
all_sources = set()
all_sources.update(sources, scripts, top_scripts)
cwd = os.path.realpath(os.path.curdir) + os.path.sep
output.prefix = 'smd:'
output('removing unneeded rst files in "%s"...' % doc_dir)
for doc in sorted(docs):
aux = edit_filename(doc, new_ext='.py')
src1 = os.path.normpath(aux.replace(doc_dir, top_dir))
aux = edit_filename(doc, new_ext='.pyx')
src2 = os.path.normpath(aux.replace(doc_dir, top_dir))
if (src1 not in all_sources) and (src2 not in all_sources):
output('remove: %s' % doc.replace(cwd, ''))
if not options.dry_run:
os.remove(doc)
output('...done')
output('creating missing rst files in "%s"...' % doc_dir)
for src in sorted(all_sources):
aux = edit_filename(src, new_ext='.rst')
doc = os.path.normpath(aux.replace(top_dir, doc_dir))
if doc not in docs:
output('create: %s' % doc.replace(cwd, ''))
if not options.dry_run:
mod_filename = src.replace(top_dir + os.path.sep, '')
mod_name = mod_filename.replace(os.path.sep, '.')
mod_name = edit_filename(mod_name, new_ext='')
if mod_name.startswith('sfepy'): # Module.
title = mod_name + ' module'
else: # Script.
title = mod_filename + ' script'
mod_name = mod_name.split('.')[-1]
underlines = '=' * len(title)
contents = doc_template % (title, underlines, mod_name)
ensure_path(doc)
fd = open(doc, 'w')
fd.write(contents)
fd.close()
output('...done')
if __name__ == '__main__':
main()
|
Python
| 0.000001
|
@@ -449,19 +449,19 @@
ch%0Afrom
-opt
+arg
parse im
@@ -465,26 +465,57 @@
import
-OptionPars
+ArgumentParser, RawDescriptionHelpFormatt
er%0A%0Afrom
@@ -622,72 +622,8 @@
th%0A%0A
-usage = '%25prog %5Boptions%5D doc_dir top_dir%5Cn' + __doc__.rstrip()%0A%0A
omit
@@ -977,32 +977,170 @@
r =
-OptionParser(usage=usage
+ArgumentParser(description=__doc__,%0A formatter_class=RawDescriptionHelpFormatter)%0A parser.add_argument('--version', action='version'
, ve
@@ -1151,12 +1151,15 @@
n='%25
+(
prog
+)s
')%0A
@@ -1172,22 +1172,24 @@
ser.add_
-option
+argument
('-n', '
@@ -1196,24 +1196,26 @@
--dry-run',%0A
+
@@ -1283,16 +1283,18 @@
+
default=
@@ -1330,76 +1330,113 @@
-options, args = parser.parse_args()%0A%0A if len(args) == 2:%0A
+parser.add_argument('doc_dir')%0A parser.add_argument('top_dir')%0A options = parser.parse_args()%0A%0A
-
doc_
@@ -1475,78 +1475,79 @@
(ii)
- for ii in args%5D%0A else:%0A parser.print_help(),%0A return
+%0A for ii in %5Boptions.doc_dir, options.top_dir%5D%5D%0A
%0A%0A
|
14c31307fd31631ecce0378aedbef95cec8531f2
|
Fix autodiscovery
|
gargoyle/__init__.py
|
gargoyle/__init__.py
|
"""
gargoyle
~~~~~~~~
:copyright: (c) 2010 DISQUS.
:license: Apache License 2.0, see LICENSE for more details.
"""
from django.utils.module_loading import autodiscover_modules
from gargoyle.manager import gargoyle
__version__ = '1.2.0'
VERSION = __version__ # old version compat
__all__ = ('gargoyle', 'autodiscover', '__version__', 'VERSION')
default_app_config = 'gargoyle.apps.GargoyleAppConfig'
def autodiscover():
"""
Auto-discover INSTALLED_APPS' gargoyle modules and fail silently when
not present. This forces an import on them to register any gargoyle bits they
may want.
"""
autodiscover_modules('gargoyle')
|
Python
| 0
|
@@ -602,24 +602,61 @@
nt.%0A %22%22%22%0A
+ import gargoyle.builtins # noqa%0A
autodisc
|
e0def112fda555307cc9d8249056b92c7f86f29a
|
Pass the amount of values to softmax
|
eva/models/wavenet.py
|
eva/models/wavenet.py
|
from keras.models import Model
from keras.layers import Input, Convolution1D, Activation, Merge, Lambda
from keras.layers.advanced_activations import PReLU
from keras.optimizers import Nadam
from eva.layers.causal_atrous_convolution1d import CausalAtrousConvolution1D
from eva.layers.wavenet_block import WavenetBlock, WavenetBlocks
def Wavenet(input_shape, filters, depth, stacks, learn_all=False, h=None, build=True):
# TODO: Soft targets? A float to make targets a gaussian with stdev.
# TODO: Train only receptive field. The temporal-first outputs are computed from zero-padding.
# TODO: Global conditioning?
# TODO: Local conditioning?
_, nb_bins = input_shape
input_audio = Input(input_shape, name='audio_input')
model = CausalAtrousConvolution1D(filters, 2, mask_type='A', atrous_rate=1, border_mode='valid')(input_audio)
out, skip_connections = WavenetBlocks(filters, depth, stacks)(model)
out = Merge(mode='sum', name='merging_skips')(skip_connections)
out = PReLU()(out)
out = Convolution1D(nb_bins, 1, border_mode='same')(out)
out = PReLU()(out)
out = Convolution1D(nb_bins, 1, border_mode='same')(out)
# https://storage.googleapis.com/deepmind-live-cms/documents/BlogPost-Fig2-Anim-160908-r01.gif
if not learn_all:
out = Lambda(lambda x: x[:, -1, :], output_shape=(out._keras_shape[-1],), name='last_out')(out)
out = Activation('softmax')(out)
if build:
model = Model(input_audio, out)
model.compile(Nadam(), 'sparse_categorical_crossentropy')
return model
def compute_receptive_field(sample_rate, depth, stacks):
receptive_field = stacks * (2 ** depth * 2) - (stacks - 1)
receptive_field_ms = (receptive_field * 1000) / sample_rate
return receptive_field, receptive_field_ms
|
Python
| 0.9994
|
@@ -383,22 +383,13 @@
s, l
-earn_all=False
+ast=0
, h=
@@ -1271,21 +1271,16 @@
if
-not learn_all
+last %3E 0
:%0A
@@ -1314,19 +1314,20 @@
: x%5B:, -
-1,
+last
:%5D, outp
@@ -1336,16 +1336,22 @@
_shape=(
+last,
out._ker
@@ -1363,12 +1363,10 @@
ape%5B
--1%5D,
+2%5D
), n
|
f4063d86404adbb5489edefd6c12d855de246dee
|
test that we can decode all doubly-encoded characters (doesn't pass yet)
|
ftfy/test_unicode.py
|
ftfy/test_unicode.py
|
# -*- coding: utf-8 -*-
from ftfy.fixes import fix_text_encoding
import unicodedata
import sys
if sys.hexversion >= 0x03000000:
unichr = chr
# Most single-character strings which have been misencoded should be restored.
def test_all_bmp_characters():
for index in range(0xa0, 0xfffd):
char = unichr(index)
# Exclude code points that are not assigned
if unicodedata.category(char) not in ('Co', 'Cn', 'Cs', 'Mc', 'Mn'):
garble = char.encode('utf-8').decode('latin-1')
assert fix_text_encoding(garble) == char
phrases = [
u"\u201CI'm not such a fan of Charlotte Brontë\u2026\u201D",
u"\u201CI'm not such a fan of Charlotte Brontë\u2026\u201D",
u"\u2039ALLÍ ESTÁ\u203A",
u"\u2014ALLÍ ESTÁ\u2014",
u"AHÅ™, the new sofa from IKEA®",
#u"\u2014a radius of 10 Å\u2014",
]
# These phrases should not be erroneously "fixed"
def test_valid_phrases():
for phrase in phrases:
yield check_phrase, phrase
# make it not just confirm based on the opening punctuation
yield check_phrase, phrase[1:]
def check_phrase(text):
assert fix_text_encoding(text) == text, text
|
Python
| 0.000001
|
@@ -87,16 +87,43 @@
port sys
+%0Afrom nose.tools import eq_
%0A%0Aif sys
@@ -549,48 +549,189 @@
-assert fix_text_encoding(garble) ==
+garble2 = char.encode('utf-8').decode('latin-1').encode('utf-8').decode('latin-1')%0A eq_(fix_text_encoding(garble), char)%0A eq_(fix_text_encoding(garble2),
char
+)
%0A%0Aph
@@ -1286,15 +1286,12 @@
-assert
+eq_(
fix_
@@ -1313,20 +1313,13 @@
ext)
- == text
, text
+)
%0A%0A
|
c9e37f9b241c2bef2ffdb4811cec41c951b21ef9
|
Update fluid_cat_slim.py
|
cat_boxing/caged_cat/python/fluid_cat_slim.py
|
cat_boxing/caged_cat/python/fluid_cat_slim.py
|
from random import randint
def generate_cat():
cat_size = randint(1,100)
return cat_size
def fill_box():
box_size = 400
empty_room = 400
j = 0
while empty_room > 0:
cat = generate_cat()
empty_room = empty_room - cat
j = j + 1
return j
def fill_truck():
truck_size = 40
cat_num = 0
i = 0
while i <= truck_size:
cats_in_box = fill_box()
cat_num = cat_num + cats_in_box
i = i + 1
print("There are ", cat_num, " cats in our truck. Let's move out!")
|
Python
| 0.000003
|
@@ -113,27 +113,8 @@
():%0A
- box_size = 400%0A
|
0e519e2cafc1d384c6e4b2c7d5d3d8687b7896e8
|
use os.path to concat dirs
|
etc/scene-viewer.py
|
etc/scene-viewer.py
|
# GamePlay 3D Blender Scene Viewer
#
# Copyright 2013 Ithai Levi
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import bpy,subprocess,shutil,os
bl_info = {"name": "GamePlay 3D Scene Viewer", "category": "User"}
bpy.types.Scene.encoder_group = bpy.props.BoolProperty(name="Group Animations",default=False)
bpy.types.Scene.encoder_genmat = bpy.props.BoolProperty(name="Generate Materials",default=True)
bpy.types.Scene.encoder_path = bpy.props.StringProperty(name="Encoder Path",subtype="FILE_PATH",default="")
bpy.types.Scene.viewer_path = bpy.props.StringProperty(name="Viewer Path",subtype="FILE_PATH",default="")
class SceneView(bpy.types.Operator):
"""the GamePlay 3D scene viewer"""
bl_idname = "scene.gameplayview"
bl_label = "Preview"
@classmethod
def poll(cls, context):
svp = bpy.context.scene.viewer_path
enc = bpy.context.scene.encoder_path
try:
with open(svp): pass
except IOError:
return False
try:
with open(enc): pass
except IOError:
return False
return True
def execute(self, context):
sve = bpy.context.scene.viewer_path
svp = os.path.dirname(sve)
enc = bpy.context.scene.encoder_path
sfp = svp+"/res/scene"
bpy.ops.export_scene.fbx(filepath=sfp+".fbx",
check_existing=True,
filter_glob="*.fbx",
use_selection=False,
global_scale=1.0,
axis_forward='Z',
axis_up='-Y',
object_types={'EMPTY', 'MESH','LAMP', 'CAMERA', 'ARMATURE'},
use_mesh_modifiers=True,
mesh_smooth_type='FACE',
use_mesh_edges=False,
use_armature_deform_only=False,
use_anim=True,
use_anim_action_all=True,
use_default_take=True,
use_anim_optimize=False,
anim_optimize_precision=6.0,
path_mode='AUTO',
use_rotate_workaround=False,
xna_validate=False,
batch_mode='OFF',
use_batch_own_dir=True,
use_metadata=True)
args = [enc]
if bpy.context.scene.encoder_group:
args.append("g:auto")
if bpy.context.scene.encoder_genmat:
args.append("-m")
args.append(sfp+".fbx")
subprocess.call(args)
for img in bpy.data.images.keys():
if bpy.data.images[img].source=='FILE':
shutil.copy(bpy.data.images[img].filepath,svp+"res/")
subprocess.Popen([sve],cwd=svp)
return {"FINISHED"}
class GameplayPanel(bpy.types.Panel):
bl_space_type = "VIEW_3D"
bl_region_type = "UI"
bl_label = "Gameplay"
bl_idname = "SCENE_PT_layout"
def draw(self, context):
layout = self.layout
layout.prop(context.scene, "encoder_path")
layout.prop(context.scene, "viewer_path")
layout.prop(context.scene, "encoder_genmat")
layout.prop(context.scene, "encoder_group")
layout.operator("scene.gameplayview")
def register():
bpy.utils.register_class(SceneView)
bpy.utils.register_class(GameplayPanel)
def unregister():
bpy.utils.unregister_class(SceneView)
bpy.utils.unregister_class(GameplayPanel)
|
Python
| 0
|
@@ -1778,24 +1778,39 @@
p =
-svp+%22/res/
+os.path.join(svp,%22res%22,%22
scene%22
+)
%0A
@@ -3529,18 +3529,31 @@
ath,
+os.path.join(
svp
-+
+,
%22res
-/
%22
+)
)%0A
|
bfd0f554ef1a4f18afe3113c53ce20a756109abe
|
Add nbip
|
sniffer.py
|
sniffer.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Author : Oros
#
# documentations :
# http://secdev.org/projects/scapy/doc/usage.html
# http://sdz.tdct.org/sdz/manipulez-les-paquets-reseau-avec-scapy.html
#
# Install :
# apt-get install tcpdump graphviz imagemagick python-gnuplot python-crypto python-pyx python-scapy nmap
# apt-get install python-pyspatialite
# sudo ./sniffer.py
#
import sys
from scapy.all import *
from pyspatialite import dbapi2 as sqlite3
import json
import io
conn = sqlite3.connect('ips.db')
c = conn.cursor()
c.execute("CREATE TABLE if not exists connexions ( ip_from varchar(30), ip_to varchar(30), UNIQUE(ip_from,ip_to));")
conn.commit()
def add_ips(x):
c.execute("INSERT OR IGNORE INTO connexions ( ip_from, ip_to) VALUES ('{}', '{}');".format(x.sprintf("%IP.src%"),x.sprintf("%IP.dst%")))
conn.commit()
def run_sniff():
sniff(prn=add_ips)
def show_ips():
c.execute("SELECT * FROM connexions;")
a= c.fetchone()
print("num : IP from -> IP to")
i=1
while a:
print("{} : {} -> {}".format(i,a[0],a[1]))
a= c.fetchone()
i+=1
def get_uniques_ips():
c.execute("SELECT ip_from FROM connexions GROUP BY ip_from;")
a= c.fetchone()
ips=[]
while a:
if a[0] not in ips and a[0] != "??":
ips.append(a[0])
a= c.fetchone()
c.execute("SELECT ip_to FROM connexions WHERE ip_to NOT IN ('{}') GROUP BY ip_to;".format("','".join(ips)))
a= c.fetchone()
while a:
if a[0] not in ips and a[0] != "??":
ips.append(a[0])
a= c.fetchone()
return ips
def gen_map():
res,unans = traceroute(get_uniques_ips(),dport=[80,443],maxttl=20,retry=-2)
res.graph()
res.graph(type="ps",target="| lp")
res.graph(target="> graph.svg")
def gen_links():
nodes=[]
for ip in get_uniques_ips():
nodes.append({ 'data': { 'id': ip, 'name': ip, 'weight': '100', 'height': '100' } })
edges=[]
c.execute("SELECT ip_from, ip_to FROM connexions;")
a= c.fetchone()
while a:
if a[0] != "??" and a[0] != "??":
edges.append({ 'data': { 'source': a[0], 'target': a[1] } })
a= c.fetchone()
elements={'nodes':nodes,'edges':edges}
with io.open('elements.js', 'w', encoding='utf-8') as f:
f.write("var nb_ips="+unicode(str(len(nodes)))+";")
f.write("var elements="+unicode(json.dumps(elements, ensure_ascii=False))+";")
def get_ips():
i=0;
for ip in get_uniques_ips():
i+=1
print("{} : {}".format(i,ip))
print("\nNb ips : {}".format(i))
if len(sys.argv) < 2:
print("""Need parameters :
run : start sniffing
show : show connexions
ip : list all IPs
js : dump the DB into a JS file
Exemples :
In Terminal 1
sudo ./sniffer.py run
#sniff all IPs on the network
In Terminal 2
./sniffer.py show
192.168.0.42 -> 192.168.0.1
192.168.0.1 <- 192.168.0.42
192.168.0.42 -> 192.168.0.64
...""")
sys.exit(1)
action=sys.argv[1]
if action == "show":
show_ips()
elif action == "map":
gen_map()
elif action == "js":
gen_links()
elif action == "ip":
get_ips()
else:
run_sniff()
|
Python
| 0.000181
|
@@ -2263,89 +2263,17 @@
get_
+nb_
ips():
-%0A%09i=0;%0A%09for ip in get_uniques_ips():%0A%09%09i+=1%0A%09%09print(%22%7B%7D : %7B%7D%22.format(i,ip))
%0A%09pr
@@ -2299,17 +2299,38 @@
.format(
-i
+len(get_uniques_ips())
))%0A%0Aif l
@@ -2471,16 +2471,37 @@
JS file
+%0Anbip : number of IPs
%0A%0AExempl
@@ -2871,21 +2871,95 @@
)%0Ael
-se:%0A%09run_sniff(
+if action == %22nbip%22:%0A%09get_nb_ips()%0Aelif action ==%22run%22:%0A%09run_sniff()%0Aelse:%0A%09print(%22What?%22
)%0A
|
b819aa7ad2f390c06fec4b45174f3f001ba48baf
|
Make sure to check that a summary exists before using it
|
bcbio/upload/__init__.py
|
bcbio/upload/__init__.py
|
"""Handle extraction of final files from processing pipelines into storage.
"""
import datetime
from bcbio.upload import shared, filesystem, galaxy, s3
from bcbio.utils import file_exists
_approaches = {"filesystem": filesystem,
"galaxy": galaxy,
"s3": s3}
def from_sample(sample):
"""Upload results of processing from an analysis pipeline sample.
"""
upload_config = sample.get("upload")
if upload_config:
approach = _approaches[upload_config.get("method", "filesystem")]
for finfo in _get_files(sample):
approach.update_file(finfo, sample, upload_config)
for finfo in _get_files_project(sample, upload_config):
approach.update_file(finfo, None, upload_config)
# ## File information from sample
def _get_files(sample):
"""Retrieve files for the sample, dispatching by analysis type.
Each file is a dictionary containing the path plus associated
metadata about the file and pipeline versions.
"""
analysis = sample.get("analysis")
if analysis in ["variant", "SNP calling", "variant2"]:
return _get_files_variantcall(sample)
elif analysis in ["RNA-seq"]:
return _get_files_rnaseq(sample)
else:
return []
def _get_files_rnaseq(sample):
out = []
algorithm = sample["config"]["algorithm"]
out = _maybe_add_summary(algorithm, sample, out)
out = _maybe_add_alignment(algorithm, sample, out)
return _add_meta(out, sample)
def _add_meta(xs, sample=None, config=None):
out = []
for x in xs:
x["mtime"] = shared.get_file_timestamp(x["path"])
if sample:
x["sample"] = sample["name"][-1]
if config:
if "fc_name" in config and "fc_date" in config:
x["run"] = "%s_%s" % (config["fc_date"], config["fc_name"])
else:
x["run"] = "project_%s" % datetime.datetime.now().strftime("%Y-%m-%d")
out.append(x)
return out
def _get_files_variantcall(sample):
"""Return output files for the variant calling pipeline.
"""
out = []
algorithm = sample["config"]["algorithm"]
out = _maybe_add_summary(algorithm, sample, out)
out = _maybe_add_alignment(algorithm, sample, out)
out = _maybe_add_variant_file(algorithm, sample, out)
return _add_meta(out, sample)
def _maybe_add_variant_file(algorithm, sample, out):
if sample["work_bam"] is not None and sample.get("vrn_file"):
for x in sample["variants"]:
out.append({"path": x["vrn_file"],
"type": "vcf",
"ext": x["variantcaller"],
"variantcaller": x["variantcaller"]})
if x.get("bed_file"):
out.append({"path": x["bed_file"],
"type": "bed",
"ext": "%s-callregions" % x["variantcaller"],
"variantcaller": x["variantcaller"]})
return out
def _maybe_add_summary(algorithm, sample, out):
if algorithm.get("write_summary", True) and "summary" in sample:
if sample["summary"].get("pdf"):
out = [{"path": sample["summary"]["pdf"],
"type": "pdf",
"ext": "summary"}]
return out
def _maybe_add_alignment(algorithm, sample, out):
if _has_alignment_file(algorithm, sample):
out.append({"path": sample["work_bam"],
"type": "bam",
"ext": "ready"})
if file_exists(sample["work_bam"] + ".bai"):
out.append({"path": sample["work_bam"] + ".bai",
"type": "bai",
"ext": "ready"})
return out
def _has_alignment_file(algorithm, sample):
return (((algorithm.get("aligner") or algorithm.get("realign")
or algorithm.get("recalibrate")) and
algorithm.get("merge_bamprep", True)) and
sample["work_bam"] is not None)
# ## File information from full project
def _get_files_project(sample, upload_config):
"""Retrieve output files associated with an entire analysis project.
"""
out = [{"path": sample["provenance"]["programs"]}]
if sample["summary"].get("project"):
out.append({"path": sample["summary"]["project"]})
for x in sample.get("variants", []):
if "pop_db" in x:
out.append({"path": x["pop_db"],
"type": "sqlite",
"variantcaller": x["variantcaller"]})
for x in sample.get("variants", []):
if "population" in x:
pop_db = x["population"].get("db")
if pop_db:
out.append({"path": pop_db,
"type": "sqlite",
"variantcaller": x["variantcaller"]})
pop_vcf = x["population"].get("vcf")
if pop_vcf:
out.append({"path": pop_vcf,
"type": "vcf",
"variantcaller": x["variantcaller"]})
for x in sample.get("variants", []):
if x.get("validate") and x["validate"].get("grading_summary"):
out.append({"path": x["validate"]["grading_summary"]})
break
return _add_meta(out, config=upload_config)
|
Python
| 0
|
@@ -4203,32 +4203,56 @@
rams%22%5D%7D%5D%0A if
+%22summary%22 in sample and
sample%5B%22summary%22
|
605e621d000e38fabb61a347322cb3828dcb136d
|
update to one import per line
|
slacksocket/client.py
|
slacksocket/client.py
|
import os,json,logging,websocket,requests,time,thread
from .config import slackurl,event_types
log = logging.getLogger('slacksocket')
class SlackSocketEventNameError(NameError):
"""
Invalid name
"""
pass
class SlackSocketAPIError(RuntimeError):
"""
Error response from Slack API
"""
pass
class SlackEvent(object):
"""
Event received from the Slack RTM API
params:
- event(dict)
attributes:
- type: Slack event type
- time: UTC time event was received
"""
def __init__(self,event):
self.type = event['type']
self.time = int(time.time())
self.json = json.dumps(event)
self.event = event
class SlackClient(requests.Session):
"""
"""
def __init__(self, token):
super(SlackClient, self).__init__()
self.token = token
def get_json(self,url):
return self._result(self._get(url))
def _post(self, url):
return self.post(url, params={'token':self.token})
def _get(self, url):
return self.get(url, params={'token':self.token})
def _result(self, res):
try:
res.raise_for_status()
except requests.exceptions.HTTPError as e:
raise errors.SlackSocketAPIError(e, res, explanation=explanation)
rj = res.json()
if not rj['ok']:
raise SlackSocketAPIError('Error from slack api:\n %s' % r.text)
return rj
class SlackSocket(object):
"""
SlackSocket class provides a streaming interface to the Slack Real Time
Messaging API
params:
- slacktoken(str): token to authenticate with slack
- translate(bool): yield events with human-readable names
rather than id. default true
"""
def __init__(self,slacktoken,translate=True):
if type(translate) != bool:
raise TypeError('translate must be a boolean')
self.eventq = []
self.translate = translate
self.client = SlackClient(slacktoken)
self.team,self.user = self._auth_test()
self.thread = thread.start_new_thread(self._open,())
def get_event(self,event_filter='all'):
"""
return a single event object or block until an event is
received and return it.
params:
- event_filter(list): Slack event type(s) to filter by. Excluding a
filter returns all slack events. See https://api.slack.com/events
for a listing of valid event types.
"""
self._validate_filters(event_filter)
#return or block until we have something to return
while True:
try:
e = self.eventq.pop(0)
if e.type in event_filter:
return e
except IndexError:
time.sleep(.2)
def events(self,event_filter='all'):
"""
returns a blocking generator yielding Slack event objects
params:
- event_filter(list): Slack event type(s) to filter by. Excluding a
filter returns all slack events. See https://api.slack.com/events
for a listing of valid event types.
"""
self._validate_filters(event_filter)
while True:
e = self.get_event(event_filter=event_filter)
yield(e)
#######
# Internal Methods
#######
def _open(self):
#reset id for sending messages with each new socket
self.send_id = 1
ws = websocket.WebSocketApp(self._get_websocket_url(),
on_message = self._event_handler,
on_error = self._error_handler,
on_open = self._open_handler,
on_close = self._exit_handler)
ws.run_forever()
def _validate_filters(self,filters):
if filters == 'all':
filters = event_types
if type(filters) != list:
raise TypeError('filters must be given as a list')
for f in filters:
if f not in event_types:
raise SlackSocketEventNameError('unknown event type %s\n \
see https://api.slack.com/events' % filters)
def _get_websocket_url(self):
"""
Retrieve a fresh websocket url from slack api
"""
return self.client.get_json(slackurl['rtm'])['url']
def _auth_test(self):
"""
Perform API auth test and get our user and team
"""
test = self.client.get_json(slackurl['test'])
if self.translate:
return (test['team'],test['user'])
else:
return (test['team_id'],test['user_id'])
def _lookup_user(self,user_id):
"""
Look up a username from user id
"""
if user_id == 'USLACKBOT':
return "slackbot"
members = self.client.get_json(slackurl['users'])['members']
for user in members:
if user['id'] == user_id:
return user['name']
else:
return "unknown"
#TODO: add ability for lookup via cname
def _lookup_channel(self,id):
"""
Look up a channelname from channel id
"""
for ctype in ['channels','groups','ims']:
channel_list = self.client.get_json(slackurl[ctype])[ctype]
matching = [ c for c in channel_list if c['id'] == id ]
if matching:
channel = matching[0]
if ctype == 'ims':
cname = self._lookup_user(channel['user'])
else:
cname = channel['name']
return { 'channel_type' : ctype,
'channel_name' : cname }
#if no matches were found
return { 'ctype' : 'unknown',
'channel_name' : 'unknown' }
#######
# Websocket Handlers
#######
def _event_handler(self,ws,event):
log.debug('event recieved: %s' % event)
event = json.loads(event)
#TODO: make use of ctype returned from _lookup_channel
if self.translate:
if event.has_key('user'):
event['user'] = self._lookup_user(event['user'])
if event.has_key('channel'):
c = self._lookup_channel(event['channel'])
event['channel'] = c['channel_name']
self.eventq.append(SlackEvent(event))
def _open_handler(self,ws):
log.info('websocket connection established')
def _error_handler(self,ws,error):
log.critical('websocket error:\n %s' % error)
def _exit_handler(self,ws):
self.thread = thread.start_new_thread(self.open,())
|
Python
| 0
|
@@ -6,46 +6,88 @@
t os
-,json,logging,websocket,requests,time,
+%0Aimport json%0Aimport logging%0Aimport websocket%0Aimport requests%0Aimport time%0Aimport
thre
|
56227360ee3f6b94b3c92e1282034c0cddaced07
|
Add ASCIINEMA_URL
|
bears/python/MypyBear.py
|
bears/python/MypyBear.py
|
from collections import namedtuple
import textwrap
import sys
from coalib.bearlib.abstractions.Linter import linter
from coalib.bears.requirements.PipRequirement import PipRequirement
from coalib.results.RESULT_SEVERITY import RESULT_SEVERITY
from coalib.results.Result import Result
class FlagInfo(namedtuple('FlagInfo', 'arg doc inverse')):
"""
Information about a command-line flag.
:param arg:
The argument to pass to enable the flag.
:param doc:
A help text for the flag.
:param inverse:
Set to ``True`` when the coala option is the inverse of
the subprocess one, for example coala's ``allow_untyped_calls``
is the inverse of mypy's ``--disallow-untyped-calls``.
"""
def want_flag(self, value):
"""
Check if the flag should be added to the argument list.
:param value: The configuration value.
:return: The flag value, may be negated if the flag specifies so.
"""
if self.inverse:
value = not value
return value
FLAG_MAP = {
'allow_untyped_functions': FlagInfo(
arg='--disallow-untyped-defs',
doc='Allow defining functions without type annotations or with '
'incomplete type annotations.',
inverse=True),
'allow_untyped_calls': FlagInfo(
arg='--disallow-untyped-calls',
doc='Allow calling functions without type annotations from '
'typed functions.',
inverse=True),
'check_untyped_function_bodies': FlagInfo(
arg='--check-untyped-defs',
doc='Do not check the interior of functions without type annotations.',
inverse=False),
'strict_optional': FlagInfo(
arg='--strict-optional',
doc='Enable experimental strict checks related to Optional types. See '
'<http://mypy-lang.blogspot.com.es/2016/07/mypy-043-released.html>'
' for an explanation.',
inverse=False),
}
def add_param_docs(param_map):
"""
Append documentation from FLAG_MAP to a function's docstring.
:param param_map:
A mapping of argument names (strings) to FlagInfo objects.
:return:
A decorator that appends flag information to a function's docstring.
"""
def decorator(func):
func.__doc__ = textwrap.dedent(func.__doc__) + '\n'.join(
':param {}:\n{}'.format(name, textwrap.indent(arg.doc, ' '))
for name, arg in param_map.items())
return func
return decorator
# Mypy generates messages in the format:
# blabla.py: note: In function "f":
# blabla.py:2: error: Unsupported operand types for ...
# The "note" messages are only adding info coala should already know,
# so discard those. We're only capturing the errors.
@linter(executable=sys.executable,
prerequisite_check_command=(sys.executable, '-m', 'mypy', '-V'),
output_format="regex",
output_regex=r'(?P<filename>[^:]+):((?P<line>\d+):)? '
'(?P<severity>error): (?P<message>.*)')
class MypyBear:
"""
Type-checks your Python files!
Checks optional static typing using the mypy tool.
See <http://mypy.readthedocs.io/en/latest/basics.html> for info on how to
add static typing.
"""
LANGUAGES = {"Python", "Python 2", "Python 3"}
AUTHORS = {'Petr Viktorin'}
REQUIREMENTS = {PipRequirement('mypy-lang', '0.*')}
AUTHORS_EMAILS = {'encukou@gmail.com'}
LICENSE = 'AGPL-3.0'
# This detects typing errors, which is pretty unique -- it doesn't
# make sense to add a category for it.
CAN_DETECT = set()
@add_param_docs(FLAG_MAP)
def create_arguments(self, filename, file, config_file,
language: str="Python 3",
python_version: str=None,
allow_untyped_functions: bool=True,
allow_untyped_calls: bool=True,
check_untyped_function_bodies: bool=False,
strict_optional: bool=False):
"""
:param language:
Set to ``Python`` or ``Python 3`` to check Python 3.x source.
Use ``Python 2`` for Python 2.x.
:param python_version:
Set the specific Python version, e.g. ``3.5``.
"""
args = ['-m', 'mypy']
if language.lower() == 'python 2':
args.append('--py2')
elif language.lower() not in ('python 3', 'python'):
# Ideally, this would fail the check, but there's no good
# way to fail from create_arguments.
# See https://github.com/coala-analyzer/coala/issues/2573
self.err(
'Language needs to be "Python", "Python 2" or "Python 3". '
'Assuming Python 3.')
if python_version:
args.extend(['--python-version', python_version])
loc = locals()
args.extend(flag.arg for name, flag in FLAG_MAP.items()
if flag.want_flag(loc[name]))
args.append(filename)
return args
|
Python
| 0.000001
|
@@ -3479,16 +3479,68 @@
GPL-3.0'
+%0A ASCIINEMA_URL = 'https://asciinema.org/a/90736'
%0A%0A #
|
c958593e1f1c86555c0069e744d9df7f4fe2412f
|
remove todo
|
slacksocket/client.py
|
slacksocket/client.py
|
import os,json,logging,websocket,requests,time,thread
logging.basicConfig(level=logging.INFO)
log = logging.getLogger('slacksocket')
slackurl = { 'rtm' : 'https://slack.com/api/rtm.start',
'users' : 'https://slack.com/api/users.list' }
#TODO: add event_type groups for all channel events, all group events, etc.
event_types = [ 'hello',
'message',
'channel_marked',
'channel_created',
'channel_joined',
'channel_left',
'channel_deleted',
'channel_rename',
'channel_archive',
'channel_unarchive',
'channel_history_changed',
'im_created',
'im_open',
'im_close',
'im_marked',
'im_history_changed',
'group_joined',
'group_left',
'group_open',
'group_close',
'group_archive',
'group_unarchive',
'group_rename',
'group_marked',
'group_history_changed',
'file_created',
'file_shared',
'file_unshared',
'file_public',
'file_private',
'file_change',
'file_deleted',
'file_comment_added',
'file_comment_edited',
'file_comment_deleted',
'presence_change',
'manual_presence_change',
'pref_change',
'user_change',
'team_join',
'star_added',
'star_removed',
'emoji_changed',
'commands_changed',
'team_pref_change',
'team_rename',
'team_domain_change',
'email_domain_changed',
'bot_added',
'bot_changed',
'accounts_changed',
'team_migration_started' ]
class SlackSocketEventNameError(NameError):
"""
Invalid name
"""
pass
class SlackSocketAPIError(RuntimeError):
"""
Error response from Slack API
"""
pass
class SlackEvent(object):
"""
SlackEvent is an event received from the Slack RTM API
params:
- event(dict)
attributes:
- type: Slack event type
- time: UTC time event was received
"""
def __init__(self,event):
self.type = event['type']
self.time = int(time.time())
self.json = json.dumps(event)
self.event = event
class SlackSocket(object):
#TODO: add method to properly exit, close socket
"""
SlackSocket class provides a streaming interface to the Slack Real Time
Messaging API
params:
- slacktoken(str): token to authenticate with slack
- translate(bool): yield events with human-readable names
rather than id. default true
"""
def __init__(self,slacktoken,translate=True):
if type(translate) != bool:
raise TypeError('translate must be a boolean')
self.events = []
self.token = slacktoken
self.translate = translate
self.thread = thread.start_new_thread(self.open,())
def open(self):
ws = websocket.WebSocketApp(self._get_websocket_url(),
on_message = self._event_handler,
on_error = self._error_handler,
on_open = self._open_handler,
on_close = self._exit_handler)
ws.run_forever()
def get_event(self,type='all'):
"""
return event object in the order received or block until an event is
received and return it.
params:
- type(str): A slack event type to filter by. Default 'all' returns
all slack events. See https://api.slack.com/events for a listing
of valid event types.
"""
if type != 'all':
if type not in event_types:
raise SlackSocketEventNameError('unknown event type %s\n \
see https://api.slack.com/events' % type)
while True:
try:
if type == 'all':
return self.events.pop(0)
else:
e = self.events.pop(0)
if e.type == type:
return e
except IndexError:
pass
def _get_websocket_url(self):
"""
retrieve a fresh websocket url from slack api
"""
r = requests.get(slackurl['rtm'],params={'token':self.token})
rj = r.json()
if not rj['ok']:
raise SlackSocketAPIError('Error from slack api:\n %s' % r.text)
return rj['url']
def _lookup_user(self,user_id):
"""
Look up a username from user id
"""
if user_id == 'USLACKBOT':
return "slackbot"
r = requests.get(slackurl['users'],params={'token':self.token})
rj = r.json()
if not rj['ok']:
log.critical('error from slack api:\n %s' % r)
for user in rj['members']:
if user['id'] == user_id:
return user['name']
else:
return "unknown"
#######
# Handlers
#######
def _event_handler(self,ws,event):
log.debug('event recieved: %s' % event)
event = json.loads(event)
if self.translate:
if event.has_key('user'):
event['user'] = self._lookup_user(event['user'])
#TODO: add channel id lookup
self.events.append(SlackEvent(event))
def _open_handler(self,ws):
log.info('websocket connection established')
def _error_handler(self,ws,error):
log.critical('websocket error:\n %s' % error)
def _exit_handler(self,ws):
self.thread = thread.start_new_thread(self.open,())
|
Python
| 0.000005
|
@@ -2263,61 +2263,8 @@
t):%0A
- #TODO: add method to properly exit, close socket%0A
|
f9e543f8c84f8a6f9d6ead0d2a1f9979d6a0ab8b
|
add write timing
|
humanhive/audio_interface.py
|
humanhive/audio_interface.py
|
import pyaudio
import time
class AudioInterface:
"""
Manages the sound interface. This manages the main callback for the audio
interface and delegates behaviour to the Playback and Recording modules.
"""
def __init__(self,
playback,
recording_queue,
n_channels,
sample_rate,
sample_width,
device_id,
frame_count=1024):
self.playback = playback
self.recording_queue = recording_queue
self.n_channels = n_channels
self.sample_rate = sample_rate
self.sample_width = sample_width
self.frame_count = frame_count
print("frame_count: {}".format(frame_count))
# Initialise pyaudio interface
self.p = pyaudio.PyAudio()
print("Device parameters for device with id: {}\n{}".format(
device_id, self.p.get_device_info_by_index(device_id)))
self.stream = self.p.open(
format=self.p.get_format_from_width(2),
channels=self.n_channels,
rate=self.sample_rate,
output_device_index=device_id,
# input=True,
output=True,
#stream_callback=self.audio_callback,
)
print("Finished initialising audio")
def audio_callback(self, in_data, frame_count, time_info, status):
st = time.time()
# Send recording data
if self.recording_queue is not None:
self.recording_queue.put((in_data, frame_count))
# Get output audio
samples = self.playback.get()
te = time.time() - st
print("Time elapsed: {}".format(te))
return (samples, pyaudio.paContinue)
def start_stream(self):
self.stream.start_stream()
def close_stream(self):
self.stream.stop_stream()
self.stream.close()
self.p.terminate()
def is_active(self):
return self.stream.is_active()
def run(self):
while True:
(data, status) = self.audio_callback(
None, self.frame_count, None, None)
self.stream.write(data, self.frame_count, exception_on_underflow=False)
|
Python
| 0.00001
|
@@ -1548,32 +1548,89 @@
frame_count))%0A%0A
+ print(%22qsize: %7B%7D%22.format(self.playback.qsize()))%0A
# Get ou
@@ -1675,24 +1675,25 @@
back.get()%0A%0A
+%0A
te =
@@ -2194,16 +2194,45 @@
, None)%0A
+ st = time.time()%0A
@@ -2307,8 +2307,69 @@
=False)%0A
+ print(%22Write time: %7B%7D%22.format(time.time() - st))%0A
|
d1e66c414aac60cc7770ddeff091dedc5c0047f6
|
Remove debug `print` from feature extraction
|
feature_extraction/extraction.py
|
feature_extraction/extraction.py
|
import numpy as np
import skimage.exposure as exposure
from .util import AttributeDict
def extract_features(image, measurements):
"""
Given an image as a Numpy array and a set of measurement objects
implementing a compute method returning a feature vector, return a combined
feature vector.
"""
# TODO(liam): parallelize multiple measurements on an image by using Celery
return np.hstack([m.compute(image) for m in measurements])
def normalize_features(X):
# recenter features and normalize over the dataset
X -= np.mean(X, axis=0)
X /= np.linalg.norm(X, axis=0)
# normalize for each record
X /= np.vstack(np.linalg.norm(X, axis=1))
return X
def feature_postprocessing(X, options):
_options = AttributeDict({'normalize': True, 'fill_nans': False})
_options.update(options or {}); options = _options
if options.fill_nans:
X = np.nan_to_num(X)
if options.normalize:
X = normalize_features(X)
return X
def image_preprocessing(im, options):
_options = AttributeDict({'normalize': True, 'equalize': None})
_options.update(options or {}); options = _options
if options.normalize:
im = exposure.rescale_intensity(im)
print options
if options.equalize:
if options.equalize['method'] == "histogram":
im = exposure.equalize_hist(im)
elif options.equalize['method'] == "stretch":
pmin, pmax = np.percentile(im,
(options.equalize['saturation'], 100-options.equalize['saturation']))
im = exposure.rescale_intensity(im, in_range=(pmin, pmax))
return im
|
Python
| 0.000001
|
@@ -1148,24 +1148,8 @@
m)%0A%0A
-%09print options%0A%0A
%09if
|
15652a0b80b0fa0c87ac9ccd33eaada22859bfa2
|
Update the_most_numbers.py
|
checkio/python/elementary/the_most_numbers.py
|
checkio/python/elementary/the_most_numbers.py
|
Python
| 0.998495
|
@@ -0,0 +1,273 @@
+def distance(*args):%0D%0A if args:%0D%0A min = args%5B0%5D%0D%0A max = args%5B0%5D%0D%0A for x in args:%0D%0A if x %3C min:%0D%0A min = x%0D%0A if x %3E max:%0D%0A max = x%0D%0A else:%0D%0A min = 0%0D%0A max = 0%0D%0A return max - min%0D%0A
|
|
2e3341c7e32182cc35f6a658d613c77a72b9b377
|
Modify comments
|
src/marketdata/access/remote/google.py
|
src/marketdata/access/remote/google.py
|
import urllib2
import urllib
from marketdata.utils.transform.google.rawquote_intraday import TranformIntradayQuote
def _getUrl(url, urlconditions):
url_values = urllib.urlencode(urlconditions)
return url + '?' + url_values
def _pullQuote(url, urlconditions):
req = urllib2.Request(_getUrl(url, urlconditions))
response = urllib2.urlopen(req).readlines()
return response
class IntradayMinutes(object):
'''Extract intraday market data from Google finance.
URL to access market data from Google finance:
http://www.google.com/finance/getprices?q=IBM&x=NYSE&i=60&p=5d&f=d,c,h,l,o,v
Description of abbreviations present in the above URL:
q = quote symbol
x = exchange symbol
i = interval in seconds i.e. 60 = 1 minute
p = number of past trading days (max has been 15d)
f = quote format (date, close, high, low, open, volume)
'''
def __init__(self, symbol, exchange, minutes=1, days=1):
'''Constructor
'''
self.url = 'http://www.google.com/finance/getprices'
quoteformat = 'd,c,h,l,o,v'
self.urlconditions = {}
self.urlconditions['q'] = symbol # 'IBM', 'JPM', 'GE', 'AMD'
self.urlconditions['x'] = exchange # 'NYSE', 'INDEXNASDAQ'
self.urlconditions['i'] = str(minutes * 60) # 60 refers to 1 minute interval
self.urlconditions['p'] = str(days) + 'd' # 1d refers to 1 day (max 15 days)
self.urlconditions['f'] = quoteformat # date, close, high, low, open, volume
self.quote = self.__extractTransform()
def __extractRawQuote(self):
return _pullQuote(self.url, self.urlconditions)
def __transformRawQuote(self, raw_quote):
interval = self.urlconditions['i']
return TranformIntradayQuote(raw_quote, interval)
def __extractTransform(self):
raw_quote = self.__extractRawQuote()
return self.__transformRawQuote(raw_quote)
def json(self):
return self.quote.json_uts_chlov()
def dict_np(self):
return self.quote.dts_chlov()
|
Python
| 0
|
@@ -507,36 +507,16 @@
ket data
- from Google finance
:%0A ht
@@ -599,56 +599,27 @@
-Description of abbreviations present in the abov
+Abbreviations in th
e UR
|
3577de6383053e0f8e05d531c8a632be12e89ca6
|
fix for route parser to handle when path=None
|
python/marvin/utils/general/decorators.py
|
python/marvin/utils/general/decorators.py
|
from functools import wraps
# General Decorators
def parseRoutePath(f):
    ''' Decorator to parse generic route path.

    Splits a ``path`` keyword argument of the form ``"a=1/b=2"`` into
    individual keyword arguments before invoking the wrapped view.  Empty
    path segments are skipped and ``path`` itself is removed from kwargs.

    Bug fix: a missing or ``None`` path previously raised
    AttributeError/KeyError; it is now handled gracefully.
    '''
    @wraps(f)
    def decorated_function(inst, *args, **kwargs):
        # Only parse when a non-empty path string was actually supplied.
        if 'path' in kwargs and kwargs['path']:
            for segment in kwargs['path'].split('/'):
                if not segment:
                    continue
                var, value = segment.split('=')
                kwargs[var] = value
        # Drop the raw path; default avoids KeyError when it was absent.
        kwargs.pop('path', None)
        return f(inst, *args, **kwargs)
    return decorated_function
|
Python
| 0.000106
|
@@ -184,16 +184,68 @@
wargs):%0A
+ if 'path' in kwargs and kwargs%5B'path'%5D:%0A
@@ -293,16 +293,20 @@
+
if len(k
@@ -314,16 +314,20 @@
) == 0:%0A
+
@@ -355,16 +355,20 @@
+
var, val
@@ -386,16 +386,20 @@
it('=')%0A
+
|
4c31e94752e635c0826dd6b223201fe7ce0d5220
|
Fix cache_home to expand path
|
rplugin/python3/deoplete/sources/jedi.py
|
rplugin/python3/deoplete/sources/jedi.py
|
import os
import re
import sys
current_dir = os.path.dirname(os.path.abspath(__file__))
jedi_dir = os.path.join(os.path.dirname(current_dir), 'jedi')
sys.path.insert(0, jedi_dir)
import jedi
from .base import Base
class Source(Base):
    """Deoplete completion source backed by the bundled jedi library."""

    def __init__(self, vim):
        Base.__init__(self, vim)

        self.name = 'jedi'
        self.mark = '[jedi]'
        self.filetypes = ['python']
        # Trigger completion after attribute access, decorators, and
        # import/from statements.
        self.input_pattern = (r'[^. \t0-9]\.\w*|^\s*@\w*|' +
                              r'^\s*from\s.+import \w*|' +
                              r'^\s*from \w*|^\s*import \w*')

    def get_complete_position(self, context):
        """Return the column where the word under the cursor starts, or -1."""
        m = re.search(r'\w*$', context['input'])
        return m.start() if m else -1

    def gather_candidates(self, context):
        """Collect completion candidates from jedi for the current buffer."""
        source = '\n'.join(self.vim.current.buffer)
        try:
            completions = self.get_script(
                source, self.vim.current.window.cursor[1]).completions()
        except Exception:
            # jedi can raise on incomplete/broken sources; fail quietly.
            return []
        out = []
        for c in completions:
            word = c.name

            # TODO(zchee): configurable and refactoring
            # Add '(' bracket
            if c.type == 'function':
                word += '('
            # Add '.' for 'self' and 'class'
            elif (word == 'self' or
                  c.type == 'class' or
                  c.type == 'module') and (not re.match(
                      # Bug fix: the first alternative was concatenated to
                      # the next one without '|', so it could never match.
                      r'^\s*from\s.+import \w*|' +
                      r'^\s*from \w*|^\s*import \w*',
                      self.vim.current.line)):
                word += '.'

            # Format c.docstring() for abbr
            if re.match(c.name, c.docstring()):
                abbr = re.sub('"(|)| ",', '',
                              c.docstring().split("\n\n")[0]
                              .split("->")[0]
                              .replace('\n', ' ')
                              )
            else:
                abbr = c.name

            out.append(dict(word=word,
                            abbr=abbr,
                            kind=re.sub('\n| ', '', c.description),
                            info=c.docstring(),
                            icase=1,
                            dup=1
                            ))
        return out

    def get_script(self, source, column):
        """Build a jedi.Script configured for this plugin.

        See http://jedi.jedidjah.ch/en/latest/docs/settings.html for the
        semantics of the settings tweaked below.
        """
        # Append '.' after modules; matching dots are handled by deoplete.
        jedi.settings.add_dot_after_module = True
        # Append '(' after functions (stripped again where inappropriate).
        jedi.settings.add_bracket_after_function = True
        # Let jedi also search statements in all loaded Python buffers.
        jedi.settings.additional_dynamic_modules = [
            b.name for b in self.vim.buffers
            if b.name is not None and b.name.endswith('.py')]
        cache_home = os.getenv('XDG_CACHE_HOME')
        if cache_home is None:
            # Bug fix: expand '~' explicitly — os.path.join does not, so the
            # cache would have been created in a literal './~/.cache' dir.
            cache_home = os.path.expanduser('~/.cache')
        jedi.settings.cache_directory = os.path.join(cache_home, 'jedi')
        row = self.vim.current.window.cursor[0]
        buf_path = self.vim.current.buffer.name
        encoding = self.vim.eval('&encoding')
        return jedi.Script(source, row, column, buf_path, encoding)
|
Python
| 0
|
@@ -3540,16 +3540,35 @@
_home =
+os.path.expanduser(
'~/.cach
@@ -3569,16 +3569,17 @@
/.cache'
+)
%0A
|
9044018db0a909884ada225af12c7252f85aece8
|
Remove dead code
|
examples/tictactoe_td0.py
|
examples/tictactoe_td0.py
|
from capstone.environment import Environment
from capstone.game import TicTacToe
from capstone.mdp import GameMDP
from capstone.player import AlphaBeta, RandPlayer
from capstone.util import ZobristHashing
# class TabularTD0(object):
# def __init__(self, env, policy, alpha, gamma, n_episodes):
# self.env = env
# self.policy = policy
# self.alpha = alpha
# self.gamma = gamme
# self.n_episodes = n_episodes
# def learn(self):
# for episode in range(self.n_episodes):
# pass
# def step(self):
# action = self.policy.choose_action(self.env)
# cur_state = env.cur_state()
# reward = env.make_action(action)
# next_state = env.cur_state()
# new_value = self.table[
class TabularTD0(object):
    """Tabular TD(0) state-value learner over a game MDP environment.

    Plays random episodes against the environment and maintains a value
    table keyed by Zobrist hashes of board positions.
    """

    def __init__(self, env, policy=None, alpha=0.01, gamma=0.99, n_episodes=1000):
        """
        :param env: environment exposing reset/cur_state/actions/do_action/
            is_terminal.
        :param policy: action-selection policy; defaults to a fresh
            RandPlayer().  (Fixes the old mutable-default argument, which
            shared one RandPlayer instance across all learners, and the old
            body, which ignored the supplied policy entirely.)
        :param alpha: learning rate.
        :param gamma: discount factor.
        :param n_episodes: number of episodes run by learn().
        """
        self.env = env
        self.policy = policy if policy is not None else RandPlayer()
        self.alpha = alpha
        self.gamma = gamma
        self.n_episodes = n_episodes
        self.zobrist_hash = ZobristHashing(n_positions=9, n_pieces=2)
        self._table = {}   # state hash -> estimated value
        self._boards = {}  # state hash -> last state seen with that hash

    def learn(self):
        """Run ``n_episodes`` of TD(0) updates, printing progress/results."""
        import random
        for episode in range(self.n_episodes):
            print('Episode {}'.format(episode))
            self.env.reset()
            step = 0
            while not self.env.is_terminal():
                print('Step {}'.format(step))
                cur_state = self.env.cur_state()
                action = random.choice(self.env.actions())
                reward = self.env.do_action(action)
                next_state = self.env.cur_state()
                cur_state_hash = self.zobrist_hash(cur_state.board)
                # Unseen states fall back to these default estimates.
                cur_state_value = self._table.get(cur_state_hash, 0.1)
                next_state_hash = self.zobrist_hash(next_state.board)
                next_state_value = self._table.get(next_state_hash, 0.3)
                # TD(0): V(s) <- V(s) + alpha * (r + gamma * V(s') - V(s))
                new_value = cur_state_value + (self.alpha * (reward + (self.gamma * next_state_value) - cur_state_value))
                self._table[cur_state_hash] = new_value
                self._boards[cur_state_hash] = cur_state
                step += 1
                # Bug fix: this previously read the module-level ``env``
                # instead of this instance's environment.
                if self.env.is_terminal():
                    # Terminal states take the observed reward as their value.
                    self._table[next_state_hash] = reward
        print('Results:')
        print(self._table)
        print(self._boards)
# Demo: learn state values for a small mid-game TicTacToe position.
# The three adjacent string literals concatenate into one 9-character board.
game = TicTacToe(
    'X-O'
    'XO-'
    '-XO'
)
ab = AlphaBeta()
mdp = GameMDP(game, ab, 1)
env = Environment(mdp)
td0 = TabularTD0(env)
td0.learn()
|
Python
| 0.001497
|
@@ -203,580 +203,8 @@
ng%0A%0A
-# class TabularTD0(object):%0A%0A# def __init__(self, env, policy, alpha, gamma, n_episodes):%0A# self.env = env%0A# self.policy = policy%0A# self.alpha = alpha%0A# self.gamma = gamme%0A# self.n_episodes = n_episodes%0A%0A# def learn(self):%0A# for episode in range(self.n_episodes):%0A# pass%0A%0A# def step(self):%0A# action = self.policy.choose_action(self.env)%0A# cur_state = env.cur_state()%0A# reward = env.make_action(action)%0A# next_state = env.cur_state()%0A# new_value = self.table%5B%0A%0A
%0Acla
|
114793d6abce14ece5fbd537cce38230366db365
|
Fix compilation
|
rsqueakvm/plugins/immutability_plugin.py
|
rsqueakvm/plugins/immutability_plugin.py
|
"""
RSqueak/VM plugin which provides support for immutable objects.
Immutable objects can be created as copy of existing objects
or from a list of arguments. The package `ImmutableObjects`, located in
`/repository`, needs to be loaded in the image.
"""
from rsqueakvm.error import PrimitiveFailedError
from rsqueakvm.model.variable import W_BytesObject, W_WordsObject
from rsqueakvm.plugins.immutability import patch_w_object
from rsqueakvm.plugins.immutability.bytes import W_Immutable_BytesObject
from rsqueakvm.plugins.immutability.pointers import (
select_immutable_pointers_class)
from rsqueakvm.plugins.immutability.words import W_Immutable_WordsObject
from rsqueakvm.plugins.plugin import Plugin
from rsqueakvm.storage_classes import BYTES, POINTERS, WORDS
# Plugin singleton the primitives below register themselves with.
ImmutabilityPlugin = Plugin()
# Patch W_Object up front so is_immutable()/immutability support exists
# before any of the primitives can run.
patch_w_object()
@ImmutabilityPlugin.expose_primitive(unwrap_spec=[object])
def primitiveIsImmutable(interp, s_frame, w_recv):
    """
    Answer whether `w_recv` is an immutable object.

    :param interp: The interpreter proxy.
    :param s_frame: The stack frame.
    :param w_recv: The receiver object.
    :returns: `w_true` if `w_recv` is immutable object, otherwise `w_false`.
    """
    space = interp.space
    return space.w_true if w_recv.is_immutable() else space.w_false
@ImmutabilityPlugin.expose_primitive(unwrap_spec=[object, object])
def primitiveImmutableFrom(interp, s_frame, w_cls, w_obj):
    """
    Creates an immutable copy of a given Smalltalk object.

    :param interp: The interpreter proxy.
    :param s_frame: The stack frame.
    :param w_cls: The immutable objects target class.
    :param w_obj: The Smalltalk object to produce an immutable copy from.
    :returns: An immutable copy of `w_obj` with class `w_cls`.
    :raises: PrimitiveFailedError
    """
    space = interp.space
    instance_kind = w_cls.as_class_get_shadow(space).get_instance_kind()
    # Dispatch on the storage layout of the target class.
    if instance_kind == POINTERS:
        pointers = w_obj.fetch_all(space)
        cls = select_immutable_pointers_class(pointers)
        return cls(space, w_cls, pointers)
    elif instance_kind == BYTES and isinstance(w_obj, W_BytesObject):
        return W_Immutable_BytesObject(space, w_cls, w_obj.bytes)
    elif instance_kind == WORDS and isinstance(w_obj, W_WordsObject):
        return W_Immutable_WordsObject(space, w_cls, w_obj.words)
    # Unsupported layout or mismatched source-object type.
    raise PrimitiveFailedError
@ImmutabilityPlugin.expose_primitive(unwrap_spec=None)
def primitiveImmutableFromArgs(interp, s_frame, argcount):
    """
    Returns an immutable instance of the receiver (which is a class) with
    all fields initialized with the arguments given.

    :param interp: The interpreter proxy.
    :param s_frame: The stack frame.
    :param argcount: The number of arguments.
    :returns: An immutable object.
    :raises: PrimitiveFailedError
    """
    if argcount == 0:
        raise PrimitiveFailedError
    # Pop the arguments first, then the receiver class beneath them.
    w_args = s_frame.pop_and_return_n(argcount)[:]
    w_cls = s_frame.pop()
    space = interp.space
    instance_kind = w_cls.as_class_get_shadow(space).get_instance_kind()
    # Dispatch on the storage layout of the receiver class.
    if instance_kind == POINTERS:
        cls = select_immutable_pointers_class(w_args)
        return cls(space, w_cls, w_args)
    elif (instance_kind == BYTES and
            len(w_args) == 1 and isinstance(w_args[0], W_BytesObject)):
        # NOTE(review): w_args[0] is re-indexed after the isinstance check;
        # hoisting it into a local may be required for RPython's annotator
        # to keep the narrowed type — confirm against the translator.
        return W_Immutable_BytesObject(space, w_cls, w_args[0].bytes)
    elif (instance_kind == WORDS and
            len(w_args) == 1 and isinstance(w_args[0], W_WordsObject)):
        return W_Immutable_WordsObject(space, w_cls, w_args[0].words)
    raise PrimitiveFailedError
|
Python
| 0.000001
|
@@ -2927,16 +2927,44 @@
unt)%5B:%5D%0A
+ w_first_arg = w_args%5B0%5D%0A
w_cl
@@ -3286,31 +3286,33 @@
sinstance(w_
-args%5B0%5D
+first_arg
, W_BytesObj
@@ -3373,23 +3373,25 @@
_cls, w_
-args%5B0%5D
+first_arg
.bytes)%0A
@@ -3471,23 +3471,25 @@
tance(w_
-args%5B0%5D
+first_arg
, W_Word
@@ -3554,23 +3554,25 @@
_cls, w_
-args%5B0%5D
+first_arg
.words)%0A
|
0b67fd158ffc027f8245a8211f4d7a21f3ad486d
|
refine the display of rest of line. #24
|
pythonx/cm_sources/cm_keyword_continue.py
|
pythonx/cm_sources/cm_keyword_continue.py
|
# -*- coding: utf-8 -*-
# For debugging, use this command to start neovim:
#
# NVIM_PYTHON_LOG_FILE=nvim.log NVIM_PYTHON_LOG_LEVEL=INFO nvim
#
#
# Please register source before executing any other code, this allow cm_core to
# read basic information about the source without loading the whole module, and
# modules required by this module
from cm import get_src, register_source, get_pos, getLogger
# A completion source with CTRL-X CTRL-N like feature
#
# sort=0 for not using NCM's builtin sorting
register_source(name='cm-keyword-continue',
                priority=6,
                abbreviation='',
                # A "word" is any run of non-whitespace characters.
                word_pattern=r'\S+',
                # sort=0: keep our own _rank-based ordering, not NCM's.
                sort=0,
                # Refresh after whitespace following a word, or on an empty line.
                cm_refresh_patterns=[r'\s+$',r'^$'],)
import re
import copy
logger = getLogger(__name__)
class Source:
    """CTRL-X CTRL-N style completion: suggest words that followed the same
    preceding word(s) elsewhere in any open buffer, ranked by how many
    preceding words match."""

    def __init__(self,nvim):
        self._nvim = nvim

    def cm_refresh(self,info,ctx,*args):
        """Scan all buffers for words following the current context and hand
        ranked candidates to NCM via cm#complete."""
        force = ctx.get('force',False)

        compiled = re.compile(info['word_pattern'])

        typed = ctx['typed']
        if typed.strip()=='' and not force:
            # At the beginning of the line, need force to trigger the popup,
            # Otherwise this will be annoying.
            return

        try:
            # fetch the previous line for better sorting
            last_line = self._nvim.current.buffer[ctx['lnum']-2]
            typed = last_line + '\n' + typed
        except:
            # First line of the buffer has no predecessor; ignore.
            pass

        typed_words = re.findall(compiled,typed)
        if not typed_words:
            return

        # prev_word is the word candidates must follow; prev_words is the
        # context used for ranking.
        prev_word = ''
        if ctx['base']=='':
            prev_word = typed_words[-1]
            prev_words = typed_words
        else:
            if len(typed_words)<2:
                return
            prev_word = typed_words[-2]
            prev_words = typed_words[0:-1]

        # findall returns tuples when the pattern has groups; take group 0.
        if not isinstance(prev_word,str):
            prev_word = prev_word[0]
            prev_words = [e[0] for e in prev_words]

        reversed_prev_words = list(reversed(prev_words))

        matches = []

        # rank for sorting: number of consecutive preceding words (scanning
        # backwards, case-insensitively) that match the typed context.
        def get_rank(word,span,line,last_line):
            prev = last_line+"\n"+line[0:span[0]]
            words = re.findall(compiled,prev)
            if not words:
                return 0
            if not isinstance(words[0],str):
                words = [e[0] for e in words]
            ret = 0
            reserved_words = list(reversed(words))
            for z in zip(reversed_prev_words,reserved_words):
                if z[0].lower()==z[1].lower():
                    ret += 1
                else:
                    break
            return ret

        for buffer in self._nvim.buffers:
            # Yield (word, span, line, previous_line) for every word in the
            # buffer, fetching lines in chunks of 200 to limit RPC traffic.
            def word_generator():
                step = 200
                line_cnt = len(buffer)
                for i in range(0,line_cnt,step):
                    lines = buffer[i:i+step]
                    last_line = ''
                    for line in lines:
                        for word in re.finditer(compiled,line):
                            yield word.group(),word.span(),line,last_line
                        last_line = line
            try:
                tmp_prev_word = ''
                for word,span,line,last_line in word_generator():
                    # A candidate is any word that directly follows an
                    # occurrence of prev_word.
                    if tmp_prev_word==prev_word:
                        rest_of_line = line[span[1]:]
                        hint = rest_of_line
                        # Trim the hint to whole words within the first 50 chars.
                        matched = re.compile('\s*(\S+(\s+|$))*').search(rest_of_line,0,50)
                        logger.info('hint: [%s]', hint)
                        if matched:
                            hint = matched.group()
                            logger.info('new hint: [%s]', hint)
                        hint = hint.strip()
                        matches.append(dict(word=word + re.findall(r'\s*',line[span[1]:])[0], menu=hint, _rest_of_line=rest_of_line, _rank=get_rank(word,span,line,last_line)))
                    tmp_prev_word = word
            except Exception as ex:
                logger.exception("Parsing buffer [%s] failed", buffer)

        # sort the result based on total match
        matches.sort(key=lambda e: e['_rank'], reverse=True)

        if matches:
            # add rest_of_line completion for the highest rank
            e = copy.deepcopy(matches[0])
            e['abbr'] = e['word'] + e['menu'] + '...'
            e['word'] = e['word'].rstrip() + e['_rest_of_line']
            e['menu'] = ''
            matches.insert(1,e)

        if not force:
            # filter by ranking
            matches = [e for e in matches if e['_rank']>=3 ]

        logger.info('matches %s', matches)
        ret = self._nvim.call('cm#complete', info['name'], ctx, ctx['startcol'], matches)
ret = self._nvim.call('cm#complete', info['name'], ctx, ctx['startcol'], matches)
|
Python
| 0.999761
|
@@ -4258,24 +4258,26 @@
%0A
+ #
e%5B'abbr'%5D =
@@ -4307,16 +4307,108 @@
+ '...'%0A
+ e%5B'abbr'%5D = '%3Cthe rest%3E '%0A e%5B'menu'%5D = e%5B'word'%5D + e%5B'menu'%5D + '...'%0A
@@ -4467,35 +4467,8 @@
e'%5D%0A
- e%5B'menu'%5D = ''%0A
|
66586d0fa74a7b109305d6330b2448c32a54bd1b
|
Fix lints
|
flask_fs/backends/__init__.py
|
flask_fs/backends/__init__.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import six
from flask_fs import files
__all__ = [i.encode('ascii') for i in ('BaseBackend', 'DEFAULT_BACKEND')]
DEFAULT_BACKEND = 'local'
class BaseBackend(object):
    '''
    Abstract interface that concrete storage backends implement.

    Subclasses override the primitive operations (exists/open/read/write/
    delete/copy/serve/get_metadata); ``move`` and ``metadata`` are built
    on top of them here.
    '''
    root = None

    DEFAULT_MIME = 'application/octet-stream'

    def __init__(self, name, config):
        self.name = name
        self.config = config

    def exists(self, filename):
        '''Tell whether ``filename`` exists in the storage.'''
        raise NotImplementedError('Existance checking is not implemented')

    def open(self, filename, *args, **kwargs):
        '''Open ``filename`` (relative to the storage root) as a file object.'''
        raise NotImplementedError('Open operation is not implemented')

    def read(self, filename):
        '''Return the content of ``filename``.'''
        raise NotImplementedError('Read operation is not implemented')

    def write(self, filename, content):
        '''Store ``content`` under ``filename``.'''
        raise NotImplementedError('Write operation is not implemented')

    def delete(self, filename):
        '''Remove ``filename`` from the storage.'''
        raise NotImplementedError('Delete operation is not implemented')

    def copy(self, filename, target):
        '''Duplicate ``filename`` under the ``target`` path.'''
        raise NotImplementedError('Copy operation is not implemented')

    def move(self, filename, target):
        '''
        Move ``filename`` to ``target``.

        The default implementation performs a copy followed by a delete;
        backends with a native rename should override it.
        '''
        self.copy(filename, target)
        self.delete(filename)

    def save(self, file_or_wfs, filename, overwrite=False):
        '''
        Save a file-like object or a `werkzeug.FileStorage` under ``filename``.

        :param file_or_wfs: The file or the storage to be saved.
        :param filename: The destination in the storage.
        :param overwrite: if `False`, raise an exception if file exists in storage
        :raises FileExists: when file exists and overwrite is `False`
        '''
        self.write(filename, file_or_wfs.read())
        return filename

    def metadata(self, filename):
        '''
        Fetch all available metadata for a given file.
        '''
        meta = self.get_metadata(filename)
        # Fix backend mime misdetection
        meta['mime'] = meta.get('mime') or files.mime(filename, self.DEFAULT_MIME)
        return meta

    def get_metadata(self, filename):
        '''
        Backend-specific hook returning the raw metadata for a given file.
        '''
        raise NotImplementedError('Copy operation is not implemented')

    def serve(self, filename):
        '''Stream ``filename`` back to the client.'''
        raise NotImplementedError('serve operation is not implemented')

    def as_binary(self, content, encoding='utf8'):
        '''Coerce ``content`` to bytes suitable for a binary write.'''
        if hasattr(content, 'read'):
            return content.read()
        if isinstance(content, six.text_type):
            return content.encode(encoding)
        return content
|
Python
| 0.000006
|
@@ -2677,20 +2677,16 @@
rn meta%0A
-
%0A def
|
35399c57d0bc309220d20bcbf443c5483171ca1f
|
Initialize received queue in test client
|
flask_socketio/test_client.py
|
flask_socketio/test_client.py
|
import uuid
from socketio import packet
from werkzeug.test import EnvironBuilder
class SocketIOTestClient(object):
    """
    This class is useful for testing a Flask-SocketIO server. It works in a
    similar way to the Flask Test Client, but adapted to the Socket.IO server.

    :param app: The Flask application instance.
    :param socketio: The application's ``SocketIO`` instance.
    :param namespace: The namespace for the client. If not provided, the client
                      connects to the server on the global namespace.
    """
    # Shared across instances and keyed by session id, so several test
    # clients can coexist against one mocked server.
    queue = {}
    ack = None

    def __init__(self, app, socketio, namespace=None):
        def _mock_send_packet(sid, pkt):
            # Intercepts server->client packets: events are queued per
            # session for get_received(); ACK payloads are stashed so
            # emit()/send() can return them.
            if pkt.packet_type == packet.EVENT or \
                    pkt.packet_type == packet.BINARY_EVENT:
                if sid not in self.queue:
                    self.queue[sid] = []
                if pkt.data[0] == 'message' or pkt.data[0] == 'json':
                    self.queue[sid].append({'name': pkt.data[0],
                                            'args': pkt.data[1],
                                            'namespace': pkt.namespace or '/'})
                else:
                    self.queue[sid].append({'name': pkt.data[0],
                                            'args': pkt.data[1:],
                                            'namespace': pkt.namespace or '/'})
            elif pkt.packet_type == packet.ACK or \
                    pkt.packet_type == packet.BINARY_ACK:
                self.ack = {'args': pkt.data,
                            'namespace': pkt.namespace or '/'}

        self.app = app
        self.sid = uuid.uuid4().hex
        # Bug fix: initialize this session's received-message queue up front
        # so get_received() does not raise KeyError when called before the
        # server has emitted any event.
        self.queue[self.sid] = []
        self.callback_counter = 0
        self.socketio = socketio
        socketio.server._send_packet = _mock_send_packet
        socketio.server.environ[self.sid] = {}
        socketio.server.manager.initialize(socketio.server)
        self.connect(namespace)

    def connect(self, namespace=None):
        """Connect the client.

        :param namespace: The namespace for the client. If not provided, the
                          client connects to the server on the global
                          namespace.

        Note that it is usually not necessary to explicitly call this method,
        since a connection is automatically established when an instance of
        this class is created. An example where this method would be useful
        is when the application accepts multiple namespace connections.
        """
        environ = EnvironBuilder('/socket.io').get_environ()
        environ['flask.app'] = self.app
        self.socketio.server._handle_eio_connect(self.sid, environ)
        if namespace is not None and namespace != '/':
            pkt = packet.Packet(packet.CONNECT, namespace=namespace)
            with self.app.app_context():
                self.socketio.server._handle_eio_message(self.sid,
                                                         pkt.encode())

    def disconnect(self, namespace=None):
        """Disconnect the client.

        :param namespace: The namespace to disconnect. The global namespace is
                          assumed if this argument is not provided.
        """
        pkt = packet.Packet(packet.DISCONNECT, namespace=namespace)
        with self.app.app_context():
            self.socketio.server._handle_eio_message(self.sid, pkt.encode())

    def emit(self, event, *args, **kwargs):
        """Emit an event to the server.

        :param event: The event name.
        :param *args: The event arguments.
        :param callback: ``True`` if the client requests a callback, ``False``
                         if not. Note that client-side callbacks are not
                         implemented, a callback request will just tell the
                         server to provide the arguments to invoke the
                         callback, but no callback is invoked. Instead, the
                         arguments that the server provided for the callback
                         are returned by this function.
        :param namespace: The namespace of the event. The global namespace is
                          assumed if this argument is not provided.
        """
        namespace = kwargs.pop('namespace', None)
        callback = kwargs.pop('callback', False)
        pkt_id = None
        if callback:
            # A non-None packet id tells the server a callback is requested.
            self.callback_counter += 1
            pkt_id = self.callback_counter
        pkt = packet.Packet(packet.EVENT, data=[event] + list(args),
                            namespace=namespace, id=pkt_id, binary=False)
        self.ack = None
        with self.app.app_context():
            self.socketio.server._handle_eio_message(self.sid, pkt.encode())
        if self.ack is not None:
            return self.ack['args'][0] if len(self.ack['args']) == 1 \
                else self.ack['args']

    def send(self, data, json=False, callback=False, namespace=None):
        """Send a text or JSON message to the server.

        :param data: A string, dictionary or list to send to the server.
        :param json: ``True`` to send a JSON message, ``False`` to send a text
                     message.
        :param callback: ``True`` if the client requests a callback, ``False``
                         if not. Note that client-side callbacks are not
                         implemented, a callback request will just tell the
                         server to provide the arguments to invoke the
                         callback, but no callback is invoked. Instead, the
                         arguments that the server provided for the callback
                         are returned by this function.
        :param namespace: The namespace of the event. The global namespace is
                          assumed if this argument is not provided.
        """
        msg = 'json' if json else 'message'
        return self.emit(msg, data, callback=callback, namespace=namespace)

    def get_received(self, namespace=None):
        """Return the list of messages received from the server.

        Since this is not a real client, any time the server emits an event,
        the event is simply stored. The test code can invoke this method to
        obtain the list of events that were received since the last call.

        :param namespace: The namespace to get events from. The global
                          namespace is assumed if this argument is not
                          provided.
        """
        namespace = namespace or '/'
        r = [pkt for pkt in self.queue[self.sid]
             if pkt['namespace'] == namespace]
        self.queue[self.sid] = [pkt for pkt in self.queue[self.sid]
                                if pkt not in r]
        return r
|
Python
| 0
|
@@ -1657,16 +1657,50 @@
4().hex%0A
+ self.queue%5Bself.sid%5D = %5B%5D%0A
|
27acea8beae7876159f142add8d3e55b62d61f8f
|
Add read method to modulators
|
feder/questionaries/modulator.py
|
feder/questionaries/modulator.py
|
from django import forms
from django.utils.translation import ugettext as _
class BaseBlobFormModulator(object):
    """Base class for question modulators that build forms from a blob dict.

    Subclasses must implement :meth:`create` and :meth:`answer`; both
    receive the form's ``fields`` dict to populate.
    """
    description = None

    def __init__(self, blob=None):
        # ``blob`` holds the stored question definition (name, help_text, ...).
        self.blob = blob or {}
        super(BaseBlobFormModulator, self).__init__()

    def create(self, fields):
        """Populate ``fields`` for the question-creation form.

        Consistency fix: subclasses implement ``create(self, fields)``;
        the abstract stub previously took no ``fields`` argument.
        """
        raise NotImplementedError("Provide method 'create'")

    def answer(self, fields):
        """Populate ``fields`` for the answer form."""
        raise NotImplementedError("Provide method 'answer'")
class BaseSimpleModulator(BaseBlobFormModulator):
    # Concrete subclasses set this to the form field class used for answers.
    output_field_cls = None

    def create(self, fields):
        """Build the form fields shown when defining a question of this kind."""
        fields['name'] = forms.CharField(label=_("Question"))
        fields['help_text'] = forms.CharField(label=_("Description of question"))
        fields['required'] = forms.BooleanField(label=_("This fields is required?"))

    def answer(self, fields):
        """Build the single answer field from the stored blob definition."""
        fields['value'] = self.output_field_cls(label=self.blob['name'],
            help_text=self.blob['help_text'], required=self.blob.get('required', True))
class CharModulator(BaseSimpleModulator):
    description = "Char modulator"
    output_field_cls = forms.CharField


class IntegerModulator(BaseSimpleModulator):
    description = "Integer modulator"
    # NOTE(review): uses forms.CharField rather than forms.IntegerField —
    # confirm whether free-text integers are intended here.
    output_field_cls = forms.CharField


class EmailModulator(BaseSimpleModulator):
    description = "E-mail modulator"
    # NOTE(review): likewise not forms.EmailField — confirm.
    output_field_cls = forms.CharField


# Registry mapping question-type keys to their modulator classes.
modulators = {'char': CharModulator, 'int': IntegerModulator, 'email': EmailModulator}
|
Python
| 0.000001
|
@@ -268,24 +268,32 @@
create(self
+, fields
):%0A r
@@ -314,24 +314,47 @@
entedError(%22
+Provide method 'create'
%22)%0A%0A def
@@ -364,16 +364,24 @@
wer(self
+, fields
):%0A
@@ -410,16 +410,133 @@
dError(%22
+Provide method 'answer'%22)%0A%0A def read(self, cleaned_data):%0A raise NotImplementedError(%22Provide method 'read'
%22)%0A%0A%0Acla
@@ -1063,16 +1063,88 @@
True))%0A%0A
+ def read(self, cleaned_data):%0A return cleaned_data%5B'value'%5D%0A%0A
%0Aclass C
@@ -1534,16 +1534,30 @@
dulator,
+%0A
'int':
@@ -1573,16 +1573,30 @@
dulator,
+%0A
'email'
|
90e1b254266155abded62bc3155785961acc0ff0
|
Split filepath and count in credential module
|
bin/Credential.py
|
bin/Credential.py
|
#!/usr/bin/env python2
# -*-coding:UTF-8 -*
import time
from packages import Paste
from pubsublogger import publisher
from Helper import Process
import re
if __name__ == "__main__":
    # AIL-style worker: pull paste paths from a queue, scan each paste for
    # email:password credentials and report them via the publisher.
    publisher.port = 6380
    publisher.channel = "Script"
    config_section = "Credential"
    p = Process(config_section)
    publisher.info("Find credentials")

    # Escalate to a warning when a paste holds more than this many creds.
    critical = 10

    # NOTE(review): this pattern keeps JavaScript-style '/.../' delimiters,
    # which Python's re treats as literal characters, so it is unlikely to
    # ever match — confirm and strip the delimiters if website detection is
    # expected to work.
    regex_web = "/^(https?:\/\/)?([\da-z\.-]+)\.([a-z\.]{2,6})([\/\w \.-]*)*\/?$/"
    # email:password pairs, e.g. "user@example.com:hunter2"
    regex_cred = "[a-zA-Z0-9._-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,6}:[a-zA-Z0-9\_\-]+"
    while True:
        filepath = p.get_from_set()
        if filepath is None:
            # Queue empty: idle and poll again.
            publisher.debug("Script Credential is Idling 10s")
            print('Sleeping')
            time.sleep(10)
            continue

        paste = Paste.Paste(filepath)
        content = paste.get_p_content()
        creds = set(re.findall(regex_cred, content))
        if len(creds) == 0:
            continue

        sites = set(re.findall(regex_web, content))

        message = '{} credentials found.'.format(len(creds))
        if sites:
            message += ' Related websites: {}'.format(', '.join(sites))

        to_print = 'Credential;{};{};{};{}'.format(paste.p_source, paste.p_date, paste.p_name, message)
        print('\n '.join(creds))

        if len(creds) > critical:
            print("========> Found more than 10 credentials on this file : {}".format(filepath))
            publisher.warning(to_print)
            if sites:
                print("=======> Probably on : {}".format(', '.join(sites)))
        else:
            publisher.info(to_print)
|
Python
| 0
|
@@ -356,18 +356,17 @@
tical =
-10
+8
%0A%0A re
@@ -548,24 +548,23 @@
-filepath
+message
= p.get
@@ -586,24 +586,23 @@
if
-filepath
+message
is None
@@ -696,16 +696,16 @@
eping')%0A
-
@@ -737,32 +737,197 @@
continue%0A%0A
+ filepath, count = message.split()%0A%0A if count %3C 5:%0A # Less than 5 matches from the top password list, false positive.%0A continue%0A%0A
paste =
@@ -1529,17 +1529,17 @@
entials
-o
+i
n this f
|
e18047a3cb3c8303bf64dc9ce5fc230e29b25b56
|
Fix fac-gitall.py
|
bin/fac-gitall.py
|
bin/fac-gitall.py
|
#!/usr/bin/env python3
import sys
import os
import lnls
#import git
from termcolor import colored
import subprocess
git_functions = ('pull','push','status','diff','clone')
def run_git_clone():
    """Clone all lnls-fac repositories into ``lnls.folder_code``.

    Requires the target folder to already exist with correct permissions;
    each repository is cloned over SSH from github.com/lnls-fac.
    """
    if not os.path.exists(lnls.folder_code):
        # Fix: the message previously named the script 'gitall.py', but this
        # file is installed as fac-gitall.py.
        print('fac-gitall.py: please create ' + lnls.folder_code + ' folder with correct permissions first!')
        return
    all_repos = ('collective_effects',
                 'fieldmaptrack',
                 'job_manager',
                 'lnls',
                 'mathphys',
                 'MatlabMiddleLayer',
                 'pyaccel',
                 'scripts',
                 'sirius',
                 'sirius_parameters',
                 'sirius_wiki',
                 'tools',
                 'trackcpp',
                 'tracy_sirius',
                 'va',
                 )
    for repo in all_repos:
        cmd = 'git clone ssh://git@github.com/lnls-fac/' + repo + '.git'
        os.system(cmd)
def run_git(func):
    """Run the given git subcommand in every checkout under lnls.folder_code.

    ``clone`` is special-cased because the repositories do not exist yet.
    """
    if func == 'clone': return run_git_clone()
    fnames = os.listdir(lnls.folder_code)
    for fname in fnames:
        repo_folder = os.path.join(lnls.folder_code, fname)
        # Skip entries that are not git checkouts.
        if not os.path.exists(os.path.join(repo_folder,'.git')): continue
        print('processing ' + func + colored(' <'+fname+'>','yellow')+'...')
        # Run inside the repo via a shell so 'cd' takes effect.
        cmd = 'cd ' + repo_folder + '; git ' + func
        text = subprocess.call([cmd], shell=True, stdout=sys.stdout)
        print('...ok')
        print()
if __name__ == '__main__':
    # Exactly one argument is required and it must be a supported git verb.
    if len(sys.argv) != 2 or sys.argv[1] not in git_functions:
        # Fix: the usage line previously named the script 'gitall.py', but
        # this file is installed as fac-gitall.py.
        print('usage: fac-gitall.py [' + '|'.join(git_functions) + ']')
    else:
        print()
        run_git(sys.argv[1])
|
Python
| 0
|
@@ -252,16 +252,20 @@
print('
+fac-
gitall.p
@@ -1589,16 +1589,20 @@
'usage:
+fac-
gitall.p
|
1b172c592bb5efc1a0dcf8f18d6ea6a1037ec9ff
|
Clean things up a bit
|
filebutler_upload/filehandler.py
|
filebutler_upload/filehandler.py
|
import requests
#import os
#from ConfigParser import RawConfigParser
#from text_table import TextTable
class Filemanager:
    """Thin client for the filebutler HTTP API (list/delete/upload)."""

    def __init__(self, url, username, password):
        self.headers = {'Accept': 'application/json'}
        self.username = username
        self.password = password
        self.url = url

    def _credentials(self):
        # Credentials are sent as form data on every request.
        return {
            'username': self.username,
            'password': self.password,
        }

    def list(self):
        '''
        List all files uploaded by the user.

        :returns: mapping of download hash to filename, or {} on failure.
        '''
        response = requests.post(
            self.url + 'files',
            data=self._credentials(),
            headers=self.headers
        )
        if response.status_code == 200:
            return response.json['message']
        else:
            return {}

    def delete(self, hash):
        ''' delete specified hash '''
        if hash == 'all':
            # NOTE(review): bulk delete looks unfinished — confirm intent.
            pass
        response = requests.post(
            self.url + hash + '/delete',
            data=self._credentials(),
            headers=self.headers
        )
        return response.text

    def upload(self, upload_file,
               download_password, one_time_download, expire):
        '''
        Upload ``upload_file``.

        :param upload_file: file-like object to send.
        :param download_password: password required to download (may be '').
        :param one_time_download: truthy to delete the file after one download.
        :param expire: lifetime of the upload.
        :returns: the raw requests response.
        '''
        files = {'file': upload_file}
        # Bug fix: this method previously read self.config/self.options,
        # which do not exist on this class, instead of its own parameters —
        # it raised AttributeError on every call.
        data = self._credentials()
        data.update({
            'download_password': download_password,
            'one_time_download': '1' if one_time_download else '0',
            'expire': expire
        })
        response = requests.post(
            self.url,
            data=data,
            files=files,
            headers=self.headers
        )
        return response
# NOTE(review): ad-hoc manual test scaffolding executed at import time.
# The final ``print`` references ``fm``, which is only defined in the
# commented-out lines above, so importing this module fails with a
# NameError — confirm this block should be removed entirely.
# For testing, remove when finished.
#config = RawConfigParser()
#config.read(os.path.expanduser('~/.filebutler-upload.conf'))
#username = config.get('settings', 'username')
#password = config.get('settings', 'password')
#url = config.get('settings', 'upload_url')
#fm = Filemanager(url, username, password)
#t = TextTable((40, 'Download hash'), (35, 'Filename'))
#for hash, filename in fm.list().iteritems():
#    t.row(hash, filename)
#print t.draw()
print fm.delete('a13170f4cdbd96743e18126306ddba484785ba6b')
|
Python
| 0.000008
|
@@ -13,95 +13,8 @@
sts%0A
-#import os%0A#from ConfigParser import RawConfigParser%0A#from text_table import TextTable%0A
%0A%0Acl
@@ -809,36 +809,32 @@
= %7B%0A
-
'username': self
@@ -836,36 +836,32 @@
self.username,%0A
-
'pas
@@ -875,36 +875,32 @@
self.password,%0A
-
%7D%0A%0A
@@ -1248,40 +1248,16 @@
elf.
-config.get('settings', '
username
'),%0A
@@ -1252,18 +1252,16 @@
username
-')
,%0A
@@ -1291,40 +1291,16 @@
elf.
-config.get('settings', '
password
'),%0A
@@ -1295,18 +1295,16 @@
password
-')
,%0A
@@ -1334,29 +1334,25 @@
sword':
-self.options.
+download_
password
@@ -1401,28 +1401,25 @@
if
-self.options.
one
+_
time
+_download
els
@@ -1455,28 +1455,13 @@
e':
-self.options.lifetim
+expir
e%0A
@@ -1576,16 +1576,28 @@
s=files,
+%0A
headers
@@ -1648,525 +1648,5 @@
onse
-%0A%0A%0A# For testing, remove when finished.%0A#config = RawConfigParser()%0A#config.read(os.path.expanduser('~/.filebutler-upload.conf'))%0A%0A#username = config.get('settings', 'username')%0A#password = config.get('settings', 'password')%0A#url = config.get('settings', 'upload_url')%0A%0A#fm = Filemanager(url, username, password)%0A%0A%0A#t = TextTable((40, 'Download hash'), (35, 'Filename'))%0A#for hash, filename in fm.list().iteritems():%0A# t.row(hash, filename)%0A#print t.draw()%0A%0Aprint fm.delete('a13170f4cdbd96743e18126306ddba484785ba6b')
%0A
|
31659d51c81390bebeddac44cd3618543dc90c51
|
Add support for timeout values
|
googlemaps/common.py
|
googlemaps/common.py
|
"""
Common functionality for modules in the googlemaps package, such as performing
HTTP requests.
"""
import base64
from datetime import datetime
from datetime import timedelta
import hashlib
import hmac
import requests
import urllib
_VERSION = "0.1"
_USER_AGENT = "GoogleGeoApiClientPython/%s" % _VERSION
class Context(object):
    """Holds state between requests, such as credentials (API key), timeout
    settings"""

    def __init__(self, key=None, client_id=None, client_secret=None,
                 timeout=None, retry_timeout=60):
        """
        :param key: Maps API key. Required, unless "client_id" and
            "client_secret" are set.
        :type key: basestring

        :param timeout: Timeout for requests, in seconds.
        :type timeout: int

        :param retry_timeout: Timeout across multiple retriable requests, in
            seconds.
        :type retry_timeout: int

        :param client_id: (for Maps API for Work customers) Your client ID.
        :type client_id: basestring

        :param client_secret: (for Maps API for Work customers) Your client
            secret (base64 encoded).
        :type client_secret: basestring

        :raises Exception: if neither an API key nor both enterprise
            credentials are supplied, or the key looks malformed.
        """
        if not key and not (client_secret and client_id):
            raise Exception("Must provide API key or enterprise credentials "
                            "with context object.")

        if key and not key.startswith("AIza"):
            # Google API keys all carry the "AIza" prefix.
            raise Exception("Invalid API key provided.")

        self.key = key
        self.timeout = timeout
        self.client_id = client_id
        self.client_secret = client_secret
        self.retry_timeout = timedelta(seconds=retry_timeout)

    def _auth_url(self, path, params):
        """Returns the path and query string portion of the request URL, first
        adding any necessary parameters.

        API-key auth takes precedence over enterprise HMAC signing; the
        constructor guarantees at least one of the two is configured.

        NOTE(review): ``urllib.urlencode`` is Python 2 only — confirm the
        interpreter version this module targets.

        :param path: The path portion of the URL.
        :type path: basestring
        :param params: URL parameters.
        :type params: dict
        :rtype: basestring
        """
        if self.key:
            params["key"] = self.key
            return path + "?" + urllib.urlencode(params)

        if self.client_id and self.client_secret:
            params["client"] = self.client_id
            path = path + "?" + urllib.urlencode(params)
            sig = _hmac_sign(self.client_secret, path)
            return path + "&signature=" + sig
def _hmac_sign(secret, s):
"""Returns a basee64-encoded HMAC-SHA1 signature of a given string.
:param secret: The key used for the signature, base64 encoded.
:type secret: basestring
:param s: The string.
:type s: basestring
:rtype: basestring
"""
sig = hmac.new(base64.urlsafe_b64decode(secret), s, hashlib.sha1)
return base64.urlsafe_b64encode(sig.digest())
def _get(ctx, url, params, first_request_time=None):
"""Performs HTTP GET request with credentials, returning the body as JSON.
:param ctx: Shared context parameters.
:type ctx: googlemaps.Context
:param url: URL path for the request
:type url: basestring
:param params: HTTP GET parameters
:type params: dict
:param first_request_time: The time of the first request (None if no retries
have occurred).
:type first_request_time: datetime.datetime
"""
if not first_request_time:
first_request_time = datetime.now()
# TODO(mdr-eng) implement back-off.
if datetime.now() - first_request_time > ctx.retry_timeout:
raise Exception("Timed out while retrying.")
# TODO(mdr-eng): implement rate limiting, etc.
# TODO(mdr-eng): add jitter (might not be necessary since most uses will be
# single threaded)
resp = requests.get(
"https://maps.googleapis.com" + ctx._auth_url(url, params),
headers={"User-Agent": _USER_AGENT},
verify=True) # NOTE(cbro): verify SSL certs.
if resp.status_code in [500, 503, 504]:
# Retry request.
return _get(ctx, url, params, first_request_time)
if resp.status_code != 200:
resp.raise_for_status() # raises a requests.exceptions.HTTPError
body = resp.json()
if body["status"] == "OK" or body["status"] == "ZERO_RESULTS":
return body
if body["status"] == "OVER_QUERY_LIMIT":
# Retry request.
return _get(ctx, url, params, first_request_time)
# TODO(mdr-eng): use body["error_message"] if present.
raise Exception("API error: %s" % body["status"])
|
Python
| 0.000001
|
@@ -518,16 +518,74 @@
ut=None,
+ connect_timeout=None, read_timeout=None,%0A
retry_t
@@ -763,25 +763,204 @@
am timeout:
-T
+Combined connect and read timeout for HTTP requests, in%0A seconds. Specify %22None%22 for no timeout.%0A :type timeout: int%0A%0A :param connect_timeout: Connection t
imeout for r
@@ -958,16 +958,21 @@
out for
+HTTP
requests
@@ -979,32 +979,305 @@
, in
- seconds.%0A :type
+%0A seconds. You should specify read_timeout in addition to this option.%0A :type connect_timeout: int%0A%0A :param read_timeout: Read timeout for HTTP requests, in%0A seconds. You should specify connect_timeout in addition to this%0A option.%0A :type read_
time
@@ -2005,38 +2005,225 @@
key%0A
- self.timeout = timeout
+%0A if timeout and (connect_timeout or read_timeout):%0A raise ValueError(%22Specify either timeout, or connect_timeout and read_timeout%22)%0A%0A self.timeout = timeout or (connect_timeout, read_timeout)
%0A
@@ -4217,168 +4217,8 @@
%22)%0A%0A
- # TODO(mdr-eng): implement rate limiting, etc.%0A # TODO(mdr-eng): add jitter (might not be necessary since most uses will be%0A # single threaded)%0A
@@ -4351,16 +4351,45 @@
AGENT%7D,%0A
+ timeout=ctx.timeout,%0A
|
9faf6195f0b5418a991c51f820fa446649ba05c0
|
Clean up formatting. Allow overlap of sections' text.
|
generate_sections.py
|
generate_sections.py
|
import csv
import sys
class FormatLine:
def __init__(self, text, is_start=False):
self.text = text
self.score = 0
self.is_start = is_start
class NormalLine:
def __init__(self, row):
self.row = row
class Character:
def __init__(self, row):
self.row = row
def key(self):
return self.row[0]
def value(self):
return self.row[2]
class Para:
def __init__(self, row):
self.row = row
def play(self):
return self.row[0]
def character(self):
return self.row[3]
class Chapter:
def __init__(self, row):
self.row = row
def play(self):
return self.row[0]
def key(self):
return (self.row[2], self.row[3])
def value(self):
return self.row[4]
def hash_array(array):
res = {}
for row in array:
res[ row.key() ] = row.value()
return res
def read_into_array(filename, classname):
array = []
fh = open(filename, 'rb')
spamreader = csv.reader(fh, delimiter=',', quotechar='~')
for row in spamreader:
array.append(classname(row))
fh.close()
return array
def format_play(play_paras, play_acts):
chapter_dict = hash_array(acts)
formatted_lines = []
# Calculate number of lines
last_line = int(play_paras[-1].row[2])
line_jump = 0
last_act = None
last_scene = None
for row in play_paras:
play = row.play()
ln = int(row.row[2])
dialog = row.row[4]
act = row.row[8]
scene = row.row[9]
if act != last_act:
# Insert this chapter
formatted_lines.append(FormatLine("{: ^100}".format("ACT " + act), True))
if scene != last_scene:
# Insert this chapter
formatted_lines.append(FormatLine("{: ^100}".format("SCENE " + scene, True)))
desc = chapter_dict[ (act, scene) ]
formatted_lines.append(FormatLine("{: ^100}".format(desc), True))
# Stage direction
if row.character() == 'xxx':
formatted_lines.append(FormatLine("{0: <20} {1}".format('', dialog), True))
else:
# Speech
# Look up character name
character = char_dict[row.character()]
dlines = dialog.split('[p]')
for d in dlines:
d = d.rstrip()
formatted_lines.append(FormatLine("{0: <20} {1: <60} {2}".format(character, d, ln)))
character = ''
ln += 1
formatted_lines.append(FormatLine(''))
last_act = act
last_scene = scene
return formatted_lines
def generate_play(playcode, output_path):
def is_x(a):
if a.play() == playcode:
return True
return False
# Filter to 12th Night
para12 = filter(is_x, paras)
acts12 = filter(is_x, acts)
formatted_lines = format_play(para12, acts12)
# Split into chunks of N lines
# Simplest possible atm
base = 0
chunk_id = 1
chunk_size = 75
while base < len(formatted_lines):
end = base + 75
# Find the end
text = [x.text for x in formatted_lines[base:base + chunk_size]]
import os
filename = "section_%d.html" % (chunk_id)
fh = open(os.path.join(output_path, filename), 'w')
fh.write('\n'.join(text))
fh.close()
base += chunk_size
chunk_id += 1
paras = read_into_array('oss/Paragraphs.txt', Para)
chars = read_into_array('oss/Characters.txt', Character)
acts = read_into_array('oss/Chapters.txt', Chapter)
char_dict = hash_array(chars)
char_dict['xxx'] = ''
if __name__ == '__main__':
import sys
playcode = sys.argv[1]
output_path = sys.argv[2]
generate_play(playcode, output_path)
|
Python
| 0.000001
|
@@ -14,16 +14,26 @@
port sys
+%0Aimport os
%0A%0Aclass
@@ -2228,25 +2228,25 @@
tLine(%22%7B0: %3C
-2
+1
0%7D %7B1%7D%22.form
@@ -2584,17 +2584,17 @@
0: %3C
-2
+1
0%7D %7B1: %3C
60%7D
@@ -2593,9 +2593,9 @@
1: %3C
-6
+8
0%7D %7B
@@ -3231,18 +3231,18 @@
_size =
-7
5
+0
%0A whi
@@ -3269,24 +3269,25 @@
ted_lines):%0A
+%0A
end
@@ -3299,42 +3299,66 @@
e +
-75%0A # Find the end%0A
+chunk_size + 10 # add 10 lines of overlap for next time...
%0A
@@ -3411,46 +3411,14 @@
ase:
-base + chunk_size%5D%5D%0A%0A import os
+end%5D%5D%0A
%0A
|
3fea731e62653dfc847e82b8185feb029d844fd8
|
Revert "minifiying doctype json's"
|
frappe/modules/export_file.py
|
frappe/modules/export_file.py
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe, os, json
import frappe.model
from frappe.modules import scrub, get_module_path, lower_case_files_for, scrub_dt_dn
def export_doc(doc):
export_to_files([[doc.doctype, doc.name]])
def export_to_files(record_list=None, record_module=None, verbose=0, create_init=None):
"""
Export record_list to files. record_list is a list of lists ([doctype],[docname] ) ,
"""
if frappe.flags.in_import:
return
if record_list:
for record in record_list:
write_document_file(frappe.get_doc(record[0], record[1]), record_module, create_init=create_init)
def write_document_file(doc, record_module=None, create_init=None):
newdoc = doc.as_dict(no_nulls=True)
# strip out default fields from children
for df in doc.meta.get_table_fields():
for d in newdoc.get(df.fieldname):
for fieldname in frappe.model.default_fields:
if fieldname in d:
del d[fieldname]
for fieldname in d.keys():
if d[fieldname] == 0 or d[fieldname] == "":
del d[fieldname]
module = record_module or get_module_name(doc)
if create_init is None:
create_init = doc.doctype in lower_case_files_for
# create folder
folder = create_folder(module, doc.doctype, doc.name, create_init)
# write the data file
fname = (doc.doctype in lower_case_files_for and scrub(doc.name)) or doc.name
with open(os.path.join(folder, fname +".json"),'w+') as txtfile:
txtfile.write(frappe.as_json(newdoc))
def get_module_name(doc):
if doc.doctype == 'Module Def':
module = doc.name
elif doc.doctype=="Workflow":
module = frappe.db.get_value("DocType", doc.document_type, "module")
elif hasattr(doc, 'module'):
module = doc.module
else:
module = frappe.db.get_value("DocType", doc.doctype, "module")
return module
def create_folder(module, dt, dn, create_init):
module_path = get_module_path(module)
dt, dn = scrub_dt_dn(dt, dn)
# create folder
folder = os.path.join(module_path, dt, dn)
frappe.create_folder(folder)
# create init_py_files
if create_init:
create_init_py(module_path, dt, dn)
return folder
def create_init_py(module_path, dt, dn):
def create_if_not_exists(path):
initpy = os.path.join(path, '__init__.py')
if not os.path.exists(initpy):
open(initpy, 'w').close()
create_if_not_exists(os.path.join(module_path))
create_if_not_exists(os.path.join(module_path, dt))
create_if_not_exists(os.path.join(module_path, dt, dn))
|
Python
| 0
|
@@ -1025,108 +1025,8 @@
ame%5D
-%0A%09%09%09for fieldname in d.keys():%0A%09%09%09%09if d%5Bfieldname%5D == 0 or d%5Bfieldname%5D == %22%22:%0A%09%09%09%09%09del d%5Bfieldname%5D
%0A%0A%09m
|
2f4e0d0b82a12f118cca24efa975d7fa5e09a06d
|
use splitlines() to get speech moving faster
|
software/bmbb_fish.py
|
software/bmbb_fish.py
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# rpi.gpio documentation at https://sourceforge.net/p/raspberry-gpio-python/wiki/
import RPi.GPIO as GPIO
from time import sleep as sleep
import logging
import threading
import sqlite3 # used to write to the tts database
import re
class BmBB:
""" interface with the controls and motors of the big mouth billy bass """
# assign names to the GPIO pins.
# fishMOUTH = 13 # The mouth is now controlled by hardware
fishTAIL = 11
fishHEAD = 7
# fishHEAD_reverse = 15
fishMotorEnable = 18
fishIsSpeaking = 13
# variables for SQlite
dbconnect = None
cursor = None
# other variables
PWMstatus = None #declaring PWMstatus here for later assignment
SpeechWordObjects = []
logger = None #declaring logger here for later use
def __init__(self):
GPIO.cleanup()
GPIO.setmode(GPIO.BOARD) #use P1 header pin numbering convention
# set up gpio pins for fish
# GPIO.setup(self.fishMOUTH, GPIO.OUT, initial=GPIO.LOW)
GPIO.setup(self.fishTAIL, GPIO.OUT, initial=GPIO.LOW)
GPIO.setup(self.fishHEAD, GPIO.OUT, initial=GPIO.LOW)
GPIO.setup(self.fishIsSpeaking, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
# set up PWM for the enable pin on the motor driver
GPIO.setup(self.fishMotorEnable, GPIO.OUT)
self.PWMstatus = GPIO.PWM(self.fishMotorEnable, 50) #frequency 50 hz
self.PWMstatus.start(0) #duty cycle of zero. Enabled but silent
# set up error logging
self.logger = logging.getLogger('FishControl')
hdlr = logging.FileHandler('/var/tmp/fish.log')
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
hdlr.setFormatter(formatter)
self.logger.addHandler(hdlr)
self.logger.setLevel(logging.DEBUG)
# set up SQLite
self.dbconnect = sqlite3.connect("/home/pi/rubberfish/textToSpeech.db", check_same_thread=False)
self.cursor = self.dbconnect.cursor()
def shut_down_fish(self):
self.logger.info('killing the fish')
self.PWMstatus.stop() # turn off PWM
GPIO.cleanup() #resets the GPIO state to neutral
def mouth(self,fishDuration=.5,enthusiasm=50):
pass # mouth is controlled by hardware. This function deprecated
def head(self,fishDuration=.4,enthusiasm=60):
self.logger.info('head: duration={durate}, enthusiasm={enth}.'.format(durate=fishDuration, enth=enthusiasm))
self.headOut(enthusiasm)
fishDuration = fishDuration if fishDuration < 1 else 1
t = threading.Timer(fishDuration,self.headBack)
t.start() # after 'fishDuration' seconds, the head will return
def headOut(self,enthusiasm=60):
self.logger.info('headOut: enthusiasm={enth}.'.format(enth=enthusiasm))
enthusiasm = enthusiasm if enthusiasm < 60 else 60 # more than 60 will throw the head past it's limit
self.adjustPWM(enthusiasm)
GPIO.output(self.fishHEAD,GPIO.HIGH)
def headBack(self):
self.logger.info('headBack: No Parameters')
GPIO.output(self.fishHEAD,GPIO.LOW)
def tail(self,fishDuration=.4,enthusiasm=75):
self.logger.info('tail: duration={durate}, enthusiasm={enth}.'.format(durate=fishDuration, enth=enthusiasm))
self.tailOut(enthusiasm)
fishDuration = fishDuration if fishDuration < 1 else 1
t = threading.Timer(fishDuration,self.tailBack)
t.start() # after 'fishDuration' seconds, the tail will return
def tailOut(self,enthusiasm=75):
self.logger.info('tailOut: enthusiasm={enth}.'.format(enth=enthusiasm))
self.adjustPWM(enthusiasm)
GPIO.output(self.fishTAIL,GPIO.HIGH)
def tailBack(self):
self.logger.info('tailBack: No Parameters')
GPIO.output(self.fishTAIL,GPIO.LOW)
def adjustPWM(self,PWMDutyCycle=50):
# where 0.0 <= PWMDutyCycle <= 100.0
PWMDutyCycle = 100 if PWMDutyCycle > 100 else PWMDutyCycle
PWMDutyCycle = 0 if PWMDutyCycle < 0 else PWMDutyCycle
self.PWMstatus.ChangeDutyCycle(PWMDutyCycle)
def fishSays(self,phraseToSay="Hello World",priorityToSay=5):
sqlDoThis = 'insert into TTS (priority,stringToSay) values (?, ?)'
phraseToSay = re.sub(r'[^\x00-\x7f]',r'',phraseToSay)
self.cursor.execute(sqlDoThis,[priorityToSay,phraseToSay]);
self.dbconnect.commit()
def get_fishIsSpeaking(self):
return GPIO.input(self.fishIsSpeaking)
def fishShutUp(self):
# stops the fish from talking
pass
|
Python
| 0.000002
|
@@ -4322,16 +4322,93 @@
eToSay)%0A
+ for aline in phraseToSay.splitlines():%0A aline.strip()%0A
@@ -4456,23 +4456,21 @@
Say,
-phraseToSay%5D);%0A
+aline%5D);%0A
|
f03ba99cd7c4db064b2ece3d226b30c8e9ca63bf
|
Add a test for scipy.integrate.newton_cotes. A more comprehensive set of tests would be better, but it's a start.
|
scipy/integrate/tests/test_quadrature.py
|
scipy/integrate/tests/test_quadrature.py
|
import numpy
from numpy import cos, sin, pi
from numpy.testing import *
from scipy.integrate import quadrature, romberg, romb
class TestQuadrature(TestCase):
def quad(self, x, a, b, args):
raise NotImplementedError
def test_quadrature(self):
# Typical function with two extra arguments:
def myfunc(x,n,z): # Bessel function integrand
return cos(n*x-z*sin(x))/pi
val, err = quadrature(myfunc,0,pi,(2,1.8))
table_val = 0.30614353532540296487
assert_almost_equal(val, table_val, decimal=7)
def test_romberg(self):
# Typical function with two extra arguments:
def myfunc(x, n, z): # Bessel function integrand
return cos(n*x-z*sin(x))/pi
val = romberg(myfunc,0,pi, args=(2, 1.8))
table_val = 0.30614353532540296487
assert_almost_equal(val, table_val, decimal=7)
def test_romb(self):
assert_equal(romb(numpy.arange(17)),128)
def test_non_dtype(self):
# Check that we work fine with functions returning float
import math
valmath = romberg(math.sin, 0, 1)
expected_val = 0.45969769413185085
assert_almost_equal(valmath, expected_val, decimal=7)
if __name__ == "__main__":
run_module_suite()
|
Python
| 0.996962
|
@@ -120,16 +120,30 @@
rg, romb
+, newton_cotes
%0A%0Aclass
@@ -1244,16 +1244,829 @@
mal=7)%0A%0A
+ def test_newton_cotes(self):%0A %22%22%22Test the first few degrees, for evenly spaced points.%22%22%22%0A n = 1%0A wts, errcoff = newton_cotes(n, 1)%0A assert_equal(wts, n*numpy.array(%5B0.5, 0.5%5D))%0A assert_almost_equal(errcoff, -n**3/12.0)%0A%0A n = 2%0A wts, errcoff = newton_cotes(n, 1)%0A assert_almost_equal(wts, n*numpy.array(%5B1.0, 4.0, 1.0%5D)/6.0)%0A assert_almost_equal(errcoff, -n**5/2880.0)%0A%0A n = 3%0A wts, errcoff = newton_cotes(n, 1)%0A assert_almost_equal(wts, n*numpy.array(%5B1.0, 3.0, 3.0, 1.0%5D)/8.0)%0A assert_almost_equal(errcoff, -n**5/6480.0)%0A%0A n = 4%0A wts, errcoff = newton_cotes(n, 1)%0A assert_almost_equal(wts, n*numpy.array(%5B7.0, 32.0, 12.0, 32.0, 7.0%5D)/90.0)%0A assert_almost_equal(errcoff, -n**7/1935360.0)%0A%0A
%0Aif __na
|
79ae4fccf8bc9856a1b28afb50deabffc70bf432
|
Add --disable-default-logs option to crawl script
|
grab/script/crawl.py
|
grab/script/crawl.py
|
import logging
import os
from argparse import ArgumentParser
from grab.util.config import build_spider_config, build_root_config
from grab.util.module import load_spider_class
from grab.tools.logs import default_logging
from grab.tools.lock import assert_lock
from grab.spider.save_result import save_result
from grab.tools.files import clear_directory
from grab.tools.encoding import make_str
logger = logging.getLogger('grab.script.crawl')
def setup_arg_parser(parser):
parser.add_argument('spider_name', type=str)
parser.add_argument('-t', '--thread-number', default=None, type=int,
help='Number of network threads')
parser.add_argument('--slave', action='store_true', default=False,
help='Enable the slave-mode')
parser.add_argument('-n', '--network-logs', action='store_true', default=False,
help='Dump to console details about network requests')
parser.add_argument('--save-result', action='store_true', default=False,
help='Save crawling state to database')
parser.add_argument('--disable-proxy', action='store_true', default=False,
help='Disable proxy servers')
parser.add_argument('--ignore-lock', action='store_true', default=False)
parser.add_argument('--disable-report', action='store_true', default=False)
parser.add_argument('--settings-module', type=str, default='settings')
#def get_spider_setting(spider_config, key, deprecated_key=None, key_type=None,
#default=None):
#"""
#Get setting's value from the config that could be either in
#deprecated or in actual format.
#"""
## try actual format
#try:
#value = spider_config[key]
#except TypeError:
#raise
##import pdb; pdb.set_trace()
#except KeyError:
#if deprecated_key is not None:
#try:
#value = spider_config[deprecated_key]
#except KeyError:
#value = default
#else:
#value = default
#if key_type is None:
#return value
#elif key_type == 'int':
#return int(value)
def get_lock_key(spider_name, lock_key=None, ignore_lock=False, slave=False, **kwargs):
# --ignore-lock has highest precedence
if ignore_lock:
return None
# If --lock-key is specified explicitly use it
if lock_key is not None:
return lock_key
# Do not lock --slave spiders
if slave:
return None
# As fallback, if no information has been given about locking
# generate lock key from the spider name and use it
lock_key = 'crawl.%s' % spider_name
return lock_key
@save_result
def main(spider_name, thread_number=None, slave=False,
settings_module='settings', network_logs=False,
disable_proxy=False, ignore_lock=False,
disable_report=False,
*args, **kwargs):
default_logging(propagate_network_logger=network_logs)
root_config = build_root_config(settings_module)
spider_class = load_spider_class(root_config, spider_name)
spider_config = build_spider_config(spider_class, root_config)
spider_args = None
if hasattr(spider_class, 'setup_arg_parser'):
parser = ArgumentParser()
spider_class.setup_arg_parser(parser)
opts, trash = parser.parse_known_args()
spider_args = vars(opts)
if thread_number is None:
thread_number = \
int(spider_config.get('thread_number',
deprecated_key='GRAB_THREAD_NUMBER'))
stat_task_object = kwargs.get('stat_task_object', None)
bot = spider_class(
thread_number=thread_number,
slave=slave,
config=spider_config,
network_try_limit=int(spider_config.get(
'network_try_limit', deprecated_key='GRAB_NETWORK_TRY_LIMIT')),
task_try_limit=int(spider_config.get(
'task_try_limit', deprecated_key='GRAB_TASK_TRY_LIMIT')),
args=spider_args,
)
opt_queue = spider_config.get('queue', deprecated_key='GRAB_QUEUE')
if opt_queue:
bot.setup_queue(**opt_queue)
opt_cache = spider_config.get('cache', deprecated_key='GRAB_CACHE')
if opt_cache:
bot.setup_cache(**opt_cache)
opt_proxy_list = spider_config.get(
'proxy_list', deprecated_key='GRAB_PROXY_LIST')
if opt_proxy_list:
if disable_proxy:
logger.debug('Proxy servers disabled via command line')
else:
bot.load_proxylist(**opt_proxy_list)
opt_ifaces = spider_config.get(
'command_interfaces', deprecated_key='GRAB_COMMAND_INTERFACES')
if opt_ifaces:
for iface_config in opt_ifaces:
bot.controller.add_interface(**iface_config)
# Dirty hack
# FIXIT: REMOVE
bot.dump_spider_stats = kwargs.get('dump_spider_stats')
bot.stats_object = kwargs.get('stats_object')
try:
bot.run()
except KeyboardInterrupt:
pass
stats = bot.render_stats(
timing=spider_config.get('display_timing',
deprecated_key='GRAB_DISPLAY_TIMING'))
stats_with_time = bot.render_stats(timing=True)
if spider_config.get('display_stats', deprecated_key='GRAB_DISPLAY_STATS'):
logger.debug(stats)
pid = os.getpid()
logger.debug('Spider pid is %d' % pid)
if not disable_report:
if spider_config.get('save_report', deprecated_key='GRAB_SAVE_REPORT'):
for subdir in (str(pid), 'last'):
dir_ = 'var/%s' % subdir
if not os.path.exists(dir_):
os.mkdir(dir_)
else:
clear_directory(dir_)
for key, lst in bot.items.items():
fname_key = key.replace('-', '_')
bot.save_list(key, '%s/%s.txt' % (dir_, fname_key))
with open('%s/report.txt' % dir_, 'wb') as out:
out.write(make_str(stats_with_time))
return {
'spider_stats': bot.render_stats(timing=False),
'spider_timing': bot.render_timing(),
}
|
Python
| 0.000001
|
@@ -1360,32 +1360,142 @@
default=False)%0A
+ parser.add_argument('--disable-default-logs', action='store_true',%0A default=False)%0A
parser.add_a
@@ -3039,26 +3039,226 @@
-*args, **kwargs):%0A
+disable_default_logs=False,%0A *args, **kwargs):%0A if disable_default_logs:%0A default_logging(propagate_network_logger=network_logs,%0A grab_log=None, network_log=None)%0A else:%0A
|
3aa198bc49b32db49abe5653ed82f1ede7081df7
|
make fix work for both versions of GeoSteiner
|
geonet/geosteiner.py
|
geonet/geosteiner.py
|
'''
Wrapper for GeoSteiner program
'''
from itertools import dropwhile, takewhile, ifilter
import os
from subprocess import Popen, PIPE
from geonet.network import SteinerTree
# TODO: check if GeoSteiner is available
def geosteiner(pos):
'''Call geosteiner to compute and return'''
def parse_ps(output):
lines = output.splitlines()
no_pre = dropwhile(lambda l:' % fs' not in l, lines)
no_post = takewhile(lambda l:'Euclidean SMT' not in l, no_pre)
filter_comments = ifilter(lambda l: ' % fs' not in l, no_post)
arcs = [l.split()[:4] for l in filter_comments]
return arcs
def build_tree(nodes, raw_arcs, pos):
_nodes = list(nodes)
_arcs = []
_steiner_pos = {}
num = 0
for ra in raw_arcs:
if ra[1] == 'T':
tail = nodes[int(ra[0])]
else: # must be Steiner node
coords = '_'.join(ra[0:2])
if coords in _steiner_pos:
tail = _steiner_pos[coords]
else:
node = '_%d' % num
_nodes.append(node)
tail = _steiner_pos.setdefault(coords, node)
num += 1
if ra[3] == 'T':
head = nodes[int(ra[2])]
else: # must be Steiner node
coords = '_'.join(ra[2:4])
if coords in _steiner_pos:
head = _steiner_pos[coords]
else:
node = '_%d' % num
_nodes.append(node)
head = _steiner_pos.setdefault(coords, node)
num += 1
_arcs.append((tail, head))
tree = SteinerTree(_nodes, _arcs, pos)
steiner_pos = {}
for k,v in _steiner_pos.items():
node = v
coords = k.split('_')
steiner_pos[node] = float(coords[0]), float(coords[1])
return tree, steiner_pos
nodes = list(sorted(pos.keys()))
nodeset = ''.join('%4d %4d\n' % pos[n] for n in nodes)
efst = Popen(['efst'], stdin=PIPE, stdout=PIPE)
efst_output, _ = efst.communicate(nodeset)
bb = Popen(['bb'], stdin=PIPE, stdout=PIPE)
output, _ = bb.communicate(efst_output)
raw_arcs = parse_ps(output)
tree, steiner_pos = build_tree(nodes, raw_arcs, pos)
return tree, steiner_pos
|
Python
| 0
|
@@ -417,61 +417,130 @@
-no_post = takewhile(lambda l:'Euclidean SMT' not in l
+end_kwds = %5B'Euclidean SMT', '(Steiner Minimal'%5D%0A no_post = takewhile(lambda l: all(kw not in l for kw in end_kwds)
, no
|
a9b2b6fe868ab564653f40e611ce6a788f396981
|
Fix wrong variable replacement
|
backend/globaleaks/tests/jobs/test_pgp_check_sched.py
|
backend/globaleaks/tests/jobs/test_pgp_check_sched.py
|
# -*- coding: utf-8 -*-
from twisted.internet.defer import inlineCallbacks
from globaleaks.tests import helpers
from globaleaks.jobs import secure_file_delete_sched
class TestPGPCheckSchedule(helpers.TestGLWithPopulatedDB):
encryption_scenario = 'ONE_VALID_ONE_EXPIRED'
@inlineCallbacks
def test_pgp_check_schedule(self):
# FIXME: complete this unit test by performing checks
# on the actions performed by the scheduler.
yield pgp_check_sched.PGPCheckSchedule().operation()
|
Python
| 0.000019
|
@@ -139,26 +139,17 @@
ort
-secure_file_delete
+pgp_check
_sch
|
6ef76159ab32e454241f7979a1cdf320c463dd9e
|
add config file option
|
planetstack/planetstack-backend.py
|
planetstack/planetstack-backend.py
|
#!/usr/bin/env python
import os
import argparse
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "planetstack.settings")
from observer.backend import Backend
from planetstack.config import Config
config = Config()
# after http://www.erlenstar.demon.co.uk/unix/faq_2.html
def daemon():
"""Daemonize the current process."""
if os.fork() != 0: os._exit(0)
os.setsid()
if os.fork() != 0: os._exit(0)
os.umask(0)
devnull = os.open(os.devnull, os.O_RDWR)
os.dup2(devnull, 0)
# xxx fixme - this is just to make sure that nothing gets stupidly lost - should use devnull
logdir=os.path.dirname(config.observer_logfile)
# when installed in standalone we might not have httpd installed
if not os.path.isdir(logdir): os.mkdir(logdir)
crashlog = os.open('%s'%config.observer_logfile, os.O_RDWR | os.O_APPEND | os.O_CREAT, 0644)
os.dup2(crashlog, 1)
os.dup2(crashlog, 2)
def main():
# Generate command line parser
parser = argparse.ArgumentParser(usage='%(prog)s [options]')
parser.add_argument('-d', '--daemon', dest='daemon', action='store_true', default=False,
help='Run as daemon.')
args = parser.parse_args()
if args.daemon: daemon()
backend = Backend()
backend.run()
if __name__ == '__main__':
main()
|
Python
| 0.000002
|
@@ -1162,16 +1162,358 @@
emon.')%0A
+ # smbaker: util/config.py parses sys.argv%5B%5D directly to get config file name; include the option here to avoid%0A # throwing unrecognized argument exceptions%0A parser.add_argument('-C', '--config', dest='config_file', action='store', default=%22/opt/planetstack/plstackapi_config%22,%0A help='Name of config file.')%0A
args
|
a5942402fdf8f8013dbe62636ea29582538e33c6
|
fix argument name
|
bin/trait_mapping/create_table_for_manual_curation.py
|
bin/trait_mapping/create_table_for_manual_curation.py
|
#!/usr/bin/env python3
import argparse
from eva_cttv_pipeline.trait_mapping.ols import (
get_ontology_label_from_ols, is_current_and_in_efo, is_in_efo,
)
def find_previous_mapping(trait_name, previous_mappings):
if trait_name not in previous_mappings:
return ''
uri = previous_mappings[trait_name]
label = get_ontology_label_from_ols(uri)
uri_is_current_and_in_efo = is_current_and_in_efo(uri)
uri_in_efo = is_in_efo(uri)
if uri_in_efo:
trait_status = 'EFO_CURRENT' if uri_is_current_and_in_efo else 'EFO_OBSOLETE'
else:
trait_status = 'NOT_CONTAINED'
trait_string = '|'.join([uri, label, 'NOT_SPECIFIED', 'previously-used', trait_status])
return trait_string
def find_exact_mapping(trait_name, mappings):
for mapping in mappings:
if mapping.lower().split('|')[1] == trait_name:
return mapping
return ''
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
'-t', '--traits-for-curation',
help='Table with traits for which the pipeline failed to make a confident prediction')
parser.add_argument(
'-m', '--previous-mappings',
help='Table with all mappings previously issued by EVA')
parser.add_argument(
'-o', '--output',
help='Output TSV to be loaded in Google Sheets for manual curation')
args = parser.parse_args()
outfile = open(args.final_table_for_curation, 'w')
# Load all previous mappings
previous_mappings = dict(l.rstrip().split('\t') for l in open(args.previous_mappings))
# Process all mappings which require manual curation
for line in open(args.traits_for_curation):
fields = line.rstrip().split('\t')
trait_name, trait_freq = fields[:2]
mappings = fields[2:]
previous_mapping = find_previous_mapping(trait_name, previous_mappings)
exact_mapping = find_exact_mapping(trait_name, mappings)
out_line = '\t'.join(
[trait_name, trait_freq,
# Mapping to use, if ready, comment, mapping URI, mapping label, whether exact, in EFO
'', '', '', '', '', '', '',
previous_mapping, exact_mapping] + mappings
) + '\n'
outfile.write(out_line)
|
Python
| 0.005603
|
@@ -1436,32 +1436,14 @@
rgs.
-final_table_for_curation
+output
, 'w
|
662608e6a183810072cb5e9dc7545145c866cf34
|
Add missing import
|
byceps/services/shop/order/action_registry_service.py
|
byceps/services/shop/order/action_registry_service.py
|
"""
byceps.services.shop.order.action_registry_service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2017 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from ...seating.models.category import CategoryID
from ...user_badge.models.badge import BadgeID
from ..article.models.article import ArticleNumber
from .models.payment import PaymentState
def register_badge_awarding(article_number: ArticleNumber, badge_id: BadgeID
) -> None:
# Award badge to orderer when order is marked as paid.
params = {
'badge_id': str(badge_id),
}
action_service.create_action(article_number, PaymentState.paid,
'create_tickets', params_create)
def register_tickets_creation(article_number: ArticleNumber,
ticket_category_id: CategoryID) -> None:
# Create tickets for order when it is marked as paid.
params_create = {
'category_id': str(ticket_category_id),
}
action_service.create_action(article_number, PaymentState.paid,
'create_tickets', params_create)
# Revoke tickets that have been created for order when it is
# canceled after being marked as paid.
params_revoke = {}
action_service.create_action(article_number, PaymentState.canceled_after_paid,
'revoke_tickets', params_revoke)
|
Python
| 0.000466
|
@@ -389,16 +389,46 @@
tState%0A%0A
+from . import action_service%0A%0A
%0Adef reg
|
5cdf89e64ab9dabf277a867a774a88f12e1ece5e
|
Fix broken exception `BadHeader`
|
src/pyload/core/network/http/exceptions.py
|
src/pyload/core/network/http/exceptions.py
|
# -*- coding: utf-8 -*-
PROPRIETARY_RESPONSES = {
440: "Login Timeout - The client's session has expired and must log in again.",
449: "Retry With - The server cannot honour the request because the user has not provided the required information",
451: "Redirect - Unsupported Redirect Header",
509: "Bandwidth Limit Exceeded",
520: "Unknown Error",
521: "Web Server Is Down - The origin server has refused the connection from CloudFlare",
522: "Connection Timed Out - CloudFlare could not negotiate a TCP handshake with the origin server",
523: "Origin Is Unreachable - CloudFlare could not reach the origin server",
524: "A Timeout Occurred - CloudFlare did not receive a timely HTTP response",
525: "SSL Handshake Failed - CloudFlare could not negotiate a SSL/TLS handshake with the origin server",
526: "Invalid SSL Certificate - CloudFlare could not validate the SSL/TLS certificate that the origin server presented",
527: "Railgun Error - CloudFlare requests timeout or failed after the WAN connection has been established",
530: "Site Is Frozen - Used by the Pantheon web platform to indicate a site that has been frozen due to inactivity",
}
class BadHeader(Exception):
def __init__(self, code, header=b"", content=b""):
int_code = int(code)
response = responses.get(
int_code, PROPRIETARY_RESPONSES.get(int_code, "unknown error code")
)
super().__init__(f"Bad server response: {code} {response}")
self.code = int_code
self.header = header
self.content = content
|
Python
| 0.000001
|
@@ -1284,28 +1284,24 @@
%22):%0A
-int_
code = int(c
@@ -1327,45 +1327,8 @@
se =
- responses.get(%0A int_code,
PRO
@@ -1350,20 +1350,16 @@
SES.get(
-int_
code, %22u
@@ -1382,18 +1382,8 @@
e%22)%0A
- )%0A
@@ -1446,16 +1446,16 @@
onse%7D%22)%0A
+
@@ -1470,12 +1470,8 @@
e =
-int_
code
|
05bd3a42ffb260daddc8051d79ec21f20a23bdde
|
Improve performance of report case pillow test
|
testapps/test_pillowtop/tests/test_report_case_pillow.py
|
testapps/test_pillowtop/tests/test_report_case_pillow.py
|
import uuid
from django.test import TestCase, override_settings
from corehq.util.es.elasticsearch import ConnectionError
from corehq.apps.es import CaseES
from corehq.apps.hqcase.management.commands.ptop_reindexer_v2 import reindex_and_clean
from corehq.elastic import get_es_new
from corehq.form_processor.tests.utils import FormProcessorTestUtils, run_with_all_backends
from corehq.pillows.mappings.reportcase_mapping import REPORT_CASE_INDEX_INFO
from corehq.util.elastic import ensure_index_deleted
from corehq.util.test_utils import trap_extra_setup, create_and_save_a_case
from pillowtop.es_utils import initialize_index_and_mapping
from testapps.test_pillowtop.utils import process_pillow_changes
DOMAIN = 'report-case-pillowtest-domain'
@override_settings(ES_CASE_FULL_INDEX_DOMAINS=[DOMAIN])
class ReportCasePillowTest(TestCase):
def setUp(self):
super(ReportCasePillowTest, self).setUp()
FormProcessorTestUtils.delete_all_xforms()
FormProcessorTestUtils.delete_all_cases()
with trap_extra_setup(ConnectionError):
self.elasticsearch = get_es_new()
ensure_index_deleted(REPORT_CASE_INDEX_INFO.index)
initialize_index_and_mapping(self.elasticsearch, REPORT_CASE_INDEX_INFO)
def tearDown(self):
ensure_index_deleted(REPORT_CASE_INDEX_INFO.index)
FormProcessorTestUtils.delete_all_xforms()
FormProcessorTestUtils.delete_all_cases()
super(ReportCasePillowTest, self).tearDown()
@run_with_all_backends
def test_report_case_pillow(self):
case_id, case_name = self._create_case_and_sync_to_es(DOMAIN)
# confirm change made it to elasticserach
results = CaseES('report_cases').run()
self.assertEqual(1, results.total)
case_doc = results.hits[0]
self.assertEqual(DOMAIN, case_doc['domain'])
self.assertEqual(case_id, case_doc['_id'])
self.assertEqual(case_name, case_doc['name'])
@run_with_all_backends
def test_unsupported_domain(self):
self._create_case_and_sync_to_es('unsupported-domain')
results = CaseES('report_cases').run()
self.assertEqual(0, results.total)
def _create_case_and_sync_to_es(self, domain):
case_id = uuid.uuid4().hex
case_name = 'case-name-{}'.format(uuid.uuid4().hex)
with process_pillow_changes('case-pillow'):
with process_pillow_changes('DefaultChangeFeedPillow'):
create_and_save_a_case(domain, case_id, case_name)
self.elasticsearch.indices.refresh(REPORT_CASE_INDEX_INFO.index)
return case_id, case_name
@override_settings(ES_CASE_FULL_INDEX_DOMAINS=[DOMAIN])
class ReportCaseReindexerTest(TestCase):
def setUp(self):
super(ReportCaseReindexerTest, self).setUp()
FormProcessorTestUtils.delete_all_xforms()
FormProcessorTestUtils.delete_all_cases()
with trap_extra_setup(ConnectionError):
self.elasticsearch = get_es_new()
ensure_index_deleted(REPORT_CASE_INDEX_INFO.index)
def tearDown(self):
FormProcessorTestUtils.delete_all_xforms()
FormProcessorTestUtils.delete_all_cases()
ensure_index_deleted(REPORT_CASE_INDEX_INFO.index)
super(ReportCaseReindexerTest, self).tearDown()
@run_with_all_backends
def test_report_case_reindexer(self):
cases_included = set()
for i in range(3):
case = create_and_save_a_case(DOMAIN, uuid.uuid4().hex, 'case_name-{}'.format(i))
cases_included.add(case.case_id)
# excluded case
create_and_save_a_case('unsupported', uuid.uuid4().hex, 'unsupported')
reindex_and_clean('report-case')
# verify there
results = CaseES("report_cases").run()
self.assertEqual(3, results.total, results.hits)
ids_in_es = {doc['_id'] for doc in results.hits}
self.assertEqual(cases_included, ids_in_es)
|
Python
| 0.001312
|
@@ -4,16 +4,48 @@
ort uuid
+%0Afrom unittest.mock import patch
%0A%0Afrom d
@@ -94,186 +94,417 @@
ngs%0A
-from corehq.util.es.elasticsearch import ConnectionError%0A%0Afrom corehq.apps.es import CaseES%0Afrom corehq.apps.hqcase.management.commands.ptop_reindexer_v2 import reindex_and_clean
+%0Afrom pillowtop.es_utils import initialize_index_and_mapping%0A%0Afrom corehq.apps.callcenter.tests.test_utils import CallCenterDomainMockTest%0Afrom corehq.apps.es import CaseES%0Afrom corehq.apps.hqcase.management.commands.ptop_reindexer_v2 import reindex_and_clean%0Afrom corehq.apps.userreports.models import StaticDataSourceConfiguration%0Afrom corehq.apps.userreports.pillow import ConfigurableReportPillowProcessor
%0Afro
@@ -782,126 +782,123 @@
til.
-t
es
-t_utils import trap_extra_setup, create_and_save_a_case%0Afrom pillowtop.es_utils import initialize_index_and_mapping
+.elasticsearch import ConnectionError%0Afrom corehq.util.test_utils import create_and_save_a_case, trap_extra_setup
%0Afro
@@ -1082,34 +1082,579 @@
ePillowTest(
-TestCase):
+CallCenterDomainMockTest):%0A%0A @classmethod%0A def setUpClass(cls):%0A super().setUpClass()%0A # patch time savings: ~25s per _create_case_and_sync_to_es()%0A cls.patches = %5B%0A patch.object(StaticDataSourceConfiguration, %22_all%22, lambda: %5B%5D),%0A patch.object(ConfigurableReportPillowProcessor, %22rebuild_tables_if_necessary%22),%0A %5D%0A for px in cls.patches:%0A px.start()%0A%0A @classmethod%0A def tearDownClass(cls):%0A for px in cls.patches:%0A px.stop()%0A super().tearDownClass()
%0A%0A def se
|
a23c6132792bd6aff420791cf4b78a955cc0dfad
|
add headless
|
inscrawler/browser.py
|
inscrawler/browser.py
|
import os
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.chrome.options import Options
from time import sleep
class Browser:
def __init__(self):
dir_path = os.path.dirname(os.path.realpath(__file__))
service_args = ['--ignore-ssl-errors=true']
chrome_options = Options()
# chrome_options.add_argument("--headless")
chrome_options.add_argument("--no-sandbox")
self.driver = webdriver.Chrome(
executable_path='%s/bin/chromedriver' % dir_path,
service_args=service_args,
chrome_options=chrome_options)
self.driver.implicitly_wait(5)
@property
def page_height(self):
return self.driver.execute_script('return document.body.scrollHeight')
def get(self, url):
self.driver.get(url)
def find_one(self, css_selector, elem=None):
obj = elem or self.driver
try:
return obj.find_element(By.CSS_SELECTOR, css_selector)
except NoSuchElementException:
return None
def find(self, css_selector, elem=None):
obj = elem or self.driver
try:
return obj.find_elements(By.CSS_SELECTOR, css_selector)
except NoSuchElementException:
return None
def scroll_down(self, wait=0.5):
self.driver.execute_script(
'window.scrollTo(0, document.body.scrollHeight)')
sleep(wait)
def scroll_up(self, wait=2):
self.driver.execute_script(
'window.scrollTo(0, 0)')
sleep(wait)
def js_click(self, elem):
self.driver.execute_script("arguments[0].click();", elem)
def __del__(self):
try:
self.driver.quit()
except Exception:
pass
|
Python
| 0.999995
|
@@ -419,10 +419,8 @@
- #
chr
|
8d9b5cf8f9e53f28f4a286c2851249757d88a52b
|
Set LOGIN_URL in settings.py
|
getaride/settings.py
|
getaride/settings.py
|
"""
Django settings for getaride project.
Generated by 'django-admin startproject' using Django 1.11.3.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
from . import private_settings
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = private_settings.key
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.contenttypes',
'django.contrib.auth',
'django.contrib.staticfiles',
'django.contrib.sessions',
'planner.apps.PlannerConfig',
'cities_light',
'leaflet',
'users',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'getaride.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'getaride.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
import sys
if 'test' in sys.argv:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'testdb',
},
}
else:
DATABASES = {
'default': private_settings.database,
}
AUTH_USER_MODEL = 'users.User'
LOGIN_REDIRECT_URL = 'planner:homepage'
LOGOUT_REDIRECT_URL = 'planner:homepage'
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'it'
TIME_ZONE = 'Europe/Rome'
USE_I18N = True
USE_L10N = True
USE_TZ = True
DATE_INPUT_FORMATS = (
'%d.%m.%Y', '%d.%m.%Y', '%d.%m.%y', # '25.10.2006', '25.10.2006', '25.10.06'
'%d-%m-%Y', '%d/%m/%Y', '%d/%m/%y', # '25-10-2006', '25/10/2006', '25/10/06'
'%d %b %Y', # '25 Oct 2006',
'%d %B %Y', # '25 October 2006',
)
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
CITIES_LIGHT_TRANSLATION_LANGUAGES = [LANGUAGE_CODE]
CITIES_LIGHT_INCLUDE_COUNTRIES = [LANGUAGE_CODE.upper()]
CITIES_LIGHT_INCLUDE_CITY_TYPES = ['PPLA', 'PPLA2', 'PPLA3']
LEAFLET_CONFIG = {
'SPATIAL_EXTENT': (6.345, 36.0, 20.0, 47.3),
'MIN_ZOOM': 6,
'MAX_ZOOM': 10,
}
|
Python
| 0.000001
|
@@ -2425,16 +2425,45 @@
.User'%0A%0A
+LOGIN_URL = 'planner:login'%0A%0A
LOGIN_RE
|
b44a9dfbf26e07b9db6a31119044b8347907a5a5
|
disable fid map
|
examples/fitsdiff2.py
|
examples/fitsdiff2.py
|
#! /usr/bin/env python
#
# This routine can diff images from its neighbors. For a series i=1,N
# this can loop over i=2,N to produce N-1 difference images
#
# B_i = A_i - A_i-1
#
from __future__ import print_function
import glob
import sys
import shutil
import os
from astropy.io import fits
import numpy as np
if len(sys.argv) == 3:
f1 = sys.argv[1]
f2 = sys.argv[2]
print("Using %s %s" % (f1,f2))
hdu1 = fits.open(f1)
hdu2 = fits.open(f2)
h2 = hdu2[0].header
d1 = hdu1[0].data.astype(np.float32)
d2 = hdu2[0].data.astype(np.float32)
print(f1,d1.min(),d1.max())
print(f2,d2.min(),d2.max())
diff = d2 - d1
max1 = d1.max()
std1 = diff.std()
fidelity = max1 / std1
fid = np.abs(d2) / np.max(np.abs(diff),std1/1.4)
print("MEAN/STD/FID:",diff.mean(), std1, fidelity)
fits.writeto('diff.fits',diff,h2,overwrite=True)
fits.writeto('fidelity.fits',fid,h2,overwrite=True)
try:
import matplotlib.pyplot as plt
plt.figure(1)
plt.hist(diff.ravel())
plt.show()
except:
print("Failing to plot")
|
Python
| 0.000001
|
@@ -687,58 +687,8 @@
d1%0A%0A
-fid = np.abs(d2) / np.max(np.abs(diff),std1/1.4)%0A
%0A%0Apr
@@ -785,16 +785,70 @@
e=True)%0A
+#%0A#fid = np.abs(d2) / np.max(np.abs(diff),std1/1.4)%0A#
fits.wri
|
f1ac9c7a41ae066f62f3a93773c08ec08f70b941
|
Add finally if running processes
|
coalib/processes/SectionExecutor.py
|
coalib/processes/SectionExecutor.py
|
import multiprocessing
import queue
import threading
from coalib.collecting.Collectors import collect_files
from coalib.collecting import Dependencies
from coalib.output.printers.ConsolePrinter import ConsolePrinter
from coalib.processes.BearRunner import BearRunner
from coalib.processes.CONTROL_ELEMENT import CONTROL_ELEMENT
from coalib.processes.Barrier import Barrier
from coalib.settings.Section import Section
from coalib.settings.Setting import path_list
def get_cpu_count():
try:
return multiprocessing.cpu_count()
# cpu_count is not implemented for some CPU architectures/OSes
except NotImplementedError: # pragma: no cover
return 2
class SectionExecutor:
"""
The section executor does the following things:
1. Prepare a BearRunner
* Load files
* Create queues
2. Spawn up one or more BearRunner's
3. Output results from the BearRunner's
4. Join all processes
"""
class LogPrinterThread(threading.Thread):
"""
This is the Thread object that outputs all log messages it gets from its message_queue.
"""
def __init__(self, message_queue, log_printer=ConsolePrinter()):
threading.Thread.__init__(self)
self.running = True
self.message_queue = message_queue
self.log_printer = log_printer
def run(self):
while self.running:
try:
elem = self.message_queue.get(timeout=0.1)
self.log_printer.log_message(elem)
except queue.Empty:
pass
def __init__(self,
section,
local_bear_list,
global_bear_list):
if not isinstance(section, Section):
raise TypeError("section has to be of type Section")
if not isinstance(local_bear_list, list):
raise TypeError("local_bear_list has to be of type list")
if not isinstance(global_bear_list, list):
raise TypeError("global_bear_list has to be of type list")
self.section = section
self.local_bear_list = Dependencies.resolve(local_bear_list)
self.global_bear_list = Dependencies.resolve(global_bear_list)
def run(self):
running_processes = get_cpu_count()
processes, arg_dict = self._instantiate_processes(running_processes)
logger_thread = self.LogPrinterThread(arg_dict["message_queue"],
self.section.log_printer)
# Start and join the logger thread along with the BearRunner's
processes.append(logger_thread)
for runner in processes:
runner.start()
self._process_queues(processes,
arg_dict["control_queue"],
arg_dict["local_result_dict"],
arg_dict["global_result_dict"],
arg_dict["file_dict"])
logger_thread.running = False
for runner in processes:
runner.join()
@staticmethod
def _get_running_processes(processes):
return sum((1 if process.is_alive() else 0) for process in processes)
def _process_queues(self,
processes,
control_queue,
local_result_dict,
global_result_dict,
file_dict):
running_processes = self._get_running_processes(processes)
interactor = self.section.interactor
# One process is the logger thread
while running_processes > 1:
try:
control_elem, index = control_queue.get(timeout=0.1)
if control_elem == CONTROL_ELEMENT.LOCAL:
interactor.print_results(local_result_dict[index],
file_dict)
elif control_elem == CONTROL_ELEMENT.GLOBAL:
interactor.print_results(global_result_dict[index],
file_dict)
elif control_elem == CONTROL_ELEMENT.FINISHED:
running_processes = self._get_running_processes(processes)
except queue.Empty:
running_processes = self._get_running_processes(processes)
self.section.interactor.finalize(file_dict)
def _instantiate_bears(self, file_dict, message_queue):
for i in range(len(self.local_bear_list)):
self.local_bear_list[i] = self.local_bear_list[i](self.section,
message_queue,
TIMEOUT=0.1)
for i in range(len(self.global_bear_list)):
self.global_bear_list[i] = self.global_bear_list[i](file_dict,
self.section,
message_queue,
TIMEOUT=0.1)
def _instantiate_processes(self, job_count):
filename_list = collect_files(path_list(self.section['files']))
file_dict = self._get_file_dict(filename_list)
manager = multiprocessing.Manager()
global_bear_queue = multiprocessing.Queue()
filename_queue = multiprocessing.Queue()
local_result_dict = manager.dict()
global_result_dict = manager.dict()
message_queue = multiprocessing.Queue()
control_queue = multiprocessing.Queue()
barrier = Barrier(parties=job_count)
bear_runner_args = {"file_name_queue": filename_queue,
"local_bear_list": self.local_bear_list,
"global_bear_list": self.global_bear_list,
"global_bear_queue": global_bear_queue,
"file_dict": file_dict,
"local_result_dict": local_result_dict,
"global_result_dict": global_result_dict,
"message_queue": message_queue,
"control_queue": control_queue,
"barrier": barrier,
"TIMEOUT": 0.1}
self._instantiate_bears(file_dict,
message_queue)
self._fill_queue(filename_queue, filename_list)
self._fill_queue(global_bear_queue, range(len(self.global_bear_list)))
return ([BearRunner(**bear_runner_args) for i in range(job_count)],
bear_runner_args)
@staticmethod
def _fill_queue(_queue, any_list):
for elem in any_list:
_queue.put(elem)
@staticmethod
def _get_file_dict(filename_list):
file_dict = {}
for filename in filename_list:
with open(filename, "r") as f:
file_dict[filename] = f.readlines()
return file_dict
|
Python
| 0.000001
|
@@ -2708,24 +2708,41 @@
er.start()%0A%0A
+ try:%0A
self
@@ -2790,32 +2790,36 @@
+
arg_dict%5B%22contro
@@ -2850,32 +2850,36 @@
+
+
arg_dict%5B%22local_
@@ -2885,32 +2885,36 @@
_result_dict%22%5D,%0A
+
@@ -2979,32 +2979,36 @@
+
+
arg_dict%5B%22file_d
@@ -3014,17 +3014,37 @@
dict%22%5D)%0A
-%0A
+ finally:%0A
@@ -3074,32 +3074,36 @@
False%0A%0A
+
for runner in pr
@@ -3103,32 +3103,36 @@
r in processes:%0A
+
runn
|
737a320ff8adbe2a0619087125a0a731c5e2bca8
|
Bump to v1.1
|
bindings/python/setup.py
|
bindings/python/setup.py
|
import os
import sys
import shutil
import setuptools
from distutils import log
from distutils.command.build_clib import build_clib
VERSION_MAJOR = 1
VERSION_MINOR = 0
VERSION_VBOXBIN = "5-0-18-6667"
VERSION = "{major:d}.{minor:d}".format(
major = VERSION_MAJOR,
minor = VERSION_MINOR,
)
BINDING_DIRECTORY = os.path.abspath(os.path.dirname(__file__))
ROOT_DIRECTORY = os.path.abspath(os.path.join(BINDING_DIRECTORY, "../../"))
# path to compiled libraries for Windows
PATH_LIB64 = os.path.join(ROOT_DIRECTORY, "out_x64/Release/FDP_x64.dll")
PATH_LIB32 = os.path.join(ROOT_DIRECTORY, "out_x86/Release/FDP_x86.dll")
FDP_BUILD_SCRIPT = lambda VS_VERSION : os.path.join(ROOT_DIRECTORY, "buildVS%s.bat" % VS_VERSION)
VS_VERSIONS = [
"2017",
"2015",
"2013"
]
class PyFDPCustomBuildClib(build_clib):
""" Customized build_clib command. """
description = "Custom build lib step used to compile the FDP client dll from sources."
def run(self):
log.info('running PyFDPCustomBuildClib')
build_clib.run(self)
def initialize_options(self):
""" Initialize custom command line switches """
build_clib.initialize_options(self)
# Visual Studio version
# Customize it by setting the env variable FDP_MSVC_VER
self.vs = os.environ.get("FDP_MSVC_VER", "2017")
def finalize_options(self):
""" Validate custom command line switches values """
build_clib.finalize_options(self)
def build_libraries(self, libraries):
# platform description refers at https://docs.python.org/2/library/sys.html#sys.platform
if not sys.platform == "win32":
raise ValueError("Can not build on a platform that is not native Windows")
assert self.vs in VS_VERSIONS, 'Unrecognized visual studio compiler version. Supported versions : %s ' % ",".join(VS_VERSIONS)
log.info("building FPD library with visual studio %s " % self.vs)
_cwd = os.getcwd()
os.chdir(ROOT_DIRECTORY)
# Cleanup old build folders
shutil.rmtree(os.path.join(ROOT_DIRECTORY, "out_x64"), ignore_errors=True)
shutil.rmtree(os.path.join(ROOT_DIRECTORY, "out_x86"), ignore_errors=True)
# Windows build: this process requires few things:
# - CMake + MSVC installed
# - Run this command in an environment setup for MSVC
os.system(FDP_BUILD_SCRIPT(self.vs))
if not os.path.exists(PATH_LIB64):
raise FileNotFoundError("Could not find %s : compilation step went wrong", PATH_LIB64)
if not os.path.exists(PATH_LIB32):
raise FileNotFoundError("Could not find %s : compilation step went wrong", PATH_LIB32)
# copy compiled dll files into source dir
shutil.copy(PATH_LIB32, os.path.join(BINDING_DIRECTORY, "PyFDP", "FDP_x86.dll"))
shutil.copy(PATH_LIB64, os.path.join(BINDING_DIRECTORY, "PyFDP", "FDP_x64.dll"))
os.chdir(_cwd)
def dummy_src():
return []
setuptools.setup(
provides=['PyFDP'],
packages = setuptools.find_packages(),
name="PyFDP",
version=VERSION,
description='FDP (Fast Debug Protocol) : KD client for debugging Windows VM without /DEBUG enabled.',
url='https://winbagility.github.io',
classifiers=[
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Operating System :: Microsoft :: Windows',
'Topic :: Software Development :: Debuggers',
],
requires=[],
# PyFDP need to compile the client FDP dlls
# but there are not Python module compliant so
# the standard build_clib command will raise errors. That's
# why we hook build_clib in order to launch the cmake build script.
libraries=[(
'PyFDP', dict(
package = 'PyFDP',
sources = dummy_src()
),
)],
cmdclass=dict(
build_clib=PyFDPCustomBuildClib,
),
# Tell setuptools not to zip into an egg file
# That's mandatory whenever there is a filepath involved
# (in our case via the LoadLibrary)
zip_safe=False,
# We have two dlls to package with the python lib.
include_package_data=True,
package_data={
"PyFDP": ["*.dll"],
}
)
|
Python
| 0.000001
|
@@ -160,17 +160,17 @@
MINOR =
-0
+1
%0AVERSION
|
2c2d2024abf0eaa34b25038d2eb4cd5d8aeb6323
|
remove defunct test
|
fsspec/tests/test_registry.py
|
fsspec/tests/test_registry.py
|
import sys
from unittest.mock import create_autospec, patch
import pytest
from fsspec.registry import (
ReadOnlyError,
_registry,
get_filesystem_class,
known_implementations,
register_implementation,
registry,
)
from fsspec.spec import AbstractFileSystem
try:
from importlib.metadata import EntryPoint
except ImportError: # python < 3.8
from importlib_metadata import EntryPoint
@pytest.fixture()
def clear_registry():
try:
yield
finally:
_registry.clear()
known_implementations.pop("test", None)
@pytest.fixture()
def clean_imports():
try:
real_module = sys.modules["fsspec"]
del sys.modules["fsspec"]
yield
finally:
sys.modules["fsspec"] = real_module
@pytest.mark.parametrize(
"protocol,module,minversion,oldversion",
[("s3", "s3fs", "0.3.0", "0.1.0"), ("gs", "gcsfs", "0.3.0", "0.1.0")],
)
def test_minversion_s3fs(protocol, module, minversion, oldversion, monkeypatch):
_registry.clear()
mod = pytest.importorskip(module, minversion)
assert get_filesystem_class("s3") is not None
_registry.clear()
monkeypatch.setattr(mod, "__version__", oldversion)
with pytest.raises(RuntimeError, match=minversion):
get_filesystem_class(protocol)
def test_registry_readonly():
get_filesystem_class("file")
assert "file" in registry
assert "file" in list(registry)
with pytest.raises(ReadOnlyError):
del registry["file"]
with pytest.raises(ReadOnlyError):
registry["file"] = None
with pytest.raises(ReadOnlyError):
registry.clear()
def test_register_cls(clear_registry):
with pytest.raises(ValueError):
get_filesystem_class("test")
register_implementation("test", AbstractFileSystem)
cls = get_filesystem_class("test")
assert cls is AbstractFileSystem
def test_register_str(clear_registry):
with pytest.raises(ValueError):
get_filesystem_class("test")
register_implementation("test", "fsspec.AbstractFileSystem")
assert "test" not in registry
cls = get_filesystem_class("test")
assert cls is AbstractFileSystem
assert "test" in registry
def test_register_fail(clear_registry):
register_implementation("test", "doesntexist.AbstractFileSystem")
with pytest.raises(ImportError):
get_filesystem_class("test")
register_implementation("test", "doesntexist.AbstractFileSystem")
with pytest.raises(ValueError):
register_implementation("test", "doesntexist.AbstractFileSystem", clobber=False)
register_implementation(
"test", "doesntexist.AbstractFileSystem", errtxt="hiho", clobber=True
)
with pytest.raises(ImportError) as e:
get_filesystem_class("test")
assert "hiho" in str(e.value)
register_implementation("test", AbstractFileSystem)
with pytest.raises(ValueError):
register_implementation("test", AbstractFileSystem, clobber=False)
register_implementation("test", AbstractFileSystem, clobber=True)
def test_entry_points_registered_on_import(clear_registry, clean_imports):
mock_ep = create_autospec(EntryPoint, module="fsspec.spec.AbstractFileSystem")
mock_ep.name = "test" # this can't be set in the constructor...
if sys.version_info < (3, 8):
import_location = "importlib_metadata.entry_points"
else:
import_location = "importlib.metadata.entry_points"
with patch(import_location, return_value={"fsspec.specs": [mock_ep]}):
assert "test" not in registry
import fsspec # noqa
get_filesystem_class("test")
assert "test" in registry
|
Python
| 0.998747
|
@@ -765,536 +765,8 @@
e%0A%0A%0A
-@pytest.mark.parametrize(%0A %22protocol,module,minversion,oldversion%22,%0A %5B(%22s3%22, %22s3fs%22, %220.3.0%22, %220.1.0%22), (%22gs%22, %22gcsfs%22, %220.3.0%22, %220.1.0%22)%5D,%0A)%0Adef test_minversion_s3fs(protocol, module, minversion, oldversion, monkeypatch):%0A _registry.clear()%0A mod = pytest.importorskip(module, minversion)%0A%0A assert get_filesystem_class(%22s3%22) is not None%0A _registry.clear()%0A%0A monkeypatch.setattr(mod, %22__version__%22, oldversion)%0A with pytest.raises(RuntimeError, match=minversion):%0A get_filesystem_class(protocol)%0A%0A%0A
def
|
e0663d81c6763ac3e4240acb0812af5b3ef5c7bb
|
Handle setting breakpoint on no file, as per #12
|
plugin/python/vdebug/breakpoint.py
|
plugin/python/vdebug/breakpoint.py
|
import base64
import vdebug.log
class Store:
def __init__(self):
self.breakpoints = {}
self.api = None
def link_api(self,api):
self.api = api
num_bps = len(self.breakpoints)
if num_bps > 0:
vdebug.log.Log("Registering %i breakpoints with the debugger" % num_bps)
for id, bp in self.breakpoints.iteritems():
res = self.api.breakpoint_set(bp.get_cmd())
bp.set_debugger_id(res.get_id())
def unlink_api(self):
self.api = None
def add_breakpoint(self,breakpoint):
vdebug.log.Log("Adding " + str(breakpoint))
self.breakpoints[str(breakpoint.get_id())] = breakpoint
breakpoint.on_add()
if self.api is not None:
res = self.api.breakpoint_set(breakpoint.get_cmd())
breakpoint.set_debugger_id(res.get_id())
def remove_breakpoint(self,breakpoint):
self.remove_breakpoint_by_id(\
breakpoint.get_id())
def remove_breakpoint_by_id(self,id):
id = str(id)
if id not in self.breakpoints:
raise BreakpointError, "No breakpoint matching ID %s" % id
vdebug.log.Log("Removing breakpoint id %s" % id)
if self.api is not None:
dbg_id = self.breakpoints[id].get_debugger_id()
if dbg_id is not None:
self.api.breakpoint_remove(dbg_id)
self.breakpoints[id].on_remove()
del self.breakpoints[id]
def clear_breakpoints(self):
for id in self.breakpoints.keys():
self.remove_breakpoint_by_id(id)
self.breakpoints = {}
def find_breakpoint(self,file,line):
found = None
for id, bp in self.breakpoints.iteritems():
if bp.type == "line":
if bp.get_file() == file and\
bp.get_line() == line:
found = bp.get_id()
break
return found
def get_sorted_list(self):
keys = self.breakpoints.keys()
keys.sort()
return map(self.breakpoints.get,keys)
class BreakpointError(Exception):
pass
class Breakpoint:
""" Abstract factory for creating a breakpoint object.
Use the class method parse to create a concrete subclass
of a specific type.
"""
type = None
id = 11000
dbg_id = None
def __init__(self,ui):
self.id = Breakpoint.id
Breakpoint.id += 1
self.ui = ui
def get_id(self):
return self.id
def set_debugger_id(self,dbg_id):
self.dbg_id = dbg_id
def get_debugger_id(self):
return self.dbg_id
def on_add(self):
self.ui.register_breakpoint(self)
def on_remove(self):
self.ui.remove_breakpoint(self)
@classmethod
def parse(self,ui,args):
if args is None:
args = ""
args = args.strip()
if len(args) == 0:
""" Line breakpoint """
row = ui.get_current_row()
file = ui.get_current_file()
return LineBreakpoint(ui,file,row)
else:
arg_parts = args.split(' ')
type = arg_parts.pop(0)
type.lower()
if type == 'conditional':
row = ui.get_current_row()
file = ui.get_current_file()
if len(arg_parts) == 0:
raise BreakpointError, "Conditional breakpoints " +\
"require a condition to be specified"
cond = " ".join(arg_parts)
return ConditionalBreakpoint(ui,file,row,cond)
elif type == 'watch':
if len(arg_parts) == 0:
raise BreakpointError, "Watch breakpoints " +\
"require a condition to be specified"
expr = " ".join(arg_parts)
vdebug.log.Log("Expression: %s"%expr)
return WatchBreakpoint(ui,expr)
elif type == 'exception':
if len(arg_parts) == 0:
raise BreakpointError, "Exception breakpoints " +\
"require an exception name to be specified"
return ExceptionBreakpoint(ui,arg_parts[0])
elif type == 'return':
l = len(arg_parts)
if l == 0:
raise BreakpointError, "Return breakpoints " +\
"require a function name to be specified"
return ReturnBreakpoint(ui,arg_parts[0])
elif type == 'call':
l = len(arg_parts)
if l == 0:
raise BreakpointError, "Call breakpoints " +\
"require a function name to be specified"
return CallBreakpoint(ui,arg_parts[0])
else:
raise BreakpointError, "Unknown breakpoint type, " +\
"please choose one of: conditional, exception,"+\
"call or return"
def get_cmd(self):
pass
def __str__(self):
return "%s breakpoint, id %i" %(self.type,self.id)
class LineBreakpoint(Breakpoint):
type = "line"
def __init__(self,ui,file,line):
Breakpoint.__init__(self,ui)
self.file = file
self.line = line
def get_line(self):
return self.line
def get_file(self):
return self.file
def get_cmd(self):
cmd = "-t " + self.type
cmd += " -f file://" + self.file.as_remote()
cmd += " -n " + str(self.line)
cmd += " -s enabled"
return cmd
class TemporaryLineBreakpoint(LineBreakpoint):
def on_add(self):
pass
def on_remove(self):
pass
def get_cmd(self):
cmd = LineBreakpoint.get_cmd(self)
return cmd + " -r 1"
class ConditionalBreakpoint(LineBreakpoint):
type = "conditional"
def __init__(self,ui,file,line,condition):
LineBreakpoint.__init__(self,ui,file,line)
self.condition = condition
def get_cmd(self):
cmd = LineBreakpoint.get_cmd(self)
cmd += " -- " + base64.encodestring(self.condition)
return cmd
class WatchBreakpoint(Breakpoint):
type = "watch"
def __init__(self,ui,expr):
Breakpoint.__init__(self,ui)
self.expr = expr
def get_cmd(self):
cmd = "-t " + self.type
cmd += " -- " + base64.encodestring(self.expr)
return cmd
class ExceptionBreakpoint(Breakpoint):
type = "exception"
def __init__(self,ui,exception):
Breakpoint.__init__(self,ui)
self.exception = exception
def get_cmd(self):
cmd = "-t " + self.type
cmd += " -x " + self.exception
cmd += " -s enabled"
return cmd
class CallBreakpoint(Breakpoint):
type = "call"
def __init__(self,ui,function):
Breakpoint.__init__(self,ui)
self.function = function
def get_cmd(self):
cmd = "-t " + self.type
cmd += " -m %s" % self.function
cmd += " -s enabled"
return cmd
class ReturnBreakpoint(CallBreakpoint):
type = "return"
|
Python
| 0
|
@@ -2976,32 +2976,53 @@
t_current_row()%0A
+ try:%0A
file
@@ -3038,32 +3038,150 @@
_current_file()%0A
+ except vdebug.util.FilePathError:%0A raise BreakpointError, 'No file, cannot set breakpoint'%0A
retu
|
7a39c7a433e909b58ad0fdf8adaaa5c944e91e0e
|
Fix non-array samples in multinomial estimator.
|
src/python/cargo/statistics/multinomial.py
|
src/python/cargo/statistics/multinomial.py
|
"""
@author: Bryan Silverthorn <bcs@cargo-cult.org>
"""
import numpy
from cargo.log import get_logger
from cargo.statistics.base import (
Estimator,
Distribution,
)
log = get_logger(__name__)
def smooth_multinomial_mixture(mixture, epsilon = 1e-3):
"""
Apply a smoothing term to the multinomial mixture components.
"""
log.info("heuristically smoothing a multinomial mixture")
for m in xrange(mixture.ndomains):
for k in xrange(mixture.ncomponents):
beta = mixture.components[m, k].beta + epsilon
beta /= numpy.sum(beta)
mixture.components[m, k] = Multinomial(beta)
class Multinomial(Distribution):
"""
The multinomial distribution.
Relevant types:
- sample: D-shaped uint ndarray
- sequence: ND-shaped uint ndarray
"""
def __init__(self, beta, norm = 1):
"""
Instantiate the distribution.
@param beta: The distribution parameter vector.
"""
# initialization
self._beta = numpy.asarray(beta)
self._log_beta = numpy.nan_to_num(numpy.log(self._beta))
self._norm = norm
# let's not let us be idiots
self._beta.flags.writeable = False
self._log_beta.flags.writeable = False
def random_variate(self, random = numpy.random):
"""
Return a sample from this distribution.
"""
return random.multinomial(self._norm, self._beta).astype(numpy.uint)
def random_variates(self, size, random = numpy.random):
"""
Return an array of samples from this distribution.
"""
return random.multinomial(self._norm, self._beta, size).astype(numpy.uint)
def log_likelihood(self, sample):
"""
Return the log likelihood of C{sample} under this distribution.
"""
from cargo.statistics._multinomial import multinomial_log_probability
return multinomial_log_probability(self._log_beta, sample)
def total_log_likelihood(self, samples):
"""
Return the log likelihood of C{samples} under this distribution.
"""
return self.log_likelihood(numpy.sum(samples, 0))
@property
def beta(self):
"""
Return the multinomial parameter vector.
"""
return self._beta
@property
def log_beta(self):
"""
Return the multinomial log parameter vector.
"""
return self._log_beta
class MultinomialEstimator(Estimator):
"""
Estimate the parameters of a multinomial distribution.
"""
def __init__(self, norm = 1):
"""
Initialize.
"""
self._norm = norm
def estimate(self, samples, random = numpy.random, weights = None):
"""
Return the estimated maximum likelihood distribution.
"""
from numpy import newaxis
if weights is None:
weights = numpy.ones(samples.shape[0])
mean = numpy.sum(samples * weights[:, newaxis], 0)
mean /= numpy.sum(mean)
return Multinomial(mean, self._norm)
|
Python
| 0.000021
|
@@ -2932,33 +2932,61 @@
-from numpy import newaxis
+# parameters%0A samples = numpy.asarray(samples)
%0A%0A
@@ -3062,16 +3062,94 @@
ape%5B0%5D)%0A
+ else:%0A weights = numpy.asarray(weights)%0A%0A # estimate
%0A
@@ -3192,15 +3192,12 @@
%5B:,
+No
ne
-waxis
%5D, 0
|
2319534cecf4ed475469a8ded468b348a21947ce
|
Fix shape of array returned from Arnoldi matrix exponential
|
src/WaveBlocksND/MatrixExponential.py
|
src/WaveBlocksND/MatrixExponential.py
|
"""The WaveBlocks Project
This file contains several different algorithms to compute the
matrix exponential. Currently we have an exponential based on
Pade approximations and an Arnoldi iteration method.
@author: R. Bourquin
@copyright: Copyright (C) 2007 V. Gradinaru
@copyright: Copyright (C) 2010, 2011, 2012, 2015 R. Bourquin
@license: Modified BSD License
"""
from numpy import zeros, dot, complexfloating, conjugate
from scipy.linalg import norm, expm
def matrix_exp_pade(A, v, factor):
r"""Compute the solution of :math:`v' = A v` with a full
matrix exponential via Pade approximation.
:param A: The matrix :math:`A` of shape :math:`N \times N`.
:param v: The vector :math:`v` of length :math:`N`.
:param factor: An additional scalar factor :math:`\alpha`.
:return: The (approximate) value of :math:`\exp\left(-i \alpha A\right) v`
"""
return dot(expm(-1.0j*A*factor), v)
def arnoldi(A, v0, k):
r"""Arnoldi algorithm to compute the Krylov approximation :math:`H` of a matrix :math:`A`.
:param A: The matrix :math:`A` of shape :math:`N \times N` to approximate.
:param v0: The initial vector :math:`v_0` of length :math:`N`.
:param k: The number :math:`k` of Krylov steps performed.
:return: A tuple :math:`(V, H)` where :math:`V` is the large matrix of shape
:math:`N \times (k+1)` containing the orthogonal vectors and :math:`H` is the
small matrix of shape :math:`(k+1) \times k` containing the Krylov approximation
of :math:`A`.
"""
r, c = A.shape
V = zeros((r, k+1), dtype=complexfloating)
H = zeros((k+1, k), dtype=complexfloating)
V[:,0] = v0.reshape(-1) / norm(v0)
for i in xrange(1, k+1):
vi = dot(A, V[:,i-1])
for j in xrange(i):
H[j,i-1] = dot(conjugate(V[:,j]), vi)
vi -= H[j,i-1] * V[:,j]
H[i,i-1] = norm(vi)
V[:,i] = vi / H[i,i-1]
return V, H
def matrix_exp_arnoldi(A, v, factor, k):
r"""Compute the solution of :math:`v' = A v` via :math:`k`
steps of a the Arnoldi krylov method.
:param A: The matrix :math:`A` of shape :math:`N \times N`.
:param v: The vector :math:`v` of length :math:`N`.
:param factor: An additional scalar factor :math:`\alpha`.
:param k: The number :math:`k` of Krylov steps performed.
:return: The (approximate) value of :math:`\exp\left(-i \alpha A\right) v`.
"""
V, H = arnoldi(A, v, min(min(A.shape), k))
eH = expm(-1.0j*factor*H[:-1,:])
r = dot(V[:,:-1], eH[:,0])
return r * norm(v)
|
Python
| 0.000008
|
@@ -2516,16 +2516,26 @@
%0A r =
+ norm(v) *
dot(V%5B:
@@ -2566,15 +2566,22 @@
rn r
- * norm(v
+.reshape(v.shape
)%0A
|
304760823382e72efb8f98ab3b5a98147f98c0e8
|
Improve userlist liveness guarentees
|
geventirc/channel.py
|
geventirc/channel.py
|
import gevent
from geventirc.message import Join, Part, Privmsg
from geventirc.replycodes import replies
from geventirc.userlist import UserList
class Channel(object):
"""Object representing an IRC channel.
This is the reccomended way to do operations like joins, or tracking user lists.
A channel may be join()ed and part()ed multiple times.
The user list will be the most recent info available, or None before first join.
Can be used in a with statement to join then part.
"""
joined = False
userlist = None
def __init__(self, client, name):
self.client = client
self.name = name
self.client.add_handler(self._recv_part, command=Part, channels=lambda value: self.name in value)
def join(self, block=True):
"""Join the channel if not already joined. If block=True, do not return until name list is received."""
if self.joined: return
self.joined = True
self.userlist = UserList(self.client, self.name)
self.client.send(Join(self.name))
if not block: return
self.client.wait_for(command=replies.ENDOFNAMES, params=[None, self.name, None])
def part(self, block=True):
"""Part from the channel if joined. If block=True, do not return until fully parted."""
if not self.joined: return
self.joined = False
@gevent.spawn
def _part():
# we delay unregistering until the part is sent.
self.client.send(Part(self.name), block=True)
self.userlist.unregister()
if block: _part.get()
def msg(self, content, block=False):
self.client.msg(self.name, content, block=block)
def action(self, content, block=False):
self.client.send(Privmsg.action(self.name, content), block=block)
def _recv_part(self, client, msg):
# we receive a forced PART from the server
self.joined = False
self.userlist.unregister()
def __enter__(self):
self.join()
def __exit__(self, *exc_info):
# if we're cleaning up after an exception, ignore errors in part()
# as they are most likely a carry-on error or same root cause.
try:
self.part()
except Exception:
if exc_info == (None, None, None):
raise
|
Python
| 0
|
@@ -430,81 +430,228 @@
in.%0A
-%0A%09Can be used in a with statement to join then part.%0A%09%22%22%22%0A%09joined = False
+%09In particular, the user list can be considered up to date iff users_ready is set.%0A%0A%09Can be used in a with statement to join then part.%0A%09%22%22%22%0A%0A%09USERS_READY_TIMEOUT = 10%0A%0A%09joined = False%0A%09users_ready = gevent.event.Event()
%0A%09us
@@ -841,16 +841,127 @@
n value)
+%0A%09%09self.client.add_handler(self._recv_end_of_names, command=replies.ENDOFNAMES, params=%5BNone, self.name, None%5D)
%0A%0A%09def j
@@ -1135,16 +1135,43 @@
= True%0A
+%09%09self.users_ready.clear()%0A
%09%09self.u
@@ -1283,82 +1283,49 @@
elf.
-client.wait_for(command=replies.ENDOFNAMES, params=%5BNone, self.name, None%5D
+users_ready.wait(self.USERS_READY_TIMEOUT
)%0A%0A%09
@@ -1876,24 +1876,94 @@
ock=block)%0A%0A
+%09def _recv_end_of_names(self, client, msg):%0A%09%09self.users_ready.set()%0A%0A
%09def _recv_p
|
4038357efb3494e796d884b8c5d48506d6f69fcb
|
Revise inline formset logic to only render formsets whose related managers are in view form's fields.
|
foundation/forms/views/object.py
|
foundation/forms/views/object.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import router
from django.forms.models import _get_foreign_key
from django.utils.encoding import force_text
from django.views.generic import edit
from django import forms
from ...utils import get_deleted_objects
from ...backend import views
from .base import ControllerTemplateMixin
from .components import BaseModelFormMixin
__all__ = 'AddView', 'EditView', 'DisplayView', 'DeleteView'
class ObjectMixin(views.ObjectMixin):
def get_success_url(self):
return self.get_url('list')
class DeleteView(ObjectMixin, ControllerTemplateMixin, edit.BaseDeleteView):
mode = 'delete'
mode_title = 'delete'
def get_context_data(self, **kwargs):
object_name = force_text(self.object._meta.verbose_name)
# Populate deleted_objects, a data structure of all related objects that
# will also be deleted.
(deleted_objects, model_count, perms_needed, protected) = get_deleted_objects(
[self.object], self.object._meta, self.request.user,
self.backend, router.db_for_write(self.model))
kwargs.update(
object_name=object_name,
deleted_objects=deleted_objects,
model_count=dict(model_count).items(),
)
return super(DeleteView, self).get_context_data(**kwargs)
class ProcessFormView(BaseModelFormMixin, ObjectMixin, ControllerTemplateMixin,
edit.ModelFormMixin, edit.ProcessFormView):
""" Single-Object ModelForm View Mixin """
def handle_common(self, handler, request, *args, **kwargs):
handler = super(ProcessFormView, self).handle_common(
handler, request, *args, **kwargs)
self.object = None if self.add else self.get_object()
self.form = self.get_form()
return handler
def get_inline_formsets(self, obj):
"""
Return the InlineFormSet for this View via the ViewChild.
TODO: Better handle the case where of no child controller (e.g. a check)
"""
obj = None if self.add else self.object
# fields = flatten_fieldsets(self.get_fieldsets(self.mode))
inline_formsets = {}
for name, view_child in self.view_children.items():
inline_fk_field = _get_foreign_key(
self.model, view_child.model, view_child.fk_name)
# if inline_fk_field.remote_field.name not in fields:
# continue # TODO: Fail Check
formset_class = view_child.get_formset_class(obj)
formset_kwargs = view_child.get_formset_kwargs(
formset_class=formset_class, obj=obj)
inline_formsets[name] = formset_class(**formset_kwargs)
return inline_formsets
def get(self, request, *args, **kwargs):
self.inline_formsets = self.get_inline_formsets(self.object)
return super(ProcessFormView, self).get(request, *args, **kwargs)
def post(self, request, *args, **kwargs):
if self.form.is_valid():
form_validated = True
new_object = self.save_form(change=not self.add)
else:
form_validated = False
new_object = self.form.instance
new_object._controller = self
self.inline_formsets = self.get_inline_formsets(new_object)
# val all formsets *first* to ensure we report them when form invalid
if forms.all_valid(self.inline_formsets.values()) and form_validated:
self.object = new_object
self.save_model(not self.add)
self.save_related(not self.add)
return self.form_valid(self.form)
else:
return self.form_invalid(self.form)
def get_media(self):
media = super(ProcessFormView, self).get_media()
media += self.form.media
for inline_formset in self.inline_formsets.values():
media += inline_formset.media
return media
def get_context_data(self, **kwargs):
# from render_change_form
request = self.request
# from changeform_view
object_id = None
if hasattr(self.object, 'pk') and '_saveasnew' not in request.POST:
object_id = self.object.pk
add = object_id is None
kwargs.update({
'form': self.form,
'object_id': object_id,
})
return super(ProcessFormView, self).get_context_data(**kwargs)
class AddView(ProcessFormView):
mode = 'add'
mode_title = 'add a'
class EditView(ProcessFormView):
mode = 'edit'
mode_title = 'Editing'
class DisplayView(ProcessFormView):
mode = 'display'
mode_title = ''
|
Python
| 0
|
@@ -401,16 +401,63 @@
rmMixin%0A
+from foundation.utils import flatten_fieldsets%0A
%0A%0A__all_
@@ -2170,16 +2170,94 @@
#
+ we will only generate formsets for the fields specified for this view%0A
fields
@@ -2412,109 +2412,72 @@
-inline_fk_field = _get_foreign_key(%0A self.model, view_child.model, view_child.fk_name)
+# do not make the inline formset if not an accessible form field
%0A
@@ -2488,43 +2488,12 @@
- #
if
-inline_fk_field.remote_field.
name
@@ -2519,18 +2519,16 @@
- #
con
@@ -2536,271 +2536,75 @@
inue
- # TODO: Fail Check%0A formset_class = view_child.get_formset_class(obj)%0A formset_kwargs = view_child.get_formset_kwargs(%0A formset_class=formset_class, obj=obj)%0A inline_formsets%5Bname%5D = formset_class(**formset_kwargs
+%0A inline_formsets%5Bname%5D = view_child.get_formset(obj=obj
)%0A
|
118756c54163d448026dacb531fbad859ece3458
|
add table as global var
|
sqlcell.py
|
sqlcell.py
|
import re
import fileinput
from os.path import expanduser
from IPython.core.magic import (register_line_magic, register_cell_magic,
register_line_cell_magic)
import IPython
from sqlalchemy import create_engine
# from engine_config import driver, username, password, host, port, default_db
engine = create_engine(driver+'://'+username+':'+password+'@'+host+':'+port+'/'+default_db)
class HTMLTable(list):
"""
Creates an HTML table if pandas isn't installed.
The .empty attribute takes the place of df.empty,
and to_csv takes the place of df.to_csv.
"""
empty = []
def _repr_html_(self):
table = '<table width=100%>'
thead = '<thead><tr>'
tbody = '<tbody><tr>'
for n,row in enumerate(self):
if n == 0:
thead += ''.join([('<th>' + str(r) + '</th>') for r in row])
else:
tbody += '<tr>' + ''.join([('<td>' + str(r) + '</td>') for r in row]) + '</tr>'
thead += '</tr></thead>'
tbody += '</tbody>'
table += thead + tbody
return table
def to_csv(self, path):
import csv
with open(path, 'w') as fp:
a = csv.writer(fp, delimiter=',')
a.writerows(self)
try:
import pandas as pd
pd.options.display.max_columns = None
to_table = pd.DataFrame
except ImportError as e:
to_table = HTMLTable
def build_dict(output, row):
output[row.replace('%(','').replace(')s','')] = eval(row.replace('%(','').replace(')s',''))
return output
@register_line_cell_magic
def sql(path, cell=None):
"""
Create magic cell function to treat cell text as SQL
to remove the need of third party SQL interfaces. The
args are split on spaces so don't use spaces except to
input a new argument.
Args:
PATH (str): path to write dataframe to in csv.
MAKE_GLOBAL: make dataframe available globally.
DB: name of database to connect to.
RAW: when used with MAKE_GLOBAL, will return the
raw RowProxy from sqlalchemy.
Returns:
DataFrame:
"""
global driver, username, password, host, port, db
if cell.strip() == '\d':
cell = """
SELECT n.nspname as "Schema",
c.relname as "Name",
CASE c.relkind WHEN 'r' THEN 'table' WHEN 'v' THEN 'view' WHEN 'm' THEN 'materialized view' WHEN 'i' THEN 'index' WHEN 'S' THEN 'sequence' WHEN 's' THEN 'special' WHEN 'f' THEN 'foreign table' END as "Type",
pg_catalog.pg_get_userbyid(c.relowner) as "Owner",
pg_catalog.pg_size_pretty(pg_catalog.pg_table_size(c.oid)) as "Size",
pg_catalog.obj_description(c.oid, 'pg_class') as "Description"
FROM pg_catalog.pg_class c
LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
WHERE c.relkind IN ('r','v','m','S','f','')
AND n.nspname <> 'pg_catalog'
AND n.nspname <> 'information_schema'
AND n.nspname !~ '^pg_toast'
AND pg_catalog.pg_table_is_visible(c.oid)
ORDER BY "Type" desc;
"""
elif cell.startswith("\d"):
table = cell.replace('\d', '').strip()
cell = """
SELECT a.attname,
pg_catalog.format_type(a.atttypid, a.atttypmod),
(SELECT substring(pg_catalog.pg_get_expr(d.adbin, d.adrelid) for 128)
FROM pg_catalog.pg_attrdef d
WHERE d.adrelid = a.attrelid AND d.adnum = a.attnum AND a.atthasdef),
a.attnotnull, a.attnum,
(SELECT c.collname FROM pg_catalog.pg_collation c, pg_catalog.pg_type t
WHERE c.oid = a.attcollation AND t.oid = a.atttypid AND a.attcollation <> t.typcollation) AS attcollation,
NULL AS indexdef,
NULL AS attfdwoptions,
a.attstorage,
CASE WHEN a.attstattarget=-1 THEN NULL ELSE a.attstattarget END AS attstattarget, pg_catalog.col_description(a.attrelid, a.attnum)
FROM pg_catalog.pg_attribute a
JOIN pg_catalog.pg_class c on c.oid = a.attrelid
WHERE c.relname = %(table)s AND a.attnum > 0 AND NOT a.attisdropped
ORDER BY a.attnum;
"""
args = path.split(' ')
for i in args:
if i.startswith('MAKE_GLOBAL'):
glovar = i.split('=')
exec(glovar[0]+'='+glovar[1]+'=None')
elif i.startswith('DB'):
db = i.replace('DB=', '')
exec("global engine\nengine=create_engine('"+driver+"://"+username+":"+password+"@"+host+":"+port+"/"+db+"')")
exec('global DB\nDB=db')
home = expanduser("~")
filepath = home + '/.ipython/profile_default/startup/ac_engine_config.py'
for line in fileinput.FileInput(filepath,inplace=1):
line = re.sub("default_db = '.*'","default_db = '"+db+"'", line)
print line,
elif i.startswith('ENGINE'):
exec("global engine\nengine=create_engine("+i.replace('ENGINE=', "")+")")
conn_str = engine.url
driver, username = conn_str.drivername, conn_str.username
password, host = conn_str.password, conn_str.host
port, db = conn_str.port, conn_str.database
else:
exec(i)
matches = re.findall(r'%\(.*\)s', cell)
data = engine.execute(cell, reduce(build_dict, matches, {}))
columns = data.keys()
table_data = [i for i in data] if 'pd' in globals() else [columns] + [i for i in data]
df = to_table(table_data)
if df.empty:
return 'No data available'
df.columns = columns
if 'PATH' in locals():
df.to_csv(PATH)
if 'MAKE_GLOBAL' in locals():
exec('global ' + glovar[1] + '\n' + glovar[1] + '=df if \'RAW\' not in locals() else table_data')
return df
def send_to_client(data, filename=None, key=None):
import json
filename = filename if filename else 'data.json'
key = key if key else 'data'
with open('/Users/tdobbins/bidirect/' + filename, 'w') as f:
f.write(json.dumps(data))
return None
js = "IPython.CodeCell.config_defaults.highlight_modes['magic_sql'] = {'reg':[/^%%sql/]};"
IPython.core.display.display_javascript(js, raw=True)
|
Python
| 0.000004
|
@@ -2195,16 +2195,23 @@
port, db
+, table
%0A %0A
|
cda111aecdd650d1f08b75e2c92774526bf9e06d
|
Change Misc to Miscellaneous Utilities
|
bipy/util/misc.py
|
bipy/util/misc.py
|
#!/usr/bin/env python
r"""
Misc (:mod:`bipy.util.misc`)
============================
.. currentmodule:: bipy.util.misc
This module provides miscellaneous useful utility classes and methods that do
not fit in any specific module.
Functions
---------
.. autosummary::
:toctree: generated/
safe_md5
"""
from __future__ import division
#-----------------------------------------------------------------------------
# Copyright (c) 2013--, bipy development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
import hashlib
def safe_md5(open_file, block_size=2**20):
"""Computes an md5 sum without loading the file into memory
Parameters
----------
open_file : file object
open file handle to the archive to compute the checksum
block_size : int, optional
size of the block taken per iteration
Returns
-------
md5 : md5 object from the hashlib module
object with the loaded file
Notes
-----
This method is based on the answers given in:
http://stackoverflow.com/a/1131255/379593
Examples
--------
>>> from StringIO import StringIO
>>> from bipy.util.misc import safe_md5
>>> fd = StringIO("foo bar baz") # open file like object
>>> x = safe_md5(fd)
>>> x.hexdigest()
'ab07acbb1e496801937adfa772424bf7'
>>> fd.close()
"""
md5 = hashlib.md5()
data = True
while data:
data = open_file.read(block_size)
if data:
md5.update(data)
return md5
|
Python
| 0
|
@@ -24,16 +24,35 @@
%22%22%22%0AMisc
+ellaneous Utilities
(:mod:%60
|
0a0d55a2a9aa07b0841b2a221e8b7bc9b844b976
|
update version numbers and project details
|
butter/__init__.py
|
butter/__init__.py
|
#!/usr/bin/env python
"""Butter: library to give python access to linux's more lower level features"""
__author__ = "Da_Blitz"
__version__ = "0.1"
__email__ = "code@pocketnix.org"
__license__ = "BSD (3 Clause)"
__url__ = "http://code.pocketnix.org/"
__testsuite__ = "tests.testall"
|
Python
| 0
|
@@ -142,9 +142,9 @@
%220.
-1
+2
%22%0A__
@@ -246,38 +246,12 @@
org/
-%22%0A__testsuite__ = %22tests.testall
+butter
%22%0A
|
39d4f9c0df535c13c6f37eaaccaaeabb0b92b8e0
|
Bump version number
|
fabric_colors/_version.py
|
fabric_colors/_version.py
|
__version__ = "0.9.41"
|
Python
| 0.000002
|
@@ -17,7 +17,7 @@
.9.4
-1
+2
%22%0A
|
4e09200b83f986ce333f5b1143e13a4b2d7df2ce
|
determine site activity on process_view
|
pykeg/src/pykeg/web/middleware.py
|
pykeg/src/pykeg/web/middleware.py
|
# Copyright 2011 Mike Wakerly <opensource@hoho.com>
#
# This file is part of the Pykeg package of the Kegbot project.
# For more information on Pykeg or Kegbot, see http://kegbot.org/
#
# Pykeg is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# Pykeg is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Pykeg. If not, see <http://www.gnu.org/licenses/>.
from pykeg.core import models
from django.core.urlresolvers import reverse
from django.http import HttpResponse
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404
class KegbotSiteMiddleware:
def process_view(self, request, view_func, view_args, view_kwargs):
kbsite_name = view_kwargs.pop('kbsite_name', None)
if kbsite_name is not None:
if kbsite_name == '':
kbsite_name = 'default'
request.kbsite = get_object_or_404(models.KegbotSite, name=kbsite_name)
return None
class SiteActiveMiddleware:
"""Middleware which throws 503s when KegbotSite.is_active is false."""
ALLOWED_PATHS = (
'/accounts/login/',
'/admin/',
'/site_media/',
)
def _path_allowed(self, path):
for p in self.ALLOWED_PATHS:
if path.startswith(p):
return True
return False
def process_request(self, request):
kbsite = None
if hasattr(request, 'kbsite'):
kbsite = request.kbsite
# We have a KegbotSite, and that site is active: nothing to do.
if kbsite and kbsite.is_active:
return None
# If the request is for a whitelisted path, allow it.
if self._path_allowed(request.path):
return None
# Allow staff/superusers access if inactive.
if request.user.is_staff or request.user.is_superuser:
return None
else:
return HttpResponse('Site temporarily unavailable', status=503)
|
Python
| 0.00001
|
@@ -1676,23 +1676,20 @@
process_
-request
+view
(self, r
@@ -1698,36 +1698,53 @@
uest
-):%0A kbsite = None
+, view_func, view_args, view_kwargs):
%0A if
hasa
@@ -1739,16 +1739,20 @@
%0A if
+not
hasattr(
@@ -1769,24 +1769,40 @@
kbsite'):%0A
+ return None%0A
kbsite =
@@ -1890,26 +1890,15 @@
do.%0A
+
if
-kbsite and
kbsi
@@ -2178,20 +2178,9 @@
one%0A
- else:%0A
+%0A
|
9346332cb9051ac753c3ee2cccd829d6bcf80b9e
|
support preflight requests
|
pyramid_swagger_spec/namespace.py
|
pyramid_swagger_spec/namespace.py
|
from collections import defaultdict
from pyramid.path import DottedNameResolver
import functools
import inspect
import venusian
import zope.interface
from zope.interface.declarations import implementer
# copied from tomb_routes, modified
ACCEPT_RENDERER_MAP = {
'json': 'application/json',
'string': 'text/plain',
}
class MatchdictMapper(object):
def __init__(self, **kwargs):
self.view_settings = kwargs
self.attr = self.view_settings.get('attr')
self.blacklist = [
'optional_slash',
]
def __call__(self, view):
@functools.wraps(view)
def wrapper(context, request):
kwargs = request.matchdict.copy()
for k in self.blacklist:
if k in kwargs:
del kwargs[k]
if inspect.isclass(view):
arg_len = len(inspect.getargspec(view.__init__).args)
if arg_len == 2:
inst = view(request)
elif arg_len == 3:
inst = view(context, request)
else:
raise Exception("Class should accept `context` and "
"`request` args only")
meth = getattr(inst, self.attr)
return meth(**kwargs)
else:
return view(request, **kwargs)
return wrapper
def add_simple_route(
config, path, target,
append_matchdict=True,
default_accept='text/html',
route_kwargs={},
*args, **kwargs
):
"""Configuration directive that can be used to register a simple route to
a view.
Examples:
with view callable::
config.add_simple_route(
'/path/to/view', view_callable,
renderer='json'
)
with dotted path to view callable::
config.add_simple_route(
'/path/to/view', 'dotted.path.to.view_callable',
renderer='json'
)
"""
target = DottedNameResolver().maybe_resolve(target)
mapper = config.get_routes_mapper()
route_name = target.__name__
route_name_count = 0
if 'accept' in kwargs:
val = kwargs.pop('accept')
route_kwargs['accept'] = val
else:
# Disable */* by default, only accept 'text/html'
renderer = kwargs.get('renderer', 'html')
acceptor = ACCEPT_RENDERER_MAP.get(renderer, default_accept)
route_kwargs['accept'] = acceptor
if 'attr' in kwargs:
route_name += '.' + kwargs['attr']
routes = {route.name: route for route in mapper.get_routes()}
orig_route_name = route_name
while route_name in routes:
route_name = '%s_%s' % (orig_route_name, route_name_count)
route_name_count += 1
current_pregen = kwargs.pop('pregenerator', None)
orig_route_prefix = config.route_prefix
# We are nested with a route_prefix but are trying to
# register a default route, so clear the route prefix
# and register the route there.
if (path == '/' or path == '') and config.route_prefix:
path = config.route_prefix
config.route_prefix = ''
config.add_route(
route_name, path, pregenerator=current_pregen,
**route_kwargs
)
kwargs['route_name'] = route_name
if append_matchdict and 'mapper' not in kwargs:
kwargs['mapper'] = MatchdictMapper
config.add_view(target, *args, **kwargs)
config.commit()
config.route_prefix = orig_route_prefix
def create_api_namespace(namespace):
namespace = namespace.strip("/")
class DRoute(object):
def __init__(self, path, *args, **kwargs):
"""Constructor just here to accept parameters for decorator"""
self.path = path
view_name = kwargs.get("name", "")
self.route_path = self.path.rstrip("/") + "/" + view_name.lstrip("/") if view_name else self.path
self.prefixed_route_path = "/" + namespace + "/" + self.route_path.lstrip("/")
self.args = args
self.kwargs = kwargs
def __call__(self, wrapped):
"""Attach the decorator with Venusian"""
args = self.args
kwargs = self.kwargs
def callback(scanner, _name, wrapped):
"""Register a view; called on config.scan"""
config = scanner.config
# pylint: disable=W0142
add_simple_route(config, self.prefixed_route_path, wrapped, *args, route_kwargs=dict(traverse=self.prefixed_route_path), **kwargs)
request_method = kwargs.get("request_method", "GET")
registry = config.registry
registry.getUtility(IRouteRegistry).register(namespace, self.route_path, request_method, kwargs.get("api",{}))
if request_method.upper() != "OPTIONS":
# We should add an OPTIONS route too, for preflight requests
registry.getUtility(IRouteRegistry).register(namespace, self.route_path, "OPTIONS", kwargs.get("api", {}))
info = venusian.attach(wrapped, callback)
if info.scope == 'class': # pylint:disable=E1101
# if the decorator was attached to a method in a class, or
# otherwise executed at class scope, we need to set an
# 'attr' into the settings if one isn't already in there
if kwargs.get('attr') is None:
kwargs['attr'] = wrapped.__name__
return wrapped
return DRoute
class IRouteRegistry(zope.interface.Interface):
registrations = zope.interface.Attribute("""blahblah""")
def register(namespace, url, method, params):
"""bar blah blah"""
@implementer(IRouteRegistry)
class RouteRegistry:
def __init__(self):
self.registrations = defaultdict(lambda: defaultdict(dict))
def register(self, namespace, url, method, params):
self.registrations[namespace][url][method.lower()] = params
|
Python
| 0
|
@@ -3436,24 +3436,198 @@
, **kwargs)%0A
+ request_method = kwargs.get(%22request_method%22, %22GET%22)%0A if request_method != %22OPTIONS%22:%0A config.add_view(target, *args, **dict(kwargs, request_method=%22OPTIONS%22))%0A
config.c
@@ -4983,272 +4983,8 @@
%7B%7D))
-%0A if request_method.upper() != %22OPTIONS%22:%0A # We should add an OPTIONS route too, for preflight requests%0A registry.getUtility(IRouteRegistry).register(namespace, self.route_path, %22OPTIONS%22, kwargs.get(%22api%22, %7B%7D))
%0A%0A
|
7b21270ca893e90790a0a60c8417df12052ea9a0
|
Add alternate MDP-ID aleph API if the first fails
|
falcom/api/reject_list.py
|
falcom/api/reject_list.py
|
# Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
from os import environ
from urllib.request import urlopen
from .uri import URI, APIQuerier
from .marc import get_marc_data_from_xml
from .worldcat import get_worldcat_data_from_json
from .hathi import get_oclc_counts_from_json
from .common import ReadOnlyDataStructure
AlephURI = URI("http://mirlyn-aleph.lib.umich.edu/cgi-bin/bc2meta")
WorldCatURI = URI("http://www.worldcat.org/webservices/catalog"
"/content/libraries/{oclc}")
HathiURI = URI("http://catalog.hathitrust.org/api/volumes/brief"
"/oclc/{oclc}.json")
aleph_api = APIQuerier(AlephURI, url_opener=urlopen)
worldcat_api = APIQuerier(WorldCatURI, url_opener=urlopen)
hathi_api = APIQuerier(HathiURI, url_opener=urlopen)
wc_key = environ.get("MDP_REJECT_WC_KEY", "none")
class VolumeDataFromBarcode:
def __init__ (self, barcode):
self.barcode = barcode
self.marc = get_marc_data_from_xml(aleph_api.get(
id=barcode,
type="bc",
schema="marcxml"))
if self.marc.oclc is None:
worldcat, hathi = None, None
else:
worldcat = worldcat_api.get(
oclc=self.marc.oclc,
wskey=wc_key,
format="json",
maximumLibraries="50")
hathi = hathi_api.get(oclc=self.marc.oclc)
self.worldcat = get_worldcat_data_from_json(worldcat)
self.hathi = get_oclc_counts_from_json(hathi, "mdp." + barcode)
|
Python
| 0.000005
|
@@ -1208,16 +1208,201 @@
xml%22))%0A%0A
+ if not self.marc:%0A self.marc = get_marc_data_from_xml(aleph_api.get(%0A id=%22mdp.%22 + barcode,%0A schema=%22marcxml%22))%0A%0A
|
fae13bf07e3b336f52911cb23291c6db029922cb
|
fix timing issues with new test
|
selfdrive/controls/tests/test_startup.py
|
selfdrive/controls/tests/test_startup.py
|
#!/usr/bin/env python3
import time
import unittest
from parameterized import parameterized
from cereal import log, car
import cereal.messaging as messaging
from common.params import Params
from selfdrive.boardd.boardd_api_impl import can_list_to_can_capnp # pylint: disable=no-name-in-module,import-error
from selfdrive.car.fingerprints import _FINGERPRINTS
from selfdrive.car.hyundai.values import CAR as HYUNDAI
from selfdrive.car.mazda.values import CAR as MAZDA
from selfdrive.controls.lib.events import EVENT_NAME
from selfdrive.test.helpers import with_processes
EventName = car.CarEvent.EventName
class TestStartup(unittest.TestCase):
@parameterized.expand([
# TODO: test EventName.startup for release branches
# officially supported car
(EventName.startupMaster, HYUNDAI.SONATA, False),
(EventName.startupMaster, HYUNDAI.SONATA, True),
# community supported car
(EventName.startupMaster, HYUNDAI.KIA_STINGER, True),
(EventName.startupMaster, HYUNDAI.KIA_STINGER, False),
# dashcamOnly car
(EventName.startupMaster, MAZDA.CX5, True),
(EventName.startupMaster, MAZDA.CX5, False),
# unrecognized car
(EventName.startupNoCar, None, True),
(EventName.startupNoCar, None, False),
])
@with_processes(['controlsd'])
def test_startup_alert(self, expected_event, car, toggle_enabled):
# TODO: this should be done without any real sockets
sm = messaging.SubMaster(['controlsState'])
pm = messaging.PubMaster(['can', 'health'])
Params().put("CommunityFeaturesToggle", b"1" if toggle_enabled else b"0")
time.sleep(2) # wait for controlsd to be ready
health = messaging.new_message('health')
health.health.hwType = log.HealthData.HwType.uno
pm.send('health', health)
# fingerprint
if car is None:
finger = {addr: 1 for addr in range(1, 100)}
else:
finger = _FINGERPRINTS[car][0]
for _ in range(500):
msgs = [[addr, 0, b'\x00'*length, 0] for addr, length in finger.items()]
pm.send('can', can_list_to_can_capnp(msgs))
time.sleep(0.05)
sm.update(0)
if sm.updated["controlsState"]:
event_name = sm["controlsState"].alertType.split("/")[0]
self.assertEqual(EVENT_NAME[expected_event], event_name,
f"expected {EVENT_NAME[expected_event]} for '{car}', got {event_name}")
break
else:
self.fail(f"failed to fingerprint {car}")
if __name__ == "__main__":
unittest.main()
|
Python
| 0.000001
|
@@ -1406,18 +1406,29 @@
ets%0A
-sm
+controls_sock
= messa
@@ -1436,20 +1436,18 @@
ing.
-SubMaster(%5B'
+sub_sock(%22
cont
@@ -1455,18 +1455,17 @@
olsState
-'%5D
+%22
)%0A pm
@@ -2082,17 +2082,17 @@
(0.0
-5
+1
)%0A
sm.u
@@ -2091,57 +2091,69 @@
-sm.update(0)%0A if sm.updated%5B%22controlsState%22%5D
+msgs = messaging.drain_sock(controls_sock)%0A if len(msgs)
:%0A
@@ -2175,12 +2175,16 @@
e =
-sm%5B%22
+msgs%5B0%5D.
cont
@@ -2196,10 +2196,8 @@
tate
-%22%5D
.ale
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.