| commit (stringlengths 40–40) | subject (stringlengths 1–3.25k) | old_file (stringlengths 4–311) | new_file (stringlengths 4–311) | old_contents (stringlengths 0–26.3k) | lang (stringclasses 3 values) | proba (float64 0–1) | diff (stringlengths 0–7.82k) |
|---|---|---|---|---|---|---|---|
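The `diff` cells in the rows below are stored with percent-encoded text: `%0A` is a newline, `%22` a double quote, `%5B`/`%5D` square brackets, `%09` a tab, and `%25` a literal percent sign. A minimal decoding sketch, assuming standard URL-style percent-encoding (`decode_cell` is an illustrative helper, not part of the dataset):

```python
from urllib.parse import unquote


def decode_cell(cell: str) -> str:
    """Decode a percent-encoded dataset cell back into readable text."""
    return unquote(cell)


# Example: a short value from the `diff` column below.
raw = "@@ -0,0 +1,16 @@%0A+#TODO: add code%0A"
print(decode_cell(raw))
# @@ -0,0 +1,16 @@
# +#TODO: add code
```

Note that the `@@ -start,length +start,length @@` headers appear to count characters rather than lines, in the style of diff-match-patch patches, so the decoded hunks are not standard line-oriented unified diffs.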
74df88d572c8b6efabcde5e1803245d1bf31cc39
|
Switch to GitHub-esque event names
|
src/sentry/models/auditlogentry.py
|
src/sentry/models/auditlogentry.py
|
"""
sentry.models.auditlogentry
~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, print_function
from django.db import models
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from sentry.db.models import (
Model, BoundedPositiveIntegerField, GzippedDictField,
sane_repr
)
class AuditLogEntryEvent(object):
MEMBER_INVITE = 1
MEMBER_ADD = 2
MEMBER_ACCEPT = 3
MEMBER_EDIT = 4
MEMBER_REMOVE = 5
class AuditLogEntry(Model):
organization = models.ForeignKey('sentry.Organization')
actor = models.ForeignKey('sentry.User', related_name='audit_actors')
target_object = BoundedPositiveIntegerField(null=True)
target_user = models.ForeignKey('sentry.User', null=True, related_name='audit_targets')
event = BoundedPositiveIntegerField(choices=(
(AuditLogEntryEvent.MEMBER_INVITE, _('Invited member')),
(AuditLogEntryEvent.MEMBER_ADD, _('Added member')),
(AuditLogEntryEvent.MEMBER_ACCEPT, _('Accepted Invite')),
(AuditLogEntryEvent.MEMBER_REMOVE, _('Removed member')),
(AuditLogEntryEvent.MEMBER_EDIT, _('Edited member')),
))
data = GzippedDictField()
datetime = models.DateTimeField(default=timezone.now)
class Meta:
app_label = 'sentry'
db_table = 'sentry_auditlogentry'
__repr__ = sane_repr('organization_id', 'type')
|
Python
| 0.00007
|
@@ -932,45 +932,101 @@
t =
-BoundedPositiveIntegerField(choices=(
+models.CharField(max_length=64, choices=(%0A # We emulate github a bit with event naming
%0A
@@ -1072,16 +1072,19 @@
_('
-I
+org.i
nvite
-d
+-
memb
@@ -1137,14 +1137,16 @@
_('
-Added
+org.add-
memb
@@ -1202,18 +1202,20 @@
_('
-A
+org.a
ccept
-ed I
+-i
nvit
@@ -1270,16 +1270,19 @@
_('
-Removed
+org.rempve-
memb
@@ -1336,15 +1336,17 @@
_('
-Edited
+org.edit-
memb
|
2457eaf72e0379d0e8915906ba7f7faa9b3f2f03
|
add plotter for glitch offset
|
code/plot_glitchOffset.py
|
code/plot_glitchOffset.py
|
Python
| 0
|
@@ -0,0 +1,646 @@
+#!/bin/python%0A%0Aimport matplotlib as mpl%0Aimport matplotlib.pyplot as plt%0Aimport matplotlib.cbook as cbook%0Aimport numpy as np%0Aimport math%0A%0Adata = np.genfromtxt('GlitchOffsetOut.txt', delimiter=' ', skip_header=1,%0A%09skip_footer=1, names=%5B'time', 'north', 'east'%5D)%0A%0Afig = plt.figure()%0A%0Aax1 = fig.add_subplot(211)%0A%0Aax1.set_title(%22GPS glitch offset%22) %0A#ax1.set_xlabel('time (s)')%0Aax1.set_ylabel('north position (m)')%0Aax1.plot(data%5B'time'%5D, data%5B'north'%5D, color='b', label='north')%0A%0A%0Aax2 = fig.add_subplot(212)%0A %0Aax2.set_xlabel('time (s)')%0Aax2.set_ylabel('east position (m)')%0Aax2.plot(data%5B'time'%5D, data%5B'east'%5D, color='b', label='east')%0A%0Aplt.show()%0A
|
|
526faad8c83d1385cc31ed3db85249a9f5882893
|
Create myproject.py
|
myproject.py
|
myproject.py
|
Python
| 0.000002
|
@@ -0,0 +1,204 @@
+from flask import Flask, render_template%0Aapp = Flask(__name__)%0A%0A@app.route(%22/%22)%0Adef home():%0A return render_template(%22home.html%22)%0A %0Aif __name__ == %22__main__%22:%0A app.run(host='0.0.0.0', debug=True)%0A
|
|
c6fbea313571cff4383ce57c689e5aac25537144
|
add command to run VCLWriter
|
run_vcl_writer.py
|
run_vcl_writer.py
|
Python
| 0.000008
|
@@ -0,0 +1,820 @@
+# Copyright 2014 varnishapi authors. All rights reserved.%0A# Use of this source code is governed by a BSD-style%0A# license that can be found in the LICENSE file.%0A%0Aimport argparse%0A%0Afrom feaas import api, vcl_writer%0A%0A%0Adef run(storage):%0A parser = argparse.ArgumentParser(%22VCL Writer runner%22)%0A parser.add_argument(%22-i%22, %22--interval%22,%0A help=%22Interval for running VCLWriter (in seconds)%22,%0A default=10, type=int)%0A parser.add_argument(%22-n%22, %22--max-items%22,%0A help=%22Maximum number of units to process at a time%22,%0A type=int)%0A args = parser.parse_args()%0A writer = vcl_writer.VCLWriter(storage, args.interval, args.max_items)%0A writer.loop()%0A%0Aif __name__ == %22__main__%22:%0A manager = api.get_manager()%0A run(manager.storage)%0A
|
|
c7fcd98cadb4e2e6929161b886eb6f942553ce81
|
Test the temperature returned by RM2 (#6205)
|
homeassistant/components/sensor/broadlink.py
|
homeassistant/components/sensor/broadlink.py
|
"""
Support for the Broadlink RM2 Pro (only temperature) and A1 devices.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.broadlink/
"""
from datetime import timedelta
import binascii
import logging
import socket
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (CONF_HOST, CONF_MAC,
CONF_MONITORED_CONDITIONS,
CONF_NAME, TEMP_CELSIUS, CONF_TIMEOUT)
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
import homeassistant.helpers.config_validation as cv
REQUIREMENTS = ['broadlink==0.3']
_LOGGER = logging.getLogger(__name__)
CONF_UPDATE_INTERVAL = 'update_interval'
DEVICE_DEFAULT_NAME = 'Broadlink sensor'
DEFAULT_TIMEOUT = 10
SENSOR_TYPES = {
'temperature': ['Temperature', TEMP_CELSIUS],
'air_quality': ['Air Quality', ' '],
'humidity': ['Humidity', '%'],
'light': ['Light', ' '],
'noise': ['Noise', ' ']
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_NAME, default=DEVICE_DEFAULT_NAME): vol.Coerce(str),
vol.Optional(CONF_MONITORED_CONDITIONS, default=[]):
vol.All(cv.ensure_list, [vol.In(SENSOR_TYPES)]),
vol.Optional(CONF_UPDATE_INTERVAL, default=timedelta(seconds=300)): (
vol.All(cv.time_period, cv.positive_timedelta)),
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_MAC): cv.string,
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int
})
# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Setup the Broadlink device sensors."""
mac = config.get(CONF_MAC).encode().replace(b':', b'')
mac_addr = binascii.unhexlify(mac)
broadlink_data = BroadlinkData(
config.get(CONF_UPDATE_INTERVAL),
config.get(CONF_HOST),
mac_addr, config.get(CONF_TIMEOUT))
dev = []
for variable in config[CONF_MONITORED_CONDITIONS]:
dev.append(BroadlinkSensor(
config.get(CONF_NAME),
broadlink_data,
variable))
add_devices(dev)
class BroadlinkSensor(Entity):
"""Representation of a Broadlink device sensor."""
def __init__(self, name, broadlink_data, sensor_type):
"""Initialize the sensor."""
self._name = "%s %s" % (name, SENSOR_TYPES[sensor_type][0])
self._state = None
self._type = sensor_type
self._broadlink_data = broadlink_data
self._unit_of_measurement = SENSOR_TYPES[sensor_type][1]
self.update()
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit this state is expressed in."""
return self._unit_of_measurement
def update(self):
"""Get the latest data from the sensor."""
self._broadlink_data.update()
if self._broadlink_data.data is None:
return
self._state = self._broadlink_data.data[self._type]
class BroadlinkData(object):
"""Representation of a Broadlink data object."""
def __init__(self, interval, ip_addr, mac_addr, timeout):
"""Initialize the data object."""
import broadlink
self.data = None
self._device = broadlink.a1((ip_addr, 80), mac_addr)
self._device.timeout = timeout
self.update = Throttle(interval)(self._update)
if not self._auth():
_LOGGER.warning("Failed to connect to device.")
def _update(self, retry=3):
try:
data = self._device.check_sensors_raw()
if (data is not None and data.get('humidity', 0) <= 100 and
data.get('light', 0) in [0, 1, 2, 3] and
data.get('air_quality', 0) in [0, 1, 2, 3] and
data.get('noise', 0) in [0, 1, 2]):
self.data = data
return
except socket.timeout as error:
if retry < 1:
_LOGGER.error(error)
return
if retry > 0 and self._auth():
self._update(retry-1)
def _auth(self, retry=3):
try:
auth = self._device.auth()
except socket.timeout:
auth = False
if not auth and retry > 0:
return self._auth(retry-1)
return auth
|
Python
| 0
|
@@ -1069,16 +1069,17 @@
e', ' '%5D
+,
%0A%7D%0A%0APLAT
@@ -3591,16 +3591,373 @@
timeout%0A
+ self._schema = vol.Schema(%7B%0A vol.Optional('temperature'): vol.Range(min=-50, max=150),%0A vol.Optional('humidity'): vol.Range(min=0, max=100),%0A vol.Optional('light'): vol.Any(0, 1, 2, 3),%0A vol.Optional('air_quality'): vol.Any(0, 1, 2, 3),%0A vol.Optional('noise'): vol.Any(0, 1, 2),%0A %7D)%0A
@@ -4205,17 +4205,16 @@
if
-(
data is
@@ -4225,230 +4225,8 @@
None
- and data.get('humidity', 0) %3C= 100 and%0A data.get('light', 0) in %5B0, 1, 2, 3%5D and%0A data.get('air_quality', 0) in %5B0, 1, 2, 3%5D and%0A data.get('noise', 0) in %5B0, 1, 2%5D)
:%0A
@@ -4251,20 +4251,34 @@
.data =
+self._schema(
data
+)
%0A
@@ -4411,32 +4411,131 @@
return%0A
+ except vol.Invalid:%0A pass # Continue quietly if device returned malformed data%0A
if retry
|
30bd54621ce649e90f4a1717d6709652a2c77351
|
Add missing migration
|
humans/migrations/0013_auto_20201204_1807.py
|
humans/migrations/0013_auto_20201204_1807.py
|
Python
| 0.0002
|
@@ -0,0 +1,433 @@
+# Generated by Django 2.2.13 on 2020-12-04 18:07%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('humans', '0012_remove_user_server_signed'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='user',%0A name='last_name',%0A field=models.CharField(blank=True, max_length=150, verbose_name='last name'),%0A ),%0A %5D%0A
|
|
a8bf127b1e17b4ce9c2a5c4e6d2bbbc19faa0141
|
Create snapper_chain.py
|
google-code-jam/snapper_chain.py
|
google-code-jam/snapper_chain.py
|
Python
| 0.000009
|
@@ -0,0 +1,448 @@
+%22%22%22%0Ahttps://code.google.com/codejam/contest/433101/dashboard%0A%22%22%22%0A%0A%0Adef light_on(n, k):%0A bits = bin(k)%5B2:%5D%0A%0A if len(bits) %3C n:%0A return False%0A%0A return all(b == '1' for b in list(reversed(bits))%5B:n%5D)%0A%0A%0Adef main():%0A T = int(raw_input())%0A for t in xrange(1, T+1):%0A n, k = map(int, raw_input().strip().split())%0A print 'Case #%7B%7D: %7B%7D'.format(t, 'ON' if light_on(n, k) else 'OFF')%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
76fe998ad769e97b3424f2a3b8a5cccf2496816f
|
add very rudimentary/prototype range splitter program, without robust input checking
|
rangesplitter.py
|
rangesplitter.py
|
Python
| 0
|
@@ -0,0 +1,881 @@
+#! /usr/bin/env python3.4%0A%0Aimport ipaddress%0Aimport math%0A%0AtoSplit=False%0Awhile not toSplit:%0A%0A%09inputRange = input('Input the IP range you would like to split into subranges: ')%0A%09try:%0A%09%09toSplit =ipaddress.ip_network(inputRange)%0A%09except:%0A%09%09ValueError%0A%0ArangeSize = False%0Adefault = False%0Awhile (not rangeSize and not default):%0A%09rawSize = input ('Input the size of the CIDR range you would calculate. Must be a larger or equal number compared to the suffix of the input range ('+str(toSplit.prefixlen)+') :')%0A%0A%09if not rawSize:%0A%09%09default=True%0A%09%0A%09if int(rawSize)%3CtoSplit.prefixlen:%0A%09%09print('Invalid input')%0A%09%09continue%0A%09else:%0A%09%09rangeSize = int(rawSize)%0A%0Aif not default:%0A%09print(list(toSplit.subnets(new_prefix=rangeSize)))%0Aif default:%0A%09if toSplit.version==4:%0A%09%09print(list(toSplit.subnets(new_prefix=16)))%0A%09if toSplit.version==6:%0A%09%09print(list(toSplit.subnets(new_prefix=48)))%0A%09print('default')%0A
|
|
b2661e8156f9a4e96cce3cc720563b1589037ad5
|
Add frequency_estimator.py
|
mhealthx/extractors/frequency_estimator.py
|
mhealthx/extractors/frequency_estimator.py
|
Python
| 0.998838
|
@@ -0,0 +1,3037 @@
+#!/usr/bin/env python%0A%22%22%22%0AThis program implements some of the frequency estimation functions from:%0Ahttps://gist.github.com/endolith/255291 and%0Ahttps://github.com/endolith/waveform-analyzer%0A%22%22%22%0A%0A%0Adef freq_from_autocorr(signal, fs):%0A %22%22%22%0A Estimate frequency using autocorrelation.%0A%0A Pros: Best method for finding the true fundamental of any repeating wave,%0A even with strong harmonics or completely missing fundamental%0A%0A Cons: Not as accurate, doesn't work for inharmonic things like musical%0A instruments, this implementation has trouble with finding the true peak%0A%0A From: https://gist.github.com/endolith/255291 and%0A https://github.com/endolith/waveform-analyzer%0A%0A Parameters%0A ----------%0A signal : list or array%0A time series data%0A fs : integer%0A sample rate%0A%0A Returns%0A -------%0A frequency : float%0A frequency (Hz)%0A%0A %22%22%22%0A import numpy as np%0A from scipy.signal import fftconvolve%0A from matplotlib.mlab import find%0A%0A from mhealthx.signals import parabolic%0A%0A # Calculate autocorrelation (same thing as convolution, but with one input%0A # reversed in time), and throw away the negative lags:%0A signal -= np.mean(signal) # Remove DC offset%0A corr = fftconvolve(signal, signal%5B::-1%5D, mode='full')%0A corr = corr%5Blen(corr)/2:%5D%0A%0A # Find the first low point:%0A d = np.diff(corr)%0A start = find(d %3E 0)%5B0%5D%0A%0A # Find the next peak after the low point (other than 0 lag). This bit is%0A # not reliable for long signals, due to the desired peak occurring between%0A # samples, and other peaks appearing higher.%0A i_peak = np.argmax(corr%5Bstart:%5D) + start%0A i_interp = parabolic(corr, i_peak)%5B0%5D%0A frequency = fs / i_interp%0A%0A return frequency%0A%0A%0Adef freq_from_hps(signal, fs):%0A %22%22%22%0A Estimate frequency using harmonic product spectrum.%0A%0A Note: Low frequency noise piles up and overwhelms the desired peaks.%0A%0A From: https://gist.github.com/endolith/255291 and%0A https://github.com/endolith/waveform-analyzer%0A%0A Parameters%0A ----------%0A signal : list or array%0A time series data%0A fs : integer%0A sample rate%0A%0A Returns%0A -------%0A frequency : float%0A frequency (Hz)%0A%0A %22%22%22%0A import numpy as np%0A from scipy.signal import blackmanharris, decimate%0A%0A from mhealthx.signals import parabolic%0A%0A N = len(signal)%0A signal -= np.mean(signal) # Remove DC offset%0A%0A # Compute Fourier transform of windowed signal:%0A windowed = signal * blackmanharris(len(signal))%0A%0A # Get spectrum:%0A X = np.log(abs(np.fft.rfft(windowed)))%0A%0A # Downsample sum logs of spectra instead of multiplying:%0A hps = np.copy(X)%0A for h in np.arange(2, 9): # TODO: choose a smarter upper limit%0A dec = decimate(X, h)%0A hps%5B:len(dec)%5D += dec%0A%0A # Find the peak and interpolate to get a more accurate peak:%0A i_peak = np.argmax(hps%5B:len(dec)%5D)%0A i_interp = parabolic(hps, i_peak)%5B0%5D%0A%0A # Convert to equivalent frequency:%0A frequency = fs * i_interp / N # Hz%0A%0A return frequency%0A
|
|
d2a80a76fdf28625ad36b2fd71af56938b9b9506
|
Add needed track known class.
|
src/trackknown.py
|
src/trackknown.py
|
Python
| 0
|
@@ -0,0 +1,1124 @@
+#!/usr/bin/env python %0A'''%0A@author jstober%0A%0ASimple class to track knowledge of states and actions. Based on %0A%0AL. Li, M. L. Littman, and C. R. Mansley, %E2%80%9COnline exploration in least-squares policy iteration%E2%80%9D AAMAS, 2009.%0A'''%0Aimport numpy as np%0Aimport pdb%0A%0Aclass TrackKnown:%0A %22%22%22%0A Track knowledge of states and actions.%0A%0A TODO: Generalize by adding epsilon and kd tree or approximation methods.%0A %22%22%22%0A def __init__(self, nstates, nactions, mcount):%0A self.nstates = nstates%0A self.nactions = nactions%0A self.mcount = mcount%0A self.counts = np.zeros(nstates, nactions)%0A%0A def init(self, samples):%0A for (s,a,r,ns,na) in samples:%0A self.counts%5Bs,a%5D += 1%0A%0A def known_pair(self,s,a):%0A if self.counts%5Bs,a%5D %3E self.mcount:%0A return True%0A else:%0A return False%0A%0A def known_state(self,s):%0A if np.greater(self.counts%5Bs,:%5D,self.mcount).all():%0A return True%0A else:%0A return False%0A%0A def unknown(self,s):%0A # indices of actions with low counts.%0A return np.where(self.counts%5Bs,:%5D %3C self.mcount)%5B0%5D
|
|
5dfa4397a282ddbafb57d990bc7d630fb6f927de
|
Add helper method for execute a commands
|
build.py
|
build.py
|
Python
| 0.000006
|
@@ -0,0 +1,3430 @@
+%22%22%22Update conda packages on binstars with latest versions%22%22%22%0Aimport os%0Aimport six%0Aimport subprocess%0Aimport time%0A%0AATTEMPTS = 3%0ARETRY_INTERVAL = 0.1%0A%0A%0Adef execute(command, **kwargs):%0A %22%22%22Helper method to shell out and execute a command through subprocess.%0A%0A :param attempts: How many times to retry running the command.%0A :param binary: On Python 3, return stdout and stderr as bytes if%0A binary is True, as Unicode otherwise.%0A :param check_exit_code: Single bool, int, or list of allowed exit%0A codes. Defaults to %5B0%5D. Raise%0A :class:%60CalledProcessError%60 unless%0A program exits with one of these code.%0A :param command: The command passed to the subprocess.Popen.%0A :param cwd: Set the current working directory%0A :param env_variables: Environment variables and their values that%0A will be set for the process.%0A :param retry_interval: Interval between execute attempts, in seconds%0A :param shell: whether or not there should be a shell used to%0A execute this command.%0A%0A :raises: :class:%60subprocess.CalledProcessError%60%0A %22%22%22%0A # pylint: disable=too-many-locals%0A%0A attempts = kwargs.pop(%22attempts%22, ATTEMPTS)%0A binary = kwargs.pop('binary', False)%0A check_exit_code = kwargs.pop('check_exit_code', %5B0%5D)%0A cwd = kwargs.pop('cwd', None)%0A env_variables = kwargs.pop(%22env_variables%22, None)%0A retry_interval = kwargs.pop(%22retry_interval%22, RETRY_INTERVAL)%0A shell = kwargs.pop(%22shell%22, False)%0A%0A command = %5Bstr(argument) for argument in command%5D%0A ignore_exit_code = False%0A%0A if isinstance(check_exit_code, bool):%0A ignore_exit_code = not check_exit_code%0A check_exit_code = %5B0%5D%0A elif isinstance(check_exit_code, int):%0A check_exit_code = %5Bcheck_exit_code%5D%0A%0A while attempts %3E 0:%0A attempts = attempts - 1%0A try:%0A process = subprocess.Popen(command,%0A stdin=subprocess.PIPE,%0A stdout=subprocess.PIPE,%0A stderr=subprocess.PIPE, shell=shell,%0A cwd=cwd, env=env_variables)%0A result = process.communicate()%0A return_code = process.returncode%0A%0A if six.PY3 and not binary and result is not None:%0A # pylint: disable=no-member%0A%0A # Decode from the locale using using the surrogate escape error%0A # handler (decoding cannot fail)%0A (stdout, stderr) = result%0A stdout = os.fsdecode(stdout)%0A stderr = os.fsdecode(stderr)%0A else:%0A stdout, stderr = result%0A%0A if not ignore_exit_code and return_code not in check_exit_code:%0A raise subprocess.CalledProcessError(returncode=return_code,%0A cmd=command,%0A output=(stdout, stderr))%0A else:%0A return (stdout, stderr)%0A except subprocess.CalledProcessError:%0A if attempts:%0A time.sleep(retry_interval)%0A else:%0A raise%0A%0A raise RuntimeError(%22The maximum number of attempts has been exceeded.%22)%0A
|
|
387758ebcc2a0fa29e9e7744eacc6c753ae5284e
|
add example for FIFOQueue and coordinate application
|
TF-Demo/QueueRunnerDemo/queue_runner_demo.py
|
TF-Demo/QueueRunnerDemo/queue_runner_demo.py
|
Python
| 0
|
@@ -0,0 +1,928 @@
+#!/usr/bin/python%0A# -*- coding: utf-8 -*-%0A# Author: violinsolo%0A# Created on 12/12/2017%0A%0Aimport tensorflow as tf%0A%0A%0A# define FIFO queue%0Aq = tf.FIFOQueue(capacity=1000, dtypes='float32')%0A%0A# define ops%0Acounter = tf.Variable(initial_value=0, dtype='float32')%0Acounter_increment_op = tf.assign_add(counter, 1.)%0Aqueue_enqueue_op = q.enqueue(%5Bcounter%5D)%0A%0Acoordinator = tf.train.Coordinator()%0A%0Aqr = tf.train.QueueRunner(queue=q, enqueue_ops=%5Bcounter_increment_op, queue_enqueue_op%5D)%0A%0A# begin session%0Awith tf.Session() as sess:%0A sess.run(tf.global_variables_initializer())%0A%0A enqueue_threads = qr.create_threads(sess=sess, start=True, coord=coordinator)%0A%0A%0A for i in range(10):%0A print sess.run(q.dequeue())%0A%0A # coordinator.join(enqueue_threads)%0A%0A coordinator.request_stop()%0A print sess.run(q.size())%0A coordinator.join(enqueue_threads)%0A for i in range(100):%0A print %22-%25d-%22 %25 i%0A print sess.run(q.size())%0A
|
|
20b2e70fe732b6f0cc049d18da9cac717cd7e967
|
Remove groups from admin
|
polyaxon/db/admin/groups.py
|
polyaxon/db/admin/groups.py
|
Python
| 0
|
@@ -0,0 +1,108 @@
+from django.contrib import admin%0Afrom django.contrib.auth.models import Group%0A%0Aadmin.site.unregister(Group)%0A
|
|
4a7fc9efce33bba3aa9ea818d09f6e9b621ab152
|
add script to pull out contacts csv
|
portality/migrate/emails.py
|
portality/migrate/emails.py
|
Python
| 0
|
@@ -0,0 +1,602 @@
+from portality.models import Account%0Aimport csv%0A%0AOUT = %22emails.csv%22%0A%0Af = open(OUT, %22wb%22)%0Awriter = csv.writer(f)%0Awriter.writerow(%5B%22ID%22, %22Name%22, %22Journal Count%22, %22Email%22%5D)%0A%0Afor a in Account.iterall():%0A id = a.id%0A name = a.name%0A count = len(a.journal) if a.journal is not None else 0%0A email = a.email%0A if name is not None:%0A name = name.encode(%22ascii%22, %22ignore%22)%0A if name is None or name == %22%22:%0A name = %22no name available%22%0A if email is not None and email != %22%22:%0A email = email.encode(%22ascii%22, %22ignore%22)%0A writer.writerow(%5Bid, name, count, email%5D)%0A%0Af.close()%0A
|
|
9639ab62ed0f6e0c5229be9820a9b902e5870a67
|
update readme and make command line script
|
scripts/dianon.py
|
scripts/dianon.py
|
Python
| 0
|
@@ -0,0 +1,1169 @@
+#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0Aimport argparse%0Aimport dicom%0Aimport sys%0A%0Aif __name__ == %22__main__%22: #pragma nocover%0A%0A from dianonymous.dianonymous import anonymize%0A parser = argparse.ArgumentParser(description=%22Anonymize DICOM files%22)%0A%0A parser.add_argument(%0A '-r', '--recurse',%0A default=False,%0A action=%22store_true%22,%0A help=%22If input is a directory all DICOM files including%22%0A %22subdirectories will be anonymized%22%0A )%0A%0A parser.add_argument(%0A '-o', '--output',%0A default=%22./anonymized/%22,%0A help=%22Directory to place output files. If it doesn't exist it will be created.%22%0A )%0A%0A parser.add_argument(%0A '-p', '--patient-id',%0A default=None,%0A help=%22Anonymous patient id to use%22%0A )%0A%0A parser.add_argument(%0A '-n', '--patient-name',%0A default=None,%0A help=%22Anonymous patient name to use%22%0A )%0A%0A parser.add_argument( 'files', nargs=%22+%22, help=%22Input files and directories%22)%0A%0A args = parser.parse_args()%0A%0A anonymize(args.files, output=args.output, anon_id=args.patient_id, anon_name=args.patient_name, recurse=args.recurse, log=sys.stdout)%0A
|
|
2ba1f25445f3b4d0ba3fdfe65d55041601b383db
|
Replace GoogleSTT with MycroftSTT
|
test/client/audio_consumer_test.py
|
test/client/audio_consumer_test.py
|
# Copyright 2016 Mycroft AI, Inc.
#
# This file is part of Mycroft Core.
#
# Mycroft Core is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Mycroft Core is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Mycroft Core. If not, see <http://www.gnu.org/licenses/>.
import unittest

from Queue import Queue

import speech_recognition
from os.path import dirname, join
from speech_recognition import WavFile, AudioData

from mycroft.client.speech.listener import AudioConsumer, RecognizerLoop
from mycroft.client.speech.local_recognizer import LocalRecognizer
from mycroft.stt import GoogleSTT

__author__ = 'seanfitz'


class MockRecognizer(object):
    def __init__(self):
        self.transcriptions = []

    def recognize_google(self, audio, key=None, language=None, show_all=False):
        if len(self.transcriptions) > 0:
            return self.transcriptions.pop(0)
        else:
            raise speech_recognition.UnknownValueError()

    def set_transcriptions(self, transcriptions):
        self.transcriptions = transcriptions


class AudioConsumerTest(unittest.TestCase):
    """
    AudioConsumerTest
    """

    def setUp(self):
        self.loop = RecognizerLoop()
        self.queue = Queue()
        self.recognizer = MockRecognizer()
        self.consumer = AudioConsumer(
            self.loop.state, self.queue, self.loop, GoogleSTT(),
            LocalRecognizer(self.loop.wakeup_recognizer.key_phrase,
                            self.loop.wakeup_recognizer.phonemes, "1e-16"),
            self.loop.mycroft_recognizer)

    def __create_sample_from_test_file(self, sample_name):
        root_dir = dirname(dirname(dirname(__file__)))
        filename = join(
            root_dir, 'test', 'client', 'data', sample_name + '.wav')
        wavfile = WavFile(filename)
        with wavfile as source:
            return AudioData(
                source.stream.read(), wavfile.SAMPLE_RATE,
                wavfile.SAMPLE_WIDTH)

    def test_word_extraction(self):
        """
        This is intended to test the extraction of the word: ``mycroft``.
        The values for ``ideal_begin`` and ``ideal_end`` were found using an
        audio tool like Audacity and they represent a sample value position of
        the audio. ``tolerance`` is an acceptable margin error for the distance
        between the ideal and actual values found by the ``WordExtractor``
        """
        # TODO: implement WordExtractor test without relying on the listener
        return

        audio = self.__create_sample_from_test_file('weather_mycroft')
        self.queue.put(audio)
        tolerance = 4000
        ideal_begin = 70000
        ideal_end = 92000

        monitor = {}
        self.recognizer.set_transcriptions(["what's the weather next week"])

        def wakeword_callback(message):
            monitor['pos_begin'] = message.get('pos_begin')
            monitor['pos_end'] = message.get('pos_end')

        self.loop.once('recognizer_loop:wakeword', wakeword_callback)
        self.consumer.read()

        actual_begin = monitor.get('pos_begin')
        self.assertIsNotNone(actual_begin)
        diff = abs(actual_begin - ideal_begin)
        self.assertTrue(
            diff <= tolerance,
            str(diff) + " is not less than " + str(tolerance))

        actual_end = monitor.get('pos_end')
        self.assertIsNotNone(actual_end)
        diff = abs(actual_end - ideal_end)
        self.assertTrue(
            diff <= tolerance,
            str(diff) + " is not less than " + str(tolerance))

    def test_wakeword_in_beginning(self):
        self.queue.put(self.__create_sample_from_test_file('weather_mycroft'))
        self.recognizer.set_transcriptions(["what's the weather next week"])
        monitor = {}

        def callback(message):
            monitor['utterances'] = message.get('utterances')

        self.loop.once('recognizer_loop:utterance', callback)
        self.consumer.read()

        utterances = monitor.get('utterances')
        self.assertIsNotNone(utterances)
        self.assertTrue(len(utterances) == 1)
        self.assertEquals("what's the weather next week", utterances[0])

    def test_wakeword(self):
        self.queue.put(self.__create_sample_from_test_file('mycroft'))
        self.recognizer.set_transcriptions(["silence"])
        monitor = {}

        def callback(message):
            monitor['utterances'] = message.get('utterances')

        self.loop.once('recognizer_loop:utterance', callback)
        self.consumer.read()

        utterances = monitor.get('utterances')
        self.assertIsNotNone(utterances)
        self.assertTrue(len(utterances) == 1)
        self.assertEquals("silence", utterances[0])

    def test_ignore_wakeword_when_sleeping(self):
        self.queue.put(self.__create_sample_from_test_file('mycroft'))
        self.recognizer.set_transcriptions(["not detected"])
        self.loop.sleep()
        monitor = {}

        def wakeword_callback(message):
            monitor['wakeword'] = message.get('utterance')

        self.loop.once('recognizer_loop:wakeword', wakeword_callback)
        self.consumer.read()

        self.assertIsNone(monitor.get('wakeword'))
        self.assertTrue(self.loop.state.sleeping)

    def test_wakeup(self):
        self.queue.put(self.__create_sample_from_test_file('mycroft_wakeup'))
        self.loop.sleep()
        self.consumer.read()
        self.assertFalse(self.loop.state.sleeping)

    def test_stop(self):
        self.queue.put(self.__create_sample_from_test_file('mycroft'))
        self.consumer.read()
        self.queue.put(self.__create_sample_from_test_file('stop'))
        self.recognizer.set_transcriptions(["stop"])
        monitor = {}

        def utterance_callback(message):
            monitor['utterances'] = message.get('utterances')

        self.loop.once('recognizer_loop:utterance', utterance_callback)
        self.consumer.read()

        utterances = monitor.get('utterances')
        self.assertIsNotNone(utterances)
        self.assertTrue(len(utterances) == 1)
        self.assertEquals("stop", utterances[0])

    def test_record(self):
        self.queue.put(self.__create_sample_from_test_file('mycroft'))
        self.consumer.read()
        self.queue.put(self.__create_sample_from_test_file('record'))
        self.recognizer.set_transcriptions(["record"])
        monitor = {}

        def utterance_callback(message):
            monitor['utterances'] = message.get('utterances')

        self.loop.once('recognizer_loop:utterance', utterance_callback)
        self.consumer.read()

        utterances = monitor.get('utterances')
        self.assertIsNotNone(utterances)
        self.assertTrue(len(utterances) == 1)
        self.assertEquals("record", utterances[0])
|
Python
| 0.999999
|
@@ -1020,22 +1020,23 @@
import
-Google
+Mycroft
STT%0A%0A__a
@@ -1168,14 +1168,15 @@
ize_
-google
+mycroft
(sel
@@ -1194,16 +1194,42 @@
ey=None,
+%0A
languag
@@ -1818,14 +1818,15 @@
op,
-Google
+Mycroft
STT(
|
bd371ecbd2ac163e44f104a775390b2ca2b88d35
|
Add migration for index on departement
|
migrations/versions/75704b2e975e_add_index_on_departement_for_numero.py
|
migrations/versions/75704b2e975e_add_index_on_departement_for_numero.py
|
Python
| 0
|
@@ -0,0 +1,519 @@
+%22%22%22Add index on Departement for numero%0A%0ARevision ID: 75704b2e975e%0ARevises: 34c2049aaee2%0ACreate Date: 2019-10-22 17:27:10.925104%0A%0A%22%22%22%0A%0A# revision identifiers, used by Alembic.%0Arevision = '75704b2e975e'%0Adown_revision = '34c2049aaee2'%0A%0Afrom alembic import op%0Aimport sqlalchemy as sa%0Afrom sqlalchemy.dialects import postgresql%0A%0Adef upgrade():%0A op.create_index('departement_numero_index', 'departement', %5B'numero'%5D, unique=False)%0A%0A%0Adef downgrade():%0A op.drop_index('departement_numero_index', table_name='departement')%0A
|
|
70428a920ae9e02820e63e7dba98fc16faab6f10
|
add benchmark for linalg.logm
|
benchmarks/benchmarks/linalg_logm.py
|
benchmarks/benchmarks/linalg_logm.py
|
Python
| 0.000001
|
@@ -0,0 +1,787 @@
+%22%22%22 Benchmark linalg.logm for various blocksizes.%0A%0A%22%22%22%0Aimport numpy as np%0A%0Atry:%0A import scipy.linalg%0Aexcept ImportError:%0A pass%0A%0Afrom .common import Benchmark%0A%0A%0Aclass Logm(Benchmark):%0A params = %5B%0A %5B'float64', 'complex128'%5D,%0A %5B64, 256%5D,%0A %5B'gen', 'her', 'pos'%5D%0A %5D%0A param_names = %5B'dtype', 'n', 'structure'%5D%0A%0A def setup(self, dtype, n, structure):%0A n = int(n)%0A dtype = np.dtype(dtype)%0A%0A A = np.random.rand(n, n)%0A if dtype == np.complex128:%0A A = A + 1j*np.random.rand(n, n)%0A%0A if structure == 'pos':%0A A = A @ A.T.conj()%0A elif structure == 'her':%0A A = A + A.T.conj()%0A%0A self.A = A%0A%0A def time_logm(self, dtype, n, structure):%0A scipy.linalg.logm(self.A, disp=False)%0A
|
|
e7e37e9b1fd56d18711299065d6f421c1cb28bac
|
Add some Feed test cases
|
moksha/tests/test_feed.py
|
moksha/tests/test_feed.py
|
Python
| 0
|
@@ -0,0 +1,853 @@
+from tw.api import Widget%0Afrom moksha.feed import Feed%0A%0Aclass TestFeed(object):%0A%0A def test_feed_subclassing(self):%0A class MyFeed(Feed):%0A url = 'http://lewk.org/rss'%0A feed = MyFeed()%0A assert feed.url == 'http://lewk.org/rss'%0A assert feed.num_entries() %3E 0%0A for entry in feed.iterentries():%0A pass%0A for entry in feed.entries():%0A pass%0A%0A def test_widget_children(self):%0A class MyWidget(Widget):%0A myfeedurl = 'http://lewk.org/rss'%0A children = %5BFeed('myfeed', url=myfeedurl)%5D%0A template = %22mako:$%7Bc.myfeed()%7D%22%0A widget = MyWidget()%0A assert widget.c.myfeed%0A%0A def test_feed_generator(self):%0A feed = Feed(url='http://lewk.org/rss')%0A iter = feed.iterentries()%0A data = iter.next()%0A assert iter.next()%0A
|
|
d308695c79face90ba7f908230edb5e2e2437cbd
|
Decrypt file using XOR
|
tools/xor_decryptor.py
|
tools/xor_decryptor.py
|
Python
| 0.000003
|
@@ -0,0 +1,1027 @@
+#! /usr/bin/env python3%0A%0Aimport sys%0Aimport os%0Afrom ctypes import c_ubyte%0A%0Akeys = %5B0xd1, 0x73, 0x52, 0xf6, 0xd2, 0x9a, 0xcb, 0x27, 0x3e, 0xaf, 0x59, 0x31, 0x37, 0xb3, 0xe7, 0xa2%5D%0Ainitial_key = 0x5e%0Adelta_key = 0x3d%0A%0Aif __name__ == '__main__':%0A for path in sys.argv%5B1:%5D:%0A if os.path.isfile(path): %0A with open(path, 'rb') as input_file:%0A data = input_file.read()%0A%0A dec_data = %5B%5D%0A position = 0%0A key = initial_key%0A for b in data:%0A dec_data.append(c_ubyte(int(b) %5E keys%5Bposition %25 len(keys)%5D - key))%0A key = int(b) + delta_key%0A dec_data = bytes(map(lambda b: b.value, dec_data))%0A%0A output_path, extension = os.path.splitext(path)%0A output_path = output_path + '_dec' + extension%0A%0A with open(output_path, 'wb') as output_file:%0A output_file.write(data)%0A else:%0A print('File %7B%7D does not exist.'.format(path))%0A
|
|
7a02f383986f347d208f69ba59526d9ce7df59bf
|
Add access grant functions
|
access.py
|
access.py
|
Python
| 0
|
@@ -0,0 +1,426 @@
+#%0A# access.py%0A#%0A# functions for dealing with access to Discord bot commands%0A#%0A%0Adef grant_user_access(user, commandclass):%0A%09new_grant = CommandClassAccess(user_id = user.id, command_class_id = commandclass.id)%0A%09session.add(new_grant)%0A%0A%09session.commit()%0A%0Adef grant_role_access(role, commandclass):%0A%09new_grant = CommandClassAccess(role_id = role.id, command_class_id = commandclass.id)%0A%09session.add(new_grant)%0A%0A%09session.commit()%0A
|
|
c4001a95dee88bc98eda5ce67a2f14485f4e85a5
|
Add configurations
|
configurations/initial.py
|
configurations/initial.py
|
Python
| 0.000003
|
@@ -0,0 +1,16 @@
+#TODO: add code%0A
|
|
9666a0d60eeb6954bae0c02300110a6772998859
|
Fix connection-refused error handling
|
ssbench/worker.py
|
ssbench/worker.py
|
import re
import time

import yaml

from ssbench.constants import *
from swift.common import client


class Worker:
    MAX_RETRIES = 10

    def __init__(self, queue):
        queue.use(STATS_TUBE)
        self.queue = queue

    def go(self):
        job = self.queue.reserve()
        while job:
            job.delete()  # avoid any job-timeout nonsense
            self.handle_job(job)
            job = self.queue.reserve()

    def handle_job(self, job):
        job_data = yaml.load(job.body)
        print job_data  # XXX
        tries = 0
        if job_data['type'] == UPLOAD_OBJECT:
            self.handle_upload_object(job_data)
        elif job_data['type'] == DELETE_OBJECT:
            self.handle_delete_object(job_data)
        else:
            raise NameError("Unknown job type %r" % (job_data['type'],))

    def ignoring_http_responses(self, statuses, do_stuff):
        tries = 0
        while True:
            try:
                do_stuff()
                break
            except socket.error:
                tries += 1
                if not (tries <= self.MAX_RETRIES):
                    raise
            except client.ClientException as error:
                print "oh shit an error: %r" % (error,)
                tries += 1
                if not (error.http_status in statuses and tries <= self.MAX_RETRIES):
                    raise

    def handle_delete_object(self, object_info):
        self.ignoring_http_responses(
            (404, 503),
            lambda *x: client.delete_object(
                url = object_info['url'],
                token = object_info['token'],
                container = object_info['container'],
                name = object_info['object_name']))

    def handle_upload_object(self, object_info):
        self.ignoring_http_responses(
            (503,),
            lambda *x: client.put_object(
                url = object_info['url'],
                token = object_info['token'],
                container = object_info['container'],
                name = object_info['object_name'],
                contents = 'A' * object_info['object_size']))
        self.queue.put(yaml.dump({
            "action": UPLOAD_OBJECT,
            "completed_at": time.time()}))
|
Python
| 0.000001
|
@@ -3,16 +3,30 @@
port re%0A
+import socket%0A
import t
|
226cf36e4b4d069a920785b492804b78eebc34a5
|
Make non-commtrack location types administrative
|
corehq/apps/locations/management/commands/migrate_admin_status.py
|
corehq/apps/locations/management/commands/migrate_admin_status.py
|
Python
| 0.000313
|
@@ -0,0 +1,1753 @@
+# One-off migration from 2016-04-04%0Afrom optparse import make_option%0Afrom time import sleep%0Afrom django.core.management.base import BaseCommand%0Afrom corehq.apps.locations.models import LocationType, SQLLocation%0Afrom corehq.apps.es import DomainES%0Afrom corehq.util.log import with_progress_bar%0A%0A%0Adef get_affected_location_types():%0A commtrack_domains = (DomainES()%0A .commtrack_domains()%0A .values_list('name', flat=True))%0A return (LocationType.objects%0A .exclude(domain__in=commtrack_domains)%0A .filter(administrative=False))%0A%0A%0Adef show_info():%0A location_types = get_affected_location_types()%0A num_locations = SQLLocation.objects.filter(location_type__in=location_types).count()%0A print (%22There are %7Bdomains%7D domains, %7Bloc_types%7D loc types, and %22%0A %22%7Blocations%7D locations affected%22).format(%0A domains=location_types.distinct('domain').count(),%0A loc_types=location_types.count(),%0A locations=num_locations,%0A )%0A%0A%0Adef run_migration():%0A for location_type in with_progress_bar(get_affected_location_types()):%0A if not location_type.administrative:%0A location_type.administrative = True%0A location_type.save()%0A sleep(1)%0A%0A%0Aclass Command(BaseCommand):%0A help = ('There are a bunch of LocationTypes on non-commtrack domains which'%0A 'incorrectly are marked as administrative=False')%0A option_list = BaseCommand.option_list + (%0A make_option('--run', action='store_true', default=False),%0A )%0A%0A def handle(self, *args, **options):%0A if options.get('run', False):%0A run_migration()%0A else:%0A show_info()%0A print %22pass %60--run%60 to run the migration%22%0A
|
|
be2ac14fbb228e5a5addd393867b9b3c7267ba89
|
Add and define string_permu_check problem.
|
pyexp/string_permu_check.py
|
pyexp/string_permu_check.py
|
Python
| 0
|
@@ -0,0 +1,244 @@
+'''Module to solve the algoritm question:%0AGiven a string S, how to count how many permutations%0Aof S is in a longer string L, assuming, of course, that%0Apermutations of S must be in contagious blocks in L.%0A%0AI will solve it in O(len(L)) time.%0A'''%0A
|
|
a59682d4b8bd4f594dce72b0f86f2ed4096c4178
|
Add missing migration file
|
akvo/rsr/migrations/0127_auto_20180529_0955.py
|
akvo/rsr/migrations/0127_auto_20180529_0955.py
|
Python
| 0
|
@@ -0,0 +1,621 @@
+# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import models, migrations%0Aimport akvo.rsr.fields%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('rsr', '0126_auto_20180320_1252'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='report',%0A name='url',%0A field=akvo.rsr.fields.ValidXMLCharField(help_text='Enter the parametrized path for downloading the report. NOTE: one line only even if the input field allows for more!', max_length=200, verbose_name='url'),%0A preserve_default=True,%0A ),%0A %5D%0A
|
|
4322ca998fadbd0e380626b895415bf75c4f7214
|
change ordering on ability levels
|
editor/migrations/0043_auto_20160303_1138.py
|
editor/migrations/0043_auto_20160303_1138.py
|
Python
| 0
|
@@ -0,0 +1,389 @@
+# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('editor', '0042_remove_comment_date'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterModelOptions(%0A name='abilitylevel',%0A options=%7B'ordering': ('framework', 'start')%7D,%0A ),%0A %5D%0A
|
|
65d7e81510980d85af5b52504e6d98e45943cc36
|
Create getdata.py
|
python_flask/public_html/nuotiovahti/nuotiovahti/getdata.py
|
python_flask/public_html/nuotiovahti/nuotiovahti/getdata.py
|
Python
| 0.000002
|
@@ -0,0 +1,1081 @@
+import paho.mqtt.client as mqtt%0Aimport mysql.connector%0Afrom flask import Flask, jsonify, json, request%0A%0Aapp = Flask(__name__)%0Aapp.route(%22/%22)%0Awith app.app_context():%0A%0A%0A%09def fetchfrombase():%0A%09%09try:%0A%09%09 cnx = mysql.connector.connect(option_files='/home/mint/connectors.cnf')%0A%09%09except mysql.connector.Error as err:%0A%09%09 if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:%0A%09%09 print(%22Something is wrong with your user name or password%22)%0A%09%09 elif err.errno == errorcode.ER_BAD_DB_ERROR:%0A%09%09 print(%22Database does not exist%22)%0A%09%09 else:%0A%09%09 print(err)%0A%09%09else:%0A%09%09 print(%22connected to database%22)%0A%0A%0A%09%09cursor = cnx.cursor()%0A%09%09print(%22selecting%22)%0A%0A%0A%09%09cursor.execute(%22select (select count(f.detection_time) from Flame as f where detection_time %3Enow()- INTERVAL 300 SECOND) as flame, (select count(p.detection_time) from Pir as p where detection_time %3Enow()- INTERVAL 300 SECOND) as pir;%22)%0A%09%0A%09%09datarows = jsonify(cursor.fetchall) %0A%09%0A%09%09print (datarows)%0A%0A%09%09cnx.close()%0A%09%0A%09fetchfrombase()%0A%0Aif __name__ == %22__name__%22:%0A app.run()%0A
|
|
26fcd91313b15ee2426aec36817a3f29734f4b93
|
add diagonal gaussian demo
|
examples/demo-diaggaussian.py
|
examples/demo-diaggaussian.py
|
Python
| 0.000008
|
@@ -0,0 +1,1652 @@
+from __future__ import division%0Aimport numpy as np%0Anp.seterr(invalid='raise')%0Afrom matplotlib import pyplot as plt%0Aimport copy%0A%0Afrom pybasicbayes import models, distributions%0Afrom pybasicbayes.util.text import progprint_xrange%0A%0Aalpha_0=5.0%0Aobs_hypparams=dict(%0A mu_0=np.zeros(2),%0A alphas_0=2*np.ones(2),%0A betas_0=np.ones(2),%0A nus_0=0.1*np.ones(2))%0A%0Apriormodel = models.Mixture(alpha_0=alpha_0,%0A components=%5Bdistributions.DiagonalGaussian(**obs_hypparams) for itr in range(30)%5D)%0A%0Adata, _ = priormodel.generate(500)%0A%0Aplt.figure()%0Apriormodel.plot()%0Aplt.title('true model')%0A%0Adel priormodel%0A%0Aplt.figure()%0Aplt.plot(data%5B:,0%5D,data%5B:,1%5D,'kx')%0Aplt.title('data')%0A%0Aposteriormodel = models.Mixture(alpha_0=alpha_0,%0A components=%5Bdistributions.DiagonalGaussian(**obs_hypparams) for itr in range(30)%5D)%0A%0Aposteriormodel.add_data(data)%0A%0Aallscores = %5B%5D%0Aallmodels = %5B%5D%0Afor superitr in range(5):%0A # Gibbs sampling to wander around the posterior%0A print 'Gibbs Sampling'%0A for itr in progprint_xrange(100):%0A posteriormodel.resample_model()%0A%0A # mean field to lock onto a mode%0A print 'Mean Field'%0A scores = %5Bposteriormodel.meanfield_coordinate_descent_step()%0A for itr in progprint_xrange(100)%5D%0A%0A allscores.append(scores)%0A allmodels.append(copy.deepcopy(posteriormodel))%0A%0Aplt.figure()%0Afor scores in allscores:%0A plt.plot(scores)%0Aplt.title('model vlb scores vs iteration')%0A%0Aimport operator%0Amodels_and_scores = sorted(%5B(m,s%5B-1%5D) for m,s%0A in zip(allmodels,allscores)%5D,key=operator.itemgetter(1),reverse=True)%0A%0Aplt.figure()%0Amodels_and_scores%5B0%5D%5B0%5D.plot()%0Aplt.title('best model')%0A%0Aplt.show()%0A
|
|
f48535102b6f71ba802e9b656c73cdd3ec746a3b
|
Add the test_repeat_layer.py
|
python/paddle/trainer_config_helpers/tests/configs/test_repeat_layer.py
|
python/paddle/trainer_config_helpers/tests/configs/test_repeat_layer.py
|
Python
| 0.00104
|
@@ -0,0 +1,312 @@
+from paddle.trainer_config_helpers import *%0A%0Asettings(batch_size=1000, learning_rate=1e-5)%0A%0Adin = data_layer(name='data', size=30)%0A%0Aoutputs(%0A repeat_layer(%0A input=din, num_repeats=10, as_row_vector=True),%0A repeat_layer(%0A input=din, num_repeats=10, act=TanhActivation(), as_row_vector=False))%0A
|
|
aa5c8164b26c388b6a3a1efe8ea402a63a1c7ae8
|
add migrations file
|
django_db_meter/migrations/0003_testmodel.py
|
django_db_meter/migrations/0003_testmodel.py
|
Python
| 0.000001
|
@@ -0,0 +1,834 @@
+# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import models, migrations%0Afrom django.conf import settings%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A migrations.swappable_dependency(settings.AUTH_USER_MODEL),%0A ('django_db_meter', '0002_appwiseaggregatedmetric_dbwiseaggregatedmetric_tablewiseaggregatedmetric'),%0A %5D%0A%0A operations = %5B%0A migrations.CreateModel(%0A name='TestModel',%0A fields=%5B%0A ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),%0A ('field1', models.CharField(max_length=10)),%0A ('filed2', models.BooleanField(default=False)),%0A ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),%0A %5D,%0A ),%0A %5D%0A
|
|
4db578f728a1eeda337f642513c57814fa9ec855
|
create module to save script to s3 bucket
|
session2s3.py
|
session2s3.py
|
Python
| 0
|
@@ -0,0 +1,622 @@
+%22%22%22%0ASave session to S3 bucket. Ex: ses2s3.workspace_to_s3('my-project-script')%0A%22%22%22%0Afrom datetime import datetime%0Aimport re%0A%0Aimport boto3%0Aimport dill%0A%0Adef session_to_s3(prefix, bucket_name, timestamp=True):%0A %22%22%22Save session to S3 bucket. Login via ~/.aws/credentials as per boto3.%22%22%22%0A if timestamp:%0A now_str = str(datetime.now())%0A date_time_str = re.sub('%5B%5E0-9a-zA-Z%5D+', '_', now_str)%0A filename = prefix + %22_%22 + date_time_str + %22.pkl%22%0A else:%0A filename = prefix + %22.pkl%22%0A dill.dump_session(filename)%0A s3 = boto3.resource('s3')%0A s3.meta.client.upload_file(filename, bucket_name, filename)%0A return filename%0A
|
|
c176d9e1ac63debeb449eff704164712a67bcd3b
|
remove .bam
|
direct/src/directtools/DirectGrid.py
|
direct/src/directtools/DirectGrid.py
|
from pandac.PandaModules import *
from direct.showbase.DirectObject import DirectObject
from DirectUtil import *
from DirectGeometry import *


class DirectGrid(NodePath, DirectObject):
    def __init__(self,gridSize=100.0,gridSpacing=5.0,planeColor=(0.5,0.5,0.5,0.5),parent = None):
        # Initialize superclass
        NodePath.__init__(self, 'DirectGrid')
        # Don't wireframe or light
        useDirectRenderStyle(self)

        # Load up grid parts to initialize grid object
        # Polygon used to mark grid plane
        self.gridBack = loader.loadModel('models/misc/gridBack.bam')
        self.gridBack.reparentTo(self)
        self.gridBack.setColor(*planeColor)

        # Grid Lines
        self.lines = self.attachNewNode('gridLines')
        self.minorLines = LineNodePath(self.lines)
        self.minorLines.lineNode.setName('minorLines')
        self.minorLines.setColor(VBase4(0.3, 0.55, 1, 1))
        self.minorLines.setThickness(1)

        self.majorLines = LineNodePath(self.lines)
        self.majorLines.lineNode.setName('majorLines')
        self.majorLines.setColor(VBase4(0.3, 0.55, 1, 1))
        self.majorLines.setThickness(5)

        self.centerLines = LineNodePath(self.lines)
        self.centerLines.lineNode.setName('centerLines')
        self.centerLines.setColor(VBase4(1, 0, 0, 0))
        self.centerLines.setThickness(3)

        # Small marker to hilight snap-to-grid point
        self.snapMarker = loader.loadModel('models/misc/sphere.bam')
        self.snapMarker.node().setName('gridSnapMarker')
        self.snapMarker.reparentTo(self)
        self.snapMarker.setColor(1, 0, 0, 1)
        self.snapMarker.setScale(0.3)
        self.snapPos = Point3(0)

        # Initialize Grid characteristics
        self.fXyzSnap = 1
        self.fHprSnap = 1
        self.gridSize = gridSize
        self.gridSpacing = gridSpacing
        self.snapAngle = 15.0
        self.enable(parent = parent)

    def enable(self, parent = None):
        if parent:
            self.reparentTo(parent)
        else:
            self.reparentTo(base.direct.group)
        self.updateGrid()
        self.fEnabled = 1

    def disable(self):
        self.detachNode()
        self.fEnabled = 0

    def toggleGrid(self):
        if self.fEnabled:
            self.disable()
        else:
            self.enable()

    def isEnabled(self):
        return self.fEnabled

    def updateGrid(self):
        # Update grid lines based upon current grid spacing and grid size
        # First reset existing grid lines
        self.minorLines.reset()
        self.majorLines.reset()
        self.centerLines.reset()

        # Now redraw lines
        numLines = int(math.ceil(self.gridSize/self.gridSpacing))
        scaledSize = numLines * self.gridSpacing
        center = self.centerLines
        minor = self.minorLines
        major = self.majorLines
        for i in range(-numLines, numLines + 1):
            if i == 0:
                center.moveTo(i * self.gridSpacing, -scaledSize, 0)
                center.drawTo(i * self.gridSpacing, scaledSize, 0)
                center.moveTo(-scaledSize, i * self.gridSpacing, 0)
                center.drawTo(scaledSize, i * self.gridSpacing, 0)
            else:
                if (i % 5) == 0:
                    major.moveTo(i * self.gridSpacing, -scaledSize, 0)
                    major.drawTo(i * self.gridSpacing, scaledSize, 0)
                    major.moveTo(-scaledSize, i * self.gridSpacing, 0)
                    major.drawTo(scaledSize, i * self.gridSpacing, 0)
                else:
                    minor.moveTo(i * self.gridSpacing, -scaledSize, 0)
                    minor.drawTo(i * self.gridSpacing, scaledSize, 0)
                    minor.moveTo(-scaledSize, i * self.gridSpacing, 0)
                    minor.drawTo(scaledSize, i * self.gridSpacing, 0)
        center.create()
        minor.create()
        major.create()
        if (self.gridBack):
            self.gridBack.setScale(scaledSize)

    def setXyzSnap(self, fSnap):
        self.fXyzSnap = fSnap

    def getXyzSnap(self):
        return self.fXyzSnap

    def setHprSnap(self, fSnap):
        self.fHprSnap = fSnap

    def getHprSnap(self):
        return self.fHprSnap

    def computeSnapPoint(self, point):
        # Start of with current point
        self.snapPos.assign(point)
        # Snap if necessary
        if self.fXyzSnap:
            self.snapPos.set(
                ROUND_TO(self.snapPos[0], self.gridSpacing),
                ROUND_TO(self.snapPos[1], self.gridSpacing),
                ROUND_TO(self.snapPos[2], self.gridSpacing))
        # Move snap marker to this point
        self.snapMarker.setPos(self.snapPos)
        # Return the hit point
        return self.snapPos

    def computeSnapAngle(self, angle):
        return ROUND_TO(angle, self.snapAngle)

    def setSnapAngle(self, angle):
        self.snapAngle = angle

    def getSnapAngle(self):
        return self.snapAngle

    def setGridSpacing(self, spacing):
        self.gridSpacing = spacing
        self.updateGrid()

    def getGridSpacing(self):
        return self.gridSpacing

    def setGridSize(self, size):
        # Set size of grid back and redraw lines
        self.gridSize = size
        self.updateGrid()

    def getGridSize(self):
        return self.gridSize
|
Python
| 0.000131
|
@@ -585,20 +585,16 @@
gridBack
-.bam
')%0A
@@ -1480,12 +1480,8 @@
here
-.bam
')%0A
|
730c8bf23dbd687b3070eae58378ebcccf523736
|
add 'split' filter
|
filter_plugins/split.py
|
filter_plugins/split.py
|
Python
| 0.000009
|
@@ -0,0 +1,224 @@
+class FilterModule(object):%0A ''' A comment '''%0A%0A def filters(self):%0A return %7B%0A 'split': self.split,%0A %7D%0A%0A def split(self, input_value, separator):%0A return input_value.split(separator)%0A
|
|
938a6fabbc67feb409f6874966b30cb5c3e927a6
|
Create myotpsecrets.py
|
app/myotpsecrets.py
|
app/myotpsecrets.py
|
Python
| 0.000015
|
@@ -0,0 +1,199 @@
+ttp_user = 'admin'%0Ahttp_pass = 'admin'%0Acodes = %7B%0A%09'account1': 'pefjehegNusherewSunaumIcwoafIfyi',%0A%09'account2': 'memJarrIfomWeykvajLyutIkJeafcoyt',%0A%09'account3': 'rieshjaynEgDoipEjkecPopHiWighath',%0A%7D%0A%0A
|
|
eb396c12cccbda03a46381b5a54ff55d8f876152
|
Fix NameError
|
vdirsyncer/__init__.py
|
vdirsyncer/__init__.py
|
# -*- coding: utf-8 -*-
'''
vdirsyncer is a synchronization tool for vdir. See the README for more details.
'''

from __future__ import print_function

try:
    from .version import version as __version__  # noqa
except ImportError:  # pragma: no cover
    raise ImportError(
        'Failed to find (autogenerated) version.py. '
        'This might be because you are installing from GitHub\'s tarballs, '
        'use the PyPI ones.'
    )


def _detect_faulty_requests():  # pragma: no cover
    import requests

    if 'dist-packages' not in requests.__file__:
        return

    text = (
        '{e}\n\n'
        'This most likely means you are running into a bug specific to '
        'Debian-based distributions.\n\n'
        'Consult {d}/problems.html#requests-related-importerrors-on-debian'
        '-based-distributions on how to deal with this, or use a different '
        'operating system.'
    )

    try:
        from requests_toolbelt.auth.guess import GuessAuth  # noqa
    except ImportError as e:
        import sys
        print(text.format(e=str(e), d=DOCS_HOME), file=sys.stderr)
        sys.exit(1)


_detect_faulty_requests()


PROJECT_HOME = 'https://github.com/untitaker/vdirsyncer'
DOCS_HOME = 'https://vdirsyncer.readthedocs.org/en/stable'
|
Python
| 0
|
@@ -145,16 +145,133 @@
nction%0A%0A
+PROJECT_HOME = 'https://github.com/untitaker/vdirsyncer'%0ADOCS_HOME = 'https://vdirsyncer.readthedocs.org/en/stable'%0A%0A
try:%0A
@@ -1236,17 +1236,16 @@
xit(1)%0A%0A
-%0A
_detect_
@@ -1266,122 +1266,4 @@
s()%0A
-%0A%0APROJECT_HOME = 'https://github.com/untitaker/vdirsyncer'%0ADOCS_HOME = 'https://vdirsyncer.readthedocs.org/en/stable'%0A
|
0e3711000bcf7d59f75baa68f357f49f5246f812
|
Add video capturing functionality
|
humblemedia/resources/utils/video_capture.py
|
humblemedia/resources/utils/video_capture.py
|
Python
| 0
|
@@ -0,0 +1,1379 @@
+import subprocess%0Aimport re%0A%0Adef get_video_duration(filename):%0A # returns duration in seconds%0A command = 'ffmpeg -i %25s 2%3E&1 %7C grep %22Duration%22' %25 filename%0A result = subprocess.Popen(command,%0A stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)%0A stdout_lines = result.stdout.readlines()%0A duration_line = stdout_lines%5B0%5D.decode()%0A match = re.match(r'%5Cs*Duration:%5Cs*(?P%3Chours%3E%5Cd+):(?P%3Cminutes%3E%5Cd+):(?P%3Cseconds%3E%5Cd+)', duration_line)%0A if not match:%0A raise 'Invalid video file'%0A%0A groups = match.groupdict()%0A%0A hours = int(groups.get('hours'))%0A minutes = int(groups.get('minutes'))%0A seconds = int(groups.get('seconds'))%0A%0A return hours * 3600 + (minutes * 60) + seconds%0A%0Adef get_video_capture(filename, at_second, output_file):%0A command = 'ffmpeg -ss %25s -i %25s -vframes 1 %25s' %25 (at_second, filename, output_file)%0A subprocess.Popen(command, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, shell=True)%0A%0Adef get_random_video_captures(filename, count, output_files):%0A INITIAL_CAPTURE_SECOND = 5%0A duration = get_video_duration(filename)%0A capture_window = (duration - INITIAL_CAPTURE_SECOND) / count%0A capture_tuples = zip(range(INITIAL_CAPTURE_SECOND, duration, int(capture_window)), output_files)%0A for (at_second, output_file) in capture_tuples:%0A get_video_capture(filename, at_second, output_file)%0A
|
|
ff63bb34aaf01cd9cd7eff89c0c94135f896640f
|
Create mqtt_easydriver_stepper.py
|
linkit/easydriver/mqtt_easydriver_stepper.py
|
linkit/easydriver/mqtt_easydriver_stepper.py
|
Python
| 0.000002
|
@@ -0,0 +1,2014 @@
+import paho.mqtt.client as mqtt%0Aimport json, time%0Aimport mraa%0A%0Apin19 = mraa.Pwm(19)%0Apin0 = mraa.Gpio(0) %0Apin0.dir(mraa.DIR_OUT)%0A%0A# ----- CHANGE THESE FOR YOUR SETUP -----%0AMQTT_HOST = %22190.97.168.236%22%0AMQTT_PORT = 1883%0A%0Adef on_connect(client, userdata, rc):%0A print(%22%5CnConnected with result code %22 + str(rc) + %22%5Cn%22)%0A%0A #Subscribing in on_connect() means that if we lose the connection and%0A # reconnect then subscriptions will be renewed.%0A #client.subscribe(%22/pyxo/xyusers/%7BUSERNAME%7D/%7BAPIKEY%7D/iot/control/%22.format(**vars()), 2) # Connect to everything in /mcu topic%0A client.subscribe(%22/pryxo/yxusers/motor/control/%22) %0A print(%22Subscribed to homecontrol%22)%0A%0A%0Adef on_message_iotrl(client, userdata, msg):%0A print(%22%5Cn%5Ct* Linkit UPDATED (%22+msg.topic+%22): %22 + str(msg.payload))%0A if msg.payload == %22m1%22:%0A%09pin0.write(0)%0A%09pin1 = mraa.Gpio(1)%0A%09pin1.dir(mraa.DIR_OUT)%0A%09pin1.write(0)%0A%09pin19.period_us(300) %0A%09pin19.enable(True)%0A%09pin19.write(0.1) %0A%09time.sleep(2) %0A client.publish(%22/pryxo/yxusers/iot/status/%22, %22derecha%22, 2)%0A if msg.payload == %22m0%22:%0A%09pin1 = mraa.Gpio(1)%0A%09pin1.dir(mraa.DIR_OUT)%0A%09pin1.write(1)%0A%09pin19.period_us(300) %0A%09pin19.enable(True)%0A%09pin19.write(0.1) %0A%09time.sleep(2) %0A client.publish(%22/pryxo/yxusers/iot/status/%22, %22izquierda%22, 2)%0A if msg.payload == %22m2%22: %0A%09pin0.write(1) %0A%09client.publish(%22/pryxo/yxusers/iot/status/%22, %22STOP%22, 2) %0Adef command_error():%0A print(%22Error: Unknown command%22)%0A%0Aclient = mqtt.Client(client_id=%22linkit7688-patio%22)%0A%0A# Callback declarations (functions run based on certain messages)%0Aclient.on_connect = on_connect%0Aclient.message_callback_add(%22/pryxo/yxusers/motor/control/%22, on_message_iotrl)%0A%0A# This is where the MQTT service connects and starts listening for messages%0Aclient.connect(MQTT_HOST, MQTT_PORT, 60)%0Aclient.loop_start() # Background thread to call loop() automatically%0A%0A# Main program loop%0Awhile True:%0A time.sleep(10) %0A
|
|
2ae6f4183b2096287f8155d7db7e2ed0444618c4
|
Add first version of Day One entry splitter
|
day_one_entry_splitter.py
|
day_one_entry_splitter.py
|
Python
| 0
|
@@ -0,0 +1,605 @@
+#!/usr/bin/env python%0A# encoding: utf-8%0A%22%22%22%0Aday_one_entry_splitter.py%0A%0ACreated by Rod Docking on 2017-01-01.%0AAll rights reserved.%0A%22%22%22%0A%0Aimport sys%0A%0A%0Adef main():%0A %22%22%22Split entries from Day One export into separate files%22%22%22%0A%0A # Entry headers look like:%0A # %22Date:%09February 14, 2005 at 9:00 AM%22%0A # Need to:%0A # Loop through all the lines in the input file%0A # When we hit a new date, open a new file with approriate name%0A with open(sys.argv%5B1%5D) as in_handle:%0A for line in in_handle:%0A if %22Date:%22 in line:%0A print line%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
87e590c56a68871b1430d71704f303d38fc19e61
|
Generate Pascal Triangle
|
PascalsTriangle.py
|
PascalsTriangle.py
|
Python
| 0.999999
|
@@ -0,0 +1,1382 @@
+#!/usr/bin/env python%0A# PascalsTriangle.py%0A# Author: Lijuan Marissa Zhou%0A# CreatedAt: 10/10/2014%0A%0A%22%22%22Interesting play with Pascals Triangle Problem in Python.%22%22%22%0A%0Aclass PascalsTriangle:%0A    %22%22%22%0A    Class of PascalsTriangle%0A    %22%22%22%0A    def __init__(self):%0A        self.data = %5B%5D%0A%0A    def generate(self, n):%0A        %22%22%22%0A        Given numRows, generate the first numRows of Pascal's triangle.%0A%0A        :param n: number of layers of pascal's triangle%0A        :type n: number %0A        :return: a list of lists of integers%0A        :rtype: list %0A        %22%22%22%0A        if (n %3C 0):%0A            return None%0A        if (n == 0):%0A            return %5B%5D %0A        self.data = %5B%5B1%5D%5D%0A        for i in xrange(1, n):%0A            rownum = i + 1%0A            rowi = %5B1%5D + %5B0%5D*(rownum-2)+%5B1%5D%0A            mid = rownum/2 + rownum%252%0A            for j in xrange(1, mid):%0A                val = self.data%5Bi-1%5D%5Bj-1%5D + self.data%5Bi-1%5D%5Bj%5D%0A                rowi%5Bj%5D = val%0A                rowi%5Brownum-j-1%5D = val%0A            self.data.append(rowi)%0A        return self.data%0A%0A    def printData(self):%0A        %22%22%22%0A        print data%0A        %22%22%22%0A        print self.data%0A%0A#################################################%0A# test                                          # %0A#################################################%0A%0Aif __name__ == %22__main__%22:%0A    pt = PascalsTriangle()%0A    pt.generate(5)%0A    pt.printData()%0A
|
|
d73070f268e240439c71ffd193a18c477403dd2e
|
Add project model class
|
clowder/project.py
|
clowder/project.py
|
Python
| 0
|
@@ -0,0 +1,166 @@
+import argparse%0Aimport sys%0A%0Aclass Project(object):%0A%0A def __init__(self, name, path, url):%0A self.name = name%0A self.path = path%0A self.url = url%0A
|
|
dd708956ed19a38be09597cae94172e0b9863623
|
Add signing thanks @jmcarp
|
waterbutler/signing.py
|
waterbutler/signing.py
|
Python
| 0
|
@@ -0,0 +1,1934 @@
+# encoding: utf-8%0A%0Aimport hmac%0Aimport json%0Aimport base64%0Aimport collections%0A%0Afrom waterbutler import settings%0A%0A%0A# Written by @jmcarp originally%0Adef order_recursive(data):%0A %22%22%22Recursively sort keys of input data and all its nested dictionaries.%0A Used to ensure consistent ordering of JSON payloads.%0A %22%22%22%0A if isinstance(data, dict):%0A return collections.OrderedDict(%0A sorted(%0A (%0A (key, order_recursive(value))%0A for key, value in data.items()%0A ),%0A key=lambda item: item%5B0%5D%0A )%0A )%0A if isinstance(data, list):%0A return %5B%0A order_recursive(value)%0A for value in data%0A %5D%0A return data%0A%0A%0Adef serialize_payload(payload):%0A ordered = order_recursive(payload)%0A return base64.b64encode(json.dumps(ordered).encode('UTF-8'))%0A%0A%0Adef unserialize_payload(message):%0A payload = json.loads(base64.b64decode(message))%0A return order_recursive(payload)%0A%0A%0Aclass Signer(object):%0A%0A def __init__(self, secret, digest):%0A assert callable(digest)%0A self.secret = secret%0A self.digest = digest%0A%0A def sign_message(self, message):%0A return hmac.new(%0A key=self.secret,%0A digestmod=self.digest,%0A msg=message,%0A ).hexdigest()%0A%0A def sign_payload(self, payload):%0A message = serialize_payload(payload)%0A signature = self.sign_message(message)%0A return message, signature%0A%0A def verify_message(self, signature, message):%0A expected = self.sign_message(message)%0A return signature == expected%0A%0A def verify_payload(self, signature, payload):%0A _, expected = self.sign_payload(payload)%0A return signature == expected%0A%0A%0A# default_signer = Signer(settings.DEFAULT_HMAC_KEY, settings.DEFAULT_HMAC_ALGORITHM)%0Aosf_signer = Signer(settings.OSF_HMAC_SECRET, settings.OSF_HMAC_ALGORITHM)%0A
|
|
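A short usage sketch for the Signer class above; the secret below is illustrative, not a real waterbutler setting:

import hashlib
# 'not-a-real-secret' is a placeholder; real values come from waterbutler.settings
signer = Signer(b'not-a-real-secret', hashlib.sha256)
message, signature = signer.sign_payload({'action': 'upload', 'size': 123})
assert signer.verify_payload(signature, {'action': 'upload', 'size': 123})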
cad438214ec55684bfc7d5f1d5383109934f29ff
|
add weboob.tools.application.prompt.PromptApplication
|
weboob/tools/application/prompt.py
|
weboob/tools/application/prompt.py
|
Python
| 0.000001
|
@@ -0,0 +1,1867 @@
+# -*- coding: utf-8 -*-%0A%0A%22%22%22%0ACopyright(C) 2010 Romain Bignon%0A%0AThis program is free software; you can redistribute it and/or modify%0Ait under the terms of the GNU General Public License as published by%0Athe Free Software Foundation, version 3 of the License.%0A%0AThis program is distributed in the hope that it will be useful,%0Abut WITHOUT ANY WARRANTY; without even the implied warranty of%0AMERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the%0AGNU General Public License for more details.%0A%0AYou should have received a copy of the GNU General Public License%0Aalong with this program; if not, write to the Free Software%0AFoundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.%0A%0A%22%22%22%0A%0Aimport sched%0Aimport time%0Aimport select%0Aimport sys%0A%0Afrom weboob import Weboob%0Afrom weboob.scheduler import Scheduler%0A%0Afrom .console import ConsoleApplication%0A%0Aclass PromptScheduler(Scheduler):%0A def __init__(self, prompt_cb, read_cb):%0A self.scheduler = sched.scheduler(time.time, self.sleep)%0A self.read_cb = read_cb%0A self.prompt_cb = prompt_cb%0A%0A def sleep(self, d):%0A self.prompt_cb()%0A try:%0A read, write, excepts = select.select(%5Bsys.stdin%5D, %5B%5D, %5B%5D, d or None)%0A if read:%0A line = sys.stdin.readline()%0A if not line:%0A self.want_stop()%0A else:%0A self.read_cb(line.strip())%0A except KeyboardInterrupt:%0A sys.stdout.write('%5Cn')%0A%0Aclass PromptApplication(ConsoleApplication):%0A def create_weboob(self):%0A return Weboob(self.APPNAME, scheduler=PromptScheduler(self.prompt, self.read_cb))%0A%0A def prompt(self):%0A sys.stdout.write('%3E ')%0A sys.stdout.flush()%0A%0A def loop(self):%0A self.weboob.loop()%0A%0A def read_cb(self, line):%0A line = line.split()%0A self.process_command(*line)%0A
|
|
1b3d7078a4ca91ef07f90d1645f26761d1f7abac
|
Add example of using lower-level plotting methods directly
|
examples/scatter.py
|
examples/scatter.py
|
Python
| 0.000001
|
@@ -0,0 +1,792 @@
+%22%22%22%0AExample of how %60ax.scatter%60 can be used to plot linear data on a stereonet%0Avarying color and/or size by other variables.%0A%0AThis also serves as a general example of how to convert orientation data into%0Athe coordinate system that the stereonet plot uses so that generic matplotlib%0Aplotting methods may be used.%0A%22%22%22%0Aimport numpy as np%0Aimport matplotlib.pyplot as plt%0Aimport mplstereonet%0Anp.random.seed(1)%0A%0Astrikes = np.arange(0, 360, 15)%0Adips = 45 * np.ones(strikes.size)%0Amagnitude = np.random.random(strikes.size)%0A%0A# Convert our strikes and dips to stereonet coordinates%0Alons, lats = mplstereonet.pole(strikes, dips)%0A%0A# Now we'll plot our data and color by magnitude%0Afig, ax = mplstereonet.subplots()%0Asm = ax.scatter(lons, lats, c=magnitude, s=50, cmap='gist_earth')%0A%0Aax.grid()%0Aplt.show()%0A%0A%0A
|
|
7383cc2a4b6ad21c747794dbb3d33338d8eea528
|
Add another example.
|
examples/tor-irc.py
|
examples/tor-irc.py
|
Python
| 0.000001
|
@@ -0,0 +1,1859 @@
+# Copyright (c) Aaron Gallagher %3C_@habnab.it%3E%0A# See COPYING for details.%0A%0Afrom twisted.internet.defer import Deferred%0Afrom twisted.internet.endpoints import TCP4ClientEndpoint%0Afrom twisted.internet.protocol import ClientFactory%0Afrom twisted.internet.task import react%0Afrom twisted.words.protocols.irc import IRCClient%0Afrom twisted.protocols.policies import SpewingFactory%0A%0Afrom txsocksx.client import SOCKS5ClientEndpoint%0A%0A%0Aclass CouldNotIRCError(Exception):%0A pass%0A%0A%0Aclass TorIRC(IRCClient):%0A nickname = 'txsocksx-tor-irc'%0A nickservPassword = ''%0A%0A def connectionMade(self):%0A self.sendLine('CAP REQ :sasl')%0A self.deferred = Deferred()%0A IRCClient.connectionMade(self)%0A%0A def irc_CAP(self, prefix, params):%0A if params%5B1%5D != 'ACK' or params%5B2%5D.split() != %5B'sasl'%5D:%0A self.deferred.errback(CouldNotIRCError('sasl not available'))%0A sasl = ('%7B0%7D%5C0%7B0%7D%5C0%7B1%7D'.format(self.nickname, self.nickservPassword)).encode('base64').strip()%0A self.sendLine('AUTHENTICATE PLAIN')%0A self.sendLine('AUTHENTICATE ' + sasl)%0A%0A def irc_903(self, prefix, params):%0A self.sendLine('CAP END')%0A%0A def irc_904(self, prefix, params):%0A self.deferred.errback(CouldNotIRCError('sasl auth failed', params))%0A irc_905 = irc_904%0A%0A def connectionLost(self, reason):%0A self.deferred.errback(reason)%0A%0A def signedOn(self):%0A print 'signed on successfully'%0A self.quit('')%0A%0A%0Aclass TorIRCFactory(ClientFactory):%0A protocol = TorIRC%0A%0A%0Adef main(reactor):%0A torEndpoint = TCP4ClientEndpoint(reactor, '127.0.0.1', 9050)%0A # freenode's tor endpoint%0A ircEndpoint = SOCKS5ClientEndpoint('lgttsalmpw3qo4no.onion', 6667, torEndpoint)%0A d = ircEndpoint.connect(SpewingFactory(TorIRCFactory()))%0A d.addCallback(lambda proto: proto.wrappedProtocol.deferred)%0A return d%0A%0Areact(main, %5B%5D)%0A
|
|
7a3e85231efeb5c03cab944f6da346d138f6fcb1
|
Add tests for pips
|
test/test_pips.py
|
test/test_pips.py
|
Python
| 0.000001
|
@@ -0,0 +1,259 @@
+import pytest%0A%0A@pytest.mark.parametrize(%22name%22, %5B%0A (%22awscli%22),%0A (%22boto3%22),%0A (%22docker-py%22),%0A (%22GitPython%22),%0A (%22mkdocs%22),%0A (%22pep8%22),%0A (%22virtualenv%22),%0A (%22virtualenvwrapper%22),%0A%5D)%0A%0Adef test_pips(host, name):%0A assert name in host.pip_package.get_packages()
|
|
8b4d27851889bccc87392b14557ce63d3f95e426
|
add build.py
|
build.py
|
build.py
|
Python
| 0.000001
|
@@ -0,0 +1,1954 @@
+#!/usr/bin/python%0Aimport glob%0Aimport gzip%0Aimport os%0Aimport platform%0Aimport re%0Aimport sh%0Aimport shutil%0Aimport subprocess%0Aimport sys%0Aimport time%0Afrom optparse import OptionParser%0A%0Alog = lambda *a: None%0A%0Adef VerbosePrint(*args):%0A # Print each argument separately so caller doesn't need to%0A # stuff everything to be printed into a single string%0A for arg in args:%0A print arg,%0A print%0A%0A%0Aclass ShError(Exception):%0A def __init__(self, value):%0A self.value = value%0A def __str__(self):%0A return repr(self.value)%0A%0A%0Adef BuildUnityPlugin():%0A p = platform.system().lower()%0A mdtool_path = %22mdtool%22 if not p == 'darwin' else %22/Applications/Unity/MonoDevelop.app/Contents/MacOS/mdtool%22%0A cmd = sh.Command(mdtool_path)%0A result = cmd.build(%22HappyFunTimes.sln%22)%0A if result.exit_code:%0A raise ShError(result)%0A log(result)%0A%0A%0Adef CopyFiles(files):%0A for file in files:%0A src = file%5B%22src%22%5D%0A dst = file%5B%22dst%22%5D%0A log(%22copy%22, src, %22-%3E%22, dst)%0A%0A%0Adef main(argv):%0A %22%22%22This is the main function.%22%22%22%0A parser = OptionParser()%0A parser.add_option(%0A %22-v%22, %22--verbose%22, action=%22store_true%22,%0A help=%22verbose%22)%0A%0A (options, args) = parser.parse_args(args=argv)%0A%0A if options.verbose:%0A global log%0A log = VerbosePrint%0A%0A files = %5B%0A %7B %22src%22: 'HappyFunTimes/bin/Release/DeJson.dll',%0A %22dst%22: 'HFTPlugin/Assets/Plugins/DeJson.dll',%0A %7D,%0A %7B %22src%22: 'HappyFunTimes/bin/Release/HappyFunTimes.dll',%0A %22dst%22: 'HFTPlugin/Assets/Plugins/HappyFunTimes.dll',%0A %7D,%0A %7B %22src%22: 'HappyFunTimes/bin/Release/websocket-sharp.dll',%0A %22dst%22: 'HFTPlugin/Assets/Plugins/websocket-sharp.dll',%0A %7D,%0A %7B %22src%22: 'Extra/HFTRunner.cs',%0A %22dst%22: 'HFTPlugin/Assets/Plugins/HFTExtra.dll',%0A %7D,%0A %7B %22src%22: 'HappyFunTimeEditor/bin/Release/HappyFunTimesEditor.dll',%0A %22dst%22: 'HFTPlugin/Assets/Plugins/Editor/HappyFunTimesEditor.dll',%0A %7D%0A %5D%0A%0A%0A BuildUnityPlugin()%0A CopyFiles(files)%0A%0Aif __name__==%22__main__%22:%0A main(sys.argv%5B1:%5D)%0A%0A%0A%0A%0A
|
|
4a97d5b9f9998a5b8ca8509547dabf8d757e70d9
|
Add build script.
|
build.py
|
build.py
|
Python
| 0
|
@@ -0,0 +1,421 @@
+import version%0A%0Aprint %22Reading gitmake.py...%22%0Awith open('gitmake.py') as fp:%0A    lines = fp.readlines()%0A%0Aprint %22Rewriting gitmake.py...%22%0Awith open('gitmake.py', 'w') as fp:%0A    for line in lines:%0A        if line.startswith('version_info ='):%0A            fp.write('version_info = (%25d,%25d,%25d,%5C'%25s%5C')%5Cn' %25 (version.major, version.minor, version.patch, version.branch))%0A        else:%0A            fp.write(line)%0A%0Aprint %22Done!%22%0A
|
|
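build.py above imports a sibling version module; a minimal version.py satisfying it could look like this (all values are placeholders, not from the commit):

# version.py -- stand-in exposing the attributes build.py reads
major = 1
minor = 0
patch = 0
branch = 'master'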
6dabd92990df570d81a621e51d7119345671d4c0
|
Create Neopixel_Serial.py (#43)
|
home/moz4r/Neopixel_Serial.py
|
home/moz4r/Neopixel_Serial.py
|
Python
| 0
|
@@ -0,0 +1,493 @@
+#Just a poc maybe there is a best method%0A#Flash Neopixel_MRL.ino%0Aimport time%0Aserial = Runtime.createAndStart(%22serial%22,%22Serial%22)%0ARuntime.createAndStart(%22mouth%22, %22AcapelaSpeech%22)%0Aserial.connect(%22COM7%22, 9600, 8, 1, 0)%0Asleep(5)%0Amouth.speak(%22Hi everybody this is neo pixel ring controled by my robot lab%22)%0Asleep(3)%0Amouth.speak(%22Fire, It burn a lot%22)%0Aserial.write(2) %0Asleep(6)%0Amouth.speak(%22Hello jarvis%22)%0Aserial.write(3)%0Asleep(6)%0Amouth.speak(%22I am a cylon%22)%0Aserial.write(1)%0Asleep(6)%0Aserial.write(9)%0A
|
|
86ae30203475a2ac718cf3839e38522e8e1aa203
|
Add tests package #5
|
tests/__init__.py
|
tests/__init__.py
|
Python
| 0
|
@@ -0,0 +1,628 @@
+#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A%0A#%0A# Copyright 2017 Jun-ya HASEBA%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22);%0A# you may not use this file except in compliance with the License.%0A# You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A#%0A
|
|
f64c6eb4b4a51e8b3289bc3e0f8e3a0d4027c798
|
Remove command-line activation/optimization help.
|
theanets/flags.py
|
theanets/flags.py
|
'''This module contains command line flags.'''
import climate
climate.add_arg('--help-activation', action='store_true',
help='show available activation functions')
climate.add_arg('--help-optimize', action='store_true',
help='show available optimization algorithms')
g = climate.add_group('Architecture')
g.add_argument('-n', '--layers', nargs='+', type=int, metavar='N',
help='construct a network with layers of size N1, N2, ...')
g.add_argument('-g', '--hidden-activation', default='logistic', metavar='FUNC',
help='function for hidden unit activations')
g.add_argument('--output-activation', default='linear', metavar='FUNC',
help='function for output unit activations')
g = climate.add_group('Training')
g.add_argument('-a', '--algorithm', default=(), nargs='+', metavar='ALGO',
help='train with the given optimization algorithm(s)')
g.add_argument('-p', '--patience', type=int, default=4, metavar='N',
help='stop training if less than --min-improvement for N validations')
g.add_argument('-v', '--validate-every', type=int, default=10, metavar='N',
help='validate the model every N updates')
g.add_argument('-b', '--batch-size', type=int, default=64, metavar='N',
help='train with mini-batches of size N')
g.add_argument('-B', '--train-batches', type=int, metavar='N',
help='use at most N batches during gradient computations')
g.add_argument('-V', '--valid-batches', type=int, metavar='N',
help='use at most N batches during validation')
g.add_argument('--save-progress', metavar='FILE',
help='save the model periodically to FILE')
g.add_argument('--save-every', type=float, default=0, metavar='N',
help='save the model every N iterations or -N minutes')
g = climate.add_group('Regularization')
g.add_argument('--contractive', type=float, default=0, metavar='S',
help='penalize the Frobenius norm of the hidden Jacobian')
g.add_argument('--input-noise', type=float, default=0, metavar='S',
help='add noise to network inputs drawn from N(0, S)')
g.add_argument('--input-dropouts', type=float, default=0, metavar='R',
help='randomly set fraction R of input activations to 0')
g.add_argument('--hidden-noise', type=float, default=0, metavar='S',
help='add noise to hidden activations drawn from N(0, S)')
g.add_argument('--hidden-dropouts', type=float, default=0, metavar='R',
help='randomly set fraction R of hidden activations to 0')
g.add_argument('--hidden-l1', type=float, default=0, metavar='K',
help='regularize hidden activity with K on the L1 term')
g.add_argument('--hidden-l2', type=float, default=0, metavar='K',
help='regularize hidden activity with K on the L2 term')
g.add_argument('--weight-l1', type=float, default=0, metavar='K',
help='regularize network weights with K on the L1 term')
g.add_argument('--weight-l2', type=float, default=0, metavar='K',
help='regularize network weights with K on the L2 term')
g = climate.add_group('SGD-Based Optimization')
g.add_argument('-l', '--learning-rate', type=float, default=1e-4, metavar='V',
help='train the network with a learning rate of V')
g.add_argument('-m', '--momentum', type=float, default=0.9, metavar='V',
help='train the network with momentum of V')
g.add_argument('--min-improvement', type=float, default=0.01, metavar='R',
help='train until relative improvement is less than R')
g.add_argument('--gradient-clip', type=float, default=1e6, metavar='V',
help='clip gradient values to the interval [-V, V]')
g = climate.add_group('RmsProp Optimization')
g.add_argument('--rms-halflife', type=float, default=7, metavar='N',
help='use a half-life of N for RMS exponential moving averages')
g = climate.add_group('Rprop Optimization')
g.add_argument('--rprop-increase', type=float, default=1.01, metavar='R',
help='increase parameter steps at rate R')
g.add_argument('--rprop-decrease', type=float, default=0.99, metavar='R',
help='decrease parameter steps at rate R')
g.add_argument('--rprop-min-step', type=float, default=0., metavar='V',
help='cap parameter steps to V at the smallest')
g.add_argument('--rprop-max-step', type=float, default=1., metavar='V',
help='cap parameter steps to V at the largest')
g = climate.add_group('HF Optimization')
g.add_argument('-C', '--cg-batches', type=int, metavar='N',
help='use at most N batches for CG computation')
g.add_argument('--initial-lambda', type=float, default=1., metavar='K',
help='start the HF method with Tikhonov damping of K')
g.add_argument('--global-backtracking', action='store_true',
help='backtrack to lowest cost parameters during CG')
g.add_argument('--preconditioner', action='store_true',
help='precondition the system during CG')
|
Python
| 0
|
@@ -61,246 +61,8 @@
te%0A%0A
-climate.add_arg('--help-activation', action='store_true',%0A help='show available activation functions')%0Aclimate.add_arg('--help-optimize', action='store_true',%0A help='show available optimization algorithms')%0A%0A
g =
|
d519c7f171d7e89f30f073616f71af24654d223d
|
add solution for Rotate List
|
src/rotateList.py
|
src/rotateList.py
|
Python
| 0
|
@@ -0,0 +1,774 @@
+# Definition for singly-linked list.%0A# class ListNode:%0A# def __init__(self, x):%0A# self.val = x%0A# self.next = None%0A%0A%0Aclass Solution:%0A # @param head, a ListNode%0A # @param k, an integer%0A # @return a ListNode%0A%0A def rotateRight(self, head, k):%0A if not head:%0A return None%0A n = self.len(head)%0A k %25= n%0A if k == 0:%0A return head%0A slow = fast = head%0A for _ in xrange(k):%0A fast = fast.next%0A while fast.next:%0A slow, fast = slow.next, fast.next%0A head, fast.next = slow.next, head%0A slow.next = None%0A return head%0A%0A def len(self, head):%0A res = 0%0A while head:%0A res += 1%0A head = head.next%0A return res%0A
|
|
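A quick driver for the rotateRight solution above, with the ListNode definition from the commented header filled in (test values are arbitrary):

class ListNode:
    def __init__(self, x):
        self.val = x
        self.next = None

# build 1->2->3->4->5 and rotate right by 2; expect 4->5->1->2->3
head = ListNode(1)
tail = head
for v in (2, 3, 4, 5):
    tail.next = ListNode(v)
    tail = tail.next

node = Solution().rotateRight(head, 2)
out = []
while node:
    out.append(node.val)
    node = node.next
print(out)  # [4, 5, 1, 2, 3]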
5828823d505aae1425fd2353f898c5b18722e6e5
|
Introduce base class and ProgressObserver for renaming occurrences.
|
src/robotide/ui/progress.py
|
src/robotide/ui/progress.py
|
# Copyright 2008-2009 Nokia Siemens Networks Oyj
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import wx
from robotide import context
class LoadProgressObserver(object):
def __init__(self, frame):
self._progressbar = wx.ProgressDialog('RIDE', 'Loading the test data',
maximum=100, parent=frame,
style=wx.PD_ELAPSED_TIME)
def notify(self):
self._progressbar.Pulse()
def finish(self):
self._progressbar.Destroy()
context.LOG.report_parsing_errors()
def error(self, msg):
self.finish()
context.LOG.error(msg)
|
Python
| 0
|
@@ -608,16 +608,28 @@
mport wx
+%0Aimport time
%0A%0Afrom r
@@ -659,20 +659,16 @@
%0A%0Aclass
-Load
Progress
@@ -714,16 +714,32 @@
f, frame
+, title, message
):%0A
@@ -783,39 +783,22 @@
log(
-'RIDE', 'Loading the test data'
+title, message
,%0A
@@ -1180,8 +1180,529 @@
or(msg)%0A
+%0A%0Aclass LoadProgressObserver(ProgressObserver):%0A%0A def __init__(self, frame):%0A ProgressObserver.__init__(self, frame, 'RIDE', 'Loading the test data')%0A%0A%0Aclass RenameProgressObserver(ProgressObserver):%0A%0A def __init__(self, frame):%0A ProgressObserver.__init__(self, frame, 'RIDE', 'Renaming')%0A self._notification_occured = 0%0A%0A def notify(self):%0A if time.time() - self._notification_occured %3E 0.1:%0A self._progressbar.Pulse()%0A self._notification_occured = time.time()%0A
|
0d7b1d848d7ab80cc9054931f14b98bc123287bf
|
Create test_bulkresize.py file
|
jarviscli/plugins/test_bulkresize.py
|
jarviscli/plugins/test_bulkresize.py
|
Python
| 0
|
@@ -0,0 +1,378 @@
+from unittest import mock%0Aimport unittest%0Aimport os%0Afrom Jarvis import Jarvis%0Afrom plugins.bulkresize import spin%0Afrom plugins import bulkresize%0A%0Afrom tests import PluginTest%0A%0A%0ACURRENT_PATH = os.path.dirname(os.path.abspath(__file__))%0ADATA_PATH = os.path.join(CURRENT_PATH, '..', 'data/')%0A%0A%0Aclass Bulkresize(PluginTest):%0A pass%0A%0A%0Aif __name__ == '__main__':%0A unittest.main()
|
|
35fd6f5829f25b8f9dd1b3e5fa816e7dbbd08c04
|
add --all options to `lx export-aria2` and `lx download-aria2`
|
lixian_plugins/commands/aria2.py
|
lixian_plugins/commands/aria2.py
|
from lixian_plugins.api import command
from lixian_config import *
from lixian_encoding import default_encoding
from lixian_cli_parser import command_line_parser
from lixian_cli_parser import with_parser
from lixian_cli_parser import command_line_value
from lixian_commands.util import parse_login, create_client
def export_aria2_conf(args):
client = create_client(args)
import lixian_query
tasks = lixian_query.search_tasks(client, args)
files = []
for task in tasks:
if task['type'] == 'bt':
subs, skipped, single_file = lixian_query.expand_bt_sub_tasks(task)
if not subs:
continue
if single_file:
files.append((subs[0]['xunlei_url'], subs[0]['name'], None))
else:
for f in subs:
import os.path
files.append((f['xunlei_url'], f['name'], task['name']))
else:
files.append((task['xunlei_url'], task['name'], None))
output = ''
for url, name, dir in files:
if type(url) == unicode:
url = url.encode(default_encoding)
output += url + '\n'
output += ' out=' + name.encode(default_encoding) + '\n'
if dir:
output += ' dir=' + dir.encode(default_encoding) + '\n'
output += ' header=Cookie: gdriveid=' + client.get_gdriveid() + '\n'
return output
@command(usage='export task download urls as aria2 format')
@command_line_parser()
@with_parser(parse_login)
def export_aria2(args):
'''
usage: lx export-aria2 [id|name]...
'''
print export_aria2_conf(args)
def download_aria2_stdin(aria2_conf, j):
aria2_opts = ['aria2c', '-i', '-', '-j', j]
aria2_opts.extend(get_config('aria2-opts', '').split())
from subprocess import Popen, PIPE
sub = Popen(aria2_opts, stdin=PIPE, bufsize=1, shell=True)
sub.communicate(aria2_conf)
sub.stdin.close()
exit_code = sub.wait()
if exit_code != 0:
		raise Exception('aria2c exited abnormally')
def download_aria2_temp(aria2_conf, j):
import tempfile
temp = tempfile.NamedTemporaryFile('w', delete=False)
temp.file.write(aria2_conf)
temp.file.close()
try:
aria2_opts = ['aria2c', '-i', temp.name, '-j', j]
aria2_opts.extend(get_config('aria2-opts', '').split())
import subprocess
exit_code = subprocess.call(aria2_opts)
finally:
import os
os.unlink(temp.name)
if exit_code != 0:
		raise Exception('aria2c exited abnormally')
@command(usage='concurrently download tasks in aria2')
@command_line_parser()
@with_parser(parse_login)
@command_line_value('max-concurrent-downloads', alias='j', default=get_config('aria2-j', '5'))
def download_aria2(args):
'''
usage: lx download-aria2 -j 5 [id|name]...
'''
aria2_conf = export_aria2_conf(args)
import platform
if platform.system() == 'Windows':
download_aria2_temp(aria2_conf, args.max_concurrent_downloads)
else:
download_aria2_stdin(aria2_conf, args.max_concurrent_downloads)
|
Python
| 0
|
@@ -220,32 +220,53 @@
li_parser import
+ command_line_option,
command_line_va
@@ -269,16 +269,16 @@
e_value%0A
-
from lix
@@ -734,28 +734,8 @@
bs:%0A
-%09%09%09%09%09import os.path%0A
%09%09%09%09
@@ -1316,16 +1316,44 @@
_login)%0A
+@command_line_option('all')%0A
def expo
@@ -2343,32 +2343,32 @@
d_line_parser()%0A
-
@with_parser(par
@@ -2369,32 +2369,60 @@
er(parse_login)%0A
+@command_line_option('all')%0A
@command_line_va
|
a8a87818094f0cf9954815caca9fb586ddb4099b
|
Add a gallery example to show coloring of points by categories (#1006)
|
examples/gallery/symbols/points_categorical.py
|
examples/gallery/symbols/points_categorical.py
|
Python
| 0.000622
|
@@ -0,0 +1,2690 @@
+%22%22%22%0AColor points by categories%0A---------------------------%0AThe :meth:%60pygmt.Figure.plot%60 method can be used to plot symbols which are%0Acolor-coded by categories. In the example below, we show how the%0A%60Palmer Penguins dataset %3Chttps://github.com/allisonhorst/palmerpenguins%3E%60__%0Acan be visualized. Here, we can pass the individual categories included in%0Athe %22species%22 column directly to the %60%60color%60%60 parameter via%0A%60%60color=df.species.cat.codes.astype(int)%60%60. Additionally, we have to set%0A%60%60cmap=True%60%60. A desired colormap can be selected via the :meth:%60pygmt.makecpt%60%0Amethod.%0A%22%22%22%0A%0Aimport pandas as pd%0Aimport pygmt%0A%0A# Load sample penguins data and convert 'species' column to categorical dtype%0Adf = pd.read_csv(%22https://github.com/mwaskom/seaborn-data/raw/master/penguins.csv%22)%0Adf.species = df.species.astype(dtype=%22category%22)%0A%0A# Use pygmt.info to get region bounds (xmin, xmax, ymin, ymax)%0A# The below example will return a numpy array like %5B30.0, 60.0, 12.0, 22.0%5D%0Aregion = pygmt.info(%0A table=df%5B%5B%22bill_length_mm%22, %22bill_depth_mm%22%5D%5D, # x and y columns%0A per_column=True, # report the min/max values per column as a numpy array%0A # round the min/max values of the first two columns to the nearest multiple%0A # of 3 and 2, respectively%0A spacing=(3, 2),%0A)%0A%0A# Make a 2D categorical scatter plot, coloring each of the 3 species differently%0Afig = pygmt.Figure()%0A%0A# Generate a basemap of 10 cm x 10 cm size%0Afig.basemap(%0A region=region,%0A projection=%22X10c/10c%22,%0A frame=%5B%0A 'xafg+l%22Bill length (mm)%22',%0A 'yafg+l%22Bill depth (mm)%22',%0A 'WSen+t%22Penguin size at Palmer Station%22',%0A %5D,%0A)%0A%0A# Define a colormap to be used for three categories, define the range of the%0A# new discrete CPT using series=(lowest_value, highest_value, interval),%0A# use color_model=%22+c%22 to write the discrete color palette %22inferno%22 in%0A# categorical format%0Apygmt.makecpt(cmap=%22inferno%22, series=(0, 3, 1), color_model=%22+c%22)%0A%0Afig.plot(%0A # Use bill length and bill depth as x and y data input, respectively%0A x=df.bill_length_mm,%0A y=df.bill_depth_mm,%0A # Vary each symbol size according to another feature (body mass, scaled by 7.5*10e-5)%0A sizes=df.body_mass_g * 7.5e-5,%0A # Points colored by categorical number code%0A color=df.species.cat.codes.astype(int),%0A # Use colormap created by makecpt%0A cmap=True,%0A # Do not clip symbols that fall close to the map bounds%0A no_clip=True,%0A # Use circles as symbols with size in centimeter units%0A style=%22cc%22,%0A # Set transparency level for all symbols to deal with overplotting%0A transparency=40,%0A)%0A%0A# A colorbar displaying the different penguin species types will be added%0A# once GMT 6.2.0 is released.%0A%0Afig.show()%0A
|
|
7ff0c1fd4eb77129c7829f92fc176678a06abe19
|
add solution for Balanced Binary Tree
|
src/balancedBinaryTree.py
|
src/balancedBinaryTree.py
|
Python
| 0.000001
|
@@ -0,0 +1,688 @@
+# Definition for a binary tree node%0A# class TreeNode:%0A# def __init__(self, x):%0A# self.val = x%0A# self.left = None%0A# self.right = None%0A%0A%0Aclass Solution:%0A # @param root, a tree node%0A # @return a boolean%0A%0A def isBalanced(self, root):%0A return self.getDepth(root) != -1%0A%0A def getDepth(self, root):%0A if root is None:%0A return 0%0A l_depth = self.getDepth(root.left)%0A if l_depth == -1:%0A return -1%0A r_depth = self.getDepth(root.right)%0A if r_depth == -1:%0A return -1%0A if l_depth-r_depth %3E 1 or l_depth-r_depth %3C -1:%0A return -1%0A return max(l_depth, r_depth) + 1%0A
|
|
7c17dfaf8d727047e32ab4e18438897f1b35feb2
|
226. Invert Binary Tree
|
problems/test_0226_bfs.py
|
problems/test_0226_bfs.py
|
Python
| 0.999305
|
@@ -0,0 +1,981 @@
+import unittest%0A%0Aimport utils%0Afrom tree import TreeNode%0A%0A%0A# O(n) time. O(n) space. BFS.%0Aclass Solution:%0A    def invertTree(self, root: TreeNode) -%3E TreeNode:%0A        if not root:%0A            return None%0A%0A        q = %5Broot%5D%0A%0A        while q:%0A            new_q = %5B%5D%0A            for curr in q:%0A                curr.left, curr.right = curr.right, curr.left%0A                if curr.left:%0A                    new_q.append(curr.left)%0A                if curr.right:%0A                    new_q.append(curr.right)%0A            q = new_q%0A%0A        return root%0A%0A%0Aclass Test(unittest.TestCase):%0A    def test(self):%0A        cases = utils.load_test_json(__file__).test_cases%0A%0A        for case in cases:%0A            args = str(case.args)%0A            root = TreeNode.from_array(case.args.root)%0A            actual = Solution().invertTree(root)%0A            actual = TreeNode.to_array(actual)%0A            self.assertEqual(case.expected, actual, msg=args)%0A%0A%0Aif __name__ == '__main__':%0A    unittest.main()%0A
|
|
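For contrast, the same inversion written recursively; this sketch assumes the same TreeNode type and uses O(h) call-stack space instead of a queue:

# recursive variant -- not part of the commit above
def invert_tree(root):
    if root is None:
        return None
    root.left, root.right = invert_tree(root.right), invert_tree(root.left)
    return root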
09445e163102e76299ab93a872de07b43c6dc5db
|
Fix bug in func test helper
|
nose2/tests/_common.py
|
nose2/tests/_common.py
|
"""Common functionality."""
import os.path
import tempfile
import shutil
import sys
import subprocess
import six
from nose2.compat import unittest
from nose2 import main, util
HERE = os.path.abspath(os.path.dirname(__file__))
SUPPORT = os.path.join(HERE, 'functional', 'support')
class TestCase(unittest.TestCase):
"""TestCase extension.
If the class variable _RUN_IN_TEMP is True (default: False), tests will be
performed in a temporary directory, which is deleted afterwards.
"""
_RUN_IN_TEMP = False
def setUp(self):
super(TestCase, self).setUp()
if self._RUN_IN_TEMP:
self._orig_dir = os.getcwd()
work_dir = self._work_dir = tempfile.mkdtemp()
os.chdir(self._work_dir)
# Make sure it's possible to import modules from current directory
sys.path.insert(0, work_dir)
def tearDown(self):
super(TestCase, self).tearDown()
if self._RUN_IN_TEMP:
os.chdir(self._orig_dir)
shutil.rmtree(self._work_dir, ignore_errors=True)
class FunctionalTestCase(unittest.TestCase):
tags = ['functional']
def assertTestRunOutputMatches(self, proc, stdout=None, stderr=None):
cmd_stdout, cmd_stderr = None, None
try:
cmd_stdout, cmd_stderr = self._output[proc.pid]
except AttributeError:
self._output = {}
except KeyError:
pass
if cmd_stdout is None:
cmd_stdout, cmd_stderr = proc.communicate()
self._output[proc.pid] = cmd_stdout, cmd_stderr
if stdout:
self.assertRegexpMatches(util.safe_decode(cmd_stdout), stdout)
if stderr:
self.assertRegexpMatches(util.safe_decode(cmd_stderr), stderr)
def runIn(self, testdir, *args, **kw):
return run_nose2(*args, cwd=testdir, **kw)
class _FakeEventBase(object):
"""Baseclass for fake Events."""
def __init__(self):
self.handled = False
self.version = '0.1'
self.metadata = {}
class FakeHandleFileEvent(_FakeEventBase):
"""Fake HandleFileEvent."""
def __init__(self, name):
super(FakeHandleFileEvent, self).__init__()
self.loader = Stub() # FIXME
self.name = name
self.path = os.path.split(name)[1]
self.extraTests = []
class FakeStartTestEvent(_FakeEventBase):
"""Fake StartTestEvent."""
def __init__(self, test):
super(FakeStartTestEvent, self).__init__()
self.test = test
self.result = test.defaultTestResult()
import time
self.startTime = time.time()
class FakeLoadFromNameEvent(_FakeEventBase):
"""Fake LoadFromNameEvent."""
def __init__(self, name):
super(FakeLoadFromNameEvent, self).__init__()
self.name = name
class FakeLoadFromNamesEvent(_FakeEventBase):
"""Fake LoadFromNamesEvent."""
def __init__(self, names):
super(FakeLoadFromNamesEvent, self).__init__()
self.names = names
class FakeStartTestRunEvent(_FakeEventBase):
"""Fake StartTestRunEvent"""
def __init__(self, runner=None, suite=None, result=None, startTime=None,
executeTests=None):
super(FakeStartTestRunEvent, self).__init__()
self.suite = suite
self.runner = runner
self.result = result
self.startTime = startTime
self.executeTests = executeTests
class Stub(object):
"""Stub object for use in tests"""
def __getattr__(self, attr):
return Stub()
def __call__(self, *arg, **kw):
return Stub()
def support_file(*path_parts):
return os.path.abspath(os.path.join(SUPPORT, *path_parts))
def run_nose2(*nose2_args, **nose2_kwargs):
if 'cwd' in nose2_kwargs:
cwd = nose2_kwargs.pop('cwd')
if not os.path.isabs(cwd):
nose2_kwargs['cwd'] = support_file(cwd)
if 'module' not in nose2_kwargs:
nose2_kwargs['module'] = None
return NotReallyAProc(nose2_args, **nose2_kwargs)
class NotReallyAProc(object):
def __init__(self, args, cwd=None, **kwargs):
self.args = args
self.chdir = cwd
self.kwargs = kwargs
def __enter__(self):
self._stdout = sys.__stdout__
self._stderr = sys.__stderr__
self.cwd = os.getcwd()
if self.chdir:
os.chdir(self.chdir)
self.stdout = sys.stdout = sys.__stdout__ = six.StringIO()
self.stderr = sys.stderr = sys.__stderr__ = six.StringIO()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
sys.stdout = sys.__stdout__ = self._stdout
sys.stderr = sys.__stderr__ = self._stderr
if self.chdir:
os.chdir(self.cwd)
return False
def communicate(self):
self.__enter__()
try:
self.result = main.PluggableTestProgram(
argv=('nose2',) + self.args, exit=False,
**self.kwargs)
return self.stdout.getvalue(), self.stderr.getvalue()
finally:
self.__exit__(None, None, None)
@property
def pid(self):
return id(self)
def poll(self):
return not self.result.result.wasSuccessful()
|
Python
| 0
|
@@ -4790,25 +4790,23 @@
-self.__enter__()%0A
+with self:%0A
@@ -4818,32 +4818,36 @@
ry:%0A
+
+
self.result = ma
@@ -4887,16 +4887,20 @@
+
+
argv=('n
@@ -4948,16 +4948,20 @@
+
**self.k
@@ -4983,121 +4983,147 @@
-return self.stdout.getvalue(), self.stderr.getvalue()%0A finally:%0A self.__exit__(None, None, None
+except SystemExit as e:%0A return %22%22, %22EXIT CODE %25s%22 %25 str(e)%0A return self.stdout.getvalue(), self.stderr.getvalue(
)%0A%0A
|
8bc4dddfad944d385c02e2a6ebd8031bfb6bfae8
|
Test dynamic_length
|
extenteten/dynamic_length_test.py
|
extenteten/dynamic_length_test.py
|
Python
| 0.000001
|
@@ -0,0 +1,514 @@
+import numpy as np%0Aimport tensorflow as tf%0A%0Afrom .dynamic_length import *%0A%0A%0Adef test_id_tree_to_root_width():%0A with tf.Session() as session, session.as_default():%0A id_tree = tf.constant(%5B%5B%5B1%5D, %5B2%5D, %5B3%5D, %5B0%5D, %5B0%5D%5D%5D)%0A assert id_tree_to_root_width(id_tree).eval() == np.array(%5B3%5D)%0A%0A%0Adef test_id_sequence_to_length():%0A with tf.Session() as session, session.as_default():%0A id_sequence = tf.constant(%5B%5B1, 2, 3, 0, 0%5D%5D)%0A assert id_sequence_to_length(id_sequence).eval() == np.array(%5B3%5D)%0A
|
|
e621f9c63e3ba676c3ce33ca227b96c5d6b68afa
|
make the fakes be correct
|
nova/tests/db/fakes.py
|
nova/tests/db/fakes.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 OpenStack, LLC
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Stubouts, mocks and fixtures for the test suite"""
import time
from nova import db
from nova import test
from nova import utils
def stub_out_db_instance_api(stubs, injected=True):
"""Stubs out the db API for creating Instances."""
INSTANCE_TYPES = {
'm1.tiny': dict(id=2,
memory_mb=512,
vcpus=1,
local_gb=0,
flavorid=1,
rxtx_cap=1),
'm1.small': dict(id=5,
memory_mb=2048,
vcpus=1,
local_gb=20,
flavorid=2,
rxtx_cap=2),
'm1.medium':
dict(id=1,
memory_mb=4096,
vcpus=2,
local_gb=40,
flavorid=3,
rxtx_cap=3),
'm1.large': dict(id=3,
memory_mb=8192,
vcpus=4,
local_gb=80,
flavorid=4,
rxtx_cap=4),
'm1.xlarge':
dict(id=4,
memory_mb=16384,
vcpus=8,
local_gb=160,
flavorid=5,
rxtx_cap=5)}
flat_network_fields = {'id': 'fake_flat',
'bridge': 'xenbr0',
'label': 'fake_flat_network',
'netmask': '255.255.255.0',
'cidr_v6': 'fe80::a00:0/120',
'netmask_v6': '120',
'gateway': '10.0.0.1',
'gateway_v6': 'fe80::a00:1',
'broadcast': '10.0.0.255',
'dns': '10.0.0.2',
'ra_server': None,
'injected': injected}
vlan_network_fields = {'id': 'fake_vlan',
'bridge': 'br111',
'label': 'fake_vlan_network',
'netmask': '255.255.255.0',
'cidr_v6': 'fe80::a00:0/120',
'netmask_v6': '120',
'gateway': '10.0.0.1',
'gateway_v6': 'fe80::a00:1',
'broadcast': '10.0.0.255',
'dns': '10.0.0.2',
'ra_server': None,
'vlan': 111,
'injected': False}
fixed_ip_fields = {'address': '10.0.0.3',
'address_v6': 'fe80::a00:3',
'network_id': 'fake_flat'}
class FakeModel(object):
"""Stubs out for model."""
def __init__(self, values):
self.values = values
def __getattr__(self, name):
return self.values[name]
def __getitem__(self, key):
if key in self.values:
return self.values[key]
else:
raise NotImplementedError()
def fake_instance_type_get_all(context, inactive=0):
return INSTANCE_TYPES
def fake_instance_type_get_by_name(context, name):
return INSTANCE_TYPES[name]
def fake_instance_type_get_by_id(context, id):
for name, inst_type in INSTANCE_TYPES.iteritems():
if str(inst_type['id']) == str(id):
return inst_type
return None
def fake_network_get_by_instance(context, instance_id):
# Even instance numbers are on vlan networks
if instance_id % 2 == 0:
return FakeModel(vlan_network_fields)
else:
return FakeModel(flat_network_fields)
def fake_network_get_all_by_instance(context, instance_id):
# Even instance numbers are on vlan networks
if instance_id % 2 == 0:
return [FakeModel(vlan_network_fields)]
else:
return [FakeModel(flat_network_fields)]
def fake_instance_get_fixed_address(context, instance_id):
return FakeModel(fixed_ip_fields).address
def fake_instance_get_fixed_address_v6(context, instance_id):
return FakeModel(fixed_ip_fields).address
def fake_fixed_ip_get_all_by_instance(context, instance_id):
return [FakeModel(fixed_ip_fields)]
stubs.Set(db, 'network_get_by_instance', fake_network_get_by_instance)
stubs.Set(db, 'network_get_all_by_instance',
fake_network_get_all_by_instance)
stubs.Set(db, 'instance_type_get_all', fake_instance_type_get_all)
stubs.Set(db, 'instance_type_get_by_name', fake_instance_type_get_by_name)
stubs.Set(db, 'instance_type_get_by_id', fake_instance_type_get_by_id)
stubs.Set(db, 'instance_get_fixed_address',
fake_instance_get_fixed_address)
stubs.Set(db, 'instance_get_fixed_address_v6',
fake_instance_get_fixed_address_v6)
stubs.Set(db, 'network_get_all_by_instance',
fake_network_get_all_by_instance)
stubs.Set(db, 'fixed_ip_get_all_by_instance',
fake_fixed_ip_get_all_by_instance)
|
Python
| 0.9996
|
@@ -4723,16 +4723,18 @@
_address
+es
(context
@@ -4756,32 +4756,33 @@
%0A return
+%5B
FakeModel(fixed_
@@ -4791,32 +4791,33 @@
_fields).address
+%5D
%0A%0A def fake_i
@@ -4841,16 +4841,18 @@
_address
+es
_v6(cont
@@ -4877,32 +4877,33 @@
%0A return
+%5B
FakeModel(fixed_
@@ -4920,16 +4920,17 @@
.address
+%5D
%0A%0A de
@@ -5475,16 +5475,18 @@
_address
+es
',%0A
@@ -5519,16 +5519,18 @@
_address
+es
)%0A st
@@ -5564,24 +5564,26 @@
ixed_address
+es
_v6',%0A
@@ -5615,16 +5615,18 @@
_address
+es
_v6)%0A
|
471d60f41a283e5a2b2fb4a364cde67150de8acd
|
Create pmcolor.py
|
HexChat/pmcolor.py
|
HexChat/pmcolor.py
|
Python
| 0
|
@@ -0,0 +1,344 @@
+__module_name__ = %22PMColor%22%0A__module_author__ = %22TingPing%22%0A__module_version__ = %221%22%0A__module_description__ = %22Color PM tabs like Hilights%22%0A%0Aimport xchat%0A%0Adef pm_cb(word, word_eol, userdata):%0A    xchat.command('GUI COLOR 3')%0A    return None%0A%0Axchat.hook_print(%22Private Message to Dialog%22, pm_cb)%0Axchat.hook_print(%22Private Action to Dialog%22, pm_cb)%0A
|
|
8b5c9a434b1d8ae8d46a34d45114bc9c71dac0ea
|
Create install for nginx
|
genes/nginx/main.py
|
genes/nginx/main.py
|
Python
| 0
|
@@ -0,0 +1,391 @@
+from genes.apt import commands as apt%0Afrom genes.brew import commands as brew%0Afrom genes.debian.traits import is_debian%0Afrom genes.mac.traits import is_osx%0Afrom genes.ubuntu.traits import is_ubuntu%0A%0A%0Adef main():%0A if is_ubuntu() or is_debian():%0A apt.update()%0A apt.install('nginx')%0A elif is_osx():%0A brew.update()%0A brew.install('nginx')%0A else:%0A pass%0A
|
|
cd1f02d5707e1285fab54d31e65b6098e967a8d3
|
Move quality plugin priority earlier, so it can reject before e.g. regexp plugin causes imdb lookups.
|
flexget/plugins/filter/quality.py
|
flexget/plugins/filter/quality.py
|
import logging
from flexget.plugin import register_plugin, priority
import flexget.utils.qualities as quals
log = logging.getLogger('quality')
class FilterQuality(object):
"""
Rejects all entries that don't have one of the specified qualities
Example:
quality:
- hdtv
"""
def validator(self):
from flexget import validator
qualities = [q.name for q in quals.all()]
root = validator.factory()
root.accept('choice').accept_choices(qualities, ignore_case=True)
root.accept('list').accept('choice').accept_choices(qualities, ignore_case=True)
advanced = root.accept('dict')
advanced.accept('choice', key='min').accept_choices(qualities, ignore_case=True)
advanced.accept('choice', key='max').accept_choices(qualities, ignore_case=True)
advanced.accept('choice', key='quality').accept_choices(qualities, ignore_case=True)
advanced.accept('list', key='quality').accept('choice').accept_choices(qualities, ignore_case=True)
return root
def prepare_config(self, config):
if not isinstance(config, dict):
config = {'quality': config}
if isinstance(config.get('quality'), basestring):
config['quality'] = [config['quality']]
# Convert all config parameters from strings to their associated quality object
if 'quality' in config:
config['quality'] = [quals.get(q) for q in config['quality']]
for key in ['min', 'max']:
if key in config:
config[key] = quals.get(config[key])
return config
# Run before series and imdb plugins, so correct qualities are chosen
@priority(130)
def on_feed_filter(self, feed, config):
config = self.prepare_config(config)
for entry in feed.entries:
if 'quality' in config:
if not entry.get('quality') in config['quality']:
                    msg = 'quality is %s instead of one of the allowed (%s)' %\
(str(entry['quality']),
', '.join(str(x) for x in config['quality']))
feed.reject(entry, msg)
else:
if config.get('min'):
if entry.get('quality') < config['min']:
feed.reject(entry, 'quality %s not >= %s' % (entry['quality'], config['min']))
if config.get('max'):
if entry.get('quality') > config['max']:
feed.reject(entry, 'quality %s not <= %s' % (entry['quality'], config['max']))
register_plugin(FilterQuality, 'quality', api_ver=2)
|
Python
| 0.000003
|
@@ -1723,10 +1723,10 @@
ty(1
-30
+75
)%0A
|
e1a40e6a43915f8e8be2aa27387cd0d25f05ed67
|
Create Multiplication_Of_2_Numbers.py
|
Code/Multiplication_Of_2_Numbers.py
|
Code/Multiplication_Of_2_Numbers.py
|
Python
| 0.008666
|
@@ -0,0 +1,70 @@
+a=input(%22Enter a number --%3E%22)%0Ab=input(%22Enter a number --%3E%22)%0Aprint a*b%0A
|
|
20e1b3eae08b9052b44e9f54d5ed8c5d76e15033
|
Fix radiotherm I/O inside properties (#4227)
|
homeassistant/components/climate/radiotherm.py
|
homeassistant/components/climate/radiotherm.py
|
"""
Support for Radio Thermostat wifi-enabled home thermostats.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/climate.radiotherm/
"""
import datetime
import logging
import voluptuous as vol
from homeassistant.components.climate import (
STATE_AUTO, STATE_COOL, STATE_HEAT, STATE_IDLE, STATE_OFF,
ClimateDevice, PLATFORM_SCHEMA)
from homeassistant.const import CONF_HOST, TEMP_FAHRENHEIT, ATTR_TEMPERATURE
import homeassistant.helpers.config_validation as cv
REQUIREMENTS = ['radiotherm==1.2']
_LOGGER = logging.getLogger(__name__)
ATTR_FAN = 'fan'
ATTR_MODE = 'mode'
CONF_HOLD_TEMP = 'hold_temp'
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_HOST): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_HOLD_TEMP, default=False): cv.boolean,
})
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Setup the Radio Thermostat."""
import radiotherm
hosts = []
if CONF_HOST in config:
hosts = config[CONF_HOST]
else:
hosts.append(radiotherm.discover.discover_address())
if hosts is None:
_LOGGER.error("No Radiotherm Thermostats detected")
return False
hold_temp = config.get(CONF_HOLD_TEMP)
tstats = []
for host in hosts:
try:
tstat = radiotherm.get_thermostat(host)
tstats.append(RadioThermostat(tstat, hold_temp))
except OSError:
_LOGGER.exception("Unable to connect to Radio Thermostat: %s",
host)
add_devices(tstats)
class RadioThermostat(ClimateDevice):
"""Representation of a Radio Thermostat."""
def __init__(self, device, hold_temp):
"""Initialize the thermostat."""
self.device = device
self.set_time()
self._target_temperature = None
self._current_temperature = None
self._current_operation = STATE_IDLE
self._name = None
self.hold_temp = hold_temp
self.update()
self._operation_list = [STATE_AUTO, STATE_COOL, STATE_HEAT, STATE_OFF]
@property
def name(self):
"""Return the name of the Radio Thermostat."""
return self._name
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return TEMP_FAHRENHEIT
@property
def device_state_attributes(self):
"""Return the device specific state attributes."""
return {
ATTR_FAN: self.device.fmode['human'],
ATTR_MODE: self.device.tmode['human']
}
@property
def current_temperature(self):
"""Return the current temperature."""
return self._current_temperature
@property
def current_operation(self):
"""Return the current operation. head, cool idle."""
return self._current_operation
@property
def operation_list(self):
"""Return the operation modes list."""
return self._operation_list
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
return self._target_temperature
def update(self):
"""Update the data from the thermostat."""
self._current_temperature = self.device.temp['raw']
self._name = self.device.name['raw']
if self.device.tmode['human'] == 'Cool':
self._target_temperature = self.device.t_cool['raw']
self._current_operation = STATE_COOL
elif self.device.tmode['human'] == 'Heat':
self._target_temperature = self.device.t_heat['raw']
self._current_operation = STATE_HEAT
else:
self._current_operation = STATE_IDLE
def set_temperature(self, **kwargs):
"""Set new target temperature."""
temperature = kwargs.get(ATTR_TEMPERATURE)
if temperature is None:
return
if self._current_operation == STATE_COOL:
self.device.t_cool = temperature
elif self._current_operation == STATE_HEAT:
self.device.t_heat = temperature
if self.hold_temp:
self.device.hold = 1
else:
self.device.hold = 0
def set_time(self):
"""Set device time."""
now = datetime.datetime.now()
self.device.time = {
'day': now.weekday(),
'hour': now.hour,
'minute': now.minute
}
def set_operation_mode(self, operation_mode):
"""Set operation mode (auto, cool, heat, off)."""
if operation_mode == STATE_OFF:
self.device.tmode = 0
elif operation_mode == STATE_AUTO:
self.device.tmode = 3
elif operation_mode == STATE_COOL:
self.device.t_cool = self._target_temperature
elif operation_mode == STATE_HEAT:
self.device.t_heat = self._target_temperature
|
Python
| 0
|
@@ -1968,32 +1968,86 @@
lf._name = None%0A
+ self._fmode = None%0A self._tmode = None%0A
self.hol
@@ -2065,16 +2065,16 @@
ld_temp%0A
-
@@ -2567,29 +2567,14 @@
elf.
-device.fmode%5B'human'%5D
+_fmode
,%0A
@@ -2599,37 +2599,23 @@
E: self.
-device.tmode%5B'human'%5D
+_tmode,
%0A
@@ -3356,26 +3356,86 @@
w'%5D%0A
-if
+self._fmode = self.device.fmode%5B'human'%5D%0A self._tmode =
self.device
@@ -3445,24 +3445,48 @@
ode%5B'human'%5D
+%0A%0A if self._tmode
== 'Cool':%0A
@@ -3617,37 +3617,22 @@
if self.
-device.tmode%5B'human'%5D
+_tmode
== 'Hea
|
18b6ac79ebcd881babaa328900c6e39b2da3c1bb
|
Handle 404 error when attempting to download a game that is still in progress
|
tenhou-download-game-xml.py
|
tenhou-download-game-xml.py
|
#!/usr/bin/python3
import glob
import os
from optparse import OptionParser
from struct import Struct
from urllib.parse import parse_qs
from urllib.request import urlopen
import struct
import codecs
table = [
22136, 52719, 55146, 42104,
59591, 46934, 9248, 28891,
49597, 52974, 62844, 4015,
18311, 50730, 43056, 17939,
64838, 38145, 27008, 39128,
35652, 63407, 65535, 23473,
35164, 55230, 27536, 4386,
64920, 29075, 42617, 17294,
18868, 2081
]
def tenhouHash(game):
code_pos = game.rindex("-") + 1
code = game[code_pos:]
if code[0] == 'x':
a,b,c = struct.unpack(">HHH", bytes.fromhex(code[1:]))
index = 0
if game[:12] > "2010041111gm":
x = int("3" + game[4:10])
y = int(game[9])
index = x % (33 - y)
first = (a ^ b ^ table[index]) & 0xFFFF
second = (b ^ c ^ table[index] ^ table[index + 1]) & 0xFFFF
return game[:code_pos] + codecs.getencoder('hex_codec')(struct.pack(">HH", first, second))[0].decode('ASCII')
else:
return game
p = OptionParser()
p.add_option('-d', '--directory',
default=os.path.expanduser('~/.tenhou-game-xml'),
help='Directory in which to store downloaded XML')
opts, args = p.parse_args()
if args:
p.error('This command takes no positional arguments')
sol_files = []
sol_files.extend(glob.glob(os.path.join(
os.path.expanduser('~'),
'.config/chromium/*/Pepper Data/Shockwave Flash/WritableRoot/#SharedObjects/*/mjv.jp/mjinfo.sol')))
sol_files.extend(glob.glob(os.path.join(
os.path.expanduser('~'),
'.config/google-chrome/*/Pepper Data/Shockwave Flash/WritableRoot/#SharedObjects/*/mjv.jp/mjinfo.sol')))
sol_files.extend(glob.glob(os.path.join(
os.path.expanduser('~'),
'.macromedia/Flash_Player/#SharedObjects/*/mjv.jp/mjinfo.sol')))
if not os.path.exists(opts.directory):
os.makedirs(opts.directory)
for sol_file in sol_files:
print("Reading Flash state file: {}".format(sol_file))
with open(sol_file, 'rb') as f:
data = f.read()
# What follows is a limited parser for Flash Local Shared Object files -
# a more complete implementation may be found at:
# https://pypi.python.org/pypi/PyAMF
header = Struct('>HI10s8sI')
magic, objlength, magic2, mjinfo, padding = header.unpack_from(data)
offset = header.size
assert magic == 0xbf
assert magic2 == b'TCSO\0\x04\0\0\0\0'
assert mjinfo == b'\0\x06mjinfo'
assert padding == 0
ushort = Struct('>H')
ubyte = Struct('>B')
while offset < len(data):
length, = ushort.unpack_from(data, offset)
offset += ushort.size
name = data[offset:offset+length]
offset += length
amf0_type, = ubyte.unpack_from(data, offset)
offset += ubyte.size
# Type 2: UTF-8 String, prefixed with 2-byte length
if amf0_type == 2:
length, = ushort.unpack_from(data, offset)
offset += ushort.size
value = data[offset:offset+length]
offset += length
# Type 6: Undefined
elif amf0_type == 6:
value = None
# Type 1: Boolean
elif amf0_type == 1:
value = bool(data[offset])
offset += 1
# Other types from the AMF0 specification are not implemented, as they
# have not been observed in mjinfo.sol files. If required, see
# http://download.macromedia.com/pub/labs/amf/amf0_spec_121207.pdf
else:
print("Unimplemented AMF0 type {} at offset={} (hex {})".format(amf0_type, offset, hex(offset)))
trailer_byte = data[offset]
assert trailer_byte == 0
offset += 1
if name == b'logstr':
loglines = filter(None, value.split(b'\n'))
for logline in loglines:
logname = parse_qs(logline.decode('ASCII'))['file'][0]
logname = tenhouHash(logname)
target_fname = os.path.join(opts.directory, "{}.xml".format(logname))
if os.path.exists(target_fname):
print("Game {} already downloaded".format(logname))
else:
print("Downloading game {}".format(logname))
resp = urlopen('http://e.mjv.jp/0/log/?' + logname)
data = resp.read()
with open(target_fname, 'wb') as f:
f.write(data)
|
Python
| 0
|
@@ -164,16 +164,51 @@
urlopen%0A
+from urllib.error import HTTPError%0A
import s
@@ -4197,32 +4197,53 @@
ormat(logname))%0A
+ try:%0A
resp
@@ -4298,24 +4298,28 @@
+
+
data = resp.
@@ -4321,24 +4321,28 @@
resp.read()%0A
+
@@ -4393,16 +4393,20 @@
+
+
f.write(
@@ -4407,12 +4407,233 @@
write(data)%0A
+ except HTTPError as e:%0A if e.code == 404:%0A print(%22Could not download game %7B%7D. Is the game still in progress?%22.format(logname))%0A else:%0A raise%0A
|
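The error-handling pattern this diff introduces, reduced to a standalone sketch (the URL below is illustrative, not the actual Tenhou endpoint):

from urllib.request import urlopen
from urllib.error import HTTPError

def fetch_or_none(url):
    # return the body, or None on 404; any other HTTP error propagates
    try:
        return urlopen(url).read()
    except HTTPError as e:
        if e.code == 404:
            return None
        raise

# body = fetch_or_none('http://example.com/0/log/?some-game-id')  # hypothetical URL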
2059aa7776a8e0c947b68e9401d74bdd146a59cd
|
Test passed for week day
|
ch03_04.py
|
ch03_04.py
|
Python
| 0.000002
|
@@ -0,0 +1,776 @@
+(day, month, year) = input().split()%0A%0Aday = int(day); month = int(month); year = int(year)%0A%0Aif month %3C 3:%0A    month += 12%0A    year -= 1%0A%0Ac = year // 100%0Ak = year %25 100%0Aweek_day = int( day + (26 * (month + 1) // 10) + k + ( k // 4 ) + ( c // 4 ) + ( 5 * c ) ) %25 7%0A%0Aweek_day_name = ''%0A%0A# 1. Follow from flowchart%0Aif 0 == week_day:%0A    week_day_name = 'SAT'%0Aelif 1 == week_day:%0A    week_day_name = 'SUN'%0Aelif 2 == week_day:%0A    week_day_name = 'MON'%0Aelif 3 == week_day:%0A    week_day_name = 'TUE'%0Aelif 4 == week_day:%0A    week_day_name = 'WED'%0Aelif 5 == week_day:%0A    week_day_name = 'THU'%0Aelif 6 == week_day:%0A    week_day_name = 'FRI'%0A%0Aprint(week_day_name)%0A%0A# 2. SHORTER VERSION%0A# week_day_list = %5B'SAT', 'SUN', 'MON', 'TUE', 'WED', 'THU', 'FRI'%5D%0A# print(week_day_list%5Bweek_day%5D)%0A
|
|
bda7ef0f449c40d572cc4fe40aaaa2f60996bde5
|
add spider for solitaireonline.com
|
gaming_spiders/solitaireonline.py
|
gaming_spiders/solitaireonline.py
|
Python
| 0
|
@@ -0,0 +1,525 @@
+#!/usr/bin/env python%0A%0Aimport json%0A%0Afrom cloudfeaster import spider%0A%0Afrom zygomatic import ZygomaticSpider%0A%0A%0Aclass SolitaireOnlineSpider(ZygomaticSpider):%0A%0A @classmethod%0A def get_metadata(cls):%0A return %7B%0A %22url%22: %22http://www.solitaireonline.com/?sort=mostPlayed%22,%0A %7D%0A%0A%0Aif __name__ == %22__main__%22:%0A crawl_args = spider.CLICrawlArgs(SolitaireOnlineSpider)%0A crawler = spider.SpiderCrawler(SolitaireOnlineSpider)%0A crawl_result = crawler.crawl(*crawl_args)%0A print json.dumps(crawl_result)%0A
|
|
868293aee14d6216c69446dc367491b25469f6e8
|
add import_question_metadata to import display_text and key for questions from csv file
|
apps/stories/management/commands/import_question_metadata.py
|
apps/stories/management/commands/import_question_metadata.py
|
Python
| 0.000001
|
@@ -0,0 +1,1488 @@
+from django.core.management.base import BaseCommand%0Aimport csv%0Afrom stories.models import Question, Questiongroup, QuestiongroupQuestions%0A%0Aclass Command(BaseCommand):%0A args = %22filename to import from%22%0A help = %22%22%22Import Key and Display Text metadata for questions%0A python manage.py import_question_metadata %3Cpath/to/file.csv%3E%0A %22%22%22%0A%0A def handle(self, *args, **options):%0A filename = args%5B0%5D%0A reader = csv.reader(open(filename))%0A i = 0%0A for row in reader:%0A if i == 0:%0A i += 1%0A continue%0A source = row%5B0%5D.strip()%0A school_type = row%5B1%5D.strip()%0A version = int(row%5B2%5D)%0A sequence = int(row%5B3%5D)%0A question_text = row%5B4%5D.strip()%0A key = row%5B5%5D.strip()%0A new_display_text = row%5B7%5D.strip()%0A qg = Questiongroup.objects.filter(source__name=source, version=version)%5B0%5D%0A qgq = QuestiongroupQuestions.objects.filter(sequence=sequence, questiongroup=qg,%0A question__school_type__name=school_type)%5B0%5D%0A question = qgq.question%0A print question.text%0A print question_text%0A if question.text.strip() != question_text:%0A raise Exception(%22question text does not match. failing.%22)%0A question.display_text = new_display_text%0A question.key = key%0A question.save()%0A %0A%0A
|
|
9f1dfbf4bf36c0e3ef991a66c5a68b2674223b19
|
Add a constant decorator
|
const.py
|
const.py
|
Python
| 0.000978
|
@@ -0,0 +1,251 @@
+def constant(func):%0A %22%22%22 Decorator used to emulate constant values %22%22%22%0A %0A def fset(self, value):%0A raise TypeError(%22Cannot modify the value of a constant.%22)%0A%0A def fget(self):%0A return func()%0A %0A return property(fget, fset)
|
|
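For context on how the decorator is meant to be used: the decorated function supplies the value and the generated property raises on assignment. A minimal usage sketch, assuming it is applied inside a class; note the decorated function takes no self, because fget calls func() with no arguments:

    class Colors(object):
        @constant
        def RED():
            return 0xFF0000

    c = Colors()
    print(c.RED)   # 16711680
    c.RED = 0      # TypeError: Cannot modify the value of a constant.
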
2ee04a1b668501eb41ce4b08e6c92ffe4f57d861
|
Build dependencies were broken because something sorts 1.0.1-XX and 1.0-YY wrong
|
aliyun/__init__.py
|
aliyun/__init__.py
|
"""
Aliyun API
==========
The Aliyun API is well-documented at `dev.aliyun.com <http://dev.aliyun.com/thread.php?spm=0.0.0.0.MqTmNj&fid=8>`_.
Each service's API is very similar: There are regions, actions, and each action has many parameters.
It is an OAuth2 API, so you need to have an ID and a secret. You can get these from the Aliyun management console.
Authentication
==============
You will need security credentials for your Aliyun account. You can view and
create them in the `Aliyun management console <http://console.aliyun.com>`_. This
library will look for credentials in the following places:
1. Environment variables `ALI_ACCESS_KEY_ID` and `ALI_SECRET_ACCESS_KEY`
2. An ini-style configuration file at `~/.aliyun.cfg` with contents like:
::
[default]
access_key_id=xxxxxxxxxxxxx
secret_access_key=xxxxxxxxxxxxxxxxxxxxxxx
..
3. A system-wide version of that file at /etc/aliyun.cfg with similar contents.
We recommend using environment variables whenever possible.
Main Interfaces
===============
The main components of python-aliyun are ECS and SLB. Other Aliyun products will
be added as API support develops. Within each Aliyun product, we tried to
implement every API Action variation available. We used a boto-style design
where most API interaction is done with a connection object which marshalls
Python objects and API representations.
*ECS*:
You can create a new ECS connection and interact with ECS like this::
import aliyun.ecs.connection
conn = aliyun.ecs.connection.EcsConnection('cn-hangzhou')
print conn.get_all_instance_ids()
See more at :mod:`aliyun.ecs`
*SLB*:
Similarly for SLB, get the connection object like this::
import aliyun.slb.connection
conn = aliyun.slb.connection.SlbConnection('cn-hangzhou')
print conn.get_all_load_balancer_ids()
See more at :mod:`aliyun.slb`
ali command
===========
The ali commandline tool is mostly used for debugging the Aliyun API interactions.
It accepts arbitrary Key=Value pairs and passes them on to the API after wrapping them.
::
ali --region cn-hangzhou ecs Action=DescribeRegions
ali --region cn-hangzhou slb Action=DescribeLoadBalancers
"""
__version__ = "1.0.1"
|
Python
| 0.000026
|
@@ -2207,9 +2207,9 @@
%221.
-0.1
+1.0
%22%0A
|
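The commit sidesteps a sort bug by renumbering rather than fixing the comparator, which points at a classic pitfall: version strings compared as raw characters do not order by numeric component. A generic illustration (not the exact strings from the broken tool, which the diff does not show) and the usual tuple-based key:

    >>> '1.0.10' < '1.0.9'     # True as strings, though 10 > 9 numerically
    True
    >>> def vkey(s):
    ...     return tuple(int(p) for p in s.split('-')[0].split('.'))
    >>> max(['1.0.10', '1.0.9'], key=vkey)
    '1.0.10'
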
c480982a09f354a05c5e5ff0dc8a7c93f13f3970
|
add config for quakenet script
|
config/quakenet.py
|
config/quakenet.py
|
Python
| 0
|
@@ -0,0 +1,100 @@
+settings = %7B%0A %22authname%22: %22authname%22,%0A %22password%22: %22authpw%22,%0A %22channels%22: %22#pwnagedeluxe%22%0A%7D
|
|
6b0721b6aeda6d3ec6f5d31be7c741bc7fcc4635
|
bump release for 18.0.1 development
|
setup.py
|
setup.py
|
"""Functionality that should be in the standard library. Like
builtins, but Boltons.
Otherwise known as, "everyone's util.py," but cleaned up and
tested.
Contains over 160 BSD-licensed utility types and functions that can be
used as a package or independently. `Extensively documented on Read
the Docs <http://boltons.readthedocs.org>`_.
"""
from setuptools import setup
__author__ = 'Mahmoud Hashemi'
__version__ = '18.0.0'
__contact__ = 'mahmoud@hatnote.com'
__url__ = 'https://github.com/mahmoud/boltons'
__license__ = 'BSD'
setup(name='boltons',
version=__version__,
description="When they're not builtins, they're boltons.",
long_description=__doc__,
author=__author__,
author_email=__contact__,
url=__url__,
packages=['boltons'],
include_package_data=True,
zip_safe=False,
license=__license__,
platforms='any',
classifiers=[
# See: https://pypi.python.org/pypi?:action=list_classifiers
'Topic :: Utilities',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Topic :: Software Development :: Libraries',
'Development Status :: 5 - Production/Stable',
'Operating System :: OS Independent',
# List of python versions and their support status:
# https://en.wikipedia.org/wiki/CPython#Version_history
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy', ]
)
"""
A brief checklist for release:
* tox
* git commit (if applicable)
* Bump setup.py version off of -dev
* git commit -a -m "bump version for x.y.z release"
* python setup.py sdist bdist_wheel upload
* bump docs/conf.py version
* git commit
* git tag -a x.y.z -m "brief summary"
* write CHANGELOG
* git commit
* bump setup.py version onto n+1 dev
* git commit
* git push
"""
|
Python
| 0
|
@@ -420,17 +420,20 @@
= '18.0.
-0
+1dev
'%0A__cont
|
95d3306f2f7c492ea5f58c86b86165544273e6b9
|
Create mp.py
|
mp.py
|
mp.py
|
Python
| 0.000002
|
@@ -0,0 +1,539 @@
+import multiprocessing as mp%0Aimport time%0A%0ATHREADS=10%0A%0Adef f(x):%0A print(%22Starting....%22 + str(x))%0A time.sleep(5)%0A print(%22Finishing....%22+ str(x))%0A%0Aprocesses = %5BNone%5D * THREADS%0Aprint(processes)%0A%0Adef add_to_processes(args):%0A while True:%0A for idx, process in enumerate(processes):%0A if process is None or not process.is_alive():%0A p = mp.Process(target=f, args=args)%0A processes%5Bidx%5D = p%0A p.start()%0A return%0A%0Afor i in range(0, 30):%0A add_to_processes((i,))%0A
|
|
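The script above hand-rolls a ten-slot process table and busy-waits for a free slot, which burns CPU between checks. For a bounded worker count, a sketch of the same workload on the standard multiprocessing.Pool, which queues tasks and caps concurrency without polling:

    import multiprocessing as mp
    import time

    def f(x):
        print("Starting...." + str(x))
        time.sleep(5)
        print("Finishing...." + str(x))

    if __name__ == '__main__':                 # required under the spawn start method
        with mp.Pool(processes=10) as pool:    # at most 10 workers alive at once
            pool.map(f, range(30))             # blocks until all 30 tasks finish
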
934c4136c6415b76577d206739b352ad965210f0
|
Create test_postures.py
|
home/beetlejuice/test_postures.py
|
home/beetlejuice/test_postures.py
|
Python
| 0.000001
|
@@ -0,0 +1,920 @@
+# Sweety's postures test%0Aimport random%0A%0A%0ARuntime.createAndStart(%22sweety%22, %22Sweety%22)%0Asweety.arduino.setBoard(%22atmega2560%22)%0Asweety.connect(%22COM9%22)%0Asleep(1) # give a second to the arduino for connect%0Asweety.attach()%0A%0Asweety.mouthState(%22smile%22)%0Asleep(1)%0A# set delays for led sync (delayTime, delayTimeStop, delayTimeLetter)%0Asweety.setdelays(50,200,50)%0Asweety.mouth.setLanguage(%22en%22)%0A#sweety.saying(%22Hello,my name is sweety.%22)%0A%0Asweety.posture(%22neutral%22)%0Asweety.saying(%22neutral.%22)%0Asleep(2)%0A%0Asweety.posture(%22yes%22)%0Asweety.saying(%22yes.%22)%0Asleep(2)%0A%0Asweety.posture(%22concentre%22)%0Asweety.saying(%22concentre.%22)%0Asleep(2)%0A%0Asweety.posture(%22showLeft%22)%0Asweety.saying(%22show left.%22)%0Asleep(2)%0A%0Asweety.posture(%22showRight%22)%0Asweety.saying(%22show right.%22)%0Asleep(2)%0A%0Asweety.posture(%22handsUp%22)%0Asweety.saying(%22hands up !%22)%0Asleep(2)%0A%0Asweety.posture(%22carryBags%22)%0Asweety.saying(%22carry bags.%22)%0Asleep(2)%0A%0Asweety.posture(%22neutral%22)%0Asweety.saying(%22neutral.%22)%0A
|
|
89714cf01186e9aa5575fadf45c6c1fa70812871
|
Create count.py
|
count.py
|
count.py
|
Python
| 0.000003
|
@@ -0,0 +1,356 @@
+#!/usr/bin/env python%0Aimport rospy%0Afrom std_msgs.msg import Int32%0A%0Aif __name__ == '__main__':%0A rospy.init_node('count')%0A pub = rospy.Publisher('count_up', Int32, queue_size=1)%0A rate = rospy.Rate(10)%0A n = 0%0A while not rospy.is_shutdown():%0A n += 1%0A pub.publish(n)%0A rate.sleep()%0A
|
|
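The node above is publish-only; the other half of the topic is a subscriber. A minimal counterpart sketch, assuming the same std_msgs/Int32 type and the count_up topic name from the diff (the node name here is made up):

    #!/usr/bin/env python
    import rospy
    from std_msgs.msg import Int32

    def callback(msg):
        rospy.loginfo('heard: %d', msg.data)

    if __name__ == '__main__':
        rospy.init_node('count_listener')             # hypothetical node name
        rospy.Subscriber('count_up', Int32, callback)
        rospy.spin()                                  # block, dispatching callbacks
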
9f443a5af6537867712f12419d93a5b8c824858a
|
Add Notify-OSD option for Linux-based systems
|
flexget/plugins/output/notify_osd.py
|
flexget/plugins/output/notify_osd.py
|
Python
| 0
|
@@ -0,0 +1,2485 @@
+from __future__ import unicode_literals, division, absolute_import%0Aimport logging%0A%0Afrom flexget.plugin import register_plugin, priority, DependencyError%0Afrom flexget.utils.template import RenderError, render_from_task%0A%0Alog = logging.getLogger('notify_osd')%0A%0A%0Aclass OutputNotifyOsd(object):%0A%0A def validator(self):%0A from flexget import validator%0A config = validator.factory()%0A config.accept('boolean')%0A advanced = config.accept('dict')%0A advanced.accept('text', key='title_template')%0A advanced.accept('text', key='item_template')%0A return config%0A%0A def prepare_config(self, config):%0A if isinstance(config, bool):%0A config = %7B%7D%0A config.setdefault('title_template', '%7B%7Btask.name%7D%7D')%0A config.setdefault('item_template', '%7B%7Btitle%7D%7D')%0A return config%0A%0A def on_task_start(self, task, config):%0A try:%0A from gi.repository import Notify%0A except ImportError as e:%0A log.debug('Error importing Notify: %25s' %25 e)%0A raise DependencyError('notify_osd', 'gi.repository',%0A 'Notify module required. ImportError: %25s' %25 e)%0A%0A @priority(0)%0A def on_task_output(self, task, config):%0A %22%22%22%0A Configuration::%0A notify_osd:%0A title_template: Notification title, supports jinja templating, default %7B%7Btask.name%7D%7D%0A item_template: Notification body, suports jinja templating, default %7B%7Btitle%7D%7D%0A %22%22%22%0A from gi.repository import Notify%0A%0A if not Notify.init(%22Flexget%22):%0A log.error('Unable to init libnotify.')%0A return%0A%0A if not task.accepted:%0A return%0A%0A config = self.prepare_config(config)%0A body_items = %5B%5D%0A%0A for entry in task.accepted:%0A try:%0A body_items.append(entry.render(config%5B'item_template'%5D))%0A except RenderError as e:%0A log.error('Error setting body message: %25s' %25 e)%0A log.verbose(%22Send Notify-OSD notification about: %25s%22, %22 - %22.join(body_items))%0A%0A title = config%5B'title_template'%5D%0A try:%0A title = render_from_task(title, task)%0A log.debug('Setting bubble title to :%25s', title)%0A except RenderError as e:%0A log.error('Error setting title Notify-osd message: %25s' %25 e)%0A%0A n = Notify.Notification.new(title, '%5Cn'.join(body_items), None)%0A n.show()%0A%0Aregister_plugin(OutputNotifyOsd, 'notify_osd', api_ver=2)%0A
|
|
77922e6527ad0e2c223983c59329dea127cd38ef
|
Create heuristic_test
|
models/players/heuristic_test.py
|
models/players/heuristic_test.py
|
Python
| 0.00001
|
@@ -0,0 +1,94 @@
+from models.algorithm.minimax import Heuristic%0Afrom models.algorithm.minimax import Minimax%0A%0A%0A
|
|
93b2d737407389a1c4dbc67836a949663eeba948
|
Call the new presubmit checks from chrome/ code, with a blacklist.
|
chrome/PRESUBMIT.py
|
chrome/PRESUBMIT.py
|
Python
| 0.000001
|
@@ -0,0 +1,1496 @@
+# Copyright (c) 2009 The Chromium Authors. All rights reserved.%0A# Use of this source code is governed by a BSD-style license that can be%0A# found in the LICENSE file.%0A%0A%22%22%22Makes sure that the chrome/ code is cpplint clean.%22%22%22%0A%0AINCLUDE_CPP_FILES_ONLY = (%0A r'.*%5C.cc$', r'.*%5C.h$'%0A)%0A%0AEXCLUDE = (%0A # Objective C confuses everything.%0A r'.*cocoa.*',%0A r'.*_mac%5C.(cc%7Ch)$',%0A r'.*_mac_.*',%0A # All the messages files do weird multiple include trickery%0A r'.*_messages_internal%5C.h$',%0A r'render_messages.h$',%0A # Autogenerated window resources files are off limits%0A r'.*resource.h$',%0A # GTK macros in C-ish header code cause false positives%0A r'gtk_.*%5C.h$',%0A # Header trickery%0A r'.*-inl%5C.h$',%0A # Templates%0A r'sigslotrepeater%5C.h$',%0A # GCC attribute trickery%0A r'sel_main%5C.cc$',%0A # Mozilla code%0A r'mork_reader%5C.h$',%0A r'mork_reader%5C.cc$',%0A r'nss_decryptor_linux%5C.cc$',%0A # Has safe printf usage that cpplint complains about%0A r'safe_browsing_util%5C.cc$',%0A # Too much math on one line?%0A r'bloom_filter%5C.cc$',%0A # Bogus ifdef tricks%0A r'renderer_webkitclient_impl%5C.cc$',%0A r'temp_scaffolding_stubs%5C.h$',%0A # Lines %3E 100 chars%0A r'gcapi%5C.cc$',%0A)%0A%0Adef CheckChangeOnUpload(input_api, output_api):%0A results = %5B%5D%0A black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDE%0A sources = lambda x: input_api.FilterSourceFile(%0A x, white_list=INCLUDE_CPP_FILES_ONLY, black_list=black_list)%0A results.extend(input_api.canned_checks.CheckChangeLintsClean(%0A input_api, output_api, sources))%0A return results%0A
|
|
923786f0ee9e5128337997b6687374f74388c1c2
|
add leetcode Find Minimum in Rotated Sorted Array
|
leetcode/FindMinimuminRotatedSortedArray/solution.py
|
leetcode/FindMinimuminRotatedSortedArray/solution.py
|
Python
| 0
|
@@ -0,0 +1,403 @@
+# -*- coding:utf-8 -*-%0Aclass Solution:%0A # @param num, a list of integer%0A # @return an integer%0A def findMin(self, num):%0A l = 0%0A h = len(num) - 1%0A while l %3C h:%0A mid = (l + h) // 2%0A if num%5Bl%5D %3E num%5Bmid%5D:%0A h = mid%0A elif num%5Bh%5D %3C num%5Bmid%5D:%0A l = mid + 1%0A else:%0A break%0A return num%5Bl%5D%0A
|
|
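The solution halves the search range by comparing the endpoints with the midpoint: num[l] > num[mid] puts the rotation point in the left half, num[h] < num[mid] puts it in the right half, and otherwise the slice is already sorted and num[l] is the answer. A quick sanity check of the class as committed (inputs made up here):

    s = Solution()
    assert s.findMin([4, 5, 6, 7, 0, 1, 2]) == 0   # rotated
    assert s.findMin([1, 2, 3]) == 1               # not rotated: loop breaks early
    assert s.findMin([2, 1]) == 1
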
f8c7a80fc8500d53cacef904c4a7caea88263465
|
Add 20150608 question.
|
LeetCode/gas_station.py
|
LeetCode/gas_station.py
|
Python
| 0.000001
|
@@ -0,0 +1,590 @@
+%0Aclass Solution:%0A # @param %7Binteger%5B%5D%7D gas%0A # @param %7Binteger%5B%5D%7D cost%0A # @return %7Binteger%7D%0A def canCompleteCircuit(self, gas, cost):%0A diff = %5B%5D%0A i = 0%0A while i %3C len(gas):%0A diff.append(gas%5Bi%5D - cost%5Bi%5D)%0A i += 1%0A%0A leftGas, sumCost, start = 0, 0, 0%0A i = 0%0A while i %3C len(gas):%0A leftGas += diff%5Bi%5D%0A sumCost += diff%5Bi%5D%0A if sumCost %3C 0:%0A start = i + 1%0A sumCost = 0%0A i += 1%0A%0A if leftGas %3C 0:%0A return -1%0A return start%0A
|
|
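The approach leans on two facts: if sum(gas) - sum(cost) is negative no start works, and whenever the running balance dips below zero every station up to that point is disqualified as a start. A short usage sketch with made-up inputs:

    s = Solution()
    # per-station balances (gas - cost): [-2, -2, -2, 3, 3]
    print(s.canCompleteCircuit([1, 2, 3, 4, 5], [3, 4, 5, 1, 2]))   # 3
    print(s.canCompleteCircuit([2, 3, 4], [3, 4, 3]))               # -1 (total is negative)
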
08e57c27c47437b46c557f4697dd32d00f27fd7f
|
Create whatIsYourName.py
|
whatIsYourName.py
|
whatIsYourName.py
|
Python
| 0.000175
|
@@ -0,0 +1,182 @@
+a = 20%0Ab = 130%0Ac = a + b%0A%0Aprint (c)%0A%0A%0Ad = 100%0Ae = 2%0Af = d / e%0A%0Aprint (f)%0A%0A%0Ag = 34%0Ah = 47%0Ai = 82%0Aj= g + h + i%0A%0Aprint (j)%0A%0A%0Aname = input(%22What is your name? %22) %0Aprint(%22hello, %22, name)%0A
|
|
4f99ffbc3deb321ba3ff76b23bacb889b11e1f4d
|
add_to_index solved
|
Lesson4/add_to_index.py
|
Lesson4/add_to_index.py
|
Python
| 0.000001
|
@@ -0,0 +1,782 @@
+# Define a procedure, add_to_index,%0A# that takes 3 inputs:%0A%0A# - an index: %5B%5B%3Ckeyword%3E,%5B%3Curl%3E,...%5D%5D,...%5D%0A# - a keyword: String%0A# - a url: String%0A%0A# If the keyword is already%0A# in the index, add the url%0A# to the list of urls associated%0A# with that keyword.%0A%0A# If the keyword is not in the index,%0A# add an entry to the index: %5Bkeyword,%5Burl%5D%5D%0A%0Aindex = %5B%5D%0A%0Adef add_to_index(index,keyword,url):%0A for e in index:%0A if keyword in e:%0A e%5B1%5D.append(url)%0A return%0A index.append(%5Bkeyword,%5Burl%5D%5D)%0A%0A%0Aadd_to_index(index,'udacity','http://udacity.com')%0Aadd_to_index(index,'computing','http://acm.org')%0Aadd_to_index(index,'udacity','http://npr.org')%0Aprint index%0A#%3E%3E%3E %5B%5B'udacity', %5B'http://udacity.com', 'http://npr.org'%5D%5D, %0A#%3E%3E%3E %5B'computing', %5B'http://acm.org'%5D%5D%5D%0A%0A%0A%0A
|
|
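The lesson's index is a list of [keyword, [urls]] pairs, so every insert is a linear scan. For comparison only (the exercise mandates the list form), the same operation over a dict:

    index = {}

    def add_to_index(index, keyword, url):
        # average O(1) lookup vs. the O(n) scan over the list-of-pairs form
        index.setdefault(keyword, []).append(url)

    add_to_index(index, 'udacity', 'http://udacity.com')
    add_to_index(index, 'computing', 'http://acm.org')
    add_to_index(index, 'udacity', 'http://npr.org')
    # {'udacity': ['http://udacity.com', 'http://npr.org'], 'computing': ['http://acm.org']}
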
d824d2fc32774ce51e4f36d702a2a6cc131db558
|
add migration file to automatically parse citations
|
osf/migrations/0074_parse_citation_styles.py
|
osf/migrations/0074_parse_citation_styles.py
|
Python
| 0
|
@@ -0,0 +1,2934 @@
+# This migration port %60scripts/parse_citation_styles%60 to automatically parse citation styles.%0A# Additionally, this set the corresponding %60has_bibliography%60 field to %60False%60 for all citation formats whose CSL files do not%0A# include a bibliography section. As a result, all such citation formats would not show up in OSF%0A# citation widgets for users to choose.%0A#%0A# NOTE:%0A# As of December 6th, 2017, there are however THREE EXCEPTIONS:%0A# %22Bluebook Law Review%22, %22Bluebook Law Review(2)%22 and %22Bluebook Inline%22 shares a%0A# special CSL file ('website/static/bluebook.cls'), in which a bibliography section is defined,%0A# in order to render bibliographies even though their official CSL files (located in CenterForOpenScience/styles repo)%0A# do not contain a bibliography section. Therefore, This migration also automatically set %60has_bibliography%60 to %60True%60 for all styles whose titles contain %22Bluebook%22%0A%0Aimport logging%0Aimport os%0A%0Afrom django.db import migrations%0Afrom lxml import etree%0A%0Afrom osf.models.citation import CitationStyle%0Afrom website import settings%0A%0Alogger = logging.getLogger(__file__)%0A%0Adef get_style_files(path):%0A files = (os.path.join(path, x) for x in os.listdir(path))%0A return (f for f in files if os.path.isfile(f))%0A%0Adef parse_citation_styles(*args):%0A # drop all styles%0A CitationStyle.remove()%0A%0A for style_file in get_style_files(settings.CITATION_STYLES_PATH):%0A with open(style_file, 'r') as f:%0A try:%0A root = etree.parse(f).getroot()%0A except etree.XMLSyntaxError:%0A continue%0A%0A namespace = root.nsmap.get(None)%0A selector = '%7B%7B%7Bns%7D%7D%7Dinfo/%7B%7B%7Bns%7D%7D%7D'.format(ns=namespace)%0A%0A title = root.find(selector + 'title').text%0A # %60has_bibliography%60 is set to %60True%60 for Bluebook citation formats due to the special way we handle them.%0A has_bibliography = root.find('%7B%7B%7Bns%7D%7D%7D%7Btag%7D'.format(ns=namespace, tag='bibliography')) is not None or 'Bluebook' in title%0A # Required%0A fields = %7B%0A '_id': os.path.splitext(os.path.basename(style_file))%5B0%5D,%0A 'title': title,%0A 'has_bibliography': has_bibliography,%0A %7D%0A%0A # Optional%0A try:%0A fields%5B'short_title'%5D = root.find(selector + %22title-short%22).text%0A except AttributeError:%0A pass%0A%0A try:%0A fields%5B'summary'%5D = root.find(selector + 'summary').text%0A except AttributeError:%0A pass%0A%0A style = CitationStyle(**fields)%0A style.save()%0A%0Adef revert(*args):%0A # The revert of this migration simply removes all CitationStyle instances.%0A CitationStyle.remove()%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('osf', '0073_citationstyle_has_bibliography'),%0A %5D%0A%0A operations = %5B%0A migrations.RunPython(parse_citation_styles, revert),%0A %5D
|
|
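The fiddly part of the migration is namespace handling: CSL files declare a default XML namespace, so every find() needs Clark notation ({uri}tag), which is what the triple-brace format string builds. A stripped-down sketch of that lookup over a made-up CSL fragment:

    from lxml import etree

    xml = (b'<style xmlns="http://purl.org/net/xbiblio/csl">'
           b'<info><title>Demo</title></info></style>')
    root = etree.fromstring(xml)
    ns = root.nsmap.get(None)                  # default namespace URI
    title = root.find('{{{ns}}}info/{{{ns}}}title'.format(ns=ns))
    print(title.text)                          # Demo
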
9f46cf4836ad555a54dc9c47b8b2843643a878f2
|
Create migration for draft dos1 briefs to dos2
|
migrations/versions/840_migrate_draft_dos1_briefs_to_draft_dos2.py
|
migrations/versions/840_migrate_draft_dos1_briefs_to_draft_dos2.py
|
Python
| 0.000001
|
@@ -0,0 +1,531 @@
+%22%22%22Migrate draft DOS1 briefs to draft DOS2 briefs%0A%0ARevision ID: 840%0ARevises: 830%0ACreate Date: 2017-02-07 15:31:50.715832%0A%0A%22%22%22%0A%0A# revision identifiers, used by Alembic.%0Arevision = '840'%0Adown_revision = '830'%0A%0Afrom alembic import op%0A%0Adef upgrade():%0A # Change framework of draft DOS1 briefs from DOS1 (framework_id == 5) to DOS2 (framework_id == 7)%0A op.execute(%22%22%22%0A UPDATE briefs%0A SET framework_id = 7%0A WHERE framework_id = 5 AND published_at IS NULL%0A %22%22%22)%0A%0A%0Adef downgrade():%0A # No downgrade%0A pass%0A
|
|
4a3d56589cbf4e94618795d3f1bc09fa0f59e5ca
|
Add "ROV_SRS_Library.py" file containing functions for main script.
|
ROV_SRS_Library.py
|
ROV_SRS_Library.py
|
Python
| 0
|
@@ -0,0 +1,699 @@
+# ROV_SRS_Library%0A#%0A#%0A# Overview:%09A collection of helper functions used by the BeagleBone%0A#%09%09to control the ROV SRS Actuators.%0A#%0A# Authors:%09Jonathan Lee (2015)%0A#%0A%0Aimport Adafruit_BBIO.GPIO as GPIO%0Aimport Adafruit_BBIO.PWM as PWM%0A%0Adef calc_pulse_width(pin_name):%0A%09%22%22%22Calculates the pulse width of a PWM signal input.%0A%0A%09Stores the time of day on a Rising Edge and subsequent Falling Edge%0A%09event, then returns the difference in milliseconds.%0A%0A%09Args:%0A%09%09pin_name: A String containing the pin name on which the PWM%0A%09%09%09signal is expected. The pin name should be in the%0A%09%09%09format defined by the Adafruit_BBIO library.%0A%09%0A%09Returns:%0A%09%09A float containing the calculated pulse width, in%0A%09%09%09milliseconds.%0A%09%22%22%22%0A%0A%09%0A
|
|
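The library lands with calc_pulse_width documented but unimplemented. One possible body matching the docstring's rising-edge/falling-edge description; this is a sketch assuming Adafruit_BBIO's blocking wait_for_edge call and a pin already configured as an input:

    import time
    import Adafruit_BBIO.GPIO as GPIO

    def calc_pulse_width(pin_name):
        # Block until the pulse starts, timestamp it, then block until it ends.
        GPIO.wait_for_edge(pin_name, GPIO.RISING)
        rise = time.time()
        GPIO.wait_for_edge(pin_name, GPIO.FALLING)
        fall = time.time()
        return (fall - rise) * 1000.0   # seconds -> milliseconds
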
0a45c8f0632f3e8ca5502b9e4fdbaef410b07c71
|
rename settings.py
|
config.py
|
config.py
|
Python
| 0.000001
|
@@ -0,0 +1,72 @@
+# -*- coding: utf-8 -*-%0A%0Afrom flask import Flask%0A%0Aapp = Flask(__name__)%0A
|
|
1d35451387f9cab55df12f28e71824b2dbe37153
|
add back after exposing my key
|
config.py
|
config.py
|
Python
| 0
|
@@ -0,0 +1,51 @@
+ECHO_NEST_API_KEY = %22INSERT ECHO NEST API KEY HERE%22
|
|
88a1f41c99320117bedb9d9922f3737fa820768a
|
fix import in config
|
config.py
|
config.py
|
#!/usr/bin/env python
# encoding: utf-8
"""
config.py
Application configurations
db_file : the SQLite file used to store the progress
serial : settings for the serial port that the RFID reader connects to
mpd_conn : the connection details for the MPD client
gpio_pins : the ids of the GPIO input pins and their callbacks
status_light_pin : the pin used by the status light
playing : keep track of playing status. rather use this instead of calling
status() all the time"""
__version_info__ = (0, 0, 1)
__version__ = '.'.join(map(str, __version_info__))
__author__ = "Willem van der Jagt"
db_file = "%s/%s" % (os.path.dirname(os.path.realpath(__file__)), 'state.db')
serial = { "port_name" : "/dev/ttyAMA0", "baudrate" : 9600, "string_length" : 14 }
mpd_conn = { "host" : "localhost", "port" : 6600 }
gpio_pins = [
{ 'pin_id': 9, 'callback' : 'rewind' },
{ 'pin_id': 11, 'callback' : 'toggle_pause' },
{ 'pin_id': 22, 'callback' : 'volume_down' },
{ 'pin_id': 10, 'callback' : 'volume_up' }
]
status_light_pin = 23
|
Python
| 0.000001
|
@@ -481,16 +481,26 @@
ime%22%22%22%0A%0A
+import os%0A
%0A__versi
|
3bd36c410c3d7fadc911e91ded07ff08eb2d21da
|
create path if it doesn't exist
|
bakery/management/commands/build.py
|
bakery/management/commands/build.py
|
import os
import re
import six
import shutil
from django.conf import settings
from optparse import make_option
from django.core import management
from django.core.urlresolvers import get_callable
from django.core.exceptions import ViewDoesNotExist
from django.core.management.base import BaseCommand, CommandError
custom_options = (
make_option(
"--build-dir",
action="store",
dest="build_dir",
default='',
help="Specify the path of the build directory. \
Will use settings.BUILD_DIR by default."
),
make_option(
"--skip-static",
action="store_true",
dest="skip_static",
default=False,
help="Skip collecting the static files when building."
),
make_option(
"--skip-media",
action="store_true",
dest="skip_media",
default=False,
help="Skip collecting the media files when building."
),
)
class Command(BaseCommand):
help = 'Bake out a site as flat files in the build directory'
option_list = BaseCommand.option_list + custom_options
build_unconfig_msg = "Build directory unconfigured. Set BUILD_DIR in \
settings.py or provide it with --build-dir"
views_unconfig_msg = "Bakery views unconfigured. Set BAKERY_VIEWS in \
settings.py or provide a list as arguments."
def handle(self, *args, **options):
"""
Making it happen.
"""
self.verbosity = int(options.get('verbosity'))
# Figure out what build directory to use
if options.get("build_dir"):
self.build_dir = options.get("build_dir")
settings.BUILD_DIR = self.build_dir
else:
if not hasattr(settings, 'BUILD_DIR'):
raise CommandError(self.build_unconfig_msg)
self.build_dir = settings.BUILD_DIR
# Destroy the build directory, if it exists
if self.verbosity > 1:
six.print_("Creating build directory")
if os.path.exists(self.build_dir):
shutil.rmtree(self.build_dir)
# Then recreate it from scratch
os.makedirs(self.build_dir)
# Build up static files
if not options.get("skip_static"):
if self.verbosity > 1:
six.print_("Creating static directory")
management.call_command(
"collectstatic",
interactive=False,
verbosity=0
)
target_dir = os.path.join(self.build_dir, settings.STATIC_URL[1:])
if os.path.exists(settings.STATIC_ROOT) and settings.STATIC_URL:
if getattr(settings, 'BAKERY_GZIP', False):
for (dirpath, dirnames, filenames) in os.walk(settings.STATIC_ROOT):
# regex to match against. CSS, JS, JSON files
pattern = re.compile('(\.css|\.js|\.json)$')
for filename in filenames:
print os.path.join(dirpath, filename)
# reference to the original file
og_file = os.path.join(dirpath, filename)
# get the relative path that we want to copy into
rel_path = os.path.relpath(dirpath, settings.STATIC_ROOT)
dest_path = os.path.join(target_dir, rel_path[2:])
# run the regex match
m = pattern.search(filename)
if m:
print "gzipping %s" % filename
# create the new path in the build directory
f_in = open(og_file, 'rb')
# copy the file to gzip compressed output
f_out = gzip.open(os.path.join(dest_path, filename), 'wb', mtime=0)
f_out.writelines(f_in)
f_out.close()
f_in.close()
# otherwise, just copy the file
else:
shutil.copy(og_file, os.path.join(dest_path, filename))
# if gzip isn't enabled, just copy the tree straight over
else:
shutil.copytree(settings.STATIC_ROOT, target_dir)
# If they exist in the static directory, copy the robots.txt
# and favicon.ico files down to the root so they will work
# on the live website.
robot_src = os.path.join(target_dir, 'robots.txt')
favicon_src = os.path.join(target_dir, 'favicon.ico')
if os.path.exists(robot_src):
shutil.copy(robot_src, os.path.join(
settings.BUILD_DIR,
'robots.txt'
)
)
if os.path.exists(favicon_src):
shutil.copy(favicon_src, os.path.join(
settings.BUILD_DIR,
'favicon.ico',
)
)
# Build the media directory
if not options.get("skip_media"):
if self.verbosity > 1:
six.print_("Building media directory")
if os.path.exists(settings.MEDIA_ROOT) and settings.MEDIA_URL:
shutil.copytree(
settings.MEDIA_ROOT,
os.path.join(self.build_dir, settings.MEDIA_URL[1:])
)
# Figure out what views we'll be using
if args:
view_list = args
else:
if not hasattr(settings, 'BAKERY_VIEWS'):
raise CommandError(self.views_unconfig_msg)
view_list = settings.BAKERY_VIEWS
# Then loop through and run them all
for view_str in view_list:
if self.verbosity > 1:
six.print_("Building %s" % view_str)
try:
view = get_callable(view_str)
view().build_method()
except (TypeError, ViewDoesNotExist):
raise CommandError("View %s does not work." % view_str)
|
Python
| 0.000002
|
@@ -3379,24 +3379,139 @@
l_path%5B2:%5D)%0A
+ if not os.path.exists(dest_path):%0A os.mkdirs(dest_path)%0A
@@ -4309,41 +4309,17 @@
le,
-os.path.join(dest_path, filename)
+dest_path
)%0A
|
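One caveat in the diff: os.mkdirs does not exist in the standard library; the recursive-create function is os.makedirs. The intended guard, with dest_path as in the diff, looks like this (on Python 3.2+ the exist_ok flag also removes the check-then-create race):

    import os

    if not os.path.exists(dest_path):
        os.makedirs(dest_path)              # makedirs, not mkdirs

    # Python 3.2+ equivalent:
    os.makedirs(dest_path, exist_ok=True)
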
ffca5ea26c02170cc5edf6eea25ec9ef2c0c72bf
|
Disable trix serializer tests with Jython
|
test/test_trix_serialize.py
|
test/test_trix_serialize.py
|
#!/usr/bin/env python
import unittest
from rdflib.graph import ConjunctiveGraph
from rdflib.term import URIRef, Literal
from rdflib.graph import Graph
try:
from io import BytesIO
except ImportError:
from StringIO import StringIO as BytesIO
class TestTrixSerialize(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def testSerialize(self):
s1 = URIRef('store:1')
r1 = URIRef('resource:1')
r2 = URIRef('resource:2')
label = URIRef('predicate:label')
g1 = Graph(identifier = s1)
g1.add((r1, label, Literal("label 1", lang="en")))
g1.add((r1, label, Literal("label 2")))
s2 = URIRef('store:2')
g2 = Graph(identifier = s2)
g2.add((r2, label, Literal("label 3")))
g = ConjunctiveGraph()
for s,p,o in g1.triples((None, None, None)):
g.addN([(s,p,o,g1)])
for s,p,o in g2.triples((None, None, None)):
g.addN([(s,p,o,g2)])
r3 = URIRef('resource:3')
g.add((r3, label, Literal(4)))
r = g.serialize(format='trix')
g3 = ConjunctiveGraph()
g3.parse(BytesIO(r), format='trix')
for q in g3.quads((None,None,None)):
# TODO: Fix once getGraph/getContext is in conjunctive graph
if isinstance(q[3].identifier, URIRef):
tg=Graph(store=g.store, identifier=q[3].identifier)
else:
# BNode, this is a bit ugly
# we cannot match the bnode to the right graph automagically
# here I know there is only one anonymous graph,
# and that is the default one, but this is not always the case
tg=g.default_context
self.assertTrue(q[0:3] in tg)
if __name__=='__main__':
unittest.main()
|
Python
| 0
|
@@ -1709,16 +1709,196 @@
in tg)%0A%0A
+import platform%0Aif platform.system() == 'Java':%0A from nose import SkipTest%0A raise SkipTest('Jython issues - %22JavaSAXParser%22 object has no attribute %22start_namespace_decl%22')%0A%0A
if __nam
|
ba6dc4269f96903f863748a779521d2bd8803d4f
|
Create Process.py
|
Samples/Process.py
|
Samples/Process.py
|
Python
| 0
|
@@ -0,0 +1,2882 @@
+__author__ = 'Marius'%0A%0Afrom TM1py import TM1Queries, Process%0Aimport uuid%0Aimport unittest%0A%0A%0Aclass TestAnnotationMethods(unittest.TestCase):%0A q = TM1Queries(ip='', port=8008, user='admin', password='apple', ssl=True)%0A random_string = str(uuid.uuid4()).replace('-', '_')%0A p_none = Process(name='unittest_none_' + random_string, datasource_type='None')%0A p_ascii = Process(name='unittest_ascii_' + random_string, datasource_type='ASCII',%0A datasource_ascii_delimiter_char=',',%0A datasource_data_source_name_for_server='C:%5CData%5Csimple_csv.csv',%0A datasource_data_source_name_for_client='C:%5CData%5Csimple_csv.csv')%0A # variables%0A p_ascii.add_variable('v_1', 'Numeric')%0A p_ascii.add_variable('v_2', 'Numeric')%0A p_ascii.add_variable('v_3', 'Numeric')%0A p_ascii.add_variable('v_4', 'Numeric')%0A # parameters%0A p_ascii.add_parameter('p_Year', 'year?', '2016')%0A p_view = Process(name='unittest_view_' + random_string, datasource_type='TM1CubeView', datasource_view='view1',%0A datasource_data_source_name_for_client='Plan_BudgetPlan',%0A datasource_data_source_name_for_server='Plan_BudgetPlan')%0A p_odbc = Process(name='unittest_odbc_' + random_string, datasource_type='ODBC')%0A%0A # create Process%0A def test1_create_process(self):%0A self.q.create_process(self.p_none)%0A self.q.create_process(self.p_ascii)%0A self.q.create_process(self.p_view)%0A self.q.create_process(self.p_odbc)%0A%0A # get Process%0A def test2_get_process(self):%0A p1 = self.q.get_process(self.p_ascii.name)%0A self.assertEqual(p1.body, self.p_ascii.body)%0A p2 = self.q.get_process(self.p_none.name)%0A self.assertEqual(p2.body, self.p_none.body)%0A p3 = self.q.get_process(self.p_view.name)%0A self.assertEqual(p3.body, self.p_view.body)%0A p4 = self.q.get_process(self.p_odbc.name)%0A x = p4.datasource_password = None%0A y = self.p_odbc.datasource_password = None%0A self.assertEqual(x, y)%0A%0A # update process%0A def test3_update_process(self):%0A # get%0A p = self.q.get_process(self.p_ascii.name)%0A # modify%0A p.set_data_procedure(Process.auto_generated_string() + %22x = 'Hi this is a test';%22)%0A # update on Server%0A self.q.update_process(p)%0A # get again%0A p_ascii_updated = self.q.get_process(p.name)%0A # assert%0A self.assertNotEqual(p_ascii_updated.data_procedure, self.p_ascii.data_procedure)%0A%0A # delete process%0A def test4_delete_process(self):%0A self.q.delete_process(self.p_none.name)%0A self.q.delete_process(self.p_ascii.name)%0A self.q.delete_process(self.p_view.name)%0A self.q.delete_process(self.p_odbc.name)%0A%0A def test_5_logout(self):%0A self.q.logout()%0A%0A%0Aif __name__ == '__main__':%0A unittest.main()%0A
|
|
f26bdfa1ff0a388fb7bd2d473cf7b4b03fa61f6d
|
add unit test
|
doajtest/unit/event_consumers/test_application_publisher_revision_notify.py
|
doajtest/unit/event_consumers/test_application_publisher_revision_notify.py
|
Python
| 0.000001
|
@@ -0,0 +1,2945 @@
+from portality import models%0Afrom portality import constants%0Afrom portality.bll import exceptions%0Afrom doajtest.helpers import DoajTestCase%0Afrom doajtest.fixtures import ApplicationFixtureFactory%0Aimport time%0A%0Afrom portality.events.consumers.application_publisher_revision_notify import ApplicationPublisherRevisionNotify%0A%0A%0Aclass TestApplicationPublisherRevisionNotify(DoajTestCase):%0A def setUp(self):%0A super(TestApplicationPublisherRevisionNotify, self).setUp()%0A%0A def tearDown(self):%0A super(TestApplicationPublisherRevisionNotify, self).tearDown()%0A%0A def test_consumes(self):%0A source = ApplicationFixtureFactory.make_application_source()%0A%0A event = models.Event(constants.EVENT_APPLICATION_STATUS, context=%7B%22application%22: %22abcd%22, %22old_status%22: %22in progress%22, %22new_status%22: %22revisions_required%22%7D)%0A assert ApplicationPublisherRevisionNotify.consumes(event)%0A%0A event = models.Event(constants.EVENT_APPLICATION_STATUS,%0A context=%7B%22application%22: %22abcd%22, %22old_status%22: %22revisions_required%22, %22new_status%22: %22revisions_required%22%7D)%0A assert not ApplicationPublisherRevisionNotify.consumes(event)%0A%0A event = models.Event(%22test:event%22, context=%7B%22application%22 : %22abcd%22%7D)%0A assert not ApplicationPublisherRevisionNotify.consumes(event)%0A%0A event = models.Event(constants.EVENT_APPLICATION_STATUS)%0A assert not ApplicationPublisherRevisionNotify.consumes(event)%0A%0A def test_consume_success(self):%0A self._make_and_push_test_context(%22/%22)%0A%0A source = ApplicationFixtureFactory.make_application_source()%0A app = models.Application(**source)%0A app.save()%0A%0A acc = models.Account()%0A acc.set_id(%22publisher%22)%0A acc.set_email(%22test@example.com%22)%0A acc.save()%0A%0A event = models.Event(constants.EVENT_APPLICATION_STATUS, context=%7B%22application%22: %22abcdefghijk%22, %22old_status%22: %22in progress%22, %22new_status%22: %22revisions_required%22%7D)%0A ApplicationPublisherRevisionNotify.consume(event)%0A%0A time.sleep(2)%0A ns = models.Notification.all()%0A assert len(ns) == 1%0A%0A n = ns%5B0%5D%0A assert n.who == %22publisher%22, %22Expected: %7B%7D, Received: %7B%7D%22.format(%22publisher%22, n.who)%0A assert n.created_by == ApplicationPublisherRevisionNotify.ID, %22Expected: %7B%7D, Received: %7B%7D%22.format(ApplicationPublisherRevisionNotify.ID, n.created_by)%0A assert n.classification == constants.NOTIFICATION_CLASSIFICATION_STATUS_CHANGE, %22Expected: %7B%7D, Received: %7B%7D%22.format(constants.NOTIFICATION_CLASSIFICATION_STATUS_CHANGE, n.classification)%0A assert n.message is not None%0A assert n.action is None%0A assert not n.is_seen()%0A%0A def test_consume_fail(self):%0A event = models.Event(constants.EVENT_APPLICATION_ASSED_ASSIGNED, context=%7B%22application%22: %22abcd%22%7D)%0A with self.assertRaises(exceptions.NoSuchObjectException):%0A ApplicationPublisherRevisionNotify.consume(event)%0A%0A
|
|
d24e8c746359169058e9c0577c2f843695ca3b55
|
Add 2-instance-with-EBS test.
|
heat/tests/functional/test_WordPress_2_Instances_With_EBS.py
|
heat/tests/functional/test_WordPress_2_Instances_With_EBS.py
|
Python
| 0
|
@@ -0,0 +1,2476 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22); you may%0A# not use this file except in compliance with the License. You may obtain%0A# a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS, WITHOUT%0A# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the%0A# License for the specific language governing permissions and limitations%0A#%0A%0Aimport util%0Aimport verify%0Aimport nose%0Afrom nose.plugins.attrib import attr%0A%0Afrom heat.common import context%0Afrom heat.engine import manager%0Aimport unittest%0A%0A%0A@attr(speed='slow')%0A@attr(tag=%5B'func', 'wordpress', '2instance', 'ebs',%0A 'WordPress_2_Instances_With_EBS.template'%5D)%0Aclass WordPress2InstancesWithEBS(unittest.TestCase):%0A def setUp(self):%0A template = 'WordPress_2_Instances_With_EBS.template'%0A%0A self.stack = util.Stack(template, 'F17', 'x86_64', 'cfntools')%0A%0A self.WikiDatabase = util.Instance('WikiDatabase')%0A self.WikiDatabase.check_cfntools()%0A self.WikiDatabase.wait_for_provisioning()%0A%0A self.WebServer = util.Instance('WebServer')%0A self.WebServer.check_cfntools()%0A self.WebServer.wait_for_provisioning()%0A%0A def test_instance(self):%0A # ensure wordpress was installed%0A self.assertTrue(self.WebServer.file_present%0A ('/etc/wordpress/wp-config.php'))%0A print %22Wordpress installation detected%22%0A%0A # Verify the output URL parses as expected, ie check that%0A # the wordpress installation is operational%0A stack_url = self.stack.get_stack_output(%22WebsiteURL%22)%0A print %22Got stack output WebsiteURL=%25s, verifying%22 %25 stack_url%0A ver = verify.VerifyStack()%0A self.assertTrue(ver.verify_wordpress(stack_url))%0A%0A # Check EBS volume is present and mounted%0A stdin, stdout, sterr = self.WikiDatabase.exec_command(%0A 'grep vdc /proc/mounts')%0A result = stdout.readlines().pop().rstrip()%0A self.assertTrue(len(result))%0A print %22Checking EBS volume is attached : %25s%22 %25 result%0A devname = result.split()%5B0%5D%0A self.assertEqual(devname, '/dev/vdc1')%0A mountpoint = result.split()%5B1%5D%0A self.assertEqual(mountpoint, '/var/lib/mysql')%0A%0A self.stack.cleanup()%0A
|
|
24c5248d578774d13d69b001fad8f50e2eac192a
|
Add tracepoint_variable_sized_types.py
|
scripts/tracepoint_variable_sized_types.py
|
scripts/tracepoint_variable_sized_types.py
|
Python
| 0.000007
|
@@ -0,0 +1,1151 @@
+# This script lists all the types in the kernel's tracepoint format files%0A# which appear with more than one size. This script's output should be%0A# compared to the code in TracepointFormatParser::adjust_integer_types()%0A%0Aimport glob%0A%0Afield_types = %7B%7D%0A%0Afor format_file in glob.iglob(%22/sys/kernel/debug/tracing/events/*/*/format%22):%0A for line in open(format_file):%0A if not line.startswith(%22%5Ctfield:%22):%0A continue%0A%0A size_section = line.split(%22;%22)%5B2%5D.split(%22:%22)%0A if size_section%5B0%5D != %22%5Ctsize%22:%0A continue%0A size_val = size_section%5B1%5D%0A%0A field_section = line.split(%22;%22)%5B0%5D.split(%22:%22)%0A if field_section%5B0%5D != %22%5Ctfield%22:%0A continue%0A field_val = field_section%5B1%5D%0A if %22%5B%22 in field_val or %22*%22 in field_val:%0A continue%0A%0A field_type = %22 %22.join(field_val.split()%5B:-1%5D)%0A%0A if field_type not in field_types:%0A field_types%5Bfield_type%5D = set()%0A field_types%5Bfield_type%5D.add(size_val)%0A%0Afor t in sorted(field_types):%0A sizes = field_types%5Bt%5D%0A if len(sizes) %3E 1:%0A sizes_str = %22,%22.join(sorted(sizes))%0A print(f%22%7Bt%7D: %7Bsizes_str%7D%22)%0A
|
|
90b01102538ead893e516449fc52ad4befb8a77c
|
fix order of osx packer logs
|
joulupukki/worker/lib/osxpacker.py
|
joulupukki/worker/lib/osxpacker.py
|
import os
import subprocess
import pecan
import shutil
from joulupukki.common.logger import get_logger_job
from joulupukki.common.datamodel.job import Job
class OsxPacker(object):
def __init__(self, builder, config, job_id):
self.config = config
self.builder = builder
self.distro = "osx"
self.source_url = builder.source_url
self.source_type = builder.source_type
self.branch = builder.build.branch
self.folder = builder.folder
self.job = Job.fetch(self.builder.build, job_id)
self.folder_output = self.job.get_folder_output()
self.job_tmp_folder = self.job.get_folder_tmp()
if not os.path.exists(self.folder_output):
os.makedirs(self.folder_output)
if not os.path.exists(self.job_tmp_folder):
os.makedirs(self.job_tmp_folder)
self.logger = get_logger_job(self.job)
def set_status(self, status):
self.job.set_status(status)
def set_build_time(self, build_time):
self.job.set_build_time(build_time)
def run(self):
steps = (
('reading_conf', self.reading_conf),
('setup', self.setup),
('compiling', self.compile_),
# ('transfering', self.transfert_output),
)
for step_name, step_function in steps:
self.set_status(step_name)
if step_function() is not True:
self.logger.debug("Task failed during step: %s", step_name)
# Set status
self.set_status('failed')
# Transfert output to central joulupukki
self.transfert_output()
return False
# Save package name in build.cfg
if ('info' in self.config and 'name' in self.config['info'] and
self.builder.build.package_name is None):
self.builder.build.package_name = self.config['info']['name']
self.builder.build._save()
# Transfert output to central joulupukki
self.transfert_output()
# Set status
self.set_status('succeeded')
return True
def reading_conf(self):
self.logger.info("Checking conf")
try:
self.dependencies = self.config['brew_deps']
self.commands = self.config['commands']
self.transfer_files = self.config['transfer']['files']
except KeyError:
self.logger.error("Malformed .packer.yml file")
return False
return True
def setup(self):
# Installing dependencies
for depen in self.dependencies:
cmd_list = ["brew", "install"]
cmd_list.extend(depen.split(" "))
self.logger.info("Installing dependency: %s" % depen)
process = subprocess.Popen(
cmd_list,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
stdout, stderr = process.communicate()
self.logger.debug(stdout)
self.logger.info(stderr)
if process.returncode:
self.logger.error("Error in setup: %d" % process.returncode)
return False
return True
def compile_(self):
self.logger.info("Start compiling")
# Compiling ring-daemon
cd_command = ["cd %s" % self.job.get_folder_tmp()]
self.commands = cd_command + self.commands
long_command = " && "
long_command = long_command.join(self.commands)
long_command = long_command % {
"prefix_path": pecan.conf.workspace_path
}
self.logger.info("Compiling")
process = subprocess.Popen(
long_command,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True
)
stdout, stderr = process.communicate()
self.logger.debug(stdout)
self.logger.info(stderr)
if process.returncode:
self.logger.error("Error in setup: %d" % process.returncode)
return False
return True
def transfert_output(self):
self.logger.info("Start package transfert")
# move dmg
try:
for f in self.transfer_files:
origin = os.path.join(self.job.get_folder_tmp(), f)
destination = os.path.join(self.builder.build.get_folder_path(),
"output",
"osx",
f.split('/')[-1])
os.rename(origin, destination)
except Exception:
self.logger.error("Can't move output file(s)")
#return False
# Delete useless files
try:
shutil.rmtree(self.job.get_folder_path() + "/tmp")
except Exception as e:
self.logger.error("Couldn't remove tmp job files: " + e)
host = pecan.conf.origin_host
user = pecan.conf.origin_user
key = pecan.conf.origin_key
# TODO: Correct source and dest (package_dir and path), output/*
# TODO: Add the transfert of jobs/*
path = self.builder.origin_build_path
package_dir = self.builder.build.get_folder_path() + "/*"
# transfert_command = "scp -r -i %s %s %s@%s:%s" % (
transfert_command = 'rsync -az -e "ssh -i %s" %s %s@%s:%s --exclude jobs/*/tmp' % (
key,
package_dir,
user,
host,
path
)
self.logger.info(transfert_command)
command_res = self.exec_cmd(transfert_command)
self.logger.info(command_res)
return command_res
def exec_cmd(self, cmds):
process = subprocess.Popen(
cmds,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True
)
stdout, stderr = process.communicate()
self.logger.debug(stdout)
self.logger.info(stderr)
if process.returncode:
self.logger.error("Error in setup: %d" % process.returncode)
return False
return True
|
Python
| 0
|
@@ -3450,16 +3450,28 @@
commands
+ + %5B%22 2%3E&1%22%5D
%0A
@@ -3487,16 +3487,21 @@
mand = %22
+ 2%3E&1
&& %22%0A
@@ -3798,44 +3798,8 @@
PE,%0A
- stderr=subprocess.PIPE,%0A
@@ -3878,42 +3878,8 @@
e()%0A
- self.logger.debug(stdout)%0A
@@ -3894,35 +3894,35 @@
.logger.info(std
-err
+out
)%0A if pro
|
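The fix interleaves stderr into stdout by appending 2>&1 to the shell command and logging only stdout. Since the command already runs through subprocess, the same merge can be requested without editing the command string; a sketch of that alternative:

    import subprocess

    process = subprocess.Popen(
        long_command,                    # as built in compile_()
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,        # merge stderr into stdout, in arrival order
        shell=True,
    )
    stdout, _ = process.communicate()    # stderr is None; everything is in stdout
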
673dac79cbab6de0be5650d46840a3bc9858b2b4
|
Add a helper script to clear the test bucket
|
tests/clear_qiniu_bucket.py
|
tests/clear_qiniu_bucket.py
|
Python
| 0.000001
|
@@ -0,0 +1,824 @@
+import os%0Afrom qiniu import Auth, BucketManager%0A%0A%0AQINIU_ACCESS_KEY = os.environ.get('QINIU_ACCESS_KEY')%0AQINIU_SECRET_KEY = os.environ.get('QINIU_SECRET_KEY')%0AQINIU_BUCKET_NAME = os.environ.get('QINIU_BUCKET_NAME')%0AQINIU_BUCKET_DOMAIN = os.environ.get('QINIU_BUCKET_DOMAIN')%0A%0A%0Adef main():%0A auth = Auth(QINIU_ACCESS_KEY, QINIU_SECRET_KEY)%0A bucket = BucketManager(auth)%0A%0A while True:%0A ret, eof, info = bucket.list(QINIU_BUCKET_NAME, limit=100)%0A%0A if ret is None:%0A print info%0A break%0A%0A for item in ret%5B'items'%5D:%0A name = item%5B'key'%5D%0A print %22Deleting %25s ...%22 %25 name%0A ret, info = bucket.delete(QINIU_BUCKET_NAME, name)%0A if ret is None:%0A print info%0A if eof:%0A break%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
da7deee98bb8d6a92d2ab1b8ad5c3e550a24fc83
|
add `Config` class tests
|
tests/config/test_config.py
|
tests/config/test_config.py
|
Python
| 0.000016
|
@@ -0,0 +1,861 @@
+# -*- coding: utf-8 -*-%0A%0Aimport os%0Aimport tempfile%0Aimport unittest%0A%0Afrom mock import patch%0A%0Afrom opoona.config import Config%0A%0Aclass TestInvalidSyntaxException(unittest.TestCase):%0A @patch('os.path.expanduser')%0A def test_init(self, expanduser):%0A expanduser.return_value = 'HOME/.opoona.yaml'%0A config = Config()%0A self.assertEqual(config.config_path, 'HOME/.opoona.yaml')%0A expanduser.assert_called_with('~/.opoona.yaml')%0A%0A def test_load(self):%0A f = tempfile.NamedTemporaryFile(delete=False)%0A yaml = '''%5C%0Agithub:%0A token: XXX%0A'''%0A f.write(yaml.encode('utf-8'))%0A f.close()%0A%0A config = Config()%0A config.config_path = f.name%0A config.load()%0A%0A self.assertIsInstance(config%5B'github'%5D, dict)%0A self.assertEqual(config%5B'github'%5D%5B'token'%5D, 'XXX')%0A%0A os.remove(f.name)%0A
|
|
ae92573d2c86fa1e83b636c17c443cc8f97f4040
|
Add unittest for ElementaryLine.
|
tests/elementary_line_test.py
|
tests/elementary_line_test.py
|
Python
| 0
|
@@ -0,0 +1,1241 @@
+#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A%0A%22%22%22 Test case for ElementaryLine.%0A%22%22%22%0A%0Aimport unittest%0A%0Afrom catplot.ep_components.ep_lines import ElementaryLine%0A%0A%0Aclass ElementaryLineTest(unittest.TestCase):%0A%0A def setUp(self):%0A self.maxDiff = True%0A%0A def test_construction_and_query(self):%0A %22%22%22 Test we can construct ElementaryLine object correctly.%0A %22%22%22%0A line = ElementaryLine(%5B0.0, 1.2, 0.7%5D, n=2)%0A%0A ret_x = line.x.tolist()%0A ref_x = %5B0.0, 1.0, 1.0, 2.0, 2.0, 3.0%5D%0A self.assertListEqual(ret_x, ref_x)%0A%0A ret_y = line.y.tolist()%0A ref_y = %5B0.0, 0.0, -3.4426554548552387e-18, 0.7, 0.7, 0.7%5D%0A self.assertListEqual(ret_y, ref_y)%0A%0A self.assertIsNone(line.rxn_equation)%0A self.assertEqual(line.color, %22#000000%22)%0A self.assertEqual(line.shadow_color, %22#595959%22)%0A self.assertEqual(line.shadow_depth, 7)%0A self.assertEqual(line.hline_length, 1.0)%0A self.assertEqual(line.interp_method, %22spline%22)%0A self.assertEqual(line.n, 2)%0A self.assertEqual(line.peak_width, 1.0)%0A%0Aif %22__main__%22 == __name__: %0A suite = unittest.TestLoader().loadTestsFromTestCase(ElementaryLineTest)%0A unittest.TextTestRunner(verbosity=2).run(suite) %0A%0A
|