| commit (stringlengths 40-40) | subject (stringlengths 1-3.25k) | old_file (stringlengths 4-311) | new_file (stringlengths 4-311) | old_contents (stringlengths 0-26.3k) | lang (stringclasses 3 values) | proba (float64 0-1) | diff (stringlengths 0-7.82k) |
|---|---|---|---|---|---|---|---|
dcd02e0a7b626111bc0fc344df9f6fff2de832ae
|
Add a (bad) example of missing method.
|
examples/missingmethod.py
|
examples/missingmethod.py
|
Python
| 0.000074
|
@@ -0,0 +1,449 @@
+#!/usr/bin/python3

"""Send an invalid request with missing method member."""

from simpleclient import send_data_to_socket

EXAMPLE = {
    "params": {
        "filter": {
            'store': 'catalog',
            'schema': 'product',
            'id': '704e418e-682d-4ade-99be-710f2208102e'
        }
    }
}


def main():
    """Send the example to the simple client."""
    send_data_to_socket(EXAMPLE)


if __name__ == '__main__':
    main()
|
|
57b9fcfa5b200ec971f8f3070447cbc98026f5a5
|
add example of variable-length array branch
|
examples/tree/vararray.py
|
examples/tree/vararray.py
|
Python
| 0.000004
|
@@ -0,0 +1,786 @@
+#!/usr/bin/env python
"""
=================================
Trees with variable-length arrays
=================================

This example demonstrates how to create a tree with a variable-length array.
"""
print(__doc__)

from rootpy.tree import Tree, TreeModel, IntCol, FloatArrayCol
from rootpy.io import root_open

class Event(TreeModel):
    num_vals = IntCol()
    vals = FloatArrayCol(10, length_name='num_vals')

rfile = root_open('test.root', 'w')
tree = Tree('events', model=Event)

for i in range(10):
    tree.num_vals = i + 1
    for j in range(i + 1):
        tree.vals[j] = j
    tree.fill()

tree.write()
tree.vals.reset()
tree.csv()
rfile.close()
print("===")

# CSV output from tree read from file should match above output
root_open('test.root', 'r').events.csv()
|
|
8ab83988f66270c76b28f36e8263f029011e773b
|
use Task & Job, a Task has many Jobs
|
farmer/models.py
|
farmer/models.py
|
#coding=utf8

import os
import time
import json

from datetime import datetime
from commands import getstatusoutput

from django.db import models


class Job(models.Model):

    # hosts, like web_servers:host1 .
    inventories = models.TextField(null = False, blank = False)

    # 0, do not use sudo; 1, use sudo .
    sudo = models.BooleanField(default = True)

    # for example: ansible web_servers -m shell -a 'du -sh /tmp'
    # the 'du -sh /tmp' is cmd here
    cmd = models.TextField(null = False, blank = False)

    # return code of this job
    rc = models.IntegerField(null = True)

    result = models.TextField(null = True)

    start = models.DateTimeField(null = True)
    end = models.DateTimeField(null = True)

    @property
    def cmd_shell(self):
        option = self.sudo and '--sudo' or ''
        option += ' -f 20 -m shell'
        return 'ansible %s %s -a "%s"' % (self.inventories, option, self.cmd)

    def run(self):
        if os.fork() == 0:
            #if 0 == 0:
            tmpdir = '/tmp/ansible_%s' % time.time()
            os.mkdir(tmpdir)
            self.start = datetime.now()
            self.save()
            cmd_shell = self.cmd_shell + ' -t ' + tmpdir
            status, output = getstatusoutput(cmd_shell)
            self.end = datetime.now()
            result = {}
            for f in os.listdir(tmpdir):
                result[f] = json.loads(open(tmpdir + '/' + f).read())
            self.rc = status
            self.result = json.dumps(result)
            self.save()
            os.system('rm -rf ' + tmpdir)

    def __unicode__(self):
        return self.cmd_shell
|
Python
| 0.000021
|
@@ -149,11 +149,12 @@
 ass
-Job
+Task
 (mod
@@ -594,52 +594,8 @@
 )

-    result = models.TextField(null = True)

@@ -963,47 +963,199 @@
-tmpdir = '/tmp/ansible_%s' % time.time(
+self.start = datetime.now()
            self.save()

            # initial jobs
            cmd_shell = self.cmd_shell + ' --list-hosts'
            status, output = getstatusoutput(cmd_shell
 )
@@ -1168,87 +1168,253 @@
+h
 os
-.mkdir(tmpdir)
            self.start = datetime.now()
            self.save(
+ts = map(str.strip, output.splitlines())
            for host in hosts:
                self.job_set.add(Job(host = host, cmd = self.cmd))

            # run ansible
            tmpdir = '/tmp/ansible_%s' % self.id
            os.mkdir(tmpdir
 )
@@ -1515,32 +1515,62 @@
 utput(cmd_shell)
+

            self.rc = status
 
            sel
@@ -1608,19 +1608,8 @@
-result = {}
 
@@ -1672,11 +1672,8 @@
 sult
-[f]
 = j
@@ -1729,192 +1729,923 @@
-self.rc = status
            self.result = json.dumps(result)
            self.save()
            os.system('rm -rf ' + tmpdir)

    def __unicode__(self):
        return self.cmd_shell
+    job = self.job_set.get(host = f)
                job.start = result.get('start')
                job.end = result.get('end')
                job.rc = result.get('rc')
                job.stdout = result.get('stdout')
                job.stderr = result.get('stderr')
                job.save()

            self.save()

            # clean tmp dir
            os.system('rm -rf ' + tmpdir)

    def __unicode__(self):
        return self.cmd_shell

class Job(models.Model):

    task = models.ForeignKey(Task)

    host = models.TextField(null = False, blank = False)
    cmd = models.TextField(null = False, blank = False)

    start = models.DateTimeField(null = True)
    end = models.DateTimeField(null = True)

    rc = models.IntegerField(null = True)
    stdout = models.TextField(null = True)
    stderr = models.TextField(null = True)

    def __unicode__(self):
        return self.host + ' : ' + self.cmd
 
|
d692508e9c6fba847f3bb179bbfd3684e6ebcef0
|
Add py solution for 384. Shuffle an Array
|
py/shuffle-an-array.py
|
py/shuffle-an-array.py
|
Python
| 0.99853
|
@@ -0,0 +1,801 @@
+from random import randint
class Solution(object):

    def __init__(self, nums):
        """
        :type nums: List[int]
        """
        self.nums = nums

    def reset(self):
        """
        Resets the array to its original configuration and return it.
        :rtype: List[int]
        """
        return self.nums

    def shuffle(self):
        """
        Returns a random shuffling of the array.
        :rtype: List[int]
        """
        out = self.nums[:]
        n = len(self.nums)
        for i in xrange(n - 1):
            r = randint(i, n - 1)
            if r != i:
                out[r], out[i] = out[i], out[r]
        return out



# Your Solution object will be instantiated and called as such:
# obj = Solution(nums)
# param_1 = obj.reset()
# param_2 = obj.shuffle()
|
|
6c7b9a0315bf12fb3e40ddd49f43fe8bec5c6132
|
Create 0001_0.py
|
pylyria/0001/0001_0.py
|
pylyria/0001/0001_0.py
|
Python
| 0.019732
|
@@ -0,0 +1,632 @@
+# -*- coding: utf-8 -*-
#!/usr/bin/env python
# Problem 0001: As an independent Apple Store App developer, you want to run a limited-time sale and generate activation codes (or coupons) for your app. How do you use Python to generate 200 activation codes (or coupons)?
import random
import string

def activation_code(id,length=16):
    prefix = hex(int(id))[2:]+'V'
    length = length - len(prefix)
    chars=string.ascii_uppercase+string.digits
    return prefix + ''.join([random.choice(chars) for i in range(length)])

def get_id(code):
    return str(int(code.upper(), 16))

if __name__ == '__main__':
    for i in range(10, 500, 23):
        code = activation_code(i)
        id_hex = code.split('L')[0]
        id = get_id(id_hex)
        print code,id
|
|
706a88810abc1be1fcfa799b7bb46a1c8e774d59
|
add pygithub.login_github()
|
codekit/pygithub.py
|
codekit/pygithub.py
|
Python
| 0.000001
|
@@ -0,0 +1,850 @@
+"""
pygithub based functions intended to replace the github3.py based functions in
codetools.
"""

import logging
from public import public
from github import Github
import codekit.codetools as codetools

logging.basicConfig()
logger = logging.getLogger('codekit')


@public
def login_github(token_path=None, token=None):
    """Log into GitHub using an existing token.

    Parameters
    ----------
    token_path : str, optional
        Path to the token file. The default token is used otherwise.

    token: str, optional
        Literial token string. If specifified, this value is used instead of
        reading from the token_path file.

    Returns
    -------
    gh : :class:`github.GitHub` instance
        A GitHub login instance.
    """

    token = codetools.github_token(token_path=token_path, token=token)
    return Github(token)
|
|
b68c8eab696f5950c4cd528bf60506469c97d08a
|
Create fixer.py
|
fixer.py
|
fixer.py
|
Python
| 0.000001
|
@@ -0,0 +1,2839 @@
+from datetime import datetime
from typing import List, TypeVar

import requests

BASE_URL = 'https://api.fixer.io/'
CURRENCY_CHOICE = ["EUR", "AUD", "BGN", "BRL", "CAD", "CHF", "CNY", "CZK",
                   "DKK", "GBP", "HKD", "HRK", "HUF", "IDR", "ILS",
                   "INR", "JPY", "KRW", "MXN", "MYR", "NOK", "NZD",
                   "PHP", "PLN", "RON", "RUB", "SEK", "SGD", "THB",
                   "TRY", "USD", "ZAR"]

D = TypeVar('D', datetime, str)


class Fixer(object):

    """
    class definining the api

    date:
        Either a date in "yyyy-mm-dd" format (available from 1999)
        either "latest" for latest date
        default = "latest"

    base:
        A currency in CURRENCY_CHOICE list.
        Will setup the base currency for conversion
        default = "EUR"

    Will raise a ValueError exception

    """

    def __init__(self, date: str = "latest", base: str = "EUR",
                 symbols: List[str] = None) -> None:
        super(Fixer, self).__init__()
        self.symbols_string = ''
        if self.currency_available(base, "Base currency"):
            self.base = base

        if symbols:
            self.symbols = []

            for cur in symbols:
                if self.currency_available(cur, "Symbols currency"):
                    self.symbols.append(cur)

            self.symbols_string = 'symbols={}'.format(','.join(self.symbols))

        self.check_date(date)

    def currency_available(self, cur: str, method: str = "") -> bool:
        if cur not in CURRENCY_CHOICE:
            # Raise a ValueError exception
            raise ValueError("Currency %s not available through this api" % cur,
                             method)
        else:
            return True

    def check_date(self, dt: D) -> None:
        if type(dt) == datetime:
            self.date = dt
        elif type(dt) == str:
            if dt == "latest":
                self.date = dt
            else:
                try:
                    self.date = datetime.strptime(dt, "%Y-%m-%d")
                except ValueError as e:
                    raise e

                if not self.date.year >= 1999:
                    raise ValueError("Data available from 1999, %s is to old" % self.date.strftime("%Y-%m-%d"))

                if self.date > datetime.now():
                    raise ValueError("%s is in the future, data cannot be found" % self.date.strftime("%Y-%m-%d"))
        else:
            raise ValueError("%s does not match required date format" % dt)

    def convert(self) -> str:
        url = '%s%s?%s&base=%s' % (BASE_URL, self.date, self.symbols_string, self.base)
        r = requests.get(url).json()

        if 'error' in r:
            raise ReferenceError(r['error'])
        return r
|
|
16ec8043799c7aac029c5528f1c00f96070434d4
|
Move build view names function to utils
|
foundry/utils.py
|
foundry/utils.py
|
Python
| 0
|
@@ -0,0 +1,1139 @@
+from django.conf import settings


def _build_view_names_recurse(url_patterns=None):
    """
    Returns a tuple of url pattern names suitable for use as field choices
    """
    if not url_patterns:
        urlconf = settings.ROOT_URLCONF
        url_patterns = __import__(settings.ROOT_URLCONF, globals(), locals(), \
            ['urlpatterns', ], -1).urlpatterns

    result = []
    for pattern in url_patterns:
        try:
            #result.append((pattern.name, pattern.name.title().replace('_', \
            #    ' ')))
            if pattern.name is not None:
                result.append((pattern.name, pattern.name))
        except AttributeError:
            # If the pattern itself is an include, recurively fetch it
            # patterns. Ignore admin patterns.
            if not pattern.regex.pattern.startswith('^admin'):
                try:
                    result += _build_view_names_recurse(pattern.url_patterns)
                except AttributeError:
                    pass
    return result


def get_view_choices():
    result = _build_view_names_recurse()
    result.sort()
    return result
|
|
f13045b5f933078225b89405a786c14da34d0af5
|
Add ClamAV script to analyze HTTPS traffic for viruses
|
scripts/clamav.py
|
scripts/clamav.py
|
Python
| 0
|
@@ -0,0 +1,1087 @@
+import pyclamd
from libmproxy.flow import decoded

#http://www.eicar.org/85-0-Download.html

clamd = pyclamd.ClamdUnixSocket()
try:
    # test if server is reachable
    clamd.ping()
except AttributeError, pyclamd.ConnectionError:
    # if failed, test for network socket
    clamd = pyclamd.ClamdNetworkSocket()
    clamd.ping() #fails instantly if we dont get a proper connection

print "ClamAV running: %s" % clamd.version()

def response(context, flow):
    with decoded(flow.response):
        clamd_result = clamd.scan_stream(flow.response.content)
        if clamd_result:
            print "Virus detected: ",clamd_result
            flow.response.content = "HoneyProxy has detected a virus and stopped this page from loading: %s" % str(clamd_result["stream"])
            flow.response.headers["Content-Length"] = [str(len(flow.response.content))]
            flow.response.headers["Content-Type"] = ["text/html"]
            del flow.response.headers["Content-Disposition"]
            del flow.response.headers["Content-Encoding"]
            flow.response.code = 403
            flow.response.msg = "Forbidden"
|
|
805b393c51d9fa82f0dd28aa502378dfcf80924b
|
Add a binary demo.
|
reggie/demos/binary.py
|
reggie/demos/binary.py
|
Python
| 0
|
@@ -0,0 +1,1452 @@
+import os
import numpy as np
import mwhutils.plotting as mp
import mwhutils.grid as mg

import reggie as rg


if __name__ == '__main__':
    cdir = os.path.abspath(os.path.dirname(__file__))
    data = np.load(os.path.join(cdir, 'xy.npz'))

    # create the GP and optimize the model
    gp1 = rg.make_gp(0.1, 1.0, 0.1)
    gp1.add_data(data['X'], data['y'])
    gp1.optimize()

    xmin = data['X'].min()
    xmax = data['X'].max()

    like = rg.likelihoods.Probit()
    kern = gp1._post.kern.copy()
    mean = gp1._post.mean.copy()

    f = gp1.sample_f(100)
    X = mg.uniform([(xmin, xmax)], 1000)
    Y = like.sample(f.get(X))

    gp2 = rg.GP(like, kern, mean, inference='laplace')
    gp2.add_data(X, Y)
    gp2.optimize()

    # create the figure
    fig = mp.figure(1, 1, 2)
    fig.hold()

    # get the posterior moments for the first model
    n = 500
    x = np.linspace(xmin, xmax, n)
    mu, s2 = gp1.predict(x[:, None])

    fig[0].plot_banded(x, mu, 2*np.sqrt(s2))
    fig[0].scatter(*gp1.data)
    fig[0].xlabel = 'inputs, X'
    fig[0].ylabel = 'outputs, Y'
    fig[0].title = 'Basic GP'

    # get the posterior moments for the second model
    mu, s2 = gp2.predict(x[:, None])
    fig[1].plot_banded(x, mu, 2*np.sqrt(s2))
    fig[1].scatter(*gp2.data)
    fig[1].plot(x, f.get(x[:, None]))
    fig[1].xlabel = 'inputs, X'
    fig[1].title = 'Binary GP\n(with sampled function)'

    # show the figure
    fig.draw()
    mp.show()
|
|
def9592885ab4093973e8547de5deac3b7022515
|
Create MaxSubarray_003.py
|
leetcode/053-Maximum-Subarray/MaxSubarray_003.py
|
leetcode/053-Maximum-Subarray/MaxSubarray_003.py
|
Python
| 0.000053
|
@@ -0,0 +1,275 @@
+class Solution:
    # @param {integer[]} nums
    # @return {integer}
    def maxSubArray(self, nums):
        res, tmp = nums[0], nums[0]
        for i in range(1, len(nums)):
            tmp = max(tmp + nums[i], nums[i])
            res = max(res, tmp)

        return res
|
|
67300787f1f910065a88396f99f0d4dd25bec2d1
|
apply monkeypatch
|
buildbot.tac
|
buildbot.tac
|
import os

from twisted.application import service
from buildbot.master import BuildMaster

basedir = '.'
rotateLength = 10000000
maxRotatedFiles = 10
configfile = 'master.cfg'

# Default umask for server
umask = None

# if this is a relocatable tac file, get the directory containing the TAC
if basedir == '.':
    import os.path
    basedir = os.path.abspath(os.path.dirname(__file__))

# note: this line is matched against to check that this is a buildmaster
# directory; do not edit it.
application = service.Application('buildmaster')

from twisted.python.logfile import LogFile
from twisted.python.log import ILogObserver, FileLogObserver
logfile = LogFile.fromFullPath(os.path.join(basedir, "twistd.log"), rotateLength=rotateLength,
                               maxRotatedFiles=maxRotatedFiles)
application.setComponent(ILogObserver, FileLogObserver(logfile).emit)

m = BuildMaster(basedir, configfile, umask)
m.setServiceParent(application)
m.log_rotation.rotateLength = rotateLength
m.log_rotation.maxRotatedFiles = maxRotatedFiles
|
Python
| 0.000001
|
@@ -4,16 +4,71 @@
 ort os

+from monkeypatch import apply_patches
apply_patches()

 from twi
|
8078bdd4c239770860ab05c2b10a919455ea84f3
|
Correct return value from track_info
|
mycroft/skills/playback_control/__init__.py
|
mycroft/skills/playback_control/__init__.py
|
import sys
from os.path import dirname, abspath, basename

from mycroft.skills.media import MediaSkill
from adapt.intent import IntentBuilder
from mycroft.messagebus.message import Message
from mycroft.configuration import ConfigurationManager

import subprocess
import time
import requests
from os.path import dirname

from mycroft.util.log import getLogger

config = ConfigurationManager.get().get('audio')

logger = getLogger(abspath(__file__).split('/')[-2])
__author__ = 'forslund'

sys.path.append(abspath(dirname(__file__)))

if config.get('audio.mopidy', 'False') == 'True':
    MopidyService = __import__('mopidy_service').MopidyService
if config.get('audio.vlc', 'False') == 'True':
    VlcService = __import__('vlc_service').VlcService


class Mpg123Service():
    def __init__(self, config, emitter):
        self.config = config
        self.process = None
        self.emitter = emitter
        self.emitter.on('Mpg123ServicePlay', self._play)

    @property
    def name(self):
        return self.config.get('audio.mpg123.name', 'mpg123')

    def supported_uris(self):
        return ['file', 'http']

    def clear_list(self):
        self.tracks = []

    def add_list(self, tracks):
        self.tracks = tracks
        logger.info("Track list is " + str(tracks))

    def _play(self, message):
        logger.info('Mpg123Service._play')
        track = self.tracks[self.index]
        self.process = subprocess.Popen(['mpg123', track])
        self.process.communicate()
        self.process = None
        self.index += 1
        if self.index >= len(self.tracks):
            self.emitter.emit(Message('Mpg123ServicePlay'))

    def play(self):
        logger.info('Call Mpg123ServicePlay')
        self.index = 0
        self.emitter.emit(Message('Mpg123ServicePlay'))

    def stop(self):
        logger.info('Mpg123ServiceStop')
        self.clear_list()
        if self.process:
            self.process.terminate()
            self.process = None

    def pause(self):
        pass

    def resume(self):
        pass

    def next(self):
        self.process.terminate()

    def previous(self):
        pass

    def lower_volume(self):
        pass

    def restore_volume(self):
        pass

    def track_info(self):
        return {}


class PlaybackControlSkill(MediaSkill):
    def __init__(self):
        super(PlaybackControlSkill, self).__init__('Playback Control Skill')
        self.volume_is_low = False
        self.current = None
        logger.info('Playback Control Inited')
        self.service = []

    def initialize(self):
        logger.info('initializing Playback Control Skill')
        super(PlaybackControlSkill, self).initialize()
        self.load_data_files(dirname(__file__))

        if config.get('audio.vlc', 'False') == 'True':
            logger.info('starting VLC service')
            self.service.append(VlcService(config, self.emitter))
        if config.get('audio.mopidy', 'False') == 'True':
            logger.info('starting Mopidy service')
            self.service.append(MopidyService(config, self.emitter))
        logger.info('starting Mpg123 service')
        self.service.append(Mpg123Service(config, self.emitter))

        self.emitter.on('MycroftAudioServicePlay', self._play)
        self.emitter.on('MycroftAudioServiceTrackInfo', self._track_info)

    def play(self, tracks):
        logger.info('play')
        self.stop()
        uri_type = tracks[0].split(':')[0]
        logger.info('uri_type: ' + uri_type)
        for s in self.service:
            logger.info(str(s))
            if uri_type in s.supported_uris():
                service = s
                break
        else:
            return
        logger.info('Clear list')
        service.clear_list()
        logger.info('Add tracks' + str(tracks))
        service.add_list(tracks)
        logger.info('Playing')
        service.play()
        self.current = service

    def _play(self, message):
        logger.info('MycroftAudioServicePlay')
        logger.info(message.metadata['tracks'])
        tracks = message.metadata['tracks']
        self.play(tracks)

    def stop(self, message=None):
        logger.info('stopping all playing services')
        if self.current:
            self.current.stop()
            self.current = None

    def handle_next(self, message):
        if self.current:
            self.current.next()

    def handle_prev(self, message):
        if self.current:
            self.current.previous()

    def handle_pause(self, message):
        if self.current:
            self.current.pause()

    def handle_play(self, message):
        """Resume playback if paused"""
        if self.current:
            self.current.resume()

    def lower_volume(self, message):
        logger.info('lowering volume')
        if self.current:
            self.current.lower_volume()
            self.volume_is_low = True

    def restore_volume(self, message):
        logger.info('maybe restoring volume')
        if self.current:
            self.volume_is_low = False
            time.sleep(2)
            if not self.volume_is_low:
                logger.info('restoring volume')
                self.current.restore_volume()

    def handle_currently_playing(self, message):
        if self.current:
            track_info = self.current.track_info()
            if track_info is not None:
                data = {'current_track': track_info['name'],
                        'artist': track_info['album']}
                self.speak_dialog('currently_playing', data)
                time.sleep(6)

    def _track_info(self, message):
        if self.current:
            track_info = self.current.track_info
        else:
            track_info = {}
        self.emitter.emit(Message('MycroftAudioServiceTrackInfoReply',
                                  metadata=track_info))


def create_skill():
    return PlaybackControlSkill()
|
Python
| 0.000001
|
@@ -5276,321 +5276,14 @@
-if self.current:
            track_info = self.current.track_info()
            if track_info is not None:
                data = {'current_track': track_info['name'],
                        'artist': track_info['album']}
                self.speak_dialog('currently_playing', data)
                time.sleep(6)
+return
 

@@ -5333,32 +5333,32 @@
 f self.current:
-
 trac
@@ -5389,16 +5389,18 @@
 ack_info
+()
 
|
e85bab14ab8058ba14d1f73dd2d47d8c38318c48
|
Add db_sqlite.py
|
db_sqlite.py
|
db_sqlite.py
|
Python
| 0.000019
|
@@ -0,0 +1,14 @@
+import sqlite3
|
|
266a3a3ddb99afc6fa696bdd2b7d3dc770b921ea
|
Add enroller talking to redis
|
spanky/lib/enroll.py
|
spanky/lib/enroll.py
|
Python
| 0
|
@@ -0,0 +1,582 @@
+import redis


class Enroller(object):

    def __init__(self, config):
        self.config = config

    @property
    def conn(self):
        if not hasattr(self, '_conn'):
            self._conn = redis.StrictRedis(host='localhost', port=6379, db=0)
        return self._conn

    def join(self, name, host, port):
        self.conn.lpush(name, '%s:%s' % (host, port))

    def enrolled(self, name):
        return self.conn.lrange(name, 0, -1)


def main():
    e = Enroller({})
    e.join('foo', 'bar', 8080)
    print(e.enrolled('foo'))

if __name__ == '__main__':
    main()
|
|
95ceeb0af4e549e0d211b4e1ba6157d26ad5e44d
|
Fix race between MQ and mongo setting QueuedAt
|
sync_scheduler.py
|
sync_scheduler.py
|
from tapiriik.database import db
from tapiriik.messagequeue import mq
from tapiriik.sync import Sync
from datetime import datetime
from pymongo.read_preferences import ReadPreference
import kombu
import time

Sync.InitializeWorkerBindings()

producer = kombu.Producer(Sync._channel, Sync._exchange)

while True:
	queueing_at = datetime.utcnow()
	users = db.users.find(
			{
				"NextSynchronization": {"$lte": datetime.utcnow()}
			},
			{
				"_id": True,
				"SynchronizationHostRestriction": True
			},
			read_preference=ReadPreference.PRIMARY
		)
	scheduled_ids = set()
	for user in users:
		producer.publish({"user_id": str(user["_id"]), "queued_at": queueing_at.isoformat()}, routing_key=user["SynchronizationHostRestriction"] if "SynchronizationHostRestriction" in user and user["SynchronizationHostRestriction"] else "")
		scheduled_ids.add(user["_id"])
	print("Scheduled %d users at %s" % (len(scheduled_ids), datetime.utcnow()))
	db.users.update({"_id": {"$in": list(scheduled_ids)}}, {"$set": {"QueuedAt": queueing_at}, "$unset": {"NextSynchronization": True}}, multi=True)
	time.sleep(1)
|
Python
| 0.000001
|
@@ -200,16 +200,71 @@
 ort time
+
from tapiriik.settings import MONGO_FULL_WRITE_CONCERN
 

Sync.I
@@ -630,12 +630,200 @@
 s =
-set(
+[x["_id"] for x in users]
	db.users.update({"_id": {"$in": scheduled_ids}}, {"$set": {"QueuedAt": queueing_at}, "$unset": {"NextSynchronization": True}}, multi=True, w=MONGO_FULL_WRITE_CONCERN
 )
	f
@@ -1079,41 +1079,8 @@
 "")
-		scheduled_ids.add(user["_id"])
 	pri
@@ -1156,153 +1156,9 @@
 )))
+ 
 	
-db.users.update({"_id": {"$in": list(scheduled_ids)}}, {"$set": {"QueuedAt": queueing_at}, "$unset": {"NextSynchronization": True}}, multi=True)
 
	ti
|
e532a4a5ba6706974dc1245b269f18fa0e82cb66
|
Create duplicates.py
|
module/duplicates.py
|
module/duplicates.py
|
Python
| 0.000391
|
@@ -0,0 +1,185 @@
+import os
import sys


def search(dir)
    for root, subdirs, files in os.walk(dir):
        print('Dir(%s)' % root)

        for filename in files:
            print('- File(%s)' % r)
|
|
c8271b02c3636aa9620cce8b85c823ff0ec35c4a
|
Add a mobile device test of the Skype website
|
examples/test_skype_site.py
|
examples/test_skype_site.py
|
Python
| 0.000001
|
@@ -0,0 +1,1526 @@
+"""
This is a mobile device test for Chromium-based browsers (such as MS Edge)
Usage: pytest test_skype_site.py --mobile --browser=edge

Default mobile settings for User Agent and Device Metrics if not specifed:
    User Agent: --agent="Mozilla/5.0 (Linux; Android 9; Pixel 3 XL)"
    CSS Width, CSS Height, Pixel-Ratio: --metrics="411,731,3"
"""
from seleniumbase import BaseCase


class SkypeWebsiteTestClass(BaseCase):

    def test_skype_website_on_mobile(self):
        if not self.mobile_emulator:
            print("\n This test is only for mobile devices / emulators!")
            print(" (Usage: '--mobile' with a Chromium-based browser.)")
            self.skip_test("Please rerun this test using '--mobile!'!")
        self.open("https://www.skype.com/en/")
        self.assert_text("Install Skype", "div.appInfo")
        self.highlight("div.appBannerContent")
        self.highlight('[itemprop="url"]')
        self.highlight("h1")
        self.highlight_click('[title="Download Skype"]')
        self.assert_element('[aria-label="Microsoft"]')
        self.assert_text("Download Skype", "h1")
        self.highlight("div.appBannerContent")
        self.highlight("h1")
        self.assert_text("Skype for Mobile", "h2")
        self.highlight("h2")
        self.highlight("#get-skype-0")
        self.highlight_click('[title*="Select from list"]')
        self.highlight('[data-bi-id*="android"]')
        self.highlight('[data-bi-id*="ios"]')
        self.highlight('[data-bi-id*="windows10"]')
|
|
cd551b16069f2814a0ae8bf5f2e104b09b7f9a90
|
Fix feed refresh interval computation.
|
oneflow/core/tasks/refresh.py
|
oneflow/core/tasks/refresh.py
|
# -*- coding: utf-8 -*-
u"""
Copyright 2013-2014 Olivier Cortès <oc@1flow.io>.

This file is part of the 1flow project.

1flow is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of
the License, or (at your option) any later version.

1flow is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public
License along with 1flow. If not, see http://www.gnu.org/licenses/

"""

import logging

from constance import config
from celery import task

from django.utils.translation import ugettext_lazy as _

from ..models.reldb import (
    MailAccount,
    BaseFeed, basefeed_refresh_task
)

from oneflow.base.utils import RedisExpiringLock
from oneflow.base.utils.dateutils import (now, timedelta,
                                          naturaldelta, benchmark)

LOGGER = logging.getLogger(__name__)

REFRESH_ALL_FEEDS_LOCK_NAME = 'refresh_all_feeds'
REFRESH_ALL_MAILACCOUNTS_LOCK_NAME = 'check_email_accounts'


@task(name='oneflow.core.tasks.refresh_all_feeds', queue='refresh')
def refresh_all_feeds(limit=None, force=False):
    u""" Refresh all feeds (RSS/Mail/Twitter…). """

    if config.FEED_FETCH_DISABLED:
        # Do not raise any .retry(), this is a scheduled task.
        LOGGER.warning(u'Feed refresh disabled in configuration.')
        return

    # Be sure two refresh operations don't overlap, but don't hold the
    # lock too long if something goes wrong. In production conditions
    # as of 20130812, refreshing all feeds takes only a moment:
    # [2013-08-12 09:07:02,028: INFO/MainProcess] Task
    #       oneflow.core.tasks.refresh_all_feeds succeeded in 1.99886608124s.
    #
    my_lock = RedisExpiringLock(
        REFRESH_ALL_FEEDS_LOCK_NAME,
        expire_time=config.FEED_GLOBAL_REFRESH_INTERVAL * 60
        - config.FEED_GLOBAL_REFRESH_INTERVAL
    )

    if not my_lock.acquire():
        if force:
            my_lock.release()
            my_lock.acquire()
            LOGGER.warning(_(u'Forcing all feed refresh…'))

        else:
            # Avoid running this task over and over again in the queue
            # if the previous instance did not yet terminate. Happens
            # when scheduled task runs too quickly.
            LOGGER.warning(u'refresh_all_feeds() is already locked, aborting.')
            return

    # This should bring us a Polymorphic Query to refresh all feeds types.
    feeds = BaseFeed.objects.filter(is_active=True, is_internal=False)

    if limit:
        feeds = feeds[:limit]

    with benchmark('refresh_all_feeds()'):

        try:
            count = 0
            mynow = now()

            for feed in feeds:

                if feed.refresh_lock.is_locked():
                    LOGGER.debug(u'Feed %s already locked, skipped.', feed)
                    continue

                interval = timedelta(seconds=feed.fetch_interval)

                feed.refresh_lock.acquire()

                if feed.date_last_fetch is None:

                    basefeed_refresh_task.delay(feed.id)

                    LOGGER.info(u'Launched immediate refresh of feed %s which '
                                u'has never been refreshed.', feed)

                elif force or feed.date_last_fetch + interval < mynow:

                    how_late = feed.date_last_fetch + interval - mynow
                    how_late = how_late.days * 86400 + how_late.seconds

                    late = feed.date_last_fetch + interval < mynow

                    basefeed_refresh_task.delay(feed.id, force)

                    LOGGER.info(u'Launched refresh of feed %s (%s %s).',
                                feed, naturaldelta(how_late),
                                u'late' if late else u'earlier')

                    count += 1

        finally:
            # HEADS UP: in case the system is overloaded and feeds refresh()
            #           tasks don't complete fast enough, the current task
            #           will overload it even more. Thus, we intentionaly
            #           don't release the lock to avoid over-re-launched
            #           global tasks to feed the refresh queue with useless
            #           double-triple-Nble individual tasks.
            #
            # my_lock.release()
            pass

    LOGGER.info(u'Launched %s refreshes out of %s feed(s) checked.',
                count, feeds.count())


# Allow to release the lock manually for testing purposes.
refresh_all_feeds.lock = RedisExpiringLock(REFRESH_ALL_FEEDS_LOCK_NAME)


@task(name='oneflow.core.tasks.refresh_all_mailaccounts', queue='refresh')
def refresh_all_mailaccounts(force=False):
    """ Check all unusable e-mail accounts. """

    if config.MAIL_ACCOUNT_REFRESH_DISABLED:
        # Do not raise any .retry(), this is a scheduled task.
        LOGGER.warning(u'E-mail accounts check disabled in configuration.')
        return

    accounts = MailAccount.objects.unusable()

    my_lock = RedisExpiringLock(REFRESH_ALL_MAILACCOUNTS_LOCK_NAME,
                                expire_time=30 * (accounts.count() + 2))

    if not my_lock.acquire():
        if force:
            my_lock.release()
            my_lock.acquire()
            LOGGER.warning(_(u'Forcing check of email accounts…'))

        else:
            # Avoid running this task over and over again in the queue
            # if the previous instance did not yet terminate. Happens
            # when scheduled task runs too quickly.
            LOGGER.warning(u'refresh_all_mailaccounts() is already locked, '
                           u'aborting.')
            return

    with benchmark('refresh_all_mailaccounts()'):

        try:
            for account in accounts:
                try:
                    account.test_connection()
                    account.update_mailboxes()

                except:
                    pass

        finally:
            my_lock.release()

    LOGGER.info(u'Launched %s checks on unusable accounts out of %s total.',
                accounts.count(), MailAccount.objects.all().count())


# Allow to release the lock manually for testing purposes.
refresh_all_mailaccounts.lock = RedisExpiringLock(
    REFRESH_ALL_MAILACCOUNTS_LOCK_NAME)
|
Python
| 0
|
@@ -3473,16 +3473,460 @@
', feed)
+
                if feed.fetch_interval > 86399:
                    interval_days = feed.fetch_interval / 86400
                    interval_seconds = feed.fetch_interval - (
                        interval_days * 86400)

                    interval = timedelta(days=interval_days,
                                         seconds=interval_seconds)

                else:
                    interval = timedelta(seconds=feed.fetch_interval)
 

|
ea54e294d68962ec370dc1dc2381720f53ce6f01
|
Add local_settings.py stuff
|
voiexp/local_settings_example.py
|
voiexp/local_settings_example.py
|
Python
| 0.000001
|
@@ -0,0 +1,250 @@
+SECRET_KEY = '.uadjgfi67&%€yuhgsdfakjhgayv&/%yugjhdfsc$y53'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False

ALLOWED_HOSTS = ['127.0.0.1', 'some.example.com', ]

LANGUAGE_CODE = 'fi-fi'
TIME_ZONE = 'Europe/Helsinki'
|
|
8daf4237aa84a6b032e7627afb31b29a44f47ddc
|
Add another .py file for progress bar
|
ProgressBar.py
|
ProgressBar.py
|
Python
| 0.000001
|
@@ -0,0 +1,2456 @@
+import sys, time
from CmdFormat import CmdFormat

class ProgressBar(CmdFormat):
    def __init__(self, count = 0, total = 0, width = 80, bWithheader=True, bWithPercent=True,barColor='white'):
        super(CmdFormat, self).__init__()
        self.count = count
        self.total = total
        self.width = width
        self.bWithheader = bWithheader
        self.bWithPercent = bWithPercent
        self.__barColor = barColor
    def __Set_bar_color(self):
        if type(self.__barColor) != type('a'):
            raise TypeError("Wrong argument type of __Set_bar_color(color) in class ProgressBar!")
        if self.__barColor=='red':
            self.set_cmd_color(4|8)
        if self.__barColor=='green':
            self.set_cmd_color(2|8)
        if self.__barColor=='blue':
            self.set_cmd_color(1|10)
        if self.__barColor=='yellow':
            self.set_cmd_color(6|8)
    def Move(self, s):
        self.count += 1
        sys.stdout.write(' '*(self.width + 20) + '\r')
        sys.stdout.flush()
        print(s)
        progress = self.width * self.count / self.total
        if(self.bWithheader):sys.stdout.write('{0:3}/{1:3}:'.format(self.count, self.total))
        percent = progress * 100.0 / self.total

        if(self.bWithPercent):
            self.__Set_bar_color()
            sys.stdout.write('[' + int(progress)*'>' + int(self.width - progress)*'-' + ']' + ' %.2f' % progress + '%' + '\r')
            self.reset_color()
        else:
            self.__Set_bar_color()
            sys.stdout.write('[' + int(progress)*'>' + int(self.width - progress)*'-' + ']'+'\r')
            self.reset_color()
        if progress == self.width:
            sys.stdout.write('\n')
        sys.stdout.flush()
    def Set_cmd_color(self,color):
        if type(color) != type('a'):
            raise TypeError("Wrong argument type of __Set_bar_color(color) in class ProgressBar!")
        if color=='red':
            self.set_cmd_color(4|8)
        if color=='green':
            self.set_cmd_color(2|8)
        if color=='blue':
            self.set_cmd_color(1|10)
        if color=='yellow':
            self.set_cmd_color(6|8)
=
if __name__ == '__main__':
    bar = ProgressBar(total = 15,bWithheader=True,bWithPercent=True,barColor='green')
    for i in range(15):
        bar.Set_cmd_color('red')
        bar.Move('sdfds ')
        time.sleep(1)
|
|
5e3b2ca14c4cc421e47d2709fe52390ee51eee11
|
Create S3toSQS.py
|
SQS/S3toSQS.py
|
SQS/S3toSQS.py
|
Python
| 0.000001
|
@@ -0,0 +1,2918 @@
+"""
Copyright 2016 Nicholas Christian
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
    http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

# Just a note: SQS seems to be not meant to send large amounts of data through it because the number of messages
# per batch are limited to 10 and the maximum size of a batch is 256 KB.

# If you are sending large amounts of data make sure the lambda function settings are reflective of the amount
# of time and resources this needs.

from __future__ import print_function

from gzip import open as g_open
from string import printable
from sys import getsizeof
from urllib import unquote_plus

from boto3 import client

SQS = client('sqs')
S3 = client('s3')

QUEUE_NAME = ""
MESSAGE_RETENTION_PERIOD = '' # In seconds


def memos(event, context):
    print("Loading Function...")

    bucket = event['Records'][0]['s3']['bucket']['name']
    key = unquote_plus(event['Records'][0]['s3']['object']['key']).decode('utf8')

    S3.download_file(bucket, key, '/tmp/%s' % (key.split('/')[-1]))

    # If queue already exists it will just fetch the url of the queue.
    queue_url = SQS.create_queue(QueueName=QUEUE_NAME,
                                 Attributes={'MessageRetentionPeriod': MESSAGE_RETENTION_PERIOD})['QueueUrl']

    with g_open('/tmp/%s' % (key.split('/')[-1]), 'r+') as f:
        batch_of_mess = []
        for identifier, content in enumerate(f.readlines()):
            # Gets rid of odd unicode characters that SQS does not like and the message would fail to send.
            batch_of_mess.append({'Id': str(identifier),
                                  'MessageBody': ''.join(l for l in content if l in printable)})

            # Maximum size of a batch is 256 KB and/or 10 messages.
            if getsizeof(batch_of_mess) >= 225 or len(batch_of_mess) == 10:

                message = SQS.send_message_batch(QueueUrl=queue_url,
                                                 Entries=batch_of_mess)

                # SQS does not throw up an error if a message fails to send.
                if 'Failed' in message:
                    print(message)

                del batch_of_mess[:]
        # Takes the remainder of the messages and sends them to SQS.
        if batch_of_mess:
            last_message = SQS.send_message_batch(QueueUrl=queue_url,
                                                  Entries=batch_of_mess)
            if 'Failed' in last_message:
                print(last_message)

    print("Done!")
|
|
f0e733a3f62d37dc25d70b334dd3e1e46936477d
|
Add missing non-important migration
|
homedisplay/info_transportation/migrations/0016_auto_20150304_2159.py
|
homedisplay/info_transportation/migrations/0016_auto_20150304_2159.py
|
Python
| 0.003209
|
@@ -0,0 +1,610 @@
+# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('info_transportation', '0015_line_type'),
    ]

    operations = [
        migrations.AlterField(
            model_name='line',
            name='type',
            field=models.CharField(default=b'bus', help_text=b'Liikennev\xc3\xa4linetyyppi', max_length=10, verbose_name=b'Tyyppi', choices=[(b'bus', b'bus'), (b'tram', b'tram'), (b'train', b'train'), (b'metro', b'metro')]),
            preserve_default=True,
        ),
    ]
|
|
468a4c181768f0dcfcaa40201c26015b7c94e39e
|
add random gesture test
|
home/moz4r/Test/random.py
|
home/moz4r/Test/random.py
|
Python
| 0
|
@@ -0,0 +1,900 @@
+import random
from time import sleep

i01 = Runtime.createAndStart("i01", "InMoov")
i01.startHead("COM3")

sleep(1)


def MoveHeadRandomize():
	if IcanMoveHeadRandom==1:
		i01.moveHead(random.randint(50,130),random.randint(50,130))


MoveHeadTimer = Runtime.start("MoveHeadTimer","Clock")
MoveHeadTimer.setInterval(1001)


def MoveHead(timedata):

	MoveHeadRandomize()
	MoveHeadTimer.setInterval(random.randint(600,1200))

def MoveHeadStopped():

	if IcanMoveHeadRandom==1:
		i01.moveHead(90,90)
		HeadSide.moveTo(90)


def MoveHeadStart():
	MoveHeadRandomize()

MoveHeadTimer.addListener("pulse", python.name, "MoveHead")
MoveHeadTimer.addListener("clockStopped", python.name, "MoveHeadStopped")
MoveHeadTimer.addListener("clockStarted", python.name, "MoveHeadStart")

MoveHeadTimer.startClock()

#start to move head random 10 seconds
IcanMoveHeadRandom=1
sleep(10)
IcanMoveHeadRandom=0
|
|
bb63af8be9abf1bcc8f3716bbd1a1a375685533f
|
Add a new feed bot, abusehelper.contrib.abusech.feodoccbot, for catching abuse.ch's Feodo Tracker RSS feed.
|
abusehelper/contrib/abusech/feodoccbot.py
|
abusehelper/contrib/abusech/feodoccbot.py
|
Python
| 0
|
@@ -0,0 +1,861 @@
+from abusehelper.core import bot

from . import host_or_ip, split_description, AbuseCHFeedBot


class FeodoCcBot(AbuseCHFeedBot):
    feed_type = "c&c"

    feeds = bot.ListParam(default=["https://feodotracker.abuse.ch/feodotracker.rss"])

    # The timestamp in the title appears to be the firstseen timestamp,
    # skip including it as the "source time".
    parse_title = None

    def parse_description(self, description):
        got_version = False

        for key, value in split_description(description):
            if key == "version":
                yield "malware", "feodo." + value.strip().lower()
                got_version = True
            elif key == "host":
                yield host_or_ip(value)

        if not got_version:
            yield "malware", "feodo"


if __name__ == "__main__":
    FeodoCcBot.from_command_line().execute()
|
|
e3d1c0d76db56689f9654c85e63f24c352c88a01
|
Use consistent modname calculation.
|
fedmsg/commands/collectd.py
|
fedmsg/commands/collectd.py
|
# This file is part of fedmsg.
# Copyright (C) 2012 Red Hat, Inc.
#
# fedmsg is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# fedmsg is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with fedmsg; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Authors: Ralph Bean <rbean@redhat.com>
#
import datetime
import logging
import socket
import time
import sys

import pygments
import pygments.lexers
import pygments.formatters

import fedmsg
import fedmsg.encoding
import fedmsg.meta
from fedmsg.commands import BaseCommand
from fedmsg.consumers import FedmsgConsumer

from moksha.hub.api import PollingProducer
from kitchen.iterutils import iterate


class CollectdConsumer(FedmsgConsumer):
    config_key = "fedmsg.commands.collectd.enabled"
    validate_messages = False

    def __init__(self, hub):
        self.hub = hub

        # The consumer should pick up *all* messages.
        self.topic = self.hub.config.get('topic_prefix', 'org.fedoraproject')
        if not self.topic.endswith('*'):
            self.topic += '*'

        super(CollectdConsumer, self).__init__(hub)

        self._dict = dict([
            (p.__name__.lower(), 0) for p in fedmsg.meta.processors
        ])
        self.host = socket.gethostname()

    def consume(self, msg):
        modname = msg['topic'].split('.')[3]
        self._dict[modname] += 1

    def dump(self):
        """ Called by CollectdProducer every `n` seconds. """
        # Print out the collectd feedback.
        # This is sent to stdout while other log messages are sent to stderr.
        for k, v in sorted(self._dict.items()):
            print self.formatter(k, v)

        # Reset each entry to zero
        for k, v in sorted(self._dict.items()):
            self._dict[k] = 0

    def formatter(self, key, value):
        """ Format messages for collectd to consume. """
        template = "PUTVAL {host}/fedmsg/fedmsg_wallboard-{key} " +\
            "interval={interval} {timestamp}:{value}"
        timestamp = int(time.time())
        interval = self.hub.config['collectd_interval']
        return template.format(
            host=self.host,
            timestamp=timestamp,
            value=value,
            interval=interval,
            key=key,
        )


class CollectdProducer(PollingProducer):
    # "Frequency" is set later at runtime.
    def poll(self):
        self.hub.consumers[0].dump()


class CollectdCommand(BaseCommand):
    """ Print machine-readable information for collectd to monitor the bus. """
    name = "fedmsg-collectd"
    extra_args = [
        (['--collectd-interval'], {
            'dest': 'collectd_interval',
            'type': int,
            'help': 'Number of seconds to sleep between collectd updates.',
            'default': 2,
        }),
    ]

    def run(self):
        # Initialize the processors before CollectdConsumer is instantiated.
        fedmsg.meta.make_processors(**self.config)

        # Do just like in fedmsg.commands.hub and mangle fedmsg-config.py
        # to work with moksha's expected configuration.
        moksha_options = dict(
            mute=True, # Disable some warnings.
            zmq_subscribe_endpoints=','.join(
                ','.join(bunch) for bunch in self.config['endpoints'].values()
            ),
        )
        self.config.update(moksha_options)
        self.config[CollectdConsumer.config_key] = True

        CollectdProducer.frequency = datetime.timedelta(
            seconds=self.config['collectd_interval']
        )

        from moksha.hub import main
        main(self.config, [CollectdConsumer], [CollectdProducer],
             framework=False)


def collectd():
    command = CollectdCommand()
    command.execute()
|
Python
| 0
|
@@ -1796,44 +1796,114 @@
-modname = msg['topic'].split('.')[3]
+processor = fedmsg.meta.msg2processor(msg, **self.hub.config)
        modname = processor.__name__.lower()
 
|
94c125925b61a57bd29e9265dc993e1d868f2b7f
|
Create Selenium_Google.py
|
Selenium_Google.py
|
Selenium_Google.py
|
Python
| 0
|
@@ -0,0 +1,479 @@
+__author__ = 'Christie'
#
from selenium import webdriver
from selenium.webdriver.common.keys import Keys

browser = webdriver.Firefox()

browser.get('http://www.google.com')
assert 'Google' in browser.title

#browser.get('http://www.yahoo.com')
#assert 'Yahoo' in browser.title
#elem = browser.find_element_by_name('p') # Find the Yahoo search box
elem = browser.find_element_by_name('q') # Find the Google search box
elem.send_keys('seleniumhq' + Keys.RETURN)

browser.quit()
|
|
b00ae10f9ad841131ead33aa690587b7e2c50976
|
Add fetch recipe for fletch
|
recipes/fletch.py
|
recipes/fletch.py
|
Python
| 0.000016
|
@@ -0,0 +1,1057 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import sys

import recipe_util # pylint: disable=F0401

# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=W0232
class Fletch(recipe_util.Recipe):
    """Basic Recipe class for Fletch."""

    @staticmethod
    def fetch_spec(props):
        url = 'https://github.com/dart-lang/fletch.git'
        solution = {
            'name' :'fletch',
            'url' : url,
            'deps_file': 'DEPS',
            'managed' : False,
            'custom_deps': {},
            'safesync_url': '',
        }
        spec = {
            'solutions': [solution],
        }
        if props.get('target_os'):
            spec['target_os'] = props['target_os'].split(',')
        return {
            'type': 'gclient_git',
            'gclient_git_spec': spec,
        }

    @staticmethod
    def expected_root(_props):
        return 'fletch'


def main(argv=None):
    return Fletch().handle_args(argv)


if __name__ == '__main__':
    sys.exit(main(sys.argv))
|
|
76c25395590aa9dee64ca138633f01b62ac0d26b
|
Add new provider migration for osf registrations
|
providers/io/osf/registrations/migrations/0001_initial.py
|
providers/io/osf/registrations/migrations/0001_initial.py
|
Python
| 0
|
@@ -0,0 +1,685 @@
+# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-07-08 16:17
from __future__ import unicode_literals

from django.db import migrations
import share.robot


class Migration(migrations.Migration):

    dependencies = [
        ('share', '0001_initial'),
        ('djcelery', '0001_initial'),
    ]

    operations = [
        migrations.RunPython(
            code=share.robot.RobotUserMigration('io.osf.registrations'),
        ),
        migrations.RunPython(
            code=share.robot.RobotOauthTokenMigration('io.osf.registrations'),
        ),
        migrations.RunPython(
            code=share.robot.RobotScheduleMigration('io.osf.registrations'),
        ),
    ]
|
|
81391212d0e0cecfbce14195e1ca8cd1d96a6671
|
Create Euler_2.py
|
Euler_2.py
|
Euler_2.py
|
Python
| 0.002755
|
@@ -0,0 +1,172 @@
+fib = 1
fib2 = 2
temp = 0
total = 0

while temp <= 4000000:
    temp = fib2
    if temp % 2 == 0:
        total += temp
    temp = fib + fib2
    fib = fib2
    fib2 = temp
print(total)
|
|
2c115a1b437aa36b42f74c04136601d9362dd5f6
|
add cutflow
|
rootpy/tree/cutflow.py
|
rootpy/tree/cutflow.py
|
Python
| 0.000002
|
@@ -0,0 +1,415 @@
+import struct

class Cutflow(object):

    def __init__(self, names):

        self.__names = names
        self.__dict = dict((name, '0') for name in names)

    def __setitem__(self, item, value):

        self.__dict[item] = str(int(bool(value)))

    def bitstring(self):

        return ''.join([self.__dict[item] for item in self.__names])

    def int(self):

        return int(self.bitstring(), 2)
|
|
a5b2db02926573ec1bc338d611af9f0ca363b237
|
add convoluving response function
|
convoluving_response.py
|
convoluving_response.py
|
Python
| 0.00001
|
@@ -0,0 +1,1103 @@
+import numpy as np
import matplotlib.pyplot as plt
import scipy.stats
from scipy.stats import gamma
from stimuli import events2neural

def hrf(times):
    """ Return values for HRF at given times """
    # Gamma pdf for the peak
    peak_values = gamma.pdf(times, 6)
    # Gamma pdf for the undershoot
    undershoot_values = gamma.pdf(times, 12)
    # Combine them
    values = peak_values - 0.35 * undershoot_values
    # Scale max to 0.6
    return values / np.max(values) * 0.6

def constructing_convo(fname, n_volx):
	tr_times = np.arange(0, 30, 2.5)
	hrf_at_trs = hrf(tr_times)
	neural_prediction = events2neural(fname + '_cond.txt', 2.5, n_volx)
	all_tr_times = np.arange(n_volx) * 2.5
	convolved = np.convolve(neural_prediction, hrf_at_trs)
	convolved = convolved[:-len(hrf_at_trs)-1]

	plt.plot(all_tr_times, neural_prediction)
	plt.plot(all_tr_times[0:len(convolved)], convolved)
	plt.show()

	np.savetxt(fname + '_conv.txt', convolved)

if __name__ == '__main__':
	from sys import argv

	filename = argv[1]
	if not filename:
		filename = 'ds114_sub009_t2r1'
	constructing_convo(filename, 173)
|
|
7a91235b1d6ed45a5452c455dd86797bbf092d17
|
Update S3Session.py
|
mongodb_consistent_backup/Upload/S3/S3Session.py
|
mongodb_consistent_backup/Upload/S3/S3Session.py
|
import logging

from boto import config
from boto.s3 import S3Connection


class S3Session:
    def __init__(self, access_key, secret_key, s3_host='s3.amazonaws.com', secure=True, num_retries=5, socket_timeout=15):
        self.access_key = access_key
        self.secret_key = secret_key
        self.s3_host = s3_host
        self.secure = secure
        self.num_retries = num_retries
        self.socket_timeout = socket_timeout

        for section in config.sections():
            config.remove_section(section)
        config.add_section('Boto')
        config.setbool('Boto', 'is_secure', self.secure)
        config.set('Boto', 'http_socket_timeout', str(self.socket_timeout))
        config.set('Boto', 'num_retries', str(self.num_retries))

        self._conn = None
        self.connect()

    def close(self):
        if not self._conn:
            self._conn.close()

    def connect(self):
        if not self._conn:
            try:
                logging.debug("Connecting to AWS S3 with Access Key: %s" % self.access_key)
                self._conn = S3Connection(
                    self.access_key,
                    self.secret_key,
                    host=self.s3_host,
                    is_secure=self.secure
                )
                logging.debug("Successfully connected to AWS S3 with Access Key: %s" % self.access_key)
            except Exception, e:
                logging.error("Cannot connect to AWS S3 with Access Key: %s!" % self.access_key)
                raise e
        return self._conn

    def get_bucket(self, bucket_name):
        try:
            logging.debug("Connecting to AWS S3 Bucket: %s" % bucket_name)
            return self._conn.get_bucket(bucket_name)
        except Exception, e:
            logging.error("Cannot connect to AWS S3 Bucket: %s!" % bucket_name)
            raise e
|
Python
| 0.000001
|
@@ -13,64 +13,34 @@
 ng

-from boto import config
from boto.s3 import S3Connection
+import boto
import boto.s3
 


c
@@ -446,16 +446,21 @@
 tion in
+boto.
 config.s
@@ -478,24 +478,29 @@
+boto.
 config.remov
@@ -522,24 +522,29 @@
 on)
        
+boto.
 config.add_s
@@ -558,32 +558,37 @@
 'Boto')
        
+boto.
 config.setbool('
@@ -620,32 +620,37 @@
 secure)
        
+boto.
 config.set('Boto
@@ -709,16 +709,21 @@
+boto.
 config.s
@@ -900,16 +900,29 @@
 .close()
+
        pass
 

    de
@@ -1066,32 +1066,32 @@
 elf.access_key)

-
@@ -1103,16 +1103,24 @@
 _conn =
+boto.s3.
 S3Connec
|
7c755e7839f7c602a6c93b1aa2f5011e89d15c85
|
Create command for generating prices for flavors
|
nodeconductor/iaas/management/commands/addmissingpricelistflavors.py
|
nodeconductor/iaas/management/commands/addmissingpricelistflavors.py
|
Python
| 0
|
@@ -0,0 +1,1057 @@
+from __future__ import unicode_literals

from django.contrib.contenttypes.models import ContentType
from django.core.management.base import BaseCommand

from nodeconductor.cost_tracking.models import DefaultPriceListItem
from nodeconductor.iaas.models import Flavor, Instance


class Command(BaseCommand):

    def handle(self, *args, **options):
        instance_content_type = ContentType.objects.get_for_model(Instance)
        self.stdout.write('Checking flavors existance in DefaultPriceListItem table ...')
        for flavor in Flavor.objects.all():
            lookup_kwargs = {'item_type': 'flavor', 'key': flavor.name, 'resource_content_type': instance_content_type}
            if not DefaultPriceListItem.objects.filter(**lookup_kwargs).exists():
                item = DefaultPriceListItem(**lookup_kwargs)
                item.name = 'Flavor type: {}'.format(flavor.name)
                item.save()
                self.stdout.write('DefaultPriceListItem was created for flavor {}'.format(flavor.name))

        self.stdout.write('... Done')
|
|
2616d8f3ef51a8551ac14a9e83b0298b8165093a
|
Add work-in-progress script to fixup a standalone plugin library.
|
Superbuild/Projects/apple/fixup_plugin2.py
|
Superbuild/Projects/apple/fixup_plugin2.py
|
Python
| 0
|
@@ -0,0 +1,1918 @@
+#!/usr/bin/env python

import subprocess
import os

plugin = 'libVelodyneHDLPlugin.dylib'
paraviewBuildDir = '/source/paraview/build'

nameprefix = '@executable_path/../Libraries/'
prefix = '@executable_path/../Libraries/'

# The official ParaView OSX binaries are built with hdf5, not vtkhdf5.
# Also, they are built with Python 2.6, not 2.7
namechanges = {
    'libvtkhdf5_hl-pv3.98.1.dylib' : 'libhdf5.1.8.9.dylib',
    'libvtkhdf5-pv3.98.1.dylib' : 'libhdf5_hl.1.8.9.dylib',
    'libvtkWrappingPython27-pv3.98.1.dylib' : 'libvtkWrappingPython26-pv3.98.1.dylib'
}

changePythonFramework = False


def fixupPlugin():

    output = subprocess.check_output(['otool', '-L', plugin])
    lines = output.split('\n')

    libs = []
    qtlibs = []
    for l in lines:

        l = l.strip().split(' ')[0]
        if l.startswith(paraviewBuildDir):
            libs.append(l)
        if l.startswith('Qt'):
            qtlibs.append(l)



    for qtlib in qtlibs:

        command = 'install_name_tool -change %s @executable_path/../Frameworks/%s %s' % (qtlib, qtlib, plugin)
        subprocess.call(command.split())


    if changePythonFramework:
        command = 'install_name_tool -change /System/Library/Frameworks/Python.framework/Versions/2.7/Python /System/Library/Frameworks/Python.framework/Versions/2.6/Python %s' % (plugin)
        subprocess.call(command.split())


    for lib in libs:

        name = os.path.basename(lib)

        if name in namechanges:
            name = namechanges[name]

        command = 'install_name_tool -change %s %s%s %s' % (lib, prefix, name, plugin)
        subprocess.call(command.split())

        pvlib = '/Applications/paraview.app/Contents/Libraries/' + name
        if not os.path.exists(pvlib):
            print 'notfound:', pvlib


    command = 'install_name_tool -id %s%s %s' % (nameprefix, os.path.basename(plugin), plugin)
    subprocess.call(command.split())


if __name__ == '__main__':
    fixupPlugin()
|
|
735135c5570edd38324fe3e94aa2f4c2f3043627
|
Migrate data from contact_for_research_via and into contact_for_research_methods many to many field
|
cla_backend/apps/legalaid/migrations/0023_migrate_contact_for_research_via_field.py
|
cla_backend/apps/legalaid/migrations/0023_migrate_contact_for_research_via_field.py
|
Python
| 0.000002
|
@@ -0,0 +1,1153 @@
+# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations%0Afrom django.db.models import Q%0A%0A%0Adef migrate_contact_for_research_via_field_data(apps, schema_editor):%0A ContactResearchMethod = apps.get_model(%22legalaid%22, %22ContactResearchMethod%22)%0A research_methods = %7Bmethod.method: method.id for method in ContactResearchMethod.objects.all()%7D%0A PersonalDetails = apps.get_model(%22legalaid%22, %22PersonalDetails%22)%0A models = PersonalDetails.objects.exclude(Q(contact_for_research_via=%22%22) %7C Q(contact_for_research_via=None))%0A for model in models:%0A if not model.contact_for_research_methods:%0A model.contact_for_research_methods = %5Bresearch_methods.get(model.contact_for_research_via)%5D%0A model.save()%0A%0A%0Adef rollback_migrate_contact_for_research_via_field_data(apps, schema_editor):%0A pass%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B(%22legalaid%22, %220022_default_contact_for_research_methods%22)%5D%0A%0A operations = %5B%0A migrations.RunPython(%0A migrate_contact_for_research_via_field_data, rollback_migrate_contact_for_research_via_field_data%0A )%0A %5D%0A
|
|
59ac9745064dd02903e35c1c51781505bad505df
|
add gunicorn config
|
gunicorn.conf.py
|
gunicorn.conf.py
|
Python
| 0.000001
|
@@ -0,0 +1,89 @@
+%0A%0Abind = %22unix:/tmp/mygpo.sock%22%0Aworkers = 2%0Aworker_class = %22gevent%22%0Amax_requests = 10000%0A
|
|
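
The config above binds two gevent workers to a Unix socket. A minimal WSGI callable such a config could serve (module name and start command are assumptions; it would be launched with something like gunicorn -c gunicorn.conf.py mygpo.wsgi:application):

# mygpo/wsgi.py (hypothetical) -- the callable gunicorn imports and serves
def application(environ, start_response):
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [b'hello from a gevent worker\n']
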
830a41911c5a2bc3982f35a6c6da38f6c659e78b
|
Create /pypardot/objects/tests/__init__.py
|
pypardot/objects/tests/__init__.py
|
pypardot/objects/tests/__init__.py
|
Python
| 0.000005
|
@@ -0,0 +1 @@
+%0A
|
|
f740dd60e7a4493269679e469c7f1ee5e24ff5af
|
add build/errors file
|
conary/build/errors.py
|
conary/build/errors.py
|
Python
| 0.000001
|
@@ -0,0 +1,326 @@
+%0Aclass BuildError(Exception):%0A def __init__(self, msg):%0A self.msg = msg%0A%0A def __repr__(self):%0A%09return self.msg%0A%0A def __str__(self):%0A%09return repr(self)%0A %0Aclass RecipeFileError(BuildError):%0A pass%0A%0A%0Aclass RecipeDependencyError(RecipeFileError):%0A pass%0A%0Aclass BadRecipeNameError(RecipeFileError):%0A pass%0A
|
|
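
Since every class in the file above derives from BuildError, callers can catch the base class and still observe the specific subclass raised. A self-contained sketch of that pattern, re-declaring two of the classes locally:

class BuildError(Exception):
    def __init__(self, msg):
        self.msg = msg

    def __str__(self):
        return self.msg


class BadRecipeNameError(BuildError):
    pass


try:
    raise BadRecipeNameError('recipe names must start with a letter')
except BuildError as err:  # the base class catches every subclass
    print(type(err).__name__, '-', err)
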
18f385de7b287a932192f690cb74ff70a452cf47
|
test settings file
|
fpurlfield/test_settings.py
|
fpurlfield/test_settings.py
|
Python
| 0.000001
|
@@ -0,0 +1,1298 @@
+# Django settings for test_project project.%0A%0ADEBUG = True%0ATEMPLATE_DEBUG = DEBUG%0AADMINS = ()%0AMANAGERS = ADMINS%0A%0ADATABASES = %7B%0A 'default': %7B%0A 'ENGINE': 'django.db.backends.sqlite3',%0A 'NAME': ':memory:',%0A %7D,%0A%7D%0A%0A# TIME_ZONE = 'America/Chicago'%0A# LANGUAGE_CODE = 'en-us'%0A# USE_I18N = True%0A# USE_L10N = True%0A# USE_TZ = True%0A# MEDIA_ROOT = ''%0A# MEDIA_URL = ''%0A%0ASECRET_KEY = 'secret'%0ATEMPLATE_LOADERS = (%0A 'django.template.loaders.filesystem.Loader',%0A 'django.template.loaders.app_directories.Loader',%0A)%0A%0AMIDDLEWARE_CLASSES = (%0A 'django.middleware.common.CommonMiddleware',%0A)%0A%0AROOT_URLCONF = 'test_project.urls'%0AWSGI_APPLICATION = 'test_project.wsgi.application'%0A%0ATEMPLATE_DIRS = ()%0A%0AINSTALLED_APPS = (%0A 'fpurlfield',%0A)%0A%0ALOGGING = %7B%0A 'version': 1,%0A 'disable_existing_loggers': False,%0A 'filters': %7B%0A 'require_debug_false': %7B%0A '()': 'django.utils.log.RequireDebugFalse'%0A %7D%0A %7D,%0A 'handlers': %7B%0A 'mail_admins': %7B%0A 'level': 'ERROR',%0A 'filters': %5B'require_debug_false'%5D,%0A 'class': 'django.utils.log.AdminEmailHandler'%0A %7D%0A %7D,%0A 'loggers': %7B%0A 'django.request': %7B%0A 'handlers': %5B'mail_admins'%5D,%0A 'level': 'ERROR',%0A 'propagate': True,%0A %7D,%0A %7D%0A%7D
|
|
6ade5da25f8f265611f5b959f4e763ae3f070155
|
Add support for CentOS 6.4
|
python/lib/cloudutils/utilities.py
|
python/lib/cloudutils/utilities.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from cloudException import CloudRuntimeException, formatExceptionInfo
import logging
from subprocess import PIPE, Popen
from signal import alarm, signal, SIGALRM, SIGKILL
import sys
import os
class bash:
def __init__(self, args, timeout=600):
self.args = args
logging.debug("execute:%s"%args)
self.timeout = timeout
self.process = None
self.success = False
self.run()
def run(self):
class Alarm(Exception):
pass
def alarm_handler(signum, frame):
raise Alarm
try:
self.process = Popen(self.args, shell=True, stdout=PIPE, stderr=PIPE)
if self.timeout != -1:
signal(SIGALRM, alarm_handler)
alarm(self.timeout)
try:
self.stdout, self.stderr = self.process.communicate()
if self.timeout != -1:
alarm(0)
except Alarm:
os.kill(self.process.pid, SIGKILL)
raise CloudRuntimeException("Timeout during command execution")
self.success = self.process.returncode == 0
except:
raise CloudRuntimeException(formatExceptionInfo())
if not self.success:
logging.debug("Failed to execute:" + self.getErrMsg())
def isSuccess(self):
return self.success
def getStdout(self):
return self.stdout.strip("\n")
def getLines(self):
return self.stdout.split("\n")
def getStderr(self):
return self.stderr.strip("\n")
def getErrMsg(self):
if self.isSuccess():
return ""
if self.getStderr() is None or self.getStderr() == "":
return self.getStdout()
else:
return self.getStderr()
def initLoging(logFile=None):
try:
if logFile is None:
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(filename=logFile, level=logging.DEBUG)
except:
logging.basicConfig(level=logging.DEBUG)
def writeProgressBar(msg, result):
output = "[%-6s]\n"%"Failed"
if msg is not None:
output = "%-30s"%msg
elif result is True:
output = "[%-2s]\n"%"OK"
elif result is False:
output = "[%-6s]\n"%"Failed"
sys.stdout.write(output)
sys.stdout.flush()
class UnknownSystemException(Exception):
"This Excption is raised if the current operating enviornment is unknown"
pass
class Distribution:
def __init__(self):
self.distro = "Unknown"
self.release = "Unknown"
if os.path.exists("/etc/fedora-release"):
self.distro = "Fedora"
elif os.path.exists("/etc/redhat-release"):
version = file("/etc/redhat-release").readline()
if version.find("Red Hat Enterprise Linux Server release 6") != -1 or version.find("Scientific Linux release 6") != -1 or version.find("CentOS Linux release 6") != -1 or version.find("CentOS release 6.2") != -1 or version.find("CentOS release 6.3") != -1:
self.distro = "RHEL6"
elif version.find("CentOS release") != -1:
self.distro = "CentOS"
else:
self.distro = "RHEL5"
elif os.path.exists("/etc/legal") and "Ubuntu" in file("/etc/legal").read(-1):
self.distro = "Ubuntu"
kernel = bash("uname -r").getStdout()
if kernel.find("2.6.32") != -1:
self.release = "10.04"
self.arch = bash("uname -m").getStdout()
elif os.path.exists("/usr/bin/lsb_release"):
o = bash("/usr/bin/lsb_release -i")
distributor = o.getStdout().split(":\t")[1]
if "Debian" in distributor:
# This obviously needs a rewrite at some point
self.distro = "Ubuntu"
else:
raise UnknownSystemException(distributor)
else:
raise UnknownSystemException
def getVersion(self):
return self.distro
def getRelease(self):
return self.release
def getArch(self):
return self.arch
class serviceOps:
pass
class serviceOpsRedhat(serviceOps):
def isServiceRunning(self, servicename):
try:
o = bash("service " + servicename + " status")
if "running" in o.getStdout() or "start" in o.getStdout() or "Running" in o.getStdout():
return True
else:
return False
except:
return False
def stopService(self, servicename,force=False):
if self.isServiceRunning(servicename) or force:
return bash("service " + servicename +" stop").isSuccess()
return True
def disableService(self, servicename):
result = self.stopService(servicename)
bash("chkconfig --del " + servicename)
return result
def startService(self, servicename,force=False):
if not self.isServiceRunning(servicename) or force:
return bash("service " + servicename + " start").isSuccess()
return True
def enableService(self, servicename,forcestart=False):
bash("chkconfig --level 2345 " + servicename + " on")
return self.startService(servicename,force=forcestart)
def isKVMEnabled(self):
if os.path.exists("/dev/kvm"):
return True
else:
return False
class serviceOpsUbuntu(serviceOps):
def isServiceRunning(self, servicename):
try:
o = bash("sudo /usr/sbin/service " + servicename + " status")
if "not running" in o.getStdout():
return False
else:
return True
except:
return False
def stopService(self, servicename,force=True):
if self.isServiceRunning(servicename) or force:
return bash("sudo /usr/sbin/service " + servicename +" stop").isSuccess()
def disableService(self, servicename):
result = self.stopService(servicename)
bash("sudo update-rc.d -f " + servicename + " remove")
return result
def startService(self, servicename,force=True):
if not self.isServiceRunning(servicename) or force:
return bash("sudo /usr/sbin/service " + servicename + " start").isSuccess()
def enableService(self, servicename,forcestart=True):
bash("sudo update-rc.d -f " + servicename + " remove")
bash("sudo update-rc.d -f " + servicename + " defaults")
return self.startService(servicename,force=forcestart)
def isKVMEnabled(self):
return bash("kvm-ok").isSuccess()
|
Python
| 0
|
@@ -3815,53 +3815,8 @@
e 6.
-2%22) != -1 or version.find(%22CentOS release 6.3
%22) !
|
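
The hunk above collapses the exact-version checks for 6.2 and 6.3 into a single "CentOS release 6." prefix match, which is what makes 6.4 (and later 6.x point releases) pass. A quick check of the resulting predicate:

for banner in ('CentOS release 6.2 (Final)',
               'CentOS release 6.3 (Final)',
               'CentOS release 6.4 (Final)'):
    print(banner, '->', banner.find('CentOS release 6.') != -1)  # all True
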
62c85cf12b388411919b86ac498908336bfd5e12
|
Create password_checker.py
|
Challenge-172/02-Intermediate-2/password_checker.py
|
Challenge-172/02-Intermediate-2/password_checker.py
|
Python
| 0.000267
|
@@ -0,0 +1,301 @@
+#!/usr/bin/python%0Aimport hashlib%0Aimport uuid%0A%0Apassword = 'test123' %0A%0Af = open('salt.txt')%0Asalt = f.read()%0Af.close()%0A%0Af = open('encrypted.txt')%0Ahashed_password = f.read()%0Af.close()%0A%0Aif hashlib.sha512(password + salt).hexdigest() == hashed_password:%0A%09print 'ACCESS GRANTED'%0Aelse:%0A%09print 'ACCESS DENIED'%0A
|
|
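
The checker above expects salt.txt and encrypted.txt to already exist. A hedged companion sketch (assumed, not part of the commit) that produces them in the same format; it is written for Python 3, so the strings are encoded before hashing, whereas the original is Python 2:

import hashlib
import uuid

password = 'test123'
salt = uuid.uuid4().hex  # random salt, stored alongside the hash

with open('salt.txt', 'w') as f:
    f.write(salt)
with open('encrypted.txt', 'w') as f:
    f.write(hashlib.sha512((password + salt).encode()).hexdigest())
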
6507cc1dc85ffb7b26b89cc2dc1fce057fefc554
|
Fix #8960 - Decora Wi-Fi Switch unable to set brightness (#8989)
|
homeassistant/components/light/decora_wifi.py
|
homeassistant/components/light/decora_wifi.py
|
"""
Interfaces with the myLeviton API for Decora Smart WiFi products.
See:
http://www.leviton.com/en/products/lighting-controls/decora-smart-with-wifi
Uses Leviton's cloud services API for cloud-to-cloud integration.
"""
import logging
import voluptuous as vol
from homeassistant.components.light import (
ATTR_BRIGHTNESS, ATTR_TRANSITION, Light,
PLATFORM_SCHEMA, SUPPORT_BRIGHTNESS, SUPPORT_TRANSITION)
from homeassistant.const import (
CONF_USERNAME, CONF_PASSWORD,
EVENT_HOMEASSISTANT_STOP)
import homeassistant.helpers.config_validation as cv
REQUIREMENTS = ['decora_wifi==1.3']
_LOGGER = logging.getLogger(__name__)
# Validation of the user's configuration
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
})
NOTIFICATION_ID = 'leviton_notification'
NOTIFICATION_TITLE = 'myLeviton Decora Setup'
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Set up the Decora WiFi platform."""
# pylint: disable=import-error
from decora_wifi import DecoraWiFiSession
from decora_wifi.models.person import Person
from decora_wifi.models.residential_account import ResidentialAccount
email = config.get(CONF_USERNAME)
password = config.get(CONF_PASSWORD)
session = DecoraWiFiSession()
try:
success = session.login(email, password)
# If login failed, notify user.
if success is None:
msg = 'Failed to log into myLeviton Services. Check credentials.'
_LOGGER.error(msg)
hass.components.persistent_notification.create(
msg, title=NOTIFICATION_TITLE, notification_id=NOTIFICATION_ID)
return False
# Gather all the available devices...
perms = session.user.get_residential_permissions()
all_switches = []
for permission in perms:
acct = ResidentialAccount(session, permission.residentialAccountId)
for residence in acct.get_residences():
for switch in residence.get_iot_switches():
all_switches.append(switch)
add_devices(DecoraWifiLight(sw) for sw in all_switches)
except ValueError:
_LOGGER.error('Failed to communicate with myLeviton Service.')
# Listen for the stop event and log out.
def logout(event):
"""Log out..."""
try:
if session is not None:
Person.logout(session)
except ValueError:
_LOGGER.error('Failed to log out of myLeviton Service.')
hass.bus.listen(EVENT_HOMEASSISTANT_STOP, logout)
class DecoraWifiLight(Light):
"""Representation of a Decora WiFi switch."""
def __init__(self, switch):
"""Initialize the switch."""
self._switch = switch
@property
def supported_features(self):
"""Return supported features."""
if self._switch.canSetLevel:
return SUPPORT_BRIGHTNESS | SUPPORT_TRANSITION
else:
return 0
@property
def name(self):
"""Return the display name of this switch."""
return self._switch.name
@property
def brightness(self):
"""Return the brightness of the dimmer switch."""
return int(self._switch.brightness * 255 / 100)
@property
def is_on(self):
"""Return true if switch is on."""
return self._switch.power == 'ON'
def turn_on(self, **kwargs):
"""Instruct the switch to turn on & adjust brightness."""
attribs = {'power': 'ON'}
if ATTR_BRIGHTNESS in kwargs:
min_level = self._switch.get('minLevel', 0)
max_level = self._switch.get('maxLevel', 100)
brightness = int(kwargs[ATTR_BRIGHTNESS] * max_level / 255)
brightness = max(brightness, min_level)
attribs['brightness'] = brightness
if ATTR_TRANSITION in kwargs:
transition = int(kwargs[ATTR_TRANSITION])
attribs['fadeOnTime'] = attribs['fadeOffTime'] = transition
try:
self._switch.update_attributes(attribs)
except ValueError:
_LOGGER.error('Failed to turn on myLeviton switch.')
def turn_off(self, **kwargs):
"""Instruct the switch to turn off."""
attribs = {'power': 'OFF'}
try:
self._switch.update_attributes(attribs)
except ValueError:
_LOGGER.error('Failed to turn off myLeviton switch.')
def update(self):
"""Fetch new state data for this switch."""
try:
self._switch.refresh()
except ValueError:
_LOGGER.error('Failed to update myLeviton switch data.')
|
Python
| 0
|
@@ -3639,24 +3639,29 @@
elf._switch.
+data.
get('minLeve
@@ -3704,16 +3704,21 @@
_switch.
+data.
get('max
|
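
The fix in the hunk above matters because the switch object keeps its myLeviton attributes in a data dict, so defaults must be read with switch.data.get(...); calling .get on the object itself fails (presumably the error behind the linked issue). A minimal stand-in showing the difference (FakeSwitch is an assumption for illustration):

class FakeSwitch:
    def __init__(self):
        self.data = {'minLevel': 10, 'maxLevel': 90}  # attribute dict, as on the real model

sw = FakeSwitch()
print(sw.data.get('minLevel', 0))    # 10 -- works
print(sw.data.get('maxLevel', 100))  # 90
# sw.get('minLevel', 0) would raise AttributeError: the object has no .get
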
446984ad7b102587beac03d4329b5d0c061e2095
|
Add preserve_{current_canvas,batch_state} and invisible_canvas context managers
|
rootpy/context.py
|
rootpy/context.py
|
Python
| 0.000004
|
@@ -0,0 +1,1358 @@
+from contextlib import contextmanager%0A%0Aimport ROOT%0A%0A@contextmanager%0Adef preserve_current_canvas():%0A %22%22%22%0A Context manager which ensures that the current canvas remains the current%0A canvas when the context is left.%0A %22%22%22%0A %0A old = ROOT.gPad.func()%0A try:%0A yield%0A finally:%0A if old:%0A old.cd()%0A else:%0A # Is it possible to set ROOT.gPad back to None, somehow?%0A pass%0A%0A@contextmanager%0Adef preserve_batch_state():%0A %22%22%22%0A Context manager which ensures the batch state is the same on exit as it was%0A on entry.%0A %22%22%22%0A%0A old = ROOT.gROOT.IsBatch()%0A try:%0A yield%0A finally:%0A ROOT.gROOT.SetBatch(old)%0A%0A@contextmanager%0Adef invisible_canvas():%0A %22%22%22%0A Context manager yielding a temporary canvas drawn in batch mode, invisible%0A to the user. Original state is restored on exit.%0A %0A Example use; obtain X axis object without interfering with anything:%0A %0A with invisible_canvas() as c:%0A efficiency.Draw()%0A g = efficiency.GetPaintedGraph()%0A return g.GetXaxis()%0A %22%22%22%0A %0A with preserve_batch_state():%0A ROOT.gROOT.SetBatch()%0A with preserve_current_canvas():%0A c = ROOT.TCanvas()%0A try:%0A c.cd()%0A yield c%0A finally:%0A c.Close()%0A
|
|
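
The two preserve_* managers above share one save/restore pattern: snapshot global state before yield, restore it in finally so an exception cannot leak the change. A ROOT-free sketch of the same idea, with a plain module global standing in for ROOT.gPad or batch mode:

from contextlib import contextmanager

current = 'canvas-A'

@contextmanager
def preserve_current():
    global current
    old = current
    try:
        yield
    finally:
        current = old  # restored even if the body raises

with preserve_current():
    current = 'canvas-B'  # temporary change inside the context
print(current)  # canvas-A
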
7cb77ef66cad41e1b5d4907272b899a24a689c2d
|
Test for #423
|
test/algorithms/refinement/tst_dials-423.py
|
test/algorithms/refinement/tst_dials-423.py
|
Python
| 0
|
@@ -0,0 +1,2441 @@
+#!/usr/bin/env cctbx.python%0A%0A#%0A# Copyright (C) (2017) STFC Rutherford Appleton Laboratory, UK.%0A#%0A# Author: David Waterman.%0A#%0A# This code is distributed under the BSD license, a copy of which is%0A# included in the root directory of this package.%0A#%0A%0A%22%22%22 Test the situation that led to https://github.com/dials/dials/issues/423.%0AIn that case instantiating a Refiner for an experiment list with an I23%0Adetector model caused the panel origins to move before any refinement took%0Aplace. This occurred because for the input experiments.json the root frame for%0Athe hierarchical detector is on the source side of the laboratory frame origin, not%0Aon the detector side. Prior to the fix this resulted in incorrect calculation%0Aof the offsets of all panels from the root frame.%0A%22%22%22%0A%0Afrom __future__ import absolute_import, division%0Aimport os%0Aimport libtbx.load_env # required for libtbx.env.find_in_repositories%0Afrom libtbx import phil%0Afrom dxtbx.model.experiment_list import ExperimentListFactory%0Afrom dials.array_family import flex%0Afrom dials.algorithms.refinement import RefinerFactory%0A%0Aclass Test(object):%0A%0A  def __init__(self):%0A%0A    dials_regression = libtbx.env.find_in_repositories(%0A      relative_path=%22dials_regression%22,%0A      test=os.path.isdir)%0A%0A    data_dir = os.path.join(dials_regression, %22refinement_test_data%22,%0A                            %22dials-423%22)%0A    exp_file = os.path.join(data_dir, 'experiments.json')%0A    ref_file = os.path.join(data_dir, 'subset.pickle')%0A%0A    self._reflections = flex.reflection_table.from_pickle(ref_file)%0A    self._experiments = ExperimentListFactory.from_json_file(exp_file,%0A                          check_format=False)%0A%0A  def run(self):%0A    %22%22%22Test that the detector remains similar after refiner construction%22%22%22%0A%0A    from dials.algorithms.refinement.refiner import phil_scope%0A    params = phil_scope.fetch(source=phil.parse('')).extract()%0A%0A    # disable outlier rejection for speed of refiner construction%0A    params.refinement.reflections.outlier.algorithm='null'%0A%0A    refiner = RefinerFactory.from_parameters_data_experiments(params,%0A        self._reflections, self._experiments)%0A%0A    d1 = self._experiments%5B0%5D.detector%0A    d2 = refiner.get_experiments()%5B0%5D.detector%0A%0A    assert d1.is_similar_to(d2)%0A    print %22OK%22%0A    return%0A%0Adef run():%0A  if not libtbx.env.has_module(%22dials_regression%22):%0A    print %22Skipping tests in %22 + __file__ + %22 as dials_regression not present%22%0A    return%0A%0A  tst = Test()%0A  tst.run()%0A%0Aif __name__ == '__main__':%0A  run()%0A
|
|
32fcd5393402d868d8741385705f58b9e8eb7703
|
Update __init__.py
|
mycroft/version/__init__.py
|
mycroft/version/__init__.py
|
# Copyright 2016 Mycroft AI, Inc.
#
# This file is part of Mycroft Core.
#
# Mycroft Core is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Mycroft Core is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Mycroft Core. If not, see <http://www.gnu.org/licenses/>.
import json
from genericpath import exists, isfile
from mycroft.util.log import getLogger
__author__ = 'augustnmonteiro'
# The following lines are replaced during the release process.
# START_VERSION_BLOCK
CORE_VERSION_MAJOR = 0
CORE_VERSION_MINOR = 8
CORE_VERSION_BUILD = 10
# END_VERSION_BLOCK
CORE_VERSION_STR = (str(CORE_VERSION_MAJOR) + "." +
str(CORE_VERSION_MINOR) + "." +
str(CORE_VERSION_BUILD))
LOG = getLogger(__name__)
class VersionManager(object):
__location = "/opt/mycroft/version.json"
@staticmethod
def get():
if (exists(VersionManager.__location) and
isfile(VersionManager.__location)):
try:
with open(VersionManager.__location) as f:
return json.load(f)
except:
LOG.error("Failed to load version from '%s'"
% VersionManager.__location)
return {"coreVersion": None, "enclosureVersion": None}
|
Python
| 0.000072
|
@@ -980,17 +980,17 @@
UILD = 1
-0
+1
%0A# END_V
|
9cb122793d531690b621b4fa8f91481a105305e3
|
Add new module: minion.list
|
salt/modules/minion.py
|
salt/modules/minion.py
|
Python
| 0.000004
|
@@ -0,0 +1,2417 @@
+# -*- coding: utf-8 -*-%0A'''%0AModule to provide information about minions%0A'''%0A%0A# Import Python libs%0Aimport os%0A%0A# Import Salt libs%0Aimport salt.utils%0Aimport salt.key%0A%0A%0Adef list():%0A    '''%0A    Return a list of accepted, denied, unaccepted and rejected keys.%0A    This is the same output as %60salt-key -L%60%0A%0A    CLI Example:%0A%0A    .. code-block:: bash%0A%0A        salt 'master' minion.list%0A    '''%0A    pki_dir = globals().get('__salt__')%5B'config.get'%5D('pki_dir', '')%0A    transport = globals().get('__salt__')%5B'config.get'%5D('transport', '')%0A%0A    # We have to replace the minion/master directories%0A    pki_dir = pki_dir.replace(%22minion%22, %22master%22)%0A%0A    # The source code below is (nearly) a copy of salt.key.Key.list_keys%0A%0A    # We have to differentiate between RaetKey._check_minions_directories%0A    # and Zeromq-Keys. Raet-Keys only have three states while ZeroMQ-keys%0A    # have an additional 'denied' state.%0A    if transport in ('zeromq', 'tcp'):%0A        key_dirs = _check_minions_directories(pki_dir)%0A    else:%0A        key_dirs = _check_minions_directories_raetkey(pki_dir)%0A%0A    ret = %7B%7D%0A%0A    for dir_ in key_dirs:%0A        ret%5Bos.path.basename(dir_)%5D = %5B%5D%0A        try:%0A            for fn_ in salt.utils.isorted(os.listdir(dir_)):%0A                if not fn_.startswith('.'):%0A                    if os.path.isfile(os.path.join(dir_, fn_)):%0A                        ret%5Bos.path.basename(dir_)%5D.append(fn_)%0A        except (OSError, IOError):%0A            # key dir kind is not created yet, just skip%0A            continue%0A%0A    return ret%0A%0A%0Adef _check_minions_directories(pki_dir):%0A    '''%0A    Return the minion keys directory paths.%0A%0A    This function is a copy of salt.key.Key._check_minions_directories.%0A    '''%0A    minions_accepted = os.path.join(pki_dir, salt.key.Key.ACC)%0A    minions_pre = os.path.join(pki_dir, salt.key.Key.PEND)%0A    minions_rejected = os.path.join(pki_dir, salt.key.Key.REJ)%0A    minions_denied = os.path.join(pki_dir, salt.key.Key.DEN)%0A%0A    return minions_accepted, minions_pre, minions_rejected, minions_denied%0A%0A%0Adef _check_minions_directories_raetkey(pki_dir):%0A    '''%0A    Return the minion keys directory paths.%0A%0A    This function is a copy of salt.key.RaetKey._check_minions_directories.%0A    '''%0A    accepted = os.path.join(pki_dir, salt.key.RaetKey.ACC)%0A    pre = os.path.join(pki_dir, salt.key.RaetKey.PEND)%0A    rejected = os.path.join(pki_dir, salt.key.RaetKey.REJ)%0A%0A    return accepted, pre, rejected%0A
|
|
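
The directory walk at the heart of list() above works without any of the Salt machinery. A standalone sketch, with sorted() standing in for salt.utils.isorted and the key-directory paths passed in directly:

import os

def list_keys(key_dirs):
    ret = {}
    for dir_ in key_dirs:
        ret[os.path.basename(dir_)] = []
        try:
            for fn_ in sorted(os.listdir(dir_)):
                if not fn_.startswith('.') and os.path.isfile(os.path.join(dir_, fn_)):
                    ret[os.path.basename(dir_)].append(fn_)
        except OSError:
            continue  # key dir not created yet, just skip it
    return ret

print(list_keys(['/etc/salt/pki/master/minions']))  # {'minions': [...]} on a master
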
91645e4abf4fa128a59257584ba385c19b642425
|
Add @s0undtech's nb_popen module
|
salt/utils/nb_popen.py
|
salt/utils/nb_popen.py
|
Python
| 0
|
@@ -0,0 +1,3166 @@
+# -*- coding: utf-8 -*-%0A'''%0A saltcloud.utils.nb_popen%0A ~~~~~~~~~~~~~~~~~~~~~~~~%0A%0A Non blocking subprocess Popen.%0A%0A :codeauthor: :email:%60Pedro Algarvio (pedro@algarvio.me)%60%0A :copyright: %C2%A9 2013 by the SaltStack Team, see AUTHORS for more details.%0A :license: Apache 2.0, see LICENSE for more details.%0A'''%0A%0A# Import python libs%0Aimport os%0Aimport sys%0Aimport fcntl%0Aimport logging%0Aimport subprocess%0A%0A%0Alog = logging.getLogger(__name__)%0A%0A%0Aclass NonBlockingPopen(subprocess.Popen):%0A%0A def __init__(self, *args, **kwargs):%0A self.stream_stds = kwargs.pop('stream_stds', False)%0A super(NonBlockingPopen, self).__init__(*args, **kwargs)%0A%0A if self.stdout is not None:%0A fod = self.stdout.fileno()%0A fol = fcntl.fcntl(fod, fcntl.F_GETFL)%0A fcntl.fcntl(fod, fcntl.F_SETFL, fol %7C os.O_NONBLOCK)%0A self.obuff = ''%0A%0A if self.stderr is not None:%0A fed = self.stderr.fileno()%0A fel = fcntl.fcntl(fed, fcntl.F_GETFL)%0A fcntl.fcntl(fed, fcntl.F_SETFL, fel %7C os.O_NONBLOCK)%0A self.ebuff = ''%0A%0A log.info('Running command %7B0!r%7D'.format(*args))%0A%0A def poll(self):%0A poll = super(NonBlockingPopen, self).poll()%0A%0A if self.stdout is not None:%0A try:%0A obuff = self.stdout.read()%0A self.obuff += obuff%0A if obuff:%0A logging.getLogger(%0A 'saltcloud.Popen.STDOUT.PID-%7B0%7D'.format(self.pid)%0A ).debug(obuff.rstrip())%0A if self.stream_stds:%0A sys.stdout.write(obuff)%0A except IOError, err:%0A if err.errno not in (11, 35):%0A # We only handle Resource not ready properly, any other%0A # raise the exception%0A raise%0A%0A if self.stderr is not None:%0A try:%0A ebuff = self.stderr.read()%0A self.ebuff += ebuff%0A if ebuff:%0A logging.getLogger(%0A 'saltcloud.Popen.STDERR.PID-%7B0%7D'.format(self.pid)%0A ).debug(ebuff.rstrip())%0A if self.stream_stds:%0A sys.stderr.write(ebuff)%0A except IOError, err:%0A if err.errno not in (11, 35):%0A # We only handle Resource not ready properly, any other%0A # raise the exception%0A raise%0A%0A return poll%0A%0A def __del__(self):%0A if self.stdout is not None:%0A try:%0A fod = self.stdout.fileno()%0A fol = fcntl.fcntl(fod, fcntl.F_GETFL)%0A fcntl.fcntl(fod, fcntl.F_SETFL, fol & ~os.O_NONBLOCK)%0A except ValueError:%0A # Closed FD%0A pass%0A%0A if self.stderr is not None:%0A try:%0A fed = self.stderr.fileno()%0A fel = fcntl.fcntl(fed, fcntl.F_GETFL)%0A fcntl.fcntl(fed, fcntl.F_SETFL, fel & ~os.O_NONBLOCK)%0A except ValueError:%0A # Closed FD%0A pass%0A%0A super(NonBlockingPopen, self).__del__()%0A
|
|
f6e32ae48265232f25866dd9060b7cb80551e333
|
Create main.py
|
main.py
|
main.py
|
Python
| 0.000347
|
@@ -0,0 +1,1636 @@
+%0Adef calcProbPos(bPlus,bMinus,cPlus,cMinus):%0A probPos = ((bPlus/cPlus)*(cPlus/(cPlus+cMinus)))/((bPlus+bMinus)/(cPlus+cMinus))%0A return probPos%0A %0Adef calcMean(t,i):%0A m = t/i%0A return m%0A%0Aprint('Enter a statement without punctuation:')%0AuserStatement = input().lower()%0Aprint('THINKING...')%0AuserStatement = userStatement.strip('%5Cn').split(' ')%0AcPlus = 0%0AcMinus = 0%0AmeanTotal = 0%0A%0Awith open('sampleCorpora.txt') as corpora:%0A for line in corpora.readlines():%0A lineArray = line.strip('%5Cn').split(',')%0A jment = str(lineArray%5B1%5D)%0A if jment == %22pos%22:%0A cPlus += 1%0A else:%0A if jment == %22neg%22:%0A cMinus += 1%0A %0A%0AlengthInput = len(userStatement)%0Afor n in range(0,lengthInput):%0A bPlus = 0%0A bMinus = 0%0A checkString = str(userStatement%5Bn%5D)%0A #print(checkString)%0A with open('sampleCorpora.txt') as corpora:%0A for line in corpora.readlines():%0A corporaLine = line.strip('%5Cn').split(',')%0A checkAgainst = str(corporaLine%5B0%5D)%0A if checkString in checkAgainst:%0A posNegCheck = str(corporaLine%5B1%5D)%0A if posNegCheck == %22pos%22:%0A bPlus += 1%0A else:%0A if posNegCheck == %22neg%22:%0A bMinus += 1%0A %0A probPos = calcProbPos(bPlus,bMinus,cPlus,cMinus)%0A meanTotal = meanTotal + probPos%0A%0Aprint('RESULT: ')%0A%0Amean = calcMean(meanTotal,lengthInput)%0Aprint(str(mean))%0Aif mean %3E 0.5:%0A print('positive')%0Aelse:%0A if mean %3C 0.5:%0A print('negative')%0A else:%0A if mean == 0.5:%0A print('neutral')%0A
|
|
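
The expression in calcProbPos above algebraically reduces to bPlus / (bPlus + bMinus): the cPlus factors cancel, and so do the (cPlus + cMinus) denominators. A quick numeric check of that simplification:

bPlus, bMinus, cPlus, cMinus = 3, 1, 10, 8
lhs = ((bPlus / cPlus) * (cPlus / (cPlus + cMinus))) / ((bPlus + bMinus) / (cPlus + cMinus))
rhs = bPlus / (bPlus + bMinus)
assert abs(lhs - rhs) < 1e-12
print(lhs, rhs)  # 0.75 0.75
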
87a9769af3d201b925a5a4a259ccbd007257b1d3
|
add python test: read_pack.py
|
test/read_pack.py
|
test/read_pack.py
|
Python
| 0.00414
|
@@ -0,0 +1,133 @@
+import os%0Aimport msgpack%0A%0Af = open(%22/tmp/data.bin%22, %22r%22)%0Apackage = f.read(1024)%0Af.close()%0A%0Adata = msgpack.unpackb(package)%0Aprint data
|
|
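
A companion sketch (assumed, not part of the commit) that produces /tmp/data.bin in the form the test reads; note the binary mode, which the Python 2 test gets away with omitting on Linux. Requires the msgpack package:

import msgpack

with open('/tmp/data.bin', 'wb') as f:
    f.write(msgpack.packb({'a': 1, 'b': [2, 3]}))
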
3912416390ebe5df3c883b280cc6acac5169c1f7
|
Add test to check if elements have at least one owner
|
tests/test_elements_have_owner.py
|
tests/test_elements_have_owner.py
|
Python
| 0
|
@@ -0,0 +1,1788 @@
+%22%22%22%0AFor all relevant model elements, check if there is at least one %22owner%22%0A(%22owner%22 is a derived union).%0A%0AThis is needed to display all elements in the tree view.%0A%22%22%22%0A%0Aimport itertools%0A%0Aimport pytest%0A%0Aimport gaphor.SysML.diagramitems%0Aimport gaphor.UML.diagramitems%0Afrom gaphor import UML%0Afrom gaphor.core.modeling import Element%0Afrom gaphor.core.modeling.properties import derived%0Afrom gaphor.diagram.support import get_model_element%0A%0A%0Adef all_subset_properties(prop):%0A for sub in prop.subsets:%0A if isinstance(sub, derived):%0A yield from all_subset_properties(sub)%0A else:%0A yield sub%0A%0A%0Adef all_presented_elements(module):%0A return (%0A get_model_element(getattr(module, name))%0A for name in dir(module)%0A if not name.startswith(%22_%22) and get_model_element(getattr(module, name))%0A )%0A%0A%0Adef all_presented_uml_and_sysml_elements():%0A return itertools.chain(%0A all_presented_elements(gaphor.UML.diagramitems),%0A all_presented_elements(gaphor.SysML.diagramitems),%0A %5B%0A UML.ExecutionOccurrenceSpecification,%0A UML.ExtensionEnd,%0A UML.InstanceSpecification,%0A UML.MessageOccurrenceSpecification,%0A %5D,%0A )%0A%0A%0Adef concrete_owner_property(class_):%0A return (%0A p for p in class_.umlproperties() if p in all_subset_properties(Element.owner)%0A )%0A%0A%0Adef test_all_presented_uml_and_sysml_elements():%0A elements = all_presented_uml_and_sysml_elements()%0A assert all(issubclass(c, Element) for c in elements)%0A%0A%0A@pytest.mark.parametrize(%22class_%22, all_presented_uml_and_sysml_elements())%0Adef test_element_has_concrete_ownable_property(class_):%0A owners = list(concrete_owner_property(class_))%0A print(f%22%7Bclass_%7D: %7Blist(map(str, owners))%7D%22)%0A assert any(owners)%0A
|
|
5307d1cf69c943f7f5fe9dfd475c93f317e8ebb7
|
add import script for West Lancashire
|
polling_stations/apps/data_collection/management/commands/import_west_lancashire.py
|
polling_stations/apps/data_collection/management/commands/import_west_lancashire.py
|
Python
| 0
|
@@ -0,0 +1,434 @@
+from data_collection.management.commands import BaseXpressWebLookupCsvImporter%0A%0Aclass Command(BaseXpressWebLookupCsvImporter):%0A council_id = 'E07000127'%0A addresses_name = 'West Lancashire - PropertyPostCodePollingStationWebLookup-2017-03-08.TSV'%0A stations_name = 'West Lancashire - PropertyPostCodePollingStationWebLookup-2017-03-08.TSV'%0A elections = %5B'local.lancashire.2017-05-04'%5D%0A csv_delimiter = '%5Ct'%0A
|
|
93997e72f63dd586d1a683475f49a466571a9fb0
|
Create index.py
|
index.py
|
index.py
|
Python
| 0.000016
|
@@ -0,0 +1,43 @@
+#!/usr/bin/python %0Aprint(%22Hello, World!%22);%0A
|
|
4a48b8dd804f9a287d35b697d851a660eec80a75
|
Add tests for simple enums
|
tests/richenum/test_simple_enums.py
|
tests/richenum/test_simple_enums.py
|
Python
| 0
|
@@ -0,0 +1,1236 @@
+import unittest%0Afrom richenum import EnumConstructionException, enum%0A%0A%0ABreakfast = enum(%0A COFFEE=0,%0A OATMEAL=1,%0A FRUIT=2)%0A%0A%0Aclass SimpleEnumTestSuite(unittest.TestCase):%0A%0A def test_members_are_accessible_through_attributes(self):%0A self.assertEqual(Breakfast.COFFEE, 0)%0A%0A def test_lookup_by_name(self):%0A self.assertEqual(Breakfast.get_id_by_label('COFFEE'), 0)%0A%0A def test_lookup_by_value(self):%0A self.assertEqual(Breakfast.get_label_by_id(0), 'COFFEE')%0A%0A def test_can_cast_to_list_of_choices(self):%0A self.assertEqual(%0A Breakfast.choices,%0A %5B(0, 'COFFEE'), (1, 'OATMEAL'), (2, 'FRUIT')%5D)%0A%0A def test_choices_are_ordered_by_value(self):%0A Shuffled = enum(FRUIT=2, COFFEE=0, OATMEAL=1)%0A self.assertEqual(Shuffled.choices, Breakfast.choices)%0A%0A def test_values_can_be_any_hashable_type(self):%0A try:%0A Confused = enum(INT=0, TUPLE=(1, 2), STR='yup')%0A self.assertEqual(Confused.get_id_by_label('TUPLE'), (1, 2))%0A except:%0A self.fail('Simple enums should accept values of any hashable type.')%0A%0A with self.assertRaisesRegexp(EnumConstructionException, 'hashable'):%0A Confused = enum(LIST=%5B1, 2%5D)%0A
|
|
92075a04b0835b1209eaa806c2aeb44ca371ff2b
|
Add harfbuzz 0.9.40
|
packages/harfbuzz.py
|
packages/harfbuzz.py
|
Python
| 0
|
@@ -0,0 +1,285 @@
+Package ('harfbuzz', '0.9.40',%0A%09sources = %5B'http://www.freedesktop.org/software/%25%7Bname%7D/release/%25%7Bname%7D-%25%7Bversion%7D.tar.bz2'%5D,%0A%09configure_flags = %5B%0A%09%09'--disable-silent-rules',%0A%09%09'--without-cairo',%0A%09%09'--without-freetype',%0A%09%09'--without-glib',%0A%09%09'--without-graphite2',%0A%09%09'--with-icu',%0A%09%5D)%0A
|
|
564bf6484347fed1d3346ff42d79e4bba02a3c98
|
add first test
|
test_add_group.py
|
test_add_group.py
|
Python
| 0.999776
|
@@ -0,0 +1,2250 @@
+# -*- coding: utf-8 -*-%0Afrom selenium.webdriver.firefox.webdriver import WebDriver%0Afrom selenium.webdriver.common.action_chains import ActionChains%0Aimport time, unittest%0A%0Adef is_alert_present(wd):%0A try:%0A wd.switch_to_alert().text%0A return True%0A except:%0A return False%0A%0Aclass test_add_group(unittest.TestCase):%0A def setUp(self):%0A self.wd = WebDriver()%0A self.wd.implicitly_wait(60)%0A %0A def test_test_add_group(self):%0A success = True%0A wd = self.wd%0A wd.get(%22http://localhost/addressbook/%22)%0A wd.find_element_by_name(%22user%22).click()%0A wd.find_element_by_name(%22user%22).clear()%0A wd.find_element_by_name(%22user%22).send_keys(%22admin%22)%0A wd.find_element_by_name(%22pass%22).click()%0A wd.find_element_by_name(%22pass%22).clear()%0A wd.find_element_by_name(%22pass%22).send_keys(%22secret%22)%0A wd.find_element_by_xpath(%22//form%5B@id='LoginForm'%5D/input%5B3%5D%22).click()%0A wd.find_element_by_link_text(%22groups%22).click()%0A wd.find_element_by_name(%22new%22).click()%0A wd.find_element_by_name(%22group_name%22).click()%0A wd.find_element_by_name(%22group_name%22).clear()%0A wd.find_element_by_name(%22group_name%22).send_keys(%22test_name%22)%0A wd.find_element_by_name(%22group_name%22).click()%0A wd.find_element_by_name(%22group_name%22).clear()%0A wd.find_element_by_name(%22group_name%22).send_keys(%22test_name%22)%0A wd.find_element_by_name(%22group_header%22).click()%0A wd.find_element_by_name(%22group_header%22).clear()%0A wd.find_element_by_name(%22group_header%22).send_keys(%22test_header%22)%0A wd.find_element_by_name(%22group_header%22).click()%0A wd.find_element_by_name(%22group_header%22).clear()%0A wd.find_element_by_name(%22group_header%22).send_keys(%22test_header%22)%0A wd.find_element_by_name(%22group_footer%22).click()%0A wd.find_element_by_name(%22group_footer%22).clear()%0A wd.find_element_by_name(%22group_footer%22).send_keys(%22test_footer%22)%0A wd.find_element_by_name(%22submit%22).click()%0A wd.find_element_by_link_text(%22group page%22).click()%0A wd.find_element_by_link_text(%22Logout%22).click()%0A self.assertTrue(success)%0A %0A def tearDown(self):%0A self.wd.quit()%0A%0Aif __name__ == '__main__':%0A unittest.main()%0A
|
|
774b59a2bba95c4b617ac49e279bcbe73d6b6f3b
|
Add a script to plot timing data
|
profiling/plot.py
|
profiling/plot.py
|
Python
| 0.000002
|
@@ -0,0 +1,597 @@
+#!/usr/bin/env python3%0A# -*- coding: utf-8 -*-%0A%0Aimport glob%0A%0Aimport numpy as np%0Aimport matplotlib.pyplot as plt%0A%0A%0Acsv_files = glob.glob('*.csv')%0A%0Afig = plt.figure()%0Aax = fig.add_subplot(111)%0A%0Acolors = iter(plt.cm.rainbow(np.linspace(0,1,len(csv_files))))%0A%0Afor csv_file in csv_files:%0A data = np.genfromtxt(csv_file, delimiter=',', skip_header=1).transpose()%0A j = data%5B0%5D%0A N = data%5B1%5D%0A avg = data%5B2%5D%0A std = data%5B3%5D%0A ax.errorbar(N, avg, yerr=std, label=csv_file, color=next(colors), marker='o')%0A%0Aax.set_xlabel('N')%0Aax.set_ylabel('Timing %5Bms%5D')%0Aax.legend(loc='best')%0A%0Aplt.show()%0A
|
|
e4cc622b6c296f57324eccba2b1ed3ff2201868d
|
Reverse a singly linked list
|
python/interviewquestions/reverse_linked_list.py
|
python/interviewquestions/reverse_linked_list.py
|
Python
| 0.999671
|
@@ -0,0 +1,1154 @@
+%22%22%22%0AGiven a singly linked list, reverse it in place and return the head%0Aof the new list.%0A%22%22%22%0A%0Aimport unittest%0A%0Aclass Node(object):%0A def __init__(self, value):%0A self.value = value%0A self.next = None%0A%0A def __repr__(self):%0A return %22%3CNode %25d%3E%22 %25 self.value%0A%0A%0Adef rev(curr):%0A last = None%0A while curr is not None:%0A next = curr.next%0A curr.next = last%0A last = curr%0A curr = next%0A return last%0A%0A%0Aclass TestReverse(unittest.TestCase):%0A def test_reverse(self):%0A l1, l2, l3, l4, l5 = Node(1), Node(2), Node(3), Node(4), Node(5)%0A l1.next = l2%0A l2.next = l3%0A l3.next = l4%0A l4.next = l5%0A%0A head = rev(l1)%0A self.assertEqual(head, l5)%0A %0A self.assertEqual(head.next, l4)%0A self.assertEqual(l4.next, l3)%0A self.assertEqual(l3.next, l2)%0A self.assertEqual(l2.next, l1)%0A self.assertEqual(l1.next, None)%0A%0A def test_reverse_onenode(self):%0A n = Node(1)%0A self.assertEqual(n, rev(n))%0A%0A def test_reverse_empty(self):%0A self.assertEqual(None, rev(None))%0A%0A%0Aif __name__ == %22__main__%22:%0A unittest.main()%0A%0A
|
|
48c4a4fe9531123d6ca2b9af18162c916af09cc9
|
Create moto_parser.py
|
Bootloader/moto_parser.py
|
Bootloader/moto_parser.py
|
Python
| 0.000054
|
@@ -0,0 +1 @@
+%0A
|
|
13fdc81cb32842dc5e0f05d2aa84c997cd59daa3
|
Add test that, if we failed to open the log file, we don't try to write to it.
|
IPython/core/tests/test_logger.py
|
IPython/core/tests/test_logger.py
|
Python
| 0
|
@@ -0,0 +1,517 @@
+%22%22%22Test IPython.core.logger%22%22%22%0A%0Aimport nose.tools as nt%0A%0A_ip = get_ipython()%0A%0Adef test_logstart_inaccessible_file():%0A try:%0A _ip.logger.logstart(logfname=%22/%22) # Opening that filename will fail.%0A except IOError:%0A pass%0A else:%0A nt.assert_true(False) # The try block should never pass.%0A %0A try:%0A _ip.run_cell(%22a=1%22) # Check it doesn't try to log this%0A finally:%0A _ip.logger.log_active = False # If this fails, don't let later tests fail%0A %0A
|
|
a8b3af76c1a6cbf61887f5721fd10bf2ef24b2f8
|
Create A_Salinity_vertical_section_zy_movie.py
|
Cas_6/Vertical_sections/A_Salinity_vertical_section_zy_movie.py
|
Cas_6/Vertical_sections/A_Salinity_vertical_section_zy_movie.py
|
Python
| 0.000574
|
@@ -0,0 +1,782 @@
+ %0A %0A %0A %0A %0A %0A %0A %0A %0A plt.figure(2)%0A ax = plt.subplot(projection=ccrs.PlateCarree());%0A ds1%5B'S'%5D.where(ds1.hFacC%3E0)%5Bnt,:,:,280%5D.plot()%0A plt.title('Vertical Section (yz) of Salinity (XC = 0E)')%0A plt.text(5,5,nt,ha='center',wrap=True)%0A ax.coastlines()%0A gl = ax.gridlines(draw_labels=True, alpha = 0.5, linestyle='--');%0A gl.xlabels_top = False%0A gl.ylabels_right = False%0A gl.xformatter = LONGITUDE_FORMATTER%0A gl.yformatter = LATITUDE_FORMATTER%0A if (nt %3C 10):%0A plt.savefig('Salinity_Vertical_section_xz_Cas6-'+'00'+str(nt)+'.png')%0A plt.clf()%0A elif (nt %3E 9) and (nt %3C 100):%0A plt.savefig('Salinity_Vertical_section_xz_Cas6'+'0'+str(nt)+'.png')%0A plt.clf()%0A else:%0A plt.savefig('Salinity_Vertical_section_xz_Cas6'+str(nt)+'.png')%0A plt.clf()%0A
|
|
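
Two details worth noting in the fragment above: the three-way if/elif exists only to zero-pad the frame number, which str.zfill does directly, and the first branch inserts a hyphen after "Cas6" that the other branches omit, so the frames would not sort together. A minimal check of the zfill equivalent:

for nt in (3, 42, 137):
    print('Salinity_Vertical_section_xz_Cas6-' + str(nt).zfill(3) + '.png')
# Salinity_Vertical_section_xz_Cas6-003.png ... consistent, sortable names
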
76e412121b80c39d9facc09a51d9b8aa4cdb9722
|
Add check-timeouts functionality
|
OAB/oab_check_timeouts.py
|
OAB/oab_check_timeouts.py
|
Python
| 0.000001
|
@@ -0,0 +1,2732 @@
+#!/usr/bin/python%0A%0Aimport argparse%0Aimport pycurl%0Aimport re%0Aimport csv%0Afrom StringIO import StringIO%0Afrom urllib import urlencode%0Afrom sys import exit%0A%0A# Arguments handling%0A%0A# Setting output filenames%0Ainputfile = %22lalala_nok.csv%22%0Afilename_ok = %22output_ok.csv%22%0Afilename_nok = %22output_nok.csv%22%0A%0A%0A# Variable definitions%0Aurl = 'https://www2.oabsp.org.br/asp/consultaInscritos/consulta_nr_advogado.asp'%0Areferer = 'https://www2.oabsp.org.br/asp/consultaInscritos/consulta01.asp'%0Ano_results = 'Não há resultados que satisfaçam a busca'%0A%0A# Building the pyCurl request%0Ac = pycurl.Curl()%0Ac.setopt(pycurl.SSL_VERIFYPEER, 0)%0Ac.setopt(pycurl.SSL_VERIFYHOST, 0)%0Ac.setopt(pycurl.SSLVERSION, 3)%0Ac.setopt(pycurl.CONNECTTIMEOUT, 3)%0Ac.setopt(pycurl.TIMEOUT, 3)%0Ac.setopt(pycurl.URL, url)%0Ac.setopt(pycurl.REFERER, referer)%0Ac.setopt(pycurl.HTTPHEADER, %5B'Connection: keep-alive'%5D)%0A%0A# Iterating through oab_codes%0Awith open(inputfile, %22r%22) as oab_codes:%0A for oab_codex in oab_codes:%0A%09oab_code = oab_codex.strip('%5Cn')%0A post_data = %7B'pagina': 0,%0A 'tipo_consulta' : 1,%0A 'nr_inscricao' : oab_code,%0A 'cbxadv' : 1,%0A 'id_tipoinscricao': 1,%0A 'parte_nome' : 1,%0A 'idCidade' : 0%0A %7D%0A post_fields = urlencode(post_data)%0A # print(post_fields)%0A %0A c.setopt(c.POSTFIELDS, post_fields)%0A try:%0A buffer = StringIO()%0A c.setopt(c.WRITEDATA, buffer)%0A c.perform()%0A response = buffer.getvalue()%0A %0A if(no_results in response):%0A with open(filename_nok, %22a%22) as output_nok:%0A output_nok.write(str(oab_code)+%22,notfound%5Cn%22)%0A print str(oab_code)+',notfound'%0A else: %0A token=response.split('%3Cli%3E%3Cspan%3E')%0A name=token%5B1%5D.replace(%22%3C/span%3E%3C/li%3E%22,%22%22)%0A oab_code_state=re.sub('.* - ', '', token%5B2%5D).replace('%3C/li%3E','')%0A date=re.sub('.*%3C/span%3E','',token%5B3%5D).replace('%3C/li%3E','')%0A subsection=re.sub('.*%3C/span%3E','',token%5B4%5D).replace('%3C/li%3E','')%0A status=re.sub('%3C/li%3E.*','',re.sub('.*%3C/span%3E','',token%5B5%5D))%0A %0A fields=%5Bstr(oab_code),name,status,oab_code_state,subsection,date%5D%0A %0A with open(filename_ok, %22a%22) as output_ok:%0A writer = csv.writer(output_ok)%0A writer.writerow(fields)%0A print fields%0A except:%0A with open(filename_nok, %22a%22) as output_nok:%0A output_nok.write(str(oab_code)+%22,timeout%5Cn%22)%0A print str(oab_code) + ',timeout'%0A%0Ac.close()%0A
|
|
a1337ca14fe2f21c849bd27132bdee079ac47e59
|
Add Session Support
|
app/Session.py
|
app/Session.py
|
Python
| 0
|
@@ -0,0 +1,1700 @@
+#!/usr/bin/python%0A# -*- coding:utf-8 -*-%0A# Powered By KK Studio%0A# Session Support For Tornado%0A%0Aimport hashlib%0Aimport os%0Aimport time%0Aimport json%0A%0A%0Aclass Session:%0A%0A%0A    def __init__(self,prefix='',session_id=None,expires=7200,redis=None):%0A        self.redis = redis%0A        self.expires = expires%0A        self.prefix = prefix%0A        if session_id:%0A            self.session_id = prefix + session_id%0A            self.data = self.get_data()%0A            if self.data:%0A                self.isGuest = False%0A            else:%0A                self.isGuest = True # Not Login%0A        else:%0A            self.session_id = None%0A            self.data = %7B%7D # Null Dict%0A            self.isGuest = True # Not Login%0A%0A%0A    # Generate SessionID%0A    def gen_session_id(self):%0A        sid = hashlib.sha1('%25s%25s' %25 (os.urandom(16), time.time())).hexdigest()%0A        self.session_id = self.prefix + sid%0A        return sid%0A%0A%0A    # Get Session data%0A    def get_data(self):%0A        session = self.redis.get(self.session_id)%0A        if not session:%0A            return None%0A        session = json.loads(session) # string to dict%0A        return session%0A%0A%0A    # Get%0A    def get(self,name):%0A        if name:%0A            return self.data.get(name,None)%0A        else:%0A            return None%0A%0A%0A    # Set%0A    def set(self,name,value):%0A        self.data%5Bname%5D = value%0A%0A%0A    def save(self):%0A        if not self.isGuest and self.session_id and self.data:%0A            self.redis.set(self.session_id,json.dumps(self.data),self.expires)%0A%0A%0A    # Destroy Session%0A    def remove(self):%0A        if self.session_id: # SessionID exists%0A            self.redis.delete(self.session_id)%0A            self.session_id = None%0A            self.data = None%0A            self.isGuest = True%0A
|
|
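
One portability note on the class above: gen_session_id feeds '%s%s' % (os.urandom(16), time.time()) straight into sha1, which only works on Python 2, where the result is a byte string. A Python 3 equivalent of the same scheme, encoding before hashing:

import hashlib
import os
import time

def gen_session_id(prefix=''):
    seed = os.urandom(16) + str(time.time()).encode()
    return prefix + hashlib.sha1(seed).hexdigest()

print(gen_session_id('sess_'))
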
f9f5d2b040618bc7d7c26383218fad390bf9dd0a
|
add unit test for connection_detail_retriever
|
tests/test_common/test_cloudshell/test_connection_detail_retriever.py
|
tests/test_common/test_cloudshell/test_connection_detail_retriever.py
|
Python
| 0.000002
|
@@ -0,0 +1,1060 @@
+from unittest import TestCase%0A%0Afrom mock import Mock%0A%0Afrom common.cloudshell.conn_details_retriever import ResourceConnectionDetailsRetriever%0A%0A%0Aclass TestConnectionDetailRetriever(TestCase):%0A def test_connection_detail_retriever(self):%0A helpers = Mock()%0A cs_retriever_service = Mock()%0A session = Mock()%0A resource_context = Mock()%0A connection_details = Mock()%0A%0A helpers.get_resource_context_details = Mock(return_value=resource_context)%0A helpers.get_api_session = Mock(return_value=session)%0A cs_retriever_service.getVCenterConnectionDetails = Mock(return_value=connection_details)%0A retriever = ResourceConnectionDetailsRetriever(helpers, cs_retriever_service)%0A%0A res = retriever.connection_details()%0A%0A self.assertEqual(res, connection_details)%0A self.assertTrue(helpers.get_resource_context_details.called)%0A self.assertTrue(helpers.get_api_session.called)%0A self.assertTrue(cs_retriever_service.getVCenterConnectionDetails.called_with(session, resource_context))%0A
|
|
c5a6bfdca30a5111e641ebe4b2eac40b21b8ce74
|
Fix CPU time consumption in green_poller poll()
|
oslo_messaging/_drivers/zmq_driver/poller/green_poller.py
|
oslo_messaging/_drivers/zmq_driver/poller/green_poller.py
|
# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import threading
import eventlet
import six
from oslo_messaging._drivers import common as rpc_common
from oslo_messaging._drivers.zmq_driver import zmq_poller
LOG = logging.getLogger(__name__)
class GreenPoller(zmq_poller.ZmqPoller):
def __init__(self):
self.incoming_queue = six.moves.queue.Queue()
self.green_pool = eventlet.GreenPool()
self.thread_by_socket = {}
def register(self, socket, recv_method=None):
if socket not in self.thread_by_socket:
self.thread_by_socket[socket] = self.green_pool.spawn(
self._socket_receive, socket, recv_method)
def _socket_receive(self, socket, recv_method=None):
while True:
if recv_method:
incoming = recv_method(socket)
else:
incoming = socket.recv_multipart()
self.incoming_queue.put((incoming, socket))
eventlet.sleep()
def poll(self, timeout=None):
incoming = None
try:
with eventlet.Timeout(timeout, exception=rpc_common.Timeout):
while incoming is None:
try:
incoming = self.incoming_queue.get_nowait()
except six.moves.queue.Empty:
eventlet.sleep()
except rpc_common.Timeout:
return None, None
return incoming[0], incoming[1]
def close(self):
for thread in self.thread_by_socket.values():
thread.kill()
self.thread_by_socket = {}
class HoldReplyPoller(GreenPoller):
def __init__(self):
super(HoldReplyPoller, self).__init__()
self.event_by_socket = {}
self._is_running = threading.Event()
def register(self, socket, recv_method=None):
super(HoldReplyPoller, self).register(socket, recv_method)
self.event_by_socket[socket] = threading.Event()
def resume_polling(self, socket):
pause = self.event_by_socket[socket]
pause.set()
def _socket_receive(self, socket, recv_method=None):
pause = self.event_by_socket[socket]
while not self._is_running.is_set():
pause.clear()
if recv_method:
incoming = recv_method(socket)
else:
incoming = socket.recv_multipart()
self.incoming_queue.put((incoming, socket))
pause.wait()
def close(self):
self._is_running.set()
for pause in self.event_by_socket.values():
pause.set()
eventlet.sleep()
super(HoldReplyPoller, self).close()
class GreenExecutor(zmq_poller.Executor):
def __init__(self, method):
self._method = method
super(GreenExecutor, self).__init__(None)
def _loop(self):
while True:
self._method()
eventlet.sleep()
def execute(self):
self.thread = eventlet.spawn(self._loop)
def wait(self):
if self.thread is not None:
self.thread.wait()
def stop(self):
if self.thread is not None:
self.thread.kill()
|
Python
| 0.000071
|
@@ -656,76 +656,8 @@
let%0A
-import six%0A%0Afrom oslo_messaging._drivers import common as rpc_common
%0Afro
@@ -844,32 +844,36 @@
queue =
-six.moves
+eventlet
.queue.
+Light
Queue()%0A
@@ -1533,368 +1533,108 @@
-incoming = None%0A try:%0A with eventlet.Timeout(timeout, exception=rpc_common.Timeout):%0A while incoming is None:%0A try:%0A incoming = self.incoming_queue.get_nowait()%0A except six.moves.queue.Empty:%0A eventlet.sleep()%0A except rpc_common.Timeout
+try:%0A return self.incoming_queue.get(timeout=timeout)%0A except eventlet.queue.Empty
:%0A
@@ -1650,16 +1650,17 @@
return
+(
None, No
@@ -1665,48 +1665,9 @@
None
-%0A return incoming%5B0%5D, incoming%5B1%5D
+)
%0A%0A
|
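
The fix above replaces a get_nowait()/sleep() busy loop with a single blocking get(timeout=...) on eventlet's LightQueue, so the green thread parks instead of spinning. The same pattern with the stdlib queue, which behaves analogously:

import queue

q = queue.Queue()

def poll(timeout=None):
    try:
        return q.get(timeout=timeout)  # blocks without spinning the CPU
    except queue.Empty:
        return (None, None)

q.put(('payload', 'socket-1'))
print(poll(timeout=0.1))  # ('payload', 'socket-1')
print(poll(timeout=0.1))  # (None, None) after ~0.1 s
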
f5675a1cebfe6aa0f8dda3b94aa30139e2528c49
|
Create broadcast.py
|
plugins/broadcast.py
|
plugins/broadcast.py
|
Python
| 0.000001
|
@@ -0,0 +1,530 @@
+@bot.message_handler(commands=%5B'bc'%5D)%0Adef bc_msg(message):%0A if message.from_user.id in ADMINS_IDS:%0A if len(message.text.split()) %3C 2:%0A bot.reply_to(message, %22What should I broadcast?%22)%0A return%0A bcmsg = message.text.replace(%22/bc %22,%22%22)%0A allmembers = list(redisserver.smembers('zigzag_members'))%0A for userid in allmembers:%0A bot.send_message(userid, bcmsg, parse_mode=%22HTML%22)%0A bot.reply_to(message, %22Successfully broadcasted!%22)%0A else:%0A bot.send_message(message.chat.id, %22You dont have permission.%22)%0A
|
|
d34d1d50b853d3a205cbc60a75dd3911a9253b4e
|
update backend
|
app/scraper.py
|
app/scraper.py
|
Python
| 0.000001
|
@@ -0,0 +1,1116 @@
+import collections%0Aimport json%0Aimport httplib2%0Afrom oauth2client.client import GoogleCredentials%0A%0Afrom lib import Term%0A%0Adef get_http():%0A http = httplib2.Http()%0A GoogleCredentials.get_application_default().create_scoped(%5B%0A 'https://www.googleapis.com/auth/firebase.database',%0A 'https://www.googleapis.com/auth/userinfo.email'%0A %5D).authorize(http)%0A return http%0A %0A%0Adef run():%0A terms = sorted(list(Term.all()))%0A while len(terms) %3E 0:%0A term = terms.pop(0)%0A data = collections.defaultdict(dict)%0A for course, sections in term.courses.items():%0A for id, section in sections.items():%0A if data%5Bcourse.id%5D.get('name', section.name) != section.name:%0A print('%5B%25s%5D Conflicting course name for %25s: %25s, %25s' %25 (term, course.id, data%5Bcourse.id%5D, section.name))%0A data%5Bcourse.id%5D%5B'name'%5D = section.name%0A print(get_http().request(%0A 'https://canigraduate-43286.firebaseio.com/course-info.json',%0A method='PATCH',%0A body=json.dumps(data)))%0A %0A%0Aif __name__ == '__main__':%0A run()%0A
|
|
620401abdb33b335452df709a1a1f2c4bc55cd4c
|
Add challenge day 6
|
leetcode/challenge/day06.py
|
leetcode/challenge/day06.py
|
Python
| 0.000003
|
@@ -0,0 +1,626 @@
+%22%22%22%0D%0AGiven an array of strings, group anagrams together.%0D%0A%0D%0AExample:%0D%0A%0D%0AInput: %5B%22eat%22, %22tea%22, %22tan%22, %22ate%22, %22nat%22, %22bat%22%5D,%0D%0AOutput:%0D%0A%5B%0D%0A %5B%22ate%22,%22eat%22,%22tea%22%5D,%0D%0A %5B%22nat%22,%22tan%22%5D,%0D%0A %5B%22bat%22%5D%0D%0A%5D%0D%0ANote:%0D%0A%0D%0AAll inputs will be in lowercase.%0D%0AThe order of your output does not matter.%0D%0A%22%22%22%0D%0A%0D%0Aclass Solution:%0D%0A def groupAnagrams(self, strs: List%5Bstr%5D) -%3E List%5BList%5Bstr%5D%5D:%0D%0A hashes = %7B%7D%0D%0A for s in strs:%0D%0A sHash = tuple(sorted(s))%0D%0A if sHash in hashes:%0D%0A hashes%5BsHash%5D.append(s)%0D%0A else:%0D%0A hashes%5BsHash%5D = %5Bs%5D%0D%0A return %5Bv for k, v in hashes.items()%5D
|
|
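
The same grouping reads a little tighter with collections.defaultdict, which removes the key-existence branch. A standalone version, using the example input from the docstring:

from collections import defaultdict

def group_anagrams(strs):
    groups = defaultdict(list)
    for s in strs:
        groups[tuple(sorted(s))].append(s)  # sorted letters act as the anagram key
    return list(groups.values())

print(group_anagrams(['eat', 'tea', 'tan', 'ate', 'nat', 'bat']))
# [['eat', 'tea', 'ate'], ['tan', 'nat'], ['bat']]
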
7af8cc6d59a1d52e7decc90ecb9472f1c5825aa3
|
Create ds_hash_two_sum.py
|
leetcode/ds_hash_two_sum.py
|
leetcode/ds_hash_two_sum.py
|
Python
| 0.000169
|
@@ -0,0 +1,731 @@
+# @file Two Sum%0A# @brief Given an array and target, find 2 nums in array that sum to target%0A%0A# https://leetcode.com/problems/two-sum/%0A%0A'''%0AGiven an array of integers, return indices of the two numbers such that they%0Aadd up to a specific target.%0AYou may assume that each input would have exactly one solution.%0AExample: Given nums = %5B2, 7, 11, 15%5D, target = 9,%0ABecause nums%5B0%5D + nums%5B1%5D = 2 + 7 = 9,%0Areturn %5B0, 1%5D.%0A'''%0A%0A#Note: Use property that if x + y = target, y = target - x%0A#Use a dictionary with each value (x) as key and idx as value%0A#Time Complexity = O(n)%0Adef twoSum(self, nums, target):%0A dict = %7B%7D%0A for i in range(len(nums)):%0A x = nums%5Bi%5D%0A y = target - x%0A if(y in dict): return dict.get(y), i%0A dict%5Bx%5D = i%0A%0A
|
|
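
A self-contained version of the hash-map lookup above, with the worked example from the docstring:

def two_sum(nums, target):
    seen = {}  # value -> index
    for i, x in enumerate(nums):
        y = target - x
        if y in seen:
            return seen[y], i
        seen[x] = i

print(two_sum([2, 7, 11, 15], 9))  # (0, 1)
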
6a84ed3872303aa5b05462982406749d7bd447d4
|
Create main.py
|
main.py
|
main.py
|
Python
| 0
|
@@ -0,0 +1,892 @@
+#!/usr/bin/env python%0A# Command line script to convert a single given number to and from several units%0A%0Aimport argparse%0Afrom src.convert import kilometers_to_miles, miles_to_kilometers, %5C%0Ayears_to_minutes, minutes_to_years%0A%0A#parse args%0Aparse = argparse.ArgumentParser()%0Aparse.add_argument('value', type=float, help=%22Provide the number to be converted%22)%0Aargs = parse.parse_args()%0A%0A#perform conversions %0A#km -%3E miles%0Ato_miles = kilometers_to_miles(args.value)%0Aprint(%22%7B0%7D kilometer is %7B1%7D miles%22.format(args.value, to_miles))%0A%0A#miles -%3E km%0Ato_km = miles_to_kilometers(args.value)%0Aprint(%22%7B0%7D miles is %7B1%7D kilometers%22.format(args.value, to_km))%0A%0A#years -%3E minutes%0Ato_minutes = years_to_minutes(args.value)%0Aprint(%22%7B0%7D years is %7B1%7D minutes%22.format(args.value, to_minutes))%0A%0A#minutes -%3E years%0Ato_years = minutes_to_years(args.value)%0Aprint(%22%7B0%7D minutes is %7B1%7D years%22.format(args.value, to_years))%0A%0A#fin%0A
|
|
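
The script above imports four helpers from src.convert that the commit does not include. A hedged sketch of what that module would contain; the mile/kilometre constants are the standard ones, and the 365.25-day Julian year is an assumption:

# src/convert.py (hypothetical)
def kilometers_to_miles(km):
    return km * 0.621371

def miles_to_kilometers(mi):
    return mi * 1.609344

def years_to_minutes(y):
    return y * 365.25 * 24 * 60

def minutes_to_years(m):
    return m / (365.25 * 24 * 60)
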
7629a1cd27c80c5ebff91c4d01bf648f9d4c9b3c
|
Create main.py
|
main.py
|
main.py
|
Python
| 0.000001
|
@@ -0,0 +1 @@
+%0A
|
|
dbb147018a92426c5c9e19a523e0bd8d4c277035
|
Create LED_GPIO.py
|
setup/gpio/LED_GPIO.py
|
setup/gpio/LED_GPIO.py
|
Python
| 0.000001
|
@@ -0,0 +1,461 @@
+%0A%0Aimport time%0Aimport lgpio%0A%0A#17,27,22%0A%0ALED = 17%0A%0A# open the gpio chip and set the LED pin as output%0Ah = lgpio.gpiochip_open(0)%0Algpio.gpio_claim_output(h, LED)%0A%0Atry:%0A while True:%0A # Turn the GPIO pin on%0A lgpio.gpio_write(h, LED, 1)%0A time.sleep(1)%0A%0A # Turn the GPIO pin off%0A lgpio.gpio_write(h, LED, 0)%0A time.sleep(1)%0A %0Aexcept KeyboardInterrupt:%0A lgpio.gpio_write(h, LED, 0)%0A lgpio.gpiochip_close(h)%0A %0A
|
|
d856ea5597230b3befeb03049c45f3706bec5844
|
add kael-crontab cli
|
kael/cron/cli.py
|
kael/cron/cli.py
|
Python
| 0.000003
|
@@ -0,0 +1,811 @@
+#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A%22%22%22%0A@version:%0A@author:%0A@time: 2017/6/15%0A%22%22%22%0Aimport os%0A%0Aimport click%0A%0A%0A@click.group()%0Adef cli():%0A    pass%0A%0A%0Adef _check_task_is_py(command):%0A    command = command.strip()%0A    head = command.split(' ')%5B0%5D%0A    if 'py' == head.split('.')%5B-1%5D:%0A        return True%0A    return False%0A%0A%0A@cli.command('run', short_help='Run task of cron with env.')%0A@click.option('-c', help='Command string')%0A@click.option('-d', help='Absolute directory of task')%0A@click.option('-p', help='Python interpreter location')%0Adef run(c, d, p):%0A    if not d:%0A        raise click.UsageError('absolute directory of task (-d) is required')%0A    os.chdir(d)%0A    python_env = p if p else 'python'%0A    if _check_task_is_py(c):%0A        os.system('%7B%7D %7B%7D'.format(python_env, c))%0A    else:%0A        os.system(c)%0A%0A%0Adef main():%0A    cli()%0A%0A%0Aif __name__ == '__main__':%0A    main()%0A
|
|
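
_check_task_is_py above keys purely off the extension of the command's first token. A quick standalone check of that behaviour:

def check_task_is_py(command):
    head = command.strip().split(' ')[0]
    return head.split('.')[-1] == 'py'

print(check_task_is_py('job.py --daily'))   # True
print(check_task_is_py('backup.sh --all'))  # False
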
599ed110458d5bcf23b74a95c5c472cc376ed702
|
Create field_notes.py
|
djspace/application/field_notes.py
|
djspace/application/field_notes.py
|
Python
| 0.000001
|
@@ -0,0 +1,51 @@
+# adding all the fields we need for this form..%0A# %0A
|
|
f4b0135a48ee94d8504ddf24dcc16b8036c05f2c
|
add test file
|
tests/app_test.py
|
tests/app_test.py
|
Python
| 0.000001
|
@@ -0,0 +1,437 @@
+import os%0Aimport app%0Aimport unittest%0Aimport tempfile%0A%0Aclass FlaskrTestCase(unittest.TestCase):%0A%0A def setUp(self):%0A self.db_fd, app.app.config%5B'DATABASE'%5D = tempfile.mkstemp()%0A app.app.config%5B'TESTING'%5D = True%0A self.app = app.app.test_client()%0A app.init_db()%0A%0A def tearDown(self):%0A os.close(self.db_fd)%0A os.unlink(app.app.config%5B'DATABASE'%5D)%0A%0Aif __name__ == '__main__':%0A unittest.main()%0A
|
|
27ab639a4a48c263a6ba2f53030d65136580bdb4
|
Remove useless constant.
|
astm/client.py
|
astm/client.py
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012 Alexander Shorin
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
import logging
import socket
import time
from .asynclib import loop
from .codec import encode_message
from .constants import ENQ, EOT
from .exceptions import InvalidState, NotAccepted
from .mapping import Record
from .protocol import ASTMProtocol, STATE
log = logging.getLogger(__name__)
__all__ = ['Client']
class Client(ASTMProtocol):
"""Common ASTM client implementation.
:param emitter: Generator function that will produce ASTM records.
:type emitter: function
:param host: Server IP address or hostname.
:type host: str
:param port: Server port number.
:type port: int
:param serve_forever: Start over emitter after transfer termination.
:type serve_forever: bool
:param timeout: send/recv operation timeout value. If :const:`None` it will
be disabled.
:type timeout: int
"""
#: Number or attempts to send record to server.
retry_attempts = 3 # actually useless thing, but specification requires it.
def __init__(self, emitter, host='localhost', port=15200,
serve_forever=False, timeout=20):
super(Client, self).__init__(timeout=timeout)
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
self.connect((host, port))
self._emitter = emitter
self._retry_attempts = self.retry_attempts
self._serve_forever = serve_forever
self.set_init_state()
def emit_header(self):
"""Returns Header record."""
return self.astm_header()
def emit_terminator(self):
"""Returns Terminator record."""
return self.astm_terminator()
def retry_push_or_fail(self, data, attempts=3):
"""Sends `data` to server. If server rejects data due to some reasons
(with <NAK> reply) client tries to resend data for specified number
of `attempts`. If no attempts left, client terminates his session."""
if attempts <= 0:
try:
self.emitter.send(False)
except StopIteration:
pass
finally:
self.terminate()
else:
self.push(data)
def set_transfer_state(self):
self.terminator = 1
self.state = STATE.transfer
self.on_transfer_state()
def start(self, *args, **kwargs):
"""Initiates client transfer by sending <ENQ> message to server.
Implicitly runs pooling :func:`loop <astm.asynclib.loop>`
"""
self.on_start()
loop(*args, **kwargs)
def terminate(self):
"""Terminates client data transfer by sending <EOT> message to server.
If `server_forever` argument was passed on `Client` initialization,
after `state_reset_timeout` :meth:`start` will be called once again.
Otherwise connection with server will be closed.
"""
self.on_termination()
if self._serve_forever:
if self.timeout is not None:
time.sleep(self.timeout)
self.on_start()
else:
self.close()
def on_enq(self):
raise NotAccepted('Client should not receive ENQ.')
def on_ack(self):
if self.state not in [STATE.opened, STATE.transfer]:
raise InvalidState('Client is not ready to accept ACK.')
self.retry_attempts = self._retry_attempts
if self.state == STATE.opened:
self.set_transfer_state()
for record in self.emitter:
break
else:
self.terminate()
return
elif self.state == STATE.transfer:
try:
record = self.emitter.send(True)
except StopIteration:
self.terminate()
return
state = self._transfer_state
self._last_seq += 1
mtype = record[0]
if state is None:
assert mtype == 'H', mtype
state = 'header'
elif state == 'header':
assert mtype in ['P', 'L']
if mtype == 'P':
state = 'patient'
elif state == 'patient':
assert mtype in ['P', 'O', 'C', 'L']
if mtype == 'O':
state = 'order'
elif state == 'order':
assert mtype in ['O', 'C', 'M', 'R', 'L']
if mtype == 'R':
state = 'result'
elif state == 'result':
assert mtype in ['R', 'C', 'L']
if isinstance(record, Record):
record = record.to_astm()
if mtype == 'L':
state = None
data = encode_message(self._last_seq, [record])
self.push(data)
self._transfer_state = state
def on_nak(self):
self.retry_attempts -= 1
self.retry_push_or_fail(self._last_sent_data, self.retry_attempts)
def on_eot(self):
raise NotAccepted('Client should not receive EOT.')
def on_message(self):
raise NotAccepted('Client should not receive ASTM message.')
def on_init_state(self):
self._last_seq = 0
self._transfer_state = None
def on_opened_state(self):
self.emitter = self._emitter()
def on_start(self):
"""Calls on transfer initialization. Sets client state to OPENED (1)."""
self.push(ENQ)
self.set_opened_state()
def on_termination(self):
"""Calls on transfer termination. Resets client state to INIT (0)."""
self.push(EOT)
self.set_init_state()
|
Python
| 0.000002
|
@@ -1886,10 +1886,8 @@
mpts
-=3
):%0A
|
5b83b5e9a4e07af3f3dcd37d4f613039a42336e3
|
Add salt.modules.container_resource
|
salt/modules/container_resource.py
|
salt/modules/container_resource.py
|
Python
| 0.000042
|
@@ -0,0 +1,3323 @@
+# -*- coding: utf-8 -*-%0A'''%0ACommon resources for LXC and systemd-nspawn containers%0A%0AThese functions are not designed to be called directly, but instead from the%0A:mod:%60lxc %3Csalt.modules.lxc%3E%60 and the (future) :mod:%60nspawn%0A%3Csalt.modules.nspawn%3E%60 execution modules.%0A'''%0A%0A# Import python libs%0Afrom __future__ import absolute_import%0Aimport logging%0Aimport time%0Aimport traceback%0A%0A# Import salt libs%0Afrom salt.exceptions import SaltInvocationError%0Afrom salt.utils import vt%0A%0Alog = logging.getLogger(__name__)%0A%0A%0Adef run(name,%0A cmd,%0A output=None,%0A no_start=False,%0A stdin=None,%0A python_shell=True,%0A output_loglevel='debug',%0A ignore_retcode=False,%0A use_vt=False):%0A '''%0A Common logic for running shell commands in containers%0A%0A Requires the full command to be passed to :mod:%60cmd.run%0A %3Csalt.modules.cmdmod.run%3E%60/:mod:%60cmd.run_all %3Csalt.modules.cmdmod.run_all%3E%60%0A '''%0A valid_output = ('stdout', 'stderr', 'retcode', 'all')%0A if output is None:%0A cmd_func = 'cmd.run'%0A elif output not in valid_output:%0A raise SaltInvocationError(%0A '%5C'output%5C' param must be one of the following: %7B0%7D'%0A .format(', '.join(valid_output))%0A )%0A else:%0A cmd_func = 'cmd.run_all'%0A%0A if not use_vt:%0A ret = __salt__%5Bcmd_func%5D(cmd,%0A stdin=stdin,%0A python_shell=python_shell,%0A output_loglevel=output_loglevel,%0A ignore_retcode=ignore_retcode)%0A else:%0A stdout, stderr = '', ''%0A try:%0A proc = vt.Terminal(cmd,%0A shell=python_shell,%0A log_stdin_level=output_loglevel if%0A output_loglevel == 'quiet'%0A else 'info',%0A log_stdout_level=output_loglevel,%0A log_stderr_level=output_loglevel,%0A log_stdout=True,%0A log_stderr=True,%0A stream_stdout=False,%0A stream_stderr=False)%0A # Consume output%0A while proc.has_unread_data:%0A try:%0A cstdout, cstderr = proc.recv()%0A if cstdout:%0A stdout += cstdout%0A if cstderr:%0A if output is None:%0A stdout += cstderr%0A else:%0A stderr += cstderr%0A time.sleep(0.5)%0A except KeyboardInterrupt:%0A break%0A ret = stdout if output is None %5C%0A else %7B'retcode': proc.exitstatus,%0A 'pid': 2,%0A 'stdout': stdout,%0A 'stderr': stderr%7D%0A except vt.TerminalException:%0A trace = traceback.format_exc()%0A log.error(trace)%0A ret = stdout if output is None %5C%0A else %7B'retcode': 127,%0A 'pid': 2,%0A 'stdout': stdout,%0A 'stderr': stderr%7D%0A finally:%0A proc.terminate()%0A%0A return ret%0A
|
|
8c2b90d4d2c9fc8ad759284719eab4dd346ccab2
|
Add tests
|
test.py
|
test.py
|
Python
| 0.000001
|
@@ -0,0 +1,1020 @@
+%22%22%22%0ASimple test of CxoTime. The base Time object is extremely well%0Atested, so this simply confirms that the add-on in CxoTime works.%0A%22%22%22%0A%0Aimport pytest%0Aimport numpy as np%0A%0Afrom cxotime import CxoTime%0A%0Atry:%0A from Chandra.Time import DateTime%0A HAS_DATETIME = True%0Aexcept ImportError:%0A HAS_DATETIME = False%0A%0Adef test_cxotime_basic():%0A t = CxoTime(1)%0A assert t.format == 'secs'%0A assert t.scale == 'utc'%0A assert np.allclose(t.secs, 1.0, rtol=1e-10, atol=0)%0A assert t.tt.date == '1998:001:00:00:01.000'%0A%0A%0A t = CxoTime('1998:001:00:00:01.000', scale='tt')%0A assert t.scale == 'tt'%0A assert np.allclose(t.secs, 1.0, atol=1e-10, rtol=0)%0A%0A@pytest.mark.skipif('not HAS_DATETIME')%0Adef test_cxotime_vs_datetime():%0A dates = ('2015-06-30 23:59:60.5', '2015:180:01:02:03.456')%0A for date in dates:%0A assert np.allclose(CxoTime(date).secs, DateTime(date).secs, %0A atol=1e-4, rtol=0)%0A assert CxoTime(CxoTime(date).secs).date == DateTime(DateTime(date).secs).date%0A
|
|
815ef4b4b0dce640077e1f8ecd2fbe95598bf539
|
Create existing comments' owners records
|
src/ggrc/migrations/versions/20160608132526_170e453da661_add_comments_owners_info.py
|
src/ggrc/migrations/versions/20160608132526_170e453da661_add_comments_owners_info.py
|
Python
| 0
|
@@ -0,0 +1,1448 @@
+# Copyright (C) 2016 Google Inc., authors, and contributors %3Csee AUTHORS file%3E%0A# Licensed under http://www.apache.org/licenses/LICENSE-2.0 %3Csee LICENSE file%3E%0A# Created By: peter@reciprocitylabs.com%0A# Maintained By: peter@reciprocitylabs.com%0A%0A%22%22%22%0AAdd comments' owners information.%0A%0ACreate Date: 2016-06-08 13:25:26.635435%0A%22%22%22%0A# disable Invalid constant name pylint warning for mandatory Alembic variables.%0A# pylint: disable=invalid-name%0A%0Afrom alembic import op%0A%0A%0A# revision identifiers, used by Alembic.%0Arevision = %22170e453da661%22%0Adown_revision = %227a9b715ec504%22%0A%0A%0Adef upgrade():%0A %22%22%22Create owner information for the existing comments.%0A%0A A comment's owner is assumed to be the user who last edited it, and this%0A information is added to the object_owners table for all existing comments.%0A%0A If a record already exists, do nothing (this could happen e.g. on a DB%0A downgrade and a subsequent another upgrade).%0A %22%22%22%0A # NOTE: we set the status column's value to %22Draft%22 to be consistent with%0A # what the application does when a new comment is created%0A command = %22%22%22%0A INSERT IGNORE INTO object_owners (%0A person_id, ownable_id, ownable_type, modified_by_id,%0A created_at, updated_at, status%0A )%0A SELECT%0A modified_by_id, id, %22Comment%22, modified_by_id, created_at, updated_at,%0A %22Draft%22%0A FROM comments;%0A %22%22%22%0A%0A op.execute(command)%0A%0A%0Adef downgrade():%0A %22%22%22Do not delete any comments' owner information to preserve data.%22%22%22%0A
|
|
b73c75bbafb53864a86f95949d6a028f9e79f718
|
Add Tile class
|
tile.py
|
tile.py
|
Python
| 0
|
@@ -0,0 +1,149 @@
+from __future__ import division%0A%0Aclass Tile(object):%0A def __init__(self, x, y, z):%0A self.x = x%0A self.y = y%0A self.height = z%0A%0A
|
|
9cc26c8a95ab4e6ffa9c991b5a575c7e6d62dae4
|
add pytest for util.location
|
pytests/util/test_location.py
|
pytests/util/test_location.py
|
Python
| 0
|
@@ -0,0 +1,1475 @@
+import pytest%0Aimport json%0A%0Aimport util.location%0A%0A%0A@pytest.fixture%0Adef urllib_req(mocker):%0A util.location.reset()%0A return mocker.patch(%22util.location.urllib.request%22)%0A%0A%0A@pytest.fixture%0Adef primaryLocation():%0A return %7B%0A %22country%22: %22Middle Earth%22,%0A %22longitude%22: %2210.0%22,%0A %22latitude%22: %2220.5%22,%0A %22ip%22: %22127.0.0.1%22,%0A %7D%0A%0A%0A@pytest.fixture%0Adef secondaryLocation():%0A return %7B%0A %22country_name%22: %22Rivia%22,%0A %22longitude%22: %22-10.0%22,%0A %22latitude%22: %22-23%22,%0A %22ip%22: %22127.0.0.6%22,%0A %7D%0A%0A%0Adef test_primary_provider(urllib_req, primaryLocation):%0A urllib_req.urlopen.return_value.read.return_value = json.dumps(primaryLocation)%0A%0A assert util.location.country() == primaryLocation%5B%22country%22%5D%0A assert util.location.coordinates() == (%0A primaryLocation%5B%22latitude%22%5D,%0A primaryLocation%5B%22longitude%22%5D,%0A )%0A assert util.location.public_ip() == primaryLocation%5B%22ip%22%5D%0A%0A%0Adef test_secondary_provider(mocker, urllib_req, secondaryLocation):%0A urlopen = mocker.MagicMock()%0A urlopen.read.return_value = json.dumps(secondaryLocation)%0A urllib_req.urlopen.side_effect = %5BRuntimeError(), urlopen%5D%0A%0A assert util.location.country() == secondaryLocation%5B%22country_name%22%5D%0A assert util.location.coordinates() == (%0A secondaryLocation%5B%22latitude%22%5D,%0A secondaryLocation%5B%22longitude%22%5D,%0A )%0A assert util.location.public_ip() == secondaryLocation%5B%22ip%22%5D%0A%0A%0A# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4%0A
|
|
c9690cabe3c4d1d02307e3594a2cac505f4a166d
|
Add new image moments functions
|
photutils/utils/_moments.py
|
photutils/utils/_moments.py
|
Python
| 0.000002
|
@@ -0,0 +1,1707 @@
+# Licensed under a 3-clause BSD style license - see LICENSE.rst%0Afrom __future__ import (absolute_import, division, print_function,%0A unicode_literals)%0A%0Aimport numpy as np%0A%0Afrom ..centroids import centroid_com%0A%0A%0A__all__ = %5B'_moments_central', '_moments'%5D%0A%0A%0Adef _moments_central(data, center=None, order=1):%0A %22%22%22%0A Calculate the central image moments up to the specified order.%0A%0A Parameters%0A ----------%0A data : 2D array-like%0A The input 2D array.%0A%0A center : tuple of two floats or %60None%60, optional%0A The %60%60(x, y)%60%60 center position. If %60None%60 it will calculated as%0A the %22center of mass%22 of the input %60%60data%60%60.%0A%0A order : int, optional%0A The maximum order of the moments to calculate.%0A%0A Returns%0A -------%0A moments : 2D %60~numpy.ndarray%60%0A The central image moments.%0A %22%22%22%0A%0A data = np.asarray(data)%0A%0A if data.ndim != 2:%0A raise ValueError('data must be a 2D array.')%0A%0A if center is None:%0A center = centroid_com(data)%0A%0A indices = np.ogrid%5B%5Bslice(0, i) for i in data.shape%5D%5D%0A ypowers = (indices%5B0%5D - center%5B1%5D) ** np.arange(order + 1)%0A xpowers = np.transpose(indices%5B1%5D - center%5B0%5D) ** np.arange(order + 1)%0A%0A return np.dot(np.transpose(xpowers), np.dot(data, ypowers))%0A%0A%0Adef _moments(data, order=1):%0A %22%22%22%0A Calculate the raw image moments up to the specified order.%0A%0A Parameters%0A ----------%0A data : 2D array-like%0A The input 2D array.%0A%0A order : int, optional%0A The maximum order of the moments to calculate.%0A%0A Returns%0A -------%0A moments : 2D %60~numpy.ndarray%60%0A The raw image moments.%0A %22%22%22%0A%0A return _moments_central(data, center=(0, 0), order=order)%0A
|
|
6a3c960640741036c3f444547cada1e1b7a24100
|
Add first unit test for api
|
tests/test_api.py
|
tests/test_api.py
|
Python
| 0
|
@@ -0,0 +1,775 @@
+import os%0Aimport sys%0Aimport json%0Aimport responses%0Aimport unittest%0A%0ACWD = os.path.dirname(os.path.abspath(__file__))%0AMS_WD = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))%0A%0A# Allow import of api.py%0Aif os.path.join(MS_WD, 'utils') not in sys.path:%0A sys.path.insert(0, os.path.join(MS_WD, 'utils'))%0A# Use multiscanner in ../%0Asys.path.insert(0, os.path.dirname(CWD))%0A%0Aimport multiscanner%0Aimport api%0A%0A%0AHTTP_OK = 200%0AHTTP_CREATED = 201%0A%0A%0Aclass TestURLCase(unittest.TestCase):%0A def setUp(self):%0A self.app = api.app.test_client()%0A%0A def test_index(self):%0A expected_response = %7B'Message': 'True'%7D%0A resp = self.app.get('/')%0A self.assertEqual(resp.status_code, HTTP_OK)%0A self.assertEqual(json.loads(resp.data), expected_response)%0A
|
|
d0432f1d3d48634c00027b71eb131c5e36827c4b
|
Add dropdown element located in widget bar
|
src/lib/constants/element/widget_bar/dropdown.py
|
src/lib/constants/element/widget_bar/dropdown.py
|
Python
| 0
|
@@ -0,0 +1,392 @@
+SELECTOR = %22.inner-nav-item%22%0A%0ACLAUSES = %22Clauses%22%0ACONTRACTS = %22Contracts%22%0ADATA_ASSETS = %22Data Assets%22%0AFACILITIES = %22Facilities%22%0AMARKETS = %22Markets%22%0AORG_GROUPS = %22Org Groups%22%0APOLICIES = %22Policies%22%0APROCESSES = %22Processes%22%0APRODUCTS = %22Products%22%0APROJECTS = %22Projects%22%0ASTANDARDS = %22Standards%22%0ASYSTEMS = %22Systems%22%0AVENDORS = %22Vendors%22%0ATHREAD_ACTORS = %22Thread Actors%22%0ARISKS = %22Risks%22%0ATASKS = %22Tasks%22%0A
|
|
763680e57b28a9746050206cd63450bf11c3e512
|
Fix ProgramEditor permissions to not include Program delete
|
src/ggrc_basic_permissions/migrations/versions/20131010001257_10adeac7b693_fix_programeditor_pe.py
|
src/ggrc_basic_permissions/migrations/versions/20131010001257_10adeac7b693_fix_programeditor_pe.py
|
Python
| 0
|
@@ -0,0 +1,1536 @@
+%0A%22%22%22Fix ProgramEditor permissions%0A%0ARevision ID: 10adeac7b693%0ARevises: 8f33d9bd2043%0ACreate Date: 2013-10-10 00:12:57.391754%0A%0A%22%22%22%0A%0A# revision identifiers, used by Alembic.%0Arevision = '10adeac7b693'%0Adown_revision = '8f33d9bd2043'%0A%0Aimport json%0Aimport sqlalchemy as sa%0Afrom alembic import op%0Afrom datetime import datetime%0Afrom sqlalchemy.sql import table, column%0A%0Aroles_table = table('roles',%0A column('id', sa.Integer),%0A column('name', sa.String),%0A column('permissions_json', sa.Text),%0A column('description', sa.Text),%0A column('modified_by_id', sa.Integer),%0A column('created_at', sa.DateTime),%0A column('updated_at', sa.DateTime),%0A column('context_id', sa.Integer),%0A )%0A%0Adef set_permissions(program_editor_objects):%0A program_editor_delete_objects = list(program_editor_objects)%0A program_editor_delete_objects.remove('Program')%0A%0A current_datetime = datetime.now()%0A op.execute(roles_table.update()%5C%0A .values(%0A permissions_json = json.dumps(%7B%0A 'create': program_editor_objects,%0A 'read': program_editor_objects,%0A 'update': program_editor_objects,%0A 'delete': program_editor_delete_objects,%0A %7D),%0A updated_at = current_datetime)%5C%0A .where(roles_table.c.name == 'ProgramEditor'))%0A%0Adef upgrade():%0A set_permissions(%5B%0A 'Cycle',%0A 'ObjectDocument',%0A 'ObjectObjective',%0A 'ObjectPerson',%0A 'ObjectSection',%0A 'Program',%0A 'ProgramControl',%0A 'ProgramDirective',%0A 'Relationship',%0A %5D)%0A%0Adef downgrade():%0A pass%0A
|
|
da488fa4505de818a5efcec13fdb7963d5051389
|
Create util.py
|
util.py
|
util.py
|
Python
| 0.000002
|
@@ -0,0 +1,546 @@
+import requests%0Aimport logging%0Afrom bs4 import BeautifulSoup%0A%0Adef downloadRedditUrl(url):%0A%09print %22downloadRedditUrl(): Downloading url: %7B%7D%22.format(url)%0A%09#assert url.startswith('https://www.reddit.com/r/learnprogramming/')%0A%09%0A %0A%09headers = %7B%0A %09%09'User-Agent': 'Searching Reddit bot version 1.0',%0A%09%7D%0A%09r = requests.get(url,headers = headers)%0A%09if r.status_code != 200:%0A%09%09raise Exception(%22Non-OK status code: %7B%7D%22.format(r.status_code))%0A%09return r.text%09%0A%0A#Find the TV Line in post%0Adef parseRedditPost(html):%09%0A%09bs = BeautifulSoup(html)%0A%09return bs.select('div.usertext-body')%5B1%5D.text%0A
|
|
2beac94eb32fc4adb976c4a10018de8518e4bada
|
Add wsgi file
|
wsgi.py
|
wsgi.py
|
Python
| 0.000001
|
@@ -0,0 +1,1261 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4%0A#%0A# Copyright 2016 Eugene Frolov %3Ceugene@frolov.net.ru%3E%0A#%0A# All Rights Reserved.%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22); you may%0A# not use this file except in compliance with the License. You may obtain%0A# a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS, WITHOUT%0A# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the%0A# License for the specific language governing permissions and limitations%0A# under the License.%0Aimport logging%0Aimport sys%0A%0Afrom helix.api import app%0Afrom helix.common import config%0Afrom helix.common import encoding%0Afrom helix.common import log as helix_logging%0A%0A%0Aif not config.parse(sys.argv%5B1:%5D):%0A logging.warning(%22Unable to find configuration file via the%22%0A %22 default search paths (~/.helix/, ~/, /etc/helix/,%22%0A %22 /etc/) and the '--config-file' option!%22)%0Ahelix_logging.configure()%0Alog = logging.getLogger(__name__)%0Aencoding.set_default_encoding_from_config()%0A%0Aapplication = app.build_wsgi_application()%0A
|
|
7c095c82e1b6a16da65b8fcfaf77d9a606321d76
|
Create sum67.py
|
Python/CodingBat/sum67.py
|
Python/CodingBat/sum67.py
|
Python
| 0.001155
|
@@ -0,0 +1,218 @@
+# http://codingbat.com/prob/p108886%0A%0Adef sum67(nums):%0A sum = 0%0A %0A i = 0%0A while i %3C len(nums):%0A if nums%5Bi%5D == 6:%0A while nums%5Bi%5D != 7:%0A i += 1%0A else:%0A sum += nums%5Bi%5D%0A i += 1%0A %0A return sum%0A
|
|
28f41fcfc80bc562343e510e3e0e5e57d97d27ea
|
Create Scrap_share_marketdata.py
|
Scrap_share_marketdata.py
|
Scrap_share_marketdata.py
|
Python
| 0.000002
|
@@ -0,0 +1,978 @@
+import urllib%0Aimport re%0A%0A#Title scrap of any website %0A%0A# regex='%3Ctitle%3E(.+?)%3C/title%3E'%0A# pattern =re.compile(regex)%0A%0A# htmlfile = urllib.urlopen(%22https://www.cnn.com%22)%0A# htmltext=htmlfile.read()%0A# titles=re.findall(pattern,htmltext)%0A# print titles%0A%0A# Scrap using finance yahoo.com%0A%0A# symbolfile=open(%22symbols.txt%22)%0A# symbolslist=symbolfile.read()%0A# newsymbolslist=symbolslist.split(%22%5Cn%22)%0A# # symbolslist=%5B%22APPL%22,%22SPY%22%5D%0A# i=0%0A# while i%3Clen(newsymbolslist)-1:%0A# %09url=%22http://finance.yahoo.com/q?s=%22+newsymbolslist%5Bi%5D+%22&ql=1%22%0A# %09htmlfile=urllib.urlopen(url)%0A# %09htmltext=htmlfile.read()%0A# %09regex='%3Cspan class=%22time_rtq_ticker%22%3E(.+?)%3C/span%3E%3C/span%3E'%0A# %09pattern=re.compile(regex)%0A# %09price=re.findall(pattern,htmltext)%0A# %09s=price%5B0%5D%0A# %09print s.split(%22%3E%22,1)%5B1%5D %0A# %09i+=1%0A%0A# Scrap using google finance%0A%0Ahtmltext=urllib.urlopen(%22http://www.google.com/finance?q=AAPL%22).read()%0A%0Aregex='%3Cspan id=%22ref_%5B%5E.%5D*_l%22%3E(.+?)%3C/span%3E'%0Apattern=re.compile(regex)%0Aresults=re.findall(pattern,htmltext)%0Aprint results%0A
|
|
1ec2f110c16de75503092df873693e2929baa8cd
|
add the "Cargos Importantes" field
|
candidates/migrations/0018_cr_add_important_posts_field.py
|
candidates/migrations/0018_cr_add_important_posts_field.py
|
Python
| 0
|
@@ -0,0 +1,951 @@
+# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.conf import settings%0Afrom django.db import models, migrations%0A%0Adef add_extra_field(apps, schema_editor):%0A    ExtraField = apps.get_model('candidates', 'ExtraField')%0A    if settings.ELECTION_APP == 'cr':%0A        ExtraField.objects.create(%0A            key='important_roles',%0A            type='longer-text',%0A            label=u'Important Roles',%0A        )%0A%0Adef remove_extra_field(apps, schema_editor):%0A    ExtraField = apps.get_model('candidates', 'ExtraField')%0A    if settings.ELECTION_APP == 'cr':%0A        extra_field = ExtraField.objects.get(key='important_roles')%0A        extra_field.personextrafieldvalue_set.all().delete()%0A        extra_field.delete()%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A    dependencies = %5B%0A        ('candidates', '0017_remove_cv_and_program_fields'),%0A    %5D%0A%0A    operations = %5B%0A        migrations.RunPython(add_extra_field, remove_extra_field)%0A    %5D%0A
|
|
0f5a52a215f8f1e16ab5ddf622a541919ab760ce
|
Fix up language detector.
|
aleph/analyze/language.py
|
aleph/analyze/language.py
|
import logging
import langid
# https://github.com/saffsd/langid.py
from aleph.analyze.analyzer import Analyzer
log = logging.getLogger(__name__)
THRESHOLD = 0.9
CUTOFF = 30
class LanguageAnalyzer(Analyzer):
def analyze_text(self, document, meta):
if len(meta.languages):
return
languages = set()
for page in document.pages:
if not page.text or len(page.text) < CUTOFF:
continue
lang, score = langid.classify(page.text)
if score > THRESHOLD:
languages.add(lang)
self.save(document, meta, languages)
def analyze_tabular(self, document, meta):
if len(meta.languages):
return
languages = set()
for table in document.tables:
for row in table:
for text in row.values():
if not text or len(text) < CUTOFF:
continue
lang, score = langid.classify(text)
if score > THRESHOLD:
languages.add(lang)
self.save(document, meta, languages)
def save(self, document, meta, languages):
existing = meta.get('languages')
if existing is None or not len(existing):
meta['languages'] = list(languages)
super(LanguageAnalyzer, self).save(document, meta)
|
Python
| 0.000043
|
@@ -750,21 +750,22 @@
for
-table
+record
in docu
@@ -773,50 +773,17 @@
ent.
-tables:%0A for row in table:%0A
+records:%0A
@@ -807,10 +807,18 @@
in r
-ow
+ecord.data
.val
@@ -836,28 +836,24 @@
-
if not text
@@ -887,36 +887,32 @@
-
continue%0A
@@ -912,36 +912,32 @@
-
-
lang, score = la
@@ -964,36 +964,32 @@
-
if score %3E THRES
@@ -986,36 +986,32 @@
re %3E THRESHOLD:%0A
-
|
57fe1a44c2285f39cc1454bbd6cfb3ce621348c3
|
Add a test to validate the user creation
|
aligot/tests/test_user.py
|
aligot/tests/test_user.py
|
Python
| 0.000001
|
@@ -0,0 +1,1073 @@
+# coding: utf-8%0A%0Afrom django.core.urlresolvers import reverse%0Afrom django.test import TestCase%0Afrom rest_framework import status%0Afrom rest_framework.test import APIClient%0A%0Afrom ..models import User%0A%0A%0Aclass TestUser(TestCase):%0A%0A def setUp(self):%0A self.client = APIClient()%0A%0A def test_create_without_params(self):%0A self.assertEquals(status.HTTP_400_BAD_REQUEST, self.client.post(reverse('user-create')).status_code)%0A self.assertEquals(0, User.objects.count())%0A%0A def test_create(self):%0A %22%22%22%0A Create user & wait for 201 response.%0A %22%22%22%0A data = %7B%0A 'username': 'test',%0A 'password': 'test',%0A 'email': 'test@mail.com'%0A %7D%0A response = self.client.post(reverse('user-create'), data)%0A self.assertEqual(status.HTTP_201_CREATED, response.status_code, response.content)%0A self.assertEqual(1, User.objects.count())%0A%0A # Check the first%0A user = User.objects.all()%5B0%5D%0A self.assertEqual(user.username, data%5B'username'%5D, 'Username in DB don%5C't match')%0A%0A%0A%0A%0A
|
|
d63235026ec40857d3cbeef67064879d4b180eeb
|
add pip_upgrade
|
_bin/pip_upgrade.py
|
_bin/pip_upgrade.py
|
Python
| 0.000001
|
@@ -0,0 +1,177 @@
+#!/usr/bin/env python%0A%0Aimport pip%0Afrom subprocess import call%0A%0Afor dist in pip.get_installed_distributions():%0A call(%22pip install --upgrade %22 + dist.project_name, shell=True)%0A
|
|
cba5577517659e13511dcd45c996fd292cbd1cf8
|
Add Eq typeclass definition
|
typeclasses/eq.py
|
typeclasses/eq.py
|
Python
| 0.000182
|
@@ -0,0 +1,1047 @@
+# typeclasses, an educational implementation of Haskell-style type%0A# classes, in Python%0A#%0A# Copyright (C) 2010 Nicolas Trangez %3Ceikke eikke com%3E%0A#%0A# This library is free software; you can redistribute it and/or%0A# modify it under the terms of the GNU Lesser General Public%0A# License as published by the Free Software Foundation, version 2.1%0A# of the License.%0A#%0A# This library is distributed in the hope that it will be useful,%0A# but WITHOUT ANY WARRANTY; without even the implied warranty of%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU%0A# Lesser General Public License for more details.%0A#%0A# You should have received a copy of the GNU Lesser General Public%0A# License along with this library; if not, write to the Free Software%0A# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,%0A# MA 02110-1301 USA%0A%0A'''Definition of the Eq typeclass'''%0A%0Afrom typeclasses import function, TypeClass%0A%0Aeq = function(1)%0Ane = function(1)%0A%0AEq = TypeClass((eq, lambda a, b: not ne(a, b)),%0A (ne, lambda a, b: not eq(a, b)))%0A
|
|
bdb7dcef2a214e18505b3465b66068fb9658739c
|
Update tests
|
corehq/apps/app_manager/tests/test_bulk_app_translation.py
|
corehq/apps/app_manager/tests/test_bulk_app_translation.py
|
import codecs
from django.test import SimpleTestCase
from corehq.apps.app_manager.const import APP_V2
from corehq.apps.app_manager.models import Application, Module
from corehq.apps.app_manager.tests.util import TestFileMixin
from corehq.apps.app_manager.translations import \
process_bulk_app_translation_upload, expected_bulk_app_sheet_rows, \
expected_bulk_app_sheet_headers
from dimagi.utils.excel import WorkbookJSONReader
class BulkAppTranslationTestBase(SimpleTestCase, TestFileMixin):
def setUp(self):
"""
Instantiate an app from file_path + app.json
"""
super(BulkAppTranslationTestBase, self).setUp()
self.app = Application.wrap(self.get_json("app"))
def do_upload(self, name, expected_messages=None):
"""
Upload the bulk app translation file at file_path + upload.xlsx
"""
if not expected_messages:
expected_messages = ["App Translations Updated!"]
with codecs.open(self.get_path(name, "xlsx")) as f:
messages = process_bulk_app_translation_upload(self.app, f)
self.assertListEqual(
[m[1] for m in messages], expected_messages
)
def assert_question_label(self, text, module_id, form_id, language, question_path):
"""
assert that the given text is equal to the label of the given question.
Return the label of the given question
:param text:
:param module_id: module index
:param form_id: form index
:param question_path: path to question (including "/data/")
:return: the label of the question
"""
form = self.app.get_module(module_id).get_form(form_id)
labels = {}
for lang in self.app.langs:
for question in form.get_questions(
[lang], include_triggers=True, include_groups=True):
labels[(question['value'], lang)] = question['label']
self.assertEqual(
labels[(question_path, language)],
text
)
def assert_case_property_label(self, text, field, module_id, short_or_long, language):
module = self.app.get_module(module_id)
cols = module.case_details[short_or_long].columns
col = next(col for col in cols if col.field == field)
self.assertEqual(text, col.header.get(language, None))
class BulkAppTranslationBasicTest(BulkAppTranslationTestBase):
file_path = "data", "bulk_app_translation", "basic"
def test_set_up(self):
self._shared_test_initial_set_up()
def test_no_change_upload(self):
self.do_upload("upload_no_change")
self._shared_test_initial_set_up()
def _shared_test_initial_set_up(self):
self.assert_question_label("question1", 0, 0, "en", "/data/question1")
self.assert_case_property_label("Autre Prop", "other-prop", 0, "long", "fra")
def test_change_upload(self):
self.do_upload("upload")
self.assert_question_label("in english", 0, 0, "en", "/data/question1")
self.assert_question_label("in french", 0, 0, "fra", "/data/question1")
# Test that translations can be deleted.
self.assert_question_label("English Label", 0, 0, "fra", "/data/question3/question5")
self.assert_case_property_label(None, "other-prop", 0, "long", "fra")
self.assert_case_property_label(None, "name", 0, "long", "en")
module = self.app.get_module(0)
self.assertEqual(
module.case_details.long.columns[1].enum[0].value['fra'],
'french bar'
)
self.assertEqual(
module.case_details.short.columns[0].header['fra'],
'Nom'
)
# Test special characters and output refs
self.assert_question_label("one < two", 0, 0, "en", "/data/question2")
self.assert_question_label("un < deux", 0, 0, "fra", "/data/question2")
self.assert_question_label("question6: ____", 0, 0, "en", "/data/question3/question4")
def test_missing_itext(self):
self.app = Application.wrap(self.get_json("app_no_itext"))
self.assert_question_label('question1', 0, 0, "en", "/data/question1")
try:
self.do_upload("upload_no_change")
except Exception as e:
self.fail(e)
class MismatchedItextReferenceTest(BulkAppTranslationTestBase):
"""
Test the bulk app translation upload when the itext reference in a question
in the xform body does not match the question's id/path.
The upload is an unchanged download.
"""
file_path = "data", "bulk_app_translation", "mismatched_ref"
def test_unchanged_upload(self):
self.do_upload("upload")
self.assert_question_label("question2", 0, 0, "en", "/data/foo/question2")
class BulkAppTranslationFormTest(BulkAppTranslationTestBase):
file_path = "data", "bulk_app_translation", "form_modifications"
def test_removing_form_translations(self):
self.do_upload("modifications")
form = self.app.get_module(0).get_form(0)
self.assertXmlEqual(self.get_xml("expected_form"), form.render_xform())
class BulkAppTranslationDownloadTest(SimpleTestCase, TestFileMixin):
file_path = ('data', 'bulk_app_translation', 'download')
maxDiff = None
@classmethod
def setUpClass(cls):
cls.app = Application.wrap(cls.get_json("app"))
wb_reader = WorkbookJSONReader(cls.get_path('bulk_app_translations', 'xlsx'))
cls.expected_workbook = [{'name': ws.title, 'rows': list(ws)}
for ws in wb_reader.worksheets]
def test_download(self):
actual_headers = expected_bulk_app_sheet_headers(self.app)
actual_rows = expected_bulk_app_sheet_rows(self.app)
actual_workbook = [
{'name': title,
'rows': [dict(zip(headers, row)) for row in actual_rows[title]]}
for title, headers in actual_headers
]
for actual_sheet, expected_sheet in zip(actual_workbook,
self.expected_workbook):
self.assertEqual(actual_sheet, expected_sheet)
self.assertEqual(actual_workbook, self.expected_workbook)
class RenameLangTest(SimpleTestCase):
def test_rename_lang_empty_form(self):
app = Application.new_app('domain', "Untitled Application", application_version=APP_V2)
module = app.add_module(Module.new_module('module', None))
form1 = app.new_form(module.id, "Untitled Form", None)
form1.source = '<source>'
# form with no source
form2 = app.new_form(module.id, "Empty form", None)
app.rename_lang('en', 'fra')
self.assertNotIn('en', module.name)
self.assertIn('fra', module.name)
self.assertNotIn('en', form1.name)
self.assertIn('fra', form1.name)
self.assertNotIn('en', form2.name)
self.assertIn('fra', form2.name)
|
Python
| 0.000001
|
@@ -3798,17 +3798,20 @@
el(%22one
-%3C
+<
two%22, 0
@@ -3879,17 +3879,20 @@
bel(%22un
-%3C
+<
deux%22,
|
dead36578f93ab2eb3a0b403a8da75b1ab0e3b12
|
Remove the lock on the table ir_sequence and use FOR UPDATE
|
bin/addons/base/ir/ir_sequence.py
|
bin/addons/base/ir/ir_sequence.py
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2004-2008 TINY SPRL. (http://tiny.be) All Rights Reserved.
#
# $Id$
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsability of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# garantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import time
from osv import fields,osv
class ir_sequence_type(osv.osv):
_name = 'ir.sequence.type'
_columns = {
'name': fields.char('Sequence Name',size=64, required=True),
'code': fields.char('Sequence Code',size=32, required=True),
}
ir_sequence_type()
def _code_get(self, cr, uid, context={}):
cr.execute('select code, name from ir_sequence_type')
return cr.fetchall()
class ir_sequence(osv.osv):
_name = 'ir.sequence'
_columns = {
'name': fields.char('Sequence Name',size=64, required=True),
'code': fields.selection(_code_get, 'Sequence Code',size=64, required=True),
'active': fields.boolean('Active'),
'prefix': fields.char('Prefix',size=64),
'suffix': fields.char('Suffix',size=64),
'number_next': fields.integer('Next Number', required=True),
'number_increment': fields.integer('Increment Number', required=True),
'padding' : fields.integer('Number padding', required=True),
}
_defaults = {
'active': lambda *a: True,
'number_increment': lambda *a: 1,
'number_next': lambda *a: 1,
'padding' : lambda *a : 0,
}
def _process(self, s):
return (s or '') % {'year':time.strftime('%Y'), 'month': time.strftime('%m'), 'day':time.strftime('%d')}
def get_id(self, cr, uid, sequence_id, test='id=%d'):
cr.execute('lock table ir_sequence')
cr.execute('select id,number_next,number_increment,prefix,suffix,padding from ir_sequence where '+test+' and active=True', (sequence_id,))
res = cr.dictfetchone()
if res:
cr.execute('update ir_sequence set number_next=number_next+number_increment where id=%d and active=True', (res['id'],))
if res['number_next']:
return self._process(res['prefix']) + '%%0%sd' % res['padding'] % res['number_next'] + self._process(res['suffix'])
else:
return self._process(res['prefix']) + self._process(res['suffix'])
return False
def get(self, cr, uid, code):
return self.get_id(cr, uid, code, test='code=%s')
ir_sequence()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Python
| 0.000042
|
@@ -2724,53 +2724,8 @@
'):%0A
- cr.execute('lock table ir_sequence')%0A
@@ -2844,24 +2844,35 @@
active=True
+ FOR UPDATE
', (sequence
|
2c7a40679e6202446a2e1076e19832589abf9ef9
|
Add test mobile flatpage
|
geotrek/api/tests/test_mobile_flatpage.py
|
geotrek/api/tests/test_mobile_flatpage.py
|
Python
| 0
|
@@ -0,0 +1,2233 @@
+from __future__ import unicode_literals%0A%0Aimport json%0A%0Afrom django.contrib.auth.models import User%0Afrom django.core.urlresolvers import reverse%0Afrom django.test.testcases import TestCase%0A%0Afrom geotrek.flatpages.factories import FlatPageFactory%0Afrom geotrek.flatpages.models import FlatPage%0A%0AFLATPAGE_DETAIL_PROPERTIES_JSON_STRUCTURE = sorted(%5B%0A 'id', 'title', 'content'%0A%5D)%0A%0A%0Aclass FlatPageAdministratorTest(TestCase):%0A @classmethod%0A def setUpTestData(cls):%0A cls.flatpage = FlatPageFactory.create()%0A FlatPageFactory.create()%0A cls.administrator = User.objects.create(username=%22administrator%22, is_superuser=True,%0A is_staff=True, is_active=True)%0A cls.administrator.set_password('administrator')%0A cls.administrator.save()%0A cls.administrator.refresh_from_db()%0A%0A def get_flatpage_list(self, params=None):%0A return self.client.get(reverse('apimobile:flatpage-list'), params, HTTP_ACCEPT_LANGUAGE='fr')%0A%0A def get_flatpage_detail(self, id_flatpage, params=None):%0A return self.client.get(reverse('apimobile:flatpage-detail', args=(id_flatpage,)),%0A params, HTTP_ACCEPT_LANGUAGE='fr')%0A%0A def test_flatpage_list_administrator(self):%0A self.client.login(username=%22administrator%22, password=%22administrator%22)%0A response = self.get_flatpage_list()%0A self.assertEqual(response.status_code, 200)%0A json_response = json.loads(response.content.decode('utf-8'))%0A self.assertEqual(len(json_response), 2)%0A self.assertEqual(json_response%5B0%5D.get('title'), FlatPage.objects.first().title)%0A%0A def test_flatpage_detail_administrator(self):%0A self.client.login(username=%22administrator%22, password=%22administrator%22)%0A response = self.get_flatpage_detail(self.flatpage.pk)%0A self.assertEqual(response.status_code, 200)%0A json_response = json.loads(response.content.decode('utf-8'))%0A self.assertEqual(sorted(json_response.keys()),%0A FLATPAGE_DETAIL_PROPERTIES_JSON_STRUCTURE)%0A self.assertEqual(json_response.get('content'), self.flatpage.content)%0A self.assertEqual(json_response.get('title'), self.flatpage.title)%0A
|
|
4afd2553625db404cdfedfcf336079b3d9d723e3
|
Add test for auth service pre-run time validation checks.
|
st2auth/tests/unit/test_validation_utils.py
|
st2auth/tests/unit/test_validation_utils.py
|
Python
| 0
|
@@ -0,0 +1,2657 @@
+# Licensed to the StackStorm, Inc ('StackStorm') under one or more%0A# contributor license agreements.  See the NOTICE file distributed with%0A# this work for additional information regarding copyright ownership.%0A# The ASF licenses this file to You under the Apache License, Version 2.0%0A# (the %22License%22); you may not use this file except in compliance with%0A# the License.  You may obtain a copy of the License at%0A#%0A#     http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A%0Aimport unittest2%0Afrom oslo_config import cfg%0A%0Afrom st2auth.validation import validate_auth_backend_is_correctly_configured%0Afrom st2tests import config as tests_config%0A%0A__all__ = %5B%0A    'ValidationUtilsTestCase'%0A%5D%0A%0A%0Aclass ValidationUtilsTestCase(unittest2.TestCase):%0A    def setUp(self):%0A        super(ValidationUtilsTestCase, self).setUp()%0A        tests_config.parse_args()%0A%0A    def test_validate_auth_backend_is_correctly_configured_success(self):%0A        result = validate_auth_backend_is_correctly_configured()%0A        self.assertTrue(result)%0A%0A    def test_validate_auth_backend_is_correctly_configured_invalid_backend(self):%0A        cfg.CONF.set_override(group='auth', name='mode', override='invalid')%0A        expected_msg = ('Invalid auth mode %22invalid%22 specified in the config. '%0A                        'Valid modes are: proxy, standalone')%0A        self.assertRaisesRegexp(ValueError, expected_msg,%0A                                validate_auth_backend_is_correctly_configured)%0A%0A    def test_validate_auth_backend_is_correctly_configured_backend_doesnt_expose_groups(self):%0A        # Flat file backend doesn't expose user group membership information aka provide%0A        # %22has group info%22 capability%0A        cfg.CONF.set_override(group='auth', name='backend', override='flat_file')%0A        cfg.CONF.set_override(group='auth', name='backend_kwargs',%0A                              override='%7B%22file_path%22: %22dummy%22%7D')%0A        cfg.CONF.set_override(group='rbac', name='enable', override=True)%0A        cfg.CONF.set_override(group='rbac', name='sync_remote_groups', override=True)%0A%0A        expected_msg = ('Configured auth backend doesn%5C't expose user group information. Disable '%0A                        'remote group synchronization or')%0A        self.assertRaisesRegexp(ValueError, expected_msg,%0A                                validate_auth_backend_is_correctly_configured)%0A
|
|
b413af07917f3555edb4b69c4d4a0e4d5c4a629f
|
Create boolean_logic_from_scratch.py
|
boolean_logic_from_scratch.py
|
boolean_logic_from_scratch.py
|
Python
| 0.999243
|
@@ -0,0 +1,412 @@
+#Kunal Gautam%0A#Codewars : @Kunalpod%0A#Problem name: Boolean Logic from Scratch%0A#Problem level: 7 kyu%0A%0Adef func_or(a,b):%0A #your code here - do no be lame and do not use built-in code!%0A if bool(a) or bool(b):%0A return True%0A return False %0A%0Adef func_xor(a,b):%0A #your code here - remember to consider truthy and falsey value as in JS%0A if bool(a)==bool(b):%0A return False%0A return True %0A
|
|
df9c8b2c2e616937afdbf09fc4a76ac7b821c8a5
|
Add test (which we fail at the moment)
|
bugimporters/tests/test_spider.py
|
bugimporters/tests/test_spider.py
|
Python
| 0
|
@@ -0,0 +1,1627 @@
+import os%0A%0Aimport bugimporters.main%0Afrom mock import Mock%0A%0A%0AHERE = os.path.dirname(os.path.abspath(__file__))%0A%0A# Create a global variable that can be referenced both from inside tests%0A# and from module level functions functions.%0A%0Abug_data_transit = %7B%0A 'get_fresh_urls': None,%0A 'update': None,%0A 'delete_by_url': None,%0A%7D%0A%0Atrac_data_transit = %7B%0A 'get_bug_times': lambda url: (None, None),%0A 'get_timeline_url': Mock(),%0A 'update_timeline': Mock()%0A%7D%0A%0Aimporter_data_transits = %7B'bug': bug_data_transit, 'trac': trac_data_transit%7D%0A%0A%0Aclass TestBaseSpider(object):%0A%0A def setup_class(cls):%0A cls.spider = bugimporters.main.BugImportSpider()%0A # This is sample input data that has an invalid special%0A # bug parser name.%0A cls.spider.input_data = %5B%0A %7B'as_appears_in_distribution': u'',%0A 'documentation_type': u'',%0A 'existing_bug_urls': %5B%5D,%0A 'bug_project_name_format': u'FEL',%0A 'base_url': u'https://fedorahosted.org/fedora-electronic-lab/report/1',%0A 'custom_parser': u'fedora-electronic-lab',%0A 'documentation_text': u'',%0A 'bitesized_text': u'',%0A 'bitesized_type': u'',%0A 'queries': %5Bu'https://fedorahosted.org/fedora-electronic-lab'%5D,%0A 'get_older_bug_data': None,%0A 'tracker_name': u'fedora-electronic-lab',%0A 'bugimporter': u'trac'%7D,%0A %5D%0A%0A def test_get_bugimporters(self):%0A # We should get no bugimporters out.%0A%0A # In the past, what happened was a crash.%0A assert(%5B%5D == list(self.spider.get_bugimporters()))%0A
|