commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
d8bfbe6c11249dbfe4a722cc75555ddc3a506acc | make sure any dotted params are turned into underscores as dotted isn't supported | hiidef/hiispider,hiidef/hiispider | hiispider/resources/exposed.py | hiispider/resources/exposed.py | import sys
from twisted.web import server
from twisted.internet.defer import maybeDeferred
from .base import BaseResource
class ExposedResource(BaseResource):
isLeaf = True
def __init__(self, server, function_name):
self.primary_server = server
self.function_name = function_name
BaseResource.__init__(self)
def render(self, request):
request.setHeader('Content-type', 'text/javascript; charset=UTF-8')
kwargs = {}
for key in request.args:
kwargs[key] = request.args[key][0].replace('.', '_')
d = maybeDeferred(self.primary_server.executeReservation, self.function_name, **kwargs)
d.addCallback(self._successResponse)
d.addErrback(self._errorResponse)
d.addCallback(self._immediateResponse, request)
return server.NOT_DONE_YET
| import sys
from twisted.web import server
from twisted.internet.defer import maybeDeferred
from .base import BaseResource
class ExposedResource(BaseResource):
isLeaf = True
def __init__(self, server, function_name):
self.primary_server = server
self.function_name = function_name
BaseResource.__init__(self)
def render(self, request):
request.setHeader('Content-type', 'text/javascript; charset=UTF-8')
kwargs = {}
for key in request.args:
kwargs[key] = request.args[key][0]
d = maybeDeferred(self.primary_server.executeReservation, self.function_name, **kwargs)
d.addCallback(self._successResponse)
d.addErrback(self._errorResponse)
d.addCallback(self._immediateResponse, request)
return server.NOT_DONE_YET
| mit | Python |
347e6f94144ecc10ab6aa36b384c57d10caeb906 | fix typo in ref_dict['evsonganaly'] | NickleDave/hybrid-vocal-classifier | hvc/parse/ref_spect_params.py | hvc/parse/ref_spect_params.py | import numpy as np
import scipy.signal
refs_dict = {
'tachibana': {
'nperseg': 256,
'noverlap': 192,
'window': 'Hann', # Hann window
'freq_cutoffs': [10, 15990], # basically no bandpass, as in Tachibana
'filter_func': 'diff',
'spect_func': 'mpl',
'log_transform_spect': False, # see tachibana feature docs
'thresh': None
},
'koumura': {
'nperseg': 512,
'noverlap': 480,
'window': 'dpss',
'freq_cutoffs': [1000, 8000],
'filter_func': None,
'spect_func': 'scipy',
'log_transform_spect': True,
'thresh': None
},
'evsonganaly': {
'nperseg': 512,
'noverlap': 409,
'window': 'Hann',
'freq_cutoffs': [500, 10000],
'filter_func': None,
'spect_func': 'mpl',
'log_transform_spect': False,
'thresh': None
}
}
| import numpy as np
import scipy.signal
refs_dict = {
'tachibana': {
'nperseg': 256,
'noverlap': 192,
'window': 'Hann', # Hann window
'freq_cutoffs': [10, 15990], # basically no bandpass, as in Tachibana
'filter_func': 'diff',
'spect_func': 'mpl',
'log_transform_spect': False, # see tachibana feature docs
'thresh': None
},
'koumura': {
'nperseg': 512,
'noverlap': 480,
'window': 'dpss',
'freq_cutoffs': [1000, 8000],
'filter_func': None,
'spect_func': 'scipy',
'log_transform_spect': True,
'thresh': None
},
'evsonganaly': {
'nperseg': 512,
'noverlap': 409,
'window': 'hann',
'freq_cutoffs': [500, 10000],
'filter_func': None,
'spect_func': 'mpl',
'log_transform_spect': False,
'thresh': None
}
}
| bsd-3-clause | Python |
9f1134174c594564519a88cbfafe443b2be782e2 | Update track_name, short_label, and long_label per discussions on 2016-09-09 | Duke-GCB/TrackHubGenerator,Duke-GCB/TrackHubGenerator | python/render/render_tracks.py | python/render/render_tracks.py | __author__ = 'dcl9'
from render import render_template
import argparse
import yaml
def generate_track_dict(metadata):
d = dict()
d['track_name'] = '{}_{}'.format(metadata['protein'], metadata['serial_number'])
d['bigbed_url'] = metadata['track_filename']
d['short_label'] = metadata['protein']
d['long_label'] = 'Predicted {} binding sites (site width = {}); iMADS model {}'.format(metadata['protein'], metadata['width'], metadata['serial_number'])
return d
def render_tracks(assembly, metadata_file):
obj = yaml.load(metadata_file)
# Just pull out the assembly ones
tracks = [generate_track_dict(x) for x in obj if x['assembly'] == assembly]
trackdb = {'tracks': tracks}
render_template(trackdb, 'trackDb')
def main():
parser = argparse.ArgumentParser(description='Render trackDb.txt')
parser.add_argument('--assembly')
parser.add_argument('metadata_file', type=argparse.FileType('r'))
args = parser.parse_args()
render_tracks(args.assembly, args.metadata_file)
if __name__ == '__main__':
main()
| __author__ = 'dcl9'
from render import render_template
import argparse
import yaml
def generate_track_dict(metadata):
d = dict()
d['track_name'] = '{}_{}({})'.format(metadata['protein'], metadata['serial_number'], metadata['author_identifier'])
d['bigbed_url'] = metadata['track_filename']
d['short_label'] = '{}_{} binding'.format(metadata['protein'], metadata['serial_number'])
d['long_label'] = 'Predicted {} binding sites (site width = {})'.format(metadata['protein'], metadata['width'])
return d
def render_tracks(assembly, metadata_file):
obj = yaml.load(metadata_file)
# Just pull out the assembly ones
tracks = [generate_track_dict(x) for x in obj if x['assembly'] == assembly]
trackdb = {'tracks': tracks}
render_template(trackdb, 'trackDb')
def main():
parser = argparse.ArgumentParser(description='Render trackDb.txt')
parser.add_argument('--assembly')
parser.add_argument('metadata_file', type=argparse.FileType('r'))
args = parser.parse_args()
render_tracks(args.assembly, args.metadata_file)
if __name__ == '__main__':
main()
| mit | Python |
3ce7a707d1933c89d00c773f95096e31d31325b5 | add Python logging module import to provide log.error | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | salt/states/tls.py | salt/states/tls.py | # -*- coding: utf-8 -*-
'''
Enforce state for SSL/TLS
=========================
'''
# Import Python Libs
from __future__ import absolute_import, unicode_literals, print_function
import time
import datetime
import logging
__virtualname__ = 'tls'
def __virtual__():
if 'tls.cert_info' not in __salt__:
return False
return __virtualname__
def valid_certificate(
name,
weeks=0,
days=0,
hours=0,
minutes=0,
seconds=0,
):
'''
Verify that a TLS certificate is valid now and (optionally) will be valid
for the time specified through weeks, days, hours, minutes, and seconds.
'''
ret = {'name': name,
'changes': {},
'result': False,
'comment': ''}
now = time.time()
try:
cert_info = __salt__['tls.cert_info'](name)
except IOError as exc:
ret['comment'] = '{}'.format(exc)
ret['result'] = False
log.error(ret['comment'])
return ret
# verify that the cert is valid *now*
if now < cert_info['not_before']:
ret['comment'] = 'Certificate is not yet valid'
return ret
if now > cert_info['not_after']:
ret['comment'] = 'Certificate is expired'
return ret
# verify the cert will be valid for defined time
delta_remaining = datetime.timedelta(seconds=cert_info['not_after']-now)
delta_kind_map = {
'weeks': weeks,
'days': days,
'hours': hours,
'minutes': minutes,
'seconds': seconds,
}
delta_min = datetime.timedelta(**delta_kind_map)
# if ther eisn't enough time remaining, we consider it a failure
if delta_remaining < delta_min:
ret['comment'] = 'Certificate will expire in {0}, which is less than {1}'.format(delta_remaining, delta_min)
return ret
ret['result'] = True
ret['comment'] = 'Certificate is valid for {0}'.format(delta_remaining)
return ret
| # -*- coding: utf-8 -*-
'''
Enforce state for SSL/TLS
=========================
'''
# Import Python Libs
from __future__ import absolute_import, unicode_literals, print_function
import time
import datetime
__virtualname__ = 'tls'
def __virtual__():
if 'tls.cert_info' not in __salt__:
return False
return __virtualname__
def valid_certificate(
name,
weeks=0,
days=0,
hours=0,
minutes=0,
seconds=0,
):
'''
Verify that a TLS certificate is valid now and (optionally) will be valid
for the time specified through weeks, days, hours, minutes, and seconds.
'''
ret = {'name': name,
'changes': {},
'result': False,
'comment': ''}
now = time.time()
try:
cert_info = __salt__['tls.cert_info'](name)
except IOError as exc:
ret['comment'] = '{}'.format(exc)
ret['result'] = False
log.error(ret['comment'])
return ret
# verify that the cert is valid *now*
if now < cert_info['not_before']:
ret['comment'] = 'Certificate is not yet valid'
return ret
if now > cert_info['not_after']:
ret['comment'] = 'Certificate is expired'
return ret
# verify the cert will be valid for defined time
delta_remaining = datetime.timedelta(seconds=cert_info['not_after']-now)
delta_kind_map = {
'weeks': weeks,
'days': days,
'hours': hours,
'minutes': minutes,
'seconds': seconds,
}
delta_min = datetime.timedelta(**delta_kind_map)
# if ther eisn't enough time remaining, we consider it a failure
if delta_remaining < delta_min:
ret['comment'] = 'Certificate will expire in {0}, which is less than {1}'.format(delta_remaining, delta_min)
return ret
ret['result'] = True
ret['comment'] = 'Certificate is valid for {0}'.format(delta_remaining)
return ret
| apache-2.0 | Python |
039ca493135867bb70c182ba640628c5cc88081e | Simplify Post Detail queryset. | jambonrose/DjangoUnleashed-1.8,jambonrose/DjangoUnleashed-1.8 | blog/views.py | blog/views.py | from django.core.urlresolvers import reverse_lazy
from django.views.generic import (
ArchiveIndexView, CreateView, DeleteView,
DetailView, MonthArchiveView, YearArchiveView)
from core.utils import UpdateView
from user.decorators import \
require_authenticated_permission
from .forms import PostForm
from .models import Post
from .utils import (
AllowFuturePermissionMixin, DateObjectMixin,
PostFormValidMixin)
class PostArchiveMonth(
AllowFuturePermissionMixin,
MonthArchiveView):
model = Post
date_field = 'pub_date'
month_format = '%m'
class PostArchiveYear(
AllowFuturePermissionMixin,
YearArchiveView):
model = Post
date_field = 'pub_date'
make_object_list = True
@require_authenticated_permission(
'blog.add_post')
class PostCreate(PostFormValidMixin, CreateView):
form_class = PostForm
model = Post
@require_authenticated_permission(
'blog.delete_post')
class PostDelete(DateObjectMixin, DeleteView):
date_field = 'pub_date'
model = Post
success_url = reverse_lazy('blog_post_list')
class PostDetail(DateObjectMixin, DetailView):
date_field = 'pub_date'
queryset = (
Post.objects
.select_related('author__profile')
)
class PostList(
AllowFuturePermissionMixin,
ArchiveIndexView):
allow_empty = True
context_object_name = 'post_list'
date_field = 'pub_date'
make_object_list = True
model = Post
paginate_by = 5
template_name = 'blog/post_list.html'
@require_authenticated_permission(
'blog.change_post')
class PostUpdate(
PostFormValidMixin,
DateObjectMixin,
UpdateView):
date_field = 'pub_date'
form_class = PostForm
model = Post
| from django.core.urlresolvers import reverse_lazy
from django.views.generic import (
ArchiveIndexView, CreateView, DeleteView,
DetailView, MonthArchiveView, YearArchiveView)
from core.utils import UpdateView
from user.decorators import \
require_authenticated_permission
from .forms import PostForm
from .models import Post
from .utils import (
AllowFuturePermissionMixin, DateObjectMixin,
PostFormValidMixin)
class PostArchiveMonth(
AllowFuturePermissionMixin,
MonthArchiveView):
model = Post
date_field = 'pub_date'
month_format = '%m'
class PostArchiveYear(
AllowFuturePermissionMixin,
YearArchiveView):
model = Post
date_field = 'pub_date'
make_object_list = True
@require_authenticated_permission(
'blog.add_post')
class PostCreate(PostFormValidMixin, CreateView):
form_class = PostForm
model = Post
@require_authenticated_permission(
'blog.delete_post')
class PostDelete(DateObjectMixin, DeleteView):
date_field = 'pub_date'
model = Post
success_url = reverse_lazy('blog_post_list')
class PostDetail(DateObjectMixin, DetailView):
date_field = 'pub_date'
queryset = (
Post.objects
.select_related('author')
.select_related('author__profile')
)
class PostList(
AllowFuturePermissionMixin,
ArchiveIndexView):
allow_empty = True
context_object_name = 'post_list'
date_field = 'pub_date'
make_object_list = True
model = Post
paginate_by = 5
template_name = 'blog/post_list.html'
@require_authenticated_permission(
'blog.change_post')
class PostUpdate(
PostFormValidMixin,
DateObjectMixin,
UpdateView):
date_field = 'pub_date'
form_class = PostForm
model = Post
| bsd-2-clause | Python |
1eed345f4494313274eb97bcc8fdc60089545f51 | test script | chriz2600/DreamcastHDMI,chriz2600/DreamcastHDMI,chriz2600/DreamcastHDMI,chriz2600/DreamcastHDMI,chriz2600/DreamcastHDMI,chriz2600/DreamcastHDMI,chriz2600/DreamcastHDMI,chriz2600/DreamcastHDMI | assets/test.py | assets/test.py | #!env python3
#for x in range(859, 1400):
# print(x / 858);
# 1,06993006993007
# 1,05893536121673
results = {}
for vert in range(1053, 1900):
for horiz in range(859, 1400):
horiztest = (horiz / 858)
verttest = (vert / 1052)
ref = horiztest * verttest * 54
results[ref] = {
"horiztest": horiztest,
"verttest": verttest,
"ref": ref,
"horiz": horiz,
"vert": vert
}
for res in results:
data = results[res]
print("%g MHz: %f %f (%.60f %.60f)" % (res, data["horiz"], data["vert"], data["horiztest"], data["verttest"]))
# ./assets/test.py | awk '{ print length($1), $0 }' | sort -n | cut -d" " -f2- | head -10
| #!env python3
#for x in range(859, 1400):
# print(x / 858);
# 1,06993006993007
# 1,05893536121673
for vert in range(1053, 1900):
for horiz in range(859, 1400):
horiztest = (horiz / 858)
verttest = (vert / 1052)
ref = horiztest * verttest * 54
if ref == 72.0:
print("%.60f %.60f %f %f %f" % (horiztest, verttest, ref, horiz, vert));
# ./assets/test.py | awk '{ print length, $0 }' | sort -n | cut -d" " -f2- | less | mit | Python |
36472b552a119aa57acb484b9522445b3cd804dd | check for corrupted audio files | long0612/sas-clientLib,long0612/sas-clientLib,long0612/sas-clientLib,long0612/sas-clientLib | python/test/audio_proc_test.py | python/test/audio_proc_test.py | '''
Try processing audio from Illiad in python
Long Le <longle1@illinois.edu>
University of Illinois
'''
print(__doc__)
import numpy as np
from scipy.io import wavfile
from scipy import signal
import matplotlib.pyplot as plt
import sys
sys.path.insert(0,sys.path[0]+'/../src/')
#print(sys.path)
from sasclient import *
from datetime import datetime
servAddr = 'acoustic.ifp.illinois.edu:8080'
DB = 'publicDb'
USER = 'nan'
PWD = 'publicPwd'
DATA = 'data'
EVENT = 'event'
q = {'t1':datetime(2016,7,20,00,00,00),\
't2':datetime(2016,8,3,00,00,00)}
events = IllQuery(servAddr,DB, USER, PWD, EVENT, q);
print("Number of events found is "+str(len(events)))
# bytes
data = IllGridGet(servAddr, DB, USER, PWD, DATA, events[0]['filename'])
if 'RIFF' == data[0:4].decode('utf-8'):
with open('audio.wav', 'wb') as f:
f.write(data)
else:
sys.exit('Corrupted audio file!')
fs,data = wavfile.read('audio.wav')
T = np.arange(len(data))/fs
f,t,Sxx = signal.spectrogram(data,fs,window='hann',nperseg=512,noverlap=256)
fig = plt.figure()
fig.add_subplot(211)
plt.pcolormesh(t,f,Sxx)
plt.ylabel('Frequency (Hz)')
plt.xlabel('Time (sec)')
fig.add_subplot(212)
plt.plot(T,data)
plt.ylabel('Amplitude')
plt.xlabel('Time (sec)')
plt.show()
| '''
Try processing audio from Illiad in python
Long Le <longle1@illinois.edu>
University of Illinois
'''
print(__doc__)
import numpy as np
from scipy.io import wavfile
from scipy import signal
import matplotlib.pyplot as plt
import sys
sys.path.insert(0,sys.path[0]+'/../src/')
#print(sys.path)
from sasclient import *
from datetime import datetime
servAddr = 'acoustic.ifp.illinois.edu:8080'
DB = 'publicDb'
USER = 'nan'
PWD = 'publicPwd'
DATA = 'data'
EVENT = 'event'
q = {'t1':datetime(2016,7,20,00,00,00),\
't2':datetime(2016,8,3,00,00,00)}
events = IllQuery(servAddr,DB, USER, PWD, EVENT, q);
print("Number of events found is "+str(len(events)))
# bytes
data = IllGridGet(servAddr, DB, USER, PWD, DATA, events[0]['filename'])
with open('audio.wav', 'wb') as f:
f.write(data)
fs,data = wavfile.read('audio.wav')
T = np.arange(len(data))/fs
f,t,Sxx = signal.spectrogram(data,fs,window='hann',nperseg=512,noverlap=256)
fig = plt.figure()
fig.add_subplot(211)
plt.pcolormesh(t,f,Sxx)
plt.ylabel('Frequency (Hz)')
plt.xlabel('Time (sec)')
fig.add_subplot(212)
plt.plot(T,data)
plt.ylabel('Amplitude')
plt.xlabel('Time (sec)')
plt.show()
| mit | Python |
7b1d65d17dbbc71cbd80c49fd2ed19636f49680e | Add initial hue parameter | kevinpt/symbolator,SymbiFlow/symbolator | nucanvas/color/sinebow.py | nucanvas/color/sinebow.py | import math
from math import sin, pi
import colorsys
def sinebow(hue):
'''Adapted from http://basecase.org/env/on-rainbows'''
hue = -(hue + 0.5) # Start at red rotating clockwise
rgb = sin(pi * hue), sin(pi * (hue + 1.0/3.0)), sin(pi * (hue + 2.0/3.0))
return tuple(int(255 * c**2) for c in rgb)
def distinct_color_sequence(hue=0.0):
# Hue is normalized from 0-1.0 for one revolution
phi = (1 + 5**0.5) / 2
golden_angle = phi #1.0 / phi**2
#print('# GA:', math.degrees(golden_angle), phi)
while(True):
yield sinebow(hue)
hue += golden_angle
def lighten(rgb, p):
h,l,s = colorsys.rgb_to_hls(*(c / 255.0 for c in rgb))
l = p + l - p*l
return tuple(int(c * 255) for c in colorsys.hls_to_rgb(h,l,s))
if __name__ == '__main__':
import PIL
from PIL import Image, ImageDraw
cs = distinct_color_sequence()
im = Image.new('RGB',(1024,10))
d = ImageDraw.Draw(im)
for i in range(256):
hue = i / 256
#r,g,b = sinebow(hue)
r,g,b = next(cs)
d.line([(i*4,0), (i*4,9)], (r,g,b), width=4)
im.save('sinebow_rand.png')
im = Image.new('RGB',(256,10))
d = ImageDraw.Draw(im)
for i in range(256):
hue = i / 256
r,g,b = sinebow(hue)
d.line([(i,0), (i,9)], (r,g,b))
im.save('sinebow.png')
| import math
from math import sin, pi
import colorsys
def sinebow(hue):
'''Adapted from http://basecase.org/env/on-rainbows'''
hue = -(hue + 0.5) # Start at red rotating clockwise
rgb = sin(pi * hue), sin(pi * (hue + 1.0/3.0)), sin(pi * (hue + 2.0/3.0))
return tuple(int(255 * c**2) for c in rgb)
def distinct_color_sequence():
phi = (1 + 5**0.5) / 2
golden_angle = phi #1.0 / phi**2
#print('# GA:', math.degrees(golden_angle), phi)
hue = 0.0 # Normalized from 0-1.0 for one revolution
while(True):
yield sinebow(hue)
hue += golden_angle
def lighten(rgb, p):
h,l,s = colorsys.rgb_to_hls(*(c / 255.0 for c in rgb))
l = p + l - p*l
return tuple(int(c * 255) for c in colorsys.hls_to_rgb(h,l,s))
if __name__ == '__main__':
import PIL
from PIL import Image, ImageDraw
cs = distinct_color_sequence()
im = Image.new('RGB',(1024,10))
d = ImageDraw.Draw(im)
for i in range(256):
hue = i / 256
#r,g,b = sinebow(hue)
r,g,b = next(cs)
d.line([(i*4,0), (i*4,9)], (r,g,b), width=4)
im.save('sinebow_rand.png')
im = Image.new('RGB',(256,10))
d = ImageDraw.Draw(im)
for i in range(256):
hue = i / 256
r,g,b = sinebow(hue)
d.line([(i,0), (i,9)], (r,g,b))
im.save('sinebow.png')
| mit | Python |
fa5bb37159d09c5bff53b83a4821e3f154892d1d | Fix issue with test discovery and broken CUDA drivers. | sklam/numba,cpcloud/numba,sklam/numba,numba/numba,seibert/numba,jriehl/numba,numba/numba,stuartarchibald/numba,jriehl/numba,IntelLabs/numba,cpcloud/numba,stuartarchibald/numba,gmarkall/numba,IntelLabs/numba,jriehl/numba,seibert/numba,numba/numba,jriehl/numba,gmarkall/numba,numba/numba,sklam/numba,seibert/numba,stuartarchibald/numba,IntelLabs/numba,sklam/numba,IntelLabs/numba,jriehl/numba,stonebig/numba,cpcloud/numba,IntelLabs/numba,stonebig/numba,stonebig/numba,gmarkall/numba,stonebig/numba,seibert/numba,cpcloud/numba,stuartarchibald/numba,gmarkall/numba,sklam/numba,seibert/numba,stonebig/numba,gmarkall/numba,numba/numba,stuartarchibald/numba,cpcloud/numba | numba/cuda/device_init.py | numba/cuda/device_init.py | from __future__ import print_function, absolute_import, division
# Re export
from .stubs import (threadIdx, blockIdx, blockDim, gridDim, syncthreads,
shared, local, const, grid, gridsize, atomic,
threadfence_block, threadfence_system,
threadfence)
from .cudadrv.error import CudaSupportError
from .cudadrv import nvvm
from . import initialize
from .errors import KernelRuntimeError
from .decorators import jit, autojit, declare_device
from .api import *
from .api import _auto_device
from .kernels import reduction
reduce = Reduce = reduction.Reduce
def is_available():
"""Returns a boolean to indicate the availability of a CUDA GPU.
This will initialize the driver if it hasn't been initialized.
"""
# whilst `driver.is_available` will init the driver itself,
# the driver initialization may raise and as a result break
# test discovery/orchestration as `cuda.is_available` is often
# used as a guard for whether to run a CUDA test, the try/except
# below is to handle this case.
driver_is_available = False
try:
driver_is_available = driver.driver.is_available
except CudaSupportError:
pass
return driver_is_available and nvvm.is_available()
def cuda_error():
"""Returns None or an exception if the CUDA driver fails to initialize.
"""
return driver.driver.initialization_error
initialize.initialize_all()
| from __future__ import print_function, absolute_import, division
# Re export
from .stubs import (threadIdx, blockIdx, blockDim, gridDim, syncthreads,
shared, local, const, grid, gridsize, atomic,
threadfence_block, threadfence_system,
threadfence)
from .cudadrv.error import CudaSupportError
from .cudadrv import nvvm
from . import initialize
from .errors import KernelRuntimeError
from .decorators import jit, autojit, declare_device
from .api import *
from .api import _auto_device
from .kernels import reduction
reduce = Reduce = reduction.Reduce
def is_available():
"""Returns a boolean to indicate the availability of a CUDA GPU.
This will initialize the driver if it hasn't been initialized.
"""
return driver.driver.is_available and nvvm.is_available()
def cuda_error():
"""Returns None or an exception if the CUDA driver fails to initialize.
"""
return driver.driver.initialization_error
initialize.initialize_all()
| bsd-2-clause | Python |
e754c16b0e78fbf084374f470367f1971c826cd3 | Update constants.py | cloudcomputinghust/bioinformatics-dashboard,cloudcomputinghust/bioinformatics-dashboard,cloudcomputinghust/bioinformatics-dashboard | bioinformatics/bioworkflow/constants.py | bioinformatics/bioworkflow/constants.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from django.conf import settings as djangoSettings
DIRNAME = os.path.dirname(__file__)
# JSON_PATH = "/usr/share/openstack-dashboard/openstack_dashboard/dashboards/bioinformatics/gojs_parser/test.json"
JSON_PATH = djangoSettings.STATIC_ROOT + "/outfile.json"
HEAT_TEMPLATE_URL = "https://raw.githubusercontent.com/cloudcomputinghust/bio-informatics/master/kiennt/PhatTrien/bioinformatics/bioworkflow/docker_container.yaml"
PICKLE_PATH = os.path.join(DIRNAME, 'pickle/gojsnode.pickle')
MISTRAL_TEMPLATE_PATH = "/usr/share/openstack-dashboard/openstack_dashboard/dashboards/bioinformatics/gojs_parser/file_name"
USER = 'admin'
KEY = 'vandai123'
TENANT = 'admin'
#AUTHURL = 'http://192.168.100.11:35357/v3'
AUTHURL = 'http://192.168.50.15:5000/v2.0/'
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from django.conf import settings as djangoSettings
DIRNAME = os.path.dirname(__file__)
# JSON_PATH = "/usr/share/openstack-dashboard/openstack_dashboard/dashboards/bioinformatics/gojs_parser/test.json"
JSON_PATH = djangoSettings.STATIC_ROOT + "/outfile.json"
HEAT_TEMPLATE_URL = "https://raw.githubusercontent.com/cloudcomputinghust/bio-informatics/master/kiennt/PhatTrien/bioinformatics/bioworkflow/docker_container.yaml"
PICKLE_PATH = os.path.join(DIRNAME, 'pickle/gojsnode.pickle')
MISTRAL_TEMPLATE_PATH = "/usr/share/openstack-dashboard/openstack_dashboard/dashboards/bioinformatics/gojs_parser/file_name"
USER = 'admin'
KEY = 'vandai123'
TENANT = 'admin'
AUTHURL = 'http://192.168.100.11:35357/v3'
AUTHURL = 'http://192.168.100.11:5000/v2.0/'
| apache-2.0 | Python |
093657bf154f0fcadc209941791017a300f32b90 | duplicate line checking function added | mhabib1981/pySecn00b | url_decode.py | url_decode.py | import sys
from urllib import unquote
import re
from binascii import unhexlify
from hashlib import md5
def main():
if len(sys.argv)!=2:
print "Usage: %s <file>" % sys.argv[0]
sys.exit(0)
else:
inFile=open(sys.argv[1], 'r').readlines()
url_hash_list=[]
for line in inFile:
curr_hash_val=line_hash(line)
process_line=check_hash(curr_hash_val,url_hash_list)
url_hash_list.append(curr_hash_val)
if '%' in line and process_line==True:
print url_decode(line)
if '0x' in line and process_line==True:
hex_val=re.findall(r'0x(.*?),',line)
print re.sub(r'0x(.*?),',hex_decode(hex_val[0]),line)
def check_hash(curr_val,list):
if curr_val not in list:
return True
else:
return False
def line_hash(input):
hash_val=md5(input.encode())
return hash_val.hexdigest()
def url_decode(input):
url=unquote(input)
return url.decode('utf-8', 'ignore')
def hex_decode(input):
if len(input) % 2 ==0:
try:
uhex_val=unhexlify(input)
return uhex_val + ","
except TypeError:
pass
if __name__ == '__main__':
main()
| #!/usr/bin
import sys
from urllib import unquote
import re
from binascii import unhexlify
def main():
if len(sys.argv)!=2:
print "Usage: %s <file>" % sys.argv[0]
sys.exit(0)
else:
inFile=open(sys.argv[1], 'r').readlines()
for line in inFile:
decoded_url=url_decode(line)
if '0x' in decoded_url:
hex_val=re.findall(r'0x(.*?),',decoded_url)
print re.sub(r'0x(.*?),',hex_decode(hex_val[0]),decoded_url)
else:
print decoded_url
def url_decode(input):
url=unquote(input)
return url.decode('utf-8', 'ignore')
def hex_decode(input):
if len(input) % 2 ==0:
try:
uhex_val=unhexlify(input)
return uhex_val + ","
except TypeError:
pass
if __name__ == '__main__':
main()
| cc0-1.0 | Python |
6a144b25ec4e75dc526f821b651f4e426d792d75 | Handle comments on same line as function declaration | ilveroluca/rapi,ilveroluca/rapi,ilveroluca/rapi,ilveroluca/rapi | rapi_bwa/extract_bwa_header.py | rapi_bwa/extract_bwa_header.py | #!/usr/bin/env python
import re
import os
import sys
RequiredPrototypes = {
'kt_for': 'kthread.c',
'mem_align1_core': 'bwamem.c',
'mem_approx_mapq_se': 'bwamem.c',
'mem_mark_primary_se': 'bwamem.c',
'mem_matesw': 'bwamem_pair.c',
'mem_pair': 'bwamem_pair.c'
}
def writeline(txt=''):
sys.stdout.write(txt + '\n')
def extract_prototype(fn_name, filename):
if os.path.getsize(filename) > 1000000:
raise RuntimeError("file %s is larger than 1 MB. Are you sure it's a source file?" % filename)
with open(filename) as f:
text = f.read()
m = re.search(r'([A-Za-z_]\w* %s\([^;{]+)\s*(;|{|//)' % fn_name, text, re.MULTILINE)
if m:
proto = m.group(1)
# remove any trailing comments
comment_pos = proto.find('//')
if comment_pos >= 0:
proto = proto[0:comment_pos]
# strip whitespace
proto = proto.strip()
return proto
else:
raise RuntimeError("Couldn't extract prototype for function %s from file %s" % \
(fn_name, filename))
def extract_bwa_version(bwa_src_path):
main_file = os.path.join(bwa_src_path, 'main.c')
with open(main_file) as f:
for line in f:
m = re.match(r'#define PACKAGE_VERSION (.+)', line)
if m:
bwa_ver = m.group(1)
return bwa_ver
raise RuntimeError("Couldn't find PACKAGE_VERSION define in main.c")
def main(args=None):
if args is None:
args = sys.argv[1:]
if args and len(args) != 1:
sys.exit("Usage: %s [BWA_PATH]" % os.path.basename(sys.argv[0]))
bwa_path = args[0] if args else '.'
writeline('#ifndef RAPI_BWA_HEADER')
writeline('#define RAPI_BWA_HEADER')
writeline()
bwa_ver = extract_bwa_version(bwa_path)
writeline("#define WRAPPED_BWA_VERSION %s" % bwa_ver)
writeline()
for fn_name, file_name in RequiredPrototypes.iteritems():
proto = extract_prototype(fn_name, os.path.join(bwa_path, file_name))
writeline("extern %s;" % proto)
writeline()
writeline('#endif')
if __name__ == '__main__':
main()
| #!/usr/bin/env python
import re
import os
import sys
RequiredPrototypes = {
'kt_for': 'kthread.c',
'mem_align1_core': 'bwamem.c',
'mem_approx_mapq_se': 'bwamem.c',
'mem_mark_primary_se': 'bwamem.c',
'mem_matesw': 'bwamem_pair.c',
'mem_pair': 'bwamem_pair.c'
}
def writeline(txt=''):
sys.stdout.write(txt + '\n')
def extract_prototype(fn_name, filename):
if os.path.getsize(filename) > 1000000:
raise RuntimeError("file %s is larger than 1 MB. Are you sure it's a source file?" % filename)
with open(filename) as f:
text = f.read()
m = re.search(r'([A-Za-z_]\w* %s\([^;{]+)\s*(;|{)' % fn_name, text, re.MULTILINE)
if m:
proto = m.group(1).strip()
return proto
else:
raise RuntimeError("Couldn't extract prototype for function %s from file %s" % \
(fn_name, filename))
def extract_bwa_version(bwa_src_path):
main_file = os.path.join(bwa_src_path, 'main.c')
with open(main_file) as f:
for line in f:
m = re.match(r'#define PACKAGE_VERSION (.+)', line)
if m:
bwa_ver = m.group(1)
return bwa_ver
raise RuntimeError("Couldn't find PACKAGE_VERSION define in main.c")
def main(args=None):
if args is None:
args = sys.argv[1:]
if args and len(args) != 1:
sys.exit("Usage: %s [BWA_PATH]" % os.path.basename(sys.argv[0]))
bwa_path = args[0] if args else '.'
writeline('#ifndef RAPI_BWA_HEADER')
writeline('#define RAPI_BWA_HEADER')
writeline()
bwa_ver = extract_bwa_version(bwa_path)
writeline("#define WRAPPED_BWA_VERSION %s" % bwa_ver)
writeline()
for fn_name, file_name in RequiredPrototypes.iteritems():
proto = extract_prototype(fn_name, os.path.join(bwa_path, file_name))
writeline("extern %s;" % proto)
writeline()
writeline('#endif')
if __name__ == '__main__':
main()
| mit | Python |
4ad67a81e2b8620649c27628473542c2d12ba02b | change those timeformats | ebu/ebu-tt-live-toolkit,bbc/ebu-tt-live-toolkit,bbc/ebu-tt-live-toolkit,bbc/ebu-tt-live-toolkit,ebu/ebu-tt-live-toolkit,ebu/ebu-tt-live-toolkit | ebu_tt_live/scripts/ebu_dummy_encoder.py | ebu_tt_live/scripts/ebu_dummy_encoder.py | import logging
from .common import create_loggers
from ebu_tt_live import bindings
from ebu_tt_live.bindings import _ebuttm as metadata
from pyxb import BIND
from datetime import timedelta
log = logging.getLogger('ebu_dummy_encoder')
def main():
create_loggers()
log.info('Dummy XML Encoder')
tt = bindings.tt(
sequenceIdentifier='testSequence001',
sequenceNumber='1',
timeBase='clock',
clockMode='local',
lang='en-GB',
head=bindings.head_type(
metadata.headMetadata_type(
metadata.documentMetadata()
),
bindings.styling(
bindings.style(
id='ID001'
)
),
bindings.layout()
),
body=BIND(
bindings.div_type(
bindings.p_type(
bindings.span_type(
'Some example text...'
),
bindings.br_type(),
bindings.span_type(
'And another line'
),
id='ID005',
begin=timedelta(seconds=.5),
end=timedelta(seconds=3.42),
)
),
begin=timedelta(seconds=.5),
dur=timedelta(seconds=5)
)
)
print(
tt.toxml()
)
log.info('XML output printed')
| import logging
from .common import create_loggers
from ebu_tt_live import bindings
from ebu_tt_live.bindings import _ebuttm as metadata
from pyxb import BIND
from datetime import timedelta
log = logging.getLogger('ebu_dummy_encoder')
def main():
create_loggers()
log.info('Dummy XML Encoder')
tt = bindings.tt(
sequenceIdentifier='testSequence001',
sequenceNumber='1',
timeBase='clock',
clockMode='local',
lang='en-GB',
head=bindings.head_type(
metadata.headMetadata_type(
metadata.documentMetadata()
),
bindings.styling(
bindings.style(
id='ID001'
)
),
bindings.layout()
),
body=BIND(
bindings.div_type(
bindings.p_type(
bindings.span_type(
'Some example text...'
),
bindings.br_type(),
bindings.span_type(
'And another line'
),
id='ID005',
begin='00:00:00.50',
end='00:00:03.24',
)
),
begin='00:00:00.50',
dur=timedelta(seconds=5)
)
)
print(
tt.toxml()
)
log.info('XML output printed')
| bsd-3-clause | Python |
54a24423c07e1d6e2c0a6e8c7c7586b8655aa6e7 | Fix `date` filter for models with `time` field | cdubz/babybuddy,cdubz/babybuddy,cdubz/babybuddy | api/filters.py | api/filters.py | # -*- coding: utf-8 -*-
from django_filters import rest_framework as filters
from core import models
class TimeFieldFilter(filters.FilterSet):
date = filters.DateFilter(field_name='time__date', label='Date')
date_min = filters.DateFilter(field_name='time__date', label='Min. Date',
lookup_expr='gte')
date_max = filters.DateFilter(field_name='time__date', label='Max. Date',
lookup_expr='lte')
class StartEndFieldFilter(filters.FilterSet):
end = filters.DateFilter(field_name='end__date', label='End Date')
end_min = filters.DateFilter(field_name='end__date', label='Min. End Date',
lookup_expr='gte')
end_max = filters.DateFilter(field_name='end__date', label='Max. End Date',
lookup_expr='lte')
start = filters.DateFilter(field_name='start__date', label='Start Date')
start_min = filters.DateFilter(field_name='start__date', lookup_expr='gte',
label='Min. Start Date',)
start_end = filters.DateFilter(field_name='start__date', lookup_expr='lte',
label='Max. End Date')
class DiaperChangeFilter(TimeFieldFilter):
class Meta:
model = models.DiaperChange
fields = ['child', 'wet', 'solid', 'color', 'amount']
class FeedingFilter(StartEndFieldFilter):
class Meta:
model = models.Feeding
fields = ['child', 'type', 'method']
class NoteFilter(TimeFieldFilter):
class Meta:
model = models.Note
fields = ['child']
class SleepFilter(StartEndFieldFilter):
class Meta:
model = models.Sleep
fields = ['child']
class TemperatureFilter(TimeFieldFilter):
class Meta:
model = models.Temperature
fields = ['child']
class TimerFilter(StartEndFieldFilter):
class Meta:
model = models.Timer
fields = ['child', 'active', 'user']
class TummyTimeFilter(StartEndFieldFilter):
class Meta:
model = models.TummyTime
fields = ['child']
| # -*- coding: utf-8 -*-
from django_filters import rest_framework as filters
from core import models
class TimeFieldFilter(filters.FilterSet):
date = filters.DateFilter(field_name='date', label='Date')
date_min = filters.DateFilter(field_name='time__date', label='Min. Date',
lookup_expr='gte')
date_max = filters.DateFilter(field_name='time__date', label='Max. Date',
lookup_expr='lte')
class StartEndFieldFilter(filters.FilterSet):
end = filters.DateFilter(field_name='end__date', label='End Date')
end_min = filters.DateFilter(field_name='end__date', label='Min. End Date',
lookup_expr='gte')
end_max = filters.DateFilter(field_name='end__date', label='Max. End Date',
lookup_expr='lte')
start = filters.DateFilter(field_name='start__date', label='Start Date')
start_min = filters.DateFilter(field_name='start__date', lookup_expr='gte',
label='Min. Start Date',)
start_end = filters.DateFilter(field_name='start__date', lookup_expr='lte',
label='Max. End Date')
class DiaperChangeFilter(TimeFieldFilter):
class Meta:
model = models.DiaperChange
fields = ['child', 'wet', 'solid', 'color', 'amount']
class FeedingFilter(StartEndFieldFilter):
class Meta:
model = models.Feeding
fields = ['child', 'type', 'method']
class NoteFilter(TimeFieldFilter):
class Meta:
model = models.Note
fields = ['child']
class SleepFilter(StartEndFieldFilter):
class Meta:
model = models.Sleep
fields = ['child']
class TemperatureFilter(TimeFieldFilter):
class Meta:
model = models.Temperature
fields = ['child']
class TimerFilter(StartEndFieldFilter):
class Meta:
model = models.Timer
fields = ['child', 'active', 'user']
class TummyTimeFilter(StartEndFieldFilter):
class Meta:
model = models.TummyTime
fields = ['child']
| bsd-2-clause | Python |
adbb54f0c935ef04e4e2e00cf147e3729d8c1761 | Add version number | iamahuman/angr,schieb/angr,angr/angr,iamahuman/angr,angr/angr,schieb/angr,angr/angr,iamahuman/angr,schieb/angr | angr/__init__.py | angr/__init__.py | # pylint: disable=wildcard-import
__version__ = (8, 19, 2, 4)
if bytes is str:
raise Exception("""
=-=-=-=-=-=-=-=-=-=-=-=-= WELCOME TO THE FUTURE! =-=-=-=-=-=-=-=-=-=-=-=-=-=
angr has transitioned to python 3. Due to the small size of the team behind it,
we can't reasonably maintain compatibility between both python 2 and python 3.
If you want to continue using the most recent version of angr (you definitely
want that, trust us) you should upgrade to python 3. It's like getting your
vaccinations. It hurts a little bit initially but in the end it's worth it.
For more information, see here: https://docs.angr.io/MIGRATION.html
Good luck!
""")
# first: let's set up some bootstrap logging
import logging
logging.getLogger("angr").addHandler(logging.NullHandler())
from .misc.loggers import Loggers
loggers = Loggers()
del Loggers
del logging
# this must happen first, prior to initializing analyses
from .sim_procedure import SimProcedure
from .procedures import SIM_PROCEDURES, SimProcedures, SIM_LIBRARIES
from . import sim_options
options = sim_options # alias
# enums
from .state_plugins.inspect import BP_BEFORE, BP_AFTER, BP_BOTH, BP_IPDB, BP_IPYTHON
# other stuff
from .state_plugins.inspect import BP
from .state_plugins import SimStatePlugin
from .project import *
from .errors import *
from .blade import Blade
from .simos import SimOS
from .block import Block
from .sim_manager import SimulationManager
from .analyses import Analysis, register_analysis
from . import analyses
from . import knowledge_plugins
from . import exploration_techniques
from .exploration_techniques import ExplorationTechnique
from . import type_backend
from . import sim_type as types
from .state_hierarchy import StateHierarchy
from .sim_state import SimState
from .engines import SimEngineVEX, SimEngine
from .calling_conventions import DEFAULT_CC, SYSCALL_CC, PointerWrapper, SimCC
from .storage.file import SimFileBase, SimFile, SimPackets, SimFileStream, SimPacketsStream, SimFileDescriptor, SimFileDescriptorDuplex
from .state_plugins.filesystem import SimMount, SimHostFilesystem
from .state_plugins.heap import SimHeapBrk, SimHeapPTMalloc, PTChunk
# for compatibility reasons
from . import sim_manager as manager
# now that we have everything loaded, re-grab the list of loggers
loggers.load_all_loggers()
| # pylint: disable=wildcard-import
if bytes is str:
raise Exception("""
=-=-=-=-=-=-=-=-=-=-=-=-= WELCOME TO THE FUTURE! =-=-=-=-=-=-=-=-=-=-=-=-=-=
angr has transitioned to python 3. Due to the small size of the team behind it,
we can't reasonably maintain compatibility between both python 2 and python 3.
If you want to continue using the most recent version of angr (you definitely
want that, trust us) you should upgrade to python 3. It's like getting your
vaccinations. It hurts a little bit initially but in the end it's worth it.
For more information, see here: https://docs.angr.io/MIGRATION.html
Good luck!
""")
# first: let's set up some bootstrap logging
import logging
logging.getLogger("angr").addHandler(logging.NullHandler())
from .misc.loggers import Loggers
loggers = Loggers()
del Loggers
del logging
# this must happen first, prior to initializing analyses
from .sim_procedure import SimProcedure
from .procedures import SIM_PROCEDURES, SimProcedures, SIM_LIBRARIES
from . import sim_options
options = sim_options # alias
# enums
from .state_plugins.inspect import BP_BEFORE, BP_AFTER, BP_BOTH, BP_IPDB, BP_IPYTHON
# other stuff
from .state_plugins.inspect import BP
from .state_plugins import SimStatePlugin
from .project import *
from .errors import *
from .blade import Blade
from .simos import SimOS
from .block import Block
from .sim_manager import SimulationManager
from .analyses import Analysis, register_analysis
from . import analyses
from . import knowledge_plugins
from . import exploration_techniques
from .exploration_techniques import ExplorationTechnique
from . import type_backend
from . import sim_type as types
from .state_hierarchy import StateHierarchy
from .sim_state import SimState
from .engines import SimEngineVEX, SimEngine
from .calling_conventions import DEFAULT_CC, SYSCALL_CC, PointerWrapper, SimCC
from .storage.file import SimFileBase, SimFile, SimPackets, SimFileStream, SimPacketsStream, SimFileDescriptor, SimFileDescriptorDuplex
from .state_plugins.filesystem import SimMount, SimHostFilesystem
from .state_plugins.heap import SimHeapBrk, SimHeapPTMalloc, PTChunk
# for compatibility reasons
from . import sim_manager as manager
# now that we have everything loaded, re-grab the list of loggers
loggers.load_all_loggers()
| bsd-2-clause | Python |
364b9c71442b141edaafae9acacad497f091f7db | Fix name duplication in `app_config` Bazel rule | project-oak/oak,project-oak/oak,project-oak/oak,project-oak/oak,project-oak/oak,project-oak/oak,project-oak/oak | oak/common/app_config.bzl | oak/common/app_config.bzl | #
# Copyright 2020 The Project Oak Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Rule for serializing an Oak application configuration."""
def serialized_config(name, textproto, modules):
"""Serializes an Oak application configuration in a binary file.
Implicit output targets:
name.bin: A binary file with a serialized application configuration.
Args:
name: Name of the generated binary file (the output file will have a `.bin` extension).
textproto: Textproto file with application configuration.
modules: A dictionary with module names as keys and module paths as values.
"""
srcs = [textproto] + modules.values()
module_list = ",".join(["{}:$(location {})".format(name, path) for (name, path) in modules.items()])
cmd = "$(location //oak/common:app_config_serializer)" + \
" --textproto=$(location {})".format(textproto) + \
" --modules={}".format(module_list) + \
" --output_file=$@"
native.genrule(
name = name,
srcs = srcs,
# Name of the rule cannot be the same as the output file.
outs = ["{}.bin".format(name)],
cmd = cmd,
tools = ["//oak/common:app_config_serializer"],
)
| #
# Copyright 2020 The Project Oak Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Rule for serializing an Oak application configuration."""
def serialized_config(name, textproto, modules):
"""Serializes an Oak application configuration in a binary file.
Args:
name: Name of the generated binary file.
textproto: Textproto file with application configuration.
modules: A dictionary with module names as keys and module paths as values.
"""
srcs = [textproto] + modules.values()
module_list = ",".join(["{}:$(location {})".format(name, path) for (name, path) in modules.items()])
cmd = "$(location //oak/common:app_config_serializer)" + \
" --textproto=$(location {})".format(textproto) + \
" --modules={}".format(module_list) + \
" --output_file=$@"
native.genrule(
name = name,
srcs = srcs,
outs = [name],
cmd = cmd,
tools = ["//oak/common:app_config_serializer"],
)
| apache-2.0 | Python |
7204f6d4e957c3f15068e0bbf72f822717193b39 | Add serializer param to RPC service | NaohiroTamura/ironic-lib,faizan-barmawer/ironic-lib,citrix-openstack-build/ironic-lib,faizan-barmawer/elytics,openstack/ironic-lib | ironic/openstack/common/rpc/service.py | ironic/openstack/common/rpc/service.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# Copyright 2011 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ironic.openstack.common.gettextutils import _
from ironic.openstack.common import log as logging
from ironic.openstack.common import rpc
from ironic.openstack.common.rpc import dispatcher as rpc_dispatcher
from ironic.openstack.common import service
LOG = logging.getLogger(__name__)
class Service(service.Service):
"""Service object for binaries running on hosts.
A service enables rpc by listening to queues based on topic and host."""
def __init__(self, host, topic, manager=None, serializer=None):
super(Service, self).__init__()
self.host = host
self.topic = topic
self.serializer = serializer
if manager is None:
self.manager = self
else:
self.manager = manager
def start(self):
super(Service, self).start()
self.conn = rpc.create_connection(new=True)
LOG.debug(_("Creating Consumer connection for Service %s") %
self.topic)
dispatcher = rpc_dispatcher.RpcDispatcher([self.manager],
self.serializer)
# Share this same connection for these Consumers
self.conn.create_consumer(self.topic, dispatcher, fanout=False)
node_topic = '%s.%s' % (self.topic, self.host)
self.conn.create_consumer(node_topic, dispatcher, fanout=False)
self.conn.create_consumer(self.topic, dispatcher, fanout=True)
# Hook to allow the manager to do other initializations after
# the rpc connection is created.
if callable(getattr(self.manager, 'initialize_service_hook', None)):
self.manager.initialize_service_hook(self)
# Consume from all consumers in a thread
self.conn.consume_in_thread()
def stop(self):
# Try to shut the connection down, but if we get any sort of
# errors, go ahead and ignore them.. as we're shutting down anyway
try:
self.conn.close()
except Exception:
pass
super(Service, self).stop()
| # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# Copyright 2011 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ironic.openstack.common.gettextutils import _
from ironic.openstack.common import log as logging
from ironic.openstack.common import rpc
from ironic.openstack.common.rpc import dispatcher as rpc_dispatcher
from ironic.openstack.common import service
LOG = logging.getLogger(__name__)
class Service(service.Service):
"""Service object for binaries running on hosts.
A service enables rpc by listening to queues based on topic and host."""
def __init__(self, host, topic, manager=None):
super(Service, self).__init__()
self.host = host
self.topic = topic
if manager is None:
self.manager = self
else:
self.manager = manager
def start(self):
super(Service, self).start()
self.conn = rpc.create_connection(new=True)
LOG.debug(_("Creating Consumer connection for Service %s") %
self.topic)
dispatcher = rpc_dispatcher.RpcDispatcher([self.manager])
# Share this same connection for these Consumers
self.conn.create_consumer(self.topic, dispatcher, fanout=False)
node_topic = '%s.%s' % (self.topic, self.host)
self.conn.create_consumer(node_topic, dispatcher, fanout=False)
self.conn.create_consumer(self.topic, dispatcher, fanout=True)
# Hook to allow the manager to do other initializations after
# the rpc connection is created.
if callable(getattr(self.manager, 'initialize_service_hook', None)):
self.manager.initialize_service_hook(self)
# Consume from all consumers in a thread
self.conn.consume_in_thread()
def stop(self):
# Try to shut the connection down, but if we get any sort of
# errors, go ahead and ignore them.. as we're shutting down anyway
try:
self.conn.close()
except Exception:
pass
super(Service, self).stop()
| apache-2.0 | Python |
bf73e73c93c323a6b7395b3a1d40dd55dea4b65a | Update init file with descriptions | bgyori/indra,johnbachman/indra,johnbachman/indra,sorgerlab/indra,sorgerlab/belpy,sorgerlab/belpy,sorgerlab/belpy,bgyori/indra,johnbachman/indra,sorgerlab/indra,bgyori/indra,sorgerlab/indra | indra/sources/dgi/__init__.py | indra/sources/dgi/__init__.py | # -*- coding: utf-8 -*-
"""A processor for the `Drug Gene Interaction Database (DGI-DB) <http://www.dgidb.org>`_.
* `Integration of the Drug–Gene Interaction Database (DGIdb 4.0) with open crowdsource efforts
<https://doi.org/10.1093/nar/gkaa1084>`_. Freshour, *et al*. Nucleic Acids Research. 2020 Nov 25.
Interactions data from the January 2021 release can be obtained at the
following URLs:
* https://www.dgidb.org/data/monthly_tsvs/2021-Jan/interactions.tsv
"""
from .api import get_version_df
from .processor import DGIProcessor
| bsd-2-clause | Python | |
571e4d827aee1445ed40467f7c394a29f1b730db | Remove request argument | FedorSelitsky/eventrack,FedorSelitsky/eventrack,FedorSelitsky/eventrack,FedorSelitsky/eventrack | user/views.py | user/views.py | from django.http import HttpResponseRedirect
from django.views import generic
from django.shortcuts import render, reverse
from django.contrib.auth import login, logout
from .forms import SignupForm, SigninForm, UpdateForm
from .models import User
class ProfileView(generic.ListView):
model = User
template_name = 'user/profile.html'
def signup(request):
if request.user.is_authenticated():
return HttpResponseRedirect(reverse('event:index'))
if request.method == 'POST':
form = SignupForm(request.POST)
if form.is_valid():
form.save()
user = form.signin()
if user is not None:
login(request, user)
return HttpResponseRedirect(reverse('event:index'))
else:
return render(request, 'user/signup.html', {'form': form, 'error': True})
else:
form = SignupForm()
return render(request, 'user/signup.html', {'form': form})
def signin(request):
if request.user.is_authenticated():
return HttpResponseRedirect(reverse('event:index'))
if request.method == 'POST':
form = SigninForm(data=request.POST)
if form.is_valid():
user = form.signin()
if user is not None:
login(request, user)
return HttpResponseRedirect(reverse('event:index'))
else:
return render(request, 'user/signin.html', {'form': form, 'error': True})
else:
form = SigninForm()
return render(request, 'user/signin.html', {'form': form})
def signout(request):
logout(request)
return HttpResponseRedirect(reverse('event:index'))
def update(request):
if request.method == 'POST':
form = UpdateForm(data=request.POST, instance=request.user)
if form.is_valid():
form.save()
else:
return render(request, 'user/profile.html', {'form': form, 'error': True})
else:
form = UpdateForm(instance=request.user)
return render(request, 'user/profile.html', {'form': form})
| from django.http import HttpResponseRedirect
from django.views import generic
from django.shortcuts import render, reverse
from django.contrib.auth import login, logout
from .forms import SignupForm, SigninForm, UpdateForm
from .models import User
class ProfileView(generic.ListView):
model = User
template_name = 'user/profile.html'
def signup(request):
if request.user.is_authenticated():
return HttpResponseRedirect(reverse('event:index'))
if request.method == 'POST':
form = SignupForm(request.POST)
if form.is_valid():
form.save()
user = form.signin(request)
if user is not None:
login(request, user)
return HttpResponseRedirect(reverse('event:index'))
else:
return render(request, 'user/signup.html', {'form': form, 'error': True})
else:
form = SignupForm()
return render(request, 'user/signup.html', {'form': form})
def signin(request):
if request.user.is_authenticated():
return HttpResponseRedirect(reverse('event:index'))
if request.method == 'POST':
form = SigninForm(data=request.POST)
if form.is_valid():
user = form.signin(request)
if user is not None:
login(request, user)
return HttpResponseRedirect(reverse('event:index'))
else:
return render(request, 'user/signin.html', {'form': form, 'error': True})
else:
form = SigninForm()
return render(request, 'user/signin.html', {'form': form})
def signout(request):
logout(request)
return HttpResponseRedirect(reverse('event:index'))
def update(request):
if request.method == 'POST':
form = UpdateForm(data=request.POST, instance=request.user)
if form.is_valid():
form.save()
else:
return render(request, 'user/profile.html', {'form': form, 'error': True})
else:
form = UpdateForm(instance=request.user)
return render(request, 'user/profile.html', {'form': form})
| mit | Python |
fecdc92a9be73465e268077eccd00ea75622bd72 | fix closed acc bug | sudoguy/instabot,rasperepodvipodvert/instabot,misisnik/testinsta,instagrambot/instabot,vkgrd/instabot,Diapostrofo/instabot,ohld/instabot,misisnik/testinsta,instagrambot/instapro,instagrambot/instabot,AlexBGoode/instabot | instabot/bot/bot_like_feed.py | instabot/bot/bot_like_feed.py | import time
import random
def like_timeline(bot, amount=None):
""" Likes last 8 medias from timeline feed """
print ("Liking timeline feed:")
if amount is not None and amount > 8:
amount = 8
print (" Can't request more than 8 medias from timeline... yet")
if not bot.getTimelineFeed():
print (" Error while getting timeline feed")
return False
not_liked_feed = filter_not_liked(bot.LastJson["items"][:amount])
return bot.like_medias(not_liked_feed)
def like_user_id(bot, user_id, amount=None):
""" Likes last username's medias """
print ("Liking user_%s's feed:" % user_id)
if isinstance(user_id, int):
user_id = str(user_id)
if not user_id.isdigit():
print ("You should pass user_id, not user's login.")
if amount is not None and amount > 16:
amount = 16
print (" Can't request more that 16 medias from user's feed... yet")
bot.getUserFeed(user_id)
if bot.LastJson["status"] == 'fail':
print (" This is a closed account")
return False
not_liked_feed = filter_not_liked(bot.LastJson["items"][:amount])
return bot.like_medias(not_liked_feed)
def filter_not_liked(media_items):
not_liked_medias = [item["pk"] for item in media_items if not item["has_liked"]]
print (" Recieved: %d. Already liked: %d." % (
len(media_items),
len(media_items) - len(not_liked_medias)
)
)
return not_liked_medias
| import time
import random
def like_timeline(bot, amount=None):
""" Likes last 8 medias from timeline feed """
print ("Liking timeline feed:")
if amount is not None and amount > 8:
amount = 8
print (" Can't request more than 8 medias from timeline... yet")
if not bot.getTimelineFeed():
print (" Error while getting timeline feed")
return False
not_liked_feed = filter_not_liked(bot.LastJson["items"][:amount])
return bot.like_medias(not_liked_feed)
def like_user_id(bot, user_id, amount=None):
""" Likes last username's medias """
print ("Liking user_%s's feed:" % user_id)
if isinstance(user_id, int):
user_id = str(user_id)
if not user_id.isdigit():
print ("You should pass user_id, not user's login.")
if amount is not None and amount > 16:
amount = 16
print (" Can't request more that 16 medias from user's feed... yet")
bot.getUserFeed(user_id)
not_liked_feed = filter_not_liked(bot.LastJson["items"][:amount])
return bot.like_medias(not_liked_feed)
def filter_not_liked(media_items):
not_liked_medias = [item["pk"] for item in media_items if not item["has_liked"]]
print (" Recieved: %d. Already liked: %d." % (
len(media_items),
len(media_items) - len(not_liked_medias)
)
)
return not_liked_medias
| apache-2.0 | Python |
c1b4b2adc84dad470dc90f0aa898b41ae6fe7162 | remove tests for infer_repo as it no longer exists | ceph/ice-setup | ice_setup/tests/test_system.py | ice_setup/tests/test_system.py | import os
import tempfile
from pytest import raises
import pytest
from textwrap import dedent
from ice_setup.ice import get_fqdn, ICEError, DirNotFound
class FakeSocket(object):
pass
@pytest.fixture
def cephdeploy_conf():
path = tempfile.mkstemp()
def fin():
os.remove(path)
return path[-1]
class TestGetFQDN(object):
def setup(self):
self.sock = FakeSocket()
def test_dot_local_fqdn(self):
self.sock.getfqdn = lambda: 'alfredo.local'
assert get_fqdn(_socket=self.sock) is None
def test_localhost(self):
self.sock.getfqdn = lambda: 'localhost'
assert get_fqdn(_socket=self.sock) is None
def test_valid_fqdn(self):
self.sock.getfqdn = lambda: 'zombo.com'
assert get_fqdn(_socket=self.sock) == 'zombo.com'
| import os
import tempfile
from pytest import raises
import pytest
from textwrap import dedent
from ice_setup.ice import get_fqdn, infer_ceph_repo, ICEError, DirNotFound
class FakeSocket(object):
pass
@pytest.fixture
def cephdeploy_conf():
path = tempfile.mkstemp()
def fin():
os.remove(path)
return path[-1]
class TestGetFQDN(object):
def setup(self):
self.sock = FakeSocket()
def test_dot_local_fqdn(self):
self.sock.getfqdn = lambda: 'alfredo.local'
assert get_fqdn(_socket=self.sock) is None
def test_localhost(self):
self.sock.getfqdn = lambda: 'localhost'
assert get_fqdn(_socket=self.sock) is None
def test_valid_fqdn(self):
self.sock.getfqdn = lambda: 'zombo.com'
assert get_fqdn(_socket=self.sock) == 'zombo.com'
class TestInferCephRepo(object):
def test_does_not_find_cephdeployconf(self):
with raises(DirNotFound):
infer_ceph_repo(_configs=[''])
def test_does_not_find_a_ceph_repo_section(self, cephdeploy_conf):
with raises(ICEError):
infer_ceph_repo(_configs=[cephdeploy_conf])
def test_does_find_a_ceph_repo_section(self, cephdeploy_conf):
print cephdeploy_conf
with open(cephdeploy_conf, 'w') as f:
f.write(dedent("""
[ceph]
baseurl=http://fqdn/static/ceph/0.80
"""))
result = infer_ceph_repo(_configs=[cephdeploy_conf])
assert result == '/opt/calamari/webapp/content/ceph/0.80'
def test_deals_with_non_trailing_slashes(self, cephdeploy_conf):
print cephdeploy_conf
with open(cephdeploy_conf, 'w') as f:
f.write(dedent("""
[ceph]
baseurl=http://fqdn/static/ceph/0.80/
"""))
result = infer_ceph_repo(_configs=[cephdeploy_conf])
assert result == '/opt/calamari/webapp/content/ceph/0.80'
| mit | Python |
5db0dfc3ada74f338a598d7e82405ba47556d79d | Update models.py | flysmoke/ijizhang,flysmoke/ijizhang,flysmoke/ijizhang,flysmoke/ijizhang | ijizhang_prj/jizhang/models.py | ijizhang_prj/jizhang/models.py | #coding=utf-8
from django.db import models
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
from django.core.urlresolvers import reverse
# Create your models here.
class Category(models.Model):
INCOME_CHOICES = (
(True, _(u'收入')),
(False, _(u'支出') ),
)
p_category = models.ForeignKey('self', null = True, blank = True, verbose_name=_(u"父类名称"), related_name='child')
name = models.CharField(max_length=20, verbose_name=_(u"类别名称"))
isIncome = models.BooleanField(choices=INCOME_CHOICES, verbose_name='是否收入')
user = models.ForeignKey(User,verbose_name='所属用户')
def __unicode__(self):
return self.name
def get_absolute_url(self):
return '%s' % (reverse('jizhang:index_category_item', args=[self.id]))
class Item(models.Model):
price = models.DecimalField(max_digits=10, decimal_places=2, verbose_name='金额')
comment = models.CharField(max_length=200, blank = True, verbose_name='注释')
pub_date = models.DateField(verbose_name='日期')
category = models.ForeignKey(Category,verbose_name='分类')
def __unicode__(self):
return str(self.price)
| #coding=utf-8
from django.db import models
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
from django.core.urlresolvers import reverse
# Create your models here.
class Category(models.Model):
INCOME_CHOICES = (
(True, _(u'收入')),
(False, _(u'支出') ),
)
p_category = models.ForeignKey('self', null = True, blank = True, verbose_name=_(u"父类名称"))
name = models.CharField(max_length=20, verbose_name=_(u"类别名称"))
isIncome = models.BooleanField(choices=INCOME_CHOICES, verbose_name='是否收入')
user = models.ForeignKey(User,verbose_name='所属用户')
def __unicode__(self):
return self.name
def get_absolute_url(self):
return '%s' % (reverse('jizhang:index_category_item', args=[self.id]))
class Item(models.Model):
price = models.DecimalField(max_digits=10, decimal_places=2, verbose_name='金额')
comment = models.CharField(max_length=200, blank = True, verbose_name='注释')
pub_date = models.DateField(verbose_name='日期')
category = models.ForeignKey(Category,verbose_name='分类')
def __unicode__(self):
return str(self.price)
| mit | Python |
64a78085fffe8dc525596b870c8e150d9171f271 | Fix issue where Pulsar would enter a restart loop when cancelling a buffering | likeitneverwentaway/plugin.video.quasar,komakino/plugin.video.pulsar,johnnyslt/plugin.video.quasar,elrosti/plugin.video.pulsar,Zopieux/plugin.video.pulsar,pmphxs/plugin.video.pulsar,johnnyslt/plugin.video.quasar,steeve/plugin.video.pulsar,peer23peer/plugin.video.quasar,peer23peer/plugin.video.quasar,likeitneverwentaway/plugin.video.quasar | resources/site-packages/pulsar/monitor.py | resources/site-packages/pulsar/monitor.py | import xbmc
import urllib2
import threading
from pulsar.config import PULSARD_HOST
class PulsarMonitor(xbmc.Monitor):
def __init__(self):
self._closing = threading.Event()
@property
def closing(self):
return self._closing
def onAbortRequested(self):
# Only when closing Kodi
if xbmc.abortRequested:
self._closing.set()
self._closing.clear()
def onSettingsChanged(self):
try:
urllib2.urlopen("%s/reload" % PULSARD_HOST)
except:
pass
| import xbmc
import urllib2
import threading
from pulsar.config import PULSARD_HOST
class PulsarMonitor(xbmc.Monitor):
def __init__(self):
self._closing = threading.Event()
@property
def closing(self):
return self._closing
def onAbortRequested(self):
self._closing.set()
def onSettingsChanged(self):
try:
urllib2.urlopen("%s/reload" % PULSARD_HOST)
except:
pass
| bsd-3-clause | Python |
822571366271b5dca0ac8bf41df988c6a3b61432 | Bump version. | concordusapps/alchemist | alchemist/_version.py | alchemist/_version.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import, division
__version_info__ = (0, 3, 12)
__version__ = '.'.join(map(str, __version_info__))
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import, division
__version_info__ = (0, 3, 11)
__version__ = '.'.join(map(str, __version_info__))
| mit | Python |
d91b32f9f14787d45e271561094a5372990adea4 | Fix Meta/__meta__ resolution over more than two levels of inheritance | biosustain/venom | venom/util.py | venom/util.py | from typing import Dict, Any, Tuple
# FIXME should be Generic
class AttributeDict(dict):
    """A dict whose entries can also be read and written as attributes."""
    def __getattr__(self, name):
        # Called only when normal attribute lookup fails; falls through to
        # item access, so a missing key raises KeyError like d[name] would.
        return self[name]
    def __setattr__(self, name, value):
        # Every attribute write becomes an item assignment.
        self[name] = value
def _meta_obj_to_dict(meta_obj):
dct = {}
for k, v in meta_obj.__dict__.items():
if not k.startswith('__'):
dct[k] = v
return dct
# FIXME should be AttributeDict[str, Any]
MetaDict = AttributeDict
def meta(bases, members, meta_name='Meta') -> Tuple[MetaDict, MetaDict]:
    """Merge ``Meta`` options down an inheritance chain.

    :param bases: base classes, scanned in order; a base's already-resolved
        ``__meta__`` dict is preferred over its raw ``Meta`` class so options
        survive more than two levels of inheritance.
    :param members: the namespace of the class being built; its own ``Meta``
        overrides everything inherited.
    :param meta_name: attribute name of the inner options class.
    :return: ``(merged, changes)`` — the full merged options and the subset
        declared directly on *members*.
    """
    meta_ = AttributeDict()
    for base in bases:
        # Prefer the merged __meta__ (set on already-constructed classes);
        # fall back to converting the raw Meta class.
        if hasattr(base, '__meta__') and base.__meta__ is not None:
            meta_.update(base.__meta__)
        elif hasattr(base, meta_name):
            meta_.update(_meta_obj_to_dict(getattr(base, meta_name)))
    changes = {}
    if meta_name in members:
        changes = _meta_obj_to_dict(members[meta_name])
    meta_.update(changes)
    return meta_, changes
def upper_camelcase(s: str) -> str:
    """Turn an underscore_separated name into UpperCamelCase (e.g. 'foo_bar' -> 'FooBar')."""
    # Title-casing each '_'-separated piece and joining is equivalent to
    # title-casing the whole string and stripping the underscores, because
    # '_' is a word boundary for str.title().
    return ''.join(part.title() for part in s.split('_'))
class cached_property(object):
    """
    Descriptor (non-data) for building an attribute on-demand on first use.

    After the first access the computed value is stored on the instance
    under the factory's name, shadowing this descriptor for later reads.
    """
    def __init__(self, factory):
        """
        <factory> is called such: factory(instance) to build the attribute.
        """
        self._attr_name = factory.__name__
        self._factory = factory
        # Keep the factory's docstring visible, like property() does.
        self.__doc__ = getattr(factory, '__doc__', None)

    def __get__(self, instance, owner):
        # Class-level access (instance is None) previously crashed by calling
        # factory(None); return the descriptor itself, mirroring property().
        if instance is None:
            return self
        # Build the attribute.
        attr = self._factory(instance)
        # Cache the value; hide ourselves.
        setattr(instance, self._attr_name, attr)
        return attr
| from typing import Dict, Any, Tuple
# FIXME should be Generic
class AttributeDict(dict):
__getattr__ = dict.__getitem__
__setattr__ = dict.__setitem__
def _meta_obj_to_dict(meta_obj):
dct = {}
for k, v in meta_obj.__dict__.items():
if not k.startswith('__'):
dct[k] = v
return dct
# FIXME should be AttributeDict[str, Any]
MetaDict = AttributeDict
def meta(bases, members, meta_name='Meta') -> Tuple[MetaDict, MetaDict]:
meta_ = AttributeDict()
for base in bases:
if hasattr(base, meta_name):
meta_.update(_meta_obj_to_dict(base.Meta))
changes = {}
if meta_name in members:
changes = _meta_obj_to_dict(members[meta_name])
meta_.update(changes)
return meta_, changes
def upper_camelcase(s: str) -> str:
return s.title().replace('_', '')
class cached_property(object):
"""
Descriptor (non-data) for building an attribute on-demand on first use.
"""
def __init__(self, factory):
"""
<factory> is called such: factory(instance) to build the attribute.
"""
self._attr_name = factory.__name__
self._factory = factory
def __get__(self, instance, owner):
# Build the attribute.
attr = self._factory(instance)
# Cache the value; hide ourselves.
setattr(instance, self._attr_name, attr)
return attr
| mit | Python |
adc533b76845fa7055ab41bd669ea8145e966849 | Fix default_seat_identifier | FodT/t1-python,pswaminathan/t1-python,Cawb07/t1-python,MediaMath/t1-python | terminalone/models/supplysource.py | terminalone/models/supplysource.py | # -*- coding: utf-8 -*-
"""Provides supply source object."""
from __future__ import absolute_import
from ..entity import Entity
class SupplySource(Entity):
    """SupplySource object"""
    collection = 'supply_sources'
    resource = 'supply_source'
    # Related entities that can be expanded when fetching.
    _relations = {
        'parent_supply',
    }
    _rtb_types = Entity._enum({'STANDARD', 'MARKETPLACE'}, None)
    _supply_types = Entity._enum({'exchange', 'data'}, None)
    # Field deserializers applied when pulling from the API; None leaves the
    # raw value untouched (default_seat_identifier is an opaque string, not
    # a timestamp).
    _pull = {
        'bidder_exchange_identifier': int,
        'code': None,
        'created_on': Entity._strpt,
        'default_seat_identifier': None,
        'distribute': Entity._int_to_bool,
        'has_display': Entity._int_to_bool,
        'has_mobile_display': Entity._int_to_bool,
        'has_mobile_video': Entity._int_to_bool,
        'has_video': Entity._int_to_bool,
        'id': int,
        'is_proservice': Entity._int_to_bool,
        'mm_safe': Entity._int_to_bool,
        'parent_supply_id': int,
        'pixel_tag': None,
        'pmp_enabled': Entity._int_to_bool,
        'rtb_enabled': Entity._int_to_bool,
        'rtb_type': None,
        'seat_enabled': Entity._int_to_bool,
        'status': Entity._int_to_bool,
        'supply_type': None,
        'updated_on': Entity._strpt,
        'use_pool': Entity._int_to_bool,
        'version': int,
    }
    # Serializers for pushing back to the API: booleans become 0/1 ints.
    _push = _pull.copy()
    _push.update({
        'distribute': int,
        'has_display': int,
        'has_mobile_display': int,
        'has_mobile_video': int,
        'has_video': int,
        'is_proservice': int,
        'mm_safe': int,
        'pmp_enabled': int,
        'rtb_enabled': int,
        'seat_enabled': int,
        'status': int,
        'use_pool': int,
    })
    def __init__(self, session, properties=None, **kwargs):
        """Instantiate a SupplySource bound to *session*."""
        super(SupplySource, self).__init__(session, properties, **kwargs)
| # -*- coding: utf-8 -*-
"""Provides supply source object."""
from __future__ import absolute_import
from ..entity import Entity
class SupplySource(Entity):
"""SupplySource object"""
collection = 'supply_sources'
resource = 'supply_source'
_relations = {
'parent_supply',
}
_rtb_types = Entity._enum({'STANDARD', 'MARKETPLACE'}, None)
_supply_types = Entity._enum({'exchange', 'data'}, None)
_pull = {
'id': int,
'bidder_exchange_identifier': int,
'code': None,
'created_on': Entity._strpt,
'default_seat_identifier': Entity._strpt,
'distribute': Entity._int_to_bool,
'has_display': Entity._int_to_bool,
'has_mobile_display': Entity._int_to_bool,
'has_mobile_video': Entity._int_to_bool,
'has_video': Entity._int_to_bool,
'is_proservice': Entity._int_to_bool,
'mm_safe': Entity._int_to_bool,
'parent_supply_id': int,
'pixel_tag': None,
'pmp_enabled': Entity._int_to_bool,
'rtb_enabled': Entity._int_to_bool,
'rtb_type': None,
'seat_enabled': Entity._int_to_bool,
'status': Entity._int_to_bool,
'supply_type': None,
'updated_on': Entity._strpt,
'use_pool': Entity._int_to_bool,
'version': int,
}
_push = _pull.copy()
_push.update({
'distribute': int,
'has_display': int,
'has_mobile_display': int,
'has_mobile_video': int,
'has_video': int,
'is_proservice': int,
'mm_safe': int,
'pmp_enabled': int,
'rtb_enabled': int,
'seat_enabled': int,
'status': int,
'use_pool': int,
})
def __init__(self, session, properties=None, **kwargs):
super(SupplySource, self).__init__(session, properties, **kwargs)
| apache-2.0 | Python |
102f3768544c180395b5b044ad0c0bf628d5f89a | Fix import | Zaneh-/bearded-tribble-back,forging2012/taiga-back,forging2012/taiga-back,coopsource/taiga-back,frt-arch/taiga-back,Zaneh-/bearded-tribble-back,gam-phon/taiga-back,forging2012/taiga-back,CoolCloud/taiga-back,dycodedev/taiga-back,astagi/taiga-back,dycodedev/taiga-back,CMLL/taiga-back,gauravjns/taiga-back,gauravjns/taiga-back,CMLL/taiga-back,gam-phon/taiga-back,obimod/taiga-back,obimod/taiga-back,astronaut1712/taiga-back,crr0004/taiga-back,19kestier/taiga-back,Tigerwhit4/taiga-back,dayatz/taiga-back,crr0004/taiga-back,EvgeneOskin/taiga-back,xdevelsistemas/taiga-back-community,EvgeneOskin/taiga-back,bdang2012/taiga-back-casting,WALR/taiga-back,jeffdwyatt/taiga-back,astronaut1712/taiga-back,gauravjns/taiga-back,frt-arch/taiga-back,Rademade/taiga-back,jeffdwyatt/taiga-back,jeffdwyatt/taiga-back,gam-phon/taiga-back,19kestier/taiga-back,CoolCloud/taiga-back,Tigerwhit4/taiga-back,rajiteh/taiga-back,joshisa/taiga-back,astronaut1712/taiga-back,joshisa/taiga-back,coopsource/taiga-back,crr0004/taiga-back,CMLL/taiga-back,gauravjns/taiga-back,dycodedev/taiga-back,dycodedev/taiga-back,coopsource/taiga-back,Tigerwhit4/taiga-back,EvgeneOskin/taiga-back,WALR/taiga-back,gam-phon/taiga-back,Rademade/taiga-back,seanchen/taiga-back,taigaio/taiga-back,WALR/taiga-back,rajiteh/taiga-back,obimod/taiga-back,taigaio/taiga-back,rajiteh/taiga-back,bdang2012/taiga-back-casting,bdang2012/taiga-back-casting,rajiteh/taiga-back,xdevelsistemas/taiga-back-community,joshisa/taiga-back,Rademade/taiga-back,Rademade/taiga-back,Tigerwhit4/taiga-back,forging2012/taiga-back,bdang2012/taiga-back-casting,seanchen/taiga-back,crr0004/taiga-back,Rademade/taiga-back,taigaio/taiga-back,xdevelsistemas/taiga-back-community,coopsource/taiga-back,seanchen/taiga-back,dayatz/taiga-back,astagi/taiga-back,astronaut1712/taiga-back,frt-arch/taiga-back,astagi/taiga-back,obimod/taiga-back,CoolCloud/taiga-back,seanchen/taiga-back,joshisa/taiga-back,astagi/taiga-back,dayatz/
taiga-back,jeffdwyatt/taiga-back,19kestier/taiga-back,CMLL/taiga-back,EvgeneOskin/taiga-back,Zaneh-/bearded-tribble-back,CoolCloud/taiga-back,WALR/taiga-back | taiga/projects/votes/serializers.py | taiga/projects/votes/serializers.py | from rest_framework import serializers
from taiga.users.models import User
class VoterSerializer(serializers.ModelSerializer):
    """Serializes a voting user for API output."""
    # Computed from User.get_full_name(); optional on input.
    full_name = serializers.CharField(source='get_full_name', required=False)
    class Meta:
        model = User
        fields = ('id', 'username', 'first_name', 'last_name', 'full_name')
| from django.contrib.auth import get_user_model
from rest_framework import serializers
class VoterSerializer(serializers.ModelSerializer):
full_name = serializers.CharField(source='get_full_name', required=False)
class Meta:
model = get_user_model()
fields = ('id', 'username', 'first_name', 'last_name', 'full_name')
| agpl-3.0 | Python |
6d93bf9cac2d8c5a2388d8681c29dc24a7490502 | disable github api test on ci | sdpython/pyquickhelper,sdpython/pyquickhelper,sdpython/pyquickhelper,sdpython/pyquickhelper | _unittests/ut_loghelper/test_github_api.py | _unittests/ut_loghelper/test_github_api.py | """
@brief test tree node (time=12s)
"""
import sys
import os
import unittest
try:
import src
except ImportError:
path = os.path.normpath(
os.path.abspath(
os.path.join(
os.path.split(__file__)[0],
"..",
"..")))
if path not in sys.path:
sys.path.append(path)
import src
from src.pyquickhelper.loghelper.flog import fLOG
from src.pyquickhelper.pycode import is_travis_or_appveyor
from src.pyquickhelper.loghelper.github_api import call_github_api, GitHubApiException
class TestGitHub(unittest.TestCase):
    """Exercises the GitHub REST API wrapper against live endpoints."""
    def test_github_api(self):
        fLOG(
            __file__,
            self._testMethodName,
            OutputPrint=__name__ == "__main__")
        if is_travis_or_appveyor():
            # Skip on any CI: the unauthenticated GitHub rate limit is
            # shared and too many projects make calls from there.
            return
        pulls = call_github_api("sdpython", "pyquickhelper", "issues")
        self.assertIsInstance(pulls, list)
        self.assertTrue(len(pulls) > 0)
        pr = call_github_api("scikit-learn", "scikit-learn", "pulls")
        self.assertIsInstance(pr, list)
        self.assertTrue(len(pr) > 0)
        stats = call_github_api(
            "scikit-learn", "scikit-learn", "stats/commit_activity")
        self.assertIsInstance(stats, list)
        self.assertTrue(len(stats) > 0)
        try:
            # NOTE(review): expected to fail — presumably traffic endpoints
            # need repo access; the wrapper must surface the API error.
            call_github_api("scikit-learn", "scikit-learn", "traffic/views")
            self.assertTrue(False)
        except GitHubApiException as e:
            self.assertIn("message", str(e))
if __name__ == "__main__":
    unittest.main()
| """
@brief test tree node (time=12s)
"""
import sys
import os
import unittest
try:
import src
except ImportError:
path = os.path.normpath(
os.path.abspath(
os.path.join(
os.path.split(__file__)[0],
"..",
"..")))
if path not in sys.path:
sys.path.append(path)
import src
from src.pyquickhelper.loghelper.flog import fLOG
from src.pyquickhelper.pycode import is_travis_or_appveyor
from src.pyquickhelper.loghelper.github_api import call_github_api, GitHubApiException
class TestGitHub(unittest.TestCase):
def test_github_api(self):
fLOG(
__file__,
self._testMethodName,
OutputPrint=__name__ == "__main__")
if is_travis_or_appveyor() == "travis":
# Too many calls from many projects.
return
pulls = call_github_api("sdpython", "pyquickhelper", "issues")
self.assertIsInstance(pulls, list)
self.assertTrue(len(pulls) > 0)
pr = call_github_api("scikit-learn", "scikit-learn", "pulls")
self.assertIsInstance(pr, list)
self.assertTrue(len(pr) > 0)
stats = call_github_api(
"scikit-learn", "scikit-learn", "stats/commit_activity")
self.assertIsInstance(stats, list)
self.assertTrue(len(stats) > 0)
try:
call_github_api("scikit-learn", "scikit-learn", "traffic/views")
self.assertTrue(False)
except GitHubApiException as e:
self.assertIn("message", str(e))
if __name__ == "__main__":
unittest.main()
| mit | Python |
9915d7ce33897409e23ce007806fa1f5f34e183b | Drop (py3.6+) environment printout in test example | originell/jpype,originell/jpype,originell/jpype,originell/jpype,originell/jpype | jpype/_pyinstaller/example.py | jpype/_pyinstaller/example.py | import os
import jpype
import jpype.imports
print('+++ about to start JVM')
jpype.startJVM()
print('+++ JVM started')
| import os
import jpype
import jpype.imports
from jpype.types import *
for key, value in sorted(os.environ.items()):
print(f'{key!r}: {value!r}')
print('+++ about to start JVM')
jpype.startJVM()
print('+++ JVM started')
| apache-2.0 | Python |
fb99e7248441353ab1e4a271102ba269b331c6b8 | Update libchromiumcontent to have mas build | simongregory/electron,joaomoreno/atom-shell,aliib/electron,rajatsingla28/electron,felixrieseberg/electron,arturts/electron,kcrt/electron,nekuz0r/electron,minggo/electron,voidbridge/electron,deed02392/electron,dongjoon-hyun/electron,leftstick/electron,twolfson/electron,kokdemo/electron,astoilkov/electron,voidbridge/electron,tinydew4/electron,brenca/electron,the-ress/electron,voidbridge/electron,tylergibson/electron,bbondy/electron,matiasinsaurralde/electron,thingsinjars/electron,miniak/electron,kcrt/electron,astoilkov/electron,thompsonemerson/electron,stevekinney/electron,etiktin/electron,simongregory/electron,aichingm/electron,bbondy/electron,gabriel/electron,Floato/electron,jhen0409/electron,brave/muon,gabriel/electron,Evercoder/electron,gerhardberger/electron,arturts/electron,MaxWhere/electron,IonicaBizauKitchen/electron,Floato/electron,Gerhut/electron,Gerhut/electron,rajatsingla28/electron,aliib/electron,deed02392/electron,arturts/electron,rreimann/electron,tonyganch/electron,tinydew4/electron,matiasinsaurralde/electron,thompsonemerson/electron,ankitaggarwal011/electron,dongjoon-hyun/electron,pombredanne/electron,jaanus/electron,Floato/electron,thompsonemerson/electron,brave/muon,aichingm/electron,joaomoreno/atom-shell,brave/muon,seanchas116/electron,deed02392/electron,astoilkov/electron,wan-qy/electron,etiktin/electron,the-ress/electron,MaxWhere/electron,tylergibson/electron,noikiy/electron,meowlab/electron,electron/electron,nekuz0r/electron,aliib/electron,wan-qy/electron,tylergibson/electron,roadev/electron,jhen0409/electron,thomsonreuters/electron,roadev/electron,ankitaggarwal011/electron,biblerule/UMCTelnetHub,renaesop/electron,nekuz0r/electron,preco21/electron,tylergibson/electron,biblerule/UMCTelnetHub,the-ress/electron,renaesop/electron,roadev/electron,felixrieseberg/electron,shiftkey/electron,astoilkov/electron,preco21/electron,gabriel/electron,twolfson/electron,rreim
ann/electron,matiasinsaurralde/electron,the-ress/electron,noikiy/electron,gabriel/electron,deed02392/electron,leethomas/electron,gabriel/electron,tonyganch/electron,deed02392/electron,IonicaBizauKitchen/electron,wan-qy/electron,voidbridge/electron,simongregory/electron,shiftkey/electron,leethomas/electron,evgenyzinoviev/electron,rreimann/electron,astoilkov/electron,aliib/electron,electron/electron,electron/electron,biblerule/UMCTelnetHub,aliib/electron,kcrt/electron,noikiy/electron,kokdemo/electron,etiktin/electron,felixrieseberg/electron,Gerhut/electron,matiasinsaurralde/electron,Floato/electron,posix4e/electron,brave/muon,minggo/electron,meowlab/electron,seanchas116/electron,bbondy/electron,gerhardberger/electron,jaanus/electron,tonyganch/electron,IonicaBizauKitchen/electron,MaxWhere/electron,tinydew4/electron,miniak/electron,arturts/electron,ankitaggarwal011/electron,nekuz0r/electron,joaomoreno/atom-shell,thomsonreuters/electron,lzpfmh/electron,meowlab/electron,minggo/electron,pombredanne/electron,gerhardberger/electron,Floato/electron,lzpfmh/electron,preco21/electron,kokdemo/electron,thomsonreuters/electron,leftstick/electron,minggo/electron,pombredanne/electron,brave/electron,lzpfmh/electron,bbondy/electron,seanchas116/electron,etiktin/electron,aliib/electron,jhen0409/electron,brave/electron,leftstick/electron,wan-qy/electron,rajatsingla28/electron,shiftkey/electron,posix4e/electron,rajatsingla28/electron,dongjoon-hyun/electron,noikiy/electron,leftstick/electron,joaomoreno/atom-shell,thompsonemerson/electron,simongregory/electron,brave/electron,noikiy/electron,meowlab/electron,voidbridge/electron,jaanus/electron,bpasero/electron,roadev/electron,aichingm/electron,posix4e/electron,kokdemo/electron,shiftkey/electron,bpasero/electron,biblerule/UMCTelnetHub,kcrt/electron,brave/electron,biblerule/UMCTelnetHub,thompsonemerson/electron,minggo/electron,bbondy/electron,Evercoder/electron,kcrt/electron,etiktin/electron,simongregory/electron,meowlab/electron,arturts/electr
on,deed02392/electron,twolfson/electron,stevekinney/electron,ankitaggarwal011/electron,felixrieseberg/electron,minggo/electron,tinydew4/electron,renaesop/electron,joaomoreno/atom-shell,the-ress/electron,jaanus/electron,dongjoon-hyun/electron,Gerhut/electron,leethomas/electron,Evercoder/electron,lzpfmh/electron,Gerhut/electron,wan-qy/electron,gerhardberger/electron,preco21/electron,kokdemo/electron,brave/muon,thingsinjars/electron,gerhardberger/electron,bpasero/electron,electron/electron,etiktin/electron,IonicaBizauKitchen/electron,roadev/electron,the-ress/electron,lzpfmh/electron,jhen0409/electron,Evercoder/electron,leethomas/electron,Evercoder/electron,thingsinjars/electron,seanchas116/electron,pombredanne/electron,thingsinjars/electron,tonyganch/electron,rajatsingla28/electron,jaanus/electron,renaesop/electron,twolfson/electron,pombredanne/electron,thomsonreuters/electron,renaesop/electron,aichingm/electron,rreimann/electron,matiasinsaurralde/electron,gerhardberger/electron,arturts/electron,miniak/electron,stevekinney/electron,bpasero/electron,jhen0409/electron,noikiy/electron,miniak/electron,thomsonreuters/electron,preco21/electron,electron/electron,leethomas/electron,bpasero/electron,miniak/electron,thomsonreuters/electron,stevekinney/electron,shiftkey/electron,leethomas/electron,tylergibson/electron,brenca/electron,posix4e/electron,brave/muon,evgenyzinoviev/electron,twolfson/electron,kokdemo/electron,roadev/electron,ankitaggarwal011/electron,dongjoon-hyun/electron,rreimann/electron,tinydew4/electron,evgenyzinoviev/electron,tylergibson/electron,brenca/electron,biblerule/UMCTelnetHub,IonicaBizauKitchen/electron,renaesop/electron,thingsinjars/electron,aichingm/electron,brenca/electron,electron/electron,jhen0409/electron,MaxWhere/electron,evgenyzinoviev/electron,evgenyzinoviev/electron,lzpfmh/electron,brave/electron,stevekinney/electron,dongjoon-hyun/electron,bbondy/electron,brenca/electron,voidbridge/electron,rajatsingla28/electron,bpasero/electron,simongregory/el
ectron,rreimann/electron,jaanus/electron,Evercoder/electron,shiftkey/electron,Floato/electron,seanchas116/electron,wan-qy/electron,evgenyzinoviev/electron,astoilkov/electron,preco21/electron,gabriel/electron,MaxWhere/electron,ankitaggarwal011/electron,gerhardberger/electron,matiasinsaurralde/electron,aichingm/electron,felixrieseberg/electron,the-ress/electron,tonyganch/electron,electron/electron,thompsonemerson/electron,miniak/electron,stevekinney/electron,felixrieseberg/electron,bpasero/electron,brave/electron,tinydew4/electron,brenca/electron,nekuz0r/electron,tonyganch/electron,MaxWhere/electron,posix4e/electron,seanchas116/electron,IonicaBizauKitchen/electron,twolfson/electron,Gerhut/electron,posix4e/electron,joaomoreno/atom-shell,kcrt/electron,nekuz0r/electron,leftstick/electron,pombredanne/electron,leftstick/electron,thingsinjars/electron,meowlab/electron | script/lib/config.py | script/lib/config.py | #!/usr/bin/env python
import errno
import os
import platform
import sys
BASE_URL = os.getenv('LIBCHROMIUMCONTENT_MIRROR') or \
'http://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '78e54bc39a04b758ed5167cd980cc4d9951bd629'
PLATFORM = {
'cygwin': 'win32',
'darwin': 'darwin',
'linux2': 'linux',
'win32': 'win32',
}[sys.platform]
verbose_mode = False
def get_platform_key():
    """Return the key used to select prebuilt binaries.

    Mac App Store builds get their own 'mas' artifacts; every other build
    uses the host platform name.
    """
    # 'in' works on both Python 2 and 3; dict.has_key() was removed in 3.
    if 'MAS_BUILD' in os.environ:
        return 'mas'
    else:
        return PLATFORM
def get_target_arch():
    """Read the downloaded libchromiumcontent's target-arch marker file.

    Falls back to the platform default (ia32 on Windows, x64 elsewhere)
    when the marker has not been downloaded yet.
    """
    try:
        # NOTE: the path is joined onto __file__ itself and then normalized,
        # so the first '..' strips the file name component.
        target_arch_path = os.path.join(__file__, '..', '..', '..', 'vendor',
                                        'brightray', 'vendor', 'download',
                                        'libchromiumcontent', '.target_arch')
        with open(os.path.normpath(target_arch_path)) as f:
            return f.read().strip()
    except IOError as e:
        # Only a missing file is acceptable; re-raise anything else.
        if e.errno != errno.ENOENT:
            raise
    if PLATFORM == 'win32':
        return 'ia32'
    else:
        return 'x64'
def get_chromedriver_version():
    """Version tag of the chromedriver build shipped with releases."""
    return 'v2.15'
def s3_config():
    """Read the S3 upload credentials from the environment.

    Returns (bucket, access_key, secret_key); raises AssertionError with a
    helpful message if any of them is unset or empty.
    """
    names = ('ATOM_SHELL_S3_BUCKET',
             'ATOM_SHELL_S3_ACCESS_KEY',
             'ATOM_SHELL_S3_SECRET_KEY')
    config = tuple(os.environ.get(name, '') for name in names)
    message = ('Error: Please set the $ATOM_SHELL_S3_BUCKET, '
               '$ATOM_SHELL_S3_ACCESS_KEY, and '
               '$ATOM_SHELL_S3_SECRET_KEY environment variables')
    assert all(len(c) for c in config), message
    return config
def enable_verbose_mode():
    """Switch the module into verbose mode (sets the module-global flag)."""
    # Parenthesized form is a valid statement on Python 2 and a call on 3,
    # unlike the bare `print '...'` statement which is a Py3 syntax error.
    print('Running in verbose mode')
    global verbose_mode
    verbose_mode = True
def is_verbose_mode():
    # Reflects the module-level flag toggled by enable_verbose_mode().
    return verbose_mode
| #!/usr/bin/env python
import errno
import os
import platform
import sys
BASE_URL = os.getenv('LIBCHROMIUMCONTENT_MIRROR') or \
'http://github-janky-artifacts.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '04523758cda2a96d2454f9056fb1fb9a1c1f95f1'
PLATFORM = {
'cygwin': 'win32',
'darwin': 'darwin',
'linux2': 'linux',
'win32': 'win32',
}[sys.platform]
verbose_mode = False
def get_platform_key():
if os.environ.has_key('MAS_BUILD'):
return 'mas'
else:
return PLATFORM
def get_target_arch():
try:
target_arch_path = os.path.join(__file__, '..', '..', '..', 'vendor',
'brightray', 'vendor', 'download',
'libchromiumcontent', '.target_arch')
with open(os.path.normpath(target_arch_path)) as f:
return f.read().strip()
except IOError as e:
if e.errno != errno.ENOENT:
raise
if PLATFORM == 'win32':
return 'ia32'
else:
return 'x64'
def get_chromedriver_version():
return 'v2.15'
def s3_config():
config = (os.environ.get('ATOM_SHELL_S3_BUCKET', ''),
os.environ.get('ATOM_SHELL_S3_ACCESS_KEY', ''),
os.environ.get('ATOM_SHELL_S3_SECRET_KEY', ''))
message = ('Error: Please set the $ATOM_SHELL_S3_BUCKET, '
'$ATOM_SHELL_S3_ACCESS_KEY, and '
'$ATOM_SHELL_S3_SECRET_KEY environment variables')
assert all(len(c) for c in config), message
return config
def enable_verbose_mode():
print 'Running in verbose mode'
global verbose_mode
verbose_mode = True
def is_verbose_mode():
return verbose_mode
| mit | Python |
d2ad58a752685b9982081331971276d7c14fec92 | remove print statement | coco-project/coco,coco-project/coco,coco-project/coco,coco-project/coco | ipynbsrv/web/api_client_proxy.py | ipynbsrv/web/api_client_proxy.py | from ipynbsrv.client.clients import HttpClient
def get_httpclient_instance(request):
    """Build an HttpClient for the internal REST API, authenticated as the
    request's user with the password previously stashed in the session.
    """
    base_url = "http://localhost:8000/api"
    username = request.user.username
    password = request.session.get('password')
    return HttpClient(base_url, auth=(username, password))
def set_session_password(request):
    """Remember the POSTed password in the session for later API calls."""
    if not request.POST:
        # Nothing to store on non-POST (or empty) requests.
        return
    request.session['password'] = request.POST.get('password')
| from ipynbsrv.client.clients import HttpClient
def get_httpclient_instance(request):
base_url = "http://localhost:8000/api"
username = request.user.username
password = request.session.get('password')
print("{}:{}".format(username, password))
return HttpClient(base_url, auth=(username, password))
def set_session_password(request):
if request.POST:
password = request.POST.get('password')
request.session['password'] = password
| bsd-3-clause | Python |
b4e0294eed4dce7d5170cbc41e0bbec5e6387e82 | Revert "improved config" | renskiy/fabricio | examples/service/swarm/fabfile.py | examples/service/swarm/fabfile.py | import fabricio
from fabric import api as fab
from fabricio import tasks, docker
from fabricio.misc import AvailableVagrantHosts
hosts = AvailableVagrantHosts(guest_network_interface='eth1')
@fab.task(name='swarm-init')
@fab.serial
def swarm_init():
    """
    enable Docker swarm mode
    """
    def _swarm_init():
        # First host to run becomes the manager: it initializes the swarm,
        # advertising its own address, and records a join command.
        if swarm_init.worker_join_command is None:
            fabricio.run(
                'docker swarm init --advertise-addr {0}'.format(fab.env.host),
                ignore_errors=True,
            )
            join_token = fabricio.run(
                'docker swarm join-token --quiet manager',
                ignore_errors=True,
            )
            swarm_init.worker_join_command = (
                'docker swarm join --token {join_token} {host}:2377'
            ).format(join_token=join_token, host=fab.env.host)
        else:
            # Every subsequent host joins the swarm created above.
            fabricio.run(
                swarm_init.worker_join_command,
                ignore_errors=True,
            )
    with fab.settings(hosts=hosts):
        fab.execute(_swarm_init)
# Function attribute doubles as cross-host state: set by the first host,
# consumed by the rest (the task runs serially, so this is safe).
swarm_init.worker_join_command = None
# Deployable nginx service: two replicas publishing container port 80.
nginx = tasks.DockerTasks(
    service=docker.Service(
        name='nginx',
        image='nginx:stable-alpine',
        options=dict(
            publish='80:80',
            replicas=2,
        ),
    ),
    hosts=hosts,
)
| import fabricio
from fabric import api as fab
from fabricio import tasks, docker
from fabricio.misc import AvailableVagrantHosts
hosts = AvailableVagrantHosts(guest_network_interface='eth1')
@fab.task(name='swarm-init')
@fab.serial
def swarm_init():
"""
enable Docker swarm mode
"""
def _swarm_init():
if swarm_init.worker_join_command is None:
fabricio.run(
'docker swarm init',
ignore_errors=True,
)
join_token = fabricio.run(
'docker swarm join-token --quiet manager',
ignore_errors=True,
)
swarm_init.worker_join_command = (
'docker swarm join --token {join_token} {host}:2377'
).format(join_token=join_token, host=fab.env.host)
else:
fabricio.run(
swarm_init.worker_join_command,
ignore_errors=True,
)
with fab.settings(hosts=hosts):
fab.execute(_swarm_init)
swarm_init.worker_join_command = None
nginx = tasks.DockerTasks(
service=docker.Service(
name='nginx',
image='nginx:stable-alpine',
options=dict(
publish='80:80',
replicas=2,
),
),
hosts=hosts,
)
| mit | Python |
92e55388e427b9497947bf9534b612c744978bb6 | Load config on startup, reorganize imports, remove comments, log dry run start | kocsenc/i-have-an-opinion,kocsenc/i-have-an-opinion,kocsenc/i-have-an-opinion,kocsenc/i-have-an-opinion | backend/api.py | backend/api.py | # create our little application :)
from flask import Flask, request
import argparse
import configparser
import random
import tweepy
app = Flask(__name__)
dry_run = None
config = None
def are_you_the_keymaster():
    """Load API credentials from keys.ini in the working directory.

    Returns the parser regardless of whether the file exists (read() on a
    missing file yields an empty config).
    """
    parser = configparser.ConfigParser()
    parser.read('keys.ini')
    return parser
def twitter_handler(message):
    """Post *message* as a tweet using credentials from keys.ini.

    Returns the string form of the created status.
    """
    twconf = are_you_the_keymaster()['twitter']
    auth = tweepy.OAuthHandler(twconf['conkey'], twconf['consec'])
    auth.set_access_token(twconf['atok'], twconf['atoksec'])
    api = tweepy.API(auth)
    return str(api.update_status(message))
def devnull_handler(message):
    """Silently discard *message*, reporting success to the caller."""
    return ('', 200)
def dry_run_handler_wrapper(handler):
    """Return a stand-in for *handler* that only logs what would happen."""
    def _log_only(message):
        print("Would be using", handler.__name__, "for", message)
        return ('', 200)
    return _log_only
def logging_handler_wrapper(handler):
    """Return *handler* augmented with a log line on every call."""
    def _logged(message):
        print("Using", handler.__name__, "for", message)
        return handler(message)
    return _logged
# Candidate destinations for an opinion; one is picked at random per message.
HANDLERS = (twitter_handler, devnull_handler)
def choose_handler():
    """Pick a random handler, wrapped according to the dry-run setting."""
    # 'global' is redundant for a read, but kept to signal module state.
    global dry_run
    handler = random.choice(HANDLERS)
    if dry_run:
        handler = dry_run_handler_wrapper(handler)
    else:
        handler = logging_handler_wrapper(handler)
    return handler
def validate_message(message):
    """Accept any non-empty message; log every check."""
    print("Testing message", message)
    return message != ''
def handle_message(message):
    """Validate *message* and dispatch it to a randomly chosen handler.

    Returns a Flask-style response tuple; 400 on an empty message.
    """
    if not validate_message(message):
        return 'Bad message!', 400
    handler = choose_handler()
    return handler(message)
@app.route('/opinion', methods=['GET', 'POST', 'HEYLISTEN'])
def opinionate():
    """Accept an opinion via POST (or HEYLISTEN) and route it somewhere."""
    if request.method == 'GET':
        return 'Hey! POST me an opinion to have it put somewhere.'
    elif request.method in ('POST', 'HEYLISTEN'):
        # Raw request body; handle_message validates it.
        message = request.data
        return handle_message(message)
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('-d', dest='debug', action='store_true', help='Enable debug mode')
    parser.add_argument('-n', dest='dry', action='store_true', help="Don't make any external requests, just log what would have happened")
    args = parser.parse_args()
    dry_run = args.dry
    if dry_run:
        print("Dry run, not making any external requests")
    app.debug = args.debug
    # Load credentials once at startup so a broken keys.ini fails fast.
    config = are_you_the_keymaster()
    app.run()
| # create our little application :)
import argparse
import configparser
from flask import Flask, request
import random
import tweepy
app = Flask(__name__)
dry_run = None
def are_you_the_keymaster():
config = configparser.ConfigParser()
config.read('keys.ini')
return config
def twitter_handler(message):
twconf = are_you_the_keymaster()['twitter']
auth = tweepy.OAuthHandler(twconf['conkey'], twconf['consec'])
auth.set_access_token(twconf['atok'], twconf['atoksec'])
api = tweepy.API(auth)
return str(api.update_status(message))
#return str(api.verify_credentials())
def devnull_handler(message):
return ('', 200)
def dry_run_handler_wrapper(handler):
def dry_run_handler(message):
print("Would be using", handler.__name__, "for", message)
return ('', 200)
return dry_run_handler
def logging_handler_wrapper(handler):
def logging_handler(message):
print("Using", handler.__name__, "for", message)
return handler(message)
return logging_handler
HANDLERS = (twitter_handler, devnull_handler)
def choose_handler():
global dry_run
handler = random.choice(HANDLERS)
if dry_run:
handler = dry_run_handler_wrapper(handler)
else:
handler = logging_handler_wrapper(handler)
return handler
def validate_message(message):
print("Testing message", message)
return message != ''
def handle_message(message):
if not validate_message(message):
return 'Bad message!', 400
handler = choose_handler()
return handler(message)
@app.route('/opinion', methods=['GET', 'POST', 'HEYLISTEN'])
def opinionate():
if request.method == 'GET':
return 'Hey! POST me an opinion to have it put somewhere.'
elif request.method in ('POST', 'HEYLISTEN'):
message = request.data
return handle_message(message)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-d', dest='debug', action='store_true', help='Enable debug mode')
parser.add_argument('-n', dest='dry', action='store_true', help="Don't make any external requests, just log what would have happened")
args = parser.parse_args()
dry_run = args.dry
app.debug = args.debug
app.run()
| bsd-2-clause | Python |
3a15797507678c1ae2962e5421ba2c28afe01f26 | Remove outdated comments. | sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator | Lib/symbol.py | Lib/symbol.py | #! /usr/bin/env python
#
# Non-terminal symbols of Python grammar (from "graminit.h")
#
# This file is automatically generated; please don't muck it up!
#
# To update the symbols in this file, 'cd' to the top directory of
# the python source tree after building the interpreter and run:
#
# python Lib/symbol.py
#--start constants--
single_input = 256
file_input = 257
eval_input = 258
funcdef = 259
parameters = 260
varargslist = 261
fpdef = 262
fplist = 263
stmt = 264
simple_stmt = 265
small_stmt = 266
expr_stmt = 267
print_stmt = 268
del_stmt = 269
pass_stmt = 270
flow_stmt = 271
break_stmt = 272
continue_stmt = 273
return_stmt = 274
raise_stmt = 275
import_stmt = 276
dotted_name = 277
global_stmt = 278
exec_stmt = 279
assert_stmt = 280
compound_stmt = 281
if_stmt = 282
while_stmt = 283
for_stmt = 284
try_stmt = 285
except_clause = 286
suite = 287
test = 288
and_test = 289
not_test = 290
comparison = 291
comp_op = 292
expr = 293
xor_expr = 294
and_expr = 295
shift_expr = 296
arith_expr = 297
term = 298
factor = 299
power = 300
atom = 301
lambdef = 302
trailer = 303
subscriptlist = 304
subscript = 305
sliceop = 306
exprlist = 307
testlist = 308
dictmaker = 309
classdef = 310
arglist = 311
argument = 312
#--end constants--
# Reverse mapping: numeric symbol value -> grammar rule name.
sym_name = {}
for _name, _value in globals().items():
    if type(_value) is type(0):
        sym_name[_value] = _name
def main():
    """Regenerate this module's constants from Include/graminit.h via token.main()."""
    import sys
    import token
    if len(sys.argv) == 1:
        # Default to the in-tree locations when run without arguments.
        sys.argv = sys.argv + ["Include/graminit.h", "Lib/symbol.py"]
    token.main()
if __name__ == "__main__":
    main()
| #! /usr/bin/env python
#
# Non-terminal symbols of Python grammar (from "graminit.h")
#
# This file is automatically generated; please don't muck it up!
#
# To update the symbols in this file, 'cd' to the top directory of
# the python source tree after building the interpreter and run:
#
# PYTHONPATH=Lib:Modules ./python Lib/symbol.py
#
# (this path allows the import of string.py, token.py, and regexmodule.so
# for a site with no installation in place)
#--start constants--
single_input = 256
file_input = 257
eval_input = 258
funcdef = 259
parameters = 260
varargslist = 261
fpdef = 262
fplist = 263
stmt = 264
simple_stmt = 265
small_stmt = 266
expr_stmt = 267
print_stmt = 268
del_stmt = 269
pass_stmt = 270
flow_stmt = 271
break_stmt = 272
continue_stmt = 273
return_stmt = 274
raise_stmt = 275
import_stmt = 276
dotted_name = 277
global_stmt = 278
exec_stmt = 279
assert_stmt = 280
compound_stmt = 281
if_stmt = 282
while_stmt = 283
for_stmt = 284
try_stmt = 285
except_clause = 286
suite = 287
test = 288
and_test = 289
not_test = 290
comparison = 291
comp_op = 292
expr = 293
xor_expr = 294
and_expr = 295
shift_expr = 296
arith_expr = 297
term = 298
factor = 299
power = 300
atom = 301
lambdef = 302
trailer = 303
subscriptlist = 304
subscript = 305
sliceop = 306
exprlist = 307
testlist = 308
dictmaker = 309
classdef = 310
arglist = 311
argument = 312
#--end constants--
sym_name = {}
for _name, _value in globals().items():
if type(_value) is type(0):
sym_name[_value] = _name
def main():
import sys
import token
if len(sys.argv) == 1:
sys.argv = sys.argv + ["Include/graminit.h", "Lib/symbol.py"]
token.main()
if __name__ == "__main__":
main()
#
# end of file
| mit | Python |
b3e78b4934bebaf43fdb7787c5a3b3e166a35e23 | Bump to 0.2.1 | jrief/djangocms-bootstrap3,jrief/djangocms-bootstrap3 | cms_bootstrap3/__init__.py | cms_bootstrap3/__init__.py | __version__ = '0.2.1'
| __version__ = '0.2.0'
| mit | Python |
a2fdd4c71c53095ce0285ab562f88950aab01bd7 | Fix indentation | frinder/frinder-app,frinder/frinder-app,frinder/frinder-app | scripts/add_users.py | scripts/add_users.py | from google.cloud import firestore
import argparse
import datetime
import names
import random
genders = [u'male', u'female']
interests = [u'Movies', u'Football', u'Books', u'Music']
script_version=1
def queryUsers(db):
users_ref = db.collection(u'users')
docs = users_ref.get()
for doc in docs:
print(u'{} => {}'.format(doc.id, doc.to_dict()))
def addUser(db, ref_lat, ref_lon, range):
doc_ref = db.collection(u'users').document()
uid = doc_ref.id
gender = random.choice(genders)
lat = ref_lat + random.uniform(-range, range)
lon = ref_lon + random.uniform(-range, range)
doc_ref.set({
u'desc': u'Test user created using a script',
u'id': uid,
u'name': names.get_full_name(gender=gender),
u'linkUrl': u'https://www.google.com',
u'email': u'fake@gmail.com',
u'profilePicUrl': None,
u'gender': gender,
u'age': random.randint(14,80),
u'timestamp': datetime.datetime.now(),
u'interests': [random.choice(interests)],
u'location': [lat, lon],
u'createdByScript': True,
u'scriptVersion': script_version
})
doc = doc_ref.get()
print(u'Created {} => {}'.format(doc.id, doc.to_dict()))
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--count", type=int, default=3)
parser.add_argument("--lat", type=float, default=37.0)
parser.add_argument("--lon", type=float, default=-122.0)
parser.add_argument("--range", type=float, default=-0.000001)
args = parser.parse_args()
db = firestore.Client()
for i in range(0, args.count):
addUser(db, args.lat, args.lon, args.range)
# Uncomment to query all users
# queryUsers(db)
| from google.cloud import firestore
import argparse
import datetime
import names
import random
genders = [u'male', u'female']
interests = [u'Movies', u'Football', u'Books', u'Music']
script_version=1
def queryUsers(db):
users_ref = db.collection(u'users')
docs = users_ref.get()
for doc in docs:
print(u'{} => {}'.format(doc.id, doc.to_dict()))
def addUser(db, ref_lat, ref_lon, range):
doc_ref = db.collection(u'users').document()
uid = doc_ref.id
gender = random.choice(genders)
lat = ref_lat + random.uniform(-range, range)
lon = ref_lon + random.uniform(-range, range)
doc_ref.set({
u'desc': u'Test user created using a script',
u'id': uid,
u'name': names.get_full_name(gender=gender),
u'linkUrl': u'https://www.google.com',
u'email': u'fake@gmail.com',
u'profilePicUrl': None,
u'gender': gender,
u'age': random.randint(14,80),
u'timestamp': datetime.datetime.now(),
u'interests': [random.choice(interests)],
u'location': [lat, lon],
u'createdByScript': True,
u'scriptVersion': script_version
})
doc = doc_ref.get()
print(u'Created {} => {}'.format(doc.id, doc.to_dict()))
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--count", type=int, default=3)
parser.add_argument("--lat", type=float, default=37.0)
parser.add_argument("--lon", type=float, default=-122.0)
parser.add_argument("--range", type=float, default=-0.000001)
args = parser.parse_args()
db = firestore.Client()
for i in range(0, args.count):
addUser(db, args.lat, args.lon, args.range)
# Uncomment to query all users
# queryUsers(db)
| mit | Python |
d301e196017b442562224f786cfe23cc8cc3c70c | Update connectivity matrix. | visdesignlab/TulipPaths,visdesignlab/TulipPaths | experiments/connectivityMatrix.py | experiments/connectivityMatrix.py | from tulip import *
from tulipgui import *
import tulippaths as tp
import json
#graphFile = '../data/test_feedback.tlp'
graphFile = '../data/514_10hops.tlp'
graph = tlp.loadGraph(graphFile)
acRegexes = ['AC', 'IAC', 'YAC', 'GAC']
acRegexes = '(?:%s)' % '|'.join(acRegexes)
nodeConstraints = ['CBb.*', acRegexes, 'CBb.*']
edgeConstraints = ['.*', '.*']
"""
matrix = tp.ConnectivityMatrix(graph)
matrix.activate(nodeConstraints, edgeConstraints)
matrix.collapseSources()
matrix.collapseTargets()
jsonObject = matrix.getAsJsonObject(True)
print json.dumps(jsonObject)
"""
nodeConstraints = ['CBb.*', 'CBb.*']
edgeConstraints = ['Gap Junction']
matrix = tp.ConnectivityMatrix(graph)
matrix.activate(nodeConstraints, edgeConstraints)
matrix.collapseSources()
matrix.collapseTargets()
jsonObject = matrix.getAsJsonObject(True)
print json.dumps(jsonObject) | from tulip import *
from tulipgui import *
import tulippaths as tp
import json
graphFile = '../data/test_feedback.tlp'
#graphFile = '../data/514_10hops.tlp'
graph = tlp.loadGraph(graphFile)
acRegexes = ['AC', 'IAC', 'YAC']
acRegexes = '(?:%s)' % '|'.join(acRegexes)
nodeConstraints = ['CBb.*', acRegexes, 'GC']
edgeConstraints = ['.*', '.*']
matrix = tp.ConnectivityMatrix(graph)
matrix.activate(nodeConstraints, edgeConstraints)
jsonObject = matrix.getAsJsonObject(True)
json.dumps(jsonObject) | mit | Python |
a76d215f95b5acaf1ce7e35daeef6447fa122795 | fix maintainer email address to point to mailing list | acigna/pywez,acigna/pywez,acigna/pywez | zsi/setup.py | zsi/setup.py | #! /usr/bin/env python
# $Header$
import sys
from distutils.core import setup
_url = "http://pywebsvcs.sf.net/"
import ConfigParser
cf = ConfigParser.ConfigParser()
cf.read('setup.cfg')
major = cf.getint('version', 'major')
minor = cf.getint('version', 'minor')
release = cf.getint('version', 'release')
_version = "%d.%d.%d" % ( major, minor, release )
try:
open('ZSI/version.py', 'r').close()
except:
print 'ZSI/version.py not found; run "make"'
sys.exit(1)
setup(
name="ZSI",
version=_version,
license="Python",
packages=[ "ZSI", "ZSI.wstools" ],
scripts=["scripts/wsdl2py.py", "scripts/wsdl2dispatch.py"],
description="Zolera SOAP Infrastructure",
author="Rich Salz, et al",
author_email="rsalz@datapower.com",
maintainer="Rich Salz, et al",
maintainer_email="pywebsvcs-talk@lists.sf.net",
url=_url,
long_description='For additional information, please see ' + _url
)
| #! /usr/bin/env python
# $Header$
import sys
from distutils.core import setup
_url = "http://pywebsvcs.sf.net/"
import ConfigParser
cf = ConfigParser.ConfigParser()
cf.read('setup.cfg')
major = cf.getint('version', 'major')
minor = cf.getint('version', 'minor')
release = cf.getint('version', 'release')
_version = "%d.%d.%d" % ( major, minor, release )
try:
open('ZSI/version.py', 'r').close()
except:
print 'ZSI/version.py not found; run "make"'
sys.exit(1)
setup(
name="ZSI",
version=_version,
license="Python",
packages=[ "ZSI", "ZSI.wstools" ],
scripts=["scripts/wsdl2py.py", "scripts/wsdl2dispatch.py"],
description="Zolera SOAP Infrastructure",
author="Rich Salz",
author_email="rsalz@datapower.com",
maintainer="Rich Salz",
maintainer_email="rsalz@datapower.com",
url=_url,
long_description='For additional information, please see ' + _url
)
| mit | Python |
67c98ba67f99d5de5022b32fdb3eb9cd0d96908f | Add missing line on the script and some comment | jstuyck/MitmProxyScripts | scripts/tappedout.py | scripts/tappedout.py | from binascii import unhexlify
#This can be replace by using the "decode" function on the reponse
def request(context, flow):
if (flow.request.host.find('change.me') > -1 and flow.request.path.find('somethingaboutcurrency') > -1):
flow.request.headers['Accept-Encoding'] = ['']
def response(context, flow):
if (flow.request.host.find('change.me') > -1 and flow.request.path.find('somethingaboutcurrency') > -1):
flow.response.content = unhexlify('[..]ffffff[..]')
#this ca be enhanced by using the protobuf to deserialize the message | from binascii import unhexlify
def request(context, flow):
if (flow.request.host.find('change.me') > -1 and flow.request.path.find('somethingaboutcurrency') > -1):
def response(context, flow):
if (flow.request.host.find('change.me') > -1 and flow.request.path.find('somethingaboutcurrency') > -1):
flow.response.content = unhexlify('[..]ffffff[..]') | apache-2.0 | Python |
4e6b759219c2d15902196d1747a7eb89cebad150 | test commit | noahklein/unichat | chat/models.py | chat/models.py | from django.db import models
class Chatroom(models.Model):
name = models.CharField(max_length=32)
occupants = models.IntegerField(default=0)
def __unicode__(self):
return self.name
class Message(models.Model):
chatroom = models.ForeignKey(Chatroom)
username = models.CharField(max_length=32)
text = models.CharField(max_length=512)
timestamp = models.DateTimeField(auto_now_add=True)
def __unicode__(self):
return self.text
| from django.db import models
class Chatroom(models.Model):
name = models.CharField(max_length=32)
occupants = models.IntegerField(default=0)
def __unicode__(self):
return self.name
class Message(models.Model):
chatroom = models.ForeignKey(Chatroom)
username = models.CharField(max_length=32)
text = models.CharField(max_length=512)
timestamp = models.DateTimeField(auto_now_add=True)
def __unicode__(self):
return self.text
| mit | Python |
151a5f75a240c875fc591390c208c933e8d0e782 | Update name of Eidos reading class | johnbachman/indra,pvtodorov/indra,johnbachman/belpy,johnbachman/indra,sorgerlab/indra,bgyori/indra,johnbachman/indra,pvtodorov/indra,sorgerlab/belpy,pvtodorov/indra,johnbachman/belpy,sorgerlab/belpy,bgyori/indra,sorgerlab/indra,sorgerlab/indra,sorgerlab/belpy,bgyori/indra,johnbachman/belpy,pvtodorov/indra | indra/sources/eidos/eidos_reader.py | indra/sources/eidos/eidos_reader.py | import json
from indra.java_vm import autoclass, JavaException
class EidosReader(object):
"""Reader object keeping an instance of the Eidos reader as a singleton.
This allows the Eidos reader to need initialization when the first piece of
text is read, the subsequent readings are done with the same
instance of the reader and are therefore faster.
Attributes
----------
eidos_reader : org.clulab.wm.AgroSystem
A Scala object, an instance of the Eidos reading system. It is
instantiated only when first processing text.
"""
def __init__(self):
self.eidos_reader = None
def process_text(self, text):
"""Return a mentions JSON object given text.
Parameters
----------
text : str
Text to be processed.
Returns
-------
json_dict : dict
A JSON object of mentions extracted from text.
"""
if self.eidos_reader is None:
eidos = autoclass('org.clulab.wm.EidosSystem')
self.eidos_reader = eidos(autoclass('java.lang.Object')())
mentions = self.eidos_reader.extractFrom(text)
ser = autoclass('org.clulab.wm.serialization.json.WMJSONSerializer')
mentions_json = ser.toJsonStr(mentions)
json_dict = json.loads(mentions_json)
return json_dict
| import json
from indra.java_vm import autoclass, JavaException
class EidosReader(object):
"""Reader object keeping an instance of the Eidos reader as a singleton.
This allows the Eidos reader to need initialization when the first piece of
text is read, the subsequent readings are done with the same
instance of the reader and are therefore faster.
Attributes
----------
eidos_reader : org.clulab.wm.AgroSystem
A Scala object, an instance of the Eidos reading system. It is
instantiated only when first processing text.
"""
def __init__(self):
self.eidos_reader = None
def process_text(self, text):
"""Return a mentions JSON object given text.
Parameters
----------
text : str
Text to be processed.
Returns
-------
json_dict : dict
A JSON object of mentions extracted from text.
"""
if self.eidos_reader is None:
eidos = autoclass('org.clulab.wm.AgroSystem')
self.eidos_reader = eidos(autoclass('java.lang.Object')())
mentions = self.eidos_reader.extractFrom(text)
ser = autoclass('org.clulab.wm.serialization.json.WMJSONSerializer')
mentions_json = ser.toJsonStr(mentions)
json_dict = json.loads(mentions_json)
return json_dict
| bsd-2-clause | Python |
977557675167a5e2c22fd3d94cd4c93acbb4b326 | Update ipc_lista1.8.py | any1m1c/ipc20161 | lista1/ipc_lista1.8.py | lista1/ipc_lista1.8.py | #ipc_lista1.8
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um programa que pergunte quanto você ganha por hora e o número de horas trabalhadas no mês.
#Calcule e mostre o total do seu salário no referido mês.
QntHora = input("Entre com o valor de seu rendimento por hora: ")
hT = input("Entre com a quantidade de horas trabalhadas no mês: )
Salario = round(QntHora*hT,2
| #ipc_lista1.8
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um programa que pergunte quanto você ganha por hora e o número de horas trabalhadas no mês.
#Calcule e mostre o total do seu salário no referido mês.
QntHora = input("Entre com o valor de seu rendimento por hora: ")
hT = input("Entre com a quantidade de horas trabalhadas no mês: )
Salario = round(QntHora*hT
| apache-2.0 | Python |
ad76d59c48443697107854812073ea8521d21cbf | Update __init__.py | lindemann09/pyForceDAQ,lindemann09/pyForceDAQ,lindemann09/pyForceDAQ | forceDAQ/__init__.py | forceDAQ/__init__.py | __version__ = "0.8.11c"
__author__ = "Oliver Lindemann"
"""
launch the GUI force from your Python program:
``
from forceDAQ import gui
gui.run_with_options(remote_control=False,
ask_filename=True,
calibration_file="FT_sensor1.cal")
``
import relevant stuff to program your own force:
``
from forceDAQ import force
``
import relevant stuff for remote control of the GUI force:
``
from forceDAQ import remote_control
``
For function to support data handling see the folder pyForceDAQ/analysis
"""
import sys as _sys
PYTHON3 = (_sys.version_info[0] == 3)
USE_DUMMY_SENSOR = False
| __version__ = "0.8.11b"
__author__ = "Oliver Lindemann"
"""
launch the GUI force from your Python program:
``
from forceDAQ import gui
gui.run_with_options(remote_control=False,
ask_filename=True,
calibration_file="FT_sensor1.cal")
``
import relevant stuff to program your own force:
``
from forceDAQ import force
``
import relevant stuff for remote control of the GUI force:
``
from forceDAQ import remote_control
``
For function to support data handling see the folder pyForceDAQ/analysis
"""
import sys as _sys
PYTHON3 = (_sys.version_info[0] == 3)
USE_DUMMY_SENSOR = False
| mit | Python |
3db6d8352a993f64380c21c2b29d30ae7f79e4cc | Fix isolated test runner | llvmpy/llvmpy,llvmpy/llvmpy,llvmpy/llvmpy,llvmpy/llvmpy,llvmpy/llvmpy,llvmpy/llvmpy | llvm/tests/__init__.py | llvm/tests/__init__.py | from __future__ import print_function
import sys
import os
import unittest
import subprocess
import llvm
tests = [] # stores unittest.TestCase objects
# Isolated tests
# Tests that affect process-wide settings
isolated_tests = [] # stores modue name
def run(verbosity=1):
print('llvmpy is installed in: ' + os.path.dirname(__file__))
print('llvmpy version: ' + llvm.__version__)
print(sys.version)
files = filter(lambda s: s.startswith('test_') and s.endswith('.py'),
os.listdir(os.path.dirname(__file__)))
for f in files:
fname = f.split('.', 1)[0]
__import__('.'.join([__name__, fname]))
suite = unittest.TestSuite()
for cls in tests:
if cls:
suite.addTest(unittest.makeSuite(cls))
# The default stream fails in IPython qtconsole on Windows,
# so just using sys.stdout
kwargs = dict(verbosity=verbosity, stream=sys.stdout)
if sys.version_info[:2] > (2, 6):
kwargs['buffer'] = True
runner = unittest.TextTestRunner(**kwargs)
try:
from guppy import hpy
except ImportError:
testresult = runner.run(suite)
else:
hp = hpy()
hp.setref()
testresult = runner.run(suite)
print(hp.heap())
if testresult:
# Run isolated tests
print("run isolated tests".center(80, '-'))
for test in isolated_tests:
print(('testing %s' % test).center(80))
cmd = [sys.executable, '-m', test]
p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
for line in p.stdout:
print(line.decode('utf8'), end='')
p.wait()
if p.returncode:
raise Exception("%s returned: %d" % (test, p.returncode))
return testresult
if __name__ == '__main__':
unittest.main()
| from __future__ import print_function
import sys
import os
import unittest
import subprocess
import llvm
tests = [] # stores unittest.TestCase objects
# Isolated tests
# Tests that affect process-wide settings
isolated_tests = [] # stores modue name
def run(verbosity=1):
print('llvmpy is installed in: ' + os.path.dirname(__file__))
print('llvmpy version: ' + llvm.__version__)
print(sys.version)
files = filter(lambda s: s.startswith('test_') and s.endswith('.py'),
os.listdir(os.path.dirname(__file__)))
for f in files:
fname = f.split('.', 1)[0]
__import__('.'.join([__name__, fname]))
suite = unittest.TestSuite()
for cls in tests:
if cls:
suite.addTest(unittest.makeSuite(cls))
# The default stream fails in IPython qtconsole on Windows,
# so just using sys.stdout
kwargs = dict(verbosity=verbosity, stream=sys.stdout)
if sys.version_info[:2] > (2, 6):
kwargs['buffer'] = True
runner = unittest.TextTestRunner(**kwargs)
try:
from guppy import hpy
except ImportError:
testresult = runner.run(suite)
else:
hp = hpy()
hp.setref()
testresult = runner.run(suite)
print(hp.heap())
if testresult:
# Run isolated tests
print("run isolated tests".center(80, '-'))
for test in isolated_tests:
print(('testing %s' % test).center(80))
cmd = [sys.executable, '-m', test]
p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
for line in p.stdout:
print(line.decode('utf8'), end='')
p.wait()
if p.returncode:
raise Exception("%s returned: %d" % p.returncode)
return testresult
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | Python |
aae63bca5f5a9a70be40078de38a074f95c53f64 | fix duplicate text execution | xuru/substrate,xuru/substrate,xuru/substrate | local/commands/test.py | local/commands/test.py | import logging
import tempfile
from google.appengine.tools import dev_appserver
from google.appengine.tools import dev_appserver_main
from nose.core import main
from nosegae import NoseGAE
from nose_exclude import NoseExclude
from nose.plugins.logcapture import LogCapture
import re
import os
os.environ['NOSE_WITH_NOSEEXCLUDE'] = "--exclude-dir=lib"
os.environ['NOSEEXCLUDE_DIRS'] = "./lib ./local"
os.environ['NOSE_WHERE'] = "."
os.environ['NOSE_ALL_MODULES'] = "false"
os.environ['NOSE_LOGGING_CLEAR_HANDLERS'] = "true"
config = matcher = None
try:
config, matcher = dev_appserver.LoadAppConfig(".", {})
except yaml_errors.EventListenerError, e:
logging.error('Fatal error when loading application configuration:\n' +
str(e))
except dev_appserver.InvalidAppConfigError, e:
logging.error('Application configuration file invalid:\n%s', e)
#Configure our dev_appserver setup args
args = dev_appserver_main.DEFAULT_ARGS.copy()
args[dev_appserver_main.ARG_CLEAR_DATASTORE] = True
args[dev_appserver_main.ARG_BLOBSTORE_PATH] = os.path.join(
tempfile.gettempdir(), 'dev_appserver.test.blobstore')
args[dev_appserver_main.ARG_DATASTORE_PATH] = os.path.join(
tempfile.gettempdir(), 'dev_appserver.test.datastore')
args[dev_appserver_main.ARG_MATCHER_PATH] = os.path.join(
tempfile.gettempdir(), 'dev_appserver.test.matcher')
args[dev_appserver_main.ARG_HISTORY_PATH] = os.path.join(
tempfile.gettempdir(), 'dev_appserver.test.datastore.history')
dev_appserver.SetupStubs(config.application, **args)
os.environ['APPLICATION_ID'] = config.application
# Run the test on the current directory
# import sys
# sys.argv[1] = "."
# Run the test on the current directory if no other test is passed in as an option.
# ie python manage.py test tests/my_tests.py
import sys
if len (sys.argv) < 3:
print "No tests specified. Running everything..."
sys.argv[1] = "."
else:
print "Testing: %s"% sys.argv[2]
sys.argv[1] = sys.argv[2]
del sys.argv[2]
main(plugins=[NoseGAE(), NoseExclude()])
# main(plugins=[NoseGAE(), NoseExclude(), LogCapture()])
| import logging
import tempfile
from google.appengine.tools import dev_appserver
from google.appengine.tools import dev_appserver_main
from nose.core import main
from nosegae import NoseGAE
from nose_exclude import NoseExclude
from nose.plugins.logcapture import LogCapture
import re
import os
os.environ['NOSE_WITH_NOSEEXCLUDE'] = "--exclude-dir=lib"
os.environ['NOSEEXCLUDE_DIRS'] = "./lib ./local"
os.environ['NOSE_WHERE'] = "."
os.environ['NOSE_ALL_MODULES'] = "false"
os.environ['NOSE_LOGGING_CLEAR_HANDLERS'] = "true"
config = matcher = None
try:
config, matcher = dev_appserver.LoadAppConfig(".", {})
except yaml_errors.EventListenerError, e:
logging.error('Fatal error when loading application configuration:\n' +
str(e))
except dev_appserver.InvalidAppConfigError, e:
logging.error('Application configuration file invalid:\n%s', e)
#Configure our dev_appserver setup args
args = dev_appserver_main.DEFAULT_ARGS.copy()
args[dev_appserver_main.ARG_CLEAR_DATASTORE] = True
args[dev_appserver_main.ARG_BLOBSTORE_PATH] = os.path.join(
tempfile.gettempdir(), 'dev_appserver.test.blobstore')
args[dev_appserver_main.ARG_DATASTORE_PATH] = os.path.join(
tempfile.gettempdir(), 'dev_appserver.test.datastore')
args[dev_appserver_main.ARG_MATCHER_PATH] = os.path.join(
tempfile.gettempdir(), 'dev_appserver.test.matcher')
args[dev_appserver_main.ARG_HISTORY_PATH] = os.path.join(
tempfile.gettempdir(), 'dev_appserver.test.datastore.history')
dev_appserver.SetupStubs(config.application, **args)
os.environ['APPLICATION_ID'] = config.application
# Run the test on the current directory
# import sys
# sys.argv[1] = "."
# Run the test on the current directory if no other test is passed in as an option.
# ie python manage.py test tests/my_tests.py
import sys
if len (sys.argv) < 3:
print "No tests specified. Running everything..."
sys.argv[1] = "."
else:
print "Testing: %s"% sys.argv[2]
sys.argv[1] = sys.argv[2]
main(plugins=[NoseGAE(), NoseExclude()])
# main(plugins=[NoseGAE(), NoseExclude(), LogCapture()])
| mit | Python |
2d34a073932cc61a469e0c9e86be44465e3fa67f | fix auth.urls | sunlightlabs/django-locksmith,sunlightlabs/django-locksmith,sunlightlabs/django-locksmith | locksmith/auth/urls.py | locksmith/auth/urls.py | from django.conf.urls.defaults import *
urlpatterns = patterns('locksmith.auth.views',
url(r'^create_key/$', 'create_key', name='create_key'),
url(r'^update_key/$', 'update_key', name='update_key'),
url(r'^update_key_by_email/$', 'update_key', {'get_by':'email'},
name='update_key_by_email'),
)
| from django.conf.urls.defaults import *
urlpatterns = patterns('locksmith.auth.views',
url(r'^create_key/$', self.create_key_view, name='create_key'),
url(r'^update_key/$', self.update_key_view, name='update_key'),
url(r'^update_key_by_email/$', self.update_key_view, {'get_by':'email'},
name='update_key_by_email'),
)
| bsd-3-clause | Python |
8179c573ff3a55e9a8df878c1286d5a99acab9cd | test perm using py.test - tidy #1150 | pkimber/login,pkimber/login,pkimber/login | login/tests/fixture.py | login/tests/fixture.py | # -*- encoding: utf-8 -*-
import pytest
from login.tests.factories import (
TEST_PASSWORD,
UserFactory,
)
class PermTest:
def __init__(self, client):
setup_users()
self.client = client
def anon(self, url):
self.client.logout()
response = self.client.get(url)
message = "'url should be public '{}'".format(url)
assert 200 == response.status_code, message
def staff(self, url):
# check anon user cannot login
self.client.logout()
response = self.client.get(url)
assert 302 == response.status_code
assert 'accounts/login' in response['Location']
# check web user cannot login
assert self.client.login(username='web', password=TEST_PASSWORD)
assert 302 == response.status_code
assert 'accounts/login' in response['Location']
# check staff user can login
assert self.client.login(username='staff', password=TEST_PASSWORD)
response = self.client.get(url)
assert 200 == response.status_code
@pytest.fixture
def perm_check(client):
"""Check permissions on a URL.
We use a clever trick to pass parameters to the fixture. For details:
py.test: Pass a parameter to a fixture function
http://stackoverflow.com/questions/18011902/py-test-pass-a-parameter-to-a-fixture-function
"""
return PermTest(client)
def setup_users():
"""Using factories - set-up users for permissions test cases."""
UserFactory(
username='admin',
email='admin@pkimber.net',
is_staff=True,
is_superuser=True
)
UserFactory(username='staff', email='staff@pkimber.net', is_staff=True)
UserFactory(
username='web', email='web@pkimber.net',
first_name='William', last_name='Webber'
)
| # -*- encoding: utf-8 -*-
import pytest
from login.tests.factories import (
TEST_PASSWORD,
UserFactory,
)
class PermTest:
def __init__(self, client):
setup_users()
self.client = client
def anon(self, url):
self.client.logout()
response = self.client.get(url)
assert 200 == response.status_code
def staff(self, url):
# check anon user cannot login
self.client.logout()
response = self.client.get(url)
assert 302 == response.status_code
assert 'accounts/login' in response['Location']
# check web user cannot login
assert self.client.login(username='web', password=TEST_PASSWORD)
assert 302 == response.status_code
assert 'accounts/login' in response['Location']
# check staff user can login
assert self.client.login(username='staff', password=TEST_PASSWORD)
response = self.client.get(url)
assert 200 == response.status_code
@pytest.fixture
def perm_check(client):
"""Check permissions on a URL.
We use a clever trick to pass parameters to the fixture. For details:
py.test: Pass a parameter to a fixture function
http://stackoverflow.com/questions/18011902/py-test-pass-a-parameter-to-a-fixture-function
"""
return PermTest(client)
def setup_users():
"""Using factories - set-up users for permissions test cases."""
UserFactory(
username='admin',
email='admin@pkimber.net',
is_staff=True,
is_superuser=True
)
UserFactory(username='staff', email='staff@pkimber.net', is_staff=True)
UserFactory(
username='web', email='web@pkimber.net',
first_name='William', last_name='Webber'
)
| apache-2.0 | Python |
09ed0e911e530e9b907ac92f2892248b6af245fa | Add cassette and failed request as properties of thrown CannotOverwriteCassetteException | kevin1024/vcrpy,graingert/vcrpy,graingert/vcrpy,kevin1024/vcrpy | vcr/errors.py | vcr/errors.py | class CannotOverwriteExistingCassetteException(Exception):
def __init__(self, *args, **kwargs):
self.cassette = kwargs["cassette"]
self.failed_request = kwargs["failed_request"]
message = self._get_message(kwargs["cassette"], kwargs["failed_request"])
super(CannotOverwriteExistingCassetteException, self).__init__(message)
def _get_message(self, cassette, failed_request):
"""Get the final message related to the exception"""
# Get the similar requests in the cassette that
# have match the most with the request.
best_matches = cassette.find_requests_with_most_matches(failed_request)
# Build a comprehensible message to put in the exception.
best_matches_msg = ""
for best_match in best_matches:
request, _, failed_matchers_assertion_msgs = best_match
best_matches_msg += "Similar request found : (%r).\n" % request
for failed_matcher, assertion_msg in failed_matchers_assertion_msgs:
best_matches_msg += "Matcher failed : %s\n" "%s\n" % (
failed_matcher,
assertion_msg,
)
return (
"Can't overwrite existing cassette (%r) in "
"your current record mode (%r).\n"
"No match for the request (%r) was found.\n"
"%s"
% (cassette._path, cassette.record_mode, failed_request, best_matches_msg)
)
class UnhandledHTTPRequestError(KeyError):
"""Raised when a cassette does not contain the request we want."""
pass
| class CannotOverwriteExistingCassetteException(Exception):
def __init__(self, *args, **kwargs):
message = self._get_message(kwargs["cassette"], kwargs["failed_request"])
super(CannotOverwriteExistingCassetteException, self).__init__(message)
def _get_message(self, cassette, failed_request):
"""Get the final message related to the exception"""
# Get the similar requests in the cassette that
# have match the most with the request.
best_matches = cassette.find_requests_with_most_matches(failed_request)
# Build a comprehensible message to put in the exception.
best_matches_msg = ""
for best_match in best_matches:
request, _, failed_matchers_assertion_msgs = best_match
best_matches_msg += "Similar request found : (%r).\n" % request
for failed_matcher, assertion_msg in failed_matchers_assertion_msgs:
best_matches_msg += "Matcher failed : %s\n" "%s\n" % (
failed_matcher,
assertion_msg,
)
return (
"Can't overwrite existing cassette (%r) in "
"your current record mode (%r).\n"
"No match for the request (%r) was found.\n"
"%s"
% (cassette._path, cassette.record_mode, failed_request, best_matches_msg)
)
class UnhandledHTTPRequestError(KeyError):
"""Raised when a cassette does not contain the request we want."""
pass
| mit | Python |
18f24306c5201d3bf5241ad0a684e3e4016bf10f | Improve the design: machinist style FSM, another failure edge case. | w4ngyi/flocker,moypray/flocker,lukemarsden/flocker,Azulinho/flocker,LaynePeng/flocker,wallnerryan/flocker-profiles,beni55/flocker,moypray/flocker,wallnerryan/flocker-profiles,beni55/flocker,AndyHuu/flocker,agonzalezro/flocker,jml/flocker,mbrukman/flocker,runcom/flocker,runcom/flocker,AndyHuu/flocker,adamtheturtle/flocker,achanda/flocker,beni55/flocker,jml/flocker,LaynePeng/flocker,achanda/flocker,mbrukman/flocker,1d4Nf6/flocker,achanda/flocker,agonzalezro/flocker,hackday-profilers/flocker,wallnerryan/flocker-profiles,jml/flocker,w4ngyi/flocker,adamtheturtle/flocker,mbrukman/flocker,moypray/flocker,1d4Nf6/flocker,hackday-profilers/flocker,AndyHuu/flocker,1d4Nf6/flocker,agonzalezro/flocker,w4ngyi/flocker,Azulinho/flocker,runcom/flocker,lukemarsden/flocker,LaynePeng/flocker,Azulinho/flocker,adamtheturtle/flocker,hackday-profilers/flocker,lukemarsden/flocker | flocker/snapshots.py | flocker/snapshots.py | """
Snapshotting of a filesystem.
"""
from zope.interface import Interface
class IFilesystemSnapshots(Interface):
"""
Support creating and listing snapshots of a specific filesystem.
"""
def create(name):
"""
Create a snapshot of the filesystem.
@param name: The name of the snapshot.
@type name: L{bytes}
@return: L{Deferred} that fires on snapshot creation, or errbacks if
snapshotting failed. The L{Deferred} should support cancellation
if at all possible.
"""
def list():
"""
Return all the filesystem's snapshots.
@return: L{Deferred} that fires with a L{list} of L{bytes} (snapshot
names). This will likely be improved in later iterations.
"""
class ChangeSnapshotter(object):
"""
Create snapshots based on writes to a filesystem.
1. All changes to the filesystem should result in a snapshot being
created in the near future.
2. Only one snapshot should be created at a time (i.e. no parallel
snapshots).
3. Snapshots are named using the current time and the node they were
created on.
4. Snapshots are expected to run very quickly, so if a snapshot take
more than 10 seconds it should be cancelled.
This suggests the following state machine, (input, state) -> outputs, new_state:
(FILESYSTEM_CHANGE, IDLE) -> [START_SNAPSHOT], SNAPSHOTTING
(FILESYSTEM_CHANGE, SNAPSHOTTING) -> [], SNAPSHOTTING_DIRTY
(FILESYSTEM_CHANGE, SNAPSHOTTING_DIRTY) -> [], SNAPSHOTTING_DIRTY
(SNAPSHOT_SUCCESS, SNAPSHOTTING) -> IDLE
(SNAPSHOT_SUCCESS, SNAPSHOTTING_DIRTY) -> [START_SNAPSHOT], SNAPSHOTTING
(SNAPSHOT_FAILURE, SNAPSHOTTING) -> [START_SNAPSHOT], SNAPSHOTTING
(SNAPSHOT_FAILURE, SNAPSHOTTING_DIRTY) -> [START_SNAPSHOT], SNAPSHOTTING
output_START_SNAPSHOT should create the snapshot, and add a 10 second timeout to the Deferred.
(As a second pass we probably want to wait 1 second between snapshots.)
"""
def __init__(self, name, clock, fsSnapshots):
"""
@param name: The name of the current node, to be used in snapshot names.
@type name: L{bytes}
@param clock: A L{IReactorTime} provider.
@param fsSnapshots: A L{IFilesystemSnapshots} provider.
"""
| """
Snapshotting of a filesystem.
"""
from zope.interface import Interface
class IFilesystemSnapshots(Interface):
"""
Support creating and listing snapshots of a specific filesystem.
"""
def create(name):
"""
Create a snapshot of the filesystem.
@param name: The name of the snapshot.
@type name: L{bytes}
@return: L{Deferred} that fires on snapshot creation, or errbacks if
snapshotting failed.
"""
def list():
"""
Return all the filesystem's snapshots.
@return: L{Deferred} that fires with a L{list} of L{bytes} (snapshot
names). This will likely be improved in later iterations.
"""
class ChangeSnapshotter(object):
"""
Create snapshots based on writes to a filesystem.
1. All changes to the filesystem should result in a snapshot being
created in the near future.
2. Only one snapshot should be created at a time (i.e. no parallel
snapshots).
3. Snapshots are named using the current time and the node they were
created on.
This suggests the following state machine, (input, state) -> new_state:
(FILESYSTEM_CHANGE, IDLE) -> SNAPSHOTTING
(FILESYSTEM_CHANGE, SNAPSHOTTING) -> SNAPSHOTTING_DIRTY
(FILESYSTEM_CHANGE, SNAPSHOTTING_DIRTY) -> SNAPSHOTTING_DIRTY
(SNAPSHOT_SUCCESS, SNAPSHOTTING) -> IDLE
(SNAPSHOT_SUCCESS, SNAPSHOTTING_DIRTY) -> SNAPSHOTTING
(SNAPSHOT_FAILURE, SNAPSHOTTING) -> SNAPSHOTTING
(SNAPSHOT_FAILURE, SNAPSHOTTING_DIRTY) -> SNAPSHOTTING
(As a second pass we probably want to wait 1 second between snapshots.)
"""
def __init__(self, name, clock, fsSnapshots):
"""
@param name: The name of the current node, to be used in snapshot names.
@type name: L{bytes}
@param clock: A L{IReactorTime} provider.
@param fsSnapshots: A L{IFilesystemSnapshots} provider.
"""
| apache-2.0 | Python |
d8a0c5adc2b3554e085c7199a1ad1011fe237f9b | bump version to v0.5.6 | PaulSchweizer/flowpipe | flowpipe/__init__.py | flowpipe/__init__.py | """Flow-based programming with python."""
__version__ = '0.5.6'
import logging
PACKAGE = 'flowpipe'
# create logger
logger = logging.getLogger(PACKAGE)
logger.propagate = False
# create console handler and set level to debug
handler = logging.StreamHandler()
# create formatter
formatter = logging.Formatter('%(name)s %(levelname)s: %(message)s')
# add formatter to handler
handler.setFormatter(formatter)
# add handler to logger
logger.addHandler(handler)
| """Flow-based programming with python."""
__version__ = '0.5.5'
import logging
PACKAGE = 'flowpipe'
# create logger
logger = logging.getLogger(PACKAGE)
logger.propagate = False
# create console handler and set level to debug
handler = logging.StreamHandler()
# create formatter
formatter = logging.Formatter('%(name)s %(levelname)s: %(message)s')
# add formatter to handler
handler.setFormatter(formatter)
# add handler to logger
logger.addHandler(handler)
| mit | Python |
01b2307235512db3f8395797cd7c5ac8f464d23c | Update extralife crawler | datagutten/comics,jodal/comics,datagutten/comics,klette/comics,jodal/comics,klette/comics,jodal/comics,klette/comics,datagutten/comics,datagutten/comics,jodal/comics | comics/comics/extralife.py | comics/comics/extralife.py | from comics.aggregator.crawler import CrawlerBase, CrawlerResult
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = 'ExtraLife'
language = 'en'
url = 'http://www.myextralife.com/'
start_date = '2001-06-17'
rights = 'Scott Johnson'
class Crawler(CrawlerBase):
history_capable_date = '2001-06-17'
schedule = 'Mo,We,Fr'
time_zone = -7
def crawl(self, pub_date):
url = 'http://www.myextralife.com/comics/%s.jpg' % (
pub_date.strftime('%Y-%m-%d'),)
return CrawlerResult(url)
| from comics.aggregator.crawler import CrawlerBase, CrawlerResult
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = 'ExtraLife'
language = 'en'
url = 'http://www.myextralife.com/'
start_date = '2001-06-17'
rights = 'Scott Johnson'
class Crawler(CrawlerBase):
history_capable_date = '2001-06-17'
schedule = 'Mo,We,Fr'
time_zone = -7
def crawl(self, pub_date):
url = 'http://www.myextralife.com/strips/%s.jpg' % (
pub_date.strftime('%m-%d-%Y'),)
return CrawlerResult(url)
| agpl-3.0 | Python |
48f9f8b299d665b8fa96231b58c5be0809988b4e | add doc | TeamGhostBuster/restful-api | app/api/__init__.py | app/api/__init__.py | from .article import controller as article_api
from .list import controller as list_api
from .user import controller as user_api
from .group import controller as group_api
from .comment import controller as comment_api
from .article import model as article_model
from .list import model as list_model
from .user import model as user_model
from .group import model as group_model
from .comment import model as comment_model
# APIDOC Inherit Doc
"""
@apiDefine AuthorizationTokenHeader
@apiHeader {String} Access-Token Access token obtains from Oauth2 provider.
@apiHeader {String} Provider-Name Oauth2 provider name.
@apiHeaderExample {json} Header (Example):
{
"Access-Token": "12xsdklajlkadsf",
"Provider-Name": "Google"
}
"""
"""
@apiDefine UnauthorizedAccessError
@apiError UnauthorizedAccessError User's access token is not valid
@apiErrorExample Error 401
{
"msg": "Unauthorized access"
}
"""
"""
@apiDefine ListDoesNotExist
apiError ListDoesNotExist The list does not exist
@apiErrorExample Error 400
{
"msg": "List does not exist"
}
"""
"""
@apiDefine ArticleDoesNotExist
apiError ArticleDoesNotExist The article does not exist
@apiErrorExample Error 400
{
"msg": "Article does not exist"
}
"""
"""
@apiDefine InvalidObjectID
apiError ArticleDoesNotExist The article does not exist
@apiErrorExample Error 400
{
"msg": "ObjectID is not valid"
}
"""
"""
@apiDefine UserNotInGroup
apiError UserNotInGroup The user is not in any group
@apiErrorExample Error 204
{
"msg": "User is not in any group"
}
""" | from .article import controller as article_api
from .list import controller as list_api
from .user import controller as user_api
from .group import controller as group_api
from .comment import controller as comment_api
from .article import model as article_model
from .list import model as list_model
from .user import model as user_model
from .group import model as group_model
from .comment import model as comment_model
# APIDOC Inherit Doc
"""
@apiDefine AuthorizationTokenHeader
@apiHeader {String} Access-Token Access token obtains from Oauth2 provider.
@apiHeader {String} Provider-Name Oauth2 provider name.
@apiHeaderExample {json} Header (Example):
{
"Access-Token": "12xsdklajlkadsf",
"Provider-Name": "Google"
}
"""
"""
@apiDefine UnauthorizedAccessError
@apiError UnauthorizedAccessError User's access token is not valid
@apiErrorExample Error 401
{
"msg": "Unauthorized access"
}
"""
"""
@apiDefine ListDoesNotExist
apiError ListDoesNotExist The list does not exist
@apiErrorExample Error 400
{
"msg": "List does not exist"
}
"""
"""
@apiDefine ArticleDoesNotExist
apiError ArticleDoesNotExist The article does not exist
@apiErrorExample Error 400
{
"msg": "Article does not exist"
}
"""
"""
@apiDefine InvalidObjectID
apiError ArticleDoesNotExist The article does not exist
@apiErrorExample Error 400
{
"msg": "ObjectID is not valid"
}
""" | apache-2.0 | Python |
f0bd1ac1499f069e11fc773d33c82018670f91af | Fix RestFormMixin errors bug | matllubos/django-is-core,matllubos/django-is-core | is_core/form/__init__.py | is_core/form/__init__.py | from django import forms
from django.core.exceptions import ValidationError
class AllFieldsUniqueValidationModelForm(forms.ModelForm):
def validate_unique(self):
try:
self.instance.validate_unique()
except ValidationError as e:
self._update_errors(e)
class RestFormMixin(object):
def is_invalid(self):
'''
Validate input data. It uses django forms
'''
errors = {}
if not self.is_valid():
errors = dict([(k, v[0]) for k, v in self.errors.items()])
non_field_errors = self.non_field_errors()
if non_field_errors:
errors = errors['non-field-errors'] = non_field_errors
if errors:
return errors
return False
class RestModelForm(RestFormMixin, AllFieldsUniqueValidationModelForm):
pass
| from django import forms
from django.core.exceptions import ValidationError
class AllFieldsUniqueValidationModelForm(forms.ModelForm):
def validate_unique(self):
try:
self.instance.validate_unique()
except ValidationError as e:
self._update_errors(e)
class RestFormMixin(object):
def is_invalid(self):
'''
Validate input data. It uses django forms
'''
errors = {}
if not self.is_valid():
errors = dict([(k, v[0]) for k, v in self.errors.items()])
non_field_errors = self.non_field_errors()
if non_field_errors:
errors = {'non-field-errors': non_field_errors}
if errors:
return errors
return False
class RestModelForm(RestFormMixin, AllFieldsUniqueValidationModelForm):
pass
| bsd-3-clause | Python |
e6abb0a1326cc15209076883029b61e5d4fd03c3 | Add date format to queue_util | lukesanantonio/inpassing-backend,lukesanantonio/inpassing-backend | inpassing/worker/queue_util.py | inpassing/worker/queue_util.py | # Copyright (c) 2016 Luke San Antonio Bialecki
# All rights reserved.
DATE_FMT = '%Y-%m-%d'
CONSUMER_QUEUE_FMT = '{}:{}:consumer'
def consumer_queue(org_id, date):
return CONSUMER_QUEUE_FMT.format(org_id, str(date))
PRODUCER_QUEUE_FMT = '{}:{}:producer'
def producer_queue(org_id, date):
return PRODUCER_QUEUE_FMT.format(org_id, str(date))
USER_BORROW_SET_FMT = '{}:{}:borrows'
def user_borrow_set(org_id, user_id):
return USER_BORROW_SET_FMT.format(org_id, user_id)
USER_LEND_SET_FMT = '{}:{}:lends'
def user_lend_set(org_id, user_id):
return USER_LEND_SET_FMT.format(org_id, user_id)
USER_REQUEST_FMT = '{}:{}'
def user_borrow(user_id, user_token):
return USER_REQUEST_FMT.format(user_id, user_token)
USER_LEND_FMT = '{}:{}'
def user_lend(pass_id, req_token):
return USER_LEND_FMT.format(pass_id, req_token)
| # Copyright (c) 2016 Luke San Antonio Bialecki
# All rights reserved.
CONSUMER_QUEUE_FMT = '{}:{}:consumer'
def consumer_queue(org_id, date):
return CONSUMER_QUEUE_FMT.format(org_id, str(date))
PRODUCER_QUEUE_FMT = '{}:{}:producer'
def producer_queue(org_id, date):
return PRODUCER_QUEUE_FMT.format(org_id, str(date))
USER_BORROW_SET_FMT = '{}:{}:borrows'
def user_borrow_set(org_id, user_id):
return USER_BORROW_SET_FMT.format(org_id, user_id)
USER_LEND_SET_FMT = '{}:{}:lends'
def user_lend_set(org_id, user_id):
return USER_LEND_SET_FMT.format(org_id, user_id)
USER_REQUEST_FMT = '{}:{}'
def user_borrow(user_id, user_token):
return USER_REQUEST_FMT.format(user_id, user_token)
USER_LEND_FMT = '{}:{}'
def user_lend(pass_id, req_token):
return USER_LEND_FMT.format(pass_id, req_token)
| mit | Python |
0dfb00466b8a5ad38520b9346d35cec032d1d969 | Update buy.py | sukeesh/Jarvis,sukeesh/Jarvis,appi147/Jarvis,appi147/Jarvis,sukeesh/Jarvis,sukeesh/Jarvis | jarviscli/plugins/buy.py | jarviscli/plugins/buy.py | from plugin import plugin
import os
import subprocess
import sys
import webbrowser
@plugin("buy")
def buy(jarvis, s):
"""
Searches the string you provide on amazon or ebay.
Generates Url and opens browser.
Uses: a) "buy <shop> <search term>" (One line command)
b) "buy", Asks for shop,"<shop>", Asks for search term, "<search term>"
"""
# Checks if one line command
cut_string = s.split(" ")
if len(cut_string) > 1:
endURL = oneLineCommand(cut_string[0], cut_string[1:])
if len(endURL) < 1:
jarvis.say("Wrong input. Try again or try with just 'buy'.")
return None
else:
jarvis.say("Pick a site (Amazon or Ebay)")
shop_input = jarvis.input()
startingURL = shop(shop_input)
if len(startingURL) < 1:
jarvis.say("Pick between Amazon or Ebay. Please try again.")
return None
jarvis.say("What you need to buy?")
search_term = jarvis.input()
endURL = generateURL(startingURL, search_term, False)
if len(endURL) < 1:
jarvis.say("Empty search term. Please try again.")
return None
webbrowser.open(endURL)
# Check is shop is supported and creates the url for searching on that shop
def shop(shopName):
startingURL = ""
if shopName in ('amazon', 'Amazon'):
startingURL = "https://www.amazon.com/s?k="
elif shopName in ('ebay', 'Ebay', 'eBay', 'e-bay'):
startingURL = "https://www.ebay.com/sch/i.html?_nkw="
return startingURL
# Gets the first part of search url and adds the search term to generate the full url
def generateURL(startingURL, searchTerm, splitted):
if(splitted):
splittedTerm = searchTerm
else:
splittedTerm = searchTerm.split(" ")
counter = 0
for word in splittedTerm:
if len(word) > 0:
if counter == 0:
startingURL += word
counter += 1
else:
startingURL += '+' + word
counter += 1
return startingURL
# Call if one line command is uses
def oneLineCommand(shop_input, search_term):
endURL = ""
startingURL = shop(shop_input)
if len(startingURL) > 0:
endURL = generateURL(startingURL, search_term, True)
return endURL
| from plugin import plugin
import os
import subprocess
import sys
import webbrowser
@plugin("buy")
def buy(jarvis, s):
"""
Searches the string you provide on amazon or ebay.
Generates Url and opens browser.
Uses: a) "buy <shop> <search term>" (One line command)
b) "buy", Asks for shop,"<shop>", Asks for search term, "<search term>"
"""
# Checks if one line command
cut_string = s.split(" ")
if len(cut_string) > 1:
endURL = oneLineCommand(cut_string[0], cut_string[1:])
if len(endURL) < 1:
jarvis.say("Wrong input. Try again or try with just 'buy'.")
return None
else:
jarvis.say("Pick a site (Amazon or Ebay)")
shop_input = jarvis.input()
startingURL = shop(shop_input)
if len(startingURL) < 1:
jarvis.say("Pick between Amazon or Ebay. Please try again.")
return None
jarvis.say("What you need to buy?")
search_term = jarvis.input()
endURL = generateURL(startingURL, search_term, False)
if len(endURL) < 1:
jarvis.say("Empty search term. Please try again.")
return None
webbrowser.open(endURL)
#openBrowser(jarvis, endURL)
# Check is shop is supported and creates the url for searching on that shop
def shop(shopName):
startingURL = ""
if shopName in ('amazon', 'Amazon'):
startingURL = "https://www.amazon.com/s?k="
elif shopName in ('ebay', 'Ebay', 'eBay', 'e-bay'):
startingURL = "https://www.ebay.com/sch/i.html?_nkw="
return startingURL
# Gets the first part of search url and adds the search term to generate the full url
def generateURL(startingURL, searchTerm, splitted):
if(splitted):
splittedTerm = searchTerm
else:
splittedTerm = searchTerm.split(" ")
counter = 0
for word in splittedTerm:
if len(word) > 0:
if counter == 0:
startingURL += word
counter += 1
else:
startingURL += '+' + word
counter += 1
return startingURL
# Call if one line command is uses
def oneLineCommand(shop_input, search_term):
endURL = ""
startingURL = shop(shop_input)
if len(startingURL) > 0:
endURL = generateURL(startingURL, search_term, True)
return endURL
| mit | Python |
392f209791eede86d65f018a9b873b33cb7ccb02 | Fix issue with GO term (unsorted). | ArnaudBelcour/Workflow_GeneList_Analysis,ArnaudBelcour/Workflow_GeneList_Analysis | test/test_uniprot_retrieval_data.py | test/test_uniprot_retrieval_data.py | import numpy as np
import pandas as pa
import unittest
import pathway_extraction.uniprot_retrieval_data as uniprot_retrieval_data
test_data_directory_uniprot = 'test_data/' + 'test_uniprot_retrieval/'
class uniprot_retrieval_data_test(unittest.TestCase):
def test_extract_information_from_uniprot(self):
print("\nTesting uniprot retrieval data using blast result ")
df_data = pa.read_csv(test_data_directory_uniprot + 'data.tsv', sep='\t')
df_data.replace(np.nan, '', regex=True, inplace=True)
df_result = uniprot_retrieval_data.extract_information_from_uniprot(df_data)
df_result_truth = pa.read_csv(test_data_directory_uniprot + 'result.tsv', sep='\t')
np.testing.assert_array_equal(df_result['GOs'].tolist().sort(), df_result_truth['GOs'].tolist().sort())
np.testing.assert_array_equal(df_result['InterProScan'].tolist(), df_result_truth['InterProScan'].tolist())
| import numpy as np
import pandas as pa
import unittest
import pathway_extraction.uniprot_retrieval_data as uniprot_retrieval_data
test_data_directory_uniprot = 'test_data/' + 'test_uniprot_retrieval/'
class uniprot_retrieval_data_test(unittest.TestCase):
def test_extract_information_from_uniprot(self):
print("\nTesting uniprot retrieval data using blast result ")
df_data = pa.read_csv(test_data_directory_uniprot + 'data.tsv', sep='\t')
df_data.replace(np.nan, '', regex=True, inplace=True)
df_result = uniprot_retrieval_data.extract_information_from_uniprot(df_data)
df_result_truth = pa.read_csv(test_data_directory_uniprot + 'result.tsv', sep='\t')
np.testing.assert_array_equal(df_result['GOs'].tolist(), df_result_truth['GOs'].tolist())
np.testing.assert_array_equal(df_result['InterProScan'].tolist(), df_result_truth['InterProScan'].tolist())
| agpl-3.0 | Python |
e17fe26503e9a72b43c1b9b662dd4319ccff1fd7 | Use an immutable tagged version of the Docker CLI container | DigitalSlideArchive/HistomicsTK,DigitalSlideArchive/HistomicsTK | server/__init__.py | server/__init__.py | import os
from girder.utility.webroot import Webroot
from .rest_slicer_cli import genRESTEndPointsForSlicerCLIsInDocker
_template = os.path.join(
os.path.dirname(__file__),
'webroot.mako'
)
def load(info):
girderRoot = info['serverRoot']
histomicsRoot = Webroot(_template)
histomicsRoot.updateHtmlVars(girderRoot.vars)
histomicsRoot.updateHtmlVars({'title': 'HistomicsTK'})
info['serverRoot'].histomicstk = histomicsRoot
info['serverRoot'].girder = girderRoot
genRESTEndPointsForSlicerCLIsInDocker(
info, 'HistomicsTK', 'dsarchive/histomicstk:v0.1.0'
)
| import os
from girder.utility.webroot import Webroot
from .rest_slicer_cli import genRESTEndPointsForSlicerCLIsInDocker
_template = os.path.join(
os.path.dirname(__file__),
'webroot.mako'
)
def load(info):
girderRoot = info['serverRoot']
histomicsRoot = Webroot(_template)
histomicsRoot.updateHtmlVars(girderRoot.vars)
histomicsRoot.updateHtmlVars({'title': 'HistomicsTK'})
info['serverRoot'].histomicstk = histomicsRoot
info['serverRoot'].girder = girderRoot
genRESTEndPointsForSlicerCLIsInDocker(
info, 'HistomicsTK', 'dsarchive/histomicstk'
)
| apache-2.0 | Python |
9bba07c29f8cf5ecfa1f20d1fa16a16866bb3f6c | update Pendle import script for parl.2017-06-08 (closes #948) | DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,chris48s/UK-Polling-Stations,chris48s/UK-Polling-Stations | polling_stations/apps/data_collection/management/commands/import_pendle.py | polling_stations/apps/data_collection/management/commands/import_pendle.py | from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
council_id = 'E07000122'
addresses_name = 'parl.2017-06-08/Version 1/Pendle Democracy_Club__08June2017.tsv'
stations_name = 'parl.2017-06-08/Version 1/Pendle Democracy_Club__08June2017.tsv'
elections = ['parl.2017-06-08']
csv_delimiter = '\t'
| from data_collection.management.commands import BaseXpressWebLookupCsvImporter
class Command(BaseXpressWebLookupCsvImporter):
council_id = 'E07000122'
addresses_name = 'PendlePropertyPostCodePollingStationWebLookup-2017-03-01.TSV'
stations_name = 'PendlePropertyPostCodePollingStationWebLookup-2017-03-01.TSV'
elections = [
'local.lancashire.2017-05-04',
'parl.2017-06-08'
]
csv_delimiter = '\t'
| bsd-3-clause | Python |
6ff0a16e4b9f6b0e1aabfa50135f59ec6e36042c | fix os.environ | nagyistoce/edx-analytics-data-api,rue89-tech/edx-analytics-data-api,edx/edx-analytics-data-api,Stanford-Online/edx-analytics-data-api,open-craft/edx-analytics-data-api,rue89-tech/edx-analytics-data-api,rue89-tech/edx-analytics-data-api,Stanford-Online/edx-analytics-data-api,nagyistoce/edx-analytics-data-api,edx/edx-analytics-data-api,nagyistoce/edx-analytics-data-api,open-craft/edx-analytics-data-api,open-craft/edx-analytics-data-api,Stanford-Online/edx-analytics-data-api | analyticsdataserver/settings/production.py | analyticsdataserver/settings/production.py | """Production settings and globals."""
from os import environ
from base import *
import yaml
from analyticsdataserver.logsettings import get_logger_config
# Normally you should not import ANYTHING from Django directly
# into your settings, but ImproperlyConfigured is an exception.
from django.core.exceptions import ImproperlyConfigured
LOGGING = get_logger_config()
def get_env_setting(setting):
"""Get the environment setting or return exception."""
try:
return environ[setting]
except KeyError:
error_msg = "Set the %s env variable" % setting
raise ImproperlyConfigured(error_msg)
########## HOST CONFIGURATION
# See: https://docs.djangoproject.com/en/1.5/releases/1.5/#allowed-hosts-required-in-production
ALLOWED_HOSTS = ['*']
########## END HOST CONFIGURATION
CONFIG_FILE=get_env_setting('ANALYTICS_API_CFG')
with open(CONFIG_FILE) as f:
config_from_yaml = yaml.load(f)
vars().update(config_from_yaml)
DB_OVERRIDES = dict(
PASSWORD=environ.get('DB_MIGRATION_PASS', DATABASES['default']['PASS']),
ENGINE=environ.get('DB_MIGRATION_ENGINE', DATABASES['default']['ENGINE']),
USER=environ.get('DB_MIGRATION_USER', DATABASES['default']['USER']),
NAME=environ.get('DB_MIGRATION_NAME', DATABASES['default']['NAME']),
HOST=environ.get('DB_MIGRATION_HOST', DATABASES['default']['HOST']),
PORT=environ.get('DB_MIGRATION_PORT', DATABASES['default']['PORT']),
)
for override, value in DB_OVERRIDES.iteritems():
DATABASES['default'][override] = value
| """Production settings and globals."""
from os import environ
from base import *
import yaml
from analyticsdataserver.logsettings import get_logger_config
# Normally you should not import ANYTHING from Django directly
# into your settings, but ImproperlyConfigured is an exception.
from django.core.exceptions import ImproperlyConfigured
LOGGING = get_logger_config()
def get_env_setting(setting):
"""Get the environment setting or return exception."""
try:
return environ[setting]
except KeyError:
error_msg = "Set the %s env variable" % setting
raise ImproperlyConfigured(error_msg)
########## HOST CONFIGURATION
# See: https://docs.djangoproject.com/en/1.5/releases/1.5/#allowed-hosts-required-in-production
ALLOWED_HOSTS = ['*']
########## END HOST CONFIGURATION
CONFIG_FILE=get_env_setting('ANALYTICS_API_CFG')
with open(CONFIG_FILE) as f:
config_from_yaml = yaml.load(f)
vars().update(config_from_yaml)
DB_OVERRIDES = dict(
PASSWORD=os.environ.get('DB_MIGRATION_PASS', DATABASES['default']['PASS']),
ENGINE=os.environ.get('DB_MIGRATION_ENGINE', DATABASES['default']['ENGINE']),
USER=os.environ.get('DB_MIGRATION_USER', DATABASES['default']['USER']),
NAME=os.environ.get('DB_MIGRATION_NAME', DATABASES['default']['NAME']),
HOST=os.environ.get('DB_MIGRATION_HOST', DATABASES['default']['HOST']),
PORT=os.environ.get('DB_MIGRATION_PORT', DATABASES['default']['PORT']),
)
for override, value in DB_OVERRIDES.iteritems():
DATABASES['default'][override] = value
| agpl-3.0 | Python |
ce7d80de371baac92c786651706bde09ee45c96e | Handle check-ins w/o boarding group/position | DavidWittman/serverless-southwest-check-in | lambda/src/handlers/check_in.py | lambda/src/handlers/check_in.py | import logging
import sys
import swa, exceptions, mail
# Set up logging
log = logging.getLogger(__name__)
log.setLevel(logging.DEBUG)
def _generate_email_body(response):
body = "I just checked in to your flight! Please login to Southwest to view your boarding passes.\n"
for flight in response['checkInConfirmationPage']['flights']:
body += f"\n{flight['originAirportCode']} => {flight['destinationAirportCode']} (#{flight['flightNumber']})\n"
for passenger in flight['passengers']:
# Child and infant fares might check in without a boarding group/position
if 'boardingGroup' in passenger:
body += f" - {passenger['name']}: {passenger['boardingGroup']}{passenger['boardingPosition']}\n"
else:
body += f" - {passenger['name']}\n"
return body
def main(event, context):
"""
This function is triggered at check-in time and completes the check-in via
the Southwest API and emails the reservation, if requested.
"""
confirmation_number = event['confirmation_number']
email = event['email']
first_name = event['first_name']
last_name = event['last_name']
log.info("Checking in {} {} ({})".format(
first_name, last_name, confirmation_number
))
try:
resp = swa.check_in(first_name, last_name, confirmation_number)
log.info("Checked in successfully!")
log.debug("Check-in response: {}".format(resp))
except exceptions.ReservationNotFoundError:
log.error("Reservation {} not found. It may have been cancelled".format(confirmation_number))
raise
except Exception as e:
log.error("Error checking in: {}".format(e))
raise
# Send success email
# TODO(dw): This should probably be a separate task in the step function
subject = "You're checked in!"
body = "I just checked into your flight! Please login to Southwest to view your boarding passes."
try:
body = _generate_email_body(resp)
except Exception as e:
log.warning("Error parsing flight details from check-in response: {}".format(e))
try:
mail.send_ses_email(email, subject, body)
except Exception as e:
log.warning("Error sending email: {}".format(e))
# Older events use check_in_times.remaining to track remaining check-ins
# TODO(dw): Remove this when old events are deprecated
if 'remaining' in event['check_in_times'] and len(event['check_in_times']['remaining']) > 0:
return False
return True
| import logging
import sys
import swa, exceptions, mail
# Set up logging
log = logging.getLogger(__name__)
log.setLevel(logging.DEBUG)
def _generate_email_body(response):
body = "I just checked in to your flight! Please login to Southwest to view your boarding passes.\n"
for flight in response['checkInConfirmationPage']['flights']:
body += f"\n{flight['originAirportCode']} => {flight['destinationAirportCode']} (#{flight['flightNumber']})\n"
for passenger in flight['passengers']:
body += f" - {passenger['name']}: {passenger['boardingGroup']}{passenger['boardingPosition']}\n"
return body
def main(event, context):
"""
This function is triggered at check-in time and completes the check-in via
the Southwest API and emails the reservation, if requested.
"""
confirmation_number = event['confirmation_number']
email = event['email']
first_name = event['first_name']
last_name = event['last_name']
log.info("Checking in {} {} ({})".format(
first_name, last_name, confirmation_number
))
try:
resp = swa.check_in(first_name, last_name, confirmation_number)
log.info("Checked in successfully!")
log.debug("Check-in response: {}".format(resp))
except exceptions.ReservationNotFoundError:
log.error("Reservation {} not found. It may have been cancelled".format(confirmation_number))
raise
except Exception as e:
log.error("Error checking in: {}".format(e))
raise
# Send success email
# TODO(dw): This should probably be a separate task in the step function
subject = "You're checked in!"
body = "I just checked into your flight! Please login to Southwest to view your boarding passes."
try:
body = _generate_email_body(resp)
except Exception as e:
log.warning("Error parsing flight details from check-in response: {}".format(e))
try:
mail.send_ses_email(email, subject, body)
except Exception as e:
log.warning("Error sending email: {}".format(e))
# Older events use check_in_times.remaining to track remaining check-ins
# TODO(dw): Remove this when old events are deprecated
if 'remaining' in event['check_in_times'] and len(event['check_in_times']['remaining']) > 0:
return False
return True
| mit | Python |
8a20c956c02caa74dbf7c79e9e714e420479dcef | update py | suensummit/erjsTesting,suensummit/erjsTesting | web_test.py | web_test.py | from selenium import webdriver
from pandas import *
import csv
import time
# load test data
with open('web_test_funnel.csv', 'rb') as f:
reader = csv.reader(f)
testbot_raw = list(reader)
testbot = sorted(testbot_raw, key=lambda testbot_raw: testbot_raw[2])
df = DataFrame(testbot, columns = testbot[len(testbot)-1])
# set test sample ec url
url = "file:///Users/summitsuen/Documents/erjsTesting/index.html"
#url = "https://suensummit.github.io/erjsTesting/"
#
driver = {}
k = 0
ssid = df.ssid.unique()
while k < len(ssid)-1:
key = ssid[k]
value = webdriver.PhantomJS()
#value = webdriver.Firefox()
driver[key] = value
k += 1
# send actions from testbot
for k in range(len(df)-2):
driver[df.ssid[k]].get(url)
driver[df.ssid[k]].find_element_by_id('uid').send_keys(df.uid[k])
driver[df.ssid[k]].find_element_by_id('act').send_keys(df.act[k])
driver[df.ssid[k]].find_element_by_id('cat').send_keys(df.cat[k])
driver[df.ssid[k]].find_element_by_id('pid').send_keys(df.pid[k])
driver[df.ssid[k]].find_element_by_id('eruid').send_keys(df.eruid[k])
#print 'It is the ' + str(k+1) + 'th testbot\n'
#driver[df.ssid[k].find_element_by_id('sendAction').click()
# Take screenshot for verify
driver[df.ssid[k]].save_screenshot('screenshot_' + str(k+1) + '.png')
for k in range(len(ssid)-1):
driver[ssid[k]].quit
| from selenium import webdriver
from pandas import *
import csv
import time
# load test data
with open('web_test_funnel.csv', 'rb') as f:
reader = csv.reader(f)
testbot_raw = list(reader)
testbot = sorted(testbot_raw, key=lambda testbot_raw: testbot_raw[2])
df = DataFrame(testbot, columns = testbot[len(testbot)-1])
# set test sample ec url
url = "file:///Users/summitsuen/Documents/erjsTesting/index.html"
#url = "https://suensummit.github.io/erjsTesting/"
#
driver = {}
k = 0
ssid = df.ssid.unique()
while k < len(ssid)-1:
key = ssid[k]
#value = webdriver.PhantomJS()
value = webdriver.Firefox()
driver[key] = value
k += 1
# send actions from testbot
for k in range(len(df)-2):
driver[df.ssid[k]].get(url)
driver[df.ssid[k]].find_element_by_id('uid').send_keys(df.uid[k])
driver[df.ssid[k]].find_element_by_id('act').send_keys(df.act[k])
driver[df.ssid[k]].find_element_by_id('cat').send_keys(df.cat[k])
driver[df.ssid[k]].find_element_by_id('pid').send_keys(df.pid[k])
driver[df.ssid[k]].find_element_by_id('eruid').send_keys(df.eruid[k])
#print 'It is the ' + str(k+1) + 'th testbot\n'
#driver[df.ssid[k].find_element_by_id('sendAction').click()
# Take screenshot for verify
driver[df.ssid[k]].save_screenshot('screenshot_' + str(k+1) + '.png')
for k in range(len(ssid)-1):
driver[ssid[k]].quit
| apache-2.0 | Python |
2cf4d2446414795ab23a1117874085bc19cd814e | make it runnable even if locale is not set such like in docker env; may close issues#3 | ssato/python-anytemplate,ssato/python-anytemplate | anytemplate/compat.py | anytemplate/compat.py | #
# Author: Satoru SATOH <ssato redhat.com>
# License: MIT
#
# pylint: disable=invalid-name, redefined-builtin, unused-argument
"""Module to keep backward compatibilities.
"""
from __future__ import absolute_import
import codecs
import itertools
import locale
import os.path
import sys
try:
import json
except ImportError:
import simplejson as json # :throw: ImportError
IS_PYTHON_3 = sys.version_info[0] == 3
ENCODING = locale.getdefaultlocale()[1] or "UTF-8"
# Borrowed from library doc, 9.7.1 Itertools functions:
def _from_iterable(iterables):
"""
itertools.chain.from_iterable alternative.
>>> list(_from_iterable([[1, 2], [3, 4]]))
[1, 2, 3, 4]
"""
for itr in iterables:
for element in itr:
yield element
def json_load(filepath, *args, **kwargs):
"""
Alternative if anyconfig is not available.
:param filepath: JSON file path
"""
return json.load(open(filepath))
def get_file_extension(filepath):
"""
Copy if anyconfig.utils.get_file_extension is not available.
>>> get_file_extension("/a/b/c")
''
>>> get_file_extension("/a/b.txt")
'txt'
>>> get_file_extension("/a/b/c.tar.xz")
'xz'
"""
_ext = os.path.splitext(filepath)[-1]
if _ext:
return _ext[1:] if _ext.startswith('.') else _ext
else:
return ''
def merge(dic, diff):
"""
Merge mapping objects.
:param dic: Original mapping object to update with `diff`
:param diff: Diff mapping object
:return: None but `dic` will be updated
>>> dic = {}
>>> merge(dic, {'a': 1})
>>> assert 'a' in dic and dic['a'] == 1
"""
dic.update(diff)
if IS_PYTHON_3:
from_iterable = itertools.chain.from_iterable
raw_input = input
def copen(filepath, flag='r', encoding=ENCODING):
"""
>>> c = copen(__file__)
>>> c is not None
True
"""
return codecs.open(filepath, flag + 'b', encoding)
else:
try:
from_iterable = itertools.chain.from_iterable
except AttributeError:
from_iterable = _from_iterable
raw_input = raw_input
def copen(filepath, flag='r', encoding=ENCODING):
"""
>>> c = copen(__file__)
>>> c is not None
True
"""
return codecs.open(filepath, flag, encoding)
# vim:sw=4:ts=4:et:
| #
# Author: Satoru SATOH <ssato redhat.com>
# License: MIT
#
# pylint: disable=invalid-name, redefined-builtin, unused-argument
"""Module to keep backward compatibilities.
"""
from __future__ import absolute_import
import codecs
import itertools
import locale
import os.path
import sys
try:
import json
except ImportError:
import simplejson as json # :throw: ImportError
IS_PYTHON_3 = sys.version_info[0] == 3
ENCODING = locale.getdefaultlocale()[1]
# Borrowed from library doc, 9.7.1 Itertools functions:
def _from_iterable(iterables):
"""
itertools.chain.from_iterable alternative.
>>> list(_from_iterable([[1, 2], [3, 4]]))
[1, 2, 3, 4]
"""
for itr in iterables:
for element in itr:
yield element
def json_load(filepath, *args, **kwargs):
"""
Alternative if anyconfig is not available.
:param filepath: JSON file path
"""
return json.load(open(filepath))
def get_file_extension(filepath):
"""
Copy if anyconfig.utils.get_file_extension is not available.
>>> get_file_extension("/a/b/c")
''
>>> get_file_extension("/a/b.txt")
'txt'
>>> get_file_extension("/a/b/c.tar.xz")
'xz'
"""
_ext = os.path.splitext(filepath)[-1]
if _ext:
return _ext[1:] if _ext.startswith('.') else _ext
else:
return ''
def merge(dic, diff):
"""
Merge mapping objects.
:param dic: Original mapping object to update with `diff`
:param diff: Diff mapping object
:return: None but `dic` will be updated
>>> dic = {}
>>> merge(dic, {'a': 1})
>>> assert 'a' in dic and dic['a'] == 1
"""
dic.update(diff)
if IS_PYTHON_3:
from_iterable = itertools.chain.from_iterable
raw_input = input
def copen(filepath, flag='r', encoding=ENCODING):
"""
>>> c = copen(__file__)
>>> c is not None
True
"""
return codecs.open(filepath, flag + 'b', encoding)
else:
try:
from_iterable = itertools.chain.from_iterable
except AttributeError:
from_iterable = _from_iterable
raw_input = raw_input
def copen(filepath, flag='r', encoding=ENCODING):
"""
>>> c = copen(__file__)
>>> c is not None
True
"""
return codecs.open(filepath, flag, encoding)
# vim:sw=4:ts=4:et:
| mit | Python |
760ad145b380fe45d6a0350d7298a29d7c47452c | Add python3.4 compatibility to is_json | Daanvdk/is_valid | is_valid/wrapper_predicates.py | is_valid/wrapper_predicates.py | import json
def is_transformed(
transform, predicate, *args,
exceptions=[Exception], msg='data can\'t be transformed', **kwargs
):
"""
Generates a predicate that checks if the data is valid according to some
predicate after a function has been applied to the data. If this function
throws an exception the predicate will consider the data invalid.
With the ``exceptions`` parameter you can limit the exceptions that the
predicate catches. With the ``msg`` parameter you can specify what the
explanation should be when the predicate catches an exception.
The predicate that this function returns also has an optional ``include``
parameter, if you set this to ``True`` the data after the transformation
will also be returned in case the transformation was succesful.
All other arguments provided will be passed on to the transform function.
"""
def is_valid(data, explain=False, include=False):
try:
data = transform(data, *args, **kwargs)
except Exception as e:
if not any(isinstance(e, exc) for exc in exceptions):
raise e
return (
(False, msg, None) if explain else (False, None)
) if include else (
(False, msg) if explain else False
)
return ((
predicate(data, explain=True) + (data,)
) if explain else (
predicate(data), data
)) if include else predicate(data, explain=explain)
return is_valid
# Python <=3.4 compatibility
try:
jsonError = json.JSONDecodeError
except AttributeError:
jsonError = ValueError
def is_json(predicate, *args, loader=json.loads, **kwargs):
"""
Generates a predicate that checks if the data is valid according to some
predicate after it has been decoded as JSON. The predicate considers the
data invalid if it is invalid JSON.
With the ``loader`` parameter you can specify a different loader than the
default JSON loader.
All other arguments provided will be passed on to the JSON loader.
"""
return is_transformed(
loader, predicate, *args,
exceptions=[jsonError], msg='data is not valid json', **kwargs
)
| import json
def is_transformed(transform, predicate, *args, exceptions=[
Exception
], msg='data can\'t be transformed', **kwargs):
"""
Generates a predicate that checks if the data is valid according to some
predicate after a function has been applied to the data. If this function
throws an exception the predicate will consider the data invalid.
With the ``exceptions`` parameter you can limit the exceptions that the
predicate catches. With the ``msg`` parameter you can specify what the
explanation should be when the predicate catches an exception.
The predicate that this function returns also has an optional ``include``
parameter, if you set this to ``True`` the data after the transformation
will also be returned in case the transformation was succesful.
All other arguments provided will be passed on to the transform function.
"""
def is_valid(data, explain=False, include=False):
try:
data = transform(data, *args, **kwargs)
except Exception as e:
if not any(isinstance(e, exc) for exc in exceptions):
raise e
return (
(False, msg, None) if explain else (False, None)
) if include else (
(False, msg) if explain else False
)
return ((
predicate(data, explain=True) + (data,)
) if explain else (
predicate(data), data
)) if include else predicate(data, explain=explain)
return is_valid
def is_json(predicate, *args, loader=json.loads, **kwargs):
"""
Generates a predicate that checks if the data is valid according to some
predicate after it has been decoded as JSON. The predicate considers the
data invalid if it is invalid JSON.
With the ``loader`` parameter you can specify a different loader than the
default JSON loader.
All other arguments provided will be passed on to the JSON loader.
"""
return is_transformed(loader, predicate, *args, exceptions=[
json.JSONDecodeError
], msg='data is not valid json', **kwargs)
| mit | Python |
63a9d670035368e1a2e8da2a5b783b6811340575 | add actionAngleSpherical to top level | followthesheep/galpy,followthesheep/galpy,jobovy/galpy,followthesheep/galpy,followthesheep/galpy,jobovy/galpy,jobovy/galpy,jobovy/galpy | galpy/actionAngle.py | galpy/actionAngle.py | from galpy.actionAngle_src import actionAngle
from galpy.actionAngle_src import actionAngleFlat
from galpy.actionAngle_src import actionAnglePower
from galpy.actionAngle_src import actionAngleAxi
from galpy.actionAngle_src import actionAngleAdiabatic
from galpy.actionAngle_src import actionAngleAdiabaticGrid
from galpy.actionAngle_src import actionAngleStaeckel
from galpy.actionAngle_src import actionAngleStaeckelGrid
from galpy.actionAngle_src import actionAngleIsochrone
from galpy.actionAngle_src import actionAngleIsochroneApprox
from galpy.actionAngle_src import actionAngleSpherical
#
# Exceptions
#
UnboundError= actionAngle.UnboundError
#
# Functions
#
estimateDeltaStaeckel= actionAngleStaeckel.estimateDeltaStaeckel
estimateBIsochrone= actionAngleIsochroneApprox.estimateBIsochrone
#
# Classes
#
actionAngle= actionAngle.actionAngle
actionAngleFlat= actionAngleFlat.actionAngleFlat
actionAnglePower= actionAnglePower.actionAnglePower
actionAngleAxi= actionAngleAxi.actionAngleAxi
actionAngleAdiabatic= actionAngleAdiabatic.actionAngleAdiabatic
actionAngleAdiabaticGrid= actionAngleAdiabaticGrid.actionAngleAdiabaticGrid
actionAngleStaeckelSingle= actionAngleStaeckel.actionAngleStaeckelSingle
actionAngleStaeckel= actionAngleStaeckel.actionAngleStaeckel
actionAngleStaeckelGrid= actionAngleStaeckelGrid.actionAngleStaeckelGrid
actionAngleIsochrone= actionAngleIsochrone.actionAngleIsochrone
actionAngleIsochroneApprox=\
actionAngleIsochroneApprox.actionAngleIsochroneApprox
actionAngleSpherical= actionAngleSpherical.actionAngleSpherical
| from galpy.actionAngle_src import actionAngle
from galpy.actionAngle_src import actionAngleFlat
from galpy.actionAngle_src import actionAnglePower
from galpy.actionAngle_src import actionAngleAxi
from galpy.actionAngle_src import actionAngleAdiabatic
from galpy.actionAngle_src import actionAngleAdiabaticGrid
from galpy.actionAngle_src import actionAngleStaeckel
from galpy.actionAngle_src import actionAngleStaeckelGrid
from galpy.actionAngle_src import actionAngleIsochrone
from galpy.actionAngle_src import actionAngleIsochroneApprox
#
# Exceptions
#
UnboundError= actionAngle.UnboundError
#
# Functions
#
estimateDeltaStaeckel= actionAngleStaeckel.estimateDeltaStaeckel
estimateBIsochrone= actionAngleIsochroneApprox.estimateBIsochrone
#
# Classes
#
actionAngle= actionAngle.actionAngle
actionAngleFlat= actionAngleFlat.actionAngleFlat
actionAnglePower= actionAnglePower.actionAnglePower
actionAngleAxi= actionAngleAxi.actionAngleAxi
actionAngleAdiabatic= actionAngleAdiabatic.actionAngleAdiabatic
actionAngleAdiabaticGrid= actionAngleAdiabaticGrid.actionAngleAdiabaticGrid
actionAngleStaeckelSingle= actionAngleStaeckel.actionAngleStaeckelSingle
actionAngleStaeckel= actionAngleStaeckel.actionAngleStaeckel
actionAngleStaeckelGrid= actionAngleStaeckelGrid.actionAngleStaeckelGrid
actionAngleIsochrone= actionAngleIsochrone.actionAngleIsochrone
actionAngleIsochroneApprox=\
actionAngleIsochroneApprox.actionAngleIsochroneApprox
| bsd-3-clause | Python |
1e10da86d224642066e4e3390454be615c308664 | set the process group and kill children | Yelp/paasta,gstarnberger/paasta,gstarnberger/paasta,somic/paasta,somic/paasta,Yelp/paasta | paasta_tools/paasta_cli/paasta_cli.py | paasta_tools/paasta_cli/paasta_cli.py | #!/usr/bin/env python
# PYTHON_ARGCOMPLETE_OK
"""A command line tool for viewing information from the PaaSTA stack."""
import argcomplete
import argparse
import os
import signal
from paasta_tools.paasta_cli import cmds
from paasta_tools.paasta_cli.utils \
import file_names_in_dir as paasta_commands_dir, load_method
from paasta_tools.utils import configure_log
def add_subparser(command, subparsers):
"""Given a command name, paasta_cmd, execute the add_subparser method
implemented in paasta_cmd.py.
Each paasta client command must implement a method called add_subparser.
This allows the client to dynamically add subparsers to its subparser, which
provides the benefits of argcomplete/argparse but gets it done in a modular
fashion.
:param command: a simple string - e.g. 'list'
:param subparsers: an ArgumentParser object"""
module_name = 'paasta_tools.paasta_cli.cmds.%s' % command
add_subparser_fn = load_method(module_name, 'add_subparser')
add_subparser_fn(subparsers)
def parse_args():
"""Initialize autocompletion and configure the argument parser.
:return: an argparse.Namespace object mapping parameter names to the inputs
from sys.argv
"""
parser = argparse.ArgumentParser(description="Yelp PaaSTA client")
subparsers = parser.add_subparsers(help="[-h, --help] for subcommand help")
for command in sorted(paasta_commands_dir(cmds)):
add_subparser(command, subparsers)
argcomplete.autocomplete(parser)
return parser.parse_args()
def main():
"""Perform a paasta call. Read args from sys.argv and pass parsed args onto
appropriate command in paata_cli/cmds directory.
"""
os.setpgrp()
try:
configure_log()
args = parse_args()
args.command(args)
finally:
os.killpg(0, signal.SIGKILL)
if __name__ == '__main__':
main()
| #!/usr/bin/env python
# PYTHON_ARGCOMPLETE_OK
"""A command line tool for viewing information from the PaaSTA stack."""
import argcomplete
import argparse
from paasta_tools.paasta_cli import cmds
from paasta_tools.paasta_cli.utils \
import file_names_in_dir as paasta_commands_dir, load_method
from paasta_tools.utils import configure_log
def add_subparser(command, subparsers):
"""Given a command name, paasta_cmd, execute the add_subparser method
implemented in paasta_cmd.py.
Each paasta client command must implement a method called add_subparser.
This allows the client to dynamically add subparsers to its subparser, which
provides the benefits of argcomplete/argparse but gets it done in a modular
fashion.
:param command: a simple string - e.g. 'list'
:param subparsers: an ArgumentParser object"""
module_name = 'paasta_tools.paasta_cli.cmds.%s' % command
add_subparser_fn = load_method(module_name, 'add_subparser')
add_subparser_fn(subparsers)
def parse_args():
"""Initialize autocompletion and configure the argument parser.
:return: an argparse.Namespace object mapping parameter names to the inputs
from sys.argv
"""
parser = argparse.ArgumentParser(description="Yelp PaaSTA client")
subparsers = parser.add_subparsers(help="[-h, --help] for subcommand help")
for command in sorted(paasta_commands_dir(cmds)):
add_subparser(command, subparsers)
argcomplete.autocomplete(parser)
return parser.parse_args()
def main():
"""Perform a paasta call. Read args from sys.argv and pass parsed args onto
appropriate command in paata_cli/cmds directory.
"""
configure_log()
args = parse_args()
args.command(args)
if __name__ == '__main__':
main()
| apache-2.0 | Python |
4da09ae9bc9fa05545f5032c398a350798035d60 | Fix factory import | SCUEvals/scuevals-api,SCUEvals/scuevals-api | tests/fixtures/factories/student.py | tests/fixtures/factories/student.py | import factory
from datetime import timedelta, datetime, timezone
from .user import UserFactory
from scuevals_api import models
from scuevals_api.utils import datetime_from_date
class StudentFactory(UserFactory):
class Meta:
model = models.Student
sqlalchemy_session = models.db.session
graduation_year = 2020
gender = factory.Iterator(['m', 'f', 'o'])
read_access_until = datetime_from_date(datetime.now() + timedelta(days=180), tzinfo=timezone.utc)
@factory.lazy_attribute
def roles(self):
return [
models.Role.query.get(models.Role.StudentWrite),
models.Role.query.get(models.Role.StudentRead)
]
| import factory
from datetime import timedelta, datetime, timezone
from utils import datetime_from_date
from .user import UserFactory
from scuevals_api import models
class StudentFactory(UserFactory):
class Meta:
model = models.Student
sqlalchemy_session = models.db.session
graduation_year = 2020
gender = factory.Iterator(['m', 'f', 'o'])
read_access_until = datetime_from_date(datetime.now() + timedelta(days=180), tzinfo=timezone.utc)
@factory.lazy_attribute
def roles(self):
return [
models.Role.query.get(models.Role.StudentWrite),
models.Role.query.get(models.Role.StudentRead)
]
| agpl-3.0 | Python |
2333980b427c818e0db320a3a5406094a9e64183 | Test parent processes handling of KeyboardInterrupt | dbryant4/furtive | tests/test_hasher_hash_directory.py | tests/test_hasher_hash_directory.py | """ Test cases for furtive.hasher object """
import logging
import unittest
import multiprocessing
from mock import MagicMock, patch
from furtive.hasher import HashDirectory, hash_task, initializer
class TestHashDirectory(unittest.TestCase):
def test_hash_directory(self):
""" Ensure HashDirectory will correctly hash all files in a directory """
hash_directory = HashDirectory('tests/fixtures/test-data')
results = hash_directory.hash_files()
self.assertEqual(results['documents/Important Document 1.odt'], 'd460a36805fb460c038d96723f206b20')
self.assertEqual(results['documents/Important Presentation.odp'], '1911ec839cedcbf00739a7d3447ec3a3')
self.assertEqual(results['pictures/Picture #1.jpg'], '6eec850e32622c0e33bdae08ced29e24')
self.assertEqual(results['documents/exclude_me.txt'], '2e7d8cb32bb82e838506aff5600182d1')
self.assertEqual(len(results), 4)
def test_hash_directory_keyboard_interupt(self):
""" Ensure HashDirectory gracefully handles a KeyboardInterrupt """
with patch('furtive.hasher.multiprocessing.Pool') as mock_pool:
pool = MagicMock()
pool.map.side_effect = KeyboardInterrupt
mock_pool.return_value = pool
hash_directory = HashDirectory('tests/fixtures/test-data')
results = hash_directory.hash_files()
pool.terminate.assert_called_once_with()
def test_hash_task(self):
""" Ensure furtive.hasher.hash_task works as expected """
terminating = MagicMock()
terminating.is_set.return_value = False
initializer(terminating)
result = hash_task('tests/fixtures/test-data/documents/Important Document 1.odt')
self.assertEqual(result['tests/fixtures/test-data/documents/Important Document 1.odt'], 'd460a36805fb460c038d96723f206b20', msg=result)
def test_hash_task_terminates(self):
""" Ensure furtive.hasher.hash_task terminates when terminating is set """
terminating = MagicMock()
terminating.is_set.return_value = True
initializer(terminating)
result = hash_task('tests/fixtures/test-data/documents/Important Document 1.odt')
self.assertEqual(result, None, msg=result)
def test_hash_task_keyboard_interupt(self):
""" Ensure furtive.hasher.hash_task sets terminating to true during KeyboardInterrupt """
terminating = MagicMock(spec=multiprocessing.Event())
terminating.is_set.side_effect = KeyboardInterrupt
initializer(terminating)
result = hash_task('tests/fixtures/test-data/documents/Important Document 1.odt')
self.assertEqual(result, None)
terminating.set.assert_called_once_with()
if __name__ == '__main__':
unittest.main()
| """ Test cases for furtive.hasher object """
import logging
import unittest
import multiprocessing
from mock import MagicMock
from furtive.hasher import HashDirectory, hash_task, initializer
class TestHashDirectory(unittest.TestCase):
def test_hash_directory(self):
""" Ensure HashDirectory will correctly hash all files in a directory """
hash_directory = HashDirectory('tests/fixtures/test-data')
results = hash_directory.hash_files()
self.assertEqual(results['documents/Important Document 1.odt'], 'd460a36805fb460c038d96723f206b20')
self.assertEqual(results['documents/Important Presentation.odp'], '1911ec839cedcbf00739a7d3447ec3a3')
self.assertEqual(results['pictures/Picture #1.jpg'], '6eec850e32622c0e33bdae08ced29e24')
self.assertEqual(results['documents/exclude_me.txt'], '2e7d8cb32bb82e838506aff5600182d1')
self.assertEqual(len(results), 4)
def test_hash_task(self):
""" Ensure furtive.hasher.hash_task works as expected """
terminating = MagicMock()
terminating.is_set.return_value = False
initializer(terminating)
result = hash_task('tests/fixtures/test-data/documents/Important Document 1.odt')
self.assertEqual(result['tests/fixtures/test-data/documents/Important Document 1.odt'], 'd460a36805fb460c038d96723f206b20', msg=result)
def test_hash_task_terminates(self):
""" Ensure furtive.hasher.hash_task terminates when terminating is set """
terminating = MagicMock()
terminating.is_set.return_value = True
initializer(terminating)
result = hash_task('tests/fixtures/test-data/documents/Important Document 1.odt')
self.assertEqual(result, None, msg=result)
def test_hash_task_keyboard_interupt(self):
""" Ensure furtive.hasher.hash_task sets terminating to true during KeyboardInterrupt """
terminating = MagicMock(spec=multiprocessing.Event())
terminating.is_set.side_effect = KeyboardInterrupt
initializer(terminating)
result = hash_task('tests/fixtures/test-data/documents/Important Document 1.odt')
self.assertEqual(result, None)
terminating.set.assert_called_once_with()
if __name__ == '__main__':
unittest.main()
| mit | Python |
90da7aa5028d64437f3fcaf903075cbda293b575 | Fix logging in tests | genenetwork/genenetwork2,genenetwork/genenetwork2,DannyArends/genenetwork2,DannyArends/genenetwork2,zsloan/genenetwork2,pjotrp/genenetwork2,pjotrp/genenetwork2,DannyArends/genenetwork2,zsloan/genenetwork2,genenetwork/genenetwork2,pjotrp/genenetwork2,genenetwork/genenetwork2,DannyArends/genenetwork2,pjotrp/genenetwork2,zsloan/genenetwork2,pjotrp/genenetwork2,zsloan/genenetwork2,DannyArends/genenetwork2,DannyArends/genenetwork2 | test/requests/parametrized_test.py | test/requests/parametrized_test.py | import logging
import unittest
from wqflask import app
from elasticsearch import Elasticsearch, TransportError
class ParametrizedTest(unittest.TestCase):
def __init__(self, methodName='runTest', gn2_url="http://localhost:5003", es_url="localhost:9200"):
super(ParametrizedTest, self).__init__(methodName=methodName)
self.gn2_url = gn2_url
self.es_url = es_url
def setUp(self):
self.es = Elasticsearch([self.es_url])
self.es_cleanup = []
es_logger = logging.getLogger("elasticsearch")
es_logger.setLevel(app.config.get("LOG_LEVEL"))
es_logger.addHandler(
logging.FileHandler("/tmp/es_TestRegistrationInfo.log"))
es_trace_logger = logging.getLogger("elasticsearch.trace")
es_trace_logger.addHandler(
logging.FileHandler("/tmp/es_TestRegistrationTrace.log"))
def tearDown(self):
self.es.delete_by_query(
index="users"
, doc_type="local"
, body={"query":{"match":{"email_address":"test@user.com"}}})
| import logging
import unittest
from elasticsearch import Elasticsearch, TransportError
class ParametrizedTest(unittest.TestCase):
def __init__(self, methodName='runTest', gn2_url="http://localhost:5003", es_url="localhost:9200"):
super(ParametrizedTest, self).__init__(methodName=methodName)
self.gn2_url = gn2_url
self.es_url = es_url
def setUp(self):
self.es = Elasticsearch([self.es_url])
self.es_cleanup = []
es_logger = logging.getLogger("elasticsearch")
es_logger.addHandler(
logging.FileHandler("/tmp/es_TestRegistrationInfo.log"))
es_trace_logger = logging.getLogger("elasticsearch.trace")
es_trace_logger.addHandler(
logging.FileHandler("/tmp/es_TestRegistrationTrace.log"))
def tearDown(self):
self.es.delete_by_query(
index="users"
, doc_type="local"
, body={"query":{"match":{"email_address":"test@user.com"}}})
| agpl-3.0 | Python |
b60fb0db2cc1ab3605f34e9b604e920279434c36 | Disable mouse handling in vterm example. | westurner/urwid,wardi/urwid,zyga/urwid,douglas-larocca/urwid,harlowja/urwid,drestebon/urwid,hkoof/urwid,bk2204/urwid,urwid/urwid,drestebon/urwid,inducer/urwid,hkoof/urwid,hkoof/urwid,bk2204/urwid,rndusr/urwid,rndusr/urwid,zyga/urwid,westurner/urwid,inducer/urwid,rndusr/urwid,mountainstorm/urwid,foreni-packages/urwid,foreni-packages/urwid,ivanov/urwid,tonycpsu/urwid,harlowja/urwid,zyga/urwid,drestebon/urwid,tonycpsu/urwid,mountainstorm/urwid,urwid/urwid,foreni-packages/urwid,douglas-larocca/urwid,ivanov/urwid,douglas-larocca/urwid,Julian/urwid,westurner/urwid,harlowja/urwid,wardi/urwid,Julian/urwid,Julian/urwid,ivanov/urwid,mountainstorm/urwid,tonycpsu/urwid,urwid/urwid,inducer/urwid,wardi/urwid,bk2204/urwid | vterm_test.py | vterm_test.py | #!/usr/bin/python
import urwid
def main():
event_loop = urwid.SelectEventLoop()
mainframe = urwid.Frame(
urwid.Columns([
('fixed', 3, urwid.SolidFill('|')),
urwid.Pile([
('weight', 70, urwid.TerminalWidget(None, event_loop)),
('fixed', 1, urwid.Filler(urwid.Edit('focus test edit: '))),
]),
('fixed', 3, urwid.SolidFill('|')),
], box_columns=[1]),
header=urwid.Columns([
('fixed', 3, urwid.Text('.,:')),
urwid.Divider('-'),
('fixed', 3, urwid.Text(':,.')),
]),
footer=urwid.Columns([
('fixed', 3, urwid.Text('`"*')),
urwid.Divider('-'),
('fixed', 3, urwid.Text('*"\'')),
]),
)
def quit(key):
if key in ('q', 'Q'):
raise urwid.ExitMainLoop()
loop = urwid.MainLoop(
mainframe,
handle_mouse=False,
unhandled_input=quit,
event_loop=event_loop
).run()
if __name__ == '__main__':
main()
| #!/usr/bin/python
import urwid
def main():
event_loop = urwid.SelectEventLoop()
mainframe = urwid.Frame(
urwid.Columns([
('fixed', 3, urwid.SolidFill('|')),
urwid.Pile([
('weight', 70, urwid.TerminalWidget(None, event_loop)),
('fixed', 1, urwid.Filler(urwid.Edit('focus test edit: '))),
]),
('fixed', 3, urwid.SolidFill('|')),
], box_columns=[1]),
header=urwid.Columns([
('fixed', 3, urwid.Text('.,:')),
urwid.Divider('-'),
('fixed', 3, urwid.Text(':,.')),
]),
footer=urwid.Columns([
('fixed', 3, urwid.Text('`"*')),
urwid.Divider('-'),
('fixed', 3, urwid.Text('*"\'')),
]),
)
def quit(key):
if key in ('q', 'Q'):
raise urwid.ExitMainLoop()
loop = urwid.MainLoop(
mainframe,
unhandled_input=quit,
event_loop=event_loop
).run()
if __name__ == '__main__':
main()
| lgpl-2.1 | Python |
e1fc818b8d563c00c77060cd74d2781b287c0b5d | Include Plot, SPlot in xnuplot.__all__. | marktsuchida/Xnuplot | xnuplot/__init__.py | xnuplot/__init__.py | from .plot import Plot, SPlot
__all__ = ["Plot", "SPlot", "gnuplot", "numplot"]
| from .plot import Plot, SPlot
__all__ = ["gnuplot", "numplot"]
| mit | Python |
68802180ac5c7d02a20d88879b583fbcdc7f4059 | Add BinauralSepsm to __init__ | achabotl/pambox | pambox/speech/__init__.py | pambox/speech/__init__.py | """
The :mod:`pambox.speech` module gather speech intelligibility
models.
"""
from __future__ import absolute_import
from .binauralsepsm import BinauralSepsm
from .sepsm import Sepsm
from .mrsepsm import MrSepsm
from .sii import Sii
from .material import Material
from .experiment import Experiment
__all__ = [
'BinauralSepsm',
'Sepsm',
'MrSepsm',
'Sii',
'Material',
'Experiment'
]
| """
The :mod:`pambox.speech` module gather speech intelligibility
models.
"""
from __future__ import absolute_import
from .sepsm import Sepsm
from .mrsepsm import MrSepsm
from .sii import Sii
from .material import Material
from .experiment import Experiment
__all__ = [
'Sepsm',
'MrSepsm',
'Sii',
'Material',
'Experiment'
]
| bsd-3-clause | Python |
7c37d4f95897ddbc061ec0a84185a19899b85b89 | Update shebang to use /usr/bin/env. | quattor/aquilon-protocols,quattor/aquilon-protocols | compile_for_dist.py | compile_for_dist.py | #!/usr/bin/env python2.6
# ex: set expandtab softtabstop=4 shiftwidth=4: -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# Copyright (C) 2008 Morgan Stanley
#
# This module is part of Aquilon
"""Add /ms/dist to traceback of files compiled in /ms/dev."""
import sys
import py_compile
import re
def main(args=None):
"""Except for the custom dfile, this is stolen directly from py_compile.
Compile all of the given filename arguments. This custom version
replaces /ms/dev in the path with /ms/dist to match our environment usage.
"""
if args is None:
args = sys.argv[1:]
dev_re = re.compile(r'/ms/dev/(?P<meta>[^/]+)/(?P<proj>[^/]+)'
r'/(?P<release>[^/]+)/install/(?P<path>.*)')
for filename in args:
try:
m = dev_re.match(filename)
if m:
dfile = "/ms/dist/%(meta)s/PROJ/%(proj)s" \
"/%(release)s/%(path)s" % m.groupdict()
else:
dfile = filename
py_compile.compile(filename, dfile=dfile, doraise=True)
except py_compile.PyCompileError, e:
sys.stderr.write(e.msg)
if __name__ == "__main__":
main()
| #!/ms/dist/python/PROJ/core/2.5.2-1/bin/python
# ex: set expandtab softtabstop=4 shiftwidth=4: -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# Copyright (C) 2008 Morgan Stanley
#
# This module is part of Aquilon
"""Add /ms/dist to traceback of files compiled in /ms/dev."""
import sys
import py_compile
import re
def main(args=None):
"""Except for the custom dfile, this is stolen directly from py_compile.
Compile all of the given filename arguments. This custom version
replaces /ms/dev in the path with /ms/dist to match our environment usage.
"""
if args is None:
args = sys.argv[1:]
dev_re = re.compile(r'/ms/dev/(?P<meta>[^/]+)/(?P<proj>[^/]+)'
r'/(?P<release>[^/]+)/install/(?P<path>.*)')
for filename in args:
try:
m = dev_re.match(filename)
if m:
dfile = "/ms/dist/%(meta)s/PROJ/%(proj)s" \
"/%(release)s/%(path)s" % m.groupdict()
else:
dfile = filename
py_compile.compile(filename, dfile=dfile, doraise=True)
except py_compile.PyCompileError, e:
sys.stderr.write(e.msg)
if __name__ == "__main__":
main()
| apache-2.0 | Python |
0f848402257faac3a807cdfe90cc505e2ae39129 | Fix typo | OneDrive/onedrive-sdk-python | src/onedrivesdk/model/async_operation_status.py | src/onedrivesdk/model/async_operation_status.py | # -*- coding: utf-8 -*-
'''
# Copyright (c) 2015 Microsoft Corporation
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# This file was generated and any changes will be overwritten.
'''
from __future__ import unicode_literals
from ..one_drive_object_base import OneDriveObjectBase
class AsyncOperationStatus(OneDriveObjectBase):
def __init__(self, prop_dict=None):
self._prop_dict = prop_dict if prop_dict is not None else {}
@property
def operation(self):
"""Gets and sets the operation
Returns:
str:
The operation
"""
if "operation" in self._prop_dict:
return self._prop_dict["operation"]
else:
return None
@operation.setter
def operation(self, val):
self._prop_dict["operation"] = val
@property
def percentage_complete(self):
"""Gets and sets the percentageComplete
Returns:
float:
The percentageComplete
"""
if "percentageComplete" in self._prop_dict:
return self._prop_dict["percentageComplete"]
else:
return None
@percentage_complete.setter
def percentage_complete(self, val):
self._prop_dict["percentageComplete"] = val
@property
def status(self):
"""Gets and sets the status
Returns:
str:
The status
"""
if "status" in self._prop_dict:
return self._prop_dict["status"]
else:
return None
@status.setter
def status(self, val):
self._prop_dict["status"] = val
| # -*- coding: utf-8 -*-
'''
# Copyright (c) 2015 Microsoft Corporation
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# This file was generated and any changes will be overwritten.
'''
from __future__ import unicode_literals
from ..one_drive_object_base import OneDriveObjectBase
class AsyncOperationStatus(OneDriveObjectBase):
    """Status of an asynchronous OneDrive operation.

    Thin wrapper around the raw JSON property dict returned by the
    service; each snake_case property reads/writes the corresponding
    camelCase key in the dict.
    """

    def __init__(self, prop_dict=None):
        # Build a fresh dict per instance when none is given. The original
        # expression read `prop_dict if prop_dict not None else {}`, which
        # is missing `is` and does not parse.
        self._prop_dict = prop_dict if prop_dict is not None else {}

    @property
    def operation(self):
        """Gets and sets the operation

        Returns:
            str:
                The operation
        """
        if "operation" in self._prop_dict:
            return self._prop_dict["operation"]
        else:
            return None

    @operation.setter
    def operation(self, val):
        self._prop_dict["operation"] = val

    @property
    def percentage_complete(self):
        """Gets and sets the percentageComplete

        Returns:
            float:
                The percentageComplete
        """
        if "percentageComplete" in self._prop_dict:
            return self._prop_dict["percentageComplete"]
        else:
            return None

    @percentage_complete.setter
    def percentage_complete(self, val):
        self._prop_dict["percentageComplete"] = val

    @property
    def status(self):
        """Gets and sets the status

        Returns:
            str:
                The status
        """
        if "status" in self._prop_dict:
            return self._prop_dict["status"]
        else:
            return None

    @status.setter
    def status(self, val):
        self._prop_dict["status"] = val
| mit | Python |
70245c78a7f4a036b439b0dc1e784c58d468233e | Bump ptpython version | google/pigweed,google/pigweed,google/pigweed,google/pigweed,google/pigweed,google/pigweed,google/pigweed,google/pigweed,google/pigweed,google/pigweed | pw_console/py/setup.py | pw_console/py/setup.py | # Copyright 2021 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""pw_console"""
import setuptools # type: ignore
setuptools.setup(
name='pw_console',
version='0.0.1',
author='Pigweed Authors',
author_email='pigweed-developers@googlegroups.com',
description='Pigweed interactive console',
packages=setuptools.find_packages(),
package_data={
'pw_console': [
'py.typed',
'templates/keybind_list.jinja',
'templates/repl_output.jinja',
]
},
zip_safe=False,
entry_points={
'console_scripts': [
'pw-console = pw_console.__main__:main',
]
},
install_requires=[
'ipdb',
'ipython',
'jinja2',
'prompt_toolkit',
'ptpython>=3.0.18',
'pw_cli',
'pw_tokenizer',
'pygments',
'pygments-style-dracula',
'pygments-style-tomorrow',
],
)
| # Copyright 2021 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""pw_console"""
import setuptools # type: ignore
setuptools.setup(
name='pw_console',
version='0.0.1',
author='Pigweed Authors',
author_email='pigweed-developers@googlegroups.com',
description='Pigweed interactive console',
packages=setuptools.find_packages(),
package_data={
'pw_console': [
'py.typed',
'templates/keybind_list.jinja',
'templates/repl_output.jinja',
]
},
zip_safe=False,
entry_points={
'console_scripts': [
'pw-console = pw_console.__main__:main',
]
},
install_requires=[
'ipdb',
'ipython',
'jinja2',
'prompt_toolkit',
# Required features are not yet in https://pypi.org/project/ptpython/
'ptpython @ git+https://github.com/prompt-toolkit/ptpython.git@b74af76',
'pw_cli',
'pw_tokenizer',
'pygments',
'pygments-style-dracula',
'pygments-style-tomorrow',
],
)
| apache-2.0 | Python |
a86dfd5335a9dc764de4f6e75c03621373831448 | Fix ParametricAttention layer (#457) | spacy-io/thinc,explosion/thinc,explosion/thinc,explosion/thinc,spacy-io/thinc,explosion/thinc,spacy-io/thinc | thinc/layers/parametricattention.py | thinc/layers/parametricattention.py | from typing import Tuple, Callable, Optional
from ..model import Model
from ..config import registry
from ..types import Ragged
from ..util import get_width
InT = Ragged
OutT = Ragged
@registry.layers("ParametricAttention.v1")
def ParametricAttention(nO: Optional[int] = None) -> Model[InT, OutT]:
"""Weight inputs by similarity to a learned vector"""
return Model("para-attn", forward, init=init, params={"Q": None}, dims={"nO": nO})
def forward(model: Model[InT, OutT], Xr: InT, is_train: bool) -> Tuple[OutT, Callable]:
    """Weight the rows of the ragged input by softmax similarity to Q.

    Returns the attended Ragged output plus a backprop callback; the
    callback accumulates the gradient of Q on the model and returns the
    gradient with respect to the input.
    """
    Q = model.get_param("Q")
    # Per-row attention weights from the learned vector Q, then apply
    # them multiplicatively to the input rows.
    attention, bp_attention = _get_attention(model.ops, Q, Xr.dataXd, Xr.lengths)
    output, bp_output = _apply_attention(model.ops, attention, Xr.dataXd, Xr.lengths)

    def backprop(dYr: OutT) -> InT:
        dX, d_attention = bp_output(dYr.dataXd)
        dQ, dX2 = bp_attention(d_attention)
        model.inc_grad("Q", dQ.ravel())
        # Gradient flows through both the weighted output and the
        # attention weights themselves.
        dX += dX2
        return Ragged(dX, dYr.lengths)

    return Ragged(output, Xr.lengths), backprop
def init(
model: Model[InT, OutT], X: Optional[InT] = None, Y: Optional[OutT] = None
):
if X is not None:
model.set_dim("nO", get_width(X))
# Randomly initialize the parameter, as though it were an embedding.
Q = model.ops.alloc1f(model.get_dim("nO"))
Q += model.ops.xp.random.uniform(-0.1, 0.1, Q.shape)
model.set_param("Q", Q)
return model
def _get_attention(ops, Q, X, lengths):
attention = ops.gemm(X, ops.reshape2f(Q, -1, 1))
attention = ops.softmax_sequences(attention, lengths)
def get_attention_bwd(d_attention):
d_attention = ops.backprop_softmax_sequences(d_attention, attention, lengths)
dQ = ops.gemm(X, d_attention, trans1=True)
dX = ops.xp.outer(d_attention, Q)
return dQ, dX
return attention, get_attention_bwd
def _apply_attention(ops, attention, X, lengths):
output = X * attention
def apply_attention_bwd(d_output):
d_attention = (X * d_output).sum(axis=1, keepdims=True)
dX = d_output * attention
return dX, d_attention
return output, apply_attention_bwd
| from typing import Tuple, Callable, Optional
from ..model import Model
from ..config import registry
from ..types import Ragged
from ..util import get_width
InT = Ragged
OutT = Ragged
@registry.layers("ParametricAttention.v1")
def ParametricAttention(nO: Optional[int] = None) -> Model[InT, OutT]:
"""Weight inputs by similarity to a learned vector"""
return Model("para-attn", forward, init=init, params={"Q": None}, dims={"nO": nO})
def forward(model: Model[InT, OutT], Xr: InT, is_train: bool) -> Tuple[OutT, Callable]:
Q = model.get_param("Q")
attention, bp_attention = _get_attention(model.ops, Q, Xr.data, Xr.lengths)
output, bp_output = _apply_attention(model.ops, attention, Xr.data, Xr.lengths)
def backprop(dYr: OutT) -> InT:
dX, d_attention = bp_output(dYr.data)
dQ, dX2 = bp_attention(d_attention)
model.inc_grad("Q", dQ.ravel())
dX += dX2
return Ragged(dX, dYr.lengths)
return Ragged(output, Xr.lengths), backprop
def init(
model: Model[InT, OutT], X: Optional[InT] = None, Y: Optional[OutT] = None
) -> Model[InT, OutT]:
if Y is not None:
model.set_dim("nO", get_width(Y.data))
model.set_param("Q", model.ops.alloc1f(model.get_dim("nO")))
return model
def _get_attention(ops, Q, X, lengths):
attention = ops.gemm(X, ops.reshape2f(Q, -1, 1))
attention = ops.softmax_sequences(attention, lengths)
def get_attention_bwd(d_attention):
d_attention = ops.backprop_softmax_sequences(d_attention, attention, lengths)
dQ = ops.gemm(X, d_attention, trans1=True)
dX = ops.xp.outer(d_attention, Q)
return dQ, dX
return attention, get_attention_bwd
def _apply_attention(ops, attention, X, lengths):
output = X * attention
def apply_attention_bwd(d_output):
d_attention = (X * d_output).sum(axis=1, keepdims=True)
dX = d_output * attention
return dX, d_attention
return output, apply_attention_bwd
| mit | Python |
fb3cd2f1096ff8e7fa2a377c3a531f3e00168a0f | Add a 404 handler | TheScienceMuseum/nmsi-redirects | application.py | application.py | from flask import Flask
from flask import redirect
from flask import request
import csv
# example of old url
# http://collectionsonline.nmsi.ac.uk/detail.php?type=related&kv=66468&t=objects
application = Flask(__name__)
courl = 'https://collection.sciencemuseum.org.uk'
lookup = {}
# load mapping table into a dictonary keyed on kv value
application.logger.info('loading mappings');
f = open('lookup.csv', "r", encoding='latin-1')
try:
reader = csv.reader(f)
for row in reader:
lookup[row[0]] = row[1]
except OSError as err:
application.logger.debug("OS error: {0}".format(err))
except ValueError:
application.logger.debug("Could not convert data")
except:
application.logger.debug("Unexpected error:", sys.exc_info()[0])
raise
finally:
f.close()
@application.route('/')
def index():
return redirect(courl);
@application.route('/detail.php')
def match():
try:
return redirect(courl + '/oid/' + lookup[request.args.get('kv')] + "?redirect=true")
except KeyError:
application.logger.info("Could not find kv value")
return redirect(courl)
@application.errorhandler(404)
def not_found(error):
return redirect(courl)
if __name__ == '__main__':
# application.debug = True
application.run()
| from flask import Flask
from flask import redirect
from flask import request
import csv
# example of old url
# http://collectionsonline.nmsi.ac.uk/detail.php?type=related&kv=66468&t=objects
application = Flask(__name__)
courl = 'https://collection.sciencemuseum.org.uk'
lookup = {}
# load mapping table into a dictonary keyed on kv value
application.logger.info('loading mappings');
f = open('lookup.csv', "r", encoding='latin-1')
try:
reader = csv.reader(f)
for row in reader:
lookup[row[0]] = row[1]
except OSError as err:
application.logger.debug("OS error: {0}".format(err))
except ValueError:
application.logger.debug("Could not convert data")
except:
application.logger.debug("Unexpected error:", sys.exc_info()[0])
raise
finally:
f.close()
@application.route('/')
def index():
return redirect(courl);
@application.route('/detail.php')
def match():
try:
return redirect(courl + '/oid/' + lookup[request.args.get('kv')] + "?redirect=true")
except KeyError:
application.logger.info("Could not find kv value")
return redirect(courl)
if __name__ == '__main__':
# application.debug = True
application.run()
| mit | Python |
12a5b4402bbcb62291bcb5ecc6ac8817b88dd72c | Fix typos | davidgasquez/tip | tip/algorithms/sorting/mergesort.py | tip/algorithms/sorting/mergesort.py | """Merge Sort Algorithm.
The Merge Sort is a recursive sort of order n*log(n).
It is notable for having a worst case and average complexity of O(n*log(n)),
and a best case complexity of O(n) (for pre-sorted input).
The basic idea is to split the collection into smaller groups by halving it
until the groups only have one element or no elements (which are both entirely
sorted groups).Then merge the groups back together so that their elements are
in order.
This is how the algorithm gets its "divide and conquer" description.
"""
def merge(left, right):
    """Merge two sorted lists into one sorted list.

    Args:
        left: a sorted list.
        right: a sorted list.

    Returns:
        A new sorted list containing every element of both inputs.
        Unlike the previous version, the inputs are not mutated.

    Iterative rewrite of the per-element recursion, which hit Python's
    recursion limit on inputs longer than ~1000 elements and paid a
    quadratic cost for repeated ``pop(0)``; it also used the dated
    ``cond and a or b`` idiom in place of a conditional expression.
    """
    merged = []
    i = j = 0
    while i < len(left) and j < len(right):
        # Strict `<` matches the original: on ties, the element from
        # `right` is taken first.
        if left[i] < right[j]:
            merged.append(left[i])
            i += 1
        else:
            merged.append(right[j])
            j += 1
    # One of these is empty; the other holds the sorted remainder.
    merged.extend(left[i:])
    merged.extend(right[j:])
    return merged
def mergesort(unsorted_list):
    """Sort a list using top-down merge sort.

    Returns the input object itself for lists of fewer than two
    elements, otherwise a new sorted list; the input is never mutated
    (slicing copies before recursing).
    """
    if len(unsorted_list) < 2:
        return unsorted_list
    # Floor division keeps the midpoint an int directly; the original
    # used true division and converted the float back at each use.
    mid = len(unsorted_list) // 2
    left_part = mergesort(unsorted_list[:mid])
    right_part = mergesort(unsorted_list[mid:])
    return merge(left_part, right_part)
| """ Merge Sort
The Merge Sort is a recursive sort of order n*log(n).
It is notable for having a worst case and average complexity of O(n*log(n)),
and a best case complexity of O(n) (for pre-sorted input).
The basic idea is to split the collection into smaller groups by halving it
until the groups only have one element or no elements (which are both entirely
sorted groups).Then merge the groups back together so that their elements are
in order.
This is how the algorithm gets its "divide and conquer" description.
"""
def merge(left, right):
"""Merge two list keeping the elements in order."""
if len(left) * len(right) == 0:
return left + right
merge_list = (left[0] < right[0] and left or right).pop(0)
return [merge_list] + merge(left, right)
def mergesort(unsorted_list):
"""Merge Sort function."""
if len(unsorted_list) < 2:
return unsorted_list
mid = len(unsorted_list) / 2
left_part = mergesort(unsorted_list[:int(mid)])
right_part = mergesort(unsorted_list[int(mid):])
return merge(left_part, right_part)
| unlicense | Python |
10bcbc32e4a0c20d3a2ec6c85dbd7f92f5aea52d | fix fetching undelivered messages to user | tomi77/django-chat,tomi77/django-chat | chat/query.py | chat/query.py | """Message related query sets"""
from django.db.models.query import QuerySet
class MessageQuerySet(QuerySet):
    """Message query set"""

    def undelivered(self, to):
        """Fetch only undelivered messages.

        to: restrict results to this receiver, or None for all receivers.

        NOTE(review): both conditions are passed to a single filter()
        call on purpose — with Django's multi-valued relations, chaining
        two separate filter() calls can match *different* `deliveries`
        rows rather than requiring one row to satisfy both conditions.
        Presumably that is the bug this version fixes; confirm against
        the Django docs before refactoring back to a chained form.
        """
        if to is not None:
            return self.filter(deliveries__receiver=to,
                               deliveries__delivered_at__isnull=True)
        else:
            return self.filter(deliveries__delivered_at__isnull=True)
| """Message related query sets"""
from django.db.models.query import QuerySet
class MessageQuerySet(QuerySet):
"""Message query set"""
def undelivered(self, to):
"""Fetch only undelivered messages"""
queryset = self.filter(deliveries__delivered_at__isnull=True)
if to is not None:
queryset = queryset.filter(deliveries__receiver=to)
return queryset
| mit | Python |
4bd6c570eeebee87bec301140be3a4b1c8bddd19 | add AutoReload class | karldoenitz/karlooper,karldoenitz/karlooper,karldoenitz/karlooper,karldoenitz/karlooper | karlooper/autoreload/__init__.py | karlooper/autoreload/__init__.py | # -*-coding:utf-8-*-
class AutoReload(object):
    """Skeleton for an auto-reload helper; every method is still a stub."""

    def __init__(self, **kwargs):
        # Accepts arbitrary keyword options; nothing is stored yet.
        pass

    def __check(self):
        # Name-mangled private hook — presumably change detection;
        # TODO: implement (stub).
        pass

    def run(self):
        # Public entry point for the reload loop; TODO: implement (stub).
        pass
| # -*-coding:utf-8-*-
| mit | Python |
4f1b16facd4e209a2185caf6f8cfd82583abf247 | Fix NameError for class Election | shirlei/helios-server,shirlei/helios-server,shirlei/helios-server,shirlei/helios-server,shirlei/helios-server | server_ui/views.py | server_ui/views.py | """
server_ui specific views
"""
from helios.models import *
from helios_auth.security import *
from view_utils import *
import helios.views
import helios
from helios.crypto import utils as cryptoutils
from helios_auth.security import *
from helios.security import can_create_election
from django.core.urlresolvers import reverse
from django.http import HttpResponse, HttpResponseRedirect, Http404, HttpResponseNotAllowed
from django.conf import settings
import copy
import helios_auth.views as auth_views
def get_election():
return None
def home(request):
    """Render the landing page.

    Shows the featured elections plus, for the current user, up to five
    elections they administer and five they have voted in, together with
    a login box for the enabled third-party auth systems.
    """
    # load the featured elections
    featured_elections = helios.models.Election.get_featured()

    user = get_user(request)
    create_p = can_create_election(request)

    if create_p:
        elections_administered = helios.models.Election.get_by_user_as_admin(
            user, archived_p=False, limit=5)
    else:
        elections_administered = None

    if user:
        elections_voted = helios.models.Election.get_by_user_as_voter(user, limit=5)
    else:
        elections_voted = None

    # Password login has its own form, so hide it from the auth-system
    # list. Narrowed from a bare `except:`: ValueError when 'password'
    # is not enabled; AttributeError when the setting is an immutable
    # sequence (copy.copy of a tuple has no remove()).
    auth_systems = copy.copy(settings.AUTH_ENABLED_AUTH_SYSTEMS)
    try:
        auth_systems.remove('password')
    except (ValueError, AttributeError):
        pass

    login_box = auth_views.login_box_raw(request, return_url="/",
                                         auth_systems=auth_systems)

    return render_template(request, "index",
                           {'elections': featured_elections,
                            'elections_administered': elections_administered,
                            'elections_voted': elections_voted,
                            'create_p': create_p,
                            'login_box': login_box})
def about(request):
return render_template(request, "about")
def docs(request):
return render_template(request, "docs")
def faq(request):
return render_template(request, "faq")
def privacy(request):
return render_template(request, "privacy")
| """
server_ui specific views
"""
from helios.models import *
from helios_auth.security import *
from view_utils import *
import helios.views
import helios
from helios.crypto import utils as cryptoutils
from helios_auth.security import *
from helios.security import can_create_election
from django.core.urlresolvers import reverse
from django.http import HttpResponse, HttpResponseRedirect, Http404, HttpResponseNotAllowed
from django.conf import settings
import copy
import helios_auth.views as auth_views
def get_election():
return None
def home(request):
# load the featured elections
featured_elections = Election.get_featured()
user = get_user(request)
create_p = can_create_election(request)
if create_p:
elections_administered = Election.get_by_user_as_admin(user, archived_p=False, limit=5)
else:
elections_administered = None
if user:
elections_voted = Election.get_by_user_as_voter(user, limit=5)
else:
elections_voted = None
auth_systems = copy.copy(settings.AUTH_ENABLED_AUTH_SYSTEMS)
try:
auth_systems.remove('password')
except: pass
login_box = auth_views.login_box_raw(request, return_url="/", auth_systems=auth_systems)
return render_template(request, "index", {'elections': featured_elections,
'elections_administered' : elections_administered,
'elections_voted' : elections_voted,
'create_p':create_p,
'login_box' : login_box})
def about(request):
return render_template(request, "about")
def docs(request):
return render_template(request, "docs")
def faq(request):
return render_template(request, "faq")
def privacy(request):
return render_template(request, "privacy")
| apache-2.0 | Python |
4db43166543ea8bc47f7eeeb5228540c7b865e35 | define sday, year, and month for stub entries | akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem | scripts/ingestors/harry/fill_month.py | scripts/ingestors/harry/fill_month.py | import psycopg2
import datetime
import sys
from pyiem.network import Table as NetworkTable
PGCONN = psycopg2.connect(database='coop', host='iemdb')
cursor = PGCONN.cursor()
def main():
""" Go Main Go """
year = int(sys.argv[1])
month = int(sys.argv[2])
sts = datetime.datetime(year, month, 1)
ets = sts + datetime.timedelta(days=35)
ets = ets.replace(day=1)
nt = NetworkTable("IACLIMATE")
for sid in nt.sts.keys():
if sid[2] == 'C' or sid == 'IA0000':
continue
cursor.execute("""SELECT count(*) from alldata_ia where
station = %s and month = %s and year = %s""", (sid, month, year))
row = cursor.fetchone()
if row[0] == 0:
now = sts
while now < ets:
print "Adding %s %s" % (sid, now.strftime("%d %b %Y"))
cursor.execute("""INSERT into alldata_ia(station, day, year,
month, sday)
VALUES (%s, %s, %s, %s, %s)""", (sid, now, now.year,
now.month, now.strftime("%m%d")))
now += datetime.timedelta(days=1)
if __name__ == '__main__':
main()
cursor.close()
PGCONN.commit()
PGCONN.close() | import psycopg2
import datetime
import sys
from pyiem.network import Table as NetworkTable
PGCONN = psycopg2.connect(database='coop', host='iemdb')
cursor = PGCONN.cursor()
def main():
""" Go Main Go """
year = int(sys.argv[1])
month = int(sys.argv[2])
sts = datetime.datetime(year, month, 1)
ets = sts + datetime.timedelta(days=35)
ets = ets.replace(day=1)
nt = NetworkTable("IACLIMATE")
for sid in nt.sts.keys():
if sid[2] == 'C' or sid == 'IA0000':
continue
cursor.execute("""SELECT count(*) from alldata_ia where
station = %s and month = %s and year = %s""", (sid, month, year))
row = cursor.fetchone()
if row[0] == 0:
now = sts
while now < ets:
print "Adding %s %s" % (sid, now.strftime("%d %b %Y"))
cursor.execute("""INSERT into alldata_ia(station, day)
VALUES (%s, %s)""", (sid, now))
now += datetime.timedelta(days=1)
if __name__ == '__main__':
main()
cursor.close()
PGCONN.commit()
PGCONN.close() | mit | Python |
f3b0cefab30a9b9cb78c05054afa96eac5f66565 | add some prints | antocuni/pypy-wheels,antocuni/pypy-wheels | build_index.py | build_index.py | import sys
import py
class IndexBuilder(object):
def __init__(self, wheeldir, outdir):
self.wheeldir = py.path.local(wheeldir)
self.outdir = py.path.local(outdir)
self.packages = []
def copy_wheels(self):
for whl in self.wheeldir.visit('*.whl'):
print 'Collecting wheel:', whl.basename
name, version = self.parse(whl)
self.packages.append(name)
d = self.outdir.join(name).ensure(dir=True)
dst = d.join(whl.basename)
if dst.check(file=False):
whl.copy(d)
else:
print ' already exists, skipping'
def build_index(self):
print 'Building index files...'
self._write_index(self.outdir, 'PyPy Wheel Index', self.packages)
for pkg in self.packages:
d = self.outdir.join(pkg)
wheels = [whl.basename for whl in d.listdir('*.whl')]
self._write_index(d, 'Links for %s' % pkg, wheels)
print 'OK'
def parse(self, f):
name, version, _ = f.basename.split('-', 2)
return name, version
def _write_index(self, d, title, links):
lines = [
'<html><body><h1>{title}</h1>'.format(title=title)
]
for name in links:
line = '<a href="{name}">{name}</a>'.format(name=name)
lines.append(line)
lines.append('</body></html>')
html = '\n'.join(lines)
d.join('index.html').write(html)
def main():
wheeldir = sys.argv[1]
outdir = sys.argv[2]
index = IndexBuilder(wheeldir, outdir)
index.copy_wheels()
index.build_index()
if __name__ == '__main__':
main()
| import sys
import py
PACKAGES = [
'netifaces',
]
class IndexBuilder(object):
def __init__(self, wheeldir, outdir):
self.wheeldir = py.path.local(wheeldir)
self.outdir = py.path.local(outdir)
self.packages = []
def copy_wheels(self):
for whl in self.wheeldir.visit('*.whl'):
name, version = self.parse(whl)
self.packages.append(name)
d = self.outdir.join(name).ensure(dir=True)
dst = d.join(whl.basename)
if dst.check(file=False):
whl.copy(d)
def build_index(self):
self._write_index(self.outdir, 'PyPy Wheel Index', self.packages)
for pkg in self.packages:
d = self.outdir.join(pkg)
wheels = [whl.basename for whl in d.listdir('*.whl')]
self._write_index(d, 'Links for %s' % pkg, wheels)
def parse(self, f):
name, version, _ = f.basename.split('-', 2)
return name, version
def _write_index(self, d, title, links):
lines = [
'<html><body><h1>{title}</h1>'.format(title=title)
]
for name in links:
line = '<a href="{name}">{name}</a>'.format(name=name)
lines.append(line)
lines.append('</body></html>')
html = '\n'.join(lines)
d.join('index.html').write(html)
def main():
wheeldir = sys.argv[1]
outdir = sys.argv[2]
index = IndexBuilder(wheeldir, outdir)
index.copy_wheels()
index.build_index()
if __name__ == '__main__':
main()
| mit | Python |
231a58846c7c4c6fe782ac1a98f4cbf7860cdd86 | add missing import ChessPiece | ilius/chess-challenge | chess_util.py | chess_util.py | """
contains some chess-related utility functions
"""
import random
from cmd_util import input_int
from pieces import ChessPiece
def format_board(board, row_count, col_count):
    """Render a board as an ASCII grid for console display.

    board: dict mapping (row_num, col_num) -> piece symbol; empty cells
        may simply be absent from the dict.
    row_count / col_count: grid dimensions.

    Returns the grid as a single newline-joined string, with a
    horizontal separator line above and below every row.
    """
    horizontal = '-' * (4 * col_count + 1)
    rendered = [horizontal]
    for row in range(row_count):
        cells = (board.get((row, col), ' ') for col in range(col_count))
        rendered.append('| ' + ' | '.join(cells) + ' |')
        rendered.append(horizontal)
    return '\n'.join(rendered)
def make_random_board(row_count, col_count, density=0.5):
    """Create a random chess board of the given size.

    Each cell is filled independently with probability `density`; a
    filled cell receives the symbol of a piece class drawn uniformly
    from ChessPiece.class_list. Returns the board dict mapping
    (row, col) -> symbol.
    """
    piece_classes = ChessPiece.class_list
    board = {}
    for row in range(row_count):
        for col in range(col_count):
            roll = random.random() / density
            # roll < 1 happens with probability `density`; its fractional
            # position doubles as the uniform index into the class list.
            if roll < 1:
                board[(row, col)] = piece_classes[int(roll * len(piece_classes))].symbol
    return board
def test_format_random_board(density=0.5):
"""test `format_random_board` function"""
while True:
row_count = input_int('Number of rows: ', minimum=2, default=0)
if row_count == 0:
break
col_count = input_int('Number of columns: ', minimum=2)
board = make_random_board(row_count, col_count, density)
print(format_board(board, row_count, col_count))
print('\n\n')
if __name__ == '__main__':
test_format_random_board(density=0.5)
| """
contains some chess-related utility functions
"""
import random
from cmd_util import input_int
def format_board(board, row_count, col_count):
"""
convert a `board` into string than can be shown in console
board: a dict { (row_num, col_num) => piece_symbol }
row_count: number of rows
col_count: number of columns
"""
sep_line = '-' * (col_count * 4 + 1)
lines = [sep_line]
for row_num in range(row_count):
lines.append(
'| ' + ' | '.join([
board.get((row_num, col_num), ' ')
for col_num in range(col_count)
]) + ' |'
)
lines.append(sep_line)
return '\n'.join(lines)
def make_random_board(row_count, col_count, density=0.5):
"""create a random chess board with given size and density"""
board = {}
for row_num in range(row_count):
for col_num in range(col_count):
factor = random.random() / density
if factor >= 1:
continue
index = int(factor * len(ChessPiece.class_list))
board[(row_num, col_num)] = ChessPiece.class_list[index].symbol
return board
def test_format_random_board(density=0.5):
"""test `format_random_board` function"""
while True:
row_count = input_int('Number of rows: ', minimum=2, default=0)
if row_count == 0:
break
col_count = input_int('Number of columns: ', minimum=2)
board = make_random_board(row_count, col_count, density)
print(format_board(board, row_count, col_count))
print('\n\n')
if __name__ == '__main__':
test_format_random_board(density=0.5)
| mit | Python |
675ad4ce3baff98980126e67db970067bfdd73cb | Fix unicorn heroku bug | audip/doctorsfor.me,audip/doctorsfor.me | application.py | application.py | from app import app
import os

port = int(os.environ.get('PORT', 8000))

# Debug mode is opt-in via the environment so production never runs with
# the debugger enabled. The original passed an undefined `debug_flag`,
# which raised NameError whenever the script was executed directly.
debug_flag = os.environ.get('DEBUG', '').lower() in ('1', 'true', 'yes')

if __name__ == '__main__':
    app.run(host='0.0.0.0', port=port, debug=debug_flag)
| from app import app
import os
port = int(os.environ.get('PORT', 8000))
app.run(host='0.0.0.0',port=port, debug=False)
| apache-2.0 | Python |
3b67827ff5a81559e521a2ab9f837baccf67386c | Update Mpu6050.py | MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab | service/Mpu6050.py | service/Mpu6050.py | webgui = Runtime.createAndStart("WebGui","WebGui")
raspi = Runtime.createAndStart("RasPi","RasPi")
#
mpu6050 = Runtime.createAndStart("Mpu6050","Mpu6050")
mpu6050.setController(raspi,"1","0x68")
mpu6050.refresh()
print mpu6050.filtered_x_angle;
print mpu6050.filtered_y_angle;
print mpu6050.filtered_z_angle;
| # start the service
mpu6050 = Runtime.start("mpu6050","Mpu6050") | apache-2.0 | Python |
99820d14a1b083b5aee651da8773ac7b720e6aa5 | Improve JSON response handling | schwartzman/technologist,schwartzman/technologist | application.py | application.py | import hashlib
import sqlite3
from flask import Flask
from flask import Markup
from flask import g
from flask import jsonify
from flask import render_template
from random import SystemRandom
choice = SystemRandom().choice
app = Flask(__name__)
@app.context_processor
def set_buster():
with open('last-commit.txt') as f:
bust = f.readline()
return {'bust': bust}
@app.route('/')
def index():
g.curr = 'index'
return render_template('index.j2')
@app.route('/sites/')
def sites():
g.curr = 'sites'
con = sqlite3.connect('db.sqlite')
con.row_factory = sqlite3.Row
cur = con.execute('''
SELECT slug, title, ssl, link, source, blurb
FROM sites
WHERE display=1
ORDER BY sort
''')
folios = cur.fetchall()
return render_template('sites.j2', folios=folios)
@app.route('/tools/')
def tools():
g.curr = 'tool-ind'
return render_template('tools.j2')
@app.route('/tools/<tool>/')
def tool(tool):
g.curr = ' '.join(['tools', tool])
if tool in ['dicer', 'hasher']:
return render_template(tool + '.j2')
else:
return page_not_found()
@app.route('/tools/dicer/<flavor>/<int:length>')
def dicer(flavor, length):
    """Diceware-style passphrase endpoint.

    flavor selects a word list at lists/<flavor>.txt and length is the
    number of words drawn. `choice` is random.SystemRandom().choice
    (bound at module level), i.e. a cryptographically strong source.

    NOTE(review): flavor comes straight from the URL; Flask's default
    string converter excludes '/', but verify that before relying on it
    for path safety.
    """
    with open('lists/' + flavor + '.txt') as f:
        words = f.read().splitlines()
    seq = [choice(words) for i in range(length)]
    # The same phrase joined three ways: hyphenated, spaced, smooshed.
    phrases = {
        'hy': '-'.join(seq),
        'sp': ' '.join(seq),
        'so': ''.join(seq)
    }
    response = jsonify(phrases)
    # Passphrases are secrets: forbid client/proxy caching.
    response.headers['Cache-Control'] = 'no-store, no-cache'
    return response
@app.route('/tools/hasher/<path:victim>')
def hasher(victim):
    """Return md5/sha1/sha256/sha512 hex digests of the URL remainder.

    The <path:...> converter captures slashes, so the whole trailing
    path is hashed. Input is UTF-8 encoded before hashing.
    """
    venc = victim.encode('utf-8')
    hashes = {}
    for h in ['md5', 'sha1', 'sha256', 'sha512']:
        # Look the constructor up on hashlib by algorithm name.
        hashes[h] = getattr(hashlib, h)(venc).hexdigest()
    response = jsonify(hashes)
    # Treat the hashed input as sensitive: disable caching.
    response.headers['Cache-Control'] = 'no-store, no-cache'
    return response
@app.errorhandler(404)
def page_not_found(error=None):
    """Render the custom 404 page.

    Flask invokes registered error handlers with the exception instance
    as a positional argument, so the original zero-argument signature
    raised TypeError on every real 404. The default value keeps the
    direct page_not_found() call in tool() working unchanged.
    """
    g.curr = 'err'
    return render_template('base.j2',
                           e404=Markup('<img src="https://http.cat/404.jpg">')
                           ), 404
# for elastic beanstalk
application = app
| import hashlib
import json
import sqlite3
from flask import Flask
from flask import Markup
from flask import g
from flask import render_template
from random import SystemRandom
choice = SystemRandom().choice
app = Flask(__name__)
@app.context_processor
def set_buster():
with open('last-commit.txt') as f:
bust = f.readline()
return {'bust': bust}
@app.route('/')
def index():
g.curr = 'index'
return render_template('index.j2')
@app.route('/sites/')
def sites():
g.curr = 'sites'
con = sqlite3.connect('db.sqlite')
con.row_factory = sqlite3.Row
cur = con.execute('''
SELECT slug, title, ssl, link, source, blurb
FROM sites
WHERE display=1
ORDER BY sort
''')
folios = cur.fetchall()
return render_template('sites.j2', folios=folios)
@app.route('/tools/')
def tools():
g.curr = 'tool-ind'
return render_template('tools.j2')
@app.route('/tools/<tool>/')
def tool(tool):
g.curr = ' '.join(['tools', tool])
if tool in ['dicer', 'hasher']:
return render_template(tool + '.j2')
else:
return page_not_found()
@app.route('/tools/dicer/<flavor>/<int:length>')
def dicer(flavor, length):
with open('lists/' + flavor + '.txt') as f:
words = f.read().splitlines()
seq = [choice(words) for i in range(length)]
phrases = {
'hy': '-'.join(seq),
'sp': ' '.join(seq),
'so': ''.join(seq)
}
return json.dumps(phrases)
@app.route('/tools/hasher/<path:victim>')
def hasher(victim):
venc = victim.encode('utf-8')
hashes = {}
for h in ['md5', 'sha1', 'sha256', 'sha512']:
hashes[h] = getattr(hashlib, h)(venc).hexdigest()
return json.dumps(hashes)
@app.errorhandler(404)
def page_not_found():
g.curr = 'err'
return render_template('base.j2',
e404=Markup('<img src="https://http.cat/404.jpg">')
), 404
# for elastic beanstalk
application = app
| mit | Python |
3155104dc72cd4c4951395219e6aae201bca4e52 | Remove an extra line | foauth/foauth.org,foauth/foauth.org,foauth/foauth.org | services/yammer.py | services/yammer.py | import foauth.providers
class Yammer(foauth.providers.OAuth2):
    """OAuth2 provider definition for Yammer."""

    # General info about the provider
    provider_url = 'https://www.yammer.com/'
    docs_url = 'https://developer.yammer.com/api/'

    # URLs to interact with the API
    authorize_url = 'https://www.yammer.com/dialog/oauth'
    access_token_url = 'https://www.yammer.com/oauth2/access_token.json'
    api_domain = 'www.yammer.com'

    available_permissions = [
        (None, 'read and post to your stream'),
    ]

    def parse_token(self, content):
        """Flatten Yammer's nested token payload.

        Yammer returns the bearer token one level deeper than the base
        class expects ({'access_token': {'token': ...}}), so replace the
        inner dict with the plain token string before returning.
        """
        data = super(Yammer, self).parse_token(content)
        data['access_token'] = data['access_token']['token']
        return data
| import foauth.providers
class Yammer(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'https://www.yammer.com/'
docs_url = 'https://developer.yammer.com/api/'
# URLs to interact with the API
authorize_url = 'https://www.yammer.com/dialog/oauth'
access_token_url = 'https://www.yammer.com/oauth2/access_token.json'
api_domain = 'www.yammer.com'
available_permissions = [
(None, 'read and post to your stream'),
]
def parse_token(self, content):
data = super(Yammer, self).parse_token(content)
data['access_token'] = data['access_token']['token']
return data
| bsd-3-clause | Python |
8674a8ae44f285ab7ee753822d879bdffca3864d | make tagging optional | rizumu/django-paste-organizer | paste_organizer/models.py | paste_organizer/models.py | from datetime import datetime
from django.contrib.auth.models import User
from django.db import models
from django.utils.translation import ugettext_lazy as _
# django-tagging is an optional dependency: fall back gracefully when it
# is not installed (Paste grows a `tags` field only when TagField exists).
try:
    from tagging.fields import TagField
except ImportError:  # narrowed from a bare `except:`, which hid real errors
    TagField = None
class Pastebin(models.Model):
    """
    Online service where your paste is hosted
    """
    pastebin_name = models.CharField(blank=True, max_length=100)
    create_paste_url = models.URLField(blank=True, verify_exists=True)
    paste_url = models.URLField(blank=True, verify_exists=True)

    def __unicode__(self):
        return self.pastebin_name

    class Meta(object):
        verbose_name = _('pastebin')
        verbose_name_plural = _('pastebins')
        ordering = ['pastebin_name']

    def create_paste(self):
        """Return the URL used to create a new paste on this service.

        `self` was missing from the signature, so any instance call
        raised TypeError even though the body already referenced
        self.create_paste_url.
        """
        return self.create_paste_url
class Paste(models.Model):
"""
Plaintext password field could simply be filled in with a reminder of.
"""
paste_name = models.CharField(_('paste name'), max_length=100)
pastebin_type = models.ForeignKey(Pastebin)
creator = models.ForeignKey(User, related_name=_("creator"))
create_date = models.DateTimeField(_("created"), default=datetime.now)
paste_id = models.CharField(_('paste id'), max_length=25)
if TagField:
tags = TagField()
plaintext_password = models.CharField(_('plaintext password'),
max_length=100, blank =True, null =True, help_text="no encryption")
active = models.BooleanField(default=True)
public = models.BooleanField(default=True)
def __unicode__(self):
return self.paste_name
class Meta(object):
verbose_name = _('paste')
verbose_name_plural = _('pastes')
ordering=['create_date']
def url(self):
return "%s%s" % (self.pastebin_type.paste_url, self.paste_id)
| from datetime import datetime
from django.contrib.auth.models import User
from django.db import models
from django.utils.translation import ugettext_lazy as _
from tagging.fields import TagField
class Pastebin(models.Model):
"""
Online service where your paste is hosted
"""
pastebin_name = models.CharField(blank=True, max_length=100)
create_paste_url = models.URLField(blank=True, verify_exists=True)
paste_url = models.URLField(blank=True, verify_exists=True)
def __unicode__(self):
return self.pastebin_name
class Meta(object):
verbose_name = _('pastebin')
verbose_name_plural = _('pastebins')
ordering=['pastebin_name']
def create_paste():
return self.create_paste_url
class Paste(models.Model):
"""
Plaintext password field could simply be filled in with a reminder of.
"""
paste_name = models.CharField(_('paste name'), max_length=100)
pastebin_type = models.ForeignKey(Pastebin)
creator = models.ForeignKey(User, related_name=_("creator"))
create_date = models.DateTimeField(_("created"), default=datetime.now)
paste_id = models.CharField(_('paste id'), max_length=25)
tags = TagField()
plaintext_password = models.CharField(_('plaintext password'),
max_length=100, blank =True, null =True, help_text="no encryption")
active = models.BooleanField(default=True)
public = models.BooleanField(default=True)
def __unicode__(self):
return self.paste_name
class Meta(object):
verbose_name = _('paste')
verbose_name_plural = _('pastes')
ordering=['create_date']
def url(self):
return "%s%s" % (self.pastebin_type.paste_url, self.paste_id) | mit | Python |
1cbe401e62b2f5d7fee815a3516ee6ea0ecfac75 | Bump version to v0.7.0.3 | gaqzi/py-gocd-cli,gaqzi/gocd-cli | gocd_cli/__init__.py | gocd_cli/__init__.py | __import__('pkg_resources').declare_namespace(__name__)
__version__ = '0.7.0.3'
| __import__('pkg_resources').declare_namespace(__name__)
__version__ = '0.7.0.2'
| mit | Python |
b5abccb1dc733522df19d56e0906890531a700d3 | Set all fields on input object types | graphql-python/graphene,graphql-python/graphene | graphene/types/inputobjecttype.py | graphene/types/inputobjecttype.py | from collections import OrderedDict
from .base import BaseOptions, BaseType
from .inputfield import InputField
from .unmountedtype import UnmountedType
from .utils import yank_fields_from_attrs
# For static type checking with Mypy
MYPY = False
if MYPY:
from typing import Dict, Callable # NOQA
class InputObjectTypeOptions(BaseOptions):
fields = None # type: Dict[str, InputField]
create_container = None # type: Callable
class InputObjectTypeContainer(dict, BaseType):
class Meta:
abstract = True
def __init__(self, *args, **kwargs):
dict.__init__(self, *args, **kwargs)
for key in self._meta.fields.keys():
setattr(self, key, self.get(key, None))
def __init_subclass__(cls, *args, **kwargs):
pass
class InputObjectType(UnmountedType, BaseType):
'''
Input Object Type Definition
An input object defines a structured collection of fields which may be
supplied to a field argument.
Using `NonNull` will ensure that a value must be provided by the query
'''
@classmethod
def __init_subclass_with_meta__(cls, container=None, **options):
_meta = InputObjectTypeOptions(cls)
fields = OrderedDict()
for base in reversed(cls.__mro__):
fields.update(
yank_fields_from_attrs(base.__dict__, _as=InputField)
)
_meta.fields = fields
if container is None:
container = type(cls.__name__, (InputObjectTypeContainer, cls), {})
_meta.container = container
super(InputObjectType, cls).__init_subclass_with_meta__(_meta=_meta, **options)
@classmethod
def get_type(cls):
'''
This function is called when the unmounted type (InputObjectType instance)
is mounted (as a Field, InputField or Argument)
'''
return cls
| from collections import OrderedDict
from .base import BaseOptions, BaseType
from .inputfield import InputField
from .unmountedtype import UnmountedType
from .utils import yank_fields_from_attrs
# For static type checking with Mypy
MYPY = False
if MYPY:
from typing import Dict, Callable # NOQA
class InputObjectTypeOptions(BaseOptions):
fields = None # type: Dict[str, InputField]
create_container = None # type: Callable
class InputObjectTypeContainer(dict, BaseType):
class Meta:
abstract = True
def __init__(self, *args, **kwargs):
dict.__init__(self, *args, **kwargs)
for key, value in self.items():
setattr(self, key, value)
def __init_subclass__(cls, *args, **kwargs):
pass
class InputObjectType(UnmountedType, BaseType):
'''
Input Object Type Definition
An input object defines a structured collection of fields which may be
supplied to a field argument.
Using `NonNull` will ensure that a value must be provided by the query
'''
@classmethod
def __init_subclass_with_meta__(cls, container=None, **options):
_meta = InputObjectTypeOptions(cls)
fields = OrderedDict()
for base in reversed(cls.__mro__):
fields.update(
yank_fields_from_attrs(base.__dict__, _as=InputField)
)
_meta.fields = fields
if container is None:
container = type(cls.__name__, (InputObjectTypeContainer, cls), {})
_meta.container = container
super(InputObjectType, cls).__init_subclass_with_meta__(_meta=_meta, **options)
@classmethod
def get_type(cls):
'''
This function is called when the unmounted type (InputObjectType instance)
is mounted (as a Field, InputField or Argument)
'''
return cls
| mit | Python |
3ef154fe39ab68ab4b4e178edb60edddbf5c15b7 | Add (gontend-assuming) get_display_url() to Event | akx/gentry,akx/gentry,akx/gentry,akx/gentry | gore/models/event.py | gore/models/event.py | import json
from django.db import models
from django.utils import timezone
from django.utils.encoding import force_text
from django.utils.timezone import now
from gentry.utils import make_absolute_uri
def determine_type(body):
type = 'unknown'
if 'exception' in body:
type = 'exception'
if 'sentry.interfaces.Message' in body:
type = 'message' # May be overridden by loggeriness
if 'logger' in body:
type = 'log'
return type
class EventManager(models.Manager):
def create_from_raven(self, project_id, body, timestamp=None):
return self.create(
data=json.dumps(body),
event_id=body['event_id'],
message=force_text(body.get('message', ''))[:128],
culprit=force_text(body.get('culprit', ''))[:128],
level=body.get('level', ''),
project_id=project_id,
timestamp=(timestamp or now()),
type=determine_type(body),
)
class Event(models.Model):
project = models.ForeignKey('gore.Project')
event_id = models.CharField(max_length=64)
type = models.CharField(max_length=32, default='unknown')
message = models.CharField(max_length=128, blank=True)
culprit = models.CharField(max_length=128, blank=True)
level = models.CharField(max_length=32, blank=True)
date_added = models.DateTimeField(default=timezone.now, editable=False)
timestamp = models.DateTimeField(db_index=True, editable=False)
data = models.TextField(blank=True, editable=False)
objects = EventManager()
def __str__(self):
return '[%s] - %s' % (self.project, self.message)
@property
def data_dict(self):
return json.loads(self.data)
def get_display_url(self):
return make_absolute_uri('/#/event/{id}'.format(id=self.id))
| import json
from django.db import models
from django.utils import timezone
from django.utils.encoding import force_text
from django.utils.timezone import now
def determine_type(body):
type = 'unknown'
if 'exception' in body:
type = 'exception'
if 'sentry.interfaces.Message' in body:
type = 'message' # May be overridden by loggeriness
if 'logger' in body:
type = 'log'
return type
class EventManager(models.Manager):
def create_from_raven(self, project_id, body, timestamp=None):
return self.create(
data=json.dumps(body),
event_id=body['event_id'],
message=force_text(body.get('message', ''))[:128],
culprit=force_text(body.get('culprit', ''))[:128],
level=body.get('level', ''),
project_id=project_id,
timestamp=(timestamp or now()),
type=determine_type(body),
)
class Event(models.Model):
project = models.ForeignKey('gore.Project')
event_id = models.CharField(max_length=64)
type = models.CharField(max_length=32, default='unknown')
message = models.CharField(max_length=128, blank=True)
culprit = models.CharField(max_length=128, blank=True)
level = models.CharField(max_length=32, blank=True)
date_added = models.DateTimeField(default=timezone.now, editable=False)
timestamp = models.DateTimeField(db_index=True, editable=False)
data = models.TextField(blank=True, editable=False)
objects = EventManager()
def __str__(self):
return '[%s] - %s' % (self.project, self.message)
@property
def data_dict(self):
return json.loads(self.data)
| mit | Python |
683e995e8be98e0c20b3530fd6729b519ac61eae | Add help | haramaki/spark-openstack | webhookapp.py | webhookapp.py | # import Flask
from flask import Flask, request
# import custom-made modules
import sparkmessage
import argparse
import prettytable
import oscontroller
# Create an instance of Flask
app = Flask(__name__)
TOKEN = ""
CON = {}
HELP = """
help show this message
server create <name> create server
server list list active servers
flavor list list flavors
image list list image
volume list list volumes
"""
# Index page will trigger index() function
@app.route('/')
def index():
return 'Hello World'
# Webhook page will trigger webhooks() function
@app.route("/webhook", methods=['POST'])
def webhooks():
# Get the json data
json = request.json
con = oscontroller.create_connection(CON["url"], "RegionOne", CON["project"], CON["user"], CON["password"])
# parse the message id, person id, person email, and room id
message_id = json["data"]["id"]
person_id = json["data"]["personId"]
person_email = json["data"]["personEmail"]
room_id = json["data"]["roomId"]
# convert the message id into readable text
message = sparkmessage.get(TOKEN, message_id)
print(message)
# create data table
volume = prettytable.PrettyTable(['name', 'status', 'size'])
volume.add_row(['volume1', 'OK', '2'])
volume.add_row(['volume2', 'OK', '4'])
# check if the message is the command to get hosts
if message == "Hi":
sparkmessage.post(TOKEN, person_id, person_email, room_id, "Hi, How are you")
elif message == "help"
sparkmessage.post(TOKEN, person_id, person_email, room_id, HELP)
elif message == "server list":
reply_msg = oscontroller.get_server(con)
sparkmessage.post(TOKEN, person_id, person_email, room_id, reply_msg)
elif message == "volume list":
sparkmessage.post(TOKEN, person_id, person_email, room_id, volume.get_string())
return "OK"
# @app.route("/token", methods=['GET'])
# def gettoken():
# return TOKEN
# run the application
def main():
p = argparse.ArgumentParser()
p.add_argument("-token", default="")
p.add_argument("-url")
p.add_argument("-project")
p.add_argument("-user")
p.add_argument("-password")
args = p.parse_args()
CON = {"url": args.url, "project": args.project, "user": args.user, "password": args.password}
TOKEN = args.token
print (TOKEN)
app.run(host="0.0.0.0", port=8000)
if __name__ == "__main__":
main()
| # import Flask
from flask import Flask, request
# import custom-made modules
import sparkmessage
import argparse
import prettytable
import oscontroller
# Create an instance of Flask
app = Flask(__name__)
TOKEN = ""
CON = {}
# Index page will trigger index() function
@app.route('/')
def index():
return 'Hello World'
# Webhook page will trigger webhooks() function
@app.route("/webhook", methods=['POST'])
def webhooks():
# Get the json data
json = request.json
con = oscontroller.create_connection(CON["url"], "RegionOne", CON["project"], CON["user"], CON["password"])
# parse the message id, person id, person email, and room id
message_id = json["data"]["id"]
person_id = json["data"]["personId"]
person_email = json["data"]["personEmail"]
room_id = json["data"]["roomId"]
# convert the message id into readable text
message = sparkmessage.get(TOKEN, message_id)
print(message)
# create data table
volume = prettytable.PrettyTable(['name', 'status', 'size'])
volume.add_row(['volume1', 'OK', '2'])
volume.add_row(['volume2', 'OK', '4'])
# check if the message is the command to get hosts
if message == "Hi":
sparkmessage.post(TOKEN, person_id, person_email, room_id, "Hi, How are you")
if message == "server list":
reply = oscontroller.get_server(con)
sparkmessage.post(TOKEN, person_id, person_email, room_id, reply)
if message == "volume list":
sparkmessage.post(TOKEN, person_id, person_email, room_id, volume.get_string())
return "OK"
# @app.route("/token", methods=['GET'])
# def gettoken():
# return TOKEN
# run the application
if __name__ == "__main__":
p = argparse.ArgumentParser()
p.add_argument("-token", default="")
p.add_argument("-url")
p.add_argument("-project")
p.add_argument("-user")
p.add_argument("-password")
args = p.parse_args()
CON = {"url": args.url, "project": args.project, "user": args.user, "password": args.password}
TOKEN = args.token
print (TOKEN)
app.run(host="0.0.0.0", port=8000)
| apache-2.0 | Python |
d60aa60c38d16ae745e336a7cd016bb2fd044b4a | test if the working dir exists | laurentb/assnet,laurentb/assnet | ass2m/ass2m.py | ass2m/ass2m.py | # -*- coding: utf-8 -*-
# Copyright(C) 2011 Romain Bignon, Laurent Bachelier
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import os
from storage import Storage
class NotWorkingDir(Exception): pass
class Ass2m(object):
DIRNAME = '.ass2m'
def __init__(self, path):
if isinstance(path, Storage):
storage = path
elif not path:
raise NotWorkingDir()
else:
try:
while not self.DIRNAME in os.listdir(path) and path != os.path.dirname(path):
path = os.path.dirname(path)
except OSError:
raise NotWorkingDir()
if path == os.path.dirname(path):
raise NotWorkingDir()
storage = Storage(os.path.join(path, self.DIRNAME))
self.storage = storage
self.root = os.path.realpath(os.path.join(storage.path, os.path.pardir))
@classmethod
def create(cls, path):
return cls(Storage.init(os.path.join(path, cls.DIRNAME)))
| # -*- coding: utf-8 -*-
# Copyright(C) 2011 Romain Bignon, Laurent Bachelier
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import os
from storage import Storage
class NotWorkingDir(Exception): pass
class Ass2m(object):
DIRNAME = '.ass2m'
def __init__(self, path):
if isinstance(path, Storage):
storage = path
elif not path:
raise NotWorkingDir()
else:
while not self.DIRNAME in os.listdir(path) and path != os.path.dirname(path):
path = os.path.dirname(path)
if path == os.path.dirname(path):
raise NotWorkingDir()
storage = Storage(os.path.join(path, self.DIRNAME))
self.storage = storage
self.root = os.path.realpath(os.path.join(storage.path, os.path.pardir))
@classmethod
def create(cls, path):
return cls(Storage.init(os.path.join(path, cls.DIRNAME)))
| agpl-3.0 | Python |
9fb7de1bd5e0a697f44008c324ecacf283f8e4eb | Fix for time format | macedot/scrapyGranja,macedot/scrapyGranja | granjaRaces/items.py | granjaRaces/items.py | # -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
from scrapy.loader.processors import MapCompose, TakeFirst
def intCheckDQ(str):
if 'DQ' in str:
return 99
return int(str)
def strTimeToFloat(str):
str = str.replace(',', '.') # pt_BR time format
if '.' not in str:
return 999999
if ':' in str:
# 18:21.801
retValue = 0
valTime = str.split(':')
size = len(valTime)
for i in range(0, size):
retValue = retValue + pow(60, i) * float(valTime[size - i - 1])
else:
retValue = float(str)
return float("{0:.3f}".format(retValue))
class GranjaRacesItem(scrapy.Item):
# define the fields for your item here like:
# name = scrapy.Field()
# pass
# class RaceEntry(scrapy.Item):
# [u'POS', u'NO.', u'NOME', u'CLASSE', u'COMENT\xc1RIOS', u'PONTOS', u'VOLTAS', u'TOTAL TEMPO', u'MELHOR TEMPO', u'DIFF', u'ESPA\xc7O']
id = scrapy.Field(
input_processor=MapCompose(int),
output_processor=TakeFirst(),
)
raceId = scrapy.Field(
input_processor=MapCompose(int),
output_processor=TakeFirst(),
)
trackConfig = scrapy.Field(
input_processor=MapCompose(str),
output_processor=TakeFirst(),
)
racePosition = scrapy.Field(
input_processor=MapCompose(intCheckDQ),
output_processor=TakeFirst(),
)
kartNumber = scrapy.Field(
input_processor=MapCompose(int),
output_processor=TakeFirst(),
)
driverName = scrapy.Field(
input_processor=MapCompose(unicode.strip),
output_processor=TakeFirst(),
)
driverClass = scrapy.Field(
input_processor=MapCompose(str),
output_processor=TakeFirst(),
)
# comments = scrapy.Field()
# points = scrapy.Field()
numOfLaps = scrapy.Field(
input_processor=MapCompose(int),
output_processor=TakeFirst(),
)
raceTime = scrapy.Field(
input_processor=MapCompose(strTimeToFloat),
output_processor=TakeFirst(),
)
bestLapTime = scrapy.Field(
input_processor=MapCompose(strTimeToFloat),
output_processor=TakeFirst(),
)
# diffToLeader = scrapy.Field()
# diffToPrevious = scrapy.Field()
| # -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
from scrapy.loader.processors import MapCompose, TakeFirst
def intCheckDQ(str):
if 'DQ' in str:
return 99
return int(str)
def strTimeToFloat(str):
retValue = 0
str = str.replace(',', '.') # pt_BR time format
if '.' not in str:
return 999999
if ':' in str:
# 18:21.801
valTime = str.split(':')
sizeTime = len(valTime)
for i in range(0, sizeTime - 1):
retValue = retValue + pow(60, i) * float(valTime[i])
else:
retValue = float(str)
return float("{0:.3f}".format(retValue))
class GranjaRacesItem(scrapy.Item):
# define the fields for your item here like:
# name = scrapy.Field()
# pass
# class RaceEntry(scrapy.Item):
# [u'POS', u'NO.', u'NOME', u'CLASSE', u'COMENT\xc1RIOS', u'PONTOS', u'VOLTAS', u'TOTAL TEMPO', u'MELHOR TEMPO', u'DIFF', u'ESPA\xc7O']
id = scrapy.Field(
input_processor=MapCompose(int),
output_processor=TakeFirst(),
)
raceId = scrapy.Field(
input_processor=MapCompose(int),
output_processor=TakeFirst(),
)
trackConfig = scrapy.Field(
input_processor=MapCompose(str),
output_processor=TakeFirst(),
)
racePosition = scrapy.Field(
input_processor=MapCompose(intCheckDQ),
output_processor=TakeFirst(),
)
kartNumber = scrapy.Field(
input_processor=MapCompose(int),
output_processor=TakeFirst(),
)
driverName = scrapy.Field(
input_processor=MapCompose(unicode.strip),
output_processor=TakeFirst(),
)
driverClass = scrapy.Field(
input_processor=MapCompose(str),
output_processor=TakeFirst(),
)
# comments = scrapy.Field()
# points = scrapy.Field()
numOfLaps = scrapy.Field(
input_processor=MapCompose(int),
output_processor=TakeFirst(),
)
raceTime = scrapy.Field(
input_processor=MapCompose(strTimeToFloat),
output_processor=TakeFirst(),
)
bestLapTime = scrapy.Field(
input_processor=MapCompose(strTimeToFloat),
output_processor=TakeFirst(),
)
# diffToLeader = scrapy.Field()
# diffToPrevious = scrapy.Field()
| agpl-3.0 | Python |
acb512a4a74f118c5e09ddb508549aed896ae1dd | Fix dynamic split | viewflow/viewflow,pombredanne/viewflow,codingjoe/viewflow,pombredanne/viewflow,ribeiro-ucl/viewflow,viewflow/viewflow,codingjoe/viewflow,viewflow/viewflow,ribeiro-ucl/viewflow,codingjoe/viewflow,ribeiro-ucl/viewflow | tests/examples/customnode/views.py | tests/examples/customnode/views.py | from django.views import generic
from django.http import HttpResponseRedirect
from viewflow.views import task
from . import models
class DecisionView(task.TaskViewMixin, generic.CreateView):
model = models.Decision
fields = ['decision']
def form_valid(self, form):
self.object = form.save(commit=False)
self.object.user = self.request.user
self.object.process = self.activation.process
self.object.save()
self.activation.done()
self.message_complete()
return HttpResponseRedirect(self.get_success_url())
| from django.views import generic
from django.http import HttpResponseRedirect
from viewflow.views import task
from . import models
class DecisionView(task.TaskViewMixin, generic.CreateView):
model = models.Decision
fields = ['decision']
def form_valid(self, form):
self.object = form.save(commit=False)
self.object.user = self.request.user
self.object.process = self.activation.process
self.object.save()
self.activation.done()
return HttpResponseRedirect(self.get_success_url())
| agpl-3.0 | Python |
6aa8db30afba817ff9b5653480d6f735f09d9c3a | Add players, match_valid to Ladder | massgo/mgaladder,hndrewaall/mgaladder,massgo/mgaladder,massgo/mgaladder,hndrewaall/mgaladder,hndrewaall/mgaladder | ladder.py | ladder.py | #! /usr/bin/env python3
class Player:
def __init__(self, name, rank):
self.name = name
self.rank = rank
def __repr__(self):
return '<{:s}(name={:s}, rank={:d})>'.format(self.__class__.__name__, self.name, self.rank)
def __str__(self):
rank_str = ''
if self.rank < 0:
rank_str = '{:d}K'.format(-self.rank)
else:
rank_str = '{:d}D'.format(self.rank)
return '{:s} {:s}'.format(self.name, rank_str)
class Ladder:
def __init__(self, standings):
self.standings = standings
def __str__(self):
the_string = 'Ladder standings:'
position = 1
for player in self.standings:
the_string += '\n {:d}. {:s}'.format(position, str(player))
position += 1
return the_string
def players(self):
return set(self.standings)
def match_valid(self, player_one, player_two):
if not {player_one, player_two} < self.players():
return False
return True
if __name__ == '__main__':
ladder = Ladder([Player('Andrew', -1), Player('Walther', 5), Player('Milan', -6)])
print(ladder)
| #! /usr/bin/env python3
class Player:
def __init__(self, name, rank):
self.name = name
self.rank = rank
def __repr__(self):
return '<{:s}(name={:s}, rank={:d})>'.format(self.__class__.__name__, self.name, self.rank)
def __str__(self):
rank_str = ''
if self.rank < 0:
rank_str = '{:d}K'.format(-self.rank)
else:
rank_str = '{:d}D'.format(self.rank)
return '{:s} {:s}'.format(self.name, rank_str)
class Ladder:
def __init__(self, standings):
self.standings = standings
def __str__(self):
the_string = 'Ladder standings:'
position = 1
for player in self.standings:
the_string += '\n {:d}. {:s}'.format(position, str(player))
position += 1
return the_string
if __name__ == '__main__':
ladder = Ladder([Player('Andrew', -1), Player('Walther', 5), Player('Milan', -6)])
print(ladder)
| agpl-3.0 | Python |
51fca346cceffb3a67bacbbec04166f49c6448a0 | Update get_arxiv_corpus.py | fajifr/recontent,fajifr/recontent,fajifr/recontent | tools/gensimple/get_arxiv_corpus.py | tools/gensimple/get_arxiv_corpus.py | # Freija Descamps <freija@gmail.com> July 2016
# modified to download arxiv corpus
import os
import wget
URL = 'https://dl.dropboxusercontent.com/u/99220436/recontent-data/arx/'
CORPUS_NAME = 'arx'
MMFILE = CORPUS_NAME + '.mm'
DICTFILE = CORPUS_NAME + '_wordids.txt'
SIMMATRIX = CORPUS_NAME + '-lsi.index'
LSIMODEL = CORPUS_NAME + '.lsi_model'
LSIPROJ = CORPUS_NAME + '.lsi_model.projection'
ARTICLEDICT = CORPUS_NAME + '_adict.json'
NPYINDEX = CORPUS_NAME + '-lsi.index.index.npy'
DEFAULT_DICT_SIZE = 100000
def main(argv=None):
# first clean up
try:
os.remove(MMFILE)
os.remove(DICTFILE)
os.remove(SIMMATRIX)
os.remove(LSIMODEL)
os.remove(ARTICLEDICT)
os.remove(LSIPROJ)
os.remove(NPYINDEX)
except OSError:
pass
wget.download(URL + MMFILE)
wget.download(URL + DICTFILE)
wget.download(URL + SIMMATRIX)
wget.download(URL + LSIMODEL)
wget.download(URL + ARTICLEDICT)
wget.download(URL + LSIPROJ)
wget.download(URL + NPYINDEX)
if __name__ == "__main__":
main()
| # Freija Descamps <freija@gmail.com> July 2016
# modified to download arxiv corpus
import os
import wget
URL = 'https://dl.dropboxusercontent.com/u/99220436/recontent-data/arXiv1314/'
CORPUS_NAME = 'arx'
MMFILE = CORPUS_NAME + '.mm'
DICTFILE = CORPUS_NAME + '_wordids.txt'
SIMMATRIX = CORPUS_NAME + '-lsi.index'
LSIMODEL = CORPUS_NAME + '.lsi_model'
LSIPROJ = CORPUS_NAME + '.lsi_model.projection'
ARTICLEDICT = CORPUS_NAME + '_adict.json'
NPYINDEX = CORPUS_NAME + '-lsi.index.index.npy'
DEFAULT_DICT_SIZE = 100000
def main(argv=None):
# first clean up
try:
os.remove(MMFILE)
os.remove(DICTFILE)
os.remove(SIMMATRIX)
os.remove(LSIMODEL)
os.remove(ARTICLEDICT)
os.remove(LSIPROJ)
os.remove(NPYINDEX)
except OSError:
pass
wget.download(URL + MMFILE)
wget.download(URL + DICTFILE)
wget.download(URL + SIMMATRIX)
wget.download(URL + LSIMODEL)
wget.download(URL + ARTICLEDICT)
wget.download(URL + LSIPROJ)
wget.download(URL + NPYINDEX)
if __name__ == "__main__":
main() | mit | Python |
c10b7cb1e8d284f3d09559167751cd43c51d83dd | revert accidental changes to PRESUBMIT.py | hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,jaruba/chromium.src,hgl888/chromium-crosswalk,Pluto-tv/chromium-crosswalk,ltilve/chromium,M4sse/chromium.src,axinging/chromium-crosswalk,Chilledheart/chromium,chuan9/chromium-crosswalk,M4sse/chromium.src,dednal/chromium.src,Jonekee/chromium.src,fujunwei/chromium-crosswalk,Fireblend/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,dednal/chromium.src,chuan9/chromium-crosswalk,dednal/chromium.src,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,Jonekee/chromium.src,PeterWangIntel/chromium-crosswalk,Fireblend/chromium-crosswalk,Pluto-tv/chromium-crosswalk,markYoungH/chromium.src,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,axinging/chromium-crosswalk,ltilve/chromium,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,Chilledheart/chromium,dednal/chromium.src,markYoungH/chromium.src,Chilledheart/chromium,mohamed--abdel-maksoud/chromium.src,jaruba/chromium.src,markYoungH/chromium.src,dushu1203/chromium.src,mohamed--abdel-maksoud/chromium.src,TheTypoMaster/chromium-crosswalk,dushu1203/chromium.src,Fireblend/chromium-crosswalk,ltilve/chromium,markYoungH/chromium.src,Jonekee/chromium.src,PeterWangIntel/chromium-crosswalk,M4sse/chromium.src,hgl888/chromium-crosswalk,markYoungH/chromium.src,ltilve/chromium,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,dednal/chromium.src,fujunwei/chromium-crosswalk,jaruba/chromium.src,crosswalk-project/chromium-crosswalk-efl,Just-D/chromium-1,krieger-od/nwjs_chromium.src,M4sse/chromium.src,krieger-od/nwjs_chromium.src,jaruba/chromium.src,M4sse/chromium.src,markYoungH/chromium.src,dushu1203/chromium.src,PeterWangIntel/chromium-crosswalk,Fireblend/chromium-crosswalk,krieger-o
d/nwjs_chromium.src,Jonekee/chromium.src,crosswalk-project/chromium-crosswalk-efl,M4sse/chromium.src,fujunwei/chromium-crosswalk,ltilve/chromium,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,krieger-od/nwjs_chromium.src,M4sse/chromium.src,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,dednal/chromium.src,jaruba/chromium.src,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,Chilledheart/chromium,Jonekee/chromium.src,axinging/chromium-crosswalk,chuan9/chromium-crosswalk,dednal/chromium.src,jaruba/chromium.src,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,chuan9/chromium-crosswalk,dushu1203/chromium.src,jaruba/chromium.src,markYoungH/chromium.src,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,crosswalk-project/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,Just-D/chromium-1,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk-efl,dushu1203/chromium.src,TheTypoMaster/chromium-crosswalk,fujunwei/chromium-crosswalk,Just-D/chromium-1,hgl888/chromium-crosswalk,ltilve/chromium,jaruba/chromium.src,TheTypoMaster/chromium-crosswalk,M4sse/chromium.src,krieger-od/nwjs_chromium.src,TheTypoMaster/chromium-crosswalk,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,chuan9/chromium-crosswalk,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,M4sse/chromium.src,Pluto-tv/chromium-crosswalk,Fireblend/chromium-crosswalk,fujunwei/chromium-crosswalk,Jonekee/chromium.src,ltilve/chromium,crosswalk-project/chromium-crosswalk-efl,chuan9/chromium-crosswalk,chuan9/chromium-crosswalk,M4sse/chromium.src,Chilledheart/chromium,krieger-od/nwjs_chromium.src,fujunwei/
chromium-crosswalk,jaruba/chromium.src,axinging/chromium-crosswalk,Just-D/chromium-1,ltilve/chromium,TheTypoMaster/chromium-crosswalk,axinging/chromium-crosswalk,dushu1203/chromium.src,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,jaruba/chromium.src,hgl888/chromium-crosswalk-efl,dushu1203/chromium.src,Chilledheart/chromium,hgl888/chromium-crosswalk,Jonekee/chromium.src,Chilledheart/chromium,Chilledheart/chromium,Jonekee/chromium.src,Chilledheart/chromium,Just-D/chromium-1,Fireblend/chromium-crosswalk,jaruba/chromium.src,axinging/chromium-crosswalk,Jonekee/chromium.src,crosswalk-project/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,axinging/chromium-crosswalk,Jonekee/chromium.src,ltilve/chromium,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,TheTypoMaster/chromium-crosswalk,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,fujunwei/chromium-crosswalk,Just-D/chromium-1,hgl888/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,dednal/chromium.src,dednal/chromium.src,hgl888/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,Just-D/chromium-1,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,Pluto-tv/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,dushu1203/chromium.src,dushu1203/chromium.src,Fireblend/chromium-crosswalk,Fireblend/chromium-crosswalk,M4sse/chromium.src,dednal/chromium.src,markYoungH/chromium.src,markYoungH/chromium.src | tools/memory_inspector/PRESUBMIT.py | tools/memory_inspector/PRESUBMIT.py | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Top-level presubmit script for memory_inspector.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
details on the presubmit API built into gcl.
"""
def CommonChecks(input_api, output_api):
output = []
blacklist = [r'classification_rules.*']
output.extend(input_api.canned_checks.RunPylint(
input_api, output_api, black_list=blacklist,
extra_paths_list=[
input_api.os_path.join(input_api.PresubmitLocalPath(), '..', '..',
'build', 'android')
]))
output.extend(input_api.canned_checks.RunUnitTests(
input_api,
output_api,
[input_api.os_path.join(input_api.PresubmitLocalPath(), 'run_tests')]))
if input_api.is_committing:
output.extend(input_api.canned_checks.PanProjectChecks(input_api,
output_api,
owners_check=False))
return output
def CheckChangeOnUpload(input_api, output_api):
return CommonChecks(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
  """Presubmit entry point invoked when the change is being committed."""
  results = CommonChecks(input_api, output_api)
  return results
| # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Top-level presubmit script for memory_inspector.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
details on the presubmit API built into gcl.
"""
def _CommonChecks(input_api, output_api):
output = []
blacklist = [r'classification_rules.*']
output.extend(input_api.canned_checks.RunPylint(
input_api, output_api, black_list=blacklist,
extra_paths_list=[
input_api.os_path.join(input_api.PresubmitLocalPath(), '..', '..',
'build', 'android')
]))
output.extend(input_api.canned_checks.RunUnitTests(
input_api,
output_api,
[input_api.os_path.join(input_api.PresubmitLocalPath(), 'run_tests')]))
if input_api.is_committing:
output.extend(input_api.canned_checks.PanProjectChecks(input_api,
output_api,
owners_check=False))
return output
def _CheckPrebuiltsAreUploaded(input_api, output_api):
  """Verifies that every prebuilt .sha1 stub has a matching uploaded blob.

  For each *.sha1 file under constants.PREBUILTS_PATH, issues an HTTP HEAD
  request against the URL built from PREBUILTS_BASE_URL plus the stored hash
  and reports any prebuilt whose binary is missing (404).

  NOTE: this is Python 2 code (urllib2, old except syntax) — it predates the
  depot_tools migration to Python 3.
  """
  import sys
  import urllib2
  # Temporarily extend sys.path so the package-local `constants` module can
  # be imported, then restore it so nothing leaks into later presubmit steps.
  old_sys_path = sys.path
  try:
    sys.path.append(input_api.os_path.join(input_api.PresubmitLocalPath()))
    from memory_inspector import constants
  finally:
    sys.path = old_sys_path
  missing_files = []
  for f in input_api.os_listdir(constants.PREBUILTS_PATH):
    # Only the checked-in .sha1 stubs are inspected; each holds the hash of
    # the corresponding binary blob.
    if not f.endswith('.sha1'):
      continue
    prebuilt_sha_path = input_api.os_path.join(constants.PREBUILTS_PATH, f)
    with open(prebuilt_sha_path) as sha_file:
      sha = sha_file.read().strip()
    url = constants.PREBUILTS_BASE_URL + sha
    request = urllib2.Request(url)
    # HEAD checks existence without downloading the (possibly large) blob.
    request.get_method = lambda : 'HEAD'
    try:
      urllib2.urlopen(request)
    except Exception, e:
      if isinstance(e, urllib2.HTTPError) and e.code == 404:
        # 404 => the blob for this hash was never uploaded; keep collecting.
        missing_files += [prebuilt_sha_path]
      else:
        # Any other failure (network error, non-404 HTTP status) aborts the
        # whole check with a presubmit error.
        return [output_api.PresubmitError('HTTP Error while checking %s' % url,
                                          long_text=str(e))]
  if missing_files:
    return [output_api.PresubmitError(
        'Some prebuilts have not been uploaded. Perhaps you forgot to '
        'upload_to_google_storage.py?', missing_files)]
  return []
def CheckChangeOnUpload(input_api, output_api):
  """Upload-time hook: common checks plus prebuilt-upload verification."""
  return (_CommonChecks(input_api, output_api) +
          _CheckPrebuiltsAreUploaded(input_api, output_api))
def CheckChangeOnCommit(input_api, output_api):
  """Commit-time hook; only the shared checks run here (prebuilt
  verification already happened at upload time)."""
  results = _CommonChecks(input_api, output_api)
  return results
| bsd-3-clause | Python |
08c6ce960f3d96cebd0ea964827550f70232321b | fix setup for new directory structure | SiLab-Bonn/basil,MarcoVogt/basil,SiLab-Bonn/basil | host/setup.py | host/setup.py | #!/usr/bin/env python
from distutils.core import setup
# Read the framework version (first line of the VERSION file, stripped of
# whitespace).  A context manager guarantees the handle is closed even if
# reading raises, unlike the explicit open()/close() pair it replaces.
with open('VERSION', 'r') as version_file:
    basil_version = version_file.readline().strip()

setup(
    name='Basil',
    version=basil_version,
    packages=['basil', 'basil.HL', 'basil.RL', 'basil.TL', 'basil.UL', 'basil.utils'],
    description='Basil: SILAB modular readout framework',
    url='https://silab-redmine.physik.uni-bonn.de/projects/basil',
    license='BSD 3-Clause ("BSD New" or "BSD Simplified") License',
    long_description=''
)
| #!/usr/bin/env python
from distutils.core import setup
# Read the framework version (first line of the VERSION file, stripped of
# whitespace).  A context manager guarantees the handle is closed even if
# reading raises, unlike the explicit open()/close() pair it replaces.
with open('VERSION', 'r') as version_file:
    basil_version = version_file.readline().strip()

setup(
    name='Basil',
    version=basil_version,
    # Map the dotted package names onto the flat on-disk layout: this
    # setup.py lives inside the package directory itself, so 'basil' maps
    # to '' and each sub-package to its bare directory name.
    package_dir={'basil': '', 'basil.HL': 'HL', 'basil.RL': 'RL', 'basil.TL': 'TL', 'basil.UL': 'UL', 'basil.utils': 'utils'},
    # Fixed: the original listed 'basil' twice in this list.
    packages=['basil', 'basil.HL', 'basil.RL', 'basil.TL', 'basil.UL', 'basil.utils'],
    description='SILAB modular readout framework',
    url='https://silab-redmine.physik.uni-bonn.de/projects/basil',
    license='BSD 3-Clause ("BSD New" or "BSD Simplified") License',
    long_description=''
)
| bsd-3-clause | Python |
5a5c25d507540598f989086f6e9cfb7ec815f51c | Rename sdk/redistributable_bin/osx32 to sdk/redistributable_bin/osx | Gramps/GodotSteam,Gramps/GodotSteam,Gramps/GodotSteam | godotsteam/config.py | godotsteam/config.py | def can_build(env, platform):
return platform=="x11" or platform=="windows" or platform=="osx"
def configure(env):
    """SCons hook: adds Steamworks SDK include and linker settings to *env*.

    Paths are chosen per target platform (x11 / windows / osx), per word
    size (env["bits"]) and, on Windows, per toolchain (MSVC vs GCC-style).
    """
    env.Append(CPPPATH=["#modules/godotsteam/sdk/public/"])
    platform = env["platform"]
    if platform == "x11":
        # Linux links the shared steam_api library and resolves it next to
        # the executable at runtime via RPATH.
        env.Append(LIBS=["steam_api"])
        env.Append(RPATH=["."])
        if env["bits"] == "32":
            env.Append(LIBPATH=["#modules/godotsteam/sdk/redistributable_bin/linux32"])
        else:
            env.Append(LIBPATH=["#modules/godotsteam/sdk/redistributable_bin/linux64"])
    elif platform == "windows":
        if env["CC"] == "cl":
            # MSVC: pass the import library through LINKFLAGS.
            if env["bits"] == "32":
                env.Append(LINKFLAGS=["steam_api.lib"])
                env.Append(LIBPATH=["#modules/godotsteam/sdk/redistributable_bin"])
            else:
                env.Append(LINKFLAGS=["steam_api64.lib"])
                env.Append(LIBPATH=["#modules/godotsteam/sdk/redistributable_bin/win64"])
        else:
            # GCC-style toolchain (e.g. MinGW): link via LIBS instead.
            if env["bits"] == "32":
                env.Append(LIBS=["steam_api"])
                env.Append(LIBPATH=["#modules/godotsteam/sdk/redistributable_bin"])
            else:
                env.Append(LIBS=["steam_api64"])
                env.Append(LIBPATH=["#modules/godotsteam/sdk/redistributable_bin/win64"])
    elif platform == "osx":
        env.Append(CXXFLAGS="-std=c++0x")
        env.Append(LIBS=["steam_api"])
        env.Append(LIBPATH=["#modules/godotsteam/sdk/redistributable_bin/osx"])
| def can_build(env, platform):
return platform=="x11" or platform=="windows" or platform=="osx"
def configure(env):
    """SCons hook: adds Steamworks SDK include and linker settings to *env*.

    Paths are chosen per target platform (x11 / windows / osx), per word
    size (env["bits"]) and, on Windows, per toolchain (MSVC vs GCC-style).
    """
    env.Append(CPPPATH=["#modules/godotsteam/sdk/public/"])
    platform = env["platform"]
    if platform == "x11":
        # Linux links the shared steam_api library and resolves it next to
        # the executable at runtime via RPATH.
        env.Append(LIBS=["steam_api"])
        env.Append(RPATH=["."])
        if env["bits"] == "32":
            env.Append(LIBPATH=["#modules/godotsteam/sdk/redistributable_bin/linux32"])
        else:
            env.Append(LIBPATH=["#modules/godotsteam/sdk/redistributable_bin/linux64"])
    elif platform == "windows":
        if env["CC"] == "cl":
            # MSVC: pass the import library through LINKFLAGS.
            if env["bits"] == "32":
                env.Append(LINKFLAGS=["steam_api.lib"])
                env.Append(LIBPATH=["#modules/godotsteam/sdk/redistributable_bin"])
            else:
                env.Append(LINKFLAGS=["steam_api64.lib"])
                env.Append(LIBPATH=["#modules/godotsteam/sdk/redistributable_bin/win64"])
        else:
            # GCC-style toolchain (e.g. MinGW): link via LIBS instead.
            if env["bits"] == "32":
                env.Append(LIBS=["steam_api"])
                env.Append(LIBPATH=["#modules/godotsteam/sdk/redistributable_bin"])
            else:
                env.Append(LIBS=["steam_api64"])
                env.Append(LIBPATH=["#modules/godotsteam/sdk/redistributable_bin/win64"])
    elif platform == "osx":
        env.Append(CXXFLAGS="-std=c++0x")
        env.Append(LIBS=["steam_api"])
        env.Append(LIBPATH=["#modules/godotsteam/sdk/redistributable_bin/osx32"])
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.