commit
stringlengths 40
40
| old_file
stringlengths 4
236
| new_file
stringlengths 4
236
| old_contents
stringlengths 1
3.26k
| new_contents
stringlengths 16
4.43k
| subject
stringlengths 16
624
| message
stringlengths 17
3.29k
| lang
stringclasses 5
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
|---|---|---|---|---|---|---|---|---|---|
ea633b9cf062e28525910f5659b6b8f2ddbb74e3
|
accelerator/migrations/0099_update_program_model.py
|
accelerator/migrations/0099_update_program_model.py
|
# Generated by Django 2.2.28 on 2022-04-20 13:05
from django.db import (
migrations,
models,
)
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0098_update_startup_update_20220408_0441'),
]
operations = [
migrations.AddField(
model_name='program',
name='hubspot_url',
field=models.URLField(blank=True, null=True),
),
migrations.AddField(
model_name='program',
name='program_image',
field=models.ImageField(null=True, upload_to=''),
),
]
|
# Generated by Django 2.2.28 on 2022-04-20 13:05
import sorl.thumbnail.fields
from django.db import (
migrations,
models,
)
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0098_update_startup_update_20220408_0441'),
]
operations = [
migrations.AddField(
model_name='program',
name='hubspot_url',
field=models.URLField(blank=True, null=True),
),
migrations.AddField(
model_name='program',
name='program_image',
field=sorl.thumbnail.fields.ImageField(
null=True,
upload_to='program_images'),
),
]
|
Fix image field import and migration
|
[AC-9452] Fix image field import and migration
|
Python
|
mit
|
masschallenge/django-accelerator,masschallenge/django-accelerator
|
5c5e0c7809d8db77327dc259df0547579e515a54
|
server-side/Judge.py
|
server-side/Judge.py
|
import sys
sys.path.append('../audio2tokens')
from audio2tokens import *
download_dir='/home/michele/Downloads'
from rhyme import get_score
import time
def retrieve_tokens(audiofile):
#time.sleep(3)
audio_path_raw = raw_audio(audiofile.replace(':','-'))
tokens = get_text(audio_path_raw)
print(tokens)
return tokens
def rank(namefile):
#audiofile=os.path.join(download_dir,namefile)
tokens=retrieve_tokens(namefile)
if tokens :
score = get_score({'text':tokens})
else:
score = None
print "No tokens found"
print(score)
return {'score':score, 'rhymes_scheme':6}
|
import sys
sys.path.append('../audio2tokens')
from audio2tokens import *
download_dir='/home/michele/Downloads'
from rhyme import get_score
import time
def remove_audiofiles(audio_path):
audio_path_16k = audio_path.replace('.wav','_16k.wav')
audio_path_raw = audio_path.replace('.wav','.raw')
os.remove(audio_path)
os.remove(audio_path_16k)
os.remove(audio_path_raw)
def retrieve_tokens(audiofile):
#time.sleep(3)
audio_path_raw = raw_audio(audiofile.replace(':','-'))
tokens = get_text(audio_path_raw)
remove_audiofiles(audio_path_raw)
print(tokens)
return tokens
def rank(namefile):
#audiofile=os.path.join(download_dir,namefile)
tokens=retrieve_tokens(namefile)
if tokens :
score = get_score({'text':tokens})
else:
score = None
print "No tokens found"
print(score)
return {'score':score, 'rhymes_scheme':6}
|
Add some functions to remove wave files
|
Add some functions to remove wave files
|
Python
|
mit
|
hajicj/HAMR_2016,hajicj/HAMR_2016,hajicj/HAMR_2016
|
693dc9d8448740e1a1c4543cc3a91e3769fa7a3e
|
pySPM/utils/plot.py
|
pySPM/utils/plot.py
|
import numpy as np
import matplotlib.pyplot as plt
def plotMask(ax, mask, color, **kargs):
import copy
m = np.ma.masked_array(mask, ~mask)
palette = copy.copy(plt.cm.gray)
palette.set_over(color, 1.0)
ax.imshow(m, cmap=palette, vmin=0, vmax=0.5, **kargs)
def Xdist(ax,left, right, y, color='r', linestyle=':', fmt='.2f', xtransf=lambda x: x, **kargs):
ax.axvline(left,color=color, linestyle=linestyle)
ax.axvline(right,color=color, linestyle=linestyle)
s = "{:"+fmt+"}"+kargs.get('unit','')
ax.annotate(s.format(xtransf(right-left)),(.5*(left+right),y),(0,2),textcoords='offset pixels',va='bottom',ha='center')
ax.annotate("",(left,y),(right,y),arrowprops=dict(arrowstyle=kargs.get('arrowstyle','<->')))
|
import numpy as np
import matplotlib.pyplot as plt
def plotMask(ax, mask, color, **kargs):
import copy
m = np.ma.masked_array(mask, ~mask)
palette = copy.copy(plt.cm.gray)
palette.set_over(color, 1.0)
ax.imshow(m, cmap=palette, vmin=0, vmax=0.5, **kargs)
def Xdist(ax,left, right, y, color='r', linestyle=':', fmt='.2f', xtransf=lambda x: x, **kargs):
ax.axvline(left,color=color, linestyle=linestyle)
ax.axvline(right,color=color, linestyle=linestyle)
s = "{:"+fmt+"}"+kargs.get('unit','')
ax.annotate(s.format(xtransf(right-left)),(.5*(left+right),y),(0,2),textcoords='offset pixels',va='bottom',ha='center')
ax.annotate("",(left,y),(right,y),arrowprops=dict(arrowstyle=kargs.get('arrowstyle','<->')))
def DualPlot(ax, col1='C0',col2='C1'):
axb = ax.twinx()
axb.spines['left'].set_color(col1)
axb.spines['right'].set_color(col2)
ax.yaxis.label.set_color(col1)
axb.yaxis.label.set_color(col2)
ax.tick_params(axis='y', colors=col1)
axb.tick_params(axis='y', colors=col2)
return axb
|
Add helper function to create a DualPlot
|
Add helper function to create a DualPlot
|
Python
|
apache-2.0
|
scholi/pySPM
|
2bd551d7fa8da9d7641998a5515fba634d65bc56
|
comics/feedback/views.py
|
comics/feedback/views.py
|
from django.conf import settings
from django.core.mail import mail_admins
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import render
from comics.feedback.forms import FeedbackForm
def feedback(request):
"""Mail feedback to ADMINS"""
if request.method == 'POST':
form = FeedbackForm(request.POST)
if form.is_valid():
subject = 'Feedback from %s' % settings.COMICS_SITE_TITLE
message = form.cleaned_data['message']
mail_admins(subject, message)
return HttpResponseRedirect(reverse('feedback-thanks'))
else:
form = FeedbackForm()
return render(request, 'feedback/form.html', {'feedback_form': form})
def feedback_thanks(request):
"""Display form submit confirmation page"""
return render(request, 'feedback/thanks.html')
|
from django.conf import settings
from django.core.mail import mail_admins
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import render
from comics.feedback.forms import FeedbackForm
def feedback(request):
"""Mail feedback to ADMINS"""
if request.method == 'POST':
form = FeedbackForm(request.POST)
if form.is_valid():
subject = 'Feedback from %s' % settings.COMICS_SITE_TITLE
message = form.cleaned_data['message']
metadata = 'Client IP address: %s\n' % request.META['REMOTE_ADDR']
metadata += 'User agent: %s\n' % request.META['HTTP_USER_AGENT']
if request.user.is_authenticated():
metadata += 'User: %s <%s>\n' % (
request.user.username, request.user.email)
else:
metadata += 'User: anonymous\n'
message = '%s\n\n%s' % (message, metadata)
mail_admins(subject, message)
return HttpResponseRedirect(reverse('feedback-thanks'))
else:
form = FeedbackForm()
return render(request, 'feedback/form.html', {'feedback_form': form})
def feedback_thanks(request):
"""Display form submit confirmation page"""
return render(request, 'feedback/thanks.html')
|
Add user information to feedback emails
|
Add user information to feedback emails
|
Python
|
agpl-3.0
|
jodal/comics,datagutten/comics,jodal/comics,datagutten/comics,datagutten/comics,jodal/comics,datagutten/comics,jodal/comics
|
c7ab4bc8e0b3dbdd305a7a156ef58dddaa37296c
|
pystorm/__init__.py
|
pystorm/__init__.py
|
from .component import Component, Tuple
from .bolt import BatchingBolt, Bolt, TicklessBatchingBolt
from .spout import Spout
__all__ = [
'BatchingBolt',
'Bolt',
'Component',
'Spout',
'TicklessBatchingBolt',
'Tuple',
]
|
'''
pystorm is a production-tested Storm multi-lang implementation for Python
It is mostly intended to be used by other libraries (e.g., streamparse).
'''
from .component import Component, Tuple
from .bolt import BatchingBolt, Bolt, TicklessBatchingBolt
from .spout import Spout
from .version import __version__, VERSION
__all__ = [
'BatchingBolt',
'Bolt',
'Component',
'Spout',
'TicklessBatchingBolt',
'Tuple',
]
|
Add VERSION and __version__ directly to pystorm namespace
|
Add VERSION and __version__ directly to pystorm namespace
|
Python
|
apache-2.0
|
pystorm/pystorm
|
eb0aeda225cc7c0aef85559857de4cca35b77efd
|
ratemyflight/urls.py
|
ratemyflight/urls.py
|
from django.conf.urls.defaults import *
from django.views.generic.simple import direct_to_template
urlpatterns = patterns("ratemyflight.views",
url("^api/airport/list/(?P<south>.*)/(?P<west>.*)/(?P<north>.*)/(?P<east>.*)/$",
"airports_for_boundary", name="airports_for_boundary"),
url("^api/flight/list/(?P<south>.*)/(?P<west>.*)/(?P<north>.*)/(?P<east>.*)/$",
"flights_for_boundary", name="flights_for_boundary"),
url("^$", "home", name="home"),
)
|
from django.conf.urls.defaults import *
from django.views.generic.simple import direct_to_template
urlpatterns = patterns("ratemyflight.views",
url("^api/airport/boundary/(?P<south>.*)/(?P<west>.*)/(?P<north>.*)/(?P<east>.*)/$",
"airports_for_boundary", name="airports_for_boundary"),
url("^api/flight/boundary/(?P<south>.*)/(?P<west>.*)/(?P<north>.*)/(?P<east>.*)/$",
"flights_for_boundary", name="flights_for_boundary"),
url("^api/flight/airline/(?P<iata_code>.*)/$",
"flights_for_airline", name="flights_for_airline"),
url("^api/flight/username/(?P<username>.*)/$",
"flights_for_username", name="flights_for_username"),
url("^api/flight/recent/$", "recent_flights", name="recent_flights"),
url("^$", "home", name="home"),
)
|
Clean up URLS for API and point final URLS to views.
|
Clean up URLS for API and point final URLS to views.
|
Python
|
bsd-2-clause
|
stephenmcd/ratemyflight,stephenmcd/ratemyflight
|
1775782f100f9db9ad101a19887ba95fbc36a6e9
|
backend/project_name/celerybeat_schedule.py
|
backend/project_name/celerybeat_schedule.py
|
from celery.schedules import crontab
CELERYBEAT_SCHEDULE = {
# Internal tasks
"clearsessions": {"schedule": crontab(hour=3, minute=0), "task": "users.tasks.clearsessions"},
}
|
from celery.schedules import crontab # pylint:disable=import-error,no-name-in-module
CELERYBEAT_SCHEDULE = {
# Internal tasks
"clearsessions": {"schedule": crontab(hour=3, minute=0), "task": "users.tasks.clearsessions"},
}
|
Disable prospector on celery.schedules import
|
Disable prospector on celery.schedules import
|
Python
|
mit
|
vintasoftware/django-react-boilerplate,vintasoftware/django-react-boilerplate,vintasoftware/django-react-boilerplate,vintasoftware/django-react-boilerplate
|
691e3581f1602714fba33f6dcb139f32e0507d23
|
packages/syft/src/syft/core/node/common/node_table/setup.py
|
packages/syft/src/syft/core/node/common/node_table/setup.py
|
# third party
from sqlalchemy import Column
from sqlalchemy import Integer
from sqlalchemy import String
# relative
from . import Base
class SetupConfig(Base):
__tablename__ = "setup"
id = Column(Integer(), primary_key=True, autoincrement=True)
domain_name = Column(String(255), default="")
node_id = Column(String(32), default="")
def __str__(self) -> str:
return f"<Domain Name: {self.domain_name}>"
def create_setup(id: int, domain_name: str, node_id: str) -> SetupConfig:
return SetupConfig(id=id, domain_name=domain_name, node_id=node_id)
|
# third party
from sqlalchemy import Column
from sqlalchemy import Integer
from sqlalchemy import String
from sqlalchemy import Boolean
# relative
from . import Base
class SetupConfig(Base):
__tablename__ = "setup"
id = Column(Integer(), primary_key=True, autoincrement=True)
domain_name = Column(String(255), default="")
description = Column(String(255), default="")
contact = Column(String(255), default="")
daa = Column(Boolean(), default=False)
node_id = Column(String(32), default="")
def __str__(self) -> str:
return f"<Domain Name: {self.domain_name}>"
def create_setup(id: int, domain_name: str, node_id: str) -> SetupConfig:
return SetupConfig(id=id, domain_name=domain_name, node_id=node_id)
|
ADD description / contact / daa fields
|
ADD description / contact / daa fields
|
Python
|
apache-2.0
|
OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft
|
7fc81e93ea44e52ab7c29087a01eac65a145db09
|
qiprofile_rest/server/settings.py
|
qiprofile_rest/server/settings.py
|
"""This ``settings`` file specifies the Eve configuration."""
import os
# The run environment default is production.
# Modify this by setting the NODE_ENV environment variable.
env = os.getenv('NODE_ENV') or 'production'
# The MongoDB database.
if env == 'production':
MONGO_DBNAME = 'qiprofile'
else:
MONGO_DBNAME = 'qiprofile_test'
# The MongoDB host default is localhost, but can be reset
# by the MONGO_HOST environment variable.
host = os.getenv('MONGO_HOST')
if host:
MONGO_HOST = host
# Even though the domain is defined by the Eve MongoEngine
# adapter, a DOMAIN setting is required by Eve. This setting
# is only used to avoid an Eve complaint about a missing domain.
DOMAIN = {'eve-mongoengine': {}}
|
"""This ``settings`` file specifies the Eve configuration."""
import os
# The run environment default is production.
# Modify this by setting the NODE_ENV environment variable.
env = os.getenv('NODE_ENV') or 'production'
# The MongoDB database.
if env == 'production':
MONGO_DBNAME = 'qiprofile'
else:
MONGO_DBNAME = 'qiprofile_test'
# The MongoDB host default is localhost, but can be reset
# by the MONGO_HOST environment variable.
host = os.getenv('MONGO_HOST')
if host:
MONGO_HOST = host
# The MongoDB port.
port = os.getenv('MONGO_PORT')
if port:
MONGO_PORT = int(port)
# The MongoDB username.
user = os.getenv('MONGO_USERNAME')
if user:
MONGO_USERNAME = user
# The MongoDB password.
pswd = os.getenv('MONGO_PASSWORD')
if pswd:
MONGO_PASSWORD = pswd
# Even though the domain is defined by the Eve MongoEngine
# adapter, a DOMAIN setting is required by Eve. This setting
# is only used to avoid an Eve complaint about a missing domain.
DOMAIN = {'eve-mongoengine': {}}
|
Add Mongo env var overrides.
|
Add Mongo env var overrides.
|
Python
|
bsd-2-clause
|
ohsu-qin/qiprofile-rest,ohsu-qin/qirest
|
b5e368437a600d78e22a53abe53c0103b20daa24
|
_python/main/migrations/0003_auto_20191029_2015.py
|
_python/main/migrations/0003_auto_20191029_2015.py
|
# Generated by Django 2.2.6 on 2019-10-29 20:15
from django.db import migrations, models
import main.models
class Migration(migrations.Migration):
dependencies = [
('main', '0002_auto_20191007_1639'),
]
operations = [
migrations.AlterField(
model_name='contentnode',
name='headnote',
field=main.models.SanitizingTextField(blank=True, null=True),
),
migrations.AlterField(
model_name='default',
name='url',
field=models.URLField(max_length=1024),
),
migrations.AlterField(
model_name='textblock',
name='content',
field=main.models.SanitizingCharField(max_length=5242880),
),
]
|
# Generated by Django 2.2.6 on 2019-10-29 20:15
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0002_auto_20191007_1639'),
]
operations = [
migrations.AlterField(
model_name='default',
name='url',
field=models.URLField(max_length=1024),
),
]
|
Repair migration, which was a no-op in SQL and was 'faked' anyway.
|
Repair migration, which was a no-op in SQL and was 'faked' anyway.
|
Python
|
agpl-3.0
|
harvard-lil/h2o,harvard-lil/h2o,harvard-lil/h2o,harvard-lil/h2o
|
f87b979443bd4a578884bdb327dbd72616d07533
|
changes/backends/jenkins/generic_builder.py
|
changes/backends/jenkins/generic_builder.py
|
from .builder import JenkinsBuilder
class JenkinsGenericBuilder(JenkinsBuilder):
def __init__(self, *args, **kwargs):
self.script = kwargs.pop('script')
self.cluster = kwargs.pop('cluster')
super(JenkinsGenericBuilder, self).__init__(*args, **kwargs)
def get_job_parameters(self, job, script=None, target_id=None, path=''):
params = super(JenkinsGenericBuilder, self).get_job_parameters(
job, target_id=target_id)
if script is None:
script = self.script
project = job.project
repository = project.repository
vcs = repository.get_vcs()
if vcs:
repo_url = vcs.remote_url
else:
repo_url = repository.url
params.extend([
{'name': 'CHANGES_PID', 'value': project.slug},
{'name': 'REPO_URL', 'value': repo_url},
{'name': 'SCRIPT', 'value': script},
{'name': 'REPO_VCS', 'value': repository.backend.name},
{'name': 'CLUSTER', 'value': self.cluster},
{'name': 'WORK_PATH', 'value': path},
])
return params
|
from .builder import JenkinsBuilder
class JenkinsGenericBuilder(JenkinsBuilder):
def __init__(self, *args, **kwargs):
self.script = kwargs.pop('script')
self.cluster = kwargs.pop('cluster')
self.path = kwargs.pop('path', '')
super(JenkinsGenericBuilder, self).__init__(*args, **kwargs)
def get_job_parameters(self, job, script=None, target_id=None, path=None):
params = super(JenkinsGenericBuilder, self).get_job_parameters(
job, target_id=target_id)
if path is None:
path = self.path
if script is None:
script = self.script
project = job.project
repository = project.repository
vcs = repository.get_vcs()
if vcs:
repo_url = vcs.remote_url
else:
repo_url = repository.url
params.extend([
{'name': 'CHANGES_PID', 'value': project.slug},
{'name': 'REPO_URL', 'value': repo_url},
{'name': 'SCRIPT', 'value': script},
{'name': 'REPO_VCS', 'value': repository.backend.name},
{'name': 'CLUSTER', 'value': self.cluster},
{'name': 'WORK_PATH', 'value': path},
])
return params
|
Support fixed path in generic builder
|
Support fixed path in generic builder
|
Python
|
apache-2.0
|
wfxiang08/changes,bowlofstew/changes,dropbox/changes,dropbox/changes,dropbox/changes,bowlofstew/changes,wfxiang08/changes,bowlofstew/changes,wfxiang08/changes,dropbox/changes,bowlofstew/changes,wfxiang08/changes
|
ba0334459b8318a62014ec945a753fc36fc7d519
|
account_verification_flask/models/models.py
|
account_verification_flask/models/models.py
|
#from flask.ext.login import UserMixin
from account_verification_flask import db, bcrypt
class User(db.Model):
__tablename__ = "users"
id = db.Column(db.Integer, primary_key = True)
name = db.Column(db.String, nullable = False)
email = db.Column(db.String, nullable = False)
password = db.Column(db.String)
phone_number = db.Column(db.String, nullable = False)
country_code = db.Column(db.String, nullable = False)
phone_number_confirmed = db.Column(db.Boolean, nullable = False, default = False)
authy_user_id = db.Column(db.String, nullable = True)
def __init__(self, name, email, password, phone_number, country_code):
self.name = name
self.email = email
self.password = bcrypt.generate_password_hash(password)
self.phone_number = phone_number
self.country_code = country_code
self.phone_number_confirmed = False
def is_authenticated(self):
return True
def is_active(self):
return True
def is_anonymous(self):
return False
def get_id(self):
return unicode(self.id)
def confirm_phone_number(self):
self.phone_number_confirmed = True
def __unicode__(self):
return self.name
def __repr__(self):
return '<User %r>' % (self.name)
|
#from flask.ext.login import UserMixin
from account_verification_flask import db, bcrypt
class User(db.Model):
__tablename__ = "users"
id = db.Column(db.Integer, primary_key = True)
name = db.Column(db.String, nullable = False)
email = db.Column(db.String, nullable = False)
password = db.Column(db.String)
phone_number = db.Column(db.String, nullable = False)
country_code = db.Column(db.String, nullable = False)
phone_number_confirmed = db.Column(db.Boolean, nullable = False, default = False)
authy_user_id = db.Column(db.String, nullable = True)
def __init__(self, name, email, password, phone_number, country_code):
self.name = name
self.email = email
self.password = bcrypt.generate_password_hash(password)
self.phone_number = phone_number
self.country_code = country_code
self.phone_number_confirmed = False
def is_authenticated(self):
return True
def is_active(self):
return True
def is_anonymous(self):
return False
def get_id(self):
return unicode(self.id)
def __unicode__(self):
return self.name
def __repr__(self):
return '<User %r>' % (self.name)
|
Remove confirm_phone_number method from model
|
Remove confirm_phone_number method from model
|
Python
|
mit
|
TwilioDevEd/account-verification-flask,TwilioDevEd/account-verification-flask,TwilioDevEd/account-verification-flask
|
1dc2856368e5e6852b526d86a0c78c5fe10b1550
|
myhronet/models.py
|
myhronet/models.py
|
# -*- coding: utf-8 -*-
import string
from django.db import models
class Blacklist(models.Model):
domain = models.CharField(max_length=255, unique=True, null=True)
def __unicode__(self):
return self.domain
class URL(models.Model):
hashcode = models.CharField(max_length=10, unique=True,
db_index=True, null=True)
longurl = models.CharField(max_length=1024, unique=True,
db_index=True, null=True)
views = models.IntegerField(default=0)
ip = models.GenericIPAddressField(null=True)
data = models.DateTimeField(auto_now_add=True, null=True)
def save(self, *args, **kwargs):
if URL.objects.count():
last = URL.objects.latest('id').pk + 1
alphabet = string.digits + string.ascii_lowercase
base36 = ''
while last != 0:
last, i = divmod(last, len(alphabet))
base36 = alphabet[i] + base36
self.hashcode = base36
else:
self.hashcode = '1'
return super(URL, self).save(*args, **kwargs)
def short_url(self, request):
return ''.join([
request.scheme,
'://', request.get_host(),
'/', self.hashcode,
])
def __unicode__(self):
return ' - '.join([self.hashcode, self.longurl])
|
# -*- coding: utf-8 -*-
import string
from django.db import models
class Blacklist(models.Model):
domain = models.CharField(max_length=255, unique=True, null=True)
def __unicode__(self):
return self.domain
class URL(models.Model):
hashcode = models.CharField(max_length=10, unique=True,
db_index=True, null=True)
longurl = models.CharField(max_length=1024, unique=True,
db_index=True, null=True)
views = models.IntegerField(default=0)
ip = models.GenericIPAddressField(null=True)
data = models.DateTimeField(auto_now_add=True, null=True)
def save(self, *args, **kwargs):
if not self.pk:
if URL.objects.count():
last = URL.objects.latest('id').pk + 1
alphabet = string.digits + string.ascii_lowercase
base36 = ''
while last != 0:
last, i = divmod(last, len(alphabet))
base36 = alphabet[i] + base36
self.hashcode = base36
else:
self.hashcode = '1'
return super(URL, self).save(*args, **kwargs)
def short_url(self, request):
return ''.join([
request.scheme,
'://', request.get_host(),
'/', self.hashcode,
])
def __unicode__(self):
return ' - '.join([self.hashcode, self.longurl])
|
Fix hashcode generation for existing URLs
|
Fix hashcode generation for existing URLs
|
Python
|
mit
|
myhro/myhronet,myhro/myhronet
|
1cad9ab61148173b0f61971805b3e6203da3050d
|
faker/providers/en_CA/ssn.py
|
faker/providers/en_CA/ssn.py
|
# coding=utf-8
from __future__ import unicode_literals
from ..ssn import Provider as SsnProvider
class Provider(SsnProvider):
ssn_formats = ("### ### ###",)
@classmethod
def ssn(cls):
return cls.bothify(cls.random_element(cls.ssn_formats))
|
# coding=utf-8
from __future__ import unicode_literals
from ..ssn import Provider as SsnProvider
import random
class Provider(SsnProvider):
#in order to create a valid SIN we need to provide a number that passes a simple modified Luhn Algorithmn checksum
#this function essentially reverses the checksum steps to create a random valid SIN (Social Insurance Number)
@classmethod
def ssn(cls):
#create an array of 8 elements initialized randomly
digits = random.sample(range(10), 8)
# All of the digits must sum to a multiple of 10.
# sum the first 8 and set 9th to the value to get to a multiple of 10
digits.append(10 - (sum(digits) % 10))
#digits is now the digital root of the number we want multiplied by the magic number 121 212 121
#reverse the multiplication which occurred on every other element
for i in range(1, len(digits), 2):
if digits[i] % 2 == 0:
digits[i] = (digits[i] / 2)
else:
digits[i] = (digits[i] + 9) / 2
#build the resulting SIN string
sin = ""
for i in range(0, len(digits), 1):
sin += str(digits[i])
#add a space to make it conform to normal standards in Canada
if i % 3 == 2:
sin += " "
#finally return our random but valid SIN
return sin
|
Update Canada SSN/SIN provider to create a valid number
|
Update Canada SSN/SIN provider to create a valid number
The first revision generated a random number in the correct format.
This commit creates a SIN number that passes the checksum as described
here http://http://en.wikipedia.org/wiki/Social_Insurance_Number
|
Python
|
mit
|
jaredculp/faker,trtd/faker,xfxf/faker-python,HAYASAKA-Ryosuke/faker,johnraz/faker,joke2k/faker,joke2k/faker,venmo/faker,ericchaves/faker,xfxf/faker-1,GLMeece/faker,danhuss/faker,beetleman/faker,thedrow/faker,meganlkm/faker,yiliaofan/faker,MaryanMorel/faker
|
dd1ed907532526a4a70694c46918136ca6d93277
|
nqueens/nqueens.py
|
nqueens/nqueens.py
|
from nqueens.chessboard import Chessboard
from nqueens.printer import Printer
from nqueens.solver import Solver
board = Chessboard.create(8)
solver = Solver.create(board)
solution = solver.solve()
if solution is not None:
printer = Printer.create(solution)
printer.printBoard()
|
#!/usr/bin/env python3
import os
import sys
import getopt
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from nqueens.chessboard import Chessboard
from nqueens.printer import Printer
from nqueens.solver import Solver
def main():
try:
n = parse_command_line()
except ValueError as e:
print("Error: " + str(e))
print("Usage: nqueens.py <n>")
sys.exit(1)
solution = solve_for(n)
if solution is None:
print("No solution found")
else:
print_solution(solution)
def parse_command_line():
try:
opts, args = getopt.getopt(sys.argv[1:], "", [])
except getopt.GetoptError:
raise ValueError("Could not parse command line")
if len(args) == 0:
raise ValueError("No arguments supplied")
if len(args) > 1:
raise ValueError("Too many arguments supplied")
n = args[0]
if not n.isdigit() or int(n) < 1:
raise ValueError("n must be a positive number")
return int(n)
def solve_for(n):
board = Chessboard.create(n)
solver = Solver.create(board)
return solver.solve()
def print_solution(solution):
printer = Printer.create(solution)
printer.printBoard()
if __name__ == '__main__':
sys.exit(main())
|
Add ability to run problems from command line
|
Add ability to run problems from command line
|
Python
|
mit
|
stevecshanks/nqueens
|
c9cc5585e030951a09687c6a61a489ec51f83446
|
cr2/plotter/__init__.py
|
cr2/plotter/__init__.py
|
# $Copyright:
# ----------------------------------------------------------------
# This confidential and proprietary software may be used only as
# authorised by a licensing agreement from ARM Limited
# (C) COPYRIGHT 2015 ARM Limited
# ALL RIGHTS RESERVED
# The entire notice above must be reproduced on all authorised
# copies and copies may only be made to the extent permitted
# by a licensing agreement from ARM Limited.
# ----------------------------------------------------------------
# File: __init__.py
# ----------------------------------------------------------------
# $
#
"""Init Module for the Plotter Code"""
import pandas as pd
from LinePlot import LinePlot
|
# $Copyright:
# ----------------------------------------------------------------
# This confidential and proprietary software may be used only as
# authorised by a licensing agreement from ARM Limited
# (C) COPYRIGHT 2015 ARM Limited
# ALL RIGHTS RESERVED
# The entire notice above must be reproduced on all authorised
# copies and copies may only be made to the extent permitted
# by a licensing agreement from ARM Limited.
# ----------------------------------------------------------------
# File: __init__.py
# ----------------------------------------------------------------
# $
#
"""Init Module for the Plotter Code"""
import pandas as pd
from LinePlot import LinePlot
import AttrConf
def register_forwarding_arg(arg_name):
"""Allows the user to register args to
be forwarded to matplotlib
"""
if arg_name not in AttrConf.ARGS_TO_FORWARD:
AttrConf.ARGS_TO_FORWARD.append(arg_name)
def unregister_forwarding_arg(arg_name):
"""Unregisters arg_name from being passed to
plotter matplotlib calls
"""
try:
AttrConf.ARGS_TO_FORWARD.remove(arg_name)
except ValueError:
pass
|
Enable user specified arg forwarding to matplotlib
|
plotter: Enable user specified arg forwarding to matplotlib
This change allows the user to register args for forwarding to
matplotlib and also unregister the same.
Change-Id: If53dab43dd4a2f530b3d1faf35582206ac925740
Signed-off-by: Kapileshwar Singh <d373e2b6407ea84be359ce4a11e8631121819e79@arm.com>
|
Python
|
apache-2.0
|
JaviMerino/trappy,joelagnel/trappy,bjackman/trappy,derkling/trappy,ARM-software/trappy,sinkap/trappy,JaviMerino/trappy,joelagnel/trappy,ARM-software/trappy,derkling/trappy,bjackman/trappy,sinkap/trappy,ARM-software/trappy,ARM-software/trappy,bjackman/trappy,sinkap/trappy,joelagnel/trappy,sinkap/trappy,JaviMerino/trappy,bjackman/trappy,derkling/trappy,joelagnel/trappy
|
38cd50805e080f6613d7e1d5867a84952ec88580
|
flask_resty/related.py
|
flask_resty/related.py
|
from .exceptions import ApiError
# -----------------------------------------------------------------------------
class Related(object):
def __init__(self, item_class=None, **kwargs):
self._item_class = item_class
self._view_classes = kwargs
def resolve_related(self, data):
for field_name, view_class in self._view_classes.items():
value = data.get(field_name, None)
if value is None:
# If this field were required or non-nullable, the deserializer
# would already have raised an exception.
continue
try:
resolved = self.resolve_field(value, view_class)
except ApiError as e:
pointer = '/data/{}'.format(field_name)
raise e.update({'source': {'pointer': pointer}})
data[field_name] = resolved
if self._item_class:
return self._item_class(**data)
return data
def resolve_field(self, value, view_class):
# marshmallow always uses lists here.
many = isinstance(value, list)
if many and not value:
# As a tiny optimization, there's no need to resolve an empty list.
return value
if isinstance(view_class, Related):
# This is not actually a view class.
resolver = view_class.resolve_related
else:
resolver = view_class().resolve_related_item
if many:
return [resolver(item) for item in value]
return resolver(value)
|
from .exceptions import ApiError
# -----------------------------------------------------------------------------
class Related(object):
def __init__(self, item_class=None, **kwargs):
self._item_class = item_class
self._resolvers = kwargs
def resolve_related(self, data):
for field_name, resolver in self._resolvers.items():
value = data.get(field_name, None)
if value is None:
# If this field were required or non-nullable, the deserializer
# would already have raised an exception.
continue
try:
resolved = self.resolve_field(value, resolver)
except ApiError as e:
pointer = '/data/{}'.format(field_name)
raise e.update({'source': {'pointer': pointer}})
data[field_name] = resolved
if self._item_class:
return self._item_class(**data)
return data
def resolve_field(self, value, resolver):
# marshmallow always uses lists here.
many = isinstance(value, list)
if many and not value:
# As a tiny optimization, there's no need to resolve an empty list.
return value
if isinstance(resolver, Related):
resolve_item = resolver.resolve_related
else:
resolve_item = resolver().resolve_related_item
if many:
return [resolve_item(item) for item in value]
return resolve_item(value)
|
Fix variable names in Related
|
Fix variable names in Related
|
Python
|
mit
|
taion/flask-jsonapiview,4Catalyzer/flask-jsonapiview,4Catalyzer/flask-resty
|
8cdbbb52ebe161b0b7fea342e8a3d197ec290ab1
|
satnogsclient/settings.py
|
satnogsclient/settings.py
|
from os import environ
DEMODULATION_COMMAND = environ.get('SATNOGS_DEMODULATION_COMMAND', None)
ENCODING_COMMAND = environ.get('SATNOGS_ENCODING_COMMAND', None)
DECODING_COMMAND = environ.get('SATNOGS_DECODING_COMMAND', None)
OUTPUT_PATH = environ.get('SATNOGS_OUTPUT_PATH', None)
|
from os import environ
DEMODULATION_COMMAND = environ.get('SATNOGS_DEMODULATION_COMMAND', 'rtl_fm')
ENCODING_COMMAND = environ.get('SATNOGS_ENCODING_COMMAND', 'oggenc')
DECODING_COMMAND = environ.get('SATNOGS_DECODING_COMMAND', 'multimon-ng')
OUTPUT_PATH = environ.get('SATNOGS_OUTPUT_PATH', '/tmp')
|
Add default commands for encoding/decoding/demodulation.
|
Add default commands for encoding/decoding/demodulation.
|
Python
|
agpl-3.0
|
adamkalis/satnogs-client,adamkalis/satnogs-client,cshields/satnogs-client,cshields/satnogs-client
|
3335c8c9ce8419d9d1d1034903687aa02983280d
|
tests/rules_tests/NoRuleSpecifiedTest.py
|
tests/rules_tests/NoRuleSpecifiedTest.py
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Rule
class NoRuleSpecifiedTest(TestCase):
pass
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Rule
from grammpy.exceptions import RuleNotDefinedException
class NoRuleSpecifiedTest(TestCase):
    """Accessing any rule attribute on a Rule subclass that defines no rule
    must raise RuleNotDefinedException."""

    def test_noRule(self):
        # A Rule subclass with no rule definition at all.
        class tmp(Rule):
            x = 5
        with self.assertRaises(RuleNotDefinedException):
            x = tmp.rules
        with self.assertRaises(RuleNotDefinedException):
            x = tmp.rule
        with self.assertRaises(RuleNotDefinedException):
            x = tmp.left
        with self.assertRaises(RuleNotDefinedException):
            x = tmp.right
        with self.assertRaises(RuleNotDefinedException):
            x = tmp.fromSymbol
        with self.assertRaises(RuleNotDefinedException):
            x = tmp.toSymbol
if __name__ == '__main__':
main()
|
Add tests when no rules is specified
|
Add tests when no rules is specified
|
Python
|
mit
|
PatrikValkovic/grammpy
|
0b3f6ae3b21cd51b99bcecf17d5ea1275c04abfd
|
statirator/project_template/project_name/settings.py
|
statirator/project_template/project_name/settings.py
|
# Generated by statirator
import os
# directories setup
ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
SOURCE_DIR = os.path.join(ROOT_DIR, '{{ source }}')
BUILD_DIR = os.path.join(ROOT_DIR, '{{ build }}')
# languages setup
LANGUAGE_CODE = '{{default_lang}}'
_ = lambda s:s
LANGUAGES = ({% for code, name in languages %}
('{{code}}', _('{{ name }}')),
{% endfor %})
ROOT_URLCONF = '{{ project_name }}.urls'
TEMPLATE_DIRS = (
os.path.join(ROOT_DIR, 'templates'),
)
INSTALLED_APPS = (
'django.contrib.contenttypes',
'django.contrib.sites',
'django.contrib.staticfiles',
'django_medusa',
)
MEDUSA_RENDERER_CLASS = "django_medusa.renderers.DiskStaticSiteRenderer"
MEDUSA_MULTITHREAD = True
MEDUSA_DEPLOY_DIR = BUILD_DIR
|
# Generated by statirator
import os
# directories setup
ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
SOURCE_DIR = os.path.join(ROOT_DIR, '{{ source }}')
BUILD_DIR = os.path.join(ROOT_DIR, '{{ build }}')
# languages setup
LANGUAGE_CODE = '{{default_lang}}'
_ = lambda s:s
LANGUAGES = ({% for code, name in languages %}
('{{code}}', _('{{ name }}')),
{% endfor %})
ROOT_URLCONF = '{{ project_name }}.urls'
TEMPLATE_DIRS = (
os.path.join(ROOT_DIR, 'templates'),
)
INSTALLED_APPS = (
'django.contrib.contenttypes',
'django.contrib.sites',
'django.contrib.staticfiles',
'django_medusa',
'taggit',
'statirator.blog',
)
MEDUSA_RENDERER_CLASS = "django_medusa.renderers.DiskStaticSiteRenderer"
MEDUSA_MULTITHREAD = True
MEDUSA_DEPLOY_DIR = BUILD_DIR
|
Add taggit and statirator.blog to INSTALLED_APPS
|
Add taggit and statirator.blog to INSTALLED_APPS
|
Python
|
mit
|
MeirKriheli/statirator,MeirKriheli/statirator,MeirKriheli/statirator
|
91a7e4ba30c2c455c58b7069015680b7af511cc4
|
tests/test_get_joke.py
|
tests/test_get_joke.py
|
def test_get_joke():
from pyjokes import get_joke
for i in range(10):
assert get_joke()
languages = ['eng', 'de', 'spa']
categories = ['neutral', 'explicit', 'all']
for lang in languages:
for cat in categories:
for i in range(10):
assert get_joke(cat, lang)
|
import pytest
from pyjokes import get_joke
from pyjokes.pyjokes import LanguageNotFoundError, CategoryNotFoundError
def test_get_joke():
assert get_joke()
languages = ['en', 'de', 'es']
categories = ['neutral', 'explicit', 'all']
for lang in languages:
assert get_joke(language=lang)
for cat in categories:
assert get_joke(category=cat)
def test_get_joke_raises():
assert pytest.raises(LanguageNotFoundError, get_joke, language='eu')
assert pytest.raises(LanguageNotFoundError, get_joke, language='tr')
assert pytest.raises(CategoryNotFoundError, get_joke, category='123')
|
Simplify get_joke test, add raise checks
|
Simplify get_joke test, add raise checks
|
Python
|
bsd-3-clause
|
borjaayerdi/pyjokes,trojjer/pyjokes,martinohanlon/pyjokes,bennuttall/pyjokes,ElectronicsGeek/pyjokes,pyjokes/pyjokes,gmarkall/pyjokes
|
67d067fe499ba2ec78d34083640a4bfe9835d62b
|
tests/test_sequence.py
|
tests/test_sequence.py
|
from unittest import TestCase
from prudent.sequence import Sequence
class SequenceTest(TestCase):
def setUp(self):
self.seq = Sequence([1, 2, 3])
def test_getitem(self):
assert self.seq[0] == 1
self.seq[2]
assert self.seq[2] == 3
def test_len(self):
assert len(self.seq) == 0
self.seq[2]
assert len(self.seq) == 3
def test_iter(self):
for _ in range(2):
assert list(self.seq) == [1, 2, 3]
|
from unittest import TestCase
from prudent.sequence import Sequence
class SequenceTest(TestCase):
def setUp(self):
self.seq = Sequence([1, 2, 3])
def test_getitem(self):
assert self.seq[0] == 1
assert self.seq[2] == 3
def test_getitem_raises_indexerror(self):
self.assertRaises(IndexError, lambda: self.seq[3])
def test_len_returns_current_size(self):
assert len(self.seq) == 0
self.seq[2]
assert len(self.seq) == 3
def test_iter_preserves_elems(self):
for _ in range(2):
assert list(self.seq) == [1, 2, 3]
|
Test that IndexError is raised when appropriate
|
Test that IndexError is raised when appropriate
|
Python
|
mit
|
eugene-eeo/prudent
|
987e3b3387124c9eee7b0d69647fe2eeba40b70d
|
snippets/list_holidays.py
|
snippets/list_holidays.py
|
#!/usr/bin/env python
import pandas as pd
from datetime import date
import holidays
def sanitize_holiday_name(name):
new_name = [c for c in name if c.isalpha() or c.isdigit() or c == ' ']
new_name = "".join(new_name).lower().replace(" ", "_")
return new_name
def process_holidays(df):
# Create a date object
user_date = date(df['year'], df['month'], df['day'])
# Get US holidays for this year
holidays_dates = holidays.US(years=df['year'])
for holiday_date, name in holidays_dates.iteritems():
# if 'observed' in name:
# pass
# Compute difference in days
days = (holiday_date - user_date).days
# Clean holiday name
name = sanitize_holiday_name(name)
# Add the computed days to holiday into our DataFrame
df['days_to_' + name] = days
return df
def main():
df = pd.DataFrame()
df['year'] = pd.Series(range(2010, 2015))
df['day'] = pd.Series(range(11, 27, 3))
df['month'] = pd.Series(range(2, 12, 2))
print df.apply(process_holidays, axis=1)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import pandas as pd
from datetime import date
import holidays
def sanitize_holiday_name(name):
    """Slugify a holiday name.

    Strips every character that is not a letter, digit or space, then
    lowercases the result and turns spaces into underscores.
    """
    cleaned = ''.join(
        ch for ch in name if ch == ' ' or ch.isalpha() or ch.isdigit()
    )
    return cleaned.lower().replace(' ', '_')
def process_holidays(df):
    """Add a ``days_to_<holiday>`` column for each US holiday.

    Intended for ``DataFrame.apply(..., axis=1)``, so ``df`` is a single
    row (Series).  Each added value is the signed number of days from the
    user's account-creation date to the holiday.
    """
    # Create a date object
    user_date = date(
        df['year_account_created'],
        df['month_account_created'],
        df['day_account_created']
    )

    # Get US holidays for this year
    # NOTE(review): holidays are looked up by df['year'] while the reference
    # date uses df['year_account_created'] -- confirm they are meant to
    # differ; otherwise this raises KeyError when 'year' is absent.
    holidays_dates = holidays.US(years=df['year'])

    for holiday_date, name in holidays_dates.iteritems():
        # if 'observed' in name:
        #     pass

        # Compute difference in days
        days = (holiday_date - user_date).days

        # Clean holiday name
        name = sanitize_holiday_name(name)

        # Add the computed days to holiday into our DataFrame
        df['days_to_' + name] = days

    return df
def main():
path = '../datasets/processed/'
train_users = pd.read_csv(path + 'processed_train_users.csv')
train_users = train_users.head(500)
test_users = pd.read_csv(path + 'processed_train_users.csv')
test_users = test_users.head(500)
train_users = train_users.apply(process_holidays, axis=1)
print train_users.columns
if __name__ == '__main__':
main()
|
Update with real train users
|
Update with real train users
|
Python
|
mit
|
davidgasquez/kaggle-airbnb
|
4a8aaf3c9e1da5fd10580cc5a3859d801f2c9553
|
django_docker/django_docker/urls.py
|
django_docker/django_docker/urls.py
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/', include(admin.site.urls)),
url(r'^$', 'hello_world.views.hello_world', name='hello_world'),
)
|
from django.conf.urls import include, url
from hello_world import views as hello_world_views
from django.contrib import admin
admin.autodiscover()
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^$', hello_world_views.hello_world, name='hello_world'),
]
|
Update URLs to be compatible with Django 1.10
|
Update URLs to be compatible with Django 1.10
|
Python
|
mit
|
morninj/django-docker,morninj/django-docker,morninj/django-docker
|
caeb76cbcb6cdd49138e41f57144573598b722ba
|
source/clique/__init__.py
|
source/clique/__init__.py
|
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
from ._version import __version__
|
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
import re
from collections import defaultdict
from ._version import __version__
from .collection import Collection
from .error import CollectionError
#: Pattern for matching an index with optional padding.
DIGITS_PATTERN = '(?P<index>(?P<padding>0*)\d+)'
_DIGITS_REGEX = re.compile(DIGITS_PATTERN)
#: Common patterns that can be passed to :py:func:`~clique.assemble`.
PATTERNS = {
'frames': '\.{0}\.\D+\d?$'.format(DIGITS_PATTERN),
'versions': 'v{0}'.format(DIGITS_PATTERN)
}
def assemble(iterable, patterns=None, minimum_items=2):
    '''Assemble items in *iterable* into discreet collections.

    *patterns* may be specified as a list of regular expressions to limit
    the returned collection possibilities. Use this when interested in
    collections that only match specific patterns. Each pattern must contain
    the expression from :py:data:`DIGITS_PATTERN` exactly once.

    A selection of common expressions are available in :py:data:`PATTERNS`.

    .. note::

        If a pattern is supplied as a string it will be automatically compiled
        to a regular expression for convenience.

    When *patterns* is not specified, collections are formed by examining all
    possible groupings of the items in *iterable* based around common numerical
    components.

    *minimum_items* dictates the minimum number of items a collection must have
    in order to be included in the result. The default is 2, filtering out
    single item collections.

    Return list of assembled :py:class:`~clique.collection.Collection`
    instances.

    '''
    collection_map = defaultdict(set)
    collections = []

    # Compile patterns.
    compiled_patterns = []

    if patterns is not None:
        if not patterns:
            # An explicitly empty pattern list can match nothing.
            return collections

        for pattern in patterns:
            if isinstance(pattern, basestring):  # NOTE: Python 2 only.
                compiled_patterns.append(re.compile(pattern))
            else:
                compiled_patterns.append(pattern)
    else:
        # Default: group on any run of digits anywhere in each item.
        compiled_patterns.append(_DIGITS_REGEX)

    # Process iterable.
    for item in iterable:
        for pattern in compiled_patterns:
            for match in pattern.finditer(item):
                index = match.group('index')

                # The text before and after the matched index, together with
                # the padding width, identify the candidate collection.
                head = item[:match.start('index')]
                tail = item[match.end('index'):]

                padding = match.group('padding')
                if padding:
                    # Zero-padded index: the full padded width is significant.
                    padding = len(index)
                else:
                    padding = 0

                key = (head, tail, padding)
                collection_map[key].add(int(index))

    # Form collections, filtering out those that do not have at least
    # as many indexes as minimum_items
    for (head, tail, padding), indexes in collection_map.items():
        if len(indexes) >= minimum_items:
            collections.append(
                Collection(head, tail, padding, indexes)
            )

    return collections
|
Add top level function to help assemble collections from arbitrary items.
|
Add top level function to help assemble collections from arbitrary items.
|
Python
|
apache-2.0
|
4degrees/clique
|
21b97ceea5b2e667940ddd45682313261eba845b
|
discode_server/notify.py
|
discode_server/notify.py
|
import json
from discode_server import db
from discode_server import fragments
connected = set()
notified = set()
async def feed(request, ws):
global connected
connected.add(ws)
print("Open WebSockets: ", len(connected))
try:
while True:
if not ws.open:
return
async with request.app.config.DB.acquire() as conn:
await conn.execute(f"LISTEN channel")
msg = await conn.connection.notifies.get()
if not ws.open:
return
fingerprint = ws.remote_address, msg.payload
if fingerprint in notified:
continue
notified.add(fingerprint)
paste_id, lineno, comment_id = msg.payload.split(',')
paste = await db.get_paste(conn, int(paste_id))
html = fragments.comment_row(lineno,
paste.comments[int(lineno)])
data = json.dumps({
"html": html,
"lineno": lineno,
"paste_id": paste.id,
"comment_id": comment_id,
})
await ws.send(data)
finally:
connected.remove(ws)
print("Open WebSockets: ", len(connected))
|
import asyncio
import json
from discode_server import db
from discode_server import fragments
connected = set()
notified = set()
async def feed(request, ws):
    """WebSocket feed pushing new-comment notifications to one client.

    Listens on the Postgres NOTIFY channel and forwards each new comment
    (rendered as an HTML fragment) to the connected websocket until the
    socket closes.
    """
    global connected
    connected.add(ws)
    print("Open WebSockets: ", len(connected))
    try:
        while True:
            if not ws.open:
                return
            async with request.app.config.DB.acquire() as conn:
                await conn.execute(f"LISTEN channel")
                try:
                    # Poll with a 1-second timeout so the loop can notice a
                    # closed websocket instead of blocking forever on the
                    # notification queue.
                    msg = await asyncio.wait_for(
                        conn.connection.notifies.get(), 1)
                except asyncio.TimeoutError:
                    continue
                if not ws.open:
                    return
                # De-duplicate: notify each client at most once per payload.
                fingerprint = ws.remote_address, msg.payload
                if fingerprint in notified:
                    continue
                notified.add(fingerprint)
                # Payload format: "<paste_id>,<lineno>,<comment_id>"
                paste_id, lineno, comment_id = msg.payload.split(',')
                paste = await db.get_paste(conn, int(paste_id))
                html = fragments.comment_row(lineno,
                                             paste.comments[int(lineno)])
                data = json.dumps({
                    "html": html,
                    "lineno": lineno,
                    "paste_id": paste.id,
                    "comment_id": comment_id,
                })
                await ws.send(data)
    finally:
        connected.remove(ws)
        print("Open WebSockets: ", len(connected))
|
Add the asyncio wait_for back in
|
Add the asyncio wait_for back in
|
Python
|
bsd-2-clause
|
d0ugal/discode-server,d0ugal/discode-server,d0ugal/discode-server
|
c3d03629734abfead5ae1eae83d1b6dcec792b45
|
iconizer/django_in_iconizer/django_server.py
|
iconizer/django_in_iconizer/django_server.py
|
import os
class DjangoServer(object):
default_django_manage_script = "manage.py"
def __init__(self, django_manage_script=None):
super(DjangoServer, self).__init__()
if django_manage_script is None:
self.django_manage_script = self.default_django_manage_script
else:
self.django_manage_script = django_manage_script
def get_task_descriptor(self, task_name, param_list=[]):
task_name_and_param = [self.django_manage_script, task_name]
task_name_and_param.extend(param_list)
return {task_name: task_name_and_param}
# noinspection PyMethodMayBeStatic
def get_cmd_str(self, cmd_name, param_list=[]):
return "python %s %s" % (self.django_manage_script, cmd_name)
def execute_cmd(self, django_cmd):
os.system(self.get_cmd_str(django_cmd))
|
import os
class DjangoServer(object):
    """Thin wrapper around a Django project's ``manage.py`` script.

    Builds task descriptors and shell command strings for Django management
    commands, and can execute them via ``os.system``.  The ``MANAGE_PY``
    environment variable overrides the configured manage script.
    """

    #: Script used when neither an explicit path nor $MANAGE_PY is given.
    default_django_manage_script = "manage.py"

    def __init__(self, django_manage_script=None):
        super(DjangoServer, self).__init__()
        if django_manage_script is None:
            self.django_manage_script = self.default_django_manage_script
        else:
            self.django_manage_script = django_manage_script
        # Environment variable takes precedence over any value chosen above.
        self.django_manage_script = os.environ.get(
            "MANAGE_PY", self.django_manage_script)

    def get_task_descriptor(self, task_name, param_list=None):
        """Return ``{task_name: [manage_script, task_name, *param_list]}``.

        ``param_list`` defaults to no extra parameters.  (Fixed: the
        original used a mutable default ``[]`` shared across calls, so a
        caller mutating the returned list could corrupt later descriptors.)
        """
        task_name_and_param = [self.django_manage_script, task_name]
        if param_list:
            task_name_and_param.extend(param_list)
        return {task_name: task_name_and_param}

    # noinspection PyMethodMayBeStatic
    def get_cmd_str(self, cmd_name, param_list=None):
        """Return the shell command string for ``cmd_name``.

        NOTE(review): ``param_list`` is accepted but not included in the
        command string; behaviour kept identical to the original.
        """
        return "python %s %s" % (self.django_manage_script, cmd_name)

    def execute_cmd(self, django_cmd):
        """Run the given management command through ``os.system``."""
        os.system(self.get_cmd_str(django_cmd))
|
Enable specify command line processor.
|
Enable specify command line processor.
|
Python
|
bsd-3-clause
|
weijia/iconizer
|
fce890ce9046dd055f219ac880ae9734f334f534
|
greenlight/harness/arguments.py
|
greenlight/harness/arguments.py
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from marionette import BaseMarionetteOptions
from greenlight import tests
class ReleaseTestParser(BaseMarionetteOptions):
def parse_args(self, *args, **kwargs):
options, test_files = BaseMarionetteOptions.parse_args(self,
*args, **kwargs)
if not test_files:
test_files = [tests.manifest]
return (options, test_files)
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from marionette import BaseMarionetteOptions
from greenlight import tests
class ReleaseTestParser(BaseMarionetteOptions):
    """Argument parser defaulting to the mach log formatter and the bundled
    greenlight test manifest."""

    def parse_args(self, *args, **kwargs):
        options, test_files = BaseMarionetteOptions.parse_args(self,
                                                               *args, **kwargs)
        # If no log_* option already routes output to stdout ('-'), enable
        # the mach (colored terminal) formatter on stdout by default.
        if not any([(k.startswith('log_') and v is not None and '-' in v)
                    for (k, v) in vars(options).items()]):
            options.log_mach = '-'

        if not test_files:
            # Fall back to the bundled test manifest.
            test_files = [tests.manifest]

        return (options, test_files)
|
Make colored terminal output the default log formatter.
|
Make colored terminal output the default log formatter.
|
Python
|
mpl-2.0
|
myrdd/firefox-ui-tests,gbrmachado/firefox-ui-tests,utvar/firefox-ui-tests,chmanchester/firefox-ui-tests,armenzg/firefox-ui-tests,whimboo/firefox-ui-tests,myrdd/firefox-ui-tests,gbrmachado/firefox-ui-tests,sr-murthy/firefox-ui-tests,armenzg/firefox-ui-tests,Motwani/firefox-ui-tests,galgeek/firefox-ui-tests,armenzg/firefox-ui-tests,sr-murthy/firefox-ui-tests,sr-murthy/firefox-ui-tests,galgeek/firefox-ui-tests,myrdd/firefox-ui-tests,utvar/firefox-ui-tests,galgeek/firefox-ui-tests,whimboo/firefox-ui-tests,whimboo/firefox-ui-tests,Motwani/firefox-ui-tests,chmanchester/firefox-ui-tests
|
63e09b77e3b00a7483249417020fb093f98773a9
|
infrastructure/tests/helpers.py
|
infrastructure/tests/helpers.py
|
from datetime import datetime
from django.contrib.staticfiles.testing import LiveServerTestCase
from selenium import webdriver
from selenium.webdriver import DesiredCapabilities
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.wait import WebDriverWait
class BaseSeleniumTestCase(LiveServerTestCase):
def setUp(self):
super(BaseSeleniumTestCase, self).setUp()
chrome_options = webdriver.ChromeOptions()
chrome_options.add_argument("headless")
chrome_options.add_argument("--no-sandbox")
d = DesiredCapabilities.CHROME
d["loggingPrefs"] = {"browser": "ALL"}
self.selenium = webdriver.Chrome(
chrome_options=chrome_options, desired_capabilities=d
)
self.selenium.implicitly_wait(10)
self.wait = WebDriverWait(self.selenium, 5)
self.addCleanup(self.selenium.quit)
def wait_until_text_in(self, selector, text):
self.wait.until(
EC.text_to_be_present_in_element((By.CSS_SELECTOR, selector), text)
)
|
from datetime import datetime
from django.contrib.staticfiles.testing import LiveServerTestCase
from selenium import webdriver
from selenium.webdriver import DesiredCapabilities
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.wait import WebDriverWait
import logging
logger = logging.Logger(__name__)
class BaseSeleniumTestCase(LiveServerTestCase):
    """Base class for Selenium tests driving headless Chrome against the
    Django live server."""

    def setUp(self):
        super(BaseSeleniumTestCase, self).setUp()
        chrome_options = webdriver.ChromeOptions()
        chrome_options.add_argument("headless")
        chrome_options.add_argument("--no-sandbox")
        d = DesiredCapabilities.CHROME
        # Capture all browser console output for debugging.
        d["loggingPrefs"] = {"browser": "ALL"}
        self.selenium = webdriver.Chrome(
            chrome_options=chrome_options, desired_capabilities=d
        )
        self.selenium.implicitly_wait(10)
        self.wait = WebDriverWait(self.selenium, 5)
        self.addCleanup(self.selenium.quit)

    def wait_until_text_in(self, selector, text):
        """Wait until ``text`` is present in the element matching ``selector``.

        NOTE(review): ``wait.until`` raises TimeoutException on failure
        rather than returning False, so the ``else`` branch below looks
        unreachable -- confirm the intended log-on-failure behaviour.
        """
        if self.wait.until(EC.text_to_be_present_in_element((By.CSS_SELECTOR, selector), text)):
            pass
        else:
            text_content = self.selenium.find_elements_by_css_selector(selector)[0].text
            logger.error("Element contents: %s" % text_content)
|
Add logging to selector testing
|
Add logging to selector testing
|
Python
|
mit
|
Code4SA/municipal-data,Code4SA/municipal-data,Code4SA/municipal-data,Code4SA/municipal-data
|
d07b6483d110eb4c51f7c631f888d1bf30eacc1c
|
destroyer-runner.py
|
destroyer-runner.py
|
#!/usr/bin/python
"""destroyer-runner.py - Run the main application"""
import sys
import subprocess
if __name__ == '__main__':
subprocess.call(['python', './destroyer/destroyer.py'] + [str(arg) for arg in sys.argv[1:]])
|
#!/usr/bin/python
"""destroyer-runner.py - Run the main application"""
from destroyer.destroyer import main
if __name__ == '__main__':
main()
|
Update with working code to run destroyer
|
Update with working code to run destroyer
|
Python
|
mit
|
jaredmichaelsmith/destroyer
|
261c294690427b57d111c777cdc4d13b9c84f9d2
|
cloudkittydashboard/dashboards/admin/modules/forms.py
|
cloudkittydashboard/dashboards/admin/modules/forms.py
|
# Copyright 2017 Objectif Libre
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.utils.translation import ugettext_lazy as _
from horizon import forms
from cloudkittydashboard.api import cloudkitty as api
LOG = logging.getLogger(__name__)
class EditPriorityForm(forms.SelfHandlingForm):
priority = forms.IntegerField(label=_("Priority"), required=True)
def handle(self, request, data):
ck_client = api.cloudkittyclient(request)
return ck_client.modules.update(
module_id=self.initial["module_id"],
priority=data["priority"]
)
|
# Copyright 2017 Objectif Libre
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
from horizon import forms
from cloudkittydashboard.api import cloudkitty as api
class EditPriorityForm(forms.SelfHandlingForm):
priority = forms.IntegerField(label=_("Priority"), required=True)
def handle(self, request, data):
ck_client = api.cloudkittyclient(request)
return ck_client.modules.update(
module_id=self.initial["module_id"],
priority=data["priority"]
)
|
Delete the unused LOG code
|
Delete the unused LOG code
Change-Id: Ief253cdd226f8c2688429b0ff00785151a99759b
|
Python
|
apache-2.0
|
stackforge/cloudkitty-dashboard,openstack/cloudkitty-dashboard,openstack/cloudkitty-dashboard,openstack/cloudkitty-dashboard,stackforge/cloudkitty-dashboard,stackforge/cloudkitty-dashboard
|
316323387c508c88595f205182ea2436c271621d
|
src/highdicom/uid.py
|
src/highdicom/uid.py
|
import logging
import pydicom
logger = logging.getLogger(__name__)
class UID(pydicom.uid.UID):
"""Unique DICOM identifier with a highdicom-specific UID prefix."""
def __new__(cls: type) -> str:
prefix = '1.2.826.0.1.3680043.10.511.3.'
identifier = pydicom.uid.generate_uid(prefix=prefix)
return super().__new__(cls, identifier)
|
import logging
from typing import Type, TypeVar
import pydicom
logger = logging.getLogger(__name__)
T = TypeVar('T', bound='UID')
class UID(pydicom.uid.UID):
    """Unique DICOM identifier with a highdicom-specific UID prefix."""

    def __new__(cls: Type[T]) -> T:
        # All generated UIDs share this registered root prefix.
        prefix = '1.2.826.0.1.3680043.10.511.3.'
        identifier = pydicom.uid.generate_uid(prefix=prefix)
        return super().__new__(cls, identifier)
|
Fix typing for UID class
|
Fix typing for UID class
|
Python
|
mit
|
MGHComputationalPathology/highdicom
|
ee53ec51d98802bf0bc55e70c39cc0918f2bb274
|
icekit/plugins/blog_post/content_plugins.py
|
icekit/plugins/blog_post/content_plugins.py
|
"""
Definition of the plugin.
"""
from django.apps import apps
from django.conf import settings
from django.db.models.loading import get_model
from django.utils.translation import ugettext_lazy as _
from fluent_contents.extensions import ContentPlugin, plugin_pool
default_blog_model = 'blog_tools.BlogPost'
icekit_blog_model = getattr(settings, 'ICEKIT_BLOG_MODEL', default_blog_model)
BLOG_MODEL = apps.get_model(*icekit_blog_model.rsplit('.', 1))
if icekit_blog_model != default_blog_model:
@plugin_pool.register
class BlogPostPlugin(ContentPlugin):
model = get_model(getattr(settings, 'ICEKIT_BLOG_CONTENT_ITEM', 'blog_post.PostItem'))
category = _('Blog')
render_template = 'icekit/plugins/post/default.html'
raw_id_fields = ['post', ]
|
"""
Definition of the plugin.
"""
from django.apps import apps
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from fluent_contents.extensions import ContentPlugin, plugin_pool
default_blog_model = 'blog_tools.BlogPost'
icekit_blog_model = getattr(settings, 'ICEKIT_BLOG_MODEL', default_blog_model)
BLOG_MODEL = apps.get_model(*icekit_blog_model.rsplit('.', 1))
if icekit_blog_model != default_blog_model:
@plugin_pool.register
class BlogPostPlugin(ContentPlugin):
model = apps.get_model(getattr(settings, 'ICEKIT_BLOG_CONTENT_ITEM', 'blog_post.BlogPostItem'))
category = _('Blog')
render_template = 'icekit/plugins/post/default.html'
raw_id_fields = ['post', ]
|
Update Blog model and content item matching
|
Update Blog model and content item matching
|
Python
|
mit
|
ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit
|
c7143cd725fc829c33ad9f9150e5975deb7be93a
|
irctest/optional_extensions.py
|
irctest/optional_extensions.py
|
import unittest
import operator
import itertools
class OptionalExtensionNotSupported(unittest.SkipTest):
def __str__(self):
return 'Unsupported extension: {}'.format(self.args[0])
class OptionalSaslMechanismNotSupported(unittest.SkipTest):
def __str__(self):
return 'Unsupported SASL mechanism: {}'.format(self.args[0])
class OptionalityReportingTextTestRunner(unittest.TextTestRunner):
def run(self, test):
result = super().run(test)
if result.skipped:
print()
print('Some tests were skipped because the following optional'
'specifications/mechanisms are not supported:')
msg_to_tests = itertools.groupby(result.skipped,
key=operator.itemgetter(1))
for (msg, tests) in msg_to_tests:
print('\t{} ({} test(s))'.format(msg, sum(1 for x in tests)))
return result
|
import unittest
import operator
import itertools
class NotImplementedByController(unittest.SkipTest):
    """Skip marker raised when the controller lacks a required feature."""

    def __str__(self):
        return 'Not implemented by controller: {}'.format(self.args[0])
class OptionalExtensionNotSupported(unittest.SkipTest):
def __str__(self):
return 'Unsupported extension: {}'.format(self.args[0])
class OptionalSaslMechanismNotSupported(unittest.SkipTest):
def __str__(self):
return 'Unsupported SASL mechanism: {}'.format(self.args[0])
class OptionalityReportingTextTestRunner(unittest.TextTestRunner):
    """Test runner that summarizes skipped optional features after the run."""

    def run(self, test):
        result = super().run(test)
        if result.skipped:
            print()
            # NOTE(review): the two adjacent string literals below concatenate
            # without a space ("optionalspecifications") -- likely a typo.
            print('Some tests were skipped because the following optional'
                  'specifications/mechanisms are not supported:')
            # Group skip entries by their reason message and report how many
            # tests each affected; sorted() gives deterministic output.
            # NOTE(review): itertools.groupby only merges adjacent entries,
            # so identical reasons may appear twice unless result.skipped is
            # pre-sorted by message.
            msg_to_tests = itertools.groupby(result.skipped,
                                             key=operator.itemgetter(1))
            for (msg, tests) in sorted(msg_to_tests):
                print('\t{} ({} test(s))'.format(msg, sum(1 for x in tests)))
        return result
|
Add an exception to tell a controller does not implement something.
|
Add an exception to tell a controller does not implement something.
|
Python
|
mit
|
ProgVal/irctest
|
a0ce4d366681f2f62f232f4f952ac18df07667d4
|
ideascube/conf/idb_fra_cultura.py
|
ideascube/conf/idb_fra_cultura.py
|
# -*- coding: utf-8 -*-
"""Ideaxbox Cultura, France"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
IDEASCUBE_NAME = u"Cultura"
IDEASCUBE_PLACE_NAME = _("city")
COUNTRIES_FIRST = ['FR']
TIME_ZONE = None
LANGUAGE_CODE = 'fr'
LOAN_DURATION = 14
MONITORING_ENTRY_EXPORT_FIELDS = ['serial', 'user_id', 'birth_year', 'gender']
USER_FORM_FIELDS = (
(_('Personal informations'), ['serial', 'short_name', 'full_name', 'latin_name', 'birth_year', 'gender']), # noqa
)
HOME_CARDS = HOME_CARDS + [
{
'id': 'cpassorcier',
},
{
'id': 'wikisource',
},
{
'id': 'software',
},
{
'id': 'ted',
},
{
'id': 'ubuntudoc',
},
]
|
# -*- coding: utf-8 -*-
"""Ideaxbox Cultura, France"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
IDEASCUBE_NAME = u"Cultura"
IDEASCUBE_PLACE_NAME = _("city")
COUNTRIES_FIRST = ['FR']
TIME_ZONE = None
LANGUAGE_CODE = 'fr'
LOAN_DURATION = 14
MONITORING_ENTRY_EXPORT_FIELDS = ['serial', 'user_id', 'birth_year', 'gender']
USER_FORM_FIELDS = (
(_('Personal informations'), ['serial', 'short_name', 'full_name', 'latin_name', 'birth_year', 'gender']), # noqa
)
HOME_CARDS = HOME_CARDS + [
{
'id': 'cpassorcier',
},
{
'id': 'wikisource',
},
{
'id': 'ted',
},
{
'id': 'ubuntudoc',
},
]
|
Remove "software" card from Cultura conf
|
Remove "software" card from Cultura conf
|
Python
|
agpl-3.0
|
ideascube/ideascube,ideascube/ideascube,ideascube/ideascube,ideascube/ideascube
|
a977908efcc176e1e5adbd82843033805953c6cb
|
tools/reago/format_reago_input_files.py
|
tools/reago/format_reago_input_files.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
import os
import argparse
import re
reago_dir = '/tools/rna_manipulation/reago/reago/'
def add_read_pair_num(input_filepath, output_filepath, read_pair_num):
to_add = '.' + str(read_pair_num)
with open(input_filepath,'r') as input_file:
with open(output_filepath,'w') as output_file:
for line in input_file:
if line[0] == '>':
split_line = line.split()
seq_id = split_line[0]
if seq_id.rfind(to_add) != (len(seq_id)-len(to_add)):
split_line[0] = seq_id + to_add
output_file.write(' '.join(split_line) + '\n')
else:
output_file.write(line)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--r1_sequence_file', required=True)
parser.add_argument('--r2_sequence_file', required=True)
args = parser.parse_args()
add_read_pair_num(args.r1_input_sequence_file, args.r1_input_sequence_file, 1)
add_read_pair_num(args.r2_input_sequence_file, args.r2_input_sequence_file, 2)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
import os
import argparse
import re
reago_dir = '/tools/rna_manipulation/reago/reago/'
def add_read_pair_num(input_filepath, output_filepath, read_pair_num):
    """Append '.<read_pair_num>' to FASTA header IDs lacking the suffix.

    Copies ``input_filepath`` to ``output_filepath``.  On header lines
    (starting with '>') the first whitespace-separated token gets the
    '.<read_pair_num>' suffix unless it already ends with it; all other
    lines pass through unchanged.
    """
    suffix = '.' + str(read_pair_num)
    with open(input_filepath, 'r') as src:
        with open(output_filepath, 'w') as dst:
            for line in src:
                if line[0] == '>':
                    fields = line.split()
                    if not fields[0].endswith(suffix):
                        fields[0] = fields[0] + suffix
                    dst.write(' '.join(fields) + '\n')
                else:
                    dst.write(line)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--r1_sequence_file', required=True)
parser.add_argument('--r2_sequence_file', required=True)
args = parser.parse_args()
add_read_pair_num(args.r1_sequence_file, args.r1_sequence_file, 1)
add_read_pair_num(args.r2_sequence_file, args.r2_sequence_file, 2)
|
Correct argument name in script to format reago input file
|
Correct argument name in script to format reago input file
|
Python
|
apache-2.0
|
ASaiM/galaxytools,ASaiM/galaxytools
|
37c59a8535ac0d68a995af378fcb61f03070e018
|
kombu_fernet/serializers/__init__.py
|
kombu_fernet/serializers/__init__.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
import os
from cryptography.fernet import Fernet, MultiFernet
fernet = Fernet(os.environ['KOMBU_FERNET_KEY'])
fallback_fernet = None
try:
fallback_fernet = Fernet(os.environ['OLD_KOMBU_FERNET_KEY'])
except KeyError:
pass
else:
fernet = MultiFernet([fernet, fallback_fernet])
def fernet_encode(func):
def inner(message):
return fernet.encrypt(func(message))
return inner
def fernet_decode(func):
def inner(encoded_message):
if isinstance(encoded_message, unicode):
encoded_message = encoded_message.encode('utf-8')
message = fernet.decrypt(encoded_message)
return func(message)
return inner
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
import os
from cryptography.fernet import Fernet, MultiFernet
fernet = Fernet(os.environ['KOMBU_FERNET_KEY'])
fallback_fernet = None
try:
fallback_fernet = Fernet(os.environ['KOMBU_FERNET_KEY_PREVIOUS'])
except KeyError:
pass
else:
fernet = MultiFernet([fernet, fallback_fernet])
def fernet_encode(func):
def inner(message):
return fernet.encrypt(func(message))
return inner
def fernet_decode(func):
def inner(encoded_message):
if isinstance(encoded_message, unicode):
encoded_message = encoded_message.encode('utf-8')
message = fernet.decrypt(encoded_message)
return func(message)
return inner
|
Make update previous key name
|
Make update previous key name
|
Python
|
mit
|
heroku/kombu-fernet-serializers
|
a83dd9bfce43a898cf71c9e66192ad589a02b6c8
|
macroeco/compare/__init__.py
|
macroeco/compare/__init__.py
|
"""
=================================
Compare (:mod:`macroeco.compare`)
=================================
This module contains functions that compare the goodness of fit of a
distribution/curve to data or the fit of two distributions/curves to each
other.
Comparison Functions
====================
.. autosummary::
:toctree: generated/
get_AIC
get_AICC
get_AIC_weights
get_nll
get_empirical_cdf
get_sum_of_squares
get_r_squared
get_chi_squared
get_lrt
bin_data
"""
from .compare import *
|
"""
=================================
Compare (:mod:`macroeco.compare`)
=================================
This module contains functions that compare the goodness of fit of a
distribution/curve to data or the fit of two distributions/curves to each
other.
Comparison Functions
====================
.. autosummary::
:toctree: generated/
AIC
AICC
AIC_weights
nll
empirical_cdf
sum_of_squares
r_squared
chi_squared
lrt
bin_data
"""
from .compare import *
|
Remove get_ prefixes from compare docstring
|
Remove get_ prefixes from compare docstring
|
Python
|
bsd-2-clause
|
jkitzes/macroeco
|
2015233d252e625419485c269f1f70a7e0edada8
|
skmisc/__init__.py
|
skmisc/__init__.py
|
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
__all__ = ['__version__']
# We first need to detect if we're being called as part of the skmisc
# setup procedure itself in a reliable manner.
try:
__SKMISC_SETUP__
except NameError:
__SKMISC_SETUP__ = False
if __SKMISC_SETUP__:
import sys as _sys
_sys.stderr.write('Running from skmisc source directory.\n')
del _sys
else:
from skmisc.__config__ import show as show_config # noqa: F401
# try:
# from skmisc.__config__ import show as show_config # noqa: F401
# except ImportError:
# msg = """Error importing skmisc: you cannot import skmisc while
# being in skmisc source directory; please exit the skmisc source
# tree first, and relaunch your python intepreter."""
# raise ImportError(msg)
__all__.append('show_config')
def test(args=None, plugins=None):
"""
Run tests
"""
# The doctests are not run when called from an installed
# package since the pytest.ini is not included in the
# package.
import os
try:
import pytest
except ImportError:
msg = "To run the tests, you must install pytest"
raise ImportError(msg)
path = os.path.realpath(__file__)
if args is None:
args = [path]
else:
args.append(path)
return pytest.main(args=args, plugins=plugins)
|
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
__all__ = ['__version__']
# We first need to detect if we're being called as part of the skmisc
# setup procedure itself in a reliable manner.
try:
__SKMISC_SETUP__
except NameError:
__SKMISC_SETUP__ = False
if __SKMISC_SETUP__:
import sys as _sys
_sys.stderr.write('Running from skmisc source directory.\n')
del _sys
else:
try:
from skmisc.__config__ import show as show_config # noqa: F401
except ImportError as err:
msg = """Error importing skmisc: you cannot import skmisc while
being in skmisc source directory; please exit the skmisc source
tree first, and relaunch your python intepreter."""
raise ImportError('\n\n'.join([err.message, msg]))
__all__.append('show_config')
def test(args=None, plugins=None):
"""
Run tests
"""
# The doctests are not run when called from an installed
# package since the pytest.ini is not included in the
# package.
import os
try:
import pytest
except ImportError:
msg = "To run the tests, you must install pytest"
raise ImportError(msg)
path = os.path.dirname(os.path.realpath(__file__))
if args is None:
args = [path]
else:
args.append(path)
return pytest.main(args=args, plugins=plugins)
|
Fix pytest path to root of package
|
Fix pytest path to root of package
Instead of the package init file.
|
Python
|
bsd-3-clause
|
has2k1/onelib,has2k1/onelib,has2k1/onelib
|
8187591a0f8255487f4b16b653ba5070bfffe739
|
specs/test_diff.py
|
specs/test_diff.py
|
'''
This is an example of a python test
that compares a diff function (in this case
a hardcoded one that doesn't work) to the
reference JSON to check compliance.
'''
from nose.tools import eq_
import json
def diff(before, after):
return []
def test_diffs():
test_cases = json.load(open('test_cases.json'))
for test_case in test_cases:
result = diff(test_case['before'], test_case['after'])
eq_(result, test_case['diff'])
|
'''
This is an example of a python test
that compares a diff function (in this case
a hardcoded one that doesn't work) to the
reference JSON to check compliance.
'''
from nose.tools import eq_
import json
def diff(before, after):
return []
def test_diffs():
test_cases = json.load(open('test_cases_simple.json'))
for test_case in test_cases:
result = diff(test_case['before'], test_case['after'])
eq_(result, test_case['diff'])
def test_diffs_cells():
test_cases = json.load(open('test_cases_cells.json'))
for test_case in test_cases:
result = diff(test_case['before'], test_case['after'])
eq_(result, test_case['diff'])
|
Add code to specs example test
|
Add code to specs example test
|
Python
|
mit
|
tarmstrong/nbdiff,tarmstrong/nbdiff,tarmstrong/nbdiff,tarmstrong/nbdiff
|
05151bb3ccd018b37097ddf5288e9984f5b45716
|
ci/management/commands/cancel_old_jobs.py
|
ci/management/commands/cancel_old_jobs.py
|
from __future__ import unicode_literals, absolute_import
from django.core.management.base import BaseCommand
from ci import models, views, TimeUtils
from datetime import timedelta
class Command(BaseCommand):
help = 'Cancel old Civet jobs. When a specific civet client is no longer running, it can leave jobs lying around that other clients have to ignore.'
def add_arguments(self, parser):
parser.add_argument('--dryrun', default=False, action='store_true', help="Don't make any changes, just report what would have happened")
parser.add_argument('--days', required=True, type=int, help="Cancel jobs older than this many days")
def handle(self, *args, **options):
dryrun = options["dryrun"]
days = options["days"]
d = TimeUtils.get_local_time() - timedelta(days=days)
jobs = models.Job.objects.filter(active=True, ready=True, status=models.JobStatus.NOT_STARTED, created__lt=d)
count = jobs.count()
prefix = ""
if dryrun:
prefix = "DRY RUN: "
for job in jobs.all():
self.stdout.write("%sCancel job %s: %s: %s" % (prefix, job.pk, job, job.created))
if not dryrun:
views.set_job_canceled(job, "Civet client hasn't run this job in too long a time")
job.event.set_complete_if_done()
if count == 0:
self.stdout.write("No jobs to cancel")
|
from __future__ import unicode_literals, absolute_import
from django.core.management.base import BaseCommand
from ci import models, views, TimeUtils
from datetime import timedelta
class Command(BaseCommand):
help = 'Cancel old Civet jobs. When a specific civet client is no longer running, it can leave jobs lying around that other clients have to ignore.'
def add_arguments(self, parser):
parser.add_argument('--dryrun', default=False, action='store_true', help="Don't make any changes, just report what would have happened")
parser.add_argument('--days', required=True, type=int, help="Cancel jobs older than this many days")
def handle(self, *args, **options):
dryrun = options["dryrun"]
days = options["days"]
d = TimeUtils.get_local_time() - timedelta(days=days)
jobs = models.Job.objects.filter(active=True, ready=True, status=models.JobStatus.NOT_STARTED, created__lt=d)
count = jobs.count()
prefix = ""
if dryrun:
prefix = "DRY RUN: "
for job in jobs.all():
self.stdout.write("%sCancel job %s: %s: %s" % (prefix, job.pk, job, job.created))
if not dryrun:
views.set_job_canceled(job, "Canceled due to civet client not running this job in too long a time")
job.event.set_complete_if_done()
if count == 0:
self.stdout.write("No jobs to cancel")
|
Update cancel old job message
|
Update cancel old job message
|
Python
|
apache-2.0
|
idaholab/civet,brianmoose/civet,idaholab/civet,brianmoose/civet,brianmoose/civet,idaholab/civet,idaholab/civet,brianmoose/civet
|
aef51ce5ece86d054f76d86dafca9667f88d3b1a
|
ccui/testexecution/templatetags/results.py
|
ccui/testexecution/templatetags/results.py
|
# Case Conductor is a Test Case Management system.
# Copyright (C) 2011 uTest Inc.
#
# This file is part of Case Conductor.
#
# Case Conductor is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Case Conductor is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Case Conductor. If not, see <http://www.gnu.org/licenses/>.
from django import template
from django.core.urlresolvers import reverse
from ..models import TestCycle, TestRun, TestRunIncludedTestCase
register = template.Library()
@register.filter
def results_detail_url(obj):
if isinstance(obj, TestCycle):
return reverse("results_testruns") + "?testCycle=%s" % obj.id
elif isinstance(obj, TestRun):
return reverse("results_testcases") + "?testRun=%s" % obj.id
elif isinstance(obj, TestRunIncludedTestCase):
return reverse("results_testcase_detail", kwargs={"itc_id": obj.id})
return ""
|
# Case Conductor is a Test Case Management system.
# Copyright (C) 2011 uTest Inc.
#
# This file is part of Case Conductor.
#
# Case Conductor is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Case Conductor is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Case Conductor. If not, see <http://www.gnu.org/licenses/>.
from django import template
from django.core.urlresolvers import reverse
from ..models import TestCycle, TestRun, TestRunIncludedTestCase
register = template.Library()
@register.filter
def results_detail_url(obj):
if isinstance(obj, TestCycle):
return reverse("results_testruns") + "?filter-testCycle=%s" % obj.id
elif isinstance(obj, TestRun):
return reverse("results_testcases") + "?filter-testRun=%s" % obj.id
elif isinstance(obj, TestRunIncludedTestCase):
return reverse("results_testcase_detail", kwargs={"itc_id": obj.id})
return ""
|
Fix result status chiclet links for new-style filter querystrings.
|
Fix result status chiclet links for new-style filter querystrings.
|
Python
|
bsd-2-clause
|
shinglyu/moztrap,shinglyu/moztrap,bobsilverberg/moztrap,mccarrmb/moztrap,mccarrmb/moztrap,mozilla/moztrap,shinglyu/moztrap,bobsilverberg/moztrap,mozilla/moztrap,bobsilverberg/moztrap,mozilla/moztrap,mccarrmb/moztrap,shinglyu/moztrap,mozilla/moztrap,mccarrmb/moztrap,bobsilverberg/moztrap,mccarrmb/moztrap,shinglyu/moztrap,mozilla/moztrap
|
538acc8a114c9fda8489dc5fe91fed2314a37c9b
|
src/sentry/web/forms/invite_organization_member.py
|
src/sentry/web/forms/invite_organization_member.py
|
from __future__ import absolute_import
from django import forms
from django.db import transaction, IntegrityError
from sentry.models import (
AuditLogEntry, AuditLogEntryEvent, OrganizationMember,
OrganizationMemberType
)
class InviteOrganizationMemberForm(forms.ModelForm):
class Meta:
fields = ('email',)
model = OrganizationMember
def save(self, actor, organization, ip_address):
om = super(InviteOrganizationMemberForm, self).save(commit=False)
om.organization = organization
om.type = OrganizationMemberType.MEMBER
try:
existing = OrganizationMember.objects.get(
organization=organization,
user__email__iexact=om.email,
)
except OrganizationMember.DoesNotExist:
pass
else:
return existing, False
sid = transaction.savepoint(using='default')
try:
om.save()
except IntegrityError:
transaction.savepoint_rollback(sid, using='default')
return OrganizationMember.objects.get(
email__iexact=om.email,
organization=organization,
), False
transaction.savepoint_commit(sid, using='default')
AuditLogEntry.objects.create(
organization=organization,
actor=actor,
ip_address=ip_address,
target_object=om.id,
event=AuditLogEntryEvent.MEMBER_INVITE,
data=om.get_audit_log_data(),
)
om.send_invite_email()
return om, True
|
from __future__ import absolute_import
from django import forms
from django.db import transaction, IntegrityError
from sentry.models import (
AuditLogEntry, AuditLogEntryEvent, OrganizationMember,
OrganizationMemberType
)
class InviteOrganizationMemberForm(forms.ModelForm):
class Meta:
fields = ('email',)
model = OrganizationMember
def save(self, actor, organization, ip_address):
om = super(InviteOrganizationMemberForm, self).save(commit=False)
om.organization = organization
om.type = OrganizationMemberType.MEMBER
try:
existing = OrganizationMember.objects.filter(
organization=organization,
user__email__iexact=om.email,
)[0]
except IndexError:
pass
else:
return existing, False
sid = transaction.savepoint(using='default')
try:
om.save()
except IntegrityError:
transaction.savepoint_rollback(sid, using='default')
return OrganizationMember.objects.get(
email__iexact=om.email,
organization=organization,
), False
transaction.savepoint_commit(sid, using='default')
AuditLogEntry.objects.create(
organization=organization,
actor=actor,
ip_address=ip_address,
target_object=om.id,
event=AuditLogEntryEvent.MEMBER_INVITE,
data=om.get_audit_log_data(),
)
om.send_invite_email()
return om, True
|
Handle members with duplicate email addresses
|
Handle members with duplicate email addresses
|
Python
|
bsd-3-clause
|
ifduyue/sentry,jean/sentry,gg7/sentry,daevaorn/sentry,Kryz/sentry,songyi199111/sentry,felixbuenemann/sentry,vperron/sentry,looker/sentry,jean/sentry,looker/sentry,hongliang5623/sentry,alexm92/sentry,daevaorn/sentry,pauloschilling/sentry,fuziontech/sentry,TedaLIEz/sentry,mvaled/sentry,beeftornado/sentry,pauloschilling/sentry,BuildingLink/sentry,mitsuhiko/sentry,zenefits/sentry,alexm92/sentry,zenefits/sentry,songyi199111/sentry,wong2/sentry,Kryz/sentry,BuildingLink/sentry,daevaorn/sentry,drcapulet/sentry,ifduyue/sentry,gg7/sentry,JamesMura/sentry,jean/sentry,nicholasserra/sentry,ngonzalvez/sentry,BayanGroup/sentry,TedaLIEz/sentry,JackDanger/sentry,JackDanger/sentry,JamesMura/sentry,BuildingLink/sentry,TedaLIEz/sentry,JamesMura/sentry,boneyao/sentry,ewdurbin/sentry,1tush/sentry,Natim/sentry,korealerts1/sentry,zenefits/sentry,felixbuenemann/sentry,fotinakis/sentry,kevinlondon/sentry,hongliang5623/sentry,gg7/sentry,zenefits/sentry,JTCunning/sentry,kevinlondon/sentry,ngonzalvez/sentry,mvaled/sentry,ifduyue/sentry,mvaled/sentry,JamesMura/sentry,BuildingLink/sentry,gencer/sentry,nicholasserra/sentry,ewdurbin/sentry,zenefits/sentry,ngonzalvez/sentry,JTCunning/sentry,ifduyue/sentry,JamesMura/sentry,korealerts1/sentry,jean/sentry,beeftornado/sentry,fuziontech/sentry,nicholasserra/sentry,kevinlondon/sentry,looker/sentry,imankulov/sentry,hongliang5623/sentry,1tush/sentry,imankulov/sentry,gencer/sentry,wujuguang/sentry,Natim/sentry,kevinastone/sentry,looker/sentry,kevinastone/sentry,mvaled/sentry,drcapulet/sentry,songyi199111/sentry,ifduyue/sentry,jean/sentry,fotinakis/sentry,BayanGroup/sentry,mvaled/sentry,gencer/sentry,mvaled/sentry,boneyao/sentry,imankulov/sentry,fotinakis/sentry,felixbuenemann/sentry,looker/sentry,gencer/sentry,Natim/sentry,alexm92/sentry,JTCunning/sentry,Kryz/sentry,vperron/sentry,wong2/sentry,wong2/sentry,kevinastone/sentry,ewdurbin/sentry,drcapulet/sentry,vperron/sentry,korealerts1/sentry,fuziontech/sentry,BuildingLink/sentry,fotinakis/sentry,1tush/sentry,
daevaorn/sentry,BayanGroup/sentry,beeftornado/sentry,mitsuhiko/sentry,gencer/sentry,pauloschilling/sentry,wujuguang/sentry,boneyao/sentry,JackDanger/sentry,wujuguang/sentry
|
3c7641c3380acab821dcbf2ae274da4fb8fade96
|
students/psbriant/final_project/test_clean_data.py
|
students/psbriant/final_project/test_clean_data.py
|
"""
Name: Paul Briant
Date: 12/11/16
Class: Introduction to Python
Assignment: Final Project
Description:
Tests for Final Project
"""
import clean_data as cd
import pandas
import io
def get_data():
"""
"""
data = pandas.read_csv("data/Residential_Water_Usage_Zip_Code_on_Top.csv")
return data
def test_clean():
"""
"""
def test_rename_columns():
"""
"""
data = get_data()
data = data.drop(["Date Value"], axis=1)
column_names = list(data.columns.values)
column_list = cd.rename_columns(column_names)
assert column_list[0:5] == ["Date", "90001", "90002", "90003", "90004"]
def test_find_low_water_use():
"""
"""
def test_plot_zipcode():
"""
"""
|
"""
Name: Paul Briant
Date: 12/11/16
Class: Introduction to Python
Assignment: Final Project
Description:
Tests for Final Project
"""
import clean_data as cd
import pandas
def get_data():
"""
Retrieve data from csv file to test.
"""
data = pandas.read_csv("data/Residential_Water_Usage_Zip_Code_on_Top.csv")
return data
def test_clean():
"""
"""
def test_rename_columns():
"""
Test whether rename_columns successfully renames each column.
"""
data = get_data()
data = data.drop(["Date Value"], axis=1)
column_names = list(data.columns.values)
column_list = cd.rename_columns(column_names)
assert column_list[0:5] == ["Date", "90001", "90002", "90003", "90004"]
def test_find_low_water_use():
"""
"""
def test_plot_zipcode():
"""
"""
|
Add docstrings for get_data and test_rename_columns and remove import io statement.
|
Add docstrings for get_data and test_rename_columns and remove import io statement.
|
Python
|
unlicense
|
weidnem/IntroPython2016,UWPCE-PythonCert/IntroPython2016,UWPCE-PythonCert/IntroPython2016,weidnem/IntroPython2016,weidnem/IntroPython2016,UWPCE-PythonCert/IntroPython2016
|
17ddcb1b6c293197834b3154830b9521769d76fb
|
linter.py
|
linter.py
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Hardy Jones
# Copyright (c) 2013
#
# License: MIT
#
"""This module exports the Hlint plugin class."""
from SublimeLinter.lint import Linter
class Hlint(Linter):
"""Provides an interface to hlint."""
syntax = ('haskell', 'haskell-sublimehaskell', 'literate haskell')
cmd = 'hlint'
regex = (
r'^.+:(?P<line>\d+):'
'(?P<col>\d+):\s*'
'(?:(?P<error>Error)|(?P<warning>Warning)):\s*'
'(?P<message>.+)$'
)
multiline = True
tempfile_suffix = {
'haskell': 'hs',
'haskell-sublimehaskell': 'hs',
'literate haskell': 'lhs'
}
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Hardy Jones
# Copyright (c) 2013
#
# License: MIT
#
"""This module exports the Hlint plugin class."""
from SublimeLinter.lint import Linter
class Hlint(Linter):
"""Provides an interface to hlint."""
defaults = {
'selector': 'source.haskell'
}
cmd = 'hlint'
regex = (
r'^.+:(?P<line>\d+):'
'(?P<col>\d+):\s*'
'(?:(?P<error>Error)|(?P<warning>Warning)):\s*'
'(?P<message>.+)$'
)
multiline = True
tempfile_suffix = 'hs'
|
Update to new `defaults` configuration
|
Update to new `defaults` configuration
|
Python
|
mit
|
SublimeLinter/SublimeLinter-hlint
|
2bab1888b43a9c232b37cc26c37df992ea5df2c5
|
project/apps/api/signals.py
|
project/apps/api/signals.py
|
from django.db.models.signals import (
post_save,
)
from django.dispatch import receiver
from .models import (
Performance,
Session,
)
@receiver(post_save, sender=Performance)
def performance_post_save(sender, instance=None, created=False, raw=False, **kwargs):
"""Create sentinels."""
if not raw:
if created:
s = 1
while s <= instance.round.num_songs:
song = instance.songs.create(
performance=instance,
num=s,
)
s += 1
judges = instance.round.session.judges.filter(
category__in=[
instance.round.session.judges.model.CATEGORY.music,
instance.round.session.judges.model.CATEGORY.presentation,
instance.round.session.judges.model.CATEGORY.singing,
]
)
for judge in judges:
judge.scores.create(
judge=judge,
song=song,
category=judge.category,
kind=judge.kind,
)
@receiver(post_save, sender=Session)
def session_post_save(sender, instance=None, created=False, raw=False, **kwargs):
"""Create sentinels."""
if not raw:
if created:
i = 1
while i <= instance.num_rounds:
instance.rounds.create(
num=i,
kind=(instance.num_rounds - i) + 1,
)
i += 1
|
from django.db.models.signals import (
post_save,
)
from django.dispatch import receiver
from .models import (
Performance,
Session,
)
@receiver(post_save, sender=Session)
def session_post_save(sender, instance=None, created=False, raw=False, **kwargs):
"""Create sentinels."""
if not raw:
if created:
i = 1
while i <= instance.num_rounds:
instance.rounds.create(
num=i,
kind=(instance.num_rounds - i) + 1,
)
i += 1
@receiver(post_save, sender=Performance)
def performance_post_save(sender, instance=None, created=False, raw=False, **kwargs):
"""Create sentinels."""
if not raw:
if created:
s = 1
while s <= instance.round.num_songs:
song = instance.songs.create(
performance=instance,
num=s,
)
s += 1
judges = instance.round.session.judges.filter(
category__in=[
instance.round.session.judges.model.CATEGORY.music,
instance.round.session.judges.model.CATEGORY.presentation,
instance.round.session.judges.model.CATEGORY.singing,
]
)
for judge in judges:
judge.scores.create(
judge=judge,
song=song,
category=judge.category,
kind=judge.kind,
)
|
Create sentinel rounds on Session creation
|
Create sentinel rounds on Session creation
|
Python
|
bsd-2-clause
|
barberscore/barberscore-api,barberscore/barberscore-api,dbinetti/barberscore-django,dbinetti/barberscore,barberscore/barberscore-api,dbinetti/barberscore-django,dbinetti/barberscore,barberscore/barberscore-api
|
375b26fbb6e5ba043a1017e28027241c12374207
|
napalm_logs/transport/zeromq.py
|
napalm_logs/transport/zeromq.py
|
# -*- coding: utf-8 -*-
'''
ZeroMQ transport for napalm-logs.
'''
from __future__ import absolute_import
from __future__ import unicode_literals
# Import stdlib
import json
# Import third party libs
import zmq
# Import napalm-logs pkgs
from napalm_logs.transport.base import TransportBase
class ZMQTransport(TransportBase):
'''
ZMQ transport class.
'''
def __init__(self, addr, port):
self.addr = addr
self.port = port
def start(self):
self.context = zmq.Context()
self.socket = self.context.socket(zmq.PUB)
self.socket.bind('tcp://{addr}:{port}'.format(
addr=self.addr,
port=self.port)
)
def serialise(self, obj):
return json.dumps(obj)
def publish(self, obj):
self.socket.send(
self.serialise(obj)
)
def tear_down(self):
if hasattr(self, 'socket'):
self.socket.close()
if hasattr(self, 'context'):
self.context.term()
|
# -*- coding: utf-8 -*-
'''
ZeroMQ transport for napalm-logs.
'''
from __future__ import absolute_import
from __future__ import unicode_literals
# Import stdlib
import json
import logging
# Import third party libs
import zmq
# Import napalm-logs pkgs
from napalm_logs.exceptions import BindException
from napalm_logs.transport.base import TransportBase
log = logging.getLogger(__name__)
class ZMQTransport(TransportBase):
'''
ZMQ transport class.
'''
def __init__(self, addr, port):
self.addr = addr
self.port = port
def start(self):
self.context = zmq.Context()
self.socket = self.context.socket(zmq.PUB)
try:
self.socket.bind('tcp://{addr}:{port}'.format(
addr=self.addr,
port=self.port)
)
except zmq.error.ZMQError as err:
log.error(err, exc_info=True)
raise BindException(err)
def serialise(self, obj):
return json.dumps(obj)
def publish(self, obj):
self.socket.send(
self.serialise(obj)
)
def tear_down(self):
if hasattr(self, 'socket'):
self.socket.close()
if hasattr(self, 'context'):
self.context.term()
|
Raise bind exception and log
|
Raise bind exception and log
|
Python
|
apache-2.0
|
napalm-automation/napalm-logs,napalm-automation/napalm-logs
|
c8ce1315caf762f2c0073ab0ebed8ef627be5581
|
profile/files/openstack/horizon/overrides.py
|
profile/files/openstack/horizon/overrides.py
|
# Disable Floating IPs
from openstack_dashboard.dashboards.project.access_and_security import tabs
from openstack_dashboard.dashboards.project.instances import tables
import horizon
NO = lambda *x: False
tabs.FloatingIPsTab.allowed = NO
tabs.APIAccessTab.allowed = NO
tables.AssociateIP.allowed = NO
tables.SimpleAssociateIP.allowed = NO
tables.SimpleDisassociateIP.allowed = NO
tables.ResizeLink.allowed = NO
project_dashboard = horizon.get_dashboard("project")
# Completely remove panel Network->Routers
routers_panel = project_dashboard.get_panel("routers")
project_dashboard.unregister(routers_panel.__class__)
# Completely remove panel Network->Networks
networks_panel = project_dashboard.get_panel("networks")
project_dashboard.unregister(networks_panel.__class__) # Disable Floating IPs
|
# Disable Floating IPs
from openstack_dashboard.dashboards.project.access_and_security import tabs
from openstack_dashboard.dashboards.project.instances import tables
import horizon
NO = lambda *x: False
tabs.FloatingIPsTab.allowed = NO
tabs.APIAccessTab.allowed = NO
tables.AssociateIP.allowed = NO
tables.SimpleAssociateIP.allowed = NO
tables.SimpleDisassociateIP.allowed = NO
tables.ResizeLink.allowed = NO
project_dashboard = horizon.get_dashboard("project")
# Completely remove panel Network->Routers
routers_panel = project_dashboard.get_panel("routers")
project_dashboard.unregister(routers_panel.__class__)
# Completely remove panel Network->Networks
networks_panel = project_dashboard.get_panel("networks")
project_dashboard.unregister(networks_panel.__class__) # Disable Floating IPs
# Remove "Volume Consistency Groups" tab
from openstack_dashboard.dashboards.project.volumes import tabs
tabs.CGroupsTab.allowed = NO
|
Remove volume consistency group tab from horizon in mitaka
|
Remove volume consistency group tab from horizon in mitaka
|
Python
|
apache-2.0
|
raykrist/himlar,tanzr/himlar,mikaeld66/himlar,raykrist/himlar,norcams/himlar,eckhart/himlar,mikaeld66/himlar,norcams/himlar,mikaeld66/himlar,raykrist/himlar,tanzr/himlar,TorLdre/himlar,tanzr/himlar,TorLdre/himlar,mikaeld66/himlar,tanzr/himlar,eckhart/himlar,norcams/himlar,raykrist/himlar,eckhart/himlar,TorLdre/himlar,norcams/himlar,TorLdre/himlar,mikaeld66/himlar,tanzr/himlar,raykrist/himlar,norcams/himlar,eckhart/himlar,TorLdre/himlar
|
88dc672b8797834b03d67b962dda2de2d40ad4f1
|
demos/minimal.py
|
demos/minimal.py
|
#!/usr/bin/env python
from gi.repository import GtkClutter
GtkClutter.init([])
from gi.repository import GObject, Gtk, GtkChamplain
GObject.threads_init()
GtkClutter.init([])
window = Gtk.Window(type=Gtk.WindowType.TOPLEVEL)
window.connect("destroy", Gtk.main_quit)
widget = GtkChamplain.Embed()
widget.set_size_request(640, 480)
window.add(widget)
window.show_all()
Gtk.main()
|
#!/usr/bin/env python
# To run this example, you need to set the GI_TYPELIB_PATH environment
# variable to point to the gir directory:
#
# export GI_TYPELIB_PATH=$GI_TYPELIB_PATH:/usr/local/lib/girepository-1.0/
from gi.repository import GtkClutter
GtkClutter.init([])
from gi.repository import GObject, Gtk, GtkChamplain
GObject.threads_init()
GtkClutter.init([])
window = Gtk.Window(type=Gtk.WindowType.TOPLEVEL)
window.connect("destroy", Gtk.main_quit)
widget = GtkChamplain.Embed()
widget.set_size_request(640, 480)
window.add(widget)
window.show_all()
Gtk.main()
|
Add description how to run the python demo
|
Add description how to run the python demo
|
Python
|
lgpl-2.1
|
Distrotech/libchamplain,PabloCastellano/libchamplain,PabloCastellano/libchamplain,StanciuMarius/Libchamplain-map-wrapping,PabloCastellano/libchamplain,StanciuMarius/Libchamplain-map-wrapping,StanciuMarius/Libchamplain-map-wrapping,PabloCastellano/libchamplain,GNOME/libchamplain,StanciuMarius/Libchamplain-map-wrapping,Distrotech/libchamplain,StanciuMarius/Libchamplain-map-wrapping,Distrotech/libchamplain,Distrotech/libchamplain,Distrotech/libchamplain,GNOME/libchamplain,PabloCastellano/libchamplain
|
0cda8aae3c5a8ad4d110c41007279a3364c6f33a
|
manage.py
|
manage.py
|
__author__ = 'zifnab'
from flask_script import Manager, Server
from app import app
manager=Manager(app)
manager.add_command('runserver', Server(host=app.config.get('HOST', '0.0.0.0'), port=app.config.get('PORT', 5000)))
@manager.command
def print_routes():
for rule in app.url_map.iter_rules():
print rule
if __name__ == '__main__':
manager.run()
|
__author__ = 'zifnab'
from flask_script import Manager, Server
from app import app
from database import Paste
import arrow
manager=Manager(app)
manager.add_command('runserver', Server(host=app.config.get('HOST', '0.0.0.0'), port=app.config.get('PORT', 5000)))
@manager.command
def print_routes():
for rule in app.url_map.iter_rules():
print rule
@manager.command
def remove_expired():
for paste in Paste.objects(expire__lt=arrow.now().datetime, user=None):
print 'delete {0}'.format(paste.name)
paste.delete()
if __name__ == '__main__':
manager.run()
|
Add way to remove old pastes
|
Add way to remove old pastes
|
Python
|
mit
|
zifnab06/zifb.in,zifnab06/zifb.in
|
3260594268f19dcfe1ea5613f939c892d609b47e
|
skimage/filters/tests/test_filter_import.py
|
skimage/filters/tests/test_filter_import.py
|
from warnings import catch_warnings, simplefilter
def test_filter_import():
with catch_warnings():
simplefilter('ignore')
from skimage import filter as F
assert('sobel' in dir(F))
assert any(['has been renamed' in w
for (w, _, _) in F.__warningregistry__])
|
from warnings import catch_warnings, simplefilter
def test_filter_import():
with catch_warnings():
simplefilter('ignore')
from skimage import filter as F
assert('sobel' in dir(F))
assert any(['has been renamed' in w
for (w, _, _) in F.__warningregistry__]), F.__warningregistry__
|
Add debug print to failing assert
|
Add debug print to failing assert
|
Python
|
bsd-3-clause
|
Hiyorimi/scikit-image,juliusbierk/scikit-image,dpshelio/scikit-image,chriscrosscutler/scikit-image,Britefury/scikit-image,juliusbierk/scikit-image,vighneshbirodkar/scikit-image,jwiggins/scikit-image,WarrenWeckesser/scikits-image,rjeli/scikit-image,Britefury/scikit-image,youprofit/scikit-image,ajaybhat/scikit-image,bennlich/scikit-image,oew1v07/scikit-image,Midafi/scikit-image,Hiyorimi/scikit-image,keflavich/scikit-image,bennlich/scikit-image,michaelpacer/scikit-image,bsipocz/scikit-image,ofgulban/scikit-image,vighneshbirodkar/scikit-image,jwiggins/scikit-image,oew1v07/scikit-image,GaZ3ll3/scikit-image,michaelaye/scikit-image,chriscrosscutler/scikit-image,warmspringwinds/scikit-image,dpshelio/scikit-image,WarrenWeckesser/scikits-image,ajaybhat/scikit-image,blink1073/scikit-image,pratapvardhan/scikit-image,rjeli/scikit-image,Midafi/scikit-image,ofgulban/scikit-image,GaZ3ll3/scikit-image,robintw/scikit-image,emon10005/scikit-image,pratapvardhan/scikit-image,blink1073/scikit-image,ClinicalGraphics/scikit-image,keflavich/scikit-image,rjeli/scikit-image,youprofit/scikit-image,michaelaye/scikit-image,ClinicalGraphics/scikit-image,bsipocz/scikit-image,paalge/scikit-image,paalge/scikit-image,robintw/scikit-image,newville/scikit-image,warmspringwinds/scikit-image,ofgulban/scikit-image,vighneshbirodkar/scikit-image,newville/scikit-image,emon10005/scikit-image,paalge/scikit-image,michaelpacer/scikit-image
|
7756236c5e1fa70f1173dbd58b7e57f56214c19f
|
unitTestUtils/parseXML.py
|
unitTestUtils/parseXML.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import print_function
from xml.etree.ElementTree import ParseError
import xml.etree.ElementTree as ET
import glob
import sys
def eprint(*args, **kwargs):
print(*args, file=sys.stderr, **kwargs)
def parse():
for infile in glob.glob('*.xml'):
try:
tree = ET.parse(infile)
root = tree.getroot()
if root.findall('.//FatalError'):
eprint("Error detected")
print(infile)
sys.exit(1)
except ParseError:
eprint("The file xml isn't correct. There were some mistakes in the tests ")
sys.exit(1)
def main():
parse()
if __name__ == '__main__':
main()
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import print_function
from xml.etree.ElementTree import ParseError
import xml.etree.ElementTree as ET
import glob
import sys
def eprint(*args, **kwargs):
print(*args, file=sys.stderr, **kwargs)
def parse():
for infile in glob.glob('*.xml'):
try:
tree = ET.parse(infile)
root = tree.getroot()
if root.findall('.//FatalError'):
element=root.findall('.//FatalError')[0]
eprint("Error detected")
print(infile)
print(element.text)
sys.exit(1)
except ParseError:
eprint("The file xml isn't correct. There were some mistakes in the tests ")
sys.exit(1)
def main():
parse()
if __name__ == '__main__':
main()
|
Add a verbose error reporting on Travis
|
Add a verbose error reporting on Travis
|
Python
|
apache-2.0
|
wkrzemien/j-pet-framework,wkrzemien/j-pet-framework,wkrzemien/j-pet-framework,wkrzemien/j-pet-framework,wkrzemien/j-pet-framework
|
0df35e81754f703d1a8164cf0ea5169a53355185
|
code/python/knub/thesis/word2vec_gaussian_lda_preprocessing.py
|
code/python/knub/thesis/word2vec_gaussian_lda_preprocessing.py
|
import argparse
import logging
import os
from gensim.models import Word2Vec
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
if __name__ == "__main__":
parser = argparse.ArgumentParser("Prepare model for Gaussian LDA")
parser.add_argument("--topic_model", type=str)
parser.add_argument("--embedding_model", type=str)
args = parser.parse_args()
word2vec = Word2Vec.load_word2vec_format(args.embedding_model, binary=True)
embedding_name = os.path.basename(args.embedding_model)
with open(args.topic_model + "." + embedding_name + ".gaussian-lda", "w") as output:
with open(args.topic_model + ".restricted.alphabet", "r") as f:
for line in f:
word = line.split("#")[0]
output.write(word + " ")
output.write(" ".join(word2vec[word]))
output.write("\n")
|
import argparse
from codecs import open
import logging
import os
from gensim.models import Word2Vec
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
if __name__ == "__main__":
parser = argparse.ArgumentParser("Prepare model for Gaussian LDA")
parser.add_argument("--topic-model", type=str)
parser.add_argument("--embedding-model", type=str)
args = parser.parse_args()
word2vec = Word2Vec.load_word2vec_format(args.embedding_model, binary=True)
embedding_name = os.path.basename(args.embedding_model)
with open(args.topic_model + "." + embedding_name + ".gaussian-lda", "w", encoding="utf-8") as output:
with open(args.topic_model + "." + embedding_name + ".restricted.alphabet", "r", encoding="utf-8") as f:
for line in f:
word = line.split("#")[0]
output.write(word + " ")
output.write(" ".join(map(str, word2vec[word])))
output.write("\n")
|
Fix parameter parsing in gaussian lda preprocessing.
|
Fix parameter parsing in gaussian lda preprocessing.
|
Python
|
apache-2.0
|
knub/master-thesis,knub/master-thesis,knub/master-thesis,knub/master-thesis
|
ce0be23f554eb9949a3769da1e4a3d3d51b546f1
|
src/server/datab.py
|
src/server/datab.py
|
'''
Database module.
Get the database, convert it to the built-in data structure and hold a link
to it. The module should be initialized before any other modules except mailer
and log.
Design: Heranort
'''
'''
Connect to the database.
'''
def connect_to_datab():
pass
'''
Get raw data of the database.
'''
def datab_get_raw_data():
pass
'''
Process the raw data.
'''
def datab_process_data():
pass
'''
Preserve the processed data into somewhere.
'''
def datab_preserve_data():
pass
'''
Check wether the history is modified. If so, emit warning.
'''
def check_health():
pass
|
'''
Database module.
Get the database, convert it to the built-in data structure and hold a link
to it. The module should be initialized before any other modules except mailer
and log.
Design: Heranort
'''
import sqlite3, os
'''
Connect to the database.
'''
def connect_to_datab():
path = os.getcwd()
pparent_path = os.path.dirname(os.path.dirname(path)) #get the root dir
# print(pparent_path)
sql = sqlite3.connect(pparent_path + '\data\data.db')
return sql
'''
Get raw data of the database.
'''
def datab_get_raw_data(sql):
cur = sql.cursor()
cur.execute('select * from flight') #fetch the raw data of flight
raw_data_flight = cur.fetchall()
cur.execute('select * from train') #fetch the raw data of train
raw_data_train = cur.fetchall()
cur.execute('select * from highway') #fetch the raw data of highway
raw_data_bus = cur.fetchall()
return (raw_data_flight, raw_data_train, raw_data_bus)
'''
Process the raw data.
'''
def datab_process_data(raw_data_flight, raw_data_train, raw_data_bus):
data_price = [[-1 for i in range(10)] for i in range(10)]
data_instance = [[-1 for i in range(10)] for i in range(10)]
data_time = [[-1 for i in range(10)] for i in range(10)]
for element in raw_data_bus:
pass
'''
Preserve the processed data into somewhere.
'''
def datab_preserve_data():
pass
'''
Check wether the history is modified. If so, emit warning.
'''
def check_health():
pass
if(__name__ == '__main__'):
sql = connect_to_datab()
(raw_data_flight, raw_data_train, raw_data_bus) = datab_get_raw_data(sql)
datab_process_data(raw_data_flight, raw_data_train, raw_data_bus)
|
Add function of data connection
|
Add function of data connection
|
Python
|
mit
|
niwtr/map-walker
|
5d3d47e0fae9ddb9f445972e5186429163aabf40
|
statirator/core/management/commands/init.py
|
statirator/core/management/commands/init.py
|
import os
from optparse import make_option
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = "Init the static site project"
args = '[directory]'
option_list = (
make_option(
'--title', '-t', dest='title', default='Default site',
help='Site title [Default: "%defaults"]'),
make_option(
'--domain', '-d', dest='domain', default='example.com',
help='Domain name [Default: "%default"]'),
make_option(
'--languages', '-l', dest='languages', default=['he', 'en'],
action='append', help='Supported languages. [Default: "%default"]')
) + BaseCommand.option_list
def handle(self, directory, **options):
from django.conf.global_settings import LANGUAGES
extra = {
'build': 'build',
'default_lang': options['languages'][0],
'languages': [l for l in LANGUAGES if l[0] in options["languages"]],
'extensions': ('py', ),
'files': (),
'template': os.path.abspath(
os.path.join(
os.path.dirname(__file__),
os.pardir, os.pardir, os.pardir, 'project_template')),
}
extra.update(options)
from django.core.management import call_command
call_command('startproject', 'conf', directory, **extra)
|
import os
import logging
from django.core.management.base import BaseCommand
from optparse import make_option
class Command(BaseCommand):
help = "Init the static site project"
args = '[directory]'
option_list = (
make_option(
'--title', '-t', dest='title', default='Default site',
help='Site title [Default: "%defaults"]'),
make_option(
'--domain', '-d', dest='domain', default='example.com',
help='Domain name [Default: "%default"]'),
make_option(
'--languages', '-l', dest='languages', default=['he', 'en'],
action='append', help='Supported languages. [Default: "%default"]')
) + BaseCommand.option_list
def handle(self, directory, **options):
logging.info("Initializing project structure in %s", directory)
os.makedirs(directory)
from django.conf.global_settings import LANGUAGES
extra = {
'build': 'build',
'default_lang': options['languages'][0],
'languages': [l for l in LANGUAGES if l[0] in options["languages"]],
'extensions': ('py', ),
'files': (),
'template': os.path.abspath(
os.path.join(
os.path.dirname(__file__),
os.pardir, os.pardir, os.pardir, 'project_template')),
}
extra.update(options)
from django.core.management import call_command
call_command('startproject', 'conf', directory, **extra)
|
Create the directory before calling the startprojcet command
|
Create the directory before calling the startprojcet command
|
Python
|
mit
|
MeirKriheli/statirator,MeirKriheli/statirator,MeirKriheli/statirator
|
2d584531d043804f3dcf3acf132cb60b463e4c1a
|
azdweb/markdown_serv.py
|
azdweb/markdown_serv.py
|
import os
from flask import request, render_template
from azdweb import app
from azdweb.util import gh_markdown
root_path = os.path.abspath("markdown")
# {filename: (mtime, contents)}
cache = {}
def load(filename):
with open(filename) as file:
return gh_markdown.markdown(file.read())
def load_cached(filename):
mtime = os.path.getmtime(filename)
if filename in cache:
old_mtime, contents = cache[filename]
if mtime != old_mtime:
contents = load(filename)
cache[filename] = (mtime, contents)
else:
contents = load(filename)
cache[filename] = (mtime, contents)
return contents
@app.route("/md/<page>")
def serve_markdown(page):
filename = os.path.join(root_path, "{}.md".format(page))
if not os.path.exists(filename):
return render_template("markdown-404.html", page=page)
return render_template("markdown.html", page=page, content=load_cached(filename))
|
import codecs
import os
from flask import render_template
from azdweb import app
from azdweb.util import gh_markdown
root_path = os.path.abspath("markdown")
# {filename: (mtime, contents)}
cache = {}
def load(filename):
with codecs.open(filename, encoding="utf-8") as file:
return gh_markdown.markdown(file.read())
def load_cached(filename):
mtime = os.path.getmtime(filename)
if filename in cache:
old_mtime, contents = cache[filename]
if mtime != old_mtime:
contents = load(filename)
cache[filename] = (mtime, contents)
else:
contents = load(filename)
cache[filename] = (mtime, contents)
return contents
@app.route("/md", defaults={"page": "index"})
@app.route("/md/<path:page>")
def serve_markdown(page):
if "." in page:
return render_template("markdown-404.html", page=page)
if page.endswith("/"):
page += "index"
filename = os.path.join(root_path, "{}.md".format(page))
if not os.path.exists(filename):
return render_template("markdown-404.html", page=page)
sidebar = os.path.join(os.path.dirname(filename), "sidebar.md")
if os.path.exists(sidebar):
sidebar_content = load_cached(sidebar)
else:
sidebar_content = ""
return render_template("markdown.html", title=page, content=load_cached(filename), sidebar=sidebar_content)
@app.route("/sw", defaults={"page": "index"})
@app.route("/sw/<path:page>")
def skywars_alias(page):
return serve_markdown("skywars/{}".format(page))
|
Add support for a sidebar, and also add a /sw/ alias for /md/skywars/
|
Add support for a sidebar, and also add a /sw/ alias for /md/skywars/
|
Python
|
apache-2.0
|
daboross/dabo.guru,daboross/dabo.guru,daboross/dabo.guru,daboross/dabo.guru
|
a4c24372ffcbac656a9879cc2fd705d67a875a3e
|
prime-factors/prime_factors.py
|
prime-factors/prime_factors.py
|
# File: prime_factors.py
# Purpose: Compute the prime factors of a given natural number.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Monday 26 September 2016, 12:05 AM
|
# File: prime_factors.py
# Purpose: Compute the prime factors of a given natural number.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Monday 26 September 2016, 12:05 AM
def prime(number):
if number <= 1:
return False
else:
if number % 1 == 0 and number % range(2, number)
|
Set condition [1 is not a prime]
|
Set condition [1 is not a prime]
|
Python
|
mit
|
amalshehu/exercism-python
|
14fb663019038b80d42f212e0ad8169cd0d37e84
|
neutron_lib/exceptions/address_group.py
|
neutron_lib/exceptions/address_group.py
|
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib._i18n import _
from neutron_lib import exceptions
class AddressGroupNotFound(exceptions.NotFound):
message = _("Address group %(address_group_id)s could not be found.")
class AddressesNotFound(exceptions.NotFound):
message = _("Addresses %(addresses)s not found in the address group "
"%(address_group_id)s.")
class AddressesAlreadyExist(exceptions.BadRequest):
message = _("Addresses %(addresses)s already exist in the "
"address group %(address_group_id)s.")
|
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib._i18n import _
from neutron_lib import exceptions
class AddressGroupNotFound(exceptions.NotFound):
message = _("Address group %(address_group_id)s could not be found.")
class AddressGroupInUse(exceptions.InUse):
message = _("Address group %(address_group_id)s is in use on one or more "
"security group rules.")
class AddressesNotFound(exceptions.NotFound):
message = _("Addresses %(addresses)s not found in the address group "
"%(address_group_id)s.")
class AddressesAlreadyExist(exceptions.BadRequest):
message = _("Addresses %(addresses)s already exist in the "
"address group %(address_group_id)s.")
|
Add address group in use exception
|
Add address group in use exception
Related change: https://review.opendev.org/#/c/751110/
Change-Id: I2a9872890ca4d5e59a9e266c1dcacd3488a3265c
|
Python
|
apache-2.0
|
openstack/neutron-lib,openstack/neutron-lib,openstack/neutron-lib,openstack/neutron-lib
|
14d51aa701dcc8d1d3f026af947c935abb0eabe3
|
examples/rune.py
|
examples/rune.py
|
import cassiopeia as cass
from cassiopeia.core import Summoner
def test_cass():
name = "Kalturi"
runes = cass.get_runes()
for rune in runes:
if rune.tier == 3:
print(rune.name)
if __name__ == "__main__":
test_cass()
|
import cassiopeia as cass
def print_t3_runes():
for rune in cass.get_runes():
if rune.tier == 3:
print(rune.name)
if __name__ == "__main__":
print_t3_runes()
|
Change function name, remove unneeded summoner name
|
Change function name, remove unneeded summoner name
|
Python
|
mit
|
robrua/cassiopeia,10se1ucgo/cassiopeia,meraki-analytics/cassiopeia
|
d72df78e0dea27ae93bde52e43cec360a963b32c
|
openprescribing/frontend/management/commands/delete_measure.py
|
openprescribing/frontend/management/commands/delete_measure.py
|
from django.conf import settings
from django.core.management import BaseCommand, CommandError
from frontend.models import Measure
class Command(BaseCommand):
def handle(self, measure_id, **options):
if not measure_id.startswith(settings.MEASURE_PREVIEW_PREFIX):
raise CommandError(
f"Not deleting '{measure_id}' because it doesn't look like a preview "
f"measure (it doesn't start with '{settings.MEASURE_PREVIEW_PREFIX}')"
)
try:
measure = Measure.objects.get(id=measure_id)
except Measure.DoesNotExist:
raise CommandError(f"No measure with ID '{measure_id}'")
# The ON DELETE CASCADE configuration ensures that all MeasureValues are deleted
# as well
measure.delete()
self.stdout.write(f"Deleted measure '{measure_id}'")
def add_arguments(self, parser):
parser.add_argument("measure_id")
|
from django.conf import settings
from django.core.management import BaseCommand, CommandError
from frontend.models import Measure
from gcutils.bigquery import Client
class Command(BaseCommand):
def handle(self, measure_id, **options):
if not measure_id.startswith(settings.MEASURE_PREVIEW_PREFIX):
raise CommandError(
f"Not deleting '{measure_id}' because it doesn't look like a preview "
f"measure (it doesn't start with '{settings.MEASURE_PREVIEW_PREFIX}')"
)
try:
measure = Measure.objects.get(id=measure_id)
except Measure.DoesNotExist:
raise CommandError(f"No measure with ID '{measure_id}'")
delete_from_bigquery(measure_id)
# The ON DELETE CASCADE configuration ensures that all MeasureValues are deleted
# as well
measure.delete()
self.stdout.write(f"Deleted measure '{measure_id}'")
def add_arguments(self, parser):
parser.add_argument("measure_id")
def delete_from_bigquery(measure_id):
# Dataset name from `import_measures.MeasureCalculation.get_table()`
client = Client("measures")
# Table naming convention from `import_measures.MeasureCalculation.table_name()`
table_suffix = f"_data_{measure_id}"
tables_to_delete = [
table for table in client.list_tables() if table.table_id.endswith(table_suffix)
]
for table in tables_to_delete:
client.delete_table(table.table_id)
|
Delete measures from BigQuery as well
|
Delete measures from BigQuery as well
|
Python
|
mit
|
ebmdatalab/openprescribing,annapowellsmith/openpresc,ebmdatalab/openprescribing,ebmdatalab/openprescribing,annapowellsmith/openpresc,annapowellsmith/openpresc,ebmdatalab/openprescribing,annapowellsmith/openpresc
|
9b6ff8eb88084b69190fed24de92eca31f8509d5
|
palindrome-products/palindrome_products.py
|
palindrome-products/palindrome_products.py
|
def largest_palindrome():
pass
def smallest_palindrome():
pass
|
from collections import defaultdict
def largest_palindrome(max_factor, min_factor=0):
return _palindromes(max_factor, min_factor, max)
def smallest_palindrome(max_factor, min_factor=0):
return _palindromes(max_factor, min_factor, min)
def _palindromes(max_factor, min_factor, minmax):
pals = defaultdict(set)
for i in range(min_factor, max_factor+1):
for j in range(min_factor, max_factor+1):
p = i * j
if is_palindrome(p):
pals[p].add(tuple(sorted([i,j])))
value = minmax(pals)
factors = pals[value]
return (value, factors)
def is_palindrome(n):
return str(n) == str(n)[::-1]
|
Add an initial solution that works, but with the wrong output format
|
Add an initial solution that works, but with the wrong output format
|
Python
|
agpl-3.0
|
CubicComet/exercism-python-solutions
|
d0df78e9f660b138b5f79d6714312740ebcf1648
|
fparser/setup.py
|
fparser/setup.py
|
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('fparser',parent_package,top_path)
return config
|
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('fparser',parent_package,top_path)
config.add_data_files('log.config')
return config
|
Add log.config to data files to fix installed fparser.
|
Add log.config to data files to fix installed fparser.
|
Python
|
bsd-3-clause
|
pemryan/f2py,travellhyne/f2py,pearu/f2py,pemryan/f2py
|
fa191537e15dd0729deb94aaa91dbb7fa9295e04
|
mathdeck/loadproblem.py
|
mathdeck/loadproblem.py
|
# -*- coding: utf-8 -*-
"""
mathdeck.loadproblem
~~~~~~~~~~~~
This module loads a problem file as a module.
:copyright: (c) 2015 by Patrick Spencer.
:license: Apache 2.0, see ../LICENSE for more details.
"""
import os
import sys
# Load problem file as
def load_file_as_module(file):
"""
Load problem file as a module.
:param file: The full path to the problem file
returns a module represented by the problem file
"""
# Create a new module to hold the seed variable so
# the loaded module can reference the seed variable
if sys.version_info[0] == 2:
import imp
problem_module = imp.load_source('prob_module',file)
if sys.version_info[0] == 3:
import importlib.machinery
problem_module = importlib.machinery \
.SourceFileLoader("prob_module",file) \
.load_module()
try:
problem_module.answers
except AttributeError:
raise AttributeError('Problem file has no \'answers\' attribute')
return problem_module
|
# -*- coding: utf-8 -*-
"""
mathdeck.loadproblem
~~~~~~~~~~~~~~~~~~~~
This module loads a problem file as a module.
:copyright: (c) 2015 by Patrick Spencer.
:license: Apache 2.0, see ../LICENSE for more details.
"""
import os
import sys
# Load problem file as
def load_file_as_module(file_path):
"""
Load problem file as a module.
:param file: The full path to the problem file
returns a module represented by the problem file
"""
# Create a new module to hold the seed variable so
# the loaded module can reference the seed variable
if sys.version_info[0] == 2:
import imp
problem_module = imp.load_source('prob_mod_pkg',file_path)
if sys.version_info[0] == 3:
import importlib.machinery
problem_module = importlib.machinery \
.SourceFileLoader('prob_mod_pkg',file_path) \
.load_module()
try:
problem_module.answers
except AttributeError:
raise AttributeError('Problem file has no \'answers\' attribute')
return problem_module
|
Change package name in loadmodule call
|
Change package name in loadmodule call
Not much reason to do this. It just happened.
|
Python
|
apache-2.0
|
patrickspencer/mathdeck,patrickspencer/mathdeck
|
6c39f3504dad1cf918189fd46d9e8529a2fc9586
|
Functions/template-python/lambda_function.py
|
Functions/template-python/lambda_function.py
|
"""Created By: Andrew Ryan DeFilippis"""
print('Lambda cold-start...')
from json import dumps, loads
# Disable 'testing_locally' when deploying to AWS Lambda
testing_locally = True
verbose = True
class CWLogs(object):
def __init__(self, context):
self.context = context
def event(self, message, event_prefix='LOG'):
print('{} RequestId: {}\t{}'.format(
event_prefix,
self.context.aws_request_id,
message
))
def lambda_handler(event, context):
log = CWLogs(context)
if verbose is True:
log.event('Event: {}'.format(dumps(event)))
log.event('Hello World!')
return None
def local_test():
import context
with open('event.json', 'r') as f:
event = loads(f.read())
print('\nFunction Log:\n')
lambda_handler(event, context)
if testing_locally is True:
local_test()
|
"""Created By: Andrew Ryan DeFilippis"""
print('Lambda cold-start...')
from json import dumps, loads
# Disable 'testing_locally' when deploying to AWS Lambda
testing_locally = True
verbose = True
class CWLogs(object):
"""Define the structure of log events to match all other CloudWatch Log Events logged by AWS Lambda.
"""
def __init__(self, context):
"""Define the instance of the context object.
:param context: Lambda context object
"""
self.context = context
def event(self, message, event_prefix='LOG'):
# type: (any, str) -> None
"""Print an event into the CloudWatch Logs stream for the Function's invocation.
:param message: The information to be logged (required)
:param event_prefix: The prefix that appears before the 'RequestId' (default 'LOG')
:return:
"""
print('{} RequestId: {}\t{}'.format(
event_prefix,
self.context.aws_request_id,
message
))
return None
def lambda_handler(event, context):
"""AWS Lambda executes the 'lambda_handler' function on invocation.
:param event: Ingested JSON event object provided at invocation
:param context: Lambda context object, containing information specific to the invocation and Function
:return: Final response to AWS Lambda, and passed to the invoker if the invocation type is RequestResponse
"""
# Instantiate our CloudWatch logging class
log = CWLogs(context)
if verbose is True:
log.event('Event: {}'.format(dumps(event)))
log.event('Hello World!')
return None
def local_test():
"""Testing on a local development machine (outside of AWS Lambda) is made possible by...
"""
import context
with open('event.json', 'r') as f:
event = loads(f.read())
print('\nFunction Log:\n')
lambda_handler(event, context)
if testing_locally is True:
local_test()
|
Add documentation, and modify default return value
|
Add documentation, and modify default return value
|
Python
|
apache-2.0
|
andrewdefilippis/aws-lambda
|
e6046cf33af1a4b1f16424740b5093ebd423842e
|
scipy/spatial/transform/__init__.py
|
scipy/spatial/transform/__init__.py
|
"""
========================================================
Spatial Transformations (:mod:`scipy.spatial.transform`)
========================================================
.. currentmodule:: scipy.spatial.transform
This package implements various spatial transformations. For now,
only rotations are supported.
Rotations in 3 dimensions
-------------------------
.. autosummary::
:toctree: generated/
Rotation
"""
from __future__ import division, print_function, absolute_import
from .rotation import Rotation
__all__ = ['Rotation']
from scipy._lib._testutils import PytestTester
test = PytestTester(__name__)
del PytestTester
|
"""
========================================================
Spatial Transformations (:mod:`scipy.spatial.transform`)
========================================================
.. currentmodule:: scipy.spatial.transform
This package implements various spatial transformations. For now,
only rotations are supported.
Rotations in 3 dimensions
-------------------------
.. autosummary::
:toctree: generated/
Rotation
Slerp
"""
from __future__ import division, print_function, absolute_import
from .rotation import Rotation, Slerp
__all__ = ['Rotation', 'Slerp']
from scipy._lib._testutils import PytestTester
test = PytestTester(__name__)
del PytestTester
|
Add Slerp class to transform package
|
BLD: Add Slerp class to transform package
|
Python
|
bsd-3-clause
|
aarchiba/scipy,rgommers/scipy,mdhaber/scipy,grlee77/scipy,vigna/scipy,perimosocordiae/scipy,gertingold/scipy,anntzer/scipy,ilayn/scipy,anntzer/scipy,jamestwebber/scipy,person142/scipy,grlee77/scipy,gfyoung/scipy,e-q/scipy,perimosocordiae/scipy,matthew-brett/scipy,endolith/scipy,person142/scipy,scipy/scipy,jor-/scipy,ilayn/scipy,matthew-brett/scipy,arokem/scipy,jamestwebber/scipy,endolith/scipy,arokem/scipy,tylerjereddy/scipy,pizzathief/scipy,jor-/scipy,andyfaff/scipy,gfyoung/scipy,perimosocordiae/scipy,grlee77/scipy,gertingold/scipy,aeklant/scipy,person142/scipy,aarchiba/scipy,andyfaff/scipy,vigna/scipy,matthew-brett/scipy,person142/scipy,e-q/scipy,Stefan-Endres/scipy,gfyoung/scipy,zerothi/scipy,endolith/scipy,endolith/scipy,Stefan-Endres/scipy,andyfaff/scipy,matthew-brett/scipy,lhilt/scipy,mdhaber/scipy,arokem/scipy,lhilt/scipy,WarrenWeckesser/scipy,gertingold/scipy,scipy/scipy,anntzer/scipy,perimosocordiae/scipy,Stefan-Endres/scipy,tylerjereddy/scipy,e-q/scipy,jor-/scipy,mdhaber/scipy,gertingold/scipy,Eric89GXL/scipy,lhilt/scipy,jamestwebber/scipy,vigna/scipy,WarrenWeckesser/scipy,lhilt/scipy,rgommers/scipy,pizzathief/scipy,person142/scipy,jor-/scipy,scipy/scipy,ilayn/scipy,WarrenWeckesser/scipy,mdhaber/scipy,mdhaber/scipy,aeklant/scipy,Stefan-Endres/scipy,matthew-brett/scipy,Eric89GXL/scipy,WarrenWeckesser/scipy,Eric89GXL/scipy,perimosocordiae/scipy,pizzathief/scipy,pizzathief/scipy,tylerjereddy/scipy,nmayorov/scipy,Stefan-Endres/scipy,endolith/scipy,zerothi/scipy,andyfaff/scipy,perimosocordiae/scipy,WarrenWeckesser/scipy,Stefan-Endres/scipy,pizzathief/scipy,e-q/scipy,mdhaber/scipy,andyfaff/scipy,aeklant/scipy,jor-/scipy,lhilt/scipy,vigna/scipy,nmayorov/scipy,scipy/scipy,nmayorov/scipy,rgommers/scipy,Eric89GXL/scipy,arokem/scipy,e-q/scipy,grlee77/scipy,Eric89GXL/scipy,zerothi/scipy,aarchiba/scipy,gfyoung/scipy,aarchiba/scipy,anntzer/scipy,tylerjereddy/scipy,scipy/scipy,anntzer/scipy,vigna/scipy,nmayorov/scipy,tylerjereddy/scipy,grlee77/scipy,rgommers/scipy,andyfa
ff/scipy,zerothi/scipy,WarrenWeckesser/scipy,ilayn/scipy,Eric89GXL/scipy,ilayn/scipy,jamestwebber/scipy,aarchiba/scipy,arokem/scipy,aeklant/scipy,endolith/scipy,zerothi/scipy,aeklant/scipy,zerothi/scipy,anntzer/scipy,scipy/scipy,rgommers/scipy,ilayn/scipy,jamestwebber/scipy,gertingold/scipy,nmayorov/scipy,gfyoung/scipy
|
6db8a9e779031ae97977a49e4edd11d42fb6389d
|
samples/04_markdown_parse/epub2markdown.py
|
samples/04_markdown_parse/epub2markdown.py
|
#!/usr/bin/env python
import sys
import subprocess
import os
import os.path
from ebooklib import epub
# This is just a basic example which can easily break in real world.
if __name__ == '__main__':
# read epub
book = epub.read_epub(sys.argv[1])
# get base filename from the epub
base_name = os.path.basename(os.path.splitext(sys.argv[1])[0])
for item in book.items:
# convert into markdown if this is html
if isinstance(item, epub.EpubHtml):
proc = subprocess.Popen(['pandoc', '-f', 'html', '-t', 'markdown', '-'],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE
)
content, error = proc.communicate(item.content)
file_name = os.path.splitext(item.file_name)[0]+'.md'
else:
file_name = item.file_name
content = item.content
# create needed directories
dir_name = '%s/%s' % (base_name, os.path.dirname(file_name))
if not os.path.exists(dir_name):
os.makedirs(dir_name)
print '>> ', file_name
# write content to file
f = open('%s/%s' % (base_name, file_name), 'w')
f.write(content)
f.close()
|
#!/usr/bin/env python
import os.path
import subprocess
import sys
from ebooklib import epub
# This is just a basic example which can easily break in real world.
if __name__ == '__main__':
# read epub
book = epub.read_epub(sys.argv[1])
# get base filename from the epub
base_name = os.path.basename(os.path.splitext(sys.argv[1])[0])
for item in book.items:
# convert into markdown if this is html
if isinstance(item, epub.EpubHtml):
proc = subprocess.Popen(['pandoc', '-f', 'html', '-t', 'markdown', '-'],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE)
content, error = proc.communicate(item.content)
file_name = os.path.splitext(item.file_name)[0] + '.md'
else:
file_name = item.file_name
content = item.content
# create needed directories
dir_name = '{0}/{1}'.format(base_name, os.path.dirname(file_name))
if not os.path.exists(dir_name):
os.makedirs(dir_name)
print('>> {0}'.format(file_name))
# write content to file
with open('{0}/{1}'.format(base_name, file_name), 'w') as f:
f.write(content)
|
Make `samples/04_markdown_parse` Python 2+3 compatible
|
Make `samples/04_markdown_parse` Python 2+3 compatible
|
Python
|
agpl-3.0
|
booktype/ebooklib,aerkalov/ebooklib
|
82696dd76351f8d0bb4fcfe9f173ada652947acc
|
whacked4/whacked4/ui/dialogs/errordialog.py
|
whacked4/whacked4/ui/dialogs/errordialog.py
|
#!/usr/bin/env python
#coding=utf8
"""
Error dialog interface.
"""
from whacked4.ui import windows
import wx
class ErrorDialog(windows.ErrorDialogBase):
def __init__(self, parent):
windows.ErrorDialogBase.__init__(self, parent)
wx.EndBusyCursor()
def set_log(self, log_file):
"""
Shows the log file's contents in the report field.
"""
log_file.flush()
log_file.seek(0)
self.Report.ChangeValue(log_file.read())
def copy(self, event):
self.Report.SelectAll()
self.Report.Copy()
self.Report.SetSelection(-1, -1)
def close(self, event):
self.Hide()
|
#!/usr/bin/env python
#coding=utf8
"""
Error dialog interface.
"""
from whacked4.ui import windows
import wx
class ErrorDialog(windows.ErrorDialogBase):
def __init__(self, parent):
windows.ErrorDialogBase.__init__(self, parent)
if wx.IsBusy() == True:
wx.EndBusyCursor()
def set_log(self, log_file):
"""
Shows the log file's contents in the report field.
"""
log_file.flush()
log_file.seek(0)
self.Report.ChangeValue(log_file.read())
def copy(self, event):
self.Report.SelectAll()
self.Report.Copy()
self.Report.SetSelection(-1, -1)
def close(self, event):
self.Hide()
|
Fix exceptions not displaying if a busy cursor was set.
|
Fix exceptions not displaying if a busy cursor was set.
|
Python
|
bsd-2-clause
|
GitExl/WhackEd4,GitExl/WhackEd4
|
2575946f1b05ac1601a00f8936ab3a701b05bc7e
|
tests/test_utils.py
|
tests/test_utils.py
|
import unittest
from utils import TextLoader
import numpy as np
class TestUtilsMethods(unittest.TestCase):
def setUp(self):
self.data_loader = TextLoader("tests/test_data", batch_size=2, seq_length=5)
def test_init(self):
print (self.data_loader.vocab)
print (self.data_loader.tensor)
print (self.data_loader.vocab_size)
def test_build_vocab(self):
sentences = ["I", "love", "cat", "cat"]
vocab, vocab_inv = self.data_loader.build_vocab(sentences)
print (vocab, vocab_inv)
# Must include I, love, and cat
self.assertItemsEqual(vocab, ["I", "love", "cat"])
self.assertDictEqual(vocab, {'I': 0, 'love': 2, 'cat': 1})
self.assertItemsEqual(vocab_inv, ["I", "love", "cat"])
def test_batch_vocab(self):
print (np.array(self.data_loader.x_batches).shape)
self.assertItemsEqual(self.data_loader.x_batches[0][0][1:],
self.data_loader.y_batches[0][0][:-1])
self.assertItemsEqual(self.data_loader.x_batches[0][1][1:],
self.data_loader.y_batches[0][1][:-1])
if __name__ == '__main__':
unittest.main()
|
import unittest
from utils import TextLoader
import numpy as np
from collections import Counter
class TestUtilsMethods(unittest.TestCase):
def setUp(self):
self.data_loader = TextLoader("tests/test_data", batch_size=2, seq_length=5)
def test_init(self):
print (self.data_loader.vocab)
print (self.data_loader.tensor)
print (self.data_loader.vocab_size)
def test_build_vocab(self):
sentences = ["I", "love", "cat", "cat"]
vocab, vocab_inv = self.data_loader.build_vocab(sentences)
print (vocab, vocab_inv)
# Must include I, love, and cat
self.assertEqual(Counter(list(vocab)), Counter(list(["I", "love", "cat"])))
self.assertDictEqual(vocab, {'I': 0, 'love': 2, 'cat': 1})
self.assertEqual(Counter(list(vocab_inv)), Counter(list(["I", "love", "cat"])))
def test_batch_vocab(self):
print (np.array(self.data_loader.x_batches).shape)
self.assertEqual(Counter(list(self.data_loader.x_batches[0][0][1:])),
Counter(list(self.data_loader.y_batches[0][0][:-1])))
self.assertEqual(Counter(list(self.data_loader.x_batches[0][1][1:])),
Counter(list(self.data_loader.y_batches[0][1][:-1])))
if __name__ == '__main__':
unittest.main()
|
Generalize method names to be compatible with Python 2.7 and 3.4
|
Generalize method names to be compatible with Python 2.7 and 3.4
|
Python
|
mit
|
hunkim/word-rnn-tensorflow,bahmanh/word-rnn-tensorflow
|
52fc9bc79343632a034d2dc51645306f4b58210c
|
tests/services/conftest.py
|
tests/services/conftest.py
|
import pytest
from responses import RequestsMock
from netvisor import Netvisor
@pytest.fixture
def netvisor():
kwargs = dict(
sender='Test client',
partner_id='xxx_yyy',
partner_key='E2CEBB1966C7016730C70CA92CBB93DD',
customer_id='xx_yyyy_zz',
customer_key='7767899D6F5FB333784A2520771E5871',
organization_id='1967543-8',
language='EN'
)
return Netvisor(host='http://koulutus.netvisor.fi', **kwargs)
@pytest.yield_fixture(autouse=True)
def responses():
requests_mock = RequestsMock()
requests_mock._start()
yield requests_mock
requests_mock._stop()
requests_mock.reset()
|
import pytest
from responses import RequestsMock
from netvisor import Netvisor
@pytest.fixture
def netvisor():
kwargs = dict(
sender='Test client',
partner_id='xxx_yyy',
partner_key='E2CEBB1966C7016730C70CA92CBB93DD',
customer_id='xx_yyyy_zz',
customer_key='7767899D6F5FB333784A2520771E5871',
organization_id='1967543-8',
language='EN'
)
return Netvisor(host='http://koulutus.netvisor.fi', **kwargs)
@pytest.yield_fixture(autouse=True)
def responses():
r = RequestsMock()
with r:
yield r
|
Fix tests to work with responses 0.3.0
|
Fix tests to work with responses 0.3.0
|
Python
|
mit
|
fastmonkeys/netvisor.py
|
a92118d7ee6acde57ab9853186c43a5c6748e8a6
|
tracpro/__init__.py
|
tracpro/__init__.py
|
from __future__ import absolute_import
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app # noqa
__version__ = "1.0.0"
|
from __future__ import absolute_import
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app # noqa
VERSION = (1, 0, 0, "dev")
def get_version(version):
assert len(version) == 4, "Version must be formatted as (major, minor, micro, state)"
major, minor, micro, state = version
assert isinstance(major, int), "Major version must be an integer."
assert isinstance(minor, int), "Minor version must be an integer."
assert isinstance(micro, int), "Micro version must be an integer."
assert state in ('final', 'dev'), "State must be either final or dev."
if state == 'final':
return "{}.{}.{}".format(major, minor, micro)
else:
return "{}.{}.{}.{}".format(major, minor, micro, state)
__version__ = get_version(VERSION)
|
Use tuple to represent version
|
Use tuple to represent version
|
Python
|
bsd-3-clause
|
rapidpro/tracpro,xkmato/tracpro,xkmato/tracpro,xkmato/tracpro,xkmato/tracpro,rapidpro/tracpro,rapidpro/tracpro
|
2b8674528972655937eff797e61fa6819bfc3ba8
|
apps/feeds/signals.py
|
apps/feeds/signals.py
|
from libs.djpubsubhubbub.signals import updated
def update_handler(sender, update, **kwargs):
"""
Process new content being provided from SuperFeedr
"""
print sender
for entry in update.entries:
print entry
updated.connect(update_handler, dispatch_uid='superfeedr')
|
from libs.djpubsubhubbub.signals import updated
from .models import Feed
def update_handler(sender, update, **kwargs):
"""
Process new content being provided from SuperFeedr
"""
print sender.topic
users = []
feeds = Feed.objects.filter(feed_url=sender.topic)
for feed in feeds:
if feed.created_by not in users:
users.append(feed.created_by)
for user in users:
kippt = user.kippt_client()
for entry in update.entries:
title = entry['title']
summary = entry['summary']
link = entry['link']
kippt.addClip(link, user.list_id, title=title, notes=summary)
updated.connect(update_handler, dispatch_uid='superfeedr')
|
Handle new content from SuperFeedr to Kippt
|
Handle new content from SuperFeedr to Kippt
|
Python
|
mit
|
jpadilla/feedleap,jpadilla/feedleap
|
532b0809b040318abbb8e62848f18ad0cdf72547
|
src/workspace/workspace_managers.py
|
src/workspace/workspace_managers.py
|
from workspace.models import GroupPublishedWorkspace, PublishedWorkSpace, WorkSpace
def ref_from_workspace(workspace):
if isinstance(workspace, WorkSpace):
return 'group/' + str(workspace.id)
elif isinstance(workspace, PublishedWorkSpace):
return 'group_published/' + str(workspace.id)
class OrganizationWorkspaceManager:
def get_id(self):
return 'ezweb_organizations'
def update_base_workspaces(self, user, current_workspace_refs):
workspaces_to_remove = current_workspace_refs[:]
workspaces_to_add = []
user_groups = user.groups.all()
# workspaces assigned to the user's groups
# the compression list outside the inside compression list is for flattening
# the inside list
workspaces = [workspace for sublist in
[WorkSpace.objects.filter(targetOrganizations=org)
for org in user_groups]
for workspace in sublist]
# published workspaces assigned to the user's groups
# the compression list outside the inside compression list is for flattening
# the inside list
workspaces += [relation.workspace for sublist in
[GroupPublishedWorkspace.objects.filter(group=group)
for group in user_groups]
for relation in sublist]
workspaces = set(workspaces)
for workspace in workspaces:
ref = ref_from_workspace(workspace)
if ref not in current_workspace_refs:
workspaces_to_add.append((ref, workspace))
else:
workspaces_to_remove.remove(ref)
return (workspaces_to_remove, workspaces_to_add)
|
from workspace.models import GroupPublishedWorkspace, PublishedWorkSpace, WorkSpace
def ref_from_workspace(workspace):
if isinstance(workspace, WorkSpace):
return 'group/' + str(workspace.id)
elif isinstance(workspace, PublishedWorkSpace):
return 'group_published/' + str(workspace.id)
class OrganizationWorkspaceManager:
def get_id(self):
return 'ezweb_organizations'
def update_base_workspaces(self, user, current_workspace_refs):
workspaces_to_remove = current_workspace_refs[:]
workspaces_to_add = []
user_groups = user.groups.all()
# workspaces assigned to the user's groups
# the compression list outside the inside compression list is for flattening
# the inside list
workspaces = [workspace for sublist in
[WorkSpace.objects.filter(targetOrganizations=org)
for org in user_groups]
for workspace in sublist]
# published workspaces assigned to the user's groups
# the compression list outside the inside compression list is for flattening
# the inside list
workspaces += [relation.workspace for sublist in
[GroupPublishedWorkspace.objects.filter(group=group)
for group in user_groups]
for relation in sublist]
workspaces = set(workspaces)
for workspace in workspaces:
if workspace.creator == user:
# Ignore workspaces created by the user
continue
ref = ref_from_workspace(workspace)
if ref not in current_workspace_refs:
workspaces_to_add.append((ref, workspace))
else:
workspaces_to_remove.remove(ref)
return (workspaces_to_remove, workspaces_to_add)
|
Make OrganizationWorkspaceManager ignore the original workspace when sharing workspaces with groups
|
Make OrganizationWorkspaceManager ignore the original workspace when sharing workspaces with groups
|
Python
|
agpl-3.0
|
rockneurotiko/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud,jpajuelo/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud
|
06e7cf66d37a34a33349e47c374e733b1f3006be
|
test/functional/feature_shutdown.py
|
test/functional/feature_shutdown.py
|
#!/usr/bin/env python3
# Copyright (c) 2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test bitcoind shutdown."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, get_rpc_proxy
from threading import Thread
def test_long_call(node):
block = node.waitfornewblock()
assert_equal(block['height'], 0)
class ShutdownTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
def run_test(self):
node = get_rpc_proxy(self.nodes[0].url, 1, timeout=600, coveragedir=self.nodes[0].coverage_dir)
Thread(target=test_long_call, args=(node,)).start()
# wait 1 second to ensure event loop waits for current connections to close
self.stop_node(0, wait=1000)
if __name__ == '__main__':
ShutdownTest().main()
|
#!/usr/bin/env python3
# Copyright (c) 2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test bitcoind shutdown."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, get_rpc_proxy, wait_until
from threading import Thread
def test_long_call(node):
block = node.waitfornewblock()
assert_equal(block['height'], 0)
class ShutdownTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
def run_test(self):
node = get_rpc_proxy(self.nodes[0].url, 1, timeout=600, coveragedir=self.nodes[0].coverage_dir)
# Force connection establishment by executing a dummy command.
node.getblockcount()
Thread(target=test_long_call, args=(node,)).start()
# Wait until the server is executing the above `waitfornewblock`.
wait_until(lambda: len(self.nodes[0].getrpcinfo()['active_commands']) == 2)
# Wait 1 second after requesting shutdown but not before the `stop` call
# finishes. This is to ensure event loop waits for current connections
# to close.
self.stop_node(0, wait=1000)
if __name__ == '__main__':
ShutdownTest().main()
|
Remove race between connecting and shutdown on separate connections
|
qa: Remove race between connecting and shutdown on separate connections
|
Python
|
mit
|
chaincoin/chaincoin,chaincoin/chaincoin,chaincoin/chaincoin,chaincoin/chaincoin,chaincoin/chaincoin,chaincoin/chaincoin
|
d73872b8bcc6c7c32fa10d4a8ffdd77fe568a954
|
pyautotest/cli.py
|
pyautotest/cli.py
|
# -*- coding: utf-8 -*-
import logging
import os
import signal
import time
from optparse import OptionParser
from watchdog.observers import Observer
from pyautotest.observers import Notifier, ChangeHandler
# Configure logging
logging.basicConfig(format='%(asctime)s (%(name)s) [%(levelname)s]: %(message)s',
datefmt='%m-%d-%Y %H:%M:%S',
level=logging.INFO)
logger = logging.getLogger('pyautotest')
def main():
parser = OptionParser("usage: %prog [options]")
parser.set_defaults(loglevel="INFO")
parser.add_option("-l", "--log-level", action="store", dest="loglevel")
(options, args) = parser.parse_args()
# Handle options
logger.setLevel(getattr(logging, options.loglevel.upper(), None))
while True:
event_handler = ChangeHandler()
event_handler.run_tests()
observer = Observer()
observer.schedule(event_handler, os.getcwd(), recursive=True)
# Avoid child zombie processes
signal.signal(signal.SIGCHLD, signal.SIG_IGN)
observer.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
observer.stop()
observer.join()
if __name__ == "__main__":
main()
|
# -*- coding: utf-8 -*-
import argparse
import logging
import os
import signal
import time
from watchdog.observers import Observer
from pyautotest.observers import Notifier, ChangeHandler
# Configure logging
logging.basicConfig(format='%(asctime)s (%(name)s) [%(levelname)s]: %(message)s',
datefmt='%m-%d-%Y %H:%M:%S',
level=logging.INFO)
logger = logging.getLogger('pyautotest')
def main():
parser = argparse.ArgumentParser(description="Continuously run unit tests when changes detected")
parser.add_argument('-l', '--log-level',
metavar='L',
default='INFO',
dest='loglevel',
action='store',
help='set logger level')
args = parser.parse_args()
# Handle options
logger.setLevel(getattr(logging, args.loglevel.upper(), None))
while True:
event_handler = ChangeHandler()
event_handler.run_tests()
observer = Observer()
observer.schedule(event_handler, os.getcwd(), recursive=True)
# Avoid child zombie processes
signal.signal(signal.SIGCHLD, signal.SIG_IGN)
observer.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
observer.stop()
observer.join()
if __name__ == "__main__":
main()
|
Switch from optparase to argparse
|
Switch from optparase to argparse
|
Python
|
mit
|
ascarter/pyautotest
|
5e30bd1ae8218a6ad5a2582c15aed99258994d83
|
tests/tests/test_swappable_model.py
|
tests/tests/test_swappable_model.py
|
from django.test import TestCase
|
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.test import TestCase, modify_settings
from boardinghouse.schema import get_schema_model
class TestSwappableModel(TestCase):
@modify_settings()
def test_schema_model_app_not_found(self):
settings.BOARDINGHOUSE_SCHEMA_MODEL = 'foo.bar'
with self.assertRaises(ImproperlyConfigured):
get_schema_model()
@modify_settings()
def test_schema_model_model_not_found(self):
settings.BOARDINGHOUSE_SCHEMA_MODEL = 'boardinghouse.NotSchemaModel'
with self.assertRaises(ImproperlyConfigured):
get_schema_model()
@modify_settings()
def test_invalid_schema_model_string(self):
settings.BOARDINGHOUSE_SCHEMA_MODEL = 'foo__bar'
with self.assertRaises(ImproperlyConfigured):
get_schema_model()
|
Write tests for swappable model.
|
Write tests for swappable model.
Resolves #28, #36.
--HG--
branch : fix-swappable-model
|
Python
|
bsd-3-clause
|
schinckel/django-boardinghouse,schinckel/django-boardinghouse,schinckel/django-boardinghouse
|
1b1652053213f3939b50b2ac66a775cd5d4beed9
|
openpnm/__init__.py
|
openpnm/__init__.py
|
r"""
=======
OpenPNM
=======
OpenPNM is a package for performing pore network simulations of transport in
porous materials.
OpenPNM consists of several key modules. Each module is consisted of
several classes and each class is consisted of a few methods. Here, you'll
find a comprehensive documentation of the modules, classes, and finally the
methods, occasionally with basic embedded examples on how to use them.
"""
from .__version__ import __version__
from . import utils
from . import core
from . import network
from . import geometry
from . import phases
from . import physics
from . import models
from . import solvers
from . import integrators
from . import algorithms
from . import materials
from . import topotools
from . import io
from . import metrics
from .utils import Workspace, Project
import numpy as _np
_np.seterr(divide='ignore', invalid='ignore')
|
r"""
=======
OpenPNM
=======
OpenPNM is a package for performing pore network simulations of transport in
porous materials.
OpenPNM consists of several key modules. Each module is consisted of
several classes and each class is consisted of a few methods. Here, you'll
find a comprehensive documentation of the modules, classes, and finally the
methods, occasionally with basic embedded examples on how to use them.
"""
from .__version__ import __version__
from . import utils
from . import core
from . import network
from . import geometry
from . import phases
from . import physics
from . import models
from . import algorithms
from . import solvers
from . import integrators
from . import materials
from . import topotools
from . import io
from . import metrics
from .utils import Workspace, Project
import numpy as _np
_np.seterr(divide='ignore', invalid='ignore')
|
Fix import order to avoid circular import
|
Fix import order to avoid circular import
|
Python
|
mit
|
PMEAL/OpenPNM
|
32bf828445ed897609b908dff435191287f922f4
|
bookie/views/stats.py
|
bookie/views/stats.py
|
"""Basic views with no home"""
import logging
from pyramid.view import view_config
from bookie.bcelery import tasks
from bookie.models import BmarkMgr
from bookie.models.auth import ActivationMgr
from bookie.models.auth import UserMgr
LOG = logging.getLogger(__name__)
@view_config(
route_name="dashboard",
renderer="/stats/dashboard.mako")
def dashboard(request):
"""A public dashboard of the system
"""
res = tasks.count_total.delay()
# Generate some user data and stats
user_count = UserMgr.count()
pending_activations = ActivationMgr.count()
# Generate some bookmark data.
bookmark_count = BmarkMgr.count()
unique_url_count = BmarkMgr.count(distinct=True)
users_with_bookmarks = BmarkMgr.count(distinct_users=True)
return {
'bookmark_data': {
'count': bookmark_count,
'unique_count': unique_url_count,
},
'user_data': {
'count': user_count,
'activations': pending_activations,
'with_bookmarks': users_with_bookmarks,
}
}
|
"""Basic views with no home"""
import logging
from pyramid.view import view_config
from bookie.models import BmarkMgr
from bookie.models.auth import ActivationMgr
from bookie.models.auth import UserMgr
LOG = logging.getLogger(__name__)
@view_config(
route_name="dashboard",
renderer="/stats/dashboard.mako")
def dashboard(request):
"""A public dashboard of the system
"""
# Generate some user data and stats
user_count = UserMgr.count()
pending_activations = ActivationMgr.count()
# Generate some bookmark data.
bookmark_count = BmarkMgr.count()
unique_url_count = BmarkMgr.count(distinct=True)
users_with_bookmarks = BmarkMgr.count(distinct_users=True)
return {
'bookmark_data': {
'count': bookmark_count,
'unique_count': unique_url_count,
},
'user_data': {
'count': user_count,
'activations': pending_activations,
'with_bookmarks': users_with_bookmarks,
}
}
|
Clean up old code no longer used
|
Clean up old code no longer used
|
Python
|
agpl-3.0
|
adamlincoln/Bookie,charany1/Bookie,GreenLunar/Bookie,charany1/Bookie,skmezanul/Bookie,charany1/Bookie,adamlincoln/Bookie,GreenLunar/Bookie,pombredanne/Bookie,wangjun/Bookie,bookieio/Bookie,skmezanul/Bookie,pombredanne/Bookie,bookieio/Bookie,GreenLunar/Bookie,pombredanne/Bookie,teodesson/Bookie,skmezanul/Bookie,bookieio/Bookie,teodesson/Bookie,wangjun/Bookie,adamlincoln/Bookie,teodesson/Bookie,skmezanul/Bookie,wangjun/Bookie,GreenLunar/Bookie,teodesson/Bookie,adamlincoln/Bookie,bookieio/Bookie,wangjun/Bookie
|
c5a2c7e802d89ea17a7f0fd1a9194eaab8eaf61d
|
wcontrol/src/main.py
|
wcontrol/src/main.py
|
from flask import Flask
app = Flask(__name__)
app.config.from_object("config")
|
import os
from flask import Flask
app = Flask(__name__)
app.config.from_object(os.environ.get("WCONTROL_CONF"))
|
Use a env var to get config
|
Use a env var to get config
|
Python
|
mit
|
pahumadad/weight-control,pahumadad/weight-control,pahumadad/weight-control,pahumadad/weight-control
|
8d669dc8b09b8d7c8bc9b4c123e2bdd7c3521521
|
functionaltests/api/base.py
|
functionaltests/api/base.py
|
# -*- coding: utf-8 -*-
#
# Copyright 2013 - Noorul Islam K M
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.common import rest_client
from tempest import config
import testtools
CONF = config.CONF
class SolumClient(rest_client.RestClient):
def __init__(self, username, password, auth_url, tenant_name=None):
super(SolumClient, self).__init__(username, password, auth_url,
tenant_name)
self.service = 'application_deployment'
class TestCase(testtools.TestCase):
def setUp(self):
super(TestCase, self).setUp()
username = CONF.identity.username
password = CONF.identity.password
tenant_name = CONF.identity.tenant_name
auth_url = CONF.identity.uri
client_args = (username, password, auth_url, tenant_name)
self.client = SolumClient(*client_args)
|
# -*- coding: utf-8 -*-
#
# Copyright 2013 - Noorul Islam K M
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import clients
from tempest.common import rest_client
from tempest import config
import testtools
CONF = config.CONF
class SolumClient(rest_client.RestClient):
def __init__(self, auth_provider):
super(SolumClient, self).__init__(auth_provider)
self.service = 'application_deployment'
class TestCase(testtools.TestCase):
def setUp(self):
super(TestCase, self).setUp()
username = CONF.identity.username
password = CONF.identity.password
tenant_name = CONF.identity.tenant_name
mgr = clients.Manager(username, password, tenant_name)
auth_provider = mgr.get_auth_provider()
self.client = SolumClient(auth_provider)
|
Fix Tempest tests failing on gate-solum-devstack-dsvm
|
Fix Tempest tests failing on gate-solum-devstack-dsvm
Due to a new commit on tempest, devstack tests are failing. It changed
the signature of RestClient constructor.
This patch changes the signature of our inherited class to match the
change from tempest.
Change-Id: Id15682c68de123c0d66c6aa10d889c6304fcbb65
|
Python
|
apache-2.0
|
devdattakulkarni/test-solum,openstack/solum,ed-/solum,openstack/solum,ed-/solum,gilbertpilz/solum,ed-/solum,julienvey/solum,gilbertpilz/solum,stackforge/solum,ed-/solum,stackforge/solum,gilbertpilz/solum,gilbertpilz/solum,devdattakulkarni/test-solum,julienvey/solum
|
98dfc5569fb1ae58905f8b6a36deeda324dcdd7b
|
cronos/teilar/models.py
|
cronos/teilar/models.py
|
from django.db import models
class Departments(models.Model):
urlid = models.IntegerField(unique = True)
name = models.CharField("Department name", max_length = 200)
class Teachers(models.Model):
urlid = models.CharField("URL ID", max_length = 30, unique = True)
name = models.CharField("Teacher name", max_length = 100)
email = models.EmailField("Teacher's mail", null = True)
department = models.CharField("Teacher's department", max_length = 100, null = True)
def __unicode__(self):
return self.name
|
from django.db import models
class Departments(models.Model):
urlid = models.IntegerField(unique = True)
name = models.CharField("Department name", max_length = 200)
deprecated = models.BooleanField(default = False)
def __unicode__(self):
return self.name
class Teachers(models.Model):
urlid = models.IntegerField(unique = True)
name = models.CharField("Teacher name", max_length = 100)
email = models.EmailField("Teacher's mail", null = True)
department = models.CharField("Teacher's department", max_length = 100, null = True)
deprecated = models.BooleanField(default = False)
def __unicode__(self):
return self.name
|
Add deprecated flag for teachers and departments
|
Add deprecated flag for teachers and departments
|
Python
|
agpl-3.0
|
LinuxTeam-teilar/cronos.teilar.gr,LinuxTeam-teilar/cronos.teilar.gr,LinuxTeam-teilar/cronos.teilar.gr
|
2ac94aa922dbf2d07039bc6545e7b1d31c5c9e4e
|
src/cclib/progress/__init__.py
|
src/cclib/progress/__init__.py
|
__revision__ = "$Revision$"
from textprogress import TextProgress
try:
import qt
except ImportError:
pass # import QtProgress will cause an error
else:
from qtprogress import QtProgress
|
__revision__ = "$Revision$"
from textprogress import TextProgress
import sys
if 'qt' in sys.modules.keys():
from qtprogress import QtProgress
|
Check to see if qt is loaded; if so, export QtProgress class
|
Check to see if qt is loaded; if so, export QtProgress class
|
Python
|
lgpl-2.1
|
Clyde-fare/cclib,Schamnad/cclib,jchodera/cclib,ghutchis/cclib,gaursagar/cclib,berquist/cclib,andersx/cclib,ben-albrecht/cclib,Schamnad/cclib,cclib/cclib,ATenderholt/cclib,cclib/cclib,langner/cclib,berquist/cclib,Clyde-fare/cclib,langner/cclib,ben-albrecht/cclib,ATenderholt/cclib,ghutchis/cclib,jchodera/cclib,andersx/cclib,cclib/cclib,langner/cclib,berquist/cclib,gaursagar/cclib
|
32def5f720b5ffb858f604dc360f66fa2a1b946a
|
pirx/base.py
|
pirx/base.py
|
import collections
class Settings(object):
def __init__(self):
self._settings = collections.OrderedDict()
def __setattr__(self, name, value):
if name.startswith('_'):
super(Settings, self).__setattr__(name, value)
else:
self._settings[name] = value
def _set_raw_value(self, value):
self._settings['_%d' % len(self._settings)] = value
def imp(self, module_name):
value = 'import %s' % module_name
self._set_raw_value(value)
def write(self):
for name, value in self._settings.iteritems():
if name.startswith('_'):
print value
else:
print '%s = %s' % (name.upper(), value.__repr__())
|
import collections
class Settings(object):
def __init__(self):
self._settings = collections.OrderedDict()
def __setattr__(self, name, value):
if name.startswith('_'):
super(Settings, self).__setattr__(name, value)
else:
self._settings[name] = value
def __str__(self):
lines = []
for name, value in self._settings.iteritems():
if name.startswith('_'):
lines.append(value)
else:
lines.append('%s = %s' % (name.upper(), value.__repr__()))
return '\n'.join(lines)
def _set_raw_value(self, value):
self._settings['_%d' % len(self._settings)] = value
def imp(self, module_name):
value = 'import %s' % module_name
self._set_raw_value(value)
|
Replace 'write' method with '__str__'
|
Replace 'write' method with '__str__'
|
Python
|
mit
|
piotrekw/pirx
|
d26b9d22363f9763f959332d07445a2a4e7c221c
|
services/vimeo.py
|
services/vimeo.py
|
import foauth.providers
class Vimeo(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://vimeo.com/'
docs_url = 'http://developer.vimeo.com/apis/advanced'
category = 'Videos'
# URLs to interact with the API
request_token_url = 'https://vimeo.com/oauth/request_token'
authorize_url = 'https://vimeo.com/oauth/authorize?permission=delete'
access_token_url = 'https://vimeo.com/oauth/access_token'
api_domain = 'vimeo.com'
available_permissions = [
('read', 'access information about videos'),
('write', 'update and like videos'),
('delete', 'delete videos'),
]
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/api/rest/v2?method=vimeo.people.getInfo&format=json')
return r.json[u'person'][u'id']
|
import foauth.providers
class Vimeo(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://vimeo.com/'
docs_url = 'http://developer.vimeo.com/apis/advanced'
category = 'Videos'
# URLs to interact with the API
request_token_url = 'https://vimeo.com/oauth/request_token'
authorize_url = 'https://vimeo.com/oauth/authorize'
access_token_url = 'https://vimeo.com/oauth/access_token'
api_domain = 'vimeo.com'
available_permissions = [
(None, 'access your videos'),
('write', 'access, update and like videos'),
('delete', 'access, update, like and delete videos'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Vimeo, self).get_authorize_params(redirect_uri, scopes)
if any(scopes):
params['permission'] = scopes[0]
return params
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/api/rest/v2?method=vimeo.people.getInfo&format=json')
return r.json[u'person'][u'id']
|
Rewrite Vimeo to use the new scope selection system
|
Rewrite Vimeo to use the new scope selection system
|
Python
|
bsd-3-clause
|
foauth/foauth.org,foauth/foauth.org,foauth/oauth-proxy,foauth/foauth.org
|
612c393ec4d964fb933ebf5b8f957ae573ae65ba
|
tests/rules/test_git_push.py
|
tests/rules/test_git_push.py
|
import pytest
from thefuck.rules.git_push import match, get_new_command
from tests.utils import Command
@pytest.fixture
def stderr():
return '''fatal: The current branch master has no upstream branch.
To push the current branch and set the remote as upstream, use
git push --set-upstream origin master
'''
def test_match(stderr):
assert match(Command('git push master', stderr=stderr))
assert not match(Command('git push master'))
assert not match(Command('ls', stderr=stderr))
def test_get_new_command(stderr):
assert get_new_command(Command('git push', stderr=stderr))\
== "git push --set-upstream origin master"
|
import pytest
from thefuck.rules.git_push import match, get_new_command
from tests.utils import Command
@pytest.fixture
def stderr():
return '''fatal: The current branch master has no upstream branch.
To push the current branch and set the remote as upstream, use
git push --set-upstream origin master
'''
def test_match(stderr):
assert match(Command('git push', stderr=stderr))
assert match(Command('git push master', stderr=stderr))
assert not match(Command('git push master'))
assert not match(Command('ls', stderr=stderr))
def test_get_new_command(stderr):
assert get_new_command(Command('git push', stderr=stderr))\
== "git push --set-upstream origin master"
|
Check git_push matches without specifying a branch
|
Check git_push matches without specifying a branch
|
Python
|
mit
|
nvbn/thefuck,mlk/thefuck,mlk/thefuck,nvbn/thefuck,SimenB/thefuck,Clpsplug/thefuck,scorphus/thefuck,scorphus/thefuck,SimenB/thefuck,Clpsplug/thefuck
|
e621b9f03b19e38dc6754dd1a4cb7b172e4891e7
|
tests/test_extended_tests.py
|
tests/test_extended_tests.py
|
import pytest
import glob
from html2kirby import HTML2Kirby
files = []
for f in glob.glob("extended_tests/*.html"):
html = f
txt = f.replace(".html", ".txt")
files.append((html, txt))
@pytest.mark.parametrize("html,kirby", files)
def test_file(html, kirby):
formatter = HTML2Kirby()
with open(html, 'r') as html_file:
formatter.feed(html_file.read())
with open(kirby, 'r') as kirby_file:
expected_result = kirby_file.read()
assert formatter.markdown.strip() == expected_result.strip()
|
import pytest
import glob
import os
from html2kirby import HTML2Kirby
files = []
path = os.path.dirname(os.path.abspath(__file__))
extended_tests_path = os.path.join(path, "extended_tests/*.html")
for f in glob.glob(extended_tests_path):
html = f
txt = f.replace(".html", ".txt")
files.append((html, txt))
@pytest.mark.parametrize("html,kirby", files)
def test_file(html, kirby):
formatter = HTML2Kirby()
with open(html, 'r') as html_file:
formatter.feed(html_file.read())
with open(kirby, 'r') as kirby_file:
expected_result = kirby_file.read()
assert formatter.markdown.strip() == expected_result.strip()
|
Fix the extended test search
|
Fix the extended test search
|
Python
|
mit
|
liip/html2kirby,liip/html2kirby
|
603aacd06b99326d7dbab28e750b34589c51fa05
|
tests/test_postgresqlgate.py
|
tests/test_postgresqlgate.py
|
# coding: utf-8
"""
Unit tests for the base gate.
"""
from unittest.mock import MagicMock, mock_open, patch
import smdba.postgresqlgate
class TestPgGt:
"""
Test suite for base gate.
"""
@patch("os.path.exists", MagicMock(side_effect=[True, False, False]))
@patch("smdba.postgresqlgate.open", new_callable=mock_open,
read_data="key=value")
def test_get_scenario_template(self, mck):
"""
Gets scenario template.
:return:
"""
pgt = smdba.postgresqlgate.PgSQLGate({})
template = pgt.get_scenario_template(target="psql")
assert template == "cat - << EOF | /usr/bin/psql -t --pset footer=off\n@scenario\nEOF"
|
# coding: utf-8
"""
Unit tests for the base gate.
"""
from unittest.mock import MagicMock, mock_open, patch
import smdba.postgresqlgate
class TestPgGt:
"""
Test suite for base gate.
"""
@patch("os.path.exists", MagicMock(side_effect=[True, False, False]))
@patch("smdba.postgresqlgate.open", new_callable=mock_open,
read_data="key=value")
def test_get_scenario_template(self, mck):
"""
Gets scenario template.
:return:
"""
pgt = smdba.postgresqlgate.PgSQLGate({})
template = pgt.get_scenario_template(target="psql")
assert template == "cat - << EOF | /usr/bin/psql -t --pset footer=off\n@scenario\nEOF"
@patch("os.path.exists", MagicMock(side_effect=[True, False, False]))
@patch("smdba.postgresqlgate.open", new_callable=mock_open,
read_data="key=value")
def test_call_scenario(self, mck):
"""
Calls database scenario.
:return:
"""
pgt = smdba.postgresqlgate.PgSQLGate({})
pgt.get_scn = MagicMock()
pgt.get_scn().read = MagicMock(return_value="SELECT pg_reload_conf();")
pgt.syscall = MagicMock()
pgt.call_scenario("pg-reload-conf.scn", target="psql")
expectations = [
(
('sudo', '-u', 'postgres', '/bin/bash'),
{'input': 'cat - << EOF | /usr/bin/psql -t --pset footer=off\nSELECT pg_reload_conf();\nEOF'}
)
]
for call in pgt.syscall.call_args_list:
args, kw = call
exp_args, exp_kw = next(iter(expectations))
expectations.pop(0)
assert args == exp_args
assert "input" in kw
assert "input" in exp_kw
assert kw["input"] == exp_kw["input"]
assert not expectations
|
Add unit test for database scenario call
|
Add unit test for database scenario call
|
Python
|
mit
|
SUSE/smdba,SUSE/smdba
|
2a0b1d070996bfb3d950d4fae70b264ddabc7d2f
|
sheldon/config.py
|
sheldon/config.py
|
# -*- coding: utf-8 -*-
"""
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
import os
class Config:
def __init__(self, prefix='SHELDON_'):
"""
Load config from environment variables.
:param prefix: string, all needed environment variables
starts from it.
Default - 'SHELDON_'. So, environment
variables will be looking like:
'SHELDON_BOT_NAME', 'SHELDON_TWITTER_KEY'
:return:
"""
# Bot config variables
self.variables = {}
for variable in os.environ:
if variable.startswith(prefix):
self.variables[variable] = os.environ[variable]
def get(self, variable, default_value):
"""
:param variable: string, needed variable
:param default_value: string, value that returns if
variable is not set
:return:
"""
if variable not in self.variables:
return default_value
return self.variables[variable]
|
# -*- coding: utf-8 -*-
"""
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
import os
class Config:
def __init__(self, prefix='SHELDON_'):
"""
Load config from environment variables.
:param prefix: string, all needed environment variables
starts from it.
Default - 'SHELDON_'. So, environment
variables will be looking like:
'SHELDON_BOT_NAME', 'SHELDON_TWITTER_KEY'
:return:
"""
# Bot config variables
self.variables = {}
for variable in os.environ:
if variable.startswith(prefix):
self.variables[variable] = os.environ[variable]
def get(self, variable, default_value):
"""
Get variable value from environment
:param variable: string, needed variable
:param default_value: string, value that returns if
variable is not set
:return: variable value
"""
if variable not in self.variables:
return default_value
return self.variables[variable]
def get_installed_plugins(self):
"""
Return list of installed plugins from installed_plugins.txt
:return: list of strings with names of plugins
"""
plugins_file = open('installed_plugins.txt')
return plugins_file.readlines()
|
Add function for getting installed plugins
|
Add function for getting installed plugins
|
Python
|
mit
|
lises/sheldon
|
2f8ae4d29bd95c298209a0cb93b5354c00186d6b
|
trackpy/C_fallback_python.py
|
trackpy/C_fallback_python.py
|
try:
from _Cfilters import nullify_secondary_maxima
except ImportError:
import numpy as np
# Because of the way C imports work, nullify_secondary_maxima
# is *called*, as in nullify_secondary_maxima().
# For the pure Python variant, we do not want to call the function,
# so we make nullify_secondary_maxima a wrapper than returns
# the pure Python function that does the actual filtering.
def _filter(a):
target = a.size // 2 + 1
target_val = a[target]
if np.any(a[:target] > target_val):
return 0
if np.any(a[target + 1:] >= target_val):
return 0
return target
def nullify_secondary_maxima():
return _filter
|
try:
from _Cfilters import nullify_secondary_maxima
except ImportError:
import numpy as np
# Because of the way C imports work, nullify_secondary_maxima
# is *called*, as in nullify_secondary_maxima().
# For the pure Python variant, we do not want to call the function,
# so we make nullify_secondary_maxima a wrapper than returns
# the pure Python function that does the actual filtering.
def _filter(a):
target = a.size // 2
target_val = a[target]
if target_val == 0:
return 0 # speedup trivial case
if np.any(a[:target] > target_val):
return 0
if np.any(a[target + 1:] >= target_val):
return 0
return target_val
def nullify_secondary_maxima():
return _filter
|
Fix and speedup pure-Python fallback for C filter.
|
BUG/PERF: Fix and speedup pure-Python fallback for C filter.
|
Python
|
bsd-3-clause
|
daniorerio/trackpy,daniorerio/trackpy
|
0003ef7fe3d59c4bda034dee334d45b6d7a2622d
|
pyvm_test.py
|
pyvm_test.py
|
import pyvm
import unittest
class PyVMTest(unittest.TestCase):
def setUp(self):
self.vm = pyvm.PythonVM()
def test_load_const_num(self):
self.assertEqual(
10,
self.vm.eval('10')
)
def test_load_const_str(self):
self.assertEqual(
"hoge",
self.vm.eval('"hoge"')
)
if __name__ == '__main__':
unittest.main()
|
import pyvm
import unittest
class PyVMTest(unittest.TestCase):
def setUp(self):
self.vm = pyvm.PythonVM()
def test_load_const_num(self):
self.assertEqual(
10,
self.vm.eval('10')
)
def test_load_const_num_float(self):
self.assertEqual(
10.55,
self.vm.eval('10.55')
)
def test_load_const_str(self):
self.assertEqual(
"hoge",
self.vm.eval('"hoge"')
)
if __name__ == '__main__':
unittest.main()
|
Add test of storing float
|
Add test of storing float
|
Python
|
mit
|
utgwkk/tiny-python-vm
|
c4d64672c8c72ca928b354e9cfd35a7d40dbb78f
|
MROCPdjangoForm/ocpipeline/mrpaths.py
|
MROCPdjangoForm/ocpipeline/mrpaths.py
|
#
# Code to load project paths
#
import os, sys
MR_BASE_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), "/Users/dmhembere44/MR-connectome" ))
MR_CMAPPER_PATH = os.path.join(MR_BASE_PATH, "cmapper" )
MR_MRCAP_PATH = os.path.join(MR_BASE_PATH, "mrcap" )
sys.path += [ MR_BASE_PATH, MR_CMAPPER_PATH, MR_MRCAP_PATH ]
|
#
# Code to load project paths
#
import os, sys
MR_BASE_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.." ))
MR_CMAPPER_PATH = os.path.join(MR_BASE_PATH, "cmapper" )
MR_MRCAP_PATH = os.path.join(MR_BASE_PATH, "mrcap" )
sys.path += [ MR_BASE_PATH, MR_CMAPPER_PATH, MR_MRCAP_PATH ]
|
Change to path, made relative
|
Change to path, made relative
Former-commit-id: f00bf782fad3f6ddc6d2c97a23ff4f087ad3a22f
|
Python
|
apache-2.0
|
neurodata/ndmg
|
cba5a3d4928a3ee2e7672ca4a3f766a789d83acf
|
cupcake/smush/plot.py
|
cupcake/smush/plot.py
|
"""
User-facing interface for plotting all dimensionality reduction algorithms
"""
def smushplot(data, smusher='pca', n_components=2, marker='o', marker_order=None,
text=False, text_order=None, linewidth=1, linewidth_order=None,
edgecolor='k', edgecolor_order=None, smusher_kws=None,
plot_kws=None):
if isinstance(smusher, str):
# Need to get appropriate smusher from sklearn given the string
pass
else:
# Assume this is already an initialized sklearn object with the
# ``fit_transform()`` method
pass
|
"""
User-facing interface for plotting all dimensionality reduction algorithms
"""
def smushplot(data, smusher='PCA', x=1, y=2, n_components=2, marker='o',
marker_order=None, text=False, text_order=None, linewidth=1,
linewidth_order=None, edgecolor='k', edgecolor_order=None,
smusher_kws=None, plot_kws=None):
"""Plot high dimensional data in 2d space
Parameters
----------
data : pandas.DataFrame or numpy.array
A (n_samples, m_features) wide matrix of observations. The samples
(rows) will be plotted relative to the reduced representation of the
features (columns)
smusher : str or object
Either a string specifying a valid dimensionality reduction algorithm
in ``sklearn.decomposition`` or ``sklearn.manifold``, or any object
with ``fit_transform()`` methods.
Notes
-----
"""
if isinstance(smusher, str):
# Need to get appropriate smusher from sklearn given the string
pass
else:
# Assume this is already an initialized sklearn object with the
# ``fit_transform()`` method
pass
|
Add x, y arguments and docstring
|
Add x, y arguments and docstring
|
Python
|
bsd-3-clause
|
olgabot/cupcake
|
66ba9aa2172fbed67b67a06acb331d449d32a33c
|
tests/services/shop/conftest.py
|
tests/services/shop/conftest.py
|
"""
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import pytest
from byceps.services.shop.cart.models import Cart
from byceps.services.shop.sequence import service as sequence_service
from byceps.services.shop.shop import service as shop_service
from testfixtures.shop_order import create_orderer
from tests.helpers import create_user_with_detail
@pytest.fixture
def shop(email_config):
return shop_service.create_shop('shop-01', 'Some Shop', email_config.id)
@pytest.fixture
def orderer(normal_user):
user = create_user_with_detail('Besteller')
return create_orderer(user)
@pytest.fixture
def empty_cart() -> Cart:
return Cart()
@pytest.fixture
def order_number_sequence(shop) -> None:
sequence_service.create_order_number_sequence(shop.id, 'order-')
|
"""
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import pytest
from byceps.services.shop.cart.models import Cart
from byceps.services.shop.sequence import service as sequence_service
from byceps.services.shop.shop import service as shop_service
from testfixtures.shop_order import create_orderer
from tests.helpers import create_user_with_detail
@pytest.fixture
def shop(email_config):
return shop_service.create_shop('shop-01', 'Some Shop', email_config.id)
@pytest.fixture
def orderer():
user = create_user_with_detail('Besteller')
return create_orderer(user)
@pytest.fixture
def empty_cart() -> Cart:
return Cart()
@pytest.fixture
def order_number_sequence(shop) -> None:
sequence_service.create_order_number_sequence(shop.id, 'order-')
|
Remove unused fixture from orderer
|
Remove unused fixture from orderer
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps
|
2e897f7dce89d4b52c3507c62e7120ee238b713c
|
database/database_setup.py
|
database/database_setup.py
|
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine
from models.base import Base
from models.user import User
from models.store import Store
from models.product import Product
engine = create_engine('sqlite:///productcatalog.db')
Base.metadata.create_all(engine)
|
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine
from models.base import Base
from models.user import User
from models.store import Store
from models.product import Product
engine = create_engine('postgresql://catalog:catalog123!@localhost:8000/catalog')
Base.metadata.create_all(engine)
|
Connect database engine to postgresql
|
feat: Connect database engine to postgresql
|
Python
|
mit
|
caasted/aws-flask-catalog-app,caasted/aws-flask-catalog-app
|
a8c8b136f081e3a2c7f1fd1f833a85288a358e42
|
vumi_http_retry/workers/api/validate.py
|
vumi_http_retry/workers/api/validate.py
|
import json
from functools import wraps
from twisted.web import http
from jsonschema import Draft4Validator
from vumi_http_retry.workers.api.utils import response
def validate(*validators):
def validator(fn):
@wraps(fn)
def wrapper(api, req, *a, **kw):
errors = []
for v in validators:
errors.extend(v(req, *a, **kw) or [])
if not errors:
return fn(api, req, *a, **kw)
else:
return response(req, {'errors': errors}, code=http.BAD_REQUEST)
return wrapper
return validator
def has_header(name):
def validator(req):
if not req.requestHeaders.hasHeader(name):
return [{
'type': 'header_missing',
'message': "Header '%s' is missing" % (name,)
}]
else:
return []
return validator
def body_schema(schema):
json_validator = Draft4Validator(schema)
def validator(req, body):
return [{
'type': 'invalid_body',
'message': e.message
} for e in json_validator.iter_errors(body)]
return validator
|
import json
from functools import wraps
from twisted.web import http
from jsonschema import Draft4Validator
from vumi_http_retry.workers.api.utils import response
def validate(*validators):
def validator(fn):
@wraps(fn)
def wrapper(api, req, *a, **kw):
errors = []
for v in validators:
errors.extend(v(req, *a, **kw) or [])
if not errors:
return fn(api, req, *a, **kw)
else:
return response(req, {'errors': errors}, code=http.BAD_REQUEST)
return wrapper
return validator
def has_header(name):
def validator(req, *a, **kw):
if not req.requestHeaders.hasHeader(name):
return [{
'type': 'header_missing',
'message': "Header '%s' is missing" % (name,)
}]
else:
return []
return validator
def body_schema(schema):
json_validator = Draft4Validator(schema)
def validator(req, body, *a, **kw):
return [{
'type': 'invalid_body',
'message': e.message
} for e in json_validator.iter_errors(body)]
return validator
|
Change validators to allow additional arguments to be given to the functions they are wrapping
|
Change validators to allow additional arguments to be given to the functions they are wrapping
|
Python
|
bsd-3-clause
|
praekelt/vumi-http-retry-api,praekelt/vumi-http-retry-api
|
370dac353937d73798b4cd2014884b9f1aa95abf
|
osmaxx-py/osmaxx/contrib/auth/tests/test_frontend_permissions.py
|
osmaxx-py/osmaxx/contrib/auth/tests/test_frontend_permissions.py
|
from django.test import TestCase
from django.contrib.auth.models import User
from osmaxx.contrib.auth.frontend_permissions import user_in_osmaxx_group
class TestFrontendPermissions(TestCase):
def test_superuser_can_access_frontend_even_if_not_in_osmaxx_group(self):
an_admin = User.objects.create_superuser('A. D. Min', 'admin@example.com', 'password')
self.assertTrue(user_in_osmaxx_group(an_admin))
|
from django.test import TestCase
from django.contrib.auth.models import User, Group
from osmaxx.contrib.auth.frontend_permissions import user_in_osmaxx_group, FRONTEND_USER_GROUP
class TestFrontendPermissions(TestCase):
def test_superuser_can_access_frontend_even_if_not_in_osmaxx_group(self):
an_admin = User.objects.create_superuser('A. D. Min', 'admin@example.com', 'password')
self.assertTrue(user_in_osmaxx_group(an_admin))
def test_user_can_access_frontend_when_in_osmaxx_group(self):
a_user = User.objects.create_user('U. Ser', 'user@example.com', 'password')
a_user.groups.add(Group.objects.get(name=FRONTEND_USER_GROUP))
self.assertTrue(user_in_osmaxx_group(a_user))
|
Test that users can access frontend when in osmaxx group
|
Test that users can access frontend when in osmaxx group
|
Python
|
mit
|
geometalab/osmaxx,geometalab/osmaxx-frontend,geometalab/osmaxx-frontend,geometalab/drf-utm-zone-info,geometalab/osmaxx,geometalab/drf-utm-zone-info,geometalab/osmaxx-frontend,geometalab/osmaxx,geometalab/osmaxx,geometalab/osmaxx-frontend
|
837b767036f580a8c9d523e0f6c175a75d1dc3b2
|
pi_control_service/gpio_service.py
|
pi_control_service/gpio_service.py
|
from rpc import RPCService
from pi_pin_manager import PinManager
ALLOWED_ACTIONS = ('on', 'off', 'read')
class GPIOService(RPCService):
def __init__(self, rabbit_url, device_key, pin_config):
self.pins = PinManager(config_file=pin_config)
super(GPIOService, self).__init__(
rabbit_url=rabbit_url,
queue_name='gpio_service',
device_key=device_key,
request_action=self._perform_gpio_action)
def _perform_gpio_action(self, instruction):
result = {'error': 1, 'pin': instruction['pin'], 'response': "An error occurred"}
if instruction['action'] not in ALLOWED_ACTIONS:
result['response'] = "'action' must be one of: {0}".format(', '.join(ALLOWED_ACTIONS))
return result
try:
result['response'] = getattr(self.pins, instruction['action'])(int(instruction['pin']))
result['error'] = 0
except ValueError:
result['response'] = "'pin' value must be an integer"
except Exception as e:
result['response'] = e.message
return result
def stop(self):
self.pins.cleanup()
super(GPIOService, self).stop()
|
from rpc import RPCService
from pi_pin_manager import PinManager
ALLOWED_ACTIONS = ('on', 'off', 'read', 'get_config')
class GPIOService(RPCService):
def __init__(self, rabbit_url, device_key, pin_config):
self.pins = PinManager(config_file=pin_config)
super(GPIOService, self).__init__(
rabbit_url=rabbit_url,
queue_name='gpio_service',
device_key=device_key,
request_action=self._perform_gpio_action)
def _perform_gpio_action(self, instruction):
result = {'error': 1, 'response': "An error occurred"}
if instruction['action'] not in ALLOWED_ACTIONS:
result['response'] = "'action' must be one of: {0}".format(', '.join(ALLOWED_ACTIONS))
return result
try:
pin = instruction['pin']
except KeyError:
try:
result['response'] = getattr(self.pins, instruction['action'])()
result['error'] = 0
except Exception as e:
result['response'] = e.message
else:
try:
result['response'] = getattr(self.pins, instruction['action'])(int(pin))
result['error'] = 0
except ValueError:
result['response'] = "'pin' value must be an integer"
except Exception as e:
result['response'] = e.message
return result
def stop(self):
self.pins.cleanup()
super(GPIOService, self).stop()
|
Add get_config as GPIO action
|
Add get_config as GPIO action
|
Python
|
mit
|
HydAu/ProjectWeekds_Pi-Control-Service,projectweekend/Pi-Control-Service
|
6f8b5950a85c79ed33c1d00a35a1def2efc7bff5
|
tests/conftest.py
|
tests/conftest.py
|
from factories import post_factory, post
|
from factories import post_factory, post
import os
import sys
root = os.path.join(os.path.dirname(__file__))
package = os.path.join(root, '..')
sys.path.insert(0, os.path.abspath(package))
|
Make the tests run just via py.test
|
Make the tests run just via py.test
|
Python
|
mit
|
kalasjocke/hyp
|
39c777d6fc5555534628113190bb543c6225c07e
|
uncurl/bin.py
|
uncurl/bin.py
|
from __future__ import print_function
import sys
from .api import parse
def main():
result = parse(sys.argv[1])
print(result)
|
from __future__ import print_function
import sys
from .api import parse
def main():
if sys.stdin.isatty():
result = parse(sys.argv[1])
else:
result = parse(sys.stdin.read())
print(result)
|
Read from stdin if available.
|
Read from stdin if available.
|
Python
|
apache-2.0
|
weinerjm/uncurl,spulec/uncurl
|
899f28e2cd7dbeb6227e8c56eef541cce1a424f4
|
alertaclient/commands/cmd_heartbeat.py
|
alertaclient/commands/cmd_heartbeat.py
|
import os
import platform
import sys
import click
prog = os.path.basename(sys.argv[0])
@click.command('heartbeat', short_help='Send a heartbeat')
@click.option('--origin', default='{}/{}'.format(prog, platform.uname()[1]))
@click.option('--tag', '-T', 'tags', multiple=True)
@click.option('--timeout', metavar='EXPIRES', help='Seconds before heartbeat is stale')
@click.option('--delete', '-D', metavar='ID', help='Delete hearbeat')
@click.pass_obj
def cli(obj, origin, tags, timeout, delete):
"""Send or delete a heartbeat."""
client = obj['client']
if delete:
if origin or tags or timeout:
raise click.UsageError('Option "--delete" is mutually exclusive.')
client.delete_heartbeat(delete)
else:
try:
heartbeat = client.heartbeat(origin=origin, tags=tags, timeout=timeout)
except Exception as e:
click.echo('ERROR: {}'.format(e))
sys.exit(1)
click.echo(heartbeat.id)
|
import os
import platform
import sys
import click
prog = os.path.basename(sys.argv[0])
@click.command('heartbeat', short_help='Send a heartbeat')
@click.option('--origin', default='{}/{}'.format(prog, platform.uname()[1]))
@click.option('--tag', '-T', 'tags', multiple=True)
@click.option('--timeout', metavar='EXPIRES', type=int, help='Seconds before heartbeat is stale')
@click.option('--delete', '-D', metavar='ID', help='Delete hearbeat')
@click.pass_obj
def cli(obj, origin, tags, timeout, delete):
"""Send or delete a heartbeat."""
client = obj['client']
if delete:
if origin or tags or timeout:
raise click.UsageError('Option "--delete" is mutually exclusive.')
client.delete_heartbeat(delete)
else:
try:
heartbeat = client.heartbeat(origin=origin, tags=tags, timeout=timeout)
except Exception as e:
click.echo('ERROR: {}'.format(e))
sys.exit(1)
click.echo(heartbeat.id)
|
Add check that heartbeat timeout is integer
|
Add check that heartbeat timeout is integer
|
Python
|
apache-2.0
|
alerta/python-alerta-client,alerta/python-alerta-client,alerta/python-alerta
|
ee8cb600c772e4a0f795a0fe00b1e612cb8a8e37
|
dirmuncher.py
|
dirmuncher.py
|
#!/usr/bin/env python
# -*- Coding: utf-8 -*-
import os
class Dirmuncher:
def __init__(self, directory):
self.directory = directory
def directoryListing(self):
for dirname, dirnames, filenames in os.walk(self.directory):
# Subdirectories
for subdirname in dirnames:
print(os.path.join(dirname, subdirname))
# Filenames
for filename in filenames:
print(os.path.join(dirname, filename))
if __name__ == "__main__":
muncher = Dirmuncher('movies')
muncher.directoryListing()
|
#!/usr/bin/env python
# -*- Coding: utf-8 -*-
import os
class Dirmuncher:
def __init__(self, directory):
self.directory = directory
def getFiles(self):
result = {}
for dirname, dirnames, filenames in os.walk(self.directory):
# Subdirectories
for subdirname in dirnames:
print(os.path.join(dirname, subdirname))
# Filenames
for filename in filenames:
print(os.path.join(dirname, filename))
result[dirname] = filenames
return result
if __name__ == "__main__":
muncher = Dirmuncher('movies')
print(muncher.getFiles())
|
Sort files into dict with dir as key
|
[py] Sort files into dict with dir as key
|
Python
|
mit
|
claudemuller/masfir
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.