commit
stringlengths 40
40
| old_file
stringlengths 4
236
| new_file
stringlengths 4
236
| old_contents
stringlengths 1
3.26k
| new_contents
stringlengths 16
4.43k
| subject
stringlengths 16
624
| message
stringlengths 17
3.29k
| lang
stringclasses 5
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
|---|---|---|---|---|---|---|---|---|---|
7c5747688dffd41737c841ac354c4947939006c3
|
django_excel_tools/exceptions.py
|
django_excel_tools/exceptions.py
|
class BaseExcelError(Exception):
def __init__(self, message):
super(BaseExcelError, self).__init__()
self.message = message
class ValidationError(BaseExcelError):
pass
class ColumnNotEqualError(BaseExcelError):
pass
class FieldNotExist(BaseExcelError):
pass
class SerializerConfigError(BaseExcelError):
pass
|
class BaseExcelError(Exception):
def __init__(self, message):
super(BaseExcelError, self).__init__()
self.message = message
class ColumnNotEqualError(BaseExcelError):
pass
class FieldNotExist(BaseExcelError):
pass
class ImportOperationFailed(BaseExcelError):
pass
class SerializerConfigError(BaseExcelError):
pass
class ValidationError(BaseExcelError):
pass
|
Add exception for import operation error and validation error
|
Add exception for import operation error and validation error
|
Python
|
mit
|
NorakGithub/django-excel-tools
|
8e4d77636a9846296225ddbfab872be4c7486261
|
dask_distance/_pycompat.py
|
dask_distance/_pycompat.py
|
# -*- coding: utf-8 -*-
try:
irange = xrange
except NameError:
irange = range
|
# -*- coding: utf-8 -*-
try:
irange = xrange
except NameError:
irange = range
try:
from itertools import izip
except ImportError:
izip = zip
|
Add izip for Python 2/3 compatibility
|
Add izip for Python 2/3 compatibility
Simply use `izip` from `itertools` on Python 2 and alias `izip` as `zip`
on Python 3. This way an iterable form of `zip` remains available on
both Python 2 and Python 3 that is named `izip`. Should help avoid
having the performance of the two implementations from diverging too
far.
|
Python
|
bsd-3-clause
|
jakirkham/dask-distance
|
21541a4a1b2fcdfaa4bb7d2fbebf7db702eebacc
|
app/main.py
|
app/main.py
|
import csv
import yaml
import parse
import deparse
def main():
with open('data/features.csv', 'r') as f:
segments = [segment for segment in csv.DictReader(f)]
with open('data/diacritics.yaml') as f:
diacritics = yaml.load(f)
with open('data/feature-strings-with-diacritics.csv') as f:
feature_strings = [line for line in csv.reader(f)]
words = parse.parse_words(['bːɒtl', 'b\u02D0ɒtl'], segments, diacritics)
print(deparse.deparse_words(words, segments, feature_strings))
if __name__ == '__main__':
main()
|
import csv
import yaml
import parse
import evolve
import deparse
def evolve_words(words, available_rules, generations=5):
'''Evolves the given list of words according to the given list of rules, for a
number of generations. If no more applicable rules are available, the evolution
will stop early. Returns the evolved list of words and a list of rule which
were applied.
'''
applied_rules = []
try:
for _ in range(generations):
rule, words = evolve.evolve(words, available_rules)
applied_rules.append(rule)
# StopIteration is raised when there are no more applicable rules
except StopIteration:
return words, applied_rules
return words, applied_rules
def main():
with open('data/features.csv', 'r') as f:
segments = [segment for segment in csv.DictReader(f)]
with open('data/diacritics.yaml') as f:
diacritics = yaml.load(f)
with open('data/rules.yaml') as f:
rules = yaml.load(f)
with open('data/feature-strings-with-diacritics.csv') as f:
feature_strings = [line for line in csv.reader(f)]
word_strings = ['mːɒtl', 'b\u02D0ɒtl']
words = parse.parse_words(word_strings, segments, diacritics)
evolved_words, applied_rules = evolve_words(words, rules)
deparsed = deparse.deparse_words(evolved_words, segments, feature_strings)
for word, evolved_word in zip(word_strings, deparsed):
print('{0} -> {1}'.format(word, evolved_word))
if __name__ == '__main__':
main()
|
Create basic flow of evolution process.
|
Create basic flow of evolution process.
|
Python
|
mit
|
kdelwat/LangEvolve,kdelwat/LangEvolve,kdelwat/LangEvolve
|
fe4a0e0b86300f7da5f45a5541ee9c42c0a76ab7
|
docker_manager/dispatcher.py
|
docker_manager/dispatcher.py
|
import requests_unixsocket
import urllib
import json
import re
def dockerapi_dispatcher(app,request):
method = request.method
uri = re.match(r"^.+/dockerapi/(.+)", request.url).group(1)
session = requests_unixsocket.Session()
unix_socket = urllib.quote_plus( app.config['SOCKET'] )
return getattr(session,method.lower())('http+unix://{0}/{1}'.format(unix_socket,uri),json=request.json)
|
import requests_unixsocket
import urllib
import re
def dockerapi_dispatcher(app,request):
method = request.method
uri = re.match(r"^.+/dockerapi/(.+)", request.url).group(1)
session = requests_unixsocket.Session()
unix_socket = urllib.quote_plus( app.config['SOCKET'] )
return getattr(session,method.lower())('http+unix://{0}/{1}'.format(unix_socket,uri),json=request.json)
|
Remove unsed json module import
|
Remove unsed json module import
|
Python
|
apache-2.0
|
nathanIL/docker-manager,nathanIL/docker-manager,nathanIL/docker-manager
|
1b18d81f673e9f982e9778823198cbc336285db4
|
tests/grammar_creation_test/NonterminalAddingTest.py
|
tests/grammar_creation_test/NonterminalAddingTest.py
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import *
class NonterminalAddingTest(TestCase):
pass
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import *
from grammpy.exceptions import NotNonterminalException
class A(Nonterminal):
pass
class B(Nonterminal):
pass
class C(Nonterminal):
pass
class D(Nonterminal):
pass
class NonterminalAddingTest(TestCase):
def test_shouldAddOneNonterminal(self):
g = Grammar(nonterminals=[A])
self.assertTrue(g.have_nonterm(A))
self.assertFalse(g.have_nonterm(B))
self.assertFalse(g.have_nonterm([A, B]))
def test_shouldAddMoreNonterminals(self):
g = Grammar(nonterminals=[A, B, C])
self.assertTrue(g.have_nonterm(A))
self.assertTrue(g.have_nonterm([A, B, C]))
self.assertFalse(g.have_nonterm(D))
def test_shouldNotAddInvalidNonterminal(self):
with self.assertRaises(NotNonterminalException):
Grammar(nonterminals=[0])
def test_shouldNotAddOneInvalidNonterminal(self):
with self.assertRaises(NotNonterminalException):
Grammar(nonterminals=[A, B, 1])
if __name__ == '__main__':
main()
|
Add test of adding nonterminals when grammar is create
|
Add test of adding nonterminals when grammar is create
|
Python
|
mit
|
PatrikValkovic/grammpy
|
8b04e4de91f60a2727f9c7a6f5a56d86279c667d
|
avalonstar/apps/live/urls.py
|
avalonstar/apps/live/urls.py
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.views.generic import RedirectView
from .views import AwayView, DiscussionView, GameView, PrologueView
urlpatterns = patterns('',
# Because sooner or later, avalonstar.tv/ will be a welcome page.
url(r'^$', name='site-home', view=RedirectView.as_view(url='http://twitch.tv/avalonstar')),
# Overlays.
url(r'^away/$', name='live-away', view=AwayView.as_view()),
url(r'^discussion/$', name='live-discussion', view=DiscussionView.as_view()),
url(r'^game/$', name='live-game', view=GameView.as_view()),
url(r'^prologue/$', name='live-prologue', view=PrologueView.as_view()),
)
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.views.generic import RedirectView
from .views import (AwayView, DiscussionView, EpilogueView, GameView,
PrologueView)
urlpatterns = patterns('',
# Because sooner or later, avalonstar.tv/ will be a welcome page.
url(r'^$', name='site-home', view=RedirectView.as_view(url='http://twitch.tv/avalonstar')),
# Overlays.
url(r'^away/$', name='live-away', view=AwayView.as_view()),
url(r'^discussion/$', name='live-discussion', view=DiscussionView.as_view()),
url(r'^epilogue/$', name='live-epilogue', view=EpilogueView.as_view()),
url(r'^game/$', name='live-game', view=GameView.as_view()),
url(r'^prologue/$', name='live-prologue', view=PrologueView.as_view()),
)
|
Add a URL conf for the EpilogueView.
|
Add a URL conf for the EpilogueView.
|
Python
|
apache-2.0
|
bryanveloso/avalonstar-tv,bryanveloso/avalonstar-tv,bryanveloso/avalonstar-tv
|
5cbbe25d9bbfbea804d22f399ec16ed438c492e4
|
moss/plots.py
|
moss/plots.py
|
import os.path as op
import numpy as np
import nibabel as nib
import seaborn as sns
def plot_mask_distribution(fname, hist=False):
"""Plot the distribution of voxel coordinates in a mask file.
Parameters
----------
fname : string
path to binary mask file
Returns
-------
ax : matplotlib axis object
axis with plot on it
"""
img = nib.load(fname)
mask = img.get_data()
aff = img.get_affine()
vox = np.where(mask)
vox = np.vstack([vox, np.ones(mask.sum())])
coords = np.dot(aff, vox)[:-1]
colors = sns.get_color_list()[:3]
for axis, data, color in zip(["x", "y", "z"], coords, colors):
if hist:
ax = sns.kdeplot(data, hist=True, label=axis, color=color)
else:
ax = sns.kdeplot(data, shade=True, label=axis, color=color)
ax.legend()
ax.set_title(op.basename(fname))
return ax
|
import os.path as op
import numpy as np
import nibabel as nib
import matplotlib.pyplot as plt
import seaborn as sns
def plot_mask_distribution(fname, ax=None, hist=False):
"""Plot the distribution of voxel coordinates in a mask file.
Parameters
----------
fname : string
path to binary mask file
Returns
-------
ax : matplotlib axis object
axis with plot on it
"""
if ax is None:
ax = plt.subplot(111)
img = nib.load(fname)
mask = img.get_data()
aff = img.get_affine()
vox = np.where(mask)
vox = np.vstack([vox, np.ones(mask.sum())])
coords = np.dot(aff, vox)[:-1]
colors = sns.get_color_list()[:3]
for axis, data, color in zip(["x", "y", "z"], coords, colors):
if hist:
sns.kdeplot(data, hist=True, label=axis, color=color, ax=ax)
else:
sns.kdeplot(data, shade=True, label=axis, color=color, ax=ax)
ax.legend()
ax.set_title(op.basename(fname))
return ax
|
Allow for shade or hist in mask plot
|
Allow for shade or hist in mask plot
|
Python
|
bsd-3-clause
|
mwaskom/moss,mwaskom/moss
|
99f862b6c123b8c6d81e931254c061e64431bccc
|
pysingcells/logger.py
|
pysingcells/logger.py
|
# -*- coding: utf-8 -*-
""" Function related to logging """
# stp import
import sys
import logging
# fief import
from fief import filter_effective_parameters as fief
log = logging.getLogger()
@fief
def setup_logging(options):
log_level = 10 if "logging_level" in options else options["logging_level"]
log.setLevel(log_level)
handler = logging.StreamHandler(sys.stderr)
formatter = logging.Formatter('%(levelname)s :: %(message)s')
handler.setFormatter(formatter)
handler.setLevel(log_level)
log.addHandler(handler)
|
# -*- coding: utf-8 -*-
""" Function related to logging """
# stp import
import sys
import logging
# fief import
from fief import filter_effective_parameters as fief
log = logging.getLogger()
@fief
def setup_logging(options):
log_level = 10 if "logging_level" in options else options["logging_level"]
log.setLevel(log_level)
handler = logging.StreamHandler(sys.stderr)
formatter = logging.Formatter('%(asctime)s :: %(levelname)s :: %(message)s')
handler.setFormatter(formatter)
handler.setLevel(log_level)
log.addHandler(handler)
|
Add time in logging trace
|
Add time in logging trace
|
Python
|
mit
|
Fougere87/pysingcells
|
1bc174d357423964191625faad6733466320face
|
application.py
|
application.py
|
import os
import sentry_sdk
from flask import Flask
from sentry_sdk.integrations.flask import FlaskIntegration
from sentry_sdk.integrations import logging
from app import create_app
sentry_sdk.init(
dsn=os.environ['SENTRY_DSN'],
integrations=[FlaskIntegration()],
environment=os.environ['NOTIFY_ENVIRONMENT'],
attach_stacktrace=True,
traces_sample_rate=0.00005 # avoid exceeding rate limits in Production
)
sentry_sdk.set_level('error') # only record error logs or exceptions
logging.ignore_logger('notifications_python_client.*') # ignore logs about 404s, etc.
application = Flask('app')
create_app(application)
|
import os
import sentry_sdk
from flask import Flask
from sentry_sdk.integrations.flask import FlaskIntegration
from sentry_sdk.integrations.redis import RedisIntegration
from sentry_sdk.integrations import logging
from app import create_app
sentry_sdk.init(
dsn=os.environ['SENTRY_DSN'],
integrations=[FlaskIntegration(), RedisIntegration()],
environment=os.environ['NOTIFY_ENVIRONMENT'],
attach_stacktrace=True,
traces_sample_rate=0.00005 # avoid exceeding rate limits in Production
)
sentry_sdk.set_level('error') # only record error logs or exceptions
logging.ignore_logger('notifications_python_client.*') # ignore logs about 404s, etc.
application = Flask('app')
create_app(application)
|
Add RedisIntegration to Sentry trial
|
Add RedisIntegration to Sentry trial
This should expose additional performance stats.
|
Python
|
mit
|
alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin
|
6de1eb3e3279af6fa09ddafd0e220becac7edf21
|
author/urls.py
|
author/urls.py
|
from django.conf.urls import patterns
from django.conf.urls import url
from django.views.generic.base import RedirectView
from django.core.urlresolvers import reverse_lazy
from django.contrib.auth.views import login
from django.contrib.auth.decorators import login_required
from author import views
urlpatterns = patterns(
'',
url(r'^$', RedirectView.as_view(url=reverse_lazy('author-login'),
permanent=False)),
url(r'^login/$',
login,
{'template_name': 'author/login.html'},
name='author-login'),
url(r'^panel/$',
login_required(function=views.PanelView.as_view(),
login_url=reverse_lazy('author-login')),
name='panel'),
)
|
from django.conf.urls import patterns
from django.conf.urls import url
from django.views.generic.base import RedirectView
from django.core.urlresolvers import reverse_lazy
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import Permission
from django.contrib.auth.views import login
from django.contrib.auth.decorators import login_required
from django.contrib.auth.decorators import permission_required
from author import views
def author_required(function=None, login_url=None):
author_permission = Permission(
content_type=ContentType.objects.get(app_label='game',
model='task'),
codename='add_task',
)
actual_decorator = permission_required(author_permission,
login_url=login_url)
if function is None:
return actual_decorator(login_required)
return actual_decorator(login_required(function))
urlpatterns = patterns(
'',
url(r'^$', RedirectView.as_view(url=reverse_lazy('author-login'),
permanent=False)),
url(r'^login/$',
login,
{'template_name': 'author/login.html'},
name='author-login'),
url(r'^panel/$',
author_required(function=views.PanelView.as_view(),
login_url=reverse_lazy('author-login')),
name='panel'),
)
|
Add permission checks in author app
|
Add permission checks in author app
|
Python
|
bsd-3-clause
|
stefantsov/blackbox3,stefantsov/blackbox3,stefantsov/blackbox3
|
69029424035c8506a46b94422fd347871ee51269
|
accelerator/tests/factories/program_family_factory.py
|
accelerator/tests/factories/program_family_factory.py
|
# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from __future__ import unicode_literals
import swapper
from factory import (
DjangoModelFactory,
Sequence,
)
from accelerator.apps import AcceleratorConfig
ProgramFamily = swapper.load_model(AcceleratorConfig.name, 'ProgramFamily')
class ProgramFamilyFactory(DjangoModelFactory):
class Meta:
model = ProgramFamily
name = Sequence(lambda n: "Program Family {0}".format(n))
short_description = 'A program family for testing'
url_slug = Sequence(lambda n: "pf{0}".format(n))
email_domain = Sequence(lambda n: "pf{0}.accelerator.org".format(n))
phone_number = "617-555-1212"
physical_address = "Boston"
is_open_for_startups = True
is_open_for_experts = True
|
# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from __future__ import unicode_literals
import swapper
from factory import (
DjangoModelFactory,
Sequence,
)
from accelerator.apps import AcceleratorConfig
ProgramFamily = swapper.load_model(AcceleratorConfig.name, 'ProgramFamily')
class ProgramFamilyFactory(DjangoModelFactory):
class Meta:
model = ProgramFamily
name = Sequence(lambda n: "Program Family {0}".format(n))
short_description = 'A program family for testing'
url_slug = Sequence(lambda n: "pf{0}".format(n))
email_domain = Sequence(lambda n: "pf{0}.accelerator.org".format(n))
phone_number = "617-555-1212"
physical_address = "Boston"
is_open_for_startups = True
is_open_for_experts = True
use_site_tree_side_nav = False
|
Add new field to factory
|
Add new field to factory
|
Python
|
mit
|
masschallenge/django-accelerator,masschallenge/django-accelerator
|
3860e0a9ac99c4c4422630e4133bf3b35927bf41
|
conf_site/sponsorship/tests/__init__.py
|
conf_site/sponsorship/tests/__init__.py
|
import factory
from symposion.schedule.tests.factories import ConferenceFactory
from symposion.sponsorship.models import SponsorLevel, Sponsor
class SponsorLevelFactory(factory.django.DjangoModelFactory):
conference = factory.SubFactory(ConferenceFactory)
name = factory.Faker("color_name")
order = factory.Faker("pyint")
cost = factory.Faker("pyint")
description = factory.Faker("paragraph")
class Meta:
model = SponsorLevel
class SponsorFactory(factory.django.DjangoModelFactory):
name = factory.Faker("company")
display_url = factory.Faker("uri")
external_url = factory.Faker("uri")
contact_name = factory.Faker("name")
contact_email = factory.Faker("company_email")
level = factory.SubFactory(SponsorLevelFactory)
class Meta:
model = Sponsor
|
import factory
from symposion.schedule.tests.factories import ConferenceFactory
from symposion.sponsorship.models import SponsorLevel, Sponsor
class SponsorLevelFactory(factory.django.DjangoModelFactory):
conference = factory.SubFactory(ConferenceFactory)
name = factory.Faker("color_name")
order = factory.Faker("pyint")
cost = factory.Faker("pyint")
description = factory.Faker("paragraph")
class Meta:
model = SponsorLevel
class SponsorFactory(factory.django.DjangoModelFactory):
name = factory.Faker("company")
display_url = factory.Faker("uri")
external_url = factory.Faker("uri")
contact_name = factory.Faker("name")
contact_email = factory.Faker("company_email")
level = factory.SubFactory(SponsorLevelFactory)
active = factory.Faker("boolean")
class Meta:
model = Sponsor
|
Allow SponsorFactory to create active sponsors.
|
Allow SponsorFactory to create active sponsors.
|
Python
|
mit
|
pydata/conf_site,pydata/conf_site,pydata/conf_site
|
58321c0471f10169ea5c0f705cdb35825036d77f
|
cat_leds.py
|
cat_leds.py
|
#!/usr/bin/env python
"""Display standard input on LEDs."""
import sys
import RPi.GPIO as GPIO
from seven_segment import print_leds
pins = [17, 23, 24, 22, 27, 25, 5]
GPIO.setmode(GPIO.BCM)
GPIO.setup(pins, GPIO.OUT)
pipe_contents = sys.stdin.read()
print_leds(pipe_contents, pins)
GPIO.cleanup()
|
#!/usr/bin/env python
"""Display standard input on LEDs."""
import sys
import RPi.GPIO as GPIO
from seven_segment import print_leds
pins = [6, 19, 5, 13, 20, 12, 16]
GPIO.setmode(GPIO.BCM)
GPIO.setup(pins, GPIO.OUT)
pipe_contents = sys.stdin.read()
print_leds(pipe_contents, pins, 1/2.0)
GPIO.cleanup()
|
Set up for my new 7 seg display.
|
Set up for my new 7 seg display.
|
Python
|
mit
|
zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie
|
caaf8e40c99f410b09580b81c4fa7a094395319c
|
test/test_progress_bar.py
|
test/test_progress_bar.py
|
import pytest
from downloads.download import _progress_bar
@pytest.mark.parametrize(
"current,block_size,total_size",
[
(
100,
32,
100 * 32,
),
(
75,
32,
100 * 32,
),
(
50,
32,
100 * 32,
),
(
25,
32,
100 * 32,
),
(
0,
32,
100 * 32,
),
],
)
def test_progress_bar(current, block_size, total_size):
bar = _progress_bar(
current=current, block_size=block_size, total_size=total_size
)
assert bar.count("#") == current
assert bar.split()[-1] == f"{current:.1f}%"
|
import pytest
from downloads.download import _progress_bar
@pytest.mark.parametrize(
"current,block_size,total_size",
[
(
100,
32,
100 * 32,
),
(
75,
32,
100 * 32,
),
(
50,
32,
100 * 32,
),
(
25,
32,
100 * 32,
),
(
0,
32,
100 * 32,
),
],
)
def test_progress_bar(current, block_size, total_size):
bar = _progress_bar(
current=current, block_size=block_size, total_size=total_size
)
assert bar.count("#") == current
assert bar.split()[-1] == f"{current:.1f}%"
assert len(bar) == 100 + 8
|
Test width of progress bar
|
Test width of progress bar
|
Python
|
mit
|
audy/downloads
|
f30d2f5b09ae3a62bff02165a173af6e25345a60
|
trevornet/nets/pynet/aimath.py
|
trevornet/nets/pynet/aimath.py
|
import math
import sys
def sigmoid(x):
try:
val = 1/(1 + math.exp(-x))
except OverflowError:
val = sys.float_info.max
return val
def sigmoidprime(x):
return (1 - sigmoid(x))*sigmoid(x)
|
import math
import sys
def sigmoid(x):
try:
val = 1/(1 + math.exp(-x))
except OverflowError:
val = 0.
return val
def sigmoidprime(x):
return (1 - sigmoid(x))*sigmoid(x)
|
Fix mistake in sigmoid function
|
Fix mistake in sigmoid function
|
Python
|
mit
|
tmerr/trevornet
|
4510a4a22965d002bd41293fd8fe629c8285800d
|
tests/test_errors.py
|
tests/test_errors.py
|
# coding: pyxl
import pytest
from pyxl.codec.register import pyxl_decode
from pyxl.codec.parser import ParseError
def test_malformed_if():
with pytest.raises(ParseError):
pyxl_decode(b"""
<frag>
<if cond="{true}">foo</if>
this is incorrect!
<else>bar</else>
</frag>""")
def test_multiple_else():
with pytest.raises(ParseError):
pyxl_decode(b"""
<frag>
<if cond="{true}">foo</if>
<else>bar</else>
<else>baz</else>
</frag>""")
def test_nested_else():
with pytest.raises(ParseError):
pyxl_decode(b"""
<frag>
<if cond="{true}">foo</if>
<else><else>bar</else></else>
</frag>""")
|
# coding: pyxl
import pytest
from pyxl.codec.register import pyxl_decode
from pyxl.codec.parser import ParseError
from pyxl.codec.html_tokenizer import BadCharError
def test_malformed_if():
with pytest.raises(ParseError):
pyxl_decode(b"""
<frag>
<if cond="{true}">foo</if>
this is incorrect!
<else>bar</else>
</frag>""")
def test_multiple_else():
with pytest.raises(ParseError):
pyxl_decode(b"""
<frag>
<if cond="{true}">foo</if>
<else>bar</else>
<else>baz</else>
</frag>""")
def test_nested_else():
with pytest.raises(ParseError):
pyxl_decode(b"""
<frag>
<if cond="{true}">foo</if>
<else><else>bar</else></else>
</frag>""")
def test_bad_char():
with pytest.raises(BadCharError):
pyxl_decode(b"""<_bad_element></lm>""")
|
Add test for BadCharError exception.
|
Add test for BadCharError exception.
|
Python
|
apache-2.0
|
pyxl4/pyxl4
|
25e4730c4614a26cdecd60eb0846e69578353d2c
|
tomcrypt/__init__.py
|
tomcrypt/__init__.py
|
import os
import ctypes
# We need to manually load the _core SO the first time so that we can specify
# that it use the RTLD_GLOBAL flag. Otherwise (when not on a Mac) the runtime
# linker will not be able to resolve undefined symbols in the other modules.
_core_handle = ctypes.CDLL(
os.path.join(os.path.dirname(os.path.abspath(__file__)), '_core.so'),
ctypes.RTLD_GLOBAL
)
class Error(ValueError):
def __init__(self, *args, **kwargs):
self.code = kwargs.get('code', -1)
ValueError.__init__(self, *args)
class LibError(Error):
pass
|
import ctypes
class Error(ValueError):
def __init__(self, *args, **kwargs):
self.code = kwargs.get('code', -1)
ValueError.__init__(self, *args)
class LibError(Error):
pass
# We need to manually load the _core the first time so that we can specify
# that it use the RTLD_GLOBAL flag. Otherwise (when not on a Mac) the runtime
# linker will not be able to resolve undefined symbols in the other modules.
# This must also be done after the above exceptions are defined so that they
# are availible to the core.
from . import _core
ctypes.PyDLL(_core.__file__, mode=ctypes.RTLD_GLOBAL)
|
Use import to locate _core
|
Linking: Use import to locate _core
Related to #8
|
Python
|
bsd-3-clause
|
mikeboers/PyTomCrypt,mikeboers/PyTomCrypt,mikeboers/PyTomCrypt
|
b4ce1a023bf047524f40ac63f40d46a70c8f6f77
|
src/dirtyfields/dirtyfields.py
|
src/dirtyfields/dirtyfields.py
|
# Adapted from http://stackoverflow.com/questions/110803/dirty-fields-in-django
from django.db.models.signals import post_save
class DirtyFieldsMixin(object):
def __init__(self, *args, **kwargs):
super(DirtyFieldsMixin, self).__init__(*args, **kwargs)
post_save.connect(reset_state, sender=self.__class__,
dispatch_uid='%s-DirtyFieldsMixin-sweeper' % self.__class__.__name__)
reset_state(sender=self.__class__, instance=self)
def _as_dict(self):
return dict([(f.name, getattr(self, f.name)) for f in self._meta.local_fields if not f.rel])
def get_dirty_fields(self):
new_state = self._as_dict()
return dict([(key, value) for key, value in self._original_state.iteritems() if value != new_state[key]])
def is_dirty(self):
# in order to be dirty we need to have been saved at least once, so we
# check for a primary key and we need our dirty fields to not be empty
if not self.pk:
return True
return {} != self.get_dirty_fields()
def reset_state(sender, instance, **kwargs):
instance._original_state = instance._as_dict()
|
# Adapted from http://stackoverflow.com/questions/110803/dirty-fields-in-django
from django.db.models.signals import post_save
class DirtyFieldsMixin(object):
def __init__(self, *args, **kwargs):
super(DirtyFieldsMixin, self).__init__(*args, **kwargs)
post_save.connect(reset_state, sender=self.__class__,
dispatch_uid='%s-DirtyFieldsMixin-sweeper' % self.__class__.__name__)
reset_state(sender=self.__class__, instance=self)
def _as_dict(self):
return dict([(f.name, getattr(self, f.name)) for f in self._meta.local_fields])
def get_dirty_fields(self):
new_state = self._as_dict()
return dict([(key, value) for key, value in self._original_state.iteritems() if value != new_state[key]])
def is_dirty(self):
# in order to be dirty we need to have been saved at least once, so we
# check for a primary key and we need our dirty fields to not be empty
if not self.pk:
return True
return {} != self.get_dirty_fields()
def reset_state(sender, instance, **kwargs):
instance._original_state = instance._as_dict()
|
Check primary keys for foreign key and one-to-one fields
|
Check primary keys for foreign key and one-to-one fields
|
Python
|
bsd-3-clause
|
stanhu/django-dirtyfields
|
67ea33e9e46ff321c2915d46a354d66756ff9c2b
|
main.py
|
main.py
|
#!/usr/bin/env python
"""
This is the main file for a script that reads info off a game on kongregate.com and acts upon it.
"""
# import line/s for builtin modules
# import pyautogui
__author__ = "Alex Flores Escarcega"
__copyright__ = "Copyright 2007, Alex Flores Escarcega"
__credits__ = ["Alex Flores Escarcega"]
__license__ = "MIT"
__version__ = "1.0.1"
__maintainer__ = "Alex Flores Escarcega"
__email__ = "alex.floresescar@gmail.com"
__status__ = "Development"
# game located at http://www.kongregate.com/games/Volch/endless-expansion?haref=HP_TGTM_endless-expansion
def main():
"""
Just now runs main()
inputs: none
outputs: none
"""
pass
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
"""
This is the main file for a script that reads info off a game on kongregate.com and acts upon it.
"""
# import line/s for builtin modules
# import pyautogui
__author__ = "Alex Flores Escarcega"
__copyright__ = "Copyright 2007, Alex Flores Escarcega"
__credits__ = ["Alex Flores Escarcega"]
__license__ = "MIT"
__version__ = "1.0.1"
__maintainer__ = "Alex Flores Escarcega"
__email__ = "alex.floresescar@gmail.com"
__status__ = "Development"
# game located at http://www.kongregate.com/games/Volch/endless-expansion?haref=HP_TGTM_endless-expansion
def find_game_region():
"""
Finds the top left coordinates of the game by substracting (700 + 300) from the location of the game.
The 300 comes from the width of the top_right_corner.png file that is used to locate the top right corner.
Input: None.
Output: the top left coordinates. Two elements in a tuple.
coors = pyautogui.locateOnScreen("images/top_right_corner.png")
return (coors[0],coors[1])
def main():
"""
Just now runs main()
inputs: none
outputs: none
"""
print find_game_region()
if __name__ == "__main__":
main()
|
Add method to find the coordinates of game
|
Add method to find the coordinates of game
|
Python
|
mit
|
hydrophilicsun/Automating-Endless-Expansion
|
8b6daefb98c20ddb288604f7aa2202c21df2c526
|
main.py
|
main.py
|
__author__ = 'alex'
print 45
print 54
|
_author__ = 'alex'
import sys
import xml.dom.minidom as dom
def get_Res_Matrix(length,nodes,nets_d,elem_type):
Res = [[[] for j in range(length)] for i in range(length)]
for i in range(nodes.length):
if nodes[i].nodeType != elem_type: continue
name = nodes[i].nodeName
if name == "diode":
net_from, net_to = nets_d[(int)(nodes[i].getAttribute("net_from"))], nets_d[(int)(nodes[i].getAttribute("net_to"))]
res, rev_res = (float)(nodes[i].getAttribute("resistance")), (float)(nodes[i].getAttribute("reverse_resistance"))
Res[net_from][net_to].append(res)
Res[net_to][net_from].append(rev_res)
else:
if name == "capactor" or name == "resistor":
net_from, net_to = nets_d[(int)(nodes[i].getAttribute("net_from"))], nets_d[(int)(nodes[i].getAttribute("net_to"))]
res = (float)(nodes[i].getAttribute("resistance"))
Res[net_from][net_to].append(res)
Res[net_to][net_from].append(res)
return Res
def parse_xml():
elem_type = dom.Element.ELEMENT_NODE
doc = dom.parse(sys.argv[1])
#parse xml
for node in doc.childNodes:
if node.nodeName == "schematics": break
nodes = node.childNodes
nets_d = {}
for i in range(nodes.length):
if nodes[i].nodeType != elem_type: continue
if nodes[i].nodeName != "net": continue
nets_d[(int)(nodes[i].getAttribute("id"))] = 0
length = 0
for x in sorted(nets_d):
nets_d[x] = length
length += 1
return nodes,nets_d,elem_type,length
if __name__ == "__main__":
if len(sys.argv) <> 3:
print("check the arguments")
exit()
nodes,nets_d,elem_type,length = parse_xml()
Res = get_Res_Matrix(length,nodes,nets_d,elem_type)
print Res
|
Read and parse xml to Matrix
|
Read and parse xml to Matrix
|
Python
|
mit
|
BaydinAlexey/proglangs_baydin,BaydinAlexey/proglangs_baydin
|
fa2fb3387912474eff2b6c2a14d6304fcf5cd1f8
|
erasmus/cogs/bible/testing_server_preferences_group.py
|
erasmus/cogs/bible/testing_server_preferences_group.py
|
from __future__ import annotations
from typing import TYPE_CHECKING
from botus_receptus.app_commands import test_guilds_only
from discord import app_commands
from .daily_bread.daily_bread_preferences_group import DailyBreadPreferencesGroup
if TYPE_CHECKING:
from ...erasmus import Erasmus
from ...l10n import GroupLocalizer
from .types import ParentCog
@app_commands.default_permissions(administrator=True)
@app_commands.guild_only()
@test_guilds_only
class TestingServerPreferencesGroup(
app_commands.Group, name='test-server-prefs', description='Testing group'
):
bot: Erasmus
localizer: GroupLocalizer
daily_bread = DailyBreadPreferencesGroup()
def initialize_from_parent(self, parent: ParentCog, /) -> None:
self.bot = parent.bot
self.localizer = parent.localizer.for_group(self)
self.daily_bread.initialize_from_parent(self)
|
from __future__ import annotations
from typing import TYPE_CHECKING
from botus_receptus.app_commands import test_guilds_only
from discord import app_commands
from .daily_bread.daily_bread_preferences_group import DailyBreadPreferencesGroup
if TYPE_CHECKING:
from ...erasmus import Erasmus
from ...l10n import GroupLocalizer
from .types import ParentCog
@app_commands.default_permissions(administrator=True)
@app_commands.guild_only()
@test_guilds_only
class TestingServerPreferencesGroup(
app_commands.Group, name='test-server-prefs', description='Testing group'
):
bot: Erasmus
localizer: GroupLocalizer
daily_bread = DailyBreadPreferencesGroup()
def initialize_from_parent(self, parent: ParentCog, /) -> None:
self.bot = parent.bot
self.localizer = parent.localizer.for_group('serverprefs')
self.daily_bread.initialize_from_parent(self)
|
Use serverprefs localizer for TestingServerPreferencesGroup
|
Use serverprefs localizer for TestingServerPreferencesGroup
|
Python
|
bsd-3-clause
|
bryanforbes/Erasmus
|
d044576e08e06dd6a2c68ab7868c281cd2979764
|
wsgi.py
|
wsgi.py
|
from app import create_app
from credstash import getAllSecrets
import os
default_env_file = '/home/ubuntu/environment'
environment = 'live'
if os.path.isfile(default_env_file):
with open(default_env_file, 'r') as environment_file:
environment = environment_file.readline().strip()
# on aws get secrets and export to env
os.environ.update(getAllSecrets(region="eu-west-1"))
from config import configs
os.environ['NOTIFY_ADMIN_ENVIRONMENT'] = configs[environment]
application = create_app()
if __name__ == "__main__":
application.run()
|
from credstash import getAllSecrets
import os
default_env_file = '/home/ubuntu/environment'
environment = 'live'
if os.path.isfile(default_env_file):
with open(default_env_file, 'r') as environment_file:
environment = environment_file.readline().strip()
# on aws get secrets and export to env
os.environ.update(getAllSecrets(region="eu-west-1"))
from config import configs
os.environ['NOTIFY_ADMIN_ENVIRONMENT'] = configs[environment]
from app import create_app
application = create_app()
if __name__ == "__main__":
application.run()
|
Order of imports means the config doesn't get set up properly
|
Order of imports means the config doesn't get set up properly
|
Python
|
mit
|
gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin,gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin
|
f3ec0593bb67db25c4f5af4b3b00d82d5e4e0f04
|
csv2ofx/mappings/gls.py
|
csv2ofx/mappings/gls.py
|
from operator import itemgetter
mapping = {
'has_header': True,
'currency': 'EUR',
'delimiter': ';',
'bank': 'GLS Bank',
'account': itemgetter('Kontonummer'),
# Chop up the dotted German date format and put it in ridiculous M/D/Y order
'date': lambda r:
r['Buchungstag'][3:5] + '/' + r['Buchungstag'][:2] + '/' +
r['Buchungstag'][-4:],
# locale.atof does not actually know how to deal with German separators.
# So we do it the crude way
'amount': lambda r: r['Betrag'].replace('.', '').replace(',', '.'),
'desc': itemgetter('Buchungstext'),
'payee': itemgetter('Auftraggeber/Empfänger'),
}
|
from __future__ import absolute_import
from operator import itemgetter
mapping = {
'has_header': True,
'currency': 'EUR',
'delimiter': ';',
'bank': 'GLS Bank',
'account': itemgetter('Kontonummer'),
# Chop up the dotted German date format and put it in ridiculous M/D/Y order
'date': lambda r:
r['Buchungstag'][3:5] + '/' + r['Buchungstag'][:2] + '/' +
r['Buchungstag'][-4:],
# locale.atof does not actually know how to deal with German separators.
# So we do it the crude way
'amount': lambda r: r['Betrag'].replace('.', '').replace(',', '.'),
'desc': itemgetter('Buchungstext'),
'payee': itemgetter('Auftraggeber/Empfänger'),
}
|
Add import for other python versions
|
Add import for other python versions
|
Python
|
mit
|
reubano/csv2ofx,reubano/csv2ofx
|
3e52078450a4205fdfaa2d4ba2448bce3d3d19d7
|
gpio_components/input_devices.py
|
gpio_components/input_devices.py
|
from RPi import GPIO
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
class InputDevice(object):
def __init__(self, pin):
self.pin = pin
GPIO.setup(pin, GPIO.IN, GPIO.PUD_UP)
def is_pressed(self):
return GPIO.input(self.pin) == 0
class Button(InputDevice):
pass
|
from RPi import GPIO
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
class InputDevice(object):
def __init__(self, pin=None):
if pin is None:
raise InputDeviceError('No GPIO pin number given')
self.pin = pin
self.pull = GPIO.PUD_UP
self.edge = GPIO.FALLING
self.active = 0
self.inactive = 1
GPIO.setup(pin, GPIO.IN, self.pull)
def is_active(self):
return GPIO.input(self.pin) == self.active
def wait_for_input(self):
GPIO.wait_for_edge(self.pin, self.edge)
def add_callback(self, callback=None, bouncetime=1000):
if callback is None:
raise InputDeviceError('No callback function given')
GPIO.add_event_detect(self.pin, self.edge, callback, bouncetime)
def remove_callback(self):
GPIO.remove_event_detect(self.pin)
class Button(InputDevice):
pass
class InputDeviceError(Exception):
pass
|
Add wait_for_input and add_callback methods to InputDevice
|
Add wait_for_input and add_callback methods to InputDevice
|
Python
|
bsd-3-clause
|
lurch/python-gpiozero,waveform80/gpio-zero,agiledata/python-gpiozero,Gadgetoid/python-gpiozero,cymplecy/python-gpioone,RPi-Distro/python-gpiozero,MrHarcombe/python-gpiozero
|
0a9f378784e8c30cdf16d4d1caaf3b98f112bb90
|
nap/meta.py
|
nap/meta.py
|
from __future__ import unicode_literals
class Meta(object):
'''Generic container for Meta classes'''
def __new__(cls, meta=None):
# Return a new class base on ourselves
attrs = dict(
(name, getattr(meta, name))
for name in dir(meta)
if not name[0] == '_'
)
return object.__new__(type(str('Meta'), (cls,), attrs))
|
from __future__ import unicode_literals
class Meta(object):
'''Generic container for Meta classes'''
def __new__(cls, meta=None):
# Return a new class base on ourselves
attrs = dict(
(name, getattr(meta, name))
for name in dir(meta)
if not name[0] == '_' and hasattr(cls, name)
)
return object.__new__(type(str('Meta'), (cls,), attrs))
|
Make Meta class enforce only known properties
|
Make Meta class enforce only known properties
|
Python
|
bsd-3-clause
|
MarkusH/django-nap,limbera/django-nap
|
ea5bfe240cc349144e089f606534726863f2c21b
|
media/sites/lapinkansa.py
|
media/sites/lapinkansa.py
|
import requests
from bs4 import BeautifulSoup
def nouda( url , out ):
r = requests.get( url )
r.encoding = 'UTF-8'
soup = BeautifulSoup( r.text )
teksti = soup.find_all( class_ ='post-meta' )
for string in teksti[0].stripped_strings:
out.write( string.encode('utf8') + ' ' )
if __name__ == '__main__':
nouda("http://www.lapinkansa.fi/Lappi/1194944697007/artikkeli/kaunis+tykky+voi+olla+kavala+puille.html", file('lapinkansa.txt', 'w'))
|
import requests
from bs4 import BeautifulSoup
def nouda( url , out ):
r = requests.get( url )
r.encoding = 'UTF-8'
soup = BeautifulSoup( r.text )
teksti = soup.find_all( class_ = 'news-excerpt' )
for string in teksti[0].stripped_strings:
out.write( string.encode('utf8') + ' ' )
if __name__ == '__main__':
nouda("http://www.lapinkansa.fi/Lappi/1194944697007/artikkeli/kaunis+tykky+voi+olla+kavala+puille.html", file('lapinkansa.txt', 'w'))
|
Fix to new page layout
|
Fix to new page layout
|
Python
|
mit
|
HIIT/digivaalit-2015,HIIT/digivaalit-2015,HIIT/digivaalit-2015
|
b21d63d8df7d17e150702c531ef449f409100eff
|
wot_clan_battles/views_auth.py
|
wot_clan_battles/views_auth.py
|
import six
from django.http import HttpResponseRedirect
from django.shortcuts import reverse
from django.conf import settings
from openid.consumer import consumer
import wargaming
wot = wargaming.WoT(settings.WARGAMING_KEY, language='ru', region='ru')
def auth_callback(request):
oidconsumer = consumer.Consumer(request.session, None)
url = 'http://%s%s' % (request.META['HTTP_HOST'], reverse('auth_callback'))
result = oidconsumer.complete(request.GET, url)
if result.status == consumer.SUCCESS:
identifier = result.getDisplayIdentifier()
print identifier
user_id, username = six.moves.urllib_parse.urlparse(identifier).path.split('/')[2].split('-')
request.session['user_id'] = user_id
request.session['username'] = username
request.session['user_clan_id'] = wot.account.info(account_id=user_id)[str(user_id)]['clan_id']
return HttpResponseRedirect('/')
def auth_login(request):
oidconsumer = consumer.Consumer(dict(request.session), None)
openid_request = oidconsumer.begin(u'http://ru.wargaming.net/id/openid/')
trust_root = 'http://%s' % request.META['HTTP_HOST']
return_to = '%s%s' % (trust_root, reverse('auth_callback'))
redirect_to = openid_request.redirectURL(trust_root, return_to, immediate=False)
return HttpResponseRedirect(redirect_to)
|
import six
from django.http import HttpResponseRedirect
from django.shortcuts import reverse
from django.conf import settings
from openid.consumer import consumer
import wargaming
wot = wargaming.WoT(settings.WARGAMING_KEY, language='ru', region='ru')
def auth_callback(request):
oidconsumer = consumer.Consumer(request.session, None)
url = 'http://%s%s' % (request.META['HTTP_HOST'], reverse('auth_callback'))
result = oidconsumer.complete(request.GET, url)
if result.status == consumer.SUCCESS:
identifier = result.getDisplayIdentifier()
user_id, username = six.moves.urllib_parse.urlparse(identifier).path.split('/')[2].split('-')
request.session['user_id'] = user_id
request.session['username'] = username
request.session['user_clan_id'] = wot.account.info(account_id=user_id)[str(user_id)]['clan_id']
return HttpResponseRedirect('/')
def auth_login(request):
oidconsumer = consumer.Consumer(dict(request.session), None)
openid_request = oidconsumer.begin(u'http://ru.wargaming.net/id/openid/')
trust_root = 'http://%s' % request.META['HTTP_HOST']
return_to = '%s%s' % (trust_root, reverse('auth_callback'))
redirect_to = openid_request.redirectURL(trust_root, return_to, immediate=False)
return HttpResponseRedirect(redirect_to)
|
Remove debug print from view
|
Remove debug print from view
|
Python
|
mit
|
monester/wot-battles,monester/wot-battles,monester/wot-battles,monester/wot-battles
|
69a417c421d774c4998ae721b85211a60757ce85
|
fit_blackbody.py
|
fit_blackbody.py
|
import numpy as np
from scipy.optimize import curve_fit
from astropy import units as u
from planck import planck_function
def bb_flux(wavelength, temperature, angular_radius):
bb_flux = (np.pi) * planck_function(wavelength, temperature) * (angular_radius)**2
return bb_flux
def bb_flux_nounits(wavelength, temperature, angular_radius):
flux = bb_flux(wavelength, temperature, angular_radius)
return flux.value
def calculate_chisq(y_data, y_data_uncertainties, x_data, func, parameters):
chisq = np.sum(((y_data - func(x_data, *parameters))/y_data_uncertainties)**2)
return chisq
def bb_fit_parameters(wavelengths, fluxes, flux_uncertainties):
popt, pcov = curve_fit(bb_flux_nounits, wavelengths, fluxes, p0=[5000, 1.0e-10])
temperature = popt[0]
angular_radius = popt[1]
perr = np.sqrt(np.diag(pcov))
chisq = calculate_chisq(fluxes, flux_uncertainties, wavelengths, bb_flux_nounits, popt)
return temperature, angular_radius, perr
|
import numpy as np
from scipy.optimize import curve_fit
from astropy import units as u
from planck import planck_function, dplanck_dT
def bb_flux(wavelength, temperature, angular_radius):
bb_flux = (np.pi) * planck_function(wavelength, temperature) * (angular_radius)**2
return bb_flux
def bb_flux_nounits(wavelength, temperature, angular_radius):
flux = bb_flux(wavelength, temperature, angular_radius)
return flux.value
def dBB_dT(wavelength, temperature, angular_radius):
dBB_dT = (np.pi) * dplanck_dT(wavelength, temperature) * (angular_radius)**2
return dBB_dT
def dBB_dT_nounits(wavelength, temperature, angular_radius):
dBB_dT_nounits = dBB_dT(wavelength, temperature, angular_radius)
return dBB_dT_nounits
def calculate_chisq(y_data, y_data_uncertainties, x_data, func, parameters):
chisq = np.sum(((y_data - func(x_data, *parameters))/y_data_uncertainties)**2)
return chisq
def bb_fit_parameters(wavelengths, fluxes, flux_uncertainties):
popt, pcov = curve_fit(bb_flux_nounits, wavelengths, fluxes, p0=[5000, 1.0e-10])
temperature = popt[0]
angular_radius = popt[1]
perr = np.sqrt(np.diag(pcov))
return temperature, angular_radius, perr
|
Add derivative of BB flux with temperature
|
Add derivative of BB flux with temperature
Added functions which calculate the derivative of the blackbody flux
with temperature (with units and without units)
|
Python
|
mit
|
JALusk/SNoBoL,JALusk/SuperBoL,JALusk/SNoBoL
|
fcc4eb6feaf05c950bcb0cb3f5861e631dacd8d4
|
migrations/versions/16ef0d8ffae1_add_user_roles.py
|
migrations/versions/16ef0d8ffae1_add_user_roles.py
|
"""Add user roles
Revision ID: 16ef0d8ffae1
Revises: 8acb1453abb
Create Date: 2015-03-17 15:45:05.406297
"""
# revision identifiers, used by Alembic.
revision = '16ef0d8ffae1'
down_revision = '8acb1453abb'
from alembic import op
import sqlalchemy as sa
from findaconf import db
from findaconf.models import Group
def upgrade():
roles = ['user', 'moderator', 'admin']
[db.session.add(Group(title=role)) for role in roles]
db.session.commit()
def downgrade():
[db.session.delete(role) for role in Group.query.all()]
db.session.commit()
|
"""Add user roles
Revision ID: 16ef0d8ffae1
Revises: 8acb1453abb
Create Date: 2015-03-17 15:45:05.406297
"""
# revision identifiers, used by Alembic.
revision = '16ef0d8ffae1'
down_revision = '8acb1453abb'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import table, column
from findaconf.models import Group
def upgrade():
roles = ['user', 'moderator', 'admin']
data = [{'title': r} for r in roles]
# Create an ad-hoc table to use for the insert statement.
group_table = table('group',
column('title', sa.String),
)
# Insert data.
op.bulk_insert(group_table, data)
def downgrade():
op.execute(Group.__table__.delete())
|
Refactor data migration for Group model
|
Refactor data migration for Group model
Undelying explanation as in commit f580a63909166efccc8030eb36d8b2fe697f7236
|
Python
|
mit
|
koorukuroo/findaconf,cuducos/findaconf,koorukuroo/findaconf,cuducos/findaconf,cuducos/findaconf,koorukuroo/findaconf
|
eddf3ec729fd385fd3ec2ec425db1d777a302e46
|
tensorprob/distributions/exponential.py
|
tensorprob/distributions/exponential.py
|
import tensorflow as tf
from .. import config
from ..distribution import Distribution
@Distribution
def Exponential(lambda_, name=None):
X = tf.placeholder(config.dtype, name=name)
Distribution.logp = tf.log(lambda_) - lambda_*X
def cdf(lim):
return tf.constant(1, dtype=config.dtype) - tf.exp(-lambda_*lim)
Distribution.integral = lambda lower, upper: cdf(upper) - cdf(lower)
return X
|
import tensorflow as tf
from .. import config
from ..distribution import Distribution
@Distribution
def Exponential(lambda_, name=None):
X = tf.placeholder(config.dtype, name=name)
Distribution.logp = tf.log(lambda_) - lambda_*X
def integral(lower, upper):
return tf.exp(-lambda_*lower) - tf.exp(-lambda_*upper)
Distribution.integral = integral
return X
|
Correct integral used for Exponential distributon
|
Correct integral used for Exponential distributon
|
Python
|
mit
|
ibab/tensorprob,tensorprob/tensorprob,ibab/tensorfit
|
56902792b2a7fdd25bd64781e9e98a63db2ee348
|
all/__init__.py
|
all/__init__.py
|
###----------------------------------------------------------------------------
from .help import HyperHelpCommand, HyperHelpNavigateCommand
from .help import HyperHelpListener
###----------------------------------------------------------------------------
|
###----------------------------------------------------------------------------
__version_tuple = (1, 0, 0)
__version__ = ".".join([str(num) for num in __version_tuple])
# These are exposed to Sublime to implement the core of the help system.
from .help import HyperHelpCommand, HyperHelpNavigateCommand
from .help import HyperHelpListener
# These are exposed to packages that may want to interface with the hyperhelp
# core for use in their own packages.
from .operations import package_help_scan
###----------------------------------------------------------------------------
def version():
"""
Get the currently installed version of hyperhelp as a tuple.
"""
return __version_tuple
###----------------------------------------------------------------------------
|
Include a package version number
|
Include a package version number
This includes in the core package the concept of a version number that
underlying code could use to determine what version of the core it is
interfacing with.
This is only really needed for packages that get at the underlying
core code in hyperhelp, which at the moment would only be the
companion HyperHelpAuthor package.
To this end (as an experiment) the code for loading in the help index
files is exposed to anyone that wants to import it as a test for how
this will eventually work. In particular, the idea is to put all of
the symbols meant to be accessible to outside code into the
hyperhelp.all module namespace (whicn is unfortunate but there seems
to be no satisfactory way around it).
|
Python
|
mit
|
OdatNurd/hyperhelp
|
28d48dd681dd20d839aa0748ee478947a0eb3da0
|
hiss/__init__.py
|
hiss/__init__.py
|
import pygame
class Sprite():
def __init__(self, name):
self.name = name
self.costumes = []
def addCostume(self, costumePath):
costume = pygame.image.load(costumePath)
self.costumes.append(costume)
|
import pygame, time, random
class Stage():
def __init__(self, name):
self.name = name
self.costumes = []
def addCostume(self, costumePath):
costume = pygame.image.load(costumePath)
self.costumes.append(costume)
class Sprite(Stage):
def __init__(self, name):
Stage.__init__(self, name)
self.xpos = 0
self.ypos = 0
def beginGame():
pygame.init()
screen = pygame.display.set_mode((800, 600)) # Add customizable dimensions later on?
caption = pygame.display.set_caption("Hiss Project")
|
Split Sprite and Stage, begin code
|
Split Sprite and Stage, begin code
The way I see it, sprites should be an extension of the stage. After all, the stage is like one big sprite that can't move.
Also, begin code has been added.
|
Python
|
mit
|
PySlither/Slither,PySlither/Slither
|
eef8c0c99a6a02602cc9da75eadf180e65ad55b0
|
collectd_haproxy/__init__.py
|
collectd_haproxy/__init__.py
|
version_info = (1, 0, 1)
__version__ = ".".join(map(str, version_info))
try:
import collectd
collectd_present = True
except ImportError:
collectd_present = False
from .plugin import HAProxyPlugin
if collectd_present:
HAProxyPlugin.register(collectd)
|
try:
import collectd
collectd_present = True
except ImportError:
collectd_present = False
from .plugin import HAProxyPlugin
version_info = (1, 0, 1)
__version__ = ".".join(map(str, version_info))
if collectd_present:
HAProxyPlugin.register(collectd)
|
Fix style test complaing about non-top import.
|
Fix style test complaing about non-top import.
|
Python
|
mit
|
wglass/collectd-haproxy
|
342515edc89d6666a5dc9064de7d2ceea9a7b468
|
accelerator/tests/test_program_cycle.py
|
accelerator/tests/test_program_cycle.py
|
# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from __future__ import unicode_literals
from django.test import TestCase
from accelerator.tests.factories import ProgramCycleFactory
class TestProgramCycle(TestCase):
def test_display_name_no_short_name(self):
cycle = ProgramCycleFactory(short_name=None)
assert cycle.name in str(cycle)
def test_program_cycle_with_open_applications_has_default_application_type(self):
cycle = ProgramCycleFactory()
if (cycle.applications_open and
not cycle.default_application_type):
self.assertRaises("Open applications must have a default application type.")
def test_program_cycle_with_open_applications_has_default_application_type_and_associated_programs(self):
cycle = ProgramCycleFactory()
if (cycle.applications_open and
not cycle.default_application_type
and cycle.programs.exists()):
self.assertRaises("Default application type can’t be removed"
"from the cycle until the program cycle is"
"disassociated with all programs")
|
# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from __future__ import unicode_literals
from django.test import TestCase
from accelerator.tests.factories import ProgramCycleFactory
class TestProgramCycle(TestCase):
def test_display_name_no_short_name(self):
cycle = ProgramCycleFactory(short_name=None)
assert cycle.name in str(cycle)
# def test_program_cycle_has_default_application_type(self):
# cycle = ProgramCycleFactory()
# if (cycle.applications_open and
# not cycle.default_application_type):
# self.assertRaises("Open applications must have"
# "a default application type.")
# def test_program_cycle_cannot_remove_default_application_type(self):
# cycle = ProgramCycleFactory()
# if (cycle.applications_open and
# not cycle.default_application_type
# and cycle.programs.exists()):
# self.assertRaises("Default application type can’t be removed"
# "from the cycle until the program cycle is"
# "disassociated with all programs")
|
Add tests for the functionality added
|
[AC-7049] Add tests for the functionality added
|
Python
|
mit
|
masschallenge/django-accelerator,masschallenge/django-accelerator
|
8e10801ab28b3db02b301c27966aeaabc154329b
|
opps/core/models/image.py
|
opps/core/models/image.py
|
# -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from opps.core.models.publishable import Publishable
class Image(Publishable):
title = models.CharField(_(u"Title"), max_length=140)
slug = models.SlugField(_(u"Slug"), max_length=150, blank=True)
image = models.ImageField(upload_to="uploads/")
description = models.CharField(_(u"Description"), max_length=255,
null=True, blank=True)
credit = models.CharField(_(u"Credit"), max_length=255, blank=False)
def __unicode__(self):
return self.title
class Meta:
app_label = 'core'
|
# -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from opps.core.models.publishable import Publishable
from opps.core.models import Source
class Image(Publishable):
title = models.CharField(_(u"Title"), max_length=140)
slug = models.SlugField(_(u"Slug"), max_length=150, blank=True)
image = models.ImageField(upload_to="uploads/")
description = models.CharField(_(u"Description"), max_length=255,
null=True, blank=True)
source = models.ForeignKey(Source, null=True, blank=True)
def __unicode__(self):
return self.title
class Meta:
app_label = 'core'
|
Change credit to source models
|
Change credit to source models
|
Python
|
mit
|
opps/opps,williamroot/opps,jeanmask/opps,williamroot/opps,opps/opps,williamroot/opps,YACOWS/opps,jeanmask/opps,YACOWS/opps,opps/opps,jeanmask/opps,jeanmask/opps,YACOWS/opps,williamroot/opps,YACOWS/opps,opps/opps
|
6dd1881fc2631602d7e34aede208abf42ed688aa
|
renderMenu.py
|
renderMenu.py
|
#!/usr/bin/env python
import json, os, requests
from awsauth import S3Auth
from datetime import datetime
from pytz import timezone
from flask import Flask, render_template, url_for
from models import app, db, FoodMenu, FoodServices
MIXPANEL_TOKEN = os.environ.get('MIXPANEL_TOKEN')
@app.route('/')
def renderMenu():
nowWaterloo = datetime.now(timezone('America/Toronto'))
foodMenu = FoodMenu.query.order_by(FoodMenu.id.desc()).first().result
menu = json.loads(foodMenu)['response']['data']
serviceInfo = FoodServices.query.order_by(FoodServices.id.desc()).first().result
locations = json.loads(serviceInfo)['response']['data']
return render_template('index.html', menu=menu, locations=locations, nowWaterloo=nowWaterloo, mixpanelToken=MIXPANEL_TOKEN)
if __name__ == "__main__":
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
|
#!/usr/bin/env python
import json, os, requests
from awsauth import S3Auth
from datetime import datetime
from pytz import timezone
from flask import Flask, render_template, url_for, jsonify
from models import app, db, FoodMenu, FoodServices
MIXPANEL_TOKEN = os.environ.get('MIXPANEL_TOKEN')
@app.route('/')
def renderMenu():
nowWaterloo = datetime.now(timezone('America/Toronto'))
foodMenu = FoodMenu.query.order_by(FoodMenu.id.desc()).first().result
menu = json.loads(foodMenu)['response']['data']
serviceInfo = FoodServices.query.order_by(FoodServices.id.desc()).first().result
locations = json.loads(serviceInfo)['response']['data']
return render_template('index.html', menu=menu, locations=locations, nowWaterloo=nowWaterloo, mixpanelToken=MIXPANEL_TOKEN)
@app.route('/foodmenu')
def foodmenu():
foodMenu = FoodMenu.query.order_by(FoodMenu.id.desc()).first().result
menu = json.loads(foodMenu)['response']['data']
return jsonify(menu)
@app.route('/foodservices')
def foodservices():
serviceInfo = FoodServices.query.order_by(FoodServices.id.desc()).first().result
locations = json.loads(serviceInfo)['response']['data']
return jsonify(locations)
if __name__ == "__main__":
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
|
Add API endpoints to serve data in JSON format.
|
Add API endpoints to serve data in JSON format.
|
Python
|
mit
|
alykhank/FoodMenu,alykhank/FoodMenu,alykhank/FoodMenu
|
3b79447e1027cc4965ab3272c34740b82d79c66c
|
tools/perf/benchmarks/start_with_url.py
|
tools/perf/benchmarks/start_with_url.py
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import startup
import page_sets
from telemetry import benchmark
@benchmark.Disabled
class StartWithUrlCold(benchmark.Benchmark):
"""Measure time to start Chrome cold with startup URLs"""
tag = 'cold'
test = startup.StartWithUrl
page_set = page_sets.StartupPagesPageSet
options = {'cold': True,
'pageset_repeat': 5}
@benchmark.Enabled('android', 'has tabs')
class StartWithUrlWarm(benchmark.Benchmark):
"""Measure time to start Chrome warm with startup URLs"""
tag = 'warm'
test = startup.StartWithUrl
page_set = page_sets.StartupPagesPageSet
options = {'warm': True,
'pageset_repeat': 10}
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import startup
import page_sets
from telemetry import benchmark
@benchmark.Enabled('android', 'has tabs')
class StartWithUrlCold(benchmark.Benchmark):
"""Measure time to start Chrome cold with startup URLs"""
tag = 'cold'
test = startup.StartWithUrl
page_set = page_sets.StartupPagesPageSet
options = {'cold': True,
'pageset_repeat': 5}
@benchmark.Enabled('android', 'has tabs')
class StartWithUrlWarm(benchmark.Benchmark):
"""Measure time to start Chrome warm with startup URLs"""
tag = 'warm'
test = startup.StartWithUrl
page_set = page_sets.StartupPagesPageSet
options = {'warm': True,
'pageset_repeat': 10}
|
Enable statup_with_url.cold benchmark on android.
|
Enable statup_with_url.cold benchmark on android.
The benchmark works locally, and collects an important datapoint for our
current optimization work.
Review URL: https://codereview.chromium.org/508303004
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#298526}
|
Python
|
bsd-3-clause
|
axinging/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,fujunwei/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,markYoungH/chromium.src,M4sse/chromium.src,markYoungH/chromium.src,jaruba/chromium.src,M4sse/chromium.src,chuan9/chromium-crosswalk,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,ltilve/chromium,fujunwei/chromium-crosswalk,Chilledheart/chromium,mohamed--abdel-maksoud/chromium.src,dednal/chromium.src,M4sse/chromium.src,dednal/chromium.src,axinging/chromium-crosswalk,M4sse/chromium.src,Pluto-tv/chromium-crosswalk,ltilve/chromium,Just-D/chromium-1,Chilledheart/chromium,axinging/chromium-crosswalk,dednal/chromium.src,dushu1203/chromium.src,fujunwei/chromium-crosswalk,dednal/chromium.src,Pluto-tv/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,dednal/chromium.src,Pluto-tv/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Fireblend/chromium-crosswalk,jaruba/chromium.src,markYoungH/chromium.src,Jonekee/chromium.src,Chilledheart/chromium,Fireblend/chromium-crosswalk,Pluto-tv/chromium-crosswalk,jaruba/chromium.src,mohamed--abdel-maksoud/chromium.src,ltilve/chromium,markYoungH/chromium.src,krieger-od/nwjs_chromium.src,krieger-od/nwjs_chromium.src,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,Jonekee/chromium.src,Chilledheart/chromium,dednal/chromium.src,Just-D/chromium-1,Chilledheart/chromium,Jonekee/chromium.src,Just-D/chromium-1,PeterWangIntel/chromium-crosswalk,Chilledheart/chromium,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,fujunwei/chromium-crosswalk,markYoungH/chromium.src,TheTypoMaster/chromium-crosswalk,jaruba/chromium.src,PeterWangIntel/chromium-crosswalk,Jonekee/chromium.src,markYoungH/chromium.src,jaruba/chromium.src,Fireblend/chromium
-crosswalk,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,krieger-od/nwjs_chromium.src,krieger-od/nwjs_chromium.src,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,TheTypoMaster/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,hgl888/chromium-crosswalk,M4sse/chromium.src,dednal/chromium.src,Jonekee/chromium.src,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,markYoungH/chromium.src,chuan9/chromium-crosswalk,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,PeterWangIntel/chromium-crosswalk,M4sse/chromium.src,ltilve/chromium,fujunwei/chromium-crosswalk,Fireblend/chromium-crosswalk,axinging/chromium-crosswalk,dednal/chromium.src,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,PeterWangIntel/chromium-crosswalk,chuan9/chromium-crosswalk,dednal/chromium.src,hgl888/chromium-crosswalk,Fireblend/chromium-crosswalk,ltilve/chromium,TheTypoMaster/chromium-crosswalk,markYoungH/chromium.src,chuan9/chromium-crosswalk,krieger-od/nwjs_chromium.src,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,dednal/chromium.src,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,chuan9/chromium-crosswalk,Fireblend/chromium-crosswalk,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,dushu1203/chromium.src,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,ltilve/chromium,PeterWangIntel/chromium-crosswalk,jaruba/chromium.src,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,markYoungH/chromium.src,fujunwei/chromium-crosswalk,Chilledheart/chromium,chuan9/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,ltilve/chromium,ltilve/chromium,Just-D/chromium-1,dednal/chromium.src,Jonekee/chromium.src,Jonekee/chromium.src,M4sse/chromium.src,dushu1203/chromium.src,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,mohamed--abdel-maksoud/chromium.src,fujunwei/chromium-cro
sswalk,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,Jonekee/chromium.src,jaruba/chromium.src,axinging/chromium-crosswalk,Fireblend/chromium-crosswalk,jaruba/chromium.src,krieger-od/nwjs_chromium.src,jaruba/chromium.src,TheTypoMaster/chromium-crosswalk,Jonekee/chromium.src,M4sse/chromium.src,dushu1203/chromium.src,Just-D/chromium-1,Chilledheart/chromium,jaruba/chromium.src,markYoungH/chromium.src,Fireblend/chromium-crosswalk,Just-D/chromium-1,M4sse/chromium.src,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,M4sse/chromium.src,chuan9/chromium-crosswalk,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,ltilve/chromium,fujunwei/chromium-crosswalk,jaruba/chromium.src,mohamed--abdel-maksoud/chromium.src,Fireblend/chromium-crosswalk
|
f177e6acbafa514f6e6ac4563cb92c49f3213618
|
data_structures/Stack/Python/Stack.py
|
data_structures/Stack/Python/Stack.py
|
# Author: AlexBanks97
# Purpose: LIFO Stack implementation using python array.
# Date: October 15th 2017
class Stack(object):
def __init__(self):
# Initialize stack as empty array
self.stack = []
|
# Author: AlexBanks97
# Purpose: LIFO Stack implementation using python array.
# Date: October 15th 2017
class Stack(object):
def __init__(self):
# Initialize stack as empty array
self.stack = []
# Return and remove the last element of the stack array.
def pop(self):
# If the stack is not empty, pop.
if self.stack.length > 0:
return self.stack.pop()
|
Add pop method and implementation
|
Add pop method and implementation
|
Python
|
cc0-1.0
|
ZoranPandovski/al-go-rithms,Cnidarias/al-go-rithms,ZoranPandovski/al-go-rithms,EUNIX-TRIX/al-go-rithms,manikTharaka/al-go-rithms,ZoranPandovski/al-go-rithms,Deepak345/al-go-rithms,ZoranPandovski/al-go-rithms,EUNIX-TRIX/al-go-rithms,ZoranPandovski/al-go-rithms,Cnidarias/al-go-rithms,manikTharaka/al-go-rithms,Deepak345/al-go-rithms,manikTharaka/al-go-rithms,ZoranPandovski/al-go-rithms,EUNIX-TRIX/al-go-rithms,ZoranPandovski/al-go-rithms,manikTharaka/al-go-rithms,EUNIX-TRIX/al-go-rithms,Deepak345/al-go-rithms,Cnidarias/al-go-rithms,ZoranPandovski/al-go-rithms,manikTharaka/al-go-rithms,Deepak345/al-go-rithms,ZoranPandovski/al-go-rithms,EUNIX-TRIX/al-go-rithms,ZoranPandovski/al-go-rithms,manikTharaka/al-go-rithms,Deepak345/al-go-rithms,ZoranPandovski/al-go-rithms,Cnidarias/al-go-rithms,EUNIX-TRIX/al-go-rithms,manikTharaka/al-go-rithms,manikTharaka/al-go-rithms,Cnidarias/al-go-rithms,manikTharaka/al-go-rithms,Deepak345/al-go-rithms,ZoranPandovski/al-go-rithms,Deepak345/al-go-rithms,Deepak345/al-go-rithms,manikTharaka/al-go-rithms,Deepak345/al-go-rithms,manikTharaka/al-go-rithms,Deepak345/al-go-rithms,EUNIX-TRIX/al-go-rithms,Deepak345/al-go-rithms,Cnidarias/al-go-rithms,Deepak345/al-go-rithms,ZoranPandovski/al-go-rithms,Deepak345/al-go-rithms,manikTharaka/al-go-rithms,manikTharaka/al-go-rithms,ZoranPandovski/al-go-rithms,EUNIX-TRIX/al-go-rithms,ZoranPandovski/al-go-rithms,manikTharaka/al-go-rithms,Cnidarias/al-go-rithms,Cnidarias/al-go-rithms,Cnidarias/al-go-rithms,EUNIX-TRIX/al-go-rithms,ZoranPandovski/al-go-rithms,Cnidarias/al-go-rithms,Cnidarias/al-go-rithms,ZoranPandovski/al-go-rithms,EUNIX-TRIX/al-go-rithms,Deepak345/al-go-rithms,EUNIX-TRIX/al-go-rithms,Deepak345/al-go-rithms,Cnidarias/al-go-rithms,Cnidarias/al-go-rithms,EUNIX-TRIX/al-go-rithms,ZoranPandovski/al-go-rithms,manikTharaka/al-go-rithms,ZoranPandovski/al-go-rithms,Cnidarias/al-go-rithms,EUNIX-TRIX/al-go-rithms,manikTharaka/al-go-rithms,Deepak345/al-go-rithms,ZoranPandovski/al-go-rithms,Cnidarias/al-go-rithms
|
e0ce0095fd488852a7d565ecaf49eba0b8dbd7d5
|
db/sql_server/pyodbc.py
|
db/sql_server/pyodbc.py
|
from django.db import connection
from django.db.models.fields import *
from south.db import generic
class DatabaseOperations(generic.DatabaseOperations):
"""
django-pyodbc (sql_server.pyodbc) implementation of database operations.
"""
def create_table(self, table_name, fields):
# Tweak stuff as needed
for name,f in fields:
if isinstance(f, BooleanField):
if f.default == True:
f.default = 1
if f.default == False:
f.default = 0
# Run
generic.DatabaseOperations.create_table(self, table_name, fields)
|
from django.db import connection
from django.db.models.fields import *
from south.db import generic
class DatabaseOperations(generic.DatabaseOperations):
"""
django-pyodbc (sql_server.pyodbc) implementation of database operations.
"""
add_column_string = 'ALTER TABLE %s ADD %s;'
def create_table(self, table_name, fields):
# Tweak stuff as needed
for name,f in fields:
if isinstance(f, BooleanField):
if f.default == True:
f.default = 1
if f.default == False:
f.default = 0
# Run
generic.DatabaseOperations.create_table(self, table_name, fields)
|
Add column support for sql server
|
Add column support for sql server
--HG--
extra : convert_revision : svn%3A69d324d9-c39d-4fdc-8679-7745eae9e2c8/trunk%40111
|
Python
|
apache-2.0
|
theatlantic/django-south,theatlantic/django-south
|
98405875fd8ec682caa04244a900e6ce9eac9acb
|
pavement.py
|
pavement.py
|
import sys
from paver.easy import task, needs, path, sh, cmdopts, options
from paver.setuputils import setup, install_distutils_tasks
from distutils.extension import Extension
from distutils.dep_util import newer
sys.path.insert(0, path('.').abspath())
import version
setup(name='microdrop-plugin-manager',
version=version.getVersion(),
description='Microdrop plugin manager.',
keywords='',
author='Christian Fobel',
author_email='christian@fobel.net',
url='https://github.com/wheeler-microfluidics/mpm',
license='LGPLv2.1',
packages=['mpm', ],
install_requires=['configobj', 'path-helpers', 'pip-helpers>=0.6',
'progressbar2', 'pyyaml', 'si-prefix>=0.4.post3'],
# Install data listed in `MANIFEST.in`
include_package_data=True,
entry_points = {'console_scripts': ['mpm = mpm.bin:main']})
@task
@needs('generate_setup', 'minilib', 'setuptools.command.sdist')
def sdist():
"""Overrides sdist to make sure that our setup.py is generated."""
pass
|
import platform
import sys
from paver.easy import task, needs, path, sh, cmdopts, options
from paver.setuputils import setup, install_distutils_tasks
from distutils.extension import Extension
from distutils.dep_util import newer
sys.path.insert(0, path('.').abspath())
import version
install_requires = ['configobj', 'path-helpers', 'pip-helpers>=0.6',
'progressbar2', 'pyyaml', 'si-prefix>=0.4.post3']
if platform.system() == 'Windows':
install_requires += ['pywin32']
setup(name='microdrop-plugin-manager',
version=version.getVersion(),
description='Microdrop plugin manager.',
keywords='',
author='Christian Fobel',
author_email='christian@fobel.net',
url='https://github.com/wheeler-microfluidics/mpm',
license='LGPLv2.1',
packages=['mpm', ],
install_requires=install_requires,
# Install data listed in `MANIFEST.in`
include_package_data=True,
entry_points = {'console_scripts': ['mpm = mpm.bin:main']})
@task
@needs('generate_setup', 'minilib', 'setuptools.command.sdist')
def sdist():
"""Overrides sdist to make sure that our setup.py is generated."""
pass
|
Add pywin32 as Windows required package
|
[FIX] Add pywin32 as Windows required package
|
Python
|
bsd-3-clause
|
wheeler-microfluidics/mpm
|
f764e9e0f3cba7c387952fe8f19aa702825e8378
|
tests/test_core/test_server.py
|
tests/test_core/test_server.py
|
from mock import patch
import sure # noqa
from moto.server import main
def test_wrong_arguments():
try:
main(["name", "test1", "test2", "test3"])
assert False, ("main() when called with the incorrect number of args"
" should raise a system exit")
except SystemExit:
pass
@patch('moto.server.run_simple')
def test_right_arguments(run_simple):
main(["s3"])
func_call = run_simple.call_args[0]
func_call[0].should.equal("0.0.0.0")
func_call[1].should.equal(5000)
@patch('moto.server.run_simple')
def test_port_argument(run_simple):
main(["s3", "--port", "8080"])
func_call = run_simple.call_args[0]
func_call[0].should.equal("0.0.0.0")
func_call[1].should.equal(8080)
|
from mock import patch
import sure # noqa
from moto.server import main, create_backend_app, DomainDispatcherApplication
def test_wrong_arguments():
try:
main(["name", "test1", "test2", "test3"])
assert False, ("main() when called with the incorrect number of args"
" should raise a system exit")
except SystemExit:
pass
@patch('moto.server.run_simple')
def test_right_arguments(run_simple):
main(["s3"])
func_call = run_simple.call_args[0]
func_call[0].should.equal("0.0.0.0")
func_call[1].should.equal(5000)
@patch('moto.server.run_simple')
def test_port_argument(run_simple):
main(["s3", "--port", "8080"])
func_call = run_simple.call_args[0]
func_call[0].should.equal("0.0.0.0")
func_call[1].should.equal(8080)
def test_domain_dispatched():
dispatcher = DomainDispatcherApplication(create_backend_app)
backend_app = dispatcher.get_application("email.us-east1.amazonaws.com")
backend_app.view_functions.keys()[0].should.equal('EmailResponse.dispatch')
def test_domain_without_matches():
dispatcher = DomainDispatcherApplication(create_backend_app)
dispatcher.get_application.when.called_with("not-matching-anything.com").should.throw(RuntimeError)
def test_domain_dispatched_with_service():
# If we pass a particular service, always return that.
dispatcher = DomainDispatcherApplication(create_backend_app, service="s3")
backend_app = dispatcher.get_application("s3.us-east1.amazonaws.com")
backend_app.view_functions.keys()[0].should.equal('ResponseObject.key_response')
|
Add more test coverage for the server.
|
Add more test coverage for the server.
|
Python
|
apache-2.0
|
william-richard/moto,Affirm/moto,Brett55/moto,gjtempleton/moto,whummer/moto,Affirm/moto,rocky4570/moto,ZuluPro/moto,spulec/moto,Brett55/moto,gjtempleton/moto,okomestudio/moto,ZuluPro/moto,alexdebrie/moto,rouge8/moto,william-richard/moto,gjtempleton/moto,botify-labs/moto,braintreeps/moto,2rs2ts/moto,spulec/moto,okomestudio/moto,DataDog/moto,ZuluPro/moto,kefo/moto,ZuluPro/moto,botify-labs/moto,kennethd/moto,heddle317/moto,im-auld/moto,Affirm/moto,william-richard/moto,william-richard/moto,william-richard/moto,ImmobilienScout24/moto,heddle317/moto,whummer/moto,ZuluPro/moto,jrydberg/moto,dbfr3qs/moto,tootedom/moto,jszwedko/moto,spulec/moto,ZuluPro/moto,spulec/moto,mrucci/moto,Brett55/moto,gjtempleton/moto,whummer/moto,whummer/moto,zonk1024/moto,dbfr3qs/moto,2rs2ts/moto,Affirm/moto,botify-labs/moto,whummer/moto,behanceops/moto,dbfr3qs/moto,2rs2ts/moto,william-richard/moto,Affirm/moto,2rs2ts/moto,kefo/moto,araines/moto,heddle317/moto,riccardomc/moto,spulec/moto,okomestudio/moto,Brett55/moto,botify-labs/moto,rocky4570/moto,rocky4570/moto,whummer/moto,silveregg/moto,EarthmanT/moto,IlyaSukhanov/moto,ludia/moto,heddle317/moto,dbfr3qs/moto,okomestudio/moto,okomestudio/moto,Brett55/moto,andresriancho/moto,spulec/moto,kefo/moto,heddle317/moto,Affirm/moto,botify-labs/moto,dbfr3qs/moto,kefo/moto,gjtempleton/moto,rocky4570/moto,2rs2ts/moto,botify-labs/moto,rocky4570/moto,Brett55/moto,pior/moto,jotes/moto,dbfr3qs/moto,rocky4570/moto,kefo/moto,okomestudio/moto,2mf/moto
|
54add3fa95ab450e5afcbbf7fe8a3205bfc5889c
|
indra/tests/test_reading_scripts_aws.py
|
indra/tests/test_reading_scripts_aws.py
|
import boto3
from os import path, chdir
from subprocess import check_call
from nose.plugins.attrib import attr
from indra.tools.reading import submit_reading_pipeline as srp
s3 = boto3.client('s3')
HERE = path.dirname(path.abspath(__file__))
@attr('nonpublic')
def test_normal_pmid_reading_call():
chdir(path.expanduser('~'))
# Put an id file on s3
basename = 'local_pmid_test_run'
s3_prefix = 'reading_results/%s/' % basename
s3.put_object(Bucket='bigmech', Key=s3_prefix + 'pmids',
Body='\n'.join(['PMID000test%d' % n for n in range(4)]))
# Call the reading tool
sub = srp.PmidSubmitter(basename, ['sparser'])
job_name, cmd = sub._make_command(0, 2)
check_call(cmd)
# Remove garbage on s3
res = s3.list_objects(Bucket='bigmech', Prefix=s3_prefix)
for entry in res['Contents']:
print("Removing %s..." % entry['Key'])
s3.delete_object(Bucket='bigmech', Key=entry['Key'])
return
|
import boto3
from os import path, chdir
from subprocess import check_call
from nose.plugins.attrib import attr
from indra.tools.reading import submit_reading_pipeline as srp
from indra.sources import sparser
s3 = boto3.client('s3')
HERE = path.dirname(path.abspath(__file__))
@attr('nonpublic')
def test_normal_pmid_reading_call():
chdir(path.expanduser('~'))
# Put an id file on s3
basename = 'local_pmid_test_run'
s3_prefix = 'reading_results/%s/' % basename
s3.put_object(Bucket='bigmech', Key=s3_prefix + 'pmids',
Body='\n'.join(['PMID000test%d' % n for n in range(4)]))
# Call the reading tool
sub = srp.PmidSubmitter(basename, ['sparser'])
job_name, cmd = sub._make_command(0, 2)
check_call(cmd)
# Remove garbage on s3
res = s3.list_objects(Bucket='bigmech', Prefix=s3_prefix)
for entry in res['Contents']:
print("Removing %s..." % entry['Key'])
s3.delete_object(Bucket='bigmech', Key=entry['Key'])
return
@attr('nonpublic')
def test_bad_sparser():
txt = ('Disruption of the AP-1 binding site reversed the transcriptional '
'responses seen with Fos and Jun.')
sp = sparser.process_text(txt, timeout=1)
assert sp is None, "Reading succeeded unexpectedly."
|
Add test with currently known-stall sentance.
|
Add test with currently known-stall sentance.
|
Python
|
bsd-2-clause
|
bgyori/indra,pvtodorov/indra,pvtodorov/indra,sorgerlab/indra,sorgerlab/indra,sorgerlab/indra,pvtodorov/indra,bgyori/indra,johnbachman/indra,pvtodorov/indra,sorgerlab/belpy,johnbachman/indra,johnbachman/indra,johnbachman/belpy,johnbachman/belpy,sorgerlab/belpy,sorgerlab/belpy,johnbachman/belpy,bgyori/indra
|
fbe78315efcbf646710da6bf7d6a9d6c26fa8645
|
bayesian_jobs/handlers/clean_postgres.py
|
bayesian_jobs/handlers/clean_postgres.py
|
from selinon import StoragePool
from cucoslib.models import WorkerResult, Analysis
from .base import BaseHandler
class CleanPostgres(BaseHandler):
""" Clean JSONB columns in Postgres """
def execute(self):
s3 = StoragePool.get_connected_storage('S3Data')
results = self.postgres.session.query(WorkerResult).join(Analysis).\
filter(Analysis.finished_at != None).\
filter(WorkerResult.external_request_id == None)
for entry in results:
if entry.worker[0].isupper() or entry.worker in ('recommendation', 'stack_aggregator'):
continue
if 'VersionId' in entry.task_result:
continue
result_object_key = s3._construct_task_result_object_key(entry.ecosystem.name,
entry.package.name,
entry.version.identifier,
entry.worker)
entry.task_result = {'VersionId': s3.retrieve_latest_version_id(result_object_key)}
self.postgres.session.commit()
|
from selinon import StoragePool
from cucoslib.models import WorkerResult, Analysis
from .base import BaseHandler
class CleanPostgres(BaseHandler):
""" Clean JSONB columns in Postgres """
def execute(self):
s3 = StoragePool.get_connected_storage('S3Data')
results = self.postgres.session.query(WorkerResult).join(Analysis).\
filter(Analysis.finished_at != None).\
filter(WorkerResult.external_request_id == None)
for entry in results:
if entry.worker[0].isupper() or entry.worker in ('recommendation', 'stack_aggregator'):
continue
if 'VersionId' in entry.task_result:
continue
result_object_key = s3._construct_task_result_object_key(entry.ecosystem.name,
entry.package.name,
entry.version.identifier,
entry.worker)
if s3.object_exists(result_object_key):
entry.task_result = {'VersionId': s3.retrieve_latest_version_id(result_object_key)}
else:
entry.task_result = None
entry.error = True
self.postgres.session.commit()
|
Mark error flag if an object is not present on S3
|
Mark error flag if an object is not present on S3
|
Python
|
apache-2.0
|
fabric8-analytics/fabric8-analytics-jobs,fabric8-analytics/fabric8-analytics-jobs
|
435e8fc4d9ad8c071a96e37e483fcbc194a94fc6
|
tests/integration/files/file/base/_modules/runtests_decorators.py
|
tests/integration/files/file/base/_modules/runtests_decorators.py
|
# -*- coding: utf-8 -*-
# Import Python libs
from __future__ import absolute_import
import time
# Import Salt libs
import salt.utils.decorators
def _fallbackfunc():
return False, 'fallback'
def working_function():
'''
CLI Example:
.. code-block:: bash
'''
return True
@salt.utils.decorators.depends(True)
def booldependsTrue():
'''
CLI Example:
.. code-block:: bash
'''
return True
@salt.utils.decorators.depends(False)
def booldependsFalse():
return True
@salt.utils.decorators.depends('time')
def depends():
ret = {'ret': True,
'time': time.time()}
return ret
@salt.utils.decorators.depends('time123')
def missing_depends():
return True
@salt.utils.decorators.depends('time', fallback_function=_fallbackfunc)
def depends_will_not_fallback():
ret = {'ret': True,
'time': time.time()}
return ret
@salt.utils.decorators.depends('time123', fallback_function=_fallbackfunc)
def missing_depends_will_fallback():
ret = {'ret': True,
'time': time.time()}
return ret
|
# -*- coding: utf-8 -*-
# Import Python libs
from __future__ import absolute_import
import time
# Import Salt libs
import salt.utils.decorators
def _fallbackfunc():
return False, 'fallback'
def working_function():
'''
CLI Example:
.. code-block:: bash
'''
return True
@salt.utils.decorators.depends(True)
def booldependsTrue():
'''
CLI Example:
.. code-block:: bash
'''
return True
@salt.utils.decorators.depends(False)
def booldependsFalse():
return True
@salt.utils.decorators.depends('time')
def depends():
ret = {'ret': True,
'time': time.time()}
return ret
@salt.utils.decorators.depends('time123')
def missing_depends():
return True
@salt.utils.decorators.depends('time', fallback_function=_fallbackfunc)
def depends_will_not_fallback():
'''
CLI Example:
.. code-block:: bash
'''
ret = {'ret': True,
'time': time.time()}
return ret
@salt.utils.decorators.depends('time123', fallback_function=_fallbackfunc)
def missing_depends_will_fallback():
ret = {'ret': True,
'time': time.time()}
return ret
|
Fix tests: add module function docstring
|
Fix tests: add module function docstring
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
75a70e31791c523da6bf6b0ce4409a77f2784ed5
|
byceps/services/user/transfer/models.py
|
byceps/services/user/transfer/models.py
|
"""
byceps.services.user.transfer.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from typing import Optional
from attr import attrs
from ....typing import UserID
@attrs(auto_attribs=True, frozen=True, slots=True)
class User:
id: UserID
screen_name: str
suspended: bool
deleted: bool
avatar_url: Optional[str]
is_orga: bool
|
"""
byceps.services.user.transfer.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from dataclasses import dataclass
from typing import Optional
from ....typing import UserID
@dataclass(frozen=True)
class User:
id: UserID
screen_name: str
suspended: bool
deleted: bool
avatar_url: Optional[str]
is_orga: bool
|
Change user transfer model from `attrs` to `dataclass`
|
Change user transfer model from `attrs` to `dataclass`
|
Python
|
bsd-3-clause
|
m-ober/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps
|
60a4da0ea090e95ad566743b5ceba874d051d8d9
|
pronto/serializers/obo.py
|
pronto/serializers/obo.py
|
import io
from typing import BinaryIO, ClassVar
from ._fastobo import FastoboSerializer
from .base import BaseSerializer
class OboSerializer(FastoboSerializer, BaseSerializer):
format = "obo"
def dump(self, file):
writer = io.TextIOWrapper(file)
try:
# dump the header
if self.ont.metadata:
header = self._to_header_frame(self.ont.metadata)
file.write(str(header).encode("utf-8"))
if self.ont._terms or self.ont._typedefs:
file.write(b"\n")
# dump terms
if self.ont._terms:
for i, (id, data) in enumerate(self.ont._terms.items()):
frame = self._to_term_frame(data)
file.write(str(frame).encode("utf-8"))
if i < len(self.ont._terms) - 1 or self.ont._relationships:
file.write(b"\n")
# dump typedefs
if self.ont._relationships:
for i, (id, data) in enumerate(self.ont._relationships.items()):
frame = self._to_typedef_frame(data)
file.write(str(frame).encode("utf-8"))
if i < len(self.ont._relationships) - 1:
file.write(b"\n")
finally:
writer.detach()
|
import io
from typing import BinaryIO, ClassVar
from ._fastobo import FastoboSerializer
from .base import BaseSerializer
class OboSerializer(FastoboSerializer, BaseSerializer):
format = "obo"
def dump(self, file):
writer = io.TextIOWrapper(file)
try:
# dump the header
if self.ont.metadata:
header = self._to_header_frame(self.ont.metadata)
file.write(str(header).encode("utf-8"))
if self.ont._terms or self.ont._relationships:
file.write(b"\n")
# dump terms
if self.ont._terms:
for i, (id, data) in enumerate(self.ont._terms.items()):
frame = self._to_term_frame(data)
file.write(str(frame).encode("utf-8"))
if i < len(self.ont._terms) - 1 or self.ont._relationships:
file.write(b"\n")
# dump typedefs
if self.ont._relationships:
for i, (id, data) in enumerate(self.ont._relationships.items()):
frame = self._to_typedef_frame(data)
file.write(str(frame).encode("utf-8"))
if i < len(self.ont._relationships) - 1:
file.write(b"\n")
finally:
writer.detach()
|
Fix bug in OboSerializer` causing `Ontology.dump` to crash
|
Fix bug in OboSerializer` causing `Ontology.dump` to crash
|
Python
|
mit
|
althonos/pronto
|
e51d35545d038b5cb7035cc74f39e4a5c2b0756a
|
thinglang/execution/classes.py
|
thinglang/execution/classes.py
|
from thinglang.lexer.symbols.base import LexicalIdentifier
class ThingInstance(object):
def __init__(self, cls):
self.cls = cls
self.methods = {
x.name: x for x in self.cls.children
}
self.members = {}
def __contains__(self, item):
return item in self.members or item in self.methods
def __getitem__(self, item):
return self.members.get(item) or self.methods.get(item)
def __str__(self):
return f'Thing<{self.cls}>(members={self.members}, methods={self.methods})'
|
from thinglang.lexer.symbols.base import LexicalIdentifier
class ThingInstance(object):
def __init__(self, cls):
self.cls = cls
self.methods = {
x.name: x for x in self.cls.children
}
self.members = {}
def __contains__(self, item):
return item in self.members or item in self.methods
def __getitem__(self, item):
return self.members.get(item) if item in self.members else self.methods[item]
def __str__(self):
return f'Thing<{self.cls}>(members={self.members}, methods={self.methods})'
|
Fix bug in ThingInstace __setitem__
|
Fix bug in ThingInstace __setitem__
|
Python
|
mit
|
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
|
324f05e1cbffdad2da209a7ee515f1d9a32cf93b
|
main.py
|
main.py
|
#!/usr/bin/env python
import sys
import json
from pprint import pprint
try:
import requests
except ImportError:
print(
'Script requires requests package. \n'
'You can install it by running "pip install requests"'
)
exit()
API_URL = 'http://jsonplaceholder.typicode.com/posts/'
def get_by_id(id):
response = requests.get(API_URL + str(id))
return json.loads(response.text)
def get_all():
response = requests.get(API_URL)
return json.loads(response.text)
def validate_id(post_id):
if not post_id.isdigit():
print('Post id should be digit')
return False
elif int(post_id) not in range(1, 100):
print('Post id should be bigger than 0 and smaller than 100')
return False
return True
# If user didn't provided id, print all posts.
# Else - validate id and get post by id.
if len(sys.argv) == 1:
pprint(get_all())
else:
post_id = sys.argv[1]
if validate_id(post_id):
pprint(get_by_id(int(post_id)))
else:
print('Quitting')
|
#!/usr/bin/env python
import sys
import json
from pprint import pprint
try:
import requests
except ImportError:
print(
'Script requires requests package. \n'
'You can install it by running "pip install requests"'
)
exit()
API_URL = 'http://jsonplaceholder.typicode.com/posts/'
def get_by_id(id):
response = requests.get(API_URL + str(id))
return json.loads(response.text)
def get_all():
response = requests.get(API_URL)
return json.loads(response.text)
def validate_id(post_id):
if not post_id.isdigit():
print('Post id should be digit')
return False
elif int(post_id) not in range(1, 100):
print('Post id should be bigger than 0 and smaller than 100')
return False
return True
print('Loading data')
# If user didn't provided id, print all posts.
# Else - validate id and get post by id.
if len(sys.argv) == 1:
pprint(get_all())
else:
post_id = sys.argv[1]
if validate_id(post_id):
pprint(get_by_id(int(post_id)))
else:
print('Quitting')
|
Add message about starting loading data
|
Add message about starting loading data
|
Python
|
mit
|
sevazhidkov/rest-wrapper
|
42804d3182b9b7489583250856e31a8daaef5fa3
|
protolint/__init__.py
|
protolint/__init__.py
|
# -*- coding: utf-8 -*-
"""
protolint
~~~~~~~~~
"""
from . import cli
from . import linter
from . import output
__version__ = (1, 0, 0)
|
# -*- coding: utf-8 -*-
"""
protolint
~~~~~~~~~
"""
__version__ = (1, 0, 0)
from . import cli
from . import linter
from . import output
|
Fix CLI module during build
|
Fix CLI module during build
|
Python
|
mit
|
sgammon/codeclimate-protobuf,sgammon/codeclimate-protobuf
|
baa088e1e6cc503b9f0bcfbacf62327a6527550b
|
kmeldb/mounts.py
|
kmeldb/mounts.py
|
import os
def get_fat_mounts():
fat_mounts = []
mounts = os.popen('mount')
for line in mounts.readlines():
device, ign1, mount_point, ign2, filesystem, options = line.split()
if 'fat' in filesystem:
fat_mounts.append((mount_point, filesystem, device))
return fat_mounts
def main():
mounts = get_fat_mounts()
for mount in mounts:
print(mount)
if __name__ == '__main__':
main()
|
import os
try:
import psutil
except ImportError:
print('Falling back to parsing mounts output')
HAVE_PSUTIL = False
else:
print('Using psutil')
HAVE_PSUTIL = True
def get_fat_mounts():
# global HAVE_PSUTIL
# HAVE_PSUTIL = False
fat_mounts = []
if HAVE_PSUTIL:
partitions = psutil.disk_partitions()
for part in partitions:
if 'fat' in part.fstype:
fat_mounts.append((part.mountpoint, part.fstype, part.device))
else:
mounts = os.popen('mount')
for line in mounts.readlines():
device, ign1, mount_point, ign2, filesystem, options = line.split()
if 'fat' in filesystem:
fat_mounts.append((mount_point, filesystem, device))
return fat_mounts
def main():
mounts = get_fat_mounts()
for mount in mounts:
print(mount)
if __name__ == '__main__':
main()
|
Use psutil.disk_partitions to get FAT formatted partitions.
|
Use psutil.disk_partitions to get FAT formatted partitions.
|
Python
|
apache-2.0
|
chrrrisw/kmel_db,chrrrisw/kmel_db
|
d89e43c649aba78ac9722ca39f9e0c67be0cc897
|
precision/accounts/models.py
|
precision/accounts/models.py
|
from django.db import models
# Create your models here.
|
from django.contrib.auth.models import AbstractUser
from django.db import models
from django.utils.translation import ugettext_lazy as _
class SchoolAdministrator(AbstractUser):
pass
|
Add an simple abstract user model for school administrators which will be used later
|
Add an simple abstract user model for school administrators which will be used later
|
Python
|
mit
|
FreeCodeCampRoma/precision_school-management,FreeCodeCampRoma/precision_school-management,FreeCodeCampRoma/precision_school-management,FreeCodeCampRoma/precision_school-management
|
4a29b5169524205bfa50a89379f4439d0de40296
|
refabric/context_managers.py
|
refabric/context_managers.py
|
from contextlib import contextmanager
from fabric.context_managers import settings, hide
from fabric.state import env
@contextmanager
def sudo(user=None):
with settings(sudo_user=user or env.sudo_user or env.user, use_sudo=True):
yield
silent = lambda *h: settings(hide('commands', *h), warn_only=True)
hide_prefix = lambda: settings(output_prefix=False)
abort_on_error = lambda: settings(warn_only=False)
|
from contextlib import contextmanager
from fabric.context_managers import settings, hide
from fabric.state import env
from refabric.state import apply_role_definitions
@contextmanager
def sudo(user=None):
with settings(sudo_user=user or env.sudo_user or env.user, use_sudo=True):
yield
@contextmanager
def role(name):
with settings(roles=[name]):
yield
apply_role_definitions(None)
silent = lambda *h: settings(hide('commands', *h), warn_only=True)
hide_prefix = lambda: settings(output_prefix=False)
abort_on_error = lambda: settings(warn_only=False)
|
Add role context manager setting role and definitions
|
Add role context manager setting role and definitions
|
Python
|
mit
|
5monkeys/refabric
|
5748b1a7dc4a5be3b2b9da9959eabe586347078a
|
tensorflow_federated/python/program/value_reference.py
|
tensorflow_federated/python/program/value_reference.py
|
# Copyright 2021, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defines the abstract interface for classes that reference values."""
import abc
from typing import Any
from tensorflow_federated.python.core.impl.types import typed_object
class ValueReference(typed_object.TypedObject, metaclass=abc.ABCMeta):
"""An abstract interface for classes that reference values.
This interfaces provides the capability to maniplutate values without
requiring them to be materialized as Python objects.
"""
@abc.abstractmethod
def get_value(self) -> Any:
pass
|
# Copyright 2021, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defines abstract interfaces representing references to values.
These abstract interfaces provide the capability to handle values without
requiring them to be materialized as Python objects. Instances of these
abstract interfaces represent values of type `tff.TensorType` and can be placed
on the server, elements of structures that are placed on the server, or
unplaced.
"""
import abc
from typing import Union
import numpy as np
from tensorflow_federated.python.core.impl.types import typed_object
class ServerArrayReference(typed_object.TypedObject, metaclass=abc.ABCMeta):
"""An abstract interface representing references to server placed values."""
@abc.abstractmethod
def get_value(self) -> Union[np.generic, np.ndarray]:
"""Returns the referenced value as a numpy scalar or array."""
raise NotImplementedError
|
Update the Value Reference API to be more precise about the types of values being referenced.
|
Update the Value Reference API to be more precise about the types of values being referenced.
PiperOrigin-RevId: 404647934
|
Python
|
apache-2.0
|
tensorflow/federated,tensorflow/federated,tensorflow/federated
|
622b81296b292035b970891cd259eaac113d20c1
|
apps/accounts/conf.py
|
apps/accounts/conf.py
|
from django.conf import settings # noqa
from appconf import AppConf
class AccountConf(AppConf):
"""
Custom settings for the account module. Mainly settings required for
the login on the remote system.
"""
PID = 3
LOGIN_TYPE = 'login'
LOGIN_SUCCESS_URL = 'home'
ENFORCE_LOGIN_TIMEOUT = 300 # 5 Minutes
ENFORCE_LOGIN_COOKIE_NAME = 'login_expiry'
ENFORCE_LOGIN_SALT = settings.AUTH_API_USER
ENFORCE_LOGIN_NAME = 'force_login_check'
# The name of the UNCCD role as provided by the remote system.
UNCCD_ROLE_NAME = 'UNCCD national focal point'
|
from django.conf import settings # noqa
from appconf import AppConf
class AccountConf(AppConf):
"""
Custom settings for the account module. Mainly settings required for
the login on the remote system.
"""
PID = 3
LOGIN_TYPE = 'login'
LOGIN_SUCCESS_URL = 'home'
ENFORCE_LOGIN_TIMEOUT = 300 # 5 Minutes
ENFORCE_LOGIN_COOKIE_NAME = 'login_expiry'
ENFORCE_LOGIN_SALT = settings.AUTH_API_USER
ENFORCE_LOGIN_NAME = 'force_login_check'
# The name of the UNCCD role as provided by the remote system.
UNCCD_ROLE_NAME = 'UNCCD Focal Point'
|
Change internal name of UNCCD role back to previous correct value
|
Change internal name of UNCCD role back to previous correct value
|
Python
|
apache-2.0
|
CDE-UNIBE/qcat,CDE-UNIBE/qcat,CDE-UNIBE/qcat,CDE-UNIBE/qcat
|
6e3151cd9e4c5309959c93b2ed683bb74d88a640
|
backend/breach/tests/test_sniffer.py
|
backend/breach/tests/test_sniffer.py
|
from mock import patch
from django.test import TestCase
from breach.sniffer import Sniffer
class SnifferTest(TestCase):
def setUp(self):
self.endpoint = 'http://localhost'
self.sniffer = Sniffer(self.endpoint)
self.source_ip = '147.102.239.229'
self.destination_host = 'dionyziz.com'
@patch('breach.sniffer.requests')
def test_sniffer_start(self, requests):
self.sniffer.start(self.source_ip, self.destination_host)
self.assertTrue(requests.post.called)
@patch('breach.sniffer.requests')
def test_sniffer_read(self, requests):
self.sniffer.read(self.source_ip, self.destination_host)
self.assertTrue(requests.get.called)
@patch('breach.sniffer.requests')
def test_sniffer_delete(self, requests):
self.sniffer.delete(self.source_ip, self.destination_host)
self.assertTrue(requests.post.called)
|
from mock import patch
from django.test import TestCase
from breach.sniffer import Sniffer
class SnifferTest(TestCase):
def setUp(self):
self.endpoint = 'http://localhost'
self.sniffer = Sniffer(self.endpoint, '147.102.239.229', 'dionyziz.com', 'wlan0', '8080')
@patch('breach.sniffer.requests')
def test_sniffer_start(self, requests):
self.sniffer.start()
self.assertTrue(requests.post.called)
@patch('breach.sniffer.requests')
def test_sniffer_read(self, requests):
self.sniffer.read()
self.assertTrue(requests.get.called)
@patch('breach.sniffer.requests')
def test_sniffer_delete(self, requests):
self.sniffer.delete()
self.assertTrue(requests.post.called)
|
Migrate Sniffer unit test to new API
|
Migrate Sniffer unit test to new API
|
Python
|
mit
|
esarafianou/rupture,esarafianou/rupture,dimriou/rupture,dimriou/rupture,dimkarakostas/rupture,dimriou/rupture,esarafianou/rupture,dimkarakostas/rupture,dimkarakostas/rupture,esarafianou/rupture,dionyziz/rupture,dionyziz/rupture,dionyziz/rupture,dionyziz/rupture,dimriou/rupture,dimriou/rupture,dionyziz/rupture,dimkarakostas/rupture,dimkarakostas/rupture
|
15e713f76f1fbfef26d9a7d3d3c95fac2d8f213e
|
casepro/settings_production_momza.py
|
casepro/settings_production_momza.py
|
from __future__ import unicode_literals
import os
# import our default settings
from settings_production import * # noqa
PODS[0]['contact_id_fieldname'] = os.environ.get( # noqa: F405
'REGISTRATION_CONTACT_ID_FIELDNAME',
'registrant_id',
)
PODS[0]['field_mapping'] = [ # noqa: F405
{"field": "msisdn_registrant", "field_name": "Cell Number"},
{"field": "language", "field_name": "Language Preference"},
{"field": "faccode", "field_name": "Facility Code"},
{"field": "reg_type", "field_name": "Registration Type"},
{"field": "mom_dob", "field_name": "Mother's Date of Birth"},
{"field": "edd", "field_name": "Expected Due Date"},
]
|
from __future__ import unicode_literals
import os
# import our default settings
from settings_production import * # noqa
PODS[0]['contact_id_fieldname'] = os.environ.get( # noqa: F405
'REGISTRATION_CONTACT_ID_FIELDNAME',
'registrant_id',
)
PODS[0]['field_mapping'] = [ # noqa: F405
{"field": "faccode", "field_name": "Facility Code"},
{"field": "reg_type", "field_name": "Registration Type"},
{"field": "mom_dob", "field_name": "Mother's Date of Birth"},
{"field": "edd", "field_name": "Expected Due Date"},
]
|
Remove cell number and language from pod
|
Remove cell number and language from pod
We started using the identity store and the hub to fetch this information,
but unfortunately the field names are different depending on which service
the info is coming from.
These 2 fields are already displayed in the CasePro interface so it makes
sense to not use the pod at all for them.
|
Python
|
bsd-3-clause
|
praekelt/casepro,praekelt/casepro,praekelt/casepro
|
7bf391e772cbece78b521f1e357ced4bef6908f4
|
bin/upload_version.py
|
bin/upload_version.py
|
#!python
import os
import sys
import json
import requests
if __name__ == '__main__':
version = sys.argv[1]
filepath = sys.argv[2]
filename = filepath.split('/')[-1]
github_token = os.environ['GITHUB_TOKEN']
auth = (github_token, 'x-oauth-basic')
commit_sha = os.environ['CIRCLE_SHA1']
params = json.dumps({
'tag_name': 'v{0}'.format(version),
'name': 're:dash v{0}'.format(version),
'target_commitish': commit_sha
})
response = requests.post('https://api.github.com/repos/everythingme/redash/releases',
data=params,
auth=auth)
upload_url = response.json()['upload_url']
upload_url = upload_url.replace('{?name}', '')
with open(filepath) as file_content:
headers = {'Content-Type': 'application/gzip'}
response = requests.post(upload_url, file_content, params={'name': filename}, auth=auth, headers=headers, verify=False)
|
#!python
import os
import sys
import json
import requests
if __name__ == '__main__':
version = sys.argv[1]
filepath = sys.argv[2]
filename = filepath.split('/')[-1]
github_token = os.environ['GITHUB_TOKEN']
auth = (github_token, 'x-oauth-basic')
commit_sha = os.environ['CIRCLE_SHA1']
params = json.dumps({
'tag_name': 'v{0}'.format(version),
'name': 're:dash v{0}'.format(version),
'target_commitish': commit_sha,
'prerelease': True
})
response = requests.post('https://api.github.com/repos/everythingme/redash/releases',
data=params,
auth=auth)
upload_url = response.json()['upload_url']
upload_url = upload_url.replace('{?name}', '')
with open(filepath) as file_content:
headers = {'Content-Type': 'application/gzip'}
response = requests.post(upload_url, file_content, params={'name': filename}, auth=auth, headers=headers, verify=False)
|
Set automatic releases as 'prerelease'.
|
Set automatic releases as 'prerelease'.
|
Python
|
bsd-2-clause
|
pubnative/redash,pubnative/redash,imsally/redash,hudl/redash,denisov-vlad/redash,M32Media/redash,chriszs/redash,easytaxibr/redash,M32Media/redash,jmvasquez/redashtest,crowdworks/redash,pubnative/redash,ninneko/redash,hudl/redash,denisov-vlad/redash,amino-data/redash,EverlyWell/redash,imsally/redash,vishesh92/redash,44px/redash,EverlyWell/redash,guaguadev/redash,stefanseifert/redash,chriszs/redash,getredash/redash,jmvasquez/redashtest,useabode/redash,alexanderlz/redash,getredash/redash,ninneko/redash,rockwotj/redash,getredash/redash,guaguadev/redash,amino-data/redash,vishesh92/redash,easytaxibr/redash,alexanderlz/redash,hudl/redash,M32Media/redash,stefanseifert/redash,jmvasquez/redashtest,guaguadev/redash,vishesh92/redash,akariv/redash,crowdworks/redash,useabode/redash,chriszs/redash,useabode/redash,getredash/redash,stefanseifert/redash,guaguadev/redash,guaguadev/redash,imsally/redash,44px/redash,ninneko/redash,stefanseifert/redash,akariv/redash,EverlyWell/redash,crowdworks/redash,useabode/redash,EverlyWell/redash,crowdworks/redash,pubnative/redash,akariv/redash,44px/redash,ninneko/redash,rockwotj/redash,moritz9/redash,akariv/redash,denisov-vlad/redash,rockwotj/redash,amino-data/redash,amino-data/redash,alexanderlz/redash,easytaxibr/redash,getredash/redash,moritz9/redash,moritz9/redash,imsally/redash,ninneko/redash,rockwotj/redash,moritz9/redash,chriszs/redash,easytaxibr/redash,alexanderlz/redash,M32Media/redash,easytaxibr/redash,denisov-vlad/redash,pubnative/redash,akariv/redash,hudl/redash,44px/redash,jmvasquez/redashtest,stefanseifert/redash,vishesh92/redash,denisov-vlad/redash,jmvasquez/redashtest
|
694ea053fe87e4811acf0dde47826fec3eb1c9f7
|
source/run.py
|
source/run.py
|
import asyncio
import time
from autoreiv import AutoReiv
def main():
while True:
bot = AutoReiv()
bot.load()
try:
bot.run(bot.config.get('login'), bot.config.get('password'))
except Exception as e:
print('* Crashed with error: {}'.format(e))
finally:
print('* Disconnected.')
asyncio.set_event_loop(asyncio.new_event_loop())
print('* Waiting 10 seconds before reconnecting (press ^C to stop)...')
try:
time.sleep(10)
except KeyboardInterrupt:
break
if __name__ == '__main__':
main()
|
import asyncio
import time
from autoreiv import AutoReiv
def main():
while True:
bot = AutoReiv()
bot.load()
try:
bot.run(bot.config.get('login'), bot.config.get('password'))
except Exception as e:
print('* Crashed with error: {}'.format(e))
finally:
if not bot.is_closed:
bot.close()
print('* Disconnected.')
asyncio.set_event_loop(asyncio.new_event_loop())
print('* Waiting 10 seconds before reconnecting (press ^C to stop)...')
try:
time.sleep(10)
except KeyboardInterrupt:
break
if __name__ == '__main__':
main()
|
Make sure the event loop gets closed on disconnect
|
Make sure the event loop gets closed on disconnect
|
Python
|
mit
|
diath/AutoReiv
|
27efc29b76ff0a65cd5ff12360701ca61231f53f
|
examples/thread_pool.py
|
examples/thread_pool.py
|
from diesel import quickstart, sleep
from diesel.util.pool import ThreadPool
from diesel.protocols.http import HttpClient, HttpHeaders
import random
def handle_it(i):
print 'S', i
sleep(random.random())
print 'E', i
def c():
for x in xrange(0, 20):
yield x
make_it = c().next
threads = ThreadPool(10, handle_it, make_it)
quickstart(threads)
|
from diesel import quickstart, sleep, quickstop
from diesel.util.pool import ThreadPool
import random
def handle_it(i):
print 'S', i
sleep(random.random())
print 'E', i
def c():
for x in xrange(0, 20):
yield x
make_it = c().next
def stop_it():
quickstop()
threads = ThreadPool(10, handle_it, make_it, stop_it)
quickstart(threads)
|
Clean it up with a finalizer.
|
Clean it up with a finalizer.
|
Python
|
bsd-3-clause
|
dieseldev/diesel
|
e5a872bd128e6b3ea3cc82df4094d41843148bce
|
runtests.py
|
runtests.py
|
#!/usr/bin/env python
import sys
from os.path import abspath, dirname
from django.conf import settings
import django
sys.path.insert(0, abspath(dirname(__file__)))
if not settings.configured:
settings.configure(
INSTALLED_APPS=(
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.auth',
'django.contrib.admin',
'email_log',
'email_log.tests',
),
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
EMAIL_LOG_BACKEND = 'django.core.mail.backends.locmem.EmailBackend',
ROOT_URLCONF='email_log.tests.urls',
)
def runtests():
if hasattr(django, 'setup'):
django.setup()
try:
from django.test.runner import DiscoverRunner
runner_class = DiscoverRunner
test_args = ['email_log.tests']
except ImportError:
from django.test.simple import DjangoTestSuiteRunner
runner_class = DjangoTestSuiteRunner
test_args = ['tests']
failures = runner_class(failfast=False).run_tests(test_args)
sys.exit(failures)
if __name__ == "__main__":
runtests()
|
#!/usr/bin/env python
import sys
from os.path import abspath, dirname
from django.conf import settings
import django
sys.path.insert(0, abspath(dirname(__file__)))
if not settings.configured:
settings.configure(
INSTALLED_APPS=(
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.auth',
'django.contrib.admin',
'email_log',
'email_log.tests',
),
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
EMAIL_LOG_BACKEND = 'django.core.mail.backends.locmem.EmailBackend',
MIDDLEWARE_CLASSES=(
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
),
ROOT_URLCONF='email_log.tests.urls',
)
def runtests():
if hasattr(django, 'setup'):
django.setup()
try:
from django.test.runner import DiscoverRunner
runner_class = DiscoverRunner
test_args = ['email_log.tests']
except ImportError:
from django.test.simple import DjangoTestSuiteRunner
runner_class = DjangoTestSuiteRunner
test_args = ['tests']
failures = runner_class(failfast=False).run_tests(test_args)
sys.exit(failures)
if __name__ == "__main__":
runtests()
|
Add AuthenticationMiddleware to tests (for 1.7)
|
Add AuthenticationMiddleware to tests (for 1.7)
|
Python
|
mit
|
treyhunner/django-email-log,treyhunner/django-email-log
|
5a4e8ec1179b2ae3b37190ea45fb0d72ce4d7a90
|
canopen/sync.py
|
canopen/sync.py
|
class SyncProducer(object):
"""Transmits a SYNC message periodically."""
#: COB-ID of the SYNC message
cob_id = 0x80
def __init__(self, network):
self.network = network
self.period = None
self._task = None
def transmit(self):
"""Send out a SYNC message once."""
self.network.send_message(self.cob_id, [])
def start(self, period=None):
"""Start periodic transmission of SYNC message in a background thread.
:param float period:
Period of SYNC message in seconds.
"""
if period is not None:
self.period = period
if not self.period:
raise ValueError("A valid transmission period has not been given")
self._task = self.network.send_periodic(self.cob_id, [], self.period)
def stop(self):
"""Stop periodic transmission of SYNC message."""
self._task.stop()
|
class SyncProducer(object):
"""Transmits a SYNC message periodically."""
#: COB-ID of the SYNC message
cob_id = 0x80
def __init__(self, network):
self.network = network
self.period = None
self._task = None
def transmit(self, count=None):
"""Send out a SYNC message once.
:param int count:
Counter to add in message.
"""
data = [count] if count is not None else []
self.network.send_message(self.cob_id, data)
def start(self, period=None):
"""Start periodic transmission of SYNC message in a background thread.
:param float period:
Period of SYNC message in seconds.
"""
if period is not None:
self.period = period
if not self.period:
raise ValueError("A valid transmission period has not been given")
self._task = self.network.send_periodic(self.cob_id, [], self.period)
def stop(self):
"""Stop periodic transmission of SYNC message."""
self._task.stop()
|
Allow specifying counter in SYNC message
|
Allow specifying counter in SYNC message
Addresses #63
|
Python
|
mit
|
christiansandberg/canopen,christiansandberg/canopen
|
91bf74104c0eee2ca3d8d4fdd293390daf173166
|
checker/main.py
|
checker/main.py
|
#!/usr/bin/env python
import os
import sys
import subprocess
import getopt
class Chdir:
def __init__(self, newPath):
self.savedPath = os.getcwd()
os.chdir(newPath)
class Checker:
def __init__(self, path):
self.path = path
def get_jobs(self):
Chdir(self.path)
jobs = []
for dirname, dirnames, filenames in os.walk('.'):
for filename in filenames:
i = os.path.join(dirname, filename)
if i != "./__init__.py":
jobs.append(self.path + i[2:])
self.run_jobs(jobs)
def run_jobs(self, jobs):
for job in jobs:
subprocess.call(job)
if __name__ == '__main__':
opts, path = getopt.getopt(sys.argv[1], "h")
for opt, arg in opts:
if opt == '-h':
print './main.py /full/path/to/jobs'
sys.exit()
check = Checker(path)
check.get_jobs()
|
#!/usr/bin/env python
import os
import sys
import subprocess
import getopt
class Checker:
def __init__(self, path):
if not os.path.isdir(path):
sys.exit(1);
self.path = os.path.realpath(path)
self.jobs = self.getExecutableFiles(self.path)
def getExecutableFiles(self,path):
files = []
for dirname, dirnames, filenames in os.walk(path):
for filename in filenames:
filename_path = os.path.join(dirname, filename)
if os.access(filename_path,os.X_OK):
files.append(filename_path)
return files;
def run(self):
for job in self.jobs:
subprocess.call(job)
if __name__ == '__main__':
opts, path = getopt.getopt(sys.argv[1], "h")
for opt, arg in opts:
if opt == '-h':
print './main.py /full/path/to/jobs'
sys.exit()
check = Checker(path)
check.run()
|
Streamline the filesystem looping code.
|
Streamline the filesystem looping code.
|
Python
|
mit
|
bsuweb/checker
|
a32f7cab9ce32c1c2169b55b1e37957a093e47f8
|
collect_district_court_case_details.py
|
collect_district_court_case_details.py
|
import datetime
import pymongo
import os
from courtreader import readers
# Connect to database
client = pymongo.MongoClient(os.environ['DISTRICT_DB'])
db = client.va_district_court_cases
# Connect to District Court Reader
reader = readers.DistrictCourtReader()
reader.connect()
# Fill in cases
while True:
case = db.cases.find_one({
'FIPSCode': '702', \
'date_collected': {'$exists': False} \
})
if case is None: break
print case['CaseNumber']
case_details = reader.get_case_details_by_number( \
case['FIPSCode'], case['CaseNumber'])
case_details['date_collected'] = datetime.datetime.utcnow()
updated_case = dict(case.items() + case_details.items())
db.cases.replace_one({'_id': case['_id']}, updated_case)
print 'Finished'
|
import datetime
import pymongo
import os
import sys
from courtreader import readers
# Connect to database
client = pymongo.MongoClient(os.environ['DISTRICT_DB'])
db = client.va_district_court_cases
# Connect to District Court Reader
reader = readers.DistrictCourtReader()
reader.connect()
# Fill in cases
while True:
case = db.cases.find_one({
'FIPSCode': sys.argv[1],
'date_collected': {'$exists': False}
})
if case is None: break
print case['CaseNumber']
case_details = reader.get_case_details_by_number( \
case['FIPSCode'], case['CaseNumber'])
case_details['date_collected'] = datetime.datetime.utcnow()
updated_case = dict(case.items() + case_details.items())
db.cases.replace_one({'_id': case['_id']}, updated_case)
print 'Finished'
|
Set FIPS code from command line
|
Set FIPS code from command line
|
Python
|
mit
|
bschoenfeld/va-court-scraper,bschoenfeld/va-court-scraper
|
e0e2b4fc60a945e9680c171109fd1cbb6f21e304
|
celery/run_carrizo.py
|
celery/run_carrizo.py
|
import dem
import tasks
from celery import *
carrizo = dem.DEMGrid('tests/data/carrizo.tif')
d = 100
max_age = 10**3.5
age_step = 1
num_ages = max_age/age_step
num_angles = 180
ages = np.linspace(0, max_age, num=num_ages)
angles = np.linspace(-np.pi/2, np.pi/2, num=num_angles)
template_fits = [tasks.match_template.s(carrizo, d, age, alpha) for age in ages for alpha in angles]
compare_callback = tasks.compare_fits.s()
res = chord(template_fits)(compare_callback)
|
import dem
import tasks
from celery import *
carrizo = dem.DEMGrid('tests/data/carrizo.tif')
d = 100
max_age = 10**3.5
age_step = 1
num_ages = max_age/age_step
num_angles = 180
ages = np.linspace(0, max_age, num=num_ages)
angles = np.linspace(-np.pi/2, np.pi/2, num=num_angles)
template_fits = [tasks.match_template.s(carrizo, d, age, alpha) for age in ages for alpha in angles]
compare_callback = tasks.compare_fits.s()
res = chord(template_fits)(compare_callback)
scarplet.save_results(carrizo, res, base_dir='results/')
res.forget()
|
Add test script for Carrizo data
|
Add test script for Carrizo data
|
Python
|
mit
|
stgl/scarplet,rmsare/scarplet
|
d2b4810d74364394e7e7ecf8f8c5b1011a250f77
|
notescli/commands.py
|
notescli/commands.py
|
import config
import cliparser
import indexer
import io
import os
def command_ls(index):
with index.index.searcher() as searcher:
results = searcher.documents()
print "Indexed files:"
for result in results:
print result["filename"]
def command_view(index, query):
result_file = indexer.find_result(index.index, query)
if result_file is None:
print "No results found"
else:
print open(result_file).read()
def command_add(config, filename):
full_path = os.path.join(config.notes_path, filename)
io.edit_file(full_path)
print "Added", full_path
def command_edit(index, query):
result_file = indexer.find_result(index.index, query)
if result_file is None:
print "No results found"
else:
io.edit_file(result_file)
def command_rm(index, query):
result_file = indexer.find_result(index.index, query)
if result_file is None:
print "No results found"
else:
print "Are you sure you want to delete %s? (y/n)" % result_file
choice = io.get_choice()
if choice == "y":
os.remove(result_file)
def command_reindex(config):
indexer.reindex(config)
|
import config
import cliparser
import indexer
import io
import os
def command_ls(index):
print "Indexed files:"
for filename in index.list_files():
print filename
def command_view(index, query):
result_file = indexer.find_result(index.index, query)
if result_file is None:
print "No results found"
else:
print open(result_file).read()
def command_add(config, filename):
full_path = os.path.join(config.notes_path, filename)
io.edit_file(full_path)
print "Added", full_path
def command_edit(index, query):
result_file = indexer.find_result(index.index, query)
if result_file is None:
print "No results found"
else:
io.edit_file(result_file)
def command_rm(index, query):
result_file = indexer.find_result(index.index, query)
if result_file is None:
print "No results found"
else:
print "Are you sure you want to delete %s? (y/n)" % result_file
choice = io.get_choice()
if choice == "y":
os.remove(result_file)
def command_reindex(config):
indexer.reindex(config)
|
Replace implementation of ls by using the indexer
|
Replace implementation of ls by using the indexer
|
Python
|
mit
|
phss/notes-cli
|
a5b4a657a1717e2fb9e4c53f93b5232dd58a1c68
|
shop_richcatalog/views.py
|
shop_richcatalog/views.py
|
from shop.views import ShopListView, ShopDetailView
from shop_richcatalog.models import Catalog
from shop.models import Product
class CatalogListView(ShopListView):
'''
TODO.
'''
model = Catalog
#generic_template = "shop_richcatalog/catalog_list.html"
class CatalogDetailView(ShopDetailView):
'''
TODO.
'''
model = Catalog
#generic_template = "shop_richcatalog/catalog_detail.html"
def get_context_data(self, **kwargs):
'''
TODO.
'''
# get context data from superclass
ctx = super(CatalogDetailView, self).get_context_data(**kwargs)
# update the context with active products in this catalog
product_list = self.object.products.filter(active=True)
if product_list:
ctx.update({"product_list": product_list})
# return the context
return ctx
|
from shop.views import ShopListView, ShopDetailView
from shop_richcatalog.models import Catalog
from shop.models import Product
class CatalogListView(ShopListView):
'''
Display all catalogs in a tree.
'''
model = Catalog
class CatalogDetailView(ShopDetailView):
'''
Display detailed catalog information.
'''
model = Catalog
def get_context_data(self, **kwargs):
'''
Get catalog context data.
'''
# get context data from superclass
ctx = super(CatalogDetailView, self).get_context_data(**kwargs)
# update the context with active products in this catalog
product_list = self.object.products.filter(active=True)
if product_list:
ctx.update({"product_list": product_list})
# return the context
return ctx
|
Document the view classes and make fix their spacing for pep8.
|
Document the view classes and make fix their spacing for pep8.
|
Python
|
bsd-3-clause
|
nimbis/django-shop-richcatalog,nimbis/django-shop-richcatalog
|
9639eb34f53444387621ed0a27ef9b273b38df79
|
slackclient/_slackrequest.py
|
slackclient/_slackrequest.py
|
import json
import requests
import six
class SlackRequest(object):
@staticmethod
def do(token, request="?", post_data=None, domain="slack.com"):
'''
Perform a POST request to the Slack Web API
Args:
token (str): your authentication token
request (str): the method to call from the Slack API. For example: 'channels.list'
post_data (dict): key/value arguments to pass for the request. For example:
{'channel': 'CABC12345'}
domain (str): if for some reason you want to send your request to something other
than slack.com
'''
post_data = post_data or {}
# Pull file out so it isn't JSON encoded like normal fields.
files = {'file': post_data.pop('file')} if 'file' in post_data else None
for k, v in six.iteritems(post_data):
if not isinstance(v, six.string_types):
post_data[k] = json.dumps(v)
url = 'https://{0}/api/{1}'.format(domain, request)
post_data['token'] = token
return requests.post(url, data=post_data, files=files)
|
import json
import requests
import six
class SlackRequest(object):
@staticmethod
def do(token, request="?", post_data=None, domain="slack.com"):
'''
Perform a POST request to the Slack Web API
Args:
token (str): your authentication token
request (str): the method to call from the Slack API. For example: 'channels.list'
post_data (dict): key/value arguments to pass for the request. For example:
{'channel': 'CABC12345'}
domain (str): if for some reason you want to send your request to something other
than slack.com
'''
post_data = post_data or {}
# Pull file out so it isn't JSON encoded like normal fields.
# Only do this for requests that are UPLOADING files; downloading files
# use the 'file' argument to point to a File ID.
upload_requests = ['files.upload']
files = None
if request in upload_requests:
files = {'file': post_data.pop('file')} if 'file' in post_data else None
for k, v in six.iteritems(post_data):
if not isinstance(v, six.string_types):
post_data[k] = json.dumps(v)
url = 'https://{0}/api/{1}'.format(domain, request)
post_data['token'] = token
return requests.post(url, data=post_data, files=files)
|
Fix bug preventing API calls requiring a file ID
|
Fix bug preventing API calls requiring a file ID
For example, an API call to files.info takes a file ID argument named
"file", which was stripped out by this call. Currently, there is only
one request type that accepts file data (files.upload). Every other use
of 'file' is an ID that aught to be contained in the request.
|
Python
|
mit
|
slackhq/python-slackclient,slackapi/python-slackclient,slackapi/python-slackclient,slackapi/python-slackclient
|
2395d08c672250b5df273eb36415c8200dd7f801
|
tests/tests_twobody/test_mean_elements.py
|
tests/tests_twobody/test_mean_elements.py
|
import pytest
from poliastro.twobody.mean_elements import get_mean_elements
def test_get_mean_elements_raises_error_if_invalid_body():
body = "PlanetNine"
with pytest.raises(ValueError) as excinfo:
get_mean_elements(body)
assert f"The input body '{body}' is invalid." in excinfo.exconly()
|
import pytest
from poliastro.bodies import Sun
from poliastro.twobody.mean_elements import get_mean_elements
def test_get_mean_elements_raises_error_if_invalid_body():
body = Sun
with pytest.raises(ValueError) as excinfo:
get_mean_elements(body)
assert f"The input body '{body}' is invalid." in excinfo.exconly()
|
Add test for error check
|
Add test for error check
|
Python
|
mit
|
poliastro/poliastro
|
599811e2a80b6f123d3beeb8906c0b82e975da86
|
maintenancemode/views/defaults.py
|
maintenancemode/views/defaults.py
|
from django.template import Context, loader
from maintenancemode import http
def temporary_unavailable(request, template_name='503.html'):
"""
Default 503 handler, which looks for the requested URL in the redirects
table, redirects if found, and displays 404 page if not redirected.
Templates: `503.html`
Context:
request_path
The path of the requested URL (e.g., '/app/pages/bad_page/')
"""
t = loader.get_template(template_name) # You need to create a 503.html template.
return http.HttpResponseTemporaryUnavailable(t.render(Context({})))
|
from django.template import RequestContext, loader
from maintenancemode import http
def temporary_unavailable(request, template_name='503.html'):
"""
Default 503 handler, which looks for the requested URL in the redirects
table, redirects if found, and displays 404 page if not redirected.
Templates: `503.html`
Context:
request_path
The path of the requested URL (e.g., '/app/pages/bad_page/')
"""
t = loader.get_template(template_name) # You need to create a 503.html template.
context = RequestContext(request, {'request_path': request.path})
return http.HttpResponseTemporaryUnavailable(t.render(context))
|
Use RequestContext instead of just Context.
|
Use RequestContext instead of just Context.
|
Python
|
bsd-3-clause
|
aarsan/django-maintenancemode,21strun/django-maintenancemode,shanx/django-maintenancemode,21strun/django-maintenancemode,shanx/django-maintenancemode,aarsan/django-maintenancemode
|
0983715cd2ee4eb3ac411e1ff24fa2e49df54eb5
|
src/manage.py
|
src/manage.py
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
# Browsers doesn't use content negotiation using ETags with HTTP 1.0 servers
# Force Django to use HTTP 1.1 when using the runserver command
from wsgiref import simple_server
simple_server.ServerHandler.http_version = "1.1"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
Allow to tests ETags when using the runserver command
|
Allow to tests ETags when using the runserver command
|
Python
|
agpl-3.0
|
jpajuelo/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud,rockneurotiko/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud
|
efebbe998ac67810f6e0f86b685ab18f1ccf2bda
|
nio_cli/commands/config.py
|
nio_cli/commands/config.py
|
from .base import Base
import requests
class Config(Base):
""" Get basic nio info """
def __init__(self, options, *args, **kwargs):
super().__init__(options, *args, **kwargs)
self._resource = 'services' if self.options['services'] else 'blocks'
self._resource_name = \
self.options['<service-name>'] if self.options['services'] else \
self.options['<block-name>'] if self.options['blocks'] else \
""
def run(self):
response = requests.get(
self._base_url.format(
'{}/{}'.format(self._resource, self._resource_name)),
auth=self._auth)
try:
config = response.json()
print(requests.get(
self._base_url.format(self._resource),
json=config,
auth=self._auth).json())
except Exception as e:
print(e)
|
from .base import Base
import requests
class Config(Base):
""" Get basic nio info """
def __init__(self, options, *args, **kwargs):
super().__init__(options, *args, **kwargs)
self._resource = 'services' if self.options['services'] else 'blocks'
self._resource_name = \
self.options['<service-name>'] if self.options['services'] else \
self.options['<block-name>'] if self.options['blocks'] else \
""
def run(self):
response = requests.get(
self._base_url.format(
'{}/{}'.format(self._resource, self._resource_name)),
auth=self._auth)
try:
config = response.json()
print(config)
except Exception as e:
print(e)
|
Remove additional http get request
|
Remove additional http get request
|
Python
|
apache-2.0
|
nioinnovation/nio-cli,neutralio/nio-cli
|
2a1a5073e069b1fbf5b7803417b59339ec72d026
|
netdisco/discoverables/belkin_wemo.py
|
netdisco/discoverables/belkin_wemo.py
|
""" Discovers Belkin Wemo devices. """
from . import SSDPDiscoverable
class Discoverable(SSDPDiscoverable):
""" Adds support for discovering Belkin WeMo platform devices. """
def info_from_entry(self, entry):
""" Returns most important info from a uPnP entry. """
device = entry.description['device']
return (device['friendlyName'], device['modelName'],
entry.values['location'])
def get_entries(self):
""" Returns all Belkin Wemo entries. """
return self.find_by_device_description(
{'manufacturer': 'Belkin International Inc.'})
|
""" Discovers Belkin Wemo devices. """
from . import SSDPDiscoverable
class Discoverable(SSDPDiscoverable):
""" Adds support for discovering Belkin WeMo platform devices. """
def info_from_entry(self, entry):
""" Returns most important info from a uPnP entry. """
device = entry.description['device']
return (device['friendlyName'], device['modelName'],
entry.values['location'], device['macAddress'])
def get_entries(self):
""" Returns all Belkin Wemo entries. """
return self.find_by_device_description(
{'manufacturer': 'Belkin International Inc.'})
|
Add MAC address to wemo discovery attributes
|
Add MAC address to wemo discovery attributes
|
Python
|
mit
|
sfam/netdisco,brburns/netdisco,balloob/netdisco
|
8900aa1b47449bd6ad204725c3a98f75e17eb3ba
|
python/array_manipulation.py
|
python/array_manipulation.py
|
#!/bin/python3
import math
import os
import random
import re
import sys
def arrayManipulation(n, queries):
# An array used to capture the difference of an element
# compared to the previous element.
# Therefore the value of diffs[n] after all array manipulations is
# the cumulative sum of values from diffs[0] to diffs[n - 1]
diffs = [0] * n
for a, b, k in queries:
# Adds "k" to all subsequent elements in the array
diffs[a - 1] += k
# Ignore if b is out of range
if (b < n):
# Subtracts "k" from all subsequent elements in the array
diffs[b] -= k
sumSoFar = 0
maxSoFar = 0
for diff in diffs:
sumSoFar += diff
if sumSoFar > maxSoFar:
maxSoFar = sumSoFar
return maxSoFar
if __name__ == '__main__':
fptr = open(os.environ['OUTPUT_PATH'], 'w')
nm = input().split()
n = int(nm[0])
m = int(nm[1])
queries = []
for _ in range(m):
queries.append(list(map(int, input().rstrip().split())))
result = arrayManipulation(n, queries)
fptr.write(str(result) + '\n')
fptr.close()
|
#!/bin/python3
import math
import os
import random
import re
import sys
def arrayManipulation(n, queries):
diffs = getArrayOfDiffs(n, queries)
return maxFromDiffs(diffs)
def maxFromDiffs(diffs):
sumSoFar = 0
maxSoFar = 0
for diff in diffs:
sumSoFar += diff
if sumSoFar > maxSoFar:
maxSoFar = sumSoFar
return maxSoFar
def getArrayOfDiffs(n, queries):
# An array used to capture the difference of an element
# compared to the previous element.
# Therefore the value of diffs[n] after all array manipulations is
# the cumulative sum of values from diffs[0] to diffs[n - 1]
diffs = [0] * n
for a, b, k in queries:
# Adds "k" to all subsequent elements in the array
diffs[a - 1] += k
# Ignore if b is out of range
if (b < n):
# Subtracts "k" from all subsequent elements in the array
diffs[b] -= k
return diffs
if __name__ == '__main__':
fptr = open(os.environ['OUTPUT_PATH'], 'w')
nm = input().split()
n = int(nm[0])
m = int(nm[1])
queries = []
for _ in range(m):
queries.append(list(map(int, input().rstrip().split())))
result = arrayManipulation(n, queries)
fptr.write(str(result) + '\n')
fptr.close()
|
Refactor into getArrayOfDiffs and maxFromDiffs
|
Refactor into getArrayOfDiffs and maxFromDiffs
|
Python
|
mit
|
rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank
|
e8584f4193658399ea9bb2317915caff78fce88b
|
corehq/apps/commtrack/management/commands/update_supply_point_locations.py
|
corehq/apps/commtrack/management/commands/update_supply_point_locations.py
|
from django.core.management.base import BaseCommand
from casexml.apps.case.models import CommCareCase
from corehq.util.couch import iter_update, DocUpdate
class Command(BaseCommand):
help = ("Make sure all supply point cases have their owner_id set "
"to the location_id")
def handle(self, *args, **options):
def add_location(case):
if not case['location_id']:
return None
if case['owner_id'] != case['location_id']:
case['owner_id'] = case['location_id']
return DocUpdate(case)
iter_update(
CommCareCase.get_db(),
add_location,
self.get_case_ids(),
verbose=True
)
def get_case_ids(self):
return (case['id'] for case in CommCareCase.get_db().view(
'commtrack/supply_point_by_loc',
reduce=False,
include_docs=False,
).all())
|
from xml.etree import ElementTree
from django.core.management.base import BaseCommand
from casexml.apps.case.mock import CaseBlock
from casexml.apps.case.models import CommCareCase
from dimagi.utils.chunked import chunked
from dimagi.utils.couch.database import iter_docs
from corehq.apps.domain.models import Domain
from corehq.apps.hqcase.utils import submit_case_blocks
def needs_update(case):
return (case.get('location_id', None) and
case['owner_id'] != case['location_id'])
def case_block(case):
return ElementTree.tostring(CaseBlock(
create=False,
case_id=case['_id'],
owner_id=case['location_id'],
).as_xml())
def get_cases(domain):
supply_point_ids = (case['id'] for case in CommCareCase.get_db().view(
'commtrack/supply_point_by_loc',
startkey=[domain],
endkey=[domain, {}],
reduce=False,
include_docs=False,
).all())
return iter_docs(CommCareCase.get_db(), supply_point_ids)
def update_supply_points(domain):
case_blocks = (case_block(c) for c in get_cases(domain) if needs_update(c))
if case_blocks:
for chunk in chunked(case_blocks, 100):
submit_case_blocks(chunk, domain)
print "updated {} cases on domain {}".format(len(chunk), domain)
class Command(BaseCommand):
help = ("Make sure all supply point cases have their owner_id set "
"to the location_id")
def handle(self, *args, **options):
all_domains = Domain.get_all_names()
total = len(all_domains)
finished = 0
for domain in all_domains:
update_supply_points(domain)
finished += 1
if finished % 100 == 0:
print "Processed {} of {} domains".format(finished, total)
|
Use CaseBlocks to update case owner_ids
|
Use CaseBlocks to update case owner_ids
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq
|
553731a0ea12a8303076dc3d83bfbba91e6bc3e8
|
scripts/merge_duplicate_users.py
|
scripts/merge_duplicate_users.py
|
from django.db.models.functions import Lower
from django.db.models import Count
from bluebottle.members.models import Member
from bluebottle.clients.models import Client
from bluebottle.clients.utils import LocalTenant
from bluebottle.activities.models import Activity, Contributor
from bluebottle.initiatives.models import Initiative
for client in Client.objects.all():
with LocalTenant(client):
duplicate = Member.objects.annotate(
lower=Lower('email')
).values('lower').annotate(count=Count('lower')).filter(count__gt=1)
for result in duplicate:
first = Member.objects.filter(email__iexact=result['lower']).order_by('date_joined').first()
for duplicate in Member.objects.filter(
email__iexact=result['lower']
).exclude(pk=first.pk).order_by('date_joined'):
for activity in Activity.objects.filter(owner=duplicate):
activity.owner = first
activity.execute_triggers(send_messages=False)
activity.save()
for contributor in Contributor.objects.filter(user=duplicate):
contributor.user = first
contributor.execute_triggers(send_messages=False)
contributor.save()
for initiative in Initiative.objects.filter(owner=duplicate):
initiative.owner = first
initiative.execute_triggers(send_messages=False)
initiative.save()
duplicate.anonymize()
|
from django.db.models.functions import Lower
from django.db.models import Count
from bluebottle.members.models import Member
from bluebottle.clients.models import Client
from bluebottle.clients.utils import LocalTenant
from bluebottle.activities.models import Activity, Contributor
from bluebottle.initiatives.models import Initiative
for client in Client.objects.all():
with LocalTenant(client):
duplicate = Member.objects.annotate(
lower=Lower('email')
).values('lower').annotate(count=Count('lower')).filter(count__gt=1)
for result in duplicate:
first = Member.objects.filter(email__iexact=result['lower']).order_by('date_joined').first()
for duplicate in Member.objects.filter(
email__iexact=result['lower']
).exclude(pk=first.pk).order_by('date_joined'):
for activity in Activity.objects.filter(owner=duplicate):
activity.owner = first
activity.execute_triggers(send_messages=False)
activity.save()
for contributor in Contributor.objects.filter(user=duplicate):
contributor.user = first
contributor.execute_triggers(send_messages=False)
contributor.save()
for initiative in Initiative.objects.filter(owner=duplicate):
initiative.owner = first
initiative.execute_triggers(send_messages=False)
initiative.save()
duplicate.anonymize()
duplicate.email = 'merged-{}@example.com'.format(first.pk)
duplicate.save()
|
Make sure we remember which user this one was merged into
|
Make sure we remember which user this one was merged into
|
Python
|
bsd-3-clause
|
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
|
2ca3f28b4423fc8ecd19591a039b7a5c814ab25b
|
webserver/codemanagement/validators.py
|
webserver/codemanagement/validators.py
|
from django.core.validators import RegexValidator
sha1_validator = RegexValidator(regex="^[a-f0-9]{40}$",
message="Must be valid sha1 sum")
tag_validator = RegexValidator(regex="^[A-Za-z][\w\-\.]+[A-Za-z]$",
message="Must be letters and numbers" +
" separated by dashes, dots, or underscores")
|
from django.core.validators import RegexValidator
from django.core.exceptions import ValidationError
from dulwich.repo import check_ref_format
import re
sha1_validator = RegexValidator(regex="^[a-f0-9]{40}$",
message="Must be valid sha1 sum")
tag_regex = re.compile(r'^[A-Za-z][\w\-\.]+[A-Za-z]$')
def tag_validator(value):
if not tag_regex.match(value):
msg = "Must be letters and numbers separated "
msg += "by dashes, dots, or underscores"
raise ValidationError(msg)
if not check_ref_format('refs/tags/' + value):
msg = "Invalid tag. Tags must adhere to ref formats defined here: "
msg += "https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html"
raise ValidationError(msg)
|
Make dulwich check the tag.
|
Make dulwich check the tag.
|
Python
|
bsd-3-clause
|
siggame/webserver,siggame/webserver,siggame/webserver
|
290bf5b5e577673a15e9a71033a5df2704ccff7a
|
opencademy/model/openacademy_session.py
|
opencademy/model/openacademy_session.py
|
# -*- coding: utf-8 -*-
from openerp import fields, models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor")
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
|
# -*- coding: utf-8 -*-
from openerp import fields, models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor",
domain=['|',
("instructor", "=", True),
("category_id.name", "ilike", "Teacher"),
])
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
    # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Add domain or and ilike
|
[REF] openacademy: Add domain or and ilike
|
Python
|
apache-2.0
|
LihanHA/opencademy-project
|
42b69fdb0d9267c339200185feddefb430aea6ae
|
geartracker/admin.py
|
geartracker/admin.py
|
from django.contrib import admin
from geartracker.models import *
class ItemAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("make", "model", "size")}
list_display = ('__unicode__', 'type', 'metric_weight', 'acquired')
list_filter = ('archived', 'category', 'type', 'make', 'tags')
search_fields = ('make', 'model')
filter_horizontal = ('related', 'tags')
admin.site.register(Item, ItemAdmin)
class CategoryAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("name",)}
list_display = ('__unicode__', 'number_items')
admin.site.register(Category, CategoryAdmin)
class TypeAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("name",)}
list_display = ('category', 'name', 'number_items')
list_filter = ('category',)
admin.site.register(Type, TypeAdmin)
class ListItemRelationshipInline(admin.TabularInline):
model = ListItem
extra = 1
class ListAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("name",)}
inlines = (ListItemRelationshipInline,)
list_display = ('name', 'total_metric_weight', 'start_date', 'end_date',
'public')
list_filter = ('public',)
admin.site.register(List, ListAdmin)
|
from django.contrib import admin
from geartracker.models import *
class ItemAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("make", "model", "size")}
list_display = ('__unicode__', 'type', 'metric_weight', 'acquired')
list_filter = ('archived', 'category', 'type', 'make')
search_fields = ('make', 'model')
filter_horizontal = ('related',)
admin.site.register(Item, ItemAdmin)
class CategoryAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("name",)}
list_display = ('__unicode__', 'number_items')
admin.site.register(Category, CategoryAdmin)
class TypeAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("name",)}
list_display = ('category', 'name', 'number_items')
list_filter = ('category',)
admin.site.register(Type, TypeAdmin)
class ListItemRelationshipInline(admin.TabularInline):
model = ListItem
extra = 1
class ListAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("name",)}
inlines = (ListItemRelationshipInline,)
list_display = ('name', 'total_metric_weight', 'start_date', 'end_date',
'public')
list_filter = ('public',)
admin.site.register(List, ListAdmin)
|
Remove tags from list_filter and filter_horizontal
|
Remove tags from list_filter and filter_horizontal
|
Python
|
bsd-3-clause
|
pigmonkey/django-geartracker
|
2a6399a74110b6a9e0d48349c68775986c13a579
|
pyservice/context.py
|
pyservice/context.py
|
"""
RequestContext stores state relevant to the current request, as well as
keeping track of the plugin execution order and providing a simple method
`advance` for calling the next plugin in the chain.
"""
import collections
class Container(collections.defaultdict):
DEFAULT_FACTORY = lambda: None
def __init__(self):
super().__init__(self, Container.DEFAULT_FACTORY)
def __getattr__(self, name):
return self[name]
def __setattr__(self, name, value):
self[name] = value
class Context(object):
def __init__(self, service, operation):
self.service = service
self.operation = operation
def execute(self):
self.service.continue_execution(self)
|
"""
RequestContext stores state relevant to the current request, as well as
keeping track of the plugin execution order and providing a simple method
`advance` for calling the next plugin in the chain.
"""
import ujson
import collections
class Container(collections.defaultdict):
DEFAULT_FACTORY = lambda: None
def __init__(self):
super().__init__(self, Container.DEFAULT_FACTORY)
def __getattr__(self, name):
return self[name]
def __setattr__(self, name, value):
self[name] = value
class Context(object):
def __init__(self, service, operation, processor):
self.service = service
self.operation = operation
self.processor = processor
def process_request(self):
self.processor.continue_execution()
class Processor(object):
def __init__(self, service, operation, request_body):
self.service = service
self.operation = operation
self.context = Context(service, operation, self)
self.request = Container()
self.request_body = request_body
self.response = Container()
self.response_body = None
self.plugins = service.get_plugins(operation)
self.index = -1
self.state = "request" # request -> operation -> function
def execute(self):
self.context.process_request()
def continue_execution(self):
self.index += 1
plugins = self.plugins[self.state]
n = len(plugins)
if self.index > n:
# Terminal point so that service.invoke
# can safely call context.process_request()
return
elif self.index == n:
if self.state == "request":
self.index = -1
self.state = "operation"
self._deserialize_request()
self.continue_execution()
self._serialize_response()
elif self.state == "operation":
self.service.invoke(self.operation, self.request,
self.response, self.context)
# index < n
else:
if self.state == "request":
plugins[self.index](self.context)
elif self.state == "operation":
plugins[self.index](self.request, self.response, self.context)
def _deserialize_request(self):
self.request.update(ujson.loads(self.request_body))
def _serialize_response(self):
self.response_body = ujson.dumps(self.response)
|
Create class for request process recursion
|
Create class for request process recursion
|
Python
|
mit
|
numberoverzero/pyservice
|
cd342448675f3174bf74118de0447c1b0f169f3e
|
python/volumeBars.py
|
python/volumeBars.py
|
#!/usr/bin/env python
from rgbmatrix import RGBMatrix
from random import randint
import numpy
import math
import time
rows = 16
chains = 1
parallel = 1
ledMatrix = RGBMatrix(rows, chains, parallel)
height = ledMatrix.height
width = ledMatrix.width
barWidth = width / 16
pi = numpy.pi
barHeights = numpy.empty([16])
for i in range(16):
barHeights[i] = i * pi / 16
while True:
nextFrame = ledMatrix.CreateFrameCanvas()
heights = numpy.sin(barHeights)
barHeights += pi / 16
for x in range(width):
barHeight = int(heights[int(x / barWidth)] * height)
for y in range(height):
if height - y <= barHeight:
if y < 2:
nextFrame.SetPixel(x, y, 255, 0, 0)
elif y < 6:
nextFrame.SetPixel(x, y, 200, 200, 0)
else:
nextFrame.SetPixel(x, y, 0, 200, 0)
ledMatrix.SwapOnVSync(nextFrame)
time.sleep(0.2)
|
#!/usr/bin/env python
from rgbmatrix import RGBMatrix
from random import randint
import numpy
import math
import time
rows = 16
chains = 1
parallel = 1
ledMatrix = RGBMatrix(rows, chains, parallel)
height = ledMatrix.height
width = ledMatrix.width
barWidth = width / 16
pi = numpy.pi
barHeights = numpy.empty([16])
for i in range(16):
barHeights[i] = i * pi / 16
while True:
nextFrame = ledMatrix.CreateFrameCanvas()
heights = numpy.empty([16])
for i in range(len(barHeights)):
heights[i] = (math.sin(randint(-3, 3) * x) + math.cos(randint(-3, 3) * x) + math.cos(randint(-3, 3) * x)) / 3
barHeights += pi / 16
for x in range(width):
barHeight = int(heights[int(x / barWidth)] * height)
for y in range(height):
if height - y <= barHeight:
if y < 2:
nextFrame.SetPixel(x, y, 255, 0, 0)
elif y < 6:
nextFrame.SetPixel(x, y, 200, 200, 0)
else:
nextFrame.SetPixel(x, y, 0, 200, 0)
ledMatrix.SwapOnVSync(nextFrame)
time.sleep(0.2)
|
Create a more random function
|
Create a more random function
|
Python
|
mit
|
DarkAce65/rpi-led-matrix,DarkAce65/rpi-led-matrix
|
6c6021cd1a206a91432da096400358e5eb0255fe
|
nasa_data.py
|
nasa_data.py
|
import requests
import os
def get_apod():
os.makedirs("APODs", exist_ok=True)
try:
apod_data = requests.get("https://api.nasa.gov/planetary/apod?api_key=DEMO_KEY").json()
image_url = apod_data["url"]
if image_url.endswith(".gif"):
raise TypeError
image_data = requests.get(image_url, stream=True)
except (requests.HTTPError or TypeError):
return
with open(os.path.join("APODs", os.path.basename(image_url)), "wb") as imagefile:
for chunk in image_data.iter_content(100000):
imagefile.write(chunk)
return os.path.abspath((os.path.join("APODs", os.path.basename(image_url))))
|
import requests
import os
def get_apod():
os.makedirs("APODs", exist_ok=True)
try:
# check if website is accessible
apod_data = requests.get("https://api.nasa.gov/planetary/apod?api_key=DEMO_KEY")
apod_data.raise_for_status()
apod_data = apod_data.json()
# check if image is accessible
image_url = apod_data["url"]
image_data = requests.get(image_url, stream=True)
image_data.raise_for_status()
except requests.HTTPError:
return
with open(os.path.join("APODs", os.path.basename(image_url)), "wb") as imagefile:
for chunk in image_data.iter_content(100000):
imagefile.write(chunk)
# Twitter limitation: .gif must be smaller than 3MB
if image_url.endswith(".gif") and os.path.getsize(os.path.join("APODs", os.path.basename(image_url))) >= 3145728:
return
else:
return os.path.abspath((os.path.join("APODs", os.path.basename(image_url))))
|
Update 0.7.0 - specified try-block to check the status - changed except block - allowed .gif format but only up to 3MB (Twitter limitation)
|
Update 0.7.0
- specified try-block to check the status
- changed except block
- allowed .gif format but only up to 3MB (Twitter limitation)
|
Python
|
mit
|
FXelix/space_facts_bot
|
b702569c800953eb3476c927fbc1085e67c88dbd
|
ghettoq/messaging.py
|
ghettoq/messaging.py
|
from Queue import Empty
from itertools import cycle
class Queue(object):
def __init__(self, backend, name):
self.name = name
self.backend = backend
def put(self, payload):
self.backend.put(self.name, payload)
def get(self):
payload = self.backend.get(self.name)
if payload is not None:
return payload
raise Empty
class QueueSet(object):
def __init__(self, backend, queues):
self.backend = backend
self.queues = map(self.backend.Queue, queues)
self.cycle = cycle(self.queues)
def get(self):
while True:
try:
return self.cycle.next().get()
except QueueEmpty:
pass
|
from Queue import Empty
from itertools import cycle
class Queue(object):
def __init__(self, backend, name):
self.name = name
self.backend = backend
def put(self, payload):
self.backend.put(self.name, payload)
def get(self):
payload = self.backend.get(self.name)
if payload is not None:
return payload
raise Empty
class QueueSet(object):
def __init__(self, backend, queues):
self.backend = backend
self.queue_names = queues
self.queues = map(self.backend.Queue, self.queue_names)
self.cycle = cycle(self.queues)
self.all = frozenset(self.queue_names)
def get(self):
tried = set()
while True:
queue = self.cycle.next()
try:
return queue.get()
except QueueEmpty:
tried.add(queue)
if tried == self.all:
raise
|
Raise QueueEmpty when all queues has been tried.
|
QueueSet: Raise QueueEmpty when all queues has been tried.
|
Python
|
bsd-3-clause
|
ask/ghettoq
|
e4275c4f1a408dd9f8095bef4ed650ccc54401e9
|
packages/mono-llvm-2-10.py
|
packages/mono-llvm-2-10.py
|
GitHubTarballPackage ('mono', 'llvm', '2.10', '943edbc1a93df204d687d82d34d2b2bdf9978f4e',
configure = 'CFLAGS="-m32" CPPFLAGS="-m32" CXXFLAGS="-m32" LDFLAGS="-m32" ./configure --prefix="%{prefix}" --enable-optimized --enable-targets="x86 x86_64" --target=i386-apple-darwin10.8.0',
override_properties = { 'make': 'make' }
)
|
GitHubTarballPackage ('mono', 'llvm', '2.10', '943edbc1a93df204d687d82d34d2b2bdf9978f4e',
configure = './configure --prefix="%{prefix}" --enable-optimized --enable-targets="x86 x86_64" --target=i386-apple-darwin10.8.0',
override_properties = { 'make': 'make' }
)
|
Fix llvm so it doesn't corrupt the env when configuring itself
|
Fix llvm so it doesn't corrupt the env when configuring itself
|
Python
|
mit
|
BansheeMediaPlayer/bockbuild,mono/bockbuild,mono/bockbuild,BansheeMediaPlayer/bockbuild,BansheeMediaPlayer/bockbuild
|
979aada6964a5c8ef1f5c787ce84d72420626901
|
migrations/versions/36cbde703cc0_add_build_priority.py
|
migrations/versions/36cbde703cc0_add_build_priority.py
|
"""Add Build.priority
Revision ID: 36cbde703cc0
Revises: fe743605e1a
Create Date: 2014-10-06 10:10:14.729720
"""
# revision identifiers, used by Alembic.
revision = '36cbde703cc0'
down_revision = 'fe743605e1a'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('build', sa.Column('priority', sa.Enum(), server_default='0', nullable=False))
def downgrade():
op.drop_column('build', 'priority')
|
"""Add Build.priority
Revision ID: 36cbde703cc0
Revises: fe743605e1a
Create Date: 2014-10-06 10:10:14.729720
"""
# revision identifiers, used by Alembic.
revision = '36cbde703cc0'
down_revision = '2c6662281b66'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('build', sa.Column('priority', sa.Enum(), server_default='0', nullable=False))
def downgrade():
op.drop_column('build', 'priority')
|
Update build priority down revision
|
Update build priority down revision
2c6662281b66
|
Python
|
apache-2.0
|
dropbox/changes,dropbox/changes,wfxiang08/changes,wfxiang08/changes,bowlofstew/changes,bowlofstew/changes,wfxiang08/changes,dropbox/changes,wfxiang08/changes,dropbox/changes,bowlofstew/changes,bowlofstew/changes
|
c0e00f3caf12ad95bc753e65fc3721623c552aa0
|
diceware.py
|
diceware.py
|
from random import randint
def sysrand(sides=6, rolls=5):
return ''.join(map(str, [randint(1, sides) for i in range(rolls)]))
def randorg(sides=6, rolls=5):
raise NotImplemented
def generate(suggestions=1, words=6, apikey=''):
with open('diceware.wordlist.asc.txt', 'r') as f:
wordlist = dict([map(str.strip, line.split()) for line in f if line.strip() != ''])
for i in range(suggestions):
password = []
getkey = randorg if apikey else sysrand
while len(password) < words:
key = None
while key not in wordlist:
key = getkey()
password.append(wordlist[key])
yield ' '.join(password)
|
from __future__ import print_function
from httplib import HTTPSConnection
from random import randint
from uuid import uuid4
import json, sys
def sysrand(suggestions, words, rolls=5, sides=6, **kwargs):
print('sysrand', file=sys.stderr)
for i in range(suggestions):
yield [''.join(map(str, [randint(1, sides) for i in range(rolls)])) for j in range(words)]
def randorg(suggestions, words, rolls=5, sides=6, apiKey=''):
conn = HTTPSConnection('api.random.org')
body = json.dumps({
'jsonrpc': '2.0',
'id': str(uuid4()),
'method': 'generateIntegers',
'params': {
'apiKey': apiKey,
'n' : rolls * words * suggestions,
'min': 1,
'max': sides
}
})
headers = {
'Content-Type': 'raw'
}
conn.request('POST', '/json-rpc/1/invoke', body, headers)
resp = conn.getresponse()
data = json.loads(resp.read())
conn.close()
digits = map(str, data['result']['random']['data'])
for i in range(suggestions):
start = i * words * rolls
yield [''.join(digits[start + (j * rolls):start + ((j + 1) * rolls)]) for j in range(words)]
def generate(suggestions=1, words=6, apikey=''):
with open('diceware.wordlist.asc.txt', 'r') as f:
wordlist = dict([map(str.strip, line.split()) for line in f if line.strip() != ''])
getkey = randorg if apikey else sysrand
for keys in getkey(suggestions, words, apiKey=apikey):
yield ' '.join([wordlist[k] for k in keys])
|
Add random.org support for generating keys
|
Add random.org support for generating keys
* Replace tabs with spaces (d'oh!)
* Implement random key functions as generators so that we can retrieve
numbers necessary for all suggestions in one request from random.org
without burning through requests
* Send a request to the random.org API and parse the response into an
array of keys
|
Python
|
mit
|
darthmall/Alfred-Diceware-Workflow
|
62d5c5b2bf33a228938924a44e229f2f2cb4e02c
|
registrasion/urls.py
|
registrasion/urls.py
|
from django.conf.urls import url, include, patterns
urlpatterns = patterns(
"registrasion.views",
url(r"^category/([0-9]+)$", "product_category", name="product_category"),
url(r"^checkout$", "checkout", name="checkout"),
url(r"^invoice/([0-9]+)$", "invoice", name="invoice"),
url(r"^invoice/([0-9]+)/pay$", "pay_invoice", name="pay_invoice"),
url(r"^profile$", "edit_profile", name="profile"),
url(r"^register$", "guided_registration", name="guided_registration"),
url(r"^register/([0-9]+)$", "guided_registration",
name="guided_registration"),
# Required by django-nested-admin.
url(r'^nested_admin/', include('nested_admin.urls')),
)
|
from django.conf.urls import url, patterns
urlpatterns = patterns(
"registrasion.views",
url(r"^category/([0-9]+)$", "product_category", name="product_category"),
url(r"^checkout$", "checkout", name="checkout"),
url(r"^invoice/([0-9]+)$", "invoice", name="invoice"),
url(r"^invoice/([0-9]+)/pay$", "pay_invoice", name="pay_invoice"),
url(r"^profile$", "edit_profile", name="profile"),
url(r"^register$", "guided_registration", name="guided_registration"),
url(r"^register/([0-9]+)$", "guided_registration",
name="guided_registration"),
)
|
Revert "Registrasion URLs now include django-nested-admin"
|
Revert "Registrasion URLs now include django-nested-admin"
This reverts commit 58eed33c429c1035801e840b41aa7104c02b9b5a.
|
Python
|
apache-2.0
|
chrisjrn/registrasion,chrisjrn/registrasion
|
b089522f108c9071013e0cc00813e29bc415595c
|
logbot/irc_client.py
|
logbot/irc_client.py
|
import irc.client
import sys
import os
class IrcClient(object):
def __init__(self, server, port, channel, bot_name):
self.server = server
self.port = port
self.channel = channel
self.bot_name = bot_name
def start(self):
self._client = irc.client.IRC()
self._client_connection = self._client.server().connect(self.server, self.port, self.bot_name)
self._add_handlers()
def _add_handlers(self):
self._client_connection.add_global_handler('pubmsg', self.logger)
self._client_connection.add_global_handler('welcome', self.joinner)
def joinner(self, connection, event):
connection.join(self.channel)
def logger(self, connection, event):
sys.stdout.write(event.arguments[0])
sys.stdout.flush()
def graceful_stop(self, signum, frame):
self._client.disconnect_all("{0} is going home now.".format(self.bot_name))
os._exit(0)
def process_forever(self):
self._client.process_forever()
|
import irc.client
import sys
import os
class IrcClient(object):
def __init__(self, server, port, channel, bot_name):
self.server = server
self.port = port
self.channel = channel
self.bot_name = bot_name
def start(self):
self._client = irc.client.IRC()
self._client_connection = self._client.server().connect(self.server, self.port, self.bot_name)
self._add_handlers()
def _add_handlers(self):
self._client_connection.add_global_handler('pubmsg', self.logger)
self._client_connection.add_global_handler('welcome', self.joinner)
def joinner(self, connection, event):
connection.join(self.channel)
def logger(self, connection, event):
sys.stdout.write("{0}: {1}\n".format(event.source.nick, event.arguments[0]))
sys.stdout.flush()
def graceful_stop(self, signum, frame):
self._client.disconnect_all("{0} is going home now.\n".format(self.bot_name))
os._exit(0)
def process_forever(self):
self._client.process_forever()
|
Add nick to the log
|
Add nick to the log
|
Python
|
mit
|
mlopes/LogBot
|
ced2be321f347f3e28e79e5cfac4e4a83f6b6819
|
fireplace/cards/blackrock/collectible.py
|
fireplace/cards/blackrock/collectible.py
|
from ..utils import *
##
# Minions
# Flamewaker
class BRM_002:
events = [
OWN_SPELL_PLAY.after(Hit(RANDOM_ENEMY_MINION, 1) * 2)
]
##
# Spells
# Solemn Vigil
class BRM_001:
action = [Draw(CONTROLLER) * 2]
def cost(self, value):
return value - self.game.minionsKilledThisTurn
# Dragon's Breath
class BRM_003:
action = [Hit(TARGET, 4)]
def cost(self, value):
return value - self.game.minionsKilledThisTurn
|
from ..utils import *
##
# Minions
# Flamewaker
class BRM_002:
events = [
OWN_SPELL_PLAY.after(Hit(RANDOM_ENEMY_MINION, 1) * 2)
]
# Imp Gang Boss
class BRM_006:
events = [
Damage(SELF).on(Summon(CONTROLLER, "BRM_006t"))
]
##
# Spells
# Solemn Vigil
class BRM_001:
action = [Draw(CONTROLLER) * 2]
def cost(self, value):
return value - self.game.minionsKilledThisTurn
# Dragon's Breath
class BRM_003:
action = [Hit(TARGET, 4)]
def cost(self, value):
return value - self.game.minionsKilledThisTurn
# Demonwrath
class BRM_005:
action = [Hit(ALL_MINIONS - DEMON, 2)]
|
Implement Demonwrath and Imp Gang Boss
|
Implement Demonwrath and Imp Gang Boss
|
Python
|
agpl-3.0
|
smallnamespace/fireplace,Meerkov/fireplace,smallnamespace/fireplace,butozerca/fireplace,NightKev/fireplace,butozerca/fireplace,Ragowit/fireplace,amw2104/fireplace,Meerkov/fireplace,beheh/fireplace,liujimj/fireplace,liujimj/fireplace,oftc-ftw/fireplace,amw2104/fireplace,Ragowit/fireplace,jleclanche/fireplace,oftc-ftw/fireplace
|
2959fa0a9f69cbfb7611bbc12488089921d26ab8
|
IPython/frontend/html/notebook/__init__.py
|
IPython/frontend/html/notebook/__init__.py
|
"""The IPython HTML Notebook"""
# check for tornado 2.1.0
msg = "The IPython Notebook requires tornado >= 2.1.0"
try:
import tornado
except ImportError:
raise ImportError(msg)
else:
if tornado.version_info < (2,1,0):
raise ImportError(msg+", but you have %s"%tornado.version)
del msg
|
"""The IPython HTML Notebook"""
# check for tornado 2.1.0
msg = "The IPython Notebook requires tornado >= 2.1.0"
try:
import tornado
except ImportError:
raise ImportError(msg)
try:
version_info = tornado.version_info
except AttributeError:
raise ImportError(msg + ", but you have < 1.1.0")
if version_info < (2,1,0):
raise ImportError(msg + ", but you have %s" % tornado.version)
del msg
|
Fix for tornado check for tornado < 1.1.0
|
Fix for tornado check for tornado < 1.1.0
Tornado < 1.1.0 does not have the ``version_info`` variable to check.
Debian squeeze has tornado 1.0.1.
|
Python
|
bsd-3-clause
|
ipython/ipython,ipython/ipython
|
fb256b042a485aefa2d9e45b39daa551a3f779ff
|
examples/open_file_dialog.py
|
examples/open_file_dialog.py
|
import webview
import threading
"""
This example demonstrates creating an open file dialog.
"""
def open_file_dialog():
import time
time.sleep(5)
print(webview.create_file_dialog(webview.OPEN_DIALOG, allow_multiple=True))
if __name__ == '__main__':
t = threading.Thread(target=open_file_dialog)
t.start()
webview.create_window("Open file dialog example", "http://www.flowrl.com")
|
import webview
import threading
"""
This example demonstrates creating an open file dialog.
"""
def open_file_dialog():
import time
time.sleep(5)
file_types = ('Image Files (*.bmp;*.jpg;*.gif)', 'All files (*.*)')
print(webview.create_file_dialog(webview.OPEN_DIALOG, allow_multiple=True, file_types=file_types))
if __name__ == '__main__':
t = threading.Thread(target=open_file_dialog)
t.start()
webview.create_window("Open file dialog example", "http://www.flowrl.com")
|
Modify example to include file_types param
|
[All] Modify example to include file_types param
|
Python
|
bsd-3-clause
|
r0x0r/pywebview,r0x0r/pywebview,shivaprsdv/pywebview,r0x0r/pywebview,shivaprsdv/pywebview,shivaprsdv/pywebview,shivaprsdv/pywebview,r0x0r/pywebview,r0x0r/pywebview
|
d1e5f55681eda2b2b358013ad5dca3a58619c914
|
pycom/objects.py
|
pycom/objects.py
|
# encoding: utf-8
### Attribute Wrapper
class AttrWrapper(object):
attrs = []
def __setattr__(self, name, value):
if name not in self.attrs:
raise AttributeError("'%s' is not supported" % name)
object.__setattr__(self, name, value)
def __repr__(self):
attrs = []
template = "%s=%s"
for name in self.attrs:
try:
attrs.append(template % (name, getattr(self, name)))
except AttributeError:
pass
return "%s(%s)" % (self.__class__.__name__, ", ".join(attrs))
|
# encoding: utf-8
### Attribute Wrapper
class AttrWrapper(object):
    """Object whose attribute assignment is restricted to a whitelist.

    Subclasses declare the permitted attribute names in ``attrs``; setting
    any other attribute raises AttributeError.
    """

    # Whitelist of assignable attribute names; subclasses override this.
    attrs = []

    def __setattr__(self, name, value):
        # Reject anything not explicitly whitelisted.
        if name not in self.attrs:
            raise AttributeError("'%s' is not supported" % name)
        object.__setattr__(self, name, value)

    def __repr__(self):
        # Render only the whitelisted attributes that have been set so far.
        shown = []
        for name in self.attrs:
            try:
                shown.append("%s=%s" % (name, getattr(self, name)))
            except AttributeError:
                continue
        return "%s(%s)" % (self.__class__.__name__, ", ".join(shown))
def val(obj, name, default=None):
    """Look up *name* on *obj*: attribute access first, then item access.

    Works for plain objects (attribute), dicts (key), and lists/tuples
    (integer index).

    :param obj: object, mapping, or sequence to read from.
    :param name: attribute name, mapping key, or integer index.
    :param default: value returned when the lookup fails.
    """
    # Attribute lookup only makes sense for string names; hasattr() with a
    # non-string name raises TypeError instead of returning False.
    if isinstance(name, str) and hasattr(obj, name):
        # Bug fix: the original returned obj.name — the literal attribute
        # called "name" — instead of the attribute being asked for.
        return getattr(obj, name)
    try:
        # Covers dict key lookup and list/tuple indexing; any failure
        # (KeyError, IndexError, TypeError) falls back to the default.
        return obj[name]
    except Exception:
        return default
|
Add the function to get the value of object, dict, list, or tuple.
|
Add the function to get the value of object, dict, list, or tuple.
|
Python
|
mit
|
xgfone/pycom,xgfone/xutils
|
482b8f7738da51c394969e526b37093d3c52d663
|
pyconkr/tests.py
|
pyconkr/tests.py
|
# -*- coding: utf-8 -*-
from django.test import TestCase
from django.http import HttpResponse
from django.test import Client
from django.core.urlresolvers import reverse_lazy, reverse
from django.contrib.auth import get_user_model
from pyconkr.helper import render_io_error
User = get_user_model()
class HelperFunctionTestCase(TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_render_io_error(self):
a = render_io_error("test reason")
self.assertEqual(a.status_code, 406, "render io error status code must be 406")
class PaymentTestCase(TestCase):
def setUp(self):
self.client = Client()
self.user = User.objects.create_user('testname', 'test@test.com', 'testpassword')
self.client.login(username='testname', password='testpassword')
def tearDown(self):
pass
def test_view_registration_payment(self):
url = reverse('registration_payment')
response = self.client.post(url, {'test': 1})
self.assertEqual(response['content-type'], 'application/javascript', 'error raise and must be ajax' )
print response.content
|
# -*- coding: utf-8 -*-
from django.test import TestCase
from django.http import HttpResponse
from django.test import Client
from django.core.urlresolvers import reverse_lazy, reverse
from django.contrib.auth import get_user_model
from pyconkr.helper import render_io_error
User = get_user_model()
class HelperFunctionTestCase(TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_render_io_error(self):
a = render_io_error("test reason")
self.assertEqual(a.status_code, 406, "render io error status code must be 406")
class PaymentTestCase(TestCase):
def setUp(self):
self.client = Client()
self.user = User.objects.create_user('testname', 'test@test.com', 'testpassword')
self.client.login(username='testname', password='testpassword')
def tearDown(self):
pass
def test_view_registration_payment(self):
url = reverse('registration_payment')
response = self.client.post(url, {'test': 1})
self.assertEqual(response['content-type'], 'application/json', 'Result has to be JSON')
class ProfileTest(TestCase):
def test_profile_is_created_when_user_save(self):
user = User.objects.create_user('test', 'test@email.com', 'password')
self.assertNotEqual(user.profile, None)
|
Add profile model signal test case
|
Add profile model signal test case
|
Python
|
mit
|
pythonkr/pyconapac-2016,pythonkr/pyconapac-2016,pythonkr/pyconapac-2016
|
ab99a515995e121944e0e7b355e8980984a2fd98
|
util.py
|
util.py
|
__author__ = 'zifnab'
import string
from passlib.hash import sha512_crypt
import database
from flask_login import login_user
def random_string(size=10, chars=string.ascii_letters + string.digits):
import random
return ''.join(random.choice(chars) for x in range(size))
def create_user(**kwargs):
username = kwargs.get('username')
password = kwargs.get('password')
email = kwargs.get('email')
hash = sha512_crypt.encrypt(password)
user = database.User(username=username,
hash=hash,
email=email)
if database.User.objects().count() == 0:
user.admin = True
user.save()
login_user(user)
def authenticate_user(username, password):
user = database.User.objects(username__iexact=username).first()
if user is None:
return None
if (sha512_crypt.verify(password, user.hash)):
return user
else:
return None
def lookup_user(username):
user = database.User.objects(username__iexact=username).first()
return user
|
__author__ = 'zifnab'
import string
from passlib.hash import sha512_crypt
from random import SystemRandom
import database
from flask_login import login_user
_random = SystemRandom()
def random_string(size=10, chars=string.ascii_letters + string.digits):
    """Return a random string of *size* characters drawn from *chars*,
    using a cryptographically secure source of randomness."""
    rng = SystemRandom()
    return ''.join(rng.choice(chars) for _ in range(size))
def create_user(**kwargs):
username = kwargs.get('username')
password = kwargs.get('password')
email = kwargs.get('email')
hash = sha512_crypt.encrypt(password)
user = database.User(username=username,
hash=hash,
email=email)
if database.User.objects().count() == 0:
user.admin = True
user.save()
login_user(user)
def authenticate_user(username, password):
user = database.User.objects(username__iexact=username).first()
if user is None:
return None
if (sha512_crypt.verify(password, user.hash)):
return user
else:
return None
def lookup_user(username):
user = database.User.objects(username__iexact=username).first()
return user
|
Use a cryptographically secure PRNG in random_string().
|
Use a cryptographically secure PRNG in random_string().
By default python uses a non-CS PRNG, so with some analysis, "random_string"s could be predicted.
|
Python
|
mit
|
zifnab06/zifb.in,zifnab06/zifb.in
|
2aa45922f7d018398e028c2aed964cf2ec00038a
|
bika/lims/browser/widgets/recordswidget.py
|
bika/lims/browser/widgets/recordswidget.py
|
from AccessControl import ClassSecurityInfo
from Products.ATExtensions.widget import RecordsWidget as ATRecordsWidget
from Products.Archetypes.Registry import registerWidget
class RecordsWidget(ATRecordsWidget):
security = ClassSecurityInfo()
_properties = ATRecordsWidget._properties.copy()
_properties.update({
'macro': "bika_widgets/recordswidget",
'helper_js': ("bika_widgets/recordswidget.js",),
'helper_css': ("bika_widgets/recordswidget.css",),
'allowDelete': True,
})
registerWidget(RecordsWidget,
title = 'RecordsWidget',
description = (''),
)
|
from AccessControl import ClassSecurityInfo
from Products.ATExtensions.widget import RecordsWidget as ATRecordsWidget
from Products.Archetypes.Registry import registerWidget
class RecordsWidget(ATRecordsWidget):
    """Bika records widget that, unlike the stock AT widget, allows empty
    values to be saved from the form."""

    security = ClassSecurityInfo()
    _properties = ATRecordsWidget._properties.copy()
    _properties.update({
        'macro': "bika_widgets/recordswidget",
        'helper_js': ("bika_widgets/recordswidget.js",),
        'helper_css': ("bika_widgets/recordswidget.css",),
        'allowDelete': True,
    })

    def process_form(self, instance, field, form, empty_marker=None,
                     emptyReturnsMarker=False):
        """
        Basic impl for form processing in a widget plus allowing empty
        values to be saved
        """
        # Bug fix: removed leftover debug statement `print value` (Python-2
        # only syntax; a syntax error under Python 3 and noise on stdout).
        value = form.get(field.getName(), empty_marker)
        # NOTE(review): `not value` is tested before the marker comparisons,
        # so with a falsy empty_marker a missing field returns (marker, {})
        # and the `value is empty_marker` branch is unreachable — confirm
        # this ordering is intentional before changing it.
        if not value:
            return value, {}
        if value is empty_marker:
            return empty_marker
        if emptyReturnsMarker and value == '':
            return empty_marker
        return value, {}
registerWidget(RecordsWidget,
title = 'RecordsWidget',
description = (''),
)
|
Allow empty values in Records Widget
|
Allow empty values in Records Widget
|
Python
|
agpl-3.0
|
anneline/Bika-LIMS,veroc/Bika-LIMS,veroc/Bika-LIMS,veroc/Bika-LIMS,DeBortoliWines/Bika-LIMS,labsanmartin/Bika-LIMS,rockfruit/bika.lims,labsanmartin/Bika-LIMS,anneline/Bika-LIMS,rockfruit/bika.lims,DeBortoliWines/Bika-LIMS,anneline/Bika-LIMS,DeBortoliWines/Bika-LIMS,labsanmartin/Bika-LIMS
|
c088a28c9f7020cb64c25eb0e83dfdcd286015d3
|
app/assets.py
|
app/assets.py
|
from flask.ext.assets import Bundle
app_css = Bundle(
'app.scss',
'map.scss',
filters='scss',
output='styles/app.css'
)
app_js = Bundle(
'app.js',
'descriptor.js',
'map.js',
'resources.js',
filters='jsmin',
output='scripts/app.js'
)
vendor_css = Bundle(
'vendor/semantic.min.css',
output='styles/vendor.css'
)
vendor_js = Bundle(
'vendor/jquery.min.js',
'vendor/async.js',
'vendor/address-autocomplete.js',
'vendor/papaparse.min.js',
'vendor/semantic.min.js',
'vendor/tablesort.min.js',
filters='jsmin',
output='scripts/vendor.js'
)
|
from flask.ext.assets import Bundle
app_css = Bundle(
'*.scss',
filters='scss',
output='styles/app.css'
)
app_js = Bundle(
'app.js',
'descriptor.js',
'map.js',
'resources.js',
filters='jsmin',
output='scripts/app.js'
)
vendor_css = Bundle(
'vendor/semantic.min.css',
output='styles/vendor.css'
)
vendor_js = Bundle(
'vendor/jquery.min.js',
'vendor/async.js',
'vendor/address-autocomplete.js',
'vendor/papaparse.min.js',
'vendor/semantic.min.js',
'vendor/tablesort.min.js',
filters='jsmin',
output='scripts/vendor.js'
)
|
Generalize scss bundle to track all scss files
|
Generalize scss bundle to track all scss files
|
Python
|
mit
|
hack4impact/maps4all,hack4impact/asylum-connect-catalog,hack4impact/maps4all-jlc-sp2,hack4impact/maps4all,hack4impact/asylum-connect-catalog,hack4impact/asylum-connect-catalog,AsylumConnect/asylum-connect-catalog,hack4impact/maps4all,hack4impact/maps4all-jlc-sp2,hack4impact/maps4all-jlc-sp2,hack4impact/asylum-connect-catalog,hack4impact/maps4all-jlc-sp2,AsylumConnect/asylum-connect-catalog,hack4impact/maps4all,AsylumConnect/asylum-connect-catalog,AsylumConnect/asylum-connect-catalog
|
ddbc9624aacf9e15897bdfb46fc2016888db114b
|
git/pmstats2/get-pm-stats.py
|
git/pmstats2/get-pm-stats.py
|
#!/usr/bin/env python
# get-pmstats.py
# Henry J Schmale
# November 25, 2017
#
# Calculates the additions and deletions per day within a git repository
# by parsing out the git log. It opens the log itself.
# Produces output as a CSV
import subprocess
from datetime import datetime
changes_by_date = {}
git_log = subprocess.Popen(
'git log --numstat --pretty="%at"',
stdout=subprocess.PIPE,
shell=True)
date = None
day_changes = [0, 0]
for line in git_log.stdout:
args = line.rstrip().split()
if len(args) == 1:
old_date = date
date = datetime.fromtimestamp(int(args[0]))
if day_changes != [0, 0] and date.date() != old_date.date():
changes_by_date[str(date.date())] = day_changes
day_changes = [0, 0]
elif len(args) >= 3:
day_changes = [sum(x) for x in zip(day_changes, map(int, args[0:2]))]
print('date,ins,del')
for key,vals in changes_by_date.items():
print(','.join(map(str, [key, vals[0], vals[1]])))
|
#!/usr/bin/env python
# get-pmstats.py
# Henry J Schmale
# November 25, 2017
#
# Calculates the additions and deletions per day within a git repository
# by parsing out the git log. It opens the log itself.
# Produces output as a CSV
import subprocess
from datetime import datetime
def chomp_int(val):
    """Convert *val* to an int, mapping unparseable values to 0 (git's
    --numstat output uses '-' for binary files)."""
    try:
        parsed = int(val)
    except ValueError:
        parsed = 0
    return parsed
changes_by_date = {}
git_log = subprocess.Popen(
'git log --numstat --pretty="%at"',
stdout=subprocess.PIPE,
shell=True)
date = None
day_changes = [0, 0]
for line in git_log.stdout:
args = line.rstrip().split()
if len(args) == 1:
old_date = date
date = datetime.fromtimestamp(int(args[0]))
if day_changes != [0, 0] and date.date() != old_date.date():
changes_by_date[str(date.date())] = day_changes
day_changes = [0, 0]
elif len(args) >= 3:
day_changes = [sum(x) for x in zip(day_changes, map(chomp_int, args[0:2]))]
print('date,ins,del')
for key,vals in changes_by_date.items():
print(','.join(map(str, [key, vals[0], vals[1]])))
|
Fix script for repos with binaries
|
Fix script for repos with binaries
|
Python
|
mit
|
HSchmale16/UsefulScripts,HSchmale16/UsefulScripts,HSchmale16/UsefulScripts,HSchmale16/UsefulScripts,HSchmale16/UsefulScripts
|
c1a2a1052d215f9971c7bb1e580fd88ab0b395f8
|
background_hang_reporter_job/tracked.py
|
background_hang_reporter_job/tracked.py
|
class AllHangs(object):
title = "All Hangs"
@staticmethod
def matches_hang(_):
return True
class DevtoolsHangs(object):
title = "Devtools Hangs"
@staticmethod
def matches_hang(hang):
#pylint: disable=unused-variable
stack, duration, thread, runnable, process, annotations, build_date, platform = hang
return stack is not None and any(frame is not None and "devtools/" in frame
for lib, frame in stack)
def get_tracked_stats():
return [AllHangs, DevtoolsHangs]
|
class AllHangs(object):
title = "All Hangs"
@staticmethod
def matches_hang(_):
return True
class DevtoolsHangs(object):
    """Tracker that matches hangs whose stack mentions devtools code."""
    title = "Devtools Hangs"

    @staticmethod
    def matches_hang(hang):
        #pylint: disable=unused-variable
        # Hang records are 8-tuples; only `stack` is used here.
        stack, duration, thread, runnable, process, annotations, build_date, platform = hang
        # Match when any frame is a string containing a "devtools/" path.
        # `basestring` is Python 2 only; the isinstance check also filters
        # out non-string frames (presumably None or native addresses —
        # TODO confirm what non-string frames look like upstream).
        return stack is not None and any(isinstance(frame, basestring) and "devtools/" in frame
                                         for lib, frame in stack)
def get_tracked_stats():
return [AllHangs, DevtoolsHangs]
|
Fix frame string check in devtools tracking
|
Fix frame string check in devtools tracking
|
Python
|
mit
|
squarewave/background-hang-reporter-job,squarewave/background-hang-reporter-job
|
a8596fd4a76460bd3e15509825d3cb3f82a3f8c4
|
test/integration/ggrc/converters/test_import_delete.py
|
test/integration/ggrc/converters/test_import_delete.py
|
# Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
from ggrc.converters import errors
from integration.ggrc import TestCase
class TestBasicCsvImport(TestCase):
def setUp(self):
TestCase.setUp(self)
self.client.get("/login")
def test_policy_basic_import(self):
filename = "ca_setup_for_deletion.csv"
self.import_file(filename)
filename = "ca_deletion.csv"
response_data_dry = self.import_file(filename, dry_run=True)
response_data = self.import_file(filename)
self.assertEqual(response_data_dry, response_data)
self.assertEqual(response_data[0]["deleted"], 2)
self.assertEqual(response_data[0]["ignored"], 0)
|
# Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
from integration.ggrc import TestCase
class TestBasicCsvImport(TestCase):
    """CSV import flow: custom attributes created by one file are then
    removed by a follow-up deletion file."""

    def setUp(self):
        TestCase.setUp(self)
        # Establish a logged-in session; the import endpoints require auth.
        self.client.get("/login")

    def test_policy_basic_import(self):
        # Seed: create the custom attributes that the next file deletes.
        filename = "ca_setup_for_deletion.csv"
        self.import_file(filename)
        filename = "ca_deletion.csv"
        # import_file returns a list of per-block result dicts; block 0
        # should report exactly 2 deletions and nothing ignored.
        response_data = self.import_file(filename)
        self.assertEqual(response_data[0]["deleted"], 2)
        self.assertEqual(response_data[0]["ignored"], 0)
|
Optimize basic delete import tests
|
Optimize basic delete import tests
The dry-run check is now automatically performed on each import and we
do not need to duplicate the work in the delete test.
|
Python
|
apache-2.0
|
selahssea/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core,selahssea/ggrc-core,VinnieJohns/ggrc-core,plamut/ggrc-core,plamut/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core
|
9d10d279a1f7de2a5572229d68a7065fb9353ab9
|
linkedin_scraper/parsers/employment.py
|
linkedin_scraper/parsers/employment.py
|
from typing import Tuple
from linkedin_scraper.parsers.base import BaseParser
class EmploymentParser(BaseParser):
def __init__(self):
self.professions_list = self.get_lines_from_datafile(
'professions_list.txt')
def parse(self, item: str) -> Tuple[str, str]:
"""
Parse LinkedIn employment string into position and company.
:param item: employment string
:return: position, company
"""
if ' at ' in item:
# Simplest case, standard LinkedIn format <position> at <company>
return tuple(item.split(' at ', maxsplit=1))
words = item.split()
for index, word in enumerate(reversed(item.split())):
normalized_word = word.strip(',.-').lower()
if normalized_word in self.professions_list:
founded_profession_index = len(words) - index
break
else:
# We don't know which is which so return whole string as a position
return item, ''
# We found profession name in employment string, everything
# after it is company name
return (' '.join(words[:founded_profession_index]).rstrip(',.- '),
' '.join(words[founded_profession_index:]).lstrip(',.- '))
|
from typing import Tuple
from linkedin_scraper.parsers.base import BaseParser
class EmploymentParser(BaseParser):
    """Split a LinkedIn employment string into (position, company)."""

    def __init__(self):
        # Known profession names, one per line, loaded via the BaseParser
        # helper — presumably lowercase entries; verify against the data file.
        self.professions_list = self.get_lines_from_datafile(
            'professions_list.txt')

    def parse(self, item: str) -> Tuple[str, str]:
        """
        Parse LinkedIn employment string into position and company.
        :param item: employment string
        :return: position, company
        """
        if ' at ' in item:
            # Simplest case, standard LinkedIn format <position> at <company>
            return tuple(item.split(' at ', maxsplit=1))
        words = item.split()
        # Scan from the end for the last word that is a known profession.
        for index, word in enumerate(reversed(words)):
            normalized_word = word.strip(',.-').lower()
            if normalized_word in self.professions_list:
                founded_profession_index = len(words) - index
                break
        else:
            # We don't know which is which so return whole string as a position
            return item, ''
        # We found profession name in employment string, everything
        # after it is company name
        return (' '.join(words[:founded_profession_index]).rstrip(',.- '),
                ' '.join(words[founded_profession_index:]).lstrip(',.- '))
|
Remove duplicated split call from EmploymentParser.
|
Remove duplicated split call from EmploymentParser.
|
Python
|
mit
|
nihn/linkedin-scraper,nihn/linkedin-scraper
|
650a4733aa6e15b80e2adeec34fc479a3b2885e3
|
src/cmdline/config.py
|
src/cmdline/config.py
|
import os
import sys
try:
import pkg_resources
d = pkg_resources.get_distribution('metermaid')
pkg_locations = (
os.path.join(d.location, 'config'),
os.path.join(os.path.dirname(d.location), 'config'),
)
except ImportError:
pkg_locations = ()
def get_config_paths(filename=None):
script_name = os.path.basename(sys.argv[0])
for dirpath in pkg_locations + (
os.path.join(sys.prefix, 'config'),
'/etc/{}'.format(script_name),
os.path.expanduser('~/.{}'.format(script_name)),
):
full_path = dirpath
if filename:
full_path = os.path.join(full_path, filename)
yield full_path
|
import os
import sys
try:
import pkg_resources
d = pkg_resources.get_distribution('metermaid')
pkg_locations = (
os.path.join(d.location, 'config'),
os.path.join(os.path.dirname(d.location), 'config'),
)
except ImportError:
pkg_locations = ()
def get_config_paths(filename=None):
    """Yield candidate configuration locations, from package data dirs to
    per-user dotfile directory; each joined with *filename* when given."""
    script_name = os.path.basename(sys.argv[0])
    search_dirs = pkg_locations + (
        os.path.join(sys.prefix, 'config'),
        os.path.join(sys.prefix, 'etc', script_name),
        os.path.expanduser('~/.{}'.format(script_name)),
    )
    for dirpath in search_dirs:
        yield os.path.join(dirpath, filename) if filename else dirpath
|
Use etc relative to sys.prefix
|
Use etc relative to sys.prefix
|
Python
|
apache-2.0
|
rca/cmdline
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.