commit
stringlengths 40
40
| old_file
stringlengths 4
236
| new_file
stringlengths 4
236
| old_contents
stringlengths 1
3.26k
| new_contents
stringlengths 16
4.43k
| subject
stringlengths 16
624
| message
stringlengths 17
3.29k
| lang
stringclasses 5
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
|---|---|---|---|---|---|---|---|---|---|
638b8be8a07262803c087e796e40a51858c08983
|
__init__.py
|
__init__.py
|
from . import LayerView
def getMetaData():
return { "name": "LayerView", "type": "View" }
def register(app):
return LayerView.LayerView()
|
from . import LayerView
def getMetaData():
return {
'type': 'view',
'plugin': {
"name": "Layer View"
},
'view': {
'name': 'Layers'
}
}
def register(app):
return LayerView.LayerView()
|
Update plugin metadata to the new format
|
Update plugin metadata to the new format
|
Python
|
agpl-3.0
|
totalretribution/Cura,markwal/Cura,quillford/Cura,DeskboxBrazil/Cura,lo0ol/Ultimaker-Cura,senttech/Cura,bq/Ultimaker-Cura,ad1217/Cura,fieldOfView/Cura,fieldOfView/Cura,DeskboxBrazil/Cura,Curahelper/Cura,Curahelper/Cura,hmflash/Cura,bq/Ultimaker-Cura,hmflash/Cura,markwal/Cura,quillford/Cura,derekhe/Cura,totalretribution/Cura,lo0ol/Ultimaker-Cura,ynotstartups/Wanhao,fxtentacle/Cura,fxtentacle/Cura,senttech/Cura,ynotstartups/Wanhao,derekhe/Cura,ad1217/Cura
|
badba5070ac40a70de2be47b6d58afd0364ed7fe
|
staticassets/views.py
|
staticassets/views.py
|
import mimetypes
from django.http import HttpResponse, HttpResponseNotModified, Http404
from django.contrib.staticfiles.views import serve as staticfiles_serve
from django.views.static import was_modified_since, http_date
from staticassets import finder, settings
def serve(request, path, **kwargs):
mimetype, encoding = mimetypes.guess_type(path)
if not mimetype in settings.MIMETYPES.values():
return staticfiles_serve(request, path, **kwargs)
bundle = request.GET.get('bundle') in ('1', 't', 'true')
asset = finder.find(path, bundle=bundle)
if not asset:
raise Http404("Static asset not found")
# Respect the If-Modified-Since header.
modified_since = request.META.get('HTTP_IF_MODIFIED_SINCE')
if not was_modified_since(modified_since, asset.mtime, asset.size):
return HttpResponseNotModified(mimetype=asset.attributes.content_type)
response = HttpResponse(asset.content, mimetype=asset.attributes.content_type)
response['Last-Modified'] = http_date(asset.mtime)
response['Content-Length'] = asset.size
return response
|
import mimetypes
from django.http import HttpResponse, HttpResponseNotModified, Http404
from django.contrib.staticfiles.views import serve as staticfiles_serve
from django.views.static import was_modified_since, http_date
from staticassets import finder, settings
def serve(request, path, **kwargs):
mimetype, encoding = mimetypes.guess_type(path)
if not mimetype in settings.MIMETYPES.values():
return staticfiles_serve(request, path, **kwargs)
bundle = request.GET.get('bundle') in ('1', 't', 'true')
asset = finder.find(path, bundle=bundle)
if not asset:
raise Http404("Static asset not found")
# Respect the If-Modified-Since header.
modified_since = request.META.get('HTTP_IF_MODIFIED_SINCE')
if not was_modified_since(modified_since, asset.mtime, asset.size):
return HttpResponseNotModified(content_type=asset.attributes.content_type)
response = HttpResponse(asset.content, content_type=asset.attributes.content_type)
response['Last-Modified'] = http_date(asset.mtime)
response['Content-Length'] = asset.size
return response
|
Use correct argument for content type in serve view
|
Use correct argument for content type in serve view
|
Python
|
mit
|
davidelias/django-staticassets,davidelias/django-staticassets,davidelias/django-staticassets
|
d74e134ca63b7d3cd053d21168ca526a493999df
|
mysql.py
|
mysql.py
|
#!/usr/bin/env python
#
# igcollect - Mysql Status
#
# Copyright (c) 2016, InnoGames GmbH
#
import time
import socket
import MySQLdb
hostname = socket.gethostname().replace(".", "_")
now = str(int(time.time()))
db = MySQLdb.connect(host = 'localhost', read_default_file='/etc/mysql/my.cnf')
cur = db.cursor()
# Check for global status
cur.execute("show global status")
for row in cur.fetchall():
if row[1].isdigit():
print "servers.{0}.software.mysql.status.{1} {2} {3}".format(hostname, row[0], row[1], now)
cur.execute("show variables")
for row in cur.fetchall():
if row[1].isdigit():
print "servers.{0}.software.mysql.variables.{1} {2} {3}".format(hostname, row[0], row[1], now)
|
#!/usr/bin/env python
#
# igcollect - Mysql Status
#
# Copyright (c) 2016, InnoGames GmbH
#
import time
import socket
import MySQLdb
hostname = socket.gethostname().replace(".", "_")
now = str(int(time.time()))
db = MySQLdb.connect(user = 'root', host = 'localhost', read_default_file='/etc/mysql/my.cnf')
cur = db.cursor()
# Check for global status
cur.execute("show global status")
for row in cur.fetchall():
if row[1].isdigit():
print "servers.{0}.software.mysql.status.{1} {2} {3}".format(hostname, row[0], row[1], now)
cur.execute("show variables")
for row in cur.fetchall():
if row[1].isdigit():
print "servers.{0}.software.mysql.variables.{1} {2} {3}".format(hostname, row[0], row[1], now)
|
Use root user to connect
|
Use root user to connect
|
Python
|
mit
|
innogames/igcollect
|
297f42a2013428c2f6caefdf83735cc4a528e225
|
caching.py
|
caching.py
|
import os
import cPickle as pickle
try: DATA_DIR = os.path.dirname(os.path.realpath(__file__))
except: DATA_DIR = os.getcwd()
cache_path = lambda name: os.path.join(DATA_DIR, '%s.cache' % name)
def get_cache(name):
return pickle.load(open(cache_path(name), 'r'))
def save_cache(obj, name):
pickle.dump(obj, open(cache_path(name), 'w'), protocol=-1)
|
import os
import cPickle as pickle
home_dir = os.path.expanduser('~')
DATA_DIR = os.path.join(home_dir, '.tax_resolve')
if not os.path.exists(DATA_DIR):
try:
os.mkdir(DATA_DIR)
except: DATA_DIR = os.getcwd()
cache_path = lambda name: os.path.join(DATA_DIR, '%s.cache' % name)
def get_cache(name):
return pickle.load(open(cache_path(name), 'r'))
def save_cache(obj, name):
pickle.dump(obj, open(cache_path(name), 'w'), protocol=-1)
|
Use user's local application data directory instead of the module path.
|
Use user's local application data directory instead of the module path.
|
Python
|
mit
|
bendmorris/tax_resolve
|
310cebbe1f4a4d92c8f181d7e4de9cc4f75a14dc
|
indra/assemblers/__init__.py
|
indra/assemblers/__init__.py
|
try:
from pysb_assembler import PysbAssembler
except ImportError:
pass
try:
from graph_assembler import GraphAssembler
except ImportError:
pass
try:
from sif_assembler import SifAssembler
except ImportError:
pass
try:
from cx_assembler import CxAssembler
except ImportError:
pass
try:
from english_assembler import EnglishAssembler
except ImportError:
pass
try:
from sbgn_assembler import SBGNAssembler
except ImportError:
pass
try:
from index_card_assembler import IndexCardAssembler
except ImportError:
pass
|
try:
from indra.assemblers.pysb_assembler import PysbAssembler
except ImportError:
pass
try:
from indra.assemblers.graph_assembler import GraphAssembler
except ImportError:
pass
try:
from indra.assemblers.sif_assembler import SifAssembler
except ImportError:
pass
try:
from indra.assemblers.cx_assembler import CxAssembler
except ImportError:
pass
try:
from indra.assemblers.english_assembler import EnglishAssembler
except ImportError:
pass
try:
from indra.assemblers.sbgn_assembler import SBGNAssembler
except ImportError:
pass
try:
from indra.assemblers.index_card_assembler import IndexCardAssembler
except ImportError:
pass
|
Update to absolute imports in assemblers
|
Update to absolute imports in assemblers
|
Python
|
bsd-2-clause
|
johnbachman/indra,bgyori/indra,sorgerlab/belpy,sorgerlab/indra,pvtodorov/indra,pvtodorov/indra,pvtodorov/indra,johnbachman/indra,bgyori/indra,jmuhlich/indra,pvtodorov/indra,sorgerlab/indra,jmuhlich/indra,johnbachman/belpy,johnbachman/belpy,sorgerlab/belpy,johnbachman/indra,sorgerlab/belpy,johnbachman/belpy,jmuhlich/indra,bgyori/indra,sorgerlab/indra
|
d150db290a72590e0f7cf9dae485bf98901bb2c2
|
web_ui/helpers.py
|
web_ui/helpers.py
|
from web_ui import app
from flask import session
from datetime import datetime
# For calculating scores
epoch = datetime.utcfromtimestamp(0)
epoch_seconds = lambda dt: (dt - epoch).total_seconds() - 1356048000
def score(star_object):
import random
return random.random() * 100 - random.random() * 10
def get_active_persona():
from nucleus.models import Persona
""" Return the currently active persona or 0 if there is no controlled persona. """
if 'active_persona' not in session or session['active_persona'] is None:
"""Activate first Persona with a private key"""
controlled_persona = Persona.query.filter('sign_private != ""').first()
if controlled_persona is None:
return ""
else:
session['active_persona'] = controlled_persona.id
return session['active_persona']
def allowed_file(filename):
return '.' in filename and \
filename.rsplit('.', 1)[1] in app.config['ALLOWED_EXTENSIONS']
|
from web_ui import app
from flask import session
from datetime import datetime
# For calculating scores
epoch = datetime.utcfromtimestamp(0)
epoch_seconds = lambda dt: (dt - epoch).total_seconds() - 1356048000
def score(star_object):
import random
return random.random() * 100 - random.random() * 10
def get_active_persona():
from nucleus.models import Persona
""" Return the currently active persona or 0 if there is no controlled persona. """
if 'active_persona' not in session or session['active_persona'] is None:
"""Activate first Persona with a private key"""
controlled_persona = Persona.query.filter('sign_private != ""').first()
if controlled_persona is None:
return ""
else:
session['active_persona'] = controlled_persona.id
return session['active_persona']
def allowed_file(filename):
return '.' in filename and \
filename.rsplit('.', 1)[1] in app.config['ALLOWED_EXTENSIONS']
def reset_userdata():
"""Reset all userdata files"""
import os
for fileid in ["DATABASE", "SECRET_KEY_FILE", "PASSWORD_HASH_FILE"]:
try:
os.remove(app.config[fileid])
except OSError:
app.logger.warning("RESET: {} not found".format(fileid))
else:
app.logger.warning("RESET: {} deleted")
|
Add helper method for resetting user data
|
Add helper method for resetting user data
|
Python
|
apache-2.0
|
ciex/souma,ciex/souma,ciex/souma
|
7180aba7ce183e64ef12e4fc384408036c2fe901
|
product_template_tree_prices/__init__.py
|
product_template_tree_prices/__init__.py
|
# -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from . import product
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
# -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
FIX produ template tree prices
|
FIX produ template tree prices
|
Python
|
agpl-3.0
|
ingadhoc/product,ingadhoc/product
|
186a72b91798b11d13ea7f2538141f620b0787a8
|
tests/test_metrics.py
|
tests/test_metrics.py
|
import json
from . import TestCase
class MetricsTests(TestCase):
def test_find(self):
url = '/metrics/find'
response = self.app.get(url)
self.assertEqual(response.status_code, 400)
response = self.app.get(url, query_string={'query': 'test'})
self.assertJSON(response, [])
def test_expand(self):
url = '/metrics/expand'
response = self.app.get(url)
self.assertJSON(response, {'errors':
{'query': 'this parameter is required.'}},
status_code=400)
response = self.app.get(url, query_string={'query': 'test'})
self.assertEqual(response.status_code, 200)
self.assertEqual(json.loads(response.data.decode('utf-8')),
{'results': []})
|
from . import TestCase
class MetricsTests(TestCase):
def test_find(self):
url = '/metrics/find'
response = self.app.get(url)
self.assertEqual(response.status_code, 400)
response = self.app.get(url, query_string={'query': 'test'})
self.assertJSON(response, [])
def test_expand(self):
url = '/metrics/expand'
response = self.app.get(url)
self.assertJSON(response, {'errors':
{'query': 'this parameter is required.'}},
status_code=400)
response = self.app.get(url, query_string={'query': 'test'})
self.assertJSON(response, {'results': []})
def test_noop(self):
url = '/dashboard/find'
response = self.app.get(url)
self.assertJSON(response, {'dashboards': []})
url = '/dashboard/load/foo'
response = self.app.get(url)
self.assertJSON(response, {'error': "Dashboard 'foo' does not exist."},
status_code=404)
url = '/events/get_data'
response = self.app.get(url)
self.assertJSON(response, [])
|
Add test for noop routes
|
Add test for noop routes
|
Python
|
apache-2.0
|
vladimir-smirnov-sociomantic/graphite-api,michaelrice/graphite-api,GeorgeJahad/graphite-api,vladimir-smirnov-sociomantic/graphite-api,michaelrice/graphite-api,alphapigger/graphite-api,raintank/graphite-api,hubrick/graphite-api,rackerlabs/graphite-api,Knewton/graphite-api,raintank/graphite-api,Knewton/graphite-api,bogus-py/graphite-api,cybem/graphite-api-iow,DaveBlooman/graphite-api,rackerlabs/graphite-api,brutasse/graphite-api,DaveBlooman/graphite-api,hubrick/graphite-api,raintank/graphite-api,tpeng/graphite-api,winguru/graphite-api,winguru/graphite-api,bogus-py/graphite-api,tpeng/graphite-api,cybem/graphite-api-iow,absalon-james/graphite-api,alphapigger/graphite-api,absalon-james/graphite-api,brutasse/graphite-api,GeorgeJahad/graphite-api
|
2410255e846c5fbd756ed97868299e1674c89467
|
flash_example.py
|
flash_example.py
|
from BlinkyTape import BlinkyTape
bb = BlinkyTape('/dev/tty.usbmodemfa131')
while True:
for x in range(60):
bb.sendPixel(10, 10, 10)
bb.show()
for x in range(60):
bb.sendPixel(0, 0, 0)
bb.show()
|
from BlinkyTape import BlinkyTape
import time
#bb = BlinkyTape('/dev/tty.usbmodemfa131')
bb = BlinkyTape('COM8')
while True:
for x in range(60):
bb.sendPixel(100, 100, 100)
bb.show()
time.sleep(.5)
for x in range(60):
bb.sendPixel(0, 0, 0)
bb.show()
time.sleep(.5)
|
Set it to flash black and white every second
|
Set it to flash black and white every second
|
Python
|
mit
|
Blinkinlabs/BlinkyTape_Python,jpsingleton/BlinkyTape_Python,railsagainstignorance/blinkytape
|
251a0d1b1df0fd857a86878ecb7e4c6bc26a93ef
|
paci/helpers/display_helper.py
|
paci/helpers/display_helper.py
|
"""Helper to output stuff"""
from tabulate import tabulate
def print_list(header, entries):
"""Prints out a list"""
print(tabulate(entries, header, tablefmt="grid"))
def std_input(text, default):
"""Get input or return default if none is given."""
return input(text.format(default)) or default
|
"""Helper to output stuff"""
from tabulate import tabulate
def print_list(header, entries):
"""Prints out a list"""
print(tabulate(entries, header, tablefmt="grid"))
def print_table(entries):
"""Prints out a table"""
print(tabulate(entries, tablefmt="plain"))
def std_input(text, default):
"""Get input or return default if none is given."""
return input(text.format(default)) or default
|
Add function to just print a simple table
|
Add function to just print a simple table
|
Python
|
mit
|
tradebyte/paci,tradebyte/paci
|
bb768ef543469395ccbd0b2761442d9dcfa8e0c5
|
testanalyzer/analyze_repos.py
|
testanalyzer/analyze_repos.py
|
import pandas as pd
import shutil
import utils as u
import validators
from analyzer import Analyzer
from git import Repo
if __name__ == "__main__":
repos = pd.read_pickle("data/test.pkl")
for _, repo in repos.iterrows():
if not validators.url(repo["url"]):
print("Error: Invalid URL.")
exit(1)
project_name = u.get_name_from_url(repo["url"])
print("Cloning {}...".format(project_name))
Repo.clone_from(repo["url"], project_name)
print("Analyzing...")
analyzer = Analyzer(project_name)
code_counts, test_counts = analyzer.run()
print(code_counts)
print(test_counts)
shutil.rmtree(project_name)
|
import pandas as pd
import shutil
import utils as u
import validators
from analyzer import Analyzer
from git import Repo
if __name__ == "__main__":
repos = pd.read_pickle("data/repos.pkl")
repos["code_lines"] = 0
repos["code_classes"] = 0
repos["code_functions"] = 0
repos["test_lines"] = 0
repos["test_classes"] = 0
repos["test_functions"] = 0
for i, repo in repos.iterrows():
if not validators.url(repo["url"]):
print("Error: Invalid URL.")
exit(1)
project_name = u.get_name_from_url(repo["url"])
print("Cloning {}...".format(project_name))
Repo.clone_from(repo["url"], project_name)
print("Analyzing...")
analyzer = Analyzer(project_name)
code_counts, test_counts = analyzer.run()
repos.set_value(i, "code_lines", code_counts["line_count"])
repos.set_value(i, "code_classes", code_counts["class_count"])
repos.set_value(i, "code_functions", code_counts["function_count"])
repos.set_value(i, "test_lines", test_counts["line_count"])
repos.set_value(i, "test_classes", test_counts["class_count"])
repos.set_value(i, "test_functions", test_counts["function_count"])
shutil.rmtree(project_name)
repos.to_pickle("data/dataset.pkl")
|
Update dataframe with counts and serialize
|
Update dataframe with counts and serialize
|
Python
|
mpl-2.0
|
CheriPai/TestAnalyzer,CheriPai/TestAnalyzer,CheriPai/TestAnalyzer
|
05d498cc1f216ba722ce887b212ac5e750fb0c8d
|
tests/test_player_creation.py
|
tests/test_player_creation.py
|
from webtest import TestApp
import dropshot
def test_create_player():
app = TestApp(dropshot.app)
params = {'username': 'chapmang',
'password': 'deadparrot',
'email': 'chapmang@dropshot.com'}
app.post('/players', params)
res = app.get('/players')
assert res.status_int == 200
|
from webtest import TestApp
import dropshot
def test_create_player():
app = TestApp(dropshot.app)
params = {'username': 'chapmang',
'password': 'deadparrot',
'email': 'chapmang@dropshot.com'}
expected = {'count': 1,
'offset': 0,
'players': [
{'gamesPlayed': 0,
'username': 'chapmang'}
]}
app.post('/players', params)
res = app.get('/players')
assert res.status_int == 200
assert res.content_type == 'application/json'
assert res.json == expected
|
Make player creation test check for valid response.
|
Make player creation test check for valid response.
|
Python
|
mit
|
dropshot/dropshot-server
|
fc904d8fd02cecfb2c3d69d6101caaab7b224e93
|
_bin/person_list_generator.py
|
_bin/person_list_generator.py
|
# Console outputs a person list
import os
import csv
with open('tmp/person_list_input.csv') as csvfile:
csvreader = csv.reader(csvfile)
for row in csvreader:
print """ - role: {}
name: {}""".format(row[0], row[1])
|
# Console outputs a person list
import os
import csv
with open('tmp/person_list_input.csv') as csvfile:
csvreader = csv.reader(csvfile)
for row in csvreader:
stream = open('tmp/person_list_output.yml', 'a')
stream.write( """ - role: {}\n name: {}\n""".format(row[0], row[1])
)
stream.close()
|
Make person list generator output to file
|
Make person list generator output to file
The console was going beyond the terminal history limit for 14-15
|
Python
|
mit
|
johnathan99j/history-project,johnathan99j/history-project,newtheatre/history-project,newtheatre/history-project,johnathan99j/history-project,newtheatre/history-project,johnathan99j/history-project,newtheatre/history-project,johnathan99j/history-project,newtheatre/history-project
|
10b9d412c26b90bb86fe1abd04c3fe0f86826104
|
pelicanconf_with_pagination.py
|
pelicanconf_with_pagination.py
|
from pelicanconf import *
# Over-ride so there is paging.
DEFAULT_PAGINATION = 5
|
import sys
# Hack for Travis, where local imports don't work.
if '' not in sys.path:
sys.path.insert(0, '')
from pelicanconf import *
# Over-ride so there is paging.
DEFAULT_PAGINATION = 5
|
Fix Python import path on Travis.
|
Fix Python import path on Travis.
|
Python
|
apache-2.0
|
dhermes/bossylobster-blog,dhermes/bossylobster-blog,dhermes/bossylobster-blog,dhermes/bossylobster-blog,dhermes/bossylobster-blog,dhermes/bossylobster-blog,dhermes/bossylobster-blog
|
c6ecf6160664bc61cf6dc213af1f2fe3fd6a3617
|
editorsnotes/djotero/models.py
|
editorsnotes/djotero/models.py
|
from django.db import models
from editorsnotes.main.models import Document
import utils
import json
class ZoteroLink(models.Model):
doc = models.OneToOneField(Document, related_name='_zotero_link')
zotero_url = models.URLField()
zotero_data = models.TextField(blank=True)
date_information = models.TextField(blank=True)
def __str__(self):
return 'Zotero data: %s' % self.doc.__str__()
def get_zotero_fields(self):
z = json.loads(self.zotero_data)
z['itemType'] = utils.type_map['readable'][z['itemType']]
if self.date_information:
date_parts = json.loads(self.date_information)
for part in date_parts:
z[part] = date_parts[part]
if z['creators']:
names = utils.resolve_names(z, 'facets')
z.pop('creators')
output = z.items()
for name in names:
for creator_type, creator_value in name.items():
output.append((creator_type, creator_value))
else:
output = z.items()
return output
|
from django.db import models
from editorsnotes.main.models import Document
import utils
import json
class ZoteroLink(models.Model):
doc = models.OneToOneField(Document, related_name='_zotero_link')
zotero_url = models.URLField(blank=True)
zotero_data = models.TextField()
date_information = models.TextField(blank=True)
def __str__(self):
return 'Zotero data: %s' % self.doc.__str__()
def get_zotero_fields(self):
z = json.loads(self.zotero_data)
z['itemType'] = utils.type_map['readable'][z['itemType']]
if self.date_information:
date_parts = json.loads(self.date_information)
for part in date_parts:
z[part] = date_parts[part]
if z['creators']:
names = utils.resolve_names(z, 'facets')
z.pop('creators')
output = z.items()
for name in names:
for creator_type, creator_value in name.items():
output.append((creator_type, creator_value))
else:
output = z.items()
return output
|
Allow blank zotero url reference, but require zotero json data
|
Allow blank zotero url reference, but require zotero json data
|
Python
|
agpl-3.0
|
editorsnotes/editorsnotes,editorsnotes/editorsnotes
|
9fda3df6ae1f31af139c03eaf8b385746816f3b4
|
spec/helper.py
|
spec/helper.py
|
from pygametemplate import Game
from example_view import ExampleView
class TestGame(Game):
"""An altered Game class for testing purposes."""
def __init__(self, StartingView, resolution):
super(TestGame, self).__init__(StartingView, resolution)
def log(self, *error_message):
"""Altered log function which just raises errors."""
raise
game = TestGame(ExampleView, (1280, 720))
|
from pygametemplate import Game
from example_view import ExampleView
class TestGame(Game):
"""An altered Game class for testing purposes."""
def __init__(self, StartingView, resolution):
super(TestGame, self).__init__(StartingView, resolution)
game = TestGame(ExampleView, (1280, 720))
|
Remove TestGame.log() method as log() isn't a method of Game anymore
|
Remove TestGame.log() method as log() isn't a method of Game anymore
|
Python
|
mit
|
AndyDeany/pygame-template
|
36b8ec51dc6e1caca90db41d83d4dc21d70005a5
|
app/task.py
|
app/task.py
|
from mongoengine import Document, DateTimeField, EmailField, IntField, \
ReferenceField, StringField
import datetime, enum
class Priority(enum.IntEnum):
LOW = 0,
MIDDLE = 1,
HIGH = 2
"""
This defines the basic model for a Task as we want it to be stored in the
MongoDB.
"""
class Task(Document):
title = StringField(max_length=150, required=True)
description = StringField(max_length=800, required=True)
creator = EmailField(max_length=120, required=True)
assigne = EmailField(max_length=120, required=True)
created_at = DateTimeField(default=datetime.datetime.now, required=True)
status = IntField(default=0, required=True)
priority = IntField(default=Priority.LOW, required=True)
|
from mongoengine import Document, DateTimeField, EmailField, IntField, \
ReferenceField, StringField, ValidationError
import datetime, enum, Exception
from app import logger
class Priority(enum.IntEnum):
"""
This defines the priority levels a Task can have.
"""
LOW = 0,
MIDDLE = 1,
HIGH = 2
class Status(enum.IntEnum):
"""
This defines statuses a Task can have.
"""
OPEN = 0
IN_PROGRESS = 1
CLOSED = 2
class Task(Document):
"""
This defines the basic model for a Task as we want it to be stored in the
MongoDB.
title (str): The title of the Task.
description (str): A description of the Task.
creator (str): The task creators email address.
assigne (str): The email address of the person the Task is assigned to.
created_at (datetime): The point in the time when the Task was created.
status (Status): The current status of the Task.
priority(Priority): The priority level of the Task.
"""
title = StringField(max_length=150, required=True)
description = StringField(max_length=800, required=True)
creator = EmailField(max_length=120, required=True)
assigne = EmailField(max_length=120, required=True)
created_at = DateTimeField(default=datetime.datetime.now, required=True)
status = IntField(default=Status.OPEN, required=True)
priority = IntField(default=Priority.LOW, required=True)
|
Add a Status enum and documentation
|
Add a Status enum and documentation
|
Python
|
mit
|
Zillolo/lazy-todo
|
acf3819d433f3ebc3d3eed17c61f2542f7429f8e
|
trimesh/resources/__init__.py
|
trimesh/resources/__init__.py
|
import os
import inspect
# find the current absolute path using inspect
_pwd = os.path.dirname(
os.path.abspath(
inspect.getfile(
inspect.currentframe())))
def get_resource(name, decode=True):
"""
Get a resource from the trimesh/resources folder.
Parameters
-------------
name : str
File path relative to `trimesh/resources`
decode : bool
Whether or not to decode result as UTF-8
Returns
-------------
resource : str or bytes
File data
"""
# get the resource using relative names
with open(os.path.join(_pwd, name), 'rb') as f:
resource = f.read()
# make sure we return it as a string if asked
if decode and hasattr(resource, 'decode'):
return resource.decode('utf-8')
return resource
|
import os
# find the current absolute path to this directory
_pwd = os.path.dirname(__file__)
def get_resource(name, decode=True):
"""
Get a resource from the trimesh/resources folder.
Parameters
-------------
name : str
File path relative to `trimesh/resources`
decode : bool
Whether or not to decode result as UTF-8
Returns
-------------
resource : str or bytes
File data
"""
# get the resource using relative names
with open(os.path.join(_pwd, name), 'rb') as f:
resource = f.read()
# make sure we return it as a string if asked
if decode and hasattr(resource, 'decode'):
return resource.decode('utf-8')
return resource
|
Use __file__ instead of inspect, for compatibility with frozen environments
|
RF: Use __file__ instead of inspect, for compatibility with frozen environments
|
Python
|
mit
|
mikedh/trimesh,mikedh/trimesh,dajusc/trimesh,mikedh/trimesh,mikedh/trimesh,dajusc/trimesh
|
83dabc9fc1142e1575843d3a68c6241185543936
|
fabtastic/db/__init__.py
|
fabtastic/db/__init__.py
|
from django.conf import settings
from fabtastic.db import util
db_engine = util.get_db_setting('ENGINE')
if 'postgresql_psycopg2' in db_engine:
from fabtastic.db.postgres import *
else:
raise NotImplementedError("Fabtastic: DB engine '%s' is not supported" % db_engine)
|
from django.conf import settings
from fabtastic.db import util
db_engine = util.get_db_setting('ENGINE')
if 'postgresql_psycopg2' in db_engine:
from fabtastic.db.postgres import *
else:
print("Fabtastic WARNING: DB engine '%s' is not supported" % db_engine)
|
Make the warning for SQLite not being supported a print instead of an exception.
|
Make the warning for SQLite not being supported a print instead of an exception.
|
Python
|
bsd-3-clause
|
duointeractive/django-fabtastic
|
208f90497c7a6867f9aeece84b1161926ca1627b
|
nethud/nh_client.py
|
nethud/nh_client.py
|
"""
An example client. Run simpleserv.py first before running this.
"""
import json
from twisted.internet import reactor, protocol
# a client protocol
class EchoClient(protocol.Protocol):
"""Once connected, send a message, then print the result."""
def connectionMade(self):
data = '{"register": {"email": "Qalthos@gmail.com", ' + \
'"username": "Qalthos",' + \
'"password": "password"}}'
#~ data = '{"auth": {"username": "Qalthos", "password": "password"}}'
print data
self.transport.write(data)
def dataReceived(self, data):
"As soon as any data is received, write it back."
print "Server said:", data
def connectionLost(self, reason):
print "Connection lost"
class EchoFactory(protocol.ClientFactory):
protocol = EchoClient
def clientConnectionFailed(self, connector, reason):
print "Connection failed - goodbye!"
reactor.stop()
def clientConnectionLost(self, connector, reason):
print "Connection lost - goodbye!"
reactor.stop()
# this connects the protocol to a server runing on port 8000
def main():
f = EchoFactory()
reactor.connectTCP("games-ng.csh.rit.edu", 53421, f)
reactor.run()
# this only runs if the module was *not* imported
if __name__ == '__main__':
main()
|
"""
An example client. Run simpleserv.py first before running this.
"""
import json
from twisted.internet import reactor, protocol
# a client protocol
class EchoClient(protocol.Protocol):
"""Once connected, send a message, then print the result."""
def connectionMade(self):
self.send_message('auth', username='Qalthos', password='password')
def dataReceived(self, data):
"As soon as any data is received, write it back."
print "Server said:", data
def connectionLost(self, reason):
print "Connection lost"
# Nethack Protocol Wrapper
def send_message(self, command, **kw):
data = json.dumps(dict(command=kw))
self.transport.write(data)
class EchoFactory(protocol.ClientFactory):
protocol = EchoClient
def clientConnectionFailed(self, connector, reason):
print "Connection failed - goodbye!"
reactor.stop()
def clientConnectionLost(self, connector, reason):
print "Connection lost - goodbye!"
reactor.stop()
# this connects the protocol to a server runing on port 8000
def main():
f = EchoFactory()
reactor.connectTCP("games-ng.csh.rit.edu", 53421, f)
reactor.run()
# this only runs if the module was *not* imported
if __name__ == '__main__':
main()
|
Simplify nethack protocol to a single method.
|
Simplify nethack protocol to a single method.
|
Python
|
mit
|
ryansb/netHUD
|
881e693d16d12109c3ececffda61336b020c172a
|
portable_mds/tests/conftest.py
|
portable_mds/tests/conftest.py
|
import os
import tempfile
import shutil
import tzlocal
import pytest
from ..mongoquery.mds import MDS
@pytest.fixture(params=[1], scope='function')
def mds_all(request):
'''Provide a function level scoped FileStore instance talking to
temporary database on localhost:27017 with both v0 and v1.
'''
ver = request.param
tempdirname = tempfile.mkdtemp()
mds = MDS({'directory': tempdirname,
'timezone': tzlocal.get_localzone().zone}, version=ver)
filenames = ['run_starts.json', 'run_stops.json', 'event_descriptors.json',
'events.json']
for fn in filenames:
with open(os.path.join(tempdirname, fn), 'w') as f:
f.write('[]')
def delete_dm():
shutil.rmtree(tempdirname)
request.addfinalizer(delete_dm)
return mds
|
import os
import tempfile
import shutil
import tzlocal
import pytest
import portable_mds.mongoquery.mds
import portable_mds.sqlite.mds
variations = [portable_mds.mongoquery.mds,
portable_mds.sqlite.mds]
@pytest.fixture(params=variations, scope='function')
def mds_all(request):
'''Provide a function level scoped FileStore instance talking to
temporary database on localhost:27017 with both v0 and v1.
'''
tempdirname = tempfile.mkdtemp()
mds = request.param.MDS({'directory': tempdirname,
'timezone': tzlocal.get_localzone().zone}, version=1)
filenames = ['run_starts.json', 'run_stops.json', 'event_descriptors.json',
'events.json']
for fn in filenames:
with open(os.path.join(tempdirname, fn), 'w') as f:
f.write('[]')
def delete_dm():
shutil.rmtree(tempdirname)
request.addfinalizer(delete_dm)
return mds
|
Test sqlite and mongoquery variations.
|
TST: Test sqlite and mongoquery variations.
|
Python
|
bsd-3-clause
|
ericdill/databroker,ericdill/databroker
|
bd5c215c1c481f3811753412bca6b509bb00591a
|
me_api/app.py
|
me_api/app.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from flask import Flask
from .middleware.me import me
from .cache import cache
def _register_module(app, module):
if module == 'douban':
from .middleware import douban
app.register_blueprint(douban.douban_api)
elif module == 'github':
from .middleware import github
app.register_blueprint(github.github_api)
elif module == 'instagram':
from .middleware import instagram
app.register_blueprint(instagram.instagram_api)
elif module == 'keybase':
from .middleware import keybase
app.register_blueprint(keybase.keybase_api)
elif module == 'medium':
from .middleware import medium
app.register_blueprint(medium.medium_api)
elif module == 'stackoverflow':
from .middleware import stackoverflow
app.register_blueprint(stackoverflow.stackoverflow_api)
def create_app(config):
app = Flask(__name__)
app.config.from_object(config)
cache.init_app(app)
modules = config.modules['modules']
app.register_blueprint(me)
for module in modules.keys():
_register_module(app, module)
return app
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from flask import Flask
from werkzeug.utils import import_string
from me_api.middleware.me import me
from me_api.cache import cache
middlewares = {
'douban': 'me_api.middleware.douban:douban_api',
'github': 'me_api.middleware.github:github_api',
'instagram': 'me_api.middleware.instagram:instagram_api',
'keybase': 'me_api.middleware.keybase:keybase_api',
'medium': 'me_api.middleware.medium:medium_api',
'stackoverflow': 'me_api.middleware.stackoverflow:stackoverflow_api',
}
def create_app(config):
app = Flask(__name__)
app.config.from_object(config)
cache.init_app(app)
modules = config.modules['modules']
app.register_blueprint(me)
for module in modules.keys():
blueprint = import_string(middlewares[module])
app.register_blueprint(blueprint)
return app
|
Improve the way that import middlewares
|
Improve the way that import middlewares
|
Python
|
mit
|
lord63/me-api
|
af6f4868f4329fec75e43fe0cdcd1a7665c5238a
|
contentcuration/manage.py
|
contentcuration/manage.py
|
#!/usr/bin/env python
import os
import sys
# Attach Python Cloud Debugger
if __name__ == "__main__":
#import warnings
#warnings.filterwarnings('ignore', message=r'Module .*? is being added to sys\.path', append=True)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "contentcuration.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
#import warnings
#warnings.filterwarnings('ignore', message=r'Module .*? is being added to sys\.path', append=True)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "contentcuration.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
Remove comment on attaching cloud debugger
|
Remove comment on attaching cloud debugger
|
Python
|
mit
|
DXCanas/content-curation,DXCanas/content-curation,DXCanas/content-curation,DXCanas/content-curation
|
948b9987afa95d7a69bd61f3d8f9fea822323b01
|
wagtaildraftail/draft_text.py
|
wagtaildraftail/draft_text.py
|
from __future__ import absolute_import, unicode_literals
import json
from draftjs_exporter.html import HTML
from wagtail.wagtailcore.rich_text import RichText
from wagtaildraftail.settings import get_exporter_config
class DraftText(RichText):
def __init__(self, value, **kwargs):
super(DraftText, self).__init__(value or '{}', **kwargs)
self.exporter = HTML(get_exporter_config())
def get_json(self):
return self.source
def __html__(self):
return self.exporter.render(json.loads(self.source))
|
from __future__ import absolute_import, unicode_literals
import json
from django.utils.functional import cached_property
from draftjs_exporter.html import HTML
from wagtail.wagtailcore.rich_text import RichText
from wagtaildraftail.settings import get_exporter_config
class DraftText(RichText):
def __init__(self, value, **kwargs):
super(DraftText, self).__init__(value or '{}', **kwargs)
self.exporter = HTML(get_exporter_config())
def get_json(self):
return self.source
@cached_property
def _html(self):
return self.exporter.render(json.loads(self.source))
def __html__(self):
return self._html
def __eq__(self, other):
return self.__html__() == other.__html__()
|
Implement equality check for DraftText nodes
|
Implement equality check for DraftText nodes
Compare the (cached) rendered html of a node
|
Python
|
mit
|
gasman/wagtaildraftail,gasman/wagtaildraftail,gasman/wagtaildraftail,springload/wagtaildraftail,gasman/wagtaildraftail,springload/wagtaildraftail,springload/wagtaildraftail,springload/wagtaildraftail
|
5c851ee3d333518829ce26bfc06fd1038e70651c
|
corehq/util/decorators.py
|
corehq/util/decorators.py
|
from functools import wraps
import logging
from corehq.util.global_request import get_request
from dimagi.utils.logging import notify_exception
def handle_uncaught_exceptions(mail_admins=True):
"""Decorator to log uncaught exceptions and prevent them from
bubbling up the call chain.
"""
def _outer(fn):
@wraps(fn)
def _handle_exceptions(*args, **kwargs):
try:
return fn(*args, **kwargs)
except Exception as e:
msg = "Uncaught exception from {}.{}".format(fn.__module__, fn.__name__)
if mail_admins:
notify_exception(get_request(), msg)
else:
logging.exception(msg)
return _handle_exceptions
return _outer
|
from functools import wraps
import logging
from corehq.util.global_request import get_request
from dimagi.utils.logging import notify_exception
class ContextDecorator(object):
"""
A base class that enables a context manager to also be used as a decorator.
https://docs.python.org/3/library/contextlib.html#contextlib.ContextDecorator
"""
def __call__(self, fn):
@wraps(fn)
def decorated(*args, **kwds):
with self:
return fn(*args, **kwds)
return decorated
def handle_uncaught_exceptions(mail_admins=True):
"""Decorator to log uncaught exceptions and prevent them from
bubbling up the call chain.
"""
def _outer(fn):
@wraps(fn)
def _handle_exceptions(*args, **kwargs):
try:
return fn(*args, **kwargs)
except Exception as e:
msg = "Uncaught exception from {}.{}".format(fn.__module__, fn.__name__)
if mail_admins:
notify_exception(get_request(), msg)
else:
logging.exception(msg)
return _handle_exceptions
return _outer
class change_log_level(ContextDecorator):
"""
Temporarily change the log level of a specific logger.
Can be used as either a context manager or decorator.
"""
def __init__(self, logger, level):
self.logger = logging.getLogger(logger)
self.new_level = level
self.original_level = self.logger.level
def __enter__(self):
self.logger.setLevel(self.new_level)
def __exit__(self, exc_type, exc_val, exc_tb):
self.logger.setLevel(self.original_level)
|
Add util to temporarily alter log levels
|
Add util to temporarily alter log levels
Also backport ContextDecorator from python 3. I saw this just the other
day and it looks like an awesome pattern, and a much clearer way to
write decorators.
|
Python
|
bsd-3-clause
|
qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq
|
a35d6f59d214741f554dde1363d2eac7addb04cb
|
crypto_enigma/__init__.py
|
crypto_enigma/__init__.py
|
#!/usr/bin/env python
# encoding: utf8
"""An Enigma machine simulator with rich textual display functionality."""
from ._version import __version__, __author__
#__all__ = ['machine', 'components']
from .components import *
from .machine import *
|
#!/usr/bin/env python
# encoding: utf8
"""An Enigma machine simulator with rich textual display functionality.
Limitations
~~~~~~~~~~~
Note that the correct display of some characters used to represent
components (thin Naval rotors) assumes support for Unicode, while some
aspects of the display of machine state depend on support for combining
Unicode. This is a `known
limitation <https://github.com/orome/crypto-enigma-py/issues/1>`__ that
will be addressed in a future release.
Note also that at the start of any scripts that use this package, you should
.. parsed-literal::
from __future__ import unicode_literals
before any code that uses the API, or confiure IPython (in `ipython_config.py`) with
.. parsed-literal::
c.InteractiveShellApp.exec_lines += ["from __future__ import unicode_literals"]
or explicitly suppply Unicode strings (e.g., as in many of the examples here with :code:`u'TESTING'`).
"""
from ._version import __version__, __author__
#__all__ = ['machine', 'components']
from .components import *
from .machine import *
|
Add limitations to package documentation
|
Add limitations to package documentation
|
Python
|
bsd-3-clause
|
orome/crypto-enigma-py
|
08291f3948108da15b9832c495fade04cf2e22c4
|
tests/tests.py
|
tests/tests.py
|
#!/usr/bin/env python3
from selenium import webdriver
import unittest
class AdminPageTest(unittest.TestCase):
def setUp(self):
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(3)
def tearDown(self):
self.browser.quit()
def test_visit_admin_page(self):
# Visit admin page
self.browser.get('http://localhost:8000/admin')
# Check page title
self.assertIn('Django site admin', self.browser.title)
class API_fetch_tests(unittest.TestCase):
def setUp(self):
self.browser = webdriver.Firefox()
def tearDown(self):
self.browser.quit()
def test_fetch_Ingredient_JSON(self):
pass
def test_fetch_Drink_JSON(self):
pass
if __name__ == '__main__':
print('test')
unittest.main()
|
#!/usr/bin/env python3
from selenium import webdriver
import unittest
class AdminPageTest(unittest.TestCase):
def setUp(self):
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(3)
def tearDown(self):
self.browser.quit()
def test_visit_admin_page(self):
# Visit admin page
self.browser.get('http://localhost:8000/admin')
# Check page title
self.assertIn('Django site admin', self.browser.title)
class API_fetch_tests(unittest.TestCase):
def setUp(self):
self.browser = webdriver.Firefox()
def tearDown(self):
self.browser.quit()
def test_fetch_Ingredient_JSON(self):
pass
def test_fetch_Drink_JSON(self):
pass
class ReactAppTests(unittest.TestCase):
def setUp(self):
self.browser = webdriver.Firefox()
def tearDown(self):
self.browser.quit()
def test_fetch_index(self):
self.browser.get('http://localhost:8000/index')
self.assertIn('Cocktails', self.browser.title)
if __name__ == '__main__':
print('test')
unittest.main()
|
Add test to check title of index
|
Add test to check title of index
|
Python
|
mit
|
jake-jake-jake/cocktails,jake-jake-jake/cocktails,jake-jake-jake/cocktails,jake-jake-jake/cocktails
|
44520918dc0fad40f3afcfc2cdfde6f3208543cd
|
garden_lighting/MCP23017/raspberry.py
|
garden_lighting/MCP23017/raspberry.py
|
import time
import os
import wiringpi
from garden_lighting.MCP23017.MCP23017 import MCP23017
class RaspberryMCP23017(MCP23017):
def __init__(self, dev_addr, rst_pin=0xFF, i2cport=1):
super().__init__(dev_addr, rst_pin, i2cport)
def initDevice(self):
'''
Does a reset to put all registers in initial state
'''
os.system("gpio export " + str(self.RstPin) + " out")
# Set pin numbering mode
# We don't need performance, don't want root and don't want to interfere with
# other wiringpi instances -> sysfspy
wiringpi.wiringPiSetupSys()
# Define the reset pin as output
wiringpi.pinMode(self.RstPin, wiringpi.GPIO.OUTPUT)
# Create a reset impulse
wiringpi.digitalWrite(self.RstPin, wiringpi.GPIO.LOW)
# wait for 50 ms
time.sleep(.050)
wiringpi.digitalWrite(self.RstPin, wiringpi.GPIO.HIGH)
|
import time
import wiringpi
from garden_lighting.MCP23017.MCP23017 import MCP23017
class RaspberryMCP23017(MCP23017):
def __init__(self, dev_addr, rst_pin=0xFF, i2cport=1):
super().__init__(dev_addr, rst_pin, i2cport)
def initDevice(self):
'''
Does a reset to put all registers in initial state
'''
# Set pin numbering mode
# wiringPiSetupSys() did not work because pins were low after booting and running the write commands
# This requires root!
wiringpi.wiringPiSetupGpio()
# Define the reset pin as output
wiringpi.pinMode(self.RstPin, wiringpi.GPIO.OUTPUT)
# Create a reset impulse
wiringpi.digitalWrite(self.RstPin, wiringpi.GPIO.LOW)
# wait for 50 ms
time.sleep(.050)
wiringpi.digitalWrite(self.RstPin, wiringpi.GPIO.HIGH)
|
Use wiringPiSetupGpio, which required root. With wiringPiSetupSys some gpios stayed on low after boot.
|
Use wiringPiSetupGpio, which required root. With wiringPiSetupSys some gpios stayed on low after boot.
|
Python
|
mit
|
ammannbros/garden-lighting,ammannbros/garden-lighting,ammannbros/garden-lighting,ammannbros/garden-lighting
|
f30a923b881e908fa607e276de1d152d803248f1
|
pgpdump/__main__.py
|
pgpdump/__main__.py
|
import sys
from . import BinaryData
for filename in sys.argv[1:]:
with open(filename) as infile:
data = BinaryData(infile.read())
for packet in data.packets():
print hex(packet.key_id), packet.creation_date
|
import sys
import cProfile
from . import AsciiData, BinaryData
def parsefile(name):
with open(name) as infile:
if name.endswith('.asc'):
data = AsciiData(infile.read())
else:
data = BinaryData(infile.read())
counter = 0
for packet in data.packets():
counter += 1
print counter
def main():
for filename in sys.argv[1:]:
parsefile(filename)
if __name__ == '__main__':
cProfile.run('main()', 'main.profile')
|
Update main to run a profiler
|
Update main to run a profiler
Signed-off-by: Dan McGee <a6e5737275ff1276377ee261739f3ee963671241@gmail.com>
|
Python
|
bsd-3-clause
|
toofishes/python-pgpdump
|
fddc7e09bcebf9b4875906ad03e58699237b13be
|
src/nodeconductor_assembly_waldur/packages/filters.py
|
src/nodeconductor_assembly_waldur/packages/filters.py
|
import django_filters
from nodeconductor.core.filters import UUIDFilter
from . import models
class PackageTemplateFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_type='icontains')
settings_uuid = UUIDFilter(name='service_settings__uuid')
class Meta(object):
model = models.PackageTemplate
fields = ('name', 'settings_uuid',)
class OpenStackPackageFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_type='icontains')
customer = UUIDFilter(name='tenant__service_project_link__project__customer')
project = UUIDFilter(name='tenant__service_project_link__project')
class Meta(object):
model = models.OpenStackPackage
fields = ('name', 'customer', 'project')
|
import django_filters
from nodeconductor.core.filters import UUIDFilter
from . import models
class PackageTemplateFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_type='icontains')
settings_uuid = UUIDFilter(name='service_settings__uuid')
class Meta(object):
model = models.PackageTemplate
fields = ('name', 'settings_uuid',)
class OpenStackPackageFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_type='icontains')
customer = UUIDFilter(name='tenant__service_project_link__project__customer__uuid')
project = UUIDFilter(name='tenant__service_project_link__project__uuid')
tenant = UUIDFilter(name='tenant__uuid')
class Meta(object):
model = models.OpenStackPackage
fields = ('name', 'customer', 'project', 'tenant')
|
Enable filtering OpenStack package by tenant.
|
Enable filtering OpenStack package by tenant.
|
Python
|
mit
|
opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/nodeconductor-assembly-waldur,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind
|
f5d4f543cc7265433bf6040335b2f6d592b52b91
|
lmod/__init__.py
|
lmod/__init__.py
|
from functools import partial
from os import environ
from subprocess import Popen, PIPE
LMOD_SYSTEM_NAME = environ.get('LMOD_SYSTEM_NAME', '')
def module(command, *args):
cmd = (environ['LMOD_CMD'], 'python', '--terse', command)
result = Popen(cmd + args, stdout=PIPE, stderr=PIPE)
if command in ('load', 'unload', 'restore', 'save'):
exec(result.stdout.read())
return result.stderr.read().decode()
def avail():
string = module('avail')
modules = []
for entry in string.split():
if not (entry.startswith('/') or entry.endswith('/')):
modules.append(entry)
return modules
def list():
string = module('list').strip()
if string != "No modules loaded":
return string.split()
return []
def savelist(system=LMOD_SYSTEM_NAME):
names = module('savelist').split()
if system:
suffix = '.{}'.format(system)
n = len(suffix)
names = [name[:-n] for name in names if name.endswith(suffix)]
return names
show = partial(module, 'show')
load = partial(module, 'load')
unload = partial(module, 'unload')
restore = partial(module, 'restore')
save = partial(module, 'save')
|
import os # require by lmod output evaluated by exec()
from functools import partial
from os import environ
from subprocess import Popen, PIPE
LMOD_SYSTEM_NAME = environ.get('LMOD_SYSTEM_NAME', '')
def module(command, *args):
cmd = (environ['LMOD_CMD'], 'python', '--terse', command)
result = Popen(cmd + args, stdout=PIPE, stderr=PIPE)
if command in ('load', 'unload', 'restore', 'save'):
exec(result.stdout.read())
return result.stderr.read().decode()
def avail():
string = module('avail')
modules = []
for entry in string.split():
if not (entry.startswith('/') or entry.endswith('/')):
modules.append(entry)
return modules
def list():
string = module('list').strip()
if string != "No modules loaded":
return string.split()
return []
def savelist(system=LMOD_SYSTEM_NAME):
names = module('savelist').split()
if system:
suffix = '.{}'.format(system)
n = len(suffix)
names = [name[:-n] for name in names if name.endswith(suffix)]
return names
show = partial(module, 'show')
load = partial(module, 'load')
unload = partial(module, 'unload')
restore = partial(module, 'restore')
save = partial(module, 'save')
|
Add import os in lmod to fix regression
|
Add import os in lmod to fix regression
|
Python
|
mit
|
cmd-ntrf/jupyter-lmod,cmd-ntrf/jupyter-lmod,cmd-ntrf/jupyter-lmod
|
5d7f2f84600abcede94a0aaee087ef299cf740a6
|
farmers_api/farmers/views.py
|
farmers_api/farmers/views.py
|
from rest_framework import viewsets
from .models import Farmer
from .serializers import FarmerSerializer
class FarmerViewSet(viewsets.ReadOnlyModelViewSet):
queryset = Farmer.objects.all()
serializer_class = FarmerSerializer
|
from rest_framework import viewsets
from .models import Farmer
from .serializers import FarmerSerializer
class FarmerViewSet(viewsets.ReadOnlyModelViewSet):
queryset = Farmer.objects.all()
serializer_class = FarmerSerializer
filter_fields = ('town',)
|
Add filter on the town field on the Farmer model
|
Add filter on the town field on the Farmer model
|
Python
|
bsd-2-clause
|
tm-kn/farmers-api
|
d4e03bfcbc6292d3a50237f95c9d67ba5d89a475
|
swampdragon/pubsub_providers/redis_sub_provider.py
|
swampdragon/pubsub_providers/redis_sub_provider.py
|
import json
import tornadoredis.pubsub
import tornadoredis
from .base_provider import BaseProvider
class RedisSubProvider(BaseProvider):
def __init__(self):
self._subscriber = tornadoredis.pubsub.SockJSSubscriber(tornadoredis.Client())
def close(self, broadcaster):
for channel in self._subscriber.subscribers:
if broadcaster in self._subscriber.subscribers[channel]:
self._subscriber.subscribers[channel].pop(broadcaster)
def get_channel(self, base_channel, **channel_filter):
return self._construct_channel(base_channel, **channel_filter)
def subscribe(self, channels, broadcaster):
self._subscriber.subscribe(channels, broadcaster)
def unsubscribe(self, channels, broadcaster):
for channel in channels:
if broadcaster in self._subscriber.subscribers[channel]:
self._subscriber.subscribers[channel].pop(broadcaster)
def publish(self, channel, data):
if isinstance(data, dict):
data = json.dumps(data)
broadcasters = list(self._subscriber.subscribers[channel].keys())
if broadcasters:
for bc in broadcasters:
if not bc.session.is_closed:
bc.broadcast(broadcasters, data)
break
|
import json
import tornadoredis.pubsub
import tornadoredis
from .base_provider import BaseProvider
class RedisSubProvider(BaseProvider):
def __init__(self):
self._subscriber = tornadoredis.pubsub.SockJSSubscriber(tornadoredis.Client())
def close(self, broadcaster):
for channel in self._subscriber.subscribers:
if broadcaster in self._subscriber.subscribers[channel]:
self._subscriber.unsubscribe(channel, broadcaster)
def get_channel(self, base_channel, **channel_filter):
return self._construct_channel(base_channel, **channel_filter)
def subscribe(self, channels, broadcaster):
self._subscriber.subscribe(channels, broadcaster)
def unsubscribe(self, channels, broadcaster):
for channel in channels:
if broadcaster in self._subscriber.subscribers[channel]:
self._subscriber.subscribers[channel].pop(broadcaster)
def publish(self, channel, data):
if isinstance(data, dict):
data = json.dumps(data)
broadcasters = list(self._subscriber.subscribers[channel].keys())
if broadcasters:
for bc in broadcasters:
if not bc.session.is_closed:
bc.broadcast(broadcasters, data)
break
|
Use usubscribe rather than popping the broadcaster
|
Use usubscribe rather than popping the broadcaster
|
Python
|
bsd-3-clause
|
sahlinet/swampdragon,boris-savic/swampdragon,michael-k/swampdragon,denizs/swampdragon,aexeagmbh/swampdragon,jonashagstedt/swampdragon,jonashagstedt/swampdragon,d9pouces/swampdragon,boris-savic/swampdragon,sahlinet/swampdragon,aexeagmbh/swampdragon,jonashagstedt/swampdragon,bastianh/swampdragon,boris-savic/swampdragon,michael-k/swampdragon,faulkner/swampdragon,bastianh/swampdragon,seclinch/swampdragon,Manuel4131/swampdragon,michael-k/swampdragon,faulkner/swampdragon,seclinch/swampdragon,Manuel4131/swampdragon,Manuel4131/swampdragon,seclinch/swampdragon,sahlinet/swampdragon,denizs/swampdragon,d9pouces/swampdragon,aexeagmbh/swampdragon,denizs/swampdragon,bastianh/swampdragon,faulkner/swampdragon,d9pouces/swampdragon,h-hirokawa/swampdragon,h-hirokawa/swampdragon
|
e30e5e9780cfe674a70856609ad6010056936263
|
picdump/webadapter.py
|
picdump/webadapter.py
|
import urllib.request
class WebAdapter:
def get(self, urllike):
url = self.mk_url(urllike)
try:
res = urllib.request.urlopen(url)
return res.read()
except Exception as e:
raise e
def open(self, urllike):
url = self.mk_url(urllike)
try:
return urllib.request.urlopen(url)
except Exception as e:
raise e
def mk_url(self, urllike):
return str(urllike)
|
import requests
class WebAdapter:
def __init__(self):
self.cookies = {}
def get(self, urllike):
res = requests.get(str(urllike), cookies=self.cookies)
self.cookies = res.cookies
return res.text
|
Use requests instead of urllib.request
|
Use requests instead of urllib.request
|
Python
|
mit
|
kanosaki/PicDump,kanosaki/PicDump
|
b38f465e512f9b7e79935c156c60ef56d6122387
|
aiohttp_middlewares/constants.py
|
aiohttp_middlewares/constants.py
|
"""
=============================
aiohttp_middlewares.constants
=============================
Collection of constants for ``aiohttp_middlewares`` project.
"""
#: Set of idempotent HTTP methods
IDEMPOTENT_METHODS = frozenset({'GET', 'HEAD', 'OPTIONS', 'TRACE'})
#: Set of non-idempotent HTTP methods
NON_IDEMPOTENT_METHODS = frozenset({'POST', 'PUT', 'PATCH', 'DELETE'})
|
"""
=============================
aiohttp_middlewares.constants
=============================
Collection of constants for ``aiohttp_middlewares`` project.
"""
#: Set of idempotent HTTP methods
IDEMPOTENT_METHODS = frozenset({'GET', 'HEAD', 'OPTIONS', 'TRACE'})
#: Set of non-idempotent HTTP methods
NON_IDEMPOTENT_METHODS = frozenset({'DELETE', 'PATCH', 'POST', 'PUT'})
|
Order HTTP methods in constant.
|
chore: Order HTTP methods in constant.
|
Python
|
bsd-3-clause
|
playpauseandstop/aiohttp-middlewares,playpauseandstop/aiohttp-middlewares
|
fbb2c05aef76c02094c13f5edeaecd9b7428ff11
|
alignak_backend/models/uipref.py
|
alignak_backend/models/uipref.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Resource information of host
"""
def get_name():
"""
Get name of this resource
:return: name of this resource
:rtype: str
"""
return 'uipref'
def get_schema():
"""
Schema structure of this resource
:return: schema dictionnary
:rtype: dict
"""
return {
'allow_unknown': True,
'schema': {
'type': {
'type': 'string',
'ui': {
'title': "Preference's type",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': ''
},
'user': {
'type': 'string',
'ui': {
'title': "User name",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': ''
},
'data': {
'type': 'list',
'ui': {
'title': "User name",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': []
},
}
}
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Resource information of host
"""
def get_name():
"""
Get name of this resource
:return: name of this resource
:rtype: str
"""
return 'uipref'
def get_schema():
"""
Schema structure of this resource
:return: schema dictionnary
:rtype: dict
"""
return {
'allow_unknown': True,
'schema': {
'type': {
'type': 'string',
'ui': {
'title': "Preference's type",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': ''
},
'user': {
'type': 'string',
'ui': {
'title': "User name",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': ''
},
'data': {
'type': 'dict',
'ui': {
'title': "Preference's dictionary",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': []
},
}
}
|
Update UI preferences model (dict)
|
Update UI preferences model (dict)
|
Python
|
agpl-3.0
|
Alignak-monitoring-contrib/alignak-backend,Alignak-monitoring-contrib/alignak-backend,Alignak-monitoring-contrib/alignak-backend,Alignak-monitoring-contrib/alignak-backend
|
53b9eff3ffc1768d3503021e7248351e24d59af7
|
tests/httpd.py
|
tests/httpd.py
|
import SimpleHTTPServer
import BaseHTTPServer
class Handler(SimpleHTTPServer.SimpleHTTPRequestHandler):
def do_POST(s):
s.send_response(200)
s.end_headers()
if __name__ == '__main__':
server_class = BaseHTTPServer.HTTPServer
httpd = server_class(('0.0.0.0', 8328), Handler)
try:
httpd.serve_forever()
except KeyboardInterrupt:
httpd.server_close()
|
import BaseHTTPServer
class Handler(BaseHTTPServer.BaseHTTPRequestHandler):
def do_POST(self):
content_type = self.headers.getheader('content-type')
content_length = int(self.headers.getheader('content-length'))
self.send_response(200)
self.send_header('Content-Type', content_type)
self.send_header('Content-Length', str(content_length))
self.end_headers()
self.wfile.write(self.rfile.read(content_length))
if __name__ == '__main__':
server_class = BaseHTTPServer.HTTPServer
httpd = server_class(('0.0.0.0', 8328), Handler)
try:
httpd.serve_forever()
except KeyboardInterrupt:
httpd.server_close()
|
Fix test http server, change to echo back request body
|
Fix test http server, change to echo back request body
|
Python
|
bsd-2-clause
|
chop-dbhi/django-webhooks,pombredanne/django-webhooks,pombredanne/django-webhooks,chop-dbhi/django-webhooks
|
b9143462c004af7d18a66fa92ad94585468751b9
|
IndexedRedis/fields/classic.py
|
IndexedRedis/fields/classic.py
|
# Copyright (c) 2017 Timothy Savannah under LGPL version 2.1. See LICENSE for more information.
#
# fields.classic - The IRField type which behaves like the "classic" IndexedRedis string-named fields.
#
# vim: set ts=8 shiftwidth=8 softtabstop=8 noexpandtab :
from . import IRField, IR_NULL_STRINGS, irNull
from ..compat_str import tobytes
class IRClassicField(IRField):
'''
IRClassicField - The IRField type which behaves like the "classic" IndexedRedis string-named fields.
This will store and retrieve data encoding into the default encoding (@see IndexedRedis.compat_str.setDefaultIREncoding)
and have a default value of empty string.
'''
CAN_INDEX = True
def __init__(self, name='', hashIndex=False):
IRField.__init__(self, name=name, hashIndex=hashIndex, defaultValue='')
def __new__(self, name='', hashIndex=False):
return IRField.__new__(self, name)
# vim: set ts=8 shiftwidth=8 softtabstop=8 noexpandtab :
|
# Copyright (c) 2017 Timothy Savannah under LGPL version 2.1. See LICENSE for more information.
#
# fields.classic - The IRField type which behaves like the "classic" IndexedRedis string-named fields.
#
# vim: set ts=8 shiftwidth=8 softtabstop=8 noexpandtab :
from . import IRField, IR_NULL_STRINGS, irNull
from ..compat_str import tobytes, encoded_str_type
class IRClassicField(IRField):
'''
IRClassicField - The IRField type which behaves like the "classic" IndexedRedis string-named fields.
This will store and retrieve data encoding into the default encoding (@see IndexedRedis.compat_str.setDefaultIREncoding)
and have a default value of empty string.
'''
CAN_INDEX = True
def __init__(self, name='', hashIndex=False):
IRField.__init__(self, name=name, valueType=encoded_str_type, hashIndex=hashIndex, defaultValue='')
def __new__(self, name='', hashIndex=False):
return IRField.__new__(self, name)
# vim: set ts=8 shiftwidth=8 softtabstop=8 noexpandtab :
|
Change IRFieldClassic to use 'encoded_str_type'
|
Change IRFieldClassic to use 'encoded_str_type'
|
Python
|
lgpl-2.1
|
kata198/indexedredis,kata198/indexedredis
|
effa5f84fc93ced38ad9e5d3b0a16bea2d3914ef
|
caminae/common/templatetags/field_verbose_name.py
|
caminae/common/templatetags/field_verbose_name.py
|
from django import template
register = template.Library()
def field_verbose_name(obj, field):
"""Usage: {{ object|get_object_field }}"""
return obj._meta.get_field(field).verbose_name
register.filter(field_verbose_name)
register.filter('verbose', field_verbose_name)
|
from django import template
from django.db.models.fields.related import FieldDoesNotExist
register = template.Library()
def field_verbose_name(obj, field):
"""Usage: {{ object|get_object_field }}"""
try:
return obj._meta.get_field(field).verbose_name
except FieldDoesNotExist:
a = getattr(obj, '%s_verbose_name' % field)
if a is None:
raise
return unicode(a)
register.filter(field_verbose_name)
register.filter('verbose', field_verbose_name)
|
Allow column to be a property
|
Allow column to be a property
|
Python
|
bsd-2-clause
|
makinacorpus/Geotrek,Anaethelion/Geotrek,mabhub/Geotrek,camillemonchicourt/Geotrek,Anaethelion/Geotrek,johan--/Geotrek,johan--/Geotrek,johan--/Geotrek,makinacorpus/Geotrek,camillemonchicourt/Geotrek,GeotrekCE/Geotrek-admin,mabhub/Geotrek,makinacorpus/Geotrek,Anaethelion/Geotrek,Anaethelion/Geotrek,GeotrekCE/Geotrek-admin,johan--/Geotrek,mabhub/Geotrek,makinacorpus/Geotrek,mabhub/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,camillemonchicourt/Geotrek
|
bdb78cd1bb13981a20ecb0cf9eb981d784c95b0e
|
fellowms/forms.py
|
fellowms/forms.py
|
from django.forms import ModelForm, widgets
from .models import Fellow, Event, Expense, Blog
class FellowForm(ModelForm):
class Meta:
model = Fellow
exclude = [
"inauguration_year",
"mentor",
]
class EventForm(ModelForm):
class Meta:
model = Event
exclude = [
"status",
"budget_approve",
]
# We don't want to expose fellows' data
# so we will request the email
# and match on the database.
labels = {
'fellow': 'Fellow',
'url': "Event's homepage url",
'name': "Event's name",
}
class ExpenseForm(ModelForm):
class Meta:
model = Expense
exclude = [
'id',
'status',
]
class BlogForm(ModelForm):
class Meta:
model = Blog
fields = '__all__'
|
from django.forms import ModelForm, widgets
from .models import Fellow, Event, Expense, Blog
class FellowForm(ModelForm):
class Meta:
model = Fellow
exclude = [
"home_lon",
"home_lat",
"inauguration_year",
"mentor",
]
class EventForm(ModelForm):
class Meta:
model = Event
exclude = [
"status",
"budget_approve",
]
# We don't want to expose fellows' data
# so we will request the email
# and match on the database.
labels = {
'fellow': 'Fellow',
'url': "Event's homepage url",
'name': "Event's name",
}
class ExpenseForm(ModelForm):
class Meta:
model = Expense
exclude = [
'id',
'status',
]
class BlogForm(ModelForm):
class Meta:
model = Blog
fields = '__all__'
|
Update form to handle home_lon and home_lat
|
Update form to handle home_lon and home_lat
|
Python
|
bsd-3-clause
|
softwaresaved/fat,softwaresaved/fat,softwaresaved/fat,softwaresaved/fat
|
ca2b02d551e9bb4c8625ae79f7878892673fa731
|
corehq/apps/es/domains.py
|
corehq/apps/es/domains.py
|
from .es_query import HQESQuery
from . import filters
class DomainES(HQESQuery):
index = 'domains'
@property
def builtin_filters(self):
return [
real_domains,
commconnect_domains,
created,
] + super(DomainES, self).builtin_filters
def real_domains():
return filters.term("is_test", False)
def commconnect_domains():
return filters.term("commconnect_enabled", True)
def created(gt=None, gte=None, lt=None, lte=None):
return filters.date_range('date_created', gt, gte, lt, lte)
|
from .es_query import HQESQuery
from . import filters
class DomainES(HQESQuery):
index = 'domains'
@property
def builtin_filters(self):
return [
real_domains,
commcare_domains,
commconnect_domains,
commtrack_domains,
created,
] + super(DomainES, self).builtin_filters
def real_domains():
return filters.term("is_test", False)
def commcare_domains():
return filters.AND(filters.term("commconnect_enabled", False),
filters.term("commtrack_enabled", False))
def commconnect_domains():
return filters.term("commconnect_enabled", True)
def commtrack_domains():
return filters.term("commtrack_enabled", True)
def created(gt=None, gte=None, lt=None, lte=None):
return filters.date_range('date_created', gt, gte, lt, lte)
|
Add CommCare, CommTrack filters for DomainES
|
Add CommCare, CommTrack filters for DomainES
|
Python
|
bsd-3-clause
|
qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq
|
11f933e986dd9e2c62b852ca38a37f959c10145e
|
tools/utils.py
|
tools/utils.py
|
#!/usr/bin/env python
''' This script provides utils for python scripts in cameo.
'''
import os
import sys
import subprocess
def TryAddDepotToolsToPythonPath():
depot_tools = FindDepotToolsInPath()
if depot_tools:
sys.path.append(depot_tools)
def FindDepotToolsInPath():
paths = os.getenv('PATH').split(os.path.pathsep)
for path in paths:
if os.path.basename(path) == 'depot_tools':
return path
return None
def IsWindows():
return sys.platform == 'cygwin' or sys.platform.startswith('win')
def IsLinux():
return sys.platform.startswith('linux')
def IsMac():
return sys.platform.startswith('darwin')
def GitExe():
if IsWindows():
return 'git.bat'
else:
return 'git'
def GetCommandOutput(command, cwd=None):
proc = subprocess.Popen(command, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT, bufsize=1,
cwd=cwd)
output = proc.communicate()[0]
result = proc.returncode
if result:
raise Exception('%s: %s' % (subprocess.list2cmdline(command), output))
return output
|
#!/usr/bin/env python
''' This script provides utils for python scripts in cameo.
'''
import os
import sys
import subprocess
def TryAddDepotToolsToPythonPath():
depot_tools = FindDepotToolsInPath()
if depot_tools:
sys.path.append(depot_tools)
def FindDepotToolsInPath():
paths = os.getenv('PATH').split(os.path.pathsep)
for path in paths:
if os.path.basename(path) == '':
# path is end with os.path.pathsep
path = os.path.dirname(path)
if os.path.basename(path) == 'depot_tools':
return path
return None
def IsWindows():
return sys.platform == 'cygwin' or sys.platform.startswith('win')
def IsLinux():
return sys.platform.startswith('linux')
def IsMac():
return sys.platform.startswith('darwin')
def GitExe():
if IsWindows():
return 'git.bat'
else:
return 'git'
def GetCommandOutput(command, cwd=None):
proc = subprocess.Popen(command, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT, bufsize=1,
cwd=cwd)
output = proc.communicate()[0]
result = proc.returncode
if result:
raise Exception('%s: %s' % (subprocess.list2cmdline(command), output))
return output
|
Fix FindDepotToolsInPath not working in some cases
|
Fix FindDepotToolsInPath not working in some cases
When depot tools' path in PATH is like '/home/project/depot_tools/',
FindDepotToolsInPath will not detect it because os.path.basename will
get empty string.
Fix this by getting its parent if its basename is empty.
BUG=https://github.com/otcshare/cameo/issues/29
|
Python
|
bsd-3-clause
|
shaochangbin/crosswalk,rakuco/crosswalk,RafuCater/crosswalk,DonnaWuDongxia/crosswalk,tomatell/crosswalk,mrunalk/crosswalk,jpike88/crosswalk,dreamsxin/crosswalk,stonegithubs/crosswalk,weiyirong/crosswalk-1,siovene/crosswalk,baleboy/crosswalk,crosswalk-project/crosswalk,fujunwei/crosswalk,ZhengXinCN/crosswalk,minggangw/crosswalk,axinging/crosswalk,RafuCater/crosswalk,darktears/crosswalk,qjia7/crosswalk,Shouqun/crosswalk,heke123/crosswalk,axinging/crosswalk,marcuspridham/crosswalk,marcuspridham/crosswalk,axinging/crosswalk,baleboy/crosswalk,marcuspridham/crosswalk,XiaosongWei/crosswalk,PeterWangIntel/crosswalk,dreamsxin/crosswalk,rakuco/crosswalk,xzhan96/crosswalk,baleboy/crosswalk,ZhengXinCN/crosswalk,baleboy/crosswalk,mrunalk/crosswalk,jondong/crosswalk,pozdnyakov/crosswalk,bestwpw/crosswalk,seanlong/crosswalk,huningxin/crosswalk,marcuspridham/crosswalk,minggangw/crosswalk,Pluto-tv/crosswalk,mrunalk/crosswalk,seanlong/crosswalk,amaniak/crosswalk,bestwpw/crosswalk,lincsoon/crosswalk,mrunalk/crosswalk,Shouqun/crosswalk,minggangw/crosswalk,fujunwei/crosswalk,lincsoon/crosswalk,lincsoon/crosswalk,seanlong/crosswalk,fujunwei/crosswalk,zeropool/crosswalk,seanlong/crosswalk,XiaosongWei/crosswalk,heke123/crosswalk,jpike88/crosswalk,stonegithubs/crosswalk,kurli/crosswalk,baleboy/crosswalk,amaniak/crosswalk,weiyirong/crosswalk-1,rakuco/crosswalk,siovene/crosswalk,jondwillis/crosswalk,xzhan96/crosswalk,lincsoon/crosswalk,Bysmyyr/crosswalk,rakuco/crosswalk,hgl888/crosswalk-efl,chinakids/crosswalk,jpike88/crosswalk,DonnaWuDongxia/crosswalk,PeterWangIntel/crosswalk,darktears/crosswalk,qjia7/crosswalk,jondwillis/crosswalk,Bysmyyr/crosswalk,shaochangbin/crosswalk,PeterWangIntel/crosswalk,tedshroyer/crosswalk,chinakids/crosswalk,chuan9/crosswalk,alex-zhang/crosswalk,crosswalk-project/crosswalk-efl,dreamsxin/crosswalk,fujunwei/crosswalk,zliang7/crosswalk,crosswalk-project/crosswalk,fujunwei/crosswalk,wuhengzhi/crosswalk,hgl888/crosswalk-efl,baleboy/crosswalk,weiyirong/crosswalk-1,chuan9/crosswalk,Bysmyyr/crosswalk,qjia7/crosswalk,pk-sam/crosswalk,axinging/crosswalk,huningxin/crosswalk,stonegithubs/crosswalk,tedshroyer/crosswalk,darktears/crosswalk,amaniak/crosswalk,myroot/crosswalk,tomatell/crosswalk,myroot/crosswalk,heke123/crosswalk,Pluto-tv/crosswalk,leonhsl/crosswalk,TheDirtyCalvinist/spacewalk,minggangw/crosswalk,zliang7/crosswalk,jpike88/crosswalk,hgl888/crosswalk-efl,PeterWangIntel/crosswalk,mrunalk/crosswalk,zeropool/crosswalk,heke123/crosswalk,Shouqun/crosswalk,pk-sam/crosswalk,weiyirong/crosswalk-1,baleboy/crosswalk,rakuco/crosswalk,crosswalk-project/crosswalk,zeropool/crosswalk,stonegithubs/crosswalk,jondong/crosswalk,seanlong/crosswalk,chuan9/crosswalk,dreamsxin/crosswalk,kurli/crosswalk,PeterWangIntel/crosswalk,pk-sam/crosswalk,tomatell/crosswalk,pk-sam/crosswalk,pozdnyakov/crosswalk,pk-sam/crosswalk,myroot/crosswalk,xzhan96/crosswalk,shaochangbin/crosswalk,Shouqun/crosswalk,jondwillis/crosswalk,RafuCater/crosswalk,huningxin/crosswalk,hgl888/crosswalk,kurli/crosswalk,XiaosongWei/crosswalk,hgl888/crosswalk,rakuco/crosswalk,shaochangbin/crosswalk,tedshroyer/crosswalk,dreamsxin/crosswalk,amaniak/crosswalk,crosswalk-project/crosswalk,DonnaWuDongxia/crosswalk,zeropool/crosswalk,kurli/crosswalk,siovene/crosswalk,darktears/crosswalk,zliang7/crosswalk,xzhan96/crosswalk,alex-zhang/crosswalk,tomatell/crosswalk,huningxin/crosswalk,TheDirtyCalvinist/spacewalk,xzhan96/crosswalk,XiaosongWei/crosswalk,marcuspridham/crosswalk,lincsoon/crosswalk,Bysmyyr/crosswalk,bestwpw/crosswalk,weiyirong/crosswalk-1,alex-zhang/crosswalk,Bysmyyr/crosswalk,zeropool/crosswalk,chinakids/crosswalk,RafuCater/crosswalk,siovene/crosswalk,XiaosongWei/crosswalk,Pluto-tv/crosswalk,wuhengzhi/crosswalk,wuhengzhi/crosswalk,weiyirong/crosswalk-1,dreamsxin/crosswalk,huningxin/crosswalk,bestwpw/crosswalk,RafuCater/crosswalk,wuhengzhi/crosswalk,Bysmyyr/crosswalk,wuhengzhi/crosswalk,fujunwei/crosswalk,chuan9/crosswalk,baleboy/crosswalk,Bysmyyr/crosswalk,TheDirtyCalvinist/spacewalk,tomatell/crosswalk,pk-sam/crosswalk,weiyirong/crosswalk-1,zliang7/crosswalk,stonegithubs/crosswalk,hgl888/crosswalk,pozdnyakov/crosswalk,xzhan96/crosswalk,minggangw/crosswalk,amaniak/crosswalk,siovene/crosswalk,xzhan96/crosswalk,tomatell/crosswalk,minggangw/crosswalk,lincsoon/crosswalk,Bysmyyr/crosswalk,leonhsl/crosswalk,zliang7/crosswalk,chuan9/crosswalk,heke123/crosswalk,Pluto-tv/crosswalk,fujunwei/crosswalk,lincsoon/crosswalk,RafuCater/crosswalk,tedshroyer/crosswalk,jpike88/crosswalk,bestwpw/crosswalk,leonhsl/crosswalk,qjia7/crosswalk,Pluto-tv/crosswalk,amaniak/crosswalk,hgl888/crosswalk,siovene/crosswalk,ZhengXinCN/crosswalk,ZhengXinCN/crosswalk,ZhengXinCN/crosswalk,stonegithubs/crosswalk,bestwpw/crosswalk,crosswalk-project/crosswalk,tomatell/crosswalk,Shouqun/crosswalk,jondong/crosswalk,rakuco/crosswalk,leonhsl/crosswalk,zeropool/crosswalk,marcuspridham/crosswalk,minggangw/crosswalk,jondwillis/crosswalk,axinging/crosswalk,qjia7/crosswalk,crosswalk-project/crosswalk,alex-zhang/crosswalk,jondwillis/crosswalk,chinakids/crosswalk,kurli/crosswalk,tedshroyer/crosswalk,marcuspridham/crosswalk,TheDirtyCalvinist/spacewalk,hgl888/crosswalk-efl,Pluto-tv/crosswalk,seanlong/crosswalk,crosswalk-project/crosswalk-efl,dreamsxin/crosswalk,jondong/crosswalk,alex-zhang/crosswalk,darktears/crosswalk,DonnaWuDongxia/crosswalk,PeterWangIntel/crosswalk,TheDirtyCalvinist/spacewalk,zeropool/crosswalk,Pluto-tv/crosswalk,zliang7/crosswalk,ZhengXinCN/crosswalk,DonnaWuDongxia/crosswalk,myroot/crosswalk,bestwpw/crosswalk,zliang7/crosswalk,hgl888/crosswalk,jondong/crosswalk,heke123/crosswalk,crosswalk-project/crosswalk,jpike88/crosswalk,DonnaWuDongxia/crosswalk,marcuspridham/crosswalk,heke123/crosswalk,shaochangbin/crosswalk,hgl888/crosswalk,axinging/crosswalk,ZhengXinCN/crosswalk,crosswalk-project/crosswalk,tedshroyer/crosswalk,mrunalk/crosswalk,siovene/crosswalk,amaniak/crosswalk,xzhan96/crosswalk,kurli/crosswalk,crosswalk-project/crosswalk-efl,jondong/crosswalk,hgl888/crosswalk-efl,pozdnyakov/crosswalk,crosswalk-project/crosswalk-efl,huningxin/crosswalk,crosswalk-project/crosswalk-efl,darktears/crosswalk,chinakids/crosswalk,crosswalk-project/crosswalk-efl,qjia7/crosswalk,rakuco/crosswalk,heke123/crosswalk,axinging/crosswalk,leonhsl/crosswalk,alex-zhang/crosswalk,jondong/crosswalk,hgl888/crosswalk,tedshroyer/crosswalk,XiaosongWei/crosswalk,lincsoon/crosswalk,wuhengzhi/crosswalk,alex-zhang/crosswalk,PeterWangIntel/crosswalk,pozdnyakov/crosswalk,pozdnyakov/crosswalk,minggangw/crosswalk,leonhsl/crosswalk,hgl888/crosswalk,DonnaWuDongxia/crosswalk,crosswalk-project/crosswalk-efl,myroot/crosswalk,chuan9/crosswalk,stonegithubs/crosswalk,shaochangbin/crosswalk,chuan9/crosswalk,XiaosongWei/crosswalk,zliang7/crosswalk,hgl888/crosswalk-efl,RafuCater/crosswalk,hgl888/crosswalk-efl,darktears/crosswalk,jpike88/crosswalk,TheDirtyCalvinist/spacewalk,myroot/crosswalk,Shouqun/crosswalk,chinakids/crosswalk,darktears/crosswalk,jondong/crosswalk,jondwillis/crosswalk,pk-sam/crosswalk,leonhsl/crosswalk,jondwillis/crosswalk
|
91ff0fcb40d5d5318b71f0eb4b0873fb470265a0
|
migrations/versions/f0c9c797c230_populate_application_settings_with_.py
|
migrations/versions/f0c9c797c230_populate_application_settings_with_.py
|
"""populate application_settings with started apps
Revision ID: f0c9c797c230
Revises: 31850461ed3
Create Date: 2017-02-16 01:02:02.951573
"""
# revision identifiers, used by Alembic.
revision = 'f0c9c797c230'
down_revision = '31850461ed3'
from alembic import op
import sqlalchemy as sa
from puffin.core import docker, applications
def upgrade():
running_applications = docker.get_all_running_applications()
for running_application in running_applications:
user = running_application[0]
application = running_application[1]
applications.set_application_started(user, application, True)
def downgrade():
pass
|
"""populate application_settings with started apps
Revision ID: f0c9c797c230
Revises: 31850461ed3
Create Date: 2017-02-16 01:02:02.951573
"""
# revision identifiers, used by Alembic.
revision = 'f0c9c797c230'
down_revision = '31850461ed3'
from alembic import op
import sqlalchemy as sa
from puffin.core import docker, applications
def upgrade():
running_applications = docker.get_all_running_applications()
for a in running_applications:
user = a[0]
application = a[1]
applications.set_application_started(user, application, True)
def downgrade():
started_applications = applications.get_all_started_applications()
for a in started_applications:
user = a[0]
application = a[1]
applications.set_application_started(user, application, False)
|
Add downgrade started applications migration
|
Add downgrade started applications migration
|
Python
|
agpl-3.0
|
loomchild/puffin,loomchild/puffin,loomchild/puffin,puffinrocks/puffin,puffinrocks/puffin,loomchild/jenca-puffin,loomchild/puffin,loomchild/jenca-puffin,loomchild/puffin
|
50ead4fe13eec7ad9760f0f577212beb8e8a51be
|
pombola/info/views.py
|
pombola/info/views.py
|
from django.views.generic import DetailView
from models import InfoPage
class InfoPageView(DetailView):
"""Show the page, or 'index' if no slug"""
model = InfoPage
|
from django.views.generic import DetailView
from models import InfoPage
class InfoPageView(DetailView):
"""Show the page for the given slug"""
model = InfoPage
queryset = InfoPage.objects.filter(kind=InfoPage.KIND_PAGE)
|
Use a queryset to display only kind=page
|
Use a queryset to display only kind=page
|
Python
|
agpl-3.0
|
mysociety/pombola,mysociety/pombola,geoffkilpin/pombola,hzj123/56th,ken-muturi/pombola,mysociety/pombola,patricmutwiri/pombola,patricmutwiri/pombola,ken-muturi/pombola,ken-muturi/pombola,hzj123/56th,ken-muturi/pombola,ken-muturi/pombola,hzj123/56th,mysociety/pombola,hzj123/56th,geoffkilpin/pombola,geoffkilpin/pombola,hzj123/56th,ken-muturi/pombola,patricmutwiri/pombola,mysociety/pombola,geoffkilpin/pombola,patricmutwiri/pombola,patricmutwiri/pombola,mysociety/pombola,geoffkilpin/pombola,hzj123/56th,patricmutwiri/pombola,geoffkilpin/pombola
|
528edba420089249bd58c0621e06225db84e223f
|
opps/contrib/logging/models.py
|
opps/contrib/logging/models.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db import models
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from opps.core.models import NotUserPublishable
class Logging(NotUserPublishable):
user = models.ForeignKey(
settings.AUTH_USER_MODEL,
null=True, blank=True,
)
application = models.CharField(
_(u"Application"),
max_length=75,
null=True, blank=True,
db_index=True)
action = models.CharField(
_(u"Action"),
max_length=50,
null=True, blank=True,
db_index=True)
text = models.TextField(
_(u"Text"),
null=True, blank=True,
db_index=True)
def save(self, *args, **kwargs):
self.published = True
super(Logging, self).save(*args, **kwargs)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db import models
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from opps.core.models import NotUserPublishable
class Logging(NotUserPublishable):
user = models.ForeignKey(
settings.AUTH_USER_MODEL,
null=True, blank=True,
verbose_name=_(u'User')
)
application = models.CharField(
_(u"Application"),
max_length=75,
null=True, blank=True,
db_index=True)
action = models.CharField(
_(u"Action"),
max_length=50,
null=True, blank=True,
db_index=True)
text = models.TextField(
_(u"Text"),
null=True, blank=True,
db_index=True)
def save(self, *args, **kwargs):
self.published = True
super(Logging, self).save(*args, **kwargs)
class Meta:
verbose_name = _(u'Logging')
verbose_name_plural = _(u'Loggings')
|
Add missing translation on logging contrib app
|
Add missing translation on logging contrib app
|
Python
|
mit
|
jeanmask/opps,jeanmask/opps,williamroot/opps,YACOWS/opps,jeanmask/opps,williamroot/opps,jeanmask/opps,YACOWS/opps,williamroot/opps,opps/opps,YACOWS/opps,opps/opps,opps/opps,williamroot/opps,YACOWS/opps,opps/opps
|
411decbdb193b28bb3060e02e81bfa29483e85a9
|
staticgen_demo/blog/staticgen_views.py
|
staticgen_demo/blog/staticgen_views.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from staticgen.staticgen_pool import staticgen_pool
from staticgen.staticgen_views import StaticgenView
from .models import Post
class BlogPostListView(StaticgenView):
is_paginated = True
i18n = True
def items(self):
return ('blog:posts_list', )
def _get_paginator(self, url):
response = self.client.get(url)
print 'status_code: %s' % response.status_code
if not response.status_code == 200:
pass
else:
context = {}
if hasattr(response, 'context_data'):
context = response.context_data
elif hasattr(response, 'context'):
context = response.context
print context
try:
return context['paginator'], context['is_paginated']
except KeyError:
pass
return None, False
class BlogPostDetailView(StaticgenView):
i18n = True
def items(self):
return Post.objects.all()
staticgen_pool.register(BlogPostListView)
staticgen_pool.register(BlogPostDetailView)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from staticgen.staticgen_pool import staticgen_pool
from staticgen.staticgen_views import StaticgenView
from .models import Post
class BlogPostListView(StaticgenView):
is_paginated = True
i18n = True
def items(self):
return ('blog:posts_list', )
class BlogPostDetailView(StaticgenView):
i18n = True
def items(self):
return Post.objects.all()
staticgen_pool.register(BlogPostListView)
staticgen_pool.register(BlogPostDetailView)
|
Remove debug code from staticgen views.
|
Remove debug code from staticgen views.
|
Python
|
bsd-3-clause
|
mishbahr/staticgen-demo,mishbahr/staticgen-demo,mishbahr/staticgen-demo
|
673d6cecfaeb0e919f30997f793ee2bb18e399ee
|
tempest/api_schema/response/compute/v2/hypervisors.py
|
tempest/api_schema/response/compute/v2/hypervisors.py
|
# Copyright 2014 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from tempest.api_schema.response.compute import hypervisors
hypervisors_servers = copy.deepcopy(hypervisors.common_hypervisors_detail)
# Defining extra attributes for V3 show hypervisor schema
hypervisors_servers['response_body']['properties']['hypervisors']['items'][
'properties']['servers'] = {
'type': 'array',
'items': {
'type': 'object',
'properties': {
# NOTE: Now the type of 'id' is integer,
# but here allows 'string' also because we
# will be able to change it to 'uuid' in
# the future.
'id': {'type': ['integer', 'string']},
'name': {'type': 'string'}
}
}
}
# In V2 API, if there is no servers (VM) on the Hypervisor host then 'servers'
# attribute will not be present in response body So it is not 'required'.
|
# Copyright 2014 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from tempest.api_schema.response.compute import hypervisors
hypervisors_servers = copy.deepcopy(hypervisors.common_hypervisors_detail)
# Defining extra attributes for V3 show hypervisor schema
hypervisors_servers['response_body']['properties']['hypervisors']['items'][
'properties']['servers'] = {
'type': 'array',
'items': {
'type': 'object',
'properties': {
'uuid': {'type': 'string'},
'name': {'type': 'string'}
}
}
}
# In V2 API, if there is no servers (VM) on the Hypervisor host then 'servers'
# attribute will not be present in response body So it is not 'required'.
|
Fix V2 hypervisor server schema attribute
|
Fix V2 hypervisor server schema attribute
Nova v2 hypervisor server API return attribute "uuid" in response's
server dict. Current response schema does not have this attribute instead
it contain "id" which is wrong.
This patch fix the above issue.
NOTE- "uuid" attribute in this API response is always a uuid.
Change-Id: I78c67834de930012b70874938f345524d69264ba
|
Python
|
apache-2.0
|
jaspreetw/tempest,openstack/tempest,Vaidyanath/tempest,vedujoshi/tempest,NexusIS/tempest,FujitsuEnablingSoftwareTechnologyGmbH/tempest,tonyli71/tempest,hayderimran7/tempest,xbezdick/tempest,akash1808/tempest,roopali8/tempest,tudorvio/tempest,alinbalutoiu/tempest,flyingfish007/tempest,manasi24/jiocloud-tempest-qatempest,flyingfish007/tempest,izadorozhna/tempest,afaheem88/tempest_neutron,queria/my-tempest,pczerkas/tempest,afaheem88/tempest,FujitsuEnablingSoftwareTechnologyGmbH/tempest,yamt/tempest,sebrandon1/tempest,bigswitch/tempest,masayukig/tempest,Tesora/tesora-tempest,manasi24/jiocloud-tempest-qatempest,hpcloud-mon/tempest,bigswitch/tempest,ebagdasa/tempest,openstack/tempest,neerja28/Tempest,izadorozhna/tempest,Tesora/tesora-tempest,NexusIS/tempest,jamielennox/tempest,eggmaster/tempest,roopali8/tempest,rzarzynski/tempest,yamt/tempest,queria/my-tempest,rzarzynski/tempest,vedujoshi/tempest,manasi24/tempest,redhat-cip/tempest,Juniper/tempest,varunarya10/tempest,redhat-cip/tempest,hpcloud-mon/tempest,rakeshmi/tempest,masayukig/tempest,JioCloud/tempest,Juniper/tempest,Juraci/tempest,cisco-openstack/tempest,dkalashnik/tempest,LIS/lis-tempest,rakeshmi/tempest,CiscoSystems/tempest,dkalashnik/tempest,nunogt/tempest,Lilywei123/tempest,tudorvio/tempest,tonyli71/tempest,pandeyop/tempest,danielmellado/tempest,neerja28/Tempest,Juraci/tempest,LIS/lis-tempest,JioCloud/tempest,danielmellado/tempest,zsoltdudas/lis-tempest,pczerkas/tempest,zsoltdudas/lis-tempest,eggmaster/tempest,manasi24/tempest,jamielennox/tempest,sebrandon1/tempest,afaheem88/tempest,varunarya10/tempest,afaheem88/tempest_neutron,Lilywei123/tempest,cisco-openstack/tempest,nunogt/tempest,pandeyop/tempest,hayderimran7/tempest,Vaidyanath/tempest,alinbalutoiu/tempest,ebagdasa/tempest,akash1808/tempest,xbezdick/tempest,jaspreetw/tempest,CiscoSystems/tempest
|
6de1083784d8a73e234dd14cabd17e7ee5852949
|
tools/clean_output_directory.py
|
tools/clean_output_directory.py
|
#!/usr/bin/env python
#
# Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
#
import shutil
import sys
import utils
def Main():
build_root = utils.GetBuildRoot(utils.GuessOS())
print 'Deleting %s' % build_root
if sys.platform != 'win32':
shutil.rmtree(build_root, ignore_errors=True)
else:
# Intentionally ignore return value since a directory might be in use.
subprocess.call(['rmdir', '/Q', '/S', build_root],
env=os.environ.copy(),
shell=True)
return 0
if __name__ == '__main__':
sys.exit(Main())
|
#!/usr/bin/env python
#
# Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
#
import shutil
import sys
import subprocess
import utils
def Main():
build_root = utils.GetBuildRoot(utils.GuessOS())
print 'Deleting %s' % build_root
if sys.platform != 'win32':
shutil.rmtree(build_root, ignore_errors=True)
else:
# Intentionally ignore return value since a directory might be in use.
subprocess.call(['rmdir', '/Q', '/S', build_root],
env=os.environ.copy(),
shell=True)
return 0
if __name__ == '__main__':
sys.exit(Main())
|
Add missing import to utility python script
|
Add missing import to utility python script
BUG=
R=kustermann@google.com
Review URL: https://codereview.chromium.org//1222793010.
|
Python
|
bsd-3-clause
|
dart-lang/sdk,dart-lang/sdk,dartino/dart-sdk,dart-lang/sdk,dartino/dart-sdk,dart-lang/sdk,dart-lang/sdk,dartino/dart-sdk,dart-archive/dart-sdk,dart-lang/sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dartino/dart-sdk,dart-lang/sdk,dart-archive/dart-sdk,dartino/dart-sdk,dart-archive/dart-sdk,dartino/dart-sdk,dartino/dart-sdk,dart-archive/dart-sdk,dart-lang/sdk,dart-archive/dart-sdk,dartino/dart-sdk,dartino/dart-sdk,dart-archive/dart-sdk,dart-archive/dart-sdk
|
13c968f9f345f58775750f1f83ca7881cee2755a
|
bootstrap/conf/salt/state/run-tracking-db/scripts/import_sample_data.py
|
bootstrap/conf/salt/state/run-tracking-db/scripts/import_sample_data.py
|
import pandas as pd
import sys
df = pd.read_csv(sys.argv[1])
df.columns = [c.lower() for c in df.columns]
from sqlalchemy import create_engine
engine = create_engine('postgresql://pcawg_admin:pcawg@localhost:5432/germline_genotype_tracking')
df.to_sql("pcawg_samples", engine)
|
import pandas as pd
import sys
df = pd.read_csv(sys.argv[1])
df.columns = [c.lower() for c in df.columns]
from sqlalchemy import create_engine
engine = create_engine('postgresql://pcawg_admin:pcawg@run-tracking-db.service.consul:5432/germline_genotype_tracking')
df.to_sql("pcawg_samples", engine)
|
Use Tracking DB Service URL rather than localhost in the DB connection string.
|
Use Tracking DB Service URL rather than localhost in the DB connection string.
|
Python
|
mit
|
llevar/germline-regenotyper,llevar/germline-regenotyper
|
fedff2e76d8d96f1ea407f7a3a48aa8dc7a7e50a
|
analysis/opensimulator-stats-analyzer/src/ostagraph.py
|
analysis/opensimulator-stats-analyzer/src/ostagraph.py
|
#!/usr/bin/python
import argparse
import matplotlib.pyplot as plt
from pylab import *
from osta.osta import *
############
### MAIN ###
############
parser = argparse.ArgumentParser(formatter_class = argparse.RawTextHelpFormatter)
parser.add_argument(
'--select',
help = "Select the full name of a stat to graph (e.g. \"scene.Keynote 1.RootAgents\")")
parser.add_argument(
'--out',
help = "Path to output the graph rather the interactively display. Filename extension determines graphics type (e.g. \"graph.jpg\")",
default = argparse.SUPPRESS)
parser.add_argument(
'statsLogPath',
help = "Path to the stats log file.",
metavar = "stats-log-path")
opts = parser.parse_args()
osta = Osta()
osta.parse(opts.statsLogPath)
stat = osta.getStat(opts.select)
if not stat == None:
plt.plot(stat['abs']['values'])
plt.title(stat['fullName'])
plt.ylabel(stat['name'])
if 'out' in opts:
savefig(opts.out)
else:
plt.show()
else:
print "No such stat as %s" % (opts.select)
|
#!/usr/bin/python
import argparse
import matplotlib.pyplot as plt
from pylab import *
from osta.osta import *
############
### MAIN ###
############
parser = argparse.ArgumentParser(formatter_class = argparse.RawTextHelpFormatter)
parser.add_argument(
'--select',
help = "Select the full name of a stat to graph (e.g. \"scene.Keynote 1.RootAgents\")")
parser.add_argument(
'--out',
help = "Path to output the graph rather the interactively display. Filename extension determines graphics type (e.g. \"graph.jpg\")",
default = argparse.SUPPRESS)
parser.add_argument(
'statsLogPath',
help = "Path to the stats log file.",
metavar = "stats-log-path")
opts = parser.parse_args()
osta = Osta()
osta.parse(opts.statsLogPath)
stat = osta.getStat(opts.select)
if not stat == None:
plt.plot(stat['abs']['values'])
plt.title(stat['fullName'])
plt.xlabel("samples")
plt.ylabel(stat['name'])
if 'out' in opts:
savefig(opts.out)
else:
plt.show()
else:
print "No such stat as %s" % (opts.select)
|
Make x axis label samples for now, though eventually should have a date option
|
Make x axis label samples for now, though eventually should have a date option
|
Python
|
bsd-3-clause
|
justinccdev/opensimulator-tools,justinccdev/opensimulator-tools,justinccdev/opensimulator-tools,justinccdev/opensimulator-tools
|
114b3f3403e970943618e7096b0b898b8aa5589f
|
microdrop/core_plugins/electrode_controller_plugin/on_plugin_install.py
|
microdrop/core_plugins/electrode_controller_plugin/on_plugin_install.py
|
from datetime import datetime
import logging
from path_helpers import path
from pip_helpers import install
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
logging.info(str(datetime.now()))
requirements_file = path(__file__).parent.joinpath('requirements.txt')
if requirements_file.exists():
logging.info(install(['-U', '-r', requirements_file], verbose=True))
|
from datetime import datetime
import logging
from path_helpers import path
from pip_helpers import install
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
logging.info(str(datetime.now()))
requirements_file = path(__file__).parent.joinpath('requirements.txt')
if requirements_file.exists():
logging.info(install(['-U', '-r', requirements_file]))
|
Remove verbose keywork for pip install
|
Remove verbose keywork for pip install
|
Python
|
bsd-3-clause
|
wheeler-microfluidics/microdrop
|
7a78525bb8cc6176dfbe348e5f95373c1d70628f
|
functions.py
|
functions.py
|
#-*- coding: utf-8 -*-
def getClientIP( req ):
'''
Get the client ip address
'''
xForwardedFor=req.META.get('HTTP_X_FORWARDED_FOR')
if xForwardedFor:
ip=xForwardedFor.split(',')[0]
else:
ip=req.META.get('REMOTE_ADDR')
return ip
def getBool( val, trueOpts=['YES', 'Y', '1', 'TRUE', 'T'] ):
'''
Retrieve the boolean value from string
'''
if val:
return str(val).upper() in trueOpts
return False
|
#-*- coding: utf-8 -*-
def getClientIP( req ):
'''
Get the client ip address
@param req The request;
'''
xForwardedFor=req.META.get('HTTP_X_FORWARDED_FOR')
if xForwardedFor:
ip=xForwardedFor.split(',')[0]
else:
ip=req.META.get('REMOTE_ADDR')
return ip
def getBool( val, defVal=False, trueOpts=['YES', 'Y', '1', 'TRUE', 'T'] ):
'''
Retrieve the boolean value from string
@param val The value to be parse to bool
@param defVal The default value if the val is None
@param trueOpts The available values of TRUE
'''
if val:
return str(val).upper() in trueOpts
return defVal
def checkRecaptcha( req, secret, simple=True ):
'''
Checking the recaptcha and return the result.
@param req The request;
@param secret The secret retreived from Google reCaptcha registration;
@param simple Retrue the simple boolean value of verification if True, otherwise, return the JSON value of verification;
'''
import requests
apiurl='https://www.google.com/recaptcha/api/siteverify'
fieldname='g-recaptcha-response'
answer=req.POST.get(fieldname, None)
clientIP=getClientIP( req )
rst=requests.post(apiurl, data={'secret': secret, 'response':answer, 'remoteip': clientIP}).json()
if simple:
return getBool(rst.get('success', 'False'))
return r.json()
|
Add the checkRecaptcha( req, secret, simple=True ) function
|
Add the checkRecaptcha( req, secret, simple=True ) function
|
Python
|
apache-2.0
|
kensonman/webframe,kensonman/webframe,kensonman/webframe
|
f2f74f53fe8f5b4ac4cd728c4181b3a66b4e873d
|
euler009.py
|
euler009.py
|
# euler 009
# A Pythagorean triplet is a set of three natural numbers, a < b < c, for which,
# a^2 + b^2 = c^2
#
# For example, 32 + 42 = 9 + 16 = 25 = 52.
#
# There exists exactly one Pythagorean triplet for which a + b + c = 1000.
# Find the product abc.
|
# Project Euler
# 9 - Special Pythagorean triplet
# A Pythagorean triplet is a set of three natural numbers, a < b < c, for which,
# a^2 + b^2 = c^2
#
# For example, 3^2 + 4^2 = 9 + 16 = 25 = 5^2.
#
# There exists exactly one Pythagorean triplet for which a + b + c = 1000.
# Find the product abc.
|
Change problem description so it actually makes sense
|
Change problem description so it actually makes sense
|
Python
|
mit
|
josienb/project_euler,josienb/project_euler
|
87f892731678049b5a706a36487982ebb9da3991
|
pybossa_discourse/globals.py
|
pybossa_discourse/globals.py
|
# -*- coding: utf8 -*-
"""Jinja globals module for pybossa-discourse."""
from flask import Markup, request
from . import discourse_client
class DiscourseGlobals(object):
"""A class to implement Discourse Global variables."""
def __init__(self, app):
self.url = app.config['DISCOURSE_URL']
app.jinja_env.globals.update(discourse=self)
def comments(self):
"""Return an HTML snippet used to embed Discourse comments."""
return Markup("""
<div id="discourse-comments"></div>
<script type="text/javascript">
DiscourseEmbed = {{
discourseUrl: '{0}/',
discourseEmbedUrl: '{1}'
}};
window.onload = function() {{
let d = document.createElement('script'),
head = document.getElementsByTagName('head')[0],
body = document.getElementsByTagName('body')[0];
d.type = 'text/javascript';
d.async = true;
d.src = '{0}/javascripts/embed.js';
(head || body).appendChild(d);
}}
</script>
""").format(self.url, request.base_url)
def notifications(self):
"""Return a count of unread notifications for the current user."""
notifications = discourse_client.user_notifications()
if not notifications:
return 0
return sum([1 for n in notifications['notifications']
if not n['read']])
|
# -*- coding: utf8 -*-
"""Jinja globals module for pybossa-discourse."""
from flask import Markup, request
from . import discourse_client
class DiscourseGlobals(object):
"""A class to implement Discourse Global variables."""
def __init__(self, app):
self.url = app.config['DISCOURSE_URL']
self.api = discourse_client
app.jinja_env.globals.update(discourse=self)
def comments(self):
"""Return an HTML snippet used to embed Discourse comments."""
return Markup("""
<div id="discourse-comments"></div>
<script type="text/javascript">
DiscourseEmbed = {{
discourseUrl: '{0}/',
discourseEmbedUrl: '{1}'
}};
window.onload = function() {{
let d = document.createElement('script'),
head = document.getElementsByTagName('head')[0],
body = document.getElementsByTagName('body')[0];
d.type = 'text/javascript';
d.async = true;
d.src = '{0}/javascripts/embed.js';
(head || body).appendChild(d);
}}
</script>
""").format(self.url, request.base_url)
def notifications(self):
"""Return a count of unread notifications for the current user."""
notifications = discourse_client.user_notifications()
if not notifications:
return 0
return sum([1 for n in notifications['notifications']
if not n['read']])
|
Add API client to global envar
|
Add API client to global envar
|
Python
|
bsd-3-clause
|
alexandermendes/pybossa-discourse
|
edb07b507aa93ead278cecd168da83a4be68b2ba
|
bluebottle/settings/travis.py
|
bluebottle/settings/travis.py
|
# SECRET_KEY and DATABASES needs to be defined before the base settings is imported.
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
},
}
from .base import *
#
# Put the travis-ci environment specific overrides below.
#
SELENIUM_TESTS = True
|
# SECRET_KEY and DATABASES needs to be defined before the base settings is imported.
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
},
}
from .base import *
#
# Put the travis-ci environment specific overrides below.
#
# Disable Selenium testing for now on Travis because it fails inconsistent.
# SELENIUM_TESTS = True
|
Disable front-end testing on Travis
|
Disable front-end testing on Travis
Travis has dificulty running the front-end tests.
For now we'll disable them.
|
Python
|
bsd-3-clause
|
onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site
|
f3f428480a8e61bf22532503680e718fd5f0d286
|
fb/views.py
|
fb/views.py
|
from django.shortcuts import render
# Create your views here.
|
from django.shortcuts import render
from fb.models import UserPost
def index(request):
if request.method == 'GET':
posts = UserPost.objects.all()
context = {
'posts': posts,
}
return render(request, 'index.html', context)
|
Write the first view - news feed.
|
Write the first view - news feed.
|
Python
|
apache-2.0
|
pure-python/brainmate
|
e978b8be7ccfd9206d618f5a3de855a306ceccfe
|
test.py
|
test.py
|
import unittest
from enigma import Enigma, Steckerbrett, Umkehrwalze, Walzen
class RotorTestCase(unittest.TestCase):
def test_rotor_encoding(self):
rotor = Walzen(wiring='EKMFLGDQVZNTOWYHXUSPAIBRCJ', notch='Q')
self.assertEqual('E', rotor.encode('A'))
def test_rotor_reverse_encoding(self):
rotor = Walzen(wiring='EKMFLGDQVZNTOWYHXUSPAIBRCJ', notch='Q')
self.assertEqual('U', rotor.encode_reverse('A'))
def test_rotor_different_setting(self):
rotor = Walzen(wiring='EKMFLGDQVZNTOWYHXUSPAIBRCJ', notch='Q',
setting='B')
self.assertEqual('K', rotor.encode('A'))
self.assertEqual('K', rotor.encode_reverse('A'))
def run_tests():
runner = unittest.TextTestRunner()
suite = unittest.TestLoader().loadTestsFromTestCase(RotorTestCase)
runner.run(suite)
if __name__ == '__main__': # pragma: no cover
run_tests()
|
import unittest
from enigma import Enigma, Steckerbrett, Umkehrwalze, Walzen
class RotorTestCase(unittest.TestCase):
def test_rotor_encoding(self):
rotor = Walzen(wiring='EKMFLGDQVZNTOWYHXUSPAIBRCJ', notch='Q')
self.assertEqual('E', rotor.encode('A'))
def test_rotor_reverse_encoding(self):
rotor = Walzen(wiring='EKMFLGDQVZNTOWYHXUSPAIBRCJ', notch='Q')
self.assertEqual('U', rotor.encode_reverse('A'))
def test_rotor_different_setting(self):
rotor = Walzen(wiring='EKMFLGDQVZNTOWYHXUSPAIBRCJ', notch='Q',
setting='B')
self.assertEqual('K', rotor.encode('A'))
self.assertEqual('K', rotor.encode_reverse('A'))
def test_rotor_different_offset(self):
rotor = Walzen(wiring='EKMFLGDQVZNTOWYHXUSPAIBRCJ', notch='Q',
offset='B')
self.assertEqual('D', rotor.encode('A'))
self.assertEqual('W', rotor.encode_reverse('A'))
def run_tests():
runner = unittest.TextTestRunner()
suite = unittest.TestLoader().loadTestsFromTestCase(RotorTestCase)
runner.run(suite)
if __name__ == '__main__': # pragma: no cover
run_tests()
|
Test if rotor encodes with different offset properly
|
Test if rotor encodes with different offset properly
|
Python
|
mit
|
ranisalt/enigma
|
691093e38598959f98b319f7c57852496a26ba90
|
apps/careers/models.py
|
apps/careers/models.py
|
import watson
from cms.apps.pages.models import ContentBase
from cms.models import HtmlField, SearchMetaBase
from django.db import models
class Careers(ContentBase):
# The heading that the admin places this content under.
classifier = "apps"
# The urlconf used to power this content's views.
urlconf = "phixflow.apps.careers.urls"
standfirst = models.TextField(
blank=True,
null=True
)
per_page = models.IntegerField(
"careers per page",
default=5,
blank=True,
null=True
)
def __unicode__(self):
return self.page.title
class Career(SearchMetaBase):
page = models.ForeignKey(
Careers
)
title = models.CharField(
max_length=256,
)
slug = models.CharField(
max_length=256,
unique=True
)
location = models.CharField(
max_length=256,
blank=True,
null=True
)
summary = models.TextField(
blank=True,
null=True
)
description = HtmlField()
email_address = models.EmailField()
order = models.PositiveIntegerField(
default=0
)
class Meta:
ordering = ('order',)
def __unicode__(self):
return self.title
def get_absolute_url(self):
return self.page.page.reverse('career', kwargs={
'slug': self.slug,
})
watson.register(Career)
|
import watson
from cms.apps.pages.models import ContentBase
from cms.models import HtmlField, SearchMetaBase
from django.db import models
class Careers(ContentBase):
# The heading that the admin places this content under.
classifier = "apps"
# The urlconf used to power this content's views.
urlconf = "{{ project_name }}.apps.careers.urls"
standfirst = models.TextField(
blank=True,
null=True
)
per_page = models.IntegerField(
"careers per page",
default=5,
blank=True,
null=True
)
def __unicode__(self):
return self.page.title
class Career(SearchMetaBase):
page = models.ForeignKey(
Careers
)
title = models.CharField(
max_length=256,
)
slug = models.CharField(
max_length=256,
unique=True
)
location = models.CharField(
max_length=256,
blank=True,
null=True
)
summary = models.TextField(
blank=True,
null=True
)
description = HtmlField()
email_address = models.EmailField()
order = models.PositiveIntegerField(
default=0
)
class Meta:
ordering = ('order',)
def __unicode__(self):
return self.title
def get_absolute_url(self):
return self.page.page.reverse('career', kwargs={
'slug': self.slug,
})
watson.register(Career)
|
Fix project name in urlconf
|
Fix project name in urlconf
|
Python
|
mit
|
onespacemedia/cms-jobs,onespacemedia/cms-jobs
|
88e0ec5ff58f7dabb531749472a410498c8e7827
|
py_skiplist/iterators.py
|
py_skiplist/iterators.py
|
from itertools import dropwhile, count, cycle
import random
def geometric(p):
return (next(dropwhile(lambda _: random.randint(1, int(1. / p)) == 1, count())) for _ in cycle([1]))
def uniform(n):
"""
Simple deterministic distribution for testing internal of the skiplist
"""
return (n for _ in cycle([1]))
|
from itertools import dropwhile, count, repeat
import random
def geometric(p):
return (next(dropwhile(lambda _: random.randint(1, int(1. / p)) == 1, count())) for _ in repeat(1))
# Simple deterministic distribution for testing internals of the skiplist.
uniform = repeat
|
Use itertools.repeat over slower alternatives.
|
Use itertools.repeat over slower alternatives.
|
Python
|
mit
|
ZhukovAlexander/skiplist-python
|
52d65ff926a079b4e07e8bc0fda3e3c3fe8f9437
|
thumbor/detectors/queued_detector/__init__.py
|
thumbor/detectors/queued_detector/__init__.py
|
from remotecv import pyres_tasks
from remotecv.unique_queue import UniqueQueue
from thumbor.detectors import BaseDetector
class QueuedDetector(BaseDetector):
queue = UniqueQueue()
def detect(self, callback):
engine = self.context.modules.engine
self.queue.enqueue_unique(pyres_tasks.DetectTask,
args=[self.detection_type, self.context.request.image_url],
key=self.context.request.image_url)
self.context.prevent_result_storage = True
callback([])
|
from remotecv.unique_queue import UniqueQueue
from thumbor.detectors import BaseDetector
class QueuedDetector(BaseDetector):
queue = UniqueQueue()
def detect(self, callback):
engine = self.context.modules.engine
self.queue.enqueue_unique_from_string('remotecv.pyres_tasks.DetectTask', 'Detect',
args=[self.detection_type, self.context.request.image_url],
key=self.context.request.image_url)
self.context.prevent_result_storage = True
callback([])
|
Remove dependency from remotecv worker on queued detector
|
Remove dependency from remotecv worker on queued detector
|
Python
|
mit
|
fanhero/thumbor,fanhero/thumbor,fanhero/thumbor,fanhero/thumbor
|
c9f8663e6b0bf38f6c041a3a6b77b8a0007a9f09
|
urls.py
|
urls.py
|
from django.conf.urls.defaults import *
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
from django.views.generic.simple import direct_to_template
admin.autodiscover()
urlpatterns = patterns('',
# Example:
# (r'^i4p/', include('i4p.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# (r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
(r'^admin/', include(admin.site.urls)),
(r'^$', direct_to_template, {'template' : 'base.html'}),
(r'^accounts/', include('registration.backends.default.urls')),
)
|
from django.conf.urls.defaults import *
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
from django.views.generic.simple import direct_to_template
admin.autodiscover()
urlpatterns = patterns('',
# Example:
# (r'^i4p/', include('i4p.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# (r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
(r'^admin/', include(admin.site.urls)),
url(r'^$', direct_to_template, {'template' : 'base.html'}, name="i4p-index"),
(r'^accounts/', include('registration.backends.default.urls')),
)
|
Add a name to the index URL
|
Add a name to the index URL
|
Python
|
agpl-3.0
|
ImaginationForPeople/imaginationforpeople,ImaginationForPeople/imaginationforpeople,ImaginationForPeople/imaginationforpeople,ImaginationForPeople/imaginationforpeople,ImaginationForPeople/imaginationforpeople,ImaginationForPeople/imaginationforpeople
|
73ff56f4b8859e82b0d69a6505c982e26de27859
|
util.py
|
util.py
|
def product(nums):
r = 1
for n in nums:
r *= n
return r
def choose(n, k):
if 0 <= k <= n:
ntok = 1
ktok = 1
for t in range(1, min(k, n - k) + 1):
ntok *= n
ktok *= t
n -= 1
return ntok // ktok
else:
return 0
def format_floats(floats):
fstr = ' '.join('{:10.08f}' for _ in floats)
return fstr.format(*floats)
|
import colorsys
import random
def randcolor():
hue = random.random()
sat = random.randint(700, 1000) / 1000
val = random.randint(700, 1000) / 1000
return tuple(int(f*255) for f in colorsys.hsv_to_rgb(hue, sat, val))
def product(nums):
r = 1
for n in nums:
r *= n
return r
def choose(n, k):
if 0 <= k <= n:
ntok = 1
ktok = 1
for t in range(1, min(k, n - k) + 1):
ntok *= n
ktok *= t
n -= 1
return ntok // ktok
else:
return 0
def format_floats(floats):
fstr = ' '.join('{:10.08f}' for _ in floats)
return fstr.format(*floats)
|
Add randcolor function to uitl
|
Add randcolor function to uitl
|
Python
|
unlicense
|
joseph346/cellular
|
7801c5d7430233eb78ab8b2a91f5960bd808b2c7
|
app/admin/views.py
|
app/admin/views.py
|
from flask import Blueprint, render_template
from flask_security import login_required
admin = Blueprint('admin', __name__)
@admin.route('/')
@admin.route('/index')
@login_required
def index():
return render_template('admin/index.html', title='Admin')
|
from flask import Blueprint, render_template, redirect, url_for
from flask_security import current_user
admin = Blueprint('admin', __name__)
@admin.route('/')
@admin.route('/index')
def index():
return render_template('admin/index.html', title='Admin')
@admin.before_request
def require_login():
if not current_user.is_authenticated:
return redirect(url_for('security.login', next='admin'))
|
Move admin authentication into before_request handler
|
Move admin authentication into before_request handler
|
Python
|
mit
|
Encrylize/flask-blogger,Encrylize/flask-blogger,Encrylize/flask-blogger
|
6ce14f21cec2c37939f68aaf40d5227c80636e53
|
app_bikes/forms.py
|
app_bikes/forms.py
|
from dal import autocomplete
from django import forms
class BikeModelForm(forms.ModelForm):
def __init__(self, *args, **kw):
super(BikeModelForm, self).__init__(*args, **kw)
if self.instance is not None:
# Saved instance is loaded, setup choices to display the selected value
self.fields['id_station'].widget.choices = ((self.instance.id_station, self.instance.station),)
def validate_unique(self):
super(BikeModelForm, self).validate_unique()
if self.errors and 'id_station' in self.data:
# A station was chosen, reinit choices with it
self.fields['id_station'].widget.choices = ((self.cleaned_data['id_station'], self.data['station']),)
class Meta:
widgets = {
'id_station': autocomplete.ListSelect2(url='station-autocomplete',
forward=('provider',),
attrs={'data-allow-clear': 'false'})
}
class Media:
js = ('js/admin_form.js',)
|
from dal import autocomplete
from django import forms
class BikeModelForm(forms.ModelForm):
def __init__(self, *args, **kw):
super(BikeModelForm, self).__init__(*args, **kw)
if self.instance is not None:
# Saved instance is loaded, setup choices to display the selected value
self.fields['id_station'].widget.choices = ((self.instance.id_station, self.instance.station),)
def validate_unique(self):
"""If the form had an error and a station was chosen, we need to setup the widget choices to the previously selected value for the autocomplete to display it properly"""
super(BikeModelForm, self).validate_unique()
if self.errors and 'id_station' in self.data:
self.fields['id_station'].widget.choices = ((self.cleaned_data['id_station'], self.data['station']),)
class Meta:
widgets = {
'id_station': autocomplete.ListSelect2(url='station-autocomplete',
forward=('provider',),
attrs={'data-allow-clear': 'false'})
}
class Media:
js = ('js/admin_form.js',)
|
Add docstring to explain the code
|
Add docstring to explain the code
|
Python
|
agpl-3.0
|
laboiteproject/laboite-backend,laboiteproject/laboite-backend,bgaultier/laboitepro,bgaultier/laboitepro,bgaultier/laboitepro,laboiteproject/laboite-backend
|
0b1f38b8354a0ad6a021f247a7bc1336ae5d50fb
|
arcade/__init__.py
|
arcade/__init__.py
|
"""
The Arcade Library
A Python simple, easy to use module for creating 2D games.
"""
import arcade.key
import arcade.color
from .version import *
from .window_commands import *
from .draw_commands import *
from .sprite import *
from .physics_engines import *
from .physics_engine_2d import *
from .application import *
from .sound import *
from .shape_objects import *
|
"""
The Arcade Library
A Python simple, easy to use module for creating 2D games.
"""
import arcade.key
import arcade.color
from arcade.version import *
from arcade.window_commands import *
from arcade.draw_commands import *
from arcade.sprite import *
from arcade.physics_engines import *
from arcade.physics_engine_2d import *
from arcade.application import *
from arcade.sound import *
from arcade.shape_objects import *
|
Change some of the relative imports, which fail in doctests, to absolute imports.
|
Change some of the relative imports, which fail in doctests, to absolute imports.
|
Python
|
mit
|
mikemhenry/arcade,mikemhenry/arcade
|
ed4f786de54dde50cb26cfe4859507579806a14b
|
portal_sale_distributor/models/ir_action_act_window.py
|
portal_sale_distributor/models/ir_action_act_window.py
|
##############################################################################
# For copyright and license notices, see __manifest__.py file in module root
# directory
##############################################################################
from odoo import models, api
from odoo.tools.safe_eval import safe_eval
class ActWindowView(models.Model):
_inherit = 'ir.actions.act_window'
def read(self, fields=None, load='_classic_read'):
result = super().read(fields, load=load)
if result and result[0].get('context'):
ctx = safe_eval(result[0].get('context', '{}'))
if ctx.get('portal_products'):
pricelist = self.env.user.partner_id.property_product_pricelist
ctx.update({'pricelist': pricelist.id, 'partner': self.env.user.partner_id})
result[0].update({'context': ctx})
return result
|
##############################################################################
# For copyright and license notices, see __manifest__.py file in module root
# directory
##############################################################################
from odoo import models, api
from odoo.tools.safe_eval import safe_eval
class ActWindowView(models.Model):
_inherit = 'ir.actions.act_window'
def read(self, fields=None, load='_classic_read'):
result = super().read(fields, load=load)
for value in result:
if value.get('context') and 'portal_products' in value.get('context'):
eval_ctx = dict(self.env.context)
try:
ctx = safe_eval(value.get('context', '{}'), eval_ctx)
except:
ctx = {}
pricelist = self.env.user.partner_id.property_product_pricelist
ctx.update({'pricelist': pricelist.id, 'partner': self.env.user.partner_id.id})
value.update({'context': str(ctx)})
return result
|
Adjust to avoid bugs with other values in context
|
[FIX] portal_sale_distributor: Adjust to avoid bugs with other values in context
closes ingadhoc/sale#493
X-original-commit: 441d30af0c3fa8cbbe129893107436ea69cca740
Signed-off-by: Juan José Scarafía <1d1652a8631a1f5a0ea40ef8dcad76f737ce6379@adhoc.com.ar>
|
Python
|
agpl-3.0
|
ingadhoc/sale,ingadhoc/sale,ingadhoc/sale,ingadhoc/sale
|
4f3234433b97e7f243d54e9e95399f5cabecd315
|
common/djangoapps/course_modes/migrations/0005_auto_20151217_0958.py
|
common/djangoapps/course_modes/migrations/0005_auto_20151217_0958.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('course_modes', '0004_auto_20151113_1457'),
]
operations = [
migrations.RemoveField(
model_name='coursemode',
name='expiration_datetime',
),
migrations.AddField(
model_name='coursemode',
name='_expiration_datetime',
field=models.DateTimeField(db_column=b'expiration_datetime', default=None, blank=True, help_text='OPTIONAL: After this date/time, users will no longer be able to enroll in this mode. Leave this blank if users can enroll in this mode until enrollment closes for the course.', null=True, verbose_name='Upgrade Deadline'),
),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('course_modes', '0004_auto_20151113_1457'),
]
operations = [
migrations.SeparateDatabaseAndState(
database_operations=[],
state_operations=[
migrations.RemoveField(
model_name='coursemode',
name='expiration_datetime',
),
migrations.AddField(
model_name='coursemode',
name='_expiration_datetime',
field=models.DateTimeField(db_column=b'expiration_datetime', default=None, blank=True, help_text='OPTIONAL: After this date/time, users will no longer be able to enroll in this mode. Leave this blank if users can enroll in this mode until enrollment closes for the course.', null=True, verbose_name='Upgrade Deadline'),
),
]
)
]
|
Change broken course_modes migration to not touch the database.
|
Change broken course_modes migration to not touch the database.
Changing a field name in the way that we did (updating the Python
variable name and switching it to point to the old DB column) confuses
Django's migration autodetector. No DB changes are actually necessary,
but it thinks that a field has been removed and a new one added. This
means that Django will ask users to generate the migration it thinks
is necessary, which ended up with us dropping data. The fix is to run
the same migration on Django's internal model of the DB state, but not
actually touch the DB.
|
Python
|
agpl-3.0
|
antoviaque/edx-platform,pepeportela/edx-platform,edx/edx-platform,teltek/edx-platform,ahmedaljazzar/edx-platform,mitocw/edx-platform,edx-solutions/edx-platform,hastexo/edx-platform,IndonesiaX/edx-platform,pepeportela/edx-platform,Lektorium-LLC/edx-platform,mitocw/edx-platform,marcore/edx-platform,romain-li/edx-platform,philanthropy-u/edx-platform,amir-qayyum-khan/edx-platform,angelapper/edx-platform,UOMx/edx-platform,jzoldak/edx-platform,10clouds/edx-platform,defance/edx-platform,antoviaque/edx-platform,Edraak/edraak-platform,JioEducation/edx-platform,10clouds/edx-platform,tanmaykm/edx-platform,Stanford-Online/edx-platform,Lektorium-LLC/edx-platform,raccoongang/edx-platform,pabloborrego93/edx-platform,ESOedX/edx-platform,defance/edx-platform,analyseuc3m/ANALYSE-v1,ahmedaljazzar/edx-platform,edx-solutions/edx-platform,mbareta/edx-platform-ft,cecep-edu/edx-platform,eduNEXT/edunext-platform,stvstnfrd/edx-platform,MakeHer/edx-platform,jjmiranda/edx-platform,teltek/edx-platform,edx-solutions/edx-platform,CourseTalk/edx-platform,franosincic/edx-platform,solashirai/edx-platform,franosincic/edx-platform,CredoReference/edx-platform,CredoReference/edx-platform,caesar2164/edx-platform,pepeportela/edx-platform,kmoocdev2/edx-platform,marcore/edx-platform,naresh21/synergetics-edx-platform,philanthropy-u/edx-platform,solashirai/edx-platform,franosincic/edx-platform,arbrandes/edx-platform,philanthropy-u/edx-platform,gymnasium/edx-platform,analyseuc3m/ANALYSE-v1,ampax/edx-platform,franosincic/edx-platform,MakeHer/edx-platform,louyihua/edx-platform,alu042/edx-platform,waheedahmed/edx-platform,angelapper/edx-platform,lduarte1991/edx-platform,synergeticsedx/deployment-wipro,cpennington/edx-platform,EDUlib/edx-platform,teltek/edx-platform,Endika/edx-platform,philanthropy-u/edx-platform,a-parhom/edx-platform,TeachAtTUM/edx-platform,synergeticsedx/deployment-wipro,jolyonb/edx-platform,kmoocdev2/edx-platform,deepsrijit1105/edx-platform,eduNEXT/edx-platform,shabab12/edx-platform,procangroup/edx-platform,cpennington/edx-platform,wwj718/edx-platform,ESOedX/edx-platform,fintech-circle/edx-platform,lduarte1991/edx-platform,proversity-org/edx-platform,teltek/edx-platform,shabab12/edx-platform,raccoongang/edx-platform,EDUlib/edx-platform,msegado/edx-platform,defance/edx-platform,mitocw/edx-platform,a-parhom/edx-platform,caesar2164/edx-platform,a-parhom/edx-platform,proversity-org/edx-platform,waheedahmed/edx-platform,JioEducation/edx-platform,cpennington/edx-platform,alu042/edx-platform,franosincic/edx-platform,edx/edx-platform,analyseuc3m/ANALYSE-v1,CredoReference/edx-platform,wwj718/edx-platform,doganov/edx-platform,Livit/Livit.Learn.EdX,devs1991/test_edx_docmode,10clouds/edx-platform,solashirai/edx-platform,ESOedX/edx-platform,eduNEXT/edx-platform,gymnasium/edx-platform,defance/edx-platform,prarthitm/edxplatform,cecep-edu/edx-platform,procangroup/edx-platform,tanmaykm/edx-platform,msegado/edx-platform,wwj718/edx-platform,synergeticsedx/deployment-wipro,jolyonb/edx-platform,devs1991/test_edx_docmode,analyseuc3m/ANALYSE-v1,devs1991/test_edx_docmode,chrisndodge/edx-platform,caesar2164/edx-platform,alu042/edx-platform,Endika/edx-platform,waheedahmed/edx-platform,JioEducation/edx-platform,Stanford-Online/edx-platform,edx/edx-platform,Endika/edx-platform,IndonesiaX/edx-platform,mbareta/edx-platform-ft,longmen21/edx-platform,naresh21/synergetics-edx-platform,longmen21/edx-platform,TeachAtTUM/edx-platform,jjmiranda/edx-platform,gsehub/edx-platform,eduNEXT/edunext-platform,EDUlib/edx-platform,jzoldak/edx-platform,prarthitm/edxplatform,itsjeyd/edx-platform,angelapper/edx-platform,jzoldak/edx-platform,chrisndodge/edx-platform,Lektorium-LLC/edx-platform,solashirai/edx-platform,Endika/edx-platform,doganov/edx-platform,UOMx/edx-platform,cecep-edu/edx-platform,naresh21/synergetics-edx-platform,eduNEXT/edx-platform,ampax/edx-platform,shabab12/edx-platform,CourseTalk/edx-platform,ESOedX/edx-platform,BehavioralInsightsTeam/edx-platform,CredoReference/edx-platform,appsembler/edx-platform,prarthitm/edxplatform,fintech-circle/edx-platform,MakeHer/edx-platform,hastexo/edx-platform,procangroup/edx-platform,amir-qayyum-khan/edx-platform,cecep-edu/edx-platform,Livit/Livit.Learn.EdX,miptliot/edx-platform,UOMx/edx-platform,IndonesiaX/edx-platform,devs1991/test_edx_docmode,procangroup/edx-platform,hastexo/edx-platform,pepeportela/edx-platform,miptliot/edx-platform,proversity-org/edx-platform,ahmedaljazzar/edx-platform,wwj718/edx-platform,romain-li/edx-platform,gsehub/edx-platform,synergeticsedx/deployment-wipro,10clouds/edx-platform,amir-qayyum-khan/edx-platform,gsehub/edx-platform,Livit/Livit.Learn.EdX,fintech-circle/edx-platform,BehavioralInsightsTeam/edx-platform,devs1991/test_edx_docmode,IndonesiaX/edx-platform,MakeHer/edx-platform,eduNEXT/edunext-platform,stvstnfrd/edx-platform,Livit/Livit.Learn.EdX,amir-qayyum-khan/edx-platform,angelapper/edx-platform,pabloborrego93/edx-platform,eduNEXT/edx-platform,gsehub/edx-platform,chrisndodge/edx-platform,devs1991/test_edx_docmode,TeachAtTUM/edx-platform,eduNEXT/edunext-platform,itsjeyd/edx-platform,solashirai/edx-platform,TeachAtTUM/edx-platform,a-parhom/edx-platform,jzoldak/edx-platform,antoviaque/edx-platform,lduarte1991/edx-platform,edx/edx-platform,doganov/edx-platform,devs1991/test_edx_docmode,miptliot/edx-platform,longmen21/edx-platform,ampax/edx-platform,edx-solutions/edx-platform,jolyonb/edx-platform,arbrandes/edx-platform,proversity-org/edx-platform,miptliot/edx-platform,marcore/edx-platform,CourseTalk/edx-platform,waheedahmed/edx-platform,BehavioralInsightsTeam/edx-platform,romain-li/edx-platform,ampax/edx-platform,appsembler/edx-platform,tanmaykm/edx-platform,pabloborrego93/edx-platform,longmen21/edx-platform,raccoongang/edx-platform,louyihua/edx-platform,louyihua/edx-platform,doganov/edx-platform,raccoongang/edx-platform,romain-li/edx-platform,arbrandes/edx-platform,kmoocdev2/edx-platform,UOMx/edx-platform,cecep-edu/edx-platform,prarthitm/edxplatform,mitocw/edx-platform,longmen21/edx-platform,romain-li/edx-platform,Stanford-Online/edx-platform,Edraak/edraak-platform,itsjeyd/edx-platform,appsembler/edx-platform,antoviaque/edx-platform,kmoocdev2/edx-platform,alu042/edx-platform,IndonesiaX/edx-platform,pabloborrego93/edx-platform,jolyonb/edx-platform,chrisndodge/edx-platform,kmoocdev2/edx-platform,hastexo/edx-platform,caesar2164/edx-platform,jjmiranda/edx-platform,Lektorium-LLC/edx-platform,CourseTalk/edx-platform,arbrandes/edx-platform,gymnasium/edx-platform,jjmiranda/edx-platform,doganov/edx-platform,cpennington/edx-platform,deepsrijit1105/edx-platform,msegado/edx-platform,appsembler/edx-platform,louyihua/edx-platform,BehavioralInsightsTeam/edx-platform,devs1991/test_edx_docmode,tanmaykm/edx-platform,JioEducation/edx-platform,ahmedaljazzar/edx-platform,itsjeyd/edx-platform,deepsrijit1105/edx-platform,mbareta/edx-platform-ft,deepsrijit1105/edx-platform,wwj718/edx-platform,fintech-circle/edx-platform,waheedahmed/edx-platform,Edraak/edraak-platform,Stanford-Online/edx-platform,Edraak/edraak-platform,stvstnfrd/edx-platform,stvstnfrd/edx-platform,msegado/edx-platform,MakeHer/edx-platform,shabab12/edx-platform,msegado/edx-platform,mbareta/edx-platform-ft,naresh21/synergetics-edx-platform,gymnasium/edx-platform,marcore/edx-platform,lduarte1991/edx-platform,EDUlib/edx-platform
|
faba2bc98f08cddea51d2e0093aa5c2981c8bf15
|
gdrived.py
|
gdrived.py
|
#!/usr/bin/env python
#
# Copyright 2012 Jim Lawton. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import time
import daemon
class GDriveDaemon(daemon.Daemon):
def run(self):
while True:
time.sleep(1)
|
#!/usr/bin/env python
#
# Copyright 2012 Jim Lawton. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import time
import daemon
UPDATE_INTERVAL = 30 # Sync update interval in seconds.
class GDriveDaemon(daemon.Daemon, object):
def __init__(self):
"Class constructor."
# Use pidfile in Gdrive config directory.
pidfile = None
# Use loglevel from GDrive config.
loglevel = None
# Use logfile in GDrive config directory.
stdout = None
super(GDriveDaemon, self).__init__(pidfile, loglevel, stdout)
def run(self):
"Run the daemon."
while True:
time.sleep(UPDATE_INTERVAL)
|
Add update interval constant. Add detail to constructor.
|
Add update interval constant. Add detail to constructor.
|
Python
|
apache-2.0
|
babycaseny/gdrive-linux,jimlawton/gdrive-linux-googlecode,jimlawton/gdrive-linux,jmfield2/gdrive-linux
|
ee3941e2c3a0355314b270c04de6a623f5a0730c
|
plugins/stats.py
|
plugins/stats.py
|
import operator
class Plugin:
def __call__(self, bot):
bot.on_hear(r"(lol|:D|:P)", self.on_hear)
bot.on_respond(r"stats", self.on_respond)
bot.on_help("stats", self.on_help)
def on_hear(self, bot, msg, reply):
stats = bot.storage.get("stats", {})
for word in msg["match"]:
word_stats = stats.get(word, {})
word_stats[msg["sender"]] = word_stats.get(msg["sender"], 0) + 1
stats[word] = word_stats
bot.storage["stats"] = stats
def on_respond(self, bot, msg, reply):
def respond(word, description):
stats = bot.storage.get("stats", {}).get(word, {})
if stats:
person = max(stats.items(), key=operator.itemgetter(1))[0]
reply(description.format(person))
respond("lol", "{0} laughs the most.")
respond(":D", "{0} is the happiest.")
respond(":P", "{0} sticks their tounge out the most.")
def on_help(self, bot, msg, reply):
reply("Syntax: stats")
|
import operator
class Plugin:
def __call__(self, bot):
bot.on_hear(r".*", self.on_hear_anything)
bot.on_hear(r"(lol|:D|:P)", self.on_hear)
bot.on_respond(r"stats", self.on_respond)
bot.on_help("stats", self.on_help)
def on_hear_anything(self, bot, msg, reply):
stats = bot.storage.get("stats", {})
word_stats = stats.get(word, {})
word_stats[""] = word_stats.get("", 0) + 1
stats[word] = word_stats
bot.storage["stats"] = stats
def on_hear(self, bot, msg, reply):
stats = bot.storage.get("stats", {})
for word in msg["match"]:
word_stats = stats.get(word, {})
word_stats[msg["sender"]] = word_stats.get(msg["sender"], 0) + 1
stats[word] = word_stats
break # only allow one word
bot.storage["stats"] = stats
def on_respond(self, bot, msg, reply):
def respond(word, description):
stats = bot.storage.get("stats", {}).get(word, {})
if stats:
person = max(stats.items(), key=operator.itemgetter(1))[0]
reply(description.format(person))
respond("", "{0} is most talkative.")
respond("lol", "{0} laughs the most.")
respond(":D", "{0} is the happiest.")
respond(":P", "{0} sticks their tounge out the most.")
def on_help(self, bot, msg, reply):
reply("Display statistics.")
reply("Syntax: stats")
|
Add statistics about general speaking
|
Add statistics about general speaking
|
Python
|
mit
|
thomasleese/smartbot-old,Cyanogenoid/smartbot,tomleese/smartbot,Muzer/smartbot
|
f2fd7fa693b5be7ae37445fc185611e80aacddf3
|
pebble/PblBuildCommand.py
|
pebble/PblBuildCommand.py
|
import sh, os
from PblCommand import PblCommand
class PblBuildCommand(PblCommand):
name = 'build'
help = 'Build your Pebble project'
def configure_subparser(self, parser):
parser.add_argument('--sdk', help='Path to Pebble SDK (ie: ~/pebble-dev/PebbleSDK-2.X/)')
def run(self, args):
waf_path = os.path.join(os.path.join(self.sdk_path(args), 'Pebble'), 'waf')
print "Path to waf: {}".format(waf_path)
os.system(waf_path + " configure build")
def sdk_path(self, args):
"""
Tries to guess the location of the Pebble SDK
"""
if args.sdk:
return args.sdk
else:
return os.path.normpath(os.path.join(os.path.dirname(__file__), os.path.join('..', '..', '..')))
|
import sh, os
from PblCommand import PblCommand
class PblBuildCommand(PblCommand):
name = 'build'
help = 'Build your Pebble project'
def configure_subparser(self, parser):
parser.add_argument('--sdk', help='Path to Pebble SDK (ie: ~/pebble-dev/PebbleSDK-2.X/)')
def run(self, args):
waf_path = os.path.join(os.path.join(self.sdk_path(args), 'Pebble'), 'waf')
print "Path to waf: {}".format(waf_path)
os.system(waf_path + " configure build")
def sdk_path(self, args):
"""
Tries to guess the location of the Pebble SDK
"""
if args.sdk:
return args.sdk
else:
return os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..'))
|
Fix bug to actually find the sdk
|
Fix bug to actually find the sdk
|
Python
|
mit
|
pebble/libpebble,pebble/libpebble,pebble/libpebble,pebble/libpebble
|
898028dea2e04d52c32854752bda34d331c7696f
|
ynr/apps/candidatebot/management/commands/candidatebot_import_email_from_csv.py
|
ynr/apps/candidatebot/management/commands/candidatebot_import_email_from_csv.py
|
from __future__ import unicode_literals
import csv
from django.core.management.base import BaseCommand
from candidatebot.helpers import CandidateBot
from popolo.models import Person
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
'filename',
help='Path to the file with the email addresses'
)
parser.add_argument(
'--source',
help='Source of the data. The source CSV column takes precedence'
)
def handle(self, **options):
with open(options['filename'], 'r') as fh:
reader = csv.DictReader(fh)
for row in reader:
source = row.get('source', options.get('source'))
if not row['democlub_id']:
continue
if not source:
raise ValueError("A source is required")
try:
bot = CandidateBot(row['democlub_id'])
bot.add_email(row['email'])
bot.save(source)
# print(person)
except Person.DoesNotExist:
print("Person ID {} not found".format(
row['democlub_id']))
# print(row)
|
from __future__ import unicode_literals
import csv
from django.core.management.base import BaseCommand
from candidatebot.helpers import CandidateBot
from popolo.models import Person
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
'filename',
help='Path to the file with the email addresses'
)
parser.add_argument(
'--source',
help='Source of the data. The source CSV column takes precedence'
)
def handle(self, **options):
with open(options['filename'], 'r') as fh:
reader = csv.DictReader(fh)
for row in reader:
source = row.get('source', options.get('source'))
if not row['democlub_id']:
continue
if not source:
raise ValueError("A source is required")
try:
bot = CandidateBot(row['democlub_id'])
try:
bot.add_email(row['email'])
bot.save(source)
except ValueError:
#Email exists, move on
pass
except Person.DoesNotExist:
print("Person ID {} not found".format(
row['democlub_id']))
# print(row)
|
Move on if email exists
|
Move on if email exists
|
Python
|
agpl-3.0
|
DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative
|
4616fdefc1c7df8acccdd89ea792fa24ecfa9ca6
|
perf-tests/src/perf-tests.py
|
perf-tests/src/perf-tests.py
|
def main():
pass
if __name__ == "__main__":
# execute only if run as a script
main()
|
import json
import time
import datetime
import subprocess
import os.path
import sys
import queue
import threading
from coreapi import *
from jobsapi import *
import benchmarks
import graph
def check_environment_variable(env_var_name):
print("Checking: {e} environment variable existence".format(
e=env_var_name))
if os.environ.get(env_var_name) is None:
print("Fatal: {e} environment variable has to be specified"
.format(e=env_var_name))
sys.exit(1)
else:
print(" ok")
def check_environment_variables():
environment_variables = [
"F8A_API_URL",
"F8A_JOB_API_URL",
"RECOMMENDER_API_TOKEN",
"JOB_API_TOKEN",
"AWS_ACCESS_KEY_ID",
"AWS_SECRET_ACCESS_KEY",
"S3_REGION_NAME"]
for environment_variable in environment_variables:
check_environment_variable(environment_variable)
def main():
check_environment_variables()
pass
if __name__ == "__main__":
# execute only if run as a script
main()
|
Check environment variables before the tests are started
|
Check environment variables before the tests are started
|
Python
|
apache-2.0
|
tisnik/fabric8-analytics-common,tisnik/fabric8-analytics-common,jpopelka/fabric8-analytics-common,tisnik/fabric8-analytics-common,jpopelka/fabric8-analytics-common,jpopelka/fabric8-analytics-common
|
72f763d9759438abd731585a1b5ef67e62e27181
|
pyethapp/__init__.py
|
pyethapp/__init__.py
|
# -*- coding: utf-8 -*-
# ############# version ##################
from pkg_resources import get_distribution, DistributionNotFound
import os.path
import subprocess
try:
_dist = get_distribution('pyethapp')
# Normalize case for Windows systems
dist_loc = os.path.normcase(_dist.location)
here = os.path.normcase(__file__)
if not here.startswith(os.path.join(dist_loc, 'pyethapp')):
# not installed, but there is another version that *is*
raise DistributionNotFound
except DistributionNotFound:
__version__ = None
else:
__version__ = _dist.version
if not __version__:
try:
# try to parse from setup.py
for l in open(os.path.join(__path__[0], '..', 'setup.py')):
if l.startswith("version = '"):
__version__ = l.split("'")[1]
break
except:
pass
finally:
if not __version__:
__version__ = 'undefined'
# add git revision and commit status
try:
rev = subprocess.check_output(['git', 'rev-parse', 'HEAD'])
is_dirty = len(subprocess.check_output(['git', 'diff', '--shortstat']).strip())
__version__ += '-' + rev[:4] + '-dirty' if is_dirty else ''
except:
pass
# ########### endversion ##################
|
# -*- coding: utf-8 -*-
# ############# version ##################
from pkg_resources import get_distribution, DistributionNotFound
import os.path
import subprocess
import re
GIT_DESCRIBE_RE = re.compile('^(?P<version>v\d+\.\d+\.\d+)-(?P<git>\d+-g[a-fA-F0-9]+(?:-dirty)?)$')
__version__ = None
try:
_dist = get_distribution('pyethapp')
# Normalize case for Windows systems
dist_loc = os.path.normcase(_dist.location)
here = os.path.normcase(__file__)
if not here.startswith(os.path.join(dist_loc, 'pyethapp')):
# not installed, but there is another version that *is*
raise DistributionNotFound
__version__ = _dist.version
except DistributionNotFound:
pass
if not __version__:
try:
rev = subprocess.check_output(['git', 'describe', '--tags', '--dirty'],
stderr=subprocess.STDOUT)
match = GIT_DESCRIBE_RE.match(rev)
if match:
__version__ = "{}+git-{}".format(match.group("version"), match.group("git"))
except:
pass
if not __version__:
__version__ = 'undefined'
# ########### endversion ##################
|
Use version gathering logic from hydrachain
|
Use version gathering logic from hydrachain
|
Python
|
mit
|
gsalgado/pyethapp,RomanZacharia/pyethapp,ethereum/pyethapp,RomanZacharia/pyethapp,ethereum/pyethapp,changwu-tw/pyethapp,changwu-tw/pyethapp,gsalgado/pyethapp
|
4541605e27c9fef6cc23b245de50867ff22ea6aa
|
erpnext/accounts/doctype/accounting_dimension/test_accounting_dimension.py
|
erpnext/accounts/doctype/accounting_dimension/test_accounting_dimension.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
# import frappe
import unittest
class TestAccountingDimension(unittest.TestCase):
pass
|
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
from erpnext.accounts.doctype.sales_invoice.test_sales_invoice import create_sales_invoice
from erpnext.accounts.doctype.journal_entry.test_journal_entry import make_journal_entry
class TestAccountingDimension(unittest.TestCase):
def setUp(self):
frappe.set_user("Administrator")
if not frappe.db.exists("Accounting Dimension", {"document_type": "Department"}):
dimension = frappe.get_doc({
"doctype": "Accounting Dimension",
"document_type": "Department",
}).insert()
def test_dimension_against_sales_invoice(self):
si = create_sales_invoice(do_not_save=1)
si.append("items", {
"item_code": "_Test Item",
"warehouse": "_Test Warehouse - _TC",
"qty": 1,
"rate": 100,
"income_account": "Sales - _TC",
"expense_account": "Cost of Goods Sold - _TC",
"cost_center": "_Test Cost Center - _TC",
"department": "_Test Department - _TC"
})
si.save()
si.submit()
gle = frappe.get_doc("GL Entry", {"voucher_no": si.name, "account": "Sales - _TC"})
self.assertEqual(gle.department, "_Test Department - _TC")
def test_dimension_against_journal_entry(self):
je = make_journal_entry("Sales - _TC", "Sales Expenses - _TC", 500, save=False)
je.accounts[0].update({"department": "_Test Department - _TC"})
je.accounts[1].update({"department": "_Test Department - _TC"})
je.save()
je.submit()
gle = frappe.get_doc("GL Entry", {"voucher_no": je.name, "account": "Sales - _TC"})
gle1 = frappe.get_doc("GL Entry", {"voucher_no": je.name, "account": "Sales Expenses - _TC"})
self.assertEqual(gle.department, "_Test Department - _TC")
self.assertEqual(gle1.department, "_Test Department - _TC")
|
Test Case for accounting dimension
|
fix: Test Case for accounting dimension
|
Python
|
agpl-3.0
|
gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext
|
27112881583e53d790e66d31a2bb4d2a996ee405
|
python/sparknlp/functions.py
|
python/sparknlp/functions.py
|
from pyspark.sql.functions import udf
from pyspark.sql.types import *
from pyspark.sql import DataFrame
import sys
import sparknlp
def map_annotations(f, output_type: DataType):
sys.modules['sparknlp.annotation'] = sparknlp # Makes Annotation() pickle serializable in top-level
return udf(
lambda content: f(content),
output_type
)
def map_annotations_strict(f):
from sparknlp.annotation import Annotation
sys.modules['sparknlp.annotation'] = sparknlp # Makes Annotation() pickle serializable in top-level
return udf(
lambda content: f(content),
ArrayType(Annotation.dataType())
)
def map_annotations_col(dataframe: DataFrame, f, column, output_column, output_type):
dataframe.withColumn(output_column, map_annotations(f, output_type)(column))
def filter_by_annotations_col(dataframe, f, column):
this_udf = udf(
lambda content: f(content),
BooleanType()
)
return dataframe.filter(this_udf(column))
def explode_annotations_col(dataframe: DataFrame, column, output_column):
from pyspark.sql.functions import explode
return dataframe.withColumn(output_column, explode(column))
|
from pyspark.sql.functions import udf
from pyspark.sql.types import *
from pyspark.sql import DataFrame
from sparknlp.annotation import Annotation
import sys
import sparknlp
def map_annotations(f, output_type: DataType):
sys.modules['sparknlp.annotation'] = sparknlp # Makes Annotation() pickle serializable in top-level
return udf(
lambda content: f(content),
output_type
)
def map_annotations_strict(f):
sys.modules['sparknlp.annotation'] = sparknlp # Makes Annotation() pickle serializable in top-level
return udf(
lambda content: f(content),
ArrayType(Annotation.dataType())
)
def map_annotations_col(dataframe: DataFrame, f, column, output_column, output_type):
dataframe.withColumn(output_column, map_annotations(f, output_type)(column))
def filter_by_annotations_col(dataframe, f, column):
this_udf = udf(
lambda content: f(content),
BooleanType()
)
return dataframe.filter(this_udf(column))
def explode_annotations_col(dataframe: DataFrame, column, output_column):
from pyspark.sql.functions import explode
return dataframe.withColumn(output_column, explode(column))
|
Move import to top level to avoid import fail after fist time on sys.modules hack
|
Move import to top level to avoid import fail after fist time on sys.modules hack
|
Python
|
apache-2.0
|
JohnSnowLabs/spark-nlp,JohnSnowLabs/spark-nlp,JohnSnowLabs/spark-nlp,JohnSnowLabs/spark-nlp
|
c266f5171e875d8dc3abe924e4b6c9ed2a486422
|
tests/sentry/web/frontend/test_organization_home.py
|
tests/sentry/web/frontend/test_organization_home.py
|
from __future__ import absolute_import
from django.core.urlresolvers import reverse
from sentry.testutils import TestCase, PermissionTestCase
class OrganizationHomePermissionTest(PermissionTestCase):
def setUp(self):
super(OrganizationHomePermissionTest, self).setUp()
self.path = reverse('sentry-organization-home', args=[self.organization.slug])
def test_teamless_member_can_load(self):
self.assert_teamless_member_can_access(self.path)
def test_org_member_can_load(self):
self.assert_org_member_can_access(self.path)
class OrganizationHomeTest(TestCase):
def test_renders_with_context(self):
organization = self.create_organization(name='foo', owner=self.user)
team = self.create_team(organization=organization)
project = self.create_project(team=team)
path = reverse('sentry-organization-home', args=[organization.slug])
self.login_as(self.user)
resp = self.client.get(path)
assert resp.status_code == 200
self.assertTemplateUsed(resp, 'sentry/organization-home.html')
assert resp.context['organization'] == organization
assert resp.context['team_list'] == [(team, [project])]
|
from __future__ import absolute_import
from django.core.urlresolvers import reverse
from sentry.testutils import TestCase, PermissionTestCase
class OrganizationHomePermissionTest(PermissionTestCase):
def setUp(self):
super(OrganizationHomePermissionTest, self).setUp()
self.path = reverse('sentry-organization-home', args=[self.organization.slug])
def test_teamless_member_can_load(self):
self.assert_teamless_member_can_access(self.path)
def test_org_member_can_load(self):
self.assert_org_member_can_access(self.path)
def test_non_member_cannot_load(self):
self.assert_non_member_cannot_access(self.path)
class OrganizationHomeTest(TestCase):
def test_renders_with_context(self):
organization = self.create_organization(name='foo', owner=self.user)
team = self.create_team(organization=organization)
project = self.create_project(team=team)
path = reverse('sentry-organization-home', args=[organization.slug])
self.login_as(self.user)
resp = self.client.get(path)
assert resp.status_code == 200
self.assertTemplateUsed(resp, 'sentry/organization-home.html')
assert resp.context['organization'] == organization
assert resp.context['team_list'] == [(team, [project])]
|
Add test for non-member access
|
Add test for non-member access
|
Python
|
bsd-3-clause
|
drcapulet/sentry,daevaorn/sentry,ifduyue/sentry,hongliang5623/sentry,Natim/sentry,wujuguang/sentry,jokey2k/sentry,korealerts1/sentry,llonchj/sentry,songyi199111/sentry,vperron/sentry,BayanGroup/sentry,BuildingLink/sentry,kevinlondon/sentry,fotinakis/sentry,imankulov/sentry,korealerts1/sentry,boneyao/sentry,jean/sentry,hongliang5623/sentry,kevinastone/sentry,fuziontech/sentry,fotinakis/sentry,korealerts1/sentry,pauloschilling/sentry,ifduyue/sentry,camilonova/sentry,argonemyth/sentry,argonemyth/sentry,daevaorn/sentry,jean/sentry,wujuguang/sentry,fotinakis/sentry,looker/sentry,mvaled/sentry,mitsuhiko/sentry,ngonzalvez/sentry,JTCunning/sentry,kevinastone/sentry,TedaLIEz/sentry,camilonova/sentry,jean/sentry,boneyao/sentry,fotinakis/sentry,wong2/sentry,Kryz/sentry,llonchj/sentry,Kryz/sentry,jokey2k/sentry,drcapulet/sentry,songyi199111/sentry,JackDanger/sentry,mvaled/sentry,kevinlondon/sentry,zenefits/sentry,ngonzalvez/sentry,kevinastone/sentry,wong2/sentry,alexm92/sentry,ifduyue/sentry,BayanGroup/sentry,hongliang5623/sentry,JTCunning/sentry,ifduyue/sentry,mitsuhiko/sentry,fuziontech/sentry,beeftornado/sentry,mvaled/sentry,daevaorn/sentry,1tush/sentry,nicholasserra/sentry,JackDanger/sentry,1tush/sentry,zenefits/sentry,Natim/sentry,ewdurbin/sentry,beeftornado/sentry,nicholasserra/sentry,looker/sentry,looker/sentry,wujuguang/sentry,daevaorn/sentry,ngonzalvez/sentry,BuildingLink/sentry,zenefits/sentry,camilonova/sentry,gg7/sentry,felixbuenemann/sentry,argonemyth/sentry,imankulov/sentry,ewdurbin/sentry,JamesMura/sentry,BuildingLink/sentry,pauloschilling/sentry,gencer/sentry,boneyao/sentry,nicholasserra/sentry,ifduyue/sentry,gg7/sentry,JamesMura/sentry,llonchj/sentry,TedaLIEz/sentry,mvaled/sentry,BuildingLink/sentry,drcapulet/sentry,BuildingLink/sentry,alexm92/sentry,JackDanger/sentry,BayanGroup/sentry,zenefits/sentry,gg7/sentry,songyi199111/sentry,mvaled/sentry,vperron/sentry,looker/sentry,gencer/sentry,kevinlondon/sentry,mvaled/sentry,alexm92/sentry,gencer/sentry,looker/sentry,ewdurbin/sentry,vperron/sentry,1tush/sentry,pauloschilling/sentry,beeftornado/sentry,Kryz/sentry,Natim/sentry,gencer/sentry,felixbuenemann/sentry,JamesMura/sentry,jean/sentry,JTCunning/sentry,JamesMura/sentry,imankulov/sentry,wong2/sentry,JamesMura/sentry,TedaLIEz/sentry,gencer/sentry,jokey2k/sentry,felixbuenemann/sentry,fuziontech/sentry,zenefits/sentry,jean/sentry
|
5e2aae6070d60f2149c49e1137ab2a99f3966b3a
|
python/volumeBars.py
|
python/volumeBars.py
|
#!/usr/bin/env python
from rgbmatrix import RGBMatrix
from random import randint
import numpy
import math
import time
rows = 16
chains = 1
parallel = 1
ledMatrix = RGBMatrix(rows, chains, parallel)
height = ledMatrix.height
width = ledMatrix.width
barWidth = width / 4
pi = numpy.pi
barHeights = numpy.array([0, pi / 4, pi / 2, pi * 3 / 4])
while True:
nextFrame = ledMatrix.CreateFrameCanvas()
heights = numpy.sin(barHeights)
barHeights += pi / 4
for x in range(width):
barHeight = int(heights[int(x / barWidth)] * height)
for y in range(height):
if height - y <= barHeight:
nextFrame.SetPixel(x, y, randint(0, 255), randint(0, 255), randint(0, 255))
ledMatrix.SwapOnVSync(nextFrame)
time.sleep(0.2)
|
#!/usr/bin/env python
from rgbmatrix import RGBMatrix
from random import randint
import numpy
import math
import time
rows = 16
chains = 1
parallel = 1
ledMatrix = RGBMatrix(rows, chains, parallel)
height = ledMatrix.height
width = ledMatrix.width
barWidth = width / 4
pi = numpy.pi
barHeights = numpy.array([0, pi / 4, pi / 2, pi * 3 / 4])
while True:
nextFrame = ledMatrix.CreateFrameCanvas()
heights = numpy.sin(barHeights)
barHeights += pi / 4
for x in range(width):
barHeight = int(heights[int(x / barWidth)] * height)
for y in range(height):
if height - y <= barHeight:
if y > 14
nextFrame.SetPixel(x, y, 255, 0, 0)
else if y > 10
nextFrame.SetPixel(x, y, 200, 200, 0)
else
nextFrame.SetPixel(x, y, 0, 200, 0)
ledMatrix.SwapOnVSync(nextFrame)
time.sleep(0.2)
|
Add specific colors for heights
|
Add specific colors for heights
|
Python
|
mit
|
DarkAce65/rpi-led-matrix,DarkAce65/rpi-led-matrix
|
214d5f7e09e9b5e854e7471c6dc337456f428647
|
quickavro/_compat.py
|
quickavro/_compat.py
|
# -*- coding: utf-8 -*-
import sys
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
default_encoding = "UTF-8"
def with_metaclass(meta, *bases):
class metaclass(meta):
__call__ = type.__call__
__init__ = type.__init__
def __new__(cls, name, this_bases, d):
if this_bases is None:
return type.__new__(cls, name, (), d)
return meta(name, bases, d)
return metaclass('temporary_class', None, {})
if PY3:
basestring = (str, bytes)
def ensure_bytes(s):
if type(s) == str:
return bytes(s, default_encoding)
else:
return bytes(s)
def ensure_str(s):
if type(s) == bytes:
return s.decode(default_encoding)
else:
return s
else:
range = xrange
ensure_bytes = lambda s: s
|
# -*- coding: utf-8 -*-
import sys
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
default_encoding = "UTF-8"
def with_metaclass(meta, *bases):
class metaclass(meta):
__call__ = type.__call__
__init__ = type.__init__
def __new__(cls, name, this_bases, d):
if this_bases is None:
return type.__new__(cls, name, (), d)
return meta(name, bases, d)
return metaclass('temporary_class', None, {})
if PY3:
basestring = (str, bytes)
def ensure_bytes(s):
if type(s) == str:
return bytes(s, default_encoding)
else:
return bytes(s)
def ensure_str(s):
if type(s) == bytes:
return s.decode(default_encoding)
else:
return s
else:
range = xrange
ensure_bytes = lambda s: s
ensure_str = lambda s: s
|
Add missing ensure_str for PY2
|
Add missing ensure_str for PY2
|
Python
|
apache-2.0
|
ChrisRx/quickavro,ChrisRx/quickavro
|
a88eb2c7fc2c2d875836f0a4c201ede0c082aceb
|
selectable/tests/__init__.py
|
selectable/tests/__init__.py
|
from django.db import models
from ..base import ModelLookup
from ..registry import registry
class Thing(models.Model):
name = models.CharField(max_length=100)
description = models.CharField(max_length=100)
def __unicode__(self):
return self.name
def __str__(self):
return self.name
class OtherThing(models.Model):
name = models.CharField(max_length=100)
thing = models.ForeignKey(Thing)
def __unicode__(self):
return self.name
def __str__(self):
return self.name
class ManyThing(models.Model):
name = models.CharField(max_length=100)
things = models.ManyToManyField(Thing)
def __unicode__(self):
return self.name
def __str__(self):
return self.name
class ThingLookup(ModelLookup):
model = Thing
search_fields = ('name__icontains', )
registry.register(ThingLookup)
from .test_base import *
from .test_decorators import *
from .test_fields import *
from .test_functional import *
from .test_forms import *
from .test_templatetags import *
from .test_views import *
from .test_widgets import *
|
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from ..base import ModelLookup
from ..registry import registry
@python_2_unicode_compatible
class Thing(models.Model):
name = models.CharField(max_length=100)
description = models.CharField(max_length=100)
def __str__(self):
return self.name
@python_2_unicode_compatible
class OtherThing(models.Model):
name = models.CharField(max_length=100)
thing = models.ForeignKey(Thing)
def __str__(self):
return self.name
@python_2_unicode_compatible
class ManyThing(models.Model):
name = models.CharField(max_length=100)
things = models.ManyToManyField(Thing)
def __str__(self):
return self.name
class ThingLookup(ModelLookup):
model = Thing
search_fields = ('name__icontains', )
registry.register(ThingLookup)
from .test_base import *
from .test_decorators import *
from .test_fields import *
from .test_functional import *
from .test_forms import *
from .test_templatetags import *
from .test_views import *
from .test_widgets import *
|
Update the test model definitions.
|
Update the test model definitions.
|
Python
|
bsd-2-clause
|
affan2/django-selectable,affan2/django-selectable,mlavin/django-selectable,mlavin/django-selectable,affan2/django-selectable,mlavin/django-selectable
|
c3b1f8c97f89e5b9e8b8e74992631bac33bdde5f
|
tests/test_read_user_choice.py
|
tests/test_read_user_choice.py
|
# -*- coding: utf-8 -*-
import click
import pytest
from cookiecutter.compat import read_user_choice
OPTIONS = ['hello', 'world', 'foo', 'bar']
EXPECTED_PROMPT = """Select varname:
1 - hello
2 - world
3 - foo
4 - bar
Choose from 1, 2, 3, 4!"""
@pytest.mark.parametrize('user_choice, expected_value', enumerate(OPTIONS, 1))
def test_click_invocation(mocker, user_choice, expected_value):
choice = mocker.patch('click.Choice')
choice.return_value = click.Choice(OPTIONS)
prompt = mocker.patch('click.prompt')
prompt.return_value = str(user_choice)
assert read_user_choice('varname', OPTIONS) == expected_value
prompt.assert_called_once_with(
EXPECTED_PROMPT,
type=click.Choice(OPTIONS),
default='1'
)
|
# -*- coding: utf-8 -*-
import click
import pytest
from cookiecutter.compat import read_user_choice
OPTIONS = ['hello', 'world', 'foo', 'bar']
EXPECTED_PROMPT = """Select varname:
1 - hello
2 - world
3 - foo
4 - bar
Choose from 1, 2, 3, 4!"""
@pytest.mark.parametrize('user_choice, expected_value', enumerate(OPTIONS, 1))
def test_click_invocation(mocker, user_choice, expected_value):
choice = mocker.patch('click.Choice')
choice.return_value = click.Choice(OPTIONS)
prompt = mocker.patch('click.prompt')
prompt.return_value = str(user_choice)
assert read_user_choice('varname', OPTIONS) == expected_value
prompt.assert_called_once_with(
EXPECTED_PROMPT,
type=click.Choice(OPTIONS),
default='1'
)
@pytest.fixture(params=[1, True, False, None, [], {}])
def invalid_options(request):
return ['foo', 'bar', request.param]
def test_raise_on_non_str_options(invalid_options):
with pytest.raises(TypeError):
read_user_choice('foo', invalid_options)
|
Implement a test if read_user_choice raises on invalid options
|
Implement a test if read_user_choice raises on invalid options
|
Python
|
bsd-3-clause
|
lucius-feng/cookiecutter,foodszhang/cookiecutter,tylerdave/cookiecutter,atlassian/cookiecutter,kkujawinski/cookiecutter,Vauxoo/cookiecutter,sp1rs/cookiecutter,pjbull/cookiecutter,dajose/cookiecutter,benthomasson/cookiecutter,luzfcb/cookiecutter,stevepiercy/cookiecutter,luzfcb/cookiecutter,foodszhang/cookiecutter,vintasoftware/cookiecutter,nhomar/cookiecutter,dajose/cookiecutter,terryjbates/cookiecutter,lgp171188/cookiecutter,audreyr/cookiecutter,atlassian/cookiecutter,willingc/cookiecutter,christabor/cookiecutter,vintasoftware/cookiecutter,nhomar/cookiecutter,venumech/cookiecutter,sp1rs/cookiecutter,michaeljoseph/cookiecutter,takeflight/cookiecutter,willingc/cookiecutter,lucius-feng/cookiecutter,stevepiercy/cookiecutter,audreyr/cookiecutter,christabor/cookiecutter,ramiroluz/cookiecutter,hackebrot/cookiecutter,lgp171188/cookiecutter,takeflight/cookiecutter,cguardia/cookiecutter,kkujawinski/cookiecutter,tylerdave/cookiecutter,benthomasson/cookiecutter,Springerle/cookiecutter,ramiroluz/cookiecutter,michaeljoseph/cookiecutter,terryjbates/cookiecutter,janusnic/cookiecutter,pjbull/cookiecutter,venumech/cookiecutter,agconti/cookiecutter,drgarcia1986/cookiecutter,agconti/cookiecutter,janusnic/cookiecutter,ionelmc/cookiecutter,moi65/cookiecutter,drgarcia1986/cookiecutter,hackebrot/cookiecutter,moi65/cookiecutter,ionelmc/cookiecutter,Vauxoo/cookiecutter,cguardia/cookiecutter,Springerle/cookiecutter
|
47e79b3a01ca4541d79412cdab856f84871e68f8
|
templates/vnc_api_lib_ini_template.py
|
templates/vnc_api_lib_ini_template.py
|
import string
template = string.Template("""
[global]
;WEB_SERVER = 127.0.0.1
;WEB_PORT = 9696 ; connection through quantum plugin
WEB_SERVER = 127.0.0.1
WEB_PORT = 8082 ; connection to api-server directly
BASE_URL = /
;BASE_URL = /tenants/infra ; common-prefix for all URLs
; Authentication settings (optional)
[auth]
AUTHN_TYPE = keystone
AUTHN_SERVER=$__contrail_openstack_ip__
AUTHN_PORT = 35357
AUTHN_URL = /v2.0/tokens
""")
|
import string
template = string.Template("""
[global]
;WEB_SERVER = 127.0.0.1
;WEB_PORT = 9696 ; connection through quantum plugin
WEB_SERVER = 127.0.0.1
WEB_PORT = 8082 ; connection to api-server directly
BASE_URL = /
;BASE_URL = /tenants/infra ; common-prefix for all URLs
; Authentication settings (optional)
[auth]
AUTHN_TYPE = keystone
AUTHN_PROTOCOL = http
AUTHN_SERVER=$__contrail_openstack_ip__
AUTHN_PORT = 35357
AUTHN_URL = /v2.0/tokens
""")
|
Add auth protocol for keystone connection in vnc_api
|
Add auth protocol for keystone connection in vnc_api
|
Python
|
apache-2.0
|
Juniper/contrail-provisioning,Juniper/contrail-provisioning
|
e8b50a70fc7de1842ebe8bc796736459bf154432
|
dyconnmap/cluster/__init__.py
|
dyconnmap/cluster/__init__.py
|
# -*- coding: utf-8 -*-
"""
"""
# Author: Avraam Marimpis <avraam.marimpis@gmail.com>
from .ng import NeuralGas
from .mng import MergeNeuralGas
from .rng import RelationalNeuralGas
from .gng import GrowingNeuralGas
from .som import SOM
from .umatrix import umatrix
__all__ = [
"NeuralGas",
"MergeNeuralGas",
"RelationalNeuralGas",
"GrowingNeuralGas",
"SOM",
"umatrix",
]
|
# -*- coding: utf-8 -*-
"""
"""
# Author: Avraam Marimpis <avraam.marimpis@gmail.com>
from .ng import NeuralGas
from .mng import MergeNeuralGas
from .rng import RelationalNeuralGas
from .gng import GrowingNeuralGas
from .som import SOM
from .umatrix import umatrix
from .validity import ray_turi, davies_bouldin
__all__ = [
"NeuralGas",
"MergeNeuralGas",
"RelationalNeuralGas",
"GrowingNeuralGas",
"SOM",
"umatrix",
"ray_turi",
"davies_bouldin",
]
|
Add the new cluster validity methods in the module.
|
Add the new cluster validity methods in the module.
|
Python
|
bsd-3-clause
|
makism/dyfunconn
|
3d38848287b168cfbe3c9fe5297e7f322027634d
|
tests/test_parsePoaXml_test.py
|
tests/test_parsePoaXml_test.py
|
import unittest
import os
import re
os.sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import parsePoaXml
import generatePoaXml
# Import test settings last in order to override the regular settings
import poa_test_settings as settings
def override_settings():
# For now need to override settings to use test data
generatePoaXml.settings = settings
def create_test_directories():
try:
os.mkdir(settings.TEST_TEMP_DIR)
except OSError:
pass
try:
os.mkdir(settings.TARGET_OUTPUT_DIR)
except OSError:
pass
class TestParsePoaXml(unittest.TestCase):
def setUp(self):
override_settings()
create_test_directories()
self.passes = []
self.passes.append('elife-02935-v2.xml')
self.passes.append('elife-04637-v2.xml')
self.passes.append('elife-15743-v1.xml')
self.passes.append('elife-02043-v2.xml')
def test_parse(self):
for xml_file_name in self.passes:
file_path = settings.XLS_PATH + xml_file_name
articles = parsePoaXml.build_articles_from_article_xmls([file_path])
self.assertEqual(len(articles), 1)
if __name__ == '__main__':
unittest.main()
|
import unittest
import os
import re
os.sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import parsePoaXml
import generatePoaXml
# Import test settings last in order to override the regular settings
import poa_test_settings as settings
class TestParsePoaXml(unittest.TestCase):
def setUp(self):
self.passes = []
self.passes.append('elife-02935-v2.xml')
self.passes.append('elife-04637-v2.xml')
self.passes.append('elife-15743-v1.xml')
self.passes.append('elife-02043-v2.xml')
def test_parse(self):
for xml_file_name in self.passes:
file_path = settings.XLS_PATH + xml_file_name
articles = parsePoaXml.build_articles_from_article_xmls([file_path])
self.assertEqual(len(articles), 1)
if __name__ == '__main__':
unittest.main()
|
Delete excess code in the latest test scenario.
|
Delete excess code in the latest test scenario.
|
Python
|
mit
|
gnott/elife-poa-xml-generation,gnott/elife-poa-xml-generation
|
ddad1cf5c4b90ad7997fee72ecd4949dafa43315
|
custom/enikshay/model_migration_sets/private_nikshay_notifications.py
|
custom/enikshay/model_migration_sets/private_nikshay_notifications.py
|
from casexml.apps.case.util import get_datetime_case_property_changed
from custom.enikshay.const import (
ENROLLED_IN_PRIVATE,
REAL_DATASET_PROPERTY_VALUE,
)
class PrivateNikshayNotifiedDateSetter(object):
"""Sets the date_private_nikshay_notification property for use in reports
"""
def __init__(self, domain, person, episode):
self.domain = domain
self.person = person
self.episode = episode
def update_json(self):
if not self.should_update:
return {}
registered_datetime = get_datetime_case_property_changed(
self.episode, 'private_nikshay_registered', 'true',
)
if registered_datetime is not None:
return {
'date_private_nikshay_notification': str(registered_datetime.date())
}
else:
return {}
@property
def should_update(self):
if self.episode.get_case_property('date_private_nikshay_notification') is not None:
return False
if self.episode.get_case_property('private_nikshay_registered') != 'true':
return False
if self.episode.get_case_property(ENROLLED_IN_PRIVATE) != 'true':
return False
if self.person.get_case_property('dataset') != REAL_DATASET_PROPERTY_VALUE:
return False
return True
|
from casexml.apps.case.util import get_datetime_case_property_changed
from custom.enikshay.const import ENROLLED_IN_PRIVATE
class PrivateNikshayNotifiedDateSetter(object):
"""Sets the date_private_nikshay_notification property for use in reports
"""
def __init__(self, domain, person, episode):
self.domain = domain
self.person = person
self.episode = episode
def update_json(self):
if not self.should_update:
return {}
registered_datetime = get_datetime_case_property_changed(
self.episode, 'private_nikshay_registered', 'true',
)
if registered_datetime is not None:
return {
'date_private_nikshay_notification': str(registered_datetime.date())
}
else:
return {}
@property
def should_update(self):
if self.episode.get_case_property('date_private_nikshay_notification') is not None:
return False
if self.episode.get_case_property('private_nikshay_registered') != 'true':
return False
if self.episode.get_case_property(ENROLLED_IN_PRIVATE) != 'true':
return False
return True
|
Remove redundant case property check
|
Remove redundant case property check
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
b3224d83dd1fea7a4b50f93c775a824d82aec806
|
scan/commands/include.py
|
scan/commands/include.py
|
'''
Created on Mar 8,2015
@author: qiuyx
'''
from scan.commands.command import Command
import xml.etree.ElementTree as ET
class Include(Command):
'''
classdocs
'''
def __init__(self, scanFile=None, macros=None,errHandler=None):
'''
@param scanFile: The included scan file path located at /scan/example
Defaults as None.
@param macros:
Usage::
>>>icl=Include(scanFile='PrepMotor.scn',macros='macro=value')
'''
self.__scanFile=scanFile
self.__macros=macros
self.__errHandler=errHandler
def genXML(self):
xml = ET.Element('include')
ET.SubElement(xml, 'scan_file').text = self.__scanFile
if self.__macros:
ET.SubElement(xml, 'macros').text = self.__macros
if self.__errHandler:
ET.SubElement(xml,'error_handler').text = self.__errHandler
return xml
def __repr__(self):
return self.toCmdString()
def toCmdString(self):
result = "Include('%s'" % self.__scanFile
if self.__macros:
result += ", macros='%s'" % self.__macros
if self.__errHandler:
result += ", errHandler='%s'" % self.__errHandler
result += ")"
return result
|
'''
Created on Mar 8,2015
@author: qiuyx
'''
from scan.commands.command import Command
import xml.etree.ElementTree as ET
class Include(Command):
'''
classdocs
'''
def __init__(self, scan, macros=None, errhandler=None):
'''
@param scan: Name of included scan file, must be on the server's list of script_paths
@param macros: "name=value, other=42"
Usage::
>>>icl=Include(scanFile='PrepMotor.scn', macros='macro=value')
'''
self.__scanFile = scan
self.__macros = macros
self.__errHandler = errhandler
def genXML(self):
xml = ET.Element('include')
ET.SubElement(xml, 'scan_file').text = self.__scanFile
if self.__macros:
ET.SubElement(xml, 'macros').text = self.__macros
if self.__errHandler:
ET.SubElement(xml,'error_handler').text = self.__errHandler
return xml
def __repr__(self):
return self.toCmdString()
def toCmdString(self):
result = "Include('%s'" % self.__scanFile
if self.__macros:
result += ", macros='%s'" % self.__macros
if self.__errHandler:
result += ", errHandler='%s'" % self.__errHandler
result += ")"
return result
|
Include command: All arguments lowercase. Require file, default to empty macros
|
Include command: All arguments lowercase. Require file, default to empty
macros
|
Python
|
epl-1.0
|
PythonScanClient/PyScanClient,PythonScanClient/PyScanClient
|
b47ecb3464585d762c17694190286388c25dbaf8
|
examples/convert_units.py
|
examples/convert_units.py
|
# -*- coding: utf-8 -*-
from chatterbot import ChatBot
bot = ChatBot(
"Unit Converter",
logic_adapters=[
"chatterbot.logic.UnitConversion",
],
input_adapter="chatterbot.input.VariableInputTypeAdapter",
output_adapter="chatterbot.output.OutputAdapter"
)
questions = ['How many meters are in a kilometer?', 'How many meters are in one inch?', '0 celsius to fahrenheit', 'one hour is how many minutes ?']
# Prints the convertion given the specific question
for q in questions:
response = bot.get_response(q)
print(q + " - Response: " + response.text)
|
# -*- coding: utf-8 -*-
from chatterbot import ChatBot
bot = ChatBot(
"Unit Converter",
logic_adapters=[
"chatterbot.logic.UnitConversion",
],
input_adapter="chatterbot.input.VariableInputTypeAdapter",
output_adapter="chatterbot.output.OutputAdapter"
)
questions = ['How many meters are in a kilometer?',
'How many meters are in one inch?',
'0 celsius to fahrenheit',
'one hour is how many minutes ?']
# Prints the convertion given the specific question
for q in questions:
response = bot.get_response(q)
print(q + " - Response: " + response.text)
|
Break list declaration in multiple lines
|
Break list declaration in multiple lines
|
Python
|
bsd-3-clause
|
gunthercox/ChatterBot,vkosuri/ChatterBot
|
d38617dd6bf5c7b0f17245dd5a5e95a335ac6626
|
tracpro/orgs_ext/middleware.py
|
tracpro/orgs_ext/middleware.py
|
from django.utils.translation import ugettext_lazy as _
from django.http import HttpResponseBadRequest
from temba_client.base import TembaAPIError
class HandleTembaAPIError(object):
""" Catch all Temba exception errors """
def process_exception(self, request, exception):
if isinstance(exception, TembaAPIError):
return HttpResponseBadRequest(
_("Org does not have a valid API Key. " +
"Please edit the org through Site Manage or contact your administrator."))
pass
|
from django.utils.translation import ugettext_lazy as _
from django.http import HttpResponseBadRequest
from temba_client.base import TembaAPIError, TembaConnectionError
class HandleTembaAPIError(object):
""" Catch all Temba exception errors """
def process_exception(self, request, exception):
rapidProConnectionErrorString = _(
"RapidPro appears to be down right now or we cannot connect due to your internet connection. "
"Please try again later.")
if isinstance(exception, TembaAPIError):
rapidpro_connection_error_codes = ["502", "503", "504"]
if any(code in exception.caused_by.message for code in rapidpro_connection_error_codes):
return HttpResponseBadRequest(
rapidProConnectionErrorString)
else:
return HttpResponseBadRequest(
_("Org does not have a valid API Key. "
"Please edit the org through Site Manage or contact your administrator."))
elif isinstance(exception, TembaConnectionError):
return HttpResponseBadRequest(
rapidProConnectionErrorString)
pass
|
Check forerror codes from temba connection issues
|
Check forerror codes from temba connection issues
|
Python
|
bsd-3-clause
|
xkmato/tracpro,rapidpro/tracpro,rapidpro/tracpro,xkmato/tracpro,xkmato/tracpro,rapidpro/tracpro,xkmato/tracpro
|
be19869d76bb655990464094ad17617b7b48ab3b
|
server/create_tiff.py
|
server/create_tiff.py
|
# A romanesco script to convert a slide to a TIFF using vips.
import subprocess
import os
out_path = os.path.join(_tempdir, out_filename)
convert_command = (
'vips',
'tiffsave',
'"%s"' % in_path,
'"%s"' % out_path,
'--compression', 'jpeg',
'--Q', '90',
'--tile',
'--tile-width', '256',
'--tile-height', '256',
'--pyramid',
'--bigtiff'
)
proc = subprocess.Popen(convert_command)
proc.wait()
if proc.returncode:
raise Exception('VIPS process failed (rc=%d).' % proc.returncode)
|
# A romanesco script to convert a slide to a TIFF using vips.
import subprocess
import os
out_path = os.path.join(_tempdir, out_filename)
convert_command = (
'vips',
'tiffsave',
'"%s"' % in_path,
'"%s"' % out_path,
'--compression', 'jpeg',
'--Q', '90',
'--tile',
'--tile-width', '256',
'--tile-height', '256',
'--pyramid',
'--bigtiff'
)
proc = subprocess.Popen(convert_command)
proc.wait()
if proc.returncode:
raise Exception('VIPS command failed (rc=%d): %s' % (
proc.returncode, ' '.join(convert_command)))
|
Print command in failure case
|
Print command in failure case
|
Python
|
apache-2.0
|
DigitalSlideArchive/large_image,DigitalSlideArchive/large_image,girder/large_image,DigitalSlideArchive/large_image,girder/large_image,girder/large_image
|
e5acbfc176de3b531528c8b15f57e5d3feab3ad1
|
melody/constraints/abstract_constraint.py
|
melody/constraints/abstract_constraint.py
|
"""
File: abstract_constraint.py
Purpose: Define a constraint, in an abstract sense, related to a number of actors.
"""
from abc import ABCMeta, abstractmethod
class AbstractConstraint(object):
"""
Class that represents a constraint, a set of actors that define a constraint amongst themselves.
ParameterMap: A map from template note to contextual note..
"""
__metaclass__ = ABCMeta
def __init__(self, actors):
self.__actors = list(actors)
@property
def actors(self):
return list(self.__actors)
@abstractmethod
def clone(self, new_actors=None):
"""
Clone the constraint.
:return:
"""
@abstractmethod
def verify(self, solution_context):
"""
Verify that the actor map parameters are consistent with constraint.
:params solution_context: aka pmap, map of actors to ContextualNotes.
:return: Boolean if verification holds.
May throw Exception dependent on implementation.
"""
@abstractmethod
def values(self, solution_context, v_note):
"""
Method to generate all possible note values for actor v_note's target.
The method returns a set of values for v_note.
:param solution_context: includes parameter map.
:param v_note: source actor, whose target values we are computing.
:return: The set of all possible values for v_note's target.
Note: The return value is a set!
"""
|
"""
File: abstract_constraint.py
Purpose: Define a constraint, in an abstract sense, related to a number of actors.
"""
from abc import ABCMeta, abstractmethod
class AbstractConstraint(object):
"""
Class that represents a constraint, a set of actors that define a constraint amongst themselves.
ParameterMap: A map from template note to contextual note..
"""
__metaclass__ = ABCMeta
def __init__(self, actors):
self.__actors = list(actors)
@property
def actors(self):
return list(self.__actors)
@abstractmethod
def clone(self, new_actors=None):
"""
Clone the constraint.
:return:
"""
@abstractmethod
def verify(self, solution_context):
"""
Verify that the actor map parameters are consistent with constraint.
:params solution_context: aka pmap, map of actors to ContextualNotes.
:return: Boolean if verification holds.
May throw Exception dependent on implementation.
"""
@abstractmethod
def values(self, solution_context, v_note):
"""
Method to generate all possible note values for actor v_note's target.
The method returns a set of values for v_note.
:param solution_context: includes parameter map.
:param v_note: source actor, whose target values we are computing.
:return: The set of all possible values for v_note's target.
Note: The return value is a set!
"""
def __hash__(self):
return hash(len(self.actors))
def __eq__(self, other):
if not isinstance(other, AbstractConstraint):
return NotImplemented
return self is other
|
Add hash and eq methods
|
Add hash and eq methods
|
Python
|
mit
|
dpazel/music_rep
|
8c2ad666266d4dbe8310007bd82dc907a288ee5a
|
databroker/__init__.py
|
databroker/__init__.py
|
# Import intake to run driver discovery first and avoid circular import issues.
import intake
del intake
import warnings
import logging
logger = logging.getLogger(__name__)
from .v1 import Broker, Header, ALL, temp, temp_config
from .utils import (lookup_config, list_configs, describe_configs,
wrap_in_doct, DeprecatedDoct, wrap_in_deprecated_doct)
from .discovery import MergedCatalog, EntrypointsCatalog, V0Catalog
# A catalog created from discovered entrypoints and v0 catalogs.
catalog = MergedCatalog([EntrypointsCatalog(), V0Catalog()])
# set version string using versioneer
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
### Legacy imports ###
try:
from .databroker import DataBroker
except ImportError:
pass
else:
from .databroker import (DataBroker, DataBroker as db,
get_events, get_table, stream, get_fields,
restream, process)
from .pims_readers import get_images
|
# Import intake to run driver discovery first and avoid circular import issues.
import intake
import warnings
import logging
logger = logging.getLogger(__name__)
from .v1 import Broker, Header, ALL, temp, temp_config
from .utils import (lookup_config, list_configs, describe_configs,
wrap_in_doct, DeprecatedDoct, wrap_in_deprecated_doct)
from .discovery import MergedCatalog, EntrypointsCatalog, V0Catalog
# A catalog created from discovered entrypoints, v0, and intake YAML catalogs.
catalog = MergedCatalog([EntrypointsCatalog(), V0Catalog(), intake.cat])
# set version string using versioneer
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
### Legacy imports ###
try:
from .databroker import DataBroker
except ImportError:
pass
else:
from .databroker import (DataBroker, DataBroker as db,
get_events, get_table, stream, get_fields,
restream, process)
from .pims_readers import get_images
|
Include intake.cat. YAML is easier than entry_points.
|
Include intake.cat. YAML is easier than entry_points.
|
Python
|
bsd-3-clause
|
ericdill/databroker,ericdill/databroker
|
0624417fbac1cf23316ee0a58ae41c0519a390c4
|
go/apps/surveys/definition.py
|
go/apps/surveys/definition.py
|
from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
from go.apps.surveys.tasks import export_vxpolls_data
class SendSurveyAction(ConversationAction):
action_name = 'send_survey'
action_display_name = 'Send Survey'
needs_confirmation = True
needs_group = True
needs_running = True
def check_disabled(self):
if self._conv.has_channel_supporting_generic_sends():
return None
return ("This action needs channels capable of sending"
" messages attached to this conversation.")
def perform_action(self, action_data):
return self.send_command(
'send_survey', batch_id=self._conv.batch.key,
msg_options={}, delivery_class=self._conv.delivery_class)
class DownloadUserDataAction(ConversationAction):
action_name = 'download_user_data'
action_display_name = 'Download User Data'
def perform_action(self, action_data):
return export_vxpolls_data.delay(self._conv.user_account.key,
self._conv.key)
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'surveys'
actions = (
SendSurveyAction,
DownloadUserDataAction,
)
|
from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
from go.apps.surveys.tasks import export_vxpolls_data
class SendSurveyAction(ConversationAction):
action_name = 'send_survey'
action_display_name = 'Send Survey'
needs_confirmation = True
needs_group = True
needs_running = True
def check_disabled(self):
if self._conv.has_channel_supporting_generic_sends():
return None
return ("This action needs channels capable of sending"
" messages attached to this conversation.")
def perform_action(self, action_data):
return self.send_command(
'send_survey', batch_id=self._conv.batch.key,
msg_options={}, delivery_class=self._conv.delivery_class)
class DownloadUserDataAction(ConversationAction):
action_name = 'download_user_data'
action_display_name = 'Download User Data'
action_display_verb = 'Send CSV via e-mail'
def perform_action(self, action_data):
return export_vxpolls_data.delay(self._conv.user_account.key,
self._conv.key)
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'surveys'
actions = (
SendSurveyAction,
DownloadUserDataAction,
)
|
Change download survey data display verb to 'Send CSV via e-mail'.
|
Change download survey data display verb to 'Send CSV via e-mail'.
|
Python
|
bsd-3-clause
|
praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go
|
7a3c4eed8888c8c2befc94020bebbfc18e1d6156
|
src/redis_client.py
|
src/redis_client.py
|
import txredisapi as redis
from twisted.internet import defer, reactor
import config
connection = None
@defer.inlineCallbacks
def run_redis_client(on_started):
pony = yield redis.makeConnection(config.redis['host'],
config.redis['port'],
config.redis['db'],
poolsize = 8,
reconnect = True,
isLazy = True)
global connection
connection = pony
on_started()
|
import txredisapi as redis
from twisted.internet import defer, reactor
import config
connection = None
def run_redis_client(on_started):
df = redis.makeConnection(config.redis['host'],
config.redis['port'],
config.redis['db'],
poolsize = 8,
reconnect = True,
isLazy = False)
def done(pony):
global connection
connection = pony
on_started()
df.addCallback(done)
|
Fix the Redis client connection to actually work
|
Fix the Redis client connection to actually work
It previously lied.
|
Python
|
mit
|
prophile/compd,prophile/compd
|
fdee90e6fd4669222c2da0530d9bc90131b5145e
|
djoser/social/views.py
|
djoser/social/views.py
|
from rest_framework import generics, permissions, status
from rest_framework.response import Response
from social_django.utils import load_backend, load_strategy
from djoser.conf import settings
from djoser.social.serializers import ProviderAuthSerializer
class ProviderAuthView(generics.CreateAPIView):
permission_classes = [permissions.AllowAny]
serializer_class = ProviderAuthSerializer
def get(self, request, *args, **kwargs):
redirect_uri = request.GET.get("redirect_uri")
if redirect_uri not in settings.SOCIAL_AUTH_ALLOWED_REDIRECT_URIS:
return Response(status=status.HTTP_400_BAD_REQUEST)
strategy = load_strategy(request)
strategy.session_set("redirect_uri", redirect_uri)
backend_name = self.kwargs["provider"]
backend = load_backend(strategy, backend_name, redirect_uri=redirect_uri)
authorization_url = backend.auth_url()
return Response(data={"authorization_url": authorization_url})
|
from rest_framework import generics, permissions, status
from rest_framework.response import Response
from social_django.utils import load_backend, load_strategy
from djoser.conf import settings
from djoser.social.serializers import ProviderAuthSerializer
class ProviderAuthView(generics.CreateAPIView):
permission_classes = [permissions.AllowAny]
serializer_class = ProviderAuthSerializer
def get(self, request, *args, **kwargs):
redirect_uri = request.GET.get("redirect_uri")
if redirect_uri not in settings.SOCIAL_AUTH_ALLOWED_REDIRECT_URIS:
return Response("Missing SOCIAL_AUTH_ALLOWED_REDIRECT_URIS", status=status.HTTP_400_BAD_REQUEST)
strategy = load_strategy(request)
strategy.session_set("redirect_uri", redirect_uri)
backend_name = self.kwargs["provider"]
backend = load_backend(strategy, backend_name, redirect_uri=redirect_uri)
authorization_url = backend.auth_url()
return Response(data={"authorization_url": authorization_url})
|
Fix for Friendly tips when Missing SOCIAL_AUTH_ALLOWED_REDIRECT_URIS
|
Fix for Friendly tips when Missing SOCIAL_AUTH_ALLOWED_REDIRECT_URIS
i forget add SOCIAL_AUTH_ALLOWED_REDIRECT_URIS to my config
it return 400 error, i don't know why , i pay more time find the issues
so i add Friendly tips
-- sorry , my english is not well
and thank you all
|
Python
|
mit
|
sunscrapers/djoser,sunscrapers/djoser,sunscrapers/djoser
|
db14ed2c23b3838796e648faade2c73b786d61ff
|
tartpy/eventloop.py
|
tartpy/eventloop.py
|
"""
Very basic implementation of an event loop
==========================================
The eventloop is a singleton to schedule and run events.
Exports
-------
- ``EventLoop``: the basic eventloop
"""
import queue
import sys
import threading
import time
import traceback
from .singleton import Singleton
def _format_exception(exc_info):
"""Create a message with details on the exception."""
exc_type, exc_value, exc_tb = exc_info
return {'exception': {'type': exc_type,
'value': exc_value,
'traceback': exc_tb},
'traceback': traceback.format_exception(*exc_info)}
class EventLoop(object, metaclass=Singleton):
"""A generic event loop object."""
def __init__(self):
self.queue = queue.Queue()
def schedule(self, event):
"""Schedule an event.
The events have the form::
(event, error)
where `event` is a thunk and `error` is called with an
exception message (output of `_format_exception`) if there is
an error when executing `event`.
"""
self.queue.put(event)
def stop(self):
"""Stop the loop."""
pass
def run_step(self, block=True):
"""Process one event."""
ev, error = self.queue.get(block=block)
try:
ev()
except Exception as exc:
error(_format_exception(sys.exc_info()))
def run(self):
"""Process all events in the queue."""
try:
while True:
self.run_step(block=False)
except queue.Empty:
return
|
"""
Very basic implementation of an event loop
==========================================
The eventloop is a singleton to schedule and run events.
Exports
-------
- ``EventLoop``: the basic eventloop
"""
import queue
import sys
import threading
import time
import traceback
from .singleton import Singleton
def exception_message():
"""Create a message with details on the exception."""
exc_type, exc_value, exc_tb = exc_info = sys.exc_info()
return {'exception': {'type': exc_type,
'value': exc_value,
'traceback': exc_tb},
'traceback': traceback.format_exception(*exc_info)}
class EventLoop(object, metaclass=Singleton):
"""A generic event loop object."""
def __init__(self):
self.queue = queue.Queue()
def schedule(self, event):
"""Schedule an event.
The events have the form::
(event, error)
where `event` is a thunk and `error` is called with an
exception message (output of `exception_message`) if there is
an error when executing `event`.
"""
self.queue.put(event)
def stop(self):
"""Stop the loop."""
pass
def run_step(self, block=True):
"""Process one event."""
ev, error = self.queue.get(block=block)
try:
ev()
except Exception as exc:
error(exception_message())
def run(self):
"""Process all events in the queue."""
try:
while True:
self.run_step(block=False)
except queue.Empty:
return
|
Make exception message builder a nicer function
|
Make exception message builder a nicer function
It is used by clients in other modules.
|
Python
|
mit
|
waltermoreira/tartpy
|
3976a5b38e1b5356b83d22d9113aa83c9f09fdec
|
admin/manage.py
|
admin/manage.py
|
from freeposte import manager, db
from freeposte.admin import models
from passlib import hash
@manager.command
def flushdb():
""" Flush the database
"""
db.drop_all()
@manager.command
def initdb():
""" Initialize the database
"""
db.create_all()
@manager.command
def admin(localpart, domain_name, password):
""" Create an admin user
"""
domain = models.Domain(name=domain_name)
user = models.User(
localpart=localpart,
domain=domain,
global_admin=True,
password=hash.sha512_crypt.encrypt(password)
)
db.session.add(domain)
db.session.add(user)
db.session.commit()
if __name__ == "__main__":
manager.run()
|
from freeposte import manager, db
from freeposte.admin import models
from passlib import hash
@manager.command
def flushdb():
""" Flush the database
"""
db.drop_all()
@manager.command
def initdb():
""" Initialize the database
"""
db.create_all()
@manager.command
def admin(localpart, domain_name, password):
""" Create an admin user
"""
domain = models.Domain.query.get(domain_name)
if not domain:
domain = models.Domain(name=domain_name)
db.session.add(domain)
user = models.User(
localpart=localpart,
domain=domain,
global_admin=True,
password=hash.sha512_crypt.encrypt(password)
)
db.session.add(user)
db.session.commit()
if __name__ == "__main__":
manager.run()
|
Allow admin creation after initial setup
|
Allow admin creation after initial setup
|
Python
|
mit
|
kaiyou/freeposte.io,kaiyou/freeposte.io,kaiyou/freeposte.io,kaiyou/freeposte.io
|
2a7d28573d1e4f07250da1d30209304fdb6de90d
|
sqlobject/tests/test_blob.py
|
sqlobject/tests/test_blob.py
|
import pytest
from sqlobject import BLOBCol, SQLObject
from sqlobject.compat import PY2
from sqlobject.tests.dbtest import setupClass, supports
########################################
# BLOB columns
########################################
class ImageData(SQLObject):
image = BLOBCol(default=b'emptydata', length=256)
def test_BLOBCol():
if not supports('blobData'):
pytest.skip("blobData isn't supported")
setupClass(ImageData)
if PY2:
data = ''.join([chr(x) for x in range(256)])
else:
data = bytes(range(256))
prof = ImageData()
prof.image = data
iid = prof.id
ImageData._connection.cache.clear()
prof2 = ImageData.get(iid)
assert prof2.image == data
ImageData(image='string')
assert ImageData.selectBy(image='string').count() == 1
|
import pytest
from sqlobject import BLOBCol, SQLObject
from sqlobject.compat import PY2
from sqlobject.tests.dbtest import setupClass, supports
########################################
# BLOB columns
########################################
class ImageData(SQLObject):
image = BLOBCol(default=b'emptydata', length=256)
def test_BLOBCol():
if not supports('blobData'):
pytest.skip("blobData isn't supported")
setupClass(ImageData)
if PY2:
data = ''.join([chr(x) for x in range(256)])
else:
data = bytes(range(256))
prof = ImageData(image=data)
iid = prof.id
ImageData._connection.cache.clear()
prof2 = ImageData.get(iid)
assert prof2.image == data
ImageData(image=b'string')
assert ImageData.selectBy(image=b'string').count() == 1
|
Use byte string for test
|
Tests(blob): Use byte string for test
|
Python
|
lgpl-2.1
|
sqlobject/sqlobject,drnlm/sqlobject,sqlobject/sqlobject,drnlm/sqlobject
|
95fa71c4439343764cac95a1667e08dc21cb6ebe
|
plugins.py
|
plugins.py
|
from fabric.api import *
import os
import re
__all__ = []
@task
def test(plugin_path):
"""
Symbolically link a host file that contains a redcap plugin into the ./redcap/plugins folder
:param plugin_path: path to plugin folder relative to VagrantFile
:return:
"""
if not os.path.exists(plugin_path):
abort("The folder %s does not exist. Please provide a relative path to a plugin folder you would like to test in the local vm" % plugin_path)
redcap_root = env.live_project_full_path
source_path = "/vagrant/" + plugin_path
target_folder = "/".join([redcap_root, env.plugins_path])
run("ln -sf %s %s" % (source_path, target_folder))
|
from fabric.api import *
import os
import re
__all__ = []
@task
def test(plugin_path):
"""
Symbolically link a host file that contains a redcap plugin into the ./redcap/plugins folder
:param plugin_path: path to plugin folder relative to VagrantFile
:return:
"""
if not os.path.exists(plugin_path):
abort("The folder %s does not exist. Please provide a relative path to a plugin folder you would like to test in the local vm" % plugin_path)
redcap_root = env.live_project_full_path
source_path = "/vagrant/" + plugin_path
target_folder = "/".join([redcap_root, env.plugins_path])
with settings(user=env.deploy_user):
run("ln -sf %s %s" % (source_path, target_folder))
|
Fix plugin test by running scripts as user deploy
|
Fix plugin test by running scripts as user deploy
|
Python
|
bsd-3-clause
|
ctsit/redcap_deployment,ctsit/redcap_deployment,ctsit/redcap_deployment,ctsit/redcap_deployment
|
9c35a41c6594d0ac482a558abf4772150c2a67e9
|
squash/dashboard/urls.py
|
squash/dashboard/urls.py
|
from django.conf.urls import include, url
from django.contrib import admin
from rest_framework.authtoken.views import obtain_auth_token
from rest_framework.routers import DefaultRouter
from . import views
router = DefaultRouter()
router.register(r'job', views.JobViewSet)
router.register(r'metric', views.MetricViewSet)
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^api/', include(router.urls)),
url(r'^api/token/', obtain_auth_token, name='api-token'),
url(r'^(?P<pk>[0-9]+)/dashboard/',
views.MetricDashboardView.as_view(),
name='metric-detail'),
url(r'^$', views.HomeView.as_view(), name='home'),
]
|
from django.conf.urls import include, url
from django.contrib import admin
from rest_framework.authtoken.views import obtain_auth_token
from rest_framework.routers import DefaultRouter
from . import views
<<<<<<< HEAD
router = DefaultRouter()
router.register(r'job', views.JobViewSet)
router.register(r'metric', views.MetricViewSet)
=======
api_router = DefaultRouter()
api_router.register(r'jobs', views.JobViewSet)
api_router.register(r'metrics', views.MetricViewSet)
api_router.register(r'packages', views.PackageViewSet)
>>>>>>> b962845... Make all API resource collection endpoints plural
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^api/', include(router.urls)),
url(r'^api/token/', obtain_auth_token, name='api-token'),
url(r'^(?P<pk>[0-9]+)/dashboard/',
views.MetricDashboardView.as_view(),
name='metric-detail'),
url(r'^$', views.HomeView.as_view(), name='home'),
]
|
Make all API resource collection endpoints plural
|
Make all API resource collection endpoints plural
/api/job/ -> /api/jobs/
/api/metric/ -> /api/metrics/
It's a standard convention in RESTful APIs to make endpoints for
collections plural.
|
Python
|
mit
|
lsst-sqre/qa-dashboard,lsst-sqre/qa-dashboard,lsst-sqre/qa-dashboard
|
32a61c6df871ad148a8c51b7b4cab3f392a2c61a
|
tsune/urls.py
|
tsune/urls.py
|
from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'tsune.views.home', name='home'),
# url(r'^tsune/', include('tsune.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:go.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^user/', include('authentication.urls')),
url(r'^cardbox/', include('cardbox.deck_urls', namespace="deck")),
url(r'^cardbox/cards/', include('cardbox.card_urls', namespace="card")),
)
|
from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
from django.http import HttpResponseRedirect
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'tsune.views.home', name='home'),
# url(r'^tsune/', include('tsune.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:go.contrib.admindocs.urls')),
url(r'^$', lambda x: HttpResponseRedirect('/cardbox/')), # redirect to /cardbox/
url(r'^admin/', include(admin.site.urls)),
url(r'^user/', include('authentication.urls')),
url(r'^cardbox/', include('cardbox.deck_urls', namespace="deck")),
url(r'^cardbox/cards/', include('cardbox.card_urls', namespace="card")),
)
|
Add redirect to cardbox if root url is accessed
|
Add redirect to cardbox if root url is accessed
|
Python
|
mit
|
DummyDivision/Tsune,DummyDivision/Tsune,DummyDivision/Tsune
|
171bbf488db643d01d6a58c9376ba85c200711d5
|
gtts/tokenizer/tests/test_pre_processors.py
|
gtts/tokenizer/tests/test_pre_processors.py
|
# -*- coding: utf-8 -*-
import unittest
from gtts.tokenizer.pre_processors import tone_marks, end_of_line, abbreviations, word_sub
class TestPreProcessors(unittest.TestCase):
def test_tone_marks(self):
_in = "lorem!ipsum?"
_out = "lorem! ipsum? "
self.assertEqual(tone_marks(_in), _out)
def test_end_of_line(self):
_in = """test-
ing"""
_out = "testing"
self.assertEqual(end_of_line(_in), _out)
def test_abbreviations(self):
_in = "jr. sr. dr."
_out = "jr sr dr"
self.assertEqual(abbreviations(_in), _out)
def test_word_sub(self):
_in = "M. Bacon"
_out = "Monsieur Bacon"
self.assertEqual(word_sub(_in), _out)
if __name__ == '__main__':
unittest.main()
|
# -*- coding: utf-8 -*-
import unittest
from gtts.tokenizer.pre_processors import tone_marks, end_of_line, abbreviations, word_sub
class TestPreProcessors(unittest.TestCase):
def test_tone_marks(self):
_in = "lorem!ipsum?"
_out = "lorem! ipsum? "
self.assertEqual(tone_marks(_in), _out)
def test_end_of_line(self):
_in = """test-
ing"""
_out = "testing"
self.assertEqual(end_of_line(_in), _out)
def test_abbreviations(self):
_in = "jr. sr. dr."
_out = "jr sr dr"
self.assertEqual(abbreviations(_in), _out)
def test_word_sub(self):
_in = "Esq. Bacon"
_out = "Esquire Bacon"
self.assertEqual(word_sub(_in), _out)
if __name__ == '__main__':
unittest.main()
|
Update unit test for ad8bcd8
|
Update unit test for ad8bcd8
|
Python
|
mit
|
pndurette/gTTS
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.