commit
stringlengths 40
40
| old_file
stringlengths 4
236
| new_file
stringlengths 4
236
| old_contents
stringlengths 1
3.26k
| new_contents
stringlengths 16
4.43k
| subject
stringlengths 16
624
| message
stringlengths 17
3.29k
| lang
stringclasses 5
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
|---|---|---|---|---|---|---|---|---|---|
81215120afffe54b17be3f38bbc2ac292452c0c4
|
addons/mail/models/ir_attachment.py
|
addons/mail/models/ir_attachment.py
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class IrAttachment(models.Model):
_inherit = 'ir.attachment'
@api.multi
def _post_add_create(self):
""" Overrides behaviour when the attachment is created through the controller
"""
super(IrAttachment, self)._post_add_create()
for record in self:
record.register_as_main_attachment(force=False)
@api.multi
def unlink(self):
self.remove_as_main_attachment()
super(IrAttachment, self).unlink()
@api.multi
def remove_as_main_attachment(self):
for attachment in self:
related_record = self.env[attachment.res_model].browse(attachment.res_id)
if related_record and hasattr(related_record, 'message_main_attachment_id'):
if related_record.message_main_attachment_id == attachment:
related_record.message_main_attachment_id = False
def register_as_main_attachment(self, force=True):
""" Registers this attachment as the main one of the model it is
attached to.
"""
self.ensure_one()
related_record = self.env[self.res_model].browse(self.res_id)
# message_main_attachment_id field can be empty, that's why we compare to False;
# we are just checking that it exists on the model before writing it
if related_record and hasattr(related_record, 'message_main_attachment_id'):
if force or not related_record.message_main_attachment_id:
related_record.message_main_attachment_id = self
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class IrAttachment(models.Model):
_inherit = 'ir.attachment'
@api.multi
def _post_add_create(self):
""" Overrides behaviour when the attachment is created through the controller
"""
super(IrAttachment, self)._post_add_create()
for record in self:
record.register_as_main_attachment(force=False)
def register_as_main_attachment(self, force=True):
""" Registers this attachment as the main one of the model it is
attached to.
"""
self.ensure_one()
related_record = self.env[self.res_model].browse(self.res_id)
# message_main_attachment_id field can be empty, that's why we compare to False;
# we are just checking that it exists on the model before writing it
if related_record and hasattr(related_record, 'message_main_attachment_id'):
if force or not related_record.message_main_attachment_id:
related_record.message_main_attachment_id = self
|
Revert "[FIX] mail: remove attachment as main at unlink"
|
Revert "[FIX] mail: remove attachment as main at unlink"
This reverts commit abc45b1
Since by default the ondelete attribute of a many2one is `set null`,
this was completely unnecessary to begin with.
Bug caused by this commit:
Unlink a record that has some attachments.
The unlink first removes the record, then its related attachments.
It calls remove_as_main_attachment, which reads the attachment res_model and
res_id. This triggers a check that the related record can be read.
However the related record has already been removed, an exception is raised.
It is thus impossible to unlink a record.
Closes #32563
closes odoo/odoo#32572
Signed-off-by: Raphael Collet (rco) <fcee45b878db1f337818c5c606c1542797080a40@openerp.com>
|
Python
|
agpl-3.0
|
ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo
|
17cdaa0cf7313e4edf5ef28ea2634410bd085d4f
|
rsk_mind/transformer/transformer.py
|
rsk_mind/transformer/transformer.py
|
class Transformer(object):
class Feats():
exclude = None
def __init__(self):
for field in self.get_feats():
getattr(self.Feats, field).bind(field, self)
def get_feats(self):
return [x for x in dir(self.Feats) if not (x.startswith('__') or x in ['exclude'])]
def get_transformer_func(self, feat_name):
return getattr(self.Feats, feat_name).transform
|
class Transformer(object):
"""
Base class for all transformer
"""
class Feats:
"""
Define feats on dataset
"""
exclude = None
def __init__(self):
for field in self.get_feats():
getattr(self.Feats, field).bind(field, self)
def get_feats(self):
"""
:return: a list of feats
"""
return [x for x in dir(self.Feats) if not (x.startswith('__') or x in ['exclude'])]
def get_transformer_func(self, feat_name):
"""
:param feat_name: name of feat
:return: a transformer function on feat
"""
return getattr(self.Feats, feat_name).transform
def get_excluded_feats(self):
"""
:return: a list with excluded feats
"""
return self.Feats.exclude
|
Add documentation and some methods
|
Add documentation and some methods
|
Python
|
mit
|
rsk-mind/rsk-mind-framework
|
1ea4e06fb3dc08a27a37b379e9ba2fffd5303625
|
ca_on_school_boards_english_public/__init__.py
|
ca_on_school_boards_english_public/__init__.py
|
from utils import CanadianJurisdiction
from opencivicdata.divisions import Division
from pupa.scrape import Organization
class OntarioEnglishPublicSchoolBoards(CanadianJurisdiction):
classification = 'legislature' # just to avoid clash
division_id = 'ocd-division/country:ca/province:on'
division_name = 'Ontario English Public School Board boundary"'
name = 'Ontario English Public School Boards'
url = 'http://www.edu.gov.on.ca/eng/sbinfo/boardList.html'
def get_organizations(self):
organization = Organization(self.name, classification=self.classification)
for division in Division.get(self.division_id).children('school_district'):
organization.add_post(role='Chair', label=division.name, division_id=division.id)
for i in range (0, 15): # XXX made-up number
organization.add_post(role='Trustee (seat {})'.format(i), label=division.name, division_id=division.id)
yield organization
|
from utils import CanadianJurisdiction
from opencivicdata.divisions import Division
from pupa.scrape import Organization
class OntarioEnglishPublicSchoolBoards(CanadianJurisdiction):
classification = 'legislature' # just to avoid clash
division_id = 'ocd-division/country:ca/province:on'
division_name = 'Ontario English Public School Board boundary"'
name = 'Ontario English Public School Boards'
url = 'http://www.edu.gov.on.ca/eng/sbinfo/boardList.html'
def get_organizations(self):
organization = Organization(self.name, classification=self.classification)
for division in Division.get(self.division_id).children('school_district'):
organization.add_post(role='Chair', label=division.name, division_id=division.id)
for i in range (0, 15): # XXX made-up number
organization.add_post(role='Trustee', label='{} (seat {})'.format(division.name, i), division_id=division.id)
yield organization
|
Fix where the seat number appears
|
Fix where the seat number appears
|
Python
|
mit
|
opencivicdata/scrapers-ca,opencivicdata/scrapers-ca
|
9f18d05091abfb6b13914c4b29970ed6fc5d367d
|
penelophant/models/__init__.py
|
penelophant/models/__init__.py
|
""" Penelophant Models """
from .User import User
from .UserAuthentication import UserAuthentication
from .Auction import Auction
from .Bid import Bid
from .Invoice import Invoice
|
""" Penelophant Models """
from .User import User
from .UserAuthentication import UserAuthentication
from .Auction import Auction
from .Bid import Bid
from .Invoice import Invoice
from .auctions.DoubleBlindAuction import DoubleBlindAuction
|
Load in the double blind auction
|
Load in the double blind auction
|
Python
|
apache-2.0
|
kevinoconnor7/penelophant,kevinoconnor7/penelophant
|
d89093f739cf5c953fb81d1c5c3e6dde5e90fb0c
|
check-if-a-given-sequence-of-moves-for-a-robot-is-circular-or-not.py
|
check-if-a-given-sequence-of-moves-for-a-robot-is-circular-or-not.py
|
from operator import add
import math
moves = raw_input("Enter the moves: ")
start_position = [0,0]
current_position = [0,0]
'''
heading = [1,90] - 1 step North
[1, -90] - 1 step South
[1,0] - East
[1,360] - West
'''
heading = [1,0]
for move in moves:
if move.upper() == "G":
angle = heading[1]
step = heading[0]
move_coord = [ round(step*math.cos(math.radians(angle))), round(step*math.sin(math.radians(angle))) ]
current_position = map(add, current_position, move_coord)
elif move.upper() == "L":
heading = map(add, heading, [0, 90])
elif move.upper() == "R":
heading = map(add, heading, [0, -90])
if start_position == current_position:
print "Given sequence of moves is circular"
else:
print "Given sequence of moves is NOT circular"
|
'''
URL: http://www.geeksforgeeks.org/check-if-a-given-sequence-of-moves-for-a-robot-is-circular-or-not/
====
Python 2.7 compatible
Problem statement:
====================
Given a sequence of moves for a robot, check if the sequence is circular or not. A sequence of moves is circular if first and last positions of robot are same. A move can be on of the following.
G - Go one unit
L - Turn left
R - Turn right
'''
from operator import add
import math
moves = raw_input("Enter the moves: ")
start_position = [0,0]
current_position = [0,0]
'''
heading = [1,90] - 1 step North
[1, -90] - 1 step South
[1,0] - East
[1,360] - West
'''
heading = [1,0]
for move in moves:
if move.upper() == "G":
angle = heading[1]
step = heading[0]
# move_coord holds the x and y coordinate movement for the robot
move_coord = [ round(step*math.cos(math.radians(angle))), round(step*math.sin(math.radians(angle))) ]
current_position = map(add, current_position, move_coord)
elif move.upper() == "L":
# turn the robot 90 degrees anti-clockwise
heading = map(add, heading, [0, 90])
elif move.upper() == "R":
# turn the robot 90 degrees clockwise
heading = map(add, heading, [0, -90])
if start_position == current_position:
print "Given sequence of moves is circular"
else:
print "Given sequence of moves is NOT circular"
|
Check if path is circular or not. Write from correct repository.
|
[Correct]: Check if path is circular or not. Write from correct
repository.
|
Python
|
apache-2.0
|
MayankAgarwal/GeeksForGeeks
|
ab3dc6466b617a5bf5a0bec2c122eca645c1d29f
|
cloudera-framework-assembly/src/main/resources/python/script_util.py
|
cloudera-framework-assembly/src/main/resources/python/script_util.py
|
import os
def hdfs_make_qualified(path):
return path if 'CF_HADOOP_DEFAULT_FS' not in os.environ else os.environ['CF_HADOOP_DEFAULT_FS'] + path
|
import os
import re
def hdfs_make_qualified(path):
return path if (re.match(r'[.]*://[.]*', path) or 'CF_HADOOP_DEFAULT_FS' not in os.environ) \
else os.environ['CF_HADOOP_DEFAULT_FS'] + path
|
Update python script util to detect if paths are already fully qualified
|
Update python script util to detect if paths are already fully qualified
|
Python
|
apache-2.0
|
ggear/cloudera-framework,ggear/cloudera-framework,ggear/cloudera-framework
|
16c8f23cd6ad9f9a10592bb40d1a18eb2c673d34
|
common.py
|
common.py
|
import mechanize
import os
class McGillException(Exception):
pass
urls = {
'login': 'twbkwbis.P_WWWLogin',
'transcript': 'bzsktran.P_Display_Form?user_type=S&tran_type=V'
}
_base_url = 'https://banweb.mcgill.ca/pban1/%s'
urls = {k: _base_url % v for k,v in urls.items()}
browser = mechanize.Browser()
def login(sid=None, pin=None):
if sid is None:
sid = os.environ.get('MCGILL_SID', None)
if pin is None:
pin = os.environ.get('MCGILL_PIN', None)
if sid is None or pin is None:
raise McGillException('McGill ID or PIN not provided.')
browser.open(urls['login'])
browser.select_form('loginform')
browser['sid'] = sid
browser['PIN'] = pin
response = browser.submit()
if 'Authorization Failure' in response.read():
raise McGillException('Invalid McGill ID or PIN.')
|
import mechanize
import os
class error(Exception):
pass
urls = {
'login': 'twbkwbis.P_WWWLogin',
'transcript': 'bzsktran.P_Display_Form?user_type=S&tran_type=V'
}
_base_url = 'https://banweb.mcgill.ca/pban1/%s'
urls = {k: _base_url % v for k,v in urls.items()}
browser = mechanize.Browser()
def login(sid=None, pin=None):
if sid is None:
sid = os.environ.get('MCGILL_SID', None)
if pin is None:
pin = os.environ.get('MCGILL_PIN', None)
if sid is None or pin is None:
raise error('McGill ID or PIN not provided.')
browser.open(urls['login'])
browser.select_form('loginform')
browser['sid'] = sid
browser['PIN'] = pin
response = browser.submit()
if 'Authorization Failure' in response.read():
raise error('Invalid McGill ID or PIN.')
|
Rename McGillException to error (mcgill.error)
|
Rename McGillException to error (mcgill.error)
|
Python
|
mit
|
isbadawi/minerva
|
f158cecd2e5155a0e8bb2e04d097db5a7b146836
|
pitchfork/config/config.example.py
|
pitchfork/config/config.example.py
|
# Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
MONGO_HOST = 'localhost'
MONGO_PORT = 27017
MONGO_KWARGS = {'tz_aware': True}
MONGO_DATABASE = 'pitchfork'
ADMIN_USERNAME = 'cloud_username'
ADMIN_NAME = 'Admin Full Name'
ADMIN_EMAIL = 'Admin Email'
SECRET_KEY = 'secret_key_for_cookie'
|
# Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
If you are running the application within docker using the provided
Dockerfile and docker-compose then you will need to change the MONGO_HOST
option to use the correct container.
import os
MONGO_HOST = os.environ['PITCHFORK_DB_1_PORT_27017_TCP_ADDR']
"""
MONGO_HOST = 'localhost'
MONGO_PORT = 27017
MONGO_KWARGS = {'tz_aware': True}
MONGO_DATABASE = 'pitchfork'
ADMIN_USERNAME = 'cloud_username'
ADMIN_NAME = 'Admin Full Name'
ADMIN_EMAIL = 'Admin Email'
SECRET_KEY = 'secret_key_for_cookie'
|
Add in helper documenation to assist with docker container run
|
Add in helper documenation to assist with docker container run
|
Python
|
apache-2.0
|
rackerlabs/pitchfork,rackerlabs/pitchfork,oldarmyc/pitchfork,rackerlabs/pitchfork,oldarmyc/pitchfork,oldarmyc/pitchfork
|
21e47557da10e1f4bb14e32d15194bf95211882a
|
python/getmonotime.py
|
python/getmonotime.py
|
import getopt, sys
if __name__ == '__main__':
sippy_path = None
try:
opts, args = getopt.getopt(sys.argv[1:], 's:S:i:o:b')
except getopt.GetoptError:
usage()
for o, a in opts:
if o == '-S':
sippy_path = a.strip()
continue
if sippy_path != None:
sys.path.insert(0, sippy_path)
from sippy.Time.clock_dtime import clock_getdtime, CLOCK_MONOTONIC
print clock_getdtime(CLOCK_MONOTONIC)
|
import getopt, sys
if __name__ == '__main__':
sippy_path = None
try:
opts, args = getopt.getopt(sys.argv[1:], 'rS:')
except getopt.GetoptError:
usage()
out_realtime = False
for o, a in opts:
if o == '-S':
sippy_path = a.strip()
continue
if o == '-r':
out_realtime = True
if sippy_path != None:
sys.path.insert(0, sippy_path)
from sippy.Time.clock_dtime import clock_getdtime, CLOCK_MONOTONIC
if not out_realtime:
print(clock_getdtime(CLOCK_MONOTONIC))
else:
from sippy.Time.clock_dtime import CLOCK_REALTIME
print("%f %f" % (clock_getdtime(CLOCK_MONOTONIC), clock_getdtime(CLOCK_REALTIME)))
|
Add an option to also output realtime along with monotime.
|
Add an option to also output realtime along with monotime.
|
Python
|
bsd-2-clause
|
sippy/rtpproxy,jevonearth/rtpproxy,dsanders11/rtpproxy,sippy/rtpproxy,synety-jdebp/rtpproxy,jevonearth/rtpproxy,dsanders11/rtpproxy,jevonearth/rtpproxy,jevonearth/rtpproxy,synety-jdebp/rtpproxy,synety-jdebp/rtpproxy,sippy/rtpproxy,dsanders11/rtpproxy,synety-jdebp/rtpproxy
|
5bd17a3088c2d1958d86efc4411b575c123e6275
|
tests/functional/test_l10n.py
|
tests/functional/test_l10n.py
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import random
import pytest
from ..pages.home import HomePage
@pytest.mark.nondestructive
def test_change_language(base_url, selenium):
page = HomePage(base_url, selenium).open()
initial = page.footer.language
# avoid selecting the same language or locales that have homepage redirects
excluded = [initial, 'ja', 'zh-TW', 'zh-CN']
available = [l for l in page.footer.languages if l not in excluded]
new = random.choice(available)
page.footer.select_language(new)
assert new in selenium.current_url, 'Language is not in URL'
assert new == page.footer.language, 'Language has not been selected'
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import random
import pytest
from ..pages.home import HomePage
@pytest.mark.nondestructive
def test_change_language(base_url, selenium):
page = HomePage(base_url, selenium).open()
initial = page.footer.language
# avoid selecting the same language or locales that have homepage redirects
excluded = [initial, 'ja', 'ja-JP-mac', 'zh-TW', 'zh-CN']
available = [l for l in page.footer.languages if l not in excluded]
new = random.choice(available)
page.footer.select_language(new)
assert new in selenium.current_url, 'Language is not in URL'
assert new == page.footer.language, 'Language has not been selected'
|
Exclude ja-JP-mac on homepage language select functional test
|
Exclude ja-JP-mac on homepage language select functional test
|
Python
|
mpl-2.0
|
hoosteeno/bedrock,flodolo/bedrock,gauthierm/bedrock,gauthierm/bedrock,alexgibson/bedrock,sgarrity/bedrock,l-hedgehog/bedrock,mkmelin/bedrock,flodolo/bedrock,Sancus/bedrock,analytics-pros/mozilla-bedrock,glogiotatidis/bedrock,gauthierm/bedrock,craigcook/bedrock,mozilla/bedrock,alexgibson/bedrock,mermi/bedrock,hoosteeno/bedrock,pascalchevrel/bedrock,mermi/bedrock,CSCI-462-01-2017/bedrock,flodolo/bedrock,craigcook/bedrock,Sancus/bedrock,jgmize/bedrock,davehunt/bedrock,alexgibson/bedrock,gerv/bedrock,CSCI-462-01-2017/bedrock,gauthierm/bedrock,sgarrity/bedrock,sylvestre/bedrock,sylvestre/bedrock,TheoChevalier/bedrock,jpetto/bedrock,davehunt/bedrock,analytics-pros/mozilla-bedrock,l-hedgehog/bedrock,jpetto/bedrock,flodolo/bedrock,gerv/bedrock,schalkneethling/bedrock,glogiotatidis/bedrock,TheJJ100100/bedrock,hoosteeno/bedrock,TheJJ100100/bedrock,CSCI-462-01-2017/bedrock,mozilla/bedrock,pascalchevrel/bedrock,sylvestre/bedrock,kyoshino/bedrock,kyoshino/bedrock,davehunt/bedrock,mozilla/bedrock,schalkneethling/bedrock,kyoshino/bedrock,Sancus/bedrock,TheJJ100100/bedrock,CSCI-462-01-2017/bedrock,Sancus/bedrock,mkmelin/bedrock,MichaelKohler/bedrock,ericawright/bedrock,MichaelKohler/bedrock,hoosteeno/bedrock,glogiotatidis/bedrock,TheoChevalier/bedrock,jpetto/bedrock,glogiotatidis/bedrock,l-hedgehog/bedrock,schalkneethling/bedrock,kyoshino/bedrock,TheoChevalier/bedrock,pascalchevrel/bedrock,mermi/bedrock,sylvestre/bedrock,davehunt/bedrock,alexgibson/bedrock,mozilla/bedrock,gerv/bedrock,craigcook/bedrock,ericawright/bedrock,mkmelin/bedrock,TheoChevalier/bedrock,jgmize/bedrock,analytics-pros/mozilla-bedrock,ericawright/bedrock,schalkneethling/bedrock,TheJJ100100/bedrock,jgmize/bedrock,mkmelin/bedrock,jgmize/bedrock,MichaelKohler/bedrock,gerv/bedrock,analytics-pros/mozilla-bedrock,l-hedgehog/bedrock,mermi/bedrock,MichaelKohler/bedrock,sgarrity/bedrock,jpetto/bedrock,ericawright/bedrock,pascalchevrel/bedrock,sgarrity/bedrock,craigcook/bedrock
|
62febcd8d6fcefdf2db3f411807fcf96c91228b8
|
tests/example_app.py
|
tests/example_app.py
|
# -*- coding: utf-8 -*-
"""
example app to test running an app as subprocess within pty
"""
from __future__ import print_function, unicode_literals
import time
# http://stackoverflow.com/questions/287871/print-in-terminal-with-colors-using-python
class bcolors:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
def main():
print('hi, there!')
time.sleep(0.010)
print('let\'s get to know each other better...')
name = raw_input("Please enter your name: ")
print(bcolors.BOLD + bcolors.UNDERLINE + 'Hi %s, have a nice day!' % name +
bcolors.ENDC)
print('It was a pleasure talking to you...')
if __name__ == '__main__':
main()
|
# -*- coding: utf-8 -*-
"""
example app to test running an app as subprocess within pty
"""
from __future__ import print_function, unicode_literals
import sys, time
PY3 = sys.version_info[0] >= 3
if PY3:
raw_input = input
# http://stackoverflow.com/questions/287871/print-in-terminal-with-colors-using-python
class bcolors:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
def main():
print('hi, there!')
time.sleep(0.010)
print('let\'s get to know each other better...')
name = raw_input("Please enter your name: ")
print(bcolors.BOLD + bcolors.UNDERLINE + 'Hi %s, have a nice day!' % name +
bcolors.ENDC)
print('It was a pleasure talking to you...')
if __name__ == '__main__':
main()
|
Define raw_input as input under Python 3
|
Define raw_input as input under Python 3
|
Python
|
mit
|
finklabs/inquirer,finklabs/whaaaaat
|
a0f09e23dd19f0cf223034f9b787a4f038cd995d
|
testsuite/driver/my_typing.py
|
testsuite/driver/my_typing.py
|
"""
This module provides some type definitions and backwards compatibility shims
for use in the testsuite driver.
The testsuite driver can be typechecked using mypy [1].
[1] http://mypy-lang.org/
"""
try:
from typing import *
import typing
except:
# The backwards compatibility stubs must live in another module lest
# mypy complains.
from typing_stubs import * # type: ignore
####################################################
# Backwards compatibility shims
#
# N.B. mypy appears to typecheck as though the "then" clause of if structures
# is taken. We exploit this below.
# TextIO is missing on some older Pythons.
if 'TextIO' in globals():
TextIO = typing.TextIO
else:
TextIO = None # type: ignore
####################################################
# Testsuite-specific types
WayName = NewType("WayName", str)
TestName = NewType("TestName", str)
OutputNormalizer = Callable[[str], str]
IssueNumber = NewType("IssueNumber", int)
# Used by perf_notes
GitHash = NewType("GitHash", str)
GitRef = NewType("GitRef", str)
TestEnv = NewType("TestEnv", str)
MetricName = NewType("MetricName", str)
|
"""
This module provides some type definitions and backwards compatibility shims
for use in the testsuite driver.
The testsuite driver can be typechecked using mypy [1].
[1] http://mypy-lang.org/
"""
try:
from typing import *
import typing
except:
# The backwards compatibility stubs must live in another module lest
# mypy complains.
from typing_stubs import * # type: ignore
####################################################
# Backwards compatibility shims
#
# N.B. mypy appears to typecheck as though the "then" clause of if structures
# is taken. We exploit this below.
# TextIO is missing on some older Pythons.
if 'TextIO' not in globals():
try:
TextIO = typing.TextIO
except ImportError:
TextIO = None # type: ignore
else:
TextIO = None # type: ignore
####################################################
# Testsuite-specific types
WayName = NewType("WayName", str)
TestName = NewType("TestName", str)
OutputNormalizer = Callable[[str], str]
IssueNumber = NewType("IssueNumber", int)
# Used by perf_notes
GitHash = NewType("GitHash", str)
GitRef = NewType("GitRef", str)
TestEnv = NewType("TestEnv", str)
MetricName = NewType("MetricName", str)
|
Simplify Python <3.5 fallback for TextIO
|
testsuite: Simplify Python <3.5 fallback for TextIO
(cherry picked from commit d092d8598694c23bc07cdcc504dff52fa5f33be1)
|
Python
|
bsd-3-clause
|
sdiehl/ghc,sdiehl/ghc,sdiehl/ghc,sdiehl/ghc,sdiehl/ghc,sdiehl/ghc,sdiehl/ghc
|
63fd5cf2c05f1b3e343e315184d99c3b46ed0a33
|
Functions/Leave.py
|
Functions/Leave.py
|
'''
Created on Dec 20, 2011
@author: Tyranic-Moron
'''
from IRCMessage import IRCMessage
from IRCResponse import IRCResponse, ResponseType
from Function import Function
import GlobalVars
import re
class Instantiate(Function):
Help = "leave/gtfo - makes the bot leave the current channel"
def GetResponse(self, HubbeBot, message):
if message.Type != 'PRIVMSG':
return
match = re.search('^leave|gtfo$', message.Command, re.IGNORECASE)
if not match:
return
if message.User.Name not in GlobalVars.admins:
return IRCResponse(ResponseType.Say, 'Only my admins can tell me to %s' % message.Command, message.ReplyTo)
if len(message.ParameterList) > 0:
return IRCResponse(ResponseType.Raw, 'PART %s :%s' % (message.ReplyTo, message.Parameters), '')
else:
return IRCResponse(ResponseType.Raw, 'PART %s :toodles!' % (message.ReplyTo), '')
|
'''
Created on Dec 20, 2011
@author: Tyranic-Moron
'''
from IRCMessage import IRCMessage
from IRCResponse import IRCResponse, ResponseType
from Function import Function
import GlobalVars
import re
class Instantiate(Function):
Help = "leave/gtfo - makes the bot leave the current channel"
def GetResponse(self, HubbeBot, message):
if message.Type != 'PRIVMSG':
return
match = re.search('^leave|gtfo$', message.Command, re.IGNORECASE)
if not match:
return
if message.User.Name not in GlobalVars.admins:
return IRCResponse(ResponseType.Say, 'Only my admins can tell me to %s' % message.Command, message.ReplyTo)
if len(message.ParameterList) > 0:
HubbeBot.channels.remove(message.ReplyTo)
return IRCResponse(ResponseType.Raw, 'PART %s :%s' % (message.ReplyTo, message.Parameters), '')
else:
HubbeBot.channels.remove(message.ReplyTo)
return IRCResponse(ResponseType.Raw, 'PART %s :toodles!' % (message.ReplyTo), '')
|
Update list of channels when leaving one.
|
Update list of channels when leaving one.
|
Python
|
mit
|
HubbeKing/Hubbot_Twisted
|
0aa9b8e4cb9cf542a0eaed81664d6c2bb310c19d
|
enthought/traits/ui/editors/date_editor.py
|
enthought/traits/ui/editors/date_editor.py
|
#------------------------------------------------------------------------------
#
# Copyright (c) 2008, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
#
# Author: Judah De Paula
# Date: 10/7/2008
#
#------------------------------------------------------------------------------
"""
A Traits UI editor that wraps a WX calendar panel.
"""
from enthought.traits.trait_types import Bool, Int
from enthought.traits.ui.editor_factory import EditorFactory
#-- DateEditor definition -----------------------------------------------------
class DateEditor ( EditorFactory ):
"""
Editor factory for date/time editors.
"""
#---------------------------------------------------------------------------
# Trait definitions:
#---------------------------------------------------------------------------
# Is multiselect enabled for a CustomEditor?
# True: Must be a List of Dates. False: A Date instance.
multi_select = Bool(False)
# Should users be able to pick future dates when using the CustomEditor?
allow_future = Bool(True)
# How many months to show at a time.
months = Int(3)
#-- end DateEditor definition -------------------------------------------------
#-- eof -----------------------------------------------------------------------
|
#------------------------------------------------------------------------------
#
# Copyright (c) 2008, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
#
# Author: Judah De Paula
# Date: 10/7/2008
#
#------------------------------------------------------------------------------
"""
A Traits UI editor that wraps a WX calendar panel.
"""
from enthought.traits.trait_types import Bool, Int
from enthought.traits.ui.editor_factory import EditorFactory
#-- DateEditor definition -----------------------------------------------------
class DateEditor ( EditorFactory ):
"""
Editor factory for date/time editors.
"""
#---------------------------------------------------------------------------
# Trait definitions:
#---------------------------------------------------------------------------
#-- CustomEditor traits ----------------------------------------------------
# True: Must be a List of Dates. False: A Date instance.
multi_select = Bool(False)
# Should users be able to pick future dates when using the CustomEditor?
allow_future = Bool(True)
# How many months to show at a time.
months = Int(3)
# How much space to put between the individual months.
padding = Int(5)
#-- end DateEditor definition -------------------------------------------------
#-- eof -----------------------------------------------------------------------
|
Add inter-month padding trait to factory.
|
Add inter-month padding trait to factory.
|
Python
|
bsd-3-clause
|
burnpanck/traits,burnpanck/traits
|
67ab3d4565fe2467c2eb58e77176fd0ccf7d1d65
|
estmator_project/estmator_project/views.py
|
estmator_project/estmator_project/views.py
|
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse, HttpResponseNotAllowed
from django.shortcuts import render
from django.views.generic import TemplateView
from est_client.forms import ClientCreateForm
from est_quote.forms import QuoteCreateForm, ClientListForm
class IndexView(TemplateView):
template_name = 'index.html'
def get_context_data(self, **kwargs):
context = super(IndexView, self).get_context_data(**kwargs)
return context
class AboutView(TemplateView):
template_name = 'about.html'
def get_context_data(self, **kwargs):
context = super(AboutView, self).get_context_data(**kwargs)
return context
@login_required
def menu_view(request):
if request.method == 'GET':
client_form = ClientCreateForm()
quote_form = QuoteCreateForm()
client_list_form = ClientListForm()
context = {
'client_form': client_form.as_p,
'quote_form': quote_form.as_p,
'client_list_form': client_list_form.as_p
}
return render(
request, 'menu.html', context
)
else:
return HttpResponseNotAllowed(['GET', 'POST'])
def connection_test(request):
return HttpResponse(status=204)
|
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse, HttpResponseNotAllowed
from django.shortcuts import render
from django.views.generic import TemplateView
from est_client.forms import (ClientCreateForm, CompanyCreateForm,
CompanyListForm)
from est_quote.forms import QuoteCreateForm, ClientListForm
class IndexView(TemplateView):
template_name = 'index.html'
def get_context_data(self, **kwargs):
context = super(IndexView, self).get_context_data(**kwargs)
return context
class AboutView(TemplateView):
template_name = 'about.html'
def get_context_data(self, **kwargs):
context = super(AboutView, self).get_context_data(**kwargs)
return context
@login_required
def menu_view(request):
if request.method == 'GET':
quote_form = QuoteCreateForm()
client_form = ClientCreateForm()
client_list_form = ClientListForm()
company_form = CompanyCreateForm()
company_list_form = CompanyListForm()
context = {
'quote_form': quote_form.as_p,
'client_form': client_form.as_p,
'client_list_form': client_list_form.as_p,
'company_form': company_form.as_p,
'company_list_form': company_list_form.as_p
}
return render(
request, 'menu.html', context
)
else:
return HttpResponseNotAllowed(['GET', 'POST'])
def connection_test(request):
return HttpResponse(status=204)
|
Add new company forms to menu view
|
Add new company forms to menu view
|
Python
|
mit
|
Estmator/EstmatorApp,Estmator/EstmatorApp,Estmator/EstmatorApp
|
72c4a1285eac5f70dfae243e911e0ab3c540d870
|
sconsole/static.py
|
sconsole/static.py
|
'''
Holds static data components, like the palette
'''
def get_palette(theme='std'):
'''
Return the preferred palette theme
Themes:
std
The standard theme used by the console
'''
if theme == 'bright':
return [
('banner', 'white', 'dark blue')
]
else:
return [
('banner', 'white', 'dark blue')
]
|
'''
Holds static data components, like the palette
'''
def msg(msg, logfile='console_log.txt'):
'''
Send a message to a logfile, defaults to console_log.txt.
This is useful to replace a print statement since curses does put
a bit of a damper on this
'''
with open(logfile, 'a+') as fp_:
fp_.write(str(msg))
def get_palette(theme='std'):
'''
Return the preferred palette theme
Themes:
std
The standard theme used by the console
'''
if theme == 'bright':
return [
('banner', 'white', 'dark blue')
]
else:
return [
('banner', 'white', 'dark blue')
]
|
Add a logmsg utility function
|
Add a logmsg utility function
|
Python
|
apache-2.0
|
saltstack/salt-console
|
f0a143065981d2dcee9ceacd3c2f30cfeb073025
|
tx_salaries/search_indexes.py
|
tx_salaries/search_indexes.py
|
from haystack import indexes
from tx_salaries.models import Employee
class EmployeeIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
content_auto = indexes.EdgeNgramField(model_attr='position__person__name')
compensation = indexes.FloatField(model_attr='compensation', null=True)
title = indexes.CharField(model_attr='title__name', faceted=True)
title_slug = indexes.CharField(model_attr='title__stats__slug', faceted=True)
department = indexes.CharField(model_attr='position__organization__name', faceted=True)
department_slug = indexes.CharField(model_attr='position__organization__stats__slug')
entity = indexes.CharField(model_attr='position__organization__parent__name', faceted=True)
entity_slug = indexes.CharField(model_attr='position__organization__parent__stats__slug')
def get_model(self):
return Employee
|
from haystack import indexes
from tx_salaries.models import Employee
class EmployeeIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
content_auto = indexes.EdgeNgramField(model_attr='position__person__name')
compensation = indexes.FloatField(model_attr='compensation', null=True)
title = indexes.CharField(model_attr='position__post__label', faceted=True)
title_slug = indexes.CharField(model_attr='position__post__stats__slug', faceted=True)
department = indexes.CharField(model_attr='position__organization__name', faceted=True)
department_slug = indexes.CharField(model_attr='position__organization__stats__slug')
entity = indexes.CharField(model_attr='position__organization__parent__name', faceted=True)
entity_slug = indexes.CharField(model_attr='position__organization__parent__stats__slug')
def get_model(self):
return Employee
|
Index posts instead of employee titles to fix search results
|
Index posts instead of employee titles to fix search results
|
Python
|
apache-2.0
|
texastribune/tx_salaries,texastribune/tx_salaries
|
7462a6da95443606c4219a6b2ccb8cf422f94037
|
orges/test/integration/test_main.py
|
orges/test/integration/test_main.py
|
from nose.tools import eq_
from orges.invoker.simple import SimpleInvoker
from orges.invoker.pluggable import PluggableInvoker
from orges.main import custom_optimize
from orges.optimizer.gridsearchoptimizer import GridSearchOptimizer
import orges.test.utils as utils
def test_custom_optimize_running_too_long_aborts():
invoker = PluggableInvoker(None, invoker=SimpleInvoker(None))
optimizer = GridSearchOptimizer()
f = utils.one_param_sleep_and_negate_f
val = custom_optimize(f, optimizer=optimizer, invoker=invoker, timeout=1)[1]
# f(a=0) is 0, f(a=1) is -1. Because of the timeout we never see a=1, hence
# we except the minimum before the timeout to be 0.
eq_(val, 0)
if __name__ == '__main__':
import nose
nose.runmodule()
|
from nose.tools import eq_
# from orges.invoker.simple import SimpleInvoker
from orges.invoker.multiprocess import MultiProcessInvoker
from orges.invoker.pluggable import PluggableInvoker
from orges.main import custom_optimize
from orges.optimizer.gridsearchoptimizer import GridSearchOptimizer
import orges.test.utils as utils
def test_custom_optimize_running_too_long_aborts():
invoker = PluggableInvoker(None, invoker=MultiProcessInvoker())
optimizer = GridSearchOptimizer()
f = utils.one_param_sleep_and_negate_f
val = custom_optimize(f, optimizer=optimizer, invoker=invoker, timeout=1)[1]
# f(a=0) is 0, f(a=1) is -1. Because of the timeout we never see a=1, hence
# we except the minimum before the timeout to be 0.
eq_(val, 0)
if __name__ == '__main__':
import nose
nose.runmodule()
|
Change test to use MultiProcessInvoker
|
Change test to use MultiProcessInvoker
|
Python
|
bsd-3-clause
|
cigroup-ol/metaopt,cigroup-ol/metaopt,cigroup-ol/metaopt
|
916441874a3bca016e950557230f7b3a84b3ee97
|
packages/grid/backend/grid/utils.py
|
packages/grid/backend/grid/utils.py
|
# stdlib
from typing import Any
from typing import Optional
# third party
from nacl.signing import SigningKey
# syft absolute
from syft import flags
from syft.core.common.message import SyftMessage
from syft.core.io.address import Address
from syft.core.node.common.action.exception_action import ExceptionMessage
from syft.lib.python.dict import Dict
# grid absolute
from grid.core.node import get_client
def send_message_with_reply(
signing_key: SigningKey,
message_type: SyftMessage,
address: Optional[Address] = None,
reply_to: Optional[Address] = None,
**content: Any
) -> Dict:
client = get_client(signing_key)
if address is None:
address = client.address
if reply_to is None:
reply_to = client.address
if flags.USE_NEW_SERVICE:
msg = message_type(address=address, reply_to=reply_to, kwargs=content).sign(
signing_key=signing_key
)
reply = client.send_immediate_msg_with_reply(msg=msg)
reply = reply.payload
else:
msg = message_type(address=address, reply_to=reply_to, **content)
reply = client.send_immediate_msg_with_reply(msg=msg)
check_if_syft_reply_is_exception(reply)
return reply
def check_if_syft_reply_is_exception(reply: Dict) -> None:
if isinstance(reply, ExceptionMessage):
raise Exception(reply.exception_msg)
|
# stdlib
from typing import Any
from typing import Optional
# third party
from nacl.signing import SigningKey
# syft absolute
from syft.core.common.message import SyftMessage
from syft.core.io.address import Address
from syft.core.node.common.action.exception_action import ExceptionMessage
from syft.lib.python.dict import Dict
# grid absolute
from grid.core.node import node
def send_message_with_reply(
signing_key: SigningKey,
message_type: SyftMessage,
address: Optional[Address] = None,
reply_to: Optional[Address] = None,
**content: Any
) -> Dict:
if not address:
address = node.address
if not reply_to:
reply_to = node.address
# if flags.USE_NEW_SERVICE:
msg = message_type(address=address, reply_to=reply_to, kwargs=content).sign(
signing_key=signing_key
)
reply = node.recv_immediate_msg_with_reply(msg=msg)
reply = reply.message
check_if_syft_reply_is_exception(reply)
reply = reply.payload
return reply
def check_if_syft_reply_is_exception(reply: Dict) -> None:
if isinstance(reply, ExceptionMessage):
raise Exception(reply.exception_msg)
|
Replace SyftClient calls -> Direct Node calls - This avoids to build client ast for every single request making the backend faster
|
Replace SyftClient calls -> Direct Node calls
- This avoids to build client ast for every single request
making the backend faster
|
Python
|
apache-2.0
|
OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft
|
51b1f612ab8058da89cc8aaa6b1db99139c7eda0
|
versions/settings.py
|
versions/settings.py
|
from django.conf import settings
from django.utils import importlib
def import_from_string(val, setting_name):
"""
Attempt to import a class from a string representation.
Based on the method of the same name in Django Rest Framework.
"""
try:
parts = val.split('.')
module_path, class_name = '.'.join(parts[:-1]), parts[-1]
module = importlib.import_module(module_path)
return getattr(module, class_name)
except ImportError as e:
raise ImportError("Could not import '{}' for CleanerVersion setting '{}'. {}: {}.".format(
(val, setting_name, e.__class__.__name__, e)))
_cache = {}
def get_versioned_delete_collector_class():
"""
Gets the class to use for deletion collection.
This is done as a method instead of just defining a module-level variable because
Django doesn't like attributes of the django.conf.settings object to be accessed
in top-level module scope.
:return: class
"""
key = 'VERSIONED_DELETE_COLLECTOR'
try:
cls = _cache[key]
except KeyError:
cls = import_from_string(getattr(settings, key, 'versions.deletion.VersionedCollector'), key)
_cache[key] = cls
return cls
|
from django.conf import settings
import importlib
def import_from_string(val, setting_name):
"""
Attempt to import a class from a string representation.
Based on the method of the same name in Django Rest Framework.
"""
try:
parts = val.split('.')
module_path, class_name = '.'.join(parts[:-1]), parts[-1]
module = importlib.import_module(module_path)
return getattr(module, class_name)
except ImportError as e:
raise ImportError("Could not import '{}' for CleanerVersion setting '{}'. {}: {}.".format(
(val, setting_name, e.__class__.__name__, e)))
_cache = {}
def get_versioned_delete_collector_class():
"""
Gets the class to use for deletion collection.
This is done as a method instead of just defining a module-level variable because
Django doesn't like attributes of the django.conf.settings object to be accessed
in top-level module scope.
:return: class
"""
key = 'VERSIONED_DELETE_COLLECTOR'
try:
cls = _cache[key]
except KeyError:
cls = import_from_string(getattr(settings, key, 'versions.deletion.VersionedCollector'), key)
_cache[key] = cls
return cls
|
Use python 2.7+ standard importlib instead of deprecated django importlib
|
Use python 2.7+ standard importlib instead of deprecated django importlib
|
Python
|
apache-2.0
|
swisscom/cleanerversion,anfema/cleanerversion,anfema/cleanerversion,pretix/cleanerversion,pretix/cleanerversion,swisscom/cleanerversion,swisscom/cleanerversion,pretix/cleanerversion,anfema/cleanerversion
|
befce70e7931f5949a7db10af4bae2cb4c21ba08
|
localore/people/wagtail_hooks.py
|
localore/people/wagtail_hooks.py
|
from wagtailmodeladmin.options import ModelAdmin, wagtailmodeladmin_register
from .models import Person
class PeopleAdmin(ModelAdmin):
model = Person
menu_icon = 'group'
menu_label = 'Team'
menu_order = 300
list_display = ('first_name', 'last_name', 'production', 'role')
list_filter = ('role', 'production')
search_fields = ('first_name', 'last_name', 'biography')
wagtailmodeladmin_register(PeopleAdmin)
|
from wagtailmodeladmin.options import ModelAdmin, wagtailmodeladmin_register
from .models import Person
class PeopleAdmin(ModelAdmin):
model = Person
menu_icon = 'group'
menu_label = 'Team'
menu_order = 300
list_display = ('first_name', 'last_name', 'production', 'role')
list_filter = ('role', 'production')
ordering = ('-production',)
search_fields = ('first_name', 'last_name', 'biography')
wagtailmodeladmin_register(PeopleAdmin)
|
Order people by associated production.
|
Order people by associated production.
|
Python
|
mpl-2.0
|
ghostwords/localore,ghostwords/localore,ghostwords/localore
|
19f86127b5c1b738684f493354b2f532f0f58634
|
tests/test-recipes/metadata/always_include_files_glob/run_test.py
|
tests/test-recipes/metadata/always_include_files_glob/run_test.py
|
import os
import sys
import json
def main():
prefix = os.environ['PREFIX']
info_file = os.path.join(prefix, 'conda-meta',
'always_include_files_regex-0.1-0.json')
with open(info_file, 'r') as fh:
info = json.load(fh)
if sys.platform == 'darwin':
assert sorted(info['files']) == [u'lib/libpng.dylib', u'lib/libpng16.16.dylib', u'lib/libpng16.dylib']
elif sys.platform == 'linux2':
assert sorted(info['files']) == ['lib/libpng.so', 'lib/libpng16.so', 'lib/libpng16.so.16', 'lib/libpng16.so.16.17.0']
if __name__ == '__main__':
main()
|
import os
import sys
import json
def main():
prefix = os.environ['PREFIX']
info_file = os.path.join(prefix, 'conda-meta',
'always_include_files_regex-0.1-0.json')
with open(info_file, 'r') as fh:
info = json.load(fh)
if sys.platform == 'darwin':
assert sorted(info['files']) == ['lib/libpng.dylib', 'lib/libpng16.16.dylib', 'lib/libpng16.dylib']
elif sys.platform.startswith('linux'):
assert sorted(info['files']) == ['lib/libpng.so', 'lib/libpng16.so', 'lib/libpng16.so.16', 'lib/libpng16.so.16.17.0']
if __name__ == '__main__':
main()
|
Check for sys.platform == linux, not linux2
|
Check for sys.platform == linux, not linux2
|
Python
|
bsd-3-clause
|
sandhujasmine/conda-build,shastings517/conda-build,rmcgibbo/conda-build,mwcraig/conda-build,sandhujasmine/conda-build,sandhujasmine/conda-build,rmcgibbo/conda-build,shastings517/conda-build,dan-blanchard/conda-build,dan-blanchard/conda-build,ilastik/conda-build,mwcraig/conda-build,shastings517/conda-build,ilastik/conda-build,frol/conda-build,frol/conda-build,ilastik/conda-build,frol/conda-build,dan-blanchard/conda-build,mwcraig/conda-build,rmcgibbo/conda-build
|
8cb4f5d8879c573a4fe690c4f53c2b0a99d18d69
|
nbresuse/handlers.py
|
nbresuse/handlers.py
|
import os
import json
import psutil
from notebook.utils import url_path_join
from notebook.base.handlers import IPythonHandler
def get_metrics():
cur_process = psutil.Process()
all_processes = [cur_process] + cur_process.children(recursive=True)
rss = sum([p.memory_info().rss for p in all_processes])
return {
'rss': rss,
'limits': {
'memory': int(os.environ.get('MEM_LIMIT', None))
}
}
class MetricsHandler(IPythonHandler):
def get(self):
self.finish(json.dumps(get_metrics()))
def setup_handlers(web_app):
route_pattern = url_path_join(web_app.settings['base_url'], '/metrics')
web_app.add_handlers('.*', [(route_pattern, MetricsHandler)])
|
import os
import json
import psutil
from notebook.utils import url_path_join
from notebook.base.handlers import IPythonHandler
def get_metrics():
cur_process = psutil.Process()
all_processes = [cur_process] + cur_process.children(recursive=True)
rss = sum([p.memory_info().rss for p in all_processes])
mem_limit = os.environ.get('MEM_LIMIT', None)
if mem_limit is not None:
mem_limit = int(mem_limit)
return {
'rss': rss,
'limits': {
'memory': mem_limit
}
}
class MetricsHandler(IPythonHandler):
def get(self):
self.finish(json.dumps(get_metrics()))
def setup_handlers(web_app):
route_pattern = url_path_join(web_app.settings['base_url'], '/metrics')
web_app.add_handlers('.*', [(route_pattern, MetricsHandler)])
|
Handle case of memory limit not set.
|
Handle case of memory limit not set.
|
Python
|
bsd-2-clause
|
yuvipanda/nbresuse,yuvipanda/nbresuse
|
3fa14c2c1663fe04301b4f98cc59f2d385d8f876
|
config/database.py
|
config/database.py
|
databases = {
'mysql': {
'driver': 'mysql',
'host': '',
'database': '',
'user': '',
'password': '',
'prefix': ''
}
}
|
databases = {
'mysql': {
'driver': 'mysql',
'host': '127.0.0.1',
'database': 'salam_bot',
'user': 'root',
'password': '',
'prefix': ''
}
}
|
Change db configs to default for CI
|
Change db configs to default for CI
|
Python
|
mit
|
erjanmx/salam-bot
|
15dd5b534e8c16c78195739eb78cc1e271564542
|
symcalc.py
|
symcalc.py
|
from sympy.abc import *
from flask import Flask, request
app = Flask(__name__)
@app.route('/code', methods=['GET', 'POST'])
def code():
return str(eval(request.json['code']))
if __name__ == "__main__":
app.run(debug=True, port=80)
|
from sympy.abc import *
from sympy.core.symbol import Symbol
while True:
try:
line = input('')
print()
if '=' in line:
exec(line)
else:
_ = eval(line)
print(_)
except EOFError:
continue
except Exception as e:
print('Error:', e)
|
Use local console input to drive the calc
|
Use local console input to drive the calc
|
Python
|
mit
|
boppreh/symcalc
|
13ba81df82f2c43838066ec9cd0fa1222324349f
|
srsly/util.py
|
srsly/util.py
|
# coding: utf8
from __future__ import unicode_literals
from pathlib import Path
import sys
def force_path(location, require_exists=True):
if not isinstance(location, Path):
location = Path(location)
if require_exists and not location.exists():
raise ValueError("Can't read file: {}".format(location))
return location
def force_string(location):
if sys.version_info[0] == 2: # Python 2
return str(location).decode("utf8")
return str(location)
|
# coding: utf8
from __future__ import unicode_literals
from pathlib import Path
import sys
is_python2 = sys.version_info[0] == 2
is_python3 = sys.version_info[0] == 3
if is_python2:
basestring_ = basestring # noqa: F821
else:
basestring_ = str
def force_path(location, require_exists=True):
if not isinstance(location, Path):
location = Path(location)
if require_exists and not location.exists():
raise ValueError("Can't read file: {}".format(location))
return location
def force_string(location):
if isinstance(location, basestring_):
return location
if sys.version_info[0] == 2: # Python 2
return str(location).decode("utf8")
return str(location)
|
Improve compat handling in force_string
|
Improve compat handling in force_string
If we know we already have a string, no need to force it into a strinbg
|
Python
|
mit
|
explosion/srsly,explosion/srsly,explosion/srsly,explosion/srsly
|
74815ade33a3e9e76da43e01e74752ff502e99d1
|
datadict/datadict_utils.py
|
datadict/datadict_utils.py
|
import pandas as pd
def load_datadict(filepath, trim_index=True, trim_all=False):
df = pd.read_csv(filepath, index_col=0)
if trim_index:
df.index = df.index.str.strip()
if trim_all:
df = df.applymap(lambda x: x.strip() if type(x) is str else x)
return df
def insert_rows_at(main_df, index_name, inserted_df, insert_before=False):
# Not checking if index exists because that will be apparent from error
# NOTE: This will not work with duplicate indices
pre_df = main_df.loc[:index_name]
post_df = main_df.loc[index_name:]
# Both pre_ and post_ contain the value at index_name, so one needs to
# drop it
if not insert_before:
pre_df = pre_df.drop(index_name)
else:
post_df = post_df.drop(index_name)
return pd.concat([pre_df, inserted_df, post_df],
axis=0)
|
import pandas as pd
def load_datadict(filepath, trim_index=True, trim_all=False):
df = pd.read_csv(filepath, index_col=0)
if trim_index:
df.index = df.index.to_series().str.strip()
if trim_all:
df = df.applymap(lambda x: x.strip() if type(x) is str else x)
return df
def insert_rows_at(main_df, index_name, inserted_df, insert_before=False):
# Not checking if index exists because that will be apparent from error
# NOTE: This will not work with duplicate indices
pre_df = main_df.loc[:index_name]
post_df = main_df.loc[index_name:]
# Both pre_ and post_ contain the value at index_name, so one needs to
# drop it
if not insert_before:
pre_df = pre_df.drop(index_name)
else:
post_df = post_df.drop(index_name)
return pd.concat([pre_df, inserted_df, post_df],
axis=0)
|
Fix issue where pandas.Index doesn't have str method
|
Fix issue where pandas.Index doesn't have str method
(Index.str was only introduced in pandas 0.19.2)
|
Python
|
bsd-3-clause
|
sibis-platform/ncanda-data-integration,sibis-platform/ncanda-datacore,sibis-platform/ncanda-datacore,sibis-platform/ncanda-datacore,sibis-platform/ncanda-data-integration
|
fc7c52a489a6113b7b26607c42c6f5b38b2feb85
|
manage.py
|
manage.py
|
import os
import coverage
from flask_script import Manager, Shell
from flask_migrate import Migrate, MigrateCommand
from config import basedir
from app import create_app, db
from app.models import User
app = create_app(os.getenv("MYDICTIONARY_CONFIG") or "default")
migrate = Migrate(app, db)
manager = Manager(app)
def make_shell_context():
return dict(app=app, db=db, User=User)
manager.add_command("shell", Shell(make_context=make_shell_context))
manager.add_command("db", MigrateCommand)
cov = coverage.coverage(branch=True, include="app/*")
@manager.command
def test(coverage=False):
""" Run the unit tests. """
if coverage:
cov.start()
import unittest
tests = unittest.TestLoader().discover("tests")
unittest.TextTestRunner(verbosity=2).run(tests)
if coverage:
cov.stop()
cov.save()
print("Coverage Summary:")
cov.report()
cov_dir = os.path.join(basedir, "tmp/coverage")
cov.html_report(directory=cov_dir)
print("HTML version: %s/index.html" % cov_dir)
cov.erase()
if __name__ == "__main__":
manager.run()
|
import os
import coverage
from flask_script import Manager, Shell
from flask_migrate import Migrate, MigrateCommand
from config import basedir
from app import create_app, db
from app.models import User, Dictionary
app = create_app(os.getenv("MYDICTIONARY_CONFIG") or "default")
migrate = Migrate(app, db)
manager = Manager(app)
def make_shell_context():
return dict(app=app, db=db, User=User, Dictionary=Dictionary)
manager.add_command("shell", Shell(make_context=make_shell_context))
manager.add_command("db", MigrateCommand)
cov = coverage.coverage(branch=True, include="app/*")
@manager.command
def test(coverage=False):
""" Run the unit tests. """
if coverage:
cov.start()
import unittest
tests = unittest.TestLoader().discover("tests")
unittest.TextTestRunner(verbosity=2).run(tests)
if coverage:
cov.stop()
cov.save()
print("Coverage Summary:")
cov.report()
cov_dir = os.path.join(basedir, "tmp/coverage")
cov.html_report(directory=cov_dir)
print("HTML version: %s/index.html" % cov_dir)
cov.erase()
if __name__ == "__main__":
manager.run()
|
Add Dictionary to shell context
|
Add Dictionary to shell context
|
Python
|
mit
|
Encrylize/MyDictionary,Encrylize/MyDictionary,Encrylize/MyDictionary
|
24e48a82c627996332a73608d139f9ce8713642d
|
cref/app/web/views.py
|
cref/app/web/views.py
|
import flask
from cref.app.web import app
from cref.app.web.tasks import predict_structure
def success(result):
return flask.jsonify({
'status': 'success',
'retval': result
})
def failure(reason='Unknown'):
return flask.jsonify({
'status': 'failure',
'reason': reason
})
@app.route('/predict/', methods=['POST'])
def predict():
sequence = flask.request.get_json(force=True)['sequence']
resp = predict_structure.delay(sequence)
return success({'task_id': resp.id})
@app.route('/predict/<sequence>', methods=['GET'])
def predict_test(sequence):
resp = predict_structure.delay(sequence)
return success({'task_id': resp.id})
@app.route('/status/<task_id>')
def status(task_id):
result = predict_structure.AsyncResult(task_id)
return success({'state': result.state})
@app.route('/result/<task_id>')
def result(task_id):
result = predict_structure.AsyncResult(task_id)
if result.ready():
return success({'pdb_file': result.get()})
else:
return failure('Task is pending')
|
import flask
from cref.app.web import app
from cref.app.web.tasks import predict_structure
def success(result):
return flask.jsonify({
'status': 'success',
'retval': result
})
def failure(reason='Unknown'):
return flask.jsonify({
'status': 'failure',
'reason': reason
})
@app.route('/predict/', methods=['POST'])
def predict():
params = flask.request.get_json(force=True)
resp = predict_structure.delay(params['sequence'])
return success({'task_id': resp.id})
@app.route('/predict/<sequence>', methods=['GET'])
def predict_test(sequence):
resp = predict_structure.delay(sequence)
return success({'task_id': resp.id})
@app.route('/status/<task_id>')
def status(task_id):
result = predict_structure.AsyncResult(task_id)
return success({'state': result.state})
@app.route('/result/<task_id>')
def result(task_id):
result = predict_structure.AsyncResult(task_id)
if result.ready():
return success({'pdb_file': result.get()})
else:
return failure('Task is pending')
@app.route('/predictions/<task_id>/<path:filename>')
def download_file(filename):
return flask.send_from_directory(
'/home/mchelem/dev/cref2/predictions/',
filename, as_attachment=True
)
|
Add method to serve prediction result files
|
Add method to serve prediction result files
|
Python
|
mit
|
mchelem/cref2,mchelem/cref2,mchelem/cref2
|
f2d6c7781ab1555cc3392ff9a642a2cb208580f8
|
mail/views.py
|
mail/views.py
|
from django.shortcuts import redirect
from django.http import JsonResponse
from django.core.mail import EmailMessage
from django.middleware import csrf
from rest_framework.decorators import api_view
@api_view(['POST', 'GET'])
def send_contact_message(request):
if request.method == 'POST':
to_address = request.POST.get("to_address", "").split(',')
from_name = request.POST.get("from_name", "")
from_address = request.POST.get("from_address", "")
from_string = '{} <{}>'.format(from_name, from_address)
subject = request.POST.get("subject", "")
message_body = request.POST.get("message_body", "")
csrf_token = request.POST.get("csrfmiddlewaretoken", "")
email = EmailMessage(subject,
message_body,
'noreply@openstax.org',
["mwharrison@rice.edu"],
reply_to=[from_string])
email.send(fail_silently=False)
#return redirect('/contact-thank-you')
data = {'subject': subject,
'message_body': message_body,
'to_address': to_address,
'reply_to': [from_string],
'from_address': 'noreply@openstax.org',
'csrf_token': csrf_token,
}
return JsonResponse(data)
# if this is not posting a message, let's send the csfr token back
else:
csrf_token = csrf.get_token(request)
data = {'csrf_token': csrf_token}
return JsonResponse(data)
|
from django.shortcuts import redirect
from django.http import JsonResponse
from django.core.mail import EmailMessage
from django.middleware import csrf
from rest_framework.decorators import api_view
@api_view(['POST', 'GET'])
def send_contact_message(request):
if request.method == 'POST':
to_address = request.POST.get("to_address", "").split(',')
from_name = request.POST.get("from_name", "")
from_address = request.POST.get("from_address", "")
from_string = '{} <{}>'.format(from_name, from_address)
subject = request.POST.get("subject", "")
message_body = request.POST.get("message_body", "")
csrf_token = request.POST.get("csrfmiddlewaretoken", "")
email = EmailMessage(subject,
message_body,
'noreply@openstax.org',
to_address,
reply_to=[from_string])
email.send(fail_silently=False)
#return redirect('/contact-thank-you')
data = {'subject': subject,
'message_body': message_body,
'to_address': to_address,
'reply_to': [from_string],
'from_address': 'noreply@openstax.org',
'csrf_token': csrf_token,
}
return JsonResponse(data)
# if this is not posting a message, let's send the csfr token back
else:
csrf_token = csrf.get_token(request)
data = {'csrf_token': csrf_token}
return JsonResponse(data)
|
Revert "change to field for testing"
|
Revert "change to field for testing"
|
Python
|
agpl-3.0
|
openstax/openstax-cms,openstax/openstax-cms,openstax/openstax-cms,openstax/openstax-cms,Connexions/openstax-cms,Connexions/openstax-cms
|
161cdf644aa9b8575f42dab537c5e3e01a186ec6
|
test/python_api/default-constructor/sb_address.py
|
test/python_api/default-constructor/sb_address.py
|
"""
Fuzz tests an object after the default construction to make sure it does not crash lldb.
"""
import sys
import lldb
def fuzz_obj(obj):
obj.GetFileAddress()
obj.GetLoadAddress(lldb.SBTarget())
obj.SetLoadAddress(0xffff, lldb.SBTarget())
obj.OffsetAddress(sys.maxint)
obj.GetDescription(lldb.SBStream())
obj.Clear()
|
"""
Fuzz tests an object after the default construction to make sure it does not crash lldb.
"""
import sys
import lldb
def fuzz_obj(obj):
obj.GetFileAddress()
obj.GetLoadAddress(lldb.SBTarget())
obj.SetLoadAddress(0xffff, lldb.SBTarget())
obj.OffsetAddress(sys.maxint)
obj.GetDescription(lldb.SBStream())
obj.GetSectionType()
obj.GetSymbolContext(lldb.eSymbolContextEverything)
obj.GetModule()
obj.GetCompileUnit()
obj.GetFunction()
obj.GetBlock()
obj.GetSymbol()
obj.GetLineEntry()
obj.Clear()
|
Add new SBAddress APIs to the fuzz tests.
|
Add new SBAddress APIs to the fuzz tests.
git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@137625 91177308-0d34-0410-b5e6-96231b3b80d8
|
Python
|
apache-2.0
|
llvm-mirror/lldb,llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb
|
70e7b932c1c6013306a53f47c14d969d4ada8ab4
|
api/home/models.py
|
api/home/models.py
|
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.db import models
POSITIONS = (
('HERO', 'Hero'),
('SEC_1', 'Secondary 1'),
('SEC_2', 'Secondary 2'),
('THIRD_1', 'Third 1'),
('THIRD_2', 'Third 2'),
('THIRD_3', 'Third 3'),
)
class HomepageBlock(models.Model):
limit = models.Q(app_label='shows', model='show') | models.Q(app_label='articles',
model='article') | models.Q(
app_label='events', model='event')
content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE, limit_choices_to=limit)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey('content_type', 'object_id')
published_at = models.DateTimeField()
position = models.CharField(max_length=12, choices=POSITIONS)
override_kicker = models.CharField(max_length=64, default='')
override_title = models.CharField(max_length=265, default='')
override_description = models.TextField(default='')
override_background_color = models.CharField(max_length=64, default='')
|
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.db import models
POSITIONS = (
('HERO', 'Hero'),
('SEC_1', 'Secondary 1'),
('SEC_2', 'Secondary 2'),
('THIRD_1', 'Third 1'),
('THIRD_2', 'Third 2'),
('THIRD_3', 'Third 3'),
)
class HomepageBlock(models.Model):
limit = models.Q(app_label='shows', model='show') | models.Q(app_label='articles',
model='article') | models.Q(
app_label='events', model='event')
content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE, limit_choices_to=limit)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey('content_type', 'object_id')
published_at = models.DateTimeField()
position = models.CharField(max_length=12, choices=POSITIONS)
override_kicker = models.CharField(max_length=64, blank=True, default='')
override_title = models.CharField(max_length=265, blank=True, default='')
override_description = models.TextField(default='', blank=True)
override_background_color = models.CharField(max_length=64, blank=True, default='')
|
Allow overrides to be blank
|
Allow overrides to be blank
|
Python
|
mit
|
urfonline/api,urfonline/api,urfonline/api
|
cdf3686150309800cb28f584b64b9175aa4b5662
|
tests/unit_tests/gather_tests/MameSink_test.py
|
tests/unit_tests/gather_tests/MameSink_test.py
|
import pytest
from cps2_zmq.gather import MameSink
@pytest.mark.parametrize("message, expected",[
({'wid': 420, 'message': 'closing'}, 'worksink closing'),
({'wid': 420, 'message': 'threaddead'}, '420 is dead'),
({'wid': 420, 'message': 'some result'}, 'another message'),
])
def test_process_message(message, expected, worker):
sink = MameSink.MameSink("inproc://help")
worker.wid = message['wid']
sink.setup_workers2([worker])
result = sink._process_message(message)
assert result == expected
sink._cleanup()
# @pytest.mark.parametrize("messages, expected", [
# ([{'frame_number': 1141, 'sprites': [[420, 69, 300, 1], [1, 1, 1, 1]], 'palettes': [[]]},
# {'frame_number': 0, 'sprites': [], 'palettes': []}], 1)
# ])
# @pytest.mark.timeout(timeout=10, method='thread')
# def test_run(workers, messages, expected):
# sink = MameSink.MameSink("inproc://frommockworkers")
# sink.setup_workers2(workers)
# pass
|
import pytest
from cps2_zmq.gather import MameSink
@pytest.fixture(scope="module")
def sink():
sink = MameSink.MameSink("inproc://frommockworkers")
yield sink
sink.cleanup()
class TestSink(object):
@pytest.fixture(autouse=True)
def refresh(self, sink):
pass
yield
sink._msgsrecv = 0
@pytest.mark.parametrize("message, expected", [
({'wid': 420, 'message': 'closing'}, 'worksink closing'),
({'wid': 420, 'message': 'threaddead'}, '420 is dead'),
({'wid': 420, 'message': 'some result'}, 'another message'),
])
def test_process_message(self, message, expected, sink, worker):
worker.wid = message['wid']
sink.setup_workers2([worker])
result = sink._process_message(message)
assert result == expected
def test_run(self, sink, tworkers):
# sink = MameSink.MameSink("inproc://frommockworkers")
messages = ['some result', 'closing', 'threaddead']
for worker in tworkers:
worker.messages = [{'wid' : worker.wid, 'message': msg} for msg in messages]
worker.connect_push("inproc://frommockworkers")
sink.setup_workers2(tworkers)
sink.start()
#block and let the sink run
sink.join()
assert not sink.workers
assert sink._msgsrecv == len(tworkers) * len(messages)
|
Test Class now returns to base state between different groups of tests
|
Test Class now returns to base state between different groups of tests
|
Python
|
mit
|
goosechooser/cps2-zmq
|
8ecc9ec44437fea301ab8bdf6ed8b9b9a2a5c242
|
IPython/zmq/pylab/backend_payload_svg.py
|
IPython/zmq/pylab/backend_payload_svg.py
|
# Standard library imports
from cStringIO import StringIO
# System library imports.
from matplotlib.backends.backend_svg import new_figure_manager
from matplotlib._pylab_helpers import Gcf
# Local imports.
from backend_payload import add_plot_payload
def show():
""" Deliver a SVG payload.
"""
figure_manager = Gcf.get_active()
if figure_manager is not None:
# Make the background transparent.
# figure_manager.canvas.figure.patch.set_alpha(0.0)
# Set the background to white instead so it looks good on black.
figure_manager.canvas.figure.set_facecolor('white')
figure_manager.canvas.figure.set_edgecolor('white')
data = svg_from_canvas(figure_manager.canvas)
add_plot_payload('svg', data)
def svg_from_canvas(canvas):
""" Return a string containing the SVG representation of a FigureCanvasSvg.
"""
string_io = StringIO()
canvas.print_svg(string_io)
return string_io.getvalue()
|
"""Produce SVG versions of active plots for display by the rich Qt frontend.
"""
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
from cStringIO import StringIO
# System library imports.
from matplotlib.backends.backend_svg import new_figure_manager
from matplotlib._pylab_helpers import Gcf
# Local imports.
from backend_payload import add_plot_payload
#-----------------------------------------------------------------------------
# Functions
#-----------------------------------------------------------------------------
def show():
""" Deliver a SVG payload.
"""
for figure_manager in Gcf.get_all_fig_managers():
# Make the background transparent.
# figure_manager.canvas.figure.patch.set_alpha(0.0)
# Set the background to white instead so it looks good on black.
figure_manager.canvas.figure.set_facecolor('white')
figure_manager.canvas.figure.set_edgecolor('white')
data = svg_from_canvas(figure_manager.canvas)
add_plot_payload('svg', data)
def svg_from_canvas(canvas):
""" Return a string containing the SVG representation of a FigureCanvasSvg.
"""
string_io = StringIO()
canvas.print_svg(string_io)
return string_io.getvalue()
|
Fix svg rich backend to correctly show multiple figures.
|
Fix svg rich backend to correctly show multiple figures.
|
Python
|
bsd-3-clause
|
ipython/ipython,ipython/ipython
|
d1504f3c3129c926bd9897a6660669f146e64c38
|
cachupy/cachupy.py
|
cachupy/cachupy.py
|
import datetime
class Cache:
EXPIRE_IN = 'expire_in'
def __init__(self):
self.store = {}
def get(self, key):
"""Gets a value based upon a key."""
self._check_expiry(key)
return self.store[key]['value']
def set(self, dictionary, expire_in):
"""Sets a dictionary to the cache with a timedelta expiration."""
for key in dictionary.keys():
self.store[key] = {
Cache.EXPIRE_IN: datetime.datetime.now() + expire_in,
'value': dictionary[key]
}
def has(self, key):
"""Returns whether a key is in the cache."""
self._check_expiry(key)
return key in self.store
def _check_expiry(self, key):
"""Removes a key/value pair if it's expired."""
if key in self.store and datetime.datetime.now() > self.store[key][Cache.EXPIRE_IN]:
self.store.pop(key, None)
|
import datetime
class Cache:
EXPIRE_IN = 'expire_in'
VALUE = 'value'
def __init__(self):
self.lock = False
self.store = {}
def get(self, key):
"""Gets a value based upon a key."""
self._check_expiry(key)
return self.store[key][Cache.VALUE]
def set(self, expire_in, *args):
"""Sets a dictionary to the cache with a timedelta expiration."""
for arg in args:
if isinstance(arg, dict):
for k,v in arg.items():
self._set(k, v, expire_in)
else:
for v in arg:
self._set(v[0], v[1], expire_in)
def has(self, key):
"""Returns whether a key is in the cache."""
self._check_expiry(key)
return key in self.store
def _set(self, key, value, expire_in):
self.store[key] = {
Cache.EXPIRE_IN: datetime.datetime.now() + expire_in,
Cache.VALUE: value
}
def _check_expiry(self, key):
"""Removes a key/value pair if it's expired."""
if not self.lock and key in self.store and datetime.datetime.now() > self.store[key][Cache.EXPIRE_IN]:
self.store.pop(key, None)
|
Change signature of set() method.
|
Change signature of set() method.
|
Python
|
mit
|
patrickbird/cachupy
|
a470ce2a7557216be5cb36cdf3d895ea486e6d64
|
src/testers/tls.py
|
src/testers/tls.py
|
# -*- coding: utf-8 -*-
import ssl
from src.testers.decorators import requires_userinfo
@requires_userinfo
def available(test):
"""
Check if MongoDB is compiled with OpenSSL support
"""
return 'OpenSSLVersion' in test.tester.info \
or 'openssl' in test.tester.info
@requires_userinfo
def enabled(test):
"""
Check if TLS/SSL is enabled on the server side
"""
if not available(test):
return 3
try:
with test.tester.conn._socket_for_writes() as socket_info:
socket = socket_info.sock
return isinstance(socket, ssl.SSLSocket)
except (KeyError, AttributeError):
return False
def valid(test):
"""
Verify if server certificate is valid
"""
if not enabled(test):
return 3
with test.tester.conn._socket_for_writes() as socket_info:
cert = socket_info.sock.getpeercert()
if not cert:
return [2, 'Your server is presenting a self-signed certificate, which will not '
'protect your connections from man-in-the-middle attacks.']
return True
|
# -*- coding: utf-8 -*-
import ssl
from src.testers.decorators import requires_userinfo
@requires_userinfo
def available(test):
"""
Check if MongoDB is compiled with OpenSSL support
"""
return 'OpenSSLVersion' in test.tester.info \
or 'openssl' in test.tester.info
@requires_userinfo
def enabled(test):
"""
Check if TLS/SSL is enabled on the server side
"""
if not available(test):
return 3
try:
with test.tester.conn._socket_for_writes() as socket_info:
socket = socket_info.sock
return isinstance(socket, ssl.SSLSocket)
except (KeyError, AttributeError):
return False
@requires_userinfo
def valid(test):
"""
Verify if server certificate is valid
"""
if not enabled(test):
return 3
with test.tester.conn._socket_for_writes() as socket_info:
cert = socket_info.sock.getpeercert()
if not cert:
return [2, 'Your server is presenting a self-signed certificate, which will not '
'protect your connections from man-in-the-middle attacks.']
return True
|
Fix ServerSelectionTimeoutError: No servers found yet
|
Fix ServerSelectionTimeoutError: No servers found yet
|
Python
|
mit
|
stampery/mongoaudit
|
36e00778befd9e6763236b771a77184d31c3c885
|
babbage_fiscal/tasks.py
|
babbage_fiscal/tasks.py
|
from celery import Celery
import requests
from .config import get_engine, _set_connection_string
from .loader import FDPLoader
app = Celery('fdp_loader')
app.config_from_object('babbage_fiscal.celeryconfig')
@app.task
def load_fdp_task(package, callback, connection_string=None):
if connection_string is not None:
_set_connection_string(connection_string)
FDPLoader.load_fdp_to_db(package, get_engine())
ret = requests.get(callback)
|
from celery import Celery
import requests
from .config import get_engine, _set_connection_string
from .loader import FDPLoader
app = Celery('fdp_loader')
app.config_from_object('babbage_fiscal.celeryconfig')
@app.task
def load_fdp_task(package, callback, connection_string=None):
if connection_string is not None:
_set_connection_string(connection_string)
try:
FDPLoader.load_fdp_to_db(package, get_engine())
requests.get(callback, params={'package': package, 'status': 'done'})
except Exception as e:
requests.get(callback, params={'package': package, 'status': 'fail', 'error': str(e)})
|
Add more info to the callback
|
Add more info to the callback
|
Python
|
mit
|
openspending/babbage.fiscal-data-package
|
5089846e116fdd386de692f187f7c03304cfcd1d
|
attachments_to_filesystem/__openerp__.py
|
attachments_to_filesystem/__openerp__.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# This module copyright (C) 2015 Therp BV (<http://therp.nl>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name": "Move existing attachments to filesystem",
"version": "1.0",
"author": "Therp BV",
"license": "AGPL-3",
"complexity": "normal",
"category": "Knowledge Management",
"depends": [
'base',
],
"data": [
"data/ir_cron.xml",
"data/init.xml",
],
"test": [
],
"auto_install": False,
"installable": True,
"application": False,
"external_dependencies": {
'python': ['dateutil', 'pytz'],
},
}
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# This module copyright (C) 2015 Therp BV (<http://therp.nl>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name": "Move existing attachments to filesystem",
"version": "1.0",
"author": "Therp BV,Odoo Community Association (OCA)",
"license": "AGPL-3",
"complexity": "normal",
"category": "Knowledge Management",
"depends": [
'base',
],
"data": [
"data/ir_cron.xml",
"data/init.xml",
],
"test": [
],
"auto_install": False,
"installable": True,
"application": False,
"external_dependencies": {
'python': ['dateutil', 'pytz'],
},
}
|
Add Odoo Community Association (OCA) in authors
|
Add Odoo Community Association (OCA) in authors
|
Python
|
agpl-3.0
|
xpansa/knowledge,Endika/knowledge,sergiocorato/knowledge,algiopensource/knowledge,anas-taji/knowledge,acsone/knowledge,acsone/knowledge,ClearCorp-dev/knowledge,xpansa/knowledge,Endika/knowledge,Endika/knowledge,sergiocorato/knowledge,jobiols/knowledge,anas-taji/knowledge,xpansa/knowledge,ClearCorp/knowledge,ClearCorp-dev/knowledge,sergiocorato/knowledge,Endika/knowledge,algiopensource/knowledge,acsone/knowledge,algiopensource/knowledge,jobiols/knowledge,ClearCorp-dev/knowledge,jobiols/knowledge,raycarnes/knowledge,raycarnes/knowledge,xpansa/knowledge,sergiocorato/knowledge,anas-taji/knowledge,raycarnes/knowledge,raycarnes/knowledge,algiopensource/knowledge,anas-taji/knowledge,acsone/knowledge,jobiols/knowledge,ClearCorp/knowledge,ClearCorp-dev/knowledge,ClearCorp/knowledge,ClearCorp/knowledge
|
6973cc19a9d4fb4e0867a98fced1c4c33fc0cee8
|
students/psbriant/session08/test_circle.py
|
students/psbriant/session08/test_circle.py
|
"""
Name: Paul Briant
Date: 11/22/16
Class: Introduction to Python
Session: 08
Assignment: circle lab
Description:
Tests for Circle lab
"""
# -------------------------------Imports----------------------------------------
import math
from circle import Circle
# -------------------------------Functions--------------------------------------
def test_radius():
c = Circle(4)
assert c.radius == 4
def test_diameter():
c = Circle(5)
assert c.diameter == 10
def test_change_radius():
c = Circle(5)
c.radius = 2
assert c.diameter == 4
def test_change_diameter():
c = Circle(5)
c.diameter = 12
assert c.diameter == 12
assert c.radius == 6
def test_area():
"""
"""
|
"""
Name: Paul Briant
Date: 11/22/16
Class: Introduction to Python
Session: 08
Assignment: circle lab
Description:
Tests for Circle lab
"""
# -------------------------------Imports----------------------------------------
import math
from circle import Circle
# -------------------------------Functions--------------------------------------
def test_radius():
c = Circle(4)
assert c.radius == 4
def test_diameter():
c = Circle(5)
assert c.diameter == 10
def test_change_radius():
c = Circle(5)
c.radius = 2
assert c.diameter == 4
def test_change_diameter():
c = Circle(5)
c.diameter = 12
assert c.diameter == 12
assert c.radius == 6
def test_area():
"""
Verify area of circle
"""
c = Circle(5)
assert c.area == 25
|
Add test for area property of circle.
|
Add test for area property of circle.
|
Python
|
unlicense
|
weidnem/IntroPython2016,UWPCE-PythonCert/IntroPython2016,weidnem/IntroPython2016,UWPCE-PythonCert/IntroPython2016,UWPCE-PythonCert/IntroPython2016,weidnem/IntroPython2016
|
24fd3b98f06b30d8827ba472dc305514ed71a5e5
|
cropimg/widgets.py
|
cropimg/widgets.py
|
from django.forms.widgets import Input, ClearableFileInput
from django.template.loader import render_to_string
class CIImgWidget(ClearableFileInput):
def render(self, name, value, attrs=None):
try:
attrs["data-value"] = getattr(value, "url", "")
except ValueError: # attribute has no file associated with it.
attrs["data-value"] = ""
return super(CIImgWidget, self).render(name, value, attrs)
class CIThumbnailWidget(Input):
input_type = "text"
def render(self, name, value, attrs=None):
if attrs:
attrs.update(self.attrs)
attrs["type"] = "hidden"
input_field = super(CIThumbnailWidget, self).render(name, value, attrs)
return render_to_string("cropimg/cropimg_widget.html",
{
"name": name, "value": value, "attrs": attrs,
"input_field": input_field
})
class Media:
js = ("cropimg/js/jquery_init.js", "cropimg/js/cropimg.jquery.js",
"cropimg/js/cropimg_init.js")
css = {"all": ["cropimg/resource/cropimg.css"]}
|
from django.forms.widgets import Input, ClearableFileInput
from django.template.loader import render_to_string
class CIImgWidget(ClearableFileInput):
def render(self, name, value, attrs=None):
try:
attrs["data-value"] = getattr(value, "url", "")
except ValueError: # attribute has no file associated with it.
attrs["data-value"] = ""
return super(CIImgWidget, self).render(name, value, attrs)
class CIThumbnailWidget(Input):
input_type = "text"
def render(self, name, value, attrs=None, renderer=None):
if attrs:
attrs.update(self.attrs)
attrs["type"] = "hidden"
input_field = super(CIThumbnailWidget, self).render(name, value, attrs)
return render_to_string("cropimg/cropimg_widget.html",
{
"name": name, "value": value, "attrs": attrs,
"input_field": input_field
})
class Media:
js = ("cropimg/js/jquery_init.js", "cropimg/js/cropimg.jquery.js",
"cropimg/js/cropimg_init.js")
css = {"all": ["cropimg/resource/cropimg.css"]}
|
Make compatible with Django >2.1
|
Make compatible with Django >2.1
|
Python
|
mit
|
rewardz/cropimg-django,rewardz/cropimg-django,rewardz/cropimg-django
|
eacc66e5a9ab3310c75924dcb340e4944e9424d4
|
tests/specifications/external_spec_test.py
|
tests/specifications/external_spec_test.py
|
from fontbakery.checkrunner import Section
from fontbakery.fonts_spec import spec_factory
def check_filter(checkid, font=None, **iterargs):
if checkid in (
"com.google.fonts/check/035", # ftxvalidator
"com.google.fonts/check/036", # ots-sanitize
"com.google.fonts/check/037", # Font Validator
"com.google.fonts/check/038", # Fontforge
"com.google.fonts/check/039", # Fontforge
):
return False, "Skipping external tools."
return True, None
def test_external_specification():
"""Test the creation of external specifications."""
specification = spec_factory(default_section=Section("Dalton Maag OpenType"))
specification.set_check_filter(check_filter)
specification.auto_register(
globals(), spec_imports=['fontbakery.specifications.opentype'])
assert len(specification.sections) > 1
|
from fontbakery.checkrunner import Section
from fontbakery.fonts_spec import spec_factory
def check_filter(checkid, font=None, **iterargs):
if checkid in (
"com.google.fonts/check/035", # ftxvalidator
"com.google.fonts/check/036", # ots-sanitize
"com.google.fonts/check/037", # Font Validator
"com.google.fonts/check/038", # Fontforge
"com.google.fonts/check/039", # Fontforge
):
return False, "Skipping external tools."
return True, None
def test_external_specification():
"""Test the creation of external specifications."""
specification = spec_factory(default_section=Section("Dalton Maag OpenType"))
specification.set_check_filter(check_filter)
specification.auto_register(
globals(), spec_imports=["fontbakery.specifications.opentype"])
# Probe some tests
expected_tests = ["com.google.fonts/check/002", "com.google.fonts/check/180"]
specification.test_expected_checks(expected_tests)
# Probe tests we don't want
assert "com.google.fonts/check/035" not in specification._check_registry.keys()
assert len(specification.sections) > 1
|
Test for expected and unexpected checks
|
Test for expected and unexpected checks
|
Python
|
apache-2.0
|
googlefonts/fontbakery,graphicore/fontbakery,graphicore/fontbakery,googlefonts/fontbakery,googlefonts/fontbakery,moyogo/fontbakery,moyogo/fontbakery,moyogo/fontbakery,graphicore/fontbakery
|
6adbbe71dcde926fbd9288b4a43b45ff1a339cdc
|
turbustat/statistics/stats_utils.py
|
turbustat/statistics/stats_utils.py
|
import numpy as np
def hellinger(data1, data2):
'''
Calculate the Hellinger Distance between two datasets.
Parameters
----------
data1 : numpy.ndarray
1D array.
data2 : numpy.ndarray
1D array.
Returns
-------
distance : float
Distance value.
'''
distance = (1 / np.sqrt(2)) * \
np.sqrt(np.nansum((np.sqrt(data1) - np.sqrt(data2)) ** 2.))
return distance
def standardize(x):
return (x - np.nanmean(x)) / np.nanstd(x)
|
import numpy as np
def hellinger(data1, data2):
'''
Calculate the Hellinger Distance between two datasets.
Parameters
----------
data1 : numpy.ndarray
1D array.
data2 : numpy.ndarray
1D array.
Returns
-------
distance : float
Distance value.
'''
distance = (1 / np.sqrt(2)) * \
np.sqrt(np.nansum((np.sqrt(data1) - np.sqrt(data2)) ** 2.))
return distance
def standardize(x):
return (x - np.nanmean(x)) / np.nanstd(x)
def kl_divergence(P, Q):
'''
Kullback Leidler Divergence
Parameters
----------
P,Q : numpy.ndarray
Two Discrete Probability distributions
Returns
-------
kl_divergence : float
'''
P = P[~np.isnan(P)]
Q = Q[~np.isnan(Q)]
P = P[np.isfinite(P)]
Q = Q[np.isfinite(Q)]
return np.nansum(np.where(Q != 0, P * np.log(P / Q), 0))
|
Move KL Div to utils file
|
Move KL Div to utils file
|
Python
|
mit
|
e-koch/TurbuStat,Astroua/TurbuStat
|
cd219d5ee0ecbd54705c5add4239cef1513b8c2a
|
dodocs/__init__.py
|
dodocs/__init__.py
|
"""Main function
Copyright (c) 2015 Francesco Montesano
MIT Licence
"""
import sys
import colorama
from dodocs.cmdline import parse
__version__ = "0.0.1"
colorama.init(autoreset=True)
def main(argv=None):
"""
Main code
Parameters
----------
argv : list of strings, optional
command line arguments
"""
args = parse(argv=argv)
if args.subparser_name == "profile":
from dodocs.profiles import main
main(args)
# elif args.subparser_name == "mkvenv":
# from dodocs.venvs import create
# create(args)
# elif args.subparser_name == "build":
# print("building")
else:
msg = colorama.Fore.RED + "Please provide a command."
msg += " Valid commands are:\n * profile" # \n * create"
sys.exit(msg)
|
"""Main function
Copyright (c) 2015 Francesco Montesano
MIT Licence
"""
import sys
import colorama
from dodocs.cmdline import parse
__version__ = "0.0.1"
colorama.init(autoreset=True)
def main(argv=None):
"""
Main code
Parameters
----------
argv : list of strings, optional
command line arguments
"""
args = parse(argv=argv)
try:
args.func(args)
except AttributeError:
# defaults profile to list
if args.subparser_name == 'profile' and args.profile_cmd is None:
main([args.subparser_name, 'list'])
# in the other cases suggest to run -h
msg = colorama.Fore.RED + "Please provide a valid command."
print(msg)
msg = "Type\n " + sys.argv[0]
if args.subparser_name is not None:
msg += " " + args.subparser_name
msg += ' -h'
print(msg)
|
Use args.func. Deal with failures, default "profile'
|
Use args.func. Deal with failures, default "profile'
|
Python
|
mit
|
montefra/dodocs
|
f8b28c73e0bb46aaa760d4c4afadd75feacbe57a
|
tools/benchmark/benchmark_date_guessing.py
|
tools/benchmark/benchmark_date_guessing.py
|
#!/usr/bin/env python
import os
import pytest
import sys
from mediawords.tm.guess_date import guess_date, McGuessDateException
def main():
if (len(sys.argv) < 2):
sys.stderr.write('usage: ' + sys.argv[0] + ' <directory of html files>')
exit()
directory = os.fsencode(sys.argv[1]).decode("utf-8")
for file in os.listdir(directory):
filename = os.fsdecode(file)
if filename.endswith(".txt"):
fh = open(os.path.join(directory,filename))
content = fh.read()
print(filename + ": " + str(len(content)))
date_guess = guess_date(
url='http://dont.know.the.date/some/path.html',
html=content
)
print(date_guess.date)
main()
|
#!/usr/bin/env python3
import os
import sys
from mediawords.tm.guess_date import guess_date
def benchmark_date_guessing():
"""Benchmark Python date guessing code."""
if len(sys.argv) < 2:
sys.exit("Usage: %s <directory of html files>" % sys.argv[0])
directory = sys.argv[1]
for file in os.listdir(directory):
filename = os.fsdecode(file)
if filename.endswith(".txt"):
fh = open(os.path.join(directory, filename))
content = fh.read()
print(filename + ": " + str(len(content)))
date_guess = guess_date(url='http://dont.know.the.date/some/path.html',
html=content)
print(date_guess.date)
if __name__ == '__main__':
benchmark_date_guessing()
|
Clean up date guessing benchmarking code
|
Clean up date guessing benchmarking code
* Remove unused imports
* use sys.exit(message) instead of exit()
* Use Pythonic way to call main function (if __name__ == '__main__')
* Reformat code
* Avoid encoding / decoding things to / from UTF-8
|
Python
|
agpl-3.0
|
berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud
|
a459c9bd1135ede49e9b2f55a633f86d7cdb81e2
|
tests/mocks/RPi.py
|
tests/mocks/RPi.py
|
# -*- coding: utf-8 -*-
"""
Mocks for tests on other HW than Raspberry Pi.
"""
class GPIO(object):
BOARD = 'board'
IN = 'in'
OUT = 'out'
PUD_UP = 'pud_up'
FALLING = 'falling'
HIGH = 'high'
LOW = 'low'
@classmethod
def setmode(cls, mode):
print("Mock: set GPIO mode {}".format(mode))
@classmethod
def setup(cls, pin, direction, **kwargs):
print("Mock: setup GPIO pin {} to {}".format(pin, direction))
@classmethod
def output(cls, pin, status):
print("Mock: output GPIO pin {} to {}".format(pin, status))
@classmethod
def add_event_detect(cls, pin, status, **kwargs):
print("Mock: detect GPIO pin {} when {}".format(pin, status))
@classmethod
def cleanup(cls):
print("Mock: quit GPIO")
|
# -*- coding: utf-8 -*-
"""
Mocks for tests on other HW than Raspberry Pi.
"""
class GPIO(object):
BOARD = 'board'
IN = 'in'
OUT = 'out'
PUD_UP = 'pud_up'
FALLING = 'falling'
HIGH = 'high'
LOW = 'low'
@classmethod
def setmode(cls, mode):
print("Mock: set GPIO mode {}".format(mode))
@classmethod
def setup(cls, pin, direction, **kwargs):
print("Mock: setup GPIO pin {} to {}".format(pin, direction))
@classmethod
def output(cls, pin, status):
pass
@classmethod
def add_event_detect(cls, pin, status, **kwargs):
print("Mock: detect GPIO pin {} when {}".format(pin, status))
@classmethod
def cleanup(cls):
print("Mock: quit GPIO")
|
Remove print message in mocks
|
Remove print message in mocks
|
Python
|
mit
|
werdeil/pibooth,werdeil/pibooth
|
7e015e6955dfe392649b5ca0cdeb5a7701700f24
|
laalaa/apps/advisers/serializers.py
|
laalaa/apps/advisers/serializers.py
|
from rest_framework import serializers
from rest_framework_gis import serializers as gis_serializers
from .models import Location, Office, Organisation
class DistanceField(serializers.ReadOnlyField):
def to_representation(self, obj):
# miles
return obj.mi
class OrganisationSerializer(serializers.ModelSerializer):
class Meta:
model = Organisation
fields = ('name', 'website',)
class LocationSerializer(gis_serializers.GeoModelSerializer):
class Meta:
model = Location
fields = (
'address', 'city', 'postcode', 'point', 'type')
class OfficeSerializer(gis_serializers.GeoModelSerializer):
location = LocationSerializer()
organisation = OrganisationSerializer()
distance = DistanceField()
class Meta:
model = Office
fields = (
'telephone', 'location', 'organisation', 'distance')
|
from rest_framework import serializers
from rest_framework_gis import serializers as gis_serializers
from .models import Location, Office, Organisation
class DistanceField(serializers.ReadOnlyField):
def to_representation(self, obj):
# miles
return obj.mi
class OrganisationSerializer(serializers.ModelSerializer):
class Meta:
model = Organisation
fields = ('name', 'website',)
class LocationSerializer(gis_serializers.GeoModelSerializer):
class Meta:
model = Location
fields = (
'address', 'city', 'postcode', 'point', 'type')
class OfficeSerializer(gis_serializers.GeoModelSerializer):
location = LocationSerializer()
organisation = OrganisationSerializer()
distance = DistanceField()
categories = serializers.SlugRelatedField(
slug_field='code', many=True, read_only=True)
class Meta:
model = Office
fields = (
'telephone', 'location', 'organisation', 'distance',
'categories')
|
Add list of category codes to offices
|
Add list of category codes to offices
|
Python
|
mit
|
ministryofjustice/laa-legal-adviser-api,ministryofjustice/laa-legal-adviser-api,ministryofjustice/laa-legal-adviser-api
|
c568f4d3ea475f341490bc81e89c28016e8412a2
|
corehq/apps/locations/dbaccessors.py
|
corehq/apps/locations/dbaccessors.py
|
from corehq.apps.users.models import CommCareUser
def _users_by_location(location_id, include_docs):
return CommCareUser.view(
'locations/users_by_location_id',
startkey=[location_id],
endkey=[location_id, {}],
include_docs=include_docs,
).all()
def get_users_by_location_id(location_id):
"""
Get all users for a given location
"""
return _users_by_location(location_id, include_docs=True)
def get_user_ids_by_location(location_id):
return [user['id'] for user in
_users_by_location(location_id, include_docs=False)]
|
from corehq.apps.users.models import CommCareUser
def _users_by_location(location_id, include_docs, wrap):
view = CommCareUser.view if wrap else CommCareUser.get_db().view
return view(
'locations/users_by_location_id',
startkey=[location_id],
endkey=[location_id, {}],
include_docs=include_docs,
).all()
def get_users_by_location_id(location_id, wrap=True):
"""
Get all users for a given location
"""
return _users_by_location(location_id, include_docs=True, wrap=wrap)
def get_user_ids_by_location(location_id):
return [user['id'] for user in
_users_by_location(location_id, include_docs=False, wrap=False)]
|
Allow getting the unwrapped doc
|
Allow getting the unwrapped doc
|
Python
|
bsd-3-clause
|
qedsoftware/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq
|
39256f49c952dbd5802d5321c8a74b2c41934e38
|
timedelta/__init__.py
|
timedelta/__init__.py
|
import os
__version__ = open(os.path.join(os.path.dirname(__file__), "VERSION")).read().strip()
try:
from fields import TimedeltaField
from helpers import (
divide, multiply, modulo,
parse, nice_repr,
percentage, decimal_percentage,
total_seconds
)
except ImportError:
pass
|
import os
__version__ = open(os.path.join(os.path.dirname(__file__), "VERSION")).read().strip()
try:
import django
from fields import TimedeltaField
from helpers import (
divide, multiply, modulo,
parse, nice_repr,
percentage, decimal_percentage,
total_seconds
)
except (ImportError, django.core.exceptions.ImproperlyConfigured):
pass
|
Fix running on unconfigured virtualenv.
|
Fix running on unconfigured virtualenv.
|
Python
|
bsd-3-clause
|
sookasa/django-timedelta-field
|
7a7b6351f21c95b3620059984470b0b7619c1e9d
|
docopt_dispatch.py
|
docopt_dispatch.py
|
"""Dispatch from command-line arguments to functions."""
import re
from collections import OrderedDict
from docopt import docopt
__all__ = ('dispatch', 'DispatchError')
__author__ = 'Vladimir Keleshev <vladimir@keleshev.com>'
__version__ = '0.0.0'
__license__ = 'MIT'
__keywords__ = 'docopt dispatch function adapter kwargs'
__url__ = 'https://github.com/halst/docopt-dispatch'
class DispatchError(Exception):
pass
class Dispatch(object):
def __init__(self):
self._functions = OrderedDict()
def on(self, argument):
def decorator(function):
self._functions[argument] = function
return function
return decorator
def __call__(self, *args, **kwargs):
arguments = docopt(*args, **kwargs)
for argument, function in self._functions.items():
if arguments[argument]:
function(**self._kwargify(arguments))
return
raise DispatchError('None of dispatch conditions (%s) is triggered'
% ', '.join(self._functions.keys()))
@staticmethod
def _kwargify(arguments):
kwargify = lambda string: re.sub('\W', '_', string).strip('_')
return dict((kwargify(key), value) for key, value in arguments.items())
dispatch = Dispatch()
|
"""Dispatch from command-line arguments to functions."""
import re
from collections import OrderedDict
__all__ = ('dispatch', 'DispatchError')
__author__ = 'Vladimir Keleshev <vladimir@keleshev.com>'
__version__ = '0.0.0'
__license__ = 'MIT'
__keywords__ = 'docopt dispatch function adapter kwargs'
__url__ = 'https://github.com/halst/docopt-dispatch'
class DispatchError(Exception):
pass
class Dispatch(object):
def __init__(self):
self._functions = OrderedDict()
def on(self, argument):
def decorator(function):
self._functions[argument] = function
return function
return decorator
def __call__(self, *args, **kwargs):
from docopt import docopt
arguments = docopt(*args, **kwargs)
for argument, function in self._functions.items():
if arguments[argument]:
function(**self._kwargify(arguments))
return
raise DispatchError('None of dispatch conditions (%s) is triggered'
% ', '.join(self._functions.keys()))
@staticmethod
def _kwargify(arguments):
kwargify = lambda string: re.sub('\W', '_', string).strip('_')
return dict((kwargify(key), value) for key, value in arguments.items())
dispatch = Dispatch()
|
Load docopt lazily (so that setup.py works)
|
Load docopt lazily (so that setup.py works)
|
Python
|
mit
|
keleshev/docopt-dispatch
|
e50fdd79a49adce75559ea07024d056b6b386761
|
docs/config/all.py
|
docs/config/all.py
|
# Global configuration information used across all the
# translations of documentation.
#
# Import the base theme configuration
from cakephpsphinx.config.all import *
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The full version, including alpha/beta/rc tags.
release = '2.x'
# The search index version.
search_version = 'chronos-2'
# The marketing display name for the book.
version_name = ''
# Project name shown in the black header bar
project = 'Chronos'
# Other versions that display in the version picker menu.
version_list = [
{'name': '1.x', 'number': '/chronos/1.x', 'title': '1.x'},
{'name': '2.x', 'number': '/chronos/2.x', 'title': '2.x', 'current': True},
]
# Languages available.
languages = ['en', 'fr', 'ja', 'pt']
# The GitHub branch name for this version of the docs
# for edit links to point at.
branch = '2.x'
# Current version being built
version = '2.x'
# Language in use for this directory.
language = 'en'
show_root_link = True
repository = 'cakephp/chronos'
source_path = 'docs/'
is_prerelease = True
hide_page_contents = ('search', '404', 'contents')
|
# Global configuration information used across all the
# translations of documentation.
#
# Import the base theme configuration
from cakephpsphinx.config.all import *
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The full version, including alpha/beta/rc tags.
release = '2.x'
# The search index version.
search_version = 'chronos-2'
# The marketing display name for the book.
version_name = ''
# Project name shown in the black header bar
project = 'Chronos'
# Other versions that display in the version picker menu.
version_list = [
{'name': '1.x', 'number': '/chronos/1.x', 'title': '1.x'},
{'name': '2.x', 'number': '/chronos/2.x', 'title': '2.x', 'current': True},
]
# Languages available.
languages = ['en', 'fr', 'ja', 'pt']
# The GitHub branch name for this version of the docs
# for edit links to point at.
branch = '2.x'
# Current version being built
version = '2.x'
# Language in use for this directory.
language = 'en'
show_root_link = True
repository = 'cakephp/chronos'
source_path = 'docs/'
hide_page_contents = ('search', '404', 'contents')
|
Remove pre-release flag as 2.x is mainline now
|
Remove pre-release flag as 2.x is mainline now
|
Python
|
mit
|
cakephp/chronos
|
0b7686f14f47cc00665cfe3d6a396a5c14e6b9b3
|
src/puzzle/heuristics/analyze.py
|
src/puzzle/heuristics/analyze.py
|
import collections
from src.puzzle.problems import crossword_problem
_PROBLEM_TYPES = set()
def identify(line):
scores = {}
for t in _PROBLEM_TYPES:
score = t.score(line)
if score:
scores[t] = t.score(line)
# Return sorted values, highest first.
return collections.OrderedDict(
sorted(scores.items(), key=lambda x: x[1], reverse=True))
def identify_all(lines):
return map(identify, lines)
def init(lines=None):
register(crossword_problem.CrosswordProblem)
if lines:
identify_all(lines)
def reset():
_PROBLEM_TYPES.clear()
def problem_types():
return _PROBLEM_TYPES
def register(cls):
_PROBLEM_TYPES.add(cls)
|
from src.data import meta
from src.puzzle.problems import crossword_problem
_PROBLEM_TYPES = set()
def identify(line):
scores = meta.Meta()
for t in _PROBLEM_TYPES:
score = t.score(line)
if score:
scores[t] = t.score(line)
return scores
def identify_all(lines):
return map(identify, lines)
def init(lines=None):
register(crossword_problem.CrosswordProblem)
if lines:
identify_all(lines)
def reset():
_PROBLEM_TYPES.clear()
def problem_types():
return _PROBLEM_TYPES
def register(cls):
_PROBLEM_TYPES.add(cls)
|
Switch identify to Meta object.
|
Switch identify to Meta object.
|
Python
|
mit
|
PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge
|
8e608c2155a4a466f1a4bf87df05c4e4ebd90c1c
|
django/__init__.py
|
django/__init__.py
|
VERSION = (1, 0, 'post-release-SVN')
def get_version():
"Returns the version as a human-format string."
v = '.'.join([str(i) for i in VERSION[:-1]])
if VERSION[-1]:
from django.utils.version import get_svn_revision
v = '%s-%s-%s' % (v, VERSION[-1], get_svn_revision())
return v
|
VERSION = (1, 1, 0, 'alpha', 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
if VERSION[3:] == ('alpha', 0):
version = '%s pre-alpha' % version
else:
version = '%s %s' % (version, VERSION[3])
if VERSION[3] != 'final':
version = '%s %s' % (version, VERSION[4])
from django.utils.version import get_svn_revision
svn_rev = get_svn_revision()
if svn_rev != u'SVN-unknown':
version = "%s %s" % (version, svn_rev)
return version
|
Update django.VERSION in trunk per previous discussion
|
Update django.VERSION in trunk per previous discussion
git-svn-id: 4f9f921b081c523744c7bf24d959a0db39629563@9103 bcc190cf-cafb-0310-a4f2-bffc1f526a37
|
Python
|
bsd-3-clause
|
blindroot/django,lsqtongxin/django,jgoclawski/django,blighj/django,fenginx/django,riklaunim/django-custom-multisite,jn7163/django,alrifqi/django,ryangallen/django,charettes/django,eugena/django,AndrewGrossman/django,yakky/django,eugena/django,denys-duchier/django,EliotBerriot/django,dwightgunning/django,hassanabidpk/django,tanmaythakur/django,carljm/django,jasonwzhy/django,ifduyue/django,postrational/django,shacker/django,darkryder/django,DONIKAN/django,gannetson/django,hasadna/django,sjlehtin/django,benspaulding/django,akaariai/django,zerc/django,RaoUmer/django,hottwaj/django,apollo13/django,frishberg/django,rizumu/django,ASCrookes/django,tysonclugg/django,adrianholovaty/django,evansd/django,koordinates/django,Endika/django,zhoulingjun/django,labcodes/django,sbellem/django,theo-l/django,mitya57/django,haxoza/django,dursk/django,MarcJoan/django,sjlehtin/django,mcella/django,tayfun/django,tanmaythakur/django,caotianwei/django,bitcity/django,seanwestfall/django,eugena/django,abomyi/django,simonw/django,zulip/django,hellhovnd/django,cainmatt/django,adambrenecki/django,MarcJoan/django,rtindru/django,gunchleoc/django,django-nonrel/django,jpic/django,helenst/django,pauloxnet/django,dpetzold/django,peterlauri/django,pipermerriam/django,dfunckt/django,sarvex/django,yigitguler/django,jhg/django,twz915/django,willhardy/django,jasonwzhy/django,alimony/django,RevelSystems/django,pasqualguerrero/django,maxsocl/django,sarthakmeh03/django,drjeep/django,MikeAmy/django,jhoos/django,akaariai/django,weiawe/django,vmarkovtsev/django,xadahiya/django,jallohm/django,unaizalakain/django,akshatharaj/django,ryanahall/django,IRI-Research/django,codepantry/django,NullSoldier/django,jarshwah/django,GitAngel/django,follow99/django,unaizalakain/django,denisenkom/django,beckastar/django,himleyb85/django,divio/django,zerc/django,archen/django,harisibrahimkv/django,sergei-maertens/django,nhippenmeyer/django,follow99/django,mdj2/django,sergei-maertens/django,myang321/django,syaiful6/django,marissazhou/django,gchp/django,monetate/django,mttr/django,barbuza/django,irwinlove/django,bspink/django,jrrembert/django,DasIch/django,djbaldey/django,felixxm/django,andela-ooladayo/django,tcwicklund/django,theo-l/django,druuu/django,seocam/django,t0in4/django,sam-tsai/django,darkryder/django,sopier/django,rwillmer/django,AndrewGrossman/django,kholidfu/django,runekaagaard/django-contrib-locking,dursk/django,supriyantomaftuh/django,denys-duchier/django,kamyu104/django,henryfjordan/django,myang321/django,marissazhou/django,andrewsmedina/django,payeldillip/django,iambibhas/django,claudep/django,IRI-Research/django,mitya57/django,epandurski/django,ericfc/django,areski/django,devops2014/djangosite,epandurski/django,apollo13/django,Sonicbids/django,jvkops/django,ABaldwinHunter/django-clone-classic,github-account-because-they-want-it/django,anant-dev/django,andela-ooladayo/django,camilonova/django,seanwestfall/django,mojeto/django,beck/django,MoritzS/django,sadaf2605/django,b-me/django,rajsadho/django,simone/django-gb,vitaly4uk/django,cobalys/django,hottwaj/django,payeldillip/django,rlugojr/django,shtouff/django,dfdx2/django,extremewaysback/django,ryanahall/django,djbaldey/django,nju520/django,andela-ifageyinbo/django,blueyed/django,DasIch/django,taaviteska/django,mttr/django,mrfuxi/django,seocam/django,YangSongzhou/django,lzw120/django,Endika/django,jvkops/django,kcpawan/django,apollo13/django,github-account-because-they-want-it/django,gitaarik/django,makinacorpus/django,davgibbs/django,benjaminjkraft/django,kamyu104/django,mrfuxi/django,frankvdp/django,erikr/django,Korkki/django,simonw/django,adambrenecki/django,alx-eu/django,mcardillo55/django,django/django,django/django,programadorjc/django,solarissmoke/django,neiudemo1/django,olasitarska/django,BrotherPhil/django,webgeodatavore/django,hobarrera/django,elijah513/django,ecederstrand/django,wetneb/django,adamchainz/django,TridevGuha/django,benjaminjkraft/django,SujaySKumar/django,b-me/django,dhruvagarwal/django,mathspace/django,riteshshrv/django,beckastar/django,evansd/django,t0in4/django,savoirfairelinux/django,hassanabidpk/django,hunter007/django,MatthewWilkes/django,auready/django,bak1an/django,alilotfi/django,jylaxp/django,kisna72/django,TimYi/django,kisna72/django,taaviteska/django,krisys/django,googleinterns/django,supriyantomaftuh/django,kevintaw/django,charettes/django,techdragon/django,zulip/django,tcwicklund/django,Proggie02/TestRepo,szopu/django,AlexHill/django,andreif/django,tayfun/django,alrifqi/django,kutenai/django,sarthakmeh03/django,alexmorozov/django,redhat-openstack/django,reinout/django,EliotBerriot/django,ajaali/django,alexallah/django,andrewsmedina/django,joequery/django,jenalgit/django,neiudemo1/django,asser/django,ataylor32/django,lmorchard/django,andreif/django,oberlin/django,frePPLe/django,gohin/django,ASCrookes/django,rogerhu/django,ryanahall/django,roselleebarle04/django,vitaly4uk/django,aerophile/django,jarshwah/django,rogerhu/django,waytai/django,Sonicbids/django,postrational/django,bobcyw/django,aisipos/django,treyhunner/django,rsvip/Django,davgibbs/django,nemesisdesign/django,chrishas35/django-travis-ci,stevenewey/django,hunter007/django,rizumu/django,irwinlove/django,alexmorozov/django,mrfuxi/django,jallohm/django,EliotBerriot/django,RossBrunton/django,Nepherhotep/django,spisneha25/django,mmardini/django,jasonbot/django,PolicyStat/django,SoftwareMaven/django,caotianwei/django,kevintaw/django,b-me/django,mitar/django,piquadrat/django,mattseymour/django,scorphus/django,vitan/django,ericholscher/django,yceruto/django,rapilabs/django,beck/django,zedr/django,MounirMesselmeni/django,varunnaganathan/django,jnovinger/django,TimYi/django,asser/django,krisys/django,salamer/django,blindroot/django,shownomercy/django,leekchan/django_test,dbaxa/django,davidharrigan/django,aspidites/django,sadaf2605/django,rhertzog/django,auready/django,hnakamur/django,pipermerriam/django,rajsadho/django,delinhabit/django,AlexHill/django,beck/django,weiawe/django,Balachan27/django,h4r5h1t/django-hauthy,stewartpark/django,django-nonrel/django-nonrel,nielsvanoch/django,hackerbot/DjangoDev,wkschwartz/django,jeezybrick/django,mcardillo55/django,runekaagaard/django-contrib-locking,timgraham/django,spisneha25/django,Argon-Zhou/django,quxiaolong1504/django,gannetson/django,rmboggs/django,guettli/django,hybrideagle/django,jylaxp/django,bikong2/django,adamchainz/django,gengue/django,ajoaoff/django,hybrideagle/django,z0by/django,jgoclawski/django,bitcity/django,Matt-Deacalion/django,rsalmaso/django,lunafeng/django,yewang15215/django,stewartpark/django,scorphus/django,hynekcer/django,avneesh91/django,xwolf12/django,alx-eu/django,yakky/django,charettes/django,MarkusH/django,Anonymous-X6/django,GaussDing/django,quxiaolong1504/django,poiati/django,takeshineshiro/django,hynekcer/django,leeon/annotated-django,sergei-maertens/django,seanwestfall/django,henryfjordan/django,Nepherhotep/django,theo-l/django,avanov/django,sopier/django,raphaelmerx/django,etos/django,camilonova/django,BMJHayward/django,guettli/django,petecummings/django,1013553207/django,dgladkov/django,apocquet/django,oscaro/django,sephii/django,aleida/django,benspaulding/django,nealtodd/django,blighj/django,Korkki/django,sgzsh269/django,MounirMesselmeni/django,errx/django,atul-bhouraskar/django,Argon-Zhou/django,vitaly4uk/django,vitan/django,techdragon/django,harisibrahimkv/django,sam-tsai/django,Y3K/django,WSDC-NITWarangal/django,RevelSystems/django,robhudson/django,freakboy3742/django,kamyu104/django,Beauhurst/django,TridevGuha/django,delinhabit/django,aspidites/django,andresgz/django,techdragon/django,indevgr/django,TridevGuha/django,ulope/django,JorgeCoock/django,donkirkby/django,aerophile/django,matiasb/django,eyohansa/django,katrid/django,tragiclifestories/django,monetate/django,shacker/django,aleida/django,baylee/django,MounirMesselmeni/django,EmadMokhtar/Django,leereilly/django-1,rockneurotiko/django,JavML/django,mewtaylor/django,andela-ifageyinbo/django,rizumu/django,andela-ifageyinbo/django,sgzsh269/django,mattrobenolt/django,zanderle/django,duqiao/django,sarvex/django,tuhangdi/django,wetneb/django,whs/django,t0in4/django,lwiecek/django,mbox/django,feroda/django,wweiradio/django,elijah513/django,mshafiq9/django,payeldillip/django,mlavin/django,andyzsf/django,daniponi/django,vincepandolfo/django,ar45/django,quxiaolong1504/django,mlavin/django,digimarc/django,pelme/django,dursk/django,davidharrigan/django,taaviteska/django,dracos/django,avneesh91/django,gdub/django,double-y/django,anant-dev/django,pquentin/django,timgraham/django,archen/django,KokareIITP/django,yask123/django,kosz85/django,kutenai/django,ccn-2m/django,SebasSBM/django,wweiradio/django,z0by/django,labcodes/django,synasius/django,kaedroho/django,MatthewWilkes/django,wkschwartz/django,jmcarp/django,aidanlister/django,Sonicbids/django,hasadna/django,MarkusH/django,googleinterns/django,alrifqi/django,rrrene/django,dudepare/django,nju520/django,ar45/django,joequery/django,georgemarshall/django,Mixser/django,eyohansa/django,hcsturix74/django,mathspace/django,andreif/django,leekchan/django_test,MoritzS/django,django-nonrel/django,MounirMesselmeni/django,yograterol/django,Adnn/django,mattrobenolt/django,zerc/django,ironbox360/django,marcelocure/django,deployed/django,fenginx/django,syaiful6/django,edevil/django,yograterol/django,ataylor32/django,shownomercy/django,mttr/django,memtoko/django,treyhunner/django,andyzsf/django,gcd0318/django,dfunckt/django,Matt-Deacalion/django,erikr/django,zhoulingjun/django,imtapps/django-imt-fork,knifenomad/django,gchp/django,Proggie02/TestRepo,gchp/django,pauloxnet/django,uranusjr/django,andrewsmedina/django,synasius/django,fenginx/django,yigitguler/django,jyotsna1820/django,roselleebarle04/django,blueyed/django,tomchristie/django,rynomster/django,helenst/django,edmorley/django,ASCrookes/django,shacker/django,Argon-Zhou/django,nju520/django,jenalgit/django,ptoraskar/django,liu602348184/django,pjdelport/django,yamila-moreno/django,tanmaythakur/django,divio/django,szopu/django,BlindHunter/django,apollo13/django,marissazhou/django,monetate/django,jmcarp/django,Nepherhotep/django,olasitarska/django,jpic/django,litchfield/django,HousekeepLtd/django,ccn-2m/django,kutenai/django,AndrewGrossman/django,daniponi/django,frdb194/django,lzw120/django,zerc/django,spisneha25/django,jenalgit/django,filias/django,manhhomienbienthuy/django,mcardillo55/django,beni55/django,Adnn/django,koniiiik/django,crazy-canux/django,alexallah/django,dsanders11/django,extremewaysback/django,aroche/django,frishberg/django,xadahiya/django,phalt/django,phalt/django,salamer/django,divio/django,dfdx2/django,mathspace/django,jaywreddy/django,etos/django,HousekeepLtd/django,saydulk/django,marqueedev/django,EmadMokhtar/Django,kangfend/django,rwillmer/django,tbeadle/django,ebar0n/django,ataylor32/django,Adnn/django,dpetzold/django,GaussDing/django,bspink/django,mcrowson/django,webgeodatavore/django,nealtodd/django,Balachan27/django,saydulk/django,rapilabs/django,fafaman/django,YangSongzhou/django,frishberg/django,oscaro/django,rrrene/django,Beauhurst/django,dpetzold/django,PolicyStat/django,quamilek/django,chrisfranzen/django,ghedsouza/django,gohin/django,jscn/django,adelton/django,rsvip/Django,erikr/django,fpy171/django,varunnaganathan/django,dgladkov/django,willhardy/django,asser/django,mewtaylor/django,shaib/django,donkirkby/django,ebar0n/django,poiati/django,Yong-Lee/django,knifenomad/django,coldmind/django,akaihola/django,sdcooke/django,jhg/django,robhudson/django,elena/django,wweiradio/django,dfdx2/django,crazy-canux/django,zedr/django,mattseymour/django,PetrDlouhy/django,syphar/django,eyohansa/django,beck/django,jmcarp/django,synasius/django,Yong-Lee/django,kennethlove/django,marqueedev/django,shaistaansari/django,TimBuckley/effective_django,sjlehtin/django,xwolf12/django,jylaxp/django,hellhovnd/django,nhippenmeyer/django,himleyb85/django,SebasSBM/django,indevgr/django,fpy171/django,anant-dev/django,mitar/django,jaywreddy/django,timgraham/django,marcelocure/django,baylee/django,aerophile/django,rockneurotiko/django,hasadna/django,lmorchard/django,atul-bhouraskar/django,delhivery/django,alilotfi/django,techdragon/django,chyeh727/django,matiasb/django,bspink/django,darkryder/django,koniiiik/django,hottwaj/django,mitar/django,ebar0n/django,gdub/django,cobalys/django,andresgz/django,ericfc/django,blindroot/django,jgoclawski/django,petecummings/django,jyotsna1820/django,kholidfu/django,elijah513/django,dhruvagarwal/django,apocquet/django,KokareIITP/django,oinopion/django,zanderle/django,alx-eu/django,beckastar/django,pauloxnet/django,cobalys/django,jasonwzhy/django,Mixser/django,pauloxnet/django,mjtamlyn/django,BlindHunter/django,1013553207/django,jsoref/django,henryfjordan/django,denis-pitul/django,filias/django,twz915/django,knifenomad/django,BlindHunter/django,sdcooke/django,asser/django,ifduyue/django,saydulk/django,koniiiik/django,willharris/django,tanmaythakur/django,hobarrera/django,sergei-maertens/django,simonw/django,barbuza/django,supriyantomaftuh/django,evansd/django,programadorjc/django,hobarrera/django,DrMeers/django,aidanlister/django,hnakamur/django,hnakamur/django,shaistaansari/django,Y3K/django,gannetson/django,x111ong/django,arun6582/django,felixxm/django,areski/django,SoftwareMaven/django,kevintaw/django,jejimenez/django,mojeto/django,andreif/django,kennethlove/django,adamchainz/django,MoritzS/django,indevgr/django,blaze33/django,pquentin/django,tuhangdi/django,areski/django,yewang15215/django,ticosax/django,rynomster/django,ar45/django,camilonova/django,dracos/django,TimYi/django,sjlehtin/django,RaoUmer/django,follow99/django,dydek/django,yakky/django,hobarrera/django,iambibhas/django,filias/django,litchfield/django,andela-ooladayo/django,lmorchard/django,EliotBerriot/django,tysonclugg/django,litchfield/django,huang4fstudio/django,piquadrat/django,peterlauri/django,denis-pitul/django,maxsocl/django,beckastar/django,tayfun/django,whs/django,1013553207/django,vitan/django,pjdelport/django,BMJHayward/django,davidharrigan/django,waytai/django,yask123/django,mshafiq9/django,hynekcer/django,tuhangdi/django,yograterol/django,curtisstpierre/django,bobcyw/django,blighj/django,liuliwork/django,auvipy/django,ojake/django,sarvex/django,frdb194/django,indevgr/django,xadahiya/django,oinopion/django,craynot/django,aidanlister/django,dgladkov/django,rapilabs/django,wsmith323/django,theo-l/django,tayfun/django,fafaman/django,tbeadle/django,dgladkov/django,harisibrahimkv/django,x111ong/django,NullSoldier/django,akaihola/django,Leila20/django,Beauhurst/django,raphaelmerx/django,piquadrat/django,AltSchool/django,xrmx/django,claudep/django,liu602348184/django,JorgeCoock/django,mdj2/django,dydek/django,ticosax/django,vmarkovtsev/django,elky/django,hassanabidpk/django,bspink/django,yigitguler/django,denisenkom/django,rajsadho/django,simonw/django,epandurski/django,robhudson/django,sarthakmeh03/django,rlugojr/django,eyohansa/django,ulope/django,alimony/django,jrrembert/django,alilotfi/django,webgeodatavore/django,joakim-hove/django,seanwestfall/django,takeshineshiro/django,double-y/django,jhoos/django,vitan/django,petecummings/django,monetate/django,ghickman/django,ecederstrand/django,crazy-canux/django,guettli/django,blighj/django,ASCrookes/django,crazy-canux/django,stewartpark/django,mattrobenolt/django,ptoraskar/django,JavML/django,ABaldwinHunter/django-clone,shtouff/django,aroche/django,intgr/django,matiasb/django,maxsocl/django,freakboy3742/django,elijah513/django,WillGuan105/django,SebasSBM/django,mitchelljkotler/django,makinacorpus/django,stevenewey/django,curtisstpierre/django,moreati/django,hcsturix74/django,jrrembert/django,dracos/django,syphar/django,mshafiq9/django,pasqualguerrero/django,WillGuan105/django,gitaarik/django,myang321/django,vincepandolfo/django,marckuz/django,elkingtonmcb/django,elky/django,reinout/django,auready/django,Matt-Deacalion/django,Beeblio/django,riklaunim/django-custom-multisite,sgzsh269/django,himleyb85/django,codepantry/django,sephii/django,saydulk/django,gdi2290/django,charettes/django,phalt/django,coldmind/django,dex4er/django,felixxm/django,aisipos/django,rajsadho/django,denys-duchier/django,Vixionar/django,Y3K/django,kaedroho/django,gannetson/django,bliti/django-nonrel-1.5,sdcooke/django,ABaldwinHunter/django-clone-classic,postrational/django,iambibhas/django,hunter007/django,zsiciarz/django,loic/django,dfdx2/django,takis/django,delhivery/django,shownomercy/django,mewtaylor/django,adrianholovaty/django,haxoza/django,pelme/django,BrotherPhil/django,jgeskens/django,zsiciarz/django,lmorchard/django,mdj2/django,lsqtongxin/django,uranusjr/django,hkchenhongyi/django,jgoclawski/django,manhhomienbienthuy/django,freakboy3742/django,ericholscher/django,ptoraskar/django,simone/django-gb,evansd/django,solarissmoke/django,druuu/django,vsajip/django,jyotsna1820/django,ABaldwinHunter/django-clone,felixjimenez/django,craynot/django,solarissmoke/django,HonzaKral/django,mattrobenolt/django,xadahiya/django,dbaxa/django,AltSchool/django,denisenkom/django,yamila-moreno/django,RossBrunton/django,dracos/django,djbaldey/django,YangSongzhou/django,gcd0318/django,mrbox/django,leekchan/django_test,rhertzog/django,lsqtongxin/django,GitAngel/django,Endika/django,filias/django,reinout/django,simone/django-gb,krishna-pandey-git/django,hackerbot/DjangoDev,JavML/django,knifenomad/django,YYWen0o0/python-frame-django,waytai/django,liavkoren/djangoDev,PolicyStat/django,jejimenez/django,liavkoren/djangoDev,bak1an/django,frePPLe/django,gcd0318/django,jarshwah/django,alilotfi/django,hottwaj/django,ABaldwinHunter/django-clone-classic,mitchelljkotler/django,bobcyw/django,himleyb85/django,hybrideagle/django,Leila20/django,vmarkovtsev/django,andyzsf/django,whs/django,camilonova/django,intgr/django,mitya57/django,jscn/django,ryanahall/django,felixjimenez/django,zhaodelong/django,jasonbot/django,djbaldey/django,kswiat/django,shtouff/django,ojengwa/django-1,zedr/django,takeshineshiro/django,DONIKAN/django,gdi2290/django,jpic/django,yograterol/django,ojake/django,ryangallen/django,guettli/django,frePPLe/django,jnovinger/django,coldmind/django,deployed/django,elkingtonmcb/django,WSDC-NITWarangal/django,savoirfairelinux/django,katrid/django,rlugojr/django,baylee/django,ojengwa/django-1,wsmith323/django,edmorley/django,scorphus/django,abomyi/django,dbaxa/django,pipermerriam/django,avneesh91/django,akshatharaj/django,bak1an/django,jnovinger/django,ghickman/django,xrmx/django,ArnossArnossi/django,lunafeng/django,programadorjc/django,jgeskens/django,barbuza/django,yamila-moreno/django,duqiao/django,schinckel/django,sam-tsai/django,erikr/django,willhardy/django,jscn/django,blaze33/django,dydek/django,katrid/django,devops2014/djangosite,carljm/django,ziima/django,loic/django,Vixionar/django,rlugojr/django,darjeeling/django,PetrDlouhy/django,hcsturix74/django,rrrene/django,auvipy/django,lisael/pg-django,kangfend/django,feroda/django,risicle/django,ArnossArnossi/django,Vixionar/django,zhaodelong/django,seocam/django,MikeAmy/django,mcella/django,rsalmaso/django,jhg/django,liuliwork/django,liuliwork/django,bikong2/django,hnakamur/django,rwillmer/django,kutenai/django,TimYi/django,HonzaKral/django,1013553207/django,davidharrigan/django,ericfc/django,yamila-moreno/django,curtisstpierre/django,kisna72/django,ryangallen/django,WSDC-NITWarangal/django,payeldillip/django,unaizalakain/django,PetrDlouhy/django,megaumi/django,ironbox360/django,x111ong/django,ytjiang/django,Proggie02/TestRepo,MatthewWilkes/django,karyon/django,delhivery/django,fpy171/django,jsoref/django,Proggie02/TestRepo,aspidites/django,darjeeling/django,tragiclifestories/django,leereilly/django-1,MoritzS/django,Balachan27/django,digimarc/django,mitya57/django,duqiao/django,sarthakmeh03/django,dfunckt/django,kamyu104/django,hcsturix74/django,ArnossArnossi/django,davgibbs/django,ccn-2m/django,barbuza/django,myang321/django,mojeto/django,mcella/django,duqiao/django,rtindru/django,liu602348184/django,lzw120/django,alrifqi/django,andela-ifageyinbo/django,megaumi/django,gdi2290/django,z0by/django,oscaro/django,frdb194/django,tbeadle/django,oberlin/django,sdcooke/django,beni55/django,alexmorozov/django,moreati/django,ivandevp/django,dsanders11/django,jallohm/django,treyhunner/django,nhippenmeyer/django,twz915/django,kevintaw/django,archen/django,HonzaKral/django,gohin/django,irwinlove/django,rsvip/Django,tomchristie/django,mjtamlyn/django,hkchenhongyi/django,liuliwork/django,carljm/django,rtindru/django,chrishas35/django-travis-ci,lisael/pg-django,darjeeling/django,joequery/django,denys-duchier/django,x111ong/django,wweiradio/django,epandurski/django,PetrDlouhy/django,spisneha25/django,fenginx/django,ecederstrand/django,h4r5h1t/django-hauthy,jasonbot/django,jeezybrick/django,mammique/django,unaizalakain/django,xwolf12/django,dsanders11/django,delinhabit/django,nemesisdesign/django,GhostThrone/django,rapilabs/django,BlindHunter/django,ghickman/django,GaussDing/django,yask123/django,Mixser/django,akintoey/django,marckuz/django,karyon/django,googleinterns/django,yakky/django,RossBrunton/django,ajoaoff/django,sopier/django,elena/django,sbellem/django,akaariai/django,YangSongzhou/django,felixjimenez/django,ytjiang/django,ptoraskar/django,koordinates/django,chrishas35/django-travis-ci,Korkki/django,GhostThrone/django,benjaminjkraft/django,django-nonrel/django,BrotherPhil/django,syphar/django,chyeh727/django,dbaxa/django,chrisfranzen/django,Beeblio/django,loic/django,scorphus/django,loic/django,andresgz/django,alexallah/django,auready/django,cainmatt/django,github-account-because-they-want-it/django,denis-pitul/django,lsqtongxin/django,riteshshrv/django,hynekcer/django,kosz85/django,ericholscher/django,nemesisdesign/django,oscaro/django,risicle/django,ABaldwinHunter/django-clone-classic,haxoza/django,etos/django,AlexHill/django,zulip/django,vsajip/django,weiawe/django,lunafeng/django,codepantry/django,ulope/django,doismellburning/django,abomyi/django,mattseymour/django,shaib/django,codepantry/django,hackerbot/DjangoDev,adrianholovaty/django,rtindru/django,gengue/django,tomchristie/django,deployed/django,tragiclifestories/django,Nepherhotep/django,wkschwartz/django,daniponi/django,extremewaysback/django,adambrenecki/django,aisipos/django,solarissmoke/django,varunnaganathan/django,MarkusH/django,edmorley/django,zulip/django,oberlin/django,andresgz/django,tysonclugg/django,ryangallen/django,h4r5h1t/django-hauthy,phalt/django,helenst/django,nju520/django,ivandevp/django,dydek/django,darjeeling/django,krishna-pandey-git/django,rynomster/django,jaywreddy/django,jylaxp/django,mcrowson/django,willharris/django,GitAngel/django,aroche/django,t0in4/django,frankvdp/django,savoirfairelinux/django,gdub/django,leereilly/django-1,rsalmaso/django,Vixionar/django,ticosax/django,GitAngel/django,ckirby/django,willhardy/django,sbellem/django,jyotsna1820/django,felixxm/django,blaze33/django,redhat-openstack/django,avanov/django,frdb194/django,wetneb/django,zsiciarz/django,jdelight/django,gunchleoc/django,yewang15215/django,MikeAmy/django,yewang15215/django,poiati/django,supriyantomaftuh/django,kholidfu/django,ckirby/django,vsajip/django,Anonymous-X6/django,DONIKAN/django,elena/django,memtoko/django,devops2014/djangosite,Matt-Deacalion/django,bikong2/django,fafaman/django,feroda/django,donkirkby/django,moreati/django,tbeadle/django,kcpawan/django,chrisfranzen/django,jsoref/django,rmboggs/django,nemesisdesign/django,zanderle/django,mrbox/django,koordinates/django,vincepandolfo/django,baylee/django,EmadMokhtar/Django,imtapps/django-imt-fork,JorgeCoock/django,marctc/django,Beeblio/django,kangfend/django,roselleebarle04/django,dsanders11/django,DasIch/django,jn7163/django,georgemarshall/django,poiati/django,joakim-hove/django,dwightgunning/django,benjaminjkraft/django,leeon/annotated-django,jasonwzhy/django,drjeep/django,WillGuan105/django,Argon-Zhou/django,zsiciarz/django,mbox/django,vincepandolfo/django,varunnaganathan/django,bitcity/django,salamer/django,aroche/django,redhat-openstack/django,SujaySKumar/django,frePPLe/django,ytjiang/django,ojake/django,schinckel/django,koordinates/django,pipermerriam/django,wsmith323/django,NullSoldier/django,dpetzold/django,rsalmaso/django,frankvdp/django,rmboggs/django,intgr/django,ziima/django,areski/django,georgemarshall/django,errx/django,benspaulding/django,bobcyw/django,chrisfranzen/django,shacker/django,YYWen0o0/python-frame-django,hassanabidpk/django,vitaly4uk/django,DONIKAN/django,gunchleoc/django,anant-dev/django,sadaf2605/django,katrid/django,lisael/pg-django,oinopion/django,gunchleoc/django,syphar/django,waytai/django,drjeep/django,harisibrahimkv/django,dwightgunning/django,piquadrat/django,ckirby/django,Adnn/django,memtoko/django,django-nonrel/django,rockneurotiko/django,GhostThrone/django,ataylor32/django,YYWen0o0/python-frame-django,ecederstrand/django,marissazhou/django,mattseymour/django,django/django,uranusjr/django,WillGuan105/django,edevil/django,Korkki/django,RaoUmer/django,karyon/django,wetneb/django,nhippenmeyer/django,jvkops/django,mammique/django,maxsocl/django,googleinterns/django,quamilek/django,wsmith323/django,dhruvagarwal/django,jgeskens/django,avanov/django,marckuz/django,felixjimenez/django,jasonbot/django,zhaodelong/django,marqueedev/django,Yong-Lee/django,adelton/django,RevelSystems/django,ojengwa/django-1,runekaagaard/django-contrib-locking,hunter007/django,beni55/django,carljm/django,jscn/django,Beeblio/django,blindroot/django,delhivery/django,bliti/django-nonrel-1.5,rmboggs/django,ckirby/django,aspidites/django,pasqualguerrero/django,HousekeepLtd/django,tuhangdi/django,labcodes/django,chyeh727/django,marcelocure/django,claudep/django,syaiful6/django,zhoulingjun/django,TimBuckley/effective_django,dudepare/django,risicle/django,divio/django,ghickman/django,dex4er/django,mmardini/django,claudep/django,dfunckt/django,atul-bhouraskar/django,shaistaansari/django,mlavin/django,sgzsh269/django,DrMeers/django,riklaunim/django-custom-multisite,jn7163/django,stevenewey/django,jrrembert/django,krishna-pandey-git/django,MarcJoan/django,ironbox360/django,megaumi/django,tcwicklund/django,DrMeers/django,AltSchool/django,leeon/annotated-django,hellhovnd/django,bikong2/django,elkingtonmcb/django,gengue/django,cainmatt/django,programadorjc/django,akshatharaj/django,redhat-openstack/django,vmarkovtsev/django,sam-tsai/django,intgr/django,joequery/django,apocquet/django,jdelight/django,JavML/django,ajaali/django,xrmx/django,darkryder/django,ghedsouza/django,syaiful6/django,MatthewWilkes/django,BrotherPhil/django,hkchenhongyi/django,peterlauri/django,blueyed/django,django/django,jhoos/django,marcelocure/django,elky/django,ticosax/django,MarkusH/django,ziima/django,arun6582/django,davgibbs/django,robhudson/django,ghedsouza/django,SujaySKumar/django,double-y/django,ziima/django,liu602348184/django,timgraham/django,webgeodatavore/django,rhertzog/django,extremewaysback/django,lwiecek/django,jn7163/django,arun6582/django,hellhovnd/django,willharris/django,Y3K/django,jarshwah/django,marqueedev/django,tcwicklund/django,xrmx/django,caotianwei/django,b-me/django,wkschwartz/django,risicle/django,rynomster/django,auvipy/django,mrfuxi/django,mojeto/django,edmorley/django,ajoaoff/django,ebar0n/django,nielsvanoch/django,craynot/django,jaywreddy/django,huang4fstudio/django,kswiat/django,ArnossArnossi/django,Leila20/django,gcd0318/django,druuu/django,koniiiik/django,gitaarik/django,olasitarska/django,blueyed/django,mttr/django,rockneurotiko/django,django-nonrel/django-nonrel,schinckel/django,ghedsouza/django,marckuz/django,github-account-because-they-want-it/django,jeezybrick/django,aerophile/django,mcella/django,ABaldwinHunter/django-clone,rizumu/django,ojake/django,alx-eu/django,quamilek/django,atul-bhouraskar/django,gengue/django,matiasb/django,ivandevp/django,dudepare/django,neiudemo1/django,Endika/django,mcrowson/django,NullSoldier/django,Anonymous-X6/django,avanov/django,oberlin/django,ar45/django,roselleebarle04/django,Yong-Lee/django,z0by/django,treyhunner/django,jsoref/django,elkingtonmcb/django,kosz85/django,dudepare/django,seocam/django,delinhabit/django,takis/django,ajaali/django,sarvex/django,Balachan27/django,schinckel/django,mmardini/django,joakim-hove/django,dex4er/django,quamilek/django,sbellem/django,alexallah/django,pelme/django,hkchenhongyi/django,double-y/django,kisna72/django,krisys/django,akaariai/django,kswiat/django,errx/django,HonzaKral/django,AndrewGrossman/django,edevil/django,riteshshrv/django,imtapps/django-imt-fork,alimony/django,tragiclifestories/django,adelton/django,reinout/django,DasIch/django,quxiaolong1504/django,h4r5h1t/django-hauthy,hackerbot/DjangoDev,moreati/django,joakim-hove/django,sopier/django,ABaldwinHunter/django-clone,yask123/django,arun6582/django,ajoaoff/django,kosz85/django,jdelight/django,aidanlister/django,SoftwareMaven/django,andela-ooladayo/django,cainmatt/django,django-nonrel/django-nonrel,feroda/django,nealtodd/django,takeshineshiro/django,AltSchool/django,shaistaansari/django,BMJHayward/django,SoftwareMaven/django,SujaySKumar/django,zhoulingjun/django,jdelight/django,shaib/django,mammique/django,aisipos/django,litchfield/django,fafaman/django,mjtamlyn/django,akintoey/django,mitchelljkotler/django,JorgeCoock/django,ccn-2m/django,mcardillo55/django,georgemarshall/django,elky/django,makinacorpus/django,stevenewey/django,mewtaylor/django,takis/django,yceruto/django,kaedroho/django,ivandevp/django,RossBrunton/django,bitcity/django,bliti/django-nonrel-1.5,elena/django,rogerhu/django,sadaf2605/django,mrbox/django,tysonclugg/django,zanderle/django,ironbox360/django,gchp/django,taaviteska/django,doismellburning/django,KokareIITP/django,kennethlove/django,Mixser/django,kholidfu/django,ytjiang/django,aleida/django,adelton/django,ifduyue/django,KokareIITP/django,apocquet/django,karyon/django,henryfjordan/django,marctc/django,pquentin/django,lunafeng/django,mcrowson/django,haxoza/django,ericfc/django,szopu/django,mlavin/django,jvkops/django,fpy171/django,BMJHayward/django,krishna-pandey-git/django,mmardini/django,mitchelljkotler/django,kcpawan/django,gohin/django,MarcJoan/django,HousekeepLtd/django,irwinlove/django,druuu/django,mathspace/django,follow99/django,alexmorozov/django,adamchainz/django,sephii/django,lwiecek/django,daniponi/django,takis/django,nielsvanoch/django,uranusjr/django,rhertzog/django,jmcarp/django,frishberg/django,TimBuckley/effective_django,SebasSBM/django,GaussDing/django,drjeep/django,liavkoren/djangoDev,dhruvagarwal/django,hybrideagle/django,jhoos/django,peterlauri/django,rsvip/Django,ojengwa/django-1,jnovinger/django,gitaarik/django,curtisstpierre/django,marctc/django,jeezybrick/django,twz915/django,neiudemo1/django,WSDC-NITWarangal/django,raphaelmerx/django,shaib/django,kcpawan/django,ifduyue/django,alimony/django,kangfend/django,jenalgit/django,shtouff/django,abomyi/django,willharris/django,ajaali/django,TridevGuha/django,rrrene/django,whs/django,riteshshrv/django,mbox/django,akintoey/django,caotianwei/django,rwillmer/django,mjtamlyn/django,Beauhurst/django,chyeh727/django,dwightgunning/django,gdub/django,mrbox/django,jpic/django,yceruto/django,zhaodelong/django,MikeAmy/django,bak1an/django,savoirfairelinux/django,stewartpark/django,raphaelmerx/django,marctc/django,eugena/django,Anonymous-X6/django,IRI-Research/django,auvipy/django,digimarc/django,frankvdp/django,pjdelport/django,akshatharaj/django,synasius/django,avneesh91/django,RevelSystems/django,etos/django,krisys/django,manhhomienbienthuy/django,pasqualguerrero/django,jhg/django,jejimenez/django,Leila20/django,weiawe/django,denis-pitul/django,lwiecek/django,donkirkby/django,jejimenez/django,beni55/django,manhhomienbienthuy/django,xwolf12/django,doismellburning/django,digimarc/django,akintoey/django,jallohm/django,nealtodd/django,craynot/django,tomchristie/django,labcodes/django,shownomercy/django,oinopion/django,megaumi/django,huang4fstudio/django,RaoUmer/django,akaihola/django,huang4fstudio/django,coldmind/django,mshafiq9/django,petecummings/django,dursk/django,salamer/django,GhostThrone/django
|
5daf394146660b28d5d51795e5220729a9836347
|
babyonboard/api/tests/test_models.py
|
babyonboard/api/tests/test_models.py
|
from django.test import TestCase
from ..models import Temperature, HeartBeats
class TemperatureTest(TestCase):
"""Test class for temperature model"""
def setUp(self):
Temperature.objects.create(temperature=20.5)
def test_create_temperature(self):
temperature = Temperature.objects.get(temperature=20.5)
self.assertIsNotNone(temperature)
self.assertTrue(isinstance(temperature, Temperature))
self.assertEqual(temperature.__str__(), str(temperature.temperature))
class HeartBeatsTest(TestCase):
"""Test class for heartbeats model"""
def setUp(self):
HeartBeats.objects.create(beats=70)
def test_create_heartbeats(self):
heartBeats = HeartBeats.objects.get(beats=70)
self.assertIsNotNone(heartBeats)
self.assertTrue(isinstance(heartBeats, HeartBeats))
self.assertEqual(heartBeats.__str__(), str(heartBeats.beats))
|
from django.test import TestCase
from ..models import Temperature, HeartBeats, Breathing
class TemperatureTest(TestCase):
"""Test class for temperature model"""
def setUp(self):
Temperature.objects.create(temperature=20.5)
def test_create_temperature(self):
temperature = Temperature.objects.get(temperature=20.5)
self.assertIsNotNone(temperature)
self.assertTrue(isinstance(temperature, Temperature))
self.assertEqual(temperature.__str__(), str(temperature.temperature))
class HeartBeatsTest(TestCase):
"""Test class for heartbeats model"""
def setUp(self):
HeartBeats.objects.create(beats=70)
def test_create_heartbeats(self):
heartBeats = HeartBeats.objects.get(beats=70)
self.assertIsNotNone(heartBeats)
self.assertTrue(isinstance(heartBeats, HeartBeats))
self.assertEqual(heartBeats.__str__(), str(heartBeats.beats))
class BreathingTest(TestCase):
"""Test class for breathing model"""
def setUp(self):
Breathing.objects.create(is_breathing=True)
def test_create_breathing(self):
breathing = Breathing.objects.get(is_breathing=True)
self.assertIsNotNone(breathing)
self.assertTrue(isinstance(breathing, Breathing))
self.assertEqual(breathing.__str__(), str(breathing.is_breathing))
|
Implement tests for breathing model
|
Implement tests for breathing model
|
Python
|
mit
|
BabyOnBoard/BabyOnBoard-API,BabyOnBoard/BabyOnBoard-API
|
9b8f1dc59b528c57122c6bebef52cfe0545fa3a5
|
virtual_machine.py
|
virtual_machine.py
|
class VirtualMachine:
def __init__(self, ram_size=256, stack_size=32):
self.data = [None]*ram_size
self.stack = [None]*stack_size
self.stack_size = stack_size
self.stack_top = 0
def push(self, value):
"""Push something onto the stack."""
if self.stack_top+1 > self.stack_size:
raise IndexError
self.stack[self.stack_top] = value
self.stack_top += 1
def pop(self):
"""Pop something from the stack. Crash if empty."""
if self.stack_top == 0:
raise IndexError
self.stack_top -= 1
return self.stack.pop(self.stack_top)
def read_memory(self, index):
"""Read from memory, crashing if index is out of bounds."""
return self.data[index]
def write_memory(self, index, value):
"""Write to memory. Crash if index is out of bounds."""
self.data[index] = value
|
class VirtualMachine:
def __init__(self, bytecodes, ram_size=256, stack_size=32, executing=True):
self.bytecodes = bytecodes
self.data = [None]*ram_size
self.stack = [None]*stack_size
self.stack_size = stack_size
self.stack_top = 0
self.executing = executing
self.pc = 0
def push(self, value):
"""Push something onto the stack."""
if self.stack_top+1 > self.stack_size:
raise IndexError
self.stack[self.stack_top] = value
self.stack_top += 1
def pop(self):
"""Pop something from the stack. Crash if empty."""
if self.stack_top == 0:
raise IndexError
self.stack_top -= 1
return self.stack.pop(self.stack_top)
def read_memory(self, index):
"""Read from memory, crashing if index is out of bounds."""
return self.data[index]
def write_memory(self, index, value):
"""Write to memory. Crash if index is out of bounds."""
self.data[index] = value
def run(self):
while self.executing:
self.bytecodes[self.pc].execute(self)
if self.bytecodes[self.pc].autoincrement:
self.pc += 1
|
Add run() to the vm, which iterates through all the bytecodes and executes them
|
Add run() to the vm, which iterates through all the bytecodes and executes them
|
Python
|
bsd-3-clause
|
darbaga/simple_compiler
|
44c74743d25b1fa15d1cb1337f2c4e2d306ac6da
|
virtual_machine.py
|
virtual_machine.py
|
class VirtualMachine:
def __init__(self, ram_size=256, executing=True):
self.data = [None]*ram_size
self.stack = []
self.executing = executing
self.pc = 0
def push(self, value):
"""Push something onto the stack."""
self.stack += [value]
def pop(self):
"""Pop something from the stack. Crash if empty."""
return self.stack.pop()
def read_memory(self, index):
"""Read from memory, crashing if index is out of bounds."""
return self.data[index]
def write_memory(self, index, value):
"""Write to memory. Crash if index is out of bounds."""
self.data[index] = value
def run(self, bytecodes):
self.bytecodes = bytecodes
while self.executing:
increment = self.bytecodes[self.pc].autoincrement
self.bytecodes[self.pc].execute(self)
if increment:
self.pc += 1
|
class VirtualMachine:
def __init__(self, ram_size=512, executing=True):
self.data = {i: None for i in range(ram_size)}
self.stack = []
self.executing = executing
self.pc = 0
self.devices_start = 256
def push(self, value):
"""Push something onto the stack."""
self.stack += [value]
def pop(self):
"""Pop something from the stack. Crash if empty."""
return self.stack.pop()
def read_memory(self, index):
"""Read from memory, crashing if index is out of bounds."""
if isinstance(self.data[index], DeviceProxy):
return self.data[index].read(index)
else:
return self.data[index]
def write_memory(self, index, value):
"""Write to memory. Crash if index is out of bounds."""
if isinstance(self.data[index], DeviceProxy):
self.data[index].write(index, value)
else:
self.data[index] = value
def register_device(self, device, needed_addresses):
"""Given an instantiated device and the number of required addresses, registers it in memory"""
# If not enough addresses, just error out
if self.devices_start+needed_addresses > len(self.data):
raise Exception('Not enough addresses to allocate')
proxyed_device = DeviceProxy(device, self.devices_start)
for i in range(self.devices_start, self.devices_start+needed_addresses):
self.data[i] = proxyed_device
self.devices_start += needed_addresses
def run(self, bytecodes):
self.bytecodes = bytecodes
while self.executing:
increment = self.bytecodes[self.pc].autoincrement
self.bytecodes[self.pc].execute(self)
if increment:
self.pc += 1
|
Add facilities to register virtual devices with the vm
|
Add facilities to register virtual devices with the vm
|
Python
|
bsd-3-clause
|
darbaga/simple_compiler
|
69742860652f84fccf0bac80f0e72bb03ddf64b1
|
eve_proxy/tasks.py
|
eve_proxy/tasks.py
|
from django.conf import settings
import logging
from datetime import datetime, timedelta
from celery.decorators import task
from eve_proxy.models import CachedDocument
@task(ignore_result=True)
def clear_stale_cache(cache_extension=0):
log = clear_stale_cache.get_logger()
time = datetime.utcnow() - timedelta(seconds=cache_extension)
objs = CachedDocument.objects.filter(cached_until__lt=time)
log.info('Removing %s stale cache documents' % objs.count())
objs.delete()
@task(ignore_result=True)
def clear_old_logs():
log = clear_old_logs.get_logger()
time = datetime.utcnow() - timedelta(days=settings.EVE_PROXY_KEEP_LOGS)
objs = ApiAccessLog.objects.filter(time_access__lt=time)
log.info('Removing %s old access logs' % objs.count())
objs.delete()
|
from django.conf import settings
import logging
from datetime import datetime, timedelta
from celery.decorators import task
from eve_proxy.models import CachedDocument, ApiAccessLog
@task(ignore_result=True)
def clear_stale_cache(cache_extension=0):
log = clear_stale_cache.get_logger()
time = datetime.utcnow() - timedelta(seconds=cache_extension)
objs = CachedDocument.objects.filter(cached_until__lt=time)
log.info('Removing %s stale cache documents' % objs.count())
objs.delete()
@task(ignore_result=True)
def clear_old_logs():
log = clear_old_logs.get_logger()
time = datetime.utcnow() - timedelta(days=settings.EVE_PROXY_KEEP_LOGS)
objs = ApiAccessLog.objects.filter(time_access__lt=time)
log.info('Removing %s old access logs' % objs.count())
objs.delete()
|
Fix the log autoclean job
|
Fix the log autoclean job
|
Python
|
bsd-3-clause
|
nikdoof/test-auth
|
13b4d336f5556be0210b703aaee05e3b5224fb05
|
tests/GIR/test_001_connection.py
|
tests/GIR/test_001_connection.py
|
# coding=utf-8
import sys
import struct
import unittest
from test_000_config import TestConfig
from gi.repository import Midgard, GObject
class TestConnection(Midgard.Connection):
def __init__(self):
Midgard.init()
Midgard.Connection.__init__(self)
@staticmethod
def openConnection():
config = TestConfig()
mgd = Midgard.Connection()
mgd.open_config(config)
return mgd
class TestMethods(unittest.TestCase):
def testOpenConfig(self):
config = TestConfig()
mgd = TestConnection()
self.assertEqual(mgd.get_error_string(), "MGD_ERR_OK")
self.assertTrue(mgd.open_config(config))
self.assertEqual(mgd.get_error_string(), "MGD_ERR_OK")
def testInheritance(self):
mgd = TestConnection()
self.assertIsInstance(mgd, GObject.GObject)
if __name__ == "__main__":
unittest.main()
|
# coding=utf-8
import sys
import struct
import unittest
from test_000_config import TestConfig
from gi.repository import Midgard, GObject
class TestConnection(Midgard.Connection):
def __init__(self):
Midgard.init()
Midgard.Connection.__init__(self)
@staticmethod
def openConnection():
config = TestConfig()
mgd = Midgard.Connection()
if mgd.open_config(config) is True:
return mgd
print mgd.get_error_string()
return None
class TestMethods(unittest.TestCase):
def testOpenConfig(self):
config = TestConfig()
mgd = TestConnection()
self.assertEqual(mgd.get_error_string(), "MGD_ERR_OK")
self.assertTrue(mgd.open_config(config))
self.assertEqual(mgd.get_error_string(), "MGD_ERR_OK")
def testInheritance(self):
mgd = TestConnection()
self.assertIsInstance(mgd, GObject.GObject)
if __name__ == "__main__":
unittest.main()
|
Return Midgard.Connection if one is opened. None otherwise
|
Return Midgard.Connection if one is opened. None otherwise
|
Python
|
lgpl-2.1
|
midgardproject/midgard-core,piotras/midgard-core,midgardproject/midgard-core,piotras/midgard-core,midgardproject/midgard-core,piotras/midgard-core,piotras/midgard-core,midgardproject/midgard-core
|
bdcfb1ff4c076485a5fc3b00beaf81becec0717b
|
tests/utils/DependencyChecker.py
|
tests/utils/DependencyChecker.py
|
# -*- coding: utf-8 -*-
import subprocess as subp
class DependencyChecker(object):
def _check_test_dependencies(self):
for dep in self.DEPENDENCIES:
cmd = 'if hash {} 2/dev/null; then ' \
'echo 1; else echo 0; fi'.format(dep)
available = subp.check_output(cmd, shell=True, stderr=subp.STDOUT)
available = available.strip() is '1'
if not available:
error = "The required test dependency '{0}'" \
" is not available.".format(dep)
self.fail(error)
|
# -*- coding: utf-8 -*-
import sys
import subprocess as subp
class DependencyChecker(object):
def _check_test_dependencies(self):
for dep in self.DEPENDENCIES:
cmd = 'if hash {} 2/dev/null; then ' \
'echo 1; else echo 0; fi'.format(dep)
available = subp.check_output(cmd, shell=True, stderr=subp.STDOUT)
if sys.version_info >= (3, 3) and isinstance(available, bytes):
available = available.decode('utf-8')
available = available.strip() is '1'
if not available:
error = "The required test dependency '{0}'" \
" is not available.".format(dep)
self.fail(error)
|
Fix binary to str conversion
|
release/0.6.2: Fix binary to str conversion
|
Python
|
bsd-3-clause
|
nok/sklearn-porter
|
1f59870fd321be570ce6cfead96307fcc3366e09
|
d1lod/tests/test_sesame_interface.py
|
d1lod/tests/test_sesame_interface.py
|
import pytest
from d1lod.sesame import Store, Repository, Interface
from d1lod import dataone
def test_interface_can_be_created(interface):
assert isinstance(interface, Interface)
|
import pytest
from d1lod.sesame import Store, Repository, Interface
from d1lod import dataone
def test_interface_can_be_created(interface):
assert isinstance(interface, Interface)
def test_can_add_a_dataset():
"""Test whether the right triples are added when we add a known dataset.
We pass the store to this test because we'll need to specify namespaces.
"""
namespaces = {
'owl': 'http://www.w3.org/2002/07/owl#',
'rdfs': 'http://www.w3.org/2000/01/rdf-schema#',
'rdf': 'http://www.w3.org/1999/02/22-rdf-syntax-ns#',
'xsd': 'http://www.w3.org/2001/XMLSchema#',
'foaf': 'http://xmlns.com/foaf/0.1/',
'dcterms': 'http://purl.org/dc/terms/',
'datacite': 'http://purl.org/spar/datacite/',
'glbase': 'http://schema.geolink.org/',
'd1dataset': 'http://lod.dataone.org/dataset/',
'd1person': 'http://lod.dataone.org/person/',
'd1org': 'http://lod.dataone.org/organization/',
'd1node': 'https://cn.dataone.org/cn/v1/node/',
'd1landing': 'https://search.dataone.org/#view/'
}
store = Store('localhost', 8080)
repo = Repository(store, 'test', ns = namespaces)
interface = Interface(repo)
repo.clear()
identifier = 'doi:10.6073/AA/knb-lter-cdr.70061.123'
doc = dataone.getSolrIndexFields(identifier)
interface.addDataset(doc)
assert interface.repository.size() == 20
|
Add repository test for adding a dataset
|
Add repository test for adding a dataset
|
Python
|
apache-2.0
|
ec-geolink/d1lod,ec-geolink/d1lod,ec-geolink/d1lod,ec-geolink/d1lod
|
5b9a76dc525f480a08ccbbedcbb3866faa5a50f3
|
django/santropolFeast/member/tests.py
|
django/santropolFeast/member/tests.py
|
from django.test import TestCase
from member.models import Member
from datetime import date
class MemberTestCase(TestCase):
def setUp(self):
Member.objects.create(firstname='Katrina', birthdate=date(1980, 4, 19))
def test_age_on_date(self):
"""The age on given date is properly computed"""
katrina = Member.objects.get(firstname='Katrina')
self.assertEqual(katrina.age_on_date(date(2016, 4, 19)), 36)
self.assertEqual(katrina.age_on_date(date(1950, 4, 19)), 0)
self.assertEqual(katrina.age_on_date(katrina.birthdate), 0)
|
from django.test import TestCase
from member.models import Member, Client
from datetime import date
class MemberTestCase(TestCase):
def setUp(self):
Member.objects.create(firstname='Katrina', lastname='Heide', birthdate=date(1980, 4, 19))
def test_age_on_date(self):
"""The age on given date is properly computed"""
katrina = Member.objects.get(firstname='Katrina')
self.assertEqual(katrina.age_on_date(date(2016, 4, 19)), 36)
self.assertEqual(katrina.age_on_date(date(1950, 4, 19)), 0)
self.assertEqual(katrina.age_on_date(katrina.birthdate), 0)
def test_str_is_fullname(self):
"""The member model must be rendered using the firstname and the lastname"""
member = Member.objects.get(firstname='Katrina')
self.assertEqual(str(member), 'Katrina Heide')
|
Add unit test on __str__ function
|
Add unit test on __str__ function
|
Python
|
agpl-3.0
|
madmath/sous-chef,madmath/sous-chef,savoirfairelinux/sous-chef,savoirfairelinux/santropol-feast,savoirfairelinux/sous-chef,savoirfairelinux/santropol-feast,madmath/sous-chef,savoirfairelinux/santropol-feast,savoirfairelinux/sous-chef
|
2e0030966f1f0baed8b9fda5e18cd01d8d0495d5
|
src/eduid_common/api/schemas/base.py
|
src/eduid_common/api/schemas/base.py
|
# -*- coding: utf-8 -*-
from marshmallow import Schema, fields
__author__ = 'lundberg'
class FluxStandardAction(Schema):
class Meta:
strict = True
type = fields.String(required=True)
payload = fields.Raw(required=False)
error = fields.Boolean(required=False)
meta = fields.Raw(required=False)
|
# -*- coding: utf-8 -*-
from marshmallow import Schema, fields, validates_schema, ValidationError
__author__ = 'lundberg'
class EduidSchema(Schema):
class Meta:
strict = True
@validates_schema(pass_original=True)
def check_unknown_fields(self, data, original_data):
for key in original_data:
if key not in self.fields:
raise ValidationError('Unknown field name: {!s}'.format(key))
class FluxStandardAction(EduidSchema):
type = fields.String(required=True)
payload = fields.Raw(required=False)
error = fields.Boolean(required=False)
meta = fields.Raw(required=False)
|
Add a schema with some functionality to inherit from
|
Add a schema with some functionality to inherit from
|
Python
|
bsd-3-clause
|
SUNET/eduid-common
|
857cbff1e8ec6e4db4ac25ad10a41311f3afcd66
|
pombola/core/migrations/0049_del_userprofile.py
|
pombola/core/migrations/0049_del_userprofile.py
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
from django.db.utils import DatabaseError
from django.contrib.contenttypes.models import ContentType
class Migration(SchemaMigration):
def forwards(self, orm):
# Do the deletes in a separate transaction, as database errors when
# deleting a table that does not exist would cause a transaction to be
# rolled back
db.start_transaction()
ContentType.objects.filter(app_label='user_profile').delete()
# Commit the deletes to the various tables.
db.commit_transaction()
try:
db.delete_table('user_profile_userprofile')
except DatabaseError:
# table does not exist to delete, probably because the database was
# not created at a time when the user_profile app was still in use.
pass
def backwards(self, orm):
# There is no backwards - to create the user_profile tables again add the app
# back in and letting its migrations do the work.
pass
models = {}
complete_apps = ['user_profile']
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from south.models import MigrationHistory
from django.db import models
from django.db.utils import DatabaseError
from django.contrib.contenttypes.models import ContentType
class Migration(SchemaMigration):
def forwards(self, orm):
# Do the deletes in a separate transaction, as database errors when
# deleting a table that does not exist would cause a transaction to be
# rolled back
db.start_transaction()
ContentType.objects.filter(app_label='user_profile').delete()
# Remove the entries from South's tables as we don't want to leave
# incorrect entries in there.
MigrationHistory.objects.filter(app_name='user_profile').delete()
# Commit the deletes to the various tables.
db.commit_transaction()
try:
db.delete_table('user_profile_userprofile')
except DatabaseError:
# table does not exist to delete, probably because the database was
# not created at a time when the user_profile app was still in use.
pass
def backwards(self, orm):
# There is no backwards - to create the user_profile tables again add the app
# back in and letting its migrations do the work.
pass
models = {}
complete_apps = ['user_profile']
|
Delete entries from the South migration history too
|
Delete entries from the South migration history too
|
Python
|
agpl-3.0
|
mysociety/pombola,mysociety/pombola,patricmutwiri/pombola,hzj123/56th,patricmutwiri/pombola,geoffkilpin/pombola,ken-muturi/pombola,mysociety/pombola,patricmutwiri/pombola,hzj123/56th,geoffkilpin/pombola,hzj123/56th,mysociety/pombola,ken-muturi/pombola,hzj123/56th,mysociety/pombola,patricmutwiri/pombola,ken-muturi/pombola,hzj123/56th,ken-muturi/pombola,patricmutwiri/pombola,geoffkilpin/pombola,geoffkilpin/pombola,geoffkilpin/pombola,patricmutwiri/pombola,mysociety/pombola,hzj123/56th,geoffkilpin/pombola,ken-muturi/pombola,ken-muturi/pombola
|
df48b6854b8c57cfc48747a266c10a0fc4e78d70
|
api/v2/serializers/details/image_version.py
|
api/v2/serializers/details/image_version.py
|
from core.models import ApplicationVersion as ImageVersion
from rest_framework import serializers
from api.v2.serializers.summaries import (
LicenseSummarySerializer,
UserSummarySerializer,
IdentitySummarySerializer,
ImageVersionSummarySerializer)
from api.v2.serializers.fields import ProviderMachineRelatedField
class ImageVersionSerializer(serializers.HyperlinkedModelSerializer):
"""
Serializer for ApplicationVersion (aka 'image_version')
"""
# NOTE: Implicitly included via 'fields'
# id, application
parent = ImageVersionSummarySerializer()
# name, change_log, allow_imaging
licenses = LicenseSummarySerializer(many=True, read_only=True) # NEW
membership = serializers.SlugRelatedField(
slug_field='name',
read_only=True,
many=True) # NEW
user = UserSummarySerializer(source='created_by')
identity = IdentitySummarySerializer(source='created_by_identity')
machines = ProviderMachineRelatedField(many=True)
start_date = serializers.DateTimeField()
end_date = serializers.DateTimeField(allow_null=True)
class Meta:
model = ImageVersion
view_name = 'api:v2:providermachine-detail'
fields = ('id', 'parent', 'name', 'change_log',
'machines', 'allow_imaging',
'licenses', 'membership',
'user', 'identity',
'start_date', 'end_date')
|
from core.models import ApplicationVersion as ImageVersion
from rest_framework import serializers
from api.v2.serializers.summaries import (
LicenseSummarySerializer,
UserSummarySerializer,
IdentitySummarySerializer,
ImageSummarySerializer,
ImageVersionSummarySerializer)
from api.v2.serializers.fields import ProviderMachineRelatedField
class ImageVersionSerializer(serializers.HyperlinkedModelSerializer):
"""
Serializer for ApplicationVersion (aka 'image_version')
"""
# NOTE: Implicitly included via 'fields'
# id, application
parent = ImageVersionSummarySerializer()
# name, change_log, allow_imaging
licenses = LicenseSummarySerializer(many=True, read_only=True) # NEW
membership = serializers.SlugRelatedField(
slug_field='name',
read_only=True,
many=True) # NEW
user = UserSummarySerializer(source='created_by')
identity = IdentitySummarySerializer(source='created_by_identity')
machines = ProviderMachineRelatedField(many=True)
image = ImageSummarySerializer(source='application')
start_date = serializers.DateTimeField()
end_date = serializers.DateTimeField(allow_null=True)
class Meta:
model = ImageVersion
view_name = 'api:v2:providermachine-detail'
fields = ('id', 'parent', 'name', 'change_log',
'image', 'machines', 'allow_imaging',
'licenses', 'membership',
'user', 'identity',
'start_date', 'end_date')
|
Add 'image' to the attrs for VersionDetails
|
Add 'image' to the attrs for VersionDetails
|
Python
|
apache-2.0
|
CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend
|
ca4cede86022cc7784214cc1823e9b2886e3625b
|
IPython/nbconvert/exporters/python.py
|
IPython/nbconvert/exporters/python.py
|
"""Python script Exporter class"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from IPython.utils.traitlets import Unicode
from .templateexporter import TemplateExporter
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class PythonExporter(TemplateExporter):
"""
Exports a Python code file.
"""
file_extension = Unicode(
'py', config=True,
help="Extension of the file that should be written to disk")
def _output_mimetype_default(self):
return 'text/x-python'
|
"""Python script Exporter class"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from IPython.utils.traitlets import Unicode
from .templateexporter import TemplateExporter
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class PythonExporter(TemplateExporter):
"""
Exports a Python code file.
"""
file_extension = Unicode(
'py', config=True,
help="Extension of the file that should be written to disk")
output_mimetype = 'text/x-python'
|
Make output_mimetype accessible from the class.
|
Make output_mimetype accessible from the class.
Traitlets are only accessible by instantiating the class. There's no
clear reason that this needs to be a traitlet, anyway.
|
Python
|
bsd-3-clause
|
cornhundred/ipywidgets,ipython/ipywidgets,jupyter-widgets/ipywidgets,cornhundred/ipywidgets,ipython/ipywidgets,SylvainCorlay/ipywidgets,cornhundred/ipywidgets,ipython/ipywidgets,cornhundred/ipywidgets,jupyter-widgets/ipywidgets,SylvainCorlay/ipywidgets,jupyter-widgets/ipywidgets,ipython/ipywidgets,ipython/ipywidgets,jupyter-widgets/ipywidgets,SylvainCorlay/ipywidgets,cornhundred/ipywidgets,SylvainCorlay/ipywidgets
|
8ddd296052cfe9c8293806af397bb746fd2ebd19
|
IPython/html/terminal/__init__.py
|
IPython/html/terminal/__init__.py
|
import os
from terminado import NamedTermManager
from .handlers import TerminalHandler, NewTerminalHandler, TermSocket
from . import api_handlers
def initialize(webapp):
shell = os.environ.get('SHELL', 'sh')
webapp.terminal_manager = NamedTermManager(shell_command=[shell])
handlers = [
(r"/terminals/new", NewTerminalHandler),
(r"/terminals/(\w+)", TerminalHandler),
(r"/terminals/websocket/(\w+)", TermSocket,
{'term_manager': webapp.terminal_manager}),
(r"/api/terminals", api_handlers.TerminalRootHandler),
(r"/api/terminals/(\w+)", api_handlers.TerminalHandler),
]
webapp.add_handlers(".*$", handlers)
|
import os
from terminado import NamedTermManager
from IPython.html.utils import url_path_join as ujoin
from .handlers import TerminalHandler, NewTerminalHandler, TermSocket
from . import api_handlers
def initialize(webapp):
shell = os.environ.get('SHELL', 'sh')
webapp.terminal_manager = NamedTermManager(shell_command=[shell])
base_url = webapp.settings['base_url']
handlers = [
(ujoin(base_url, "/terminals/new"), NewTerminalHandler),
(ujoin(base_url, r"/terminals/(\w+)"), TerminalHandler),
(ujoin(base_url, r"/terminals/websocket/(\w+)"), TermSocket,
{'term_manager': webapp.terminal_manager}),
(ujoin(base_url, r"/api/terminals"), api_handlers.TerminalRootHandler),
(ujoin(base_url, r"/api/terminals/(\w+)"), api_handlers.TerminalHandler),
]
webapp.add_handlers(".*$", handlers)
|
Put terminal handlers under base_url
|
Put terminal handlers under base_url
|
Python
|
bsd-3-clause
|
ipython/ipython,ipython/ipython
|
29315da6d17dd30c957afb1b297cc4d6d335a284
|
geotrek/core/tests/test_factories.py
|
geotrek/core/tests/test_factories.py
|
from django.test import TestCase
from .. import factories
class CoreFactoriesTest(TestCase):
"""
Ensure factories work as expected.
Here we just call each one to ensure they do not trigger any random
error without verifying any other expectation.
"""
def test_path_factory(self):
factories.PathFactory()
def test_topology_mixin_factory(self):
factories.TopologyFactory()
def test_path_aggregation_factory(self):
factories.PathAggregationFactory()
def test_source_management_factory(self):
factories.PathSourceFactory()
def test_challenge_management_factory(self):
factories.StakeFactory()
def test_usage_management_factory(self):
factories.UsageFactory()
def test_network_management_factory(self):
factories.NetworkFactory()
def test_path_management_factory(self):
factories.TrailFactory()
|
from django.test import TestCase
from .. import factories
class CoreFactoriesTest(TestCase):
"""
Ensure factories work as expected.
Here we just call each one to ensure they do not trigger any random
error without verifying any other expectation.
"""
def test_path_factory(self):
factories.PathFactory()
def test_topology_mixin_factory(self):
factories.TopologyFactory()
def test_path_aggregation_factory(self):
factories.PathAggregationFactory()
def test_source_management_factory(self):
factories.PathSourceFactory()
def test_challenge_management_factory(self):
factories.StakeFactory()
def test_usage_management_factory(self):
factories.UsageFactory()
def test_network_management_factory(self):
factories.NetworkFactory()
def test_path_management_factory(self):
factories.TrailFactory()
def test_path_in_bounds_existing_factory(self):
factories.PathFactory.create()
factories.PathInBoundsExistingGeomFactory()
def test_path_in_bounds_not_existing_factory(self):
with self.assertRaises(IndexError):
factories.PathInBoundsExistingGeomFactory()
|
Add cover test : core factories
|
Add cover test : core factories
|
Python
|
bsd-2-clause
|
GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,makinacorpus/Geotrek
|
1101ce85635e49ad3cbd1f88e7dae1b77f45514c
|
ava/text_to_speech/__init__.py
|
ava/text_to_speech/__init__.py
|
import time
import os
from tempfile import NamedTemporaryFile
from sys import platform as _platform
from gtts import gTTS
from pygame import mixer
from .playsound import playsound
from ..components import _BaseComponent
class TextToSpeech(_BaseComponent):
def __init__(self, queues):
super().__init__(queues)
self.queue_tts = None
def setup(self):
self.queue_tts = self._queues['QueueTextToSpeech']
def run(self):
while self._is_init:
sentence = self.queue_tts.get()
if sentence is None:
break
print('To say out loud : {}'.format(sentence))
tts = gTTS(text=sentence, lang='en')
if _platform == "darwin":
with NamedTemporaryFile() as audio_file:
tts.write_to_fp(audio_file)
audio_file.seek(0)
playsound(audio_file.name)
else:
filename = os.environ['TMP'] + str(
time.time()).split('.')[0] + ".mp3"
tts.save(filename)
if _platform == "linux" or _platform == "linux2":
mixer.init()
mixer.music.load(filename)
mixer.music.play()
else:
playsound(filename)
os.remove(filename)
self.queue_tts.task_done()
def stop(self):
print('Stopping {0}...'.format(self.__class__.__name__))
self._is_init = False
self.queue_tts.put(None)
|
import time
import os
import tempfile
from sys import platform as _platform
from gtts import gTTS
from pygame import mixer
from .playsound import playsound
from ..components import _BaseComponent
class TextToSpeech(_BaseComponent):
def __init__(self, queues):
super().__init__(queues)
self.queue_tts = None
def setup(self):
self.queue_tts = self._queues['QueueTextToSpeech']
def run(self):
while self._is_init:
sentence = self.queue_tts.get()
if sentence is None:
break
print('To say out loud : {}'.format(sentence))
tts = gTTS(text=sentence, lang='en')
if _platform == "darwin":
with tempfile.NamedTemporaryFile() as audio_file:
tts.write_to_fp(audio_file)
audio_file.seek(0)
playsound(audio_file.name)
else:
filename = tempfile.gettempdir() + '/' + str(time.time()).split('.')[0] + ".mp3"
tts.save(filename)
if _platform == "linux" or _platform == "linux2":
mixer.init()
mixer.music.load(filename)
mixer.music.play()
else:
playsound(filename)
os.remove(filename)
self.queue_tts.task_done()
def stop(self):
print('Stopping {0}...'.format(self.__class__.__name__))
self._is_init = False
self.queue_tts.put(None)
|
Add better handling of temp file
|
Add better handling of temp file
|
Python
|
mit
|
ava-project/AVA
|
c692038646417dfcd2e41f186b5814b3978847b6
|
conf_site/core/context_processors.py
|
conf_site/core/context_processors.py
|
from django.conf import settings
from django.utils import timezone
import pytz
def core_context(self):
"""Context processor for elements appearing on every page."""
context = {}
context["google_analytics_id"] = settings.GOOGLE_ANALYTICS_PROPERTY_ID
context["sentry_public_dsn"] = settings.SENTRY_PUBLIC_DSN
return context
def time_zone_context(self):
context = {}
# Duplicate the functionality of django.template.context_processors.tz.
context["TIME_ZONE"] = timezone.get_current_timezone_name()
# Add a list of time zones to the context.
context["TIME_ZONES"] = pytz.common_timezones
return context
|
from django.conf import settings
from django.contrib.sites.models import Site
from django.utils import timezone
import pytz
def core_context(self):
"""Context processor for elements appearing on every page."""
context = {}
context["conference_title"] = Site.objects.get_current().name
context["google_analytics_id"] = settings.GOOGLE_ANALYTICS_PROPERTY_ID
context["sentry_public_dsn"] = settings.SENTRY_PUBLIC_DSN
return context
def time_zone_context(self):
context = {}
# Duplicate the functionality of django.template.context_processors.tz.
context["TIME_ZONE"] = timezone.get_current_timezone_name()
# Add a list of time zones to the context.
context["TIME_ZONES"] = pytz.common_timezones
return context
|
Fix conference title context processor.
|
Fix conference title context processor.
|
Python
|
mit
|
pydata/conf_site,pydata/conf_site,pydata/conf_site
|
5f40bbf76cacb491b52d41536935ac0442f8aaba
|
superdesk/io/feed_parsers/pa_nitf.py
|
superdesk/io/feed_parsers/pa_nitf.py
|
#!/usr/bin/env python
# -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014, 2015 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
from superdesk.io.feed_parsers.nitf import NITFFeedParser
from superdesk.io import register_feed_parser
class PAFeedParser(NITFFeedParser):
"""
NITF Parser extension for Press Association, it maps the category meta tag to an anpa category
"""
NAME = 'pa_nitf'
def _category_mapping(self, elem):
"""
Map the category supplied by PA to a best guess anpa_category in the system
:param elem:
:return: anpa category list qcode
"""
category = elem.get('content')[:1].upper()
if category in ('S', 'R', 'F'):
return [{'qcode': 'S'}]
if category == 'Z':
return [{'qcode': 'V'}]
return [{'qcode': 'I'}]
def __init__(self):
self.MAPPING = {'anpa_category': {'xpath': "head/meta/[@name='category']", 'filter': self._category_mapping}}
super().__init__()
register_feed_parser(PAFeedParser.NAME, PAFeedParser())
|
#!/usr/bin/env python
# -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014, 2015 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
from superdesk.io.feed_parsers.nitf import NITFFeedParser
from superdesk.io import register_feed_parser
class PAFeedParser(NITFFeedParser):
"""
NITF Parser extension for Press Association, it maps the category meta tag to an anpa category
"""
NAME = 'pa_nitf'
def _category_mapping(self, elem):
"""
Map the category supplied by PA to a best guess anpa_category in the system
:param elem:
:return: anpa category list qcode
"""
if elem.get('content') is not None:
category = elem.get('content')[:1].upper()
if category in {'S', 'R', 'F'}:
return [{'qcode': 'S'}]
if category == 'Z':
return [{'qcode': 'V'}]
return [{'qcode': 'I'}]
def __init__(self):
self.MAPPING = {'anpa_category': {'xpath': "head/meta/[@name='category']", 'filter': self._category_mapping}}
super().__init__()
register_feed_parser(PAFeedParser.NAME, PAFeedParser())
|
Use a set for comparison
|
Use a set for comparison
|
Python
|
agpl-3.0
|
superdesk/superdesk-core,mdhaman/superdesk-core,petrjasek/superdesk-core,superdesk/superdesk-core,superdesk/superdesk-core,petrjasek/superdesk-core,marwoodandrew/superdesk-core,superdesk/superdesk-core,mugurrus/superdesk-core,mugurrus/superdesk-core,mdhaman/superdesk-core,ioanpocol/superdesk-core,mdhaman/superdesk-core,hlmnrmr/superdesk-core,ancafarcas/superdesk-core,hlmnrmr/superdesk-core,ioanpocol/superdesk-core,mugurrus/superdesk-core,petrjasek/superdesk-core,ioanpocol/superdesk-core,nistormihai/superdesk-core,ancafarcas/superdesk-core,nistormihai/superdesk-core,marwoodandrew/superdesk-core,petrjasek/superdesk-core
|
53b920751de0e620792511df406805a5cea420cb
|
ideascaly/utils.py
|
ideascaly/utils.py
|
# IdeaScaly
# Copyright 2015 Jorge Saldivar
# See LICENSE for details.
import six
import dateutil.parser
def parse_datetime(str_date):
try:
date_is = dateutil.parser.parse(str_date)
return date_is
except:
return None
def parse_html_value(html):
return html[html.find('>')+1:html.rfind('<')]
def parse_a_href(atag):
start = atag.find('"') + 1
end = atag.find('"', start)
return atag[start:end]
def convert_to_utf8_str(arg):
# written by Michael Norton (http://docondev.blogspot.com/)
if isinstance(arg, bytes):
arg = arg.decode('ascii')
if isinstance(arg, six.text_type):
arg = arg.encode('utf-8')
elif not isinstance(arg, bytes):
arg = six.text_type(arg).encode('utf-8')
return arg
def import_simplejson():
try:
import simplejson as json
except ImportError:
try:
import json # Python 2.6+
except ImportError:
raise ImportError("Can't load a json library")
return json
|
# IdeaScaly
# Copyright 2015 Jorge Saldivar
# See LICENSE for details.
import six
import dateutil.parser
def parse_datetime(str_date):
try:
date_is = dateutil.parser.parse(str_date)
return date_is
except:
return None
def parse_html_value(html):
return html[html.find('>')+1:html.rfind('<')]
def parse_a_href(atag):
start = atag.find('"') + 1
end = atag.find('"', start)
return atag[start:end]
def convert_to_utf8_str(arg):
# written by Michael Norton (http://docondev.blogspot.com/)
if isinstance(arg, six.text_type):
arg = arg.encode('utf-8')
elif not isinstance(arg, bytes):
arg = six.text_type(arg).encode('utf-8')
return arg
def import_simplejson():
try:
import simplejson as json
except ImportError:
try:
import json # Python 2.6+
except ImportError:
raise ImportError("Can't load a json library")
return json
|
Remove conditional that checked type of arg
|
Remove conditional that checked type of arg
|
Python
|
mit
|
joausaga/ideascaly
|
526b1028925a59957e805b29fc624dae318661ef
|
finances/models.py
|
finances/models.py
|
import os
import hashlib
import datetime
import peewee
database = peewee.Proxy()
class BaseModel(peewee.Model):
class Meta:
database = database
class User(BaseModel):
id = peewee.IntegerField(primary_key=True)
name = peewee.CharField(unique=True)
password = peewee.CharField()
salt = peewee.CharField(default=os.urandom(10).decode('cp1251', errors='replace'))
join_date = peewee.DateTimeField(default=datetime.datetime.now)
class AuthError(Exception):
pass
class RegisterError(Exception):
pass
@classmethod
def auth(cls, name, password):
user = User.get(name=name)
pass_with_salt = password + user.salt
pass_hash = hashlib.sha224(pass_with_salt.encode()).hexdigest()
if not pass_hash == user.password:
raise cls.AuthError('Wrong password!')
return user
@classmethod
def register(cls, name, password):
try:
User.get(name=name)
raise cls.RegisterError('User with that name does exist')
except User.DoesNotExist:
pass
user = User(name=name)
pass_with_salt = password + user.salt
user.password = hashlib.sha224(pass_with_salt.encode()).hexdigest()
user.save()
|
import os
import hashlib
import datetime
import peewee
database = peewee.Proxy()
class BaseModel(peewee.Model):
class Meta:
database = database
class User(BaseModel):
id = peewee.IntegerField(primary_key=True)
name = peewee.CharField(unique=True)
password = peewee.CharField()
salt = peewee.CharField(default=os.urandom(10).decode('cp1251', errors='replace'))
join_date = peewee.DateTimeField(default=datetime.datetime.now)
class AuthError(Exception):
pass
class RegisterError(Exception):
pass
@classmethod
def auth(cls, name, password):
user = User.get(name=name)
pass_with_salt = password + user.salt
pass_hash = hashlib.sha224(pass_with_salt.encode()).hexdigest()
if not pass_hash == user.password:
raise cls.AuthError('Wrong password!')
return user
@classmethod
def register(cls, name, password):
try:
User.get(name=name)
raise cls.RegisterError('User with that name does exist')
except User.DoesNotExist:
pass
user = User(name=name)
pass_with_salt = password + user.salt
user.password = hashlib.sha224(pass_with_salt.encode()).hexdigest()
user.save()
def __repr__(self):
return '<User %r>' % self.username
|
Add __repr__ for User model
|
Add __repr__ for User model
|
Python
|
mit
|
Afonasev/YourFinances
|
149e9e2aab4922634e0ab5f130f9a08f9fda7d17
|
podcast/templatetags/podcast_tags.py
|
podcast/templatetags/podcast_tags.py
|
from __future__ import unicode_literals
import re
from django import template
from django.contrib.sites.shortcuts import get_current_site
from django.contrib.syndication.views import add_domain
from django.template import TemplateSyntaxError
from django.utils.translation import ugettext_lazy as _
register = template.Library()
@register.simple_tag(takes_context=True)
def show_url(context, *args, **kwargs):
"""Return the show feed URL with different protocol."""
if len(kwargs) != 2:
raise TemplateSyntaxError(_('"show_url" tag takes exactly two keyword arguments.'))
current_site = get_current_site(context['request'])
url = add_domain(current_site.domain, kwargs['url'])
return re.sub(r'https?:\/\/', '%s://' % kwargs['protocol'], url)
|
from __future__ import unicode_literals
import re
from django import template
from django.contrib.sites.shortcuts import get_current_site
from django.contrib.syndication.views import add_domain
from django.template import TemplateSyntaxError
from django.utils.translation import ugettext_lazy as _
register = template.Library()
@register.simple_tag(takes_context=True)
def show_url(context, *args, **kwargs):
"""Return the show feed URL with different protocol."""
if len(kwargs) != 2:
raise TemplateSyntaxError(_('"show_url" tag takes exactly two keyword arguments.'))
try:
request = context['request']
except IndexError:
raise TemplateSyntaxError(_('"show_url" tag requires request in the template context. Add the request context processor to settings.'))
current_site = get_current_site(request)
url = add_domain(current_site.domain, kwargs['url'])
return re.sub(r'https?:\/\/', '%s://' % kwargs['protocol'], url)
|
Handle request in context error
|
Handle request in context error
|
Python
|
bsd-3-clause
|
richardcornish/django-itunespodcast,richardcornish/django-itunespodcast,richardcornish/django-applepodcast,richardcornish/django-applepodcast
|
8931025d53f472c3f1cb9c320eb796f0ea14274e
|
dddp/msg.py
|
dddp/msg.py
|
"""Django DDP utils for DDP messaging."""
from dddp import THREAD_LOCAL as this
def obj_change_as_msg(obj, msg):
"""Generate a DDP msg for obj with specified msg type."""
serializer = this.serializer
data = serializer.serialize([obj])[0]
# collection name is <app>.<model>
name = data['model']
# cast ID as string
if not isinstance(data['pk'], basestring):
data['pk'] = '%d' % data['pk']
payload = {
'msg': msg,
'collection': name,
'id': data['pk'],
}
if msg != 'removed':
payload['fields'] = data['fields']
return (name, payload)
|
"""Django DDP utils for DDP messaging."""
from copy import deepcopy
from dddp import THREAD_LOCAL as this
from django.db.models.expressions import ExpressionNode
def obj_change_as_msg(obj, msg):
"""Generate a DDP msg for obj with specified msg type."""
# check for F expressions
exps = [
name for name, val in vars(obj).items()
if isinstance(val, ExpressionNode)
]
if exps:
# clone and update obj with values but only for the expression fields
obj = deepcopy(obj)
# Django 1.8 makes obj._meta public --> pylint: disable=W0212
for name, val in obj._meta.model.objects.values(*exps).get(pk=obj.pk):
setattr(obj, name, val)
# run serialization now that all fields are "concrete" (not F expressions)
serializer = this.serializer
data = serializer.serialize([obj])[0]
# collection name is <app>.<model>
name = data['model']
# cast ID as string
if not isinstance(data['pk'], basestring):
data['pk'] = '%d' % data['pk']
payload = {
'msg': msg,
'collection': name,
'id': data['pk'],
}
if msg != 'removed':
payload['fields'] = data['fields']
return (name, payload)
|
Support serializing objects that are saved with F expressions by reading field values for F expressions from database explicitly before serializing.
|
Support serializing objects that are saved with F expressions by reading field values for F expressions from database explicitly before serializing.
|
Python
|
mit
|
django-ddp/django-ddp,commoncode/django-ddp,django-ddp/django-ddp,commoncode/django-ddp,commoncode/django-ddp,commoncode/django-ddp,django-ddp/django-ddp,django-ddp/django-ddp
|
c81cc838d6e8109020dafae7e4ed1ff5aa7ebb88
|
invoke/__init__.py
|
invoke/__init__.py
|
from ._version import __version_info__, __version__ # noqa
from .tasks import task, ctask, Task # noqa
from .collection import Collection # noqa
from .context import Context # noqa
def run(command, **kwargs):
"""
Invoke ``command`` in a subprocess and return a `.Result` object.
This function is simply a convenience wrapper for creating an anonymous
`.Context` object and calling its `.Context.run` method, which lets you use
Invoke's powerful local command execution without requiring the rest of its
API.
"""
return Context().run(command, **kwargs)
|
from ._version import __version_info__, __version__ # noqa
from .tasks import task, ctask, Task # noqa
from .collection import Collection # noqa
from .context import Context # noqa
from .config import Config # noqa
def run(command, **kwargs):
"""
Invoke ``command`` in a subprocess and return a `.Result` object.
This function is simply a convenience wrapper for creating an anonymous
`.Context` object and calling its `.Context.run` method, which lets you use
Invoke's powerful local command execution without requiring the rest of its
API.
"""
return Context().run(command, **kwargs)
|
Add Config to root convenience imports
|
Add Config to root convenience imports
|
Python
|
bsd-2-clause
|
pyinvoke/invoke,mattrobenolt/invoke,frol/invoke,mkusz/invoke,pfmoore/invoke,mkusz/invoke,pyinvoke/invoke,mattrobenolt/invoke,pfmoore/invoke,kejbaly2/invoke,kejbaly2/invoke,tyewang/invoke,frol/invoke,singingwolfboy/invoke
|
931fca0b7cca4a631388eeb6114145c8d4ff6e18
|
lims/celery.py
|
lims/celery.py
|
import os
from celery import Celery
from celery.schedules import crontab
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'lims.settings')
app = Celery('lims', broker='redis://localhost', backend='redis')
app.config_from_object('django.conf:settings', namespace='CELERY')
app.autodiscover_tasks()
app.conf.beat_schedule = {
'process-deadlines': {
'task': 'lims.projects.tasks.process_deadlines',
'schedule': crontab(minute=0, hour='*/3'),
}
}
|
import os
from celery import Celery
from celery.schedules import crontab
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'lims.settings')
app = Celery('lims', broker=os.environ.get('REDIS_URL', 'redis://127.0.0.1:6379'), backend='redis')
app.config_from_object('django.conf:settings', namespace='CELERY')
app.autodiscover_tasks()
app.conf.beat_schedule = {
'process-deadlines': {
'task': 'lims.projects.tasks.process_deadlines',
'schedule': crontab(minute=0, hour='*/3'),
}
}
|
Extend time period at which deadline processing happens
|
Extend time period at which deadline processing happens
|
Python
|
mit
|
GETLIMS/LIMS-Backend,GETLIMS/LIMS-Backend
|
15b3e52e75dddcd09b1fff807e836c60a0c62a03
|
python2.7libs/createDlp.py
|
python2.7libs/createDlp.py
|
#-------------------------------------------------------------------------------
## Description
"""
Create Cache Dependency by just cliking.
"""
#-------------------------------------------------------------------------------
__version__ = "1.0.0"
import hou
def main():
sel_cache = hou.ui.selectFile(title="Select BGEO cache")
if len(sel_cache) >= 1:
crDlp = hou.node("/shop").createNode("vm_geo_file")
crDlp.setParms({"file": sel_cache})
dlp_path = crDlp.path()
crGeo = hou.node("/obj").createNode("geo", node_name="geo_dlp1")
crGeo.setParms({"shop_geometrypath": dlp_path})
fileobj = crGeo.node("file1")
fileobj.destroy()
|
#-------------------------------------------------------------------------------
## Description
"""
Create Cache Dependency by just cliking.
"""
#-------------------------------------------------------------------------------
__version__ = '2.0.0'
#-------------------------------------------------------------------------------
import hou
HOUDINI_MAJOR_VERSION = hou.applicationVersion()[0]
def main():
sel_nodes = hou.selectedNodes()
if len(sel_nodes) > 1:
hou.ui.setStatusMessage("Single node slection is only available.",
severity=hou.severityType.Error)
return
elif sel_nodes[0].type().name() != 'geo':
hou.ui.setStatusMessage("Geometry Network node is only available.",
severity=hou.severityType.Error)
return
sel_cache = hou.ui.selectFile(title='Select BGEO cache')
if len(sel_cache) == 0:
hou.ui.setStatusMessage("Any cache file is not selected.",
severity=hou.severityType.Error)
return
if len(sel_nodes) == 1 and sel_nodes[0].type().name() == 'geo':
print 'test'
for node in sel_nodes:
dlp = createFileLoader(sel_cache)
node.setParms({'shop_geometrypath': dlp.path()})
else:
dlp = createFileLoader(sel_cache)
crGeo = hou.node('/obj').createNode('geo', node_name='geo_dlp1', run_init_scripts=True)
crGeo.setParms({'shop_geometrypath': dlp.path()})
children = crGeo.children()
for c in children:
c.destroy()
def createFileLoader(sel_cache):
if HOUDINI_MAJOR_VERSION <= 15:
dlp = hou.node('/shop').createNode('vm_geo_file')
else:
dlp = hou.node('/mat').createNode('file')
dlp.setParms({'file': sel_cache})
return dlp
|
Add functionality to Create DLP tool.
|
Add functionality to Create DLP tool.
|
Python
|
mit
|
takavfx/Bento
|
a34ce653f262888c17ae92e348adb0892b74a94c
|
download.py
|
download.py
|
import youtube_dl, os
from multiprocessing.pool import ThreadPool
from youtube_dl.utils import DownloadError
from datetime import datetime
from uuid import uuid4
class Download:
link = ""
done = False
error = False
started = None
uuid = ""
total = 0
finished = 0
title = ""
def __init__(self, link):
self.link = link
self.started = datetime.now()
self.uuid = str(uuid4())
def download(self):
curr_path = os.path.dirname(os.path.abspath(__file__))
output_path = curr_path + "/downloads/" + self.uuid + "/%(title)s-%(id)s.%(ext)s"
try:
youtube_dl._real_main(["--yes-playlist", "-R", "10", "-x", "--audio-format", "mp3",
"--output", output_path,
"--restrict-filenames", "-v", self.link])
except DownloadError:
self.error = True
finally:
self.done = True
def get_files(self):
file_path = os.path.dirname(os.path.abspath(__file__)) + "/downloads/" + self.uuid
return [f for f in os.listdir(file_path) if os.isfile(os.join(file_path, f))]
def start(self):
pool = ThreadPool()
pool.apply_async(self.download)
|
import os, youtube_dl
from youtube_dl import YoutubeDL
from multiprocessing.pool import ThreadPool
from youtube_dl.utils import DownloadError
from datetime import datetime
from uuid import uuid4
class Download:
link = ''
done = False
error = False
started = None
uuid = ''
total = 0
finished = 0
title = ''
def __init__(self, link):
self.link = link
self.started = datetime.now()
self.uuid = str(uuid4())
def __call__(self, info):
if info['status'] == 'finished':
self.finished += 1
print("\n \n INFO: " + str(info) + "\n")
def download(self):
curr_path = os.path.dirname(os.path.abspath(__file__))
output_tmpl = curr_path + '/downloads/' + self.uuid + '/%(title)s-%(id)s.%(ext)s'
try:
options = {
'format': 'bestaudio/best',
'postprocessors': [{
'key': 'FFmpegExtractAudio',
'preferredcodec': 'mp3',
'preferredquality': '5',
}],
'progress_hooks': [self],
'outtmpl': output_tmpl,
}
ydl = YoutubeDL(options)
ydl.download([self.link])
except DownloadError:
self.error = True
finally:
self.done = True
def get_files(self):
file_path = os.path.dirname(os.path.abspath(__file__)) + '/downloads/' + self.uuid
return [f for f in os.listdir(file_path) if os.isfile(os.join(file_path, f))]
def start(self):
pool = ThreadPool()
pool.apply_async(self.download)
|
Use hooks for progress updates
|
Use hooks for progress updates
|
Python
|
mit
|
pielambr/PLDownload,pielambr/PLDownload
|
7c607bff6fa043c5d380403d673ac6690a7277cc
|
meinberlin/apps/newsletters/forms.py
|
meinberlin/apps/newsletters/forms.py
|
from django import forms
from django.apps import apps
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from adhocracy4.projects.models import Project
from . import models
Organisation = apps.get_model(settings.A4_ORGANISATIONS_MODEL)
class NewsletterForm(forms.ModelForm):
class Meta:
model = models.Newsletter
fields = ['sender_name', 'sender', 'receivers', 'project',
'organisation', 'subject', 'body']
def __init__(self, user=None, organisation=None, *args, **kwargs):
super().__init__(*args, **kwargs)
choices = [(value, string)
for value, string in models.RECEIVER_CHOICES
if value != models.PLATFORM or (user and user.is_superuser)]
self.fields['receivers'] = forms.ChoiceField(
label=_('Receivers'),
choices=choices,
widget=forms.RadioSelect(),
)
project_qs = Project.objects
if organisation:
project_qs = Project.objects.filter(organisation=organisation.id)
self.fields['project'] = forms.ModelChoiceField(
label=_('Project'),
queryset=project_qs,
required=False, empty_label=None)
self.fields['organisation'] = forms.ModelChoiceField(
label=_('Organisation'),
queryset=Organisation.objects,
required=False, empty_label=None)
|
from django import forms
from django.apps import apps
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from adhocracy4.projects.models import Project
from . import models
Organisation = apps.get_model(settings.A4_ORGANISATIONS_MODEL)
class NewsletterForm(forms.ModelForm):
class Meta:
model = models.Newsletter
fields = ['sender_name', 'sender', 'receivers', 'project',
'organisation', 'subject', 'body']
def __init__(self, user=None, organisation=None, *args, **kwargs):
super().__init__(*args, **kwargs)
choices = [(value, string)
for value, string in models.RECEIVER_CHOICES
if value != models.PLATFORM or (user and user.is_superuser)]
self.fields['receivers'] = forms.ChoiceField(
label=_('Receivers'),
choices=choices,
widget=forms.RadioSelect(),
)
project_qs = Project.objects
if organisation:
project_qs = Project.objects.filter(organisation=organisation.id)
self.fields['project'] = forms.ModelChoiceField(
label=_('Project'),
queryset=project_qs,
required=False, empty_label=None)
self.fields['organisation'] = forms.ModelChoiceField(
label=_('Organisation'),
queryset=Organisation.objects,
required=False, empty_label=None)
def clean(self):
cleaned_data = super().clean()
if cleaned_data.get('receivers') == str(models.PROJECT) and \
not cleaned_data.get('projects'):
self.add_error('project', _('Select a Project'))
|
Validate for no project selection in newsletter
|
Validate for no project selection in newsletter
|
Python
|
agpl-3.0
|
liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin
|
94172dc29d9ccbce1c2ac752ce09baefafbf8a6c
|
nbgrader/tests/apps/test_nbgrader.py
|
nbgrader/tests/apps/test_nbgrader.py
|
import os
from .. import run_python_module, run_command
from .base import BaseTestApp
class TestNbGrader(BaseTestApp):
def test_help(self):
"""Does the help display without error?"""
run_python_module(["nbgrader", "--help-all"])
def test_no_subapp(self):
"""Is the help displayed when no subapp is given?"""
run_python_module(["nbgrader"], retcode=1)
def test_generate_config(self):
"""Is the config file properly generated?"""
# it already exists, because we create it in conftest.py
os.remove("nbgrader_config.py")
# try recreating it
run_python_module(["nbgrader", "--generate-config"])
assert os.path.isfile("nbgrader_config.py")
# does it fail if it already exists?
run_python_module(["nbgrader", "--generate-config"], retcode=1)
def test_check_version(self):
"""Is the version the same regardless of how we run nbgrader?"""
out1 = run_command(["nbgrader", "--version"])
out2 = run_python_module(["nbgrader", "--version"])
assert out1 == out2
|
import os
import sys
from .. import run_python_module, run_command
from .base import BaseTestApp
class TestNbGrader(BaseTestApp):
def test_help(self):
"""Does the help display without error?"""
run_python_module(["nbgrader", "--help-all"])
def test_no_subapp(self):
"""Is the help displayed when no subapp is given?"""
run_python_module(["nbgrader"], retcode=1)
def test_generate_config(self):
"""Is the config file properly generated?"""
# it already exists, because we create it in conftest.py
os.remove("nbgrader_config.py")
# try recreating it
run_python_module(["nbgrader", "--generate-config"])
assert os.path.isfile("nbgrader_config.py")
# does it fail if it already exists?
run_python_module(["nbgrader", "--generate-config"], retcode=1)
def test_check_version(self):
"""Is the version the same regardless of how we run nbgrader?"""
if sys.platform == 'win32':
out1 = "\r\n".join(run_command(["nbgrader.cmd", "--version"]).split("\r\n")[2:])
else:
out1 = run_command(["nbgrader", "--version"])
out2 = run_python_module(["nbgrader", "--version"])
assert out1 == out2
|
Fix issue with how nbgrader is called
|
Fix issue with how nbgrader is called
|
Python
|
bsd-3-clause
|
jhamrick/nbgrader,jhamrick/nbgrader,jupyter/nbgrader,ellisonbg/nbgrader,EdwardJKim/nbgrader,jupyter/nbgrader,jupyter/nbgrader,EdwardJKim/nbgrader,ellisonbg/nbgrader,jhamrick/nbgrader,ellisonbg/nbgrader,jupyter/nbgrader,EdwardJKim/nbgrader,ellisonbg/nbgrader,jhamrick/nbgrader,EdwardJKim/nbgrader,jupyter/nbgrader
|
8af349128b725e47b89f28ddc005d142a44c5765
|
openarc/env.py
|
openarc/env.py
|
#!/usr/bin/env python2.7
import os
import json
class OAEnv(object):
@property
def static_http_root(self):
if self.envcfg['httpinfo']['secure'] is True:
security = "https://"
else:
security = "http://"
return "%s%s" % ( security, self.envcfg['httpinfo']['httproot'] )
@property
def dbinfo(self):
return self.envcfg['dbinfo']
@property
def crypto(self):
return self.envcfg['crypto']
def __init__(self, requested_env):
cfg_file = "%s/envcfg.json" % ( os.environ.get("OPENARC_CFG_DIR") )
with open( cfg_file ) as f:
self.envcfg = json.loads( f.read() )[requested_env]
#This is where we hold library state.
#You will get cut if you don't manipulate the p_* variables
#via getenv() and initenv()
p_refcount_env = 0
p_env = None
def initenv(envstr):
"""envstr: one of local, dev, qa, prod.
Does not return OAEnv variable; for that, you
must call getenv"""
global p_env
global p_refcount_env
if p_refcount_env == 0:
p_env = OAEnv(envstr)
p_refcount_env += 1
def getenv():
"""Accessor method for global state"""
global p_env
return p_env
|
#!/usr/bin/env python2.7
import os
import json
class OAEnv(object):
@property
def static_http_root(self):
if self.envcfg['httpinfo']['secure'] is True:
security = "https://"
else:
security = "http://"
return "%s%s" % ( security, self.envcfg['httpinfo']['httproot'] )
@property
def dbinfo(self):
return self.envcfg['dbinfo']
@property
def crypto(self):
return self.envcfg['crypto']
@property
def extcreds(self):
return self.envcfg['extcreds']
def __init__(self, requested_env):
cfg_file = "%s/envcfg.json" % ( os.environ.get("OPENARC_CFG_DIR") )
with open( cfg_file ) as f:
self.envcfg = json.loads( f.read() )[requested_env]
#This is where we hold library state.
#You will get cut if you don't manipulate the p_* variables
#via getenv() and initenv()
p_refcount_env = 0
p_env = None
def initenv(envstr):
"""envstr: one of local, dev, qa, prod.
Does not return OAEnv variable; for that, you
must call getenv"""
global p_env
global p_refcount_env
if p_refcount_env == 0:
p_env = OAEnv(envstr)
p_refcount_env += 1
def getenv():
"""Accessor method for global state"""
global p_env
return p_env
|
Allow retrieval of external api credentials
|
Allow retrieval of external api credentials
|
Python
|
bsd-3-clause
|
kchoudhu/openarc
|
935552df10dc3a17cf3edb897e83861bbeaae803
|
tests/test_thread.py
|
tests/test_thread.py
|
import os
import unittest
from common import gobject, gtk, testhelper
# Enable PyGILState API
os.environ['PYGTK_USE_GIL_STATE_API'] = ''
gobject.threads_init()
class TestThread(unittest.TestCase):
def from_thread_cb(self, test, enum):
assert test == self.obj
assert int(enum) == 0
assert type(enum) != int
def idle_cb(self):
self.obj = testhelper.get_test_thread()
self.obj.connect('from-thread', self.from_thread_cb)
self.obj.emit('emit-signal')
def testExtensionModule(self):
gtk.idle_add(self.idle_cb)
gtk.timeout_add(50, self.timeout_cb)
gtk.main()
def timeout_cb(self):
gtk.main_quit()
|
import os
import unittest
from common import gobject, gtk, testhelper
# Enable PyGILState API
os.environ['PYGTK_USE_GIL_STATE_API'] = ''
gobject.threads_init()
class TestThread(unittest.TestCase):
def from_thread_cb(self, test, enum):
assert test == self.obj
assert int(enum) == 0
assert type(enum) != int
def idle_cb(self):
self.obj = testhelper.get_test_thread()
self.obj.connect('from-thread', self.from_thread_cb)
self.obj.emit('emit-signal')
def testExtensionModule(self):
gobject.idle_add(self.idle_cb)
gobject.timeout_add(50, self.timeout_cb)
gtk.main()
def timeout_cb(self):
gtk.main_quit()
|
Add pygtk_postinstall.py Updated Deprecate gtk.idle_add and friends. Merge
|
Add pygtk_postinstall.py Updated Deprecate gtk.idle_add and friends. Merge
* Makefile.am: Add pygtk_postinstall.py
* docs/random/missing-symbols: Updated
* gtk/__init__.py: Deprecate gtk.idle_add and friends.
* gtk/gtk.defs: Merge in 2.6 api, for GtkLabel functions,
thanks to Gian Mario Tagliaretti, fixes bug #163296
* tests/test_thread.py: Don't use gtk.idle_add
|
Python
|
lgpl-2.1
|
choeger/pygobject-cmake,GNOME/pygobject,nzjrs/pygobject,atizo/pygobject,pexip/pygobject,jdahlin/pygobject,davibe/pygobject,Distrotech/pygobject,sfeltman/pygobject,pexip/pygobject,choeger/pygobject-cmake,MathieuDuponchelle/pygobject,alexef/pygobject,alexef/pygobject,alexef/pygobject,sfeltman/pygobject,davidmalcolm/pygobject,MathieuDuponchelle/pygobject,davibe/pygobject,Distrotech/pygobject,atizo/pygobject,MathieuDuponchelle/pygobject,atizo/pygobject,Distrotech/pygobject,sfeltman/pygobject,thiblahute/pygobject,thiblahute/pygobject,davidmalcolm/pygobject,choeger/pygobject-cmake,davidmalcolm/pygobject,pexip/pygobject,davibe/pygobject,nzjrs/pygobject,thiblahute/pygobject,jdahlin/pygobject,nzjrs/pygobject,davibe/pygobject,GNOME/pygobject,GNOME/pygobject,Distrotech/pygobject,jdahlin/pygobject
|
6621bef05b2d4cb3fc138622194fe39765ebcb7c
|
tests/unit/helper.py
|
tests/unit/helper.py
|
import mock
import github3
import unittest
MockedSession = mock.create_autospec(github3.session.GitHubSession)
def build_url(self, *args, **kwargs):
# We want to assert what is happening with the actual calls to the
# Internet. We can proxy this.
return github3.session.GitHubSession().build_url(*args, **kwargs)
class UnitHelper(unittest.TestCase):
# Sub-classes must assign the class to this during definition
described_class = None
# Sub-classes must also assign a dictionary to this during definition
example_data = {}
def create_session_mock(self, *args):
session = MockedSession()
base_attrs = ['headers', 'auth']
attrs = dict(
(key, mock.Mock()) for key in set(args).union(base_attrs)
)
session.configure_mock(**attrs)
session.delete.return_value = None
session.get.return_value = None
session.patch.return_value = None
session.post.return_value = None
session.put.return_value = None
return session
def setUp(self):
self.session = self.create_session_mock()
self.instance = self.described_class(self.example_data, self.session)
# Proxy the build_url method to the class so it can build the URL and
# we can assert things about the call that will be attempted to the
# internet
self.described_class._build_url = build_url
|
import mock
import github3
import unittest
def build_url(self, *args, **kwargs):
# We want to assert what is happening with the actual calls to the
# Internet. We can proxy this.
return github3.session.GitHubSession().build_url(*args, **kwargs)
class UnitHelper(unittest.TestCase):
# Sub-classes must assign the class to this during definition
described_class = None
# Sub-classes must also assign a dictionary to this during definition
example_data = {}
def create_mocked_session(self):
MockedSession = mock.create_autospec(github3.session.GitHubSession)
return MockedSession()
def create_session_mock(self, *args):
session = self.create_mocked_session()
base_attrs = ['headers', 'auth']
attrs = dict(
(key, mock.Mock()) for key in set(args).union(base_attrs)
)
session.configure_mock(**attrs)
session.delete.return_value = None
session.get.return_value = None
session.patch.return_value = None
session.post.return_value = None
session.put.return_value = None
return session
def setUp(self):
self.session = self.create_session_mock()
self.instance = self.described_class(self.example_data, self.session)
# Proxy the build_url method to the class so it can build the URL and
# we can assert things about the call that will be attempted to the
# internet
self.described_class._build_url = build_url
|
Fix the issue where the mock is persisting calls
|
Fix the issue where the mock is persisting calls
|
Python
|
bsd-3-clause
|
krxsky/github3.py,jim-minter/github3.py,agamdua/github3.py,degustaf/github3.py,christophelec/github3.py,wbrefvem/github3.py,h4ck3rm1k3/github3.py,sigmavirus24/github3.py,ueg1990/github3.py,icio/github3.py,balloob/github3.py,itsmemattchung/github3.py
|
ededa7c9c616ac97dd6ce8638c6b959a0c51663c
|
examples/oauth/jupyterhub_config.py
|
examples/oauth/jupyterhub_config.py
|
# Configuration file for Jupyter Hub
c = get_config()
# spawn with Docker
c.JupyterHub.spawner_class = 'dockerspawner.DockerSpawner'
# The docker instances need access to the Hub, so the default loopback port doesn't work:
from IPython.utils.localinterfaces import public_ips
c.JupyterHub.hub_ip = public_ips()[0]
# OAuth with GitHub
c.JupyterHub.authenticator_class = 'oauthenticator.GitHubOAuthenticator'
c.Authenticator.whitelist = whitelist = set()
c.Authenticator.admin_users = admin = set()
import os
join = os.path.join
here = os.path.dirname(__file__)
with open(join(here, 'userlist')) as f:
for line in f:
if not line:
continue
parts = line.split()
name = parts[0]
whitelist.add(name)
if len(parts) > 1 and parts[1] == 'admin':
admin.add(name)
c.GitHubOAuthenticator.oauth_callback_url = os.environ['OAUTH_CALLBACK_URL']
# ssl config
ssl = join(here, 'ssl')
keyfile = join(ssl, 'ssl.key')
certfile = join(ssl, 'ssl.cert')
if os.path.exists(keyfile):
c.JupyterHub.ssl_key = keyfile
if os.path.exists(certfile):
c.JupyterHub.ssl_cert = certfile
|
# Configuration file for Jupyter Hub
c = get_config()
# spawn with Docker
c.JupyterHub.spawner_class = 'dockerspawner.DockerSpawner'
# The docker instances need access to the Hub, so the default loopback port doesn't work:
from jupyter_client.localinterfaces import public_ips
c.JupyterHub.hub_ip = public_ips()[0]
# OAuth with GitHub
c.JupyterHub.authenticator_class = 'oauthenticator.GitHubOAuthenticator'
c.Authenticator.whitelist = whitelist = set()
c.Authenticator.admin_users = admin = set()
import os
join = os.path.join
here = os.path.dirname(__file__)
with open(join(here, 'userlist')) as f:
for line in f:
if not line:
continue
parts = line.split()
name = parts[0]
whitelist.add(name)
if len(parts) > 1 and parts[1] == 'admin':
admin.add(name)
c.GitHubOAuthenticator.oauth_callback_url = os.environ['OAUTH_CALLBACK_URL']
# ssl config
ssl = join(here, 'ssl')
keyfile = join(ssl, 'ssl.key')
certfile = join(ssl, 'ssl.cert')
if os.path.exists(keyfile):
c.JupyterHub.ssl_key = keyfile
if os.path.exists(certfile):
c.JupyterHub.ssl_cert = certfile
|
Replace legacy ipython import with jupyter_client
|
Replace legacy ipython import with jupyter_client
|
Python
|
bsd-3-clause
|
jhamrick/dockerspawner,jupyter/dockerspawner,quantopian/dockerspawner,Fokko/dockerspawner,Fokko/dockerspawner,minrk/dockerspawner,quantopian/dockerspawner,minrk/dockerspawner,jupyter/dockerspawner,jhamrick/dockerspawner
|
5831ce15a94d1941e0521bae328f0ede48bfbe8b
|
juliet_importer.py
|
juliet_importer.py
|
import os
import imp
modules = {}
def load_modules(path="./modules/"): # Consider adding recursive searching at some point in the future
modules['juliet_module'] = imp.load_source('juliet_module', path + "juliet_module.py")
names = os.listdir(path)
for name in names:
if not name.endswith(".py"): continue
print("Importing module {0}".format(name))
try:
modules[name.split('.')[0]] = imp.load_source(name.split('.')[0], path + name)
except ImportError as e:
print("Error importing module {0} from directory {1}".format(name,os.getcwd()))
print(e)
continue
print("Success")
load_modules()
|
import os
import imp
modules = {}
def load_modules(path="./modules/"):
try:
modules['juliet_module'] = imp.load_source('juliet_module', path + "juliet_module.py")
except ImportError as e:
print("Error importing module {0} from directory {1}".format(name,os.getcwd()))
print(e)
for root, dirs, files in os.walk(path):
for name in files:
if not name.endswith(".py"): continue
print("Importing module {0}".format(name))
try:
modules[name.split('.')[0]] = imp.load_source(name.split('.')[0], path + name)
except ImportError as e:
print("Error importing module {0} from directory {1}".format(name,os.getcwd()))
print(e)
continue
print("Success")
load_modules()
|
Add recursive search to import function
|
Add recursive search to import function
|
Python
|
bsd-2-clause
|
halfbro/juliet
|
381e89972bf4d12daae7aa399f1348a215fa85d9
|
jira/exceptions.py
|
jira/exceptions.py
|
import json
class JIRAError(Exception):
"""General error raised for all problems in operation of the client."""
def __init__(self, status_code=None, text=None, url=None):
self.status_code = status_code
self.text = text
self.url = url
def __str__(self):
if self.text:
return 'HTTP {0}: "{1}"\n{2}'.format(self.status_code, self.text, self.url)
else:
return 'HTTP {0}: {1}'.format(self.status_code, self.url)
def raise_on_error(r):
if r.status_code >= 400:
error = ''
if r.text:
try:
response = json.loads(r.text)
if 'message' in response:
# JIRA 5.1 errors
error = response['message']
elif 'errorMessages' in response:
# JIRA 5.0.x error messages sometimes come wrapped in this array
errorMessages = response['errorMessages']
if isinstance(errorMessages, (list, tuple)) and len(errorMessages) > 0:
error = errorMessages[0]
else:
error = errorMessages
else:
error = r.text
except ValueError:
error = r.text
raise JIRAError(r.status_code, error, r.url)
|
import json
class JIRAError(Exception):
"""General error raised for all problems in operation of the client."""
def __init__(self, status_code=None, text=None, url=None):
self.status_code = status_code
self.text = text
self.url = url
def __str__(self):
if self.text:
return 'HTTP {0}: "{1}"\n{2}'.format(self.status_code, self.text, self.url)
else:
return 'HTTP {0}: {1}'.format(self.status_code, self.url)
def raise_on_error(r):
if r.status_code >= 400:
error = ''
if r.text:
try:
response = json.loads(r.text)
if 'message' in response:
# JIRA 5.1 errors
error = response['message']
elif 'errorMessages' in response and len(response['errorMessages']) > 0:
# JIRA 5.0.x error messages sometimes come wrapped in this array
# Sometimes this is present but empty
errorMessages = response['errorMessages']
if isinstance(errorMessages, (list, tuple)):
error = errorMessages[0]
else:
error = errorMessages
elif 'errors' in response and len(response['errors']) > 0:
# JIRA 6.x error messages are found in this array.
error = response['errors']
else:
error = r.text
except ValueError:
error = r.text
raise JIRAError(r.status_code, error, r.url)
|
Fix for empty errorMessages, moved length check to main logic for deciding which error message to use and added check for 'errors' in the response.
|
Fix for empty errorMessages, moved length check to main logic for deciding which error
message to use and added check for 'errors' in the response.
|
Python
|
bsd-2-clause
|
pycontribs/jira,jameskeane/jira-python,rayyen/jira,pycontribs/jira,dbaxa/jira,coddingtonbear/jira,milo-minderbinder/jira,systemadev/jira-python,tsarnowski/jira-python,kinow/jira,jameskeane/jira-python,awurster/jira,stevencarey/jira,VikingDen/jira,awurster/jira,kinow/jira,m42e/jira,VikingDen/jira,tsarnowski/jira-python,dbaxa/jira,rayyen/jira,dwmarshall/pycontribs-jira,milo-minderbinder/jira,dwmarshall/pycontribs-jira,m42e/jira,akosiaris/jira,systemadev/jira-python,akosiaris/jira,stevencarey/jira,coddingtonbear/jira
|
2050385a5f5fdcffe333ae17463d6469af0b5cd8
|
mopidy/__init__.py
|
mopidy/__init__.py
|
from __future__ import unicode_literals
import sys
import warnings
from distutils.version import StrictVersion as SV
import pykka
if not (2, 7) <= sys.version_info < (3,):
sys.exit(
'Mopidy requires Python >= 2.7, < 3, but found %s' %
'.'.join(map(str, sys.version_info[:3])))
if (isinstance(pykka.__version__, basestring)
and not SV('1.1') <= SV(pykka.__version__) < SV('2.0')):
sys.exit(
'Mopidy requires Pykka >= 1.1, < 2, but found %s' % pykka.__version__)
warnings.filterwarnings('ignore', 'could not open display')
__version__ = '0.19.4'
|
from __future__ import unicode_literals
import platform
import sys
import warnings
from distutils.version import StrictVersion as SV
import pykka
if not (2, 7) <= sys.version_info < (3,):
sys.exit(
'ERROR: Mopidy requires Python 2.7, but found %s.' %
platform.python_version())
if (isinstance(pykka.__version__, basestring)
and not SV('1.1') <= SV(pykka.__version__) < SV('2.0')):
sys.exit(
'ERROR: Mopidy requires Pykka >= 1.1, < 2, but found %s.' %
pykka.__version__)
warnings.filterwarnings('ignore', 'could not open display')
__version__ = '0.19.4'
|
Update Python and Pykka version check error messages
|
Update Python and Pykka version check error messages
|
Python
|
apache-2.0
|
jmarsik/mopidy,adamcik/mopidy,priestd09/mopidy,woutervanwijk/mopidy,glogiotatidis/mopidy,tkem/mopidy,bencevans/mopidy,hkariti/mopidy,jcass77/mopidy,pacificIT/mopidy,vrs01/mopidy,ali/mopidy,bencevans/mopidy,mokieyue/mopidy,rawdlite/mopidy,swak/mopidy,tkem/mopidy,rawdlite/mopidy,jcass77/mopidy,woutervanwijk/mopidy,swak/mopidy,swak/mopidy,SuperStarPL/mopidy,SuperStarPL/mopidy,dbrgn/mopidy,mopidy/mopidy,bencevans/mopidy,jcass77/mopidy,glogiotatidis/mopidy,ZenithDK/mopidy,mokieyue/mopidy,SuperStarPL/mopidy,jodal/mopidy,mopidy/mopidy,ali/mopidy,tkem/mopidy,pacificIT/mopidy,quartz55/mopidy,dbrgn/mopidy,ali/mopidy,rawdlite/mopidy,pacificIT/mopidy,glogiotatidis/mopidy,pacificIT/mopidy,rawdlite/mopidy,priestd09/mopidy,jodal/mopidy,priestd09/mopidy,dbrgn/mopidy,hkariti/mopidy,jmarsik/mopidy,mopidy/mopidy,ZenithDK/mopidy,jmarsik/mopidy,dbrgn/mopidy,quartz55/mopidy,SuperStarPL/mopidy,kingosticks/mopidy,bacontext/mopidy,hkariti/mopidy,kingosticks/mopidy,tkem/mopidy,jodal/mopidy,jmarsik/mopidy,diandiankan/mopidy,diandiankan/mopidy,ZenithDK/mopidy,vrs01/mopidy,ZenithDK/mopidy,vrs01/mopidy,bacontext/mopidy,ali/mopidy,bencevans/mopidy,hkariti/mopidy,bacontext/mopidy,swak/mopidy,quartz55/mopidy,mokieyue/mopidy,diandiankan/mopidy,adamcik/mopidy,glogiotatidis/mopidy,kingosticks/mopidy,adamcik/mopidy,quartz55/mopidy,bacontext/mopidy,vrs01/mopidy,mokieyue/mopidy,diandiankan/mopidy
|
fb042cd3ff15f35672e543f040053859c18cff24
|
timedelta/templatetags/timedelta.py
|
timedelta/templatetags/timedelta.py
|
from django import template
register = template.Library()
# Don't really like using relative imports, but no choice here!
from ..helpers import nice_repr, iso8601_repr, total_seconds as _total_seconds
@register.filter(name='timedelta')
def timedelta(value, display="long"):
return nice_repr(value, display)
@register.filter(name='iso8601')
def iso8601(value):
return iso8601_repr(value)
@register.filter(name='total_seconds')
def total_seconds(value):
return _total_seconds(value)
@register.filter(name='total_seconds_sort')
def total_seconds(value, places=10):
return ("%0" + str(places) + "i") % _total_seconds(value)
|
from django import template
register = template.Library()
# Don't really like using relative imports, but no choice here!
from ..helpers import nice_repr, iso8601_repr, total_seconds as _total_seconds
@register.filter(name='timedelta')
def timedelta(value, display="long"):
if value is None:
return value
return nice_repr(value, display)
@register.filter(name='iso8601')
def iso8601(value):
if value is None:
return value
return iso8601_repr(value)
@register.filter(name='total_seconds')
def total_seconds(value):
if value is None:
return value
return _total_seconds(value)
@register.filter(name='total_seconds_sort')
def total_seconds(value, places=10):
if value is None:
return value
return ("%0" + str(places) + "i") % _total_seconds(value)
|
Allow for calling our filters on objects that are None
|
Allow for calling our filters on objects that are None
|
Python
|
bsd-3-clause
|
sookasa/django-timedelta-field
|
e000f5db7bf8aee6b3ae267824491d03b20fbb36
|
saau/sections/transportation/data.py
|
saau/sections/transportation/data.py
|
from operator import attrgetter, itemgetter
from itertools import chain
from ...utils.py3_hook import with_hook
with with_hook():
from arcrest import Catalog
import numpy as np
def get_layers(service):
layers = service.layers
return {
layer.name: layer
for layer in layers
}
def mend_extent(extent):
extent.wkid = extent.spatialReference.wkid
return extent
def get_data(requested_layers):
catalog = Catalog('http://services.ga.gov.au/site_7/rest/services')
service = catalog['NM_Transport_Infrastructure']
layers = get_layers(service)
return chain.from_iterable(
layers[layer].QueryLayer(Geometry=mend_extent(layers[layer].extent))
for layer in requested_layers
)
def get_paths(request_layers):
paths = get_data(request_layers)
paths = map(itemgetter('geometry'), paths)
paths = filter(lambda path: hasattr(path, 'paths'), paths)
paths = map(attrgetter('paths'), paths)
paths = chain.from_iterable(paths)
return np.array([
tuple(
(part.x, part.y)
for part in path
)
for path in paths
])
|
from operator import itemgetter
from itertools import chain
from ...utils.py3_hook import with_hook
with with_hook():
from arcrest import Catalog
import numpy as np
def get_layers(service):
layers = service.layers
return {
layer.name: layer
for layer in layers
}
def mend_extent(extent):
extent.wkid = extent.spatialReference.wkid
return extent
def get_data(requested_layers):
catalog = Catalog('http://services.ga.gov.au/site_7/rest/services')
service = catalog['NM_Transport_Infrastructure']
layers = get_layers(service)
return chain.from_iterable(
layers[layer].QueryLayer(Geometry=mend_extent(layers[layer].extent))
for layer in requested_layers
)
def get_paths(request_layers):
paths = get_data(request_layers)
paths = map(itemgetter('geometry'), paths)
paths = chain.from_iterable(
geometry.paths
for geometry in paths
if hasattr(geometry, 'paths')
)
return np.array([
tuple(
(part.x, part.y)
for part in path
)
for path in paths
])
|
Remove map and filter use
|
Remove map and filter use
|
Python
|
mit
|
Mause/statistical_atlas_of_au
|
fc6806608c5e407882248185bca57afa712e065a
|
byceps/blueprints/news_admin/forms.py
|
byceps/blueprints/news_admin/forms.py
|
"""
byceps.blueprints.news_admin.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2017 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import re
from wtforms import StringField, TextAreaField
from wtforms.validators import InputRequired, Length, Optional, Regexp
from ...util.l10n import LocalizedForm
SLUG_REGEX = re.compile('^[a-z0-9-]+$')
class ItemCreateForm(LocalizedForm):
slug = StringField('Slug', [InputRequired(), Length(max=80), Regexp(SLUG_REGEX, message='Nur Kleinbuchstaben, Ziffern und Bindestrich sind erlaubt.')])
title = StringField('Titel', [InputRequired(), Length(max=80)])
body = TextAreaField('Text', [InputRequired(), Length(max=80)])
image_url_path = StringField('Bild-URL-Pfad', [Optional(), Length(max=80)])
class ItemUpdateForm(ItemCreateForm):
pass
|
"""
byceps.blueprints.news_admin.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2017 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import re
from wtforms import StringField, TextAreaField
from wtforms.validators import InputRequired, Length, Optional, Regexp
from ...util.l10n import LocalizedForm
SLUG_REGEX = re.compile('^[a-z0-9-]+$')
class ItemCreateForm(LocalizedForm):
slug = StringField('Slug', [InputRequired(), Length(max=80), Regexp(SLUG_REGEX, message='Nur Kleinbuchstaben, Ziffern und Bindestrich sind erlaubt.')])
title = StringField('Titel', [InputRequired(), Length(max=80)])
body = TextAreaField('Text', [InputRequired()])
image_url_path = StringField('Bild-URL-Pfad', [Optional(), Length(max=80)])
class ItemUpdateForm(ItemCreateForm):
pass
|
Fix validation of news creation form
|
Fix validation of news creation form
|
Python
|
bsd-3-clause
|
m-ober/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps
|
4bd41e0e9381ef1c29b1a912a5d8d6ac99b03f4c
|
capstone/rl/learners/qlearning.py
|
capstone/rl/learners/qlearning.py
|
from ..learner import Learner
from ..policies import RandomPolicy
from ..util import max_action_value
from ..value_functions import TabularF
from ...utils import check_random_state
class QLearning(Learner):
def __init__(self, env, policy=None, qf=None, alpha=0.1, gamma=0.99,
n_episodes=1000, random_state=None, verbose=None):
super(QLearning, self).__init__(env, n_episodes=n_episodes, verbose=verbose)
self.alpha = alpha
self.gamma = gamma
self.random_state = check_random_state(random_state)
self.policy = policy or RandomPolicy(env.actions, self.random_state)
self.qf = qf or TabularF(self.random_state)
def best_qvalue(self, state, actions):
return max_action_value(self.qf, state, actions)
###########
# Learner #
###########
def episode(self):
while not self.env.is_terminal():
state = self.env.cur_state()
action = self.policy.action(state)
reward, next_state = self.env.do_action(action)
best_qvalue = self.best_qvalue(next_state, next_actions)
target = reward + (self.gamma * best_qvalue)
td_error = target - self.qf[state, action]
self.qf[state, action] += self.alpha * td_error
|
from ..learner import Learner
from ..policies import RandomPolicy
from ..util import max_action_value
from ..value_functions import TabularF
from ...utils import check_random_state
class QLearning(Learner):
def __init__(self, env, policy=None, qf=None, learning_rate=0.1,
discount_factor=0.99, n_episodes=1000, random_state=None,
verbose=None):
super(QLearning, self).__init__(env, n_episodes=n_episodes, verbose=verbose)
self.learning_rate = learning_rate
self.discount_factor = discount_factor
self.random_state = check_random_state(random_state)
self.policy = policy or RandomPolicy(env.actions, self.random_state)
self.qf = qf or TabularF(self.random_state)
def best_qvalue(self, state, actions):
return max_action_value(self.qf, state, actions)
###########
# Learner #
###########
def episode(self):
while not self.env.is_terminal():
state = self.env.cur_state()
action = self.policy.action(state)
reward, next_state = self.env.do_action(action)
best_qvalue = self.best_qvalue(next_state, next_actions)
target = reward + (self.discount_factor * best_qvalue)
td_error = target - self.qf[state, action]
self.qf[state, action] += self.learning_rate * td_error
|
Rename alpha -> learning_rate and gamma -> discount_factor
|
Rename alpha -> learning_rate and gamma -> discount_factor
|
Python
|
mit
|
davidrobles/mlnd-capstone-code
|
0167e246b74789cc0181b603520ec7f58ef7b5fe
|
pandas/core/api.py
|
pandas/core/api.py
|
# pylint: disable=W0614,W0401,W0611
import numpy as np
from pandas.core.algorithms import factorize, match, unique, value_counts
from pandas.core.common import isnull, notnull, save, load
from pandas.core.categorical import Categorical, Factor
from pandas.core.format import (set_printoptions, reset_printoptions,
set_eng_float_format)
from pandas.core.index import Index, Int64Index, MultiIndex
from pandas.core.series import Series, TimeSeries
from pandas.core.frame import DataFrame
from pandas.core.panel import Panel
from pandas.core.groupby import groupby
from pandas.core.reshape import (pivot_simple as pivot, get_dummies,
lreshape)
WidePanel = Panel
from pandas.tseries.offsets import DateOffset
from pandas.tseries.tools import to_datetime
from pandas.tseries.index import (DatetimeIndex, Timestamp,
date_range, bdate_range)
from pandas.tseries.period import Period, PeriodIndex
# legacy
from pandas.core.daterange import DateRange # deprecated
import pandas.core.datetools as datetools
|
# pylint: disable=W0614,W0401,W0611
import numpy as np
from pandas.core.algorithms import factorize, match, unique, value_counts
from pandas.core.common import isnull, notnull, save, load
from pandas.core.categorical import Categorical, Factor
from pandas.core.format import (set_printoptions, reset_printoptions,
set_eng_float_format)
from pandas.core.index import Index, Int64Index, MultiIndex
from pandas.core.series import Series, TimeSeries
from pandas.core.frame import DataFrame
from pandas.core.panel import Panel
from pandas.core.groupby import groupby
from pandas.core.reshape import (pivot_simple as pivot, get_dummies,
lreshape)
WidePanel = Panel
from pandas.tseries.offsets import DateOffset
from pandas.tseries.tools import to_datetime
from pandas.tseries.index import (DatetimeIndex, Timestamp,
date_range, bdate_range)
from pandas.tseries.period import Period, PeriodIndex
# legacy
from pandas.core.daterange import DateRange # deprecated
import pandas.core.datetools as datetools
from pandas.core.config import get_option,set_option,reset_option,\
reset_options,describe_options
|
Add new core.config API functions to the pandas top level module
|
ENH: Add new core.config API functions to the pandas top level module
|
Python
|
bsd-3-clause
|
pandas-dev/pandas,GuessWhoSamFoo/pandas,TomAugspurger/pandas,toobaz/pandas,MJuddBooth/pandas,cython-testbed/pandas,TomAugspurger/pandas,nmartensen/pandas,cython-testbed/pandas,DGrady/pandas,DGrady/pandas,datapythonista/pandas,kdebrab/pandas,dsm054/pandas,Winand/pandas,linebp/pandas,dsm054/pandas,toobaz/pandas,jmmease/pandas,zfrenchee/pandas,jorisvandenbossche/pandas,cbertinato/pandas,linebp/pandas,harisbal/pandas,rs2/pandas,linebp/pandas,nmartensen/pandas,jmmease/pandas,jreback/pandas,linebp/pandas,cbertinato/pandas,zfrenchee/pandas,nmartensen/pandas,MJuddBooth/pandas,cython-testbed/pandas,amolkahat/pandas,jmmease/pandas,cython-testbed/pandas,GuessWhoSamFoo/pandas,harisbal/pandas,zfrenchee/pandas,jmmease/pandas,jorisvandenbossche/pandas,GuessWhoSamFoo/pandas,gfyoung/pandas,amolkahat/pandas,pandas-dev/pandas,jreback/pandas,kdebrab/pandas,MJuddBooth/pandas,datapythonista/pandas,pratapvardhan/pandas,amolkahat/pandas,Winand/pandas,cbertinato/pandas,jreback/pandas,gfyoung/pandas,pandas-dev/pandas,jreback/pandas,louispotok/pandas,linebp/pandas,toobaz/pandas,gfyoung/pandas,Winand/pandas,jorisvandenbossche/pandas,rs2/pandas,DGrady/pandas,dsm054/pandas,winklerand/pandas,kdebrab/pandas,winklerand/pandas,TomAugspurger/pandas,datapythonista/pandas,winklerand/pandas,kdebrab/pandas,zfrenchee/pandas,pratapvardhan/pandas,Winand/pandas,TomAugspurger/pandas,datapythonista/pandas,toobaz/pandas,DGrady/pandas,cbertinato/pandas,rs2/pandas,rs2/pandas,DGrady/pandas,toobaz/pandas,gfyoung/pandas,harisbal/pandas,jorisvandenbossche/pandas,nmartensen/pandas,louispotok/pandas,harisbal/pandas,amolkahat/pandas,linebp/pandas,cbertinato/pandas,Winand/pandas,louispotok/pandas,Winand/pandas,pratapvardhan/pandas,nmartensen/pandas,winklerand/pandas,DGrady/pandas,gfyoung/pandas,cython-testbed/pandas,pratapvardhan/pandas,louispotok/pandas,zfrenchee/pandas,MJuddBooth/pandas,GuessWhoSamFoo/pandas,pratapvardhan/pandas,winklerand/pandas,amolkahat/pandas,kdebrab/pandas,pandas-dev/pandas,harisbal/pandas,jreback/pandas,dsm054/pandas,GuessWhoSamFoo/pandas,MJuddBooth/pandas,jmmease/pandas,winklerand/pandas,dsm054/pandas,louispotok/pandas,jmmease/pandas,nmartensen/pandas
|
ebcdf90a44d3ae87be8032f89bec26697e22cbf3
|
alexandra/__init__.py
|
alexandra/__init__.py
|
"""
Python support for Alexa applications.
Because like everything Amazon it involves a ton of tedious boilerplate.
"""
import logging
logging.getLogger(__name__).addHandler(logging.NullHandler())
from alexandra.app import Application
from alexandra.session import Session
from alexandra.util import respond, reprompt
|
# flake8: noqa
"""
Python support for Alexa applications.
Because like everything Amazon it involves a ton of tedious boilerplate.
"""
import logging
logging.getLogger(__name__).addHandler(logging.NullHandler())
from alexandra.app import Application
from alexandra.session import Session
from alexandra.util import respond, reprompt
|
Add a noqa to init
|
Add a noqa to init
|
Python
|
isc
|
erik/alexandra
|
2cb7c80bc4358631b897e3ea91d3c7eff684f69b
|
pmxbot/__init__.py
|
pmxbot/__init__.py
|
# -*- coding: utf-8 -*-
# vim:ts=4:sw=4:noexpandtab
from __future__ import absolute_import
import socket
import logging
from .dictlib import ConfigDict
config = ConfigDict(
bot_nickname = 'pmxbot',
database = 'sqlite:pmxbot.sqlite',
server_host = 'localhost',
server_port = 6667,
use_ssl = False,
password = None,
silent_bot = False,
log_channels = [],
other_channels = [],
places = ['London', 'Tokyo', 'New York'],
feed_interval = 15, # minutes
feeds = [dict(
name = 'pmxbot bitbucket',
channel = '#inane',
linkurl = 'http://bitbucket.org/yougov/pmxbot',
url = 'http://bitbucket.org/yougov/pmxbot',
),
],
librarypaste = 'http://paste.jaraco.com',
)
config['logs URL'] = 'http://' + socket.getfqdn()
config['log level'] = logging.INFO
"The config object"
|
# -*- coding: utf-8 -*-
# vim:ts=4:sw=4:noexpandtab
from __future__ import absolute_import
import socket
import logging as _logging
from .dictlib import ConfigDict
config = ConfigDict(
bot_nickname = 'pmxbot',
database = 'sqlite:pmxbot.sqlite',
server_host = 'localhost',
server_port = 6667,
use_ssl = False,
password = None,
silent_bot = False,
log_channels = [],
other_channels = [],
places = ['London', 'Tokyo', 'New York'],
feed_interval = 15, # minutes
feeds = [dict(
name = 'pmxbot bitbucket',
channel = '#inane',
linkurl = 'http://bitbucket.org/yougov/pmxbot',
url = 'http://bitbucket.org/yougov/pmxbot',
),
],
librarypaste = 'http://paste.jaraco.com',
)
config['logs URL'] = 'http://' + socket.getfqdn()
config['log level'] = _logging.INFO
"The config object"
|
Fix issue with conflated pmxbot.logging
|
Fix issue with conflated pmxbot.logging
|
Python
|
bsd-3-clause
|
jawilson/pmxbot,jawilson/pmxbot
|
c5c2d3c411ba38a7b110044e04657ae6584be861
|
scripts/helpers.py
|
scripts/helpers.py
|
def printSnapshot(doc):
print(u'Created {} => {}'.format(doc.id, doc.to_dict()))
def queryUsers(db):
users_ref = db.collection(u'users')
docs = users_ref.get()
docList = list()
for doc in docs:
docList.append(doc)
return docList
def queryRequests(db):
requests_ref = db.collection(u'requests')
docs = requests_ref.get()
docList = list()
for doc in docs:
docList.append(doc)
return docList
def getUser(userId, users):
for user in users:
if user.id == userId:
return user
return None
|
def printSnapshot(doc):
print(u'Created {} => {}'.format(doc.id, doc.to_dict()))
def queryUsers(db):
users_ref = db.collection(u'users')
docs = users_ref.get()
docList = list()
for doc in docs:
docList.append(doc)
return docList
def queryRequests(db):
requests_ref = db.collection(u'requests')
docs = requests_ref.get()
docList = list()
for doc in docs:
docList.append(doc)
return docList
def queryMessages(db):
messages_ref = db.collection(u'messages')
docs = messages_ref.get()
docList = list()
for doc in docs:
docList.append(doc)
return docList
def getUser(userId, users):
for user in users:
if user.id == userId:
return user
return None
|
Add script to clean the message table
|
Add script to clean the message table
|
Python
|
mit
|
frinder/frinder-app,frinder/frinder-app,frinder/frinder-app
|
2f6c82d74592c80b5042c0b808a658650896cbec
|
rebulk/__init__.py
|
rebulk/__init__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Define simple search patterns in bulk to perform advanced matching on any string
"""
from .rebulk import Rebulk
from .match import Match
from .rules import Rule
from .pattern import REGEX_AVAILABLE
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Define simple search patterns in bulk to perform advanced matching on any string
"""
from .rebulk import Rebulk
from .match import Match
from .rules import Rule, AppendMatchRule, RemoveMatchRule
from .pattern import REGEX_AVAILABLE
|
Add global imports for rules classes
|
Add global imports for rules classes
|
Python
|
mit
|
Toilal/rebulk
|
75092d41fc93306ddc640463886e80620cbcbf46
|
pemi/transforms.py
|
pemi/transforms.py
|
def isblank(value):
return (
value is not False
and value != 0
and value != float(0)
and not bool(value)
)
def concatenate(delimiter=''):
def _concatenate(row):
return delimiter.join(row)
return _concatenate
def nvl(default=''):
def _nvl(row):
return next((v for v in row if not isblank(v)), default)
return _nvl
|
import pandas as pd
def isblank(value):
return (
value is not False
and value != 0
and value != float(0)
and (value is None or pd.isnull(value) or not value)
)
def concatenate(delimiter=''):
def _concatenate(row):
return delimiter.join(row)
return _concatenate
def nvl(default=''):
def _nvl(row):
return next((v for v in row if not isblank(v)), default)
return _nvl
|
Revert "DE-1903 - fix isblank() error"
|
Revert "DE-1903 - fix isblank() error"
This reverts commit 27fd096d9641971f34cb0811fc2240ebc4f3450b.
|
Python
|
mit
|
inside-track/pemi
|
fd7fb7ade0fc879e24543f13c39b00de073004bc
|
setuptools/tests/py26compat.py
|
setuptools/tests/py26compat.py
|
import sys
import tarfile
import contextlib
def _tarfile_open_ex(*args, **kwargs):
"""
Extend result as a context manager.
"""
return contextlib.closing(tarfile.open(*args, **kwargs))
tarfile_open = _tarfile_open_ex if sys.version_info < (2,7) else tarfile.open
|
import sys
import tarfile
import contextlib
def _tarfile_open_ex(*args, **kwargs):
"""
Extend result as a context manager.
"""
return contextlib.closing(tarfile.open(*args, **kwargs))
if sys.version_info[:2] < (2, 7) or (3, 0) <= sys.version_info[:2] < (3, 2):
tarfile_open = _tarfile_open_ex
else:
tarfile_open = tarfile.open
|
Fix "AttributeError: 'TarFile' object has no attribute '__exit__'" with Python 3.1.
|
Fix "AttributeError: 'TarFile' object has no attribute '__exit__'" with Python 3.1.
|
Python
|
mit
|
pypa/setuptools,pypa/setuptools,pypa/setuptools
|
6830f29022746838677ecca420aeff190943c5ed
|
random/__init__.py
|
random/__init__.py
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Nomisma Quantitative Finance random number samplers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from nomisma_quant_finance.random.random_ops import multivariate_normal
from nomisma_quant_finance.random.stateless_random_ops import stateless_random_shuffle
__all__ = [
'multivariate_normal',
'stateless_random_shuffle'
]
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Random number samplers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from nomisma_quant_finance.random.random_ops import multivariate_normal
from nomisma_quant_finance.random.stateless_random_ops import stateless_random_shuffle
__all__ = [
'multivariate_normal',
'stateless_random_shuffle'
]
|
Remove remnants of internal project naming in one docstring.
|
Remove remnants of internal project naming in one docstring.
PiperOrigin-RevId: 263530441
|
Python
|
apache-2.0
|
google/tf-quant-finance,google/tf-quant-finance
|
0eb7ddce9f425c30c70bc1442618deb72c530911
|
networks/models.py
|
networks/models.py
|
from django.db import models
from helpers import models as helpermodels
# Create your models here.
class Networks(models.Model):
POLICIES = (
('reject', 'Reject'),
('drop', 'Ignore'),
('accept', 'Accept'),
)
name = models.CharField(max_length=30)
interface = models.CharField(max_length=10)
ip_range = helpermodels.IPNetworkField()
policy = models.CharField("default policy", choices=POLICIES, max_length=6)
|
from django.db import models
from helpers.models import IPNetworkField
# Create your models here.
class Network(models.Model):
POLICIES = (
('reject', 'Reject'),
('drop', 'Ignore'),
('accept', 'Accept'),
)
name = models.CharField(max_length=30)
interface = models.CharField(max_length=10)
ip_range = IPNetworkField()
policy = models.CharField("default policy", choices=POLICIES, max_length=6)
|
Fix Network model name; better import
|
Fix Network model name; better import
|
Python
|
mit
|
Kromey/piroute,Kromey/piroute,Kromey/piroute
|
563e7d5bc2fadd35b0fc71d45c949aa0b2e872a9
|
example/example/tasksapp/run_tasks.py
|
example/example/tasksapp/run_tasks.py
|
import os
import time
from dj_experiment.tasks.tasks import longtime_add, netcdf_save
from example.settings import (DJ_EXPERIMENT_BASE_DATA_DIR,
DJ_EXPERIMENT_DATA_DIR)
if __name__ == '__main__':
result = longtime_add.delay(1, 2)
# at this time, our task is not finished, so it will return False
print 'Task finished? ', result.ready()
print 'Task result: ', result.result
# sleep 10 seconds to ensure the task has been finished
time.sleep(10)
# now the task should be finished and ready method will return True
print 'Task finished? ', result.ready()
print 'Task result: ', result.result
rcmdatadir = os.path.join(DJ_EXPERIMENT_BASE_DATA_DIR,
DJ_EXPERIMENT_DATA_DIR)
result1 = netcdf_save.delay(14, rcmdatadir)
print 'Task netcdf finished? ', result1.ready()
print 'Task result1: ', result1.result
time.sleep(10)
print 'Task netcdf finished? ', result1.ready()
print 'Task result1: ', result1.result
|
import os
import time
from dj_experiment.tasks.tasks import longtime_add, netcdf_save
from example.settings import DJ_EXPERIMENT_BASE_DATA_DIR
if __name__ == '__main__':
result = longtime_add.delay(1, 2)
# at this time, our task is not finished, so it will return False
print 'Task finished? ', result.ready()
print 'Task result: ', result.result
# sleep 10 seconds to ensure the task has been finished
time.sleep(10)
# now the task should be finished and ready method will return True
print 'Task finished? ', result.ready()
print 'Task result: ', result.result
rcmdatadir = DJ_EXPERIMENT_BASE_DATA_DIR
result1 = netcdf_save.delay(14, rcmdatadir)
print 'Task netcdf finished? ', result1.ready()
print 'Task result1: ', result1.result
time.sleep(10)
print 'Task netcdf finished? ', result1.ready()
print 'Task result1: ', result1.result
|
Fix wrong path composition for data directory
|
Fix wrong path composition for data directory
|
Python
|
mit
|
francbartoli/dj-experiment,francbartoli/dj-experiment
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.