commit
stringlengths 40
40
| old_file
stringlengths 4
236
| new_file
stringlengths 4
236
| old_contents
stringlengths 1
3.26k
| new_contents
stringlengths 16
4.43k
| subject
stringlengths 16
624
| message
stringlengths 17
3.29k
| lang
stringclasses 5
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
|---|---|---|---|---|---|---|---|---|---|
08581be11f891e21014a7863ab102d4586388d47
|
packs/docker/actions/build_image.py
|
packs/docker/actions/build_image.py
|
import os
from lib.base import DockerBasePythonAction
__all__ = [
'DockerBuildImageAction'
]
class DockerBuildImageAction(DockerBasePythonAction):
def run(self, dockerfile_path, tag):
if os.path.isdir(dockerfile_path):
return self.wrapper.build(path=dockerfile_path, tag=tag)
else:
with open(dockerfile_path, 'r') as fp:
return self.wrapper.build(fileobj=fp, tag=tag)
|
import os
from lib.base import DockerBasePythonAction
__all__ = [
'DockerBuildImageAction'
]
class DockerBuildImageAction(DockerBasePythonAction):
def run(self, dockerfile_path, tag):
if os.path.isdir(dockerfile_path):
return self.wrapper.build(path=dockerfile_path, tag=tag)
else:
dockerfile_path = os.path.expanduser(dockerfile_path)
with open(dockerfile_path, 'r') as fp:
return self.wrapper.build(fileobj=fp, tag=tag)
|
Expand user in the path.
|
Expand user in the path.
|
Python
|
apache-2.0
|
StackStorm/st2contrib,jtopjian/st2contrib,armab/st2contrib,Aamir-raza-1/st2contrib,tonybaloney/st2contrib,StackStorm/st2contrib,pearsontechnology/st2contrib,digideskio/st2contrib,meirwah/st2contrib,tonybaloney/st2contrib,dennybaa/st2contrib,jtopjian/st2contrib,armab/st2contrib,tonybaloney/st2contrib,digideskio/st2contrib,StackStorm/st2contrib,lakshmi-kannan/st2contrib,lmEshoo/st2contrib,armab/st2contrib,lmEshoo/st2contrib,meirwah/st2contrib,psychopenguin/st2contrib,pearsontechnology/st2contrib,pidah/st2contrib,pinterb/st2contrib,pearsontechnology/st2contrib,pearsontechnology/st2contrib,pidah/st2contrib,Aamir-raza-1/st2contrib,lakshmi-kannan/st2contrib,pidah/st2contrib,dennybaa/st2contrib,psychopenguin/st2contrib,pinterb/st2contrib
|
02fad660afbb6b5ca1fc4f1c3a1fcf3c95f9fd0d
|
pypeerassets/providers/node.py
|
pypeerassets/providers/node.py
|
'''Communicate with local or remote peercoin-daemon via JSON-RPC'''
from operator import itemgetter
try:
from peercoin_rpc import Client
except:
raise EnvironmentError("peercoin_rpc library is required for this to work,\
use pip to install it.")
def select_inputs(cls, total_amount):
'''finds apropriate utxo's to include in rawtx, while being careful
to never spend old transactions with a lot of coin age.
Argument is intiger, returns list of apropriate UTXO's'''
my_addresses = [i["address"] for i in cls.listreceivedbyaddress()]
utxo = []
utxo_sum = float(-0.01) ## starts from negative due to minimal fee
for tx in sorted(cls.listunspent(), key=itemgetter('confirmations')):
for v in cls.getrawtransaction(tx["txid"])["vout"]:
if v["scriptPubKey"]["addresses"][0] in my_addresses:
utxo.append({
"txid": tx["txid"],
"vout": v["n"],
"ScriptSig": v["scriptPubKey"]["hex"]
})
utxo_sum += float(v["value"])
if utxo_sum >= total_amount:
return utxo
if utxo_sum < total_amount:
raise ValueError("Not enough funds.")
Client.select_inputs = select_inputs
|
'''Communicate with local or remote peercoin-daemon via JSON-RPC'''
from operator import itemgetter
try:
from peercoin_rpc import Client
except:
raise EnvironmentError("peercoin_rpc library is required for this to work,\
use pip to install it.")
def select_inputs(cls, total_amount):
'''finds apropriate utxo's to include in rawtx, while being careful
to never spend old transactions with a lot of coin age.
Argument is intiger, returns list of apropriate UTXO's'''
utxo = []
utxo_sum = float(-0.01) ## starts from negative due to minimal fee
for tx in sorted(cls.listunspent(), key=itemgetter('confirmations')):
utxo.append({
"txid": tx["txid"],
"vout": tx["vout"],
"ScriptSig": tx["scriptPubKey"]
})
utxo_sum += float(tx["amount"])
if utxo_sum >= total_amount:
return utxo
if utxo_sum < total_amount:
raise ValueError("Not enough funds.")
Client.select_inputs = select_inputs
|
Refactor unspent utxo data gathering
|
Refactor unspent utxo data gathering
|
Python
|
bsd-3-clause
|
PeerAssets/pypeerassets,backpacker69/pypeerassets
|
c881dbef22ac7ca26ad6abceb3c2e02d0b759040
|
yutu/bot.py
|
yutu/bot.py
|
import discord
from discord.ext import commands
from pony import orm
DESCRIPTION = """
Hi, I'm Yutu!
I'm the bot for the Velvet fan discord.
I'm still learning so sorry if I do something wrong.
You can ask my programmer @Harkonen if you want to know more about me.
"""
class Yutu(commands.Bot):
def __init__(self):
super().__init__(commands.when_mentioned_or("~"),
game=discord.Game(name="~help"),
description=DESCRIPTION,
pm_help=None)
self.db = orm.Database()
async def on_ready(self):
print('We have logged in as {0.user}'.format(self))
self.owner_id = (await self.application_info()).owner.id
async def on_command_error(self, ctx: commands.Context, exception):
if(isinstance(exception, commands.errors.MissingRequiredArgument) or
isinstance(exception, commands.errors.BadArgument)):
await ctx.print_help()
elif isinstance(exception, commands.CommandOnCooldown):
await ctx.send(content=str(exception))
else:
await super().on_command_error(ctx, exception)
|
import discord
from discord.ext import commands
from pony import orm
DESCRIPTION = """
Hi, I'm Yutu!
I'm the bot for the Velvet fan discord.
I'm still learning so sorry if I do something wrong.
You can ask my programmer @Harkonen if you want to know more about me.
"""
class Yutu(commands.Bot):
def __init__(self):
super().__init__(commands.when_mentioned_or("~"),
game=discord.Game(name="~help"),
description=DESCRIPTION,
pm_help=None)
self.db = orm.Database()
self.get_command('help').after_invoke(self.post_help)
async def post_help(self, ctx: commands.Context):
await ctx.message.add_reaction("✅")
async def on_ready(self):
print('We have logged in as {0.user}'.format(self))
self.owner_id = (await self.application_info()).owner.id
async def on_command_error(self, ctx: commands.Context, exception):
if(isinstance(exception, commands.errors.MissingRequiredArgument) or
isinstance(exception, commands.errors.BadArgument)):
await ctx.print_help()
elif isinstance(exception, commands.CommandOnCooldown):
await ctx.send(content=str(exception))
else:
await super().on_command_error(ctx, exception)
|
Make Yutu acknowledge pm'ed help commands
|
Make Yutu acknowledge pm'ed help commands
|
Python
|
mit
|
HarkonenBade/yutu
|
e412a68afe691913525245d2a8a3a8e9e3ba532d
|
python/xicore.py
|
python/xicore.py
|
#!/usr/bin/env python
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http:#www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import struct
import json
def sendraw(buf):
sys.stdout.write(struct.pack("<q", len(buf)))
sys.stdout.write(buf)
sys.stdout.flush()
def send(obj):
sendraw(json.dumps(obj))
def mainloop():
text = ''
while True:
sizebuf = sys.stdin.read(8)
if len(sizebuf) == 0:
return
(size,) = struct.unpack("<q", sizebuf)
cmd, arg = json.loads(sys.stdin.read(size))
print >> sys.stderr, cmd, arg
if cmd == 'key':
chars = arg['chars']
if chars == u'\x7f':
if len(text):
text = text[:-1]
else:
text += chars
send(['settext', text])
mainloop()
|
#!/usr/bin/env python
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http:#www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import struct
import json
def sendraw(buf):
sys.stdout.write(struct.pack("<q", len(buf)))
sys.stdout.write(buf)
sys.stdout.flush()
def send(obj):
sendraw(json.dumps(obj))
def mainloop():
text = ''
while True:
sizebuf = sys.stdin.read(8)
if len(sizebuf) == 0:
return
(size,) = struct.unpack("<q", sizebuf)
cmd, arg = json.loads(sys.stdin.read(size))
print >> sys.stderr, cmd, arg
if cmd == 'key':
chars = arg['chars']
if chars == u'\x7f':
if len(text):
text = text[:-1]
else:
text += chars
send(['settext', text])
mainloop()
|
Replace tab indentation with 4 spaces
|
Replace tab indentation with 4 spaces
|
Python
|
apache-2.0
|
google/xi-editor,google/xi-editor,fuchsia-mirror/third_party-xi-editor,modelorganism/xi-editor,modelorganism/xi-editor,fuchsia-mirror/third_party-xi-editor,fuchsia-mirror/third_party-xi-editor,google/xi-editor,google/xi-editor,fuchsia-mirror/third_party-xi-editor,modelorganism/xi-editor
|
481df944700297300892bd14783310aad14c093c
|
test/selenium/src/lib/page/modal/delete_object.py
|
test/selenium/src/lib/page/modal/delete_object.py
|
# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Modals for deleting objects"""
from lib import base
from lib import decorator
from lib.constants import locator
class DeleteObjectModal(base.Modal):
"""A generic modal for deleting an object"""
_page_model_cls_after_redirect = None
_locator = locator.ModalDeleteObject
def __init__(self, driver):
super(DeleteObjectModal, self).__init__(driver)
self.title_modal = base.Label(
driver, self._locator.MODAL_TITLE)
self.confirmation_text = base.Label(
driver, self._locator.CONFIRMATION_TEXT)
self.title_object = base.Label(
driver, self._locator.OBJECT_TITLE)
self.button_delete = base.Button(
driver, self._locator.BUTTON_DELETE)
@decorator.wait_for_redirect
def confirm_delete(self):
"""
Returns:
lib.page.dashboard.Dashboard
"""
self.button_delete.click()
|
# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Modals for deleting objects"""
from lib import base
from lib.constants import locator
class DeleteObjectModal(base.Modal):
"""A generic modal for deleting an object"""
_page_model_cls_after_redirect = None
_locator = locator.ModalDeleteObject
def __init__(self, driver):
super(DeleteObjectModal, self).__init__(driver)
self.title_modal = base.Label(
driver, self._locator.MODAL_TITLE)
self.confirmation_text = base.Label(
driver, self._locator.CONFIRMATION_TEXT)
self.title_object = base.Label(
driver, self._locator.OBJECT_TITLE)
self.button_delete = base.Button(
driver, self._locator.BUTTON_DELETE)
def confirm_delete(self):
"""
Returns:
lib.page.dashboard.Dashboard
"""
self.button_delete.click()
|
Remove redirect from delete modal page object
|
Remove redirect from delete modal page object
(cherry picked from commit 480ecdb)
|
Python
|
apache-2.0
|
VinnieJohns/ggrc-core,selahssea/ggrc-core,j0gurt/ggrc-core,josthkko/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core,selahssea/ggrc-core,j0gurt/ggrc-core,kr41/ggrc-core,AleksNeStu/ggrc-core,AleksNeStu/ggrc-core,andrei-karalionak/ggrc-core,VinnieJohns/ggrc-core,j0gurt/ggrc-core,andrei-karalionak/ggrc-core,kr41/ggrc-core,plamut/ggrc-core,josthkko/ggrc-core,plamut/ggrc-core,josthkko/ggrc-core,AleksNeStu/ggrc-core,kr41/ggrc-core,andrei-karalionak/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core,VinnieJohns/ggrc-core,josthkko/ggrc-core,kr41/ggrc-core,andrei-karalionak/ggrc-core,j0gurt/ggrc-core,VinnieJohns/ggrc-core
|
9358b83c1dc0c6e0306416705d28a48f478878db
|
test/tests/python-imports/container.py
|
test/tests/python-imports/container.py
|
import curses
import dbm
import readline
import bz2
assert(bz2.decompress(bz2.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
import platform
if platform.python_implementation() != 'PyPy' and platform.python_version_tuple()[0] != '2':
# PyPy and Python 2 don't support lzma
import lzma
assert(lzma.decompress(lzma.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
import zlib
assert(zlib.decompress(zlib.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
|
import curses
import readline
import bz2
assert(bz2.decompress(bz2.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
import platform
isNotPypy = platform.python_implementation() != 'PyPy'
isCaveman = platform.python_version_tuple()[0] == '2'
if isCaveman:
import gdbm
else:
import dbm.gnu
if isNotPypy:
# PyPy and Python 2 don't support lzma
import lzma
assert(lzma.decompress(lzma.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
import zlib
assert(zlib.decompress(zlib.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
|
Reorder python import test to use gnu dbm imports instead of generic dbm
|
Reorder python import test to use gnu dbm imports instead of generic dbm
|
Python
|
apache-2.0
|
nodejs-docker-bot/official-images,robfrank/official-images,mattrobenolt/official-images,mattrobenolt/official-images,infosiftr/stackbrew,infosiftr/stackbrew,neo-technology/docker-official-images,chorrell/official-images,nodejs-docker-bot/official-images,pesho/docker-official-images,chorrell/official-images,pesho/docker-official-images,chorrell/official-images,31z4/official-images,31z4/official-images,infosiftr/stackbrew,jperrin/official-images,nodejs-docker-bot/official-images,docker-solr/official-images,infosiftr/stackbrew,emilevauge/official-images,davidl-zend/official-images,chorrell/official-images,mattrobenolt/official-images,31z4/official-images,mattrobenolt/official-images,jperrin/official-images,jperrin/official-images,dinogun/official-images,neo-technology/docker-official-images,31z4/official-images,docker-flink/official-images,docker-flink/official-images,chorrell/official-images,31z4/official-images,docker-flink/official-images,31z4/official-images,nodejs-docker-bot/official-images,pesho/docker-official-images,31z4/official-images,pesho/docker-official-images,davidl-zend/official-images,thresheek/official-images,mattrobenolt/official-images,docker-flink/official-images,robfrank/official-images,nodejs-docker-bot/official-images,robfrank/official-images,robfrank/official-images,emilevauge/official-images,dinogun/official-images,mattrobenolt/official-images,emilevauge/official-images,robfrank/official-images,dinogun/official-images,thresheek/official-images,thresheek/official-images,robfrank/official-images,emilevauge/official-images,robfrank/official-images,robfrank/official-images,docker-library/official-images,infosiftr/stackbrew,chorrell/official-images,pesho/docker-official-images,infosiftr/stackbrew,davidl-zend/official-images,docker-solr/official-images,dinogun/official-images,nodejs-docker-bot/official-images,thresheek/official-images,infosiftr/stackbrew,chorrell/official-images,docker-flink/official-images,thresheek/official-images,robfrank/official
-images,emilevauge/official-images,davidl-zend/official-images,emilevauge/official-images,docker-library/official-images,jperrin/official-images,neo-technology/docker-official-images,emilevauge/official-images,docker-library/official-images,dinogun/official-images,docker-flink/official-images,thresheek/official-images,docker-solr/official-images,thresheek/official-images,davidl-zend/official-images,neo-technology/docker-official-images,docker-flink/official-images,robfrank/official-images,davidl-zend/official-images,neo-technology/docker-official-images,pesho/docker-official-images,mattrobenolt/official-images,chorrell/official-images,docker-library/official-images,docker-solr/official-images,nodejs-docker-bot/official-images,nodejs-docker-bot/official-images,thresheek/official-images,dinogun/official-images,docker-flink/official-images,mattrobenolt/official-images,docker-library/official-images,infosiftr/stackbrew,davidl-zend/official-images,dinogun/official-images,pesho/docker-official-images,emilevauge/official-images,docker-flink/official-images,thresheek/official-images,chorrell/official-images,docker-flink/official-images,docker-solr/official-images,infosiftr/stackbrew,neo-technology/docker-official-images,docker-solr/official-images,jperrin/official-images,pesho/docker-official-images,emilevauge/official-images,mattrobenolt/official-images,infosiftr/stackbrew,chorrell/official-images,dinogun/official-images,31z4/official-images,robfrank/official-images,dinogun/official-images,docker-library/official-images,neo-technology/docker-official-images,jperrin/official-images,infosiftr/stackbrew,jperrin/official-images,31z4/official-images,31z4/official-images,pesho/docker-official-images,31z4/official-images,infosiftr/stackbrew,dinogun/official-images,jperrin/official-images,pesho/docker-official-images,docker-solr/official-images,mattrobenolt/official-images,neo-technology/docker-official-images,emilevauge/official-images,infosiftr/stackbrew,pesho/docker-official-im
ages,emilevauge/official-images,docker-flink/official-images,davidl-zend/official-images,neo-technology/docker-official-images,docker-library/official-images,docker-solr/official-images,thresheek/official-images,docker-library/official-images,31z4/official-images,docker-solr/official-images,31z4/official-images,pesho/docker-official-images,docker-library/official-images,davidl-zend/official-images,neo-technology/docker-official-images,docker-flink/official-images,docker-library/official-images,thresheek/official-images,docker-library/official-images,thresheek/official-images,docker-library/official-images,neo-technology/docker-official-images,docker-solr/official-images,jperrin/official-images,jperrin/official-images,dinogun/official-images,nodejs-docker-bot/official-images,jperrin/official-images,davidl-zend/official-images,docker-library/official-images,docker-solr/official-images,chorrell/official-images,neo-technology/docker-official-images,davidl-zend/official-images,nodejs-docker-bot/official-images,dinogun/official-images,nodejs-docker-bot/official-images,nodejs-docker-bot/official-images,chorrell/official-images,docker-flink/official-images,chorrell/official-images,jperrin/official-images,davidl-zend/official-images,mattrobenolt/official-images,davidl-zend/official-images,docker-solr/official-images,thresheek/official-images,mattrobenolt/official-images,docker-solr/official-images,docker-library/official-images,docker-solr/official-images,emilevauge/official-images,jperrin/official-images,infosiftr/stackbrew,robfrank/official-images,thresheek/official-images,neo-technology/docker-official-images,dinogun/official-images,neo-technology/docker-official-images,robfrank/official-images,31z4/official-images,mattrobenolt/official-images
|
5420d368c064953842023ccc07b531b071ec3514
|
src/tests/test_login_page.py
|
src/tests/test_login_page.py
|
from src.lib.page.login import LoginPage
from src.lib.base import BaseTest
class TestLoginPage(BaseTest):
def test_login_as_admin(self):
login_page = LoginPage(self.driver)
login_page.login()
self.driver.find_element_by_css_selector("li.user.user-dropdown.dropdown")
|
from src.lib.page.login import LoginPage
from src.lib.base import BaseTest
from src.lib.constants import selector
class TestLoginPage(BaseTest):
def test_login_as_admin(self):
login_page = LoginPage(self.driver)
login_page.login()
self.driver.find_element_by_css_selector(selector.LoginPage.BUTTON_LOGIN)
|
Modify login page test to use selectors defined in the module constants.
|
Modify login page test to use selectors defined in the module constants.
|
Python
|
apache-2.0
|
edofic/ggrc-core,kr41/ggrc-core,jmakov/ggrc-core,selahssea/ggrc-core,j0gurt/ggrc-core,kr41/ggrc-core,josthkko/ggrc-core,VinnieJohns/ggrc-core,AleksNeStu/ggrc-core,selahssea/ggrc-core,j0gurt/ggrc-core,plamut/ggrc-core,edofic/ggrc-core,AleksNeStu/ggrc-core,selahssea/ggrc-core,VinnieJohns/ggrc-core,j0gurt/ggrc-core,prasannav7/ggrc-core,NejcZupec/ggrc-core,jmakov/ggrc-core,josthkko/ggrc-core,VinnieJohns/ggrc-core,NejcZupec/ggrc-core,plamut/ggrc-core,jmakov/ggrc-core,prasannav7/ggrc-core,plamut/ggrc-core,edofic/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core,jmakov/ggrc-core,prasannav7/ggrc-core,j0gurt/ggrc-core,kr41/ggrc-core,AleksNeStu/ggrc-core,andrei-karalionak/ggrc-core,andrei-karalionak/ggrc-core,andrei-karalionak/ggrc-core,selahssea/ggrc-core,NejcZupec/ggrc-core,VinnieJohns/ggrc-core,andrei-karalionak/ggrc-core,josthkko/ggrc-core,NejcZupec/ggrc-core,prasannav7/ggrc-core,kr41/ggrc-core,edofic/ggrc-core,jmakov/ggrc-core,josthkko/ggrc-core
|
541d4080821692ed879bfee47eb0ce1a8b278dac
|
Python/reverse-words-in-a-string-iii.py
|
Python/reverse-words-in-a-string-iii.py
|
# Time: O(n)
# Space: O(1)
# Given a string, you need to reverse the order of characters in each word within a sentence
# while still preserving whitespace and initial word order.
#
# Example 1:
# Input: "Let's take LeetCode contest"
# Output: "s'teL ekat edoCteeL tsetnoc"
# Note: In the string, each word is separated by single space and
# there will not be any extra space in the string.
class Solution(object):
def reverseWords(self, s):
"""
:type s: str
:rtype: str
"""
def reverse(s, begin, end):
for i in xrange((end - begin) // 2):
s[begin + i], s[end - 1 - i] = s[end - 1 - i], s[begin + i]
s, i = list(s), 0
for j in xrange(len(s) + 1):
if j == len(s) or s[j] == ' ':
reverse(s, i, j)
i = j + 1
return "".join(s)
|
# Time: O(n)
# Space: O(1)
# Given a string, you need to reverse the order of characters in each word within a sentence
# while still preserving whitespace and initial word order.
#
# Example 1:
# Input: "Let's take LeetCode contest"
# Output: "s'teL ekat edoCteeL tsetnoc"
# Note: In the string, each word is separated by single space and
# there will not be any extra space in the string.
class Solution(object):
def reverseWords(self, s):
"""
:type s: str
:rtype: str
"""
def reverse(s, begin, end):
for i in xrange((end - begin) // 2):
s[begin + i], s[end - 1 - i] = s[end - 1 - i], s[begin + i]
s, i = list(s), 0
for j in xrange(len(s) + 1):
if j == len(s) or s[j] == ' ':
reverse(s, i, j)
i = j + 1
return "".join(s)
class Solution2(object):
def reverseWords(self, s):
reversed_words = [word[::-1] for word in s.split(' ')]
return ' '.join(reversed_words)
|
Add alternative solution for 'Reverse words in string III'
|
Add alternative solution for 'Reverse words in string III'
|
Python
|
mit
|
kamyu104/LeetCode,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015
|
92759e9df89664ae515e51825982141750921ce3
|
src/sample_xblocks/basic/test/test_view_counter.py
|
src/sample_xblocks/basic/test/test_view_counter.py
|
""" Simple test for the view counter that verifies that it is updating properly """
from collections import namedtuple
from mock import Mock
from xblock.runtime import KvsFieldData, DictKeyValueStore
from xblock.view_counter import ViewCounter
from xblock.test.tools import assert_in, assert_equals
TestUsage = namedtuple('TestUsage', 'id, def_id') # pylint: disable=C0103
def test_view_counter_state():
key_store = DictKeyValueStore()
db_model = KvsFieldData(key_store)
tester = ViewCounter(Mock(), db_model, Mock())
assert_equals(tester.views, 0)
# View the XBlock five times
for i in xrange(5):
generated_html = tester.student_view({})
# Make sure the html fragment we're expecting appears in the body_html
assert_in('<span class="views">{0}</span>'.format(i + 1), generated_html.body_html())
assert_equals(tester.views, i + 1)
|
""" Simple test for the view counter that verifies that it is updating properly """
from collections import namedtuple
from mock import Mock
from xblock.runtime import KvsFieldData, DictKeyValueStore
from sample_xblocks.basic.view_counter import ViewCounter
from xblock.test.tools import assert_in, assert_equals
TestUsage = namedtuple('TestUsage', 'id, def_id') # pylint: disable=C0103
def test_view_counter_state():
key_store = DictKeyValueStore()
db_model = KvsFieldData(key_store)
tester = ViewCounter(Mock(), db_model, Mock())
assert_equals(tester.views, 0)
# View the XBlock five times
for i in xrange(5):
generated_html = tester.student_view({})
# Make sure the html fragment we're expecting appears in the body_html
assert_in('<span class="views">{0}</span>'.format(i + 1), generated_html.body_html())
assert_equals(tester.views, i + 1)
|
Use the correct location of view_counter in test
|
Use the correct location of view_counter in test
|
Python
|
apache-2.0
|
stvstnfrd/xblock-sdk,dcadams/xblock-sdk,edx/xblock-sdk,jamiefolsom/xblock-sdk,edx/xblock-sdk,stvstnfrd/xblock-sdk,nagyistoce/edx-xblock-sdk,lovehhf/xblock-sdk,edx-solutions/xblock-sdk,Pilou81715/hackathon_edX,Pilou81715/hackathon_edX,edx-solutions/xblock-sdk,lovehhf/xblock-sdk,lovehhf/xblock-sdk,nagyistoce/edx-xblock-sdk,dcadams/xblock-sdk,jamiefolsom/xblock-sdk,jamiefolsom/xblock-sdk,Lyla-Fischer/xblock-sdk,Lyla-Fischer/xblock-sdk,stvstnfrd/xblock-sdk,lovehhf/xblock-sdk,Lyla-Fischer/xblock-sdk,edx/xblock-sdk,Pilou81715/hackathon_edX,nagyistoce/edx-xblock-sdk,edx-solutions/xblock-sdk,Pilou81715/hackathon_edX,dcadams/xblock-sdk,nagyistoce/edx-xblock-sdk,edx-solutions/xblock-sdk,jamiefolsom/xblock-sdk
|
08718ce949e7f80b0cbe39c3eba4446133c6d72d
|
code/marv-api/marv_api/deprecation.py
|
code/marv-api/marv_api/deprecation.py
|
# Copyright 2020 Ternaris.
# SPDX-License-Identifier: AGPL-3.0-only
import warnings
from dataclasses import dataclass
from typing import Any
@dataclass
class Info:
module: str
version: str
obj: Any
msg: str = None
def make_getattr(module, dct):
assert all(x.module == module for x in dct.values())
def __getattr__(name):
info = dct.get(name)
if info is None:
raise AttributeError(f'module {module} has no attribute {name}')
msg = (
f'{module}.{name} will be removed in {info.version}; '
f'{info.msg or "please let us know if this is an issue for you."}'
)
warnings.warn(msg, FutureWarning, stacklevel=2)
return info.obj
return __getattr__
|
# Copyright 2020 Ternaris.
# SPDX-License-Identifier: AGPL-3.0-only
import functools
import warnings
from dataclasses import dataclass
from typing import Any
@dataclass
class Info:
module: str
version: str
obj: Any
msg: str = None
def make_getattr(module, dct):
assert all(x.module == module for x in dct.values())
def __getattr__(name):
info = dct.get(name)
if info is None:
raise AttributeError(f'module {module} has no attribute {name}')
msg = (
f'{module}.{name} will be removed in {info.version}; '
f'{info.msg or "please let us know if this is an issue for you."}'
)
warnings.warn(msg, FutureWarning, stacklevel=2)
return info.obj
return __getattr__
def deprecated(version, msg=None, name=None):
"""Wrap function to trigger deprecated message upon call."""
def deco(func):
@functools.wraps(func)
def wrapper(*args, **kw):
_msg = (
f'{func.__module__}.{name or func.__name__} will be removed in {version}; '
f'{msg or "please let us know if this is an issue for you."}'
)
warnings.warn(_msg, FutureWarning, stacklevel=2)
return func(*args, **kw)
return wrapper
return deco
|
Add decorator to declare function deprecated
|
Add decorator to declare function deprecated
|
Python
|
agpl-3.0
|
ternaris/marv-robotics,ternaris/marv-robotics
|
700fa0144c5276d8e31c01a243340f6cbac07e8f
|
sentry/client/handlers.py
|
sentry/client/handlers.py
|
import logging
class SentryHandler(logging.Handler):
def emit(self, record):
from sentry.client.models import get_client
get_client().create_from_record(record)
|
import logging
import sys
class SentryHandler(logging.Handler):
def emit(self, record):
from sentry.client.models import get_client
# Avoid typical config issues by overriding loggers behavior
if record.name == 'sentry.errors':
print >> sys.stderr, record.message
return
get_client().create_from_record(record)
|
Add a safety net for recursive logging
|
Add a safety net for recursive logging
|
Python
|
bsd-3-clause
|
ewdurbin/sentry,hongliang5623/sentry,Photonomie/raven-python,ewdurbin/raven-python,daevaorn/sentry,NickPresta/sentry,dcramer/sentry-old,looker/sentry,NickPresta/sentry,chayapan/django-sentry,Kryz/sentry,BuildingLink/sentry,gencer/sentry,boneyao/sentry,danriti/raven-python,imankulov/sentry,nikolas/raven-python,mvaled/sentry,gencer/sentry,inspirehep/raven-python,arthurlogilab/raven-python,daevaorn/sentry,recht/raven-python,icereval/raven-python,drcapulet/sentry,ewdurbin/sentry,akheron/raven-python,boneyao/sentry,JamesMura/sentry,zenefits/sentry,johansteffner/raven-python,ewdurbin/sentry,gg7/sentry,SilentCircle/sentry,dbravender/raven-python,alex/raven,alex/sentry,zenefits/sentry,someonehan/raven-python,JTCunning/sentry,JTCunning/sentry,jbarbuto/raven-python,beniwohli/apm-agent-python,vperron/sentry,someonehan/raven-python,korealerts1/sentry,arthurlogilab/raven-python,Kronuz/django-sentry,mvaled/sentry,beni55/sentry,llonchj/sentry,mitsuhiko/raven,beni55/sentry,tarkatronic/opbeat_python,BuildingLink/sentry,WoLpH/django-sentry,jbarbuto/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,nikolas/raven-python,gencer/sentry,recht/raven-python,alexm92/sentry,jbarbuto/raven-python,wong2/sentry,pauloschilling/sentry,lepture/raven-python,felixbuenemann/sentry,nicholasserra/sentry,icereval/raven-python,percipient/raven-python,jmagnusson/raven-python,Kronuz/django-sentry,Natim/sentry,icereval/raven-python,SilentCircle/sentry,songyi199111/sentry,argonemyth/sentry,beeftornado/sentry,camilonova/sentry,NickPresta/sentry,smarkets/raven-python,akalipetis/raven-python,recht/raven-python,chayapan/django-sentry,inspirehep/raven-python,patrys/opbeat_python,SilentCircle/sentry,zenefits/sentry,ronaldevers/raven-python,beniwohli/apm-agent-python,kevinlondon/sentry,jean/sentry,Goldmund-Wyldebeast-Wunderliebe/raven-python,mvaled/sentry,BayanGroup/sentry,gencer/sentry,jmp0xf/raven-python,kevinastone/sentry,openlabs/raven,daevaorn/sentry,patrys/opbeat_python,wong2/sentry,JamesMura/sentry,Wo
LpH/django-sentry,Photonomie/raven-python,nikolas/raven-python,Kryz/sentry,looker/sentry,songyi199111/sentry,ifduyue/sentry,NickPresta/sentry,1tush/sentry,mitsuhiko/sentry,fotinakis/sentry,tarkatronic/opbeat_python,dirtycoder/opbeat_python,mvaled/sentry,vperron/sentry,korealerts1/sentry,dirtycoder/opbeat_python,jean/sentry,nikolas/raven-python,alex/sentry,jmagnusson/raven-python,percipient/raven-python,argonemyth/sentry,hongliang5623/sentry,JackDanger/sentry,kevinlondon/sentry,pauloschilling/sentry,fuziontech/sentry,tbarbugli/sentry_fork,rdio/sentry,smarkets/raven-python,looker/sentry,BuildingLink/sentry,camilonova/sentry,JTCunning/sentry,ifduyue/sentry,someonehan/raven-python,jean/sentry,Kryz/sentry,hzy/raven-python,arthurlogilab/raven-python,JamesMura/sentry,felixbuenemann/sentry,ronaldevers/raven-python,fuziontech/sentry,ifduyue/sentry,llonchj/sentry,SilentCircle/sentry,1tush/sentry,kevinastone/sentry,looker/sentry,zenefits/sentry,pauloschilling/sentry,alex/sentry,jbarbuto/raven-python,looker/sentry,collective/mr.poe,gg7/sentry,felixbuenemann/sentry,1tush/sentry,jean/sentry,ronaldevers/raven-python,Natim/sentry,BuildingLink/sentry,daikeren/opbeat_python,Natim/sentry,Goldmund-Wyldebeast-Wunderliebe/raven-python,wujuguang/sentry,wujuguang/sentry,akalipetis/raven-python,JackDanger/sentry,BayanGroup/sentry,arthurlogilab/raven-python,boneyao/sentry,BayanGroup/sentry,mitsuhiko/raven,tbarbugli/sentry_fork,dcramer/sentry-old,Goldmund-Wyldebeast-Wunderliebe/raven-python,daikeren/opbeat_python,ticosax/opbeat_python,beeftornado/sentry,jmp0xf/raven-python,dbravender/raven-python,jokey2k/sentry,drcapulet/sentry,WoLpH/django-sentry,ticosax/opbeat_python,fotinakis/sentry,primepix/django-sentry,dcramer/sentry-old,BuildingLink/sentry,ewdurbin/raven-python,mvaled/sentry,ngonzalvez/sentry,llonchj/sentry,beni55/sentry,smarkets/raven-python,TedaLIEz/sentry,lepture/raven-python,hongliang5623/sentry,primepix/django-sentry,hzy/raven-python,akheron/raven-python,lopter/raven-python-old,Kr
onuz/django-sentry,nicholasserra/sentry,daevaorn/sentry,TedaLIEz/sentry,fotinakis/sentry,rdio/sentry,hzy/raven-python,vperron/sentry,imankulov/sentry,ifduyue/sentry,mvaled/sentry,korealerts1/sentry,ngonzalvez/sentry,tbarbugli/sentry_fork,rdio/sentry,getsentry/raven-python,chayapan/django-sentry,rdio/sentry,johansteffner/raven-python,getsentry/raven-python,jean/sentry,TedaLIEz/sentry,patrys/opbeat_python,ngonzalvez/sentry,camilonova/sentry,JackDanger/sentry,smarkets/raven-python,akheron/raven-python,kevinastone/sentry,Photonomie/raven-python,JamesMura/sentry,wujuguang/sentry,ticosax/opbeat_python,inspirehep/raven-python,beeftornado/sentry,argonemyth/sentry,beniwohli/apm-agent-python,tarkatronic/opbeat_python,ewdurbin/raven-python,fuziontech/sentry,zenefits/sentry,songyi199111/sentry,jokey2k/sentry,daikeren/opbeat_python,gencer/sentry,patrys/opbeat_python,dirtycoder/opbeat_python,icereval/raven-python,getsentry/raven-python,jmp0xf/raven-python,danriti/raven-python,nicholasserra/sentry,inspirehep/raven-python,wong2/sentry,gg7/sentry,imankulov/sentry,percipient/raven-python,johansteffner/raven-python,ifduyue/sentry,beniwohli/apm-agent-python,danriti/raven-python,alexm92/sentry,fotinakis/sentry,lepture/raven-python,akalipetis/raven-python,primepix/django-sentry,jmagnusson/raven-python,jokey2k/sentry,alexm92/sentry,JamesMura/sentry,drcapulet/sentry,kevinlondon/sentry,dbravender/raven-python,mitsuhiko/sentry
|
529d72ff62f3d4b8ab18a26beadd20322a118a28
|
client/scripts/osutil.py
|
client/scripts/osutil.py
|
import sys
class OSUtil():
def __init__(self):
pass
def platform(self):
platform = sys.platform # Map from python platform name to ue4 platform name
names = {
'cygwin': 'Win', # could be win32 also
'win32': 'Win',
'win64': 'Win',
'linux2': 'Linux',
'darwin': 'Mac',
}
return names[platform]
|
import sys, platform
class OSUtil():
def __init__(self):
pass
def platform(self):
win = 'Win'
mac = 'Mac'
linux = 'Linux'
if platform.release().endswith('Microsoft'):
# This is a hacky way to check whether I am running Ubuntu on Windows
return win
# Map from python platform name to ue4 platform name
names = {
'cygwin': win, # could be win32 also
'win32': win,
'win64': win,
'linux2': linux,
'darwin': mac,
}
return names[sys.platform]
|
Fix the platform check for windows.
|
Fix the platform check for windows.
|
Python
|
mit
|
qiuwch/unrealcv,unrealcv/unrealcv,unrealcv/unrealcv,qiuwch/unrealcv,qiuwch/unrealcv,unrealcv/unrealcv,unrealcv/unrealcv,unrealcv/unrealcv,qiuwch/unrealcv,qiuwch/unrealcv,qiuwch/unrealcv
|
4e90a2fd424eeb078957b779a211d9643c516566
|
tests/commands/test_settings.py
|
tests/commands/test_settings.py
|
# Copyright 2014-2015 Ivan Kravets <me@ikravets.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from platformio.commands.settings import cli
from platformio import app
def test_settings_check(clirunner, validate_cliresult):
result = clirunner.invoke(cli, ["get"])
validate_cliresult(result)
assert len(result.output)
for item in app.DEFAULT_SETTINGS.items():
assert item[0] in result.output
|
# Copyright 2014-2015 Ivan Kravets <me@ikravets.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from platformio.commands.settings import cli
from platformio import app
def test_settings_check(clirunner, validate_cliresult):
result = clirunner.invoke(cli, ["get"])
assert result.exit_code == 0
assert not result.exception
assert len(result.output)
for item in app.DEFAULT_SETTINGS.items():
assert item[0] in result.output
|
Fix test for settings command
|
Fix test for settings command
|
Python
|
apache-2.0
|
platformio/platformio-core,eiginn/platformio,platformio/platformio-core,ZachMassia/platformio,platformio/platformio,valeros/platformio
|
c86f915e324d7e66cb07cbcc9fb827c2dcdeda29
|
rst2pdf/utils.py
|
rst2pdf/utils.py
|
# -*- coding: utf-8 -*-
# See LICENSE.txt for licensing terms
#$HeadURL$
#$LastChangedDate$
#$LastChangedRevision$
import shlex
from reportlab.platypus import Spacer
from flowables import *
def parseRaw(data):
"""Parse and process a simple DSL to handle creation of flowables.
Supported (can add others on request):
* PageBreak
* Spacer width, height
"""
elements = []
lines = data.splitlines()
for line in lines:
lexer = shlex.shlex(line)
lexer.whitespace += ','
tokens = list(lexer)
command = tokens[0]
if command == 'PageBreak':
if len(tokens) == 1:
elements.append(MyPageBreak())
else:
elements.append(MyPageBreak(tokens[1]))
if command == 'Spacer':
elements.append(Spacer(int(tokens[1]), int(tokens[2])))
if command == 'Transition':
elements.append(Transition(*tokens[1:]))
return elements
# Looks like this is not used anywhere now:
# def depth(node):
# if node.parent == None:
# return 0
# else:
# return 1 + depth(node.parent)
|
# -*- coding: utf-8 -*-
# See LICENSE.txt for licensing terms
#$HeadURL$
#$LastChangedDate$
#$LastChangedRevision$
import shlex
from reportlab.platypus import Spacer
from flowables import *
from styles import adjustUnits
def parseRaw(data):
"""Parse and process a simple DSL to handle creation of flowables.
Supported (can add others on request):
* PageBreak
* Spacer width, height
"""
elements = []
lines = data.splitlines()
for line in lines:
lexer = shlex.shlex(line)
lexer.whitespace += ','
tokens = list(lexer)
command = tokens[0]
if command == 'PageBreak':
if len(tokens) == 1:
elements.append(MyPageBreak())
else:
elements.append(MyPageBreak(tokens[1]))
if command == 'Spacer':
elements.append(Spacer(adjustUnits(tokens[1]),
adjustUnits(tokens[2])))
if command == 'Transition':
elements.append(Transition(*tokens[1:]))
return elements
# Looks like this is not used anywhere now:
# def depth(node):
# if node.parent == None:
# return 0
# else:
# return 1 + depth(node.parent)
|
Add unit support for spacers
|
Add unit support for spacers
git-svn-id: 305ad3fa995f01f9ce4b4f46c2a806ba00a97020@779 3777fadb-0f44-0410-9e7f-9d8fa6171d72
|
Python
|
mit
|
aquavitae/rst2pdf,aquavitae/rst2pdf,sychen/rst2pdf,aquavitae/rst2pdf-py3-dev,tonioo/rst2pdf,tonioo/rst2pdf,openpolis/rst2pdf-patched-docutils-0.8,aquavitae/rst2pdf-py3-dev,sychen/rst2pdf,openpolis/rst2pdf-patched-docutils-0.8
|
f71045f6bef5c8b9f7274ec41a965ccbe1044a01
|
examples/test_markers.py
|
examples/test_markers.py
|
""" These tests demonstrate pytest marker use for finding and running tests.
Usage examples from this file:
pytest -v -m marker_test_suite # Runs A, B, C, D
pytest -v -m marker1 # Runs A
pytest -v -m marker2 # Runs B, C
pytest -v -m xkcd_code # Runs C
pytest test_markers.py -v -m "not marker2" # Runs A, D
(The "-v" will display the names of tests as they run.)
(Add "--collect-only" to display names of tests without running them.)
"""
import pytest
from seleniumbase import BaseCase
@pytest.mark.marker_test_suite
class MarkerTestSuite(BaseCase):
@pytest.mark.marker1
def test_A(self):
self.open("https://xkcd.com/1319/")
self.assert_text("Automation", "div#ctitle")
@pytest.mark.marker2
def test_B(self):
self.open("https://www.xkcd.com/1700/")
self.assert_text("New Bug", "div#ctitle")
@pytest.mark.marker2
@pytest.mark.xkcd_code # Tests can have multiple markers
def test_C(self):
self.open("https://xkcd.com/844/")
self.assert_text("Good Code", "div#ctitle")
def test_D(self):
self.open("https://xkcd.com/2021/")
self.assert_text("Software Development", "div#ctitle")
|
""" These tests demonstrate pytest marker use for finding and running tests.
Usage examples from this file:
pytest -v -m marker_test_suite # Runs A, B, C, D
pytest -v -m marker1 # Runs A
pytest -v -m marker2 # Runs B, C
pytest -v -m marker3 # Runs C
pytest test_markers.py -v -m "not marker2" # Runs A, D
(The "-v" will display the names of tests as they run.)
(Add "--collect-only" to display names of tests without running them.)
"""
import pytest
from seleniumbase import BaseCase
@pytest.mark.marker_test_suite
class MarkerTestSuite(BaseCase):
@pytest.mark.marker1
def test_A(self):
self.open("https://xkcd.com/1319/")
self.assert_text("Automation", "div#ctitle")
@pytest.mark.marker2
def test_B(self):
self.open("https://www.xkcd.com/1700/")
self.assert_text("New Bug", "div#ctitle")
@pytest.mark.marker2
@pytest.mark.marker3 # Tests can have multiple markers
def test_C(self):
self.open("https://xkcd.com/844/")
self.assert_text("Good Code", "div#ctitle")
def test_D(self):
self.open("https://xkcd.com/2021/")
self.assert_text("Software Development", "div#ctitle")
|
Update pytest marker test suite
|
Update pytest marker test suite
|
Python
|
mit
|
mdmintz/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase
|
9b9582a1b7226ceb9cc65657ffb7fd7d51c8ea2a
|
lib/exp/featx/__init__.py
|
lib/exp/featx/__init__.py
|
__all__ = []
from lib.exp.featx.base import Featx
from lib.exp.tools.slider import Slider
class SlideFeatx(Featx, Slider):
def __init__(self, root, name):
Featx.__init__(self, root, name)
Slider.__init__(self, root, name)
def get_feats(self):
imgl = self.get_slides(None, gray=True, resize=True)
self.feats(imgl, prefix="s")
|
__all__ = []
from lib.exp.featx.base import Feats
from lib.exp.tools.slider import Slider
from lib.exp.tools.video import Video
from lib.exp.prepare import Prepare
class Featx(Feats):
def __init__(self, root, name):
Feats.__init__(self, root, name)
def get_slide_feats(self):
ss = Slider(self.root, self.name)
imgl = ss.get_slides(None, gray=True, resize=True)
self.feats(imgl, prefix="s")
def get_frame_feats(self):
pp = Prepare(self.root, self.name)
vv = Video(self.root, self.name)
imgl = vv.get_frames(pp.frame_ids(), gray=True)
self.feats(imgl, prefix="f")
|
Change to use `featx` in package
|
Change to use `featx` in package
|
Python
|
agpl-3.0
|
speed-of-light/pyslider
|
f0e180387a37437fe7e8d37fa2806e7d47736bfc
|
pyheufybot/bothandler.py
|
pyheufybot/bothandler.py
|
import os
from twisted.internet import reactor
from heufybot import HeufyBot, HeufyBotFactory
from config import Config
class BotHandler(object):
factories = {}
globalConfig = None
def __init__(self):
print "--- Loading configs..."
self.globalConfig = Config("globalconfig.yml")
self.globalConfig.loadConfig(None)
configList = self.getConfigList()
if len(configList) == 0:
print "WARNING: No server configs found. Using the global config instead."
else:
for filename in self.getConfigList():
config = Config(filename, globalConfig.settings)
def getConfigList(self):
root = os.path.join("config")
configs = []
for item in os.listdir(root):
if not os.path.isfile(os.path.join(root, item)):
continue
if not item.endswith(".yml"):
continue
if item == "globalconfig.yml":
continue
configs.append(item)
return configs
if __name__ == "__main__":
# Create folders
if not os.path.exists(os.path.join("config")):
os.makedirs("config")
handler = BotHandler()
|
import os
from twisted.internet import reactor
from heufybot import HeufyBot, HeufyBotFactory
from config import Config
class BotHandler(object):
factories = {}
globalConfig = None
def __init__(self):
print "--- Loading configs..."
self.globalConfig = Config("globalconfig.yml")
if not self.globalConfig.loadConfig(None):
return
configList = self.getConfigList()
if len(configList) == 0:
print "WARNING: No server configs found. Using the global config instead."
else:
for filename in self.getConfigList():
config = Config(filename, globalConfig.settings)
def getConfigList(self):
root = os.path.join("config")
configs = []
for item in os.listdir(root):
if not os.path.isfile(os.path.join(root, item)):
continue
if not item.endswith(".yml"):
continue
if item == "globalconfig.yml":
continue
configs.append(item)
return configs
if __name__ == "__main__":
# Create folders
if not os.path.exists(os.path.join("config")):
os.makedirs("config")
handler = BotHandler()
|
Make sure the application doesn't continue without a config
|
Make sure the application doesn't continue without a config
|
Python
|
mit
|
Heufneutje/PyHeufyBot,Heufneutje/PyHeufyBot
|
22df7a89020cbbcf80a88bcf3572dea591884861
|
avatar/urls.py
|
avatar/urls.py
|
from django.conf.urls.defaults import patterns, url
urlpatterns = patterns('avatar.views',
url('^add/$', 'add', name='avatar_add'),
url('^change/$', 'change', name='avatar_change'),
url('^delete/$', 'delete', name='avatar_delete'),
url('^render_primary/(?P<user>[\+\w]+)/(?P<size>[\d]+)/$', 'render_primary', name='avatar_render_primary'),
)
|
from django.conf.urls.defaults import patterns, url
urlpatterns = patterns('avatar.views',
url('^add/$', 'add', name='avatar_add'),
url('^change/$', 'change', name='avatar_change'),
url('^delete/$', 'delete', name='avatar_delete'),
url('^render_primary/(?P<user>[\w\d\.\-_]{3,30})/(?P<size>[\d]+)/$', 'render_primary', name='avatar_render_primary'),
)
|
Support for username with extra chars.
|
Support for username with extra chars.
|
Python
|
bsd-3-clause
|
tbabej/django-avatar,Brendtron5000/django-avatar,Nuevosmedios/django-avatar,stellalie/django-avatar,barbuza/django-avatar,allenling/django-avatar,allenling/django-avatar,tbabej/django-avatar,z4r/django-avatar,imgmix/django-avatar,hexenxp14/django-avatar,MachineandMagic/django-avatar,brajeshvit/avatarmodule,integricho/django-avatar,Kami/django-avatar,integricho/django-avatar,DrMeers/django-avatar,jessehon/django-avatar,z4r/django-avatar,rizumu/django-avatar,grantmcconnaughey/django-avatar,robertour/django-avatar,arctelix/django-avatar,robertour/django-avatar,ericroberts/django-avatar,jezdez/django-avatar,Nuevosmedios/django-avatar,bazerk/django-avatar,Mapiarz/django-avatar,MachineandMagic/django-avatar,lizrice/django-avatar,allenling/bugfixavatar,fedetorre/django-avatar,rizumu/django-avatar,dannybrowne86/django-avatar,Temesis/django-avatar,ayang/django-avatar,aptwebapps/django-avatar,ad-m/django-avatar,allenling/bugfixavatar,imgmix/django-avatar,miohtama/django-avatar,e4c5/django-avatar,holzenburg/django-avatar,jessehon/django-avatar,caumons/django-avatar,guzru/django-avatar,TomLottermann/django-avatar,brajeshvit/avatarmodule,therocode/django-avatar,Mapiarz/django-avatar,e4c5/django-avatar,holzenburg/django-avatar,ericroberts/django-avatar,barbuza/django-avatar,Brendtron5000/django-avatar,ad-m/django-avatar,stellalie/django-avatar,dannybrowne86/django-avatar,bazerk/django-avatar,heliodor/django-avatar,jezdez/django-avatar,aptwebapps/django-avatar,grantmcconnaughey/django-avatar,therocode/django-avatar,nai-central/django-avatar,nai-central/django-avatar,hexenxp14/django-avatar,ayang/django-avatar
|
3c613bc1b729904883bca77924d892012b93cdc3
|
powerline/renderers/pango_markup.py
|
powerline/renderers/pango_markup.py
|
# vim:fileencoding=utf-8:noet
from powerline.renderer import Renderer
from powerline.colorscheme import ATTR_BOLD, ATTR_ITALIC, ATTR_UNDERLINE
class PangoMarkupRenderer(Renderer):
'''Powerline Pango markup segment renderer.'''
@staticmethod
def hlstyle(*args, **kwargs):
# We don't need to explicitly reset attributes, so skip those calls
return ''
def hl(self, contents, fg=None, bg=None, attr=None):
'''Highlight a segment.'''
awesome_attr = []
if fg is not None:
if fg is not False and fg[1] is not False:
awesome_attr += ['foreground="#{0:06x}"'.format(fg[1])]
if bg is not None:
if bg is not False and bg[1] is not False:
awesome_attr += ['background="#{0:06x}"'.format(bg[1])]
if attr is not None and attr is not False:
if attr & ATTR_BOLD:
awesome_attr += ['font_weight="bold"']
if attr & ATTR_ITALIC:
awesome_attr += ['font_style="italic"']
if attr & ATTR_UNDERLINE:
awesome_attr += ['underline="single"']
return '<span ' + ' '.join(awesome_attr) + '>' + contents + '</span>'
renderer = PangoMarkupRenderer
|
# vim:fileencoding=utf-8:noet
from powerline.renderer import Renderer
from powerline.colorscheme import ATTR_BOLD, ATTR_ITALIC, ATTR_UNDERLINE
from xmlrpclib import escape as _escape
class PangoMarkupRenderer(Renderer):
'''Powerline Pango markup segment renderer.'''
@staticmethod
def hlstyle(*args, **kwargs):
# We don't need to explicitly reset attributes, so skip those calls
return ''
def hl(self, contents, fg=None, bg=None, attr=None):
'''Highlight a segment.'''
awesome_attr = []
if fg is not None:
if fg is not False and fg[1] is not False:
awesome_attr += ['foreground="#{0:06x}"'.format(fg[1])]
if bg is not None:
if bg is not False and bg[1] is not False:
awesome_attr += ['background="#{0:06x}"'.format(bg[1])]
if attr is not None and attr is not False:
if attr & ATTR_BOLD:
awesome_attr += ['font_weight="bold"']
if attr & ATTR_ITALIC:
awesome_attr += ['font_style="italic"']
if attr & ATTR_UNDERLINE:
awesome_attr += ['underline="single"']
return '<span ' + ' '.join(awesome_attr) + '>' + contents + '</span>'
escape = staticmethod(_escape)
renderer = PangoMarkupRenderer
|
Use xmlrpclib.escape for escaping in PangoMarkupRenderer
|
Use xmlrpclib.escape for escaping in PangoMarkupRenderer
|
Python
|
mit
|
dragon788/powerline,cyrixhero/powerline,blindFS/powerline,QuLogic/powerline,magus424/powerline,areteix/powerline,dragon788/powerline,keelerm84/powerline,lukw00/powerline,xfumihiro/powerline,prvnkumar/powerline,magus424/powerline,s0undt3ch/powerline,dragon788/powerline,bartvm/powerline,bezhermoso/powerline,wfscheper/powerline,DoctorJellyface/powerline,junix/powerline,IvanAli/powerline,DoctorJellyface/powerline,cyrixhero/powerline,keelerm84/powerline,wfscheper/powerline,lukw00/powerline,darac/powerline,seanfisk/powerline,firebitsbr/powerline,Liangjianghao/powerline,Luffin/powerline,s0undt3ch/powerline,bezhermoso/powerline,S0lll0s/powerline,junix/powerline,wfscheper/powerline,xxxhycl2010/powerline,QuLogic/powerline,IvanAli/powerline,areteix/powerline,xfumihiro/powerline,xxxhycl2010/powerline,IvanAli/powerline,xxxhycl2010/powerline,blindFS/powerline,cyrixhero/powerline,prvnkumar/powerline,xfumihiro/powerline,Liangjianghao/powerline,DoctorJellyface/powerline,s0undt3ch/powerline,junix/powerline,blindFS/powerline,S0lll0s/powerline,darac/powerline,kenrachynski/powerline,prvnkumar/powerline,Luffin/powerline,Liangjianghao/powerline,seanfisk/powerline,magus424/powerline,EricSB/powerline,bartvm/powerline,firebitsbr/powerline,seanfisk/powerline,EricSB/powerline,russellb/powerline,QuLogic/powerline,darac/powerline,bartvm/powerline,lukw00/powerline,Luffin/powerline,kenrachynski/powerline,russellb/powerline,bezhermoso/powerline,S0lll0s/powerline,russellb/powerline,EricSB/powerline,firebitsbr/powerline,areteix/powerline,kenrachynski/powerline
|
62c24f6edaa91834d4a7b2a3f9b99b8b96322230
|
nova/policies/hide_server_addresses.py
|
nova/policies/hide_server_addresses.py
|
# Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
BASE_POLICY_NAME = 'os_compute_api:os-hide-server-addresses'
hide_server_addresses_policies = [
policy.RuleDefault(
name=BASE_POLICY_NAME,
check_str='is_admin:False'),
]
def list_rules():
return hide_server_addresses_policies
|
# Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:os-hide-server-addresses'
hide_server_addresses_policies = [
base.create_rule_default(
BASE_POLICY_NAME,
'is_admin:False',
"""Hide server's 'addresses' key in the server response.
This set the 'addresses' key in the server response to an empty dictionary
when the server is in a specific set of states as defined in
CONF.api.hide_server_address_states.
By default 'addresses' is hidden only when the server is in 'BUILDING'
state.""",
[
{
'method': 'GET',
'path': '/servers/{id}'
},
{
'method': 'GET',
'path': '/servers/detail'
}
]),
]
def list_rules():
return hide_server_addresses_policies
|
Add policy description for 'os-hide-server-addresses'
|
Add policy description for 'os-hide-server-addresses'
This commit adds policy doc for 'os-hide-server-addresses' policies.
Partial implement blueprint policy-docs
Change-Id: I98edbd8579f052c74283bde2ec4f85d301a0807a
|
Python
|
apache-2.0
|
rahulunair/nova,gooddata/openstack-nova,mikalstill/nova,mahak/nova,Juniper/nova,mahak/nova,mikalstill/nova,Juniper/nova,rahulunair/nova,gooddata/openstack-nova,vmturbo/nova,openstack/nova,Juniper/nova,jianghuaw/nova,openstack/nova,gooddata/openstack-nova,klmitch/nova,gooddata/openstack-nova,jianghuaw/nova,klmitch/nova,klmitch/nova,mahak/nova,vmturbo/nova,klmitch/nova,jianghuaw/nova,phenoxim/nova,mikalstill/nova,vmturbo/nova,openstack/nova,vmturbo/nova,jianghuaw/nova,rahulunair/nova,phenoxim/nova,Juniper/nova
|
91e7a4f36637e7706dd17f1e093fe029f031bc3d
|
API/chat/migrations/0001_squashed_0002_auto_20150707_1647.py
|
API/chat/migrations/0001_squashed_0002_auto_20150707_1647.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
replaces = [(b'chat', '0001_squashed_0008_auto_20150702_1437'), (b'chat', '0002_auto_20150707_1647')]
dependencies = [
]
operations = [
migrations.CreateModel(
name='Channel',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=20)),
],
),
migrations.CreateModel(
name='Message',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('text', models.TextField(max_length=2000)),
('datetime', models.DateTimeField()),
('channel', models.ForeignKey(to='chat.Channel')),
('username', models.CharField(max_length=20)),
],
),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Channel',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=20)),
],
),
migrations.CreateModel(
name='Message',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('text', models.TextField(max_length=2000)),
('datetime', models.DateTimeField()),
('channel', models.ForeignKey(to='chat.Channel')),
('username', models.CharField(max_length=20)),
],
),
]
|
Remove replaces line on 0001_squashed
|
[HOTFIX] Remove replaces line on 0001_squashed
|
Python
|
mit
|
gtklocker/ting,sirodoht/ting,gtklocker/ting,odyvarv/ting-1,VitSalis/ting,mbalamat/ting,VitSalis/ting,gtklocker/ting,sirodoht/ting,odyvarv/ting-1,gtklocker/ting,dionyziz/ting,mbalamat/ting,VitSalis/ting,odyvarv/ting-1,dionyziz/ting,mbalamat/ting,VitSalis/ting,odyvarv/ting-1,dionyziz/ting,mbalamat/ting,sirodoht/ting,dionyziz/ting,sirodoht/ting
|
1124da4ea6c30f0c36854ec938aa9ea60cca73d4
|
djangoappengine/db/expressions.py
|
djangoappengine/db/expressions.py
|
from django.db.models.sql.expressions import SQLEvaluator
from django.db.models.expressions import ExpressionNode
OPERATION_MAP = {
ExpressionNode.ADD: lambda x, y: x + y,
ExpressionNode.SUB: lambda x, y: x - y,
ExpressionNode.MUL: lambda x, y: x * y,
ExpressionNode.DIV: lambda x, y: x / y,
ExpressionNode.MOD: lambda x, y: x % y,
ExpressionNode.BITAND: lambda x, y: x & y,
ExpressionNode.BITOR: lambda x, y: x | y,
}
class ExpressionEvaluator(SQLEvaluator):
def __init__(self, expression, query, entity, allow_joins=True):
super(ExpressionEvaluator, self).__init__(expression, query,
allow_joins)
self.entity = entity
##################################################
# Vistor methods for final expression evaluation #
##################################################
def evaluate_node(self, node, qn, connection):
values = []
for child in node.children:
if hasattr(child, 'evaluate'):
value = child.evaluate(self, qn, connection)
else:
value = child
if value is not None:
values.append(value)
return OPERATION_MAP[node.connector](*values)
def evaluate_leaf(self, node, qn, connection):
return self.entity[qn(self.cols[node][1])]
|
from django.db.models.sql.expressions import SQLEvaluator
from django.db.models.expressions import ExpressionNode
OPERATION_MAP = {
ExpressionNode.ADD: lambda x, y: x + y,
ExpressionNode.SUB: lambda x, y: x - y,
ExpressionNode.MUL: lambda x, y: x * y,
ExpressionNode.DIV: lambda x, y: x / y,
ExpressionNode.MOD: lambda x, y: x % y,
ExpressionNode.BITAND: lambda x, y: x & y,
ExpressionNode.BITOR: lambda x, y: x | y,
}
class ExpressionEvaluator(SQLEvaluator):
def __init__(self, expression, query, entity, allow_joins=True):
super(ExpressionEvaluator, self).__init__(expression, query,
allow_joins)
self.entity = entity
##################################################
# Vistor methods for final expression evaluation #
##################################################
def evaluate_node(self, node, qn, connection):
values = []
for child in node.children:
if hasattr(child, 'evaluate'):
value = child.evaluate(self, qn, connection)
else:
value = child
if value is not None:
values.append(value)
return OPERATION_MAP[node.connector](*values)
def evaluate_leaf(self, node, qn, connection):
col = None
for n, c in self.cols:
if n is node:
col = c
break
if col is None:
raise ValueError("Given node not found")
return self.entity[qn(col[1])]
|
Fix ExpressionEvalutator for Django 1.5 changes to cols property
|
Fix ExpressionEvalutator for Django 1.5 changes to cols property
|
Python
|
bsd-3-clause
|
django-nonrel/djangoappengine,Implisit/djangoappengine,dwdraju/djangoappengine
|
7c4cf25868a907eb3ac718b2d4dbef2966fbe806
|
ext/dcos-installer/dcos_installer/util/__init__.py
|
ext/dcos-installer/dcos_installer/util/__init__.py
|
import logging
import os
CONFIG_PATH = '/genconf/config.yaml'
SSH_KEY_PATH = '/genconf/ssh_key'
IP_DETECT_PATH = '/genconf/ip-detect'
SERVE_DIR = '/genconf/serve'
STATE_DIR = '/genconf/state'
GENCONF_DIR = '/genconf'
log = logging.getLogger(__name__)
def write_file(data, path):
try:
with open(path, 'w') as f:
log.debug("Writing file %s", path)
f.write(data)
except:
log.error("Filed to write path %s", path)
def get_action_state(action_name):
"""
Check the action.json file and if the
success + failed + term == total then we are finished.
If not, return running.
"""
return {
"action_name": "deploy",
"action_state": "running",
"hosts_running": [],
"hosts_success": [],
"hosts_failed": [],
"hosts_terminated": [],
}
def clear_action_jsons():
"""
On startup, remove all the old action.json files (preflight,
postflight, deploy .json). This is because action state is
nullified when the installer shuts down. This way we do not
return inconsistent state in the get_action_state().
"""
pass
def create_directory(path):
if not os.path.exists(path):
os.mkdirs(path)
|
CONFIG_PATH = '/genconf/config.yaml'
SSH_KEY_PATH = '/genconf/ssh_key'
IP_DETECT_PATH = '/genconf/ip-detect'
SERVE_DIR = '/genconf/serve'
STATE_DIR = '/genconf/state'
GENCONF_DIR = '/genconf'
|
Remove dead code in dcos_installer.util
|
Remove dead code in dcos_installer.util
|
Python
|
apache-2.0
|
lingmann/dcos,xinxian0458/dcos,amitaekbote/dcos,branden/dcos,dcos/dcos,xinxian0458/dcos,surdy/dcos,lingmann/dcos,mesosphere-mergebot/dcos,jeid64/dcos,surdy/dcos,mesosphere-mergebot/mergebot-test-dcos,darkonie/dcos,mnaboka/dcos,mnaboka/dcos,dcos/dcos,jeid64/dcos,vishnu2kmohan/dcos,GoelDeepak/dcos,darkonie/dcos,asridharan/dcos,branden/dcos,dcos/dcos,BenWhitehead/dcos,jeid64/dcos,GoelDeepak/dcos,vishnu2kmohan/dcos,BenWhitehead/dcos,mesosphere-mergebot/dcos,mesosphere-mergebot/mergebot-test-dcos,dcos/dcos,jeid64/dcos,vishnu2kmohan/dcos,amitaekbote/dcos,kensipe/dcos,kensipe/dcos,asridharan/dcos,darkonie/dcos,mellenburg/dcos,GoelDeepak/dcos,dcos/dcos,GoelDeepak/dcos,mellenburg/dcos,BenWhitehead/dcos,lingmann/dcos,branden/dcos,kensipe/dcos,mesosphere-mergebot/mergebot-test-dcos,xinxian0458/dcos,mnaboka/dcos,surdy/dcos,amitaekbote/dcos,mnaboka/dcos,mellenburg/dcos,vishnu2kmohan/dcos,mellenburg/dcos,darkonie/dcos,mesosphere-mergebot/dcos,asridharan/dcos,surdy/dcos,mesosphere-mergebot/mergebot-test-dcos,asridharan/dcos,BenWhitehead/dcos,xinxian0458/dcos,darkonie/dcos,kensipe/dcos,lingmann/dcos,amitaekbote/dcos,branden/dcos,mesosphere-mergebot/dcos,mnaboka/dcos
|
3771d3165d4873592f53d8b2401806297fe2989f
|
door/models.py
|
door/models.py
|
from django.db import models
from django.utils import timezone
from datetime import datetime
# Create your models here.
class DoorStatus(models.Model):
datetime = models.DateTimeField(default=timezone.now)
status = models.BooleanField(default=False)
name = models.CharField(max_length=20)
def __str__(self):
return self.name
class OpenData(models.Model):
opened = models.DateTimeField(default=timezone.now)
closed = models.DateTimeField(default=timezone.now)
total = models.IntegerField(default=0)
def __str__(self):
return str(self.opened)
|
from django.db import models
from django.utils import timezone
from datetime import datetime
# Create your models here.
class DoorStatus(models.Model):
datetime = models.DateTimeField()
status = models.BooleanField(default=False)
name = models.CharField(max_length=20)
def __str__(self):
return self.name
class OpenData(models.Model):
opened = models.DateTimeField()
closed = models.DateTimeField()
total = models.IntegerField(default=0)
def __str__(self):
return str(self.opened)
|
Remove default datetime in door
|
Remove default datetime in door
|
Python
|
mit
|
hackerspace-ntnu/website,hackerspace-ntnu/website,hackerspace-ntnu/website
|
61b36218cc0cf74e90ac7ee8d7f02b1ffffe3890
|
blues/wkhtmltopdf.py
|
blues/wkhtmltopdf.py
|
"""
wkhtmltopdf Blueprint
blueprints:
- blues.wkhtmltopdf
"""
from fabric.decorators import task
from refabric.context_managers import sudo
from refabric.contrib import blueprints
from . import debian
__all__ = ['setup', 'configure']
blueprint = blueprints.get(__name__)
@task
def setup():
"""
Install and configure wkhtmltopdf
"""
install()
configure()
def install():
with sudo():
packages = ['wkhtmltopdf', 'xvfb', 'xfonts-100dpi', 'xfonts-75dpi', 'xfonts-cyrillic']
debian.apt_get('install', *packages)
@task
def configure():
"""
Configure wkhtmltopdf
"""
destination = '/usr/local/bin/wkhtmltopdf.sh'
blueprint.upload('wkhtmltopdf.sh', destination)
with sudo():
debian.chmod(destination, '+x')
|
"""
wkhtmltopdf Blueprint
.. code-block:: yaml
blueprints:
- blues.wkhtmltopdf
settings:
wkhtmltopdf:
# wkhtmltopdf_version: 0.12.2.1
"""
from fabric.decorators import task
from refabric.context_managers import sudo, settings
from refabric.contrib import blueprints
from refabric.operations import run
from . import debian
__all__ = ['setup', 'configure']
blueprint = blueprints.get(__name__)
@task
def setup():
"""
Install and configure wkhtmltopdf
"""
install()
def install():
"""
Install wkhtmltox from the pkgs on sourceforge that are compiled with
patched QT. This version doesn't need X/Xvfb to run.
"""
# Can't be named version since it'll conflict with fabrics own version variable
wkhtmltox_ver = blueprint.get('wkhtmltopdf_version', '0.12.2.1')
wkhtmltox_pkg = 'wkhtmltox-{}_linux-{}-amd64.deb'.format(
wkhtmltox_ver, debian.lbs_codename())
wkhtmltox_url = 'http://downloads.sourceforge.net/project/wkhtmltopdf/{}/{}'.format(
wkhtmltox_ver, wkhtmltox_pkg)
run('curl --silent --location --show-error --remote-name "{}"'.format(
wkhtmltox_url))
with sudo():
with settings(warn_only=True):
run('dpkg -i {}'.format(wkhtmltox_pkg))
debian.apt_get('--fix-broken', 'install')
debian.rm(wkhtmltox_pkg)
@task
def configure():
"""
Configure wkhtmltopdf
"""
pass
|
Install wkhtmltox from the pkgs on sourceforge
|
Install wkhtmltox from the pkgs on sourceforge
They're compiled with patched QT. This version doesn't need X/Xvfb to run.
|
Python
|
mit
|
gelbander/blues,andreif/blues,chrippa/blues,5monkeys/blues,adisbladis/blues,5monkeys/blues,Sportamore/blues,adisbladis/blues,andreif/blues,gelbander/blues,chrippa/blues,Sportamore/blues,jocke-l/blues,jocke-l/blues,andreif/blues,Sportamore/blues,adisbladis/blues,gelbander/blues,chrippa/blues,5monkeys/blues,jocke-l/blues
|
63ff6313c1200910b749dc8d8488d6c7f2cd9c5f
|
axelrod/tests/unit/test_classification.py
|
axelrod/tests/unit/test_classification.py
|
"""Tests for the classification"""
import unittest
import axelrod
class TestClassification(unittest.TestCase):
def test_known_classifiers(self):
# Grabbing all the strategies: this will be changed to just be `axelrod.strategies`
strategies = axelrod.basic_strategies
strategies += axelrod.ordinary_strategies
strategies += axelrod.cheating_strategies
# A set of dimensions that are known to have been fully applied
known_keys = ['stochastic',
'memory_depth',
'inspects_opponent_source',
'manipulates_opponent_source',
'manipulates_opponent_state']
for s in strategies:
s = s()
self.assertTrue(None not in [s.behaviour[key] for key in known_keys])
|
"""Tests for the classification"""
import unittest
import axelrod
class TestClassification(unittest.TestCase):
def test_known_classifiers(self):
# Grabbing all the strategies: this will be changed to just be
# `axelrod.strategies`
strategies = axelrod.basic_strategies
strategies += axelrod.ordinary_strategies
strategies += axelrod.cheating_strategies
# A set of dimensions that are known to have been fully applied
known_keys = ['stochastic',
'memory_depth',
'inspects_opponent_source',
'manipulates_opponent_source',
'manipulates_opponent_state']
for s in strategies:
s = s()
self.assertTrue(None not in [s.behaviour[key] for key in known_keys])
def test_multiple_instances(self):
"""Certain instances of classes of strategies will have different
behaviours based on the initialisation variables"""
P1 = axelrod.Joss()
P2 = axelrod.Joss(0)
self.assertNotEqual(P1.behaviour, P2.behaviour)
|
Add a test that checks that different instances can have different behaviour.
|
Add a test that checks that different instances can have different
behaviour.
|
Python
|
mit
|
emmagordon/Axelrod,emmagordon/Axelrod,risicle/Axelrod,kathryncrouch/Axelrod,bootandy/Axelrod,uglyfruitcake/Axelrod,mojones/Axelrod,uglyfruitcake/Axelrod,bootandy/Axelrod,mojones/Axelrod,kathryncrouch/Axelrod,risicle/Axelrod
|
039afd96fd66844e3d0ac031458c976d74aca325
|
infra/bots/recipe_modules/flavor/__init__.py
|
infra/bots/recipe_modules/flavor/__init__.py
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
DEPS = [
'builder_name_schema',
'depot_tools/bot_update',
'depot_tools/cipd',
'depot_tools/gclient',
'depot_tools/git',
'docker',
'env',
'infra',
'recipe_engine/context',
'recipe_engine/file',
'recipe_engine/json',
'recipe_engine/path',
'recipe_engine/platform',
'recipe_engine/python',
'recipe_engine/raw_io',
'recipe_engine/step',
'run',
'vars',
]
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
DEPS = [
'builder_name_schema',
'docker',
'env',
'infra',
'recipe_engine/context',
'recipe_engine/file',
'recipe_engine/json',
'recipe_engine/path',
'recipe_engine/platform',
'recipe_engine/python',
'recipe_engine/raw_io',
'recipe_engine/step',
'run',
'vars',
]
|
Remove unnecessary depot_tools dependency in flavor module
|
[recipes] Remove unnecessary depot_tools dependency in flavor module
Change-Id: Ic1f3896a450bd81bb8c4859d3998c9873af821f6
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/263016
Reviewed-by: Ravi Mistry <9fa2e7438b8cb730f96b74865492597170561628@google.com>
Commit-Queue: Eric Boren <0e499112533c8544f0505ea0d08394fb5ad7d8fa@google.com>
|
Python
|
bsd-3-clause
|
HalCanary/skia-hc,google/skia,aosp-mirror/platform_external_skia,HalCanary/skia-hc,google/skia,google/skia,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,HalCanary/skia-hc,aosp-mirror/platform_external_skia,google/skia,HalCanary/skia-hc,google/skia,aosp-mirror/platform_external_skia,HalCanary/skia-hc,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,HalCanary/skia-hc,google/skia,aosp-mirror/platform_external_skia,HalCanary/skia-hc,google/skia,google/skia,HalCanary/skia-hc,google/skia,HalCanary/skia-hc,aosp-mirror/platform_external_skia,google/skia,aosp-mirror/platform_external_skia,HalCanary/skia-hc
|
44023406197bd9271afd60405e323503ce6963a1
|
tests/test_nova_api_docs_tracker.py
|
tests/test_nova_api_docs_tracker.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_nova_api_docs_tracker
----------------------------------
Tests for `nova_api_docs_tracker` module.
"""
import unittest
from nova_api_docs_tracker import nova_api_docs_tracker
class TestNova_api_docs_tracker(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_000_something(self):
pass
if __name__ == '__main__':
import sys
sys.exit(unittest.main())
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_nova_api_docs_tracker
----------------------------------
Tests for `nova_api_docs_tracker` module.
"""
import unittest
from nova_api_docs_tracker import main
class TestNova_api_docs_tracker(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_000_something(self):
pass
if __name__ == '__main__':
import sys
sys.exit(unittest.main())
|
Fix stub unit test for main.py rename
|
Fix stub unit test for main.py rename
|
Python
|
apache-2.0
|
missaugustina/nova-api-docs-tracker,missaugustina/nova-api-docs-tracker,missaugustina/nova-api-docs-tracker,missaugustina/nova-api-docs-tracker
|
b612c3703a3b1581bfc7826f1e29a3b6053f0f4e
|
pal/services/joke_service.py
|
pal/services/joke_service.py
|
import re
from os import path
from pal.services.service import Service
from pal.services.service import wrap_response
def get_jokes():
file_path = path.realpath(path.join(path.dirname(__file__),
"jokes.txt"))
with open(file_path, 'rb') as joke_file:
for line in joke_file.readlines():
if line.startswith("#"):
continue
yield line.strip().split(" :: ", 1)
class JokeService(Service):
_JOKES = {prompt: response for prompt, response in get_jokes()}
def applies_to_me(self, client, feature_request_type):
return True
def get_confidence(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return 9001
return 0
@wrap_response
def go(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return self._JOKES[joke]
return ('ERROR', 'Tom Hanks was in 1 movies.')
|
import re
from os import path
from pal.services.service import Service
from pal.services.service import wrap_response
def get_jokes():
file_path = path.realpath(path.join(path.dirname(__file__),
"jokes.txt"))
with open(file_path, 'rb') as joke_file:
for line in joke_file.readlines():
if line.startswith("#"):
continue
prompt, response = map(str.strip, line.split("::", 1))
yield prompt, response.replace("\\n", "\n")
class JokeService(Service):
_JOKES = {prompt: response for prompt, response in get_jokes()}
def applies_to_me(self, client, feature_request_type):
return True
def get_confidence(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return 9001
return 0
@wrap_response
def go(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return self._JOKES[joke]
return ('ERROR', 'Tom Hanks was in 1 movies.')
|
Make joke parsing more robust
|
Make joke parsing more robust
|
Python
|
bsd-3-clause
|
Machyne/pal,Machyne/pal,Machyne/pal,Machyne/pal
|
66cdb36231ff1192a8a2e6b15c4b8d524cfbff6d
|
powerline/renderers/pango_markup.py
|
powerline/renderers/pango_markup.py
|
# vim:fileencoding=utf-8:noet
from powerline.renderer import Renderer
from powerline.colorscheme import ATTR_BOLD, ATTR_ITALIC, ATTR_UNDERLINE
from xmlrpclib import escape as _escape
class PangoMarkupRenderer(Renderer):
'''Powerline Pango markup segment renderer.'''
@staticmethod
def hlstyle(*args, **kwargs):
# We don't need to explicitly reset attributes, so skip those calls
return ''
def hl(self, contents, fg=None, bg=None, attr=None):
'''Highlight a segment.'''
awesome_attr = []
if fg is not None:
if fg is not False and fg[1] is not False:
awesome_attr += ['foreground="#{0:06x}"'.format(fg[1])]
if bg is not None:
if bg is not False and bg[1] is not False:
awesome_attr += ['background="#{0:06x}"'.format(bg[1])]
if attr is not None and attr is not False:
if attr & ATTR_BOLD:
awesome_attr += ['font_weight="bold"']
if attr & ATTR_ITALIC:
awesome_attr += ['font_style="italic"']
if attr & ATTR_UNDERLINE:
awesome_attr += ['underline="single"']
return '<span ' + ' '.join(awesome_attr) + '>' + contents + '</span>'
escape = staticmethod(_escape)
renderer = PangoMarkupRenderer
|
# vim:fileencoding=utf-8:noet
from powerline.renderer import Renderer
from powerline.colorscheme import ATTR_BOLD, ATTR_ITALIC, ATTR_UNDERLINE
from xml.sax.saxutils import escape as _escape
class PangoMarkupRenderer(Renderer):
'''Powerline Pango markup segment renderer.'''
@staticmethod
def hlstyle(*args, **kwargs):
# We don't need to explicitly reset attributes, so skip those calls
return ''
def hl(self, contents, fg=None, bg=None, attr=None):
'''Highlight a segment.'''
awesome_attr = []
if fg is not None:
if fg is not False and fg[1] is not False:
awesome_attr += ['foreground="#{0:06x}"'.format(fg[1])]
if bg is not None:
if bg is not False and bg[1] is not False:
awesome_attr += ['background="#{0:06x}"'.format(bg[1])]
if attr is not None and attr is not False:
if attr & ATTR_BOLD:
awesome_attr += ['font_weight="bold"']
if attr & ATTR_ITALIC:
awesome_attr += ['font_style="italic"']
if attr & ATTR_UNDERLINE:
awesome_attr += ['underline="single"']
return '<span ' + ' '.join(awesome_attr) + '>' + contents + '</span>'
escape = staticmethod(_escape)
renderer = PangoMarkupRenderer
|
Use xml.sax.saxutils.escape in place of xmlrpclib.escape
|
Use xml.sax.saxutils.escape in place of xmlrpclib.escape
The latter is not available in python 3
|
Python
|
mit
|
dragon788/powerline,xxxhycl2010/powerline,cyrixhero/powerline,s0undt3ch/powerline,S0lll0s/powerline,EricSB/powerline,prvnkumar/powerline,blindFS/powerline,QuLogic/powerline,lukw00/powerline,dragon788/powerline,firebitsbr/powerline,Liangjianghao/powerline,DoctorJellyface/powerline,wfscheper/powerline,wfscheper/powerline,QuLogic/powerline,magus424/powerline,DoctorJellyface/powerline,junix/powerline,keelerm84/powerline,Luffin/powerline,xxxhycl2010/powerline,seanfisk/powerline,blindFS/powerline,areteix/powerline,darac/powerline,s0undt3ch/powerline,junix/powerline,seanfisk/powerline,cyrixhero/powerline,bezhermoso/powerline,xfumihiro/powerline,EricSB/powerline,xxxhycl2010/powerline,DoctorJellyface/powerline,lukw00/powerline,magus424/powerline,s0undt3ch/powerline,russellb/powerline,bartvm/powerline,areteix/powerline,Liangjianghao/powerline,wfscheper/powerline,S0lll0s/powerline,seanfisk/powerline,IvanAli/powerline,junix/powerline,bartvm/powerline,bezhermoso/powerline,EricSB/powerline,QuLogic/powerline,xfumihiro/powerline,Luffin/powerline,firebitsbr/powerline,russellb/powerline,bartvm/powerline,kenrachynski/powerline,bezhermoso/powerline,prvnkumar/powerline,Liangjianghao/powerline,darac/powerline,darac/powerline,kenrachynski/powerline,lukw00/powerline,prvnkumar/powerline,cyrixhero/powerline,magus424/powerline,IvanAli/powerline,areteix/powerline,Luffin/powerline,firebitsbr/powerline,keelerm84/powerline,russellb/powerline,S0lll0s/powerline,xfumihiro/powerline,blindFS/powerline,dragon788/powerline,IvanAli/powerline,kenrachynski/powerline
|
9564848e61df038396fbab95995a40f2e5a5970a
|
l10n_br_zip/__openerp__.py
|
l10n_br_zip/__openerp__.py
|
# -*- coding: utf-8 -*-
# Copyright (C) 2009 Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
'name': 'Brazilian Localisation ZIP Codes',
'license': 'AGPL-3',
'author': 'Akretion, Odoo Community Association (OCA)',
'version': '8.0.1.0.1',
'depends': [
'l10n_br_base',
],
'data': [
'views/l10n_br_zip_view.xml',
'views/res_partner_view.xml',
'views/res_company_view.xml',
'views/res_bank_view.xml',
'wizard/l10n_br_zip_search_view.xml',
'security/ir.model.access.csv',
],
'test': ['test/zip_demo.yml'],
'category': 'Localization',
'installable': False,
}
|
# -*- coding: utf-8 -*-
# Copyright (C) 2009 Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
'name': 'Brazilian Localisation ZIP Codes',
'license': 'AGPL-3',
'author': 'Akretion, Odoo Community Association (OCA)',
'version': '9.0.1.0.0',
'depends': [
'l10n_br_base',
],
'data': [
'views/l10n_br_zip_view.xml',
'views/res_partner_view.xml',
'views/res_company_view.xml',
'views/res_bank_view.xml',
'wizard/l10n_br_zip_search_view.xml',
'security/ir.model.access.csv',
],
'test': [
'test/zip_demo.yml'
],
'category': 'Localization',
'installable': True,
}
|
Change the version of module.
|
[MIG] Change the version of module.
|
Python
|
agpl-3.0
|
kmee/l10n-brazil,kmee/l10n-brazil
|
c0f7dc1dcfdbabff116f8d7132b191397fd9007f
|
src/sentry/api/serializers/models/filechange.py
|
src/sentry/api/serializers/models/filechange.py
|
from __future__ import absolute_import
import six
from sentry.api.serializers import Serializer, register
from sentry.models import Commit, CommitFileChange
from sentry.api.serializers.models.release import get_users_for_commits
@register(CommitFileChange)
class CommitFileChangeSerializer(Serializer):
def get_attrs(self, item_list, user):
commits = Commit.objects.filter(id__in=[f.commit_id for f in item_list]).select_related('author')
author_objs = get_users_for_commits(commits)
commits_by_id = {commit.id: commit for commit in commits}
result = {}
for item in item_list:
result[item] = {
'user': author_objs.get(commits_by_id[item.commit_id].author_id, {}),
'message': commits_by_id[item.commit_id].message
}
return result
def serialize(self, obj, attrs, user):
return {
'id': six.text_type(obj.id),
'org_id': obj.organization_id,
'author': attrs.get('user', {}),
'commit_message': attrs.get('message', ''),
'filename': obj.filename,
'type': obj.type
}
|
from __future__ import absolute_import
import six
from sentry.api.serializers import Serializer, register
from sentry.models import Commit, CommitFileChange
from sentry.api.serializers.models.release import get_users_for_commits
@register(CommitFileChange)
class CommitFileChangeSerializer(Serializer):
def get_attrs(self, item_list, user):
commits = Commit.objects.filter(id__in=[f.commit_id for f in item_list]).select_related('author')
author_objs = get_users_for_commits(commits)
commits_by_id = {commit.id: commit for commit in commits}
result = {}
for item in item_list:
commit = commits_by_id[item.commit_id]
result[item] = {
'user': author_objs.get(commit.author_id, {}),
'message': commit.message
}
return result
def serialize(self, obj, attrs, user):
return {
'id': six.text_type(obj.id),
'org_id': obj.organization_id,
'author': attrs.get('user', {}),
'commit_message': attrs.get('message', ''),
'filename': obj.filename,
'type': obj.type
}
|
Use dictionary lookup only once
|
Use dictionary lookup only once
|
Python
|
bsd-3-clause
|
looker/sentry,JackDanger/sentry,mvaled/sentry,gencer/sentry,beeftornado/sentry,jean/sentry,jean/sentry,BuildingLink/sentry,ifduyue/sentry,mvaled/sentry,ifduyue/sentry,BuildingLink/sentry,looker/sentry,JamesMura/sentry,looker/sentry,jean/sentry,gencer/sentry,JamesMura/sentry,ifduyue/sentry,JamesMura/sentry,BuildingLink/sentry,JackDanger/sentry,JackDanger/sentry,looker/sentry,jean/sentry,mvaled/sentry,JamesMura/sentry,BuildingLink/sentry,gencer/sentry,mvaled/sentry,ifduyue/sentry,mvaled/sentry,BuildingLink/sentry,JamesMura/sentry,gencer/sentry,mvaled/sentry,beeftornado/sentry,ifduyue/sentry,gencer/sentry,looker/sentry,jean/sentry,beeftornado/sentry
|
befadd8fc0482adb55f63ac51166f2330c897d7a
|
src/diamond/handler/httpHandler.py
|
src/diamond/handler/httpHandler.py
|
#!/usr/bin/env python
# coding=utf-8
from Handler import Handler
import urllib2
class HttpPostHandler(Handler):
# Inititalize Handler with url and batch size
def __init__(self, config=None):
Handler.__init__(self, config)
self.metrics = []
self.batch_size = int(self.config.get('batch', 100))
self.url = self.config.get('url')
# Join batched metrics and push to url mentioned in config
def process(self, metric):
self.metrics.append(str(metric))
if len(self.metrics) >= self.batch_size:
req = urllib2.Request(self.url, "\n".join(self.metrics))
urllib2.urlopen(req)
self.metrics = []
|
#!/usr/bin/env python
# coding=utf-8
"""
Send metrics to a http endpoint via POST
#### Dependencies
* urllib2
#### Configuration
Enable this handler
* handers = diamond.handler.httpHandler.HttpPostHandler
* url = http://www.example.com/endpoint
"""
from Handler import Handler
import urllib2
class HttpPostHandler(Handler):
# Inititalize Handler with url and batch size
def __init__(self, config=None):
Handler.__init__(self, config)
self.metrics = []
self.batch_size = int(self.config.get('batch', 100))
self.url = self.config.get('url')
# Join batched metrics and push to url mentioned in config
def process(self, metric):
self.metrics.append(str(metric))
if len(self.metrics) >= self.batch_size:
req = urllib2.Request(self.url, "\n".join(self.metrics))
urllib2.urlopen(req)
self.metrics = []
|
Add in basic HttpPostHandler docs
|
Add in basic HttpPostHandler docs
|
Python
|
mit
|
szibis/Diamond,sebbrandt87/Diamond,TinLe/Diamond,datafiniti/Diamond,Ssawa/Diamond,ramjothikumar/Diamond,signalfx/Diamond,codepython/Diamond,mfriedenhagen/Diamond,thardie/Diamond,works-mobile/Diamond,cannium/Diamond,eMerzh/Diamond-1,actmd/Diamond,Netuitive/Diamond,jriguera/Diamond,TinLe/Diamond,bmhatfield/Diamond,hvnsweeting/Diamond,rtoma/Diamond,dcsquared13/Diamond,jaingaurav/Diamond,python-diamond/Diamond,socialwareinc/Diamond,mzupan/Diamond,gg7/diamond,skbkontur/Diamond,saucelabs/Diamond,dcsquared13/Diamond,tusharmakkar08/Diamond,Ensighten/Diamond,sebbrandt87/Diamond,MediaMath/Diamond,russss/Diamond,krbaker/Diamond,anandbhoraskar/Diamond,szibis/Diamond,jumping/Diamond,python-diamond/Diamond,datafiniti/Diamond,Precis/Diamond,anandbhoraskar/Diamond,disqus/Diamond,Ssawa/Diamond,anandbhoraskar/Diamond,EzyInsights/Diamond,TinLe/Diamond,saucelabs/Diamond,bmhatfield/Diamond,acquia/Diamond,saucelabs/Diamond,datafiniti/Diamond,hamelg/Diamond,tuenti/Diamond,skbkontur/Diamond,python-diamond/Diamond,works-mobile/Diamond,thardie/Diamond,mfriedenhagen/Diamond,eMerzh/Diamond-1,h00dy/Diamond,actmd/Diamond,Netuitive/netuitive-diamond,stuartbfox/Diamond,ramjothikumar/Diamond,MediaMath/Diamond,TinLe/Diamond,tusharmakkar08/Diamond,Netuitive/netuitive-diamond,Precis/Diamond,Basis/Diamond,eMerzh/Diamond-1,ceph/Diamond,bmhatfield/Diamond,TAKEALOT/Diamond,signalfx/Diamond,socialwareinc/Diamond,metamx/Diamond,jaingaurav/Diamond,h00dy/Diamond,jaingaurav/Diamond,tellapart/Diamond,russss/Diamond,Nihn/Diamond-1,mzupan/Diamond,Netuitive/Diamond,EzyInsights/Diamond,ramjothikumar/Diamond,MichaelDoyle/Diamond,socialwareinc/Diamond,tellapart/Diamond,tuenti/Diamond,mfriedenhagen/Diamond,tusharmakkar08/Diamond,skbkontur/Diamond,Netuitive/netuitive-diamond,zoidbergwill/Diamond,hamelg/Diamond,jumping/Diamond,jriguera/Diamond,Precis/Diamond,hvnsweeting/Diamond,rtoma/Diamond,jaingaurav/Diamond,ceph/Diamond,jumping/Diamond,MichaelDoyle/Diamond,sebbrandt87/Diamond,TAKEALOT/Diamond,Ormod/Diamond,signalfx/D
iamond,Slach/Diamond,tuenti/Diamond,zoidbergwill/Diamond,jriguera/Diamond,zoidbergwill/Diamond,hvnsweeting/Diamond,tellapart/Diamond,Ssawa/Diamond,timchenxiaoyu/Diamond,tuenti/Diamond,Precis/Diamond,Clever/Diamond,Clever/Diamond,Nihn/Diamond-1,timchenxiaoyu/Diamond,gg7/diamond,disqus/Diamond,CYBERBUGJR/Diamond,skbkontur/Diamond,Ormod/Diamond,Ensighten/Diamond,Ormod/Diamond,jriguera/Diamond,Slach/Diamond,Basis/Diamond,mzupan/Diamond,stuartbfox/Diamond,eMerzh/Diamond-1,CYBERBUGJR/Diamond,zoidbergwill/Diamond,krbaker/Diamond,h00dy/Diamond,thardie/Diamond,Clever/Diamond,szibis/Diamond,janisz/Diamond-1,anandbhoraskar/Diamond,mfriedenhagen/Diamond,Netuitive/netuitive-diamond,bmhatfield/Diamond,MichaelDoyle/Diamond,TAKEALOT/Diamond,MediaMath/Diamond,cannium/Diamond,Basis/Diamond,cannium/Diamond,Ensighten/Diamond,codepython/Diamond,metamx/Diamond,h00dy/Diamond,krbaker/Diamond,timchenxiaoyu/Diamond,Ormod/Diamond,gg7/diamond,TAKEALOT/Diamond,EzyInsights/Diamond,saucelabs/Diamond,cannium/Diamond,acquia/Diamond,actmd/Diamond,Ssawa/Diamond,timchenxiaoyu/Diamond,szibis/Diamond,actmd/Diamond,sebbrandt87/Diamond,metamx/Diamond,Basis/Diamond,gg7/diamond,joel-airspring/Diamond,Netuitive/Diamond,signalfx/Diamond,janisz/Diamond-1,MediaMath/Diamond,russss/Diamond,socialwareinc/Diamond,disqus/Diamond,jumping/Diamond,Nihn/Diamond-1,hamelg/Diamond,codepython/Diamond,acquia/Diamond,works-mobile/Diamond,Ensighten/Diamond,joel-airspring/Diamond,hamelg/Diamond,CYBERBUGJR/Diamond,works-mobile/Diamond,Nihn/Diamond-1,stuartbfox/Diamond,joel-airspring/Diamond,datafiniti/Diamond,dcsquared13/Diamond,EzyInsights/Diamond,hvnsweeting/Diamond,tellapart/Diamond,janisz/Diamond-1,CYBERBUGJR/Diamond,joel-airspring/Diamond,rtoma/Diamond,krbaker/Diamond,acquia/Diamond,Netuitive/Diamond,codepython/Diamond,russss/Diamond,tusharmakkar08/Diamond,Slach/Diamond,stuartbfox/Diamond,thardie/Diamond,ceph/Diamond,janisz/Diamond-1,ramjothikumar/Diamond,Slach/Diamond,ceph/Diamond,mzupan/Diamond,dcsquared13/Diamond,Clever/
Diamond,MichaelDoyle/Diamond,rtoma/Diamond
|
edd8ac2d77b747cffbcf702e71f2633a148d64c6
|
wagtail/wagtailcore/hooks.py
|
wagtail/wagtailcore/hooks.py
|
from django.conf import settings
try:
from importlib import import_module
except ImportError:
# for Python 2.6, fall back on django.utils.importlib (deprecated as of Django 1.7)
from django.utils.importlib import import_module
_hooks = {}
def register(hook_name, fn=None):
"""
Register hook for ``hook_name``. Can be used as a decorator::
@register('hook_name')
def my_hook(...):
pass
or as a function call::
def my_hook(...):
pass
register('hook_name', my_hook)
"""
# Pretend to be a decorator if fn is not supplied
if fn is None:
return lambda fn: register(hook_name, fn)
if hook_name not in _hooks:
_hooks[hook_name] = []
_hooks[hook_name].append(fn)
_searched_for_hooks = False
def search_for_hooks():
global _searched_for_hooks
if not _searched_for_hooks:
for app_module in settings.INSTALLED_APPS:
try:
import_module('%s.wagtail_hooks' % app_module)
except ImportError:
continue
_searched_for_hooks = True
def get_hooks(hook_name):
search_for_hooks()
return _hooks.get(hook_name, [])
|
from django.conf import settings
try:
from importlib import import_module
except ImportError:
# for Python 2.6, fall back on django.utils.importlib (deprecated as of Django 1.7)
from django.utils.importlib import import_module
_hooks = {}
def register(hook_name, fn=None):
"""
Register hook for ``hook_name``. Can be used as a decorator::
@register('hook_name')
def my_hook(...):
pass
or as a function call::
def my_hook(...):
pass
register('hook_name', my_hook)
"""
# Pretend to be a decorator if fn is not supplied
if fn is None:
def decorator(fn):
register(hook_name, fn)
return fn
return decorator
if hook_name not in _hooks:
_hooks[hook_name] = []
_hooks[hook_name].append(fn)
_searched_for_hooks = False
def search_for_hooks():
global _searched_for_hooks
if not _searched_for_hooks:
for app_module in settings.INSTALLED_APPS:
try:
import_module('%s.wagtail_hooks' % app_module)
except ImportError:
continue
_searched_for_hooks = True
def get_hooks(hook_name):
search_for_hooks()
return _hooks.get(hook_name, [])
|
Return the function again from the hook decorator
|
Return the function again from the hook decorator
The decorator variant of hook registration did not return anything,
meaning that the decorated function would end up being `None`. This was
not noticed, as the functions are rarely called manually, as opposed to
being invoked via the hook.
|
Python
|
bsd-3-clause
|
kaedroho/wagtail,willcodefortea/wagtail,JoshBarr/wagtail,takeshineshiro/wagtail,torchbox/wagtail,dresiu/wagtail,m-sanders/wagtail,jnns/wagtail,bjesus/wagtail,jorge-marques/wagtail,nilnvoid/wagtail,timorieber/wagtail,rsalmaso/wagtail,Toshakins/wagtail,tangentlabs/wagtail,nimasmi/wagtail,WQuanfeng/wagtail,timorieber/wagtail,benjaoming/wagtail,mixxorz/wagtail,thenewguy/wagtail,benjaoming/wagtail,Toshakins/wagtail,inonit/wagtail,mixxorz/wagtail,stevenewey/wagtail,darith27/wagtail,nealtodd/wagtail,kurtrwall/wagtail,taedori81/wagtail,serzans/wagtail,nilnvoid/wagtail,taedori81/wagtail,Pennebaker/wagtail,JoshBarr/wagtail,kurtrwall/wagtail,FlipperPA/wagtail,m-sanders/wagtail,torchbox/wagtail,mixxorz/wagtail,100Shapes/wagtail,takeflight/wagtail,kaedroho/wagtail,rsalmaso/wagtail,taedori81/wagtail,wagtail/wagtail,Klaudit/wagtail,benemery/wagtail,jordij/wagtail,gogobook/wagtail,quru/wagtail,janusnic/wagtail,chrxr/wagtail,torchbox/wagtail,gasman/wagtail,gasman/wagtail,chimeno/wagtail,mephizzle/wagtail,janusnic/wagtail,kurtw/wagtail,marctc/wagtail,kaedroho/wagtail,takeflight/wagtail,gasman/wagtail,zerolab/wagtail,Klaudit/wagtail,kaedroho/wagtail,jorge-marques/wagtail,dresiu/wagtail,tangentlabs/wagtail,chrxr/wagtail,iansprice/wagtail,rjsproxy/wagtail,gogobook/wagtail,mikedingjan/wagtail,thenewguy/wagtail,hamsterbacke23/wagtail,takeshineshiro/wagtail,darith27/wagtail,rv816/wagtail,iho/wagtail,inonit/wagtail,mikedingjan/wagtail,nimasmi/wagtail,davecranwell/wagtail,jorge-marques/wagtail,hamsterbacke23/wagtail,stevenewey/wagtail,nealtodd/wagtail,mephizzle/wagtail,rsalmaso/wagtail,timorieber/wagtail,nutztherookie/wagtail,hanpama/wagtail,100Shapes/wagtail,rsalmaso/wagtail,timorieber/wagtail,marctc/wagtail,wagtail/wagtail,KimGlazebrook/wagtail-experiment,benemery/wagtail,zerolab/wagtail,nealtodd/wagtail,KimGlazebrook/wagtail-experiment,jordij/wagtail,hamsterbacke23/wagtail,nimasmi/wagtail,zerolab/wagtail,100Shapes/wagtail,mjec/wagtail,kurtrwall/wagtail,kurtw/wagtail,marctc/wagtail,iho/wag
tail,Toshakins/wagtail,iansprice/wagtail,torchbox/wagtail,Pennebaker/wagtail,tangentlabs/wagtail,mayapurmedia/wagtail,benjaoming/wagtail,mephizzle/wagtail,m-sanders/wagtail,gogobook/wagtail,rv816/wagtail,dresiu/wagtail,quru/wagtail,willcodefortea/wagtail,KimGlazebrook/wagtail-experiment,nutztherookie/wagtail,mephizzle/wagtail,marctc/wagtail,Klaudit/wagtail,mjec/wagtail,jorge-marques/wagtail,zerolab/wagtail,dresiu/wagtail,chimeno/wagtail,jordij/wagtail,Pennebaker/wagtail,Toshakins/wagtail,jordij/wagtail,chrxr/wagtail,rjsproxy/wagtail,hanpama/wagtail,davecranwell/wagtail,gasman/wagtail,thenewguy/wagtail,jnns/wagtail,dresiu/wagtail,Tivix/wagtail,chimeno/wagtail,inonit/wagtail,rjsproxy/wagtail,JoshBarr/wagtail,hamsterbacke23/wagtail,bjesus/wagtail,nilnvoid/wagtail,iho/wagtail,WQuanfeng/wagtail,chimeno/wagtail,FlipperPA/wagtail,janusnic/wagtail,kurtw/wagtail,stevenewey/wagtail,mixxorz/wagtail,benjaoming/wagtail,FlipperPA/wagtail,nrsimha/wagtail,mikedingjan/wagtail,tangentlabs/wagtail,KimGlazebrook/wagtail-experiment,takeshineshiro/wagtail,gogobook/wagtail,takeshineshiro/wagtail,nimasmi/wagtail,mayapurmedia/wagtail,kaedroho/wagtail,bjesus/wagtail,FlipperPA/wagtail,willcodefortea/wagtail,taedori81/wagtail,mayapurmedia/wagtail,wagtail/wagtail,willcodefortea/wagtail,hanpama/wagtail,jnns/wagtail,kurtrwall/wagtail,bjesus/wagtail,serzans/wagtail,serzans/wagtail,nilnvoid/wagtail,takeflight/wagtail,thenewguy/wagtail,quru/wagtail,Klaudit/wagtail,darith27/wagtail,quru/wagtail,nutztherookie/wagtail,zerolab/wagtail,nrsimha/wagtail,iho/wagtail,WQuanfeng/wagtail,wagtail/wagtail,janusnic/wagtail,rv816/wagtail,inonit/wagtail,WQuanfeng/wagtail,Tivix/wagtail,davecranwell/wagtail,mikedingjan/wagtail,benemery/wagtail,JoshBarr/wagtail,gasman/wagtail,davecranwell/wagtail,jorge-marques/wagtail,Tivix/wagtail,thenewguy/wagtail,darith27/wagtail,mayapurmedia/wagtail,iansprice/wagtail,kurtw/wagtail,m-sanders/wagtail,takeflight/wagtail,nrsimha/wagtail,nrsimha/wagtail,hanpama/wagtail,stevenewey/wagtai
l,chrxr/wagtail,jnns/wagtail,taedori81/wagtail,mjec/wagtail,mixxorz/wagtail,iansprice/wagtail,rsalmaso/wagtail,nealtodd/wagtail,mjec/wagtail,Pennebaker/wagtail,serzans/wagtail,Tivix/wagtail,nutztherookie/wagtail,rjsproxy/wagtail,wagtail/wagtail,benemery/wagtail,chimeno/wagtail,rv816/wagtail
|
a6f291a3beb7ecb7d67b81fe92e7cca6db2139dc
|
example_scraper.py
|
example_scraper.py
|
#!/usr/bin/env python
import json
import requests
API = 'http://localhost:8000/api/1.0'
AUTH_PARAMS = {
'email': 'panda@pandaproject.net',
'api_key': 'edfe6c5ffd1be4d3bf22f69188ac6bc0fc04c84b'
}
# Create dataset
dataset = {
'name': 'Test Dataset from API',
'schema': [{
'column': 'A',
'type': 'unicode'
}, {
'column': 'B',
'type': 'unicode'
}, {
'column': 'C',
'type': 'unicode'
}]
}
response = requests.post(API + '/dataset/', json.dumps(dataset), params=AUTH_PARAMS, headers={ 'Content-Type': 'application/json' })
dataset = json.loads(response.content)
# Write data
data = { 'objects': [{
'data': ['The', 'PANDA', 'lives.']
}, {
'data': ['More', 'data', 'here.']
}]}
response = requests.put(API + '/dataset/%s/data/' % dataset['slug'], json.dumps(data), params=AUTH_PARAMS, headers={ 'Content-Type': 'application/json' })
print response.content
|
#!/usr/bin/env python
import json
import requests
API = 'http://localhost:8000/api/1.0'
AUTH_PARAMS = {
'email': 'panda@pandaproject.net',
'api_key': 'edfe6c5ffd1be4d3bf22f69188ac6bc0fc04c84b'
}
DATASET_SLUG = 'test-dataset'
# Check if dataset exists
response = requests.get(API + '/dataset/%s/' % DATASET_SLUG, params=AUTH_PARAMS)
# Create dataset if necessary
if response.status_code == 404:
dataset = {
'name': 'Test Dataset from API',
'schema': [{
'column': 'A',
'type': 'unicode'
}, {
'column': 'B',
'type': 'unicode'
}, {
'column': 'C',
'type': 'unicode'
}]
}
response = requests.put(API + '/dataset/%s/' % DATASET_SLUG, json.dumps(dataset), params=AUTH_PARAMS, headers={ 'Content-Type': 'application/json' })
# Write data
data = { 'objects': [{
'data': ['The', 'PANDA', 'lives.']
}, {
'data': ['More', 'data', 'here.']
}]}
response = requests.put(API + '/dataset/%s/data/' % DATASET_SLUG, json.dumps(data), params=AUTH_PARAMS, headers={ 'Content-Type': 'application/json' })
|
Update example scraper to use known slug.
|
Update example scraper to use known slug.
|
Python
|
mit
|
PalmBeachPost/panda,pandaproject/panda,PalmBeachPost/panda,newsapps/panda,ibrahimcesar/panda,pandaproject/panda,NUKnightLab/panda,ibrahimcesar/panda,NUKnightLab/panda,datadesk/panda,PalmBeachPost/panda,PalmBeachPost/panda,NUKnightLab/panda,ibrahimcesar/panda,datadesk/panda,ibrahimcesar/panda,pandaproject/panda,newsapps/panda,NUKnightLab/panda,datadesk/panda,pandaproject/panda,ibrahimcesar/panda,pandaproject/panda,datadesk/panda,newsapps/panda,PalmBeachPost/panda,newsapps/panda,datadesk/panda
|
2c3281754bd0e57a263a85f518eb49fbe6a8d72b
|
corehq/apps/importer/management/commands/import_cases.py
|
corehq/apps/importer/management/commands/import_cases.py
|
import json
from datetime import datetime
from django.core.management import BaseCommand, CommandError
from corehq.apps.importer.tasks import do_import
from corehq.apps.importer.util import ImporterConfig, ExcelFile
from corehq.apps.users.models import WebUser
class Command(BaseCommand):
help = "import cases from excel manually."
args = '<import_file> <config_file> <domain> <user>'
label = "import cases from excel manually."
def handle(self, *args, **options):
if len(args) != 4:
raise CommandError('Usage is import_cases %s' % self.args)
start = datetime.now()
export_file, config_file, domain, user_id = args
if '@' in user_id:
user = WebUser.get_by_username(user_id)
else:
user = WebUser.get(user_id)
if not user.is_member_of(domain):
raise CommandError("%s can't access %s" % (user, domain))
with open(config_file, 'r') as f:
config = ImporterConfig.from_json(f.read())
config.couch_user_id = user._id
spreadsheet = ExcelFile(export_file, True)
print json.dumps(do_import(spreadsheet, config, domain))
print 'finished in %s seconds' % (datetime.now() - start).seconds
|
import json
from datetime import datetime
from django.core.management import BaseCommand, CommandError
from dimagi.utils.web import json_handler
from corehq.apps.importer.tasks import do_import
from corehq.apps.importer.util import ImporterConfig, ExcelFile
from corehq.apps.users.models import WebUser
class Command(BaseCommand):
help = "import cases from excel manually."
args = '<import_file> <config_file> <domain> <user>'
label = "import cases from excel manually."
def handle(self, *args, **options):
if len(args) != 4:
raise CommandError('Usage is import_cases %s' % self.args)
start = datetime.now()
export_file, config_file, domain, user_id = args
if '@' in user_id:
user = WebUser.get_by_username(user_id)
else:
user = WebUser.get(user_id)
if not user.is_member_of(domain):
raise CommandError("%s can't access %s" % (user, domain))
with open(config_file, 'r') as f:
config = ImporterConfig.from_json(f.read())
config.couch_user_id = user._id
spreadsheet = ExcelFile(export_file, True)
print json.dumps(do_import(spreadsheet, config, domain),
default=json_handler)
print 'finished in %s seconds' % (datetime.now() - start).seconds
|
Use json_handler to force ugettext_lazy
|
Use json_handler to force ugettext_lazy
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq
|
f81a2f9e3f1123ec474bce3278107a94c70e0dc3
|
python/helpers/pydev/_pydev_bundle/_pydev_filesystem_encoding.py
|
python/helpers/pydev/_pydev_bundle/_pydev_filesystem_encoding.py
|
def __getfilesystemencoding():
'''
Note: there's a copy of this method in interpreterInfo.py
'''
import sys
try:
ret = sys.getfilesystemencoding()
if not ret:
raise RuntimeError('Unable to get encoding.')
return ret
except:
try:
#Handle Jython
from java.lang import System # @UnresolvedImport
env = System.getProperty("os.name").lower()
if env.find('win') != -1:
return 'ISO-8859-1' #mbcs does not work on Jython, so, use a (hopefully) suitable replacement
return 'utf-8'
except:
pass
#Only available from 2.3 onwards.
if sys.platform == 'win32':
return 'mbcs'
return 'utf-8'
def getfilesystemencoding():
try:
ret = __getfilesystemencoding()
#Check if the encoding is actually there to be used!
if hasattr('', 'encode'):
''.encode(ret)
if hasattr('', 'decode'):
''.decode(ret)
return ret
except:
return 'utf-8'
|
import sys
def __getfilesystemencoding():
'''
Note: there's a copy of this method in interpreterInfo.py
'''
try:
ret = sys.getfilesystemencoding()
if not ret:
raise RuntimeError('Unable to get encoding.')
return ret
except:
try:
#Handle Jython
from java.lang import System # @UnresolvedImport
env = System.getProperty("os.name").lower()
if env.find('win') != -1:
return 'ISO-8859-1' #mbcs does not work on Jython, so, use a (hopefully) suitable replacement
return 'utf-8'
except:
pass
#Only available from 2.3 onwards.
if sys.platform == 'win32':
return 'mbcs'
return 'utf-8'
def getfilesystemencoding():
try:
ret = __getfilesystemencoding()
#Check if the encoding is actually there to be used!
if hasattr('', 'encode'):
''.encode(ret)
if hasattr('', 'decode'):
''.decode(ret)
return ret
except:
return 'utf-8'
|
Fix deadlock in remote debugger (PY-18546)
|
Fix deadlock in remote debugger (PY-18546)
|
Python
|
apache-2.0
|
salguarnieri/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,semonte/intellij-community,idea4bsd/idea4bsd,semonte/intellij-community,da1z/intellij-community,ThiagoGarciaAlves/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,xfournet/intellij-community,salguarnieri/intellij-community,ThiagoGarciaAlves/intellij-community,semonte/intellij-community,vvv1559/intellij-community,allotria/intellij-community,vvv1559/intellij-community,FHannes/intellij-community,apixandru/intellij-community,idea4bsd/idea4bsd,semonte/intellij-community,hurricup/intellij-community,xfournet/intellij-community,fitermay/intellij-community,lucafavatella/intellij-community,mglukhikh/intellij-community,suncycheng/intellij-community,lucafavatella/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,hurricup/intellij-community,semonte/intellij-community,idea4bsd/idea4bsd,fitermay/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,da1z/intellij-community,hurricup/intellij-community,xfournet/intellij-community,apixandru/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,youdonghai/intellij-community,mglukhikh/intellij-community,ThiagoGarciaAlves/intellij-community,salguarnieri/intellij-community,suncycheng/intellij-community,salguarnieri/intellij-community,hurricup/intellij-community,FHannes/intellij-community,lucafavatella/intellij-community,lucafavatella/intellij-community,ibinti/intellij-community,lucafavatella/intellij-community,fitermay/intellij-community,asedunov/intellij-community,michaelgallacher/intellij-community,lucafavatella/intellij-community,youdonghai/intellij-community,idea4bsd/idea4bsd,michaelgallacher/intellij-community,signed/intellij-community,ThiagoGarciaAlves/intellij-community,xfournet/intellij-community,FHannes/intellij-community,retomerz/intellij-community,apixandru/intellij-community,ibinti/intellij-community,youdonghai/intellij-community,allotria/i
ntellij-community,xfournet/intellij-community,signed/intellij-community,FHannes/intellij-community,salguarnieri/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,FHannes/intellij-community,salguarnieri/intellij-community,retomerz/intellij-community,apixandru/intellij-community,idea4bsd/idea4bsd,vvv1559/intellij-community,idea4bsd/idea4bsd,hurricup/intellij-community,retomerz/intellij-community,michaelgallacher/intellij-community,da1z/intellij-community,vvv1559/intellij-community,signed/intellij-community,idea4bsd/idea4bsd,semonte/intellij-community,hurricup/intellij-community,vvv1559/intellij-community,signed/intellij-community,xfournet/intellij-community,semonte/intellij-community,apixandru/intellij-community,fitermay/intellij-community,retomerz/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,apixandru/intellij-community,idea4bsd/idea4bsd,ThiagoGarciaAlves/intellij-community,signed/intellij-community,suncycheng/intellij-community,youdonghai/intellij-community,da1z/intellij-community,allotria/intellij-community,hurricup/intellij-community,michaelgallacher/intellij-community,semonte/intellij-community,retomerz/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,FHannes/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,da1z/intellij-community,signed/intellij-community,allotria/intellij-community,michaelgallacher/intellij-community,mglukhikh/intellij-community,ibinti/intellij-community,FHannes/intellij-community,lucafavatella/intellij-community,mglukhikh/intellij-community,michaelgallacher/intellij-community,ThiagoGarciaAlves/intellij-community,signed/intellij-community,apixandru/intellij-community,ThiagoGarciaAlves/intellij-community,apixandru/intellij-community,hurricup/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,asedunov/intellij-community,youdonghai/intellij-community,lucafavatella/intellij-community,ibint
i/intellij-community,vvv1559/intellij-community,da1z/intellij-community,apixandru/intellij-community,da1z/intellij-community,ThiagoGarciaAlves/intellij-community,asedunov/intellij-community,michaelgallacher/intellij-community,mglukhikh/intellij-community,idea4bsd/idea4bsd,ibinti/intellij-community,FHannes/intellij-community,xfournet/intellij-community,FHannes/intellij-community,youdonghai/intellij-community,xfournet/intellij-community,lucafavatella/intellij-community,da1z/intellij-community,hurricup/intellij-community,semonte/intellij-community,salguarnieri/intellij-community,salguarnieri/intellij-community,da1z/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,vvv1559/intellij-community,asedunov/intellij-community,retomerz/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,michaelgallacher/intellij-community,fitermay/intellij-community,retomerz/intellij-community,suncycheng/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,signed/intellij-community,suncycheng/intellij-community,fitermay/intellij-community,fitermay/intellij-community,mglukhikh/intellij-community,michaelgallacher/intellij-community,semonte/intellij-community,michaelgallacher/intellij-community,FHannes/intellij-community,xfournet/intellij-community,retomerz/intellij-community,ibinti/intellij-community,semonte/intellij-community,lucafavatella/intellij-community,signed/intellij-community,FHannes/intellij-community,asedunov/intellij-community,fitermay/intellij-community,da1z/intellij-community,signed/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,FHannes/intellij-community,salguarnieri/intellij-community,salguarnieri/intellij-community,asedunov/intellij-community,retomerz/intellij-community,xfournet/intellij-community,allotria/intellij-community,hurricup/intellij-community,retomerz/intellij-communit
y,idea4bsd/idea4bsd,FHannes/intellij-community,retomerz/intellij-community,salguarnieri/intellij-community,suncycheng/intellij-community,da1z/intellij-community,youdonghai/intellij-community,youdonghai/intellij-community,da1z/intellij-community,allotria/intellij-community,fitermay/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,fitermay/intellij-community,asedunov/intellij-community,lucafavatella/intellij-community,michaelgallacher/intellij-community,signed/intellij-community,retomerz/intellij-community,vvv1559/intellij-community,ibinti/intellij-community,ibinti/intellij-community,fitermay/intellij-community,fitermay/intellij-community,idea4bsd/idea4bsd,ibinti/intellij-community,retomerz/intellij-community,mglukhikh/intellij-community,lucafavatella/intellij-community,hurricup/intellij-community,asedunov/intellij-community,ibinti/intellij-community,fitermay/intellij-community,suncycheng/intellij-community,youdonghai/intellij-community,apixandru/intellij-community,allotria/intellij-community,ibinti/intellij-community,hurricup/intellij-community,youdonghai/intellij-community,idea4bsd/idea4bsd,signed/intellij-community,mglukhikh/intellij-community,salguarnieri/intellij-community,allotria/intellij-community,michaelgallacher/intellij-community,signed/intellij-community,suncycheng/intellij-community,suncycheng/intellij-community,allotria/intellij-community,lucafavatella/intellij-community,semonte/intellij-community,asedunov/intellij-community,xfournet/intellij-community,da1z/intellij-community,hurricup/intellij-community,asedunov/intellij-community,apixandru/intellij-community,semonte/intellij-community
|
8a72eff36b66492e17cfeb0383164a34dbf75ce0
|
addons/purchase/report/__init__.py
|
addons/purchase/report/__init__.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import order
import request_quotation
import purchase_report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import purchase_report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Fix useless import following the removal of rml purchase reports
|
[FIX] Fix useless import following the removal of rml purchase reports
bzr revid: openerp-sle@openerp-sle.home-20140214150700-2zuukk4ahs4q1zhs
|
Python
|
agpl-3.0
|
odoousers2014/odoo,nuncjo/odoo,markeTIC/OCB,oasiswork/odoo,andreparames/odoo,nexiles/odoo,Endika/odoo,Danisan/odoo-1,spadae22/odoo,0k/OpenUpgrade,grap/OpenUpgrade,bealdav/OpenUpgrade,abstract-open-solutions/OCB,mustafat/odoo-1,papouso/odoo,provaleks/o8,OpusVL/odoo,odooindia/odoo,sinbazhou/odoo,slevenhagen/odoo-npg,rdeheele/odoo,rahuldhote/odoo,zchking/odoo,MarcosCommunity/odoo,draugiskisprendimai/odoo,rubencabrera/odoo,alhashash/odoo,ramitalat/odoo,matrixise/odoo,slevenhagen/odoo,lsinfo/odoo,xujb/odoo,rubencabrera/odoo,eino-makitalo/odoo,bplancher/odoo,dkubiak789/odoo,blaggacao/OpenUpgrade,patmcb/odoo,TRESCLOUD/odoopub,gavin-feng/odoo,windedge/odoo,javierTerry/odoo,numerigraphe/odoo,ihsanudin/odoo,goliveirab/odoo,havt/odoo,lgscofield/odoo,gorjuce/odoo,kifcaliph/odoo,lsinfo/odoo,nuuuboo/odoo,syci/OCB,Ernesto99/odoo,bakhtout/odoo-educ,n0m4dz/odoo,sebalix/OpenUpgrade,Gitlab11/odoo,Endika/odoo,pplatek/odoo,alqfahad/odoo,hifly/OpenUpgrade,TRESCLOUD/odoopub,jaxkodex/odoo,joshuajan/odoo,datenbetrieb/odoo,bealdav/OpenUpgrade,dgzurita/odoo,havt/odoo,salaria/odoo,cysnake4713/odoo,juanalfonsopr/odoo,NL66278/OCB,BT-ojossen/odoo,guerrerocarlos/odoo,mkieszek/odoo,hip-odoo/odoo,mustafat/odoo-1,VielSoft/odoo,jolevq/odoopub,sysadminmatmoz/OCB,synconics/odoo,xujb/odoo,erkrishna9/odoo,BT-ojossen/odoo,windedge/odoo,tvibliani/odoo,mustafat/odoo-1,glovebx/odoo,luiseduardohdbackup/odoo,javierTerry/odoo,x111ong/odoo,TRESCLOUD/odoopub,elmerdpadilla/iv,fuhongliang/odoo,Eric-Zhong/odoo,ChanduERP/odoo,christophlsa/odoo,gvb/odoo,abenzbiria/clients_odoo,incaser/odoo-odoo,nexiles/odoo,x111ong/odoo,sergio-incaser/odoo,AuyaJackie/odoo,OpenUpgrade-dev/OpenUpgrade,rubencabrera/odoo,nhomar/odoo,RafaelTorrealba/odoo,addition-it-solutions/project-all,waytai/odoo,janocat/odoo,jiangzhixiao/odoo,ThinkOpen-Solutions/odoo,ApuliaSoftware/odoo,bkirui/odoo,bakhtout/odoo-educ,pedrobaeza/odoo,ygol/odoo,doomsterinc/odoo,kifcaliph/odoo,tvtsoft/odoo8,guerrerocarlos/odoo,goliveirab/odoo,lsinfo/odoo,fjbatresv/odoo,tha
nhacun/odoo,rowemoore/odoo,luiseduardohdbackup/odoo,pedrobaeza/OpenUpgrade,fjbatresv/odoo,ApuliaSoftware/odoo,cedk/odoo,QianBIG/odoo,tinkhaven-organization/odoo,klunwebale/odoo,luiseduardohdbackup/odoo,joshuajan/odoo,factorlibre/OCB,BT-rmartin/odoo,tarzan0820/odoo,abenzbiria/clients_odoo,makinacorpus/odoo,pplatek/odoo,tarzan0820/odoo,microcom/odoo,hip-odoo/odoo,naousse/odoo,alhashash/odoo,Noviat/odoo,inspyration/odoo,apanju/odoo,damdam-s/OpenUpgrade,fgesora/odoo,abdellatifkarroum/odoo,fgesora/odoo,Endika/odoo,windedge/odoo,dalegregory/odoo,GauravSahu/odoo,lsinfo/odoo,sysadminmatmoz/OCB,apanju/GMIO_Odoo,fuselock/odoo,bwrsandman/OpenUpgrade,JCA-Developpement/Odoo,xzYue/odoo,savoirfairelinux/odoo,Maspear/odoo,leorochael/odoo,MarcosCommunity/odoo,JGarcia-Panach/odoo,guewen/OpenUpgrade,factorlibre/OCB,PongPi/isl-odoo,shingonoide/odoo,abstract-open-solutions/OCB,abenzbiria/clients_odoo,Ernesto99/odoo,dgzurita/odoo,osvalr/odoo,rdeheele/odoo,pplatek/odoo,aviciimaxwell/odoo,alqfahad/odoo,cloud9UG/odoo,ingadhoc/odoo,fdvarela/odoo8,slevenhagen/odoo-npg,bwrsandman/OpenUpgrade,Antiun/odoo,apanju/GMIO_Odoo,Eric-Zhong/odoo,xzYue/odoo,n0m4dz/odoo,oihane/odoo,lightcn/odoo,javierTerry/odoo,NeovaHealth/odoo,luiseduardohdbackup/odoo,microcom/odoo,odoo-turkiye/odoo,storm-computers/odoo,colinnewell/odoo,shaufi/odoo,sadleader/odoo,dfang/odoo,kybriainfotech/iSocioCRM,nexiles/odoo,agrista/odoo-saas,ovnicraft/odoo,virgree/odoo,kittiu/odoo,csrocha/OpenUpgrade,andreparames/odoo,jaxkodex/odoo,oasiswork/odoo,fuselock/odoo,blaggacao/OpenUpgrade,syci/OCB,gsmartway/odoo,florentx/OpenUpgrade,Ernesto99/odoo,dkubiak789/odoo,charbeljc/OCB,xujb/odoo,gvb/odoo,Nowheresly/odoo,BT-rmartin/odoo,apocalypsebg/odoo,kybriainfotech/iSocioCRM,kirca/OpenUpgrade,gorjuce/odoo,tangyiyong/odoo,erkrishna9/odoo,apocalypsebg/odoo,waytai/odoo,inspyration/odoo,makinacorpus/odoo,bguillot/OpenUpgrade,xzYue/odoo,alqfahad/odoo,arthru/OpenUpgrade,leoliujie/odoo,leorochael/odoo,bobisme/odoo,kirca/OpenUpgrade,incaser/odoo-odoo,dra
ugiskisprendimai/odoo,grap/OpenUpgrade,rgeleta/odoo,jfpla/odoo,makinacorpus/odoo,mlaitinen/odoo,Bachaco-ve/odoo,gavin-feng/odoo,gorjuce/odoo,ramadhane/odoo,brijeshkesariya/odoo,bakhtout/odoo-educ,spadae22/odoo,luistorresm/odoo,goliveirab/odoo,wangjun/odoo,ubic135/odoo-design,JCA-Developpement/Odoo,realsaiko/odoo,tangyiyong/odoo,ShineFan/odoo,x111ong/odoo,bobisme/odoo,ThinkOpen-Solutions/odoo,dsfsdgsbngfggb/odoo,joariasl/odoo,datenbetrieb/odoo,VielSoft/odoo,luistorresm/odoo,n0m4dz/odoo,highco-groupe/odoo,gorjuce/odoo,ShineFan/odoo,synconics/odoo,thanhacun/odoo,nhomar/odoo-mirror,agrista/odoo-saas,sergio-incaser/odoo,nhomar/odoo-mirror,Danisan/odoo-1,leoliujie/odoo,nhomar/odoo,guerrerocarlos/odoo,colinnewell/odoo,addition-it-solutions/project-all,erkrishna9/odoo,hanicker/odoo,tvibliani/odoo,Gitlab11/odoo,sve-odoo/odoo,nhomar/odoo,JonathanStein/odoo,ecosoft-odoo/odoo,dkubiak789/odoo,Noviat/odoo,doomsterinc/odoo,juanalfonsopr/odoo,jusdng/odoo,ygol/odoo,tangyiyong/odoo,guewen/OpenUpgrade,fevxie/odoo,diagramsoftware/odoo,n0m4dz/odoo,laslabs/odoo,sebalix/OpenUpgrade,osvalr/odoo,colinnewell/odoo,Noviat/odoo,addition-it-solutions/project-all,numerigraphe/odoo,sysadminmatmoz/OCB,factorlibre/OCB,acshan/odoo,rubencabrera/odoo,pedrobaeza/OpenUpgrade,bobisme/odoo,pedrobaeza/OpenUpgrade,cdrooom/odoo,shivam1111/odoo,JGarcia-Panach/odoo,grap/OpenUpgrade,bguillot/OpenUpgrade,sve-odoo/odoo,dllsf/odootest,prospwro/odoo,havt/odoo,collex100/odoo,Maspear/odoo,mustafat/odoo-1,ThinkOpen-Solutions/odoo,oasiswork/odoo,bkirui/odoo,xzYue/odoo,dezynetechnologies/odoo,dfang/odoo,fgesora/odoo,brijeshkesariya/odoo,tinkhaven-organization/odoo,florentx/OpenUpgrade,Antiun/odoo,ovnicraft/odoo,sv-dev1/odoo,lombritz/odoo,alexteodor/odoo,poljeff/odoo,ShineFan/odoo,tarzan0820/odoo,havt/odoo,makinacorpus/odoo,damdam-s/OpenUpgrade,dsfsdgsbngfggb/odoo,zchking/odoo,cloud9UG/odoo,xujb/odoo,mmbtba/odoo,eino-makitalo/odoo,oliverhr/odoo,poljeff/odoo,dkubiak789/odoo,funkring/fdoo,osvalr/odoo,oliverhr/odoo,guewen/Op
enUpgrade,mlaitinen/odoo,joariasl/odoo,microcom/odoo,ihsanudin/odoo,javierTerry/odoo,0k/OpenUpgrade,abenzbiria/clients_odoo,kybriainfotech/iSocioCRM,dariemp/odoo,fuselock/odoo,bplancher/odoo,hanicker/odoo,BT-ojossen/odoo,SAM-IT-SA/odoo,Elico-Corp/odoo_OCB,ovnicraft/odoo,nexiles/odoo,rahuldhote/odoo,shivam1111/odoo,mszewczy/odoo,colinnewell/odoo,ccomb/OpenUpgrade,ramadhane/odoo,ojengwa/odoo,cysnake4713/odoo,ingadhoc/odoo,QianBIG/odoo,KontorConsulting/odoo,collex100/odoo,slevenhagen/odoo,realsaiko/odoo,pedrobaeza/OpenUpgrade,odootr/odoo,lombritz/odoo,joshuajan/odoo,dezynetechnologies/odoo,hoatle/odoo,nitinitprof/odoo,windedge/odoo,gvb/odoo,srsman/odoo,bwrsandman/OpenUpgrade,NeovaHealth/odoo,OpenUpgrade/OpenUpgrade,shivam1111/odoo,lsinfo/odoo,ApuliaSoftware/odoo,lgscofield/odoo,stonegithubs/odoo,shaufi10/odoo,javierTerry/odoo,florentx/OpenUpgrade,tangyiyong/odoo,stephen144/odoo,Bachaco-ve/odoo,Nick-OpusVL/odoo,elmerdpadilla/iv,PongPi/isl-odoo,alqfahad/odoo,ehirt/odoo,apanju/odoo,guerrerocarlos/odoo,shaufi/odoo,guewen/OpenUpgrade,CopeX/odoo,dllsf/odootest,highco-groupe/odoo,dsfsdgsbngfggb/odoo,Maspear/odoo,spadae22/odoo,damdam-s/OpenUpgrade,FlorianLudwig/odoo,gsmartway/odoo,NeovaHealth/odoo,incaser/odoo-odoo,dezynetechnologies/odoo,JCA-Developpement/Odoo,srsman/odoo,bealdav/OpenUpgrade,SerpentCS/odoo,papouso/odoo,elmerdpadilla/iv,naousse/odoo,eino-makitalo/odoo,srsman/odoo,Maspear/odoo,Noviat/odoo,Adel-Magebinary/odoo,abdellatifkarroum/odoo,Daniel-CA/odoo,draugiskisprendimai/odoo,eino-makitalo/odoo,jaxkodex/odoo,chiragjogi/odoo,FlorianLudwig/odoo,markeTIC/OCB,rubencabrera/odoo,0k/OpenUpgrade,dsfsdgsbngfggb/odoo,deKupini/erp,makinacorpus/odoo,CopeX/odoo,stonegithubs/odoo,kirca/OpenUpgrade,zchking/odoo,jiachenning/odoo,ujjwalwahi/odoo,stephen144/odoo,tvibliani/odoo,oliverhr/odoo,mszewczy/odoo,gavin-feng/odoo,diagramsoftware/odoo,omprakasha/odoo,Gitlab11/odoo,jusdng/odoo,fdvarela/odoo8,doomsterinc/odoo,AuyaJackie/odoo,n0m4dz/odoo,abstract-open-solutions/OCB,ThinkOpen-Solut
ions/odoo,takis/odoo,Codefans-fan/odoo,sv-dev1/odoo,prospwro/odoo,ChanduERP/odoo,shingonoide/odoo,chiragjogi/odoo,SerpentCS/odoo,xujb/odoo,agrista/odoo-saas,simongoffin/website_version,mkieszek/odoo,klunwebale/odoo,tangyiyong/odoo,cedk/odoo,fjbatresv/odoo,Maspear/odoo,odoousers2014/odoo,abenzbiria/clients_odoo,CopeX/odoo,QianBIG/odoo,realsaiko/odoo,massot/odoo,mustafat/odoo-1,Codefans-fan/odoo,glovebx/odoo,jfpla/odoo,klunwebale/odoo,sergio-incaser/odoo,doomsterinc/odoo,elmerdpadilla/iv,oasiswork/odoo,Danisan/odoo-1,BT-rmartin/odoo,alexteodor/odoo,sv-dev1/odoo,stephen144/odoo,pedrobaeza/odoo,shaufi/odoo,mlaitinen/odoo,rgeleta/odoo,ingadhoc/odoo,idncom/odoo,virgree/odoo,incaser/odoo-odoo,Adel-Magebinary/odoo,nuuuboo/odoo,stonegithubs/odoo,savoirfairelinux/odoo,joshuajan/odoo,bealdav/OpenUpgrade,tvibliani/odoo,shivam1111/odoo,Daniel-CA/odoo,papouso/odoo,fossoult/odoo,blaggacao/OpenUpgrade,jiangzhixiao/odoo,avoinsystems/odoo,fuselock/odoo,funkring/fdoo,stephen144/odoo,demon-ru/iml-crm,odooindia/odoo,jpshort/odoo,odootr/odoo,fuhongliang/odoo,OpusVL/odoo,gavin-feng/odoo,kybriainfotech/iSocioCRM,sadleader/odoo,vnsofthe/odoo,provaleks/o8,JGarcia-Panach/odoo,BT-ojossen/odoo,optima-ict/odoo,Kilhog/odoo,jeasoft/odoo,ubic135/odoo-design,fjbatresv/odoo,Adel-Magebinary/odoo,FlorianLudwig/odoo,Ichag/odoo,Ernesto99/odoo,dllsf/odootest,hbrunn/OpenUpgrade,provaleks/o8,erkrishna9/odoo,leorochael/odoo,OpenUpgrade-dev/OpenUpgrade,lgscofield/odoo,bobisme/odoo,Drooids/odoo,jiangzhixiao/odoo,GauravSahu/odoo,OpenUpgrade-dev/OpenUpgrade,KontorConsulting/odoo,aviciimaxwell/odoo,OpenUpgrade/OpenUpgrade,xzYue/odoo,cpyou/odoo,rahuldhote/odoo,inspyration/odoo,hifly/OpenUpgrade,jaxkodex/odoo,csrocha/OpenUpgrade,mustafat/odoo-1,xzYue/odoo,patmcb/odoo,0k/odoo,Drooids/odoo,alqfahad/odoo,shaufi/odoo,goliveirab/odoo,Bachaco-ve/odoo,alexteodor/odoo,synconics/odoo,guewen/OpenUpgrade,Kilhog/odoo,ThinkOpen-Solutions/odoo,luistorresm/odoo,eino-makitalo/odoo,tarzan0820/odoo,sinbazhou/odoo,OpenUpgrade-dev/Ope
nUpgrade,draugiskisprendimai/odoo,markeTIC/OCB,ojengwa/odoo,odooindia/odoo,CubicERP/odoo,dgzurita/odoo,patmcb/odoo,odoo-turkiye/odoo,ecosoft-odoo/odoo,ApuliaSoftware/odoo,minhtuancn/odoo,sinbazhou/odoo,PongPi/isl-odoo,takis/odoo,stephen144/odoo,bealdav/OpenUpgrade,hanicker/odoo,KontorConsulting/odoo,mvaled/OpenUpgrade,fgesora/odoo,SAM-IT-SA/odoo,shingonoide/odoo,fjbatresv/odoo,vnsofthe/odoo,brijeshkesariya/odoo,bkirui/odoo,feroda/odoo,ujjwalwahi/odoo,stonegithubs/odoo,mvaled/OpenUpgrade,SerpentCS/odoo,hopeall/odoo,klunwebale/odoo,hoatle/odoo,JonathanStein/odoo,gvb/odoo,Gitlab11/odoo,savoirfairelinux/odoo,dalegregory/odoo,funkring/fdoo,fevxie/odoo,hopeall/odoo,OpenUpgrade/OpenUpgrade,tarzan0820/odoo,fevxie/odoo,alhashash/odoo,odoousers2014/odoo,provaleks/o8,jiangzhixiao/odoo,BT-rmartin/odoo,apanju/GMIO_Odoo,tvtsoft/odoo8,kittiu/odoo,colinnewell/odoo,oliverhr/odoo,Codefans-fan/odoo,CubicERP/odoo,charbeljc/OCB,Nowheresly/odoo,dgzurita/odoo,jaxkodex/odoo,shingonoide/odoo,lgscofield/odoo,apanju/GMIO_Odoo,cpyou/odoo,hanicker/odoo,havt/odoo,omprakasha/odoo,kybriainfotech/iSocioCRM,spadae22/odoo,takis/odoo,hoatle/odoo,chiragjogi/odoo,funkring/fdoo,bwrsandman/OpenUpgrade,QianBIG/odoo,damdam-s/OpenUpgrade,RafaelTorrealba/odoo,feroda/odoo,nitinitprof/odoo,ramitalat/odoo,oihane/odoo,glovebx/odoo,hassoon3/odoo,demon-ru/iml-crm,Daniel-CA/odoo,SAM-IT-SA/odoo,bakhtout/odoo-educ,takis/odoo,odooindia/odoo,QianBIG/odoo,jaxkodex/odoo,avoinsystems/odoo,mustafat/odoo-1,JonathanStein/odoo,sebalix/OpenUpgrade,nhomar/odoo,CatsAndDogsbvba/odoo,virgree/odoo,gorjuce/odoo,hoatle/odoo,Ichag/odoo,microcom/odoo,OpenUpgrade-dev/OpenUpgrade,csrocha/OpenUpgrade,florian-dacosta/OpenUpgrade,jeasoft/odoo,AuyaJackie/odoo,BT-fgarbely/odoo,bplancher/odoo,hopeall/odoo,naousse/odoo,factorlibre/OCB,mvaled/OpenUpgrade,charbeljc/OCB,cedk/odoo,pedrobaeza/odoo,cysnake4713/odoo,Nick-OpusVL/odoo,sinbazhou/odoo,fossoult/odoo,jesramirez/odoo,shaufi10/odoo,savoirfairelinux/odoo,KontorConsulting/odoo,rahuldhote/odoo,ds
fsdgsbngfggb/odoo,ApuliaSoftware/odoo,ingadhoc/odoo,JCA-Developpement/Odoo,brijeshkesariya/odoo,JGarcia-Panach/odoo,Endika/OpenUpgrade,joariasl/odoo,datenbetrieb/odoo,Endika/OpenUpgrade,apocalypsebg/odoo,goliveirab/odoo,fuselock/odoo,bobisme/odoo,tinkerthaler/odoo,SAM-IT-SA/odoo,eino-makitalo/odoo,optima-ict/odoo,tvtsoft/odoo8,lombritz/odoo,Endika/odoo,laslabs/odoo,joshuajan/odoo,rgeleta/odoo,windedge/odoo,hoatle/odoo,cloud9UG/odoo,JGarcia-Panach/odoo,charbeljc/OCB,shivam1111/odoo,spadae22/odoo,bplancher/odoo,chiragjogi/odoo,vnsofthe/odoo,wangjun/odoo,jusdng/odoo,idncom/odoo,RafaelTorrealba/odoo,apanju/odoo,mszewczy/odoo,synconics/odoo,osvalr/odoo,Codefans-fan/odoo,datenbetrieb/odoo,Noviat/odoo,bkirui/odoo,BT-fgarbely/odoo,hubsaysnuaa/odoo,AuyaJackie/odoo,fossoult/odoo,gavin-feng/odoo,tinkerthaler/odoo,fuhongliang/odoo,rowemoore/odoo,pedrobaeza/odoo,hbrunn/OpenUpgrade,nuncjo/odoo,nuncjo/odoo,nagyistoce/odoo-dev-odoo,oliverhr/odoo,christophlsa/odoo,acshan/odoo,nitinitprof/odoo,matrixise/odoo,Elico-Corp/odoo_OCB,bguillot/OpenUpgrade,ThinkOpen-Solutions/odoo,bwrsandman/OpenUpgrade,ccomb/OpenUpgrade,addition-it-solutions/project-all,RafaelTorrealba/odoo,jiachenning/odoo,ramadhane/odoo,Drooids/odoo,ygol/odoo,jiangzhixiao/odoo,andreparames/odoo,GauravSahu/odoo,Kilhog/odoo,cysnake4713/odoo,hopeall/odoo,tinkerthaler/odoo,alqfahad/odoo,nagyistoce/odoo-dev-odoo,prospwro/odoo,JCA-Developpement/Odoo,laslabs/odoo,alhashash/odoo,stephen144/odoo,hubsaysnuaa/odoo,pplatek/odoo,leoliujie/odoo,blaggacao/OpenUpgrade,ramitalat/odoo,cedk/odoo,gsmartway/odoo,Danisan/odoo-1,factorlibre/OCB,KontorConsulting/odoo,hoatle/odoo,shivam1111/odoo,Endika/odoo,prospwro/odoo,eino-makitalo/odoo,Bachaco-ve/odoo,simongoffin/website_version,JonathanStein/odoo,Danisan/odoo-1,Eric-Zhong/odoo,funkring/fdoo,mmbtba/odoo,odoousers2014/odoo,hassoon3/odoo,hip-odoo/odoo,abstract-open-solutions/OCB,nexiles/odoo,bkirui/odoo,slevenhagen/odoo,nitinitprof/odoo,hbrunn/OpenUpgrade,arthru/OpenUpgrade,bobisme/odoo,cdrooom
/odoo,ShineFan/odoo,CopeX/odoo,oasiswork/odoo,jesramirez/odoo,tvibliani/odoo,incaser/odoo-odoo,ehirt/odoo,fgesora/odoo,alexcuellar/odoo,nhomar/odoo,apanju/odoo,BT-astauder/odoo,kifcaliph/odoo,laslabs/odoo,optima-ict/odoo,tvibliani/odoo,brijeshkesariya/odoo,luistorresm/odoo,csrocha/OpenUpgrade,dalegregory/odoo,numerigraphe/odoo,rahuldhote/odoo,rgeleta/odoo,oihane/odoo,Daniel-CA/odoo,n0m4dz/odoo,papouso/odoo,ojengwa/odoo,slevenhagen/odoo-npg,chiragjogi/odoo,florentx/OpenUpgrade,BT-fgarbely/odoo,steedos/odoo,colinnewell/odoo,waytai/odoo,sv-dev1/odoo,nexiles/odoo,mlaitinen/odoo,sergio-incaser/odoo,SerpentCS/odoo,realsaiko/odoo,feroda/odoo,hip-odoo/odoo,waytai/odoo,Gitlab11/odoo,odootr/odoo,havt/odoo,Nowheresly/odoo,odootr/odoo,nuuuboo/odoo,shaufi10/odoo,ramitalat/odoo,jusdng/odoo,hubsaysnuaa/odoo,Bachaco-ve/odoo,mszewczy/odoo,avoinsystems/odoo,gavin-feng/odoo,ramitalat/odoo,hanicker/odoo,acshan/odoo,abstract-open-solutions/OCB,mlaitinen/odoo,gvb/odoo,arthru/OpenUpgrade,janocat/odoo,ChanduERP/odoo,rowemoore/odoo,ClearCorp-dev/odoo,fdvarela/odoo8,BT-ojossen/odoo,fossoult/odoo,OpusVL/odoo,fgesora/odoo,jusdng/odoo,charbeljc/OCB,nagyistoce/odoo-dev-odoo,JGarcia-Panach/odoo,slevenhagen/odoo,jiachenning/odoo,shingonoide/odoo,alexcuellar/odoo,diagramsoftware/odoo,idncom/odoo,vnsofthe/odoo,sysadminmatmoz/OCB,steedos/odoo,odoo-turkiye/odoo,Grirrane/odoo,poljeff/odoo,hassoon3/odoo,poljeff/odoo,mkieszek/odoo,rowemoore/odoo,hassoon3/odoo,cedk/odoo,sv-dev1/odoo,ClearCorp-dev/odoo,javierTerry/odoo,slevenhagen/odoo-npg,aviciimaxwell/odoo,odoo-turkiye/odoo,juanalfonsopr/odoo,odoo-turkiye/odoo,windedge/odoo,hassoon3/odoo,fuhongliang/odoo,ecosoft-odoo/odoo,leoliujie/odoo,datenbetrieb/odoo,odootr/odoo,mszewczy/odoo,sinbazhou/odoo,patmcb/odoo,vnsofthe/odoo,ecosoft-odoo/odoo,ClearCorp-dev/odoo,naousse/odoo,savoirfairelinux/OpenUpgrade,hifly/OpenUpgrade,VielSoft/odoo,srimai/odoo,savoirfairelinux/OpenUpgrade,mlaitinen/odoo,collex100/odoo,collex100/odoo,chiragjogi/odoo,hoatle/odoo,osvalr/odoo,c
hristophlsa/odoo,hubsaysnuaa/odoo,minhtuancn/odoo,diagramsoftware/odoo,Grirrane/odoo,dezynetechnologies/odoo,leoliujie/odoo,leorochael/odoo,minhtuancn/odoo,papouso/odoo,CatsAndDogsbvba/odoo,makinacorpus/odoo,salaria/odoo,Codefans-fan/odoo,tinkerthaler/odoo,andreparames/odoo,pedrobaeza/odoo,GauravSahu/odoo,mszewczy/odoo,ThinkOpen-Solutions/odoo,ClearCorp-dev/odoo,jesramirez/odoo,deKupini/erp,alexcuellar/odoo,MarcosCommunity/odoo,sysadminmatmoz/OCB,draugiskisprendimai/odoo,OpenUpgrade/OpenUpgrade,OpenUpgrade/OpenUpgrade,ccomb/OpenUpgrade,ihsanudin/odoo,x111ong/odoo,factorlibre/OCB,chiragjogi/odoo,dgzurita/odoo,Elico-Corp/odoo_OCB,VielSoft/odoo,NL66278/OCB,mvaled/OpenUpgrade,srimai/odoo,avoinsystems/odoo,Noviat/odoo,syci/OCB,wangjun/odoo,fossoult/odoo,fevxie/odoo,ojengwa/odoo,christophlsa/odoo,VielSoft/odoo,abstract-open-solutions/OCB,Ichag/odoo,inspyration/odoo,leoliujie/odoo,Grirrane/odoo,osvalr/odoo,slevenhagen/odoo,dalegregory/odoo,Elico-Corp/odoo_OCB,Nowheresly/odoo,ihsanudin/odoo,ygol/odoo,nhomar/odoo,MarcosCommunity/odoo,jiachenning/odoo,synconics/odoo,simongoffin/website_version,x111ong/odoo,janocat/odoo,abdellatifkarroum/odoo,jeasoft/odoo,oihane/odoo,fevxie/odoo,glovebx/odoo,grap/OpenUpgrade,tvtsoft/odoo8,jeasoft/odoo,0k/odoo,minhtuancn/odoo,jpshort/odoo,feroda/odoo,CatsAndDogsbvba/odoo,hassoon3/odoo,nuncjo/odoo,guerrerocarlos/odoo,avoinsystems/odoo,osvalr/odoo,jeasoft/odoo,deKupini/erp,wangjun/odoo,0k/odoo,takis/odoo,collex100/odoo,nuuuboo/odoo,dgzurita/odoo,omprakasha/odoo,jeasoft/odoo,feroda/odoo,Bachaco-ve/odoo,jusdng/odoo,tarzan0820/odoo,provaleks/o8,apanju/odoo,joariasl/odoo,ChanduERP/odoo,leorochael/odoo,jaxkodex/odoo,cpyou/odoo,acshan/odoo,sadleader/odoo,dariemp/odoo,BT-fgarbely/odoo,Drooids/odoo,lombritz/odoo,simongoffin/website_version,gavin-feng/odoo,Eric-Zhong/odoo,brijeshkesariya/odoo,ShineFan/odoo,dalegregory/odoo,Danisan/odoo-1,bwrsandman/OpenUpgrade,andreparames/odoo,jusdng/odoo,hmen89/odoo,rgeleta/odoo,vnsofthe/odoo,blaggacao/OpenUpgrade,fuhon
gliang/odoo,apanju/GMIO_Odoo,tinkerthaler/odoo,juanalfonsopr/odoo,fuselock/odoo,spadae22/odoo,hifly/OpenUpgrade,glovebx/odoo,abdellatifkarroum/odoo,storm-computers/odoo,avoinsystems/odoo,wangjun/odoo,elmerdpadilla/iv,0k/odoo,jfpla/odoo,ramadhane/odoo,tinkhaven-organization/odoo,lombritz/odoo,hbrunn/OpenUpgrade,charbeljc/OCB,sadleader/odoo,omprakasha/odoo,savoirfairelinux/OpenUpgrade,shaufi10/odoo,steedos/odoo,lsinfo/odoo,ygol/odoo,rubencabrera/odoo,Antiun/odoo,BT-fgarbely/odoo,matrixise/odoo,pedrobaeza/OpenUpgrade,lightcn/odoo,SAM-IT-SA/odoo,microcom/odoo,sebalix/OpenUpgrade,KontorConsulting/odoo,cloud9UG/odoo,Nick-OpusVL/odoo,kittiu/odoo,sysadminmatmoz/OCB,massot/odoo,kybriainfotech/iSocioCRM,numerigraphe/odoo,syci/OCB,mkieszek/odoo,cdrooom/odoo,luiseduardohdbackup/odoo,CopeX/odoo,GauravSahu/odoo,apanju/GMIO_Odoo,salaria/odoo,sve-odoo/odoo,hubsaysnuaa/odoo,ihsanudin/odoo,laslabs/odoo,ojengwa/odoo,deKupini/erp,idncom/odoo,srimai/odoo,lgscofield/odoo,srimai/odoo,OpenUpgrade/OpenUpgrade,QianBIG/odoo,GauravSahu/odoo,klunwebale/odoo,tinkhaven-organization/odoo,fuselock/odoo,Bachaco-ve/odoo,n0m4dz/odoo,BT-fgarbely/odoo,tangyiyong/odoo,pedrobaeza/OpenUpgrade,Daniel-CA/odoo,acshan/odoo,laslabs/odoo,nagyistoce/odoo-dev-odoo,AuyaJackie/odoo,alexteodor/odoo,apanju/odoo,odooindia/odoo,xujb/odoo,zchking/odoo,dezynetechnologies/odoo,naousse/odoo,aviciimaxwell/odoo,steedos/odoo,ehirt/odoo,NL66278/OCB,nhomar/odoo-mirror,kittiu/odoo,cpyou/odoo,glovebx/odoo,virgree/odoo,ojengwa/odoo,ecosoft-odoo/odoo,Kilhog/odoo,OpenUpgrade/OpenUpgrade,fgesora/odoo,joariasl/odoo,goliveirab/odoo,salaria/odoo,Adel-Magebinary/odoo,AuyaJackie/odoo,ovnicraft/odoo,ramadhane/odoo,thanhacun/odoo,RafaelTorrealba/odoo,JGarcia-Panach/odoo,patmcb/odoo,thanhacun/odoo,prospwro/odoo,dariemp/odoo,oliverhr/odoo,kifcaliph/odoo,erkrishna9/odoo,Nick-OpusVL/odoo,cloud9UG/odoo,jiachenning/odoo,BT-astauder/odoo,collex100/odoo,matrixise/odoo,abdellatifkarroum/odoo,blaggacao/OpenUpgrade,virgree/odoo,BT-ojossen/odoo,CubicERP
/odoo,Daniel-CA/odoo,leorochael/odoo,Ichag/odoo,sve-odoo/odoo,christophlsa/odoo,gsmartway/odoo,andreparames/odoo,optima-ict/odoo,ApuliaSoftware/odoo,savoirfairelinux/odoo,TRESCLOUD/odoopub,ingadhoc/odoo,gsmartway/odoo,rdeheele/odoo,kifcaliph/odoo,dezynetechnologies/odoo,provaleks/o8,AuyaJackie/odoo,storm-computers/odoo,thanhacun/odoo,shingonoide/odoo,nuuuboo/odoo,janocat/odoo,lgscofield/odoo,optima-ict/odoo,Nowheresly/odoo,jpshort/odoo,gsmartway/odoo,Drooids/odoo,patmcb/odoo,Endika/OpenUpgrade,shingonoide/odoo,guewen/OpenUpgrade,hip-odoo/odoo,jolevq/odoopub,collex100/odoo,hubsaysnuaa/odoo,MarcosCommunity/odoo,ramadhane/odoo,oihane/odoo,gorjuce/odoo,cedk/odoo,janocat/odoo,spadae22/odoo,VielSoft/odoo,alqfahad/odoo,dfang/odoo,joshuajan/odoo,lightcn/odoo,poljeff/odoo,bkirui/odoo,naousse/odoo,tinkhaven-organization/odoo,jesramirez/odoo,hbrunn/OpenUpgrade,glovebx/odoo,numerigraphe/odoo,massot/odoo,CatsAndDogsbvba/odoo,Endika/OpenUpgrade,odoo-turkiye/odoo,NeovaHealth/odoo,odootr/odoo,fossoult/odoo,florentx/OpenUpgrade,shaufi10/odoo,Ichag/odoo,jiangzhixiao/odoo,BT-rmartin/odoo,zchking/odoo,slevenhagen/odoo-npg,oihane/odoo,dezynetechnologies/odoo,storm-computers/odoo,fjbatresv/odoo,simongoffin/website_version,nitinitprof/odoo,CubicERP/odoo,kittiu/odoo,csrocha/OpenUpgrade,guewen/OpenUpgrade,Nowheresly/odoo,mszewczy/odoo,savoirfairelinux/OpenUpgrade,ingadhoc/odoo,ihsanudin/odoo,draugiskisprendimai/odoo,apocalypsebg/odoo,avoinsystems/odoo,Ernesto99/odoo,markeTIC/OCB,Maspear/odoo,andreparames/odoo,hifly/OpenUpgrade,bplancher/odoo,sebalix/OpenUpgrade,sinbazhou/odoo,bguillot/OpenUpgrade,doomsterinc/odoo,agrista/odoo-saas,dkubiak789/odoo,bobisme/odoo,BT-ojossen/odoo,florian-dacosta/OpenUpgrade,prospwro/odoo,gvb/odoo,tinkhaven-organization/odoo,MarcosCommunity/odoo,GauravSahu/odoo,CatsAndDogsbvba/odoo,makinacorpus/odoo,shaufi/odoo,apanju/odoo,joariasl/odoo,damdam-s/OpenUpgrade,BT-astauder/odoo,papouso/odoo,nagyistoce/odoo-dev-odoo,hmen89/odoo,christophlsa/odoo,CopeX/odoo,ubic135/odo
o-design,Nick-OpusVL/odoo,abdellatifkarroum/odoo,leorochael/odoo,ubic135/odoo-design,demon-ru/iml-crm,salaria/odoo,ChanduERP/odoo,NeovaHealth/odoo,tarzan0820/odoo,rowemoore/odoo,odoousers2014/odoo,abstract-open-solutions/OCB,CopeX/odoo,steedos/odoo,ccomb/OpenUpgrade,janocat/odoo,JonathanStein/odoo,sv-dev1/odoo,markeTIC/OCB,odoo-turkiye/odoo,jfpla/odoo,srsman/odoo,alexcuellar/odoo,juanalfonsopr/odoo,PongPi/isl-odoo,alexcuellar/odoo,ehirt/odoo,csrocha/OpenUpgrade,ojengwa/odoo,0k/OpenUpgrade,microcom/odoo,sysadminmatmoz/OCB,funkring/fdoo,BT-astauder/odoo,dsfsdgsbngfggb/odoo,steedos/odoo,demon-ru/iml-crm,addition-it-solutions/project-all,OpenUpgrade-dev/OpenUpgrade,Gitlab11/odoo,alexcuellar/odoo,luistorresm/odoo,gvb/odoo,fdvarela/odoo8,sebalix/OpenUpgrade,fevxie/odoo,srsman/odoo,Grirrane/odoo,Endika/OpenUpgrade,wangjun/odoo,virgree/odoo,mkieszek/odoo,ubic135/odoo-design,Danisan/odoo-1,odootr/odoo,ramitalat/odoo,bwrsandman/OpenUpgrade,FlorianLudwig/odoo,ovnicraft/odoo,rdeheele/odoo,blaggacao/OpenUpgrade,brijeshkesariya/odoo,PongPi/isl-odoo,x111ong/odoo,Kilhog/odoo,rgeleta/odoo,guerrerocarlos/odoo,gorjuce/odoo,JonathanStein/odoo,Eric-Zhong/odoo,luistorresm/odoo,mvaled/OpenUpgrade,bakhtout/odoo-educ,realsaiko/odoo,mmbtba/odoo,hmen89/odoo,pplatek/odoo,Endika/OpenUpgrade,dariemp/odoo,fdvarela/odoo8,cloud9UG/odoo,oihane/odoo,nagyistoce/odoo-dev-odoo,Antiun/odoo,guerrerocarlos/odoo,charbeljc/OCB,BT-rmartin/odoo,Nowheresly/odoo,Endika/odoo,FlorianLudwig/odoo,nexiles/odoo,tangyiyong/odoo,jesramirez/odoo,nuuuboo/odoo,ihsanudin/odoo,florentx/OpenUpgrade,NeovaHealth/odoo,zchking/odoo,dfang/odoo,joariasl/odoo,CatsAndDogsbvba/odoo,ehirt/odoo,ujjwalwahi/odoo,shivam1111/odoo,nhomar/odoo-mirror,factorlibre/OCB,lombritz/odoo,tvtsoft/odoo8,kittiu/odoo,aviciimaxwell/odoo,CatsAndDogsbvba/odoo,Ichag/odoo,fuhongliang/odoo,arthru/OpenUpgrade,sadleader/odoo,ujjwalwahi/odoo,christophlsa/odoo,jpshort/odoo,kittiu/odoo,TRESCLOUD/odoopub,shaufi/odoo,incaser/odoo-odoo,waytai/odoo,x111ong/odoo,storm-c
omputers/odoo,kirca/OpenUpgrade,ClearCorp-dev/odoo,ygol/odoo,mvaled/OpenUpgrade,Endika/odoo,luiseduardohdbackup/odoo,cpyou/odoo,bakhtout/odoo-educ,Ernesto99/odoo,incaser/odoo-odoo,abdellatifkarroum/odoo,dllsf/odootest,nhomar/odoo-mirror,stonegithubs/odoo,apocalypsebg/odoo,Codefans-fan/odoo,tinkhaven-organization/odoo,jpshort/odoo,pedrobaeza/odoo,rubencabrera/odoo,sinbazhou/odoo,tvtsoft/odoo8,shaufi10/odoo,idncom/odoo,numerigraphe/odoo,OpusVL/odoo,VielSoft/odoo,ygol/odoo,PongPi/isl-odoo,thanhacun/odoo,jfpla/odoo,omprakasha/odoo,srimai/odoo,grap/OpenUpgrade,Antiun/odoo,dariemp/odoo,Kilhog/odoo,sve-odoo/odoo,cedk/odoo,Eric-Zhong/odoo,lombritz/odoo,ccomb/OpenUpgrade,Daniel-CA/odoo,synconics/odoo,stonegithubs/odoo,apocalypsebg/odoo,takis/odoo,JonathanStein/odoo,mmbtba/odoo,highco-groupe/odoo,bakhtout/odoo-educ,massot/odoo,gsmartway/odoo,virgree/odoo,pedrobaeza/OpenUpgrade,CubicERP/odoo,hifly/OpenUpgrade,damdam-s/OpenUpgrade,Ernesto99/odoo,janocat/odoo,juanalfonsopr/odoo,waytai/odoo,lightcn/odoo,salaria/odoo,omprakasha/odoo,tinkerthaler/odoo,storm-computers/odoo,dalegregory/odoo,SAM-IT-SA/odoo,vnsofthe/odoo,savoirfairelinux/OpenUpgrade,datenbetrieb/odoo,dfang/odoo,RafaelTorrealba/odoo,FlorianLudwig/odoo,apocalypsebg/odoo,acshan/odoo,ovnicraft/odoo,cysnake4713/odoo,rahuldhote/odoo,jfpla/odoo,alexteodor/odoo,Grirrane/odoo,0k/odoo,ehirt/odoo,bealdav/OpenUpgrade,patmcb/odoo,florian-dacosta/OpenUpgrade,dgzurita/odoo,grap/OpenUpgrade,idncom/odoo,csrocha/OpenUpgrade,bguillot/OpenUpgrade,syci/OCB,grap/OpenUpgrade,xujb/odoo,demon-ru/iml-crm,srimai/odoo,Endika/OpenUpgrade,nitinitprof/odoo,bplancher/odoo,florian-dacosta/OpenUpgrade,mmbtba/odoo,matrixise/odoo,deKupini/erp,zchking/odoo,luiseduardohdbackup/odoo,florian-dacosta/OpenUpgrade,RafaelTorrealba/odoo,windedge/odoo,ChanduERP/odoo,mlaitinen/odoo,nuncjo/odoo,optima-ict/odoo,oasiswork/odoo,Adel-Magebinary/odoo,ApuliaSoftware/odoo,funkring/fdoo,alhashash/odoo,cdrooom/odoo,0k/OpenUpgrade,diagramsoftware/odoo,jolevq/odoopub,salaria/o
doo,cloud9UG/odoo,sergio-incaser/odoo,hmen89/odoo,feroda/odoo,poljeff/odoo,SerpentCS/odoo,colinnewell/odoo,hopeall/odoo,markeTIC/OCB,dsfsdgsbngfggb/odoo,Gitlab11/odoo,diagramsoftware/odoo,havt/odoo,dariemp/odoo,Elico-Corp/odoo_OCB,bkirui/odoo,prospwro/odoo,SAM-IT-SA/odoo,nuncjo/odoo,hmen89/odoo,hanicker/odoo,NL66278/OCB,mmbtba/odoo,bguillot/OpenUpgrade,jeasoft/odoo,ovnicraft/odoo,omprakasha/odoo,MarcosCommunity/odoo,klunwebale/odoo,ehirt/odoo,pplatek/odoo,alhashash/odoo,massot/odoo,fuhongliang/odoo,idncom/odoo,dalegregory/odoo,ujjwalwahi/odoo,Adel-Magebinary/odoo,nuncjo/odoo,FlorianLudwig/odoo,feroda/odoo,dkubiak789/odoo,lightcn/odoo,ShineFan/odoo,Ichag/odoo,savoirfairelinux/odoo,SerpentCS/odoo,shaufi/odoo,naousse/odoo,markeTIC/OCB,fjbatresv/odoo,jolevq/odoopub,juanalfonsopr/odoo,agrista/odoo-saas,hbrunn/OpenUpgrade,datenbetrieb/odoo,poljeff/odoo,synconics/odoo,stonegithubs/odoo,pplatek/odoo,mmbtba/odoo,thanhacun/odoo,dariemp/odoo,0k/OpenUpgrade,Nick-OpusVL/odoo,kirca/OpenUpgrade,oasiswork/odoo,ramadhane/odoo,jpshort/odoo,hubsaysnuaa/odoo,ccomb/OpenUpgrade,mkieszek/odoo,takis/odoo,slevenhagen/odoo-npg,ccomb/OpenUpgrade,syci/OCB,provaleks/o8,KontorConsulting/odoo,kybriainfotech/iSocioCRM,hanicker/odoo,Drooids/odoo,jfpla/odoo,Grirrane/odoo,PongPi/isl-odoo,SerpentCS/odoo,arthru/OpenUpgrade,Elico-Corp/odoo_OCB,slevenhagen/odoo,Antiun/odoo,dkubiak789/odoo,ecosoft-odoo/odoo,Antiun/odoo,doomsterinc/odoo,sv-dev1/odoo,Adel-Magebinary/odoo,rgeleta/odoo,ingadhoc/odoo,NeovaHealth/odoo,hip-odoo/odoo,Eric-Zhong/odoo,kirca/OpenUpgrade,damdam-s/OpenUpgrade,NL66278/OCB,kirca/OpenUpgrade,highco-groupe/odoo,jiangzhixiao/odoo,nitinitprof/odoo,srsman/odoo,savoirfairelinux/OpenUpgrade,lsinfo/odoo,Drooids/odoo,acshan/odoo,dllsf/odootest,minhtuancn/odoo,mvaled/OpenUpgrade,Noviat/odoo,ChanduERP/odoo,jeasoft/odoo,Nick-OpusVL/odoo,Kilhog/odoo,arthru/OpenUpgrade,hifly/OpenUpgrade,papouso/odoo,tinkerthaler/odoo,ecosoft-odoo/odoo,aviciimaxwell/odoo,javierTerry/odoo,Codefans-fan/odoo,odoousers201
4/odoo,BT-astauder/odoo,wangjun/odoo,oliverhr/odoo,addition-it-solutions/project-all,jpshort/odoo,fevxie/odoo,Maspear/odoo,ujjwalwahi/odoo,dfang/odoo,draugiskisprendimai/odoo,jolevq/odoopub,sebalix/OpenUpgrade,tvibliani/odoo,MarcosCommunity/odoo,leoliujie/odoo,rdeheele/odoo,ujjwalwahi/odoo,steedos/odoo,ShineFan/odoo,alexcuellar/odoo,nagyistoce/odoo-dev-odoo,BT-rmartin/odoo,sergio-incaser/odoo,rahuldhote/odoo,shaufi10/odoo,bguillot/OpenUpgrade,lgscofield/odoo,apanju/GMIO_Odoo,goliveirab/odoo,florian-dacosta/OpenUpgrade,aviciimaxwell/odoo,hopeall/odoo,minhtuancn/odoo,xzYue/odoo,CubicERP/odoo,rowemoore/odoo,minhtuancn/odoo,BT-fgarbely/odoo,diagramsoftware/odoo,lightcn/odoo,fossoult/odoo,slevenhagen/odoo-npg,rowemoore/odoo,doomsterinc/odoo,luistorresm/odoo,hopeall/odoo,srsman/odoo,slevenhagen/odoo,highco-groupe/odoo,srimai/odoo,CubicERP/odoo,lightcn/odoo,nuuuboo/odoo,numerigraphe/odoo,klunwebale/odoo,jiachenning/odoo,waytai/odoo
|
b196d5e58c611508bbc0bf891752d6abf135b67d
|
generic/example/manage.py
|
generic/example/manage.py
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "example.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "example.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
Make sure sys.path is setup correctly
|
Make sure sys.path is setup correctly
|
Python
|
apache-2.0
|
texastribune/tt_app_templates,texastribune/tt_app_templates
|
43d5041c09caadd7bd67195ba7519e8ef006f506
|
corehq/pillows/group.py
|
corehq/pillows/group.py
|
from corehq.apps.groups.models import Group
from corehq.pillows.mappings.group_mapping import GROUP_INDEX, GROUP_MAPPING
from dimagi.utils.decorators.memoized import memoized
from pillowtop.listener import AliasedElasticPillow
from django.conf import settings
class GroupPillow(AliasedElasticPillow):
"""
Simple/Common Case properties Indexer
"""
document_class = Group
couch_filter = "groups/all_groups"
es_host = settings.ELASTICSEARCH_HOST
es_port = settings.ELASTICSEARCH_PORT
es_timeout = 60
es_index_prefix = "hqgroups"
es_alias = "hqgroups"
es_type = "group"
es_meta = {
"settings": {
"analysis": {
"analyzer": {
"default": {
"type": "custom",
"tokenizer": "whitespace",
"filter": ["lowercase"]
},
}
}
}
}
es_index = GROUP_INDEX
default_mapping = GROUP_MAPPING
@memoized
def calc_meta(self):
#todo: actually do this correctly
"""
override of the meta calculator since we're separating out all the types,
so we just do a hash of the "prototype" instead to determined md5
"""
return self.calc_mapping_hash({"es_meta": self.es_meta,
"mapping": self.default_mapping})
def get_mapping_from_type(self, doc_dict):
"""
Define mapping uniquely to the user_type document.
See below on why date_detection is False
NOTE: DO NOT MODIFY THIS UNLESS ABSOLUTELY NECESSARY. A CHANGE BELOW WILL GENERATE A NEW
HASH FOR THE INDEX NAME REQUIRING A REINDEX+RE-ALIAS. THIS IS A SERIOUSLY RESOURCE
INTENSIVE OPERATION THAT REQUIRES SOME CAREFUL LOGISTICS TO MIGRATE
"""
#the meta here is defined for when the case index + type is created for the FIRST time
#subsequent data added to it will be added automatically, but date_detection is necessary
# to be false to prevent indexes from not being created due to the way we store dates
#all are strings EXCEPT the core case properties which we need to explicitly define below.
#that way date sort and ranges will work with canonical date formats for queries.
return {
self.get_type_string(doc_dict): self.default_mapping
}
def get_type_string(self, doc_dict):
return self.es_type
|
from corehq.apps.groups.models import Group
from corehq.pillows.mappings.group_mapping import GROUP_INDEX, GROUP_MAPPING
from dimagi.utils.decorators.memoized import memoized
from pillowtop.listener import AliasedElasticPillow
from django.conf import settings
class GroupPillow(HQPillow):
"""
Simple/Common Case properties Indexer
"""
document_class = Group
couch_filter = "groups/all_groups"
es_index_prefix = "hqgroups"
es_alias = "hqgroups"
es_type = "group"
es_index = GROUP_INDEX
default_mapping = GROUP_MAPPING
|
Switch over to extend HQPillow
|
Switch over to extend HQPillow
|
Python
|
bsd-3-clause
|
qedsoftware/commcare-hq,puttarajubr/commcare-hq,gmimano/commcaretest,dimagi/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,SEL-Columbia/commcare-hq,gmimano/commcaretest,SEL-Columbia/commcare-hq,puttarajubr/commcare-hq,gmimano/commcaretest,dimagi/commcare-hq,SEL-Columbia/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq
|
9eb265fa2142b559b15063bc9322fc60b46a300b
|
mezzanine/project_template/deploy/gunicorn.conf.py
|
mezzanine/project_template/deploy/gunicorn.conf.py
|
from __future__ import unicode_literals
import os
bind = "127.0.0.1:%(gunicorn_port)s"
workers = (os.sysconf("SC_NPROCESSORS_ONLN") * 2) + 1
loglevel = "error"
proc_name = "%(proj_name)s"
|
from __future__ import unicode_literals
import multiprocessing
bind = "127.0.0.1:%(gunicorn_port)s"
workers = multiprocessing.cpu_count() * 2 + 1
loglevel = "error"
proc_name = "%(proj_name)s"
|
Update to use multiprocessing library
|
Update to use multiprocessing library
|
Python
|
bsd-2-clause
|
promil23/mezzanine,damnfine/mezzanine,Cajoline/mezzanine,stephenmcd/mezzanine,agepoly/mezzanine,damnfine/mezzanine,mush42/mezzanine,eino-makitalo/mezzanine,mush42/mezzanine,stephenmcd/mezzanine,stephenmcd/mezzanine,emile2016/mezzanine,frankier/mezzanine,promil23/mezzanine,readevalprint/mezzanine,webounty/mezzanine,dovydas/mezzanine,ryneeverett/mezzanine,wbtuomela/mezzanine,tuxinhang1989/mezzanine,cccs-web/mezzanine,joshcartme/mezzanine,tuxinhang1989/mezzanine,joshcartme/mezzanine,eino-makitalo/mezzanine,mush42/mezzanine,dsanders11/mezzanine,dekomote/mezzanine-modeltranslation-backport,Kniyl/mezzanine,vladir/mezzanine,tuxinhang1989/mezzanine,agepoly/mezzanine,AlexHill/mezzanine,molokov/mezzanine,Skytorn86/mezzanine,Kniyl/mezzanine,SoLoHiC/mezzanine,molokov/mezzanine,dustinrb/mezzanine,sjdines/mezzanine,gradel/mezzanine,saintbird/mezzanine,PegasusWang/mezzanine,cccs-web/mezzanine,PegasusWang/mezzanine,christianwgd/mezzanine,wyzex/mezzanine,adrian-the-git/mezzanine,sjdines/mezzanine,PegasusWang/mezzanine,agepoly/mezzanine,damnfine/mezzanine,eino-makitalo/mezzanine,frankchin/mezzanine,jerivas/mezzanine,viaregio/mezzanine,douglaskastle/mezzanine,douglaskastle/mezzanine,gradel/mezzanine,SoLoHiC/mezzanine,adrian-the-git/mezzanine,jjz/mezzanine,ZeroXn/mezzanine,biomassives/mezzanine,promil23/mezzanine,spookylukey/mezzanine,frankchin/mezzanine,fusionbox/mezzanine,frankchin/mezzanine,dsanders11/mezzanine,gradel/mezzanine,sjuxax/mezzanine,ZeroXn/mezzanine,viaregio/mezzanine,frankier/mezzanine,spookylukey/mezzanine,Skytorn86/mezzanine,jjz/mezzanine,saintbird/mezzanine,dsanders11/mezzanine,webounty/mezzanine,vladir/mezzanine,webounty/mezzanine,jerivas/mezzanine,readevalprint/mezzanine,wbtuomela/mezzanine,fusionbox/mezzanine,theclanks/mezzanine,ryneeverett/mezzanine,batpad/mezzanine,frankier/mezzanine,jjz/mezzanine,sjuxax/mezzanine,douglaskastle/mezzanine,Cajoline/mezzanine,dustinrb/mezzanine,emile2016/mezzanine,viaregio/mezzanine,dovydas/mezzanine,biomassives/mezzanine,AlexHill/
mezzanine,molokov/mezzanine,jerivas/mezzanine,Cicero-Zhao/mezzanine,nikolas/mezzanine,ryneeverett/mezzanine,dekomote/mezzanine-modeltranslation-backport,dovydas/mezzanine,sjdines/mezzanine,vladir/mezzanine,adrian-the-git/mezzanine,emile2016/mezzanine,industrydive/mezzanine,saintbird/mezzanine,batpad/mezzanine,spookylukey/mezzanine,geodesign/mezzanine,ZeroXn/mezzanine,sjuxax/mezzanine,industrydive/mezzanine,geodesign/mezzanine,SoLoHiC/mezzanine,wyzex/mezzanine,wbtuomela/mezzanine,nikolas/mezzanine,readevalprint/mezzanine,dustinrb/mezzanine,industrydive/mezzanine,Cajoline/mezzanine,Kniyl/mezzanine,Skytorn86/mezzanine,wyzex/mezzanine,nikolas/mezzanine,christianwgd/mezzanine,dekomote/mezzanine-modeltranslation-backport,joshcartme/mezzanine,Cicero-Zhao/mezzanine,christianwgd/mezzanine,geodesign/mezzanine,theclanks/mezzanine,biomassives/mezzanine,theclanks/mezzanine
|
ebfe1254ea11112689fa606cd6c29100a26e058d
|
acme/acme/__init__.py
|
acme/acme/__init__.py
|
"""ACME protocol implementation.
This module is an implementation of the `ACME protocol`_. Latest
supported version: `v02`_.
.. _`ACME protocol`: https://github.com/letsencrypt/acme-spec
.. _`v02`:
https://github.com/letsencrypt/acme-spec/commit/d328fea2d507deb9822793c512830d827a4150c4
"""
|
"""ACME protocol implementation.
This module is an implementation of the `ACME protocol`_. Latest
supported version: `draft-ietf-acme-01`_.
.. _`ACME protocol`: https://github.com/ietf-wg-acme/acme/
.. _`draft-ietf-acme-01`:
https://github.com/ietf-wg-acme/acme/tree/draft-ietf-acme-acme-01
"""
|
Update the ACME github repository URL.
|
Update the ACME github repository URL.
|
Python
|
apache-2.0
|
jsha/letsencrypt,letsencrypt/letsencrypt,mitnk/letsencrypt,stweil/letsencrypt,bsmr-misc-forks/letsencrypt,stweil/letsencrypt,letsencrypt/letsencrypt,VladimirTyrin/letsencrypt,lmcro/letsencrypt,lmcro/letsencrypt,DavidGarciaCat/letsencrypt,DavidGarciaCat/letsencrypt,twstrike/le_for_patching,brentdax/letsencrypt,mitnk/letsencrypt,bsmr-misc-forks/letsencrypt,TheBoegl/letsencrypt,kuba/letsencrypt,wteiken/letsencrypt,kuba/letsencrypt,jtl999/certbot,VladimirTyrin/letsencrypt,thanatos/lets-encrypt-preview,dietsche/letsencrypt,TheBoegl/letsencrypt,jtl999/certbot,thanatos/lets-encrypt-preview,dietsche/letsencrypt,jsha/letsencrypt,brentdax/letsencrypt,twstrike/le_for_patching,wteiken/letsencrypt
|
dac411035f12f92f336d6c42aa3103b3c04f01ab
|
backend/populate_dimkarakostas.py
|
backend/populate_dimkarakostas.py
|
from string import ascii_lowercase
import django
import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings')
django.setup()
from breach.models import Target, Victim
endpoint = 'https://dimkarakostas.com/rupture/test.php?ref=%s'
prefix = 'imper'
alphabet = ascii_lowercase
secretlength = 9
target_1 = Target(
endpoint=endpoint,
prefix=prefix,
alphabet=alphabet,
secretlength=secretlength
)
target_1.save()
print 'Created Target:\n\tendpoint: {}\n\tprefix: {}\n\talphabet: {}\n\tsecretlength: {}'.format(endpoint, prefix, alphabet, secretlength)
snifferendpoint = 'http://127.0.0.1:9000'
sourceip = '192.168.1.70'
victim_1 = Victim(
target=target_1,
snifferendpoint=snifferendpoint,
sourceip=sourceip
)
victim_1.save()
print 'Created Victim:\n\tvictim_id: {}\n\tsnifferendpoint: {}\n\tsourceip: {}'.format(victim_1.id, snifferendpoint, sourceip)
|
from string import ascii_lowercase
import django
import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings')
django.setup()
from breach.models import Target, Victim
endpoint = 'https://dimkarakostas.com/rupture/test.php?ref=%s'
prefix = 'imper'
alphabet = ascii_lowercase
secretlength = 9
target_1 = Target(
endpoint=endpoint,
prefix=prefix,
alphabet=alphabet,
secretlength=secretlength
)
target_1.save()
print 'Created Target:\n\tendpoint: {}\n\tprefix: {}\n\talphabet: {}\n\tsecretlength: {}'.format(endpoint, prefix, alphabet, secretlength)
snifferendpoint = 'http://127.0.0.1:9000'
sourceip = '192.168.1.70'
victim_1 = Victim(
target=target_1,
snifferendpoint=snifferendpoint,
sourceip=sourceip,
# method='serial'
)
victim_1.save()
print 'Created Victim:\n\tvictim_id: {}\n\tsnifferendpoint: {}\n\tsourceip: {}'.format(victim_1.id, snifferendpoint, sourceip)
|
Add method comment to population script for easy deploy
|
Add method comment to population script for easy deploy
|
Python
|
mit
|
dionyziz/rupture,dimkarakostas/rupture,esarafianou/rupture,dionyziz/rupture,dimriou/rupture,dimkarakostas/rupture,dionyziz/rupture,dimriou/rupture,dimkarakostas/rupture,dimkarakostas/rupture,dionyziz/rupture,esarafianou/rupture,dimriou/rupture,dimriou/rupture,esarafianou/rupture,esarafianou/rupture,dimkarakostas/rupture,dimriou/rupture,dionyziz/rupture
|
1eb2d3b9fa773455e9c69921b58529241e59b00e
|
thezombies/management/commands/report_invalid_urls.py
|
thezombies/management/commands/report_invalid_urls.py
|
from __future__ import division
from django.core.management.base import BaseCommand
from django.utils import timezone
from thezombies.models import (Agency, Probe)
REPORT_DATE_FORMATTER = u"{:%Y-%m-%d %I:%M%p %Z}\n"
class Command(BaseCommand):
"""Show some information on invalid/bad urls"""
def handle(self, *args, **kwargs):
agency_list = Agency.objects.all()
self.stdout.write(u"# Invalid URL Report\n")
report_date = REPORT_DATE_FORMATTER.format(timezone.localtime(timezone.now()))
self.stdout.write(u"Report generated: {0}\n\n".format(report_date))
for agency in agency_list:
self.stdout.write('## Agency: {0}\n\n'.format(agency.name))
probe_list = Probe.objects.filter(audit__agency=agency, result__contains={'valid_url': 'false'})
if probe_list.count() == 0:
self.stdout.write('None!\n\n')
for probe in probe_list:
self.stdout.write('* {0}'.format(probe.result.get('initial_url', '???')))
if probe_list.count() > 0:
self.stdout.write('\n')
|
from __future__ import division
from django.core.management.base import BaseCommand
from django.utils import timezone
from thezombies.models import (Agency, Probe)
REPORT_DATE_FORMATTER = u"{:%Y-%m-%d %I:%M%p %Z}\n"
class Command(BaseCommand):
"""Show some information on invalid/bad urls"""
def handle(self, *args, **kwargs):
agency_list = Agency.objects.all()
self.stdout.write(u"# Invalid URL Report\n")
report_date = REPORT_DATE_FORMATTER.format(timezone.localtime(timezone.now()))
self.stdout.write(u"Report generated: {0}\n\n".format(report_date))
for agency in agency_list:
self.stdout.write('## Agency: {0}\n\n'.format(agency.name))
probe_list = Probe.objects.filter(audit__agency=agency, result__contains={'valid_url': 'false'})
if probe_list.count() == 0:
self.stdout.write('None!\n\n')
else:
self.stdout.write('URL Count: {0}\n\n'.format(probe_list.count()))
for probe in probe_list:
self.stdout.write('* {0}'.format(probe.result.get('initial_url', '???')))
if probe_list.count() > 0:
self.stdout.write('\n')
|
Add url count per agency to the invalid url report
|
Add url count per agency to the invalid url report
|
Python
|
bsd-3-clause
|
sunlightlabs/thezombies,sunlightlabs/thezombies,sunlightlabs/thezombies,sunlightlabs/thezombies
|
462b8e53c8a1add0f471f53d31718816939f1372
|
cineapp/utils.py
|
cineapp/utils.py
|
# -*- coding: utf-8 -*-
def frange(start, end, step):
tmp = start
while(tmp <= end):
yield tmp
tmp += step
|
# -*- coding: utf-8 -*-
from cineapp import db
from cineapp.models import Movie, Mark
from sqlalchemy.sql.expression import literal, desc
def frange(start, end, step):
tmp = start
while(tmp <= end):
yield tmp
tmp += step
def get_activity_list(start, length):
"""
Returns an array containing activity records ordered by descending date
Params are a range of records we want to have in the returned array
"""
# Object_items
object_dict={"count": 0, "list": []}
object_list=[]
# Movie Query
movies_query=db.session.query(Movie.id,literal("user_id").label("user_id"),Movie.added_when.label("entry_date"),literal("movies").label("entry_type"))
# Marks Query
marks_query=db.session.query(Mark.movie_id,Mark.user_id.label("user_id"),Mark.updated_when.label("entry_date"),literal("marks").label("entry_type")).filter(Mark.mark != None)
# Homework Query
homework_query=db.session.query(Mark.movie_id,Mark.user_id.label("user_id"),Mark.homework_when.label("entry_date"),literal("homeworks").label("entry_type")).filter(Mark.homework_when != None)
# Build the union request
activity_list = movies_query.union(marks_query,homework_query).order_by(desc("entry_date")).slice(int(start),int(start) + int(length))
for cur_item in activity_list:
if cur_item.entry_type == "movies":
object_list.append({"entry_type": "movies", "object" : Movie.query.get(cur_item.id)})
elif cur_item.entry_type == "marks":
object_list.append({"entry_type": "marks", "object" : Mark.query.get((cur_item.user_id,cur_item.id))})
elif cur_item.entry_type == "homeworks":
object_list.append({"entry_type" : "homeworks", "object" : Mark.query.get((cur_item.user_id,cur_item.id))})
# Count activity number (Will be used for the datatable pagination)
object_dict["count"]=movies_query.union(marks_query,homework_query).order_by(desc("entry_date")).count()
object_dict["list"]=object_list
# Return the filled object
return object_dict
|
Move activity SQL query into a dedicated function
|
Move activity SQL query into a dedicated function
The SQL query based on the UNION predicate is now into a function which takes as
parameters the number of record we want to have in the result. This function
will be used for the activity dashboard and also for the new global activity
page using the datatable plugin.
The subqueries have been also updated in order to exclude some records which
shouldn't be fetched :
- For the homework don't fetch records without an homework date
- For the mark, don't fetch record without a mark
|
Python
|
mit
|
ptitoliv/cineapp,ptitoliv/cineapp,ptitoliv/cineapp
|
2280624b54ec8f1ebae656336fab13d032f504ad
|
antevents/__init__.py
|
antevents/__init__.py
|
# Copyright 2016 by MPI-SWS and Data-Ken Research.
# Licensed under the Apache 2.0 License.
"""
This is the main package for antevents. Directly within this package you fill
find the following modules:
* `base` - the core abstractions and classes of the system.
* `sensor` - defines data types and functions specifically for sensor events.
The rest of the functionality is in sub-packages:
* `adapters` - components to read/write events outside the system
* `internal` - some internal definitions
* `linq` - filters that allow linq-style query pipelines over event streams
"""
__version__ = "1.0"
|
# Copyright 2016 by MPI-SWS and Data-Ken Research.
# Licensed under the Apache 2.0 License.
"""
This is the main package for antevents. Directly within this package you fill
find the following module:
* `base` - the core abstractions and classes of the system.
The rest of the functionality is in sub-packages:
* `adapters` - components to read/write events outside the system
* `internal` - some internal definitions
* `linq` - filters that allow linq-style query pipelines over event streams
* `sensors` - interfaces to sensors go here
"""
__version__ = "1.0"
|
Update doc string on location of sensors
|
Update doc string on location of sensors
|
Python
|
apache-2.0
|
mpi-sws-rse/thingflow-python,mpi-sws-rse/antevents-python,mpi-sws-rse/thingflow-python,mpi-sws-rse/antevents-python
|
d36053764e8a5776d3c37a7e35beb9ba5cb67386
|
dask/diagnostics/__init__.py
|
dask/diagnostics/__init__.py
|
from .profile import Profiler, ResourceProfiler
from .progress import ProgressBar
|
from .profile import Profiler, ResourceProfiler
from .progress import ProgressBar
try:
from .profile_visualize import visualize
except ImportError:
pass
|
Add visualize to diagnostics import
|
Add visualize to diagnostics import
|
Python
|
bsd-3-clause
|
PhE/dask,mraspaud/dask,vikhyat/dask,dask/dask,jcrist/dask,vikhyat/dask,mrocklin/dask,mrocklin/dask,cpcloud/dask,pombredanne/dask,ContinuumIO/dask,ContinuumIO/dask,dask/dask,gameduell/dask,blaze/dask,blaze/dask,jakirkham/dask,chrisbarber/dask,jcrist/dask,PhE/dask,mikegraham/dask,jakirkham/dask,cowlicks/dask,mraspaud/dask,pombredanne/dask
|
6c417f49ebd0f466ebf8100a28006e7c5ea2ff3d
|
tests/lib/__init__.py
|
tests/lib/__init__.py
|
from os.path import abspath, dirname, join
import sh
import psycopg2
import requests
PROJECT_FOLDER=dirname(dirname(abspath(__file__)))
DOCKER_FOLDER=join(PROJECT_FOLDER, 'docker')
def docker_compose(version, *args):
sh.docker_compose('-f', 'docker/{version}/docker-compose.yml'.format(version=version), *args)
def test_pg():
try:
with psycopg2.connect(host='localhost', port=5432, user='postgres', dbname='postgres') as conn:
with conn.cursor() as cursor:
cursor.execute('select 1 + 1;')
return cursor.fetchone()[0] == 2
except Exception:
return False
def test_es():
try:
return requests.get('http://localhost:9200').json()['tagline'] == 'You Know, for Search'
except Exception:
return False
|
from os.path import abspath, dirname, join
import sh
import psycopg2
import requests
PROJECT_FOLDER=dirname(dirname(abspath(__file__)))
DOCKER_FOLDER=join(PROJECT_FOLDER, 'docker')
def docker_compose(version, *args):
sh.docker_compose('-f', 'docker/{version}/docker-compose.yml'.format(version=version), *args)
def test_pg():
def callback(cursor):
cursor.execute('select 1 + 1;')
return cursor.fetchone()[0] == 2
def error():
return False
return sql(callback, error)
def test_es():
try:
return requests.get('http://localhost:9200').json()['tagline'] == 'You Know, for Search'
except Exception:
return False
def sql(callback, error):
try:
with psycopg2.connect(host='localhost', port=5432, user='postgres', dbname='postgres') as conn:
with conn.cursor() as cursor:
return callback(cursor)
except Exception:
return error()
|
Create somewhat questionable sql function
|
Create somewhat questionable sql function
|
Python
|
mit
|
matthewfranglen/postgres-elasticsearch-fdw
|
b69301c57076f86e99f738d5434dd75fd912753d
|
tests/test_preview.py
|
tests/test_preview.py
|
import pytest
from web_test_base import *
class TestIATIPreview(WebTestBase):
requests_to_load = {
'IATI Preview': {
'url': 'http://preview.iatistandard.org/'
}
}
def test_contains_links(self, loaded_request):
"""
Test that each page contains links to the defined URLs.
"""
result = utility.get_links_from_page(loaded_request)
assert "http://www.iatistandard.org/" in result
|
import pytest
from web_test_base import *
class TestIATIPreview(WebTestBase):
requests_to_load = {
'IATI Preview': {
'url': 'http://preview.iatistandard.org/'
}
}
def test_contains_links(self, loaded_request):
"""
Test that each page contains links to the defined URLs.
"""
result = utility.get_links_from_page(loaded_request)
assert "http://www.iatistandard.org/" in result
@pytest.mark.parametrize("target_request", ["IATI Preview"])
def test_xml_web_address_form_presence(self, target_request):
"""
Test that there is a form to enter a URL of a valid XML file on the Preview Tool.
"""
req = self.loaded_request_from_test_name(target_request)
form_xpath = '//*[@id="main"]/div/div/div[1]/div/form'
form_action_xpath = '//*[@id="main"]/div/div/div[1]/div/form/@action'
form_method_xpath = '//*[@id="main"]/div/div/div[1]/div/form/@method'
input_xpath = '//*[@id="url"]'
button_xpath = '//*[@id="main"]/div/div/div[1]/div/form/div/div/span/button'
forms = utility.locate_xpath_result(req, form_xpath)
form_action = utility.locate_xpath_result(req, form_action_xpath)
form_method = utility.locate_xpath_result(req, form_method_xpath)
form_inputs = utility.locate_xpath_result(req, input_xpath)
form_buttons = utility.locate_xpath_result(req, input_xpath)
assert len(forms) == 1
assert form_action == ['index.php']
assert form_method == ['get']
assert len(form_inputs) == 1
assert len(form_buttons) == 1
|
Add test for form on Preview Tool
|
Add test for form on Preview Tool
Test to see that the form to enter a URL to use the Preview Tool
exists, has the correct action and has the correct elements.
|
Python
|
mit
|
IATI/IATI-Website-Tests
|
1b0d153b0f08e0ca5b962b0b9d839f745a035c62
|
tests/test_stock.py
|
tests/test_stock.py
|
import unittest
from datetime import datetime
from stock import Stock
class StockTest(unittest.TestCase):
def test_price_of_a_new_stock_class_should_be_None(self):
stock = Stock("GOOG")
self.assertIsNone(stock.price)
def test_stock_update(self):
"""An update should set the price on the stock object.
Notes:
We will be using the `datetime` module for the timestamp.
"""
goog = Stock("GOOG")
goog.update(datetime(2014, 2, 12), price=10)
self.assertEqual(10, goog.price)
if __name__ == "__main__":
unittest.main()
|
import unittest
from datetime import datetime
from stock import Stock
class StockTest(unittest.TestCase):
def test_new_stock_price(self):
"""A new stock should have a price that is None.
"""
stock = Stock("GOOG")
self.assertIsNone(stock.price)
def test_stock_update(self):
"""An update should set the price on the stock object.
Notes:
We will be using the `datetime` module for the timestamp.
"""
stock = Stock("GOOG")
stock.update(datetime(2014, 2, 12), price=10)
self.assertEqual(10, stock.price)
if __name__ == "__main__":
unittest.main()
|
Add comment and clean up code.
|
Add comment and clean up code.
|
Python
|
mit
|
bsmukasa/stock_alerter
|
9138112f3abef61a96485ade7e0b484a43429b81
|
tests/unit/actions.py
|
tests/unit/actions.py
|
"""Unit tests for `pycall.actions`."""
class TestActions(TestCase):
"""Test all `pycall.actions` classes to ensure they are actual
`pycall.actions.Action` subclasses.
"""
pass
|
"""Unit tests for `pycall.actions`."""
|
Revert "Adding test case stub."
|
Revert "Adding test case stub."
This reverts commit 6c6b08a63b308690144d73f54b98000e3b1b5672.
|
Python
|
unlicense
|
rdegges/pycall
|
ec6099421bad222595be15f4f0b2596952d8c9cc
|
username_to_uuid.py
|
username_to_uuid.py
|
""" Username to UUID
Converts a Minecraft username to it's UUID equivalent.
Uses the official Mojang API to fetch player data.
"""
import http.client
import json
class UsernameToUUID:
def __init__(self, username):
self.username = username
def get_uuid(self, timestamp=None):
"""
Get the UUID of the player.
Parameters
----------
timestamp : long integer
The time at which the player used this name, expressed as a Unix timestamp.
"""
get_args = "" if timestamp is None else "?at=" + str(timestamp)
http_conn = http.client.HTTPSConnection("api.mojang.com");
http_conn.request("GET", "/users/profiles/minecraft/" + self.username + get_args,
headers={'User-Agent':'Minecraft Username -> UUID', 'Content-Type':'application/json'});
response = http_conn.getresponse().read().decode("utf-8")
if (not response and timestamp is None): # No response & no timestamp
return self.get_uuid(0) # Let's retry with the Unix timestamp 0.
if (not response): # No response (player probably doesn't exist)
return ""
json_data = json.loads(response)
uuid = json_data['id']
return uuid
|
""" Username to UUID
Converts a Minecraft username to it's UUID equivalent.
Uses the official Mojang API to fetch player data.
"""
import http.client
import json
class UsernameToUUID:
def __init__(self, username):
self.username = username
def get_uuid(self, timestamp=None):
"""
Get the UUID of the player.
Parameters
----------
timestamp : long integer
The time at which the player used this name, expressed as a Unix timestamp.
"""
get_args = "" if timestamp is None else "?at=" + str(timestamp)
http_conn = http.client.HTTPSConnection("api.mojang.com");
http_conn.request("GET", "/users/profiles/minecraft/" + self.username + get_args,
headers={'User-Agent':'Minecraft Username -> UUID', 'Content-Type':'application/json'});
response = http_conn.getresponse().read().decode("utf-8")
if (not response and timestamp is None): # No response & no timestamp
return self.get_uuid(0) # Let's retry with the Unix timestamp 0.
if (not response): # No response (player probably doesn't exist)
return ""
json_data = json.loads(response)
try:
uuid = json_data['id']
except KeyError as e:
print("KeyError raised:", e);
return uuid
|
Improve robustness: surround the 'id' fetch from result array with a try clause.
|
Improve robustness: surround the 'id' fetch from result array with a try clause.
|
Python
|
mit
|
mrlolethan/MinecraftUsernameToUUID
|
88bd75c4b0e039c208a1471d84006cdfb4bbaf93
|
starbowmodweb/site/templatetags/bbformat.py
|
starbowmodweb/site/templatetags/bbformat.py
|
"""
This module defines all of our bbcode capabilities.
To add a new bbcode tag do the following:
def bbcode_<tag_name>(tag_name, value, options, parent, context):
return formatted_html
bbcode_parser.add_formatter("<tag_name>", func_name, **tag_options)
For more information on the different argumnents and options available see the bbcode docs:
http://bbcode.readthedocs.org/en/latest/
"""
from django import template
import bbcode
def bbcode_img(tag_name, value, options, parent, context):
if tag_name in options and 'x' in options[tag_name]:
options['width'], options['height'] = options[tag_name].split('x', 1)
del options[tag_name]
attrs = ' '.join([name+'="{}"' for name in options.keys()])
return ('<img src="{}" '+attrs+' />').format(value, *options.values())
bbcode_parser = bbcode.Parser()
bbcode_parser.add_formatter("img", bbcode_img, replace_links=False)
def bbformat(value):
return bbcode_parser.format(value)
register = template.Library()
register.filter('bbformat', bbformat)
|
"""
This module defines all of our bbcode capabilities.
To add a new bbcode tag do the following:
def bbcode_<tag_name>(tag_name, value, options, parent, context):
return formatted_html
bbcode_parser.add_formatter("<tag_name>", func_name, **tag_options)
For more information on the different argumnents and options available see the bbcode docs:
http://bbcode.readthedocs.org/en/latest/
"""
from django import template
import bbcode
def bbcode_img(tag_name, value, options, parent, context):
if tag_name in options and 'x' in options[tag_name]:
options['width'], options['height'] = options[tag_name].split('x', 1)
del options[tag_name]
attrs = ' '.join([name+'="{}"' for name in options.keys()])
return ('<img src="{}" '+attrs+' />').format(value, *options.values())
def bbcode_email(tag_name, value, options, parent, context):
return '<a href="mailto:{}">{}</a>'.format(value, value)
def bbcode_font(tag_name, value, options, parent, context):
return '<span style="font-family: {}">{}</span>'.format(options[tag_name], value)
bbcode_parser = bbcode.Parser()
bbcode_parser.add_formatter("img", bbcode_img, replace_links=False)
bbcode_parser.add_formatter("email", bbcode_email)
bbcode_parser.add_formatter("font", bbcode_font)
def bbformat(value):
return bbcode_parser.format(value)
register = template.Library()
register.filter('bbformat', bbformat)
|
Add support for email and font bbcode tags.
|
Add support for email and font bbcode tags.
|
Python
|
mit
|
Starbow/StarbowWebSite,Starbow/StarbowWebSite,Starbow/StarbowWebSite
|
4594ed6599d98f1773a6e393c617c3230a1d8bec
|
django_evolution/__init__.py
|
django_evolution/__init__.py
|
"""Django Evolution version and package information.
These variables and functions can be used to identify the version of
Review Board. They're largely used for packaging purposes.
"""
from __future__ import unicode_literals
# The version of Django Evolution
#
# This is in the format of:
#
# (Major, Minor, Micro, alpha/beta/rc/final, Release Number, Released)
#
VERSION = (2, 1, 0, 'alpha', 0, False)
def get_version_string():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
if VERSION[3] == 'rc':
version += ' RC%s' % VERSION[4]
else:
version += ' %s %s' % (VERSION[3], VERSION[4])
if not is_release():
version += " (dev)"
return version
def get_package_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
version += '%s%s' % (VERSION[3], VERSION[4])
return version
def is_release():
return VERSION[5]
__version_info__ = VERSION[:-1]
__version__ = get_package_version()
|
"""Django Evolution version and package information.
These variables and functions can be used to identify the version of
Review Board. They're largely used for packaging purposes.
"""
from __future__ import unicode_literals
# The version of Django Evolution
#
# This is in the format of:
#
# (Major, Minor, Micro, alpha/beta/rc/final, Release Number, Released)
#
VERSION = (2, 1, 0, 'alpha', 0, False)
def get_version_string():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
if VERSION[3] == 'rc':
version += ' RC%s' % VERSION[4]
else:
version += ' %s %s' % (VERSION[3], VERSION[4])
if not is_release():
version += " (dev)"
return version
def get_package_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
tag = VERSION[3]
if tag != 'final':
if tag == 'alpha':
tag = 'a'
elif tag == 'beta':
tag = 'b'
version += '%s%s' % (tag, VERSION[4])
return version
def is_release():
return VERSION[5]
__version_info__ = VERSION[:-1]
__version__ = get_package_version()
|
Remove a deprecation warning when computing the package version.
|
Remove a deprecation warning when computing the package version.
In pretty much all of our Python packages, we generate a package version
using legacy identifiers of "alpha" and "beta". These get turned into
"a" and "b" by `pkg_resources`, and a warning is thrown to inform us
that we're doing it wrong.
To reduce those warnings, this change converts the legacy naming to
modern naming when generating the package version.
Testing Done:
Built packages. Saw the correct identifier in the package file, without
seeing any warnings.
Reviewed at https://reviews.reviewboard.org/r/11264/
|
Python
|
bsd-3-clause
|
beanbaginc/django-evolution
|
14c872b3405326079ba01f9309622bb0188bf8ce
|
Install/toolbox/scripts/utils.py
|
Install/toolbox/scripts/utils.py
|
import sys
import collections
def parameters_from_args(defaults_tuple=None, sys_args):
"""Provided a set of tuples for default values, return a list of mapped
variables."""
defaults = collections.OrderedDict(defaults_tuple)
if defaults_tuple is not None:
args = len(sys_args) - 1
for i, key in enumerate(defaults.keys()):
idx = i + 1
if idx <= args:
defaults[key] = sys_args[idx]
return defaults
def msg(output_msg, mtype='message', exception=None):
if mtype == 'error':
arcpy_messages = arcpy.GetMessages()
tb = sys.exc_info()[2]
tbinfo = traceback.format_tb(tb)[0]
if config.mode == 'script':
if exception:
# print the raw exception
print exception
# Arcpy and Python stuff, hopefully also helpful
err_msg = "ArcPy Error: {msg_text}\nPython Error: ${tbinfo}".format(
msg_text=arcpy_messages, tbinfo=tbinfo)
else:
arcpy.AddMessage(output_msg)
if exception:
arcpy.AddError(exception)
arcpy.AddError(arcpy_messages)
arcpy.AddMessage("Python Error: ${tbinfo}".format(tbinfo=tbinfo))
elif config.mode == 'script':
print output_msg
else:
if mtype == 'message':
arcpy.AddMessage(output_msg)
elif mtype == 'warning':
arcpy.AddWarning(output_msg)
|
# -*- coding: utf-8 -*-
import csv
import collections
import sys
import re
import os
import binascii
def parameters_from_args(defaults_tuple=None, sys_args):
"""Provided a set of tuples for default values, return a list of mapped
variables."""
defaults = collections.OrderedDict(defaults_tuple)
if defaults_tuple is not None:
args = len(sys_args) - 1
for i, key in enumerate(defaults.keys()):
idx = i + 1
if idx <= args:
defaults[key] = sys_args[idx]
return defaults
def msg(output_msg, mtype='message', exception=None):
if mtype == 'error':
arcpy_messages = arcpy.GetMessages()
tb = sys.exc_info()[2]
tbinfo = traceback.format_tb(tb)[0]
if config.mode == 'script':
if exception:
# print the raw exception
print exception
# Arcpy and Python stuff, hopefully also helpful
err_msg = "ArcPy Error: {msg_text}\nPython Error: ${tbinfo}".format(
msg_text=arcpy_messages, tbinfo=tbinfo)
else:
arcpy.AddMessage(output_msg)
if exception:
arcpy.AddError(exception)
arcpy.AddError(arcpy_messages)
arcpy.AddMessage("Python Error: ${tbinfo}".format(tbinfo=tbinfo))
elif config.mode == 'script':
print output_msg
else:
if mtype == 'message':
arcpy.AddMessage(output_msg)
elif mtype == 'warning':
arcpy.AddWarning(output_msg)
|
Update includes; set encoding for file.
|
Update includes; set encoding for file.
|
Python
|
mpl-2.0
|
genegis/genegis,genegis/genegis,genegis/genegis
|
d4c9603e4c5913b02746af3dec21f682d906e001
|
nn/file/__init__.py
|
nn/file/__init__.py
|
import functools
import tensorflow as tf
from . import cnn_dailymail_rc
from .. import collections
from ..flags import FLAGS
from ..util import func_scope, dtypes
from .util import batch_queue, add_queue_runner
READERS = { "cnn_dailymail_rc": cnn_dailymail_rc.read_files }
@func_scope()
def read_files(file_pattern, file_format):
return monitored_batch_queue(
*READERS[file_format](_file_pattern_to_names(file_pattern)))
@func_scope()
def _file_pattern_to_names(pattern):
return tf.train.string_input_producer(tf.train.match_filenames_once(pattern),
num_epochs=FLAGS.num_epochs,
capacity=FLAGS.filename_queue_capacity)
@func_scope()
def monitored_batch_queue(*tensors):
queue = batch_queue(dtypes(*tensors))
collections.add_metric(queue.size(), "batches_in_queue")
add_queue_runner(queue, [queue.enqueue(tensors)])
results = queue.dequeue()
for tensor, result in zip(tensors, results):
result.set_shape(tensor.get_shape())
return results
|
import functools
import tensorflow as tf
from . import cnn_dailymail_rc
from .. import collections
from ..flags import FLAGS
from ..util import func_scope, dtypes
from .util import batch_queue, add_queue_runner
READERS = { "cnn_dailymail_rc": cnn_dailymail_rc.read_files }
@func_scope()
def read_files(file_pattern, file_format):
return monitored_batch_queue(
*READERS[file_format](_file_pattern_to_names(file_pattern)))
@func_scope()
def _file_pattern_to_names(pattern):
return tf.train.string_input_producer(tf.train.match_filenames_once(pattern),
num_epochs=FLAGS.num_epochs,
capacity=FLAGS.filename_queue_capacity)
@func_scope()
def monitored_batch_queue(*tensors, metric_name="batches_in_queue"):
queue = batch_queue(dtypes(*tensors))
collections.add_metric(queue.size(), metric_name)
add_queue_runner(queue, [queue.enqueue(tensors)])
results = queue.dequeue()
for tensor, result in zip(tensors, results):
result.set_shape(tensor.get_shape())
return results
|
Make metric_name option of monitored_batch_queue
|
Make metric_name option of monitored_batch_queue
|
Python
|
unlicense
|
raviqqe/tensorflow-extenteten,raviqqe/tensorflow-extenteten
|
02f8f992aca37d21b9ae119f13b46de8eb1541ae
|
gsl/utils.py
|
gsl/utils.py
|
#!/usr/bin/env python
import logging
logging.basicConfig(level=logging.INFO)
log = logging.getLogger()
def yield_packages(handle, meta=False, retcode=None):
for lineno, line in enumerate(handle):
if line.startswith('#'):
continue
try:
data = line.split('\t')
keys = ['id', 'version', 'platform', 'arch', 'url', 'sha', 'size',
'alt_url', 'comment']
if len(data) != len(keys):
log.error('[%s] data has wrong number of columns. %s != %s', lineno + 1, len(data), len(keys))
ld = {k: v for (k, v) in zip(keys, line.split('\t'))}
if meta:
yield ld, lineno, line, retcode
else:
yield ld
except Exception, e:
log.error(str(e))
|
#!/usr/bin/env python
import logging
logging.basicConfig(level=logging.INFO)
log = logging.getLogger()
def yield_packages(handle, meta=False, retcode=None):
for lineno, line in enumerate(handle):
if line.startswith('#'):
continue
try:
data = line.split('\t')
keys = ['id', 'version', 'platform', 'arch', 'url', 'sha', 'size',
'alt_url', 'comment']
if len(data) != len(keys):
log.error('[%s] data has wrong number of columns. %s != %s', lineno + 1, len(data), len(keys))
retcode = 1
ld = {k: v for (k, v) in zip(keys, line.split('\t'))}
if meta:
yield ld, lineno, line, retcode
else:
yield ld
except Exception, e:
log.error(str(e))
|
Return retcode properly iff erroring
|
Return retcode properly iff erroring
fixes #13
|
Python
|
mit
|
erasche/community-package-cache,erasche/community-package-cache,gregvonkuster/cargo-port,erasche/community-package-cache,galaxyproject/cargo-port,galaxyproject/cargo-port,gregvonkuster/cargo-port,gregvonkuster/cargo-port
|
508167ee3c289258857aee0963d4917c39201d9a
|
tailor/listeners/mainlistener.py
|
tailor/listeners/mainlistener.py
|
from tailor.swift.swiftlistener import SwiftListener
from tailor.utils.charformat import isUpperCamelCase
class MainListener(SwiftListener):
def enterClassName(self, ctx):
className = ctx.getText()
if not isUpperCamelCase(className):
print('Line', str(ctx.start.line) + ':', 'Class names should be in UpperCamelCase')
def enterEnumName(self, ctx):
pass
def enterEnumCaseName(self, ctx):
pass
def exitStructName(self, ctx):
pass
|
from tailor.swift.swiftlistener import SwiftListener
from tailor.utils.charformat import isUpperCamelCase
class MainListener(SwiftListener):
def enterClassName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Class names should be in UpperCamelCase')
def enterEnumName(self, ctx):
pass
def enterEnumCaseName(self, ctx):
pass
def enterStructName(self, ctx):
pass
@staticmethod
def __verify_upper_camel_case(ctx, err_msg):
construct_name = ctx.getText()
if not isUpperCamelCase(construct_name):
print('Line', str(ctx.start.line) + ':', err_msg)
|
Add method to handle UpperCamelCase verification
|
Add method to handle UpperCamelCase verification
|
Python
|
mit
|
sleekbyte/tailor,sleekbyte/tailor,sleekbyte/tailor,sleekbyte/tailor,sleekbyte/tailor
|
a9abf8361f8728dfb1ef18a27c5eaad84ca2f054
|
accounting/apps/clients/forms.py
|
accounting/apps/clients/forms.py
|
from django.forms import ModelForm
from .models import Client
class ClientForm(ModelForm):
class Meta:
model = Client
fields = (
"name",
"address_line_1",
"address_line_2",
"city",
"postal_code",
"country",
)
|
from django.forms import ModelForm
from .models import Client
class ClientForm(ModelForm):
class Meta:
model = Client
fields = (
"name",
"address_line_1",
"address_line_2",
"city",
"postal_code",
"country",
"organization",
)
|
Add the relationship field to the Client form
|
Add the relationship field to the Client form
|
Python
|
mit
|
kenjhim/django-accounting,dulaccc/django-accounting,kenjhim/django-accounting,kenjhim/django-accounting,dulaccc/django-accounting,dulaccc/django-accounting,kenjhim/django-accounting,dulaccc/django-accounting
|
a485998447ffbe5a19ce8f9b49e61ac313c8241a
|
glitter_events/search_indexes.py
|
glitter_events/search_indexes.py
|
# -*- coding: utf-8 -*-
from haystack import indexes
from .models import Event
class EventIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
def get_model(self):
return Event
def index_queryset(self, using=None):
return self.get_model().objects.published().select_related()
|
# -*- coding: utf-8 -*-
import datetime
from django.conf import settings
from django.utils import timezone
from haystack import indexes
from .models import Event
class EventIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
def get_model(self):
return Event
def index_queryset(self, using=None):
if getattr(settings, 'GLITTER_EVENTS_SEARCH_INDEX_EXPIRED', None):
today = datetime.datetime.combine(date=datetime.date.today(), time=datetime.time.min)
today = timezone.make_aware(today)
qs = self.get_model().objects.filter(start__gte=today).select_related()
else:
qs = self.get_model().objects.published().select_related()
return qs
|
Add option to ignore expired events for the search index
|
Add option to ignore expired events for the search index
|
Python
|
bsd-3-clause
|
blancltd/django-glitter-events,blancltd/django-glitter-events
|
cccd6e8fe76fc96b39791912ecfd07f867d8dacc
|
cms/djangoapps/export_course_metadata/management/commands/export_course_metadata_for_all_courses.py
|
cms/djangoapps/export_course_metadata/management/commands/export_course_metadata_for_all_courses.py
|
"""
Export course metadata for all courses
"""
from django.core.management.base import BaseCommand
from xmodule.modulestore.django import modulestore
from cms.djangoapps.export_course_metadata.signals import export_course_metadata
class Command(BaseCommand):
"""
Export course metadata for all courses
"""
help = 'Export course metadata for all courses'
def handle(self, *args, **options):
"""
Execute the command
"""
export_course_metadata_for_all_courses()
def export_course_metadata_for_all_courses():
"""
Export course metadata for all courses
"""
module_store = modulestore()
courses = module_store.get_courses()
for course in courses:
export_course_metadata(None, course.id)
|
"""
Export course metadata for all courses
"""
from django.core.management.base import BaseCommand
from xmodule.modulestore.django import modulestore
from cms.djangoapps.export_course_metadata.signals import export_course_metadata
from cms.djangoapps.export_course_metadata.tasks import export_course_metadata_task
class Command(BaseCommand):
"""
Export course metadata for all courses
"""
help = 'Export course metadata for all courses'
def handle(self, *args, **options):
"""
Execute the command
"""
export_course_metadata_for_all_courses()
def export_course_metadata_for_all_courses():
"""
Export course metadata for all courses
"""
module_store = modulestore()
courses = module_store.get_courses()
for course in courses:
export_course_metadata_task.delay(str(course.id))
|
Call celery task directly from management command instead of calling the signal
|
Call celery task directly from management command instead of calling the signal
AA-461
|
Python
|
agpl-3.0
|
eduNEXT/edunext-platform,EDUlib/edx-platform,arbrandes/edx-platform,edx/edx-platform,angelapper/edx-platform,edx/edx-platform,eduNEXT/edx-platform,eduNEXT/edx-platform,EDUlib/edx-platform,eduNEXT/edunext-platform,eduNEXT/edunext-platform,angelapper/edx-platform,EDUlib/edx-platform,eduNEXT/edx-platform,angelapper/edx-platform,edx/edx-platform,angelapper/edx-platform,edx/edx-platform,eduNEXT/edunext-platform,arbrandes/edx-platform,EDUlib/edx-platform,eduNEXT/edx-platform,arbrandes/edx-platform,arbrandes/edx-platform
|
e04cec6c4260a181c773371406323758d9f162bf
|
examples/adaptive_scan_demo.py
|
examples/adaptive_scan_demo.py
|
import matplotlib.pyplot as plt
from bluesky import RunEngine, Mover, SynGauss
from bluesky.examples import adaptive_scan
RE = RunEngine()
RE.verbose = False
motor = Mover('motor', ['pos'])
det = SynGauss('det', motor, 'pos', center=0, Imax=1, sigma=1)
def live_scalar_plotter(ax, y, x):
x_data, y_data = [], []
line, = ax.plot([], [], 'ro', markersize=10)
def update_plot(doc):
# Update with the latest data.
x_data.append(doc['data'][x])
y_data.append(doc['data'][y])
line.set_data(x_data, y_data)
# Rescale and redraw.
ax.relim(visible_only=True)
ax.autoscale_view(tight=True)
ax.figure.canvas.draw()
ax.figure.canvas.flush_events()
return update_plot
fig, ax = plt.subplots()
plt.show()
ax.set_xlim([-15, 5])
ax.set_ylim([0, 2])
# Point the function to our axes above, and specify what to plot.
my_plotter = live_scalar_plotter(ax, 'det', 'pos')
ad_scan = adaptive_scan(motor, det, 'pos', 'det', -15, 5, .01, 1, .05)
RE.run(ad_scan, subscriptions={'event': my_plotter})
|
import matplotlib.pyplot as plt
from bluesky import RunEngine
from bluesky.scans import AdaptiveAscan
from bluesky.examples import Mover, SynGauss
from bluesky.callbacks import LivePlot, LiveTable
from bluesky.tests.utils import setup_test_run_engine
#plt.ion()
RE = setup_test_run_engine()
motor = Mover('motor', ['pos'])
det = SynGauss('det', motor, 'pos', center=0, Imax=1, sigma=1)
#fig, ax = plt.subplots()
#ax.set_xlim([-15, 5])
#ax.set_ylim([0, 2])
# Point the function to our axes above, and specify what to plot.
#my_plotter = LivePlot('det', 'pos')
table = LiveTable(['det', 'pos'])
ad_scan = AdaptiveAscan(motor, [det], 'det', -15, 5, .01, 1, .05, True)
RE(ad_scan, subscriptions={'all': [table]}) #, my_plotter})
|
Fix adaptive example with LivePlot
|
WIP: Fix adaptive example with LivePlot
|
Python
|
bsd-3-clause
|
dchabot/bluesky,sameera2004/bluesky,ericdill/bluesky,klauer/bluesky,ericdill/bluesky,sameera2004/bluesky,dchabot/bluesky,klauer/bluesky
|
c3ed431f97e4ca24a00ff979a5204d65b251dd87
|
greenlight/views/__init__.py
|
greenlight/views/__init__.py
|
from .base import APIView
from django.http import Http404
from three import Three
class QCThree(Three):
def __init__(self):
self.endpoint = "http://dev-api.ville.quebec.qc.ca/open311/v2/"
self.format = "json"
self.jurisdiction = "ville.quebec.qc.ca"
QC_three = QCThree()
class ServicesView(APIView):
def get(self, request):
return self.OkAPIResponse(QC_three.services())
class RequestsView(APIView):
def get(self, request):
return self.OkAPIResponse(QC_three.requests())
class RequestView(APIView):
def get(self, request, id):
requests = QC_three.request(id)
if requests:
return self.OkAPIResponse(requests[0])
else:
raise Http404
|
from three import Three
from django.http import Http404
from .base import APIView
QC_three = Three(
endpoint = "http://dev-api.ville.quebec.qc.ca/open311/v2/",
format = "json",
jurisdiction = "ville.quebec.qc.ca",
)
class ServicesView(APIView):
def get(self, request):
return self.OkAPIResponse(QC_three.services())
class RequestsView(APIView):
def get(self, request):
return self.OkAPIResponse(QC_three.requests())
class RequestView(APIView):
def get(self, request, id):
requests = QC_three.request(id)
if requests:
return self.OkAPIResponse(requests[0])
else:
raise Http404
|
Initialize the three API wrapper differently to fix a bug.
|
Initialize the three API wrapper differently to fix a bug.
|
Python
|
mit
|
ironweb/lesfeuxverts-backend
|
9c11fa9d0a26d1e4caa47d2b3f0f1bf92cf8e965
|
examples/enable/gadgets/vu_demo.py
|
examples/enable/gadgets/vu_demo.py
|
from traits.api import HasTraits, Instance
from traitsui.api import View, UItem, Item, RangeEditor, Group, VGroup, HGroup
from enable.api import ComponentEditor
from enable.gadgets.vu_meter import VUMeter
class Demo(HasTraits):
vu = Instance(VUMeter)
traits_view = \
View(
HGroup(
VGroup(
VGroup(
Group(
UItem('vu', editor=ComponentEditor(size=(60, 60)),
style='custom'),
),
Item('object.vu.percent',
editor=RangeEditor(low=0.0, high=200.0,
mode='slider')),
),
'_',
VGroup(
Item('object.vu.angle', label="angle",
editor=RangeEditor(low=0.0, high=89.0,
mode='slider')),
Item('object.vu._beta',
editor=RangeEditor(low=0.0, high=1.0,
mode='slider')),
),
),
),
width=450,
height=380,
title="VU Meter",
resizable=True,
)
if __name__ == "__main__":
color = (0.9, 0.85, 0.7)
vu = VUMeter(border_visible=True, border_width=2, bgcolor=color)
demo = Demo(vu=vu)
demo.configure_traits()
|
from traits.api import HasTraits, Instance
from traitsui.api import View, UItem, Item, RangeEditor, Group, VGroup
from enable.api import ComponentEditor
from enable.gadgets.vu_meter import VUMeter
class Demo(HasTraits):
vu = Instance(VUMeter)
traits_view = \
View(
VGroup(
Group(
UItem('vu', editor=ComponentEditor(size=(60, 60)),
style='custom'),
),
Item('object.vu.percent',
editor=RangeEditor(low=0.0, high=200.0,
mode='slider')),
),
'_',
VGroup(
Item('object.vu.angle', label="angle",
editor=RangeEditor(low=0.0, high=89.0,
mode='slider')),
Item('object.vu._beta',
editor=RangeEditor(low=0.0, high=1.0,
mode='slider')),
),
width=450,
height=380,
title="VU Meter",
resizable=True,
)
if __name__ == "__main__":
color = (0.9, 0.85, 0.7)
vu = VUMeter(border_visible=True, border_width=2, bgcolor=color)
demo = Demo(vu=vu)
demo.configure_traits()
|
Remove extraneous Groups from the View in the VU Meter demo.
|
Remove extraneous Groups from the View in the VU Meter demo.
|
Python
|
bsd-3-clause
|
tommy-u/enable,tommy-u/enable,tommy-u/enable,tommy-u/enable
|
8e8986a17b7fa38417fe39ec8fbf4e1d3ee43f64
|
arduino_flasher/reset_arduino.py
|
arduino_flasher/reset_arduino.py
|
#!/usr/local/bin/python
import mraa
import time
resetPin = mraa.Gpio(8)
resetPin.dir(mraa.DIR_OUT)
resetPin.write(0)
time.sleep(0.2)
resetPin.write(1)
|
#!/usr/local/bin/python
import mraa
import time
resetPin = mraa.Gpio(8)
resetPin.dir(mraa.DIR_OUT)
resetPin.write(1)
time.sleep(0.2)
resetPin.write(0)
time.sleep(0.2)
resetPin.write(1)
|
Reset script first pulls pin high
|
Reset script first pulls pin high
|
Python
|
bsd-3-clause
|
Pavlos1/SensoringJMSS,Pavlos1/SensoringJMSS,Pavlos1/SensoringJMSS,Pavlos1/SensoringJMSS
|
1371f1a1b2914a7f2e328f69bdc599c1eada54db
|
Python-practice/fy_print_seq_len_in_fasta.py
|
Python-practice/fy_print_seq_len_in_fasta.py
|
#!/usr/bin/env python
'''
Script: fy_print_seq_len_in_fasta.py
Function: Print sequence length to STDOUT in fasta file
Note: Python3 is not default installed for most computer,
and the extra-installed module like Biopython could
not be directly used by python3. So, it's not the
righ time to use Python3 now.
Date: 2014/11/11
'''
import sys
if len(sys.argv) < 2:
print('Usage: ' + sys.argv[0] + ' <FASTA>')
sys.exit()
from Bio import SeqIO
for record in SeqIO.parse(sys.argv[1], 'fasta'):
print("%s %i" % (record.id, len(record)))
|
#!/usr/bin/env python
'''
Script: fy_print_seq_len_in_fasta.py
Function: Print sequence length to STDOUT in fasta file
Note: Python3 is not default installed for most computer,
and the extra-installed module like Biopython could
not be directly used by python3. So, it's not the
righ time to use Python3 now.
Date: 2014/11/11
'''
import sys
if len(sys.argv) < 2:
print('Usage: ' + sys.argv[0] + ' <FASTA>')
sys.exit()
from Bio import SeqIO
seqlen = []
num_of_seq = 0
total_len = 0
for record in SeqIO.parse(sys.argv[1], 'fasta'):
print("%s %i" % (record.id, len(record)))
num_of_seq += 1
total_len += len(record)
seqlen.append(len(record))
seqlen.sort()
min_len = seqlen[0]
max_len = seqlen[-1]
print("Number of sequences: " + str(num_of_seq))
print("Total length: " + str(total_len))
print("Max length: " + str(max_len))
print("Min length: " + str(min_len))
|
Add the statistics information of sequence length
|
Add the statistics information of sequence length
Include number of sequences, total length, maximum length, and minimum
length
|
Python
|
bsd-2-clause
|
lileiting/gfat
|
24c6e2852e319ec0d0f4e8d0539bc69a9915c3e7
|
tests/server/test_server.py
|
tests/server/test_server.py
|
import logging
import mock
import oauth2u
import oauth2u.server.log
def test_should_have_optional_port():
server = oauth2u.Server()
assert 8000 == server.port
def test_should_accept_custom_port():
server = oauth2u.Server(8888)
assert 8888 == server.port
def test_should_configure_log_with_default_configurations(monkeypatch):
log_mock = mock.Mock()
monkeypatch.setattr(oauth2u.server, 'log', log_mock)
server = oauth2u.Server()
assert 1 == log_mock.configure.call_count
log_mock.configure.assert_called_with()
def test_should_override_default_log_parameters(monkeypatch):
log_mock = mock.Mock()
monkeypatch.setattr(oauth2u.server, 'log', log_mock)
server = oauth2u.Server(log_config={'format': '%(message)s'})
assert 1 == log_mock.configure.call_count
log_mock.configure.assert_called_with(format='%(message)s')
|
import logging
import mock
import oauth2u
import oauth2u.server.log
def teardown_function(func):
logging.disable(logging.INFO)
def test_should_have_optional_port():
server = oauth2u.Server()
assert 8000 == server.port
def test_should_accept_custom_port():
server = oauth2u.Server(8888)
assert 8888 == server.port
def test_should_configure_log_with_default_configurations(monkeypatch):
log_mock = mock.Mock()
monkeypatch.setattr(oauth2u.server, 'log', log_mock)
server = oauth2u.Server()
assert 1 == log_mock.configure.call_count
log_mock.configure.assert_called_with()
def test_should_override_default_log_parameters(monkeypatch):
log_mock = mock.Mock()
monkeypatch.setattr(oauth2u.server, 'log', log_mock)
server = oauth2u.Server(log_config={'format': '%(message)s'})
assert 1 == log_mock.configure.call_count
log_mock.configure.assert_called_with(format='%(message)s')
|
Disable logging on test runner process
|
Disable logging on test runner process
|
Python
|
mit
|
globocom/oauth2u,globocom/oauth2u
|
1e29540ee08ca8faaed5e3a8ab1ac9def290155b
|
fileconversions/file_converter.py
|
fileconversions/file_converter.py
|
from . import conversions
from .file_formats import FileFormats
class FileConverter(object):
def get_conversion(self, source_format, target_format):
return {
'application/pdf': conversions.NoOp,
'image/jpeg': conversions.JpegToPdf,
'image/png': conversions.PngToPdf,
'image/gif': conversions.GifToPdf,
'image/tiff': conversions.TiffToPdf,
'text/plain': conversions.TextToPdf,
'application/vnd.openxmlformats-officedocument.wordprocessingml.document': conversions.DocxToPdf,
'application/msword': conversions.DocToPdf,
'application/vnd.openxmlformats-officedocument.presentationml.presentation': conversions.PptxToPdf,
'application/vnd.ms-powerpoint': conversions.PptToPdf,
'application/vnd.oasis.opendocument.text': conversions.OdtToPdf,
'application/rtf': conversions.RtfToPdf,
}[source_format]()
|
from . import conversions
from .file_formats import FileFormats
class FileConverter(object):
def get_conversion(self, source_format, target_format):
return {
FileFormats.PDF: conversions.NoOp,
FileFormats.JPEG: conversions.JpegToPdf,
FileFormats.PNG: conversions.PngToPdf,
FileFormats.GIF: conversions.GifToPdf,
FileFormats.TIFF: conversions.TiffToPdf,
FileFormats.TXT: conversions.TextToPdf,
FileFormats.DOCX: conversions.DocxToPdf,
FileFormats.DOC: conversions.DocToPdf,
FileFormats.PPTX: conversions.PptxToPdf,
FileFormats.PPT: conversions.PptToPdf,
FileFormats.ODT: conversions.OdtToPdf,
FileFormats.RTF: conversions.RtfToPdf,
}[source_format]()
|
Fix how we find conversion to use file formats
|
Fix how we find conversion to use file formats
|
Python
|
mit
|
wilbertom/fileconversions
|
5aad2212340a5eba4bbf4615d58ed6b3c205bc7f
|
fabtools/tests/fabfiles/python.py
|
fabtools/tests/fabfiles/python.py
|
from __future__ import with_statement
from fabric.api import *
from fabtools import require
import fabtools
@task
def python():
"""
Check Python package installation
"""
require.python.virtualenv('/tmp/venv')
assert fabtools.files.is_dir('/tmp/venv')
assert fabtools.files.is_file('/tmp/venv/bin/python')
with fabtools.python.virtualenv('/tmp/venv'):
require.python.package('fabric')
assert fabtools.files.is_file('/tmp/venv/bin/fab')
|
from __future__ import with_statement
from fabric.api import task
@task
def python_virtualenv():
"""
Test Python virtualenv creation
"""
from fabtools import require
import fabtools
require.python.virtualenv('/tmp/venv')
assert fabtools.files.is_dir('/tmp/venv')
assert fabtools.files.is_file('/tmp/venv/bin/python')
@task
def python_package():
"""
Test Python package installation
"""
from fabtools import require
import fabtools
require.python.virtualenv('/tmp/venv')
with fabtools.python.virtualenv('/tmp/venv'):
require.python.package('fabric', download_cache='/var/cache/pip')
assert fabtools.files.is_file('/tmp/venv/bin/fab')
|
Speed up Python tests by caching pip downloads
|
Speed up Python tests by caching pip downloads
|
Python
|
bsd-2-clause
|
prologic/fabtools,ahnjungho/fabtools,ronnix/fabtools,davidcaste/fabtools,pombredanne/fabtools,pahaz/fabtools,badele/fabtools,AMOSoft/fabtools,wagigi/fabtools-python,fabtools/fabtools,hagai26/fabtools,n0n0x/fabtools-python,sociateru/fabtools,bitmonk/fabtools
|
c27a1fc4c0251b896667e21a0a88fb44a403242f
|
cistern/migrations.py
|
cistern/migrations.py
|
import os
from playhouse.migrate import *
cistern_folder = os.getenv('CISTERNHOME', os.path.join(os.environ['HOME'], '.cistern'))
db = SqliteDatabase(os.path.join(cistern_folder, 'cistern.db'))
migrator = SqliteMigrator(db)
date_added = DateTimeField(default=None)
migrate(
migrator.add_column('torrent', 'date_added', date_added)
)
|
import datetime
import os
from playhouse.migrate import *
def update():
cistern_folder = os.getenv('CISTERNHOME', os.path.join(os.environ['HOME'], '.cistern'))
db = SqliteDatabase(os.path.join(cistern_folder, 'cistern.db'))
migrator = SqliteMigrator(db)
date_added = DateTimeField(default=datetime.datetime.now)
migrate(
migrator.add_column('torrent', 'date_added', date_added)
)
|
Move migration to a function
|
Move migration to a function
|
Python
|
mit
|
archangelic/cistern
|
5e6d52277e34c254bad6b386cf05f490baf6a6f2
|
webapp-django/accounts/models.py
|
webapp-django/accounts/models.py
|
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
class UserProfile(models.Model):
user = models.OneToOneField(User)
bio = models.TextField(max_length=256, blank=True)
solvedChallenges=models.CharField(solved=[],max_length=256)
solvedQuestions=models.CharField(solved=[],max_length=256)
score = models.IntegerField(default=0)
def __str__(self):
return str(self.user.username)
# Method to link the User and UserProfile models
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user=instance)
instance.userprofile.save()
|
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from challenges.models import Challenge
from questionnaire.models import Question
class UserProfile(models.Model):
user = models.OneToOneField(User)
bio = models.TextField(max_length=256, blank=True)
solved_challenges = models.ManyToManyField(Challenge)
solved_questions = models.ManyToManyField(Question)
score = models.IntegerField(default=0, editable=False)
def __str__(self):
return str(self.user.username)
def calculate_score(self):
score = 0
for chal in self.solved_challenges.all():
score = score + chal.score
for ques in self.solved_questions.all():
score = score + ques.score
return score
def save(self, *args, **kwargs):
'''On save, update score '''
self.score = self.calculate_score()
return super(UserProfile, self).save(*args, **kwargs)
# Method to link the User and UserProfile models
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user=instance)
instance.userprofile.save()
|
Update accounts model with scoring system
|
Update accounts model with scoring system
|
Python
|
mit
|
super1337/Super1337-CTF,super1337/Super1337-CTF,super1337/Super1337-CTF
|
39e03951ec882f4dbff1ef4c42a71339d2a5d4fa
|
gaphor/UML/tests/test_activity.py
|
gaphor/UML/tests/test_activity.py
|
import pytest
from gaphor import UML
from gaphor.ui.diagrampage import tooliter
from gaphor.UML.toolbox import uml_toolbox_actions
@pytest.fixture
def action_factory():
return next(
t for t in tooliter(uml_toolbox_actions) if t.id == "toolbox-action"
).item_factory
def test_create_action_should_create_an_activity(diagram, action_factory):
action = action_factory(diagram)
assert action.subject.activity
assert action.subject.owner is action.subject.activity
def test_create_action_should_add_to_existing_activity(
diagram, action_factory, element_factory
):
activity = element_factory.create(UML.Activity)
action = action_factory(diagram)
assert action.subject.activity is activity
def test_create_action_should_add_to_existing_activity_in_package(
diagram, action_factory, element_factory
):
package = element_factory.create(UML.Package)
diagram.package = package
activity = element_factory.create(UML.Activity)
activity.package = package
action = action_factory(diagram)
assert action.subject.activity is activity
|
import pytest
from gaphor import UML
from gaphor.ui.diagrampage import tooliter
from gaphor.UML.toolbox import uml_toolbox_actions
activity_node_names = [
"action",
"initial-node",
"activity-final-node",
"flow-final-node",
"decision-node",
"fork-node",
"object-node",
"send-signal-action",
"accept-event-action",
]
@pytest.fixture
def item_factory(request):
return next(
t for t in tooliter(uml_toolbox_actions) if t.id == f"toolbox-{request.param}"
).item_factory
@pytest.mark.parametrize("item_factory", activity_node_names, indirect=True)
def test_create_action_should_create_an_activity(diagram, item_factory):
action = item_factory(diagram)
assert action.subject.activity
assert action.subject.owner is action.subject.activity
@pytest.mark.parametrize("item_factory", activity_node_names, indirect=True)
def test_create_action_should_add_to_existing_activity(
diagram, item_factory, element_factory
):
activity = element_factory.create(UML.Activity)
action = item_factory(diagram)
assert action.subject.activity is activity
@pytest.mark.parametrize("item_factory", activity_node_names, indirect=True)
def test_create_action_should_add_to_existing_activity_in_package(
diagram, item_factory, element_factory
):
package = element_factory.create(UML.Package)
diagram.package = package
activity = element_factory.create(UML.Activity)
activity.package = package
action = item_factory(diagram)
assert action.subject.activity is activity
|
Test all activity nodes for namespacing
|
Test all activity nodes for namespacing
|
Python
|
lgpl-2.1
|
amolenaar/gaphor,amolenaar/gaphor
|
862301e319be09d3c163c8248f18ed23c3b1fab5
|
mla_game/apps/transcript/urls.py
|
mla_game/apps/transcript/urls.py
|
from django.conf.urls import url
urlpatterns = [
url(r'^upload-batch/', 'mla_game.apps.transcript.views.upload_batch', name='upload-batch'),
]
|
from django.conf.urls import url
urlpatterns = [
]
|
Remove unused URL, will revisit later
|
Remove unused URL, will revisit later
|
Python
|
mit
|
WGBH/FixIt,WGBH/FixIt,WGBH/FixIt
|
2a843e46fabf616517847a304170fbce75afd167
|
zeus/api/resources/auth_index.py
|
zeus/api/resources/auth_index.py
|
import json
from zeus import auth
from zeus.api import client
from zeus.exceptions import ApiError
from zeus.models import Email, Identity
from .base import Resource
from ..schemas import EmailSchema, IdentitySchema, UserSchema
emails_schema = EmailSchema(many=True, strict=True)
identities_schema = IdentitySchema(many=True, strict=True)
user_schema = UserSchema(strict=True)
class AuthIndexResource(Resource):
auth_required = False
def get(self):
"""
Return information on the currently authenticated user.
"""
try:
user_response = client.get("/users/me")
except ApiError as exc:
if exc.code == 401:
return {"isAuthenticated": False}
user = json.loads(user_response.data)
identity_list = list(Identity.query.filter(Identity.user_id == user["id"]))
email_list = list(Email.query.filter(Email.user_id == user["id"]))
return {
"isAuthenticated": True,
"user": user,
"emails": emails_schema.dump(email_list).data,
"identities": identities_schema.dump(identity_list).data,
}
def delete(self):
"""
Logout.
"""
auth.logout()
return {"isAuthenticated": False, "user": None}
|
import json
from zeus import auth
from zeus.api import client
from zeus.exceptions import ApiError
from zeus.models import Email, Identity
from .base import Resource
from ..schemas import EmailSchema, IdentitySchema, UserSchema
emails_schema = EmailSchema(many=True, strict=True)
identities_schema = IdentitySchema(many=True, strict=True)
user_schema = UserSchema(strict=True)
class AuthIndexResource(Resource):
auth_required = False
def get(self):
"""
Return information on the currently authenticated user.
"""
try:
user_response = client.get("/users/me")
except ApiError as exc:
if exc.code == 401:
return {"isAuthenticated": False}
raise
user = json.loads(user_response.data)
identity_list = list(Identity.query.filter(Identity.user_id == user["id"]))
email_list = list(Email.query.filter(Email.user_id == user["id"]))
return {
"isAuthenticated": True,
"user": user,
"emails": emails_schema.dump(email_list).data,
"identities": identities_schema.dump(identity_list).data,
}
def delete(self):
"""
Logout.
"""
auth.logout()
return {"isAuthenticated": False, "user": None}
|
Raise non-auth errors from GitHub
|
fix: Raise non-auth errors from GitHub
|
Python
|
apache-2.0
|
getsentry/zeus,getsentry/zeus,getsentry/zeus,getsentry/zeus
|
9174de810bc4be3376521eecdb82a84486591e73
|
oslo_utils/_i18n.py
|
oslo_utils/_i18n.py
|
# Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""oslo.i18n integration module.
See http://docs.openstack.org/developer/oslo.i18n/usage.html .
"""
from oslo import i18n
_translators = i18n.TranslatorFactory(domain='oslo.utils')
# The primary translation function using the well-known name "_"
_ = _translators.primary
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
|
# Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""oslo.i18n integration module.
See http://docs.openstack.org/developer/oslo.i18n/usage.html .
"""
import oslo_i18n
_translators = oslo_i18n.TranslatorFactory(domain='oslo.utils')
# The primary translation function using the well-known name "_"
_ = _translators.primary
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
|
Update Oslo imports to remove namespace package
|
Update Oslo imports to remove namespace package
Change-Id: I4ec9b2a310471e4e07867073e9577731ac34027d
Blueprint: drop-namespace-packages
|
Python
|
apache-2.0
|
magic0704/oslo.utils,varunarya10/oslo.utils,openstack/oslo.utils
|
6a15b33d69d8d66643bb8886f9916fa28ecaedea
|
molo/yourwords/templatetags/competition_tag.py
|
molo/yourwords/templatetags/competition_tag.py
|
from django import template
from copy import copy
from molo.yourwords.models import (YourWordsCompetition, ThankYou,
YourWordsCompetitionIndexPage)
from molo.core.templatetags.core_tags import get_pages
register = template.Library()
@register.inclusion_tag(
'yourwords/your_words_competition_tag.html',
takes_context=True
)
def your_words_competition(context):
context = copy(context)
locale_code = context.get('locale_code')
page = YourWordsCompetitionIndexPage.objects.live().all().first()
if page:
competitions = (
YourWordsCompetition.objects.child_of(page).filter(
languages__language__is_main_language=True).specific())
else:
competitions = []
context.update({
'competitions': get_pages(context, competitions, locale_code)
})
return context
@register.assignment_tag(takes_context=True)
def load_thank_you_page_for_competition(context, competition):
page = competition.get_main_language_page()
locale = context.get('locale_code')
qs = ThankYou.objects.child_of(page).filter(
languages__language__is_main_language=True)
if not locale:
return qs
if qs:
return get_pages(context, qs, locale)
else:
return []
|
from django import template
from copy import copy
from molo.yourwords.models import (YourWordsCompetition, ThankYou,
YourWordsCompetitionIndexPage)
from molo.core.templatetags.core_tags import get_pages
register = template.Library()
@register.inclusion_tag(
'yourwords/your_words_competition_tag.html',
takes_context=True
)
def your_words_competition(context):
context = copy(context)
locale_code = context.get('locale_code')
page = YourWordsCompetitionIndexPage.objects.live().all().first()
if page:
competitions = (
YourWordsCompetition.objects.child_of(page).filter(
languages__language__is_main_language=True).specific())
else:
competitions = YourWordsCompetition.objects.none()
context.update({
'competitions': get_pages(context, competitions, locale_code)
})
return context
@register.assignment_tag(takes_context=True)
def load_thank_you_page_for_competition(context, competition):
page = competition.get_main_language_page()
locale = context.get('locale_code')
qs = ThankYou.objects.child_of(page).filter(
languages__language__is_main_language=True)
if not locale:
return qs
if qs:
return get_pages(context, qs, locale)
else:
return []
|
Return None if there is no competition
|
Return None if there is no competition
|
Python
|
bsd-2-clause
|
praekelt/molo.yourwords,praekelt/molo.yourwords
|
8c07012e423d592a4638d6dac58ca5e67d9dd5a6
|
apps/cowry/views.py
|
apps/cowry/views.py
|
from rest_framework import generics
from .permissions import IsOrderCreator
from .serializers import PaymentSerializer
from .models import Payment
class PaymentDetail(generics.RetrieveUpdateAPIView):
"""
View for working with Payments. Payments can be retrieved and the payment method and submethod can updated.
"""
model = Payment
serializer_class = PaymentSerializer
permission_classes = (IsOrderCreator,)
|
from rest_framework import generics
from rest_framework import response
from rest_framework import status
from . import payments
from .exceptions import PaymentException
from .models import Payment
from .permissions import IsOrderCreator
from .serializers import PaymentSerializer
class PaymentDetail(generics.RetrieveUpdateDestroyAPIView):
"""
View for working with Payments. Payments can be retrieved (GET), the payment method and submethod can updated (PUT)
and a payment can be cancelled (DELETE).
"""
model = Payment
serializer_class = PaymentSerializer
permission_classes = (IsOrderCreator,)
def destroy(self, request, *args, **kwargs):
payment = self.get_object()
try:
payments.cancel_payment(payment)
except (NotImplementedError, PaymentException) as e:
return response.Response(data=e, status=status.HTTP_400_BAD_REQUEST)
else:
return response.Response(status=status.HTTP_202_ACCEPTED)
|
Add payment cancel (delete) to Payment REST API.
|
Add payment cancel (delete) to Payment REST API.
|
Python
|
bsd-3-clause
|
onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site
|
b434c8dd697b4aa5c2d6daa345c0d9de27e7c05a
|
apps/survey/urls.py
|
apps/survey/urls.py
|
from django.conf.urls.defaults import *
from . import views
urlpatterns = patterns('',
url(r'^profile/$', views.profile_index, name='survey_profile'),
url(r'^profile/electric/$', views.profile_electric, name='survey_profile_electric'),
url(r'^profile/surveys/$', views.survey_management, name='survey_management'),
url(r'^main/$', views.main_index),
url(r'^survey_management/$', views.survey_management, name='survey_management'),
url(r'^intake/view/$', views.survey_intake_view, name='survey_intake_view'),
url(r'^intake/update/$', views.survey_intake_update, name='survey_intake_update'),
url(r'^monthly/(?P<id>\d+)/$', views.survey_monthly ,name='survey_monthly'),
url(r'^monthly/(?P<id>\d+)/update/$', views.survey_monthly_update ,name='survey_monthly_update'),
url(r'^thanks_profile/$', views.thanks_profile, name='profile_thanks'),
#url(r'^select/$', views.select_user, name='survey_select_user'),
url(r'^$', views.index, name='survey_index'),
)
|
from django.conf.urls.defaults import *
from . import views
urlpatterns = patterns('',
url(r'^profile/$', views.profile_index, name='survey_profile'),
url(r'^profile/electric/$', views.profile_electric, name='survey_profile_electric'),
#url(r'^profile/intake/$', views.survey_intake, name='survey_profile_intake'),
url(r'^profile/surveys/$', views.survey_management, name='survey_management'),
url(r'^main/$', views.main_index),
url(r'^survey_management/$', views.survey_management, name='survey_management'),
#url(r'^survey_data/(?P<survey_shortname>.+)/(?P<id>\d+)/$', views.survey_data, name='survey_data'),
url(r'^intake/$', views.survey_data, name='survey_data'),
url(r'^monthly/(?P<id>\d+)/$', views.survey_data_monthly ,name='survey_data_monthly'),
url(r'^monthly/(?P<id>\d+)/update/$', views.survey_update_monthly ,name='survey_update_monthly'),
url(r'^thanks_profile/$', views.thanks_profile, name='profile_thanks'),
#url(r'^select/$', views.select_user, name='survey_select_user'),
url(r'^$', views.index, name='survey_index'),
)
|
Add new decorator for suvery_data
|
Add new decorator for suvery_data
|
Python
|
agpl-3.0
|
chispita/epiwork,chispita/epiwork,chispita/epiwork,chispita/epiwork,chispita/epiwork,chispita/epiwork,chispita/epiwork
|
1f6c1a4f596222b424d9f51ca30f5eb4d80f9942
|
mopidy_alsamixer/__init__.py
|
mopidy_alsamixer/__init__.py
|
import os
from mopidy import config, ext
__version__ = "1.1.1"
class Extension(ext.Extension):
dist_name = "Mopidy-ALSAMixer"
ext_name = "alsamixer"
version = __version__
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), "ext.conf")
return config.read(conf_file)
def get_config_schema(self):
schema = super().get_config_schema()
schema["card"] = config.Integer(minimum=0)
schema["control"] = config.String()
schema["min_volume"] = config.Integer(minimum=0, maximum=100)
schema["max_volume"] = config.Integer(minimum=0, maximum=100)
schema["volume_scale"] = config.String(
choices=("linear", "cubic", "log")
)
return schema
def setup(self, registry):
from mopidy_alsamixer.mixer import AlsaMixer
registry.add("mixer", AlsaMixer)
|
import pathlib
from mopidy import config, ext
__version__ = "1.1.1"
class Extension(ext.Extension):
dist_name = "Mopidy-ALSAMixer"
ext_name = "alsamixer"
version = __version__
def get_default_config(self):
return config.read(pathlib.Path(__file__).parent / "ext.conf")
def get_config_schema(self):
schema = super().get_config_schema()
schema["card"] = config.Integer(minimum=0)
schema["control"] = config.String()
schema["min_volume"] = config.Integer(minimum=0, maximum=100)
schema["max_volume"] = config.Integer(minimum=0, maximum=100)
schema["volume_scale"] = config.String(
choices=("linear", "cubic", "log")
)
return schema
def setup(self, registry):
from mopidy_alsamixer.mixer import AlsaMixer
registry.add("mixer", AlsaMixer)
|
Use pathlib to read ext.conf
|
Use pathlib to read ext.conf
|
Python
|
apache-2.0
|
mopidy/mopidy-alsamixer
|
641b2d07a4250a779ad6ff31f579968f69362cc0
|
numscons/numdist/__init__.py
|
numscons/numdist/__init__.py
|
from numdist_copy import default_lib_dirs, default_include_dirs, \
default_src_dirs, get_standard_file
from numdist_copy import msvc_runtime_library
from conv_template import process_file as process_c_file
from from_template import process_file as process_f_file
|
from numdist_copy import default_lib_dirs, default_include_dirs, \
default_src_dirs, get_standard_file
from numdist_copy import msvc_runtime_library
from conv_template import process_file as process_c_file, process_str as process_c_str
from from_template import process_file as process_f_file
|
Add process_c_str function to the numdist API
|
Add process_c_str function to the numdist API
|
Python
|
bsd-3-clause
|
cournape/numscons,cournape/numscons,cournape/numscons
|
d792679461357fa17b1f852d7a72921aed2fe271
|
bermann/rdd_test.py
|
bermann/rdd_test.py
|
import unittest
from bermann import RDD
class TestRDD(unittest.TestCase):
def test_cache_is_noop(self):
rdd = RDD([1, 2, 3])
cached = rdd.cache()
self.assertEqual(rdd, cached)
# collect
# count
# countByKey
# conntByValue
# distinct
# filter
# first
# flatMap
# flatMapValues
# foreach
# groupBy
# groupByKey
# isEmpty
# keyBy
# keys
# map
# mapValues
# max
# min
# name
if __name__ == '__main__':
unittest.main()
|
import unittest
from bermann import RDD
class TestRDD(unittest.TestCase):
def test_cache_is_noop(self):
rdd = RDD([1, 2, 3])
cached = rdd.cache()
self.assertEqual(rdd, cached)
def test_collect_empty_rdd_returns_empty_list(self):
rdd = RDD()
self.assertEqual([], rdd.collect())
def test_collect_non_empty_rdd_returns_contents(self):
rdd = RDD([1, 2, 3])
self.assertEqual(rdd.contents, rdd.collect())
def test_count_empty_rdd_returns_zero(self):
rdd = RDD()
self.assertEqual(0, rdd.count())
def test_collect_non_empty_rdd_returns_length(self):
rdd = RDD([1, 2, 3])
self.assertEqual(3, rdd.count())
# countByKey
# conntByValue
# distinct
# filter
# first
# flatMap
# flatMapValues
# foreach
# groupBy
# groupByKey
# isEmpty
# keyBy
# keys
# map
# mapValues
# max
# min
# name
if __name__ == '__main__':
unittest.main()
|
Add tests for count and collect
|
Add tests for count and collect
|
Python
|
mit
|
oli-hall/bermann
|
bcc79588e5e49c928210d6830fbe1a7386fcf5bb
|
apps/search/tasks.py
|
apps/search/tasks.py
|
import logging
from django.conf import settings
from django.db.models.signals import pre_delete
from elasticutils.contrib.django.tasks import index_objects, unindex_objects
from wiki.signals import render_done
def render_done_handler(**kwargs):
if not settings.ES_LIVE_INDEX or 'instance' not in kwargs:
return
instance = kwargs['instance']
try:
index_objects.delay(instance.get_mapping_type(), [instance.id])
except:
logging.error('Search indexing task failed',
exc_info=True)
def pre_delete_handler(**kwargs):
if not settings.ES_LIVE_INDEX or 'instance' not in kwargs:
return
instance = kwargs['instance']
unindex_objects.delay(instance.get_mapping_type(), [instance.id])
def register_live_index(model_cls):
"""Register a model and index for auto indexing."""
uid = str(model_cls) + 'live_indexing'
render_done.connect(render_done_handler, model_cls, dispatch_uid=uid)
pre_delete.connect(pre_delete_handler, model_cls, dispatch_uid=uid)
# Enable this to be used as decorator.
return model_cls
|
import logging
import warnings
from django.conf import settings
from django.db.models.signals import pre_delete
# ignore a deprecation warning from elasticutils until the fix is released
# refs https://github.com/mozilla/elasticutils/pull/160
warnings.filterwarnings("ignore",
category=DeprecationWarning,
module='celery.decorators')
from elasticutils.contrib.django.tasks import index_objects, unindex_objects
from wiki.signals import render_done
def render_done_handler(**kwargs):
if not settings.ES_LIVE_INDEX or 'instance' not in kwargs:
return
instance = kwargs['instance']
try:
index_objects.delay(instance.get_mapping_type(), [instance.id])
except:
logging.error('Search indexing task failed',
exc_info=True)
def pre_delete_handler(**kwargs):
if not settings.ES_LIVE_INDEX or 'instance' not in kwargs:
return
instance = kwargs['instance']
unindex_objects.delay(instance.get_mapping_type(), [instance.id])
def register_live_index(model_cls):
"""Register a model and index for auto indexing."""
uid = str(model_cls) + 'live_indexing'
render_done.connect(render_done_handler, model_cls, dispatch_uid=uid)
pre_delete.connect(pre_delete_handler, model_cls, dispatch_uid=uid)
# Enable this to be used as decorator.
return model_cls
|
Stop a deprecation warning that is thrown in elasticutils.
|
Stop a deprecation warning that is thrown in elasticutils.
This is not going to be needed once https://github.com/mozilla/elasticutils/pull/160
has been released.
|
Python
|
mpl-2.0
|
jezdez/kuma,whip112/Whip112,FrankBian/kuma,YOTOV-LIMITED/kuma,SphinxKnight/kuma,jgmize/kuma,RanadeepPolavarapu/kuma,ollie314/kuma,nhenezi/kuma,FrankBian/kuma,surajssd/kuma,YOTOV-LIMITED/kuma,yfdyh000/kuma,cindyyu/kuma,SphinxKnight/kuma,openjck/kuma,MenZil/kuma,RanadeepPolavarapu/kuma,yfdyh000/kuma,whip112/Whip112,carnell69/kuma,YOTOV-LIMITED/kuma,ollie314/kuma,RanadeepPolavarapu/kuma,SphinxKnight/kuma,surajssd/kuma,safwanrahman/kuma,utkbansal/kuma,groovecoder/kuma,chirilo/kuma,openjck/kuma,cindyyu/kuma,ronakkhunt/kuma,safwanrahman/kuma,robhudson/kuma,Elchi3/kuma,Elchi3/kuma,biswajitsahu/kuma,robhudson/kuma,yfdyh000/kuma,robhudson/kuma,biswajitsahu/kuma,hoosteeno/kuma,chirilo/kuma,groovecoder/kuma,scrollback/kuma,ronakkhunt/kuma,Elchi3/kuma,davehunt/kuma,ollie314/kuma,jwhitlock/kuma,tximikel/kuma,Elchi3/kuma,carnell69/kuma,hoosteeno/kuma,utkbansal/kuma,davehunt/kuma,anaran/kuma,mastizada/kuma,carnell69/kuma,bluemini/kuma,jwhitlock/kuma,SphinxKnight/kuma,scrollback/kuma,jgmize/kuma,chirilo/kuma,cindyyu/kuma,biswajitsahu/kuma,mozilla/kuma,a2sheppy/kuma,a2sheppy/kuma,nhenezi/kuma,MenZil/kuma,ollie314/kuma,tximikel/kuma,davidyezsetz/kuma,a2sheppy/kuma,surajssd/kuma,davehunt/kuma,yfdyh000/kuma,biswajitsahu/kuma,darkwing/kuma,RanadeepPolavarapu/kuma,tximikel/kuma,jezdez/kuma,bluemini/kuma,whip112/Whip112,surajssd/kuma,nhenezi/kuma,mozilla/kuma,openjck/kuma,nhenezi/kuma,davidyezsetz/kuma,darkwing/kuma,carnell69/kuma,scrollback/kuma,MenZil/kuma,MenZil/kuma,jgmize/kuma,varunkamra/kuma,darkwing/kuma,hoosteeno/kuma,cindyyu/kuma,groovecoder/kuma,YOTOV-LIMITED/kuma,darkwing/kuma,openjck/kuma,groovecoder/kuma,robhudson/kuma,openjck/kuma,ollie314/kuma,utkbansal/kuma,davehunt/kuma,escattone/kuma,groovecoder/kuma,bluemini/kuma,ronakkhunt/kuma,ollie314/kuma,jgmize/kuma,surajssd/kuma,a2sheppy/kuma,hoosteeno/kuma,jezdez/kuma,YOTOV-LIMITED/kuma,jwhitlock/kuma,utkbansal/kuma,a2sheppy/kuma,cindyyu/kuma,varunkamra/kuma,jwhitlock/kuma,jezdez/kuma,varunkamra/kuma,carnell69/kuma,carnell69/kuma
,mozilla/kuma,biswajitsahu/kuma,anaran/kuma,yfdyh000/kuma,YOTOV-LIMITED/kuma,escattone/kuma,scrollback/kuma,varunkamra/kuma,utkbansal/kuma,RanadeepPolavarapu/kuma,MenZil/kuma,SphinxKnight/kuma,nhenezi/kuma,davehunt/kuma,whip112/Whip112,hoosteeno/kuma,chirilo/kuma,biswajitsahu/kuma,mastizada/kuma,safwanrahman/kuma,davidyezsetz/kuma,anaran/kuma,Elchi3/kuma,bluemini/kuma,whip112/Whip112,FrankBian/kuma,utkbansal/kuma,varunkamra/kuma,safwanrahman/kuma,ronakkhunt/kuma,tximikel/kuma,ronakkhunt/kuma,davehunt/kuma,tximikel/kuma,anaran/kuma,chirilo/kuma,darkwing/kuma,openjck/kuma,FrankBian/kuma,mastizada/kuma,anaran/kuma,varunkamra/kuma,groovecoder/kuma,davidyezsetz/kuma,SphinxKnight/kuma,bluemini/kuma,anaran/kuma,ronakkhunt/kuma,robhudson/kuma,MenZil/kuma,jezdez/kuma,bluemini/kuma,mozilla/kuma,chirilo/kuma,yfdyh000/kuma,scrollback/kuma,cindyyu/kuma,jgmize/kuma,safwanrahman/kuma,safwanrahman/kuma,whip112/Whip112,darkwing/kuma,jwhitlock/kuma,FrankBian/kuma,jgmize/kuma,davidyezsetz/kuma,RanadeepPolavarapu/kuma,hoosteeno/kuma,mastizada/kuma,surajssd/kuma,mozilla/kuma,escattone/kuma,robhudson/kuma,tximikel/kuma,jezdez/kuma
|
6155cfa0d16bfde8b412a3b2c68983ef939d518c
|
synapse/tests/test_init.py
|
synapse/tests/test_init.py
|
import os
import imp
import synapse
from synapse.tests.common import *
class InitTest(SynTest):
def test_init_modules(self):
os.environ['SYN_MODULES'] = 'fakenotrealmod , badnothere,math'
msg = 'SYN_MODULES failed: badnothere (NoSuchDyn: name=\'badnothere\')'
with self.getLoggerStream('synapse', msg) as stream:
imp.reload(synapse)
self.true(stream.wait(10))
stream.seek(0)
self.isin(msg, stream.read())
self.isin(('math', 2.0, None), synapse.lib.modules.call('sqrt', 4))
|
import os
import imp
import synapse
from synapse.tests.common import *
class InitTest(SynTest):
pass
'''
def test_init_modules(self):
os.environ['SYN_MODULES'] = 'fakenotrealmod , badnothere,math'
msg = 'SYN_MODULES failed: badnothere (NoSuchDyn: name=\'badnothere\')'
with self.getLoggerStream('synapse', msg) as stream:
imp.reload(synapse)
self.true(stream.wait(10))
stream.seek(0)
self.isin(msg, stream.read())
self.isin(('math', 2.0, None), synapse.lib.modules.call('sqrt', 4))
'''
|
Comment out broken init test
|
Comment out broken init test
|
Python
|
apache-2.0
|
vertexproject/synapse,vertexproject/synapse,vivisect/synapse,vertexproject/synapse
|
5f70d83408d177e803ce8edfb0ebd2b909722a64
|
troposphere/certificatemanager.py
|
troposphere/certificatemanager.py
|
# Copyright (c) 2012-2019, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 15.1.0
from . import AWSObject
from . import AWSProperty
from troposphere import Tags
class DomainValidationOption(AWSProperty):
props = {
'DomainName': (basestring, True),
'HostedZoneId': (basestring, False),
'ValidationDomain': (basestring, False),
}
class Certificate(AWSObject):
resource_type = "AWS::CertificateManager::Certificate"
props = {
'CertificateAuthorityArn': (basestring, False),
'CertificateTransparencyLoggingPreference': (basestring, False),
'DomainName': (basestring, True),
'DomainValidationOptions': ([DomainValidationOption], False),
'SubjectAlternativeNames': ([basestring], False),
'Tags': ((Tags, list), False),
'ValidationMethod': (basestring, False),
}
|
# Copyright (c) 2012-2021, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 31.1.0
from . import AWSObject
from . import AWSProperty
from troposphere import Tags
from .validators import integer
class ExpiryEventsConfiguration(AWSProperty):
props = {
'DaysBeforeExpiry': (integer, False),
}
class Account(AWSObject):
resource_type = "AWS::CertificateManager::Account"
props = {
'ExpiryEventsConfiguration': (ExpiryEventsConfiguration, True),
}
class DomainValidationOption(AWSProperty):
props = {
'DomainName': (basestring, True),
'HostedZoneId': (basestring, False),
'ValidationDomain': (basestring, False),
}
class Certificate(AWSObject):
resource_type = "AWS::CertificateManager::Certificate"
props = {
'CertificateAuthorityArn': (basestring, False),
'CertificateTransparencyLoggingPreference': (basestring, False),
'DomainName': (basestring, True),
'DomainValidationOptions': ([DomainValidationOption], False),
'SubjectAlternativeNames': ([basestring], False),
'Tags': ((Tags, list), False),
'ValidationMethod': (basestring, False),
}
|
Update CertificateManager per 2021-03-11 changes
|
Update CertificateManager per 2021-03-11 changes
|
Python
|
bsd-2-clause
|
cloudtools/troposphere,cloudtools/troposphere
|
c947ecffd771117ce531f5058356a17b7db82fdb
|
mockaioredis/commands/__init__.py
|
mockaioredis/commands/__init__.py
|
from mockredis import MockRedis as _MockRedis
from .generic import GenericCommandsMixin
from .hash import HashCommandsMixin
from .list import ListCommandsMixin
from .set import SetCommandsMixin
__all__ = ['MockRedis']
class MockRedis(GenericCommandsMixin, HashCommandsMixin, ListCommandsMixin, SetCommandsMixin):
"""Fake high-level aioredis.Redis interface"""
def __init__(self, connection=None, encoding=None, **kwargs):
# Just for API compatibility
self._conn = connection
self._redis = _MockRedis(**kwargs)
self._encoding = encoding
async def create_redis(address, *, db=None, password=None, ssl=None,
encoding=None, commands_factory=MockRedis,
loop=None):
'''Create a fake high-level MockRedis interface
This function is a coroutine
'''
return commands_factory(None, encoding=encoding)
|
from mockredis import MockRedis as _MockRedis
from .generic import GenericCommandsMixin
from .hash import HashCommandsMixin
from .list import ListCommandsMixin
from .set import SetCommandsMixin
__all__ = ['MockRedis']
class MockRedis(GenericCommandsMixin, HashCommandsMixin, ListCommandsMixin, SetCommandsMixin):
"""Fake high-level aioredis.Redis interface"""
def __init__(self, connection=None, encoding=None, **kwargs):
# Just for API compatibility
self._conn = connection
self._redis = _MockRedis(**kwargs)
self._encoding = encoding
async def wait_closed(self):
if self._conn:
await self._conn.wait_closed()
def close(self):
if self._conn:
self._conn.close()
async def create_redis(address, *, db=None, password=None, ssl=None,
encoding=None, commands_factory=MockRedis,
loop=None):
'''Create a fake high-level MockRedis interface
This function is a coroutine
'''
return commands_factory(None, encoding=encoding)
|
Add close comands to MockRedis class
|
chore: Add close comands to MockRedis class
This closes #15
Signed-off-by: Kai Blin <ad3597797f6179d503c382b2627cc19939309418@biosustain.dtu.dk>
|
Python
|
apache-2.0
|
kblin/mockaioredis,kblin/mockaioredis
|
022d8b992a88fd4c489c068ba57b4b2fcf6dde98
|
cloudsizzle/studyplanner/completedstudies/models.py
|
cloudsizzle/studyplanner/completedstudies/models.py
|
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class CompletedCourse(models.Model):
"""
Model for completed studies
"""
student = models.ForeignKey(User, related_name='completed_courses')
code = models.CharField(max_length=11)
name = models.CharField(max_length=100)
cr = models.IntegerField()
ocr = models.IntegerField()
grade = models.CharField(max_length=5)
date = models.DateField()
teacher = models.CharField(max_length=60)
class Teacher(models.Model):
"""
should be updated for the teachers to combine them with course information
"""
name = models.CharField(max_length = 30)
|
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class CompletedCourse(models.Model):
"""
Model for completed studies
"""
student = models.ForeignKey(User, related_name='completed_courses')
code = models.CharField(max_length=11)
name = models.CharField(max_length=100)
cr = models.IntegerField(null=True)
ocr = models.IntegerField(null=True)
grade = models.CharField(max_length=5)
date = models.DateField()
teacher = models.CharField(max_length=60)
class Teacher(models.Model):
"""
should be updated for the teachers to combine them with course information
"""
name = models.CharField(max_length = 30)
|
Allow null values for ocr and cr fields of CompletedCourse
|
Allow null values for ocr and cr fields of CompletedCourse
|
Python
|
mit
|
jpvanhal/cloudsizzle,jpvanhal/cloudsizzle
|
8dbcd07e0db34d39ad8f79067282d4359d79439d
|
usr/bin/graphical-app-launcher.py
|
usr/bin/graphical-app-launcher.py
|
#!/usr/bin/env python
import os
import subprocess
if __name__ == '__main__':
if os.environ.has_key('APP'):
graphical_app = os.environ['APP']
process = subprocess.Popen(graphical_app, shell=True,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
stdoutdata, stderrordata = process.communicate()
print(stdoutdata)
subprocess.call(['sudo', 'supervisorctl', 'shutdown'],
stdout=subprocess.PIPE)
|
#!/usr/bin/env python
import os
import subprocess
if __name__ == '__main__':
if os.environ.has_key('APP'):
graphical_app = os.environ['APP']
if os.environ.has_key('ARGS'):
extra_args = os.environ['ARGS']
command = graphical_app + ' ' + extra_args
else:
command = graphical_app
process = subprocess.Popen(command, shell=True,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
stdoutdata, stderrordata = process.communicate()
print(stdoutdata)
subprocess.call(['sudo', 'supervisorctl', 'shutdown'],
stdout=subprocess.PIPE)
|
Support an ARGS enviromental variable for extra command arguments.
|
Support an ARGS enviromental variable for extra command arguments.
|
Python
|
apache-2.0
|
thewtex/docker-opengl,thewtex/docker-opengl
|
830c73beafa359e01fb839901bcb91360c1de365
|
web/thesaurus_template_generators.py
|
web/thesaurus_template_generators.py
|
import json
from web.MetaInfo import MetaInfo
def generate_language_template(language_id, structure_id, version=None):
meta_info = MetaInfo()
if structure_id not in meta_info.data_structures:
raise ValueError
language_name = meta_info.languages.get(
language_id,
{'name': 'Human-Readable Language Name'}
)['name']
meta = {
'language': language_id,
'language_name': language_name,
'structure': structure_id,
}
if version:
meta['language_version'] = version
concepts = {
id: {'code': [""]}
for category in meta_info.structure(structure_id).categories.values()
for (id, name) in category.items()
}
return json.dumps({'meta': meta, 'concepts': concepts}, indent=2)
def generate_meta_template(structure_id, structure_name):
meta = {
'structure': structure_id,
'structure_name': structure_name,
}
categories = {
'First Category Name': {
'concept_id1': 'Name of Concept 1',
'concept_id2': 'Name of Concept 2'
},
'Second Category Name': {
'concept_id3': 'Name of Concept 3',
'concept_id4': 'Name of Concept 4'
}
}
return json.dumps({'meta': meta, 'categories': categories})
|
"""Generator functions for thesaurus files"""
import json
from web.MetaInfo import MetaInfo
def generate_language_template(language_id, structure_id, version=None):
"""Generate a template for the given language and structure"""
meta_info = MetaInfo()
if structure_id not in meta_info.data_structures:
raise ValueError
language_name = meta_info.languages.get(
language_id,
{'name': 'Human-Readable Language Name'}
)['name']
meta = {
'language': language_id,
'language_name': language_name,
'structure': structure_id,
}
if version:
meta['language_version'] = version
concepts = {
id: {
'name': name,
'code': [""],
}
for category in meta_info.structure(structure_id).categories.values()
for (id, name) in category.items()
}
return json.dumps({'meta': meta, 'concepts': concepts}, indent=2)
def generate_meta_template(structure_id, structure_name):
"""Generate a template for a `meta file`"""
meta = {
'structure': structure_id,
'structure_name': structure_name,
}
categories = {
'First Category Name': {
'concept_id1': 'Name of Concept 1',
'concept_id2': 'Name of Concept 2'
},
'Second Category Name': {
'concept_id3': 'Name of Concept 3',
'concept_id4': 'Name of Concept 4'
}
}
return json.dumps({'meta': meta, 'categories': categories})
|
Add name fields to generated templates
|
Add name fields to generated templates
|
Python
|
agpl-3.0
|
codethesaurus/codethesaur.us,codethesaurus/codethesaur.us
|
fe9e11af28e2ffe2b3da5ebb0971cd712136284c
|
nodeconductor/iaas/migrations/0011_cloudprojectmembership_availability_zone.py
|
nodeconductor/iaas/migrations/0011_cloudprojectmembership_availability_zone.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('iaas', '0010_auto_20150118_1834'),
]
operations = [
migrations.AddField(
model_name='cloudprojectmembership',
name='availability_zone',
field=models.CharField(max_length=100, blank=True),
preserve_default=True,
),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('iaas', '0010_auto_20150118_1834'),
]
operations = [
migrations.AddField(
model_name='cloudprojectmembership',
name='availability_zone',
field=models.CharField(help_text='Optional availability group. Will be used for all instances provisioned in this tenant', max_length=100, blank=True),
preserve_default=True,
),
]
|
Add help_text to availability_zone field (nc-327)
|
Add help_text to availability_zone field (nc-327)
|
Python
|
mit
|
opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor
|
1187deb4140ad0c7d66f0f25ae6d019f8ffb6168
|
bluebottle/homepage/views.py
|
bluebottle/homepage/views.py
|
from django.utils import translation
from rest_framework import response
from bluebottle.utils.views import GenericAPIView
from .models import HomePage
from .serializers import HomePageSerializer
# Instead of serving all the objects separately we combine Slide, Quote and Stats into a dummy object
class HomePageDetail(GenericAPIView):
serializer_class = HomePageSerializer
model = HomePage
def get(self, request, language='en'):
# Force requested language
translation.activate(language)
request.LANGUAGE_CODE = translation.get_language()
homepage = HomePage().get(language)
serialized = HomePageSerializer().to_representation(homepage)
return response.Response(serialized)
|
from django.utils import translation
from rest_framework import response
from bluebottle.utils.views import GenericAPIView
from .models import HomePage
from .serializers import HomePageSerializer
# Instead of serving all the objects separately we combine Slide, Quote and Stats into a dummy object
class HomePageDetail(GenericAPIView):
serializer_class = HomePageSerializer
model = HomePage
def get(self, request, language='en'):
# Force requested language
translation.activate(language)
request.LANGUAGE_CODE = translation.get_language()
homepage = HomePage().get(language)
serialized = HomePageSerializer(
context=self.get_serializer_context()
).to_representation(homepage)
return response.Response(serialized)
|
Add proper context to homepage serializer
|
Add proper context to homepage serializer
|
Python
|
bsd-3-clause
|
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
|
424980a48e451d1b99397843001bd75fa58e474e
|
tests/test_fullqualname.py
|
tests/test_fullqualname.py
|
"""Tests for fullqualname."""
import nose
import sys
from fullqualname import fullqualname
def test_builtin_function():
# Test built-in function object.
obj = len
# Type is 'builtin_function_or_method'.
assert type(obj).__name__ == 'builtin_function_or_method'
# Object is a function.
assert 'built-in function' in repr(obj)
if sys.version_info >= (3, ):
expected = 'builtins.len'
else:
expected = '__builtin__.len'
nose.tools.assert_equals(fullqualname(obj), expected)
|
"""Tests for fullqualname."""
import inspect
import nose
import sys
from fullqualname import fullqualname
def test_builtin_function():
# Test built-in function object.
obj = len
# Type is 'builtin_function_or_method'.
assert type(obj).__name__ == 'builtin_function_or_method'
# Object is a function.
assert 'built-in function' in repr(obj)
if sys.version_info >= (3, ):
expected = 'builtins.len'
else:
expected = '__builtin__.len'
nose.tools.assert_equals(fullqualname(obj), expected)
def test_builtin_method():
# Test built-in method object.
obj = [1, 2, 3].append
# Object type is 'builtin_function_or_method'.
assert type(obj).__name__ == 'builtin_function_or_method'
# Object is a method.
assert 'built-in method' in repr(obj)
# Object __self__ attribute is not a class.
assert not inspect.isclass(obj.__self__)
if sys.version_info >= (3, ):
expected = 'builtins.list.append'
else:
expected = '__builtin__.list.append'
nose.tools.assert_equals(fullqualname(obj), expected)
|
Add built-in method object test
|
Add built-in method object test
|
Python
|
bsd-3-clause
|
etgalloway/fullqualname
|
c259e42ea95fdc43ad9345d702d3cab901d88f93
|
rx/core/__init__.py
|
rx/core/__init__.py
|
# flake8: noqa
from .typing import Observer, Scheduler
from .disposable import Disposable
from .anonymousobserver import AnonymousObserver
from . import observerextensions
from .pipe import pipe
from .observable import Observable
from .observable import AnonymousObservable, ConnectableObservable
from .observable import GroupedObservable, BlockingObservable
from .observerbase import ObserverBase
|
# flake8: noqa
from .typing import Observer, Scheduler
from .disposable import Disposable
from .anonymousobserver import AnonymousObserver
from .pipe import pipe
from .observable import Observable
from .observable import AnonymousObservable, ConnectableObservable
from .observable import GroupedObservable, BlockingObservable
from .observerbase import ObserverBase
|
Remove observer extension from init
|
Remove observer extension from init
|
Python
|
mit
|
ReactiveX/RxPY,ReactiveX/RxPY
|
4068605116cb04b999c66d29056c88cf2c4ab46c
|
scripts/analytics/addon_count.py
|
scripts/analytics/addon_count.py
|
import sys
import logging
from datetime import datetime
from dateutil.parser import parse
from website.settings import ADDONS_AVAILABLE
from website.app import init_app
from website.settings import KEEN as keen_settings
from keen.client import KeenClient
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
def count():
counts = []
for addon in ADDONS_AVAILABLE:
user_count = addon.settings_models['user'].find().count() if addon.settings_models.get('user') else 0
node_count = addon.settings_models['node'].find().count() if addon.settings_models.get('node') else 0
counts.append({
'provider': addon.short_name,
'user_count': user_count,
'node_count': node_count
})
logger.info('{} counted. Users: {}, Nodes: {}'.format(addon.short_name, user_count, node_count))
return counts
def main():
addon_count = count()
keen_project = keen_settings['private']['project_id']
write_key = keen_settings['private']['write_key']
if keen_project and write_key:
client = KeenClient(
project_id=keen_project,
write_key=write_key,
)
client.add_event('addon_count_analytics', addon_count)
else:
print(addon_count)
if __name__ == '__main__':
init_app()
try:
date = parse(sys.argv[1])
except IndexError:
date = datetime.now()
main(date)
|
import logging
from website.settings import ADDONS_AVAILABLE
from website.app import init_app
from website.settings import KEEN as keen_settings
from keen.client import KeenClient
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
def count():
counts = []
for addon in ADDONS_AVAILABLE:
user_count = addon.settings_models['user'].find().count() if addon.settings_models.get('user') else 0
node_count = addon.settings_models['node'].find().count() if addon.settings_models.get('node') else 0
counts.append({
'provider': addon.short_name,
'user_count': user_count,
'node_count': node_count
})
logger.info('{} counted. Users: {}, Nodes: {}'.format(addon.short_name, user_count, node_count))
return counts
def main():
addon_count = count()
keen_project = keen_settings['private']['project_id']
write_key = keen_settings['private']['write_key']
if keen_project and write_key:
client = KeenClient(
project_id=keen_project,
write_key=write_key,
)
client.add_event('addon_count_analytics', addon_count)
else:
print(addon_count)
if __name__ == '__main__':
init_app()
main()
|
Remove date from addon count script
|
Remove date from addon count script
|
Python
|
apache-2.0
|
pattisdr/osf.io,saradbowman/osf.io,binoculars/osf.io,caneruguz/osf.io,HalcyonChimera/osf.io,mattclark/osf.io,cslzchen/osf.io,chrisseto/osf.io,acshi/osf.io,mfraezz/osf.io,acshi/osf.io,CenterForOpenScience/osf.io,monikagrabowska/osf.io,sloria/osf.io,aaxelb/osf.io,caseyrollins/osf.io,monikagrabowska/osf.io,Johnetordoff/osf.io,hmoco/osf.io,acshi/osf.io,alexschiller/osf.io,acshi/osf.io,Johnetordoff/osf.io,aaxelb/osf.io,baylee-d/osf.io,pattisdr/osf.io,caneruguz/osf.io,adlius/osf.io,caseyrollins/osf.io,mattclark/osf.io,erinspace/osf.io,CenterForOpenScience/osf.io,hmoco/osf.io,brianjgeiger/osf.io,Johnetordoff/osf.io,Nesiehr/osf.io,brianjgeiger/osf.io,mluo613/osf.io,cwisecarver/osf.io,rdhyee/osf.io,rdhyee/osf.io,mluo613/osf.io,erinspace/osf.io,rdhyee/osf.io,alexschiller/osf.io,adlius/osf.io,baylee-d/osf.io,saradbowman/osf.io,laurenrevere/osf.io,chennan47/osf.io,alexschiller/osf.io,mluo613/osf.io,monikagrabowska/osf.io,icereval/osf.io,chrisseto/osf.io,pattisdr/osf.io,cslzchen/osf.io,leb2dg/osf.io,mattclark/osf.io,acshi/osf.io,crcresearch/osf.io,cslzchen/osf.io,CenterForOpenScience/osf.io,erinspace/osf.io,felliott/osf.io,icereval/osf.io,hmoco/osf.io,caneruguz/osf.io,felliott/osf.io,aaxelb/osf.io,leb2dg/osf.io,CenterForOpenScience/osf.io,chennan47/osf.io,Johnetordoff/osf.io,brianjgeiger/osf.io,chrisseto/osf.io,TomBaxter/osf.io,TomBaxter/osf.io,Nesiehr/osf.io,alexschiller/osf.io,brianjgeiger/osf.io,cslzchen/osf.io,mfraezz/osf.io,adlius/osf.io,icereval/osf.io,HalcyonChimera/osf.io,leb2dg/osf.io,laurenrevere/osf.io,caseyrollins/osf.io,caneruguz/osf.io,monikagrabowska/osf.io,binoculars/osf.io,baylee-d/osf.io,adlius/osf.io,sloria/osf.io,chrisseto/osf.io,aaxelb/osf.io,TomBaxter/osf.io,sloria/osf.io,alexschiller/osf.io,felliott/osf.io,mluo613/osf.io,Nesiehr/osf.io,mfraezz/osf.io,HalcyonChimera/osf.io,crcresearch/osf.io,chennan47/osf.io,mluo613/osf.io,mfraezz/osf.io,rdhyee/osf.io,cwisecarver/osf.io,crcresearch/osf.io,felliott/osf.io,HalcyonChimera/osf.io,cwisecarver/osf.io,leb2dg/osf.i
o,hmoco/osf.io,Nesiehr/osf.io,binoculars/osf.io,cwisecarver/osf.io,laurenrevere/osf.io,monikagrabowska/osf.io
|
9995a3bb8b95caddc6319e68f405c70fd2a15d09
|
aldryn_faq/search_indexes.py
|
aldryn_faq/search_indexes.py
|
from aldryn_search.base import AldrynIndexBase
from aldryn_search.utils import strip_tags
from django.template import RequestContext
from haystack import indexes
from .models import Question
class QuestionIndex(AldrynIndexBase, indexes.Indexable):
INDEX_TITLE = True
def get_title(self, obj):
return obj.title
def get_index_kwargs(self, language):
return {'language': language}
def get_index_queryset(self, language):
return self.get_model().objects.all()
def get_model(self):
return Question
def get_search_data(self, obj, language, request):
text = strip_tags(obj.title)
text += u' ' + strip_tags(obj.answer_text)
plugins = obj.answer.cmsplugin_set.filter(language=language)
for base_plugin in plugins:
instance, plugin_type = base_plugin.get_plugin_instance()
if instance is None:
# this is an empty plugin
continue
else:
text += strip_tags(instance.render_plugin(context=RequestContext(request))) + u' '
return text
|
from aldryn_search.base import AldrynIndexBase
from aldryn_search.utils import strip_tags
from django.template import RequestContext
from haystack import indexes
from .models import Question, Category
class QuestionIndex(AldrynIndexBase, indexes.Indexable):
INDEX_TITLE = True
def get_title(self, obj):
return obj.title
def get_index_kwargs(self, language):
return {'language': language}
def get_index_queryset(self, language):
return self.get_model().objects.all()
def get_model(self):
return Question
def get_search_data(self, obj, language, request):
text = strip_tags(obj.title)
text += u' ' + strip_tags(obj.answer_text)
plugins = obj.answer.cmsplugin_set.filter(language=language)
for base_plugin in plugins:
instance, plugin_type = base_plugin.get_plugin_instance()
if instance is None:
# this is an empty plugin
continue
else:
text += strip_tags(instance.render_plugin(context=RequestContext(request))) + u' '
return text
class CategoryIndex(AldrynIndexBase, indexes.Indexable):
INDEX_TITLE = True
def get_title(self, obj):
return ''
def get_index_kwargs(self, language):
return {'translations__language_code': language}
def get_index_queryset(self, language):
return self.get_model().objects.all()
def get_model(self):
return Category
def get_search_data(self, obj, language, request):
return strip_tags(obj.name)
|
Add search index for faq categories
|
Add search index for faq categories
|
Python
|
bsd-3-clause
|
czpython/aldryn-faq,czpython/aldryn-faq,mkoistinen/aldryn-faq,czpython/aldryn-faq,czpython/aldryn-faq
|
082bcfefddb4ba566e35e827d9e726aacdfb80d6
|
collection_pipelines/core.py
|
collection_pipelines/core.py
|
import functools
def coroutine(fn):
def wrapper(*args, **kwargs):
generator = fn(*args, **kwargs)
next(generator)
return generator
return wrapper
class CollectionPipelineProcessor:
sink = None
start_source = None
receiver = None
def process(self, item):
raise NotImplementedError
def on_done(self):
if self.receiver:
self.receiver.close()
def source(self, start_source):
self.start_source = start_source
@coroutine
def make_generator(self):
while True:
try:
item = yield
self.process(item)
except GeneratorExit:
self.on_done()
break
def __or__(self, other):
self.sink = other
def exec():
self.receiver = self.sink.make_generator()
self.start_source()
other.source(exec)
return other
class CollectionPipelineOutput(CollectionPipelineProcessor):
"""Pipeline processor that ends the chain and starts outputing stream.
Output processor immediately starts consuming from the source.
Thus triggering the whole pipeline start.
"""
def source(self, start_source):
start_source()
|
import functools
def coroutine(fn):
def wrapper(*args, **kwargs):
generator = fn(*args, **kwargs)
next(generator)
return generator
return wrapper
class CollectionPipelineProcessor:
sink = None
start_source = None
receiver = None
def process(self, item):
raise NotImplementedError
def on_done(self):
if self.receiver:
self.receiver.close()
def source(self, start_source):
self.start_source = start_source
def return_value(self):
"""Processor return value when used with __or__ operator.
Returns:
CollectionPipelineProcessor: when processor is to be chained
with other processors.
any: any other value when processor is used as an output and is
meant to return value. In this way we can assign
the output result to python variable.
"""
return self
@coroutine
def make_generator(self):
while True:
try:
item = yield
self.process(item)
except GeneratorExit:
self.on_done()
break
def __or__(self, other):
"""Overwrites the '|' operator.
Args:
other (CollectionPipelineProcessor)
Returns:
whatever other.return_value() returns.
"""
self.sink = other
def exec():
self.receiver = self.sink.make_generator()
self.start_source()
other.source(exec)
return other.return_value()
class CollectionPipelineOutput(CollectionPipelineProcessor):
"""Pipeline processor that ends the chain and starts outputing stream.
Output processor immediately starts consuming from the source.
Thus triggering the whole pipeline start.
"""
def source(self, start_source):
start_source()
|
Allow to overwrite the pipeline processor return value
|
Allow to overwrite the pipeline processor return value
|
Python
|
mit
|
povilasb/pycollection-pipelines
|
2d05ede4db8bf80834e04ffb5f9d0ec11982851d
|
normandy/recipes/validators.py
|
normandy/recipes/validators.py
|
import json
import jsonschema
from django.core.exceptions import ValidationError
# Add path to required validator so we can get property name
def _required(validator, required, instance, schema):
'''Validate 'required' properties.'''
if not validator.is_type(instance, 'object'):
return
for index, requirement in enumerate(required):
if requirement not in instance:
error = jsonschema.ValidationError(
'This field may not be blank.',
path=[requirement]
)
yield error
# Construct validator as extension of Json Schema Draft 4.
Validator = jsonschema.validators.extend(
validator=jsonschema.validators.Draft4Validator,
validators={
'required': _required
}
)
def validate_json(value):
"""
Validate that a given value can be successfully parsed as JSON.
"""
try:
json.loads(value)
except json.JSONDecodeError as err:
raise ValidationError('%s is not valid JSON: %s', params=(value, err.msg))
|
import json
import jsonschema
from django.core.exceptions import ValidationError
# Add path to required validator so we can get property name
def _required(validator, required, instance, schema):
'''Validate 'required' properties.'''
if not validator.is_type(instance, 'object'):
return
for index, requirement in enumerate(required):
if requirement not in instance or instance[requirement] == '':
error = jsonschema.ValidationError(
'This field may not be blank.',
path=[requirement]
)
yield error
# Construct validator as extension of Json Schema Draft 4.
Validator = jsonschema.validators.extend(
validator=jsonschema.validators.Draft4Validator,
validators={
'required': _required
}
)
def validate_json(value):
"""
Validate that a given value can be successfully parsed as JSON.
"""
try:
json.loads(value)
except json.JSONDecodeError as err:
raise ValidationError('%s is not valid JSON: %s', params=(value, err.msg))
|
Check for empty strings in required validator
|
Check for empty strings in required validator
|
Python
|
mpl-2.0
|
Osmose/normandy,mozilla/normandy,Osmose/normandy,mozilla/normandy,Osmose/normandy,mozilla/normandy,Osmose/normandy,mozilla/normandy
|
fe8221e398bb9a1ddabf08002441acb37dfef515
|
scripts/release_test/arguments.py
|
scripts/release_test/arguments.py
|
import argparse, common, sys, tests
from features import check_features, get_features
def arguments(argv=sys.argv[1:]):
parser = argparse.ArgumentParser()
parser.add_argument(
'tests', nargs='*', help='The list of tests to run')
parser.add_argument(
'--features', '-f', default=[], action='append',
help='A list of features separated by colons')
args = parser.parse_args(argv)
if args.tests:
all_tests = [(t, getattr(tests, t, None)) for t in args.tests]
bad_tests = [t for (t, a) in all_tests if a is None]
if bad_tests:
raise ValueError('Bad test names: ' + ', '.join(bad_tests))
all_tests = tuple(a for (t, a) in all_tests)
else:
all_tests = tests.__all__
if args.features:
features = set(':'.join(args.features).split(':'))
check_features(features)
else:
features = get_features()
return all_tests, features
if __name__ == '__main__':
common.printer(arguments())
|
import argparse, common, sys, tests
from features import check_features, get_features, FEATURES
def arguments(argv=sys.argv[1:]):
parser = argparse.ArgumentParser()
names = [t.__name__.split('.')[1] for t in tests.__all__]
names = ', '.join(names)
parser.add_argument(
'tests', nargs='*',
help='The list of tests to run. Tests are:' + names)
features = ', '.join(FEATURES)
parser.add_argument(
'--features', '-f', default=[], action='append',
help='A list of features separated by colons. Features are: ' +
features)
args = parser.parse_args(argv)
if args.tests:
all_tests = [(t, getattr(tests, t, None)) for t in args.tests]
bad_tests = [t for (t, a) in all_tests if a is None]
if bad_tests:
raise ValueError('Bad test names: ' + ', '.join(bad_tests))
all_tests = tuple(a for (t, a) in all_tests)
else:
all_tests = tests.__all__
if args.features:
features = set(':'.join(args.features).split(':'))
check_features(features)
else:
features = get_features()
return all_tests, features
if __name__ == '__main__':
common.printer(arguments())
|
Improve help messages from release_test
|
Improve help messages from release_test
|
Python
|
mit
|
ManiacalLabs/BiblioPixel,rec/BiblioPixel,ManiacalLabs/BiblioPixel,rec/BiblioPixel,rec/BiblioPixel,ManiacalLabs/BiblioPixel,ManiacalLabs/BiblioPixel,rec/BiblioPixel
|
95d1bf068ebf2f57eaf44accbe15aa30d236d8ea
|
astropy/coordinates/tests/test_distance.py
|
astropy/coordinates/tests/test_distance.py
|
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import print_function
from numpy import testing as npt
from ... import units as u
"""
This includes tests for distances/cartesian points that are *not* in the API
tests. Right now that's just regression tests.
"""
def test_distance_change():
from .. import RA, Dec, ICRSCoordinates, Distance
ra = RA("4:08:15.162342", unit=u.hour)
dec = Dec("-41:08:15.162342", unit=u.degree)
c = ICRSCoordinates(ra, dec)
c.distance = Distance(1, unit=u.kpc)
oldx = c.x
assert (oldx - 0.35284083171901953) < 1e-10
#now x should increase when the distance increases
c.distance = Distance(2, unit=u.kpc)
assert c.x == oldx * 2
|
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import print_function
from numpy import testing as npt
from ... import units as u
"""
This includes tests for distances/cartesian points that are *not* in the API
tests. Right now that's just regression tests.
"""
def test_distance_change():
from .. import RA, Dec, ICRSCoordinates, Distance
ra = RA("4:08:15.162342", unit=u.hour)
dec = Dec("-41:08:15.162342", unit=u.degree)
c = ICRSCoordinates(ra, dec)
c.distance = Distance(1, unit=u.kpc)
oldx = c.x
assert (oldx - 0.35284083171901953) < 1e-10
#now x should increase when the distance increases
c.distance = Distance(2, unit=u.kpc)
assert c.x == oldx * 2
def test_distance_from_quantity():
from .. import RA, Dec, ICRSCoordinates, Distance
ra = RA("4:08:15.162342", unit=u.hour)
dec = Dec("-41:08:15.162342", unit=u.degree)
c = ICRSCoordinates(ra, dec)
# a Quantity object should be able to supply a distance
q = 2 * u.kpc
c.distance = q
|
Add test for applying a distance to a coordinate via a quantity
|
Add test for applying a distance to a coordinate via a quantity
|
Python
|
bsd-3-clause
|
pllim/astropy,astropy/astropy,dhomeier/astropy,dhomeier/astropy,bsipocz/astropy,pllim/astropy,astropy/astropy,pllim/astropy,tbabej/astropy,aleksandr-bakanov/astropy,larrybradley/astropy,larrybradley/astropy,joergdietrich/astropy,aleksandr-bakanov/astropy,tbabej/astropy,StuartLittlefair/astropy,kelle/astropy,MSeifert04/astropy,saimn/astropy,tbabej/astropy,funbaker/astropy,AustereCuriosity/astropy,AustereCuriosity/astropy,DougBurke/astropy,mhvk/astropy,joergdietrich/astropy,joergdietrich/astropy,StuartLittlefair/astropy,dhomeier/astropy,pllim/astropy,lpsinger/astropy,larrybradley/astropy,stargaser/astropy,DougBurke/astropy,astropy/astropy,lpsinger/astropy,tbabej/astropy,mhvk/astropy,tbabej/astropy,saimn/astropy,pllim/astropy,bsipocz/astropy,astropy/astropy,mhvk/astropy,aleksandr-bakanov/astropy,saimn/astropy,MSeifert04/astropy,joergdietrich/astropy,saimn/astropy,StuartLittlefair/astropy,dhomeier/astropy,kelle/astropy,aleksandr-bakanov/astropy,funbaker/astropy,AustereCuriosity/astropy,MSeifert04/astropy,larrybradley/astropy,kelle/astropy,AustereCuriosity/astropy,StuartLittlefair/astropy,stargaser/astropy,stargaser/astropy,funbaker/astropy,stargaser/astropy,larrybradley/astropy,DougBurke/astropy,joergdietrich/astropy,saimn/astropy,kelle/astropy,bsipocz/astropy,funbaker/astropy,mhvk/astropy,DougBurke/astropy,AustereCuriosity/astropy,StuartLittlefair/astropy,astropy/astropy,lpsinger/astropy,dhomeier/astropy,kelle/astropy,MSeifert04/astropy,lpsinger/astropy,lpsinger/astropy,mhvk/astropy,bsipocz/astropy
|
1975aeb06a85d8983a3815ffd89076af66d61561
|
payments/urls.py
|
payments/urls.py
|
# URL configuration for the payments app.
#
# View callables are imported directly from ``.views`` instead of being
# referenced as dotted strings: string view arguments to ``url()`` are
# deprecated (and later removed) in Django, and direct imports make a
# missing view fail fast at import time rather than at request time.
from django.conf.urls import patterns, url
# Prefer django-user-accounts' login_required when it is installed;
# fall back to Django's stock decorator otherwise.
try:
    from account.decorators import login_required
except ImportError:
    from django.contrib.auth.decorators import login_required
from .views import (
    CancelView,
    ChangeCardView,
    ChangePlanView,
    HistoryView,
    SubscribeView,
    webhook,
    subscribe,
    change_card,
    change_plan,
    cancel
)
urlpatterns = patterns(
    "",
    # Webhook / AJAX endpoints (function-based views).
    url(r"^webhook/$", webhook, name="payments_webhook"),
    url(r"^a/subscribe/$", subscribe, name="payments_ajax_subscribe"),
    url(r"^a/change/card/$", change_card, name="payments_ajax_change_card"),
    url(r"^a/change/plan/$", change_plan, name="payments_ajax_change_plan"),
    url(r"^a/cancel/$", cancel, name="payments_ajax_cancel"),
    # Full-page endpoints (class-based views, require an authenticated user).
    url(
        r"^subscribe/$",
        login_required(SubscribeView.as_view()),
        name="payments_subscribe"
    ),
    url(
        r"^change/card/$",
        login_required(ChangeCardView.as_view()),
        name="payments_change_card"
    ),
    url(
        r"^change/plan/$",
        login_required(ChangePlanView.as_view()),
        name="payments_change_plan"
    ),
    url(
        r"^cancel/$",
        login_required(CancelView.as_view()),
        name="payments_cancel"
    ),
    url(
        r"^history/$",
        login_required(HistoryView.as_view()),
        name="payments_history"
    ),
)
|
# URL configuration for the payments app.
from django.conf.urls import patterns, url
# Prefer django-user-accounts' login_required when it is installed;
# fall back to Django's stock decorator otherwise.
try:
    from account.decorators import login_required
except ImportError:
    from django.contrib.auth.decorators import login_required
from .views import (
    CancelView,
    ChangeCardView,
    ChangePlanView,
    HistoryView,
    SubscribeView,
    webhook,
    subscribe,
    change_card,
    change_plan,
    cancel
)
# View callables are passed directly (no dotted-string lookup), so a
# missing view fails at import time rather than at request time.
urlpatterns = patterns(
    "",
    # Webhook / AJAX endpoints (function-based views).
    url(r"^webhook/$", webhook, name="payments_webhook"),
    url(r"^a/subscribe/$", subscribe, name="payments_ajax_subscribe"),
    url(r"^a/change/card/$", change_card, name="payments_ajax_change_card"),
    url(r"^a/change/plan/$", change_plan, name="payments_ajax_change_plan"),
    url(r"^a/cancel/$", cancel, name="payments_ajax_cancel"),
    # Full-page endpoints (class-based views, require an authenticated user).
    url(
        r"^subscribe/$",
        login_required(SubscribeView.as_view()),
        name="payments_subscribe"
    ),
    url(
        r"^change/card/$",
        login_required(ChangeCardView.as_view()),
        name="payments_change_card"
    ),
    url(
        r"^change/plan/$",
        login_required(ChangePlanView.as_view()),
        name="payments_change_plan"
    ),
    url(
        r"^cancel/$",
        login_required(CancelView.as_view()),
        name="payments_cancel"
    ),
    url(
        r"^history/$",
        login_required(HistoryView.as_view()),
        name="payments_history"
    ),
)
|
Use imported views instead of lazy import
|
Use imported views instead of lazy import
|
Python
|
mit
|
pinax/django-stripe-payments
|
d85a288cbacf6bc31b1d544dd269d392aed4a1ec
|
openquake/hazardlib/general.py
|
openquake/hazardlib/general.py
|
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2014, GEM Foundation.
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
import os
import subprocess
def git_suffix(fname):
    """
    Return ``-git<short hash>`` for the Git repository containing *fname*,
    or the empty string when no hash can be determined.

    :param fname: path to a file inside the repository of interest; ``git``
                  is invoked with its directory as the working directory
    :returns: ``"-git" + <short hash>`` on success, ``''`` otherwise
    """
    try:
        # Discard git's stderr so a missing repository or missing git
        # binary does not pollute the console.  ``with`` closes the
        # devnull handle even if check_output raises.
        with open(os.devnull, 'w') as devnull:
            gh = subprocess.check_output(
                ['git', 'rev-parse', '--short', 'HEAD'],
                stderr=devnull,
                cwd=os.path.dirname(fname)).strip()
        # check_output returns bytes on Python 3; decode before
        # concatenating, otherwise "-git" + gh would raise TypeError.
        if not isinstance(gh, str):
            gh = gh.decode('utf-8')
        return "-git" + gh if gh else ''
    except Exception:
        # Deliberately broad: git may be missing, the directory may not
        # exist or may not be a repository.  Narrowed from a bare
        # ``except:`` so KeyboardInterrupt/SystemExit still propagate.
        return ''
|
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2014, GEM Foundation.
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
import os
import subprocess
def git_suffix(fname):
    """
    Return ``-git<short hash>`` for the Git repository containing *fname*,
    or the empty string when no hash can be determined.

    :param fname: path to a file inside the repository of interest; ``git``
                  is invoked with its directory as the working directory
    :returns: ``"-git" + <short hash>`` on success, ``''`` otherwise
    """
    try:
        # Use a context manager so the devnull handle is always closed
        # (the previous bare ``open(os.devnull, 'w')`` leaked it).
        with open(os.devnull, 'w') as devnull:
            gh = subprocess.check_output(
                ['git', 'rev-parse', '--short', 'HEAD'],
                stderr=devnull,
                cwd=os.path.dirname(fname)).strip()
        # check_output returns bytes on Python 3; decode before
        # concatenating, otherwise "-git" + gh would raise TypeError.
        if not isinstance(gh, str):
            gh = gh.decode('utf-8')
        return "-git" + gh if gh else ''
    except Exception:
        # Deliberately broad: git may be missing, the directory may not
        # exist or may not be a repository.  Narrowed from a bare
        # ``except:`` so KeyboardInterrupt/SystemExit still propagate.
        return ''
|
Add stderr redirect to git_suffix to get more clean messages
|
Add stderr redirect to git_suffix to get more clean messages
|
Python
|
agpl-3.0
|
larsbutler/oq-hazardlib,rcgee/oq-hazardlib,gem/oq-engine,silviacanessa/oq-hazardlib,rcgee/oq-hazardlib,gem/oq-hazardlib,gem/oq-engine,g-weatherill/oq-hazardlib,larsbutler/oq-hazardlib,silviacanessa/oq-hazardlib,silviacanessa/oq-hazardlib,silviacanessa/oq-hazardlib,vup1120/oq-hazardlib,g-weatherill/oq-hazardlib,g-weatherill/oq-hazardlib,gem/oq-engine,gem/oq-hazardlib,vup1120/oq-hazardlib,larsbutler/oq-hazardlib,gem/oq-engine,mmpagani/oq-hazardlib,mmpagani/oq-hazardlib,vup1120/oq-hazardlib,g-weatherill/oq-hazardlib,mmpagani/oq-hazardlib,gem/oq-engine,gem/oq-hazardlib
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.