commit
stringlengths
40
40
old_file
stringlengths
4
236
new_file
stringlengths
4
236
old_contents
stringlengths
1
3.26k
new_contents
stringlengths
16
4.43k
subject
stringlengths
16
624
message
stringlengths
17
3.29k
lang
stringclasses
5 values
license
stringclasses
13 values
repos
stringlengths
5
91.5k
c37500894b309a691009b87b1305935ee57648cb
tests/test_test.py
tests/test_test.py
import pytest from web_test_base import * """ A class to test new features without running all of the tests. Usage: py.test tests/test_test.py -rsx """ class TestTest(WebTestBase): urls_to_get = [ "http://aidtransparency.net/" ] text_to_find = [ ("information", '//*[@id="home-strapline"]/h1') ] def test_locate_text(self, loaded_request, text_to_find): """ Tests that each page contains lthe specified text at the required location. """ result = self._get_text_from_xpath(loaded_request, text_to_find[1]) assert self._substring_in_list(text_to_find[0], result)
import pytest from web_test_base import * """ A class to test new features without running all of the tests. Usage: py.test tests/test_test.py -rsx """ class TestTest(WebTestBase): urls_to_get = [ "http://iatistandard.org/" , "http://iatistandard.org/202/namespaces-extensions/" ] text_to_find = [ ("technical publishing framework", '//*[@id="home-strapline"]/h1') ] def test_locate_text(self, loaded_request, text_to_find): """ Tests that each page contains lthe specified text at the required location. """ result = self._get_text_from_xpath(loaded_request, text_to_find[1]) assert self._substring_in_list(text_to_find[0], result)
Add test text finding that fails
Add test text finding that fails This indicates that a different method of specifying how and where to find text within a document is required.
Python
mit
IATI/IATI-Website-Tests
dd9dfa86fe0f7cb8d95b580ff9ae62753fb19026
gefion/checks/base.py
gefion/checks/base.py
# -*- coding: utf-8 -*- """Base classes.""" import time class Result(object): """Provides results of a Check. Attributes: availability (bool): Availability, usually reflects outcome of a check. runtime (float): Time consumed running the check, in seconds. message (string): Additional explainations for the result. timestamp (int): UTC timestamp of the check. """ def __init__(self, availability, runtime, message, timestamp=time.time()): """Initialise Result. Args: See class attributes. """ self.availability = availability self.runtime = runtime self.message = message self.timestamp = timestamp @property def api_serialised(self): """Return serialisable data for API monitor assignments.""" return {'availability': self.availability, 'runtime': self.runtime, 'message': self.message, 'timestamp': self.timestamp} class Check(object): """Performs checks for availability of resources. This should be inherited by checking implementations. """ def __init__(self, **kwargs): """Initialise Check.""" pass def check(self): """Check if specified resource is availability. Called without arguments. Returns: gefion.checkers.Result """ raise NotImplementedError
# -*- coding: utf-8 -*- """Base classes.""" import time class Result(object): """Provides results of a Check. Attributes: availability (bool): Availability, usually reflects outcome of a check. runtime (float): Time consumed running the check, in seconds. message (string): Additional explainations for the result. timestamp (int): UTC timestamp of the check. """ def __init__(self, availability, runtime, message, timestamp=time.time()): """Initialise Result. Args: See class attributes. """ self.availability = availability self.runtime = runtime self.message = message self.timestamp = timestamp @property def api_serialised(self): """Return serialisable data for API result submissions.""" return {'availability': self.availability, 'runtime': self.runtime, 'message': self.message, 'timestamp': self.timestamp} class Check(object): """Performs checks for availability of resources. This should be inherited by checking implementations. """ def __init__(self, **kwargs): """Initialise Check.""" pass def check(self): """Check if specified resource is availabile. Called without arguments. Returns: gefion.checkers.Result """ raise NotImplementedError
Fix typos in Result and Check docstrings
Fix typos in Result and Check docstrings
Python
bsd-3-clause
dargasea/gefion
2b9d702b6efd922069ceb44540b1ea7118e3f84b
gensysinfo.py
gensysinfo.py
#!/usr/bin/env python3 import psutil import os import time import math blocks = ['▁', 'β–‚', 'β–ƒ', 'β–„', 'β–…', 'β–†', 'β–‡', 'β–ˆ'] def create_bar(filled): if filled > 1: low = str(int(filled)) high = str(int(filled + 1)) filled = filled - int(filled) filled = int(filled * 100) if filled < 50: color = "green" elif filled < 80: color = "yellow" else: color = "red" block = math.floor(filled / (100 / 7) + 0.5) bar = '#[fg=' + color + ']β–•' bar += blocks[block] bar += '▏' if filled >= 100: bar += str(filled) else: bar += "{0:2}%".format(filled) bar += '#[fg=default]' return bar while True: meminfo = psutil.virtual_memory() numcpus = psutil.cpu_count() with open(os.path.expanduser("~/.memblock"), "w") as memblock: memblock.write(create_bar((meminfo.total - meminfo.available) / meminfo.total)) with open(os.path.expanduser("~/.cpuutilblock"), "w") as cpuutilblock: cpuutilblock.write(create_bar(psutil.cpu_percent() / 100)) time.sleep(20)
#!/usr/bin/env python3 import psutil import os import time import math blocks = ['▁', 'β–‚', 'β–ƒ', 'β–„', 'β–…', 'β–†', 'β–‡', 'β–ˆ'] def create_bar(filled): filled = int(filled * 100) if filled < 50: color = "green" elif filled < 80: color = "yellow" else: color = "red" bar = '#[fg=' + color + ']β–•' if filled < 100: block = math.floor(filled / (100 / 7) + 0.5) bar += blocks[block] else: bar += blocks[7] bar += '▏' if filled >= 100: bar += str(filled) else: bar += "{0:2}%".format(filled) bar += '#[fg=default]' return bar while True: meminfo = psutil.virtual_memory() numcpus = psutil.cpu_count() with open(os.path.expanduser("~/.memblock"), "w") as memblock: memblock.write(create_bar((meminfo.total - meminfo.available) / meminfo.total)) with open(os.path.expanduser("~/.cpuutilblock"), "w") as cpuutilblock: cpuutilblock.write(create_bar(psutil.cpu_percent() / 100)) time.sleep(20)
Allow over 100 again for when load becomes available
Allow over 100 again for when load becomes available
Python
mit
wilfriedvanasten/miscvar,wilfriedvanasten/miscvar,wilfriedvanasten/miscvar
93380d1574438f4e70145e0bbcde4c3331ef5fd3
massa/domain.py
massa/domain.py
# -*- coding: utf-8 -*- from sqlalchemy import ( Column, Date, Integer, MetaData, Numeric, String, Table, ) def define_tables(metadata): Table('measurement', metadata, Column('id', Integer, primary_key=True), Column('weight', Numeric(4, 1), nullable=False), Column('code', String(25), nullable=False), Column('note', String(140), nullable=True), Column('date_measured', Date(), nullable=False), ) class Db(object): def __init__(self, engine): self._meta = MetaData(engine) define_tables(self._meta) def make_tables(self): self._meta.create_all() def drop_tables(self): self._meta.drop_all() @property def measurement(self): return self._meta.tables['measurement'] class MeasurementService(object): def __init__(self, table): self._table = table def create(self, **kwargs): i = self._table.insert() i.execute(**kwargs)
# -*- coding: utf-8 -*- from sqlalchemy import ( Column, Date, Integer, MetaData, Numeric, String, Table, ) def define_tables(metadata): Table('measurement', metadata, Column('id', Integer, primary_key=True), Column('weight', Numeric(4, 1), nullable=False), Column('code', String(25), nullable=False), Column('note', String(140), nullable=True), Column('date_measured', Date(), nullable=False), ) class Db(object): def __init__(self, engine): self._meta = MetaData(engine) define_tables(self._meta) def make_tables(self): self._meta.create_all() def drop_tables(self): self._meta.drop_all() @property def measurement(self): return self._meta.tables['measurement'] class MeasurementService(object): def __init__(self, table): self._table = table def find_all(self): s = self._table.select() return s.execute() def create(self, **kwargs): i = self._table.insert() i.execute(**kwargs)
Add a method to find all measurements.
Add a method to find all measurements.
Python
mit
jaapverloop/massa
511abf77f16a7a92dde93a9f1318967b1d237635
go_doc_get.py
go_doc_get.py
import sublime import sublime_plugin import webbrowser def cleanPackage(pkgURI): pkg = pkgURI.split('.com/')[1] return pkg class GoDocGetCommand(sublime_plugin.TextCommand): def run(self, edit): view = self.view for region in view.sel(): selected = view.substr(region) if "github.corp" in selected: # if corporate go to page pkg = cleanPackage(selected) webbrowser.open('https://github.corp.dyndns.com/' + pkg) elif "github" in selected: # if public package go to doc pkg = cleanPackage(selected) webbrowser.open('https://godoc.org/github.com/' + pkg) else: # default to golang proper webbrowser.open('https://golang.org/pkg/' + selected)
import sublime import sublime_plugin import webbrowser def cleanPackage(pkgURI): pkg = pkgURI.split('.com/')[1] return pkg class GoDocGetCommand(sublime_plugin.TextCommand): def run(self, edit): view = self.view for region in view.sel(): selected = view.substr(region) if "github.corp" in selected: # if corporate go to page on master branch pkg = cleanPackage(selected) res = pkg.split('/') res.insert(2, 'tree/master') pkg = '/'.join(res) webbrowser.open('https://github.corp.dyndns.com/' + pkg) elif "github" in selected: # if public package go to doc pkg = cleanPackage(selected) webbrowser.open('https://godoc.org/github.com/' + pkg) else: # default to golang proper webbrowser.open('https://golang.org/pkg/' + selected)
Set specific branch to go to in GitHub
Set specific branch to go to in GitHub
Python
mit
lowellmower/go_doc_get
078a4d36c1dc088937b242ca63b88b4c03f33fa0
isitup/main.py
isitup/main.py
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals import requests def check(url): try: response = requests.get( "https://isitup.org/{0}.json".format(url), headers={'User-Agent': 'https://github.com/lord63/isitup'}) except requests.exceptions.ConnectionError: return ("A network problem(e.g. you're offline; refused connection)," "can't check the site right now.") except requests.exceptions.Timeout: return "The request timed out." status_code = response.json()["status_code"] if status_code == 1: return ("Yay, {0} is up.\nIt took {1[response_time]} ms " "for a {1[response_code]} response code with " "an ip of {1[response_ip]}".format(url, response.json())) if status_code == 2: return "{0} seems to be down!".format(url) if status_code == 3: return "We need a valid domain to check! Try again."
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals import requests def check(url): try: response = requests.get( "https://isitup.org/{0}.json".format(url), headers={'User-Agent': 'https://github.com/lord63/isitup'}) except requests.exceptions.ConnectionError: return ("A network problem(e.g. you're offline; refused connection)," "can't check the site right now.") except requests.exceptions.Timeout: return "The request timed out." except requests.exceptions.RequestException as error: return "Something bad happened:\n{0}".format(error) status_code = response.json()["status_code"] if status_code == 1: return ("Yay, {0} is up.\nIt took {1[response_time]} ms " "for a {1[response_code]} response code with " "an ip of {1[response_ip]}".format(url, response.json())) if status_code == 2: return "{0} seems to be down!".format(url) if status_code == 3: return "We need a valid domain to check! Try again."
Make sure handle all the exceptions
Make sure handle all the exceptions
Python
mit
lord63/isitup
a65eaeaef60492bfc6319fb9c810155d62c1a3b3
luigi/tasks/export/ftp/go_annotations.py
luigi/tasks/export/ftp/go_annotations.py
# -*- coding: utf-8 -*- """ Copyright [2009-2017] EMBL-European Bioinformatics Institute Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import luigi from tasks.config import db from tasks.config import export from tasks.utils.files import atomic_output class GoAnnotation(luigi.Task): def output(self): return luigi.LocalTarget(export().go('rnacentral_annotations.tsv')) def run(self): with atomic_output(self.output()) as out: export(db(), out)
# -*- coding: utf-8 -*- """ Copyright [2009-2017] EMBL-European Bioinformatics Institute Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import luigi from tasks.config import db from tasks.config import export from rnacentral.export.ftp import go_terms from tasks.utils.files import atomic_output class GoAnnotationExport(luigi.Task): def output(self): return luigi.LocalTarget(export().go('rnacentral_annotations.tsv')) def run(self): with atomic_output(self.output()) as out: go_terms.export(db(), out)
Update name and call correct export
Update name and call correct export This now calls the correct export function. Additionally, the class name is changed to reflect it does export.
Python
apache-2.0
RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline
d3cea746432b1bfd1b5f2d38972c1b761b96e8eb
fetchroots.py
fetchroots.py
import os import base64 from requests import Session, Request from OpenSSL import crypto #url = 'http://ct.googleapis.com/aviator/ct/v1/get-roots' url = 'https://ct.api.venafi.com/ct/v1/get-roots' s = Session() r = Request('GET', url) prepped = r.prepare() r = s.send(prepped) if r.status_code == 200: roots = r.json() # RFC 6962 defines the certificate objects as base64 encoded certs. # Importantly, these are not PEM formatted certs but base64 encoded # ASN.1 (DER) encoded for i in roots: certs = roots[i] for k in certs: try: certobj = crypto.load_certificate(crypto.FILETYPE_ASN1,base64.b64decode(k)) subject = certobj.get_subject() print 'CN={},OU={},O={},L={},S={},C={}'.format(subject.commonName, subject.organizationalUnitName, subject.organizationName, subject.localityName, subject.stateOrProvinceName, subject.countryName) except: print subject.get_components()
import os import base64 from requests import Session, Request from OpenSSL import crypto url = 'http://ct.googleapis.com/aviator/ct/v1/get-roots' s = Session() r = Request('GET', url) prepped = r.prepare() r = s.send(prepped) if r.status_code == 200: roots = r.json() # RFC 6962 defines the certificate objects as base64 encoded certs. # Importantly, these are not PEM formatted certs but base64 encoded # ASN.1 (DER) encoded for i in roots: certs = roots[i] for k in certs: try: certobj = crypto.load_certificate(crypto.FILETYPE_ASN1,base64.b64decode(k)) subject = certobj.get_subject() print 'CN={},OU={},O={},L={},S={},C={}'.format(subject.commonName, subject.organizationalUnitName, subject.organizationName, subject.localityName, subject.stateOrProvinceName, subject.countryName) except: print subject.get_components()
Update to use Google Aviator test log
Update to use Google Aviator test log
Python
apache-2.0
wgoulet/CTPyClient
95b90325b1dfa535fc802ad2a06f15e30010bf3a
fore/hotswap.py
fore/hotswap.py
import os import logging import threading log = logging.getLogger(__name__) class Hotswap(threading.Thread): def __init__(self, out, mod, *args, **kwargs): self.out = out self.gen = mod.generate(*args, **kwargs) threading.Thread.__init__(self) self.daemon = True def run(self): while True: self.out(self.gen.next())
import os import logging import threading log = logging.getLogger(__name__) class Hotswap(threading.Thread): def __init__(self, out, mod, *args, **kwargs): self.out = out self.gen = mod.generate(*args, **kwargs) threading.Thread.__init__(self) self.daemon = True def run(self): while True: self.out(next(self.gen))
Use next(it) instead of it.next()
Hotswap: Use next(it) instead of it.next()
Python
artistic-2.0
MikeiLL/appension,MikeiLL/appension,Rosuav/appension,MikeiLL/appension,Rosuav/appension,MikeiLL/appension,Rosuav/appension,Rosuav/appension
a727161f67edff10bb94785e70add7c42ba99dcc
morepath/tests/test_app.py
morepath/tests/test_app.py
from morepath.app import App, global_app import morepath def setup_module(module): morepath.disable_implicit() def test_global_app(): assert global_app.extends == [] assert global_app.name == 'global_app' def test_app_without_extends(): myapp = App() assert myapp.extends == [global_app] assert myapp.name == '' def test_app_with_extends(): parentapp = App() myapp = App('myapp', extends=parentapp) assert myapp.extends == [parentapp] assert myapp.name == 'myapp' def test_app_caching_lookup(): class MockClassLookup(object): called = 0 def all(self, key, classes): self.called += 1 return ["answer"] class MockApp(MockClassLookup, App): pass myapp = MockApp() lookup = myapp.lookup answer = lookup.component('foo', []) assert answer == 'answer' assert myapp.called == 1 # after this the answer will be cached for those parameters answer = lookup.component('foo', []) assert myapp.called == 1 answer = myapp.lookup.component('foo', []) assert myapp.called == 1 # but different parameters does trigger another call lookup.component('bar', []) assert myapp.called == 2
from morepath.app import App, global_app import morepath def setup_module(module): morepath.disable_implicit() def test_global_app(): assert global_app.extends == [] assert global_app.name == 'global_app' def test_app_without_extends(): myapp = App() assert myapp.extends == [global_app] assert myapp.name == '' def test_app_with_extends(): parentapp = App() myapp = App('myapp', extends=parentapp) assert myapp.extends == [parentapp] assert myapp.name == 'myapp' def test_app_caching_lookup(): class MockClassLookup(object): called = 0 def all(self, key, classes): self.called += 1 return ["answer"] class MockApp(MockClassLookup, App): pass myapp = MockApp() lookup = myapp.lookup answer = lookup.component('foo', []) assert answer == 'answer' assert myapp.called == 1 # after this the answer will be cached for those parameters answer = lookup.component('foo', []) assert myapp.called == 1 answer = myapp.lookup.component('foo', []) assert myapp.called == 1 # but different parameters does trigger another call lookup.component('bar', []) assert myapp.called == 2 def test_app_name(): app = morepath.App(name='foo') assert repr(app) == "<morepath.App 'foo'>"
Add coverage of __repr__ of app.
Add coverage of __repr__ of app.
Python
bsd-3-clause
morepath/morepath,faassen/morepath,taschini/morepath
7e00b8a4436ee4bdad4d248a29985b1cef741a53
nimbus/apps/media/utils.py
nimbus/apps/media/utils.py
def bsd_rand(seed): return (1103515245 * seed + 12345) & 0x7fffffff def baseconv(v1, a1, a2): n1 = {c: i for i, c in dict(enumerate(a1)).items()} b1 = len(a1) b2 = len(a2) d1 = 0 for i, c in enumerate(v1): d1 += n1[c] * pow(b1, b1 - i - 1) v2 = "" while d1: v2 = a2[d1 % b2] + v2 d1 //= b2 return v2 def url_hash_from_pk(pk): b10 = "0123456789" b62 = "abcdefghijklmnopqrstuvwxyz0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ" return baseconv(str(bsd_rand(pk)), b10, b62)
from nimbus.settings import SECRET_KEY import hashlib def baseconv(v1, a1, a2): n1 = {c: i for i, c in enumerate(a1)} b1 = len(a1) b2 = len(a2) d1 = 0 for i, c in enumerate(v1): d1 += n1[c] * pow(b1, len(v1) - i - 1) v2 = "" while d1: v2 = a2[d1 % b2] + v2 d1 //= b2 return v2 m = hashlib.md5() m.update(SECRET_KEY) c = int(baseconv(m.hexdigest(), "0123456789abcdef", "0123456789")) c = c - (c % 2) + 1 def lcg(seed): return (1103515245 * seed + c) & 0x7fffffff def url_hash_from_pk(pk): b10 = "0123456789" b62 = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" return baseconv(str(lcg(pk)), b10, b62)
Patch bug and security vulnerability
Patch bug and security vulnerability
Python
mit
ethanal/Nimbus,ethanal/Nimbus,ethanal/Nimbus,ethanal/Nimbus
8a4a8cc351ae7fecd53932d0fb6ca0a7f9a83fbc
falcom/api/test/test_uris.py
falcom/api/test/test_uris.py
# Copyright (c) 2017 The Regents of the University of Michigan. # All Rights Reserved. Licensed according to the terms of the Revised # BSD License. See LICENSE.txt for details. from hamcrest import * import unittest from .hamcrest import ComposedAssertion from ..uri import URI # There are three URIs that I need to use: # # http://catalog.hathitrust.org/api/volumes/brief/oclc/[OCLC].json # http://mirlyn-aleph.lib.umich.edu/cgi-bin/bc2meta?id=[BARCODE]&type=bc&schema=marcxml # http://www.worldcat.org/webservices/catalog/content/libraries/[OCLC]?wskey=[WC_KEY]&format=json&maximumLibraries=50 class URITest (unittest.TestCase): def test_null_uri_yields_empty_string (self): uri = URI(None) assert_that(uri(), is_(equal_to(""))) def test_empty_uri_yields_empty_string (self): uri = URI("") assert_that(uri(), is_(equal_to(""))) def test_simple_uri_yields_itself (self): uri = URI("hello") assert_that(uri(), is_(equal_to("hello")))
# Copyright (c) 2017 The Regents of the University of Michigan. # All Rights Reserved. Licensed according to the terms of the Revised # BSD License. See LICENSE.txt for details. from hamcrest import * import unittest from .hamcrest import ComposedAssertion from ..uri import URI # There are three URIs that I need to use: # # http://catalog.hathitrust.org/api/volumes/brief/oclc/[OCLC].json # http://mirlyn-aleph.lib.umich.edu/cgi-bin/bc2meta?id=[BARCODE]&type=bc&schema=marcxml # http://www.worldcat.org/webservices/catalog/content/libraries/[OCLC]?wskey=[WC_KEY]&format=json&maximumLibraries=50 class URITest (unittest.TestCase): def test_null_uri_yields_empty_string (self): uri = URI(None) assert_that(uri(), is_(equal_to(""))) def test_simple_uri_yields_itself (self): uri = URI("hello") assert_that(uri(), is_(equal_to("hello"))) class GivenEmptyStrURI (unittest.TestCase): def setUp (self): self.uri = URI("") def test_when_called_without_args_yields_empty_str (self): assert_that(self.uri(), is_(equal_to("")))
Refactor a test into its own "given" test class
Refactor a test into its own "given" test class
Python
bsd-3-clause
mlibrary/image-conversion-and-validation,mlibrary/image-conversion-and-validation
c84aef2acef68d5feadb23aa045d9aa6e2f8512d
tests/app/dao/test_fees_dao.py
tests/app/dao/test_fees_dao.py
from app.dao.fees_dao import dao_update_fee, dao_get_fees, dao_get_fee_by_id from app.models import Fee from tests.db import create_fee class WhenUsingFeesDAO(object): def it_creates_a_fee(self, db_session): fee = create_fee() assert Fee.query.count() == 1 fee_from_db = Fee.query.filter(Fee.id == fee.id).first() assert fee == fee_from_db def it_updates_a_fee_dao(self, db, db_session, sample_fee): dao_update_fee(sample_fee.id, fee=10) fee_from_db = Fee.query.filter(Fee.id == sample_fee.id).first() assert sample_fee.fee == fee_from_db.fee def it_gets_all_fees(self, db, db_session, sample_fee): fees = [create_fee(fee=100, conc_fee=80), sample_fee] fees_from_db = dao_get_fees() assert Fee.query.count() == 2 assert set(fees) == set(fees_from_db) def it_gets_a_fee_by_id(self, db, db_session, sample_fee): fee = create_fee(fee=100, conc_fee=80) fetched_fee = dao_get_fee_by_id(fee.id) assert fetched_fee == fee
from app.dao.fees_dao import dao_update_fee, dao_get_fees, dao_get_fee_by_id from app.models import Fee from tests.db import create_fee class WhenUsingFeesDAO(object): def it_creates_a_fee(self, db_session): fee = create_fee() assert Fee.query.count() == 1 fee_from_db = Fee.query.filter(Fee.id == fee.id).first() assert fee == fee_from_db def it_updates_a_fee_dao(self, db, db_session, sample_fee): dao_update_fee(sample_fee.id, fee=10) fee_from_db = Fee.query.filter(Fee.id == sample_fee.id).first() assert fee_from_db.fee == 10 def it_gets_all_fees(self, db, db_session, sample_fee): fees = [create_fee(fee=100, conc_fee=80), sample_fee] fees_from_db = dao_get_fees() assert Fee.query.count() == 2 assert set(fees) == set(fees_from_db) def it_gets_a_fee_by_id(self, db, db_session, sample_fee): fee = create_fee(fee=100, conc_fee=80) fetched_fee = dao_get_fee_by_id(fee.id) assert fetched_fee == fee
Make fees dao test clearer
Make fees dao test clearer
Python
mit
NewAcropolis/api,NewAcropolis/api,NewAcropolis/api
5eb8297b6da0b0cfd885975d5b9993a07acca426
importlib_metadata/__init__.py
importlib_metadata/__init__.py
import os import sys import glob import email import itertools import contextlib class Distribution: def __init__(self, path): """ Construct a distribution from a path to the metadata dir """ self.path = path @classmethod def for_name(cls, name, path=sys.path): for path_item in path: glob_specs = ( os.path.join(path_item, f'{name}-*.*-info'), os.path.join(path_item, f'{name}.*-info'), ) globs = itertools.chain.from_iterable(map(glob.iglob, glob_specs)) match = next(globs) return cls(os.path.join(path_item, match)) @classmethod def for_module(cls, mod): return cls.for_name(cls.name_for_module(mod)) @staticmethod def name_for_module(mod): return getattr(mod, '__dist_name__', mod.__name__) @property def metadata(self): return email.message_from_string( self.load_metadata('METADATA') or self.load_metadata('PKG-INFO') ) def load_metadata(self, name): fn = os.path.join(self.path, name) with contextlib.suppress(FileNotFoundError): with open(fn, encoding='utf-8') as strm: return strm.read() @property def version(self): return self.metadata['Version']
import os import sys import glob import email import itertools import contextlib class Distribution: def __init__(self, path): """ Construct a distribution from a path to the metadata dir """ self.path = path @classmethod def for_name(cls, name, path=sys.path): glob_groups = map(glob.iglob, cls._search_globs(name, path)) globs = itertools.chain.from_iterable(glob_groups) return cls(next(globs)) @staticmethod def _search_globs(name, path): """ Generate search globs for locating distribution metadata in path """ for path_item in path: yield os.path.join(path_item, f'{name}-*.*-info') # in develop install, no version is present yield os.path.join(path_item, f'{name}.*-info') @classmethod def for_module(cls, mod): return cls.for_name(cls.name_for_module(mod)) @staticmethod def name_for_module(mod): return getattr(mod, '__dist_name__', mod.__name__) @property def metadata(self): return email.message_from_string( self.load_metadata('METADATA') or self.load_metadata('PKG-INFO') ) def load_metadata(self, name): fn = os.path.join(self.path, name) with contextlib.suppress(FileNotFoundError): with open(fn, encoding='utf-8') as strm: return strm.read() @property def version(self): return self.metadata['Version']
Fix logic in path search.
Fix logic in path search.
Python
apache-2.0
python/importlib_metadata
694575e2707bdf7a2e042e2dd443a46481bc9d39
source/segue/__init__.py
source/segue/__init__.py
# :coding: utf-8 # :copyright: Copyright (c) 2013 Martin Pengelly-Phillips # :license: See LICENSE.txt.
# :coding: utf-8 # :copyright: Copyright (c) 2013 Martin Pengelly-Phillips # :license: See LICENSE.txt. import os import imp import uuid def discover_processors(paths=None, options=None): '''Return processor plugins discovered on *paths*. If *paths* is None will try to use environment variable :envvar:`SEGUE_PROCESSOR_PLUGIN_PATH` Each discovered plugin should have a register function that can be called to return a processor instance. The register function should accept arbitrary keyword arguments. *options* will be passed to the register functions as keyword arguments. ''' processors = [] if paths is None: plugin_path = os.environ.get('SEGUE_PROCESSOR_PLUGIN_PATH') if plugin_path: paths = plugin_path.split(os.pathsep) else: paths = [] if options is None: options = {} for path in paths: for base, directories, filenames in os.walk(path): for filename in filenames: name, extension = os.path.splitext(filename) if extension != '.py': continue module_path = os.path.join(base, filename) module_name = uuid.uuid4().hex module = imp.load_source(module_name, module_path) processor = module.register(**options) return processors
Add helper function for discovering processor plugins.
Add helper function for discovering processor plugins.
Python
apache-2.0
4degrees/segue
8754f8b73b140fa597de1f70a0cf636d198fadb2
extension_course/tests/conftest.py
extension_course/tests/conftest.py
from events.tests.conftest import (administrative_division, administrative_division_type, data_source, event, # noqa location_id, minimal_event_dict, municipality, organization, place, user, user_api_client, django_db_modify_db_settings, django_db_setup)
from events.tests.conftest import (administrative_division, administrative_division_type, data_source, event, # noqa location_id, minimal_event_dict, municipality, organization, place, user, user_api_client, django_db_modify_db_settings, django_db_setup, make_minimal_event_dict, make_keyword_id, make_keyword)
Add new make_* fixtures to extension_course tests
Add new make_* fixtures to extension_course tests
Python
mit
City-of-Helsinki/linkedevents,City-of-Helsinki/linkedevents,City-of-Helsinki/linkedevents
e0d909e25fbf47ebad35756032c9230fe3d3bdaa
example/example/tasksapp/run_tasks.py
example/example/tasksapp/run_tasks.py
import time from dj_experiment.tasks.tasks import longtime_add, netcdf_save if __name__ == '__main__': result = longtime_add.delay(1, 2) # at this time, our task is not finished, so it will return False print 'Task finished? ', result.ready() print 'Task result: ', result.result # sleep 10 seconds to ensure the task has been finished time.sleep(10) # now the task should be finished and ready method will return True print 'Task finished? ', result.ready() print 'Task result: ', result.result result1 = netcdf_save.delay(14, '') print 'Task netcdf finished? ', result1.ready() print 'Task result1: ', result1.result time.sleep(10) print 'Task netcdf finished? ', result1.ready() print 'Task result1: ', result1.result
import os import time from dj_experiment.tasks.tasks import longtime_add, netcdf_save from example.settings import (DJ_EXPERIMENT_BASE_DATA_DIR, DJ_EXPERIMENT_DATA_DIR) if __name__ == '__main__': result = longtime_add.delay(1, 2) # at this time, our task is not finished, so it will return False print 'Task finished? ', result.ready() print 'Task result: ', result.result # sleep 10 seconds to ensure the task has been finished time.sleep(10) # now the task should be finished and ready method will return True print 'Task finished? ', result.ready() print 'Task result: ', result.result rcmdatadir = os.path.join(DJ_EXPERIMENT_BASE_DATA_DIR, DJ_EXPERIMENT_DATA_DIR) result1 = netcdf_save.delay(14, rcmdatadir) print 'Task netcdf finished? ', result1.ready() print 'Task result1: ', result1.result time.sleep(10) print 'Task netcdf finished? ', result1.ready() print 'Task result1: ', result1.result
Fix parameters in task call
Fix parameters in task call
Python
mit
francbartoli/dj-experiment,francbartoli/dj-experiment
21e15235b2cd767e0da56a2a0d224824fda58c42
Tools/idle/ZoomHeight.py
Tools/idle/ZoomHeight.py
# Sample extension: zoom a window to maximum height import re import sys class ZoomHeight: menudefs = [ ('windows', [ ('_Zoom Height', '<<zoom-height>>'), ]) ] windows_keydefs = { '<<zoom-height>>': ['<Alt-F2>'], } unix_keydefs = { '<<zoom-height>>': ['<Control-x><Control-z>'], } def __init__(self, editwin): self.editwin = editwin def zoom_height_event(self, event): top = self.editwin.top geom = top.wm_geometry() m = re.match(r"(\d+)x(\d+)\+(-?\d+)\+(-?\d+)", geom) if not m: top.bell() return width, height, x, y = map(int, m.groups()) newheight = top.winfo_screenheight() if sys.platform == 'win32': newy = 0 newheight = newheight - 72 else: newy = 24 newheight = newheight - 96 if height >= newheight: newgeom = "" else: newgeom = "%dx%d+%d+%d" % (width, newheight, x, newy) top.wm_geometry(newgeom)
# Sample extension: zoom a window to maximum height import re import sys class ZoomHeight: menudefs = [ ('windows', [ ('_Zoom Height', '<<zoom-height>>'), ]) ] windows_keydefs = { '<<zoom-height>>': ['<Alt-F2>'], } unix_keydefs = { '<<zoom-height>>': ['<Control-x><Control-z>'], } def __init__(self, editwin): self.editwin = editwin def zoom_height_event(self, event): top = self.editwin.top zoom_height(top) def zoom_height(top): geom = top.wm_geometry() m = re.match(r"(\d+)x(\d+)\+(-?\d+)\+(-?\d+)", geom) if not m: top.bell() return width, height, x, y = map(int, m.groups()) newheight = top.winfo_screenheight() if sys.platform == 'win32': newy = 0 newheight = newheight - 72 else: newy = 24 newheight = newheight - 96 if height >= newheight: newgeom = "" else: newgeom = "%dx%d+%d+%d" % (width, newheight, x, newy) top.wm_geometry(newgeom)
Move zoom height functionality to separate function.
Move zoom height functionality to separate function.
Python
mit
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
280c81a3990116f66de9af8e6fd6e71d0215a386
client.py
client.py
#!/usr/bin/env python from configReader import ConfigReader import sys import os, os.path import os.path from time import time from math import floor import hashlib import random import requests f = open('adjectives.txt','r') adjectives = [line.rstrip() for line in f] f.close() configReader = ConfigReader(name="clientConfig.txt") path = sys.argv[1] timeHash = hashlib.md5(str(time())).hexdigest()[0:6] adjective = random.choice(adjectives) keys=configReader.getKeys() endpoint=keys['endpoint'] username=keys['username'] password=keys['password'] finalLocation=keys['finalLocation'] urlPath = adjective+timeHash+".png" print "Uploading",path,"as",urlPath,"to",endpoint r = requests.post(endpoint,params={'name':urlPath},files={'file':open(path,'rb')}) print r.status_code if r.status_code==200: print os.path.join(finalLocation,urlPath) os.system("echo '"+os.path.join(finalLocation,urlPath)+"'|pbcopy")
#!/usr/bin/env python from configReader import ConfigReader import sys import os, os.path import os.path from time import time from math import floor import hashlib import random import requests f = open('adjectives.txt','r') adjectives = [line.rstrip() for line in f] f.close() configReader = ConfigReader(name="clientConfig.txt") path = sys.argv[1] timeHash = hashlib.md5(str(time())).hexdigest()[0:6] adjective = random.choice(adjectives) keys=configReader.getKeys() endpoint=keys['endpoint'] username=keys['username'] password=keys['password'] finalLocation=keys['finalLocation'] urlPath = adjective+timeHash+".png" print "Uploading",path,"as",urlPath,"to",endpoint r = requests.post(endpoint,auth=(username,password),params={'name':urlPath},files={'file':open(path,'rb')}) print r.status_code if r.status_code==200: print os.path.join(finalLocation,urlPath) os.system("echo '"+os.path.join(finalLocation,urlPath)+"'|pbcopy")
Add authentication to the serverside
Add authentication to the serverside
Python
mit
ollien/Screenshot-Uploader,ollien/Screenshot-Uploader
a68f3ea83c191478f6a7b0dc6a4b49ff6c297ae2
imports.py
imports.py
#!/usr/bin/env python # vim: set fileencoding=utf-8 : from flask import flash from old_xml_import import old_xml_import from sml_import import sml_import import gzip from model import db, Sample from sqlalchemy.sql import func def move_import(xmlfile, filename, user): if filename.endswith('.gz'): xmlfile = gzip.GzipFile(fileobj=xmlfile, mode='rb', filename=filename) filename = filename[:-len('.gz')] if filename.endswith('.xml'): move = old_xml_import(xmlfile, user) elif filename.endswith('.sml'): move = sml_import(xmlfile, user) else: flash("unknown fileformat: '%s'" % xmlfile.filename, 'error') if move: move.temperature_avg, = db.session.query(func.avg(Sample.temperature)).filter(Sample.move == move, Sample.temperature > 0).one() stroke_count = 0 for events, in db.session.query(Sample.events).filter(Sample.move == move, Sample.events != None): if 'swimming' in events and events['swimming']['type'] == 'Stroke': stroke_count += 1 if 'swimming' in move.activity: assert stroke_count > 0 if stroke_count > 0: move.stroke_count = stroke_count db.session.commit() return move
#!/usr/bin/env python # vim: set fileencoding=utf-8 : from flask import flash from old_xml_import import old_xml_import from sml_import import sml_import import gzip from model import db, Sample from sqlalchemy.sql import func def move_import(xmlfile, filename, user): move = None if filename.endswith('.gz'): xmlfile = gzip.GzipFile(fileobj=xmlfile, mode='rb', filename=filename) filename = filename[:-len('.gz')] if filename.endswith('.xml'): move = old_xml_import(xmlfile, user) elif filename.endswith('.sml'): move = sml_import(xmlfile, user) else: flash("unknown fileformat: '%s'" % xmlfile.filename, 'error') if move: move.temperature_avg, = db.session.query(func.avg(Sample.temperature)).filter(Sample.move == move, Sample.temperature > 0).one() stroke_count = 0 for events, in db.session.query(Sample.events).filter(Sample.move == move, Sample.events != None): if 'swimming' in events and events['swimming']['type'] == 'Stroke': stroke_count += 1 if 'swimming' in move.activity: assert stroke_count > 0 if stroke_count > 0: move.stroke_count = stroke_count db.session.commit() return move
Fix exception 'local variable 'move' referenced before assignment' in case of upload of unknown file formats
Fix exception 'local variable 'move' referenced before assignment' in case of upload of unknown file formats
Python
mit
bwaldvogel/openmoves,marguslt/openmoves,marguslt/openmoves,bwaldvogel/openmoves,mourningsun75/openmoves,mourningsun75/openmoves,mourningsun75/openmoves,marguslt/openmoves,bwaldvogel/openmoves
3d86b4473f66a9311a94b1def4c40189eae23990
lancet/git.py
lancet/git.py
import sys import click from slugify import slugify class SlugBranchGetter(object): def __init__(self, base_branch='master'): self.base_branch = base_branch def __call__(self, repo, issue): discriminator = 'features/{}'.format(issue.key) slug = slugify(issue.fields.summary[:30]) full_name = '{}_{}'.format(discriminator, slug) branches = [b for b in repo.listall_branches() if b.startswith(discriminator)] if len(branches) > 1: click.secho('Multiple matching branches found!', fg='red', bold=True) click.echo() click.echo('The prefix {} matched the following branches:' .format(discriminator)) click.echo() for b in branches: click.echo(' {} {}'.format(click.style('*', fg='red'), b)) click.echo() click.echo('Please remove all but one in order to continue.') sys.exit(1) elif branches: branch = repo.lookup_branch(branches[0]) if branch.branch_name != full_name: branch.rename(full_name) branch = repo.lookup_branch(full_name) else: base = repo.lookup_branch(self.base_branch) if not base: click.secho('Base branch not found: "{}", aborting.' .format(self.base_branch), fg='red', bold=True) sys.exit(1) branch = repo.create_branch(full_name, base.get_object()) return branch
import sys import click from slugify import slugify class SlugBranchGetter(object): prefix = 'feature/' def __init__(self, base_branch='master'): self.base_branch = base_branch def __call__(self, repo, issue): discriminator = '{}{}'.format(self.prefix, issue.key) slug = slugify(issue.fields.summary[:30]) full_name = '{}_{}'.format(discriminator, slug) branches = [b for b in repo.listall_branches() if b.startswith(discriminator)] if len(branches) > 1: click.secho('Multiple matching branches found!', fg='red', bold=True) click.echo() click.echo('The prefix {} matched the following branches:' .format(discriminator)) click.echo() for b in branches: click.echo(' {} {}'.format(click.style('*', fg='red'), b)) click.echo() click.echo('Please remove all but one in order to continue.') sys.exit(1) elif branches: branch = repo.lookup_branch(branches[0]) if branch.branch_name != full_name: branch.rename(full_name) branch = repo.lookup_branch(full_name) else: base = repo.lookup_branch(self.base_branch) if not base: click.secho('Base branch not found: "{}", aborting.' .format(self.base_branch), fg='red', bold=True) sys.exit(1) branch = repo.create_branch(full_name, base.get_object()) return branch
Change the prefix from features/ to feature/.
Change the prefix from features/ to feature/.
Python
mit
GaretJax/lancet,GaretJax/lancet
8816d06381625938137d9fbf8aaee3d9ddabae72
src/sentry/api/endpoints/organization_projects.py
src/sentry/api/endpoints/organization_projects.py
from __future__ import absolute_import from rest_framework.response import Response from sentry.api.base import DocSection from sentry.api.bases.organization import OrganizationEndpoint from sentry.api.serializers import serialize from sentry.models import Project, Team class OrganizationProjectsEndpoint(OrganizationEndpoint): doc_section = DocSection.ORGANIZATIONS def get(self, request, organization): """ List an organization's projects Return a list of projects bound to a organization. {method} {path} """ team_list = Team.objects.get_for_user( organization=organization, user=request.user, ) project_list = [] for team in team_list: project_list.extend(Project.objects.get_for_user( team=team, user=request.user, )) project_list.sort(key=lambda x: x.name) team_map = dict( (t.id, c) for (t, c) in zip(team_list, serialize(team_list, request.user)), ) context = [] for project, pdata in zip(project_list, serialize(project_list, request.user)): pdata['team'] = team_map[project.team_id] context.append(pdata) return Response(context)
from __future__ import absolute_import from rest_framework.response import Response from sentry.api.base import DocSection from sentry.api.bases.organization import OrganizationEndpoint from sentry.api.serializers import serialize from sentry.models import Project class OrganizationProjectsEndpoint(OrganizationEndpoint): doc_section = DocSection.ORGANIZATIONS def get(self, request, organization): """ List an organization's projects Return a list of projects bound to a organization. {method} {path} """ if request.auth and hasattr(request.auth, 'project'): team_list = [request.auth.project.team] project_list = [request.auth.project] else: team_list = list(request.access.teams) project_list = list(Project.objects.filter( team__in=team_list, ).order_by('name')) team_map = dict( (t.id, c) for (t, c) in zip(team_list, serialize(team_list, request.user)), ) context = [] for project, pdata in zip(project_list, serialize(project_list, request.user)): pdata['team'] = team_map[project.team_id] context.append(pdata) return Response(context)
Support API keys on organization project list (fixes GH-1666)
Support API keys on organization project list (fixes GH-1666)
Python
bsd-3-clause
ifduyue/sentry,ngonzalvez/sentry,jean/sentry,Kryz/sentry,nicholasserra/sentry,daevaorn/sentry,BayanGroup/sentry,mvaled/sentry,1tush/sentry,looker/sentry,mvaled/sentry,ngonzalvez/sentry,mvaled/sentry,zenefits/sentry,beeftornado/sentry,JackDanger/sentry,hongliang5623/sentry,daevaorn/sentry,JamesMura/sentry,gencer/sentry,jean/sentry,BuildingLink/sentry,Kryz/sentry,ewdurbin/sentry,ifduyue/sentry,mitsuhiko/sentry,BayanGroup/sentry,Natim/sentry,wong2/sentry,ifduyue/sentry,BuildingLink/sentry,felixbuenemann/sentry,fuziontech/sentry,1tush/sentry,felixbuenemann/sentry,ngonzalvez/sentry,fuziontech/sentry,ifduyue/sentry,Natim/sentry,ewdurbin/sentry,gencer/sentry,songyi199111/sentry,looker/sentry,wong2/sentry,looker/sentry,BuildingLink/sentry,JamesMura/sentry,zenefits/sentry,kevinlondon/sentry,BuildingLink/sentry,korealerts1/sentry,BuildingLink/sentry,mvaled/sentry,JamesMura/sentry,nicholasserra/sentry,fotinakis/sentry,hongliang5623/sentry,felixbuenemann/sentry,nicholasserra/sentry,ifduyue/sentry,beeftornado/sentry,pauloschilling/sentry,kevinlondon/sentry,songyi199111/sentry,mvaled/sentry,zenefits/sentry,alexm92/sentry,daevaorn/sentry,Natim/sentry,pauloschilling/sentry,korealerts1/sentry,fuziontech/sentry,imankulov/sentry,beeftornado/sentry,JamesMura/sentry,fotinakis/sentry,fotinakis/sentry,mitsuhiko/sentry,JackDanger/sentry,jean/sentry,kevinlondon/sentry,Kryz/sentry,gencer/sentry,mvaled/sentry,1tush/sentry,alexm92/sentry,BayanGroup/sentry,alexm92/sentry,songyi199111/sentry,looker/sentry,pauloschilling/sentry,fotinakis/sentry,jean/sentry,daevaorn/sentry,JamesMura/sentry,zenefits/sentry,jean/sentry,ewdurbin/sentry,imankulov/sentry,hongliang5623/sentry,korealerts1/sentry,gencer/sentry,imankulov/sentry,JackDanger/sentry,gencer/sentry,looker/sentry,zenefits/sentry,wong2/sentry
9616b026894327eb7171f978f3856cdae7c9e06b
child_sync_typo3/wizard/delegate_child_wizard.py
child_sync_typo3/wizard/delegate_child_wizard.py
# -*- encoding: utf-8 -*- ############################################################################## # # Copyright (C) 2014 Compassion CH (http://www.compassion.ch) # Releasing children from poverty in Jesus' name # @author: David Coninckx <david@coninckx.com> # # The licence is in the file __openerp__.py # ############################################################################## from openerp.osv import orm from ..model.sync_typo3 import Sync_typo3 class delegate_child_wizard(orm.TransientModel): _inherit = 'delegate.child.wizard' def delegate(self, cr, uid, ids, context=None): child_ids = self._default_child_ids(cr, uid, context) child_obj = self.pool.get('compassion.child') typo3_to_remove_ids = list() for child in child_obj.browse(cr, uid, child_ids, context): if (child.state == 'I'): typo3_to_remove_ids.append(child.id) if typo3_to_remove_ids: res = child_obj.child_remove_from_typo3( cr, uid, typo3_to_remove_ids, context) res = super(delegate_child_wizard, self).delegate( cr, uid, ids, context) return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
# -*- encoding: utf-8 -*- ############################################################################## # # Copyright (C) 2014 Compassion CH (http://www.compassion.ch) # Releasing children from poverty in Jesus' name # @author: David Coninckx <david@coninckx.com> # # The licence is in the file __openerp__.py # ############################################################################## from openerp.osv import orm from ..model.sync_typo3 import Sync_typo3 class delegate_child_wizard(orm.TransientModel): _inherit = 'delegate.child.wizard' def delegate(self, cr, uid, ids, context=None): child_ids = self._default_child_ids(cr, uid, context) child_obj = self.pool.get('compassion.child') typo3_to_remove_ids = list() for child in child_obj.browse(cr, uid, child_ids, context): if (child.state == 'I'): typo3_to_remove_ids.append(child.id) if typo3_to_remove_ids: res = child_obj.child_remove_from_typo3( cr, uid, typo3_to_remove_ids, context) res = super(delegate_child_wizard, self).delegate( cr, uid, ids, context) and res return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
Fix res returned on delegate
Fix res returned on delegate
Python
agpl-3.0
MickSandoz/compassion-switzerland,ecino/compassion-switzerland,CompassionCH/compassion-switzerland,ndtran/compassion-switzerland,eicher31/compassion-switzerland,eicher31/compassion-switzerland,ndtran/compassion-switzerland,MickSandoz/compassion-switzerland,Secheron/compassion-switzerland,CompassionCH/compassion-switzerland,CompassionCH/compassion-switzerland,ecino/compassion-switzerland,ecino/compassion-switzerland,eicher31/compassion-switzerland,Secheron/compassion-switzerland
cc5cf942cc56f12e09c50c29b488f71504387b7f
avalonstar/apps/api/serializers.py
avalonstar/apps/api/serializers.py
# -*- coding: utf-8 -*- from rest_framework import serializers from apps.broadcasts.models import Broadcast, Raid, Series from apps.games.models import Game class BroadcastSerializer(serializers.ModelSerializer): class Meta: depth = 1 model = Broadcast class RaidSerializer(serializers.ModelSerializer): class Meta: model = Raid class SeriesSerializer(serializers.ModelSerializer): class Meta: model = Series class GameSerializer(serializers.ModelSerializer): class Meta: model = Game
# -*- coding: utf-8 -*- from rest_framework import serializers from apps.broadcasts.models import Broadcast, Raid, Series from apps.games.models import Game class BroadcastSerializer(serializers.ModelSerializer): class Meta: model = Broadcast class RaidSerializer(serializers.ModelSerializer): class Meta: model = Raid class SeriesSerializer(serializers.ModelSerializer): class Meta: model = Series class GameSerializer(serializers.ModelSerializer): class Meta: model = Game
Remove the depth for now.
Remove the depth for now.
Python
apache-2.0
bryanveloso/avalonstar-tv,bryanveloso/avalonstar-tv,bryanveloso/avalonstar-tv
b8770a85e11c048fb0dc6c46f799b17add07568d
productController.py
productController.py
from endpoints import Controller, CorsMixin import sqlite3 from datetime import datetime conn = sqlite3.connect('CIUK.db') cur = conn.cursor() class Default(Controller, CorsMixin): def GET(self): return "CIUK" def POST(self, **kwargs): return '{}, {}, {}'.format(kwargs['title'], kwargs['desc'], kwargs['price']) class Products(Controller, CorsMixin): def GET(self): cur.execute("select * from products") return cur.fetchall() class Product(Controller, CorsMixin): def GET(self, id): cur.execute("select * from products where id=?", (id,)) return cur.fetchone() def POST(self, **kwargs): row =[kwargs['title'], kwargs['desc'], kwargs['price'], datetime.now(), datetime.now()] cur.execute("insert into products values (null, ?, ?, ?, ?, ?);", (row)) conn.commit() return "New product added!" def PUT(self, id, **kwargs): row =[kwargs['title'], kwargs['desc'], kwargs['price'], datetime.now(), id] cur.execute("update products set title=?, description=?, price=?, created_at=? where id=?", (row)) conn.commit() return "Product updated!" def DELETE(self, id): cur.execute("delete from products where id=?", (id,)) conn.commit() return "Product deleted!"
from endpoints import Controller, CorsMixin import sqlite3 from datetime import datetime conn = sqlite3.connect('databaseForTest.db') cur = conn.cursor() class Default(Controller, CorsMixin): def GET(self): return "CIUK" def POST(self, **kwargs): return '{}, {}, {}'.format(kwargs['title'], kwargs['desc'], kwargs['price']) class Products(Controller, CorsMixin): def GET(self): cur.execute("select * from products") return cur.fetchall() class Product(Controller, CorsMixin): def GET(self, id): cur.execute("select * from products where id=?", (id,)) return cur.fetchone() def POST(self, **kwargs): row =[kwargs['title'], kwargs['desc'], kwargs['price'], datetime.now(), datetime.now()] cur.execute("insert into products values (null, ?, ?, ?, ?, ?);", (row)) conn.commit() return "New product added!" def PUT(self, id, **kwargs): row =[kwargs['title'], kwargs['desc'], kwargs['price'], datetime.now(), id] cur.execute("update products set title=?, description=?, price=?, created_at=? where id=?", (row)) conn.commit() return "Product updated!" def DELETE(self, id): cur.execute("delete from products where id=?", (id,)) conn.commit() return "Product deleted!"
Change name of database for test
Change name of database for test
Python
mit
joykuotw/python-endpoints,joykuotw/python-endpoints,joykuotw/python-endpoints
c129b435a7759104feaaa5b828dc2f2ac46d5ab1
src/cmdlinetest/afp_mock.py
src/cmdlinetest/afp_mock.py
#!/usr/bin/env python from bottle import route from textwrap import dedent from bottledaemon import daemon_run """ Simple AFP mock to allow testing the afp-cli. """ @route('/account') def account(): return """{"test_account": ["test_role"]}""" @route('/account/<account>/<role>') def credentials(account, role): return dedent(""" {"Code": "Success", "LastUpdated": "1970-01-01T00:00:00Z", "AccessKeyId": "XXXXXXXXXXXX", "SecretAccessKey": "XXXXXXXXXXXX", "Token": "XXXXXXXXXXXX", "Expiration": "2032-01-01T00:00:00Z", "Type": "AWS-HMAC"}""").strip() daemon_run(host='localhost', port=5555)
#!/usr/bin/env python """ Simple AFP mock to allow testing the afp-cli. """ from bottle import route from textwrap import dedent from bottledaemon import daemon_run @route('/account') def account(): return """{"test_account": ["test_role"]}""" @route('/account/<account>/<role>') def credentials(account, role): return dedent(""" {"Code": "Success", "LastUpdated": "1970-01-01T00:00:00Z", "AccessKeyId": "XXXXXXXXXXXX", "SecretAccessKey": "XXXXXXXXXXXX", "Token": "XXXXXXXXXXXX", "Expiration": "2032-01-01T00:00:00Z", "Type": "AWS-HMAC"}""").strip() daemon_run(host='localhost', port=5555)
Move string above the imports so it becomes a docstring
Move string above the imports so it becomes a docstring
Python
apache-2.0
ImmobilienScout24/afp-cli,ImmobilienScout24/afp-cli,ImmobilienScout24/afp-cli
0ee0650dfacf648982615be49cefd57f928a73ee
holonet/core/list_access.py
holonet/core/list_access.py
# -*- coding: utf8 -*- from django.conf import settings from holonet.mappings.helpers import clean_address, split_address from .models import DomainBlacklist, DomainWhitelist, SenderBlacklist, SenderWhitelist def is_blacklisted(sender): sender = clean_address(sender) prefix, domain = split_address(sender) try: DomainBlacklist.objects.get(domain=domain) return True except DomainBlacklist.DoesNotExist: pass try: SenderBlacklist.objects.get(sender=sender) return True except SenderBlacklist.DoesNotExist: pass return False def is_not_whitelisted(sender): sender = clean_address(sender) prefix, domain = split_address(sender) if settings.SENDER_WHITELIST_ENABLED: try: SenderWhitelist.objects.get(sender=sender) return False except SenderWhitelist.DoesNotExist: pass if settings.DOMAIN_WHITELIST_ENABLED: try: DomainWhitelist.objects.get(domain=domain) return False except DomainWhitelist.DoesNotExist: pass return bool(settings.SENDER_WHITELIST_ENABLED or settings.DOMAIN_WHITELIST_ENABLED)
# -*- coding: utf8 -*- from django.conf import settings from holonet.mappings.helpers import clean_address, split_address from .models import DomainBlacklist, DomainWhitelist, SenderBlacklist, SenderWhitelist def is_blacklisted(sender): sender = clean_address(sender) prefix, domain = split_address(sender) if DomainBlacklist.objects.filter(domain=domain).exists(): return True if SenderBlacklist.objects.filter(sender=sender).exists(): return True return False def is_not_whitelisted(sender): sender = clean_address(sender) prefix, domain = split_address(sender) if settings.SENDER_WHITELIST_ENABLED: if SenderWhitelist.objects.filter(sender=sender).exists(): return False if settings.DOMAIN_WHITELIST_ENABLED: if DomainWhitelist.objects.filter(domain=domain).exists(): return False return bool(settings.SENDER_WHITELIST_ENABLED or settings.DOMAIN_WHITELIST_ENABLED)
Change to exists instead of catching DoesNotExist exception.
Change to exists instead of catching DoesNotExist exception.
Python
mit
webkom/holonet,webkom/holonet,webkom/holonet
f68b4b9b133d3c8ecb9826af9736c8c1fca64e49
maxims/credentials.py
maxims/credentials.py
from axiom import attributes, item from twisted.cred import credentials class UsernamePassword(item.Item): """ A stored username and password. """ username = attributes.bytes(allowNone=False) password = attributes.bytes(allowNone=False) def instantiate(self): return credentials.UsernamePassword(self.username, self.password)
from axiom import attributes, item from twisted.cred import credentials class UsernamePassword(item.Item): """ A stored username and password. Note that although this class is an ``IUsernamePassword`` implementation, you should still use the ``instantiate`` method to get independent ``IUsernamePassword`` providers. """ username = attributes.bytes(allowNone=False) password = attributes.bytes(allowNone=False) def instantiate(self): return credentials.UsernamePassword(self.username, self.password)
Add caveat about UsernamePassword already being an IUsernamePassword implementation
Add caveat about UsernamePassword already being an IUsernamePassword implementation
Python
isc
lvh/maxims
214511a6fbdd0763667e740735d0876f78a3b244
derpibooru/query.py
derpibooru/query.py
from .request import url class Search(object): def __init__(self, key=None, q=[], sf="created_at", sd="desc"): self._parameters = { "key": key, "q": q, "sf": sf, "sd": sd } @property def parameters(self): return self._parameters @property def url(self): return url(**self.parameters) def key(self, key=None): self._parameters["key"] = key return Search(**self._parameters) def query(self, *q): self._parameters["q"] = [str(tag).strip() for tag in q] return Search(**self._parameters) def descending(self): self._parameters["sd"] = "desc" return Search(**self._parameters) def ascending(self): self._parameters["sd"] = "asc" return Search(**self._parameters) def sort_by(self, sf): self._parameters["sf"] = sf return Search(**self._parameters)
from .request import url class Search(object): def __init__(self, key=None, q=[], sf="created_at", sd="desc"): self._parameters = { "key": key, "q": [str(tag).strip() for tag in q if tag], "sf": sf, "sd": sd } @property def parameters(self): return self._parameters @property def url(self): return url(**self.parameters) def key(self, key=None): self._parameters["key"] = key return Search(**self._parameters) def query(self, *q): self._parameters["q"] = [str(tag).strip() for tag in q if tag] return Search(**self._parameters) def descending(self): self._parameters["sd"] = "desc" return Search(**self._parameters) def ascending(self): self._parameters["sd"] = "asc" return Search(**self._parameters) def sort_by(self, sf): self._parameters["sf"] = sf return Search(**self._parameters)
Add check for empty tags
Add check for empty tags
Python
bsd-2-clause
joshua-stone/DerPyBooru
68636bfcf95163e9764860b09a713d59464e3419
conda/linux_dev/get_freecad_version.py
conda/linux_dev/get_freecad_version.py
import sys import os import subprocess import platform platform_dict = {} platform_dict["Darwin"] = "OSX" sys_n_arch = platform.platform() sys_n_arch = sys_n_arch.split("-") system, arch = sys_n_arch[0], sys_n_arch[4] if system in platform_dict: system = platform_dict[system] version_info = subprocess.check_output("freecadcmd --version", shell=True) version_info = version_info.decode("utf-8").split(" ") dev_version = version_info[1] revision = version_info[3] print("FreeCAD_{}-{}-{}-glibc2.12-{}-conda".format(dev_version, revision, system, arch))
import sys import os import subprocess import platform platform_dict = {} platform_dict["Darwin"] = "OSX" sys_n_arch = platform.platform() sys_n_arch = sys_n_arch.split("-") system, arch = sys_n_arch[0], sys_n_arch[4] if system in platform_dict: system = platform_dict[system] version_info = subprocess.check_output("freecadcmd --version", shell=True) version_info = version_info.decode("utf-8").split(" ") dev_version = version_info[1] revision = version_info[3] print("FreeCAD_{}-{}-{}-Conda_glibc2.12-x86_64".format(dev_version, revision, system))
Revert to using current AppImage update info
Revert to using current AppImage update info https://github.com/FreeCAD/FreeCAD-AppImage/issues/35
Python
lgpl-2.1
FreeCAD/FreeCAD-AppImage,FreeCAD/FreeCAD-AppImage
956ad502766eddbaf3c81672a30e58c814ba8437
test/test_api_classes.py
test/test_api_classes.py
import pytest from jedi import api def make_definitions(): return api.defined_names(""" import sys class C: pass x = C() def f(): pass """) @pytest.mark.parametrize('definition', make_definitions()) def test_basedefinition_type(definition): assert definition.type in ('module', 'class', 'instance', 'function', 'statement', 'import')
import textwrap import pytest from jedi import api def make_definitions(): """ Return a list of definitions for parametrized tests. :rtype: [jedi.api_classes.BaseDefinition] """ source = textwrap.dedent(""" import sys class C: pass x = C() def f(): pass """) definitions = [] definitions += api.defined_names(source) source += textwrap.dedent(""" variable = sys or C or x or f""") lines = source.splitlines() script = api.Script(source, len(lines), len('variable'), None) definitions += script.definition() script2 = api.Script(source, 4, len('class C'), None) definitions += script2.related_names() return definitions @pytest.mark.parametrize('definition', make_definitions()) def test_basedefinition_type(definition): assert definition.type in ('module', 'class', 'instance', 'function', 'statement', 'import')
Make more examples in make_definitions
Make more examples in make_definitions
Python
mit
WoLpH/jedi,jonashaag/jedi,tjwei/jedi,flurischt/jedi,dwillmer/jedi,jonashaag/jedi,mfussenegger/jedi,flurischt/jedi,tjwei/jedi,mfussenegger/jedi,dwillmer/jedi,WoLpH/jedi
460a2430fbd8832f3fada1a74b754d71a27ac282
mockingjay/matcher.py
mockingjay/matcher.py
import abc import re class Matcher(object): __metaclass__ = abc.ABCMeta @abc.abstractmethod def assert_request_matched(self, request): """ Assert that the request matched the spec in this matcher object. """ class HeaderMatcher(Matcher): """ Matcher for the request's header. :param key: the name of the header :param value: the value of the header """ def __init__(self, key, value): self.key = key self.value = value def assert_request_matched(self, request): assert request.headers.get(self.key) == self.value class BodyMatcher(Matcher): """ Matcher for the request body. :param body: can either be a string or a :class:`_sre.SRE_Pattern`: object """ def __init__(self, body): self.body = body def assert_request_matched(self, request): if isinstance(self.body, re._pattern_type): assert self.body.search(request.body) is not None else: assert request.body == self.body
import abc import re class StringOrPattern(object): """ A decorator object that wraps a string or a regex pattern so that it can be compared against another string either literally or using the pattern. """ def __init__(self, subject): self.subject = subject def __eq__(self, other_str): if isinstance(self.subject, re._pattern_type): return self.subject.search(other_str) is not None else: return self.subject == other_str def __hash__(self): return self.subject.__hash__() class Matcher(object): __metaclass__ = abc.ABCMeta @abc.abstractmethod def assert_request_matched(self, request): """ Assert that the request matched the spec in this matcher object. """ class HeaderMatcher(Matcher): """ Matcher for the request's header. :param key: the name of the header :param value: the value of the header """ def __init__(self, key, value): self.key = key self.value = StringOrPattern(value) def assert_request_matched(self, request): assert request.headers.get(self.key) == self.value class BodyMatcher(Matcher): """ Matcher for the request body. :param body: can either be a string or a :class:`_sre.SRE_Pattern`: object """ def __init__(self, body): self.body = StringOrPattern(body) def assert_request_matched(self, request): assert request.body == self.body
Allow all values to be compared with either literally or with a pattern
Allow all values to be compared with either literally or with a pattern
Python
bsd-3-clause
kevinjqiu/mockingjay
da69fff2d104c9cccd285078c40de05ea46fdb4d
halaqat/urls.py
halaqat/urls.py
"""halaqat URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.8/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Add an import: from blog import urls as blog_urls 2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls)) """ from django.conf.urls import include, url from django.contrib import admin from back_office import urls as back_office_url from students import urls as students_url urlpatterns = [ url(r'^back_office/', include(back_office_url)), url(r'^students/', include(students_url)), url(r'^admin/', include(admin.site.urls)), ]
"""halaqat URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.8/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Add an import: from blog import urls as blog_urls 2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls)) """ from django.conf.urls import include, url from django.contrib import admin from back_office import urls as back_office_url from students import urls as students_url urlpatterns = [ url(r'^back_office/', include(back_office_url)), url(r'^back-office/students/', include(students_url)), url(r'^admin/', include(admin.site.urls)), ]
Add back-office to student URL
Add back-office to student URL
Python
mit
EmadMokhtar/halaqat,EmadMokhtar/halaqat,EmadMokhtar/halaqat
20929dd2e1ddd0909afc3e25b040bfdcdc2c9b00
src/opencmiss/neon/core/problems/biomeng321lab1.py
src/opencmiss/neon/core/problems/biomeng321lab1.py
''' Copyright 2015 University of Auckland Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ''' import json from opencmiss.neon.core.problems.base import BaseProblem BOUNDARY_CONDITIONS = ['Type 1', 'Type 2', 'Type 3', 'Type 4', 'Type 5'] class Biomeng321Lab1(BaseProblem): def __init__(self): super(Biomeng321Lab1, self).__init__() self.setName('Biomeng321 Lab1') self._boundary_condition = None def setBoundaryCondition(self, boundary_condition): self._boundary_condition = boundary_condition def getBoundaryCondition(self): return self._boundary_condition def serialise(self): d = {} d['boundary_condition'] = self._boundary_condition return json.dumps(d) def deserialise(self, string): d = json.loads(string) self._boundary_condition = d['boundary_condition'] if 'boundary_condition' in d else None def validate(self): return True
''' Copyright 2015 University of Auckland Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ''' import json from opencmiss.neon.core.problems.base import BaseProblem BOUNDARY_CONDITIONS = ['Model 1', 'Model 2', 'Model 3', 'Model 4', 'Model 5'] class Biomeng321Lab1(BaseProblem): def __init__(self): super(Biomeng321Lab1, self).__init__() self.setName('Biomeng321 Lab1') self._boundary_condition = None def setBoundaryCondition(self, boundary_condition): self._boundary_condition = boundary_condition def getBoundaryCondition(self): return self._boundary_condition def serialise(self): d = {} d['boundary_condition'] = self._boundary_condition return json.dumps(d) def deserialise(self, string): d = json.loads(string) self._boundary_condition = d['boundary_condition'] if 'boundary_condition' in d else None def validate(self): return True
Change name of boundary conditions for Biomeng321 Lab1.
Change name of boundary conditions for Biomeng321 Lab1.
Python
apache-2.0
alan-wu/neon
8c551fe51ed142305945c0cef530ac84ed3e7eb9
nodeconductor/logging/perms.py
nodeconductor/logging/perms.py
from nodeconductor.core.permissions import StaffPermissionLogic PERMISSION_LOGICS = ( ('logging.Alert', StaffPermissionLogic(any_permission=True)), ('logging.SystemNotification', StaffPermissionLogic(any_permission=True)), )
from nodeconductor.core.permissions import StaffPermissionLogic PERMISSION_LOGICS = ( ('logging.Alert', StaffPermissionLogic(any_permission=True)), ('logging.WebHook', StaffPermissionLogic(any_permission=True)), ('logging.PushHook', StaffPermissionLogic(any_permission=True)), ('logging.EmailHook', StaffPermissionLogic(any_permission=True)), ('logging.SystemNotification', StaffPermissionLogic(any_permission=True)), )
Allow staff user to manage hooks.
Allow staff user to manage hooks.
Python
mit
opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor
13df4b7ba5c706e1fddbd17ac9edf3894e9a7206
nymms/tests/test_registry.py
nymms/tests/test_registry.py
import unittest from nymms import registry from nymms.resources import Command, MonitoringGroup from weakref import WeakValueDictionary class TestRegistry(unittest.TestCase): def test_empty_registry(self): self.assertEqual(Command.registry, WeakValueDictionary()) def test_register_object(self): # First test it's empty self.assertEqual(Command.registry, WeakValueDictionary()) # Add a command command = Command('test_command', '/bin/true') # verify that there is only a single command in the registry self.assertEqual(len(Command.registry), 1) # Verify that the registered command is the same as command self.assertIs(Command.registry[command.name], command) def test_duplicate_register(self): # add a command command = Command('test_command', '/bin/true') with self.assertRaises(registry.DuplicateEntryError): Command('test_command', '/bin/true') def test_invalid_resource_register(self): with self.assertRaises(TypeError): Command.registry['test'] = MonitoringGroup('test_group')
import unittest from nymms import registry from nymms.resources import Command, MonitoringGroup from weakref import WeakValueDictionary class TestRegistry(unittest.TestCase): def tearDown(self): # Ensure we have a fresh registry after every test Command.registry.clear() def test_empty_registry(self): self.assertEqual(Command.registry, WeakValueDictionary()) def test_register_object(self): # First test it's empty self.assertEqual(Command.registry, WeakValueDictionary()) # Add a command command = Command('test_command', '/bin/true') # verify that there is only a single command in the registry self.assertEqual(len(Command.registry), 1) # Verify that the registered command is the same as command self.assertIs(Command.registry[command.name], command) def test_duplicate_register(self): # add a command command = Command('test_command', '/bin/true') with self.assertRaises(registry.DuplicateEntryError): Command('test_command', '/bin/true') def test_invalid_resource_register(self): with self.assertRaises(TypeError): Command.registry['test'] = MonitoringGroup('test_group')
Clear registry between each test
Clear registry between each test
Python
bsd-2-clause
cloudtools/nymms
6f50381e2e14ab7c1c90e52479ffcfc7748329b3
UI/resources/constants.py
UI/resources/constants.py
# -*- coding: utf-8 -*- SAVE_PASSWORD_HASHED = True MAX_RETRIES_DOWNLOAD_FROM_SAME_FARMER = 3 MAX_RETRIES_UPLOAD_TO_SAME_FARMER = 3 MAX_RETRIES_NEGOTIATE_CONTRACT = 10 MAX_RETRIES_GET_FILE_POINTERS = 10 FILE_POINTERS_REQUEST_DELAY = 1 # int: file pointers request delay, in seconds. MAX_DOWNLOAD_REQUEST_BLOCK_SIZE = 32 * 1024 MAX_UPLOAD_REQUEST_BLOCK_SIZE = 4096 MAX_UPLOAD_CONNECTIONS_AT_SAME_TIME = 4 MAX_DOWNLOAD_CONNECTIONS_AT_SAME_TIME = 4 DEFAULT_MAX_BRIDGE_REQUEST_TIMEOUT = 5 # int: maximum bridge request timeout, in seconds. DEFAULT_BRIDGE_API_URL = 'api.storj.io'
# -*- coding: utf-8 -*- SAVE_PASSWORD_HASHED = True MAX_RETRIES_DOWNLOAD_FROM_SAME_FARMER = 3 MAX_RETRIES_UPLOAD_TO_SAME_FARMER = 3 MAX_RETRIES_NEGOTIATE_CONTRACT = 10 MAX_RETRIES_GET_FILE_POINTERS = 10 FILE_POINTERS_REQUEST_DELAY = 1 # int: file pointers request delay, in seconds. MAX_DOWNLOAD_REQUEST_BLOCK_SIZE = 32 * 1024 MAX_UPLOAD_REQUEST_BLOCK_SIZE = 4096 MAX_UPLOAD_CONNECTIONS_AT_SAME_TIME = 4 MAX_DOWNLOAD_CONNECTIONS_AT_SAME_TIME = 4 DEFAULT_MAX_BRIDGE_REQUEST_TIMEOUT = 5 DEFAULT_MAX_FARMER_CONNECTION_TIMEOUT = 7 # int: maximum bridge request timeout, in seconds. DEFAULT_BRIDGE_API_URL = 'api.storj.io'
Add farmer max timeout constant
Add farmer max timeout constant
Python
mit
lakewik/storj-gui-client
be9d58ffcf23e4fb47d2c09e869368ab9ec738c9
localore/localore/embeds.py
localore/localore/embeds.py
from urllib.parse import urlparse from django.conf import settings from wagtail.wagtailembeds.finders.embedly import embedly from wagtail.wagtailembeds.finders.oembed import oembed def get_default_finder(): if hasattr(settings, 'WAGTAILEMBEDS_EMBEDLY_KEY'): return embedly return oembed def finder(url, max_width=None): domain = urlparse(url).netloc # work around Embedly missing embedding HTML for Twitter and Instagram URLs if domain.endswith(( 'instagram.com', 'twitter.com', )): return oembed(url, max_width) embed_dict = get_default_finder()(url, max_width) if domain.endswith('soundcloud.com'): embed_dict['html'] = ( embed_dict['html'] .replace('visual%3Dtrue', 'visual%3Dfalse') .replace('width="500"', 'width="100%"') .replace('height="500"', 'height="166"') ) embed_dict['width'] = '100%' embed_dict['height'] = '166' return embed_dict
from urllib.parse import urlparse from django.conf import settings from wagtail.wagtailembeds.finders.embedly import embedly from wagtail.wagtailembeds.finders.oembed import oembed def get_default_finder(): if hasattr(settings, 'WAGTAILEMBEDS_EMBEDLY_KEY'): return embedly return oembed def finder(url, max_width=None): domain = urlparse(url).netloc # work around Embedly missing embedding HTML for Twitter and Instagram URLs if domain.endswith(( 'instagram.com', 'twitter.com', )): return oembed(url, max_width) embed_dict = get_default_finder()(url, max_width) if domain.endswith('soundcloud.com'): embed_dict['html'] = ( embed_dict['html'] .replace('visual%3Dtrue', 'visual%3Dfalse') .replace('width="%s"' % embed_dict['width'], 'width="100%"') .replace('height="%s"' % embed_dict['height'], 'height="166"') ) embed_dict['width'] = None embed_dict['height'] = 166 return embed_dict
Fix SoundCloud embed width/height replacement.
Fix SoundCloud embed width/height replacement. SoundCloud embeds aren't always 500x500. Also, don't set the "width" embed dict key to '100%': "width"/"height" keys expect integers only.
Python
mpl-2.0
ghostwords/localore,ghostwords/localore,ghostwords/localore
8184354179bf6cf88304ebd743b2236258e46522
unicornclient/routine.py
unicornclient/routine.py
import threading import queue class Routine(threading.Thread): def __init__(self): threading.Thread.__init__(self) self.queue = queue.Queue() self.manager = None self.no_wait = False self.is_stopping = False self.sleeper = threading.Event() def run(self): while True: got_task = False data = None if self.no_wait: try: data = self.queue.get_nowait() got_task = True except queue.Empty: data = None got_task = False else: data = self.queue.get() got_task = True if data: index = 'routine_command' routine_command = data[index] if index in data else None if routine_command == 'stop': self.is_stopping = True self.process(data) if got_task: self.queue.task_done() if self.is_stopping: break def process(self, data): pass def sleep(self, seconds): while not self.sleeper.is_set(): self.sleeper.wait(timeout=seconds) self.sleeper.clear() def stop_signal(self): while not self.queue.empty(): try: self.queue.get_nowait() except queue.Empty: continue self.queue.task_done() self.queue.put({'routine_command': 'stop'}) self.sleeper.set()
import threading import queue class Routine(threading.Thread): def __init__(self): threading.Thread.__init__(self) self.queue = queue.Queue() self.manager = None self.no_wait = False self.is_stopping = False self.sleeper = threading.Event() def run(self): while True: got_task = False data = None if self.no_wait: try: data = self.queue.get_nowait() got_task = True except queue.Empty: data = None got_task = False else: data = self.queue.get() got_task = True if data: index = 'routine_command' routine_command = data[index] if index in data else None if routine_command == 'stop': self.is_stopping = True self.process(data) if got_task: self.queue.task_done() if self.is_stopping: break def process(self, data): pass def sleep(self, seconds): if self.is_stopping: return while not self.sleeper.is_set(): self.sleeper.wait(timeout=seconds) self.sleeper.clear() def stop_signal(self): while not self.queue.empty(): try: self.queue.get_nowait() except queue.Empty: continue self.queue.task_done() self.queue.put({'routine_command': 'stop'}) self.sleeper.set()
Disable sleep function when stopping
Disable sleep function when stopping
Python
mit
amm0nite/unicornclient,amm0nite/unicornclient
42db9ceae490152040651a23d397e7ad4c950712
flask/flask/tests/test_template.py
flask/flask/tests/test_template.py
from flask import Flask, render_template_string import jinja2 def test_undefined_variable__no_error(): app = Flask(__name__) assert issubclass(app.jinja_env.undefined, jinja2.Undefined) @app.route('/') def endpoint(): return render_template_string('foo = [{{bar}}]', foo='blabla') resp = app.test_client().get('/') # http://jinja.pocoo.org/docs/2.10/templates/#variables # If a variable or attribute does not exist, you will get back an undefined # value. What you can do with that kind of value depends on the application # configuration: the default behavior is to evaluate to an empty string if # printed or iterated over, and to fail for every other operation. assert resp.data == 'foo = []' def test_undefined_variable__strict__raise_error(capsys): app = Flask(__name__) # http://jinja.pocoo.org/docs/2.10/api/#undefined-types # The closest to regular Python behavior is the StrictUndefined which # disallows all operations beside testing if it’s an undefined object. app.jinja_env.undefined = jinja2.StrictUndefined @app.route('/') def endpoint(): return render_template_string('foo = [{{bar}}]', foo='blabla') resp = app.test_client().get('/') assert resp.status_code == 500 assert "UndefinedError: 'bar' is undefined" in capsys.readouterr()[1]
# -*- coding: utf-8 -*- from flask import Flask, render_template_string import jinja2 def test_undefined_variable__no_error(): app = Flask(__name__) assert issubclass(app.jinja_env.undefined, jinja2.Undefined) @app.route('/') def endpoint(): return render_template_string('foo = [{{bar}}]', foo='blabla') resp = app.test_client().get('/') # http://jinja.pocoo.org/docs/2.10/templates/#variables # If a variable or attribute does not exist, you will get back an undefined # value. What you can do with that kind of value depends on the application # configuration: the default behavior is to evaluate to an empty string if # printed or iterated over, and to fail for every other operation. assert resp.data == 'foo = []' def test_undefined_variable__strict__raise_error(capsys): app = Flask(__name__) # http://jinja.pocoo.org/docs/2.10/api/#undefined-types # The closest to regular Python behavior is the StrictUndefined which # disallows all operations beside testing if it’s an undefined object. app.jinja_env.undefined = jinja2.StrictUndefined @app.route('/') def endpoint(): return render_template_string('foo = [{{bar}}]', foo='blabla') resp = app.test_client().get('/') assert resp.status_code == 500 assert "UndefinedError: 'bar' is undefined" in capsys.readouterr()[1]
Fix source code encoding error
[flask] Fix source code encoding error
Python
mit
imsardine/learning,imsardine/learning,imsardine/learning,imsardine/learning,imsardine/learning,imsardine/learning,imsardine/learning
49c60d069da48cd83939a4e42e933e9a28e21dd2
tests/cupy_tests/cuda_tests/test_nccl.py
tests/cupy_tests/cuda_tests/test_nccl.py
import unittest from cupy import cuda from cupy.testing import attr @unittest.skipUnless(cuda.nccl_enabled, 'nccl is not installed') class TestNCCL(unittest.TestCase): @attr.gpu def test_single_proc_ring(self): id = cuda.nccl.get_unique_id() comm = cuda.nccl.NcclCommunicator(1, id, 0) assert 0 == comm.rank_id() comm.destroy() @attr.gpu @unittest.skipUnless(cuda.nccl.get_version() >= 2400, "Using old NCCL") def test_abort(self): id = cuda.nccl.get_unique_id() comm = cuda.nccl.NcclCommunicator(1, id, 0) comm.abort() @attr.gpu @unittest.skipUnless(cuda.nccl.get_version() >= 2400, "Using old NCCL") def test_check_async_error(self): id = cuda.nccl.get_unique_id() comm = cuda.nccl.NcclCommunicator(1, id, 0) comm.check_async_error() comm.destroy()
import unittest from cupy import cuda from cupy.testing import attr @unittest.skipUnless(cuda.nccl_enabled, 'nccl is not installed') class TestNCCL(unittest.TestCase): @attr.gpu def test_single_proc_ring(self): id = cuda.nccl.get_unique_id() comm = cuda.nccl.NcclCommunicator(1, id, 0) assert 0 == comm.rank_id() comm.destroy() @attr.gpu @unittest.skipUnless(cuda.nccl_enabled and cuda.nccl.get_version() >= 2400, "Using old NCCL") def test_abort(self): id = cuda.nccl.get_unique_id() comm = cuda.nccl.NcclCommunicator(1, id, 0) comm.abort() @attr.gpu @unittest.skipUnless(cuda.nccl_enabled and cuda.nccl.get_version() >= 2400, "Using old NCCL") def test_check_async_error(self): id = cuda.nccl.get_unique_id() comm = cuda.nccl.NcclCommunicator(1, id, 0) comm.check_async_error() comm.destroy()
Check NCCL existence in test decorators
Check NCCL existence in test decorators
Python
mit
cupy/cupy,cupy/cupy,cupy/cupy,cupy/cupy
5ca84f89d08ab4b31c47753ce74129ce06f8ed3a
apps/bluebottle_utils/models.py
apps/bluebottle_utils/models.py
from django.db import models from django_countries import CountryField class Address(models.Model): """ A postal address. """ address_line1 = models.CharField(max_length=100, blank=True) address_line2 = models.CharField(max_length=100, blank=True) city = models.CharField(max_length=100, blank=True) state = models.CharField(max_length=100, blank=True) country = CountryField() zip_code = models.CharField(max_length=20, blank=True) class Meta: abstract = True
from django.db import models from django_countries import CountryField class Address(models.Model): """ A postal address. """ address_line1 = models.CharField(max_length=100, blank=True) address_line2 = models.CharField(max_length=100, blank=True) city = models.CharField(max_length=100, blank=True) state = models.CharField(max_length=100, blank=True) country = CountryField() zip_code = models.CharField(max_length=20, blank=True) def __unicode__(self): return self.address_line1[:80] class Meta: abstract = True
Add a __unicode__ method to the Address model in utils.
Add a __unicode__ method to the Address model in utils.
Python
bsd-3-clause
onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site
a9cebe11642b41a8c0b277e09bf273b52dbb63f9
apps/careeropportunity/views.py
apps/careeropportunity/views.py
# -*- coding: utf-8 -*- from django.shortcuts import render from django.utils import timezone # API v1 from rest_framework import mixins, viewsets from rest_framework.permissions import AllowAny from apps.careeropportunity.models import CareerOpportunity from apps.careeropportunity.serializers import CareerSerializer def index(request, id=None): return render(request, 'careeropportunity/index.html') class CareerViewSet(viewsets.GenericViewSet, mixins.RetrieveModelMixin, mixins.ListModelMixin): """ Viewset for Career serializer """ queryset = CareerOpportunity.objects.filter( start__lte=timezone.now(), end__gte=timezone.now() ).order_by('-featured', '-start') serializer_class = CareerSerializer permission_classes = (AllowAny,)
# -*- coding: utf-8 -*- from django.shortcuts import render from django.utils import timezone # API v1 from rest_framework import mixins, viewsets from rest_framework.permissions import AllowAny from rest_framework.pagination import PageNumberPagination from apps.careeropportunity.models import CareerOpportunity from apps.careeropportunity.serializers import CareerSerializer def index(request, id=None): return render(request, 'careeropportunity/index.html') class HundredItemsPaginator(PageNumberPagination): page_size = 100 class CareerViewSet(viewsets.GenericViewSet, mixins.RetrieveModelMixin, mixins.ListModelMixin): """ Viewset for Career serializer """ queryset = CareerOpportunity.objects.filter( start__lte=timezone.now(), end__gte=timezone.now() ).order_by('-featured', '-start') serializer_class = CareerSerializer permission_classes = (AllowAny,) pagination_class = HundredItemsPaginator
Increase pagination size for careeropportunity api
Increase pagination size for careeropportunity api
Python
mit
dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4
453b6a8697b066174802257156ac364aed2c650a
emission/storage/timeseries/aggregate_timeseries.py
emission/storage/timeseries/aggregate_timeseries.py
import logging import pandas as pd import pymongo import emission.core.get_database as edb import emission.storage.timeseries.builtin_timeseries as bits class AggregateTimeSeries(bits.BuiltinTimeSeries): def __init__(self): super(AggregateTimeSeries, self).__init__(None) self.user_query = {}
import logging import pandas as pd import pymongo import emission.core.get_database as edb import emission.storage.timeseries.builtin_timeseries as bits class AggregateTimeSeries(bits.BuiltinTimeSeries): def __init__(self): super(AggregateTimeSeries, self).__init__(None) self.user_query = {} def _get_sort_key(self, time_query = None): return None
Implement a sort key method for the aggregate timeseries
Implement a sort key method for the aggregate timeseries This should return null because we want to mix up the identifying information from the timeseries and sorting will re-impose some order. Also sorting takes too much time!
Python
bsd-3-clause
shankari/e-mission-server,yw374cornell/e-mission-server,e-mission/e-mission-server,sunil07t/e-mission-server,e-mission/e-mission-server,sunil07t/e-mission-server,shankari/e-mission-server,shankari/e-mission-server,e-mission/e-mission-server,yw374cornell/e-mission-server,sunil07t/e-mission-server,yw374cornell/e-mission-server,shankari/e-mission-server,sunil07t/e-mission-server,e-mission/e-mission-server,yw374cornell/e-mission-server
6577b521ac8fd0f1c9007f819dc0c7ee27ef4955
numba/typesystem/tests/test_type_properties.py
numba/typesystem/tests/test_type_properties.py
from numba.typesystem import * assert int_.is_int assert int_.is_numeric assert long_.is_int assert long_.is_numeric assert not long_.is_long assert float_.is_float assert float_.is_numeric assert double.is_float assert double.is_numeric assert not double.is_double assert object_.is_object assert list_.is_list assert list_.is_object assert list_type(int_, 2).is_list assert list_type(int_, 2).is_object assert function(void, [double]).is_function
from numba.typesystem import * assert int_.is_int assert int_.is_numeric assert long_.is_int assert long_.is_numeric assert not long_.is_long assert float_.is_float assert float_.is_numeric assert double.is_float assert double.is_numeric assert not double.is_double assert object_.is_object assert list_(int_, 2).is_list assert list_(int_, 2).is_object assert function(void, [double]).is_function
Update test for rename of list type
Update test for rename of list type
Python
bsd-2-clause
gdementen/numba,GaZ3ll3/numba,stuartarchibald/numba,pitrou/numba,jriehl/numba,stefanseefeld/numba,ssarangi/numba,sklam/numba,IntelLabs/numba,gdementen/numba,jriehl/numba,stuartarchibald/numba,GaZ3ll3/numba,GaZ3ll3/numba,seibert/numba,numba/numba,pombredanne/numba,jriehl/numba,pitrou/numba,cpcloud/numba,gmarkall/numba,stefanseefeld/numba,pitrou/numba,gmarkall/numba,pitrou/numba,sklam/numba,pombredanne/numba,ssarangi/numba,jriehl/numba,gdementen/numba,pombredanne/numba,jriehl/numba,sklam/numba,cpcloud/numba,sklam/numba,numba/numba,gmarkall/numba,gdementen/numba,numba/numba,numba/numba,stonebig/numba,GaZ3ll3/numba,cpcloud/numba,IntelLabs/numba,GaZ3ll3/numba,ssarangi/numba,seibert/numba,gdementen/numba,sklam/numba,seibert/numba,pombredanne/numba,pitrou/numba,seibert/numba,stuartarchibald/numba,stonebig/numba,gmarkall/numba,seibert/numba,ssarangi/numba,stuartarchibald/numba,cpcloud/numba,cpcloud/numba,stefanseefeld/numba,stuartarchibald/numba,gmarkall/numba,IntelLabs/numba,stefanseefeld/numba,stonebig/numba,stonebig/numba,numba/numba,pombredanne/numba,stefanseefeld/numba,ssarangi/numba,stonebig/numba,IntelLabs/numba,IntelLabs/numba
c9f5bee80dfb0523050afc6cb72eea096a2e3b95
ir/util.py
ir/util.py
import os import stat import time def updateModificationTime(path): accessTime = os.stat(path)[stat.ST_ATIME] modificationTime = time.time() os.utime(path, (accessTime, modificationTime))
import os import stat import time from PyQt4.QtCore import SIGNAL from PyQt4.QtGui import QAction, QKeySequence, QMenu, QShortcut from aqt import mw def addMenu(name): if not hasattr(mw, 'customMenus'): mw.customMenus = {} if name not in mw.customMenus: menu = QMenu('&' + name, mw) mw.customMenus[name] = menu mw.form.menubar.insertMenu(mw.form.menuTools.menuAction(), mw.customMenus[name]) def addMenuItem(menuName, text, function, keys=None): action = QAction(text, mw) if keys: action.setShortcut(QKeySequence(keys)) mw.connect(action, SIGNAL('triggered()'), function) if menuName == 'File': mw.form.menuCol.addAction(action) elif menuName == 'Edit': mw.form.menuEdit.addAction(action) elif menuName == 'Tools': mw.form.menuTools.addAction(action) elif menuName == 'Help': mw.form.menuHelp.addAction(action) else: addMenu(menuName) mw.customMenus[menuName].addAction(action) def addShortcut(function, keys): shortcut = QShortcut(QKeySequence(keys), mw) mw.connect(shortcut, SIGNAL('activated()'), function) def updateModificationTime(path): accessTime = os.stat(path)[stat.ST_ATIME] modificationTime = time.time() os.utime(path, (accessTime, modificationTime))
Add helper functions for adding menu items & shortcuts
Add helper functions for adding menu items & shortcuts
Python
isc
luoliyan/incremental-reading-for-anki,luoliyan/incremental-reading-for-anki
eff7f0bf52507013859788eec29eea819af6ce63
grow/preprocessors/routes_cache.py
grow/preprocessors/routes_cache.py
from . import base class RoutesCachePreprocessor(base.BasePreprocessor): KIND = '_routes_cache' def __init__(self, pod): self.pod = pod def run(self, build=True): self.pod.routes.reset_cache(rebuild=True) def list_watched_dirs(self): return ['/content/', '/static/']
import datetime from . import base class RoutesCachePreprocessor(base.BasePreprocessor): KIND = '_routes_cache' LIMIT = datetime.timedelta(seconds=1) def __init__(self, pod): self.pod = pod self._last_run = None def run(self, build=True): # Avoid rebuilding routes cache more than once per second. now = datetime.datetime.now() limit = RoutesCachePreprocessor.LIMIT if not self._last_run or (now - self._last_run) > limit: self.pod.routes.reset_cache(rebuild=True, inject=False) self._last_run = now def list_watched_dirs(self): return ['/content/']
Implement ratelimit on routes cache.
Implement ratelimit on routes cache.
Python
mit
denmojo/pygrow,grow/pygrow,grow/grow,denmojo/pygrow,denmojo/pygrow,grow/pygrow,grow/grow,grow/grow,grow/grow,grow/pygrow,denmojo/pygrow
38d16da934503a964ae5e16aafd65c0642970472
pysocialids.py
pysocialids.py
# # define overloading of ids for each social site # to be customized for your accounts # # # flickr # def flickr_api_secret(): return "" def flickr_api_key(): return "" def flickr_user_id(): return "" # # twitter # def twitter_consumer_key(): return "" def twitter_consumer_secret(): return "" def twitter_access_token(): return "" def twitter_access_token_secret(): return "" def twitter_screenname(): return "" # # tumblr # def tumblr_consumer_key(): return "" def tumblr_secret_key(): return "" def tumblr_access_token(): return "" def tumblr_access_token_secret(): return "" def tumblr_userid(): return ""
# # define overloading of ids for each social site # to be customized for your accounts # # # flickr # def flickr_api_secret(): return "" def flickr_api_key(): return "" def flickr_user_id(): return "" # # twitter # def twitter_consumer_key(): return "" def twitter_consumer_secret(): return "" def twitter_access_token(): return "" def twitter_access_token_secret(): return "" def twitter_screenname(): return "" # # tumblr # def tumblr_consumer_key(): return "" def tumblr_secret_key(): return "" def tumblr_access_token(): return "" def tumblr_access_token_secret(): return "" def tumblr_userid(): return "" # # faa # def faa_username(): return "" def faa_password(): return "" def faa_profile(): return "" # # wordpress # def wordpress_blogid(): return ""
Complete social ids for wordpress and faa
Complete social ids for wordpress and faa
Python
mit
JulienLeonard/socialstats
b555137fa7c7e84353daa1d12e29ba636bb9fd77
post_office/test_settings.py
post_office/test_settings.py
# -*- coding: utf-8 -*- INSTALLED_APPS = ['post_office'] DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', }, } CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.memcached.PyLibMCCache', 'LOCATION': '127.0.0.1:11211', 'TIMEOUT': 36000, 'KEY_PREFIX': 'stamps:', }, 'post_office': { 'BACKEND': 'django.core.cache.backends.memcached.PyLibMCCache', 'LOCATION': '127.0.0.1:11211', } }
# -*- coding: utf-8 -*- INSTALLED_APPS = ['post_office'] DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', }, } CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'TIMEOUT': 36000, 'KEY_PREFIX': 'post-office', }, 'post_office': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'TIMEOUT': 36000, 'KEY_PREFIX': 'post-office', } }
Use locmem cache for tests.
Use locmem cache for tests.
Python
mit
JostCrow/django-post_office,ekohl/django-post_office,CasherWest/django-post_office,ui/django-post_office,carrerasrodrigo/django-post_office,CasherWest/django-post_office,fapelhanz/django-post_office,jrief/django-post_office,RafRaf/django-post_office,LeGast00n/django-post_office,yprez/django-post_office,ui/django-post_office
96856fc267ec99de6e83a997346c853dbdb1cfd5
reddit_adzerk/lib/validator.py
reddit_adzerk/lib/validator.py
import re from r2.lib.errors import errors from r2.lib.validator import ( VMultiByPath, Validator, ) from r2.models import ( NotFound, Subreddit, ) is_multi_rx = re.compile(r"\A/?(user|r)/[^\/]+/m/(?P<name>.*?)/?\Z") class VSite(Validator): def __init__(self, param, required=True, *args, **kwargs): super(VSite, self).__init__(param, *args, **kwargs) self.required = required def run(self, path): if not self.required and not path: return if is_multi_rx.match(path): return VMultiByPath(self.param, kinds=("m")).run(path) else: try: return Subreddit._by_name(path) except NotFound: self.set_error(errors.INVALID_SITE_PATH)
import re from r2.lib.errors import errors from r2.lib.validator import ( VMultiByPath, Validator, ) from r2.models import ( NotFound, Subreddit, MultiReddit, ) is_multi_rx = re.compile(r"\A/?(user|r)/[^\/]+/m/(?P<name>.*?)/?\Z") is_adhoc_multi_rx = re.compile(r"\A\/r\/((?:[0-z]+\+)+(?:[0-z])+)\Z") class VSite(Validator): def __init__(self, param, required=True, *args, **kwargs): super(VSite, self).__init__(param, *args, **kwargs) self.required = required def run(self, path): if not self.required and not path: return adhoc_multi_rx = is_adhoc_multi_rx.match(path) if is_multi_rx.match(path): return VMultiByPath(self.param, kinds=("m")).run(path) elif adhoc_multi_rx: sr_strings = adhoc_multi_rx.groups()[0].split("+") srs = Subreddit._by_name(sr_strings, stale=True).values() return MultiReddit(path, srs) else: try: return Subreddit._by_name(path) except NotFound: self.set_error(errors.INVALID_SITE_PATH)
Fix adhoc multisubreddit promo_request network request
Fix adhoc multisubreddit promo_request network request
Python
bsd-3-clause
madbook/reddit-plugin-adzerk,madbook/reddit-plugin-adzerk,madbook/reddit-plugin-adzerk
06d271da251d3c85266629197d6b31b2ff617623
sympy/matrices/expressions/tests/test_hadamard.py
sympy/matrices/expressions/tests/test_hadamard.py
from sympy.matrices.expressions import MatrixSymbol, HadamardProduct from sympy.matrices import ShapeError from sympy import symbols from sympy.utilities.pytest import raises def test_HadamardProduct(): n, m, k = symbols('n,m,k') Z = MatrixSymbol('Z', n, n) A = MatrixSymbol('A', n, m) B = MatrixSymbol('B', n, m) C = MatrixSymbol('C', m, k) assert HadamardProduct(A, B, A).shape == A.shape raises(ShapeError, lambda: HadamardProduct(A, B.T)) raises(TypeError, lambda: A + 1) raises(TypeError, lambda: 5 + A) raises(TypeError, lambda: 5 - A) assert HadamardProduct(A, 2*B, -A)[1, 1] == -2 * A[1, 1]**2 * B[1, 1] mix = HadamardProduct(Z*A, B)*C assert mix.shape == (n, k)
from sympy.matrices.expressions import MatrixSymbol, HadamardProduct from sympy.matrices import ShapeError from sympy import symbols from sympy.utilities.pytest import raises def test_HadamardProduct(): n, m, k = symbols('n,m,k') Z = MatrixSymbol('Z', n, n) A = MatrixSymbol('A', n, m) B = MatrixSymbol('B', n, m) C = MatrixSymbol('C', m, k) assert HadamardProduct(A, B, A).shape == A.shape raises(ShapeError, lambda: HadamardProduct(A, B.T)) raises(TypeError, lambda: A + 1) raises(TypeError, lambda: 5 + A) raises(TypeError, lambda: 5 - A) assert HadamardProduct(A, 2*B, -A)[1, 1] == -2 * A[1, 1]**2 * B[1, 1] mix = HadamardProduct(Z*A, B)*C assert mix.shape == (n, k) def test_mixed_indexing(): X = MatrixSymbol('X', 2, 2) Y = MatrixSymbol('Y', 2, 2) Z = MatrixSymbol('Z', 2, 2) assert (X*HadamardProduct(Y, Z))[0, 0] == \ X[0, 0]*Y[0, 0]*Z[0, 0] + X[0, 1]*Y[1, 0]*Z[1, 0]
Add index test for Hadamard+MatMul mix
Add index test for Hadamard+MatMul mix
Python
bsd-3-clause
kaichogami/sympy,Sumith1896/sympy,sahmed95/sympy,MridulS/sympy,Gadal/sympy,yashsharan/sympy,Shaswat27/sympy,chaffra/sympy,beni55/sympy,drufat/sympy,MechCoder/sympy,souravsingh/sympy,kaushik94/sympy,abhiii5459/sympy,liangjiaxing/sympy,iamutkarshtiwari/sympy,Gadal/sympy,grevutiu-gabriel/sympy,vipulroxx/sympy,moble/sympy,kevalds51/sympy,atsao72/sympy,atreyv/sympy,lindsayad/sympy,beni55/sympy,asm666/sympy,jbbskinny/sympy,lindsayad/sympy,Designist/sympy,wanglongqi/sympy,lidavidm/sympy,madan96/sympy,meghana1995/sympy,wanglongqi/sympy,kumarkrishna/sympy,cswiercz/sympy,drufat/sympy,farhaanbukhsh/sympy,amitjamadagni/sympy,cccfran/sympy,hrashk/sympy,yashsharan/sympy,cswiercz/sympy,AkademieOlympia/sympy,kmacinnis/sympy,saurabhjn76/sympy,asm666/sympy,hargup/sympy,ahhda/sympy,mafiya69/sympy,liangjiaxing/sympy,VaibhavAgarwalVA/sympy,mafiya69/sympy,moble/sympy,madan96/sympy,emon10005/sympy,debugger22/sympy,Curious72/sympy,garvitr/sympy,ChristinaZografou/sympy,saurabhjn76/sympy,pandeyadarsh/sympy,sahilshekhawat/sympy,shipci/sympy,yukoba/sympy,jamesblunt/sympy,kaushik94/sympy,garvitr/sympy,MechCoder/sympy,mafiya69/sympy,abloomston/sympy,Vishluck/sympy,kmacinnis/sympy,emon10005/sympy,Shaswat27/sympy,madan96/sympy,aktech/sympy,maniteja123/sympy,cccfran/sympy,VaibhavAgarwalVA/sympy,meghana1995/sympy,souravsingh/sympy,cswiercz/sympy,emon10005/sympy,debugger22/sympy,Arafatk/sympy,hrashk/sympy,saurabhjn76/sympy,lindsayad/sympy,Arafatk/sympy,shikil/sympy,yukoba/sympy,farhaanbukhsh/sympy,skidzo/sympy,abloomston/sympy,postvakje/sympy,ahhda/sympy,skidzo/sympy,dqnykamp/sympy,sahmed95/sympy,jerli/sympy,ga7g08/sympy,oliverlee/sympy,sahilshekhawat/sympy,toolforger/sympy,kevalds51/sympy,mcdaniel67/sympy,wanglongqi/sympy,MridulS/sympy,souravsingh/sympy,rahuldan/sympy,Davidjohnwilson/sympy,skidzo/sympy,kaichogami/sympy,rahuldan/sympy,drufat/sympy,Vishluck/sympy,pandeyadarsh/sympy,moble/sympy,jamesblunt/sympy,AunShiLord/sympy,Davidjohnwilson/sympy,MechCoder/sympy,grevutiu-gabriel/sympy,postvakje/symp
y,wyom/sympy,jamesblunt/sympy,vipulroxx/sympy,Davidjohnwilson/sympy,Shaswat27/sympy,atsao72/sympy,beni55/sympy,yashsharan/sympy,aktech/sympy,pbrady/sympy,Titan-C/sympy,maniteja123/sympy,chaffra/sympy,jaimahajan1997/sympy,garvitr/sympy,dqnykamp/sympy,amitjamadagni/sympy,lidavidm/sympy,diofant/diofant,Gadal/sympy,ga7g08/sympy,toolforger/sympy,yukoba/sympy,bukzor/sympy,kevalds51/sympy,vipulroxx/sympy,pbrady/sympy,sampadsaha5/sympy,hargup/sympy,bukzor/sympy,jbbskinny/sympy,oliverlee/sympy,abloomston/sympy,AunShiLord/sympy,AkademieOlympia/sympy,sahmed95/sympy,mcdaniel67/sympy,iamutkarshtiwari/sympy,sunny94/temp,hargup/sympy,grevutiu-gabriel/sympy,mcdaniel67/sympy,Sumith1896/sympy,chaffra/sympy,shikil/sympy,meghana1995/sympy,rahuldan/sympy,kumarkrishna/sympy,cccfran/sympy,Curious72/sympy,lidavidm/sympy,toolforger/sympy,kaushik94/sympy,atreyv/sympy,Sumith1896/sympy,atsao72/sympy,postvakje/sympy,dqnykamp/sympy,hrashk/sympy,farhaanbukhsh/sympy,AkademieOlympia/sympy,Mitchkoens/sympy,MridulS/sympy,skirpichev/omg,pandeyadarsh/sympy,Titan-C/sympy,atreyv/sympy,VaibhavAgarwalVA/sympy,Curious72/sympy,bukzor/sympy,sampadsaha5/sympy,Vishluck/sympy,shipci/sympy,kaichogami/sympy,abhiii5459/sympy,jbbskinny/sympy,debugger22/sympy,asm666/sympy,sahilshekhawat/sympy,jaimahajan1997/sympy,shipci/sympy,wyom/sympy,jerli/sympy,liangjiaxing/sympy,iamutkarshtiwari/sympy,abhiii5459/sympy,sunny94/temp,Mitchkoens/sympy,ChristinaZografou/sympy,Designist/sympy,sampadsaha5/sympy,shikil/sympy,sunny94/temp,kmacinnis/sympy,ChristinaZografou/sympy,jerli/sympy,wyom/sympy,ahhda/sympy,oliverlee/sympy,ga7g08/sympy,aktech/sympy,Mitchkoens/sympy,Titan-C/sympy,maniteja123/sympy,jaimahajan1997/sympy,pbrady/sympy,Arafatk/sympy,AunShiLord/sympy,Designist/sympy,kumarkrishna/sympy
24d2b9620af40395c66bd8d93c443fddfe74b5cf
hs_core/tests/api/rest/__init__.py
hs_core/tests/api/rest/__init__.py
from test_create_resource import * from test_resource_file import * from test_resource_list import * from test_resource_meta import * from test_resource_types import * from test_set_access_rules import * from test_user_info import *
# Do not import tests here as this will cause # some tests to be discovered and run twice
Remove REST test imports to avoid some tests being run twice
Remove REST test imports to avoid some tests being run twice
Python
bsd-3-clause
ResearchSoftwareInstitute/MyHPOM,hydroshare/hydroshare,ResearchSoftwareInstitute/MyHPOM,hydroshare/hydroshare,FescueFungiShare/hydroshare,ResearchSoftwareInstitute/MyHPOM,RENCI/xDCIShare,ResearchSoftwareInstitute/MyHPOM,hydroshare/hydroshare,FescueFungiShare/hydroshare,ResearchSoftwareInstitute/MyHPOM,RENCI/xDCIShare,hydroshare/hydroshare,RENCI/xDCIShare,RENCI/xDCIShare,FescueFungiShare/hydroshare,hydroshare/hydroshare,RENCI/xDCIShare,FescueFungiShare/hydroshare,FescueFungiShare/hydroshare
d328129a2f2909c1b8769f1edb94746c4a88dd28
test_project/test_models.py
test_project/test_models.py
from django.db import models class TestUser0(models.Model): username = models.CharField() test_field = models.CharField('My title') class Meta: app_label = 'controlcenter' def foo(self): return 'original foo value' foo.short_description = 'original foo label' def bar(self): return 'original bar value' bar.short_description = 'original bar label' def baz(self): pass baz.short_description = '' def egg(self): return 'original egg value' class TestUser1(models.Model): primary = models.AutoField(primary_key=True) username = models.CharField() class Meta: app_label = 'controlcenter'
from django.db import models class TestUser0(models.Model): username = models.CharField(max_length=255) test_field = models.CharField('My title', max_length=255) class Meta: app_label = 'controlcenter' def foo(self): return 'original foo value' foo.short_description = 'original foo label' def bar(self): return 'original bar value' bar.short_description = 'original bar label' def baz(self): pass baz.short_description = '' def egg(self): return 'original egg value' class TestUser1(models.Model): primary = models.AutoField(primary_key=True) username = models.CharField(max_length=255) class Meta: app_label = 'controlcenter'
Add `max_length` to char fields
Add `max_length` to char fields
Python
bsd-3-clause
byashimov/django-controlcenter,byashimov/django-controlcenter,byashimov/django-controlcenter
36e6e2bedcc37a48097ccf0abd544ca095748412
build/strip-po-charset.py
build/strip-po-charset.py
# # strip-po-charset.py # import sys, string def strip_po_charset(inp, out): out.write(string.replace(inp.read(), "\"Content-Type: text/plain; charset=UTF-8\\n\"\n","")) def main(): if len(sys.argv) != 3: print "Usage: %s <input (po) file> <output (spo) file>" % sys.argv[0] print print "Unsupported number of arguments; 2 required." sys.exit(1) strip_po_charset(open(sys.argv[1],'r'), open(sys.argv[2],'w')) if __name__ == '__main__': main()
# # strip-po-charset.py # import sys, string def strip_po_charset(inp, out): out.write(string.replace(inp.read(), "\"Content-Type: text/plain; charset=UTF-8\\n\"\n","")) def main(): if len(sys.argv) != 3: print "Usage: %s <input (po) file> <output (spo) file>" % sys.argv[0] print print "Unsupported number of arguments; 2 required." sys.exit(1) strip_po_charset(open(sys.argv[1],'r'), open(sys.argv[2],'w')) if __name__ == '__main__': main()
Set svn:eol-style='native' on some text files that were lacking it.
Set svn:eol-style='native' on some text files that were lacking it. git-svn-id: f8a4e5e023278da1e04e203c7fe051e3c4285d88@855475 13f79535-47bb-0310-9956-ffa450edef68
Python
apache-2.0
wbond/subversion,wbond/subversion,wbond/subversion,YueLinHo/Subversion,YueLinHo/Subversion,YueLinHo/Subversion,wbond/subversion,wbond/subversion,wbond/subversion,YueLinHo/Subversion,wbond/subversion,YueLinHo/Subversion,YueLinHo/Subversion,YueLinHo/Subversion,YueLinHo/Subversion
a00f9c56671c028c69638f61d3d4c1fd022c0430
cinspect/tests/test_patching.py
cinspect/tests/test_patching.py
from __future__ import absolute_import, print_function # Standard library import inspect import unittest from cinspect import getfile, getsource class TestHelloModule(unittest.TestCase): def test_patching_inspect_should_work(self): # Given inspect.getsource = getsource inspect.getfile = getfile # When t = getfile(unittest) s = getsource(unittest.main) # Then self.assertGreater(len(t), 0) self.assertGreater(len(s), 0)
from __future__ import absolute_import, print_function # Standard library import inspect import unittest from cinspect import getfile, getsource class TestPatching(unittest.TestCase): def test_patching_inspect_should_work(self): # Given inspect.getsource = getsource inspect.getfile = getfile # When t = getfile(unittest) s = getsource(unittest.main) # Then self.assertGreater(len(t), 0) self.assertGreater(len(s), 0)
Fix copy paste bug in test class name.
Fix copy paste bug in test class name.
Python
bsd-3-clause
punchagan/cinspect,punchagan/cinspect
97e39ec9e03728384ad00a7e011194412521631e
tests/test_containers.py
tests/test_containers.py
try: from http.server import SimpleHTTPRequestHandler except ImportError: from SimpleHTTPServer import SimpleHTTPRequestHandler try: from socketserver import TCPServer except ImportError: from SocketServer import TCPServer import os import threading import unittest import containers PORT = 8080 class TestServer(TCPServer): allow_reuse_address = True handler = SimpleHTTPRequestHandler httpd = TestServer(('', PORT), handler) httpd_thread = threading.Thread(target=httpd.serve_forever) httpd_thread.setDaemon(True) httpd_thread.start() class TestDiscovery(unittest.TestCase): def test_get_etcd(self): containers.simple_discovery('localhost:8080/tests/etc/etcd-v2.0.0-linux-amd64', var='/tmp', secure=False) if __name__ == '__main__': unittest.main()
try: from http.server import SimpleHTTPRequestHandler except ImportError: from SimpleHTTPServer import SimpleHTTPRequestHandler try: from socketserver import TCPServer except ImportError: from SocketServer import TCPServer import os import threading import unittest import glob, os import containers PORT = 8080 class TestServer(TCPServer): allow_reuse_address = True handler = SimpleHTTPRequestHandler httpd = TestServer(('', PORT), handler) httpd_thread = threading.Thread(target=httpd.serve_forever) httpd_thread.setDaemon(True) httpd_thread.start() class TestDiscovery(unittest.TestCase): def tearDown(self): filelist = glob.glob('/tmp/*.aci') for f in filelist: os.remove(f) def test_get_etcd(self): containers.simple_discovery('localhost:8080/tests/etc/etcd-v2.0.0-linux-amd64', var='/tmp', secure=False) if __name__ == '__main__': unittest.main()
Remove aci files after tests have run
Remove aci files after tests have run
Python
mit
kragniz/containers
daf4a6fd35811210c546782a771c6ddef8641f25
opps/images/templatetags/images_tags.py
opps/images/templatetags/images_tags.py
#!/usr/bin/env python # -*- coding: utf-8 -*- from django import template from django.conf import settings from ..generate import image_url as url register = template.Library() @register.simple_tag def image_url(image_url, **kwargs): return url(image_url=image_url, **kwargs) @register.simple_tag def image_obj(image, **kwargs): HALIGN_VALUES = ("left", "center", "right") VALIGN_VALUES = ("top", "middle", "bottom") if image == "": return "" if settings.THUMBOR_ENABLED: new = {} new['flip'] = image.flip new['flop'] = image.flop if image.halign and image.halign in HALIGN_VALUES: new['halign'] = image.halign if image.valign and image.valign in VALIGN_VALUES: new['valign'] = image.valign new['fit_in'] = image.fit_in new['smart'] = image.smart if image.crop_x1 > 0 or image.crop_x2 > 0 or image.crop_y1 > 0 or \ image.crop_y2 > 0: new['crop'] = ((image.crop_x1, image.crop_y1), (image.crop_x2, image.crop_y2)) kwargs = dict(new, **kwargs) return url(image_url=image.archive.url, **kwargs)
#!/usr/bin/env python # -*- coding: utf-8 -*- from django import template from django.conf import settings from ..generate import image_url as url register = template.Library() @register.simple_tag def image_url(image_url, **kwargs): return url(image_url=image_url, **kwargs) @register.simple_tag def image_obj(image, **kwargs): HALIGN_VALUES = ("left", "center", "right") VALIGN_VALUES = ("top", "middle", "bottom") if image == "" or not image: return "" if settings.THUMBOR_ENABLED: new = {} new['flip'] = image.flip new['flop'] = image.flop if image.halign and image.halign in HALIGN_VALUES: new['halign'] = image.halign if image.valign and image.valign in VALIGN_VALUES: new['valign'] = image.valign new['fit_in'] = image.fit_in new['smart'] = image.smart if image.crop_x1 > 0 or image.crop_x2 > 0 or image.crop_y1 > 0 or \ image.crop_y2 > 0: new['crop'] = ((image.crop_x1, image.crop_y1), (image.crop_x2, image.crop_y2)) kwargs = dict(new, **kwargs) return url(image_url=image.archive.url, **kwargs)
Fix image_obj template tag when sending Nonetype image
Fix image_obj template tag when sending Nonetype image
Python
mit
YACOWS/opps,YACOWS/opps,jeanmask/opps,opps/opps,williamroot/opps,jeanmask/opps,opps/opps,YACOWS/opps,jeanmask/opps,YACOWS/opps,williamroot/opps,opps/opps,williamroot/opps,williamroot/opps,opps/opps,jeanmask/opps
53d25950eb1ff21bb4488b60e802cb243735681f
cmsplugin_zinnia/placeholder.py
cmsplugin_zinnia/placeholder.py
"""Placeholder model for Zinnia""" import inspect from cms.models.fields import PlaceholderField from cms.plugin_rendering import render_placeholder from zinnia.models.entry import EntryAbstractClass class EntryPlaceholder(EntryAbstractClass): """Entry with a Placeholder to edit content""" content_placeholder = PlaceholderField('content') def acquire_context(self): """ Inspect the stack to acquire the current context used, to render the placeholder. I'm really sorry for this, but if you have a better way, you are welcome ! """ frame = None try: for f in inspect.stack()[1:]: frame = f[0] args, varargs, keywords, alocals = inspect.getargvalues(frame) if 'context' in args: return alocals['context'] finally: del frame @property def html_content(self): """ Render the content_placeholder field dynamicly. https://github.com/Fantomas42/cmsplugin-zinnia/issues/3 """ context = self.acquire_context() return render_placeholder(self.content_placeholder, context) class Meta(EntryAbstractClass.Meta): """EntryPlaceholder's Meta""" abstract = True
"""Placeholder model for Zinnia""" import inspect from django.template.context import Context, RequestContext from cms.models.fields import PlaceholderField from cms.plugin_rendering import render_placeholder from zinnia.models.entry import EntryAbstractClass class EntryPlaceholder(EntryAbstractClass): """Entry with a Placeholder to edit content""" content_placeholder = PlaceholderField('content') def acquire_context(self): """ Inspect the stack to acquire the current context used, to render the placeholder. I'm really sorry for this, but if you have a better way, you are welcome ! """ frame = None request = None try: for f in inspect.stack()[1:]: frame = f[0] args, varargs, keywords, alocals = inspect.getargvalues(frame) if not request and 'request' in args: request = alocals['request'] if 'context' in args: return alocals['context'] finally: del frame if request is not None: return RequestContext(request) else: return Context() @property def html_content(self): """ Render the content_placeholder field dynamicly. https://github.com/Fantomas42/cmsplugin-zinnia/issues/3 """ context = self.acquire_context() return render_placeholder(self.content_placeholder, context) class Meta(EntryAbstractClass.Meta): """EntryPlaceholder's Meta""" abstract = True
Make acquire_context always return some Context
Make acquire_context always return some Context
Python
bsd-3-clause
django-blog-zinnia/cmsplugin-zinnia,bittner/cmsplugin-zinnia,bittner/cmsplugin-zinnia,django-blog-zinnia/cmsplugin-zinnia,bittner/cmsplugin-zinnia,django-blog-zinnia/cmsplugin-zinnia
caf245e14421472adb0668e57adf5a3e3ae68424
scuba/utils.py
scuba/utils.py
try: from shlex import quote as shell_quote except ImportError: from pipes import quote as shell_quote def format_cmdline(args, maxwidth=80): def lines(): line = '' for a in (shell_quote(a) for a in args): if len(line) + len(a) > maxwidth: yield line line = '' line += ' ' + a return ' \\\n'.join(lines())[1:]
try: from shlex import quote as shell_quote except ImportError: from pipes import quote as shell_quote def format_cmdline(args, maxwidth=80): '''Format args into a shell-quoted command line. The result will be wrapped to maxwidth characters where possible, not breaking a single long argument. ''' # Leave room for the space and backslash at the end of each line maxwidth -= 2 def lines(): line = '' for a in (shell_quote(a) for a in args): # If adding this argument will make the line too long, # yield the current line, and start a new one. if len(line) + len(a) + 1 > maxwidth: yield line line = '' # Append this argument to the current line, separating # it by a space from the existing arguments. if line: line += ' ' + a else: line = a yield line return ' \\\n'.join(lines())
Fix missing final line from format_cmdline()
Fix missing final line from format_cmdline() The previous code was missing 'yield line' after the for loop. This commit fixes that, as well as the extra space at the beginning of each line. Normally, we'd use str.join() to avoid such a problem, but this code is accumulating the line manually, so we can't just join the args together. This fixes #41.
Python
mit
JonathonReinhart/scuba,JonathonReinhart/scuba,JonathonReinhart/scuba
2c5c04fd0bb1dc4f5bf54af2e2739fb6a0f1d2c4
survey/urls.py
survey/urls.py
from django.conf.urls import patterns, include, url from .views import IndexView, SurveyDetail, ConfirmView, SurveyCompleted urlpatterns = patterns('', # Examples: url(r'^survey/$', IndexView.as_view(), name='survey-list'), url(r'^survey/(?P<id>[a-zA-Z0-9-]+)/', SurveyDetail.as_view(), name='survey-detail'), url(r'^survey/(?P<id>[a-zA-Z0-9-]+)/completed/', SurveyCompleted.as_view(), name='survey-completed'), url(r'^survey/(?P<id>[a-zA-Z0-9-]+)-(?P<step>\d+)/', SurveyDetail.as_view(), name='survey-detail-step'), url(r'^confirm/(?P<uuid>\w+)/', ConfirmView.as_view(), name='survey-confirmation'), )
from django.conf.urls import patterns, include, url from .views import IndexView, SurveyDetail, ConfirmView, SurveyCompleted urlpatterns = patterns('', url(r'^survey/$', IndexView.as_view(), name='survey-list'), url(r'^survey/(?P<id>\d+)/', SurveyDetail.as_view(), name='survey-detail'), url(r'^survey/(?P<id>\d+)/completed/', SurveyCompleted.as_view(), name='survey-completed'), url(r'^survey/(?P<id>\d+)-(?P<step>\d+)/', SurveyDetail.as_view(), name='survey-detail-step'), url(r'^confirm/(?P<uuid>\w+)/', ConfirmView.as_view(), name='survey-confirmation'), )
Fix - No more crash when entering an url with letter
Fix - No more crash when entering an url with letter
Python
agpl-3.0
Pierre-Sassoulas/django-survey,Pierre-Sassoulas/django-survey,Pierre-Sassoulas/django-survey
0381fe32664e246011d5917a81c81fce936ae364
tests/tangelo-verbose.py
tests/tangelo-verbose.py
import fixture def test_standard_verbosity(): stderr = fixture.start_tangelo(stderr=True) stderr = '\n'.join(stderr) assert 'TANGELO Server is running' in stderr assert 'TANGELO Hostname' in stderr fixture.stop_tangelo() def test_lower_verbosity(): stderr = fixture.start_tangelo("-q", stderr=True) stderr = '\n'.join(stderr) assert 'TANGELO Server is running' in stderr assert 'TANGELO Hostname' not in stderr fixture.stop_tangelo()
import fixture def test_standard_verbosity(): stderr = fixture.start_tangelo(stderr=True) stderr = '\n'.join(stderr) fixture.stop_tangelo() assert 'TANGELO Server is running' in stderr assert 'TANGELO Hostname' in stderr def test_lower_verbosity(): stderr = fixture.start_tangelo("-q", stderr=True) stderr = '\n'.join(stderr) fixture.stop_tangelo() assert 'TANGELO Server is running' in stderr assert 'TANGELO Hostname' not in stderr
Reorder when the tangelo instance gets shut down in a test so that if an assert fails, other tests will still be able to run.
Reorder when the tangelo instance gets shut down in a test so that if an assert fails, other tests will still be able to run.
Python
apache-2.0
Kitware/tangelo,Kitware/tangelo,Kitware/tangelo
e2954d74b77046d3dee8134128f122a09dff3c7d
clowder_server/emailer.py
clowder_server/emailer.py
from django.core.mail import send_mail from clowder_account.models import ClowderUser ADMIN_EMAIL = 'admin@clowder.io' def send_alert(company, name): for user in ClowderUser.objects.filter(company=company): subject = 'FAILURE: %s' % (name) body = subject send_mail(subject, body, ADMIN_EMAIL, [user.email], fail_silently=True)
import os import requests from django.core.mail import send_mail from clowder_account.models import ClowderUser ADMIN_EMAIL = 'admin@clowder.io' def send_alert(company, name): for user in ClowderUser.objects.filter(company=company): subject = 'FAILURE: %s' % (name) body = subject slack_token = os.getenv('PARKME_SLACK_TOKEN') url = 'https://hooks.slack.com/services/%s' % (slack_token) payload = {"username": "devopsbot", "text": body, "icon_emoji": ":robot_face:"} requests.post(url, json=payload) send_mail(subject, body, ADMIN_EMAIL, [user.email], fail_silently=True)
Add support for slack messaging
Add support for slack messaging
Python
agpl-3.0
keithhackbarth/clowder_server,keithhackbarth/clowder_server,keithhackbarth/clowder_server,keithhackbarth/clowder_server
247c1fc0af2556a5bd421488430d97f45c533771
kaggle/titanic/categorical_and_scaler_prediction.py
kaggle/titanic/categorical_and_scaler_prediction.py
import pandas def main(): train_all = pandas.DataFrame.from_csv('train.csv') train = train_all[['Survived', 'Sex', 'Fare']] print(train) if __name__ == '__main__': main()
import pandas from sklearn.naive_bayes import MultinomialNB from sklearn.cross_validation import train_test_split from sklearn.preprocessing import LabelEncoder def main(): train_all = pandas.DataFrame.from_csv('train.csv') train = train_all[['Survived', 'Sex', 'Fare']][:20] gender_label = LabelEncoder() train.Sex = gender_label.fit_transform(train.Sex) X = train[['Sex', 'Fare']] y = train['Survived'] X_train, X_test, y_train, y_test = train_test_split( X, y, test_size=0.33, random_state=42) clf = MultinomialNB() clf.fit(X_train, y_train) print(clf.predict(X_test)) if __name__ == '__main__': main()
Make predictions with gender and ticket price
Make predictions with gender and ticket price
Python
mit
noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit
70259a9f9ce5647f9c36b70c2eb20b51ba447eda
middleware.py
middleware.py
#!/usr/bin/env python3 class Routes: '''Define the feature of route for URIs.''' def __init__(self): self._Routes = [] def AddRoute(self, uri, callback): '''Add an URI into the route table.''' self._Routes.append([uri, callback]) def Dispatch(self, req, res): '''Dispatch an URI according to the route table.''' uri = "" for fv in req.Header: if fv[0] == "URI": uri = fv[1] found = 1 break found = 0 for r in self._Routes: if r[0] == uri: r[1](req, res) found = 1 break if found != 1: self._NotFound(req, res) def _NotFound(self, req, res): '''Define the default error page for not found URI.''' res.Header.append(["Status", "404 Not Found"])
#!/usr/bin/env python3 class Routes: '''Define the feature of route for URIs.''' def __init__(self): self._Routes = [] def AddRoute(self, uri, callback): '''Add an URI into the route table.''' self._Routes.append([uri, callback]) def Dispatch(self, req, res): '''Dispatch an URI according to the route table.''' uri = "" for fv in req.Header: if fv[0] == "URI": uri = fv[1] found = 1 break found = 0 # Check the route for r in self._Routes: if r[0] == uri: r[1](req, res) found = 1 break # Check static files if found != 1: found = self._ReadStaticFiles(uri, res) # It is really not found if found != 1: self._NotFound(req, res) def _ReadStaticFiles(self, uri, res): found = 0 try: f = open("static/{}".format(uri), "r") res.Body = f.read() f.close() found = 1 except: pass return found def _NotFound(self, req, res): '''Define the default error page for not found URI.''' res.Header.append(["Status", "404 Not Found"])
Add read static files feature.
Add read static files feature.
Python
bsd-3-clause
starnight/MicroHttpServer,starnight/MicroHttpServer,starnight/MicroHttpServer,starnight/MicroHttpServer
2ef97501b15a9369d21953312115ea36355f251c
minimax.py
minimax.py
class Heuristic: def heuristic(self, board, color): raise NotImplementedError('Dont override this class')
class Heuristic: def heuristic(self, board, color): raise NotImplementedError('Dont override this class') class Minimax: def __init__(self, color_me, h_me, h_challenger): self.h_me = h_me self.h_challenger = h_challenger self.color_me = color_me def heuristic(self, board, color): if color == self.color_me: return self.h_me.heuristic(board, color) else return self.h_challenger.heuristic(board, color)
Create the minimal class MiniMax
Create the minimal class MiniMax
Python
apache-2.0
frila/agente-minimax
8a573dae750b1b9415df0c9e2c019750171e66f0
migrations.py
migrations.py
import os import json from dateutil.parser import parse from scrapi.util import safe_filename def migrate_from_old_scrapi(): for dirname, dirs, filenames in os.walk('archive'): for filename in filenames: oldpath = os.path.join(dirname, filename) source, sid, dt = dirname.split('/')[1:] dt = parse(dt).isoformat() sid = safe_filename(sid) newpath = os.path.join('archive', source, sid, dt, filename) if filename == 'manifest.json': with open(oldpath) as old: old_json = json.load(old) new_json = { 'consumerVersion': old_json['version'], 'normalizeVersion': old_json['version'], 'timestamp': dt, 'source': source, 'id': sid } old_json = json.dumps(old_json, indent=4, sort_keys=True) new_json = json.dumps(new_json, indent=4, sort_keys=True) print '{} -> {}'.format(oldpath, newpath) print old_json print new_json
import os import json from dateutil.parser import parse from scrapi.util import safe_filename def migrate_from_old_scrapi(): for dirname, dirs, filenames in os.walk('archive'): for filename in filenames: oldpath = os.path.join(dirname, filename) source, sid, dt = dirname.split('/')[1:] dt = parse(dt).isoformat() sid = safe_filename(sid) newpath = os.path.join('archive', source, sid, dt, filename) if filename == 'manifest.json': with open(oldpath) as old: old_json = json.load(old) new_json = { 'consumerVersion': old_json['version'], 'normalizeVersion': old_json['version'], 'timestamp': dt, 'source': source, 'id': sid } old_json = json.dumps(old_json, indent=4, sort_keys=True) new_json = json.dumps(new_json, indent=4, sort_keys=True) print old_json print new_json print '{} -> {}'.format(oldpath, newpath)
Move json print methods into if statement
Move json print methods into if statement
Python
apache-2.0
erinspace/scrapi,CenterForOpenScience/scrapi,icereval/scrapi,fabianvf/scrapi,fabianvf/scrapi,ostwald/scrapi,mehanig/scrapi,alexgarciac/scrapi,jeffreyliu3230/scrapi,felliott/scrapi,felliott/scrapi,CenterForOpenScience/scrapi,mehanig/scrapi,erinspace/scrapi
c668aaa0f22f5a61094c2028291b65c781733a54
mojapi/api.py
mojapi/api.py
import json import requests import time def get_statuses(): return requests.get('https://status.mojang.com/check/').json() def get_uuid(username, unix_timestamp=None): if unix_timestamp is None: unix_timestamp = int(time.time()) return requests.get( 'https://api.mojang.com/users/profiles/minecraft/{}?at={}'.format(username, unix_timestamp) ).json() def get_usernames(uuid): return requests.get('https://api.mojang.com/user/profiles/{}/names'.format(uuid)).json() def get_profiles(*usernames): return requests.post( url='https://api.mojang.com/profiles/minecraft', headers={ b'Content-Type': b'application/json' }, data=json.dumps(list(usernames)) ).json()
import json import requests import time def get_statuses(): return requests.get('https://status.mojang.com/check/').json() def get_uuid(username, unix_timestamp=None): if unix_timestamp is None: unix_timestamp = int(time.time()) return requests.get( 'https://api.mojang.com/users/profiles/minecraft/{}?at={}'.format(username, unix_timestamp) ).json() def get_usernames(uuid): return requests.get('https://api.mojang.com/user/profiles/{}/names'.format(uuid)).json() def get_profiles(*usernames): return requests.post( url='https://api.mojang.com/profiles/minecraft', headers={ b'Content-Type': b'application/json' }, data=json.dumps(list(usernames)) ).json() def get_blocked_server_hashes(): response = requests.get('https://sessionserver.mojang.com/blockedservers') response.raise_for_status() sha1_hashes = response.content.split(b'\n') return sha1_hashes
Add get blocked server hashes call
Add get blocked server hashes call
Python
mit
zugmc/mojapi
6845c56edc315f5ce07f0bf1101d59ee04036024
pydir/daemon-rxcmd.py
pydir/daemon-rxcmd.py
#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright (c) 2016 F Dou<programmingrobotsstudygroup@gmail.com> # See LICENSE for details. import bluetooth import os import logging import time from daemon import runner class RxCmdDaemon(): def __init__(self): self.stdin_path = '/dev/null' self.stdout_path = '/dev/tty' self.stderr_path = '/dev/tty' self.pidfile_path = '/tmp/RxCmdDaemon.pid' self.pidfile_timeout = 5 def run(self): while True: server_sock=bluetooth.BluetoothSocket( bluetooth.RFCOMM ) port = 1 server_sock.bind(("",port)) server_sock.listen(1) client_sock,address = server_sock.accept() print "Accepted connection from ",address try: while True: data = client_sock.recv(1024) print "received [%s]" % data os.system(data) except Exception as e: logging.exception(e) rxCmdDaemon = RxCmdDaemon() daemon_runner = runner.DaemonRunner(rxCmdDaemon) daemon_runner.do_action()
#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright (c) 2016 F Dou<programmingrobotsstudygroup@gmail.com> # See LICENSE for details. import bluetooth import os import logging import time from daemon import runner class RxCmdDaemon(): def __init__(self): self.stdin_path = '/dev/null' self.stdout_path = '/dev/tty' self.stderr_path = '/dev/tty' self.pidfile_path = '/tmp/RxCmdDaemon.pid' self.pidfile_timeout = 5 def run(self): while True: server_sock=bluetooth.BluetoothSocket( bluetooth.RFCOMM ) port = 1 server_sock.bind(("",port)) server_sock.listen(1) client_sock,address = server_sock.accept() print "Accepted connection from ",address try: while True: data = client_sock.recv(1024) print "received [%s]" % data os.system(data) except Exception as e: logging.exception(e) while True: try: rxCmdDaemon = RxCmdDaemon() daemon_runner = runner.DaemonRunner(rxCmdDaemon) daemon_runner.do_action() except Exception as e: logging.exception(e)
Add try/catch to improve error handling
Add try/catch to improve error handling
Python
apache-2.0
javatechs/RxCmd,javatechs/RxCmd,javatechs/RxCmd
96df077d5485979af256fe7b95708ace658fb8e2
test/mitmproxy/test_examples.py
test/mitmproxy/test_examples.py
import glob from mitmproxy import utils, script from mitmproxy.proxy import config from netlib import tutils as netutils from netlib.http import Headers from . import tservers, tutils from examples import ( modify_form, ) def test_load_scripts(): example_dir = utils.Data(__name__).path("../../examples") scripts = glob.glob("%s/*.py" % example_dir) tmaster = tservers.TestMaster(config.ProxyConfig()) for f in scripts: if "har_extractor" in f: continue if "flowwriter" in f: f += " -" if "iframe_injector" in f: f += " foo" # one argument required if "filt" in f: f += " ~a" if "modify_response_body" in f: f += " foo bar" # two arguments required try: s = script.Script(f, script.ScriptContext(tmaster)) # Loads the script file. except Exception as v: if "ImportError" not in str(v): raise else: s.unload() def test_modify_form(): form_header = Headers(content_type="application/x-www-form-urlencoded") flow = tutils.tflow(req=netutils.treq(headers=form_header)) modify_form.request({}, flow) assert flow.request.urlencoded_form["mitmproxy"] == ["rocks"]
import glob from mitmproxy import utils, script from mitmproxy.proxy import config from netlib import tutils as netutils from netlib.http import Headers from . import tservers, tutils from examples import ( add_header, modify_form, ) def test_load_scripts(): example_dir = utils.Data(__name__).path("../../examples") scripts = glob.glob("%s/*.py" % example_dir) tmaster = tservers.TestMaster(config.ProxyConfig()) for f in scripts: if "har_extractor" in f: continue if "flowwriter" in f: f += " -" if "iframe_injector" in f: f += " foo" # one argument required if "filt" in f: f += " ~a" if "modify_response_body" in f: f += " foo bar" # two arguments required try: s = script.Script(f, script.ScriptContext(tmaster)) # Loads the script file. except Exception as v: if "ImportError" not in str(v): raise else: s.unload() def test_add_header(): flow = tutils.tflow(resp=netutils.tresp()) add_header.response({}, flow) assert flow.response.headers["newheader"] == "foo" def test_modify_form(): form_header = Headers(content_type="application/x-www-form-urlencoded") flow = tutils.tflow(req=netutils.treq(headers=form_header)) modify_form.request({}, flow) assert flow.request.urlencoded_form["mitmproxy"] == ["rocks"]
Add tests for add_header example
Add tests for add_header example
Python
mit
mitmproxy/mitmproxy,jvillacorta/mitmproxy,tdickers/mitmproxy,dufferzafar/mitmproxy,mosajjal/mitmproxy,cortesi/mitmproxy,tdickers/mitmproxy,dwfreed/mitmproxy,laurmurclar/mitmproxy,gzzhanghao/mitmproxy,ddworken/mitmproxy,mosajjal/mitmproxy,mhils/mitmproxy,mhils/mitmproxy,fimad/mitmproxy,mitmproxy/mitmproxy,ujjwal96/mitmproxy,dwfreed/mitmproxy,zlorb/mitmproxy,dufferzafar/mitmproxy,dufferzafar/mitmproxy,laurmurclar/mitmproxy,zlorb/mitmproxy,jvillacorta/mitmproxy,vhaupert/mitmproxy,Kriechi/mitmproxy,ujjwal96/mitmproxy,fimad/mitmproxy,dufferzafar/mitmproxy,mitmproxy/mitmproxy,jvillacorta/mitmproxy,mosajjal/mitmproxy,ddworken/mitmproxy,gzzhanghao/mitmproxy,vhaupert/mitmproxy,tdickers/mitmproxy,zlorb/mitmproxy,xaxa89/mitmproxy,mitmproxy/mitmproxy,dwfreed/mitmproxy,mhils/mitmproxy,fimad/mitmproxy,xaxa89/mitmproxy,ujjwal96/mitmproxy,zlorb/mitmproxy,ddworken/mitmproxy,fimad/mitmproxy,laurmurclar/mitmproxy,MatthewShao/mitmproxy,gzzhanghao/mitmproxy,laurmurclar/mitmproxy,Kriechi/mitmproxy,ujjwal96/mitmproxy,StevenVanAcker/mitmproxy,mhils/mitmproxy,ddworken/mitmproxy,Kriechi/mitmproxy,MatthewShao/mitmproxy,StevenVanAcker/mitmproxy,mosajjal/mitmproxy,dwfreed/mitmproxy,cortesi/mitmproxy,cortesi/mitmproxy,mitmproxy/mitmproxy,gzzhanghao/mitmproxy,xaxa89/mitmproxy,cortesi/mitmproxy,StevenVanAcker/mitmproxy,mhils/mitmproxy,Kriechi/mitmproxy,xaxa89/mitmproxy,tdickers/mitmproxy,jvillacorta/mitmproxy,MatthewShao/mitmproxy,MatthewShao/mitmproxy,StevenVanAcker/mitmproxy,vhaupert/mitmproxy,vhaupert/mitmproxy
6f45e82af789586baf7354b562bbb1587d94b28c
qual/tests/test_calendar.py
qual/tests/test_calendar.py
import unittest from datetime import date import qual class TestProlepticGregorianCalendar(unittest.TestCase): def setUp(self): self.calendar = qual.ProlepticGregorianCalendar() def check_valid_date(self, year, month, day): d = self.calendar.date(year, month, day) self.assertIsNotNone(d) def check_invalid_date(self, year, month, day): self.assertRaises(Exception, lambda : self.calendar(year, month, day)) def test_leap_year_from_before_1582(self): """Pope Gregory introduced the calendar in 1582""" self.check_valid_date(1200, 2, 29) def test_Julian_leap_day_is_not_a_valid_date(self): """This day /was/ a leap day contemporaneously, but is not a valid date of the Gregorian calendar.""" self.check_invalid_date(1300, 2, 29)
import unittest from datetime import date import qual class TestProlepticGregorianCalendar(unittest.TestCase): def setUp(self): self.calendar = qual.ProlepticGregorianCalendar() def check_valid_date(self, year, month, day): d = self.calendar.date(year, month, day) self.assertIsNotNone(d) def check_invalid_date(self, year, month, day): self.assertRaises(Exception, lambda : self.calendar(year, month, day)) def test_leap_year_from_before_1582(self): """Pope Gregory introduced the calendar in 1582""" self.check_valid_date(1200, 2, 29) def test_day_missed_out_in_British_calendar_change(self): """This date never happened in English law: It was missed when changing from the Julian to Gregorian. This test proves that we are not using a historical British calendar.""" self.check_valid_date(1752, 9, 3) def test_Julian_leap_day_is_not_a_valid_date(self): """This day /was/ a leap day contemporaneously, but is not a valid date of the Gregorian calendar.""" self.check_invalid_date(1300, 2, 29)
Add a test for a date missing from English historical calendars.
Add a test for a date missing from English historical calendars.
Python
apache-2.0
jwg4/qual,jwg4/calexicon
d4c168cc552a444ecb3ee3059f12fa1c34c4419c
test_sempai.py
test_sempai.py
import jsonsempai import os import shutil import sys import tempfile TEST_FILE = '''{ "three": 3, "one": { "two": { "three": 3 } } }''' class TestSempai(object): def setup(self): self.direc = tempfile.mkdtemp(prefix='jsonsempai') sys.path.append(self.direc) with open(os.path.join(self.direc, 'sempai.json'), 'w') as f: f.write(TEST_FILE) def teardown(self): sys.path.remove(self.direc) shutil.rmtree(self.direc) def test_import(self): import sempai assert sempai def test_access(self): import sempai assert sempai.three == 3 def test_access_nested(self): import sempai assert sempai.one.two.three == 3 def test_acts_like_dict(self): import sempai assert sempai.one.two == {"three": 3} def test_set(self): import sempai sempai.one.two.three = 4 assert sempai.one.two.three == 4 def test_location(self): import sempai assert sempai.__file__ == os.path.join(self.direc, 'sempai.json')
import jsonsempai import os import shutil import sys import tempfile TEST_FILE = '''{ "three": 3, "one": { "two": { "three": 3 } } }''' class TestSempai(object): def setup(self): self.direc = tempfile.mkdtemp(prefix='jsonsempai') sys.path.append(self.direc) with open(os.path.join(self.direc, 'sempai.json'), 'w') as f: f.write(TEST_FILE) def teardown(self): sys.path.remove(self.direc) shutil.rmtree(self.direc) def test_import(self): import sempai assert sempai def test_access(self): import sempai assert sempai.three == 3 def test_access_nested(self): import sempai assert sempai.one.two.three == 3 def test_acts_like_dict(self): import sempai assert sempai.one.two == {"three": 3} def test_set(self): import sempai sempai.one.two.three = 4 assert sempai.one.two.three == 4 def test_del(self): import sempai del sempai.one.two.three assert sempai.one.two.get('three', 'not at home') == 'not at home' def test_location(self): import sempai assert sempai.__file__ == os.path.join(self.direc, 'sempai.json')
Add test for removing item
Add test for removing item
Python
mit
kragniz/json-sempai
004326064c87184e4373ab0b2d8d7ef9b46d94f9
tokens/conf.py
tokens/conf.py
PHASES = ( ('PHASE_01', 'In review',), ('PHASE_02', 'Active',), ('PHASE_02', 'Inactive',), )
PHASES = ( ('PHASE_01', 'In review',), ('PHASE_02', 'Active',), ('PHASE_02', 'Inactive',), ) TOKEN_TYPES = ( ('MintableToken', 'Mintable Token'), )
Add MintableToken as new token type
Add MintableToken as new token type
Python
apache-2.0
onyb/ethane,onyb/ethane,onyb/ethane,onyb/ethane
76ec25090ece865d67f63c07c32aff7cebf105c1
ynr/apps/people/migrations/0034_get_birth_year.py
ynr/apps/people/migrations/0034_get_birth_year.py
# Generated by Django 3.2.4 on 2021-10-27 14:41 from django.db import migrations def get_birth_year(apps, schema_editor): Person = apps.get_model("people", "Person") for person in Person.objects.all(): birth_year = person.birth_date.split("-")[0] person.birth_date = birth_year person.save() class Migration(migrations.Migration): dependencies = [("people", "0033_auto_20210928_1007")] operations = [ migrations.RunPython(get_birth_year, migrations.RunPython.noop) ]
# Generated by Django 3.2.4 on 2021-10-27 14:41 from django.db import migrations def get_birth_year(apps, schema_editor): Person = apps.get_model("people", "Person") for person in Person.objects.exclude(birth_date="").iterator(): birth_year = person.birth_date.split("-")[0] person.birth_date = birth_year person.save() class Migration(migrations.Migration): dependencies = [("people", "0033_auto_20210928_1007")] operations = [ migrations.RunPython(get_birth_year, migrations.RunPython.noop) ]
Improve performance of birth date data migration
Improve performance of birth date data migration
Python
agpl-3.0
DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative
1782b15b244597d56bff18c465237c7e1f3ab482
wikked/commands/users.py
wikked/commands/users.py
import logging import getpass from wikked.bcryptfallback import generate_password_hash from wikked.commands.base import WikkedCommand, register_command logger = logging.getLogger(__name__) @register_command class UsersCommand(WikkedCommand): def __init__(self): super(UsersCommand, self).__init__() self.name = 'users' self.description = "Lists users of this wiki." def setupParser(self, parser): pass def run(self, ctx): logger.info("Users:") for user in ctx.wiki.auth.getUsers(): logger.info(" - " + user.username) @register_command class NewUserCommand(WikkedCommand): def __init__(self): super(NewUserCommand, self).__init__() self.name = 'newuser' self.description = ( "Generates the entry for a new user so you can " "copy/paste it in your `.wikirc`.") def setupParser(self, parser): parser.add_argument('username', nargs=1) parser.add_argument('password', nargs='?') def run(self, ctx): username = ctx.args.username password = ctx.args.password or getpass.getpass('Password: ') password = generate_password_hash(password) logger.info("%s = %s" % (username[0], password))
import logging import getpass from wikked.bcryptfallback import generate_password_hash from wikked.commands.base import WikkedCommand, register_command logger = logging.getLogger(__name__) @register_command class UsersCommand(WikkedCommand): def __init__(self): super(UsersCommand, self).__init__() self.name = 'users' self.description = "Lists users of this wiki." def setupParser(self, parser): pass def run(self, ctx): logger.info("Users:") for user in ctx.wiki.auth.getUsers(): logger.info(" - " + user.username) @register_command class NewUserCommand(WikkedCommand): def __init__(self): super(NewUserCommand, self).__init__() self.name = 'newuser' self.description = ( "Generates the entry for a new user so you can " "copy/paste it in your `.wikirc`.") def setupParser(self, parser): parser.add_argument('username', nargs=1) parser.add_argument('password', nargs='?') def run(self, ctx): username = ctx.args.username password = ctx.args.password or getpass.getpass('Password: ') password = generate_password_hash(password) logger.info("%s = %s" % (username[0], password)) logger.info("") logger.info("(copy this into your .wikirc file)")
Add some explanation as to what to do with the output.
newuser: Add some explanation as to what to do with the output.
Python
apache-2.0
ludovicchabant/Wikked,ludovicchabant/Wikked,ludovicchabant/Wikked
2342cd5ede9fac66007d2b15025feeff52c2400b
flexget/plugins/operate/verify_ssl_certificates.py
flexget/plugins/operate/verify_ssl_certificates.py
from __future__ import unicode_literals, division, absolute_import from builtins import * # pylint: disable=unused-import, redefined-builtin import logging from flexget import plugin from flexget.event import event log = logging.getLogger('verify_ssl') class VerifySSLCertificates(object): """ Plugin that can off SSL certificate verification. Example:: verify_ssl_certificates: no """ schema = {'type': 'boolean'} @plugin.priority(253) def on_task_start(self, task, config): if config is False: task.requests.verify = False @event('plugin.register') def register_plugin(): plugin.register(VerifySSLCertificates, 'verify_ssl_certificates', api_ver=2)
from __future__ import unicode_literals, division, absolute_import from builtins import * # pylint: disable=unused-import, redefined-builtin import logging from requests.packages import urllib3 from flexget import plugin from flexget.event import event log = logging.getLogger('verify_ssl') class VerifySSLCertificates(object): """ Plugin that can off SSL certificate verification. Example:: verify_ssl_certificates: no """ schema = {'type': 'boolean'} @plugin.priority(253) def on_task_start(self, task, config): if config is False: task.requests.verify = False # Disabling verification results in a warning for every HTTPS # request: # "InsecureRequestWarning: Unverified HTTPS request is being made. # Adding certificate verification is strongly advised. See: # https://urllib3.readthedocs.io/en/latest/security.html" # Disable those warnings because the user has explicitly disabled # verification and the warning is not beneficial. # This change is permanent rather than task scoped, but there won't # be any warnings to disable when verification is enabled. urllib3.disable_warnings() @event('plugin.register') def register_plugin(): plugin.register(VerifySSLCertificates, 'verify_ssl_certificates', api_ver=2)
Disable warnings about disabling SSL verification.
Disable warnings about disabling SSL verification. Disabling SSL certificate verification results in a warning for every HTTPS request: "InsecureRequestWarning: Unverified HTTPS request is being made. Adding certificate verification is strongly advised. See: https://urllib3.readthedocs.io/en/latest/security.html" Disable those warnings because the user has explicitly disabled verification and so the warning is not beneficial.
Python
mit
OmgOhnoes/Flexget,qk4l/Flexget,jacobmetrick/Flexget,jacobmetrick/Flexget,Flexget/Flexget,LynxyssCZ/Flexget,crawln45/Flexget,Flexget/Flexget,ianstalk/Flexget,OmgOhnoes/Flexget,poulpito/Flexget,drwyrm/Flexget,malkavi/Flexget,jawilson/Flexget,malkavi/Flexget,LynxyssCZ/Flexget,jawilson/Flexget,ianstalk/Flexget,gazpachoking/Flexget,sean797/Flexget,crawln45/Flexget,Danfocus/Flexget,JorisDeRieck/Flexget,drwyrm/Flexget,Danfocus/Flexget,JorisDeRieck/Flexget,JorisDeRieck/Flexget,sean797/Flexget,jawilson/Flexget,LynxyssCZ/Flexget,tobinjt/Flexget,tobinjt/Flexget,jacobmetrick/Flexget,qk4l/Flexget,qk4l/Flexget,ianstalk/Flexget,tobinjt/Flexget,malkavi/Flexget,jawilson/Flexget,Danfocus/Flexget,Flexget/Flexget,JorisDeRieck/Flexget,poulpito/Flexget,drwyrm/Flexget,malkavi/Flexget,sean797/Flexget,Danfocus/Flexget,crawln45/Flexget,poulpito/Flexget,crawln45/Flexget,OmgOhnoes/Flexget,tobinjt/Flexget,LynxyssCZ/Flexget,Flexget/Flexget,gazpachoking/Flexget
3ca30011794143785955792e391902823427ef77
registration/views.py
registration/views.py
# Create your views here. from django.http import HttpResponse from registration.models import Team from django.core import serializers def get_teams(request): return_data = serializers.serialize("json", Team.objects.all()) return HttpResponse(return_data, content_type="application/json")
# Create your views here. from django.http import HttpResponse from registration.models import Team from django.core import serializers from django.views.decorators.cache import cache_page @cache_page(60 * 5) def get_teams(request): return_data = serializers.serialize("json", Team.objects.all()) return HttpResponse(return_data, content_type="application/json")
Add caching for getTeams API call
Add caching for getTeams API call
Python
bsd-3-clause
hgrimberg01/esc,hgrimberg01/esc
33fbc424d725836355c071593042953fb195cff6
server/project/apps/core/serializers.py
server/project/apps/core/serializers.py
from rest_framework import serializers from .models import Playlist, Track, Favorite class TrackSerializer(serializers.ModelSerializer): class Meta: model = Track fields = '__all__' class PlaylistSerializer(serializers.ModelSerializer): tracks = TrackSerializer(many=True) class Meta: model = Playlist fields = ('id', 'playlist_name', 'user_id', 'tracks') def create(self, validated_data): tracks_data = validated_data.pop('tracks') playlist = Playlist.objects.create(**validated_data) for track_data in tracks_data: Track.objects.create(**track_data) return playlist def update(self, instance, validated_data): tracks_data = validated_data.pop('tracks') instance.playlist_name = validated_data.get('playlist_name', instance.playlist_name) instance.save() Track.objects.filter(playlist=instance.id).delete() for track_data in tracks_data: Track.objects.create(**track_data) instance.tracks.add(track_id) instance.save() return Playlist.objects.get(pk=instance.id) class FavoriteSerializer(serializers.ModelSerializer): class Meta: model = Favorite fields = '__all__'
from rest_framework import serializers from .models import Playlist, Track, Favorite class TrackSerializer(serializers.ModelSerializer): class Meta: model = Track fields = '__all__' class PlaylistSerializer(serializers.ModelSerializer): tracks = TrackSerializer(many=True) class Meta: model = Playlist fields = ('id', 'playlist_name', 'user_id', 'tracks') def create(self, validated_data): tracks_data = validated_data.pop('tracks') playlist = Playlist.objects.create(**validated_data) for track_data in tracks_data: Track.objects.create(**track_data) return playlist def update(self, instance, validated_data): tracks_data = validated_data.pop('tracks') instance.playlist_name = validated_data.get('playlist_name', instance.playlist_name) instance.save() Track.objects.filter(playlist=instance.id).delete() for track_data in tracks_data: track_id = Track.objects.create(**track_data) instance.tracks.add(track_id) instance.save() return Playlist.objects.get(pk=instance.id) class FavoriteSerializer(serializers.ModelSerializer): class Meta: model = Favorite fields = '__all__'
Add tracks to playlist on update
Add tracks to playlist on update
Python
mit
hrr20-over9000/9001,SoundMoose/SoundMoose,SoundMoose/SoundMoose,douvaughn/9001,douvaughn/9001,hxue920/9001,hrr20-over9000/9001,hxue920/9001,CalHoll/SoundMoose,CalHoll/SoundMoose,douvaughn/9001,CalHoll/SoundMoose,hrr20-over9000/9001,hxue920/9001,douvaughn/9001,hxue920/9001,SoundMoose/SoundMoose,SoundMoose/SoundMoose,CalHoll/SoundMoose
193831b6ee8b49674e32413e71819f2451bfc844
situational/apps/quick_history/forms.py
situational/apps/quick_history/forms.py
from django import forms from . import widgets class HistoryDetailsForm(forms.Form): CIRCUMSTANCE_CHOICES = [ ("full_time", "Full time"), ("part_time", "Part time"), ("work_programme", "Work programme"), ("unemployed", "Unemployed"), ("sick", "Off sick"), ("training", "In full time training"), ("caring", "Caring full time for others"), ("none", "None of these"), ] circumstances = forms.ChoiceField( widget=forms.RadioSelect(), choices=CIRCUMSTANCE_CHOICES ) date = forms.DateField( widget=widgets.MonthYearWidget(years=range(2000, 2016)) ) description = forms.CharField(required=False) def clean(self): cleaned_data = super(HistoryDetailsForm, self).clean() return cleaned_data
from django import forms from . import widgets class HistoryDetailsForm(forms.Form): CIRCUMSTANCE_CHOICES = [ ("full_time", "Full time"), ("part_time", "Part time"), ("unemployed", "Unemployed"), ("sick", "Off sick"), ("training", "In full time training"), ("caring", "Caring full time for others"), ("none", "None of these"), ] circumstances = forms.ChoiceField( widget=forms.RadioSelect(), choices=CIRCUMSTANCE_CHOICES ) date = forms.DateField( widget=widgets.MonthYearWidget(years=range(2000, 2016)) ) description = forms.CharField(required=False) def clean(self): cleaned_data = super(HistoryDetailsForm, self).clean() return cleaned_data
Remove "work programme" option from quick history
Remove "work programme" option from quick history
Python
bsd-3-clause
lm-tools/situational,lm-tools/sectors,lm-tools/situational,lm-tools/situational,lm-tools/situational,lm-tools/sectors,lm-tools/situational,lm-tools/sectors,lm-tools/sectors
1ca9052a989ad0c1642875c7f29b8ba2130011fa
south/introspection_plugins/__init__.py
south/introspection_plugins/__init__.py
# This module contains built-in introspector plugins for various common # Django apps. # These imports trigger the lower-down files import south.introspection_plugins.geodjango import south.introspection_plugins.django_tagging import south.introspection_plugins.django_taggit import south.introspection_plugins.django_objectpermissions
# This module contains built-in introspector plugins for various common # Django apps. # These imports trigger the lower-down files import south.introspection_plugins.geodjango import south.introspection_plugins.django_tagging import south.introspection_plugins.django_taggit import south.introspection_plugins.django_objectpermissions import south.introspection_plugins.annoying_autoonetoone
Add import of django-annoying patch
Add import of django-annoying patch
Python
apache-2.0
smartfile/django-south,smartfile/django-south
93373242eab8d387a9b13c567239fa2e36b10ffa
mqtt_logger/management/commands/runmqttlistener.py
mqtt_logger/management/commands/runmqttlistener.py
from django.core.management.base import BaseCommand, CommandError from mqtt_logger.models import * class Command(BaseCommand): help = 'Start listening to mqtt subscriptions and save messages in database.' def add_arguments(self, parser): pass def handle(self, *args, **options): self.stdout.write("Starting MQTT listener...") clients = MQTTSubscription.subscribe_all(start_loop=True) for c in clients: self.stdout.write(" %s:%s %s"%(c.host, c.port, c.topics)) self.stdout.write("MQTT listener started.") self.stdout.write("Hit <ENTER> to quit.") wait = raw_input()
from django.core.management.base import BaseCommand, CommandError from mqtt_logger.models import * import time class Command(BaseCommand): help = 'Start listening to mqtt subscriptions and save messages in database.' def add_arguments(self, parser): pass def handle(self, *args, **options): self.stdout.write("Starting MQTT listener...") subs = list(MQTTSubscription.objects.filter(active=True)) for s in subs: self.stdout.write(" Connecting to %s:%s %s"%(s.server, s.port, s.topic)) s.client = s.subscribe(start_loop=True) while(True): time.sleep(10) newsubs = MQTTSubscription.objects.filter(active=True) for s in subs: if s not in newsubs: self.stdout.write(" Disconnecting from %s:%s %s"%(s.server, s.port, s.topic)) s.client.disconnect() subs.remove(s) for s in newsubs: if s not in subs: self.stdout.write(" Connecting to %s:%s %s"%(s.server, s.port, s.topic)) s.client = s.subscribe(start_loop=True) subs.append(s)
Make the listener automatically update the subscriptions.
Make the listener automatically update the subscriptions.
Python
mit
ast0815/mqtt-hub,ast0815/mqtt-hub
e019ce982325a6284e844df3c9a5f8172f494ba3
run_mandel.py
run_mandel.py
import fractal import bmp pixels = fractal.mandelbrot(488, 256) bmp.write_grayscale('mandel.bmp', pixels)
import fractal import bmp def main(): pixels = fractal.mandelbrot(488, 256) bmp.write_grayscale('mandel.bmp', pixels) if __name__ == '__main__': main()
Add a main runner for mandel
Add a main runner for mandel
Python
mit
kentoj/python-fundamentals
fa78c5b5442c904ba3888b858eb2c284f16664ed
pages/urls/page.py
pages/urls/page.py
from django.conf.urls import include, patterns, url from rest_framework.routers import SimpleRouter from .. import views router = SimpleRouter(trailing_slash=False) router.register(r'pages', views.PageViewSet) urlpatterns = patterns('', url(r'', include(router.urls)), )
from django.conf.urls import include, url from rest_framework.routers import SimpleRouter from .. import views router = SimpleRouter(trailing_slash=False) router.register(r'pages', views.PageViewSet) urlpatterns = [ url(r'', include(router.urls)), ]
Purge unnecessary patterns function from urls
Purge unnecessary patterns function from urls
Python
bsd-2-clause
incuna/feincms-pages-api
13f3d7d4a708cd05712b610d979dcf857ae85856
Agents/SentinelDefense.py
Agents/SentinelDefense.py
from pysc2.agents import base_agents from pysc2.lib import actions ## SENTINEL FUNCTIONS # Functions related with Hallucination _HAL_ADEPT = actions.FUNCTIONS.Hallucination_Adept_quick.id _HAL_ARCHON = actions.FUNCTIONS.Hallucination_Archon_quick.id _HAL_COL = actions.FUNCTIONS.Hallucination_Colossus_quick.id _HAL_DISRUP = actions.FUNCTIONS.Hallucination_Disruptor_quick.id _HAL_HIGTEM = actions.FUNCTIONS.Hallucination_HighTemplar_quick.id _HAL_IMN = actions.FUNCTIONS.Hallucination_Immortal_quick.id _HAL_PHOENIX = actions.FUNCTIONS.Hallucination_Phoenix_quick.id _HAL_STALKER = actions.FUNCTIONS.Hallucination_Stalker_quick.id _HAL_VOIDRAID = actions.FUNCTIONS.Hallucination_VoidRay_quick.id _HAL_ZEALOT = actions.FUNCTIONS.Hallucination_Zealot_quick.id # Functions _NOOP = actions.FUNCTIONS.no_op.id _SELECT_POINT = actions.FUNCTIONS.select_point.id # Functions related with attack
from pysc2.agents import base_agents from pysc2.lib import actions Class Sentry(): '''Defines how the sentry SC2 unit works''' def Force_Field(): '''Function related with Force Field creation''' _FORCE_FIELD = actions.FUNCTIONS.Effect_ForceField_screen.id def Guardian_Shield(): '''Function related with Shield creation''' _GUARD_FIELD = actions.FUNCTIONS.Effect_GuardianShield_quick.id def Hallucinations(): '''Functions related with Hallucination''' _HAL_ADEPT = actions.FUNCTIONS.Hallucination_Adept_quick.id _HAL_ARCHON = actions.FUNCTIONS.Hallucination_Archon_quick.id _HAL_COL = actions.FUNCTIONS.Hallucination_Colossus_quick.id _HAL_DISRUP = actions.FUNCTIONS.Hallucination_Disruptor_quick.id _HAL_HIGTEM = actions.FUNCTIONS.Hallucination_HighTemplar_quick.id _HAL_IMN = actions.FUNCTIONS.Hallucination_Immortal_quick.id _HAL_PHOENIX = actions.FUNCTIONS.Hallucination_Phoenix_quick.id _HAL_STALKER = actions.FUNCTIONS.Hallucination_Stalker_quick.id _HAL_VOIDRAID = actions.FUNCTIONS.Hallucination_VoidRay_quick.id _HAL_ZEALOT = actions.FUNCTIONS.Hallucination_Zealot_quick.id def Standard_Functions(): '''Standard Functions related with movements and exploration ''' _NOOP = actions.FUNCTIONS.no_op.id _SELECT_POINT = actions.FUNCTIONS.select_point.id
Define class sentry with main actions
Define class sentry with main actions
Python
apache-2.0
SoyGema/Startcraft_pysc2_minigames
1c01b9e794445242c450534d1615a9dc755b89da
randcat.py
randcat.py
import random random.seed() while True: print(chr(random.getrandbits(8)), end='')
#! /usr/bin/python3 import random random.seed() # this initializes with the Date, which I think is a novel enough seed while True: # if we're going with a mimicing of cat /dev/random, it'll pretty much just go until it's killed print(chr(random.getrandbits(8)), end='')
Add some comments and a shebang on top.
Add some comments and a shebang on top.
Python
apache-2.0
Tombert/RandCat
589bc468783e6c7620c3be21195fdbe88e796234
linguist/helpers.py
linguist/helpers.py
# -*- coding: utf-8 -*- import collections import itertools from . import utils def prefetch_translations(instances, **kwargs): """ Prefetches translations for the given instances. Can be useful for a list of instances. """ from .mixins import ModelMixin populate_missing = kwargs.get('populate_missing', True) grouped_translations = utils.get_grouped_translations(instances, **kwargs) # In the case of no translations objects if not grouped_translations and populate_missing: for instance in instances: instance.populate_missing_translations() for instance in instances: if issubclass(instance.__class__, ModelMixin) and instance.pk in grouped_translations: for translation in grouped_translations[instance.pk]: instance._linguist.set_cache(instance=instance, translation=translation) if populate_missing: instance.populate_missing_translations()
# -*- coding: utf-8 -*- import collections import itertools from . import utils def prefetch_translations(instances, **kwargs): """ Prefetches translations for the given instances. Can be useful for a list of instances. """ from .mixins import ModelMixin if not isinstance(instances, collections.Iterable): instances = [instances] populate_missing = kwargs.get('populate_missing', True) grouped_translations = utils.get_grouped_translations(instances, **kwargs) # In the case of no translations objects if not grouped_translations and populate_missing: for instance in instances: instance.populate_missing_translations() for instance in instances: if issubclass(instance.__class__, ModelMixin) and instance.pk in grouped_translations: for translation in grouped_translations[instance.pk]: instance._linguist.set_cache(instance=instance, translation=translation) if populate_missing: instance.populate_missing_translations()
Fix prefetch_translations() -- be sure we only deal with iteratables.
Fix prefetch_translations() -- be sure we only deal with iteratables.
Python
mit
ulule/django-linguist
2f8a2fdad8deb96b7b3c971baf866f248c23fdda
madam_rest/views.py
madam_rest/views.py
from flask import jsonify, url_for from madam_rest import app, asset_storage @app.route('/assets/') def assets_retrieve(): assets = [asset_key for asset_key in asset_storage] return jsonify({ "data": assets, "meta": { "count": len(assets) } }) @app.route('/assets/<asset_key>') def asset_retrieve(asset_key): asset = asset_storage[asset_key] return jsonify({ "links": { "self": url_for(asset_retrieve, asset_key=asset_key) }, "meta": {} # TODO: _mutable(asset.metadata) })
from datetime import datetime

from flask import jsonify, url_for
from fractions import Fraction
from frozendict import frozendict

from madam_rest import app, asset_storage


def _serializable(value):
    """
    Recursively convert data structures containing immutable or typed values
    into plain, JSON-serializable Python structures.

    :param value: data structure, possibly containing immutable types
    :return: mutable, serializable data structure
    """
    if isinstance(value, (list, tuple, set, frozenset)):
        return [_serializable(v) for v in value]
    elif isinstance(value, (dict, frozendict)):
        # Recurse into mappings so nested immutables are converted too.
        return {k: _serializable(v) for k, v in value.items()}
    elif isinstance(value, datetime):
        return value.isoformat()
    elif isinstance(value, Fraction):
        return float(value)
    return value


@app.route('/assets/')
def assets_retrieve():
    """List the keys of all stored assets."""
    assets = [asset_key for asset_key in asset_storage]
    return jsonify({
        "data": assets,
        "meta": {
            "count": len(assets)
        }
    })


@app.route('/assets/<asset_key>')
def asset_retrieve(asset_key):
    """Return links and serialized metadata for a single asset."""
    asset = asset_storage[asset_key]
    return jsonify({
        "links": {
            # Flask's url_for expects the *endpoint name* as a string, not
            # the view callable itself.
            "self": url_for('asset_retrieve', asset_key=asset_key)
        },
        "meta": _serializable(asset.metadata)
    })
Improve serialization of asset metadata.
Improve serialization of asset metadata.
Python
agpl-3.0
eseifert/madam-rest
bae4032cc686fbac906d19456ed744a97b0e1365
characters/views.py
characters/views.py
from django.shortcuts import get_object_or_404, redirect, render from characters.forms import CharacterForm from characters.models import Character, Class, Race def index(request): all_characters = Character.objects.all() context = {'all_characters': all_characters} return render(request, 'characters/index.html', context) def view_character(request, character_id): character = get_object_or_404(Character, pk=character_id) context = {'character': character} return render(request, 'characters/view_character.html', context) def create_character(request): form = CharacterForm(request.POST or None) if request.method == 'POST' and form.is_valid(): race = Race.objects.get(id=1) cclass = Class.objects.get(id=1) character = Character( name=request.POST['name'], background=request.POST['background'], race=race, cclass=cclass ) character.save() return redirect('characters:view', character_id=character.id) context = {'form': form} return render(request, 'characters/create_character.html', context)
from django.shortcuts import get_object_or_404, redirect, render

from characters.forms import CharacterForm
from characters.models import Character, Class, Race


def index(request):
    """List every character."""
    all_characters = Character.objects.all()
    context = {'all_characters': all_characters}
    return render(request, 'characters/index.html', context)


def view_character(request, character_id):
    """Show a single character, returning 404 for an unknown id."""
    character = get_object_or_404(Character, pk=character_id)
    context = {'character': character}
    return render(request, 'characters/view_character.html', context)


def create_character(request):
    """Create a character from a submitted form, or render the blank form.

    Race and class are currently fixed to the default records (pk=1); using
    the ``*_id`` attributes avoids two extra database queries.
    """
    form = CharacterForm(request.POST or None)
    if request.method == 'POST' and form.is_valid():
        # Read the form's validated/cleaned values rather than raw POST
        # data, so form cleaning and validation are actually honoured.
        character = Character(
            name=form.cleaned_data['name'],
            background=form.cleaned_data['background'],
            race_id=1,
            cclass_id=1
        )
        character.save()
        return redirect('characters:view', character_id=character.id)
    context = {'form': form}
    return render(request, 'characters/create_character.html', context)
Set default race and class without extra database queries
Set default race and class without extra database queries
Python
mit
mpirnat/django-tutorial-v2
6a4dd66035956037d660271f18592af04edab818
read_images.py
read_images.py
import time import cv2 import os import glob # path = 'by_class' path = 'test' t1 = time.time() file_names=glob.glob(os.path.join(path,'*','train_*','*.[pP][nN][gG]')) t2 = time.time() print('Time to list files: ', t2-t1) file_classes=[ele.split('/')[1] for ele in file_names] t3 = time.time() print('Time to list labels: ', t3-t2) # for i in range(len(file_names)): # print(file_names[i], file_classes[i]) images = [cv2.imread(file) for file in file_names] t4 = time.time() print('Time to read images: ',t4-t3)
import time
import os
import glob
import tensorflow as tf

# path = 'by_class'
path = 'test'

t1 = time.time()
# Class directory is the first path component under `path`; match PNGs
# case-insensitively inside every train_* directory.
file_names = glob.glob(os.path.join(path, '*', 'train_*', '*.[pP][nN][gG]'))
filename_queue = tf.train.string_input_producer(file_names)
t2 = time.time()
print('Time to list files: ', t2 - t1)

# The class directory name is the character's code point in hex.
# Split on os.sep so this also works on Windows paths.
file_classes = [int(os.path.normpath(ele).split(os.sep)[1], base=16)
                for ele in file_names]
try:
    file_labels = [str(chr(i)) for i in file_classes]  # python 3
except (NameError, ValueError):
    # Python 2: chr() only covers 0-255; fall back to unichr().
    file_labels = [str(unichr(i)) for i in file_classes]  # python 2.7
t3 = time.time()
print('Time to list labels: ', t3 - t2)

reader = tf.WholeFileReader()
key, value = reader.read(filename_queue)
my_img = tf.image.decode_png(value)  # use png or jpg decoder based on your files.

init_op = tf.initialize_all_variables()
sess = tf.Session()
sess.run(init_op)

# Start populating the filename queue.
coord = tf.train.Coordinator()
threads = tf.train.start_queue_runners(coord=coord, sess=sess)

for i in range(len(file_classes)):  # length of your filename list
    image = my_img.eval(session=sess)  # here is your image Tensor :)

coord.request_stop()
coord.join(threads)
t4 = time.time()
print('Time to read images: ', t4 - t3)
Read all images using tf itself
Read all images using tf itself
Python
apache-2.0
iitmcvg/OCR-Handwritten-Text,iitmcvg/OCR-Handwritten-Text,iitmcvg/OCR-Handwritten-Text
169d32333aa3152dcec893f2ce58c46d614aaea4
models/employees.py
models/employees.py
import datetime from openedoo.core.libs.tools import hashing_werkzeug from openedoo_project import db from .users import User class Employee(User): @classmethod def is_exist(self, username): employee = self.query.get(username=username).first() return employee @classmethod def get_public_list(self): employees = self.query.with_entities(self.username, self.fullname, self.nip) return employees @classmethod def check_records(self): employees = self.query.limit(1).all() return employees @classmethod def add(self, form={}): if not form: raise ValueError('Form is supplied with wrong data.') data = { 'username': form['username'], 'fullname': form['fullname'], 'password': hashing_werkzeug(form['password']), 'nip': form['nip'], 'created': datetime.datetime.now() } employeeData = self(data) db.session.add(employeeData) return db.session.commit()
import datetime

from openedoo.core.libs.tools import hashing_werkzeug
from openedoo_project import db

from .users import User


class Employee(User):
    @classmethod
    def is_exist(cls, username):
        """Return the employee record matching username, or None.

        NOTE(review): ``query.get(...)`` normally takes a primary key and
        returns a single object; chaining ``.first()`` looks suspicious --
        confirm against the ORM in use.
        """
        employee = cls.query.get(username=username).first()
        return employee

    @classmethod
    def get_public_list(cls):
        """Return only the publicly exposable columns for all employees."""
        employees = cls.query.with_entities(cls.username,
                                            cls.fullname,
                                            cls.nip)
        return employees

    @classmethod
    def check_records(cls):
        """Return a single record if any exist (empty list otherwise)."""
        employees = cls.query.limit(1).all()
        return employees

    @classmethod
    def add(cls, form=None):
        """Insert a new employee built from the submitted form data.

        Args:
            form: mapping with 'username', 'fullname', 'password' and 'nip'.

        Raises:
            ValueError: if no form data is supplied.
        """
        # Guard restored: with the None default, subscripting an absent form
        # would otherwise crash with a TypeError.
        if not form:
            raise ValueError('Form is supplied with wrong data.')
        data = {
            'username': form['username'],
            'fullname': form['fullname'],
            # Never store the plaintext password.
            'password': hashing_werkzeug(form['password']),
            'nip': form['nip'],
            'created': datetime.datetime.now()
        }
        employeeData = cls(data)
        db.session.add(employeeData)
        return db.session.commit()
Fix Dangerous default value {} as argument, pylint.
Fix Dangerous default value {} as argument, pylint.
Python
mit
openedoo/module_employee,openedoo/module_employee,openedoo/module_employee
84f913d928d28bc193d21eb223e7815f69c53a22
plugins/jira.py
plugins/jira.py
from neb.engine import Plugin, Command import requests class JiraPlugin(Plugin): def get_commands(self): """Return human readable commands with descriptions. Returns: list[Command] """ return [ Command("jira", self.jira, "Perform commands on Matrix JIRA.", [ "server-info - Retrieve server information." ]), ] def jira(self, event, args): action = args[1] actions = { "server-info": self._server_info } return actions[action](event, args) def _server_info(self, event, args): return self._body("Boo") def sync(self, matrix, initial_sync): pass
from neb.engine import Plugin, Command, KeyValueStore

import json
import requests


class JiraPlugin(Plugin):
    """Matrix plugin exposing commands against a configured JIRA server."""

    def __init__(self, config="jira.json"):
        self.store = KeyValueStore(config)

        # Prompt once for the server URL and persist it.
        if not self.store.has("url"):
            url = raw_input("JIRA URL: ").strip()
            self.store.set("url", url)

    def get_commands(self):
        """Return human readable commands with descriptions.

        Returns:
            list[Command]
        """
        return [
            Command("jira", self.jira, "Perform commands on a JIRA platform.", [
                "server-info - Retrieve server information."
            ]),
        ]

    def jira(self, event, args):
        """Dispatch a 'jira' command to the matching action handler."""
        if len(args) == 1:
            return self._body("Perform commands on a JIRA platform.")

        action = args[1]
        actions = {
            "server-info": self._server_info
        }

        # An unknown sub-command previously raised KeyError; reply instead.
        if action not in actions:
            return self._body("Unknown action: %s" % action)
        return actions[action](event, args)

    def _server_info(self, event, args):
        """Fetch and summarise the JIRA server information."""
        url = self._url("/rest/api/2/serverInfo")
        response = json.loads(requests.get(url).text)

        info = "%s : version %s : build %s" % (response["serverTitle"],
               response["version"], response["buildNumber"])

        return self._body(info)

    def sync(self, matrix, initial_sync):
        pass

    def _url(self, path):
        """Join an API path onto the configured server URL."""
        return self.store.get("url") + path
Make the plugin request server info from JIRA.
Make the plugin request server info from JIRA.
Python
apache-2.0
Kegsay/Matrix-NEB,matrix-org/Matrix-NEB,illicitonion/Matrix-NEB
4fd67e4e17f0813056493a635e8256a017d894e2
src/tempel/models.py
src/tempel/models.py
from django.db import models from django.conf import settings from tempel import utils class Entry(models.Model): content = models.TextField() language = models.CharField(max_length=20, choices=utils.get_languages()) created = models.DateTimeField(auto_now=True, auto_now_add=True) active = models.BooleanField(default=True) class Meta: ordering = ['-created'] verbose_name_plural = "entries" def get_language(self): return utils.get_language(self.language) def get_mimetype(self): return utils.get_mimetype(self.language) def get_filename(self): return '%s.%s' % (self.id, self.get_extension()) def get_extension(self): return utils.get_extension(self.language)
from django.db import models
from django.conf import settings

from tempel import utils


class Entry(models.Model):
    """A stored paste/snippet tagged with its source language."""

    # Raw text of the paste.
    content = models.TextField()
    # Language key; valid choices come from the project language registry.
    language = models.CharField(max_length=20,
                                choices=utils.get_languages())
    # NOTE(review): auto_now=True together with auto_now_add=True means the
    # timestamp is refreshed on *every* save, not only at creation --
    # confirm that is intended.
    created = models.DateTimeField(auto_now=True, auto_now_add=True)
    # Visibility / soft-delete flag.
    active = models.BooleanField(default=True)

    class Meta:
        ordering = ['-created']  # newest entries first
        verbose_name_plural = "entries"

    def get_language(self):
        """Return the human-readable language for this entry's key."""
        return utils.get_language(self.language)

    def get_mimetype(self):
        """Return the MIME type associated with this entry's language."""
        return utils.get_mimetype(self.language)

    def get_filename(self):
        """Return a download filename of the form '<id>.<extension>'."""
        return '%s.%s' % (self.id, self.get_extension())

    def get_extension(self):
        """Return the file extension associated with this entry's language."""
        return utils.get_extension(self.language)

    def __unicode__(self):
        # Text representation used by the admin and in debugging (Python 2).
        return '<Entry: id=%s lang=%s>' % (self.id, self.language)
Add text representation for Entry object
Add text representation for Entry object
Python
agpl-3.0
fajran/tempel
d17a2308ff903b459b6c9310fd6d42eb0e051544
statsSend/teamCity/teamCityStatisticsSender.py
statsSend/teamCity/teamCityStatisticsSender.py
#!/usr/bin/env python3 from dateutil import parser from statsSend.teamCity.teamCityConnection import TeamCityConnection from statsSend.teamCity.teamCityUrlBuilder import TeamCityUrlBuilder from statsSend.teamCity.teamCityProject import TeamCityProject class TeamCityStatisticsSender: def __init__(self, settings, reporter): self.page_size = int(settings['page_size']) connection = TeamCityConnection(settings['user'], settings['password']) url_builder = TeamCityUrlBuilder(settings['server_url'], settings['api_url_prefix']) self.project = TeamCityProject(settings['project_id'], connection, url_builder, self.page_size) self.since_timestamp = parser.parse(settings['since_timestamp']).strftime('%Y%m%dT%H%M%S%z') self.reporter = reporter async def send(self): if ("report_categories" in dir(self.reporter)): categories = [build_configuration.toCategory() async for build_configuration in self.project.retrieve_build_configurations()] self.reporter.report_categories(categories) async for build_configuration in self.project.retrieve_build_configurations(): async for build_run in build_configuration.retrieve_build_runs_since_timestamp(self.since_timestamp): job = build_run.toJob() self.reporter.report_job(job)
#!/usr/bin/env python3

import sys

from dateutil import parser

from statsSend.teamCity.teamCityConnection import TeamCityConnection
from statsSend.teamCity.teamCityUrlBuilder import TeamCityUrlBuilder
from statsSend.teamCity.teamCityProject import TeamCityProject


def eprint(*args, **kwargs):
    """Print to stderr.

    The error paths below called ``eprint`` without it being defined
    anywhere, so every error would have raised a NameError instead of
    being reported.
    """
    print(*args, file=sys.stderr, **kwargs)


class TeamCityStatisticsSender:
    """Pulls build statistics from TeamCity and forwards them to a reporter."""

    def __init__(self, settings, reporter):
        """Build the TeamCity project accessor from the settings mapping."""
        self.page_size = int(settings['page_size'])
        connection = TeamCityConnection(settings['user'], settings['password'])
        url_builder = TeamCityUrlBuilder(settings['server_url'], settings['api_url_prefix'])
        self.project = TeamCityProject(settings['project_id'], connection, url_builder, self.page_size)
        self.since_timestamp = parser.parse(settings['since_timestamp']).strftime('%Y%m%dT%H%M%S%z')
        self.reporter = reporter

    async def send(self):
        """Send categories (if supported by the reporter) and all jobs."""
        # hasattr is the idiomatic capability check (was: name in dir(...)).
        if hasattr(self.reporter, "report_categories"):
            try:
                categories = [build_configuration.toCategory()
                              async for build_configuration
                              in self.project.retrieve_build_configurations()]
                self.reporter.report_categories(categories)
            except Exception as err:
                # "..." + err raises TypeError; format the exception instead.
                eprint("Error sending categories: {}".format(err))

        try:
            async for build_configuration in self.project.retrieve_build_configurations():
                async for build_run in build_configuration.retrieve_build_runs_since_timestamp(self.since_timestamp):
                    try:
                        job = build_run.toJob()
                        self.reporter.report_job(job)
                    except Exception as err:
                        eprint("Error reporting job: {}".format(err))
        except Exception as err:
            eprint("Error reporting jobs: {}".format(err))
Add error handling in statistics sender
Add error handling in statistics sender
Python
mit
luigiberrettini/build-deploy-stats
00b134df7281c39595f9efcc1c1da047d1d10277
src/encoded/authorization.py
src/encoded/authorization.py
from .contentbase import LOCATION_ROOT CHERRY_LAB_UUID = 'cfb789b8-46f3-4d59-a2b3-adc39e7df93a' def groupfinder(login, request): if ':' not in login: return None namespace, localname = login.split(':', 1) user = None # We may get called before the context is found and the root set root = request.registry[LOCATION_ROOT] if namespace == 'remoteuser': if localname in ['TEST', 'IMPORT']: return ['group:admin'] if namespace in ('mailto', 'remoteuser'): users = root.by_item_type['user'] try: user = users[localname] except KeyError: return None elif namespace == 'accesskey': access_keys = root.by_item_type['access_key'] try: access_key = access_keys[localname] except KeyError: return None userid = access_key.properties['user_uuid'] user = root.by_item_type['user'][userid] if user is None: return None principals = ['userid:%s' % user.uuid] lab_uuids = user.properties.get('lab_uuids', []) principals.extend('lab:' + lab_uuid for lab_uuid in lab_uuids) if CHERRY_LAB_UUID in lab_uuids: principals.append('group:admin') return principals
from .contentbase import LOCATION_ROOT

# Lab whose submitters are granted the admin group.
CHERRY_LAB_UUID = 'cfb789b8-46f3-4d59-a2b3-adc39e7df93a'


def groupfinder(login, request):
    """Map an authenticated login to its list of security principals.

    ``login`` has the form '<namespace>:<localname>' where namespace is
    'remoteuser', 'mailto' or 'accesskey'.  Returns None whenever the login
    cannot be resolved to a known user.
    """
    if ':' not in login:
        return None
    namespace, localname = login.split(':', 1)
    user = None

    # We may get called before the context is found and the root set
    root = request.registry[LOCATION_ROOT]

    if namespace == 'remoteuser':
        # Special test/import logins bypass user lookup entirely.
        if localname in ['TEST', 'IMPORT']:
            return ['group:admin']

    if namespace in ('mailto', 'remoteuser'):
        users = root.by_item_type['user']
        try:
            user = users[localname]
        except KeyError:
            return None

    elif namespace == 'accesskey':
        # Resolve the access key first, then the user it belongs to.
        access_keys = root.by_item_type['access_key']
        try:
            access_key = access_keys[localname]
        except KeyError:
            return None
        userid = access_key.properties['user_uuid']
        user = root.by_item_type['user'][userid]

    if user is None:
        return None

    principals = ['userid:%s' % user.uuid]
    # The user's own lab, plus every lab they may submit for (the latter
    # yields both a 'lab:' and a 'submits_for:' principal per lab).
    lab = user.properties.get('lab')
    if lab:
        principals.append('lab:%s' % lab)
    submits_for = user.properties.get('submits_for', [])
    principals.extend('lab:%s' % lab_uuid for lab_uuid in submits_for)
    principals.extend('submits_for:%s' % lab_uuid for lab_uuid in submits_for)
    if CHERRY_LAB_UUID in submits_for:
        principals.append('group:admin')
    return principals
Update group finder to new schemas
Update group finder to new schemas
Python
mit
kidaa/encoded,philiptzou/clincoded,4dn-dcic/fourfront,hms-dbmi/fourfront,4dn-dcic/fourfront,philiptzou/clincoded,philiptzou/clincoded,hms-dbmi/fourfront,kidaa/encoded,ENCODE-DCC/snovault,ClinGen/clincoded,ENCODE-DCC/snovault,kidaa/encoded,T2DREAM/t2dream-portal,philiptzou/clincoded,ENCODE-DCC/snovault,ENCODE-DCC/encoded,hms-dbmi/fourfront,T2DREAM/t2dream-portal,kidaa/encoded,4dn-dcic/fourfront,4dn-dcic/fourfront,ENCODE-DCC/snovault,hms-dbmi/fourfront,ENCODE-DCC/encoded,ClinGen/clincoded,ENCODE-DCC/snovault,T2DREAM/t2dream-portal,hms-dbmi/fourfront,kidaa/encoded,ClinGen/clincoded,ENCODE-DCC/encoded,ClinGen/clincoded,ClinGen/clincoded,T2DREAM/t2dream-portal,ENCODE-DCC/encoded,philiptzou/clincoded
5cd9ac8d3079fca16828b25b40fed8358286708b
geotrek/outdoor/models.py
geotrek/outdoor/models.py
from django.conf import settings from django.contrib.gis.db import models from django.utils.translation import gettext_lazy as _ from geotrek.authent.models import StructureRelated from geotrek.common.mixins import NoDeleteMixin, TimeStampedModelMixin, AddPropertyMixin from mapentity.models import MapEntityMixin class Site(AddPropertyMixin, MapEntityMixin, StructureRelated, TimeStampedModelMixin, NoDeleteMixin): geom = models.GeometryField(verbose_name=_("Location"), srid=settings.SRID) name = models.CharField(verbose_name=_("Name"), max_length=128) description = models.TextField(verbose_name=_("Description"), blank=True) eid = models.CharField(verbose_name=_("External id"), max_length=1024, blank=True, null=True) class Meta: verbose_name = _("Site") verbose_name_plural = _("Sites") ordering = ('name', ) def __str__(self): return self.name
from django.conf import settings
from django.contrib.gis.db import models
from django.utils.translation import gettext_lazy as _

from geotrek.authent.models import StructureRelated
from geotrek.common.mixins import NoDeleteMixin, TimeStampedModelMixin, AddPropertyMixin
from mapentity.models import MapEntityMixin


class Site(AddPropertyMixin, MapEntityMixin, StructureRelated,
           TimeStampedModelMixin, NoDeleteMixin):
    """An outdoor site with a geographic location (soft-deletable)."""

    # Generic geometry: any type (point, line, polygon...) is accepted.
    geom = models.GeometryField(verbose_name=_("Location"), srid=settings.SRID)
    name = models.CharField(verbose_name=_("Name"), max_length=128)
    description = models.TextField(verbose_name=_("Description"), blank=True)
    # Identifier of this record in an external data source, when imported.
    eid = models.CharField(verbose_name=_("External id"), max_length=1024, blank=True, null=True)

    class Meta:
        verbose_name = _("Site")
        verbose_name_plural = _("Sites")
        ordering = ('name', )

    def __str__(self):
        return self.name

    @property
    def name_display(self):
        # HTML anchor used by MapEntity list views to link to the detail page.
        # NOTE(review): self.name is interpolated into HTML without escaping
        # -- confirm names are trusted or escaped downstream.
        return '<a data-pk="{pk}" href="{url}" title="{name}">{name}</a>'.format(
            pk=self.pk,
            url=self.get_detail_url(),
            name=self.name
        )
Add links to site detail in site list
Add links to site detail in site list
Python
bsd-2-clause
makinacorpus/Geotrek,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek
c8e11b602eb7525789ed1c5f4ea686f45b44f304
src/diamond/handler/httpHandler.py
src/diamond/handler/httpHandler.py
#!/usr/bin/python2.7 from Handler import Handler import urllib import urllib2 class HttpPostHandler(Handler): # Inititalize Handler with url and batch size def __init__(self, config=None): Handler.__init__(self, config) self.metrics = [] self.batch_size = int(self.config.get('batch', 100)) self.url = self.config.get('url') # Join batched metrics and push to url mentioned in config def process(self, metric): self.metrics.append(str(metric)) if len(self.metrics) >= self.batch_size: req = urllib2.Request(self.url, "\n".join(self.metrics)) urllib2.urlopen(req) self.metrics = []
#!/usr/bin/env python
# coding=utf-8

from Handler import Handler
import urllib2


class HttpPostHandler(Handler):
    """Buffers metrics and POSTs each full batch, newline-joined, to a URL."""

    def __init__(self, config=None):
        """Initialise from the base Handler and read batch size and URL."""
        Handler.__init__(self, config)
        self.metrics = []
        self.batch_size = int(self.config.get('batch', 100))
        self.url = self.config.get('url')

    def process(self, metric):
        """Queue one metric; flush the batch to the URL once it is full."""
        self.metrics.append(str(metric))
        # Guard clause: nothing to do until a full batch has accumulated.
        if len(self.metrics) < self.batch_size:
            return
        payload = "\n".join(self.metrics)
        urllib2.urlopen(urllib2.Request(self.url, payload))
        self.metrics = []
Remove unneeded import, fix python path and add coding
Remove unneeded import, fix python path and add coding
Python
mit
signalfx/Diamond,ramjothikumar/Diamond,jriguera/Diamond,anandbhoraskar/Diamond,jriguera/Diamond,Precis/Diamond,jriguera/Diamond,socialwareinc/Diamond,saucelabs/Diamond,acquia/Diamond,dcsquared13/Diamond,stuartbfox/Diamond,hvnsweeting/Diamond,h00dy/Diamond,cannium/Diamond,dcsquared13/Diamond,Ssawa/Diamond,bmhatfield/Diamond,TAKEALOT/Diamond,tellapart/Diamond,krbaker/Diamond,MediaMath/Diamond,Netuitive/netuitive-diamond,MediaMath/Diamond,tuenti/Diamond,timchenxiaoyu/Diamond,tellapart/Diamond,EzyInsights/Diamond,russss/Diamond,tusharmakkar08/Diamond,Netuitive/Diamond,Ormod/Diamond,jriguera/Diamond,codepython/Diamond,sebbrandt87/Diamond,actmd/Diamond,Netuitive/netuitive-diamond,socialwareinc/Diamond,mzupan/Diamond,EzyInsights/Diamond,russss/Diamond,TinLe/Diamond,disqus/Diamond,jumping/Diamond,metamx/Diamond,Basis/Diamond,socialwareinc/Diamond,Nihn/Diamond-1,gg7/diamond,Ensighten/Diamond,joel-airspring/Diamond,jumping/Diamond,zoidbergwill/Diamond,rtoma/Diamond,timchenxiaoyu/Diamond,ceph/Diamond,signalfx/Diamond,Netuitive/Diamond,stuartbfox/Diamond,acquia/Diamond,disqus/Diamond,anandbhoraskar/Diamond,MichaelDoyle/Diamond,cannium/Diamond,sebbrandt87/Diamond,jaingaurav/Diamond,codepython/Diamond,h00dy/Diamond,works-mobile/Diamond,janisz/Diamond-1,krbaker/Diamond,MediaMath/Diamond,thardie/Diamond,eMerzh/Diamond-1,tusharmakkar08/Diamond,Ormod/Diamond,krbaker/Diamond,Netuitive/netuitive-diamond,hvnsweeting/Diamond,sebbrandt87/Diamond,python-diamond/Diamond,mzupan/Diamond,codepython/Diamond,skbkontur/Diamond,tellapart/Diamond,Nihn/Diamond-1,Clever/Diamond,metamx/Diamond,mfriedenhagen/Diamond,russss/Diamond,gg7/diamond,Ssawa/Diamond,Netuitive/Diamond,tellapart/Diamond,stuartbfox/Diamond,datafiniti/Diamond,Precis/Diamond,gg7/diamond,mfriedenhagen/Diamond,TinLe/Diamond,python-diamond/Diamond,jumping/Diamond,thardie/Diamond,eMerzh/Diamond-1,hamelg/Diamond,rtoma/Diamond,Netuitive/netuitive-diamond,mzupan/Diamond,TAKEALOT/Diamond,Basis/Diamond,skbkontur/Diamond,h00dy/Diamond,Ensighte
n/Diamond,jaingaurav/Diamond,joel-airspring/Diamond,hamelg/Diamond,mfriedenhagen/Diamond,joel-airspring/Diamond,szibis/Diamond,Precis/Diamond,Ensighten/Diamond,Slach/Diamond,saucelabs/Diamond,anandbhoraskar/Diamond,ceph/Diamond,Precis/Diamond,actmd/Diamond,Clever/Diamond,cannium/Diamond,cannium/Diamond,h00dy/Diamond,mzupan/Diamond,tuenti/Diamond,saucelabs/Diamond,thardie/Diamond,janisz/Diamond-1,bmhatfield/Diamond,works-mobile/Diamond,zoidbergwill/Diamond,works-mobile/Diamond,datafiniti/Diamond,gg7/diamond,MichaelDoyle/Diamond,bmhatfield/Diamond,tuenti/Diamond,TinLe/Diamond,signalfx/Diamond,sebbrandt87/Diamond,acquia/Diamond,saucelabs/Diamond,janisz/Diamond-1,Clever/Diamond,zoidbergwill/Diamond,actmd/Diamond,janisz/Diamond-1,skbkontur/Diamond,Ensighten/Diamond,codepython/Diamond,MediaMath/Diamond,acquia/Diamond,bmhatfield/Diamond,TAKEALOT/Diamond,MichaelDoyle/Diamond,stuartbfox/Diamond,zoidbergwill/Diamond,works-mobile/Diamond,jumping/Diamond,TAKEALOT/Diamond,krbaker/Diamond,hvnsweeting/Diamond,MichaelDoyle/Diamond,Basis/Diamond,disqus/Diamond,ramjothikumar/Diamond,CYBERBUGJR/Diamond,signalfx/Diamond,szibis/Diamond,CYBERBUGJR/Diamond,jaingaurav/Diamond,ceph/Diamond,Ssawa/Diamond,timchenxiaoyu/Diamond,hvnsweeting/Diamond,CYBERBUGJR/Diamond,actmd/Diamond,EzyInsights/Diamond,hamelg/Diamond,szibis/Diamond,szibis/Diamond,Ssawa/Diamond,rtoma/Diamond,Nihn/Diamond-1,datafiniti/Diamond,dcsquared13/Diamond,rtoma/Diamond,eMerzh/Diamond-1,Slach/Diamond,jaingaurav/Diamond,dcsquared13/Diamond,Nihn/Diamond-1,tusharmakkar08/Diamond,anandbhoraskar/Diamond,Clever/Diamond,Ormod/Diamond,ramjothikumar/Diamond,hamelg/Diamond,Basis/Diamond,russss/Diamond,joel-airspring/Diamond,Slach/Diamond,metamx/Diamond,CYBERBUGJR/Diamond,datafiniti/Diamond,thardie/Diamond,tuenti/Diamond,ramjothikumar/Diamond,Ormod/Diamond,TinLe/Diamond,tusharmakkar08/Diamond,EzyInsights/Diamond,timchenxiaoyu/Diamond,ceph/Diamond,mfriedenhagen/Diamond,skbkontur/Diamond,socialwareinc/Diamond,eMerzh/Diamond-1,Netuitive/Di
amond,python-diamond/Diamond,Slach/Diamond
f076acb05840c361890fbb5ef0c8b43d0de7e2ed
opsdroid/message.py
opsdroid/message.py
""" Class to encapsulate a message """ import logging class Message: """ A message object """ def __init__(self, text, user, room, connector): """ Create object with minimum properties """ self.text = text self.user = user self.room = room self.connector = connector def respond(self, text): """ Respond to this message using the connector it was created by """ self.text = text self.connector.respond(self)
""" Class to encapsulate a message """

import logging


class Message:
    """A single chat message, bound to the connector that produced it."""

    def __init__(self, text, user, room, connector):
        """Store the minimum message properties.

        `regex` starts as None and is filled in later by the matcher.
        """
        self.text = text
        self.user = user
        self.room = room
        self.connector = connector
        self.regex = None

    def respond(self, text):
        """Reply via the originating connector, reusing this message object."""
        self.text = text
        self.connector.respond(self)
Make regex a None property
Make regex a None property
Python
apache-2.0
FabioRosado/opsdroid,jacobtomlinson/opsdroid,opsdroid/opsdroid
c197bf432655ca051ff4fb672cd41e876d539990
pipeline/api/api.py
pipeline/api/api.py
import datetime import json import falcon from pipeline.api import models, schemas def json_serializer(obj): if isinstance(obj, datetime.datetime): return obj.isoformat() raise TypeError('{} is not JSON serializable'.format(type(obj))) def json_dump(data): return json.dumps(data, default=json_serializer) stories_schema = schemas.StorySchema(many=True) story_schema = schemas.StorySchema() class StoriesResource: def on_get(self, req, resp): stories = models.Story.select() result = stories_schema.dump(stories) resp.body = json_dump(result.data) models.connect() api = falcon.API() api.add_route('/stories', StoriesResource())
import datetime
import json

import falcon

from pipeline.api import models, schemas


def json_serializer(obj):
    """json.dumps fallback: render datetimes as ISO-8601 strings."""
    if isinstance(obj, datetime.datetime):
        return obj.isoformat()
    raise TypeError('{} is not JSON serializable'.format(type(obj)))


def json_dump(data):
    """Serialize data to JSON, supporting datetime values."""
    return json.dumps(data, default=json_serializer)


def json_load(data):
    """Parse a JSON request body, translating parse errors into HTTP 400."""
    try:
        return json.loads(data)
    except json.decoder.JSONDecodeError:
        raise falcon.HTTPBadRequest(None, 'invalid JSON')


# Module-level schema instances are reused across requests.
stories_schema = schemas.StorySchema(many=True)
story_schema = schemas.StorySchema()


class StoriesResource:
    """Collection endpoint for stories: list (GET) and create (POST)."""

    def on_get(self, req, resp):
        # Serialize every story through the many=True schema.
        stories = models.Story.select()
        result = stories_schema.dump(stories)
        resp.body = json_dump(result.data)

    def on_post(self, req, resp):
        # Validate the request body against the story schema; schema errors
        # become an HTTP 400 carrying the error details.
        data = json_load(req.stream.read().decode('utf-8'))
        data, errors = story_schema.load(data)
        if errors:
            raise falcon.HTTPBadRequest(None, errors)
        story = models.Story.create(**data)
        result = story_schema.dump(story)
        resp.body = json_dump(result.data)


# Connect to the database and wire up routes at import time.
models.connect()

api = falcon.API()
api.add_route('/stories', StoriesResource())
Allow creating and viewing stories
Allow creating and viewing stories Closes #1
Python
mit
thepoly/Pipeline,thepoly/Pipeline,thepoly/Pipeline,thepoly/Pipeline,thepoly/Pipeline
220b6a9fee0f307d4de1e48b29093812f7dd10ec
var/spack/repos/builtin/packages/m4/package.py
var/spack/repos/builtin/packages/m4/package.py
from spack import * class M4(Package): """GNU M4 is an implementation of the traditional Unix macro processor.""" homepage = "https://www.gnu.org/software/m4/m4.html" url = "ftp://ftp.gnu.org/gnu/m4/m4-1.4.17.tar.gz" version('1.4.17', 'a5e9954b1dae036762f7b13673a2cf76') depends_on('libsigsegv') def install(self, spec, prefix): configure("--prefix=%s" % prefix) make() make("install")
from spack import * class M4(Package): """GNU M4 is an implementation of the traditional Unix macro processor.""" homepage = "https://www.gnu.org/software/m4/m4.html" url = "ftp://ftp.gnu.org/gnu/m4/m4-1.4.17.tar.gz" version('1.4.17', 'a5e9954b1dae036762f7b13673a2cf76') variant('sigsegv', default=True, description="Build the libsigsegv dependency") depends_on('libsigsegv', when='+sigsegv') def install(self, spec, prefix): configure("--prefix=%s" % prefix) make() make("install")
Make libsigsegv an optional dependency
Make libsigsegv an optional dependency
Python
lgpl-2.1
lgarren/spack,mfherbst/spack,LLNL/spack,TheTimmy/spack,EmreAtes/spack,LLNL/spack,tmerrick1/spack,krafczyk/spack,TheTimmy/spack,TheTimmy/spack,lgarren/spack,skosukhin/spack,matthiasdiener/spack,TheTimmy/spack,mfherbst/spack,skosukhin/spack,krafczyk/spack,lgarren/spack,mfherbst/spack,matthiasdiener/spack,matthiasdiener/spack,mfherbst/spack,krafczyk/spack,iulian787/spack,iulian787/spack,matthiasdiener/spack,LLNL/spack,LLNL/spack,tmerrick1/spack,lgarren/spack,EmreAtes/spack,EmreAtes/spack,mfherbst/spack,lgarren/spack,LLNL/spack,TheTimmy/spack,iulian787/spack,krafczyk/spack,EmreAtes/spack,iulian787/spack,tmerrick1/spack,skosukhin/spack,krafczyk/spack,iulian787/spack,skosukhin/spack,tmerrick1/spack,tmerrick1/spack,skosukhin/spack,EmreAtes/spack,matthiasdiener/spack
b38647ef390ed6c78c2d55d706bac2f6a396ad39
errors.py
errors.py
# ## PyMoira client library ## ## This file contains the Moira-related errors. # import moira_constants class MoiraBaseError(Exception): """Any exception thrown by the library is inhereted from this""" pass class MoiraConnectionError(MoiraBaseError): """An error which prevents the client from having or continuing a meaningful dialogue with a server (parsing failure, connection failure, etc)""" pass class MoiraError(MoiraBaseError): """An error returned from Moira server itself which has a Moira error code.""" def __init__(self, code): if code in moira_constants.errors: MoiraBaseError.__init__(self, "Moira error: %s" % moira_constants.errors[code]) else: MoiraBaseError.__init__(self, "Unknown Moira error (code %i)" % code) class MoiraUnavailableError(MoiraBaseError): """An error raised in case when Moira MOTD is not empty.""" pass
# ## PyMoira client library ## ## This file contains the Moira-related errors. # import moira_constants class MoiraBaseError(Exception): """Any exception thrown by the library is inhereted from this""" pass class MoiraConnectionError(MoiraBaseError): """An error which prevents the client from having or continuing a meaningful dialogue with a server (parsing failure, connection failure, etc)""" pass class MoiraError(MoiraBaseError): """An error returned from Moira server itself which has a Moira error code.""" def __init__(self, code): if code in moira_constants.errors: MoiraBaseError.__init__(self, "Moira error: %s" % moira_constants.errors[code]) else: MoiraBaseError.__init__(self, "Unknown Moira error (code %i)" % code) class MoiraUnavailableError(MoiraBaseError): """An error raised in case when Moira MOTD is not empty.""" pass class MoiraUserError(MoiraBaseError): """An error related to Moira but not returned from the server.""" pass
Introduce a new error class.
Introduce a new error class.
Python
mit
vasilvv/pymoira
95e347ae4086d05aadf91a393b856961b34026a5
website_field_autocomplete/controllers/main.py
website_field_autocomplete/controllers/main.py
# -*- coding: utf-8 -*- # Copyright 2016 LasLabs Inc. # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). import json from openerp import http from openerp.http import request from openerp.addons.website.controllers.main import Website class Website(Website): @http.route( '/website/field_autocomplete/<string:model>', type='http', auth='public', methods=['GET'], website=True, ) def _get_field_autocomplete(self, model, **kwargs): """ Return json autocomplete data """ domain = json.loads(kwargs.get('domain', "[]")) fields = json.loads(kwargs.get('fields', "[]")) limit = kwargs.get('limit', None) res = self._get_autocomplete_data(model, domain, fields, limit) return json.dumps(res.values()) def _get_autocomplete_data(self, model, domain, fields, limit=None): """ Gets and returns raw record data Params: model: Model name to query on domain: Search domain fields: List of fields to get limit: Limit results to Returns: Dict of record dicts, keyed by ID """ res = {} if limit: limit = int(limit) self.record_ids = request.env[model].search(domain, limit=limit) for rec_id in self.record_ids: res[rec_id.id] = { k: getattr(rec_id, k, None) for k in fields } return res
# -*- coding: utf-8 -*- # Copyright 2016 LasLabs Inc. # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). import json from openerp import http from openerp.http import request from openerp.addons.website.controllers.main import Website class Website(Website): @http.route( '/website/field_autocomplete/<string:model>', type='http', auth='public', methods=['GET'], website=True, ) def _get_field_autocomplete(self, model, **kwargs): """ Return json autocomplete data """ domain = json.loads(kwargs.get('domain', "[]")) fields = json.loads(kwargs.get('fields', "[]")) limit = kwargs.get('limit', None) res = self._get_autocomplete_data(model, domain, fields, limit) return json.dumps(res.values()) def _get_autocomplete_data(self, model, domain, fields, limit=None): """ Gets and returns raw record data Params: model: Model name to query on domain: Search domain fields: List of fields to get limit: Limit results to Returns: Dict of record dicts, keyed by ID """ if limit: limit = int(limit) res = request.env[model].search_read( domain, fields, limit=limit ) return {r['id']: r for r in res}
Use search_read * Use search_read in controller data getter, instead of custom implementation
[FIX] website_field_autocomplete: Use search_read * Use search_read in controller data getter, instead of custom implementation
Python
agpl-3.0
Tecnativa/website,nicolas-petit/website,khaeusler/website,JayVora-SerpentCS/website,RoelAdriaans-B-informed/website,JayVora-SerpentCS/website,khaeusler/website,nicolas-petit/website,Tecnativa/website,khaeusler/website,Tecnativa/website,RoelAdriaans-B-informed/website,nicolas-petit/website,JayVora-SerpentCS/website,RoelAdriaans-B-informed/website,RoelAdriaans-B-informed/website